text
stringlengths
2
14k
meta
dict
(function($K) { $K.add('module', 'autocomplete', { init: function(app, context) { this.app = app; this.$doc = app.$doc; this.$win = app.$win; this.$body = app.$body; this.animate = app.animate; // defaults var defaults = { url: false, min: 2, labelClass: false, target: false, param: false }; // context this.context = context; this.params = context.getParams(defaults); this.$element = context.getElement(); this.$target = context.getTarget(); }, start: function() { this._build(); this.timeout = null; this.$element.on('keyup.kube.autocomplete', this._open.bind(this)); }, stop: function() { this.$box.remove(); this.$element.off('.kube.autocomplete'); this.$doc.off('.kube.autocomplete'); this.$win.off('.kube.autocomplete'); }, // private _build: function() { this.$box = $K.dom('<div />'); this.$box.addClass('autocomplete'); this.$box.addClass('is-hidden'); this.$body.append(this.$box); if (this.$target && !this._isInputTarget()) { this.$target.addClass('autocomplete-labels'); var $closes = this.$target.find('.close'); $closes.on('click', this._removeLabel.bind(this)); } }, _open: function(e) { if (e) e.preventDefault(); clearTimeout(this.timeout); var value = this.$element.val(); if (value.length >= this.params.min) { this._resize(); this.$win.on('resize.kube.autocomplete', this._resize.bind(this)); this.$doc.on('click.kube.autocomplete', this._close.bind(this)); this.$box.addClass('is-open'); this._listen(e); } else { this._close(e); } }, _close: function(e) { if (e) e.preventDefault(); this.$box.removeClass('is-open'); this.$box.addClass('is-hidden'); this.$doc.off('.kube.autocomplete'); this.$win.off('.kube.autocomplete'); }, _getPlacement: function(pos, height) { return ((this.$doc.height() - (pos.top + height)) < this.$box.height()) ? 'top' : 'bottom'; }, _resize: function() { this.$box.width(this.$element.width()); }, _getParamName: function() { return (this.params.param) ? 
this.params.param : this.$element.attr('name'); }, _getTargetName: function() { var name = this.$target.attr('data-name'); return (name) ? name : this.$target.attr('id'); }, _lookup: function() { var data = this._getParamName() + '=' + this.$element.val(); $K.ajax.post({ url: this.params.url, data: data, success: this._complete.bind(this) }); }, _complete: function(json) { this.$box.html(''); if (json.length === 0) return this._close(); for (var i = 0; i < json.length; i++) { var $item = $K.dom('<a>'); $item.attr('href', '#'); $item.attr('rel', json[i].id); $item.html(json[i].name); $item.on('click', this._set.bind(this)); this.$box.append($item); } var pos = this.$element.offset(); var height = this.$element.height(); var width = this.$element.width(); var placement = this._getPlacement(pos, height); var top = (placement === 'top') ? (pos.top - this.$box.height() - height) : (pos.top + height); this.$box.css({ width: width + 'px', top: top + 'px', left: pos.left + 'px' }); this.$box.removeClass('is-hidden'); }, _listen: function(e) { switch(e.which) { case 40: // down e.preventDefault(); this._select('next'); break; case 38: // up e.preventDefault(); this._select('prev'); break; case 13: // enter e.preventDefault(); this._set(); break; case 27: // esc this._close(e); break; default: this.timeout = setTimeout(this._lookup.bind(this), 300); break; } }, _select: function(type) { var $links = this.$box.find('a'); var $active = this.$box.find('.is-active'); $links.removeClass('is-active'); var $item = this._selectItem($active, $links, type); $item.addClass('is-active'); }, _selectItem: function($active, $links, type) { var $item; var isActive = ($active.length !== 0); var size = (type === 'next') ? 
0 : ($links.length - 1); if (isActive) { $item = $active[type](); } if (!isActive || !$item || $item.length === 0) { $item = $links.eq(size); } return $item; }, _set: function(e) { var $active = this.$box.find('.is-active'); if (e) { e.preventDefault(); $active = $K.dom(e.target); } var id = $active.attr('rel'); var value = $active.html(); if (this.$target.length !== 0) { if (this._isInputTarget()) { this.$target.val(value); } else { var $added = this.$target.find('[data-id="' + id + '"]'); if ($added.length === 0) { this._addLabel(id, value); } } this.$element.val(''); } else { this.$element.val(value); } this.$element.focus(); this.app.broadcast('autocomplete.set', this, value); this._close(); }, _addLabel: function(id, name) { var $label = $K.dom('<span>'); $label.addClass('label'); $label.attr('data-id', id); $label.text(name + ' ');
{ "pile_set_name": "Github" }
/* [auto_generated] boost/numeric/odeint/util/ublas_wrapper.hpp [begin_description] Resizing for ublas::vector and ublas::matrix [end_description] Copyright 2011-2013 Mario Mulansky Copyright 2011-2013 Karsten Ahnert Distributed under the Boost Software License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */ #ifndef BOOST_NUMERIC_ODEINT_UTIL_UBLAS_WRAPPER_HPP_INCLUDED #define BOOST_NUMERIC_ODEINT_UTIL_UBLAS_WRAPPER_HPP_INCLUDED #include <boost/type_traits/integral_constant.hpp> #include <boost/numeric/ublas/vector.hpp> #include <boost/numeric/ublas/matrix.hpp> #include <boost/numeric/ublas/lu.hpp> #include <boost/numeric/ublas/vector_expression.hpp> #include <boost/numeric/ublas/matrix_expression.hpp> #include <boost/numeric/odeint/algebra/vector_space_algebra.hpp> #include <boost/numeric/odeint/algebra/default_operations.hpp> #include <boost/numeric/odeint/util/is_resizeable.hpp> #include <boost/numeric/odeint/util/state_wrapper.hpp> /* extend ublas by a few operations */ /* map norm_inf onto reduce( v , default_operations::maximum ) */ namespace boost { namespace numeric { namespace odeint { template< typename T , typename A > struct vector_space_norm_inf< boost::numeric::ublas::vector<T,A> > { typedef T result_type; result_type operator()( const boost::numeric::ublas::vector<T,A> &x ) const { return boost::numeric::ublas::norm_inf( x ); } }; template< class T , class L , class A > struct vector_space_norm_inf< boost::numeric::ublas::matrix<T,L,A> > { typedef T result_type; result_type operator()( const boost::numeric::ublas::matrix<T,L,A> &x ) const { return boost::numeric::ublas::norm_inf( x ); } }; } } } /* additional operations: * abs( v ) * v / w * a + v */ namespace boost { namespace numeric { namespace ublas { // elementwise abs - calculates absolute values of the elements template<class T> struct scalar_abs: public scalar_unary_functor<T> { typedef typename scalar_unary_functor<T>::value_type 
value_type; typedef typename scalar_unary_functor<T>::argument_type argument_type; typedef typename scalar_unary_functor<T>::result_type result_type; static BOOST_UBLAS_INLINE result_type apply (argument_type t) { using std::abs; return abs (t); } }; // (abs v) [i] = abs (v [i]) template<class E> BOOST_UBLAS_INLINE typename vector_unary_traits<E, scalar_abs<typename E::value_type> >::result_type abs (const vector_expression<E> &e) { typedef typename vector_unary_traits<E, scalar_abs<typename E::value_type> >::expression_type expression_type; return expression_type (e ()); } // (abs m) [i] = abs (m [i]) template<class E> BOOST_UBLAS_INLINE typename matrix_unary1_traits<E, scalar_abs<typename E::value_type> >::result_type abs (const matrix_expression<E> &e) { typedef typename matrix_unary1_traits<E, scalar_abs<typename E::value_type> >::expression_type expression_type; return expression_type (e ()); } // elementwise division (v1 / v2) [i] = v1 [i] / v2 [i] template<class E1, class E2> BOOST_UBLAS_INLINE typename vector_binary_traits<E1, E2, scalar_divides<typename E1::value_type, typename E2::value_type> >::result_type operator / (const vector_expression<E1> &e1, const vector_expression<E2> &e2) { typedef typename vector_binary_traits<E1, E2, scalar_divides<typename E1::value_type, typename E2::value_type> >::expression_type expression_type; return expression_type (e1 (), e2 ()); } // elementwise division (m1 / m2) [i] = m1 [i] / m2 [i] template<class E1, class E2> BOOST_UBLAS_INLINE typename matrix_binary_traits<E1, E2, scalar_divides<typename E1::value_type, typename E2::value_type> >::result_type operator / (const matrix_expression<E1> &e1, const matrix_expression<E2> &e2) { typedef typename matrix_binary_traits<E1, E2, scalar_divides<typename E1::value_type, typename E2::value_type> >::expression_type expression_type; return expression_type (e1 (), e2 ()); } // addition with scalar // (t + v) [i] = t + v [i] template<class T1, class E2> BOOST_UBLAS_INLINE 
typename enable_if< is_convertible<T1, typename E2::value_type >, typename vector_binary_scalar1_traits<const T1, E2, scalar_plus<T1, typename E2::value_type> >::result_type >::type operator + (const T1 &e1, const vector_expression<E2> &e2) { typedef typename vector_binary_scalar1_traits<const T1, E2, scalar_plus<T1, typename E2::value_type> >::expression_type expression_type; return expression_type (e1, e2 ()); } // addition with scalar // (t + m) [i] = t + m [i] template<class T1, class E2> BOOST_UBLAS_INLINE typename enable_if< is_convertible<T1, typename E2::value_type >, typename matrix_binary_scalar1_traits<const T1, E2, scalar_plus<T1, typename E2::value_type> >::result_type >::type operator + (const T1 &e1, const matrix_expression<E2> &e2) { typedef typename matrix_binary_scalar1_traits<const T1, E2, scalar_plus<T1, typename E2::value_type> >::expression_type expression_type; return expression_type (e1, e2 ()); } } } } /* add resize functionality */ namespace boost { namespace numeric { namespace odeint { /* * resizeable specialization for boost::numeric::ublas::vector */ template< class T , class A > struct is_resizeable< boost::numeric::ublas::vector< T , A > > { typedef boost::true_type type; const static bool value = type::value; }; /* * resizeable specialization for boost::numeric::ublas::matrix */ template< class T , class L , class A > struct is_resizeable< boost::numeric::ublas::matrix< T , L , A > > { typedef boost::true_type type; const static bool value = type::value; }; /* * resizeable specialization for boost::numeric::ublas::permutation_matrix */ template< class T , class A > struct is_resizeable< boost::numeric::ublas::permutation_matrix< T , A > > { typedef boost::true_type type;
{ "pile_set_name": "Github" }
require "backup/config/dsl"
require "backup/config/helpers"

module Backup
  # Holds the gem-wide configuration state: the resolved root/config/data/tmp
  # paths and the logic for loading the user's +config.rb+ and model files.
  # All state lives on the singleton (class << self) and is initialized by
  # +reset!+ when this file is loaded.
  module Config
    class Error < Backup::Error; end

    # Path endings joined onto @root_path when the user does not override them.
    DEFAULTS = {
      config_file: "config.rb",
      data_path: ".data",
      tmp_path: ".tmp"
    }

    class << self
      include Utilities::Helpers

      attr_reader :user, :root_path, :config_file, :data_path, :tmp_path

      # Loads the user's +config.rb+ and all model files.
      #
      # Path options are applied in three passes so that command-line options
      # win over config-file options, which win over the defaults.
      # Raises Error if the config file is missing or does not carry the
      # version banner for this major version of Backup.
      def load(options = {})
        update(options) # from the command line

        unless File.exist?(config_file)
          raise Error, "Could not find configuration file: '#{config_file}'."
        end

        config = File.read(config_file)
        version = Backup::VERSION.split(".").first
        # The first line of a valid config.rb is e.g. "# Backup v5.x Configuration".
        unless config =~ /^# Backup v#{ version }\.x Configuration$/
          raise Error, <<-EOS
            Invalid Configuration File
            The configuration file at '#{config_file}'
            does not appear to be a Backup v#{version}.x configuration file.
            If you have upgraded to v#{version}.x from a previous version,
            you need to upgrade your configuration file.
            Please see the instructions for upgrading in the Backup documentation.
          EOS
        end

        dsl = DSL.new
        dsl.instance_eval(config, config_file)

        update(dsl._config_options) # from config.rb
        update(options) # command line takes precedence

        # Evaluate every model definition found next to the config file.
        Dir[File.join(File.dirname(config_file), "models", "*.rb")].each do |model|
          dsl.instance_eval(File.read(model), model)
        end
      end

      # Memoized machine hostname, obtained by shelling out to `hostname`.
      def hostname
        @hostname ||= run(utility(:hostname))
      end

      private

      # If :root_path is set in the options, all paths will be updated.
      # Otherwise, only the paths given will be updated.
      def update(options = {})
        root_path = options[:root_path].to_s.strip
        new_root = root_path.empty? ? false : set_root_path(root_path)
        DEFAULTS.each do |name, ending|
          set_path_variable(name, options[name], ending, new_root)
        end
      end

      # Sets the @root_path to the given +path+ and returns it.
      # Raises an error if the given +path+ does not exist.
      def set_root_path(path)
        # allows #reset! to set the default @root_path,
        # then use #update to set all other paths,
        # without requiring that @root_path exist.
        return @root_path if path == @root_path

        path = File.expand_path(path)
        unless File.directory?(path)
          raise Error, <<-EOS
            Root Path Not Found
            When specifying a --root-path, the path must exist.
            Path was: #{path}
          EOS
        end
        @root_path = path
      end

      # Resolves one path setting (@config_file, @data_path or @tmp_path).
      # +path+ is the user-supplied value (may be empty), +ending+ the default
      # suffix from DEFAULTS, +root_path+ the new root or false.
      def set_path_variable(name, path, ending, root_path)
        # strip any trailing '/' in case the user supplied this as part of
        # an absolute path, so we can match it against File.expand_path()
        path = path.to_s.sub(/\/\s*$/, "").lstrip
        new_path = false
        # If no path is given, the variable will not be set/updated
        # unless a root_path was given. In which case the value will
        # be updated with our default ending.
        if path.empty?
          new_path = File.join(root_path, ending) if root_path
        else
          # When a path is given, the variable will be set/updated.
          # If the path is relative, it will be joined with root_path (if given),
          # or expanded relative to PWD.
          new_path = File.expand_path(path)
          unless path == new_path
            new_path = File.join(root_path, path) if root_path
          end
        end
        instance_variable_set(:"@#{name}", new_path) if new_path
      end

      # Restores the default user and ~/Backup root, then derives all other
      # paths from that root. Called once at load time (below).
      def reset!
        @user = ENV["USER"] || Etc.getpwuid.name
        @root_path = File.join(File.expand_path(ENV["HOME"] || ""), "Backup")
        update(root_path: @root_path)
      end
    end

    reset! # set defaults on load
  end
end
{ "pile_set_name": "Github" }
// CodeContracts // // Copyright (c) Microsoft Corporation // // All rights reserved. // // MIT License // // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
using System;
using System.Collections.Generic;
using System.Text;
using System.Diagnostics.Contracts;

namespace Tests.Sources
{
  // Abstract class whose contracts live in GenericAbstractClassContracts<,>.
  // Exercises Code Contracts support for generic abstract classes where the
  // first type parameter is constrained to derive from the second.
  [ContractClass(typeof(GenericAbstractClassContracts<,>))]
  public abstract class GenericAbstractClass<A,B> where A: class,B
  {
    public abstract bool IsMatch(B b, A a);
    public abstract B ReturnFirst(B[] args, A match, bool behave);
    public abstract A[][] Collection(int x, int y);
    public abstract A FirstNonNullMatch(bool behave, A[] elems);
    public abstract C[] GenericMethod<C>(A[] elems);
  }

  // Contract class: method bodies only declare Requires/Ensures; they are
  // never executed directly, so they throw or return default values.
  [ContractClassFor(typeof(GenericAbstractClass<,>))]
  internal abstract class GenericAbstractClassContracts<A,B> : GenericAbstractClass<A,B> where A : class, B
  {
    public override bool IsMatch(B b, A a)
    {
      throw new NotImplementedException();
    }

    public override B ReturnFirst(B[] args, A match, bool behave)
    {
      Contract.Requires(args != null);
      Contract.Requires(args.Length > 0);
      // Result must be one of the args and must match 'match'.
      Contract.Ensures(Contract.Exists(0, args.Length, i => args[i].Equals(Contract.Result<B>()) && IsMatch(args[i], match)));
      return default(B);
    }

    public override A[][] Collection(int x, int y)
    {
      // Every nested array is non-null, has length y, and holds non-null elements.
      Contract.Ensures(Contract.ForAll(Contract.Result<A[][]>(), nested => nested != null && nested.Length == y && Contract.ForAll(nested, elem => elem != null)));
      Contract.Ensures(Contract.ForAll(0, x, index => Contract.Result<A[][]>()[index] != null));
      throw new NotImplementedException();
    }

    public override A FirstNonNullMatch(bool behave, A[] elems)
    {
      // meaningless, but testing our closures, in particular inner one with a static closure referring to result.
      Contract.Ensures(Contract.Exists(0, elems.Length, index => elems[index] != null && elems[index] == Contract.Result<A>() && Contract.ForAll(0, index, prior => Contract.Result<A>() != null)));
      // See if we are properly sharing fields.
      Contract.Ensures(Contract.Exists(0, elems.Length, index => elems[index] != null && elems[index] == Contract.Result<A>() && Contract.ForAll(0, index, prior => Contract.Result<A>() != null)));
      // See if we are properly sharing fields.
      Contract.Ensures(Contract.Exists(0, elems.Length, index => elems[index] != null && Contract.ForAll(0, index, prior => Contract.Result<A>() != null)));
      throw new NotImplementedException();
    }

    public override C[] GenericMethod<C>(A[] elems)
    {
      Contract.Requires(elems != null);
      Contract.Ensures(Contract.Result<C[]>() != null);
      Contract.Ensures(Contract.ForAll(Contract.Result<C[]>(), resultElem => Contract.Exists(elems, orig => resultElem.Equals(orig))));
      throw new NotImplementedException();
    }
  }

  // Concrete implementation used by the test driver. Some methods contain
  // deliberate contract violations ("behave badly") to trigger failures.
  public class ImplForGenericAbstractClass : GenericAbstractClass<string, string>
  {
    public override bool IsMatch(string b, string a)
    {
      return b == a;
    }

    public override string ReturnFirst(string[] args, string match, bool behave)
    {
      for (int i = 0; i < args.Length; i++)
      {
        if (IsMatch(args[i], match)) return args[i];
      }
      return default(string);
    }

    public override string[][] Collection(int x, int y)
    {
      var result = new string[x][];
      for (int i=0; i<result.Length; i++)
      {
        result[i] = new string[y];
        for (int j = 0; j < y; j++)
        {
          if (x == 5 && y == 5 && i == 4 && j == 4)
          {
            // behave badly
            continue;
          }
          result[i][j] = "Foo";
        }
      }
      return result;
    }

    public override string FirstNonNullMatch(bool behave, string[] elems)
    {
      // When behave is false, returns a value not drawn from elems, which
      // intentionally violates the Exists postcondition.
      if (!behave) return "foobar";
      for (int i = 0; i < elems.Length; i++)
      {
        if (elems[i] != null) return elems[i];
      }
      return null;
    }

    public override C[] GenericMethod<C>(string[] elems)
    {
      List<C> result = new List<C>();
      foreach (var elem in elems)
      {
        if (elem is C)
        {
          result.Add((C)(object)elem);
        }
      }
      if (typeof(C) == typeof(int))
      {
        // behave badly
        result.Add((C)(object)55);
      }
      return result.ToArray();
    }
  }

  // Test entry point; the harness checks that the expected postcondition
  // failure (recorded in the fields below) is raised by Run().
  partial class TestMain
  {
    partial void Run()
    {
      var i = new ImplForGenericAbstractClass();
      i.FirstNonNullMatch(behave, new string[]{null, "a",null,"b"});
    }

    public ContractFailureKind NegativeExpectedKind = ContractFailureKind.Postcondition;
    public string NegativeExpectedCondition = "Contract.Exists(0, elems.Length, index => elems[index] != null && elems[index] == Contract.Result<A>() && Contract.ForAll(0, index, prior => Contract.Result<A>() != null))";
  }
}
{ "pile_set_name": "Github" }
<textarea name={{ UEditor.name }} id=id_{{ UEditor.name }} style="display:inline-block;width:{{ UEditor.width }}px;{{ UEditor.css }}">{{UEditor.value}}</textarea> <script type="text/javascript"> var id_{{ UEditor.name }}= new baidu.editor.ui.Editor({ "UEDITOR_HOME_URL":"{{ STATIC_URL }}ueditor/", {% ifnotequal UEditor.toolbars None %}"toolbars":{{ UEditor.toolbars|safe }},{% endifnotequal %} "imageUrl":"/ueditor/ImageUp/{{ UEditor.imagePath }}", "imagePath":"{{ MEDIA_URL }}{{ UEditor.imagePath }}", "scrawlUrl":"/ueditor/scrawlUp/{{ UEditor.scrawlPath }}", "scrawlPath":"{{ MEDIA_URL }}{{ UEditor.scrawlPath }}", "imageManagerUrl":"/ueditor/ImageManager/{{ UEditor.imageManagerPath }}", "imageManagerPath":"{{ MEDIA_URL }}{{ UEditor.imageManagerPath }}", "catcherUrl":"/ueditor/RemoteCatchImage/{{ UEditor.imagePath }}", "catcherPath":"{{ MEDIA_URL }}{{ UEditor.imagePath }}", "fileUrl":"/ueditor/FileUp/{{ UEditor.filePath }}", "filePath":"{{ MEDIA_URL }}{{ UEditor.filePath }}", "getMovieUrl":"/ueditor/SearchMovie/", "sourceEditorFirst":{{ UEditor.sourceEditorFirst }} {% ifnotequal UEditor.options '' %},{{ UEditor.options|safe }}{% endifnotequal %} }); id_{{UEditor.name}}.render('id_{{ UEditor.name }}'); id_{{UEditor.name}}.addListener('ready',function(){ id_{{UEditor.name}}.setHeight({{ UEditor.height }}); }); </script>
{ "pile_set_name": "Github" }
package problem0958 import ( "testing" "github.com/aQuaYi/LeetCode-in-Go/kit" "github.com/stretchr/testify/assert" ) // tcs is testcase slice var tcs = []struct { root []int ans bool }{ { []int{1, 2, 3, 4, 5, 6}, true, }, { []int{1, 2, 3, 4, 5, kit.NULL, 7}, false, }, // 可以有多个 testcase } func Test_isCompleteTree(t *testing.T) { ast := assert.New(t) for _, tc := range tcs { root := kit.Ints2TreeNode(tc.root) ast.Equal(tc.ans, isCompleteTree(root), "输入:%v", tc) } } func Benchmark_isCompleteTree(b *testing.B) { for i := 0; i < b.N; i++ { for _, tc := range tcs { root := kit.Ints2TreeNode(tc.root) isCompleteTree(root) } } }
{ "pile_set_name": "Github" }
require('../../modules/es6.number.is-finite'); module.exports = require('../../modules/$.core').Number.isFinite;
{ "pile_set_name": "Github" }
/* * CDDL HEADER START * * The contents of this file are subject to the terms of the * Common Development and Distribution License, Version 1.0 only * (the "License"). You may not use this file except in compliance * with the License. * * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE * or http://www.opensolaris.org/os/licensing. * See the License for the specific language governing permissions * and limitations under the License. * * When distributing Covered Code, include this CDDL HEADER in each * file and include the License file at usr/src/OPENSOLARIS.LICENSE. * If applicable, add the following below this CDDL HEADER, with the * fields enclosed by brackets "[]" replaced with your own identifying * information: Portions Copyright [yyyy] [name of copyright owner] * * CDDL HEADER END */ /* * Copyright 2004 Sun Microsystems, Inc. All rights reserved. * Use is subject to license terms. */ #ifndef _MACH_SDT_H #define _MACH_SDT_H #include <mach/machine/sdt.h> #endif /* _MACH_SDT_H */
{ "pile_set_name": "Github" }
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef BASE_LOCATION_H_
#define BASE_LOCATION_H_

#include <stddef.h>

#include <cassert>
#include <functional>
#include <string>

#include "base/base_export.h"
#include "base/debug/debugging_buildflags.h"
#include "base/hash/hash.h"
#include "build/build_config.h"

namespace base {

#if defined(__has_builtin)
// Clang allows detection of these builtins.
#define SUPPORTS_LOCATION_BUILTINS                                       \
  (__has_builtin(__builtin_FUNCTION) && __has_builtin(__builtin_FILE) && \
   __has_builtin(__builtin_LINE))
#elif defined(COMPILER_GCC) && __GNUC__ >= 7
// GCC has supported these for a long time, but they point at the function
// declaration in the case of default arguments, rather than at the call site.
#define SUPPORTS_LOCATION_BUILTINS 1
#else
#define SUPPORTS_LOCATION_BUILTINS 0
#endif

// Location provides basic info where of an object was constructed, or was
// significantly brought to life.
class BASE_EXPORT Location {
 public:
  Location();
  Location(const Location& other);

  // Only initializes the file name and program counter, the source information
  // will be null for the strings, and -1 for the line number.
  // TODO(http://crbug.com/760702) remove file name from this constructor.
  Location(const char* file_name, const void* program_counter);

  // Constructor should be called with a long-lived char*, such as __FILE__.
  // It assumes the provided value will persist as a global constant, and it
  // will not make a copy of it.
  Location(const char* function_name,
           const char* file_name,
           int line_number,
           const void* program_counter);

  // Comparator for hash map insertion. The program counter should uniquely
  // identify a location.
  bool operator==(const Location& other) const {
    return program_counter_ == other.program_counter_;
  }

  // Returns true if there is source code location info. If this is false,
  // the Location object only contains a program counter or is
  // default-initialized (the program counter is also null).
  bool has_source_info() const { return function_name_ && file_name_; }

  // Will be nullptr for default initialized Location objects and when source
  // names are disabled.
  const char* function_name() const { return function_name_; }

  // Will be nullptr for default initialized Location objects and when source
  // names are disabled.
  const char* file_name() const { return file_name_; }

  // Will be -1 for default initialized Location objects and when source names
  // are disabled.
  int line_number() const { return line_number_; }

  // The address of the code generating this Location object. Should always be
  // valid except for default initialized Location objects, which will be
  // nullptr.
  const void* program_counter() const { return program_counter_; }

  // Converts to the most user-readable form possible. If function and filename
  // are not available, this will return "pc:<hex address>".
  std::string ToString() const;

  static Location CreateFromHere(const char* file_name);
  static Location CreateFromHere(const char* function_name,
                                 const char* file_name,
                                 int line_number);

  // Current() captures the call site via compiler builtins when available;
  // the argument list depends on build configuration (see macros above).
#if SUPPORTS_LOCATION_BUILTINS && BUILDFLAG(ENABLE_LOCATION_SOURCE)
  static Location Current(const char* function_name = __builtin_FUNCTION(),
                          const char* file_name = __builtin_FILE(),
                          int line_number = __builtin_LINE());
#elif SUPPORTS_LOCATION_BUILTINS
  static Location Current(const char* file_name = __builtin_FILE());
#else
  static Location Current();
#endif

 private:
  // Pointers are expected to reference long-lived global constants
  // (e.g. __FILE__/__func__); no ownership is taken.
  const char* function_name_ = nullptr;
  const char* file_name_ = nullptr;
  int line_number_ = -1;
  const void* program_counter_ = nullptr;
};

BASE_EXPORT const void* GetProgramCounter();

// The macros defined here will expand to the current function.
#if BUILDFLAG(ENABLE_LOCATION_SOURCE)

// Full source information should be included.
#define FROM_HERE FROM_HERE_WITH_EXPLICIT_FUNCTION(__func__)
#define FROM_HERE_WITH_EXPLICIT_FUNCTION(function_name) \
  ::base::Location::CreateFromHere(function_name, __FILE__, __LINE__)

#else

// TODO(http://crbug.com/760702) remove the __FILE__ argument from these calls.
#define FROM_HERE ::base::Location::CreateFromHere(__FILE__)
#define FROM_HERE_WITH_EXPLICIT_FUNCTION(function_name) \
  ::base::Location::CreateFromHere(function_name, __FILE__, -1)

#endif

}  // namespace base

namespace std {

// Specialization for using Location in hash tables.
// Hashes only the program counter, matching Location::operator==.
template <>
struct hash<::base::Location> {
  std::size_t operator()(const ::base::Location& loc) const {
    const void* program_counter = loc.program_counter();
    return base::FastHash(base::as_bytes(base::make_span(&program_counter, 1)));
  }
};

}  // namespace std

#endif  // BASE_LOCATION_H_
{ "pile_set_name": "Github" }
// Copyright 2008-2018 Yolo Technologies, Inc. All Rights Reserved. https://www.comblockengine.com #include "db_exception.h" #include "db_interface_redis.h" #include "db_interface/db_interface.h" namespace KBEngine { //------------------------------------------------------------------------------------- DBException::DBException(DBInterface* pdbi) : errStr_(static_cast<DBInterfaceRedis*>(pdbi)->getstrerror()), errNum_(static_cast<DBInterfaceRedis*>(pdbi)->getlasterror()) { } //------------------------------------------------------------------------------------- DBException::~DBException() throw() { } //------------------------------------------------------------------------------------- bool DBException::shouldRetry() const { return (errNum_== REDIS_ERR_OOM) || (errNum_ == REDIS_ERR_OTHER); } //------------------------------------------------------------------------------------- bool DBException::isLostConnection() const { return (errNum_ == REDIS_ERR_IO) || (errNum_ == REDIS_ERR_EOF); } //------------------------------------------------------------------------------------- } // db_exception.cpp
{ "pile_set_name": "Github" }
#!/bin/bash # Script for running all tests in this directory # This script has to be run in its directory, as shows the usage. # main ------------------------------------------------------------------------ if test "$#" -ne 0; then echo "Usage: ./test_all.sh" exit 1 fi mkdir -p log TIMESTAMP=`date +'%Y-%m-%d-%H-%M-%S'` LOGFILE=log/$TIMESTAMP-test_all.sh GITVERSION=`git version` if [[ "$GITVERSION" ]]; then echo 'Git is available in the working directory:' >> $LOGFILE 2>&1 echo ' Merlin version: ' "`git describe --tags --always`" >> $LOGFILE 2>&1 echo ' branch: ' "`git rev-parse --abbrev-ref HEAD`" >> $LOGFILE 2>&1 echo ' status: ' >> ${LOGFILE}.gitstatus 2>&1 git status >> ${LOGFILE}.gitstatus 2>&1 echo ' diff to Merlin version: ' >> ${LOGFILE}.gitdiff 2>&1 git diff >> ${LOGFILE}.gitdiff 2>&1 echo ' ' fi bash ./test_install.sh >> $LOGFILE 2>&1 source ../src/setup_env.sh python ./test_classes.py >> $LOGFILE 2>&1 bash ./test_training.sh >> $LOGFILE 2>&1
{ "pile_set_name": "Github" }
<?xml version="1.0"?> <ZopeData> <record id="1" aka="AAAAAAAAAAE="> <pickle> <global name="ProxyField" module="Products.ERP5Form.ProxyField"/> </pickle> <pickle> <dictionary> <item> <key> <string>id</string> </key> <value> <string>my_description</string> </value> </item> <item> <key> <string>message_values</string> </key> <value> <dictionary> <item> <key> <string>external_validator_failed</string> </key> <value> <string>The input failed the external validator.</string> </value> </item> </dictionary> </value> </item> <item> <key> <string>overrides</string> </key> <value> <dictionary> <item> <key> <string>extra_context</string> </key> <value> <string></string> </value> </item> <item> <key> <string>field_id</string> </key> <value> <string></string> </value> </item> <item> <key> <string>form_id</string> </key> <value> <string></string> </value> </item> </dictionary> </value> </item> <item> <key> <string>tales</string> </key> <value> <dictionary> <item> <key> <string>extra_context</string> </key> <value> <string></string> </value> </item> <item> <key> <string>field_id</string> </key> <value> <string></string> </value> </item> <item> <key> <string>form_id</string> </key> <value> <string></string> </value> </item> </dictionary> </value> </item> <item> <key> <string>values</string> </key> <value> <dictionary> <item> <key> <string>extra_context</string> </key> <value> <list/> </value> </item> <item> <key> <string>field_id</string> </key> <value> <string>my_description</string> </value> </item> <item> <key> <string>form_id</string> </key> <value> <string>BaseConsulting_FieldLibrary</string> </value> </item> </dictionary> </value> </item> </dictionary> </pickle> </record> </ZopeData>
{ "pile_set_name": "Github" }
%YAML 1.1 %TAG !u! tag:unity3d.com,2011: --- !u!126 &1 NavMeshProjectSettings: m_ObjectHideFlags: 0 serializedVersion: 2 areas: - name: Walkable cost: 1 - name: Not Walkable cost: 1 - name: Jump cost: 2 - name: cost: 1 - name: cost: 1 - name: cost: 1 - name: cost: 1 - name: cost: 1 - name: cost: 1 - name: cost: 1 - name: cost: 1 - name: cost: 1 - name: cost: 1 - name: cost: 1 - name: cost: 1 - name: cost: 1 - name: cost: 1 - name: cost: 1 - name: cost: 1 - name: cost: 1 - name: cost: 1 - name: cost: 1 - name: cost: 1 - name: cost: 1 - name: cost: 1 - name: cost: 1 - name: cost: 1 - name: cost: 1 - name: cost: 1 - name: cost: 1 - name: cost: 1 - name: cost: 1 m_LastAgentTypeID: -887442657 m_Settings: - serializedVersion: 2 agentTypeID: 0 agentRadius: 0.5 agentHeight: 2 agentSlope: 45 agentClimb: 0.75 ledgeDropHeight: 0 maxJumpAcrossDistance: 0 minRegionArea: 2 manualCellSize: 0 cellSize: 0.16666667 manualTileSize: 0 tileSize: 256 accuratePlacement: 0 debug: m_Flags: 0 m_SettingNames: - Humanoid
{ "pile_set_name": "Github" }
-- Loads pre-trained word embeddings from either Word2Vec or Glove assert(get_id_from_word) assert(common_w2v_freq_words) assert(total_num_words) word_vecs_size = 300 -- Loads pre-trained glove or word2vec embeddings: if opt.word_vecs == 'glove' then -- Glove downloaded from: http://nlp.stanford.edu/projects/glove/ w2v_txtfilename = default_path .. 'Glove/glove.840B.300d.txt' w2v_t7filename = opt.root_data_dir .. 'generated/glove.840B.300d.t7' w2v_reader = 'words/w2v/glove_reader.lua' elseif opt.word_vecs == 'w2v' then -- Word2Vec downloaded from: https://code.google.com/archive/p/word2vec/ w2v_binfilename = default_path .. 'Word2Vec/GoogleNews-vectors-negative300.bin' w2v_t7filename = opt.root_data_dir .. 'generated/GoogleNews-vectors-negative300.t7' w2v_reader = 'words/w2v/word2vec_reader.lua' end ---------------------- Code: ----------------------- w2vutils = {} print('==> Loading ' .. opt.word_vecs .. ' vectors') if not paths.filep(w2v_t7filename) then print(' ---> t7 file NOT found. Loading w2v from the bin/txt file instead (slower).') w2vutils.M = require(w2v_reader) print('Writing t7 File for future usage. Next time Word2Vec loading will be faster!') torch.save(w2v_t7filename, w2vutils.M) else print(' ---> from t7 file.') w2vutils.M = torch.load(w2v_t7filename) end -- Move the word embedding matrix on the GPU if we do some training. -- In this way we can perform word embedding lookup much faster. 
if opt and string.find(opt.type, 'cuda') then w2vutils.M = w2vutils.M:cuda() end ---------- Define additional functions ----------------- -- word -> vec w2vutils.get_w_vec = function (self,word) local w_id = get_id_from_word(word) return w2vutils.M[w_id]:clone() end -- word_id -> vec w2vutils.get_w_vec_from_id = function (self,w_id) return w2vutils.M[w_id]:clone() end w2vutils.lookup_w_vecs = function (self,word_id_tensor) assert(word_id_tensor:dim() <= 2, 'Only word id tensors w/ 1 or 2 dimensions are supported.') local output = torch.FloatTensor() local word_ids = word_id_tensor:long() if opt and string.find(opt.type, 'cuda') then output = output:cuda() word_ids = word_ids:cuda() end if word_ids:dim() == 2 then output:index(w2vutils.M, 1, word_ids:view(-1)) output = output:view(word_ids:size(1), word_ids:size(2), w2vutils.M:size(2)) elseif word_ids:dim() == 1 then output:index(w2vutils.M, 1, word_ids) output = output:view(word_ids:size(1), w2vutils.M:size(2)) end return output end -- Normalize word vectors to have norm 1 . w2vutils.renormalize = function (self) w2vutils.M[unk_w_id]:mul(0) w2vutils.M[unk_w_id]:add(1) w2vutils.M:cdiv(w2vutils.M:norm(2,2):expand(w2vutils.M:size())) local x = w2vutils.M:norm(2,2):view(-1) - 1 assert(x:norm() < 0.1, x:norm()) assert(w2vutils.M[100]:norm() < 1.001 and w2vutils.M[100]:norm() > 0.99) w2vutils.M[unk_w_id]:mul(0) end w2vutils:renormalize() print(' Done reading w2v data. Word vocab size = ' .. 
w2vutils.M:size(1)) -- Phrase embedding using average of vectors of words in the phrase w2vutils.phrase_avg_vec = function(self, phrase) local words = split_in_words(phrase) local num_words = table_len(words) local num_existent_words = 0 local vec = torch.zeros(word_vecs_size) for i = 1,num_words do local w = words[i] local w_id = get_id_from_word(w) if w_id ~= unk_w_id then vec:add(w2vutils:get_w_vec_from_id(w_id)) num_existent_words = num_existent_words + 1 end end if (num_existent_words > 0) then vec:div(num_existent_words) end return vec end w2vutils.top_k_closest_words = function (self,vec, k, mat) local k = k or 1 vec = vec:float() local distances = torch.mv(mat, vec) local best_scores, best_word_ids = topk(distances, k) local returnwords = {} local returndistances = {} for i = 1,k do local w = get_word_from_id(best_word_ids[i]) if is_stop_word_or_number(w) then table.insert(returnwords, red(w)) else table.insert(returnwords, w) end assert(best_scores[i] == distances[best_word_ids[i]], best_scores[i] .. ' ' .. distances[best_word_ids[i]]) table.insert(returndistances, distances[best_word_ids[i]]) end return returnwords, returndistances end w2vutils.most_similar2word = function(self, word, k) local k = k or 1 local v = w2vutils:get_w_vec(word) neighbors, scores = w2vutils:top_k_closest_words(v, k, w2vutils.M) print('To word ' .. skyblue(word) .. ' : ' .. 
list_with_scores_to_str(neighbors, scores)) end w2vutils.most_similar2vec = function(self, vec, k) local k = k or 1 neighbors, scores = w2vutils:top_k_closest_words(vec, k, w2vutils.M) print(list_with_scores_to_str(neighbors, scores)) end --------------------- Unit tests ---------------------------------------- local unit_tests = opt.unit_tests or false if (unit_tests) then print('\nWord to word similarity test:') w2vutils:most_similar2word('nice', 5) w2vutils:most_similar2word('france', 5) w2vutils:most_similar2word('hello', 5) end -- Computes for each word w : \sum_v exp(<v,w>) and \sum_v <v,w> w2vutils.total_word_correlation = function(self, k, j) local exp_Z = torch.zeros(w2vutils.M:narrow(1, 1, j):size(1)) local sum_t = w2vutils.M:narrow(1
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8" standalone="no"?> <databaseChangeLog xmlns="http://www.liquibase.org/xml/ns/dbchangelog" xmlns:ext="http://www.liquibase.org/xml/ns/dbchangelog-ext" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog-ext http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-ext.xsd http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.3.xsd"> <changeSet author="toja" id="1-indices"> <createIndex tableName="LOGRECORD" indexName="LOGRECORD_TIMESTAMPRECORD_fkey"> <column name="TIMESTAMPRECORD"/> </createIndex> </changeSet> </databaseChangeLog>
{ "pile_set_name": "Github" }
import {MigrationInterface, QueryRunner} from 'typeorm'; export class tipsAndBitsMessagesToText1573942908160 implements MigrationInterface { name = 'tipsAndBitsMessagesToText1573942908160'; public async up(queryRunner: QueryRunner): Promise<any> { await queryRunner.query(`CREATE TABLE "temporary_user_tip" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "amount" float NOT NULL, "currency" varchar NOT NULL, "message" varchar NOT NULL DEFAULT (''), "tippedAt" bigint NOT NULL DEFAULT (0), "sortAmount" float NOT NULL, "userUserId" integer, CONSTRAINT "FK_36683fb221201263b38344a9880" FOREIGN KEY ("userUserId") REFERENCES "user" ("userId") ON DELETE CASCADE ON UPDATE CASCADE)`, undefined); await queryRunner.query(`INSERT INTO "temporary_user_tip"("id", "amount", "currency", "message", "tippedAt", "sortAmount", "userUserId") SELECT "id", "amount", "currency", "message", "tippedAt", "sortAmount", "userUserId" FROM "user_tip"`, undefined); await queryRunner.query(`DROP TABLE "user_tip"`, undefined); await queryRunner.query(`ALTER TABLE "temporary_user_tip" RENAME TO "user_tip"`, undefined); await queryRunner.query(`CREATE TABLE "temporary_user_bit" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "amount" bigint NOT NULL, "message" varchar NOT NULL DEFAULT (''), "cheeredAt" bigint NOT NULL DEFAULT (0), "userUserId" integer, CONSTRAINT "FK_cca96526faa532e7d20a0f775b0" FOREIGN KEY ("userUserId") REFERENCES "user" ("userId") ON DELETE CASCADE ON UPDATE CASCADE)`, undefined); await queryRunner.query(`INSERT INTO "temporary_user_bit"("id", "amount", "message", "cheeredAt", "userUserId") SELECT "id", "amount", "message", "cheeredAt", "userUserId" FROM "user_bit"`, undefined); await queryRunner.query(`DROP TABLE "user_bit"`, undefined); await queryRunner.query(`ALTER TABLE "temporary_user_bit" RENAME TO "user_bit"`, undefined); await queryRunner.query(`CREATE TABLE "temporary_user_tip" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "amount" float NOT NULL, "currency" varchar 
NOT NULL, "message" text NOT NULL DEFAULT (''), "tippedAt" bigint NOT NULL DEFAULT (0), "sortAmount" float NOT NULL, "userUserId" integer, CONSTRAINT "FK_36683fb221201263b38344a9880" FOREIGN KEY ("userUserId") REFERENCES "user" ("userId") ON DELETE CASCADE ON UPDATE CASCADE)`, undefined); await queryRunner.query(`INSERT INTO "temporary_user_tip"("id", "amount", "currency", "message", "tippedAt", "sortAmount", "userUserId") SELECT "id", "amount", "currency", "message", "tippedAt", "sortAmount", "userUserId" FROM "user_tip"`, undefined); await queryRunner.query(`DROP TABLE "user_tip"`, undefined); await queryRunner.query(`ALTER TABLE "temporary_user_tip" RENAME TO "user_tip"`, undefined); await queryRunner.query(`CREATE TABLE "temporary_user_bit" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "amount" bigint NOT NULL, "message" text NOT NULL DEFAULT (''), "cheeredAt" bigint NOT NULL DEFAULT (0), "userUserId" integer, CONSTRAINT "FK_cca96526faa532e7d20a0f775b0" FOREIGN KEY ("userUserId") REFERENCES "user" ("userId") ON DELETE CASCADE ON UPDATE CASCADE)`, undefined); await queryRunner.query(`INSERT INTO "temporary_user_bit"("id", "amount", "message", "cheeredAt", "userUserId") SELECT "id", "amount", "message", "cheeredAt", "userUserId" FROM "user_bit"`, undefined); await queryRunner.query(`DROP TABLE "user_bit"`, undefined); await queryRunner.query(`ALTER TABLE "temporary_user_bit" RENAME TO "user_bit"`, undefined); await queryRunner.query(`DROP INDEX "IDX_4d8108fc3e8dcbe5c112f53dd3"`, undefined); await queryRunner.query(`CREATE TABLE "temporary_twitch_tag_localization_description" ("id" varchar PRIMARY KEY NOT NULL, "locale" varchar NOT NULL, "value" varchar NOT NULL, "tagId" varchar, CONSTRAINT "FK_4d8108fc3e8dcbe5c112f53dd3f" FOREIGN KEY ("tagId") REFERENCES "twitch_tag" ("tag_id") ON DELETE CASCADE ON UPDATE CASCADE)`, undefined); await queryRunner.query(`INSERT INTO "temporary_twitch_tag_localization_description"("id", "locale", "value", "tagId") SELECT "id", 
"locale", "value", "tagId" FROM "twitch_tag_localization_description"`, undefined); await queryRunner.query(`DROP TABLE "twitch_tag_localization_description"`, undefined); await queryRunner.query(`ALTER TABLE "temporary_twitch_tag_localization_description" RENAME TO "twitch_tag_localization_description"`, undefined); await queryRunner.query(`CREATE INDEX "IDX_4d8108fc3e8dcbe5c112f53dd3" ON "twitch_tag_localization_description" ("tagId") `, undefined); await queryRunner.query(`DROP INDEX "IDX_4d8108fc3e8dcbe5c112f53dd3"`, undefined); await queryRunner.query(`CREATE TABLE "temporary_twitch_tag_localization_description" ("id" varchar PRIMARY KEY NOT NULL, "locale" varchar NOT NULL, "value" text NOT NULL, "tagId" varchar, CONSTRAINT "FK_4d8108fc3e8dcbe5c112f53dd3f" FOREIGN KEY ("tagId") REFERENCES "twitch_tag" ("tag_id") ON DELETE CASCADE ON UPDATE CASCADE)`, undefined); await queryRunner.query(`INSERT INTO "temporary_twitch_tag_localization_description"("id", "locale", "value", "tagId") SELECT "id", "locale", "value", "tagId" FROM "twitch_tag_localization_description"`, undefined); await queryRunner.query(`DROP TABLE "twitch_tag_localization_description"`, undefined); await queryRunner.query(`ALTER TABLE "temporary_twitch_tag_localization_description" RENAME TO "twitch_tag_localization_description"`, undefined); await queryRunner.query(`CREATE INDEX "IDX_4d8108fc3e8dcbe5c112f53dd3" ON "twitch_tag_localization_description" ("tagId") `, undefined); } public async down(queryRunner: QueryRunner): Promise<any> { await queryRunner.query(`ALTER TABLE "user_bit" RENAME TO "temporary_user_bit"`, undefined); await queryRunner.query(`CREATE TABLE "user_bit" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "amount" bigint NOT NULL, "message" varchar NOT NULL DEFAULT (''), "cheeredAt" bigint NOT NULL DEFAULT (0), "userUserId" integer, CONSTRAINT "FK_cca96526faa532e7d20a0f775b0" FOREIGN KEY ("userUserId") REFERENCES "user" ("userId") ON DELETE CASCADE ON UPDATE CASCADE)`, 
undefined); await queryRunner.query(`INSERT INTO "user_bit"("id", "amount", "message", "cheeredAt", "userUserId") SELECT "id", "amount", "message", "cheeredAt", "userUserId" FROM "temporary_user_bit"`,
{ "pile_set_name": "Github" }
# This file is a Tcl script to test the code in the file tkTextIndex.c. # This file is organized in the standard fashion for Tcl tests. # # Copyright (c) 1994 The Regents of the University of California. # Copyright (c) 1994 Sun Microsystems, Inc. # Copyright (c) 1998-1999 by Scriptics Corporation. # All rights reserved. package require tcltest 2.1 eval tcltest::configure $argv tcltest::loadTestedCommands namespace import -force tcltest::test catch {destroy .t} text .t -font {Courier -12} -width 20 -height 10 pack append . .t {top expand fill} update .t debug on wm geometry . {} # The statements below reset the main window; it's needed if the window # manager is mwm to make mwm forget about a previous minimum size setting. wm withdraw . wm minsize . 1 1 wm positionfrom . user wm deiconify . .t insert 1.0 "Line 1 abcdefghijklm 12345 Line 4 b\u4e4fy GIrl .#@? x_yz !@#$% Line 7" image create photo textimage -width 10 -height 10 textimage put red -to 0 0 9 9 test textIndex-1.1 {TkTextMakeByteIndex} {testtext} { # (lineIndex < 0) testtext .t byteindex -1 3 } {1.0 0} test textIndex-1.2 {TkTextMakeByteIndex} {testtext} { # (lineIndex < 0), because lineIndex == strtol(argv[2]) - 1 testtext .t byteindex 0 3 } {1.0 0} test textIndex-1.3 {TkTextMakeByteIndex} {testtext} { # not (lineIndex < 0) testtext .t byteindex 1 3 } {1.3 3} test textIndex-1.4 {TkTextMakeByteIndex} {testtext} { # (byteIndex < 0) testtext .t byteindex 3 -1 } {3.0 0} test textIndex-1.5 {TkTextMakeByteIndex} {testtext} { # not (byteIndex < 0) testtext .t byteindex 3 3 } {3.3 3} test textIndex-1.6 {TkTextMakeByteIndex} {testtext} { # (indexPtr->linePtr == NULL) testtext .t byteindex 9 2 } {8.0 0} test textIndex-1.7 {TkTextMakeByteIndex} {testtext} { # not (indexPtr->linePtr == NULL) testtext .t byteindex 7 2 } {7.2 2} test textIndex-1.8 {TkTextMakeByteIndex: shortcut for 0} {testtext} { # (byteIndex == 0) testtext .t byteindex 1 0 } {1.0 0} test textIndex-1.9 {TkTextMakeByteIndex: shortcut for 0} {testtext} { 
# not (byteIndex == 0) testtext .t byteindex 3 80 } {3.5 5} test textIndex-1.10 {TkTextMakeByteIndex: verify index is in range} {testtext} { # for (segPtr = indexPtr->linePtr->segPtr; ; segPtr = segPtr->nextPtr) # one segment testtext .t byteindex 3 5 } {3.5 5} test textIndex-1.11 {TkTextMakeByteIndex: verify index is in range} {testtext} { # for (segPtr = indexPtr->linePtr->segPtr; ; segPtr = segPtr->nextPtr) # index += segPtr->size # Multiple segments, make sure add segment size to index. .t mark set foo 3.2 set x [testtext .t byteindex 3 7] .t mark unset foo set x } {3.5 5} test textIndex-1.12 {TkTextMakeByteIndex: verify index is in range} {testtext} { # (segPtr == NULL) testtext .t byteindex 3 7 } {3.5 5} test textIndex-1.13 {TkTextMakeByteIndex: verify index is in range} {testtext} { # not (segPtr == NULL) testtext .t byteindex 3 4 } {3.4 4} test textIndex-1.14 {TkTextMakeByteIndex: verify index is in range} {testtext} { # (index + segPtr->size > byteIndex) # in this segment. testtext .t byteindex 3 4 } {3.4 4} test textIndex-1.15 {TkTextMakeByteIndex: verify index is in range} {testtext} { # (index + segPtr->size > byteIndex), index != 0 # in this segment. .t mark set foo 3.2 set x [testtext .t byteindex 3 4] .t mark unset foo set x } {3.4 4} test textIndex-1.16 {TkTextMakeByteIndex: UTF-8 characters} {testtext} { testtext .t byteindex 5 100 } {5.18 20} test textIndex-1.17 {TkTextMakeByteIndex: prevent splitting UTF-8 character} \ {testtext} { # ((byteIndex > index) && (segPtr->typePtr == &tkTextCharType)) # Wrong answer would be \xb9 (the 2nd byte of UTF rep of 0x4e4f). 
set x [testtext .t byteindex 5 2] list $x [.t get insert] } {{5.2 4} y} test textIndex-1.18 {TkTextMakeByteIndex: prevent splitting UTF-8 character} \ {testtext} { # ((byteIndex > index) && (segPtr->typePtr == &tkTextCharType)) testtext .t byteindex 5 1 .t get insert } "\u4e4f" test textIndex-2.1 {TkTextMakeCharIndex} { # (lineIndex < 0) .t index -1.3 } 1.0 test textIndex-2.2 {TkTextMakeCharIndex} { # (lineIndex < 0), because lineIndex == strtol(argv[2]) - 1 .t index 0.3 } 1.0 test textIndex-2.3 {TkTextMakeCharIndex} { # not (lineIndex < 0) .t index 1.3 } 1.3 test textIndex-2.4 {TkTextMakeCharIndex} { # (charIndex < 0) .t index 3.-1 } 3.0 test textIndex-2.5 {TkTextMakeCharIndex} { # (charIndex < 0) .t index 3.3 } 3.3 test textIndex-2.6 {TkTextMakeCharIndex} { # (indexPtr->linePtr == NULL) .t index 9.2 } 8.0 test textIndex-2.7 {TkTextMakeCharIndex} { # not (indexPtr->linePtr == NULL) .t index 7.2 } 7.2 test textIndex-2.8 {TkTextMakeCharIndex: verify index is in range} { # for (segPtr = indexPtr->linePtr->segPtr; ; segPtr = segPtr->nextPtr) # one segment .t index 3.5 } 3.5 test textIndex-2.9 {TkTextMakeCharIndex: verify index is in range} { # for (segPtr = indexPtr->linePtr->segPtr; ; segPtr = segPtr->nextPtr) # Multiple segments, make sure add segment size to index. .t mark set foo 3.2 set x [.t index 3.7] .t mark unset foo set x } 3.5 test textIndex-2.10 {TkTextMakeCharIndex: verify index is in range} { # (segPtr == NULL) .t index 3.7 } 3.5 test textIndex-2.11 {TkTextMakeCharIndex: verify index is in range} { # not (segPtr == NULL) .t index 3.4 } 3.4 test textIndex-2
{ "pile_set_name": "Github" }
package me.coley.recaf.ui.controls;

import javafx.scene.control.TextField;

/**
 * TextField with a numeric text parser.
 *
 * @author Matt
 */
public class NumericText extends TextField {
	/**
	 * Parses the field's current text as a number.
	 *
	 * @return Generic number, {@code null} if text does not represent any number format.
	 */
	public Number get() {
		return parse(getText());
	}

	/**
	 * Parses a numeric literal in the same formats the original UI accepted:
	 * plain digits (int), optional {@code d}/{@code D} suffix or decimal point
	 * (double), {@code f}/{@code F} suffix (float), {@code l}/{@code L} suffix (long).
	 *
	 * @param text
	 * 		Text to parse, may be {@code null}.
	 *
	 * @return Parsed number, or {@code null} if the text is not a recognized
	 * number format or is out of range for the target type.
	 */
	private static Number parse(String text) {
		if (text == null)
			return null;
		try {
			if (text.matches("\\d+")) {
				// A plain digit run can exceed Integer.MAX_VALUE; previously this
				// threw NumberFormatException. Fall back to long before giving up.
				try {
					return Integer.parseInt(text);
				} catch (NumberFormatException overflow) {
					return Long.parseLong(text);
				}
			} else if (text.matches("\\d+\\.?\\d*[dD]?")) {
				// Optional 'd'/'D' suffix marks an explicit double literal.
				if (text.toLowerCase().contains("d"))
					return Double.parseDouble(text.substring(0, text.length() - 1));
				else
					return Double.parseDouble(text);
			} else if (text.matches("\\d+\\.?\\d*[fF]")) {
				return Float.parseFloat(text.substring(0, text.length() - 1));
			} else if (text.matches("\\d+\\.?\\d*[lL]")) {
				return Long.parseLong(text.substring(0, text.length() - 1));
			}
		} catch (NumberFormatException outOfRange) {
			// Value matched a numeric pattern but cannot be represented
			// (e.g. long overflow): treat it as "not a number".
			return null;
		}
		return null;
	}
}
{ "pile_set_name": "Github" }
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************

using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;
using Windows.UI.Xaml.Navigation;
using Windows.ApplicationModel.Contacts;

namespace SDKTemplate
{
    /// <summary>
    /// An empty page that can be used on its own or navigated to within a Frame.
    /// Demonstrates creating sample contacts plus annotations, and deleting them.
    /// </summary>
    public sealed partial class Scenario1_CreateContacts : Page
    {
        private MainPage rootPage = MainPage.Current;

        public Scenario1_CreateContacts()
        {
            this.InitializeComponent();
        }

        /// <summary>
        /// Opens the app's read/write contact store and returns its first contact
        /// list, creating "TestContactList" if none exists yet.
        /// Returns null (after notifying the user) if the store is unavailable.
        /// </summary>
        private async Task<ContactList> _GetContactList()
        {
            ContactStore store = await ContactManager.RequestStoreAsync(ContactStoreAccessType.AppContactsReadWrite);
            if (null == store)
            {
                rootPage.NotifyUser("Unable to get a contacts store.", NotifyType.ErrorMessage);
                return null;
            }
            ContactList contactList;
            IReadOnlyList<ContactList> contactLists = await store.FindContactListsAsync();
            if (0 == contactLists.Count)
            {
                contactList = await store.CreateContactListAsync("TestContactList");
            }
            else
            {
                // Reuse the existing list rather than creating duplicates.
                contactList = contactLists[0];
            }
            return contactList;
        }

        /// <summary>
        /// Opens the app's read/write annotation store and returns its first
        /// annotation list, creating one if none exists yet.
        /// Returns null (after notifying the user) if the store is unavailable.
        /// </summary>
        private async Task<ContactAnnotationList> _GetContactAnnotationList()
        {
            ContactAnnotationStore annotationStore = await ContactManager.RequestAnnotationStoreAsync(ContactAnnotationStoreAccessType.AppAnnotationsReadWrite);
            if (null == annotationStore)
            {
                rootPage.NotifyUser("Unable to get an annotations store.", NotifyType.ErrorMessage);
                return null;
            }
            ContactAnnotationList annotationList;
            IReadOnlyList<ContactAnnotationList> annotationLists = await annotationStore.FindAnnotationListsAsync();
            if (0 == annotationLists.Count)
            {
                annotationList = await annotationStore.CreateAnnotationListAsync();
            }
            else
            {
                annotationList = annotationLists[0];
            }
            return annotationList;
        }

        /// <summary>
        /// Creates two sample contacts and then annotations pointing back at
        /// them. NOTE: the contacts must be saved before the annotations are
        /// created, because each annotation references a saved contact's Id.
        /// (async void: this appears to be an event handler — exceptions will
        /// not propagate to a caller.)
        /// </summary>
        private async void CreateTestContacts()
        {
            //
            // Creating two test contacts with email address and phone number.
            //
            Contact contact1 = new Contact();
            contact1.FirstName = "TestContact1";

            ContactEmail email1 = new ContactEmail();
            email1.Address = "TestContact1@contoso.com";
            contact1.Emails.Add(email1);

            ContactPhone phone1 = new ContactPhone();
            phone1.Number = "4255550100";
            contact1.Phones.Add(phone1);

            Contact contact2 = new Contact();
            contact2.FirstName = "TestContact2";

            ContactEmail email2 = new ContactEmail();
            email2.Address = "TestContact2@contoso.com";
            email2.Kind = ContactEmailKind.Other;
            contact2.Emails.Add(email2);

            ContactPhone phone2 = new ContactPhone();
            phone2.Number = "4255550101";
            phone2.Kind = ContactPhoneKind.Mobile;
            contact2.Phones.Add(phone2);

            // Save the contacts; saving assigns the Ids used by the annotations below.
            ContactList contactList = await _GetContactList();
            if (null == contactList) { return; }
            await contactList.SaveContactAsync(contact1);
            await contactList.SaveContactAsync(contact2);

            //
            // Create annotations for those test contacts.
            // Annotation is the contact meta data that allows People App to generate deep links
            // in the contact card that takes the user back into this app.
            //
            ContactAnnotationList annotationList = await _GetContactAnnotationList();
            if (null == annotationList) { return; }

            ContactAnnotation annotation = new ContactAnnotation();
            annotation.ContactId = contact1.Id;

            // Remote ID: The identifier of the user relevant for this app. When this app is
            // launched into from the People App, this id will be provided as context on which user
            // the operation (e.g. ContactProfile) is for.
            annotation.RemoteId = "user12";

            // The supported operations flags indicate that this app can fulfill these operations
            // for this contact. These flags are read by apps such as the People App to create deep
            // links back into this app. This app must also be registered for the relevant
            // protocols in the Package.appxmanifest (in this case, ms-contact-profile).
            annotation.SupportedOperations = ContactAnnotationOperations.ContactProfile;

            if (!await annotationList.TrySaveAnnotationAsync(annotation))
            {
                rootPage.NotifyUser("Failed to save annotation for TestContact1 to the store.", NotifyType.ErrorMessage);
                return;
            }

            annotation = new ContactAnnotation();
            annotation.ContactId = contact2.Id;
            annotation.RemoteId = "user22";

            // You can also specify multiple supported operations for a contact in a single
            // annotation. In this case, this annotation indicates that the user can be
            // communicated via VOIP call, Video Call, or IM via this application.
            annotation.SupportedOperations = ContactAnnotationOperations.Message |
              ContactAnnotationOperations.AudioCall |
              ContactAnnotationOperations.VideoCall;

            if (!await annotationList.TrySaveAnnotationAsync(annotation))
            {
                rootPage.NotifyUser("Failed to save annotation for TestContact2 to the store.", NotifyType.ErrorMessage);
                return;
            }

            rootPage.NotifyUser("Sample data created successfully.", NotifyType.StatusMessage);
        }

        /// <summary>
        /// Deletes the sample data by deleting the first contact list in the
        /// app's store (which removes the contacts it contains).
        /// </summary>
        private async void DeleteTestContacts()
        {
            ContactList contactList = null;
            ContactStore store = await ContactManager.RequestStoreAsync(ContactStoreAccessType.AppContactsReadWrite);
            if (null != store)
            {
                IReadOnlyList<ContactList> contactLists = await store.FindContactListsAsync();
                if (0 < contactLists.Count)
                {
                    contactList = contactLists[0];
                }
            }
            if (null != contactList)
            {
                await contactList.DeleteAsync();
                rootPage.NotifyUser("Sample data deleted.", NotifyType.StatusMessage);
            }
            else
            {
                rootPage.NotifyUser("Could not delete sample data.", NotifyType.ErrorMessage);
            }
        }
    }
}
{ "pile_set_name": "Github" }
/** @file Contains the global variables used in LabelMe. */ // Parsed LabelMe XML file. Manipulate this variable with jquery. var LM_xml; // URL of CGI script to submit XML annotation: var SubmitXmlUrl = 'annotationTools/perl/submit.cgi'; // LabelMe username: var username = 'anonymous'; // Boolean indicating whether user is currently signing in (this should be abstracted into class): var username_flag = 0; // Boolean indicating if we will use attributes. This should be read from the URL and set to 0 by default. var use_attributes = 1; // if this is 0, then it will remove all the attributes from the bubble. var use_parts = 1; // if this is 0 disapears the message from the bubble // for now, let's remove the attributes in MT mode. Just in case anybody is trying this. if (getQueryVariable('mode')=='mt'){ //use_attributes=0; //use_parts = 0; } // Boolean indicating whether the control points were edited: var editedControlPoints = 0; // Scalar indicating which polygon is selected; -1 means no polygon is selected var selected_poly = -1; // Class with functions to handle actions/events. var main_handler; // Canvas that renders polygons at rest state. var main_canvas; // Holds image. var main_media; // URL of XHTML namespace. This is needed for generating SVG elements. var xhtmlNS = 'http://www.w3.org/1999/xhtml'; // Website that refers to LabelMe: var ref; // Indicates whether we are in segmentation or polygon mode var drawing_mode = 0; var showImgName = false; // Scribble mode: var scribble_mode = true; var threed_mode = false; var video_mode = false; var bounding_box = false; var bbox_mode = true; var autocomplete_mode = false; var wait_for_input; var edit_popup_open = 0; var num_orig_anno; var global_count = 0; var req_submit; // Indicates if polygon has been edited. 
var submission_edited = 0; // Allowable user actions: var action_CreatePolygon = 1; var action_RenameExistingObjects = 0; var action_ModifyControlExistingObjects = 0; var action_DeleteExistingObjects = 0; // Which polygons are visible: var view_Existing = 1; var view_Deleted = 0; // Flag for right-hand object list: var view_ObjList = true; // Mechanical Turk variables: var LMbaseurl = 'http://' + window.location.host + window.location.pathname; var MThelpPage = 'annotationTools/html/mt_instructions.html'; var externalSubmitURL = 'https://www.mturk.com/mturk/externalSubmit'; var externalSubmitURLsandbox = 'https://workersandbox.mturk.com/mturk/externalSubmit'; var mt_N = 'inf'; var object_choices = '...'; var loaded_once = false;
{ "pile_set_name": "Github" }
// Copyright 2019 ETH Zurich
// Copyright 2020 ETH Zurich, Anapaya Systems
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package json_test

import (
	"encoding/json"
	"flag"
	"io/ioutil"
	"strings"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"github.com/scionproto/scion/go/lib/common"
	jsontopo "github.com/scionproto/scion/go/lib/topology/json"
)

var (
	// Golden-file flag: run the tests with `-update` to regenerate
	// testdata/topology.json from the in-code reference struct below.
	update = flag.Bool("update", false, "set to true to update golden files")
)

// TestLoadRawFromFile checks the round trip between the golden file
// testdata/topology.json and the reference Topology struct: loading the file
// must yield the struct, and marshaling the struct must yield the file bytes.
func TestLoadRawFromFile(t *testing.T) {
	// Reference value that must stay in sync with testdata/topology.json.
	referenceTopology := &jsontopo.Topology{
		Timestamp:      168562800,
		TimestampHuman: "May 6 00:00:00 CET 1975",
		IA:             "6-ff00:0:362",
		MTU:            1472,
		Attributes: []jsontopo.Attribute{jsontopo.Authoritative, jsontopo.AttrCore,
			jsontopo.Issuing, jsontopo.Voting},
		BorderRouters: map[string]*jsontopo.BRInfo{
			// IPv4 border router with a CORE link.
			"borderrouter6-f00:0:362-1": {
				InternalAddr: "10.1.0.1:0",
				CtrlAddr:     "10.1.0.1:30098",
				Interfaces: map[common.IFIDType]*jsontopo.BRInterface{
					91: {
						Underlay: jsontopo.Underlay{
							Public: "192.0.2.1:4997",
							Remote: "192.0.2.2:4998",
							Bind:   "10.0.0.1",
						},
						Bandwidth: 100000,
						IA:        "6-ff00:0:363",
						LinkTo:    "CORE",
						MTU:       1472,
					},
				},
			},
			// IPv6 border router with a CHILD link.
			"borderrouter6-f00:0:362-9": {
				InternalAddr: "[2001:db8:a0b:12f0::2]:0",
				CtrlAddr:     "[2001:db8:a0b:12f0::2300]:30098",
				Interfaces: map[common.IFIDType]*jsontopo.BRInterface{
					32: {
						Underlay: jsontopo.Underlay{
							Public: "[2001:db8:a0b:12f0::1]:4997",
							Remote: "[2001:db8:a0b:12f0::2]:4998",
							Bind:   "2001:db8:a0b:12f0::8",
						},
						Bandwidth: 5000,
						IA:        "6-ff00:0:364",
						LinkTo:    "CHILD",
						MTU:       4430,
					},
				},
			},
		},
	}
	if *update {
		// Regenerate the golden file (trailing newline included) and bail out
		// of nothing — the subtests below then compare against the fresh file.
		b, err := json.MarshalIndent(referenceTopology, "", " ")
		require.NoError(t, err)
		b = append(b, []byte("\n")...)
		err = ioutil.WriteFile("testdata/topology.json", b, 0644)
		require.NoError(t, err)
	}
	t.Run("unmarshaled struct matches", func(t *testing.T) {
		// File -> struct direction.
		loadedTopology, err := jsontopo.LoadFromFile("testdata/topology.json")
		assert.NoError(t, err)
		assert.Equal(t, referenceTopology, loadedTopology)
	})
	t.Run("marshaled bytes match", func(t *testing.T) {
		// Struct -> bytes direction; TrimSpace ignores the trailing newline.
		referenceTopologyBytes, err := ioutil.ReadFile("testdata/topology.json")
		require.NoError(t, err)
		topologyBytes, err := json.MarshalIndent(referenceTopology, "", " ")
		require.NoError(t, err)
		assert.Equal(t,
			strings.TrimSpace(string(referenceTopologyBytes)),
			strings.TrimSpace(string(topologyBytes)),
		)
	})
}
{ "pile_set_name": "Github" }
/* Header describing `ar' archive file format.
   Copyright (C) 1996 Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, write to the Free
   Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
   02111-1307 USA.  */

#ifndef _AR_H
#define _AR_H 1

#include <sys/cdefs.h>

/* Archive files start with the ARMAG identifying string.  Then follows a
   `struct ar_hdr', and as many bytes of member file data as its `ar_size'
   member indicates, for each member file.  */

#define ARMAG	"!<arch>\n"	/* String that begins an archive file.  */
#define SARMAG	8		/* Size of that string.  */

#define ARFMAG	"`\n"		/* String in ar_fmag at end of each header.  */

__BEGIN_DECLS

/* Per-member header.  All fields are fixed-width text (no NUL terminators
   implied by the layout); the struct totals 60 bytes (16+12+6+6+8+10+2).  */
struct ar_hdr
  {
    char ar_name[16];		/* Member file name, sometimes / terminated. */
    char ar_date[12];		/* File date, decimal seconds since Epoch.  */
    char ar_uid[6], ar_gid[6];	/* User and group IDs, in ASCII decimal.  */
    char ar_mode[8];		/* File mode, in ASCII octal.  */
    char ar_size[10];		/* File size, in ASCII decimal.  */
    char ar_fmag[2];		/* Always contains ARFMAG.  */
  };

__END_DECLS

#endif /* ar.h */
{ "pile_set_name": "Github" }
/*
 * @brief LPC11xx ROM API declarations and functions
 *
 * @note
 * Copyright(C) NXP Semiconductors, 2012
 * All rights reserved.
 *
 * @par
 * Software that is described herein is for illustrative purposes only
 * which provides customers with programming information regarding the
 * LPC products. This software is supplied "AS IS" without any warranties of
 * any kind, and NXP Semiconductors and its licensor disclaim any and
 * all warranties, express or implied, including all implied warranties of
 * merchantability, fitness for a particular purpose and non-infringement of
 * intellectual property rights. NXP Semiconductors assumes no responsibility
 * or liability for the use of the software, conveys no license or rights under any
 * patent, copyright, mask work right, or any other intellectual property rights in
 * or to any products. NXP Semiconductors reserves the right to make changes
 * in the software without notification. NXP Semiconductors also makes no
 * representation or warranty that such application will be suitable for the
 * specified use without further testing or modification.
 *
 * @par
 * Permission to use, copy, modify, and distribute this software and its
 * documentation is hereby granted, under NXP Semiconductors' and its
 * licensor's relevant copyrights in the software, without fee, provided that it
 * is used in conjunction with NXP Semiconductors microcontrollers. This
 * copyright, permission, and disclaimer notice must appear in all copies of
 * this code.
 */

#ifndef __ROMAPI_11XX_H_
#define __ROMAPI_11XX_H_

#include "error.h"

#ifdef __cplusplus
extern "C" {
#endif

/** @defgroup ROMAPI_11XX CHIP: LPC11XX ROM API declarations and functions
 * @ingroup CHIP_11XX_Drivers
 * @{
 */

/**
 * @brief LPC11XX High level ROM API structure
 * Table of base addresses for the function tables exposed by the on-chip ROM.
 */
typedef struct {
	const uint32_t usbdApiBase;	/*!< USBD API function table base address */
	const uint32_t reserved0;	/*!< Reserved */
	const uint32_t candApiBase;	/*!< CAN API function table base address */
	const uint32_t pwrApiBase;	/*!< Power API function table base address */
	const uint32_t reserved1;	/*!< Reserved */
	const uint32_t reserved2;	/*!< Reserved */
	const uint32_t reserved3;	/*!< Reserved */
	const uint32_t reserved4;	/*!< Reserved */
} LPC_ROM_API_T;

/**
 * @brief LPC11XX IAP_ENTRY API function type
 * Takes a command array and a status/result array (layouts defined by the
 * IAP documentation; not visible in this header).
 */
typedef void (*IAP_ENTRY_T)(unsigned int[], unsigned int[]);

/* Invokes the on-chip IAP routine by casting the fixed entry-point address
   to the function type above.  NOTE(review): INLINE and IAP_ENTRY_LOCATION
   are defined elsewhere in the chip support headers — confirm before reuse. */
static INLINE void iap_entry(unsigned int cmd_param[], unsigned int status_result[])
{
	((IAP_ENTRY_T) IAP_ENTRY_LOCATION)(cmd_param, status_result);
}

/**
 * @}
 */

#ifdef __cplusplus
}
#endif

#endif /* __ROMAPI_11XX_H_ */
{ "pile_set_name": "Github" }
<?xml version="1.0"?> <window xmlns:html="http://www.w3.org/1999/xhtml" class="reftest-wait" xmlns="http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul" title="Testcase bug 311661 - Evil xul testcase, using display:table-row causes crash [@ nsTableRowGroupFrame::GetFirstRow]"> <html:script><![CDATA[ function doe() { document.documentElement.getElementsByTagName('*')[1].style.display='table-row'; setTimeout(doe2,20); } function doe2(){ document.documentElement.getElementsByTagName('*')[1].style.display=''; setTimeout(doe,20); } ]]></html:script> <button id="button" onclick="doe()" label="Mozilla should not crash, when clicking this button"/> <div style="display:table-row"/> <html:script> function clickbutton() { var ev = document.createEvent('MouseEvents'); ev.initMouseEvent("click", true, true, window, 0, 0, 0, 0, 0, false, false, false, false, 0, null); var button = document.getElementById('button'); button.dispatchEvent(ev); setTimeout(function() { document.documentElement.className = "" }, 500); } window.addEventListener("load", clickbutton, false); </html:script> </window>
{ "pile_set_name": "Github" }
# Buildroot package option for libva-utils (VA-API test utilities).
# The toolchain requirements marked "# libva" are inherited from the
# selected libva package.
config BR2_PACKAGE_LIBVA_UTILS
	bool "libva-utils"
	depends on BR2_INSTALL_LIBSTDCPP
	depends on !BR2_STATIC_LIBS # libva
	depends on BR2_TOOLCHAIN_HAS_THREADS # libva
	select BR2_PACKAGE_LIBVA
	help
	  Libva-utils is a collection of tests for VA-API (Video
	  Acceleration API)

	  https://01.org/vaapi

comment "libva-utils needs a toolchain w/ C++, threads, dynamic library"
	depends on !BR2_INSTALL_LIBSTDCPP || \
		BR2_STATIC_LIBS || !BR2_TOOLCHAIN_HAS_THREADS
{ "pile_set_name": "Github" }
/* * libjingle * Copyright 2013, Google Inc. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * 3. The name of the author may not be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ //downloaded from https://code.google.com/p/libjingle/source/browse/trunk/talk/base/?r=273 #ifndef TALK_BASE_IFADDRS_ANDROID_H_ #define TALK_BASE_IFADDRS_ANDROID_H_ #include <stdio.h> #include <sys/socket.h> // Implementation of getifaddrs for Android. // Fills out a list of ifaddr structs (see below) which contain information // about every network interface available on the host. // See 'man getifaddrs' on Linux or OS X (nb: it is not a POSIX function). 
// Minimal subset of the POSIX-style ifaddrs record; each entry
// describes one address of one network interface.
struct ifaddrs {
  struct ifaddrs* ifa_next;      // Next record in the list; NULL terminates it.
  char* ifa_name;                // Interface name, e.g. "wlan0".
  unsigned int ifa_flags;        // Interface flags (IFF_* values per 'man netdevice').
  struct sockaddr* ifa_addr;     // Address of the interface.
  struct sockaddr* ifa_netmask;  // Netmask associated with ifa_addr.
  // Real ifaddrs has broadcast, point to point and data members.
  // We don't need them (yet?).
};

// Allocates and fills a linked list of interface addresses into *result.
// Returns 0 on success; the caller releases the list with freeifaddrs().
int getifaddrs(struct ifaddrs** result);

// Releases every node of a list previously returned by getifaddrs().
void freeifaddrs(struct ifaddrs* addrs);

#endif  // TALK_BASE_IFADDRS_ANDROID_H_
{ "pile_set_name": "Github" }
// Copyright 2013 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package ipv6 import ( "encoding/binary" "errors" "net" "unsafe" ) var ( errMissingAddress = errors.New("missing address") errHeaderTooShort = errors.New("header too short") errInvalidConnType = errors.New("invalid conn type") errOpNoSupport = errors.New("operation not supported") errNoSuchInterface = errors.New("no such interface") nativeEndian binary.ByteOrder ) func init() { i := uint32(1) b := (*[4]byte)(unsafe.Pointer(&i)) if b[0] == 1 { nativeEndian = binary.LittleEndian } else { nativeEndian = binary.BigEndian } } func boolint(b bool) int { if b { return 1 } return 0 } func netAddrToIP16(a net.Addr) net.IP { switch v := a.(type) { case *net.UDPAddr: if ip := v.IP.To16(); ip != nil && ip.To4() == nil { return ip } case *net.IPAddr: if ip := v.IP.To16(); ip != nil && ip.To4() == nil { return ip } } return nil }
{ "pile_set_name": "Github" }
package unused import ( "fmt" "go/ast" "go/token" "go/types" "io" "strings" "sync" "sync/atomic" "golang.org/x/tools/go/analysis" "honnef.co/go/tools/code" "honnef.co/go/tools/go/types/typeutil" "honnef.co/go/tools/internal/passes/buildir" "honnef.co/go/tools/ir" "honnef.co/go/tools/lint" ) // The graph we construct omits nodes along a path that do not // contribute any new information to the solution. For example, the // full graph for a function with a receiver would be Func -> // Signature -> Var -> Type. However, since signatures cannot be // unused, and receivers are always considered used, we can compact // the graph down to Func -> Type. This makes the graph smaller, but // harder to debug. // TODO(dh): conversions between structs mark fields as used, but the // conversion itself isn't part of that subgraph. even if the function // containing the conversion is unused, the fields will be marked as // used. // TODO(dh): we cannot observe function calls in assembly files. /* - packages use: - (1.1) exported named types (unless in package main) - (1.2) exported functions (unless in package main) - (1.3) exported variables (unless in package main) - (1.4) exported constants (unless in package main) - (1.5) init functions - (1.6) functions exported to cgo - (1.7) the main function iff in the main package - (1.8) symbols linked via go:linkname - named types use: - (2.1) exported methods - (2.2) the type they're based on - (2.3) all their aliases. we can't easily track uses of aliases because go/types turns them into uses of the aliased types. assume that if a type is used, so are all of its aliases. - (2.4) the pointer type. this aids with eagerly implementing interfaces. if a method that implements an interface is defined on a pointer receiver, and the pointer type is never used, but the named type is, then we still want to mark the method as used. 
- variables and constants use: - their types - functions use: - (4.1) all their arguments, return parameters and receivers - (4.2) anonymous functions defined beneath them - (4.3) closures and bound methods. this implements a simplified model where a function is used merely by being referenced, even if it is never called. that way we don't have to keep track of closures escaping functions. - (4.4) functions they return. we assume that someone else will call the returned function - (4.5) functions/interface methods they call - types they instantiate or convert to - (4.7) fields they access - (4.8) types of all instructions - (4.9) package-level variables they assign to iff in tests (sinks for benchmarks) - conversions use: - (5.1) when converting between two equivalent structs, the fields in either struct use each other. the fields are relevant for the conversion, but only if the fields are also accessed outside the conversion. - (5.2) when converting to or from unsafe.Pointer, mark all fields as used. - structs use: - (6.1) fields of type NoCopy sentinel - (6.2) exported fields - (6.3) embedded fields that help implement interfaces (either fully implements it, or contributes required methods) (recursively) - (6.4) embedded fields that have exported methods (recursively) - (6.5) embedded structs that have exported fields (recursively) - (7.1) field accesses use fields - (7.2) fields use their types - (8.0) How we handle interfaces: - (8.1) We do not technically care about interfaces that only consist of exported methods. Exported methods on concrete types are always marked as used. - Any concrete type implements all known interfaces. Even if it isn't assigned to any interfaces in our code, the user may receive a value of the type and expect to pass it back to us through an interface. Concrete types use their methods that implement interfaces. If the type is used, it uses those methods. Otherwise, it doesn't. 
This way, types aren't incorrectly marked reachable through the edge from method to type. - (8.3) All interface methods are marked as used, even if they never get called. This is to accommodate sum types (unexported interface method that must exist but never gets called.) - (8.4) All embedded interfaces are marked as used. This is an extension of 8.3, but we have to explicitly track embedded interfaces because in a chain C->B->A, B wouldn't be marked as used by 8.3 just because it contributes A's methods to C. - Inherent uses: - thunks and other generated wrappers call the real function - (9.2) variables use their types - (9.3) types use their underlying and element types - (9.4) conversions use the type they convert to - (9.5) instructions use their operands - (9.6) instructions use their operands' types - (9.7) variable _reads_ use variables, writes do not, except in tests - (9.8) runtime functions that may be called from user code via the compiler - const groups: (10.1) if one constant out of a block of constants is used, mark all of them used. a lot of the time, unused constants exist for the sake of completeness. See also https://github.com/dominikh/go-tools/issues/365 - (11.1) anonymous struct types use all their fields. we cannot deduplicate struct types, as that leads to order-dependent reportings. we can't not deduplicate struct types while still tracking fields, because then each instance of the unnamed type in the data flow chain will get its own fields, causing false positives. Thus, we only accurately track fields of named struct types, and assume that unnamed struct types use all their fields. - Differences in whole program mode: - (e2) types aim to implement all exported interfaces from all packages - (e3) exported identifiers aren't automatically used. for fields and methods this poses extra issues due to reflection. We assume that all exported fields are used. We also maintain a list of known reflection-based method callers. 
*/ func assert(b bool) { if !b { panic("failed assertion") } } func typString(obj types.Object) string { switch obj := obj.(type) { case *types.Func: return "func" case *types.Var: if obj.IsField() { return "field" } return "var" case *types.Const: return "const" case *types.TypeName: return "type" default: return "identifier" } } // /usr/lib/go/src/runtime/proc.go:433:6: func badmorestackg0 is unused (U1000) // Functions defined in the Go runtime that may be called through // compiler magic or via assembly. var runtimeFuncs = map[string]bool{ // The first part of the list is copied from // cmd/compile/internal/gc/builtin.go, var runtimeDecls "newobject": true, "panicindex": true, "panicslice": true, "panicdivide": true, "panicmakeslicelen": true, "throwinit": true, "panicwrap": true, "gopanic": true, "gorecover": true, "goschedguarded": true, "printbool": true, "printfloat": true, "printint": true, "printhex": true, "printuint": true
{ "pile_set_name": "Github" }
/* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // This file was autogenerated by go-to-protobuf. Do not edit it manually! syntax = 'proto2'; package k8s.io.apimachinery.pkg.apis.meta.v1; import "k8s.io/apimachinery/pkg/runtime/generated.proto"; import "k8s.io/apimachinery/pkg/runtime/schema/generated.proto"; import "k8s.io/apimachinery/pkg/util/intstr/generated.proto"; // Package-wide variables from generator "generated". option go_package = "v1"; // APIGroup contains the name, the supported versions, and the preferred version // of a group. message APIGroup { // name is the name of the group. optional string name = 1; // versions are the versions supported in this group. repeated GroupVersionForDiscovery versions = 2; // preferredVersion is the version preferred by the API server, which // probably is the storage version. // +optional optional GroupVersionForDiscovery preferredVersion = 3; // a map of client CIDR to server address that is serving this group. // This is to help clients reach servers in the most network-efficient way possible. // Clients can use the appropriate server address as per the CIDR that they match. // In case of multiple matches, clients should use the longest matching CIDR. // The server returns only those CIDRs that it thinks that the client can match. // For example: the master will return an internal IP CIDR only, if the client reaches the server using an internal IP. 
// Server looks at X-Forwarded-For header or X-Real-Ip header or request.RemoteAddr (in that order) to get the client IP. repeated ServerAddressByClientCIDR serverAddressByClientCIDRs = 4; } // APIGroupList is a list of APIGroup, to allow clients to discover the API at // /apis. message APIGroupList { // groups is a list of APIGroup. repeated APIGroup groups = 1; } // APIResource specifies the name of a resource and whether it is namespaced. message APIResource { // name is the name of the resource. optional string name = 1; // namespaced indicates if a resource is namespaced or not. optional bool namespaced = 2; // kind is the kind for the resource (e.g. 'Foo' is the kind for a resource 'foo') optional string kind = 3; // verbs is a list of supported kube verbs (this includes get, list, watch, create, // update, patch, delete, deletecollection, and proxy) optional Verbs verbs = 4; // shortNames is a list of suggested short names of the resource. repeated string shortNames = 5; } // APIResourceList is a list of APIResource, it is used to expose the name of the // resources supported in a specific group and version, and if the resource // is namespaced. message APIResourceList { // groupVersion is the group and version this APIResourceList is for. optional string groupVersion = 1; // resources contains the name of the resources and if they are namespaced. repeated APIResource resources = 2; } // APIVersions lists the versions that are available, to allow clients to // discover the API at /api, which is the root path of the legacy v1 API. // // +protobuf.options.(gogoproto.goproto_stringer)=false message APIVersions { // versions are the api versions that are available. repeated string versions = 1; // a map of client CIDR to server address that is serving this group. // This is to help clients reach servers in the most network-efficient way possible. // Clients can use the appropriate server address as per the CIDR that they match. 
// In case of multiple matches, clients should use the longest matching CIDR. // The server returns only those CIDRs that it thinks that the client can match. // For example: the master will return an internal IP CIDR only, if the client reaches the server using an internal IP. // Server looks at X-Forwarded-For header or X-Real-Ip header or request.RemoteAddr (in that order) to get the client IP. repeated ServerAddressByClientCIDR serverAddressByClientCIDRs = 2; } // DeleteOptions may be provided when deleting an API object. message DeleteOptions { // The duration in seconds before the object should be deleted. Value must be non-negative integer. // The value zero indicates delete immediately. If this value is nil, the default grace period for the // specified type will be used. // Defaults to a per object value if not specified. zero means delete immediately. // +optional optional int64 gracePeriodSeconds = 1; // Must be fulfilled before a deletion is carried out. If not possible, a 409 Conflict status will be // returned. // +optional optional Preconditions preconditions = 2; // Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. // Should the dependent objects be orphaned. If true/false, the "orphan" // finalizer will be added to/removed from the object's finalizers list. // Either this field or PropagationPolicy may be set, but not both. // +optional optional bool orphanDependents = 3; // Whether and how garbage collection will be performed. // Either this field or OrphanDependents may be set, but not both. // The default policy is decided by the existing finalizer set in the // metadata.finalizers and the resource-specific default policy. // +optional optional string propagationPolicy = 4; } // Duration is a wrapper around time.Duration which supports correct // marshaling to YAML and JSON. In particular, it marshals into strings, which // can be used as map keys in json. 
message Duration { optional int64 duration = 1; } // ExportOptions is the query options to the standard REST get call. message ExportOptions { // Should this value be exported. Export strips fields that a user can not specify. optional bool export = 1; // Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'. optional bool exact = 2; } // GetOptions is the standard query options to the standard REST get call. message GetOptions { // When specified: // - if unset, then the result is returned from remote storage based on quorum-read flag; // - if it's 0, then we simply return what we currently have in cache, no guarantee; // - if set to non zero, then the result is at least as fresh as given rv. optional string resourceVersion = 1; } // GroupKind specifies a Group and a Kind, but does not force a version. This is useful for identifying // concepts during lookup stages without having partially valid types // // +protobuf.options.(gogoproto.goproto_stringer)=false message GroupKind { optional string group = 1; optional string kind = 2; } // GroupResource specifies a Group and a Resource, but does not force a version. This is useful for identifying // concepts during lookup stages without having partially valid types // // +protobuf.options.(gogoproto.goproto_stringer)=false message GroupResource { optional string group = 1; optional string resource = 2; } // GroupVersion contains the "group" and the "version", which uniquely identifies the API. // // +protobuf.options.(gogoproto.goproto_stringer)=false message GroupVersion { optional string group = 1; optional string version = 2; } // GroupVersion contains the "group/version" and "version" string of a version. // It is made a struct to keep extensibility. message GroupVersionForDiscovery { //
{ "pile_set_name": "Github" }
// // Generated by class-dump 3.5 (64 bit). // // class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by Steve Nygard. // #import "CBaseContact.h" #import "WCDBCoding.h" @class EnterpriseRoomData, NSString; @interface CEnterpriseContact : CBaseContact <WCDBCoding> { _Bool m_bHeadImageUpdateFlag; _Bool m_bUpdateFlag; unsigned int m_uiUserFlag; unsigned int m_uiContactType; NSString *m_nsContactDisplayName; unsigned long long m_uiContactVer; NSString *m_nsProfileJumpUrl; NSString *m_nsAddMemberUrl; EnterpriseRoomData *m_oRoomData; NSString *m_nsBrandUserName; long long m___rowID; } + (id)contactFromBizChatUser:(id)arg1 brandUserName:(id)arg2; + (const basic_string_a490aa4c *)getWCDBPrimaryColumnName; + (const struct WCDBIndexHelper *)getWCDBIndexArray; + (unsigned long long)getWCDBIndexArrayCount; + (const map_0e718273 *)getFileValueTagIndexMap; + (id)getFileValueTypeTable; + (const map_0e718273 *)getPackedValueTagIndexMap; + (id)getPackedValueTypeTable; + (const map_7a576766 *)getValueNameIndexMap; + (id)getValueTable; + (id)dummyObject; @property(nonatomic) long long __rowID; // @synthesize __rowID=m___rowID; @property(nonatomic) _Bool m_bUpdateFlag; // @synthesize m_bUpdateFlag; @property(retain, nonatomic) NSString *m_nsBrandUserName; // @synthesize m_nsBrandUserName; @property(nonatomic) _Bool m_bHeadImageUpdateFlag; // @synthesize m_bHeadImageUpdateFlag; @property(retain, nonatomic) EnterpriseRoomData *m_oRoomData; // @synthesize m_oRoomData; @property(nonatomic) unsigned int m_uiContactType; // @synthesize m_uiContactType; @property(nonatomic) unsigned int m_uiUserFlag; // @synthesize m_uiUserFlag; @property(retain, nonatomic) NSString *m_nsAddMemberUrl; // @synthesize m_nsAddMemberUrl; @property(retain, nonatomic) NSString *m_nsProfileJumpUrl; // @synthesize m_nsProfileJumpUrl; @property(nonatomic) unsigned long long m_uiContactVer; // @synthesize m_uiContactVer; @property(retain, nonatomic) NSString *m_nsContactDisplayName; // @synthesize 
m_nsContactDisplayName; - (void).cxx_destruct; - (_Bool)isContactTop; - (_Bool)isFavorite; - (_Bool)isSelf; - (_Bool)isChatStatusNotifyOpen; - (_Bool)isChatroom; - (const map_0e718273 *)getValueTagIndexMap; - (id)getValueTypeTable; - (const WCDBCondition_d7690721 *)db_m_bUpdateFlag; - (const WCDBCondition_c6db074e *)db_m_uiDraftTime; - (const WCDBCondition_22fabacd *)db_m_nsDraft; - (const WCDBCondition_22fabacd *)db_m_nsAtUserList; - (const WCDBCondition_22fabacd *)db_m_nsBrandUserName; - (const WCDBCondition_d7690721 *)db_m_bHeadImageUpdateFlag; - (const WCDBCondition_8dd2b00c *)db_m_oRoomData; - (const WCDBCondition_c6db074e *)db_m_uiContactType; - (const WCDBCondition_c6db074e *)db_m_uiUserFlag; - (const WCDBCondition_22fabacd *)db_m_nsAddMemberUrl; - (const WCDBCondition_22fabacd *)db_m_nsHeadHDImgUrl; - (const WCDBCondition_22fabacd *)db_m_nsProfileJumpUrl; - (const WCDBCondition_7786cbb5 *)db_m_uiContactVer; - (const WCDBCondition_22fabacd *)db_m_nsContactDisplayName; - (const WCDBCondition_22fabacd *)db_m_nsUsrName; // Remaining properties @property(readonly, copy) NSString *debugDescription; @property(readonly, copy) NSString *description; @property(readonly) unsigned long long hash; @property(retain, nonatomic) NSString *m_nsAtUserList; @property(retain, nonatomic) NSString *m_nsDraft; @property(retain, nonatomic) NSString *m_nsHeadHDImgUrl; @property(retain, nonatomic) NSString *m_nsUsrName; @property(nonatomic) unsigned int m_uiDraftTime; @property(readonly) Class superclass; @end
{ "pile_set_name": "Github" }
var rules = require('../../../../lib/rules/util').rules; module.exports = function(req, res) { rules.disableDefault(); res.json({ec: 0, em: 'success', defaultRulesIsDisabled: rules.defaultRulesIsDisabled(), list: rules.getSelectedList()}); };
{ "pile_set_name": "Github" }
<?php
/**
 * Magento
 *
 * NOTICE OF LICENSE
 *
 * This source file is subject to the Open Software License (OSL 3.0)
 * that is bundled with this package in the file LICENSE.txt.
 * It is also available through the world-wide-web at this URL:
 * http://opensource.org/licenses/osl-3.0.php
 * If you did not receive a copy of the license and are unable to
 * obtain it through the world-wide-web, please send an email
 * to license@magento.com so we can send you a copy immediately.
 *
 * DISCLAIMER
 *
 * Do not edit or add to this file if you wish to upgrade Magento to newer
 * versions in the future. If you wish to customize Magento for your
 * needs please refer to http://www.magento.com for more information.
 *
 * @category    Mage
 * @package     Mage_Api
 * @copyright  Copyright (c) 2006-2020 Magento, Inc. (http://www.magento.com)
 * @license    http://opensource.org/licenses/osl-3.0.php  Open Software License (OSL 3.0)
 */

/**
 * API ACL rule model: one record binding a role to a resource/privilege.
 *
 * @method Mage_Api_Model_Resource_Rules _getResource()
 * @method Mage_Api_Model_Resource_Rules getResource()
 * @method int getRoleId()
 * @method Mage_Api_Model_Rules setRoleId(int $value)
 * @method string getResourceId()
 * @method Mage_Api_Model_Rules setResourceId(string $value)
 * @method string getPrivileges()
 * @method Mage_Api_Model_Rules setPrivileges(string $value)
 * @method int getAssertId()
 * @method Mage_Api_Model_Rules setAssertId(int $value)
 * @method string getRoleType()
 * @method Mage_Api_Model_Rules setRoleType(string $value)
 * @method string getPermission()
 * @method Mage_Api_Model_Rules setPermission(string $value)
 *
 * @category    Mage
 * @package     Mage_Api
 * @author      Magento Core Team <core@magentocommerce.com>
 */
class Mage_Api_Model_Rules extends Mage_Core_Model_Abstract
{
    /**
     * Bind this model to the 'api/rules' resource model.
     */
    protected function _construct()
    {
        $this->_init('api/rules');
    }

    /**
     * Persist this rule via the resource model's update routine.
     *
     * @return Mage_Api_Model_Rules
     */
    public function update()
    {
        $this->getResource()->update($this);
        return $this;
    }

    /**
     * Collection of API permissions (resource model 'api/permissions_collection').
     *
     * @return Mage_Core_Model_Resource_Db_Collection_Abstract
     */
    public function getCollection()
    {
        return Mage::getResourceModel('api/permissions_collection');
    }

    /**
     * Persist role/rule relations via the resource model.
     *
     * @return Mage_Api_Model_Rules
     */
    public function saveRel()
    {
        $this->getResource()->saveRel($this);
        return $this;
    }
}
{ "pile_set_name": "Github" }
function [improvedRxns, intermediateSlns] = analyzeGCdesign(modelRed, selectedRxns, target, deletions, maxKOs, objFunction, delPenalty, intermediateSlns) % Analyzes results with replacement knockouts % should get closer to local maxima. Must have num `KOs` > 1. % % USAGE: % % [improvedRxns, intermediateSlns] = analyzeGCdesign(modelRed, selectedRxns, target, deletions, maxKOs, objFunction, delPenalty, intermediateSlns) % % INPUTS: % modelRed: reduced model % selectedRxns: selected reaction list from the reduced model % target: exchange `rxn` to optimize % deletions: initial set of `KO` `rxns` (must have at least 1 `rxn`) % % OPTIONAL INPUTS: % maxKOs: maximum number of `rxn` `KOs` to allow (Default = 10) % objFunction: pick an objective function to use (Default = 1): % % 1. `obj = maxRate` (yield) % 2. `obj = growth*maxRate` (SSP) % 3. `obj = maxRate*(delPenalty^numDels)` (yield with KO penalty) % 4. `obj = growth*maxRate*(delPenalty^numDels)` (SSP with KO penalty) % 5. `obj = maxRate*(slope^(-1))` (GC_yield) % 6. `obj = growth*maxRate*(slope^(-1))` (GC_SSP) % 7. `obj = maxRate*(delPenalty^numDels)*(slope^(-1))` (GC_yield with KO penalty) % 8. `obj = growth*maxRate*(delPenalty^numDels)*(slope^(-1))` (GC_SSP with KO penalty) % delPenalty: penalty on extra `rxn` deletions (Default = .99) % intermediateSlns: Previous set of solutions (Default = deletions) % % OUTPUTS: % improvedRxns: the `KO` `rxns` for an improved strain % intermediateSlns: all the sets of best `KO` `rxns` that are picked before the % final set is reached % .. 
Authors: % - Jeff Orth 7/25/07 % - Richard Que 1/19/10 Replaced try/catch blocks if (nargin < 5) maxKOs = 10; end if (nargin < 6) objFunction = 1; end if (nargin < 7) delPenalty = .99; end if (nargin < 8) intermediateSlns = {deletions}; end %set the objective function switch objFunction case 1 objectiveFunction = 'maxRate'; hasSlope = false; case 2 objectiveFunction = 'growth*maxRate'; hasSlope = false; case 3 objectiveFunction = 'maxRate*(delPenalty^numDels)'; hasSlope = false; case 4 objectiveFunction = 'growth*maxRate*(delPenalty^numDels)'; hasSlope = false; case 5 objectiveFunction = 'maxRate*(slope^(-1))'; hasSlope = true; case 6 objectiveFunction = 'growth*maxRate*(slope^(-1))'; hasSlope = true; case 7 objectiveFunction = 'maxRate*(delPenalty^numDels)*(slope^(-1))'; hasSlope = true; case 8 objectiveFunction = 'growth*maxRate*(delPenalty^numDels)*(slope^(-1))'; hasSlope = true; end if isempty(deletions) error('no knockout reactions defined') end delArraySize = size(deletions); %make sure deletions list is horizontal if delArraySize(1) > 1 rxns = deletions'; else rxns = deletions; end BOF = modelRed.rxns(modelRed.c==1); %get biomass objective function modelKO = changeRxnBounds(modelRed,rxns,0,'b'); FBAsol1 = optimizeCbModel(modelKO,'max',0,true); %find max growth rate of strain if FBAsol1.stat>0 modelKOfixed = changeRxnBounds(modelKO,BOF,FBAsol1.f-1e-6,'l'); %fix the growth rate modelKOfixed = changeObjective(modelKOfixed,target); %set target as the objective FBAsol2 = optimizeCbModel(modelKOfixed,'min',0,true); %find minimum target rate at this growth rate growth = FBAsol1.f; maxRate = FBAsol2.f; numDels = length(rxns); if hasSlope %only calculate these if the obj function includes slope modelTarget = changeObjective(modelKO,target); %set target as the objective FBAsol4 = optimizeCbModel(modelTarget,'min',0,true); %find min production rate modelTargetFixed = changeRxnBounds(modelKO,target,FBAsol4.f,'b'); %fix production to minimum FBAsol5 = 
optimizeCbModel(modelTargetFixed,'max',0,true); %find max growth at min production minProdRate = FBAsol4.f; maxGrowthMinRate = FBAsol5.f; if growth ~= maxGrowthMinRate slope = (maxRate-minProdRate)/(growth-maxGrowthMinRate); else slope = 1; %don't consider slope if div by 0 end end objective = eval(objectiveFunction); bestObjective = objective bestRxns = rxns; % if the initial reactions are lethal else bestObjective = 0 bestRxns = rxns; end % loop through each KO rxn and replace with every rxn from selectedRxns to % search for a possible improvement showprogress(0, 'improving knockout design'); for i = 1:length(rxns)+1 bestObjective2 = bestObjective; bestRxns2 = bestRxns; for j = 1:length(selectedRxns)+1 showprogress((j+(i-1)*length(selectedRxns))/((length(rxns)+1)*(length(selectedRxns)+1))); newRxns = rxns; if (i==length(rxns)+1)&&(j==length(selectedRxns)+1) %don't do anything at the very end elseif j ~= length(selectedRxns)+1 newRxns{i} = selectedRxns{j}; %replace rxn with different one elseif i == 1 %or else remove one of the rxns newRxns = rxns(2:length(rxns)); elseif i == length(rxns) newRxns = rxns(1:length(rxns)-1); else newRxns = cat(2,rxns(1:i-1),rxns(i+1:length(rxns))); end if length(newRxns) <= maxKOs %limit the total number of knockouts modelKO = changeRxnBounds(modelRed,newRxns,0,'b'); FBAsol1 = optimizeCbModel(modelKO,'max',0,true); %find max growth rate of strain if FBAsol1.stat>0 modelKOfixed = changeRxnBounds(modelKO,BOF,FBAsol1.f-1e-6,'l'); %fix the growth rate modelKOfixed = changeObjective(modelKOfixed,target); %set target as the objective FBAsol2 = optimizeCbModel(modelKOfixed,'min',0,true); %find minimum target rate at this growth rate FBAsol3 = optimizeCbModel(modelKOfixed,'max',0,true); %find maximum target rate at this growth rate growth =
{ "pile_set_name": "Github" }
-- Set the profession gate (RequiredSkillID) and minimum skill level
-- (RequiredSkillPoints) on quest_template_addon rows for profession quests.
-- NOTE(review): ID 48752 is updated twice with identical values (harmless
-- duplicate — consider removing one). ID 52050 is updated twice with
-- DIFFERENT values (2565/150 then 2549/50); the later statement wins —
-- confirm which pair is intended.
UPDATE quest_template_addon SET RequiredSkillID = 2557, RequiredSkillPoints = 50 WHERE (ID = 52223);
UPDATE quest_template_addon SET RequiredSkillID = 2557, RequiredSkillPoints = 50 WHERE (ID = 52225);
UPDATE quest_template_addon SET RequiredSkillID = 2557, RequiredSkillPoints = 100 WHERE (ID = 52227);
UPDATE quest_template_addon SET RequiredSkillID = 2557, RequiredSkillPoints = 150 WHERE (ID = 52226);
UPDATE quest_template_addon SET RequiredSkillID = 2557, RequiredSkillPoints = 150 WHERE (ID = 52228);
UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 50 WHERE (ID = 48752);
UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 50 WHERE (ID = 48764);
UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 150 WHERE (ID = 48767);
UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 150 WHERE (ID = 48770);
-- duplicate of the ID 48752 statement above
UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 50 WHERE (ID = 48752);
UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 150 WHERE (ID = 48761);
UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 50 WHERE (ID = 51568);
UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 150 WHERE (ID = 48768);
UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 150 WHERE (ID = 52044);
UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 150 WHERE (ID = 52049);
-- overwritten by the ID 52050 statement below — verify intent
UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 150 WHERE (ID = 52050);
UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 150 WHERE (ID = 51380);
UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 50 WHERE (ID = 52050);
UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 150 WHERE (ID = 48754);
UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 150 WHERE (ID = 48755);
UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 50 WHERE (ID = 48756);
UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 150 WHERE (ID = 48757);
UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 150 WHERE (ID = 48769);
UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 50 WHERE (ID = 51016);
UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 50 WHERE (ID = 51313);
UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 150 WHERE (ID = 51361);
UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 50 WHERE (ID = 51365);
UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 50 WHERE (ID = 51962);
UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 130 WHERE (ID = 51964);
UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 50 WHERE (ID = 51965);
UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 50 WHERE (ID = 52014);
UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 135 WHERE (ID = 52015);
UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 130 WHERE (ID = 52016);
UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 150 WHERE (ID = 52017);
UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 150 WHERE (ID = 52055);
UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 130 WHERE (ID = 51971);
UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 130 WHERE (ID = 52046);
UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 150 WHERE (ID = 52053);
UPDATE quest_template_addon SET RequiredSkillID = 2557, RequiredSkillPoints = 60 WHERE (ID = 51575);
UPDATE quest_template_addon SET RequiredSkillID = 2557, RequiredSkillPoints = 100 WHERE (ID = 52213);
UPDATE quest_template_addon SET RequiredSkillID = 2557, RequiredSkillPoints = 100 WHERE (ID = 52216);
UPDATE quest_template_addon SET RequiredSkillID = 2557, RequiredSkillPoints = 150 WHERE (ID = 52217);
UPDATE quest_template_addon SET RequiredSkillID = 2557, RequiredSkillPoints = 60 WHERE (ID = 52214);
UPDATE quest_template_addon SET RequiredSkillID = 2557, RequiredSkillPoints = 150 WHERE (ID = 52215);
UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 50 WHERE (ID = 51230);
UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 150 WHERE (ID = 51243);
UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 50 WHERE (ID = 51448);
UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 150 WHERE (ID = 51452);
UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 50 WHERE (ID = 51464);
UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 150 WHERE (ID = 51478);
UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 150 WHERE (ID = 51481);
UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 50 WHERE (ID = 51482);
UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 50 WHERE (ID = 51498);
UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 150 WHERE (ID = 51503);
{ "pile_set_name": "Github" }
/// @ref simd
/// @file glm/simd/integer.h

#pragma once

#if GLM_ARCH & GLM_ARCH_SSE2_BIT

// SSE2 bit-interleave ("Morton code" style expansion) of the input vector.
// Mirrors the scalar mask-and-shift ladder shown in the commented-out
// glm::uint64 reference code: at each step the bits are shifted left by
// half the previous gap, OR-ed with the original, and masked so that each
// bit ends up separated by one zero bit.
// NOTE(review): operates on the 128-bit register as a whole; the first two
// steps use byte shifts (_mm_slli_si128) and the rest use per-lane 32-bit
// shifts (_mm_slli_epi32) — presumably matching the scalar
// bitfieldInterleave; confirm against glm's scalar implementation.
GLM_FUNC_QUALIFIER glm_uvec4 glm_i128_interleave(glm_uvec4 x)
{
	// Masks for the classic interleave ladder: 16-, 8-, 4-, 2-, 1-bit groups.
	glm_uvec4 const Mask4 = _mm_set1_epi32(0x0000FFFF);
	glm_uvec4 const Mask3 = _mm_set1_epi32(0x00FF00FF);
	glm_uvec4 const Mask2 = _mm_set1_epi32(0x0F0F0F0F);
	glm_uvec4 const Mask1 = _mm_set1_epi32(0x33333333);
	glm_uvec4 const Mask0 = _mm_set1_epi32(0x55555555);

	glm_uvec4 Reg1;
	glm_uvec4 Reg2;

	// REG1 = x;
	// REG2 = y;
	//Reg1 = _mm_unpacklo_epi64(x, y);
	Reg1 = x;

	// Step 1: spread 16-bit groups apart (byte shift across the full register).
	//REG1 = ((REG1 << 16) | REG1) & glm::uint64(0x0000FFFF0000FFFF);
	//REG2 = ((REG2 << 16) | REG2) & glm::uint64(0x0000FFFF0000FFFF);
	Reg2 = _mm_slli_si128(Reg1, 2);
	Reg1 = _mm_or_si128(Reg2, Reg1);
	Reg1 = _mm_and_si128(Reg1, Mask4);

	// Step 2: spread 8-bit groups apart (byte shift).
	//REG1 = ((REG1 << 8) | REG1) & glm::uint64(0x00FF00FF00FF00FF);
	//REG2 = ((REG2 << 8) | REG2) & glm::uint64(0x00FF00FF00FF00FF);
	Reg2 = _mm_slli_si128(Reg1, 1);
	Reg1 = _mm_or_si128(Reg2, Reg1);
	Reg1 = _mm_and_si128(Reg1, Mask3);

	// Step 3: spread 4-bit groups apart (per-lane 32-bit shift from here on).
	//REG1 = ((REG1 << 4) | REG1) & glm::uint64(0x0F0F0F0F0F0F0F0F);
	//REG2 = ((REG2 << 4) | REG2) & glm::uint64(0x0F0F0F0F0F0F0F0F);
	Reg2 = _mm_slli_epi32(Reg1, 4);
	Reg1 = _mm_or_si128(Reg2, Reg1);
	Reg1 = _mm_and_si128(Reg1, Mask2);

	// Step 4: spread 2-bit groups apart.
	//REG1 = ((REG1 << 2) | REG1) & glm::uint64(0x3333333333333333);
	//REG2 = ((REG2 << 2) | REG2) & glm::uint64(0x3333333333333333);
	Reg2 = _mm_slli_epi32(Reg1, 2);
	Reg1 = _mm_or_si128(Reg2, Reg1);
	Reg1 = _mm_and_si128(Reg1, Mask1);

	// Step 5: spread single bits apart.
	//REG1 = ((REG1 << 1) | REG1) & glm::uint64(0x5555555555555555);
	//REG2 = ((REG2 << 1) | REG2) & glm::uint64(0x5555555555555555);
	Reg2 = _mm_slli_epi32(Reg1, 1);
	Reg1 = _mm_or_si128(Reg2, Reg1);
	Reg1 = _mm_and_si128(Reg1, Mask0);

	// Combine: OR in the second operand shifted into the odd bit positions.
	//return REG1 | (REG2 << 1);
	Reg2 = _mm_slli_epi32(Reg1, 1);
	Reg2 = _mm_srli_si128(Reg2, 8);
	Reg1 = _mm_or_si128(Reg1, Reg2);

	return Reg1;
}

// Same interleave ladder as glm_i128_interleave, but the two source values
// are supplied separately and packed into one register first via
// _mm_unpacklo_epi64 (low 64 bits of x and of y side by side).
GLM_FUNC_QUALIFIER glm_uvec4 glm_i128_interleave2(glm_uvec4 x, glm_uvec4 y)
{
	// Same mask ladder as above.
	glm_uvec4 const Mask4 = _mm_set1_epi32(0x0000FFFF);
	glm_uvec4 const Mask3 = _mm_set1_epi32(0x00FF00FF);
	glm_uvec4 const Mask2 = _mm_set1_epi32(0x0F0F0F0F);
	glm_uvec4 const Mask1 = _mm_set1_epi32(0x33333333);
	glm_uvec4 const Mask0 = _mm_set1_epi32(0x55555555);

	glm_uvec4 Reg1;
	glm_uvec4 Reg2;

	// REG1 = x;
	// REG2 = y;
	// Pack the low 64-bit halves of x and y into a single register.
	Reg1 = _mm_unpacklo_epi64(x, y);

	// Step 1: spread 16-bit groups apart.
	//REG1 = ((REG1 << 16) | REG1) & glm::uint64(0x0000FFFF0000FFFF);
	//REG2 = ((REG2 << 16) | REG2) & glm::uint64(0x0000FFFF0000FFFF);
	Reg2 = _mm_slli_si128(Reg1, 2);
	Reg1 = _mm_or_si128(Reg2, Reg1);
	Reg1 = _mm_and_si128(Reg1, Mask4);

	// Step 2: spread 8-bit groups apart.
	//REG1 = ((REG1 << 8) | REG1) & glm::uint64(0x00FF00FF00FF00FF);
	//REG2 = ((REG2 << 8) | REG2) & glm::uint64(0x00FF00FF00FF00FF);
	Reg2 = _mm_slli_si128(Reg1, 1);
	Reg1 = _mm_or_si128(Reg2, Reg1);
	Reg1 = _mm_and_si128(Reg1, Mask3);

	// Step 3: spread 4-bit groups apart.
	//REG1 = ((REG1 << 4) | REG1) & glm::uint64(0x0F0F0F0F0F0F0F0F);
	//REG2 = ((REG2 << 4) | REG2) & glm::uint64(0x0F0F0F0F0F0F0F0F);
	Reg2 = _mm_slli_epi32(Reg1, 4);
	Reg1 = _mm_or_si128(Reg2, Reg1);
	Reg1 = _mm_and_si128(Reg1, Mask2);

	// Step 4: spread 2-bit groups apart.
	//REG1 = ((REG1 << 2) | REG1) & glm::uint64(0x3333333333333333);
	//REG2 = ((REG2 << 2) | REG2) & glm::uint64(0x3333333333333333);
	Reg2 = _mm_slli_epi32(Reg1, 2);
	Reg1 = _mm_or_si128(Reg2, Reg1);
	Reg1 = _mm_and_si128(Reg1, Mask1);

	// Step 5: spread single bits apart.
	//REG1 = ((REG1 << 1) | REG1) & glm::uint64(0x5555555555555555);
	//REG2 = ((REG2 << 1) | REG2) & glm::uint64(0x5555555555555555);
	Reg2 = _mm_slli_epi32(Reg1, 1);
	Reg1 = _mm_or_si128(Reg2, Reg1);
	Reg1 = _mm_and_si128(Reg1, Mask0);

	// Combine the two interleaved halves into the final result.
	//return REG1 | (REG2 << 1);
	Reg2 = _mm_slli_epi32(Reg1, 1);
	Reg2 = _mm_srli_si128(Reg2, 8);
	Reg1 = _mm_or_si128(Reg1, Reg2);

	return Reg1;
}
#endif//GLM_ARCH & GLM_ARCH_SSE2_BIT
{ "pile_set_name": "Github" }
// 文件 // 有时候别人对你的好要记得 class IO{ }
{ "pile_set_name": "Github" }
/* * Copyright 2010-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ #import "EC2Response.h" /** * Authorize Security Group Ingress */ @interface EC2AuthorizeSecurityGroupIngressResponse:EC2Response { } @end
{ "pile_set_name": "Github" }
/*
 * Copyright 2008 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.javascript.jscomp;

/**
 * Tests for {@link RenameLabels}.
 *
 * Each test() call compares the compiler output for the first source
 * snippet against the expected second snippet.
 */
public class RenameLabelsTest extends CompilerTestCase {

  @Override
  protected CompilerPass getProcessor(Compiler compiler) {
    // The pass under test.
    return new RenameLabels(compiler);
  }

  /**
   * Labels inside function bodies: unreferenced labels are removed and
   * referenced ones are renamed, with inner functions renamed
   * independently of their enclosing scope.
   */
  public void testRenameInFunction() {
    // Unreferenced label is dropped entirely.
    test("function x(){ Foo:a(); }",
         "function x(){ a(); }");

    // Referenced label is renamed to a short name.
    test("function x(){ Foo:{ a(); break Foo; } }",
         "function x(){ a:{ a(); break a; } }");

    // The outer 'Foo' is unreferenced and removed; the inner function's
    // 'Foo' is referenced and renamed within the inner function only.
    test("function x() { " +
           "Foo:{ " +
             "function goo() {" +
               "Foo: {" +
                 "a(); " +
                 "break Foo; " +
               "}" +
             "}" +
           "}" +
         "}",
         "function x(){function goo(){a:{ a(); break a; }}}");

    // Both the outer and the inner 'Foo' are referenced; each scope gets
    // its own renamed label.
    test("function x() { " +
           "Foo:{ " +
             "function goo() {" +
               "Foo: {" +
                 "a(); " +
                 "break Foo; " +
               "}" +
             "}" +
             "break Foo;" +
           "}" +
         "}",
         "function x(){a:{function goo(){a:{ a(); break a; }} break a;}}");
  }

  /**
   * Labels at the top level: removal of unreferenced labels, renaming of
   * referenced ones, and collapsing of redundant multi-label chains.
   */
  public void testRenameGlobals() {
    test("Foo:{a();}", "a();");
    test("Foo:{a(); break Foo;}", "a:{a(); break a;}");
    test("Foo:{Goo:a(); break Foo;}", "a:{a(); break a;}");
    test("Foo:{Goo:while(1){a(); continue Goo; break Foo;}}",
         "a:{b:while(1){a(); continue b;break a;}}");
    test("Foo:Goo:while(1){a(); continue Goo; break Foo;}",
         "a:b:while(1){a(); continue b;break a;}");
    // Chains of labels on one statement keep only the referenced ones.
    test("Foo:Bar:X:{ break Bar; }", "a:{ break a; }");
    test("Foo:Bar:X:{ break Bar; break X; }", "a:b:{ break a; break b;}");
    test("Foo:Bar:X:{ break Bar; break Foo; }", "a:b:{ break b; break a;}");
    test("Foo:while (1){a(); break;}", "while (1){a(); break;}");

    // Remove label that is not referenced.
    test("Foo:{a(); while (1) break;}", "a(); while (1) break;");
  }

  /** Short names may be reused across sibling label scopes. */
  public void testRenameReused() {
    test("foo:{break foo}; foo:{break foo}", "a:{break a};a:{break a}");
  }
}
{ "pile_set_name": "Github" }
# -*- coding: utf-8 -*-
#
# This file is part of EventGhost.
# Copyright © 2005-2020 EventGhost Project <http://www.eventghost.net/>
#
# EventGhost is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 2 of the License, or (at your option)
# any later version.
#
# EventGhost is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along
# with EventGhost. If not, see <http://www.gnu.org/licenses/>.

# NOTE: this module is Python 2 code (old-style `except ..., exc` syntax and
# the `cmp`/`sorted(cmp=...)` idiom below); do not run it under Python 3.

import errno
import os
import time

from docutils.core import publish_parts
from jinja2 import Environment, FileSystemLoader
from os.path import abspath, join

# Local imports
import builder


class BuildWebsite(builder.Task):
    """Build task that renders the project website pages from Jinja templates."""
    description = "Build website"

    def Setup(self):
        # Only auto-activate when running non-interactively with --sync;
        # with the GUI the user chooses tasks explicitly.
        if self.buildSetup.showGui:
            self.activated = False
        else:
            self.activated = bool(self.buildSetup.args.sync)

    def DoTask(self):
        """Render every menu-tab page into buildSetup.websiteDir."""
        buildSetup = self.buildSetup
        # Order here determines menu order in the rendered pages.
        menuTabs = (HomePage, DocsPage, WikiPage, ForumPage, DownloadPage)
        env = Environment(
            loader=FileSystemLoader(
                abspath(join(buildSetup.dataDir, 'templates'))
            ),
            trim_blocks=True
        )
        # NOTE(review): these assignments *replace* the Environment's globals
        # and filters dicts (including Jinja's built-in filters) rather than
        # updating them — confirm the templates rely only on 'rst2html'.
        env.globals = {
            "files": GetSetupFiles(join(buildSetup.websiteDir, "downloads")),
            "MENU_TABS": menuTabs,
        }
        env.filters = {'rst2html': rst2html}
        for page in menuTabs:
            path = os.path.abspath(join(buildSetup.websiteDir, page.outfile))
            try:
                os.makedirs(os.path.dirname(path))
            except os.error, exc:
                # Ignore "already exists"; re-raise any other OS error.
                if exc.errno != errno.EEXIST:
                    raise
            env.get_template(page.template).stream(CURRENT=page).dump(path)


class FileData(object):
    """Display metadata (name, mtime, size) for one setup file on disk."""

    def __init__(self, path):
        self.path = path
        self.target = os.path.basename(path)
        # Filenames look like "<product>_<version>_..."; the display name is
        # the first two underscore-separated parts joined by a space.
        parts = self.target.split("_")
        self.name = " ".join(parts[:2])
        fileStat = os.stat(path)
        # Modification time formatted like "Jan 01 2020" (UTC).
        self.time = time.strftime("%b %d %Y", time.gmtime(fileStat.st_mtime))
        # Size rendered in megabytes with one decimal place.
        self.size = "%0.1f MB" % (fileStat.st_size / 1024.0 / 1024)


class Page(object):
    """Base class for the static page descriptors below."""

    def __init__(self):
        pass


class HomePage(Page):
    name = "Home"
    target = "/"
    outfile = "index.html"
    template = "home.tmpl"


class DocsPage(Page):
    name = "Documentation"
    target = "/docs/"
    outfile = "css/header_docs.html"
    template = "header_only.tmpl"


class DownloadPage(Page):
    name = "Downloads"
    target = "/downloads/"
    outfile = "downloads/index.html"
    template = "download.tmpl"


class ForumPage(Page):
    name = "Forum"
    target = "/forum/"
    outfile = "css/header_forum.html"
    template = "header_only.tmpl"


class WikiPage(Page):
    name = "Wiki"
    target = "/mediawiki/"
    outfile = "css/header_wiki.html"
    template = "header_only.tmpl"


def GetSetupFiles(srcDir):
    """Return FileData objects for EventGhost setup installers in srcDir,
    sorted newest version first. Returns [] if srcDir does not exist."""
    if not os.path.exists(srcDir):
        return []
    files = []
    for name in os.listdir(srcDir):
        # Only files named like "eventghost_<version>_setup.exe".
        if name.lower().startswith("eventghost_"):
            if name.lower().endswith("_setup.exe"):
                path = join(srcDir, name)
                fileData = FileData(path)
                files.append(fileData)

    def Cmp(x, y):
        # Compare the version component (second "_" field) numerically,
        # stripping any "r" (revision) markers first.
        x = x.target.split("_")[1].replace("r", "").split(".")
        y = y.target.split("_")[1].replace("r", "").split(".")
        x = [int(s) for s in x]
        y = [int(s) for s in y]
        return cmp(x, y)

    # Highest version first.
    return list(reversed(sorted(files, cmp=Cmp)))


def rst2html(rst):
    """Jinja filter: render a reStructuredText string to an HTML fragment."""
    return publish_parts(rst, writer_name="html")["fragment"]
{ "pile_set_name": "Github" }
/* * **************************************************************************** * Cloud Foundry * Copyright (c) [2009-2016] Pivotal Software, Inc. All Rights Reserved. * * This product is licensed to you under the Apache License, Version 2.0 (the "License"). * You may not use this product except in compliance with the License. * * This product includes a number of subcomponents with * separate copyright notices and license terms. Your use of these * subcomponents is subject to the terms and conditions of the * subcomponent's license, as noted in the LICENSE file. * **************************************************************************** */ package org.cloudfoundry.identity.uaa.oauth.jwt; import org.cloudfoundry.identity.uaa.oauth.jwk.JsonWebKey; import org.cloudfoundry.identity.uaa.oauth.jwk.JsonWebKeySet; import org.springframework.security.jwt.crypto.sign.InvalidSignatureException; import org.springframework.security.jwt.crypto.sign.SignatureVerifier; import java.util.ArrayList; import java.util.Collections; import java.util.List; public class ChainedSignatureVerifier implements SignatureVerifier { private final List<SignatureVerifier> delegates; public ChainedSignatureVerifier(JsonWebKeySet<? 
extends JsonWebKey> keys) { if(keys == null || keys.getKeys() == null || keys.getKeys().isEmpty()) { throw new IllegalArgumentException("keys cannot be null or empty"); } List<SignatureVerifier> ds = new ArrayList<>(keys.getKeys().size()); for (JsonWebKey key : keys.getKeys()) { ds.add(new CommonSignatureVerifier(key.getValue())); } delegates = Collections.unmodifiableList(ds); } public ChainedSignatureVerifier(List<SignatureVerifier> delegates) { this.delegates = delegates; } @Override public void verify(byte[] content, byte[] signature) { Exception last = new InvalidSignatureException("No matching keys found."); for (SignatureVerifier delegate : delegates) { try { delegate.verify(content, signature); //success return; } catch (Exception e) { last = e; } } throw (last instanceof RuntimeException) ? (RuntimeException) last : new RuntimeException(last); } @Override public String algorithm() { return null; } }
{ "pile_set_name": "Github" }
/*
 * @(#)DeclarationScanner.java	1.5 04/04/20
 *
 * Copyright (c) 2004, Sun Microsystems, Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * Neither the name of the Sun Microsystems, Inc. nor the names of
 *       its contributors may be used to endorse or promote products derived from
 *       this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
 * IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
 * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
 * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package com.sun.mirror.util;

import com.sun.mirror.declaration.*;

/**
 * A visitor for declarations that scans declarations contained within
 * the given declaration.  For example, when visiting a class, the
 * methods, fields, constructors, and nested types of the class are
 * also visited.
 *
 * <p> To control the processing done on a declaration, users of this
 * class pass in their own visitors for pre and post processing.  The
 * preprocessing visitor is called before the contained declarations
 * are scanned; the postprocessing visitor is called after the
 * contained declarations are scanned.
 *
 * <p> Each visitXxx method follows the same pattern: apply {@code pre}
 * to the declaration, recurse into the declarations it contains (if any),
 * then apply {@code post}.  Leaf declarations simply delegate to a more
 * general visit method.
 *
 * @author Joseph D. Darcy
 * @author Scott Seligman
 * @version 1.5 04/04/20
 * @since 1.5
 */
class DeclarationScanner implements DeclarationVisitor {
    // Visitor applied before a declaration's contents are scanned.
    protected DeclarationVisitor pre;
    // Visitor applied after a declaration's contents are scanned.
    protected DeclarationVisitor post;

    DeclarationScanner(DeclarationVisitor pre, DeclarationVisitor post) {
        this.pre = pre;
        this.post = post;
    }

    // A generic declaration has no contained declarations to scan.
    @Override
    public void visitDeclaration(Declaration d) {
        d.accept(pre);
        d.accept(post);
    }

    // Packages contain classes and interfaces.
    @Override
    public void visitPackageDeclaration(PackageDeclaration d) {
        d.accept(pre);

        for(ClassDeclaration classDecl: d.getClasses()) {
            classDecl.accept(this);
        }

        for(InterfaceDeclaration interfaceDecl: d.getInterfaces()) {
            interfaceDecl.accept(this);
        }

        d.accept(post);
    }

    @Override
    public void visitMemberDeclaration(MemberDeclaration d) {
        visitDeclaration(d);
    }

    // Types contain type parameters, fields, methods, and nested types.
    @Override
    public void visitTypeDeclaration(TypeDeclaration d) {
        d.accept(pre);

        for(TypeParameterDeclaration tpDecl: d.getFormalTypeParameters()) {
            tpDecl.accept(this);
        }

        for(FieldDeclaration fieldDecl: d.getFields()) {
            fieldDecl.accept(this);
        }

        for(MethodDeclaration methodDecl: d.getMethods()) {
            methodDecl.accept(this);
        }

        for(TypeDeclaration typeDecl: d.getNestedTypes()) {
            typeDecl.accept(this);
        }

        d.accept(post);
    }

    // Like visitTypeDeclaration, but classes additionally contain
    // constructors.
    @Override
    public void visitClassDeclaration(ClassDeclaration d) {
        d.accept(pre);

        for(TypeParameterDeclaration tpDecl: d.getFormalTypeParameters()) {
            tpDecl.accept(this);
        }

        for(FieldDeclaration fieldDecl: d.getFields()) {
            fieldDecl.accept(this);
        }

        for(MethodDeclaration methodDecl: d.getMethods()) {
            methodDecl.accept(this);
        }

        for(TypeDeclaration typeDecl: d.getNestedTypes()) {
            typeDecl.accept(this);
        }

        for(ConstructorDeclaration ctorDecl: d.getConstructors()) {
            ctorDecl.accept(this);
        }

        d.accept(post);
    }

    @Override
    public void visitEnumDeclaration(EnumDeclaration d) {
        visitClassDeclaration(d);
    }

    @Override
    public void visitInterfaceDeclaration(InterfaceDeclaration d) {
        visitTypeDeclaration(d);
    }

    @Override
    public void visitAnnotationTypeDeclaration(AnnotationTypeDeclaration d) {
        visitInterfaceDeclaration(d);
    }

    @Override
    public void visitFieldDeclaration(FieldDeclaration d) {
        visitMemberDeclaration(d);
    }

    @Override
    public void visitEnumConstantDeclaration(EnumConstantDeclaration d) {
        visitFieldDeclaration(d);
    }

    // Executables (methods/constructors) contain type parameters and
    // formal parameters.
    @Override
    public void visitExecutableDeclaration(ExecutableDeclaration d) {
        d.accept(pre);

        for(TypeParameterDeclaration tpDecl: d.getFormalTypeParameters()) {
            tpDecl.accept(this);
        }

        for(ParameterDeclaration pDecl: d.getParameters()) {
            pDecl.accept(this);
        }

        d.accept(post);
    }

    @Override
    public void visitConstructorDeclaration(ConstructorDeclaration d) {
        visitExecutableDeclaration(d);
    }

    @Override
    public void visitMethodDeclaration(MethodDeclaration d) {
        visitExecutableDeclaration(d);
    }

    @Override
    public void visitAnnotationTypeElementDeclaration(
            AnnotationTypeElementDeclaration d) {
        visitMethodDeclaration(d);
    }

    @Override
    public void visitParameterDeclaration(ParameterDeclaration d) {
        visitDeclaration(d);
    }

    @Override
    public void visitTypeParameterDeclaration(TypeParameterDeclaration d) {
        visitDeclaration(d);
    }
}
{ "pile_set_name": "Github" }
#ifndef OPENTISSUE_CORE_CONTAINERS_GRID_UTIL_GRID_BISECTION_LINE_SEARCH_H #define OPENTISSUE_CORE_CONTAINERS_GRID_UTIL_GRID_BISECTION_LINE_SEARCH_H // // OpenTissue Template Library // - A generic toolbox for physics-based modeling and simulation. // Copyright (C) 2008 Department of Computer Science, University of Copenhagen. // // OTTL is licensed under zlib: http://opensource.org/licenses/zlib-license.php // #include <OpenTissue/configuration.h> #include <OpenTissue/core/containers/grid/util/grid_gradient_at_point.h> namespace OpenTissue { namespace grid { /** * Grid Bisection Line Search * * @param q_a * @param q_b * @param phi * @param maximize If true the bisection method tries to find the maximimum value between q_a and q_b otherwise it tries to find the minimum value. * * @return The point that maximizes the value of phi on the line between q_a and q_b. */ template<typename vector3_type,typename grid_type> inline vector3_type bisection_line_search(vector3_type q_a, vector3_type q_b, grid_type & phi, bool maximize = true) { using std::fabs; typedef typename vector3_type::value_type real_type; real_type const precision = 10e-5;//OpenTissue::math::working_precision<real_type>(100); real_type const too_small_interval = sqr_length(q_b-q_a)*0.0001; //--- 1/100'th of distance! vector3_type n = unit(gradient_at_point(phi,q_a)); vector3_type r; real_type const sign = maximize? 1.0 : -1.0; bool forever = true; do { vector3_type q_c = (q_a + q_b)*.5; if( sqr_length(q_a - q_b) < too_small_interval ) { r = q_c; break; } vector3_type dir = unit(gradient_at_point(phi,q_c)); real_type n_dot_dir = inner_prod(n , dir)*sign; if(fabs(n_dot_dir) < precision) { r = q_c; break; } if(n_dot_dir > 0) { q_a = q_c; } if(n_dot_dir < 0) { q_b = q_c; } } while (forever); return r; } } // namespace grid } // namespace OpenTissue // OPENTISSUE_CORE_CONTAINERS_GRID_UTIL_GRID_BISECTION_LINE_SEARCH_H #endif
{ "pile_set_name": "Github" }
package org.javaee8.jsonp.merge; import javax.json.Json; import javax.json.JsonMergePatch; import javax.json.JsonObject; import javax.json.JsonValue; import org.jboss.arquillian.container.test.api.Deployment; import org.jboss.arquillian.junit.Arquillian; import org.jboss.shrinkwrap.api.ShrinkWrap; import org.jboss.shrinkwrap.api.spec.JavaArchive; import static org.junit.Assert.assertTrue; import org.junit.Test; import org.junit.runner.RunWith; /** * Class that tests and demonstrates the JSON-P 1.1 Merge Operations. * @author Andrew Pielage */ @RunWith(Arquillian.class) public class JsonpMergeTest { // Create a JsonObject with some values to be used in each test private static final JsonObject json = Json.createObjectBuilder() .add("Wibbly", "Wobbly") .add("Replaced", false) .add("Lexicon", Json.createArrayBuilder() .add("Wibbles") .add("Wobbles") .build()) .add("Nested", Json.createObjectBuilder() .add("Birdie", "Wordie") .add("Bestiary", Json.createArrayBuilder() .add("Drowner") .add("Werewolf") .add("Chimera") .build()) .build()) .build(); @Deployment public static JavaArchive createDeployment() { // Create a JavaArchive to deploy JavaArchive jar = ShrinkWrap.create(JavaArchive.class); // Print out directory contents System.out.println(jar.toString(true)); // Return Arquillian Test Archive for application server return jar; } /** * Test that the JSON Merge operation replaces values as intended. 
*/ @Test public void replaceTest() { // Create a JSON object that we'll merge into the class variable, replacing object members and array values JsonObject jsonToMerge = Json.createObjectBuilder() .add("Wibbly", "Bibbly") .add("Replaced", "Yes") .add("Lexicon", Json.createArrayBuilder() .add("Wibbles") .add("Bibbles") .build()) .add("Nested", Json.createObjectBuilder() .add("Bestiary", Json.createArrayBuilder() .add("Slyzard") .add("Dragon") .add("Ekimmara") .build()) .build()) .build(); // Create a merge patch and apply it JsonMergePatch mergePatch = Json.createMergePatch(jsonToMerge); JsonValue mergedJson = mergePatch.apply(json); // Print out to more easily see what we've done System.out.println("JsonpMergeTest.replaceTest: Before Merge: " + json); System.out.println("JsonpMergeTest.replaceTest: JSON to Merge: " + jsonToMerge); System.out.println("JsonpMergeTest.replaceTest: After Merge: " + mergedJson); // Test that everything is as it should be JsonObject mergedJsonObject = mergedJson.asJsonObject(); assertTrue("Merged JSON didn't merge correctly!", mergedJsonObject.getString("Wibbly").equals("Bibbly")); assertTrue("Merged JSON didn't merge correctly!", mergedJsonObject.getString("Replaced").equals("Yes")); assertTrue("JSON Array didn't merge correctly!", mergedJsonObject.getJsonArray("Lexicon").getString(0).equals("Wibbles") && mergedJsonObject.getJsonArray("Lexicon").getString(1).equals("Bibbles")); assertTrue("Nested JSON didn't merge correctly!", mergedJsonObject.getJsonObject("Nested").getString("Birdie").equals("Wordie")); assertTrue("Nested JSON Array didn't merge correctly!", mergedJsonObject.getJsonObject("Nested").getJsonArray("Bestiary").getString(0).equals("Slyzard") && mergedJsonObject.getJsonObject("Nested").getJsonArray("Bestiary").getString(1).equals("Dragon") && mergedJsonObject.getJsonObject("Nested").getJsonArray("Bestiary").getString(2).equals("Ekimmara")); } /** * Test that the JSON Merge operation adds values as intended. 
*/ @Test public void addTest() { // Create a JSON object that we'll merge into the class variable, adding object members and array values JsonObject jsonToMerge = Json.createObjectBuilder() .add("Bibbly", "Bobbly") .add("Lexicon", Json.createArrayBuilder() .add("Wibbles") .add("Wobbles") .add("Bibbles") .add("Bobbles") .build()) .build(); // Create a merge patch and apply it JsonMergePatch mergePatch = Json.createMergePatch(jsonToMerge); JsonValue mergedJson = mergePatch.apply(json); // Print out to more easily see what we've done System.out.println("JsonpMergeTest.addTest: Before Merge: " + json); System.out.println("JsonpMergeTest.addTest: JSON to Merge: " + jsonToMerge); System.out.println("JsonpMergeTest.addTest: After Merge: " + mergedJson); // Test that everything is as it should be JsonObject mergedJsonObject = mergedJson.asJsonObject(); assertTrue("Merged JSON didn't merge correctly!", mergedJsonObject.getString("Wibbly").equals("Wobbly")); assertTrue("Merged JSON didn't merge correctly!", mergedJsonObject.getString("Bibbly").equals("Bobbly")); assertTrue("Merged JSON didn't merge correctly!", !mergedJsonObject.getBoolean("Replaced")); assertTrue("JSON Array didn't merge correctly!", mergedJsonObject.getJsonArray("Lexicon").getString(0).equals("Wibbles") && mergedJsonObject.getJsonArray("Lexicon").getString(1).equals("Wobbles") && mergedJsonObject.getJsonArray("Lexicon").getString(2).equals("Bibbles") && mergedJsonObject.getJsonArray("Lexicon").getString(3).equals("Bobbles")); assertTrue("Nested JSON didn't merge correctly!", mergedJsonObject.getJsonObject("Nested").getString("Birdie").equals("Wordie")); assertTrue("Nested JSON Array didn't merge correctly!", mergedJsonObject.getJsonObject("Nested").getJsonArray("Bestiary").getString(0).equals("Drowner") && mergedJsonObject.getJsonObject("Nested").getJsonArray("Bestiary").getString(1).equals("Werewolf") && mergedJsonObject.getJsonObject("Nested").getJsonArray("Bestiary").getString(2).equals("Chimera")); } 
/** * Test that the JSON Merge operation removes values as intended. */ @Test public void removeTest() { // Create a JSON object that we'll merge into the class variable, removing object members and array values JsonObject jsonToMerge = Json.createObjectBuilder() .addNull("Wibbly") .add("Lexicon", Json.createArrayBuilder() .add("Wibbles") .build()) .add("Nested", Json.createObjectBuilder() .addNull("Bestiary") .build()) .build(); // Create a merge patch and apply it JsonMergePatch mergePatch = Json.createMergePatch(jsonToMerge); JsonValue mergedJson = mergePatch.apply(json); // Print out to more easily see what we've done System.out.println("JsonpMergeTest.removeTest: Before Merge: " + json); System.out.println("JsonpMergeTest.removeTest: JSON to Merge: " + jsonToMerge); System.out.println("JsonpMergeTest.removeTest: After Merge: " + mergedJson); // Test that everything is as it should be JsonObject mergedJsonObject
{ "pile_set_name": "Github" }
--- title: 如何把一个 RegularJS 组件打成 npm 包 date: 2017-07-04 --- 本篇基于 RegularJS 热区组件,简单分享一下组件打包发布的全流程及主要遇到的问题。 ## 目录 1. 项目初始化 2. 开发环境准备,安装基础依赖 3. 将组件打包输出成多种模式!important 4. 进入开发 5. 包装工作 6. 最终发布 <!-- more --> ## 1. 项目初始化 #### 1. 在 GitHub 上创建项目仓库,添加 README 和 License 没什么好说的,License 一般设置成 MIT(开源万岁),详细协议介绍可查:[HELP](https://choosealicense.com/)。 #### 2. clone 到本地,设置 git config 本地全局的 git config 文件一般设置为公司的邮箱和用户名。为了避免泄露信息,可在初始化时提前进行项目层面的 config 设置: ```shell $ git config user.name "GitHub 用户名" $ git config user.email "GitHub 邮箱" ``` 这样提交代码时就以该用户名及邮箱作为用户信息了,此时执行查看命令可以看到: ```shell $ cat .git/config [user] name = GitHub 用户名 email = GitHub 邮箱 ``` #### 3. 执行 npm init,生成 package.json 按提示一步步来完成配置即可。 ## 2. 开发环境准备,安装基础依赖 这里偷了个懒,直接使用了 vue-cli 的 [webpack-simple](https://github.com/vuejs-templates/webpack-simple) 模式生成的 webpack.config.js 和 package.json,并调整成实际需要的配置。 配置比较简单,可以直接 [**戳我**](https://github.com/Deol/regular-hotzone/blob/master/webpack.config.js) 看一下(具体配置解释直接查阅 [文档](https://webpack.js.org/configuration/output/#output-librarytarget))。 ## 3. 
将组件打包输出成多种模式!important 既然是 RegularJS 组件,那么打包后的组件无论是直接以 `<script>` 标签形式引入,或者用 AMD / CommonJS 方式引入都应该可以使用。 ### 第一部分,webpack 配置 与此相关的配置项是这三个: - output.library && output.libraryTarget library 属性能让打包后的整个组件被当成一个全局变量使用。考虑命名污染及冲突,可以将 `library` 属性的值起得相对复杂些,如 `regularHotZone`。 另外,为了让组件在多种模式下都可运行,使用 `libraryTarget` 配置该组件的暴露方式为 **umd**。该模式意味着组件在 CommonJS、AMD 及 global 环境下都能运行: ``` output: { library: 'regularHotZone', libraryTarget: 'umd' } ``` - externals 这个配置是为了排除外部依赖,不将它们一起打包进去。对于 RegularJS 组件来说,并不需要把 RegularJS 也打包进去,此时就应该用 externals。 而配置中是这么写的: ``` externals: { regularjs: { root: 'Regular', commonjs: 'regularjs', commonjs2: 'regularjs', amd: 'regularjs' } } ``` 这是由于上述的 libraryTarget 设置为 umd,那么这里必须设置成这种形式,RegularJS 才能在 AMD 和 CommonJS 模式下通过 regularjs 被访问,但在全局变量下通过 Regular 被访问。 ### 第二部分,package 配置 另一方面,我们需要在组件的 package.json 中需要将 RegularJS 设置为同伴依赖 (`peerDependencies`): ``` // 建议:不同于一般的依赖,同伴依赖需要降低版本限制。不应该将同伴依赖锁定在特定的版本号。 "peerDependencies": { "regularjs": "^0.4.3" } ``` 因为 RegularJS 组件是 RegularJS 框架的拓展,它不能脱离于框架独立存在。 也就是说,如果需要以 npm 包形式引入 RegularJS 组件,那么 RegularJS 框架必须也被引入,不管是以 npm 包形式引入,还是用 `script` 标签引入并配置 externals。 **注意**:如果安装组件包时,找不到 RegularJS 或者其**不符合同伴依赖的版本要求**,终端将抛出警告: ``` `-- UNMET PEER DEPENDENCY regularjs@^0.4.3 npm WARN regular-hotzone@0.1.14 requires a peer of regularjs@^0.4.3 but none was installed. ``` npm 使用的版本规则「[**在此**](https://docs.npmjs.com/misc/semver)」查看。 可以知道,上面设定 RegularJS 版本为 `^0.4.3`,相当于 version >= 0.4.3 && version < 0.5.0。 ## 4. 进入开发 跑个 `npm run startdev`,balabalabala... ## 5. 包装工作 1. 完成整体开发后,修改 package.json 中的 version(版本介绍「[**在此**](http://semver.org/lang/zh-CN/)」,每次发布都必须修改,否则无法发布),并利用 `npm run build` 打包输出 dist 文件夹。 2. 编写 Readme,可参考「[如何写好 Github 中的 readme? - 知乎](https://www.zhihu.com/question/29100816/answer/68750410)」。 ## 6. 最终发布 最终阶段,进入 https://www.npmjs.com/ 完成注册后,执行: ``` $ npm publish ``` 完成登录后可能会发布失败,因为我们可能会将 npm 源设置为淘宝源,此时需要添加 `//` 暂时将其注释: ``` $ vi ~/.npmrc //registry=https://registry.npm.taobao.org ``` 保存后重新执行发布操作即可。 此时
{ "pile_set_name": "Github" }
if TARGET_TQM834X config SYS_BOARD default "tqm834x" config SYS_VENDOR default "tqc" config SYS_CONFIG_NAME default "TQM834x" endif
{ "pile_set_name": "Github" }
--- title: 关于公共服务的思考 date: 2016-12-07 21:12:53 tags: [] author: xizhibei issue_link: https://github.com/xizhibei/blog/issues/32 --- 前几日与一同行交流,一一交流下来,发现什么叫『固步自封』,跟外界交流少了,很多东西便会搞不清楚,甚至脱离主流。 比如最近一直在为团队做基础设施方面的工作,但是,会有一种吃力不讨好的感觉,虽然搭建完毕之后自己会很很有成就感,但是随之而来的维护成本却是很让人头疼。 是的,**『能花钱的,就不要花时间』**。 我也想反驳,但后来仔细回想,没有立即反驳是因为我认同这句话。我当时想反驳的便是:公共服务就像公交车,的确有时候会很方便,可是一旦你想自由些便是很困难,这时候便需要私家车。没错,私家车成本高,还要花时间金钱维护,但是,它就是比公交车方便。 所以,对于一个创业公司来说,你完全可以用公交服务,比如代码托管,文档管理,项目管理,云服务器,监控服务,CI&CD 服务。实际上,现在公共服务越来越多,创业成本实际上越来越低,最后可能到一种程度了之后,只要使用的人搭积木即可了,所有的服务都可以是公共的、现成的。 只是,我觉得高成本的服务才是有做成公共服务的硬需求的,也是价值非常高的,比如云服务主机,安全,APM,大数据等。 好了,话说回来,我列出有些部分为什么不用公共服务: 两个词:** 成本与收益 ** #### 成本: - 金钱:应该算是机器了,无论云服务或者自己买机器 - 时间:搭建与维护 - 人力:需要专门的人去维护 - 安全:数据不会泄露 #### 收益 - 时间:反馈时间,形成一个高效的正负反馈系统 - 可用性:满足需求,甚至比公共服务好用 这里插一句前提,国内的很多公共服务并不怎么好用,而且让人怀疑,虽然他们一再声明不会去查看甚至泄露用户数据。而国外的服务有非常多好用的,但是由于网络问题,得投入 VPN 成本,另外,他们很多是使用美元结算的,换算成 RMB 之后,很贵,不过他们的成本本来就很高。(T_T 国外好幸福。。。 比如当初选择 gitlab,一个是因为 github 有时候太慢,线上部署代码的时候太慢,换成 coding 之后,又觉得不如 github 好用。于是自己用 gitlab,然后那时候,gitlab 已经比较新,自带了 pipeline,可以直接作为 CI&CD。 然后是日志系统,目前国内好用的服务比较少,国外有个 loggy 不错,只是按照我们的需求来的话,至少得要 $5000 + 了,还不如自己搭建了,另外,由于需要上传数据到国外服务器,VPN 的成本不会太低,更别说有延时了。(其实还有点小私心,想要借此机会熟悉 ELK 好了,每个人都会有自己的选择,但是从个人角度来说:** 生命不息,折腾不止 **。 ### PS 目前就我接触下来,用过的产品中,ping++ 挺棒,起码会让人觉得好用,文档非常棒,虽然现在的公司因为担心数据没用。再吐槽下个推,文档真让人头大。。。 ### PPS 国内的服务目前还处于发展阶段,我觉得做出好产品是需要高成本的,并且跟创业公司是互相成就的,有条件的情况下还是多支持国内的同胞吧。。。 ### Reference 1. https://github.com/qinghuaiorg/free-for-dev-zh 2. https://github.com/ripienaar/free-for-dev *** 首发于 Github issues: https://github.com/xizhibei/blog/issues/32 ,欢迎 Star 以及 Watch {% post_link footer %} ***
{ "pile_set_name": "Github" }
/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com) * All rights reserved. * * This package is an SSL implementation written * by Eric Young (eay@cryptsoft.com). * The implementation was written so as to conform with Netscapes SSL. * * This library is free for commercial and non-commercial use as long as * the following conditions are aheared to. The following conditions * apply to all code found in this distribution, be it the RC4, RSA, * lhash, DES, etc., code; not just the SSL code. The SSL documentation * included with this distribution is covered by the same copyright terms * except that the holder is Tim Hudson (tjh@cryptsoft.com). * * Copyright remains Eric Young's, and as such any Copyright notices in * the code are not to be removed. * If this package is used in a product, Eric Young should be given attribution * as the author of the parts of the library used. * This can be in the form of a textual message at program startup or * in documentation (online or textual) provided with the package. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. All advertising materials mentioning features or use of this software * must display the following acknowledgement: * "This product includes cryptographic software written by * Eric Young (eay@cryptsoft.com)" * The word 'cryptographic' can be left out if the rouines from the library * being used are not cryptographic related :-). * 4. 
If you include any Windows specific code (or a derivative thereof) from * the apps directory (application code) you must include an acknowledgement: * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)" * * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * * The licence and distribution terms for any publically available version or * derivative of this code cannot be changed. i.e. this code cannot simply be * copied and put under another distribution licence * [including the GNU Public Licence.] 
*/ #include <openssl/cipher.h> #include <assert.h> #include <string.h> #include <openssl/err.h> #include <openssl/mem.h> #include <openssl/nid.h> #include "internal.h" #include "../internal.h" const EVP_CIPHER *EVP_get_cipherbynid(int nid) { switch (nid) { case NID_rc2_cbc: return EVP_rc2_cbc(); case NID_rc2_40_cbc: return EVP_rc2_40_cbc(); case NID_des_ede3_cbc: return EVP_des_ede3_cbc(); case NID_des_ede_cbc: return EVP_des_cbc(); case NID_aes_128_cbc: return EVP_aes_128_cbc(); case NID_aes_192_cbc: return EVP_aes_192_cbc(); case NID_aes_256_cbc: return EVP_aes_256_cbc(); default: return NULL; } } void EVP_CIPHER_CTX_init(EVP_CIPHER_CTX *ctx) { OPENSSL_memset(ctx, 0, sizeof(EVP_CIPHER_CTX)); } EVP_CIPHER_CTX *EVP_CIPHER_CTX_new(void) { EVP_CIPHER_CTX *ctx = OPENSSL_malloc(sizeof(EVP_CIPHER_CTX)); if (ctx) { EVP_CIPHER_CTX_init(ctx); } return ctx; } int EVP_CIPHER_CTX_cleanup(EVP_CIPHER_CTX *c) { if (c->cipher != NULL) { if (c->cipher->cleanup) { c->cipher->cleanup(c); } OPENSSL_cleanse(c->cipher_data, c->cipher->ctx_size); } OPENSSL_free(c->cipher_data); OPENSSL_memset(c, 0, sizeof(EVP_CIPHER_CTX)); return 1; } void EVP_CIPHER_CTX_free(EVP_CIPHER_CTX *ctx) { if (ctx) { EVP_CIPHER_CTX_cleanup(ctx); OPENSSL_free(ctx); } } int EVP_CIPHER_CTX_copy(EVP_CIPHER_CTX *out, const EVP_CIPHER_CTX *in) { if (in == NULL || in->cipher == NULL) { OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INPUT_NOT_INITIALIZED); return 0; } EVP_CIPHER_CTX_cleanup(out); OPENSSL_memcpy(out, in, sizeof(EVP_CIPHER_CTX)); if (in->cipher_data && in->cipher->ctx_size) { out->cipher_data = OPENSSL_malloc(in->cipher->ctx_size); if (!out->cipher_data) { out->cipher = NULL; OPENSSL_PUT_ERROR(CIPHER, ERR_R_MALLOC_FAILURE); return 0; } OPENSSL_memcpy(out->cipher_data, in->cipher_data, in->cipher->ctx_size); } if (in->cipher->flags & EVP_CIPH_CUSTOM_COPY) { if (!in->cipher->ctrl((EVP_CIPHER_CTX *)in, EVP_CTRL_COPY, 0, out)) { out->cipher = NULL; return 0; } } return 1; } int EVP_CipherInit_ex(EVP_CIPHER_CTX *ctx, 
const EVP_CIPHER *cipher, ENGINE *engine, const uint8_t *key, const uint8_t *iv, int enc) { if (enc == -1) { enc = ctx->encrypt; } else { if (enc) { enc = 1; } ctx->encrypt = enc; } if (cipher) { /* Ensure a context left from last time is cleared (the previous check * attempted to avoid this if the same ENGINE and EVP_CIPHER could be * used). */ if (ctx->cipher) { EVP_CIPHER_CTX_cleanup(ctx); /* Restore encrypt and flags */ ctx->encrypt = enc; } ctx->cipher = cipher; if (ctx->cipher->ctx_size) { ctx->cipher_data = OPENSSL_malloc(ctx->cipher->ctx_size); if (!ctx->cipher_data) { ctx->cipher = NULL; OPENSSL_PUT_ERROR(CIPHER, ERR_R_MALLOC_FAILURE); return 0; } } else { ctx->cipher_data = NULL; } ctx->key_len = cipher->key_len; ctx->flags = 0; if (ctx->cipher->flags & EVP_
{ "pile_set_name": "Github" }
{ "name": "bluetoothconnector", "full_name": "bluetoothconnector", "oldname": null, "aliases": [ ], "versioned_formulae": [ ], "desc": "Connect and disconnect Bluetooth devices", "license": "MIT", "homepage": "https://github.com/lapfelix/BluetoothConnector", "versions": { "stable": "2.0.0", "head": "HEAD", "bottle": true }, "urls": { "stable": { "url": "https://github.com/lapfelix/BluetoothConnector/archive/2.0.0.tar.gz", "tag": null, "revision": null } }, "revision": 0, "version_scheme": 0, "bottle": { "stable": { "rebuild": 0, "cellar": ":any_skip_relocation", "prefix": "/home/linuxbrew/.linuxbrew", "root_url": "https://linuxbrew.bintray.com/bottles", "files": { "catalina": { "url": "https://linuxbrew.bintray.com/bottles/bluetoothconnector-2.0.0.catalina.bottle.tar.gz", "sha256": "38d8b5c89fd8fee4a746eadaceb399d5b7e1148db2cee896381b6e093aef56e3" }, "mojave": { "url": "https://linuxbrew.bintray.com/bottles/bluetoothconnector-2.0.0.mojave.bottle.tar.gz", "sha256": "1a0c1e83b5640a35c48ba982f1b7cf5b1bebdda6fd4957368262c3e001c740e3" } } } }, "keg_only": false, "bottle_disabled": false, "options": [ ], "build_dependencies": [ ], "dependencies": [ ], "recommended_dependencies": [ ], "optional_dependencies": [ ], "uses_from_macos": [ ], "requirements": [ { "name": "xcode", "cask": null, "download": null, "version": "11.0", "contexts": [ "build" ] } ], "conflicts_with": [ ], "caveats": null, "installed": [ ], "linked_keg": null, "pinned": false, "outdated": false, "deprecated": false, "disabled": false }
{ "pile_set_name": "Github" }
<?xml version='1.0'?> <xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns:fo="http://www.w3.org/1999/XSL/Format" xmlns:exsl="http://exslt.org/common" exclude-result-prefixes="exsl" version='1.0'> <!-- ******************************************************************** $Id: xref.xsl,v 1.1 2005/08/28 00:35:05 cbauer Exp $ ******************************************************************** This file is part of the XSL DocBook Stylesheet distribution. See ../README or http://nwalsh.com/docbook/xsl/ for copyright and other information. ******************************************************************** --> <!-- Create keys for quickly looking up olink targets --> <xsl:key name="targetdoc-key" match="document" use="@targetdoc" /> <xsl:key name="targetptr-key" match="div|obj" use="concat(ancestor::document/@targetdoc, '/', @targetptr)" /> <!-- ==================================================================== --> <xsl:template match="anchor"> <fo:wrapper id="{@id}"/> </xsl:template> <!-- ==================================================================== --> <xsl:template match="xref" name="xref"> <xsl:variable name="targets" select="key('id',@linkend)"/> <xsl:variable name="target" select="$targets[1]"/> <xsl:variable name="refelem" select="local-name($target)"/> <xsl:call-template name="check.id.unique"> <xsl:with-param name="linkend" select="@linkend"/> </xsl:call-template> <xsl:choose> <xsl:when test="$refelem=''"> <xsl:message> <xsl:text>XRef to nonexistent id: </xsl:text> <xsl:value-of select="@linkend"/> </xsl:message> <xsl:text>???</xsl:text> </xsl:when> <xsl:when test="@endterm"> <fo:basic-link internal-destination="{@linkend}" xsl:use-attribute-sets="xref.properties"> <xsl:variable name="etargets" select="key('id',@endterm)"/> <xsl:variable name="etarget" select="$etargets[1]"/> <xsl:choose> <xsl:when test="count($etarget) = 0"> <xsl:message> <xsl:value-of select="count($etargets)"/> <xsl:text>Endterm points to nonexistent ID: 
</xsl:text> <xsl:value-of select="@endterm"/> </xsl:message> <xsl:text>???</xsl:text> </xsl:when> <xsl:otherwise> <xsl:apply-templates select="$etarget" mode="endterm"/> </xsl:otherwise> </xsl:choose> </fo:basic-link> </xsl:when> <xsl:when test="$target/@xreflabel"> <fo:basic-link internal-destination="{@linkend}" xsl:use-attribute-sets="xref.properties"> <xsl:call-template name="xref.xreflabel"> <xsl:with-param name="target" select="$target"/> </xsl:call-template> </fo:basic-link> </xsl:when> <xsl:otherwise> <fo:basic-link internal-destination="{@linkend}" xsl:use-attribute-sets="xref.properties"> <xsl:apply-templates select="$target" mode="xref-to"> <xsl:with-param name="referrer" select="."/> <xsl:with-param name="xrefstyle"> <xsl:choose> <xsl:when test="@role and not(@xrefstyle) and $use.role.as.xrefstyle != 0"> <xsl:value-of select="@role"/> </xsl:when> <xsl:otherwise> <xsl:value-of select="@xrefstyle"/> </xsl:otherwise> </xsl:choose> </xsl:with-param> </xsl:apply-templates> </fo:basic-link> </xsl:otherwise> </xsl:choose> <!-- Add standard page reference? 
--> <xsl:if test="not(starts-with(normalize-space(@xrefstyle), 'select:') != '' and (contains(@xrefstyle, 'page') or contains(@xrefstyle, 'Page'))) and ( $insert.xref.page.number = 'yes' or $insert.xref.page.number = '1') or local-name($target) = 'para'"> <fo:basic-link internal-destination="{@linkend}" xsl:use-attribute-sets="xref.properties"> <xsl:apply-templates select="$target" mode="page.citation"> <xsl:with-param name="id" select="@linkend"/> </xsl:apply-templates> </fo:basic-link> </xsl:if> </xsl:template> <!-- ==================================================================== --> <xsl:template match="*" mode="endterm"> <!-- Process the children of the endterm element --> <xsl:variable name="endterm"> <xsl:apply-templates select="child::node()"/> </xsl:variable> <xsl:choose> <xsl:when test="function-available('exsl:node-set')"> <xsl:apply-templates select="exsl:node-set($endterm)" mode="remove-ids"/> </xsl:when> <xsl:otherwise> <xsl:copy-of select="$endterm"/> </xsl:otherwise> </xsl:choose> </xsl:template> <xsl:template match="*" mode="remove-ids"> <xsl:copy> <xsl:for-each select="@*"> <xsl:choose> <xsl:when test="name(.) != 'id'"> <xsl:copy/> </xsl:when> <xsl:otherwise> <xsl:message>removing <xsl:value-of select="name(.)"/></xsl:message> </xsl:otherwise> </xsl:choose> </xsl:for-each> <xsl:apply-templates mode="remove-ids"/> </xsl:copy> </xsl:template> <!--- ==================================================================== --> <xsl:template match="*" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <xsl:message> <xsl:text>Don't know what gentext to create for xref to: "</xsl:text> <xsl:value-of select="name(.)"/> <xsl:text>"</xsl:text> </xsl:message> <xsl:text>???</xsl:text> </xsl:template> <xsl:template match="title" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <!-- if you xref to a title, xref to the parent... --> <xsl:choose> <!-- FIXME: how reliable is this? 
--> <xsl:when test="contains(local-name(parent::*), 'info')"> <xsl:apply-templates select="parent::*[2]" mode="xref-to"> <xsl:with-param name="referrer" select="$referrer"/> <xsl:with-param name="xrefstyle" select="$xrefstyle"/> </xsl:apply-templates> </xsl:when> <xsl:otherwise> <xsl:apply-templates select="parent::*" mode="xref-to"> <xsl:with-param name="referrer" select="$referrer"/> <xsl:with-param name="xrefstyle" select="$xrefstyle"/> </xsl:apply-templates> </xsl:otherwise> </xsl:choose> </xsl:template> <xsl:template match="abstract|article|authorblurb|bibliodiv|bibliomset |biblioset|blockquote|calloutlist|caution
{ "pile_set_name": "Github" }
package cdn //Licensed under the Apache License, Version 2.0 (the "License"); //you may not use this file except in compliance with the License. //You may obtain a copy of the License at // //http://www.apache.org/licenses/LICENSE-2.0 // //Unless required by applicable law or agreed to in writing, software //distributed under the License is distributed on an "AS IS" BASIS, //WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. //See the License for the specific language governing permissions and //limitations under the License. // // Code generated by Alibaba Cloud SDK Code Generator. // Changes may cause incorrect behavior and will be lost if the code is regenerated. // HttpCodeData is a nested struct in cdn response type HttpCodeData struct { UsageData []UsageDataInDescribeDomainHttpCodeData `json:"UsageData" xml:"UsageData"` }
{ "pile_set_name": "Github" }
/*
 * Copyright 2018 StreamSets Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.streamsets.pipeline.stage.processor.parser.sql;

import com.streamsets.pipeline.api.base.BaseEnumChooserValues;

/**
 * Chooser values backed by the {@link JDBCTypes} enum.
 * Supplies the full set of {@code JDBCTypes} constants as selectable
 * values (presumably for a stage-configuration dropdown — the usage site
 * is outside this file).
 */
public class JDBCTypeChooserValues extends BaseEnumChooserValues<JDBCTypes> {

  /** Builds the chooser from every constant of {@link JDBCTypes}. */
  public JDBCTypeChooserValues() {
    super(JDBCTypes.class);
  }
}
{ "pile_set_name": "Github" }
---
id: i18n
title: Internationalization
---

## Add a language

### Edit your bot configs

In the Admin section > Your bots > Configs

![Bot Config](assets/i18n-configs.png)

### Switch language

Go back to Studio and switch the language

![Switch Language](assets/i18n-switch-lang.png)

You'll see a "missing translation" notification on your content

![Missing Translation](assets/i18n-missing-translation.png)

### Translate your content

Edit the content and add a translation

![Edit Content](assets/i18n-edit-content.png)

![Edited Content](assets/i18n-edited-content.png)

## Change the language

Botpress uses the browser's language to detect the user's language. This is stored in the `language` field of the user attributes. It is possible to change a user's language by modifying this field. See [updateAttributes](https://botpress.com/reference/modules/_botpress_sdk_.users.html#updateattributes)

Example usage:

```js
await bp.users.updateAttributes('web', 'someId', { language: 'fr' })
```
{ "pile_set_name": "Github" }
package cli import ( "fmt" "io" "os" "strings" "text/tabwriter" "text/template" ) // AppHelpTemplate is the text template for the Default help topic. // cli.go uses text/template to render templates. You can // render custom help text by setting this variable. var AppHelpTemplate = `NAME: {{.Name}}{{if .Usage}} - {{.Usage}}{{end}} USAGE: {{if .UsageText}}{{.UsageText}}{{else}}{{.HelpName}} {{if .VisibleFlags}}[global options]{{end}}{{if .Commands}} command [command options]{{end}} {{if .ArgsUsage}}{{.ArgsUsage}}{{else}}[arguments...]{{end}}{{end}}{{if .Version}}{{if not .HideVersion}} VERSION: {{.Version}}{{end}}{{end}}{{if .Description}} DESCRIPTION: {{.Description}}{{end}}{{if len .Authors}} AUTHOR{{with $length := len .Authors}}{{if ne 1 $length}}S{{end}}{{end}}: {{range $index, $author := .Authors}}{{if $index}} {{end}}{{$author}}{{end}}{{end}}{{if .VisibleCommands}} COMMANDS:{{range .VisibleCategories}}{{if .Name}} {{.Name}}:{{end}}{{range .VisibleCommands}} {{join .Names ", "}}{{"\t"}}{{.Usage}}{{end}}{{end}}{{end}}{{if .VisibleFlags}} GLOBAL OPTIONS: {{range $index, $option := .VisibleFlags}}{{if $index}} {{end}}{{$option}}{{end}}{{end}}{{if .Copyright}} COPYRIGHT: {{.Copyright}}{{end}} ` // CommandHelpTemplate is the text template for the command help topic. // cli.go uses text/template to render templates. You can // render custom help text by setting this variable. var CommandHelpTemplate = `NAME: {{.HelpName}} - {{.Usage}} USAGE: {{if .UsageText}}{{.UsageText}}{{else}}{{.HelpName}}{{if .VisibleFlags}} [command options]{{end}} {{if .ArgsUsage}}{{.ArgsUsage}}{{else}}[arguments...]{{end}}{{end}}{{if .Category}} CATEGORY: {{.Category}}{{end}}{{if .Description}} DESCRIPTION: {{.Description}}{{end}}{{if .VisibleFlags}} OPTIONS: {{range .VisibleFlags}}{{.}} {{end}}{{end}} ` // SubcommandHelpTemplate is the text template for the subcommand help topic. // cli.go uses text/template to render templates. You can // render custom help text by setting this variable. 
var SubcommandHelpTemplate = `NAME: {{.HelpName}} - {{if .Description}}{{.Description}}{{else}}{{.Usage}}{{end}} USAGE: {{if .UsageText}}{{.UsageText}}{{else}}{{.HelpName}} command{{if .VisibleFlags}} [command options]{{end}} {{if .ArgsUsage}}{{.ArgsUsage}}{{else}}[arguments...]{{end}}{{end}} COMMANDS:{{range .VisibleCategories}}{{if .Name}} {{.Name}}:{{end}}{{range .VisibleCommands}} {{join .Names ", "}}{{"\t"}}{{.Usage}}{{end}} {{end}}{{if .VisibleFlags}} OPTIONS: {{range .VisibleFlags}}{{.}} {{end}}{{end}} ` var helpCommand = Command{ Name: "help", Aliases: []string{"h"}, Usage: "Shows a list of commands or help for one command", ArgsUsage: "[command]", Action: func(c *Context) error { args := c.Args() if args.Present() { return ShowCommandHelp(c, args.First()) } ShowAppHelp(c) return nil }, } var helpSubcommand = Command{ Name: "help", Aliases: []string{"h"}, Usage: "Shows a list of commands or help for one command", ArgsUsage: "[command]", Action: func(c *Context) error { args := c.Args() if args.Present() { return ShowCommandHelp(c, args.First()) } return ShowSubcommandHelp(c) }, } // Prints help for the App or Command type helpPrinter func(w io.Writer, templ string, data interface{}) // Prints help for the App or Command with custom template function. type helpPrinterCustom func(w io.Writer, templ string, data interface{}, customFunc map[string]interface{}) // HelpPrinter is a function that writes the help output. If not set a default // is used. The function signature is: // func(w io.Writer, templ string, data interface{}) var HelpPrinter helpPrinter = printHelp // HelpPrinterCustom is same as HelpPrinter but // takes a custom function for template function map. var HelpPrinterCustom helpPrinterCustom = printHelpCustom // VersionPrinter prints the version for the App var VersionPrinter = printVersion // ShowAppHelpAndExit - Prints the list of subcommands for the app and exits with exit code. 
func ShowAppHelpAndExit(c *Context, exitCode int) { ShowAppHelp(c) os.Exit(exitCode) } // ShowAppHelp is an action that displays the help. func ShowAppHelp(c *Context) (err error) { if c.App.CustomAppHelpTemplate == "" { HelpPrinter(c.App.Writer, AppHelpTemplate, c.App) return } customAppData := func() map[string]interface{} { if c.App.ExtraInfo == nil { return nil } return map[string]interface{}{ "ExtraInfo": c.App.ExtraInfo, } } HelpPrinterCustom(c.App.Writer, c.App.CustomAppHelpTemplate, c.App, customAppData()) return nil } // DefaultAppComplete prints the list of subcommands as the default app completion method func DefaultAppComplete(c *Context) { for _, command := range c.App.Commands { if command.Hidden { continue } for _, name := range command.Names() { fmt.Fprintln(c.App.Writer, name) } } } // ShowCommandHelpAndExit - exits with code after showing help func ShowCommandHelpAndExit(c *Context, command string, code int) { ShowCommandHelp(c, command) os.Exit(code) } // ShowCommandHelp prints help for the given command func ShowCommandHelp(ctx *Context, command string) error { // show the subcommand help for a command with subcommands if command == "" { HelpPrinter(ctx.App.Writer, SubcommandHelpTemplate, ctx.App) return nil } for _, c := range ctx.App.Commands { if c.HasName(command) { if c.CustomHelpTemplate != "" { HelpPrinterCustom(ctx.App.Writer, c.CustomHelpTemplate, c, nil) } else { HelpPrinter(ctx.App.Writer, CommandHelpTemplate, c) } return nil } } if ctx.App.CommandNotFound == nil { return NewExitError(fmt.Sprintf("No help topic for '%v'", command), 3) } ctx.App.CommandNotFound(ctx, command) return nil } // ShowSubcommandHelp prints help for the given subcommand func ShowSubcommandHelp(c *Context) error { return ShowCommandHelp(c, c.Command.Name) } // ShowVersion prints the version number of the App func ShowVersion(c *Context) { VersionPrinter(c) } func printVersion(c *Context) { fmt.Fprintf(c.App.Writer, "%v version %v\n", c.App.Name, c.App.Version) 
} // ShowCompletions prints the lists of commands within a given context func ShowCompletions(c *Context) { a := c.App if a != nil && a.BashComplete != nil {
{ "pile_set_name": "Github" }
/*
 * Copyright (c) 2015, 2016, Oracle and/or its affiliates. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 *   - Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *
 *   - Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in the
 *     documentation and/or other materials provided with the distribution.
 *
 *   - Neither the name of Oracle nor the names of its
 *     contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
 * IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef LIBJIMAGE_INTTYPES_HPP
#define LIBJIMAGE_INTTYPES_HPP

/*
 * Sized integer aliases used by libjimage: uN is an unsigned integer of
 * N bytes, sN is its signed counterpart.
 * NOTE(review): s1 is plain `char`, whose signedness is
 * implementation-defined — confirm callers do not rely on it being signed.
 */
typedef unsigned char u1;   /* 1-byte unsigned */
typedef char s1;            /* 1-byte signed (see note above) */
typedef unsigned short u2;  /* 2-byte unsigned */
typedef short s2;           /* 2-byte signed */
typedef unsigned int u4;    /* 4-byte unsigned */
typedef int s4;             /* 4-byte signed */
#ifdef LP64
/* LP64 data model: `long` is 64 bits wide. */
typedef unsigned long u8;
typedef long s8;
#else
/* Other data models (32-bit, LLP64): use `long long` for 64 bits. */
typedef unsigned long long u8;
typedef long long s8;
#endif

#endif // LIBJIMAGE_INTTYPES_HPP
{ "pile_set_name": "Github" }
--
-- User: mike
-- Date: 03.06.2018
-- Time: 22:51
-- This file is part of Remixed Pixel Dungeon.
--
-- Raise Dead spell: remembers the most recent eligible kill and lets the
-- caster resurrect it as a pet at the spot where it died.

local RPD = require "scripts/lib/commonClasses"
local spell = require "scripts/lib/spell"
local mob = require "scripts/lib/mob"
local storage = require "scripts/lib/storage"

-- Storage key under which the most recent eligible kill is remembered.
local latest_kill_index = "__latest_dead_mob"

-- On-death hook: records the class name and position of the dead mob so it
-- can be raised later. Mobs that cannot be pets, and MirrorImage, are
-- ignored.
local function updateLatestDeadMob(mob)
    local mobClass = mob:getMobClassName()
    if mob:canBePet() and mobClass ~= "MirrorImage" then
        storage.put(latest_kill_index, {class = mob:getMobClassName(), pos = mob:getPos()})
    end
end

-- Register the recorder to run on every mob death.
mob.installOnDieCallback(updateLatestDeadMob)

return spell.init{
    -- Static spell metadata consumed by the spell framework.
    desc = function ()
        return {
            image = 2,
            imageFile = "spellsIcons/necromancy.png",
            name = "RaiseDead_Name",
            info = "RaiseDead_Info",
            magicAffinity = "Necromancy",
            targetingType = "none",
            spellCost = 15,
            castTime = 3,
            level = 4
        }
    end,
    -- Casts the spell: spawns the last recorded kill at its death position
    -- as a pet of the caster. Returns true on success; returns false (with
    -- a log message) when there is no recorded kill or the stored cell is
    -- not valid on the current level.
    cast = function(self, spell, chr)
        local latestDeadMob = storage.get(latest_kill_index) or {}
        if latestDeadMob.class ~= nil then
            local mob = RPD.MobFactory:mobByName(latestDeadMob.class)
            -- Clear the stored kill immediately so the same corpse cannot
            -- be raised twice.
            storage.put(latest_kill_index, {})
            local level = RPD.Dungeon.level
            local mobPos = latestDeadMob.pos
            if level:cellValid(mobPos) then
                mob:setPos(mobPos)
                mob:loot(RPD.ItemFactory:itemByName("Gold"))
                RPD.Mob:makePet(mob, chr)
                level:spawnMob(mob)
                -- Visual and audio feedback on both caster and raised mob.
                chr:getSprite():emitter():burst( RPD.Sfx.ShadowParticle.CURSE, 6 )
                mob:getSprite():emitter():burst( RPD.Sfx.ShadowParticle.CURSE, 6 )
                RPD.playSound( "snd_cursed" )
                return true
            else
                RPD.glog("RaiseDead_NoSpace")
                return false
            end
        end
        RPD.glog("RaiseDead_NoKill")
        return false
    end
}
{ "pile_set_name": "Github" }
#ifndef NUMBA_PY_MODULE_H_
#define NUMBA_PY_MODULE_H_

/* Request Py_ssize_t-clean argument parsing; must be defined before
 * Python.h is included. */
#define PY_SSIZE_T_CLEAN
#include <Python.h>
#include <structmember.h>
#include <frameobject.h>

/*
 * Python 2/3 module-initialisation shims:
 *   MOD_INIT       declares the module's init entry point,
 *   MOD_DEF        creates/initialises the module object,
 *   MOD_INIT_EXEC  invokes the generated init function,
 *   MOD_ERROR_VAL / MOD_SUCCESS_VAL adapt the differing return
 *   conventions (PyObject* on Python 3, void on Python 2).
 */
#if PY_MAJOR_VERSION >= 3
    #define MOD_ERROR_VAL NULL
    #define MOD_SUCCESS_VAL(val) val
    #define MOD_INIT(name) PyMODINIT_FUNC PyInit_##name(void)
    #define MOD_DEF(ob, name, doc, methods) { \
          static struct PyModuleDef moduledef = { \
              PyModuleDef_HEAD_INIT, name, doc, -1, methods, }; \
          ob = PyModule_Create(&moduledef); }
    #define MOD_INIT_EXEC(name) PyInit_##name();
#else
    #define MOD_ERROR_VAL
    #define MOD_SUCCESS_VAL(val)
    #define MOD_INIT(name) PyMODINIT_FUNC init##name(void)
    #define MOD_DEF(ob, name, doc, methods) \
          ob = Py_InitModule3(name, methods, doc);
    #define MOD_INIT_EXEC(name) init##name();
#endif

/* On Python 3, map the removed PyString_*/PyInt_* names onto their
 * PyUnicode_*/PyLong_* equivalents; on Python 2, provide the Py_hash_t
 * types that only exist natively in Python 3. */
#if PY_MAJOR_VERSION >= 3
  #define PyString_AsString PyUnicode_AsUTF8
  #define PyString_Check PyUnicode_Check
  #define PyString_FromFormat PyUnicode_FromFormat
  #define PyString_FromString PyUnicode_FromString
  #define PyString_InternFromString PyUnicode_InternFromString
  #define PyInt_Type PyLong_Type
  #define PyInt_Check PyLong_Check
  #define PyInt_CheckExact PyLong_CheckExact
#else
  #define Py_hash_t long
  #define Py_uhash_t unsigned long
#endif

/* The PyMem_Raw* allocators were added in Python 3.4; fall back to the
 * C library allocators on older versions. */
#if PY_MAJOR_VERSION < 3 || (PY_MAJOR_VERSION == 3 && PY_MINOR_VERSION < 4)
  #define PyMem_RawMalloc malloc
  #define PyMem_RawRealloc realloc
  #define PyMem_RawFree free
#endif

/* Min/max helpers, defined only if CPython's headers did not already
 * provide them. */
#ifndef Py_MIN
#define Py_MIN(x, y) (((x) > (y)) ? (y) : (x))
#endif

#ifndef Py_MAX
#define Py_MAX(x, y) (((x) < (y)) ? (y) : (x))
#endif

#endif /* NUMBA_PY_MODULE_H_ */
{ "pile_set_name": "Github" }
<manifest package="com.eighteengray.imageprocesslibrary" xmlns:android="http://schemas.android.com/apk/res/android" android:installLocation="auto" > <application android:allowBackup="true" android:label="@string/app_name" android:supportsRtl="true" > <activity android:name=".cvdemo.camera.DisplayModeActivity"/> <activity android:name=".cvdemo.camera.CameraViewActivity"/> <activity android:name=".cvdemo.CVTestActivity"/> </application> </manifest>
{ "pile_set_name": "Github" }
// Protocol Buffers - Google's data interchange format // Copyright 2008 Google Inc. All rights reserved. // https://developers.google.com/protocol-buffers/ // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
#include <ext/spl/spl_iterators.h> #include <Zend/zend_API.h> #include <Zend/zend_interfaces.h> #include "protobuf.h" #include "utf8.h" ZEND_BEGIN_ARG_INFO_EX(arginfo_offsetGet, 0, 0, 1) ZEND_ARG_INFO(0, index) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_INFO_EX(arginfo_offsetSet, 0, 0, 2) ZEND_ARG_INFO(0, index) ZEND_ARG_INFO(0, newval) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_INFO(arginfo_void, 0) ZEND_END_ARG_INFO() // Utilities void* upb_value_memory(upb_value* v) { return (void*)(&v->val); } // ----------------------------------------------------------------------------- // Basic map operations on top of upb's strtable. // // Note that we roll our own `Map` container here because, as for // `RepeatedField`, we want a strongly-typed container. This is so that any user // errors due to incorrect map key or value types are raised as close as // possible to the error site, rather than at some deferred point (e.g., // serialization). // // We build our `Map` on top of upb_strtable so that we're able to take // advantage of the native_slot storage abstraction, as RepeatedField does. // (This is not quite a perfect mapping -- see the key conversions below -- but // gives us full support and error-checking for all value types for free.) // ----------------------------------------------------------------------------- // Map values are stored using the native_slot abstraction (as with repeated // field values), but keys are a bit special. Since we use a strtable, we need // to store keys as sequences of bytes such that equality of those bytes maps // one-to-one to equality of keys. We store strings directly (i.e., they map to // their own bytes) and integers as native integers (using the native_slot // abstraction). // Note that there is another tradeoff here in keeping string keys as native // strings rather than PHP strings: traversing the Map requires conversion to // PHP string values on every traversal, potentially creating more garbage. 
We // should consider ways to cache a PHP version of the key if this becomes an // issue later. // Forms a key to use with the underlying strtable from a PHP key value. |buf| // must point to TABLE_KEY_BUF_LENGTH bytes of temporary space, used to // construct a key byte sequence if needed. |out_key| and |out_length| provide // the resulting key data/length. #define TABLE_KEY_BUF_LENGTH 8 // sizeof(uint64_t) static bool table_key(Map* self, zval* key, char* buf, const char** out_key, size_t* out_length TSRMLS_DC) { switch (self->key_type) { case UPB_TYPE_STRING: if (!protobuf_convert_to_string(key)) { return false; } if (!is_structurally_valid_utf8(Z_STRVAL_P(key), Z_STRLEN_P(key))) { zend_error(E_USER_ERROR, "Given key is not UTF8 encoded."); return false; } *out_key = Z_STRVAL_P(key); *out_length = Z_STRLEN_P(key); break; #define CASE_TYPE(upb_type, type, c_type, php_type) \ case UPB_TYPE_##upb_type: { \ c_type type##_value; \ if (!protobuf_convert_to_##type(key, &type##_value)) { \ return false; \ } \ native_slot_set_by_array(self->key_type, NULL, buf, key TSRMLS_CC); \ *out_key = buf; \ *out_length = native_slot_size(self->key_type); \ break; \ } CASE_TYPE(BOOL, bool, int8_t, BOOL) CASE_TYPE(INT32, int32, int32_t, LONG) CASE_TYPE(INT64, int64, int64_t, LONG) CASE_TYPE(UINT32, uint32, uint32_t, LONG) CASE_TYPE(UINT64, uint64, uint64_t, LONG) #undef CASE_TYPE default: // Map constructor should not allow a Map with another key type to be // constructed. 
assert(false); break; } return true; } // ----------------------------------------------------------------------------- // MapField methods // ----------------------------------------------------------------------------- static zend_function_entry map_field_methods[] = { PHP_ME(MapField, __construct, NULL, ZEND_ACC_PUBLIC) PHP_ME(MapField, offsetExists, arginfo_offsetGet, ZEND_ACC_PUBLIC) PHP_ME(MapField, offsetGet, arginfo_offsetGet, ZEND_ACC_PUBLIC) PHP_ME(MapField, offsetSet, arginfo_offsetSet, ZEND_ACC_PUBLIC) PHP_ME(MapField, offsetUnset, arginfo_offsetGet, ZEND_ACC_PUBLIC) PHP_ME(MapField, count, arginfo_void, ZEND_ACC_PUBLIC) PHP_ME(MapField, getIterator, arginfo_void, ZEND_ACC_PUBLIC) ZEND_FE_END }; // Forward declare static functions. static void map_field_write_dimension(zval *object, zval *key, zval *value TSRMLS_DC); // ----------------------------------------------------------------------------- // MapField creation/desctruction // ----------------------------------------------------------------------------- zend_class_entry* map_field_type; zend_class_entry* map_field_iter_type; zend_object_handlers* map_field_handlers; zend_object_handlers* map_field_iter_handlers; static void map_begin_internal(Map *map, MapIter *iter) { iter->self = map; upb_strtable_begin(&iter->it, &map->table); } static HashTable *map_field_get_gc(zval *object, CACHED_VALUE **table, int *n) { // TODO(teboring): Unfortunately, zend engine does not support garbage // collection for custom array. We have to use zend engine's native array // instead. *table = NULL; *n = 0; return NULL; }
{ "pile_set_name": "Github" }
//------------------------------------------------------------------------------
// <copyright file="DetailsViewMode.cs" company="Microsoft">
//     Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//------------------------------------------------------------------------------

namespace System.Web.UI.WebControls {

    using System;

    /// <devdoc>
    ///    <para>Specifies the DetailsView edit/view mode: read-only display,
    ///    editing an existing record, or entering a new record.</para>
    /// </devdoc>
    public enum DetailsViewMode {

        /// <devdoc>
        ///    <para>The control is in read-only (display) mode.</para>
        /// </devdoc>
        ReadOnly = 0,

        /// <devdoc>
        ///    <para>The control is editing an existing record for update.</para>
        /// </devdoc>
        Edit = 1,

        /// <devdoc>
        ///    <para>The control is editing a new record for insert.</para>
        /// </devdoc>
        Insert = 2
    }
}
{ "pile_set_name": "Github" }
/* Esperanto initialisation for the jQuery UI date picker plugin. */
/* Written by Olivier M. (olivierweb@ifrance.com). */
jQuery(function ($) {
	// Register the Esperanto ('eo') locale with the datepicker.
	$.datepicker.regional['eo'] = {
		closeText: 'Fermi',
		// Fixed truncated label: 'Anta' is not a word; 'Antaŭ' ("previous")
		// matches the upstream jQuery UI eo locale ('&lt;Anta&#365;').
		prevText: '&lt;Antaŭ',
		nextText: 'Sekv&gt;',
		currentText: 'Nuna',
		monthNames: ['Januaro', 'Februaro', 'Marto', 'Aprilo', 'Majo', 'Junio',
			'Julio', 'Aŭgusto', 'Septembro', 'Oktobro', 'Novembro', 'Decembro'],
		monthNamesShort: ['Jan', 'Feb', 'Mar', 'Apr', 'Maj', 'Jun',
			'Jul', 'Aŭg', 'Sep', 'Okt', 'Nov', 'Dec'],
		dayNames: ['Dimanĉo', 'Lundo', 'Mardo', 'Merkredo', 'Ĵaŭdo', 'Vendredo', 'Sabato'],
		dayNamesShort: ['Dim', 'Lun', 'Mar', 'Mer', 'Ĵaŭ', 'Ven', 'Sab'],
		dayNamesMin: ['Di', 'Lu', 'Ma', 'Me', 'Ĵa', 'Ve', 'Sa'],
		weekHeader: 'Sb',
		dateFormat: 'dd/mm/yy',
		firstDay: 0,        // week starts on Sunday
		isRTL: false,
		showMonthAfterYear: false,
		yearSuffix: ''
	};
	// Make Esperanto the active default locale for all datepickers.
	$.datepicker.setDefaults($.datepicker.regional['eo']);
});
{ "pile_set_name": "Github" }
require_relative '../spec_helper'
require_relative '../fixtures/classes'

# Specs for UNIXSocket#addr, which returns the socket's local address as
# a two-element array: the address family string and the socket path.
# UNIX-domain sockets are unavailable on Windows, hence the platform guard.
describe "UNIXSocket#addr" do
  platform_is_not :windows do
    before :each do
      # A fresh server/client pair over a temporary socket path for each spec.
      @path = SocketSpecs.socket_path
      @server = UNIXServer.open(@path)
      @client = UNIXSocket.open(@path)
    end

    after :each do
      @client.close
      @server.close
      SocketSpecs.rm_socket @path
    end

    it "returns an array" do
      @client.addr.should be_kind_of(Array)
    end

    it "returns the address family of this socket in an array" do
      @client.addr[0].should == "AF_UNIX"
      @server.addr[0].should == "AF_UNIX"
    end

    it "returns the path of the socket in an array if it's a server" do
      @server.addr[1].should == @path
    end

    # Clients are unnamed (not bound to a filesystem path), so the path
    # component is empty.
    it "returns an empty string for path if it's a client" do
      @client.addr[1].should == ""
    end
  end
end
{ "pile_set_name": "Github" }
<!-- Generated by pkgdown: do not edit by hand --> <!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <title>Create a numeric input control — numericInput • SHINY.SEMANTIC</title> <!-- jquery --> <script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.4.1/jquery.min.js" integrity="sha256-CSXorXvZcTkaix6Yvo6HppcZGetbYMGWSFlBw8HfCJo=" crossorigin="anonymous"></script> <!-- Bootstrap --> <link href="https://cdnjs.cloudflare.com/ajax/libs/bootswatch/3.4.0/yeti/bootstrap.min.css" rel="stylesheet" crossorigin="anonymous" /> <script src="https://cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/3.4.1/js/bootstrap.min.js" integrity="sha256-nuL8/2cJ5NDSSwnKD8VqreErSWHtnEP9E7AySL+1ev4=" crossorigin="anonymous"></script> <!-- bootstrap-toc --> <link rel="stylesheet" href="../bootstrap-toc.css"> <script src="../bootstrap-toc.js"></script> <!-- Font Awesome icons --> <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/5.12.1/css/all.min.css" integrity="sha256-mmgLkCYLUQbXn0B1SRqzHar6dCnv9oZFPEC1g1cwlkk=" crossorigin="anonymous" /> <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/5.12.1/css/v4-shims.min.css" integrity="sha256-wZjR52fzng1pJHwx4aV2AO3yyTOXrcDW7jBpJtTwVxw=" crossorigin="anonymous" /> <!-- clipboard.js --> <script src="https://cdnjs.cloudflare.com/ajax/libs/clipboard.js/2.0.6/clipboard.min.js" integrity="sha256-inc5kl9MA1hkeYUt+EC3BhlIgyp/2jDIyBLS6k3UxPI=" crossorigin="anonymous"></script> <!-- headroom.js --> <script src="https://cdnjs.cloudflare.com/ajax/libs/headroom/0.11.0/headroom.min.js" integrity="sha256-AsUX4SJE1+yuDu5+mAVzJbuYNPHj/WroHuZ8Ir/CkE0=" crossorigin="anonymous"></script> <script src="https://cdnjs.cloudflare.com/ajax/libs/headroom/0.11.0/jQuery.headroom.min.js" integrity="sha256-ZX/yNShbjqsohH1k95liqY9Gd8uOiE1S4vZc+9KQ1K4=" 
crossorigin="anonymous"></script> <!-- pkgdown --> <link href="../pkgdown.css" rel="stylesheet"> <script src="../pkgdown.js"></script> <meta property="og:title" content="Create a numeric input control — numericInput" /> <meta property="og:description" content="Create a numeric input control" /> <!-- mathjax --> <script src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/MathJax.js" integrity="sha256-nvJJv9wWKEm88qvoQl9ekL2J+k/RWIsaSScxxlsrv8k=" crossorigin="anonymous"></script> <script src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/config/TeX-AMS-MML_HTMLorMML.js" integrity="sha256-84DKXVJXs0/F8OTMzX4UR909+jtl4G7SPypPavF+GfA=" crossorigin="anonymous"></script> <!--[if lt IE 9]> <script src="https://oss.maxcdn.com/html5shiv/3.7.3/html5shiv.min.js"></script> <script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script> <![endif]--> </head> <body data-spy="scroll" data-target="#toc"> <div class="container template-reference-topic"> <header> <div class="navbar navbar-inverse navbar-fixed-top" role="navigation"> <div class="container"> <div class="navbar-header"> <button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#navbar" aria-expanded="false"> <span class="sr-only">Toggle navigation</span> <span class="icon-bar"></span> <span class="icon-bar"></span> <span class="icon-bar"></span> </button> <span class="navbar-brand"> <a class="navbar-link" href="../index.html">SHINY.SEMANTIC</a> <span class="version label label-default" data-toggle="tooltip" data-placement="bottom" title="Released version">0.3.1</span> </span> </div> <div id="navbar" class="navbar-collapse collapse"> <ul class="nav navbar-nav"> <li> <a href="../index.html"> <span class="fa fa-home"></span> Start </a> </li> </ul> <ul class="nav navbar-nav navbar-right"> <li> <a href="../reference/index.html"> <span class="fa fa-file-code-o"></span> Functions </a> </li> <li> <a href="../CHANGELOG.html"> <span class="fa fa-newspaper-o"></span> Changes 
</a> </li> <li> <a href="../CODE_OF_CONDUCT.html"> <span class="fa fa-user-o"></span> CoC </a> </li> <li> <a href="https://github.com/Appsilon/shiny.semantic"> <span class="fa fa-github fa-lg"></span> </a> </li> <li> <a href="https://twitter.com/Appsilon"> <span class="fa fa-twitter fa-lg"></span> </a> </li> </ul> </div><!--/.nav-collapse --> </div><!--/.container --> </div><!--/.navbar --> </header> <div class="row"> <div class="col-md-9 contents"> <div class="page-header"> <h1>Create a numeric input control</h1> <small class="dont-index">Source: <a href='https://github.com/Appsilon/shiny.semantic/blob/master/R/input.R'><code>R/input.R</code></a></small> <div class="hidden name"><code>numericInput.Rd</code></div> </div> <div class="ref-description"> <p>Create a numeric input control</p> </div> <pre class="usage"><span class='fu'>numericInput</span>( <span class='no'>inputId</span>, <span class='no'>label</span>, <span class='no'>value</span>, <span class='kw'>min</span> <span class='kw'>=</span> <span class='fl'>NA</span>, <span class='kw'>max</span> <span class='kw'>=</span> <span class='fl'>NA</span>,
{ "pile_set_name": "Github" }
// Integration tests for mkdirp's `mode` argument: build a deep,
// randomly named directory path and verify the permission bits that
// end up on the created leaf directory.
var mkdirp = require('../').mkdirp;
var path = require('path');
var fs = require('fs');
var test = require('tap').test;

// Octal constants spelled via parseInt so the file also parses in
// strict mode (legacy 0755-style literals are rejected there).
var _0777 = parseInt('0777', 8);
var _0755 = parseInt('0755', 8);
var _0744 = parseInt('0744', 8);

// Assemble an absolute path "/tmp/<25 random hex segments>"; the
// leading empty segment makes join('/') produce a leading slash.
var parts = ['', 'tmp'];
for (var i = 0; i < 25; i++) {
    parts.push(Math.floor(Math.random() * Math.pow(16, 4)).toString(16));
}
var file = parts.join('/');

test('chmod-pre', function (t) {
    var mode = _0744;
    mkdirp(file, mode, function (err) {
        t.ifError(err, 'should not error');
        fs.stat(file, function (err, stats) {
            t.ifError(err, 'should exist');
            t.ok(stats && stats.isDirectory(), 'should be directory');
            // Mask with 0777 so file-type bits don't affect the comparison.
            t.equal(stats && stats.mode & _0777, mode, 'should be 0744');
            t.end();
        });
    });
});

test('chmod', function (t) {
    var mode = _0755;
    mkdirp(file, mode, function (err) {
        t.ifError(err, 'should not error');
        fs.stat(file, function (err, stats) {
            t.ifError(err, 'should exist');
            t.ok(stats && stats.isDirectory(), 'should be directory');
            // NOTE(review): unlike chmod-pre, no mode equality check here —
            // the directory already exists from the previous test, so mkdirp
            // is not expected to re-chmod it.
            t.end();
        });
    });
});
{ "pile_set_name": "Github" }
/*! * # Semantic UI - Nag * http://github.com/semantic-org/semantic-ui/ * * * Released under the MIT license * http://opensource.org/licenses/MIT * */ ;(function ($, window, document, undefined) { "use strict"; window = (typeof window != 'undefined' && window.Math == Math) ? window : (typeof self != 'undefined' && self.Math == Math) ? self : Function('return this')() ; $.fn.nag = function(parameters) { var $allModules = $(this), moduleSelector = $allModules.selector || '', time = new Date().getTime(), performance = [], query = arguments[0], methodInvoked = (typeof query == 'string'), queryArguments = [].slice.call(arguments, 1), returnedValue ; $allModules .each(function() { var settings = ( $.isPlainObject(parameters) ) ? $.extend(true, {}, $.fn.nag.settings, parameters) : $.extend({}, $.fn.nag.settings), className = settings.className, selector = settings.selector, error = settings.error, namespace = settings.namespace, eventNamespace = '.' + namespace, moduleNamespace = namespace + '-module', $module = $(this), $close = $module.find(selector.close), $context = (settings.context) ? 
$(settings.context) : $('body'), element = this, instance = $module.data(moduleNamespace), moduleOffset, moduleHeight, contextWidth, contextHeight, contextOffset, yOffset, yPosition, timer, module, requestAnimationFrame = window.requestAnimationFrame || window.mozRequestAnimationFrame || window.webkitRequestAnimationFrame || window.msRequestAnimationFrame || function(callback) { setTimeout(callback, 0); } ; module = { initialize: function() { module.verbose('Initializing element'); $module .on('click' + eventNamespace, selector.close, module.dismiss) .data(moduleNamespace, module) ; if(settings.detachable && $module.parent()[0] !== $context[0]) { $module .detach() .prependTo($context) ; } if(settings.displayTime > 0) { setTimeout(module.hide, settings.displayTime); } module.show(); }, destroy: function() { module.verbose('Destroying instance'); $module .removeData(moduleNamespace) .off(eventNamespace) ; }, show: function() { if( module.should.show() && !$module.is(':visible') ) { module.debug('Showing nag', settings.animation.show); if(settings.animation.show == 'fade') { $module .fadeIn(settings.duration, settings.easing) ; } else { $module .slideDown(settings.duration, settings.easing) ; } } }, hide: function() { module.debug('Showing nag', settings.animation.hide); if(settings.animation.show == 'fade') { $module .fadeIn(settings.duration, settings.easing) ; } else { $module .slideUp(settings.duration, settings.easing) ; } }, onHide: function() { module.debug('Removing nag', settings.animation.hide); $module.remove(); if (settings.onHide) { settings.onHide(); } }, dismiss: function(event) { if(settings.storageMethod) { module.storage.set(settings.key, settings.value); } module.hide(); event.stopImmediatePropagation(); event.preventDefault(); }, should: { show: function() { if(settings.persist) { module.debug('Persistent nag is set, can show nag'); return true; } if( module.storage.get(settings.key) != settings.value.toString() ) { module.debug('Stored value is 
not set, can show nag', module.storage.get(settings.key)); return true; } module.debug('Stored value is set, cannot show nag', module.storage.get(settings.key)); return false; } }, get: { storageOptions: function() { var options = {} ; if(settings.expires) { options.expires = settings.expires; } if(settings.domain) { options.domain = settings.domain; } if(settings.path) { options.path = settings.path; } return options; } }, clear: function() { module.storage.remove(settings.key); }, storage: { set: function(key, value) { var options = module.get.storageOptions() ; if(settings.storageMethod == 'localstorage' && window.localStorage !== undefined) { window.localStorage.setItem(key, value); module.debug('Value stored using local storage', key, value); } else if(settings.storageMethod == 'sessionstorage' && window.sessionStorage !== undefined) { window.sessionStorage.setItem(key, value); module.debug('Value stored using session storage', key, value); } else if($.cookie !== undefined) { $.cookie(key, value, options); module.debug('Value stored using cookie', key, value, options); } else { module.error(error.noCookieStorage); return; } }, get: function(key, value) { var storedValue ; if(settings.storageMethod == 'localstorage' && window.localStorage !== undefined) { storedValue = window.localStorage.getItem(key); } else if(settings.storageMethod == 'sessionstorage' && window.sessionStorage !== undefined) { storedValue = window.sessionStorage.getItem(key); } // get by cookie else if($.cookie !== undefined) { storedValue = $.cookie(key); } else { module.error(error.noCookieStorage); } if(storedValue == 'undefined' || storedValue == 'null' || storedValue === undefined || storedValue === null) { storedValue = undefined; } return storedValue; }, remove: function(key) { var options = module.get.storageOptions() ; if(settings.storageMethod == 'localstorage' && window.localStorage !== undefined) { window.localStorage.removeItem(key); } else if(settings.storageMethod == 
'sessionstorage' && window.sessionStorage !== undefined) { window.sessionStorage.removeItem(key); } // store by cookie else if($.cookie !== undefined) { $.removeCookie(key, options); } else { module.error(error.noStorage); } } }, setting: function(name, value) { module.debug('Changing setting', name, value); if( $.isPlainObject(name) ) { $.extend(true, settings, name); } else if(value !== undefined) { if($.isPlainObject(settings[name])) { $.extend(true, settings[name], value); } else { settings[name] = value; } } else { return settings[name]; } }, internal: function(name, value) { if( $.isPlainObject(name) ) {
{ "pile_set_name": "Github" }
/* * Copyright © 2003 Keith Packard * * Permission to use, copy, modify, distribute, and sell this software and its * documentation for any purpose is hereby granted without fee, provided that * the above copyright notice appear in all copies and that both that * copyright notice and this permission notice appear in supporting * documentation, and that the name of Keith Packard not be used in * advertising or publicity pertaining to distribution of the software without * specific, written prior permission. Keith Packard makes no * representations about the suitability of this software for any purpose. It * is provided "as is" without express or implied warranty. * * KEITH PACKARD DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, * INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO * EVENT SHALL KEITH PACKARD BE LIABLE FOR ANY SPECIAL, INDIRECT OR * CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, * DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR * PERFORMANCE OF THIS SOFTWARE. 
*/ #ifdef HAVE_CONFIG_H #include <config.h> #endif #include <limits.h> #include "Xfixesint.h" XserverRegion XFixesCreateRegion (Display *dpy, XRectangle *rectangles, int nrectangles) { XFixesExtDisplayInfo *info = XFixesFindDisplay (dpy); xXFixesCreateRegionReq *req; long len; XserverRegion region; XFixesCheckExtension (dpy, info, 0); LockDisplay (dpy); GetReq (XFixesCreateRegion, req); req->reqType = info->codes->major_opcode; req->xfixesReqType = X_XFixesCreateRegion; region = req->region = XAllocID (dpy); len = ((long) nrectangles) << 1; SetReqLen (req, len, len); len <<= 2; Data16 (dpy, (short *) rectangles, len); UnlockDisplay (dpy); SyncHandle(); return region; } XserverRegion XFixesCreateRegionFromBitmap (Display *dpy, Pixmap bitmap) { XFixesExtDisplayInfo *info = XFixesFindDisplay (dpy); xXFixesCreateRegionFromBitmapReq *req; XserverRegion region; XFixesCheckExtension (dpy, info, 0); LockDisplay (dpy); GetReq (XFixesCreateRegionFromBitmap, req); req->reqType = info->codes->major_opcode; req->xfixesReqType = X_XFixesCreateRegionFromBitmap; region = req->region = XAllocID (dpy); req->bitmap = bitmap; UnlockDisplay (dpy); SyncHandle(); return region; } XserverRegion XFixesCreateRegionFromWindow (Display *dpy, Window window, int kind) { XFixesExtDisplayInfo *info = XFixesFindDisplay (dpy); xXFixesCreateRegionFromWindowReq *req; XserverRegion region; XFixesCheckExtension (dpy, info, 0); LockDisplay (dpy); GetReq (XFixesCreateRegionFromWindow, req); req->reqType = info->codes->major_opcode; req->xfixesReqType = X_XFixesCreateRegionFromWindow; region = req->region = XAllocID (dpy); req->window = window; req->kind = kind; UnlockDisplay (dpy); SyncHandle(); return region; } XserverRegion XFixesCreateRegionFromGC (Display *dpy, GC gc) { XFixesExtDisplayInfo *info = XFixesFindDisplay (dpy); xXFixesCreateRegionFromGCReq *req; XserverRegion region; XFixesCheckExtension (dpy, info, 0); LockDisplay (dpy); GetReq (XFixesCreateRegionFromGC, req); req->reqType = 
info->codes->major_opcode; req->xfixesReqType = X_XFixesCreateRegionFromGC; region = req->region = XAllocID (dpy); req->gc = gc->gid; UnlockDisplay (dpy); SyncHandle(); return region; } XserverRegion XFixesCreateRegionFromPicture (Display *dpy, XID picture) { XFixesExtDisplayInfo *info = XFixesFindDisplay (dpy); xXFixesCreateRegionFromPictureReq *req; XserverRegion region; XFixesCheckExtension (dpy, info, 0); LockDisplay (dpy); GetReq (XFixesCreateRegionFromPicture, req); req->reqType = info->codes->major_opcode; req->xfixesReqType = X_XFixesCreateRegionFromPicture; region = req->region = XAllocID (dpy); req->picture = picture; UnlockDisplay (dpy); SyncHandle(); return region; } void XFixesDestroyRegion (Display *dpy, XserverRegion region) { XFixesExtDisplayInfo *info = XFixesFindDisplay (dpy); xXFixesDestroyRegionReq *req; XFixesSimpleCheckExtension (dpy, info); LockDisplay (dpy); GetReq (XFixesDestroyRegion, req); req->reqType = info->codes->major_opcode; req->xfixesReqType = X_XFixesDestroyRegion; req->region = region; UnlockDisplay (dpy); SyncHandle(); } void XFixesSetRegion (Display *dpy, XserverRegion region, XRectangle *rectangles, int nrectangles) { XFixesExtDisplayInfo *info = XFixesFindDisplay (dpy); xXFixesSetRegionReq *req; long len; XFixesSimpleCheckExtension (dpy, info); LockDisplay (dpy); GetReq (XFixesSetRegion, req); req->reqType = info->codes->major_opcode; req->xfixesReqType = X_XFixesSetRegion; req->region = region; len = ((long) nrectangles) << 1; SetReqLen (req, len, len); len <<= 2; Data16 (dpy, (short *) rectangles, len); UnlockDisplay (dpy); SyncHandle(); } void XFixesCopyRegion (Display *dpy, XserverRegion dst, XserverRegion src) { XFixesExtDisplayInfo *info = XFixesFindDisplay (dpy); xXFixesCopyRegionReq *req; XFixesSimpleCheckExtension (dpy, info); LockDisplay (dpy); GetReq (XFixesCopyRegion, req); req->reqType = info->codes->major_opcode; req->xfixesReqType = X_XFixesCopyRegion; req->source = src; req->destination = dst; UnlockDisplay 
(dpy); SyncHandle(); } void XFixesUnionRegion (Display *dpy, XserverRegion dst, XserverRegion src1, XserverRegion src2) { XFixesExtDisplayInfo *info = XFixesFindDisplay (dpy); xXFixesUnionRegionReq *req; XFixesSimpleCheckExtension (dpy, info); LockDisplay (dpy); GetReq (XFixesUnionRegion, req); req->reqType = info->codes->major_opcode; req->xfixesReqType = X_XFixesUnionRegion; req->source1 = src1; req->source2 = src2; req->destination = dst; UnlockDisplay (dpy); SyncHandle(); } void XFixesIntersectRegion (Display *dpy, XserverRegion dst, XserverRegion src1, XserverRegion src2) { XFixes
{ "pile_set_name": "Github" }
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> <html xmlns="http://www.w3.org/1999/xhtml"> <head> <meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/> <meta http-equiv="X-UA-Compatible" content="IE=9"/> <meta name="generator" content="Doxygen 1.8.6"/> <title>libguac: Data Fields</title> <link href="tabs.css" rel="stylesheet" type="text/css"/> <script type="text/javascript" src="jquery.js"></script> <script type="text/javascript" src="dynsections.js"></script> <link href="search/search.css" rel="stylesheet" type="text/css"/> <script type="text/javascript" src="search/search.js"></script> <script type="text/javascript"> $(document).ready(function() { searchBox.OnSelectItem(0); }); </script> <link href="doxygen.css" rel="stylesheet" type="text/css" /> </head> <body> <div id="top"><!-- do not remove this div, it is closed by doxygen! --> <div id="titlearea"> <table cellspacing="0" cellpadding="0"> <tbody> <tr style="height: 56px;"> <td style="padding-left: 0.5em;"> <div id="projectname">libguac &#160;<span id="projectnumber">0.9.3</span> </div> </td> </tr> </tbody> </table> </div> <!-- end header part --> <!-- Generated by Doxygen 1.8.6 --> <script type="text/javascript"> var searchBox = new SearchBox("searchBox", "search",false,'Search'); </script> <div id="navrow1" class="tabs"> <ul class="tablist"> <li><a href="index.html"><span>Main&#160;Page</span></a></li> <li class="current"><a href="annotated.html"><span>Data&#160;Structures</span></a></li> <li><a href="files.html"><span>Files</span></a></li> <li> <div id="MSearchBox" class="MSearchBoxInactive"> <span class="left"> <img id="MSearchSelect" src="search/mag_sel.png" onmouseover="return searchBox.OnSearchSelectShow()" onmouseout="return searchBox.OnSearchSelectHide()" alt=""/> <input type="text" id="MSearchField" value="Search" accesskey="S" onfocus="searchBox.OnSearchFieldFocus(true)" 
onblur="searchBox.OnSearchFieldFocus(false)" onkeyup="searchBox.OnSearchFieldChange(event)"/> </span><span class="right"> <a id="MSearchClose" href="javascript:searchBox.CloseResultsWindow()"><img id="MSearchCloseImg" border="0" src="search/close.png" alt=""/></a> </span> </div> </li> </ul> </div> <div id="navrow2" class="tabs2"> <ul class="tablist"> <li><a href="annotated.html"><span>Data&#160;Structures</span></a></li> <li class="current"><a href="functions.html"><span>Data&#160;Fields</span></a></li> </ul> </div> <div id="navrow3" class="tabs2"> <ul class="tablist"> <li class="current"><a href="functions.html"><span>All</span></a></li> <li><a href="functions_vars.html"><span>Variables</span></a></li> </ul> </div> <div id="navrow4" class="tabs3"> <ul class="tablist"> <li><a href="#index_a"><span>a</span></a></li> <li><a href="#index_b"><span>b</span></a></li> <li><a href="#index_c"><span>c</span></a></li> <li><a href="#index_d"><span>d</span></a></li> <li><a href="#index_e"><span>e</span></a></li> <li><a href="#index_f"><span>f</span></a></li> <li><a href="#index_h"><span>h</span></a></li> <li><a href="#index_i"><span>i</span></a></li> <li><a href="#index_k"><span>k</span></a></li> <li><a href="#index_l"><span>l</span></a></li> <li><a href="#index_m"><span>m</span></a></li> <li><a href="#index_o"><span>o</span></a></li> <li><a href="#index_p"><span>p</span></a></li> <li><a href="#index_r"><span>r</span></a></li> <li><a href="#index_s"><span>s</span></a></li> <li><a href="#index_u"><span>u</span></a></li> <li><a href="#index_v"><span>v</span></a></li> <li class="current"><a href="#index_w"><span>w</span></a></li> </ul> </div> </div><!-- top --> <!-- window showing the filter options --> <div id="MSearchSelectWindow" onmouseover="return searchBox.OnSearchSelectShow()" onmouseout="return searchBox.OnSearchSelectHide()" onkeydown="return searchBox.OnSearchSelectKey(event)"> <a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(0)"><span 
class="SelectionMark">&#160;</span>All</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(1)"><span class="SelectionMark">&#160;</span>Data Structures</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(2)"><span class="SelectionMark">&#160;</span>Files</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(3)"><span class="SelectionMark">&#160;</span>Functions</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(4)"><span class="SelectionMark">&#160;</span>Variables</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(5)"><span class="SelectionMark">&#160;</span>Typedefs</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(6)"><span class="SelectionMark">&#160;</span>Enumerations</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(7)"><span class="SelectionMark">&#160;</span>Enumerator</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(8)"><span class="SelectionMark">&#160;</span>Macros</a></div> <!-- iframe showing the search results (closed by default) --> <div id="MSearchResultsWindow"> <iframe src="javascript:void(0)" frameborder="0" name="MSearchResults" id="MSearchResults"> </iframe> </div> <div class="contents"> <div class="textblock">Here is a list of all documented struct and union fields with links to the struct/union documentation for each field:</div> <h3><a class="anchor" id="index_a"></a>- a -</h3><ul> <li>ack_handler : <a class="el" href="structguac__client.html#ada98af16d05a2571650
{ "pile_set_name": "Github" }
/////////////////////////////////////////////////////////////////////////////////// /// OpenGL Mathematics (glm.g-truc.net) /// /// Copyright (c) 2005 - 2014 G-Truc Creation (www.g-truc.net) /// Permission is hereby granted, free of charge, to any person obtaining a copy /// of this software and associated documentation files (the "Software"), to deal /// in the Software without restriction, including without limitation the rights /// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell /// copies of the Software, and to permit persons to whom the Software is /// furnished to do so, subject to the following conditions: /// /// The above copyright notice and this permission notice shall be included in /// all copies or substantial portions of the Software. /// /// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR /// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, /// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE /// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER /// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, /// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN /// THE SOFTWARE. 
/// /// @ref core /// @file glm/core/func_vector_relational.inl /// @date 2008-08-03 / 2011-09-09 /// @author Christophe Riccio /////////////////////////////////////////////////////////////////////////////////// #include <limits> namespace glm { template <typename T, precision P, template <typename, precision> class vecType> GLM_FUNC_QUALIFIER typename vecType<T, P>::bool_type lessThan ( vecType<T, P> const & x, vecType<T, P> const & y ) { GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559 || std::numeric_limits<T>::is_integer, "Invalid template instantiation of 'lessThan', GLM vector types required floating-point or integer value types vectors"); assert(x.length() == y.length()); typename vecType<bool, P>::bool_type Result(vecType<bool, P>::_null); for(int i = 0; i < x.length(); ++i) Result[i] = x[i] < y[i]; return Result; } template <typename T, precision P, template <typename, precision> class vecType> GLM_FUNC_QUALIFIER typename vecType<T, P>::bool_type lessThanEqual ( vecType<T, P> const & x, vecType<T, P> const & y ) { GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559 || std::numeric_limits<T>::is_integer, "Invalid template instantiation of 'lessThanEqual', GLM vector types required floating-point or integer value types vectors"); assert(x.length() == y.length()); typename vecType<bool, P>::bool_type Result(vecType<bool, P>::_null); for(int i = 0; i < x.length(); ++i) Result[i] = x[i] <= y[i]; return Result; } template <typename T, precision P, template <typename, precision> class vecType> GLM_FUNC_QUALIFIER typename vecType<T, P>::bool_type greaterThan ( vecType<T, P> const & x, vecType<T, P> const & y ) { GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559 || std::numeric_limits<T>::is_integer, "Invalid template instantiation of 'greaterThan', GLM vector types required floating-point or integer value types vectors"); assert(x.length() == y.length()); typename vecType<bool, P>::bool_type Result(vecType<bool, P>::_null); for(int i = 0; i < x.length(); ++i) 
Result[i] = x[i] > y[i]; return Result; } template <typename T, precision P, template <typename, precision> class vecType> GLM_FUNC_QUALIFIER typename vecType<T, P>::bool_type greaterThanEqual ( vecType<T, P> const & x, vecType<T, P> const & y ) { GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559 || std::numeric_limits<T>::is_integer, "Invalid template instantiation of 'greaterThanEqual', GLM vector types required floating-point or integer value types vectors"); assert(x.length() == y.length()); typename vecType<bool, P>::bool_type Result(vecType<bool, P>::_null); for(int i = 0; i < x.length(); ++i) Result[i] = x[i] >= y[i]; return Result; } template <typename T, precision P, template <typename, precision> class vecType> GLM_FUNC_QUALIFIER typename vecType<T, P>::bool_type equal ( vecType<T, P> const & x, vecType<T, P> const & y ) { assert(x.length() == y.length()); typename vecType<bool, P>::bool_type Result(vecType<bool, P>::_null); for(int i = 0; i < x.length(); ++i) Result[i] = x[i] == y[i]; return Result; } template <typename T, precision P, template <typename, precision> class vecType> GLM_FUNC_QUALIFIER typename vecType<T, P>::bool_type notEqual ( vecType<T, P> const & x, vecType<T, P> const & y ) { assert(x.length() == y.length()); typename vecType<bool, P>::bool_type Result(vecType<bool, P>::_null); for(int i = 0; i < x.length(); ++i) Result[i] = x[i] != y[i]; return Result; } template <precision P, template <typename, precision> class vecType> GLM_FUNC_QUALIFIER bool any(vecType<bool, P> const & v) { bool Result = false; for(int i = 0; i < v.length(); ++i) Result = Result || v[i]; return Result; } template <precision P, template <typename, precision> class vecType> GLM_FUNC_QUALIFIER bool all(vecType<bool, P> const & v) { bool Result = true; for(int i = 0; i < v.length(); ++i) Result = Result && v[i]; return Result; } template <precision P, template <typename, precision> class vecType> GLM_FUNC_QUALIFIER vecType<bool, P> not_(vecType<bool, P> const & v) 
{ typename vecType<bool, P>::bool_type Result(vecType<bool, P>::_null); for(int i = 0; i < v.length(); ++i) Result[i] = !v[i]; return Result; } }//namespace glm
{ "pile_set_name": "Github" }
# script for stm32
# OpenOCD configuration for an STM32 target driven through an Olimex
# FT2232-based JTAG adapter. Sets up the adapter, the JTAG scan chain
# (Cortex-M3 CPU TAP + boundary-scan TAP), the target, internal flash,
# and a small mass-erase/program helper procedure.

# Adapter: FT2232 with the Olimex layout and Olimex USB VID/PID.
interface ft2232
ft2232_device_desc "Olimex OpenOCD JTAG"
ft2232_layout olimex-jtag
ft2232_vid_pid 0x15ba 0x0003

# Allow the caller to override the chip name / endianness from the
# command line; otherwise fall back to sensible STM32 defaults.
if { [info exists CHIPNAME] } {
   set _CHIPNAME $CHIPNAME
} else {
   set _CHIPNAME stm32
}

if { [info exists ENDIAN] } {
   set _ENDIAN $ENDIAN
} else {
   set _ENDIAN little
}

# jtag speed
jtag_khz 600

#use combined on interfaces or targets that can't set TRST/SRST separately
reset_config trst_and_srst

#jtag scan chain
if { [info exists CPUTAPID ] } {
   set _CPUTAPID $CPUTAPID
} else {
  # See STM Document RM0008
  # Section 26.6.3
   set _CPUTAPID 0x3ba00477
}
jtag newtap $_CHIPNAME cpu -irlen 4 -ircapture 0x1 -irmask 0xf -expected-id $_CPUTAPID

if { [info exists BSTAPID ] } {
   set _BSTAPID $BSTAPID
} else {
  # See STM Document RM0008
  # Section 26.6.2
  # Medium Density RevA
   set _BSTAPID 0x06410041
  # Rev B and Rev Z
  # NOTE(review): this unconditionally overwrites the RevA value above,
  # so only the Rev B/Z ID is actually used — presumably intentional.
   set _BSTAPID 0x16410041
  # High Density Devices, Rev A
   #set _BSTAPID 0x06414041
}
jtag newtap $_CHIPNAME bs -irlen 5 -ircapture 0x1 -irmask 0x1 -expected-id $_BSTAPID

# Create the Cortex-M3 target and give it 0x5000 bytes of working area
# in SRAM (0x20000000) for flash-programming algorithms.
set _TARGETNAME [format "%s.cpu" $_CHIPNAME]
target create $_TARGETNAME cortex_m3 -endian $_ENDIAN -chain-position $_TARGETNAME
$_TARGETNAME configure -work-area-virt 0 -work-area-phys 0x20000000 -work-area-size 0x5000 -work-area-backup 0

#$_TARGETNAME configure -event halted halt_handle
#flash bank stm32x 0 0 0 0 0
#target create cortex_m3 -endian little
#run_and_halt_time 0 30
#working_area 0 0x20000000 0x4000 nobackup

# Internal flash: 64 KiB at the standard STM32 flash base address.
flash bank stm32x 0x08000000 0x00010000 0 0 0

# For more information about the configuration files, take a look at:
# openocd.texi
#script flash.script

# Event helper: immediately resume after a halt.
proc halt_handle {} {
   resume
}

# Mass-erase the chip, program tmpflash.bin into bank 0, reset and shut
# down. The sleeps give the adapter/target time to settle between steps.
proc flash_test {} {
   puts "Trying to flash"
   sleep 100
   halt
   sleep 300
   stm32x mass_erase 0
   sleep 20
   flash write_bank 0 tmpflash.bin 0
   sleep 50
#   reset run
#   sleep 500
   reset run
   shutdown
}

# Run the flashing sequence as soon as OpenOCD starts.
init
flash_test
{ "pile_set_name": "Github" }
# Blender v2.67 (sub 0) OBJ File: 'base.blend' # www.blender.org g base v -0.500000 0.000000 0.500000 v -0.500000 0.000000 -0.500000 v 0.500000 0.000000 -0.500000 v 0.500000 0.000000 0.500000 v -0.500000 0.125000 0.500000 v -0.500000 0.125000 -0.500000 v 0.500000 0.125000 -0.500000 v 0.500000 0.125000 0.500000 vt 0.000000 0.875000 vt 1.000000 0.875000 vt 1.000000 1.000000 vt 0.000000 1.000000 vt 0.000000 0.000000 vt 1.000000 0.000000 vn -1.000000 0.000000 0.000000 vn 0.000000 0.000000 -1.000000 vn 1.000000 -0.000000 0.000000 vn 0.000000 -0.000000 1.000000 vn -0.000000 -1.000000 0.000000 vn -0.000000 1.000000 0.000000 s off f 5/1/1 6/2/1 2/3/1 1/4/1 f 6/1/2 7/2/2 3/3/2 2/4/2 f 7/1/3 8/2/3 4/3/3 3/4/3 f 8/1/4 5/2/4 1/3/4 4/4/4 f 1/5/5 2/6/5 3/3/5 4/4/5 f 8/5/6 7/6/6 6/3/6 5/4/6
{ "pile_set_name": "Github" }
/* RetroArch - A frontend for libretro.
 * Copyright (C) 2011-2017 - Daniel De Matteis
 *
 * RetroArch is free software: you can redistribute it and/or modify it under the terms
 * of the GNU General Public License as published by the Free Software Found-
 * ation, either version 3 of the License, or (at your option) any later version.
 *
 * RetroArch is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
 * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
 * PURPOSE. See the GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along with RetroArch.
 * If not, see <http://www.gnu.org/licenses/>.
 */

#include <stdint.h>
#include <boolean.h>
#include <stddef.h>
#include <stdlib.h>
#include <string.h>

#include "../../gfx/common/win32_common.h"
#include <windows.h>
#include <commdlg.h>
#include <commctrl.h>

#include "../../ui_companion_driver.h"
#include "../../configuration.h"

/* Shared implementation for the Win32 open/save file dialogs.
 * Fills an OPENFILENAME from the browser state and shows either
 * GetOpenFileName (save == false) or GetSaveFileName (save == true).
 * Returns true when the user confirmed a file; the chosen path is
 * written in place into state->path by the dialog. */
static bool ui_browser_window_win32_core(
      ui_browser_window_state_t *state, bool save)
{
   OPENFILENAME ofn;
   bool okay            = false;
   settings_t *settings = config_get_ptr();
   bool video_fullscreen = settings->bools.video_fullscreen;

   /* Every OPENFILENAME member is assigned explicitly below,
    * so no zero-initialization of ofn is needed. */
   ofn.lStructSize       = sizeof(OPENFILENAME);
   ofn.hwndOwner         = (HWND)state->window;
   ofn.hInstance         = NULL;
   ofn.lpstrFilter       = state->filters; /* actually const */
   ofn.lpstrCustomFilter = NULL;
   ofn.nMaxCustFilter    = 0;
   ofn.nFilterIndex      = 0;
   ofn.lpstrFile         = state->path;   /* in/out buffer for the result */
   ofn.nMaxFile          = PATH_MAX;
   ofn.lpstrFileTitle    = NULL;
   ofn.nMaxFileTitle     = 0;
   ofn.lpstrInitialDir   = state->startdir;
   ofn.lpstrTitle        = state->title;
   /* NOCHANGEDIR keeps the dialog from changing RetroArch's CWD. */
   ofn.Flags             = OFN_FILEMUSTEXIST | OFN_HIDEREADONLY | OFN_NOCHANGEDIR;
   ofn.nFileOffset       = 0;
   ofn.nFileExtension    = 0;
   ofn.lpstrDefExt       = "";
   ofn.lCustData         = 0;
   ofn.lpfnHook          = NULL;
   ofn.lpTemplateName    = NULL;
#if (_WIN32_WINNT >= 0x0500)
   /* These members only exist in the Win2000+ OPENFILENAME layout. */
   ofn.pvReserved        = NULL;
   ofn.dwReserved        = 0;
   ofn.FlagsEx           = 0;
#endif

   /* Full Screen: Show mouse for the file dialog */
   if (video_fullscreen)
      video_driver_show_mouse();

   okay = true;
   if (!save && !GetOpenFileName(&ofn))
      okay = false;
   if (save && !GetSaveFileName(&ofn))
      okay = false;

   /* Full screen: Hide mouse after the file dialog */
   if (video_fullscreen)
      video_driver_hide_mouse();

   return okay;
}

/* Show an "Open file" dialog for the given browser state. */
static bool ui_browser_window_win32_open(ui_browser_window_state_t *state)
{
   return ui_browser_window_win32_core(state, false);
}

/* Show a "Save file" dialog for the given browser state. */
static bool ui_browser_window_win32_save(ui_browser_window_state_t *state)
{
   return ui_browser_window_win32_core(state, true);
}

/* Driver vtable exposed to the UI companion layer. */
ui_browser_window_t ui_browser_window_win32 = {
   ui_browser_window_win32_open,
   ui_browser_window_win32_save,
   "win32"
};
{ "pile_set_name": "Github" }
""" Use this module directly: import xarray.plot as xplt Or use the methods on a DataArray or Dataset: DataArray.plot._____ Dataset.plot._____ """ import functools import numpy as np import pandas as pd from .facetgrid import _easy_facetgrid from .utils import ( _add_colorbar, _assert_valid_xy, _ensure_plottable, _infer_interval_breaks, _infer_xy_labels, _process_cmap_cbar_kwargs, _rescale_imshow_rgb, _resolve_intervals_1dplot, _resolve_intervals_2dplot, _update_axes, get_axis, import_matplotlib_pyplot, label_from_attrs, ) def _infer_line_data(darray, x, y, hue): ndims = len(darray.dims) if x is not None and y is not None: raise ValueError("Cannot specify both x and y kwargs for line plots.") if x is not None: _assert_valid_xy(darray, x, "x") if y is not None: _assert_valid_xy(darray, y, "y") if ndims == 1: huename = None hueplt = None huelabel = "" if x is not None: xplt = darray[x] yplt = darray elif y is not None: xplt = darray yplt = darray[y] else: # Both x & y are None dim = darray.dims[0] xplt = darray[dim] yplt = darray else: if x is None and y is None and hue is None: raise ValueError("For 2D inputs, please specify either hue, x or y.") if y is None: xname, huename = _infer_xy_labels(darray=darray, x=x, y=hue) xplt = darray[xname] if xplt.ndim > 1: if huename in darray.dims: otherindex = 1 if darray.dims.index(huename) == 0 else 0 otherdim = darray.dims[otherindex] yplt = darray.transpose(otherdim, huename, transpose_coords=False) xplt = xplt.transpose(otherdim, huename, transpose_coords=False) else: raise ValueError( "For 2D inputs, hue must be a dimension" " i.e. 
one of " + repr(darray.dims) ) else: (xdim,) = darray[xname].dims (huedim,) = darray[huename].dims yplt = darray.transpose(xdim, huedim) else: yname, huename = _infer_xy_labels(darray=darray, x=y, y=hue) yplt = darray[yname] if yplt.ndim > 1: if huename in darray.dims: otherindex = 1 if darray.dims.index(huename) == 0 else 0 otherdim = darray.dims[otherindex] xplt = darray.transpose(otherdim, huename, transpose_coords=False) yplt = yplt.transpose(otherdim, huename, transpose_coords=False) else: raise ValueError( "For 2D inputs, hue must be a dimension" " i.e. one of " + repr(darray.dims) ) else: (ydim,) = darray[yname].dims (huedim,) = darray[huename].dims xplt = darray.transpose(ydim, huedim) huelabel = label_from_attrs(darray[huename]) hueplt = darray[huename] xlabel = label_from_attrs(xplt) ylabel = label_from_attrs(yplt) return xplt, yplt, hueplt, xlabel, ylabel, huelabel def plot( darray, row=None, col=None, col_wrap=None, ax=None, hue=None, rtol=0.01, subplot_kws=None, **kwargs, ): """ Default plot of DataArray using matplotlib.pyplot. Calls xarray plotting function based on the dimensions of darray.squeeze() =============== =========================== Dimensions Plotting function --------------- --------------------------- 1 :py:func:`xarray.plot.line` 2 :py:func:`xarray.plot.pcolormesh` Anything else :py:func:`xarray.plot.hist` =============== =========================== Parameters ---------- darray : DataArray row : str, optional If passed, make row faceted plots on this dimension name col : str, optional If passed, make column faceted plots on this dimension name hue : str, optional If passed, make faceted line plots with hue on this dimension name col_wrap : int, optional Use together with ``col`` to wrap faceted plots ax : matplotlib.axes.Axes, optional If None, uses the current axis. Not applicable when using facets. rtol : float, optional Relative tolerance used to determine if the indexes are uniformly spaced. Usually a small positive number. 
subplot_kws : dict, optional Dictionary of keyword arguments for matplotlib subplots. **kwargs : optional Additional keyword arguments to matplotlib """ darray = darray.squeeze().compute() plot_dims = set(darray.dims) plot_dims.discard(row) plot_dims.discard(col) plot_dims.discard(hue) ndims = len(plot_dims) error_msg = ( "Only 1d and 2d plots are supported for facets in xarray. " "See the package `Seaborn` for more options." ) if ndims in [1, 2]: if row or col: kwargs["subplot_kws"] = subplot_kws kwargs["row"] = row kwargs["col"] = col kwargs["col_wrap"] = col_wrap if ndims == 1: plotfunc = line kwargs["hue"] = hue elif ndims == 2: if hue: plotfunc = line kwargs["hue"] = hue else: plotfunc = pcolormesh kwargs["subplot_kws"] = subplot_kws else: if row or col or hue: raise ValueError(error_msg) plotfunc = hist kwargs["ax"] = ax return plotfunc(darray, **kwargs) # This function signature should not change so that it can use # matplotlib format strings def line( darray, *args, row=None, col=None, figsize=None, aspect=None, size=None, ax=None, hue=None, x=None, y=None, xincrease=None, yincrease=None, xscale=None, yscale=None, xticks=None, yticks=None, xlim=None, ylim=None, add_legend=True, _labels=True, **kwargs, ): """ Line plot of DataArray index against values Wraps :func:`matplotlib:matplotlib.pyplot.plot` Parameters ---------- darray : DataArray Must be 1 dimensional figsize : tuple, optional A tuple (width, height) of the figure in inches. Mutually exclusive with ``size`` and ``ax``. aspect : scalar, optional
{ "pile_set_name": "Github" }
/*
 * Minio Cloud Storage (C) 2018 Minio, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import Moment from "moment"
import storage from "local-storage-fallback"
import * as alertActions from "../alert/actions"
import * as objectsActions from "../objects/actions"
import { getCurrentBucket } from "../buckets/selectors"
import { getCurrentPrefix } from "../objects/selectors"
import { minioBrowserPrefix } from "../constants"

// Action types handled by the uploads reducer.
export const ADD = "uploads/ADD"
export const UPDATE_PROGRESS = "uploads/UPDATE_PROGRESS"
export const STOP = "uploads/STOP"
export const SHOW_ABORT_MODAL = "uploads/SHOW_ABORT_MODAL"

// Plain action creators.
export const add = (slug, size, name) => ({
  type: ADD,
  slug,
  size,
  name
})

export const updateProgress = (slug, loaded) => ({
  type: UPDATE_PROGRESS,
  slug,
  loaded
})

export const stop = slug => ({
  type: STOP,
  slug
})

export const showAbortModal = () => ({
  type: SHOW_ABORT_MODAL,
  show: true
})

export const hideAbortModal = () => ({
  type: SHOW_ABORT_MODAL,
  show: false
})

// In-flight XHR requests keyed by upload slug, so uploads can be aborted.
const requests = {}

// Thunk: register an in-flight request and record the upload in the store.
export const addUpload = (xhr, slug, size, name) => {
  return function(dispatch) {
    requests[slug] = xhr
    dispatch(add(slug, size, name))
  }
}

// Thunk: abort the in-flight request for `slug` (if any) and reset UI state.
export const abortUpload = slug => {
  return function(dispatch) {
    const xhr = requests[slug]
    if (xhr) {
      xhr.abort()
    }
    dispatch(stop(slug))
    dispatch(hideAbortModal())
  }
}

// Thunk: upload `file` into the currently selected bucket/prefix via a PUT
// request, dispatching progress, success and failure actions along the way.
export const uploadFile = file => {
  return function(dispatch, getState) {
    const state = getState()
    const currentBucket = getCurrentBucket(state)
    if (!currentBucket) {
      dispatch(
        alertActions.set({
          type: "danger",
          message: "Please choose a bucket before trying to upload files."
        })
      )
      return
    }
    const currentPrefix = getCurrentPrefix(state)
    const objectName = `${currentPrefix}${file.name}`
    const uploadUrl = `${
      window.location.origin
    }${minioBrowserPrefix}/upload/${currentBucket}/${objectName}`
    const slug = `${currentBucket}-${currentPrefix}-${file.name}`

    const xhr = new XMLHttpRequest()
    xhr.open("PUT", uploadUrl, true)
    xhr.withCredentials = false
    const token = storage.getItem("token")
    if (token) {
      // Reuse the token read above instead of hitting storage a second time.
      xhr.setRequestHeader("Authorization", "Bearer " + token)
    }
    xhr.setRequestHeader(
      "x-amz-date",
      Moment()
        .utc()
        .format("YYYYMMDDTHHmmss") + "Z"
    )

    dispatch(addUpload(xhr, slug, file.size, file.name))

    xhr.onload = function() {
      // Status codes are numbers; compare with strict equality.
      if (xhr.status === 401 || xhr.status === 403) {
        dispatch(hideAbortModal())
        dispatch(stop(slug))
        dispatch(
          alertActions.set({
            type: "danger",
            message: "Unauthorized request."
          })
        )
      }
      if (xhr.status === 500) {
        dispatch(hideAbortModal())
        dispatch(stop(slug))
        dispatch(
          alertActions.set({
            type: "danger",
            message: xhr.responseText
          })
        )
      }
      if (xhr.status === 200) {
        dispatch(hideAbortModal())
        dispatch(stop(slug))
        dispatch(
          alertActions.set({
            type: "success",
            message: "File '" + file.name + "' uploaded successfully."
          })
        )
        // Re-select the prefix so the fresh object shows up in the listing.
        dispatch(objectsActions.selectPrefix(currentPrefix))
      }
    }

    xhr.upload.addEventListener("error", () => {
      dispatch(stop(slug))
      dispatch(
        alertActions.set({
          type: "danger",
          message: "Error occurred uploading '" + file.name + "'."
        })
      )
    })

    xhr.upload.addEventListener("progress", event => {
      if (event.lengthComputable) {
        // Update the byte counter shown in the UI.
        dispatch(updateProgress(slug, event.loaded))
      }
    })

    xhr.send(file)
  }
}
{ "pile_set_name": "Github" }
/* ****************************************************************************
 *
 * Copyright (c) Microsoft Corporation.
 *
 * This source code is subject to terms and conditions of the Apache License, Version 2.0. A
 * copy of the license can be found in the License.html file at the root of this distribution. If
 * you cannot locate the Apache License, Version 2.0, please send an email to
 * dlr@microsoft.com. By using this source code in any fashion, you are agreeing to be bound
 * by the terms of the Apache License, Version 2.0.
 *
 * You must not remove this notice, or any other, from this software.
 *
 *
 * ***************************************************************************/

using System.Diagnostics;

namespace System.Management.Automation.Interpreter
{
    /// <summary>
    /// Interpreter instruction that pops two values from the evaluation
    /// stack, compares them with the numeric greater-than operator and
    /// pushes the boolean result. One cached, type-specialized subclass
    /// exists per supported primitive type; instances are created lazily
    /// through <see cref="Create"/>.
    /// </summary>
    internal abstract class GreaterThanInstruction : Instruction
    {
        // Lazily-created singleton instances, one per supported TypeCode.
        private static Instruction s_SByte, s_int16, s_char, s_int32, s_int64, s_byte, s_UInt16, s_UInt32, s_UInt64, s_single, s_double;

        // Two operands are consumed...
        public override int ConsumedStack { get { return 2; } }
        // ...and one boolean result is produced.
        public override int ProducedStack { get { return 1; } }

        // Only the nested, type-specialized subclasses may be instantiated.
        private GreaterThanInstruction() { }

        // Each nested Run() unboxes both operands to the concrete primitive
        // type. The right operand is popped first because it was pushed last.
        // NOTE(review): each Run returns +1 — presumably the instruction
        // pointer advance; confirm against the Instruction.Run contract.
        internal sealed class GreaterThanSByte : GreaterThanInstruction
        {
            public override int Run(InterpretedFrame frame)
            {
                sbyte right = (sbyte)frame.Pop();
                frame.Push(((sbyte)frame.Pop()) > right);
                return +1;
            }
        }

        internal sealed class GreaterThanInt16 : GreaterThanInstruction
        {
            public override int Run(InterpretedFrame frame)
            {
                Int16 right = (Int16)frame.Pop();
                frame.Push(((Int16)frame.Pop()) > right);
                return +1;
            }
        }

        internal sealed class GreaterThanChar : GreaterThanInstruction
        {
            public override int Run(InterpretedFrame frame)
            {
                char right = (char)frame.Pop();
                frame.Push(((char)frame.Pop()) > right);
                return +1;
            }
        }

        internal sealed class GreaterThanInt32 : GreaterThanInstruction
        {
            public override int Run(InterpretedFrame frame)
            {
                Int32 right = (Int32)frame.Pop();
                frame.Push(((Int32)frame.Pop()) > right);
                return +1;
            }
        }

        internal sealed class GreaterThanInt64 : GreaterThanInstruction
        {
            public override int Run(InterpretedFrame frame)
            {
                Int64 right = (Int64)frame.Pop();
                frame.Push(((Int64)frame.Pop()) > right);
                return +1;
            }
        }

        internal sealed class GreaterThanByte : GreaterThanInstruction
        {
            public override int Run(InterpretedFrame frame)
            {
                byte right = (byte)frame.Pop();
                frame.Push(((byte)frame.Pop()) > right);
                return +1;
            }
        }

        internal sealed class GreaterThanUInt16 : GreaterThanInstruction
        {
            public override int Run(InterpretedFrame frame)
            {
                UInt16 right = (UInt16)frame.Pop();
                frame.Push(((UInt16)frame.Pop()) > right);
                return +1;
            }
        }

        internal sealed class GreaterThanUInt32 : GreaterThanInstruction
        {
            public override int Run(InterpretedFrame frame)
            {
                UInt32 right = (UInt32)frame.Pop();
                frame.Push(((UInt32)frame.Pop()) > right);
                return +1;
            }
        }

        internal sealed class GreaterThanUInt64 : GreaterThanInstruction
        {
            public override int Run(InterpretedFrame frame)
            {
                UInt64 right = (UInt64)frame.Pop();
                frame.Push(((UInt64)frame.Pop()) > right);
                return +1;
            }
        }

        internal sealed class GreaterThanSingle : GreaterThanInstruction
        {
            public override int Run(InterpretedFrame frame)
            {
                Single right = (Single)frame.Pop();
                frame.Push(((Single)frame.Pop()) > right);
                return +1;
            }
        }

        internal sealed class GreaterThanDouble : GreaterThanInstruction
        {
            public override int Run(InterpretedFrame frame)
            {
                double right = (double)frame.Pop();
                frame.Push(((double)frame.Pop()) > right);
                return +1;
            }
        }

        /// <summary>
        /// Returns the cached greater-than instruction specialized for
        /// <paramref name="type"/>, creating it on first use. Enums are
        /// not supported; unsupported type codes are unreachable.
        /// </summary>
        public static Instruction Create(Type type)
        {
            Debug.Assert(!type.IsEnum);
            switch (type.GetTypeCode())
            {
                case TypeCode.SByte: return s_SByte ?? (s_SByte = new GreaterThanSByte());
                case TypeCode.Byte: return s_byte ?? (s_byte = new GreaterThanByte());
                case TypeCode.Char: return s_char ?? (s_char = new GreaterThanChar());
                case TypeCode.Int16: return s_int16 ?? (s_int16 = new GreaterThanInt16());
                case TypeCode.Int32: return s_int32 ?? (s_int32 = new GreaterThanInt32());
                case TypeCode.Int64: return s_int64 ?? (s_int64 = new GreaterThanInt64());
                case TypeCode.UInt16: return s_UInt16 ?? (s_UInt16 = new GreaterThanUInt16());
                case TypeCode.UInt32: return s_UInt32 ?? (s_UInt32 = new GreaterThanUInt32());
                case TypeCode.UInt64: return s_UInt64 ?? (s_UInt64 = new GreaterThanUInt64());
                case TypeCode.Single: return s_single ?? (s_single = new GreaterThanSingle());
                case TypeCode.Double: return s_double ?? (s_double = new GreaterThanDouble());
                default: throw Assert.Unreachable;
            }
        }

        // Debug/trace name of this instruction.
        public override string ToString()
        {
            return "GreaterThan()";
        }
    }
}
{ "pile_set_name": "Github" }
/****************************************************************************/ // Eclipse SUMO, Simulation of Urban MObility; see https://eclipse.org/sumo // Copyright (C) 2001-2020 German Aerospace Center (DLR) and others. // This program and the accompanying materials are made available under the // terms of the Eclipse Public License 2.0 which is available at // https://www.eclipse.org/legal/epl-2.0/ // This Source Code may also be made available under the following Secondary // Licenses when the conditions for such availability set forth in the Eclipse // Public License 2.0 are satisfied: GNU General Public License, version 2 // or later which is available at // https://www.gnu.org/licenses/old-licenses/gpl-2.0-standalone.html // SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later /****************************************************************************/ /// @file RailEdge.h /// @author Jakob Erdmann /// @date 26.02.2020 /// // The RailEdge is a wrapper around a ROEdge or a MSEdge used for railway routing /****************************************************************************/ #pragma once #include <config.h> #include <cassert> //#define RailEdge_DEBUG_TURNS //#define RailEdge_DEBUG_INIT //#define RailEdge_DEBUG_SUCCESSORS #define RailEdge_DEBUGID "" //#define RailEdge_DEBUG_COND(obj) ((obj != 0 && (obj)->getID() == RailEdge_DEBUGID)) #define RailEdge_DEBUG_COND(obj) (true) // =========================================================================== // class definitions // =========================================================================== /// @brief the edge type representing backward edges template<class E, class V> class RailEdge { public: typedef RailEdge<E, V> _RailEdge; typedef std::vector<std::pair<const _RailEdge*, const _RailEdge*> > ConstEdgePairVector; RailEdge(const E* orig) : myNumericalID(orig->getNumericalID()), myOriginal(orig), myTurnaround(nullptr), myIsVirtual(true) { } RailEdge(const E* turnStart, const E* turnEnd, int 
numericalID) : myNumericalID(numericalID), myID("TrainReversal!" + turnStart->getID() + "->" + turnEnd->getID()), myOriginal(nullptr), myTurnaround(nullptr), myIsVirtual(true), myMaxLength(turnStart->getLength()), myStartLength(turnStart->getLength()) { myViaSuccessors.push_back(std::make_pair(turnEnd->getRailwayRoutingEdge(), nullptr)); } void update(double maxTrainLength, const std::vector<const E*>& replacementEdges) { if (maxTrainLength > myMaxLength) { myMaxLength = maxTrainLength; myReplacementEdges = replacementEdges; #ifdef RailEdge_DEBUG_INIT std::cout << " update RailEdge " << getID() << " myMaxLength=" << myMaxLength << " repl=" << toString(myReplacementEdges) << "\n"; #endif } } void addVirtualTurns(const E* forward, const E* backward, std::vector<_RailEdge*>& railEdges, int& numericalID, double dist, double maxTrainLength, const std::vector<const E*>& replacementEdges) { // search backwards until dist and add virtual turnaround edges with // replacement edges up to the real turnaround #ifdef RailEdge_DEBUG_INIT std::cout << "addVirtualTurns forward=" << forward->getID() << " backward=" << backward->getID() << " dist=" << dist << " maxLength=" << maxTrainLength << " repl=" << toString(replacementEdges) << "\n"; #endif if (dist <= 0) { return; } for (const E* prev : forward->getPredecessors()) { if (prev == backward) { continue; } const E* bidi = prev->getBidiEdge(); if (backward->isConnectedTo(*bidi, SVC_IGNORING)) { _RailEdge* prevRailEdge = prev->getRailwayRoutingEdge(); if (prevRailEdge->myTurnaround == nullptr) { prevRailEdge->myTurnaround = new _RailEdge(prev, bidi, numericalID++); prevRailEdge->myViaSuccessors.push_back(std::make_pair(prevRailEdge->myTurnaround, nullptr)); railEdges.push_back(prevRailEdge->myTurnaround); #ifdef RailEdge_DEBUG_INIT std::cout << " RailEdge " << prevRailEdge->getID() << " virtual turnaround " << prevRailEdge->myTurnaround->getID() << "\n"; #endif } prevRailEdge->myTurnaround->update(prev->getLength() + 
maxTrainLength, replacementEdges); std::vector<const E*> replacementEdges2; replacementEdges2.push_back(prev); replacementEdges2.insert(replacementEdges2.end(), replacementEdges.begin(), replacementEdges.end()); addVirtualTurns(prev, bidi, railEdges, numericalID, dist - prev->getLength(), maxTrainLength + prev->getLength(), replacementEdges2); } } } void init(std::vector<_RailEdge*>& railEdges, int& numericalID, double maxTrainLength) { // replace turnaround-via with an explicit RailEdge that checks length for (const auto& viaPair : myOriginal->getViaSuccessors()) { if (viaPair.first == myOriginal->getBidiEdge()) { // direction reversal if (myTurnaround == nullptr) { myTurnaround = new _RailEdge(myOriginal, viaPair.first, numericalID++); myViaSuccessors.push_back(std::make_pair(myTurnaround, nullptr)); railEdges.push_back(myTurnaround); #ifdef RailEdge_DEBUG_INIT std::cout << "RailEdge " << getID() << " actual turnaround " << myTurnaround->getID() << "\n"; #endif } myTurnaround->myIsVirtual = false; addVirtualTurns(myOriginal, viaPair.first, railEdges, numericalID, maxTrainLength - getLength(), getLength(), std::vector<const E*> {myOriginal}); } else { myViaSuccessors.push_back(std::make_pair(viaPair.first->getRailwayRoutingEdge(), viaPair.second == nullptr ? nullptr : viaPair.second->getRailwayRoutingEdge())); } } #ifdef RailEdge_DEBUG_SUCCESSORS std::cout << "RailEdge " << getID() << " successors=" << myViaSuccessors.size() << " orig=" << myOriginal->getViaSuccessors().size() << "\n"; for (const auto& viaPair : myViaSuccessors) { std::cout << " " << viaPair.first->getID() << "\n"; } #endif } /// @brief Returns the index (numeric id) of the edge inline int getNumericalID() const { return myNumericalID; } /// @brief Returns the original edge const E* getOriginal() const { return myOriginal; } /** @brief Returns the id of the edge * @return The original edge's id */ const std::string& getID() const { return myOriginal != nullptr ? 
myOriginal->getID() : myID; } void insertOriginalEdges(double length, std::vector<const E*>& into) const { if (myOriginal != nullptr) { into.push_back(myOriginal); } else { double seen = myStartLength; int nPushed = 0; if (seen >= length && !myIsVirtual) { return; } // we need to find a replacement edge that has a real turn for (const E* edge : myReplacementEdges) { into.push_back(edge); nPushed++; seen += edge->getLength(); if (seen >= length && edge->isConnectedTo(*edge->getBidiEdge(), SVC_IGNORING)) {
{ "pile_set_name": "Github" }
# readable-stream ***Node-core streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream) [![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) [![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) [![Sauce Test Status](https://saucelabs.com/browser-matrix/readable-stream.svg)](https://saucelabs.com/u/readable-stream) ```bash npm install --save readable-stream ``` ***Node-core streams for userland*** This package is a mirror of the Streams2 and Streams3 implementations in Node-core, including [documentation](doc/stream.markdown). If you want to guarantee a stable streams base, regardless of what version of Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). As of version 2.0.0 **readable-stream** uses semantic versioning. # Streams WG Team Members * **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) &lt;christopher.s.dickinson@gmail.com&gt; - Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B * **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) &lt;calvin.metcalf@gmail.com&gt; - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 * **Rod Vagg** ([@rvagg](https://github.com/rvagg)) &lt;rod@vagg.org&gt; - Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D * **Sam Newman** ([@sonewman](https://github.com/sonewman)) &lt;newmansam@outlook.com&gt; * **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) &lt;mathiasbuus@gmail.com&gt; * **Domenic Denicola** ([@domenic](https://github.com/domenic)) &lt;d@domenic.me&gt;
{ "pile_set_name": "Github" }
/** * Loads a Wavefront .mtl file specifying materials * * @author angelxuanchang */ THREE.MTLLoader = function( baseUrl, options, crossOrigin ) { this.baseUrl = baseUrl; this.options = options; this.crossOrigin = crossOrigin; }; THREE.MTLLoader.prototype = { constructor: THREE.MTLLoader, load: function ( url, onLoad, onProgress, onError ) { var scope = this; var loader = new THREE.XHRLoader(); loader.setCrossOrigin( this.crossOrigin ); loader.load( url, function ( text ) { onLoad( scope.parse( text ) ); }, onProgress, onError ); }, /** * Parses loaded MTL file * @param text - Content of MTL file * @return {THREE.MTLLoader.MaterialCreator} */ parse: function ( text ) { var lines = text.split( "\n" ); var info = {}; var delimiter_pattern = /\s+/; var materialsInfo = {}; for ( var i = 0; i < lines.length; i ++ ) { var line = lines[ i ]; line = line.trim(); if ( line.length === 0 || line.charAt( 0 ) === '#' ) { // Blank line or comment ignore continue; } var pos = line.indexOf( ' ' ); var key = ( pos >= 0 ) ? line.substring( 0, pos ) : line; key = key.toLowerCase(); var value = ( pos >= 0 ) ? 
line.substring( pos + 1 ) : ""; value = value.trim(); if ( key === "newmtl" ) { // New material info = { name: value }; materialsInfo[ value ] = info; } else if ( info ) { if ( key === "ka" || key === "kd" || key === "ks" ) { var ss = value.split( delimiter_pattern, 3 ); info[ key ] = [ parseFloat( ss[0] ), parseFloat( ss[1] ), parseFloat( ss[2] ) ]; } else { info[ key ] = value; } } } var materialCreator = new THREE.MTLLoader.MaterialCreator( this.baseUrl, this.options ); materialCreator.setMaterials( materialsInfo ); return materialCreator; } }; /** * Create a new THREE-MTLLoader.MaterialCreator * @param baseUrl - Url relative to which textures are loaded * @param options - Set of options on how to construct the materials * side: Which side to apply the material * THREE.FrontSide (default), THREE.BackSide, THREE.DoubleSide * wrap: What type of wrapping to apply for textures * THREE.RepeatWrapping (default), THREE.ClampToEdgeWrapping, THREE.MirroredRepeatWrapping * normalizeRGB: RGBs need to be normalized to 0-1 from 0-255 * Default: false, assumed to be already normalized * ignoreZeroRGBs: Ignore values of RGBs (Ka,Kd,Ks) that are all 0's * Default: false * invertTransparency: If transparency need to be inverted (inversion is needed if d = 0 is fully opaque) * Default: false (d = 1 is fully opaque) * @constructor */ THREE.MTLLoader.MaterialCreator = function( baseUrl, options ) { this.baseUrl = baseUrl; this.options = options; this.materialsInfo = {}; this.materials = {}; this.materialsArray = []; this.nameLookup = {}; this.side = ( this.options && this.options.side )? this.options.side: THREE.FrontSide; this.wrap = ( this.options && this.options.wrap )? 
this.options.wrap: THREE.RepeatWrapping; }; THREE.MTLLoader.MaterialCreator.prototype = { constructor: THREE.MTLLoader.MaterialCreator, setMaterials: function( materialsInfo ) { this.materialsInfo = this.convert( materialsInfo ); this.materials = {}; this.materialsArray = []; this.nameLookup = {}; }, convert: function( materialsInfo ) { if ( !this.options ) return materialsInfo; var converted = {}; for ( var mn in materialsInfo ) { // Convert materials info into normalized form based on options var mat = materialsInfo[ mn ]; var covmat = {}; converted[ mn ] = covmat; for ( var prop in mat ) { var save = true; var value = mat[ prop ]; var lprop = prop.toLowerCase(); switch ( lprop ) { case 'kd': case 'ka': case 'ks': // Diffuse color (color under white light) using RGB values if ( this.options && this.options.normalizeRGB ) { value = [ value[ 0 ] / 255, value[ 1 ] / 255, value[ 2 ] / 255 ]; } if ( this.options && this.options.ignoreZeroRGBs ) { if ( value[ 0 ] === 0 && value[ 1 ] === 0 && value[ 1 ] === 0 ) { // ignore save = false; } } break; case 'd': // According to MTL format (http://paulbourke.net/dataformats/mtl/): // d is dissolve for current material // factor of 1.0 is fully opaque, a factor of 0 is fully dissolved (completely transparent) if ( this.options && this.options.invertTransparency ) { value = 1 - value; } break; default: break; } if ( save ) { covmat[ lprop ] = value; } } } return converted; }, preload: function () { for ( var mn in this.materialsInfo ) { this.create( mn ); } }, getIndex: function( materialName ) { return this.nameLookup[ materialName ]; }, getAsArray: function() { var index = 0; for ( var mn in this.materialsInfo ) { this.materialsArray[ index ] = this.create( mn ); this.nameLookup[ mn ] = index; index ++; } return this.materialsArray; }, create: function ( materialName ) { if ( this.materials[ materialName ] === undefined ) { this.createMaterial_( materialName ); } return this.materials[ materialName ]; }, createMaterial_: 
function ( materialName ) { // Create material var mat = this.materialsInfo[ materialName ]; var params = { name: materialName, side: this.side }; for ( var prop in mat ) { var value = mat[ prop ]; switch ( prop.toLowerCase() ) { // Ns is material specular exponent case 'kd': // Diffuse color (color under white light) using RGB values params[ 'diffuse' ] = new THREE.Color().fromArray( value ); break; case 'ka': // Ambient color (color under shadow) using RGB values params[ 'ambient' ] = new THREE.Color().fromArray( value ); break; case 'ks': // Specular color (color when light is reflected from shiny surface) using RGB values params[ 'specular' ] = new THREE.Color().fromArray( value ); break; case 'map_kd': // Diffuse texture map params[ 'map' ] = this.loadTexture( this.baseUrl + value ); params[ 'map' ].wrapS = this.wrap; params[ 'map' ].wrapT = this.wrap; break; case 'ns': // The specular exponent (defines the focus of the specular highlight) // A high exponent results in a tight, concentrated highlight.
{ "pile_set_name": "Github" }
var lodash = require('./lodash');

/**
 * Creates a `lodash` wrapper around `value` with explicit method chaining
 * enabled: every subsequent chained call keeps returning a wrapper until
 * `.value()` unwraps the result.
 *
 * @static
 * @memberOf _
 * @category Chain
 * @param {*} value The value to wrap.
 * @returns {Object} Returns the new `lodash` wrapper instance.
 * @example
 *
 * var users = [
 *   { 'user': 'barney', 'age': 36 },
 *   { 'user': 'fred', 'age': 40 },
 *   { 'user': 'pebbles', 'age': 1 }
 * ];
 *
 * var youngest = _.chain(users)
 *   .sortBy('age')
 *   .map(function(chr) {
 *     return chr.user + ' is ' + chr.age;
 *   })
 *   .first()
 *   .value();
 * // => 'pebbles is 1'
 */
function chain(value) {
  var wrapped = lodash(value);
  wrapped.__chain__ = true;
  return wrapped;
}

module.exports = chain;
{ "pile_set_name": "Github" }
{{-- Renders $value (when non-empty and the field is visible) as a thumbnail
     image that links to the full-size URL in a lightbox, followed by optional
     $append markup and optional $small helper text. --}}
<div {!! $attributes !!}>
    @if ($visibled)
        @if (!empty($value))
            {{-- Thumbnail links to the same URL it displays. --}}
            <a href="{{ $value }}" data-toggle="lightbox">
                <img class="thumbnail" src="{{ $value }}">
            </a>
        @endif
        {!! $append !!}
        @if($small)
            {{-- Optional small helper text below the value. --}}
            <small class="clearfix">{!! $small !!}</small>
        @endif
    @endif
</div>
{ "pile_set_name": "Github" }
// // Generated by class-dump 3.5 (64 bit). // // class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by Steve Nygard. // #pragma mark - // // File: /Applications/Xcode-7GM.app/Contents/Developer/Platforms/WatchSimulator.platform/Developer/SDKs/WatchSimulator.sdk/System/Library/PrivateFrameworks/GraphicsServices.framework/GraphicsServices // UUID: BC90061B-82E0-3083-B8ED-645846C96184 // // Arch: i386 // Current version: 14.0.0 // Compatibility version: 1.0.0 // Source version: 622.0.0.0.0 // // // This file does not contain any Objective-C runtime information. //
{ "pile_set_name": "Github" }
"use strict";

var isImplemented = require("../../../../string/#/@@iterator/is-implemented");

// Test module: asserts that the String #[@@iterator] feature detection
// reports the shim as implemented. `a` is the assertion callback supplied
// by the test runner.
module.exports = function (a) {
  a(isImplemented(), true);
};
{ "pile_set_name": "Github" }
import React, { useState } from 'react';
import { useHistory } from 'react-router-dom';
import { Card, PageSection } from '@patternfly/react-core';
import { CardBody } from '../../../components/Card';
import ProjectForm from '../shared/ProjectForm';
import { ProjectsAPI } from '../../../api';

// Page component for creating a new project: renders a ProjectForm and,
// on submit, POSTs the normalized values to the Projects API, then
// navigates to the new project's details page.
function ProjectAdd() {
  // Error from the most recent failed submit; null when there is none.
  const [formSubmitError, setFormSubmitError] = useState(null);
  const history = useHistory();

  const handleSubmit = async values => {
    // The API represents a "manual" SCM type as an empty string.
    if (values.scm_type === 'manual') {
      values.scm_type = '';
    }
    if (!values.credential) {
      // Depending on the permissions of the user submitting the form,
      // the API might throw an unexpected error if our creation request
      // has a zero-length string as its credential field. As a work-around,
      // normalize falsey credential fields by deleting them.
      delete values.credential;
    }
    // Clear any stale error before retrying the request.
    setFormSubmitError(null);
    try {
      const {
        data: { id },
      } = await ProjectsAPI.create({
        ...values,
        // The form supplies an organization object; the API wants its id.
        organization: values.organization.id,
      });
      history.push(`/projects/${id}/details`);
    } catch (error) {
      setFormSubmitError(error);
    }
  };

  // Cancel returns to the projects list without saving.
  const handleCancel = () => {
    history.push(`/projects`);
  };

  return (
    <PageSection>
      <Card>
        <CardBody>
          <ProjectForm
            handleCancel={handleCancel}
            handleSubmit={handleSubmit}
            submitError={formSubmitError}
          />
        </CardBody>
      </Card>
    </PageSection>
  );
}

export default ProjectAdd;
{ "pile_set_name": "Github" }
package web

// FieldError is used to indicate an error with a specific request field.
type FieldError struct {
	Field string `json:"field"`
	Error string `json:"error"`
}

// ErrorResponse is the form used for API responses from failures in the API.
type ErrorResponse struct {
	Error  string       `json:"error"`
	Fields []FieldError `json:"fields,omitempty"`
}

// Error is used to pass an error during the request through the
// application with web specific context.
type Error struct {
	Err    error
	Status int
	Fields []FieldError
}

// NewRequestError wraps a provided error with an HTTP status code. This
// function should be used when handlers encounter expected errors.
func NewRequestError(err error, status int) error {
	// Keyed fields (rather than the positional &Error{err, status, nil})
	// keep this literal correct if Error ever grows new fields; Fields
	// intentionally defaults to nil.
	return &Error{Err: err, Status: status}
}

// Error implements the error interface. It uses the default message of the
// wrapped error. This is what will be shown in the services' logs.
func (err *Error) Error() string {
	return err.Err.Error()
}
{ "pile_set_name": "Github" }
# Developing with Minikube This guide will guide you through the process of deploying and running your locally built Nuclio sources on a local Kubernetes cluster in your Minikube VM. This is helpful when you're developing new functionality in the Kubernetes platform and need to test it against a real Kubernetes cluster. ## Working assumptions This guide assumes that: - You set up your Minikube VM as described in the [Minikube getting started guide](/docs/setup/minikube/getting-started-minikube.md) - You have previously deployed a _released_ Nuclio version on top of it and interacted with it - You have a working Nuclio development environment and you're on a branch containing changes you made which pertain to the Kubernetes platform ## Get your local images onto Minikube When you install Nuclio's services onto Minikube (using `kubectl apply`), Kubernetes examines the given resource specification to determine which images to use for Nuclio's controller and dashboard services. To get it to take your images, we must first push them onto the local Docker registry running inside the Minikube VM. To do this: - Make sure you've built container images with your changes (`make build`) - Push them by running the script located at `hack/minikube/scripts/push_images.py`. Keep in mind the script assumes the local Docker registry to be listening on port 5000 of the Minikube VM. It does the following: - Iterates over the existing Nuclio container images on the host machine - For each such image: - Tags it locally as `$(minikube ip):5000/<image>` (i.e., `192.168.64.4:5000/processor:latest-amd64`) - Pushes it to the Docker registry. Since the image's tag refers to a registry, it's pushed to the Minikube registry. 
- Untags it locally - _(in the Minikube VM)_ Pulls the image, specifying the local Docker registry (i.e., `docker pull localhost:5000/processor:latest-amd64`) - _(in the Minikube VM)_ Tags it with the `nuclio/` prefix (i.e., `nuclio/processor:latest-amd64`) - _(in the Minikube VM)_ Untags the Minikube-specific tag This will make the latest versions of our locally-built images available from the Docker registry in the Minikube VM. ## Deploy a custom version of the Nuclio services The `nuclio.yaml` resource specification that we feed `kubectl apply` with when deploying a released Nuclio version always points to controller and dashboard images fixed to that version. In our case, we must use a modified version: ```sh kubectl apply -f https://raw.githubusercontent.com/nuclio/nuclio/development/hack/minikube/resources/devel/nuclio.yaml ``` It differs from the usual `nuclio.yaml` in that: 1) Controller/dashboard images are "latest", resulting in the images you pushed in the last step being used 2) Controller/dashboard images are never pulled from Docker Hub 3) Dashboard is told (via an environment variable) not to pull base images when deploying functions (it'll use the images you pushed) You should now have a functional Kubernetes cluster using images built from your local changes, and can test against it to make sure they work as expected. Keep in mind when using a locally-built latest `nuctl`, to specify `--no-pull` such that the base images you pushed are used.
{ "pile_set_name": "Github" }
smallrye.messaging.worker.my-pool.max-concurrency=2 smallrye.messaging.worker.another-pool.max-concurrency=5
{ "pile_set_name": "Github" }
FieldType name = "FLD_SUBSPACE_RIFT" description = "FLD_SUBSPACE_RIFT_DESC" stealth = 0 effectsgroups = [ EffectsGroup // pull in objects scope = And [ System Not Contains Source WithinDistance distance = Source.Size condition = Source ] effects = MoveTowards speed = 5 target = Source EffectsGroup // destroy close objects scope = And [ Not Source Not Contains Source WithinDistance distance = 10 condition = Source ] effects = Destroy EffectsGroup // shrink at same speed objects are pulled scope = Source effects = SetSize value = Target.Size - 5 EffectsGroup // collapse upon self when small enough scope = Source activation = Size high = 5 effects = Destroy ] graphic = "nebulae/nebula9.png"
{ "pile_set_name": "Github" }
{\rtf1\ansi\ansicpg1252\cocoartf1038\cocoasubrtf320 {\fonttbl\f0\fnil\fcharset0 Verdana;\f1\fnil\fcharset0 LucidaGrande;} {\colortbl;\red255\green255\blue255;\red73\green73\blue73;} {\*\listtable{\list\listtemplateid1\listhybrid{\listlevel\levelnfc23\levelnfcn23\leveljc0\leveljcn0\levelfollow0\levelstartat1\levelspace360\levelindent0{\*\levelmarker \{disc\}}{\leveltext\leveltemplateid1\'01\uc0\u8226 ;}{\levelnumbers;}\fi-360\li720\lin720 }{\listname ;}\listid1}} {\*\listoverridetable{\listoverride\listid1\listoverridecount0\ls1}} \vieww9000\viewh8400\viewkind0 \deftab720 \pard\pardeftab720\sl400\sa280\ql\qnatural \f0\fs24 \cf2 Copyright (c) 2010, Oomph Inc. \f1 \uc0\u8232 \f0 All rights reserved.\ Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:\ \pard\tx220\tx720\pardeftab720\li720\fi-720\sl400\sa20\ql\qnatural \ls1\ilvl0\cf2 {\listtext \'95 }Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.\ {\listtext \'95 }Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.\ {\listtext \'95 }Neither the name of Oomph Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.\ \pard\pardeftab720\sl400\sa280\ql\qnatural \cf2 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.}
{ "pile_set_name": "Github" }
// Top-level function `c`: takes no arguments and always returns null.
c() {
  return null;
}
{ "pile_set_name": "Github" }
// Copyright 2009 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // Windows system calls. package syscall import ( errorspkg "errors" "internal/race" "sync" "unicode/utf16" "unsafe" ) //go:generate go run mksyscall_windows.go -systemdll -output zsyscall_windows.go syscall_windows.go security_windows.go type Handle uintptr const InvalidHandle = ^Handle(0) // StringToUTF16 returns the UTF-16 encoding of the UTF-8 string s, // with a terminating NUL added. If s contains a NUL byte this // function panics instead of returning an error. // // Deprecated: Use UTF16FromString instead. func StringToUTF16(s string) []uint16 { a, err := UTF16FromString(s) if err != nil { panic("syscall: string with NUL passed to StringToUTF16") } return a } // UTF16FromString returns the UTF-16 encoding of the UTF-8 string // s, with a terminating NUL added. If s contains a NUL byte at any // location, it returns (nil, EINVAL). func UTF16FromString(s string) ([]uint16, error) { for i := 0; i < len(s); i++ { if s[i] == 0 { return nil, EINVAL } } return utf16.Encode([]rune(s + "\x00")), nil } // UTF16ToString returns the UTF-8 encoding of the UTF-16 sequence s, // with a terminating NUL removed. func UTF16ToString(s []uint16) string { for i, v := range s { if v == 0 { s = s[0:i] break } } return string(utf16.Decode(s)) } // StringToUTF16Ptr returns pointer to the UTF-16 encoding of // the UTF-8 string s, with a terminating NUL added. If s // If s contains a NUL byte this function panics instead of // returning an error. // // Deprecated: Use UTF16PtrFromString instead. func StringToUTF16Ptr(s string) *uint16 { return &StringToUTF16(s)[0] } // UTF16PtrFromString returns pointer to the UTF-16 encoding of // the UTF-8 string s, with a terminating NUL added. If s // contains a NUL byte at any location, it returns (nil, EINVAL). 
func UTF16PtrFromString(s string) (*uint16, error) { a, err := UTF16FromString(s) if err != nil { return nil, err } return &a[0], nil } func Getpagesize() int { return 4096 } // Errno is the Windows error number. type Errno uintptr func langid(pri, sub uint16) uint32 { return uint32(sub)<<10 | uint32(pri) } // FormatMessage is deprecated (msgsrc should be uintptr, not uint32, but can // not be changed due to the Go 1 compatibility guarantee). // // Deprecated: Use FormatMessage from golang.org/x/sys/windows instead. func FormatMessage(flags uint32, msgsrc uint32, msgid uint32, langid uint32, buf []uint16, args *byte) (n uint32, err error) { return formatMessage(flags, uintptr(msgsrc), msgid, langid, buf, args) } func (e Errno) Error() string { // deal with special go errors idx := int(e - APPLICATION_ERROR) if 0 <= idx && idx < len(errors) { return errors[idx] } // ask windows for the remaining errors var flags uint32 = FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_ARGUMENT_ARRAY | FORMAT_MESSAGE_IGNORE_INSERTS b := make([]uint16, 300) n, err := formatMessage(flags, 0, uint32(e), langid(LANG_ENGLISH, SUBLANG_ENGLISH_US), b, nil) if err != nil { n, err = formatMessage(flags, 0, uint32(e), 0, b, nil) if err != nil { return "winapi error #" + itoa(int(e)) } } // trim terminating \r and \n for ; n > 0 && (b[n-1] == '\n' || b[n-1] == '\r'); n-- { } return string(utf16.Decode(b[:n])) } func (e Errno) Temporary() bool { return e == EINTR || e == EMFILE || e.Timeout() } func (e Errno) Timeout() bool { return e == EAGAIN || e == EWOULDBLOCK || e == ETIMEDOUT } // Implemented in runtime/syscall_windows.go. func compileCallback(fn interface{}, cleanstack bool) uintptr // Converts a Go function to a function pointer conforming // to the stdcall calling convention. This is useful when // interoperating with Windows code requiring callbacks. 
func NewCallback(fn interface{}) uintptr { return compileCallback(fn, true) } // Converts a Go function to a function pointer conforming // to the cdecl calling convention. This is useful when // interoperating with Windows code requiring callbacks. func NewCallbackCDecl(fn interface{}) uintptr { return compileCallback(fn, false) } // windows api calls //sys GetLastError() (lasterr error) //sys LoadLibrary(libname string) (handle Handle, err error) = LoadLibraryW //sys FreeLibrary(handle Handle) (err error) //sys GetProcAddress(module Handle, procname string) (proc uintptr, err error) //sys GetVersion() (ver uint32, err error) //sys formatMessage(flags uint32, msgsrc uintptr, msgid uint32, langid uint32, buf []uint16, args *byte) (n uint32, err error) = FormatMessageW //sys ExitProcess(exitcode uint32) //sys CreateFile(name *uint16, access uint32, mode uint32, sa *SecurityAttributes, createmode uint32, attrs uint32, templatefile int32) (handle Handle, err error) [failretval==InvalidHandle] = CreateFileW //sys ReadFile(handle Handle, buf []byte, done *uint32, overlapped *Overlapped) (err error) //sys WriteFile(handle Handle, buf []byte, done *uint32, overlapped *Overlapped) (err error) //sys SetFilePointer(handle Handle, lowoffset int32, highoffsetptr *int32, whence uint32) (newlowoffset uint32, err error) [failretval==0xffffffff] //sys CloseHandle(handle Handle) (err error) //sys GetStdHandle(stdhandle int) (handle Handle, err error) [failretval==InvalidHandle] //sys findFirstFile1(name *uint16, data *win32finddata1) (handle Handle, err error) [failretval==InvalidHandle] = FindFirstFileW //sys findNextFile1(handle Handle, data *win32finddata1) (err error) = FindNextFileW //sys FindClose(handle Handle) (err error) //sys GetFileInformationByHandle(handle Handle, data *ByHandleFileInformation) (err error) //sys GetCurrentDirectory(buflen uint32, buf *uint16) (n uint32, err error) = GetCurrentDirectoryW //sys SetCurrentDirectory(path *uint16) (err error) = 
SetCurrentDirectoryW //sys CreateDirectory(path *uint16, sa *SecurityAttributes) (err error) = CreateDirectoryW //sys RemoveDirectory(path *uint16) (err error) = RemoveDirectoryW //sys DeleteFile(path *uint16) (err error) = DeleteFileW //sys MoveFile(from *uint16, to *uint16) (err error) = MoveFileW //sys GetComputerName(buf *uint16, n *uint32) (err error) = GetComputerNameW //sys SetEndOfFile(handle Handle) (err error) //sys GetSystemTimeAsFileTime(time *Filetime) //sys GetTimeZoneInformation(tzi *Timezoneinformation) (rc uint32, err error) [failretval
{ "pile_set_name": "Github" }
# API endpoint exposing the authenticated token owner's own account.
class Api::V1::AccountsController < Api::V1::ApiController
  # Require an OAuth access token carrying the :account scope (Doorkeeper).
  before_action -> { doorkeeper_authorize! :account }

  # Renders the resource owner's account serialized as JSON.
  # NOTE(review): assumes current_resource_owner is provided by
  # Api::V1::ApiController (standard Doorkeeper pattern) — confirm there.
  def me
    render json: current_resource_owner.as_json
  end
end
{ "pile_set_name": "Github" }
{ "_from": "ansi-escapes@^3.0.0", "_id": "ansi-escapes@3.1.0", "_inBundle": false, "_integrity": "sha512-UgAb8H9D41AQnu/PbWlCofQVcnV4Gs2bBJi9eZPxfU/hgglFh3SMDMENRIqdr7H6XFnXdoknctFByVsCOotTVw==", "_location": "/ansi-escapes", "_phantomChildren": {}, "_requested": { "type": "range", "registry": true, "raw": "ansi-escapes@^3.0.0", "name": "ansi-escapes", "escapedName": "ansi-escapes", "rawSpec": "^3.0.0", "saveSpec": null, "fetchSpec": "^3.0.0" }, "_requiredBy": [ "/inquirer" ], "_resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.1.0.tgz", "_shasum": "f73207bb81207d75fd6c83f125af26eea378ca30", "_spec": "ansi-escapes@^3.0.0", "_where": "/Users/sunxin/DOClever/node_modules/inquirer", "author": { "name": "Sindre Sorhus", "email": "sindresorhus@gmail.com", "url": "sindresorhus.com" }, "bugs": { "url": "https://github.com/sindresorhus/ansi-escapes/issues" }, "bundleDependencies": false, "deprecated": false, "description": "ANSI escape codes for manipulating the terminal", "devDependencies": { "ava": "*", "xo": "*" }, "engines": { "node": ">=4" }, "files": [ "index.js" ], "homepage": "https://github.com/sindresorhus/ansi-escapes#readme", "keywords": [ "ansi", "terminal", "console", "cli", "string", "tty", "escape", "escapes", "formatting", "shell", "xterm", "log", "logging", "command-line", "text", "vt100", "sequence", "control", "code", "codes", "cursor", "iterm", "iterm2" ], "license": "MIT", "name": "ansi-escapes", "repository": { "type": "git", "url": "git+https://github.com/sindresorhus/ansi-escapes.git" }, "scripts": { "test": "xo && ava" }, "version": "3.1.0" }
{ "pile_set_name": "Github" }
 ACROS Security 0patch (0PatchServicex64.exe) Unquoted Service Path Privilege Escalation Vendor: ACROS, d.o.o. Product web page: https://www.0patch.com Affected version: 2016.05.19.539 Summary: 0patch (pronounced 'zero patch') is a platform for instantly distributing, applying and removing microscopic binary patches to/from running processes without having to restart these processes (much less reboot the entire computer). Desc: The application suffers from an unquoted search path issue impacting the service '0patchservice' for Windows deployed as part of 0patch solution. This could potentially allow an authorized but non-privileged local user to execute arbitrary code with elevated privileges on the system. A successful attempt would require the local user to be able to insert their code in the system root path undetected by the OS or other security applications where it could potentially be executed during application startup or reboot. If successful, the local user’s code would execute with the elevated privileges of the application. Tested on: Microsoft Windows 7 Ultimate SP1 (EN) Microsoft Windows 7 Professional SP1 (EN) Vulnerability discovered by Gjoko 'LiquidWorm' Krstic @zeroscience Advisory ID: ZSL-2016-5331 Advisory URL: http://www.zeroscience.mk/en/vulnerabilities/ZSL-2016-5331.php Vendor: https://0patch.blogspot.com/2016/06/new-release-0patch-agent-20160614850.html 08.06.2016 -- C:\>sc qc 0patchservice [SC] QueryServiceConfig SUCCESS SERVICE_NAME: 0patchservice TYPE : 10 WIN32_OWN_PROCESS START_TYPE : 2 AUTO_START ERROR_CONTROL : 1 NORMAL BINARY_PATH_NAME : C:\Program Files (x86)\0patch\Agent\0PatchServicex64.exe LOAD_ORDER_GROUP : TAG : 0 DISPLAY_NAME : 0patch Service DEPENDENCIES : SERVICE_START_NAME : LocalSystem C:\>cacls "C:\Program Files (x86)\0patch\Agent\0PatchServicex64.exe" C:\Program Files (x86)\0patch\Agent\0patchServicex64.exe NT AUTHORITY\SYSTEM:(ID)F BUILTIN\Administrators:(ID)F BUILTIN\Users:(ID)R C:\>
{ "pile_set_name": "Github" }
# Configure paths for libopusenc # Jean-Marc Valin <jmvalin@jmvalin.ca> 11-12-2017 # Jack Moffitt <jack@icecast.org> 10-21-2000 # Shamelessly stolen from Owen Taylor and Manish Singh dnl XIPH_PATH_LIBOPUSENC([ACTION-IF-FOUND [, ACTION-IF-NOT-FOUND]]) dnl Test for libopusenc, and define LIBOPUSENC_CFLAGS and LIBOPUSENC_LIBS dnl AC_DEFUN([XIPH_PATH_LIBOPUSENC], [dnl dnl Get the cflags and libraries dnl AC_ARG_WITH(libopusenc,AC_HELP_STRING([--with-libopusenc=PFX],[Prefix where libopusenc is installed (optional)]), libopusenc_prefix="$withval", libopusenc_prefix="") AC_ARG_WITH(libopusenc-libraries,AC_HELP_STRING([--with-libopusenc-libraries=DIR],[Directory where libopusenc library is installed (optional)]), libopusenc_libraries="$withval", libopusenc_libraries="") AC_ARG_WITH(libopusenc-includes,AC_HELP_STRING([--with-libopusenc-includes=DIR],[Directory where libopusenc header files are installed (optional)]), libopusenc_includes="$withval", libopusenc_includes="") AC_ARG_ENABLE(libopusenctest,AC_HELP_STRING([--disable-libopusenctest],[Do not try to compile and run a test libopusenc program]),, enable_libopusenctest=yes) if test "x$libopusenc_libraries" != "x" ; then LIBOPUSENC_LIBS="-L$libopusenc_libraries" elif test "x$libopusenc_prefix" = "xno" || test "x$libopusenc_prefix" = "xyes" ; then LIBOPUSENC_LIBS="" elif test "x$libopusenc_prefix" != "x" ; then LIBOPUSENC_LIBS="-L$libopusenc_prefix/lib" elif test "x$prefix" != "xNONE" ; then LIBOPUSENC_LIBS="-L$prefix/lib" fi if test "x$libopusenc_prefix" != "xno" ; then LIBOPUSENC_LIBS="$LIBOPUSENC_LIBS -lopusenc" fi if test "x$libopusenc_includes" != "x" ; then LIBOPUSENC_CFLAGS="-I$libopusenc_includes" elif test "x$libopusenc_prefix" = "xno" || test "x$libopusenc_prefix" = "xyes" ; then LIBOPUSENC_CFLAGS="" elif test "x$libopusenc_prefix" != "x" ; then LIBOPUSENC_CFLAGS="-I$libopusenc_prefix/include/opus" elif test "x$prefix" != "xNONE"; then LIBOPUSENC_CFLAGS="-I$prefix/include/opus" fi AC_MSG_CHECKING(for libopusenc) 
if test "x$libopusenc_prefix" = "xno" ; then no_libopusenc="disabled" enable_libopusenctest="no" else no_libopusenc="" fi if test "x$enable_libopusenctest" = "xyes" ; then ac_save_CFLAGS="$CFLAGS" ac_save_LIBS="$LIBS" CFLAGS="$CFLAGS $LIBOPUSENC_CFLAGS $OPUS_CFLAGS" LIBS="$LIBS $LIBOPUSENC_LIBS $OPUS_LIBS" dnl dnl Now check if the installed libopusenc is sufficiently new. dnl rm -f conf.libopusenctest AC_TRY_RUN([ #include <stdio.h> #include <stdlib.h> #include <string.h> #include <opusenc.h> int main () { system("touch conf.libopusenctest"); return 0; } ],, no_libopusenc=yes,[echo $ac_n "cross compiling; assumed OK... $ac_c"]) CFLAGS="$ac_save_CFLAGS" LIBS="$ac_save_LIBS" fi if test "x$no_libopusenc" = "xdisabled" ; then AC_MSG_RESULT(no) ifelse([$2], , :, [$2]) elif test "x$no_libopusenc" = "x" ; then AC_MSG_RESULT(yes) ifelse([$1], , :, [$1]) else AC_MSG_RESULT(no) if test -f conf.libopusenctest ; then : else echo "*** Could not run libopusenc test program, checking why..." CFLAGS="$CFLAGS $LIBOPUSENC_CFLAGS" LIBS="$LIBS $LIBOPUSENC_LIBS" AC_TRY_LINK([ #include <stdio.h> #include <opusenc.h> ], [ return 0; ], [ echo "*** The test program compiled, but did not run. This usually means" echo "*** that the run-time linker is not finding libopusenc or finding the wrong" echo "*** version of libopusenc. If it is not finding libopusenc, you'll need to set your" echo "*** LD_LIBRARY_PATH environment variable, or edit /etc/ld.so.conf to point" echo "*** to the installed location Also, make sure you have run ldconfig if that" echo "*** is required on your system" echo "***" echo "*** If you have an old version installed, it is best to remove it, although" echo "*** you may also be able to get things to work by modifying LD_LIBRARY_PATH"], [ echo "*** The test program failed to compile or link. See the file config.log for the" echo "*** exact error that occured. 
This usually means libopusenc was incorrectly installed" echo "*** or that you have moved libopusenc since it was installed." ]) CFLAGS="$ac_save_CFLAGS" LIBS="$ac_save_LIBS" fi LIBOPUSENC_CFLAGS="" LIBOPUSENC_LIBS="" ifelse([$2], , :, [$2]) fi AC_SUBST(LIBOPUSENC_CFLAGS) AC_SUBST(LIBOPUSENC_LIBS) rm -f conf.libopusenctest ])
{ "pile_set_name": "Github" }
define( [ "./core", "./var/document", "./var/rnotwhite", "./var/slice", "./data/var/dataPriv", "./core/init", "./selector" ], function( jQuery, document, rnotwhite, slice, dataPriv ) { var rkeyEvent = /^key/, rmouseEvent = /^(?:mouse|pointer|contextmenu|drag|drop)|click/, rtypenamespace = /^([^.]*)(?:\.(.+)|)/; function returnTrue() { return true; } function returnFalse() { return false; } // Support: IE9 // See #13393 for more info function safeActiveElement() { try { return document.activeElement; } catch ( err ) { } } function on( elem, types, selector, data, fn, one ) { var origFn, type; // Types can be a map of types/handlers if ( typeof types === "object" ) { // ( types-Object, selector, data ) if ( typeof selector !== "string" ) { // ( types-Object, data ) data = data || selector; selector = undefined; } for ( type in types ) { on( elem, type, selector, data, types[ type ], one ); } return elem; } if ( data == null && fn == null ) { // ( types, fn ) fn = selector; data = selector = undefined; } else if ( fn == null ) { if ( typeof selector === "string" ) { // ( types, selector, fn ) fn = data; data = undefined; } else { // ( types, data, fn ) fn = data; data = selector; selector = undefined; } } if ( fn === false ) { fn = returnFalse; } else if ( !fn ) { return elem; } if ( one === 1 ) { origFn = fn; fn = function( event ) { // Can use an empty set, since event contains the info jQuery().off( event ); return origFn.apply( this, arguments ); }; // Use same guid so caller can remove using origFn fn.guid = origFn.guid || ( origFn.guid = jQuery.guid++ ); } return elem.each( function() { jQuery.event.add( this, types, fn, data, selector ); } ); } /* * Helper functions for managing events -- not part of the public interface. * Props to Dean Edwards' addEvent library for many of the ideas. 
*/ jQuery.event = { global: {}, add: function( elem, types, handler, data, selector ) { var handleObjIn, eventHandle, tmp, events, t, handleObj, special, handlers, type, namespaces, origType, elemData = dataPriv.get( elem ); // Don't attach events to noData or text/comment nodes (but allow plain objects) if ( !elemData ) { return; } // Caller can pass in an object of custom data in lieu of the handler if ( handler.handler ) { handleObjIn = handler; handler = handleObjIn.handler; selector = handleObjIn.selector; } // Make sure that the handler has a unique ID, used to find/remove it later if ( !handler.guid ) { handler.guid = jQuery.guid++; } // Init the element's event structure and main handler, if this is the first if ( !( events = elemData.events ) ) { events = elemData.events = {}; } if ( !( eventHandle = elemData.handle ) ) { eventHandle = elemData.handle = function( e ) { // Discard the second event of a jQuery.event.trigger() and // when an event is called after a page has unloaded return typeof jQuery !== "undefined" && jQuery.event.triggered !== e.type ? jQuery.event.dispatch.apply( elem, arguments ) : undefined; }; } // Handle multiple events separated by a space types = ( types || "" ).match( rnotwhite ) || [ "" ]; t = types.length; while ( t-- ) { tmp = rtypenamespace.exec( types[ t ] ) || []; type = origType = tmp[ 1 ]; namespaces = ( tmp[ 2 ] || "" ).split( "." ).sort(); // There *must* be a type, no attaching namespace-only handlers if ( !type ) { continue; } // If event changes its type, use the special event handlers for the changed type special = jQuery.event.special[ type ] || {}; // If selector defined, determine special event api type, otherwise given type type = ( selector ? 
special.delegateType : special.bindType ) || type; // Update special based on newly reset type special = jQuery.event.special[ type ] || {}; // handleObj is passed to all event handlers handleObj = jQuery.extend( { type: type, origType: origType, data: data, handler: handler, guid: handler.guid, selector: selector, needsContext: selector && jQuery.expr.match.needsContext.test( selector ), namespace: namespaces.join( "." ) }, handleObjIn ); // Init the event handler queue if we're the first if ( !( handlers = events[ type ] ) ) { handlers = events[ type ] = []; handlers.delegateCount = 0; // Only use addEventListener if the special events handler returns false if ( !special.setup || special.setup.call( elem, data, namespaces, eventHandle ) === false ) { if ( elem.addEventListener ) { elem.addEventListener( type, eventHandle ); } } } if ( special.add ) { special.add.call( elem, handleObj ); if ( !handleObj.handler.guid ) { handleObj.handler.guid = handler.guid; } } // Add to the element's handler list, delegates in front if ( selector ) { handlers.splice( handlers.delegateCount++, 0, handleObj ); } else { handlers.push( handleObj ); } // Keep track of which events have ever been used, for event optimization jQuery.event.global[ type ] = true; } }, // Detach an event or set of events from an element remove: function( elem, types, handler, selector, mappedTypes ) { var j, origCount, tmp, events, t, handleObj, special, handlers, type, namespaces, origType, elemData = dataPriv.hasData( elem ) && dataPriv.get( elem ); if ( !elemData || !( events = elemData.events ) ) { return; } // Once for each type.namespace in types; type may be omitted types = ( types || "" ).match( rnotwhite ) || [ "" ]; t = types.length; while ( t-- ) { tmp = rtypenamespace.exec( types[ t ] ) || []; type = origType = tmp[ 1 ]; namespaces = ( tmp[ 2 ] || "" ).split( "." 
).sort(); // Unbind all events (on this namespace, if provided) for the element if ( !type ) { for ( type in events ) { jQuery.event.remove( elem, type + types[ t ], handler, selector, true ); } continue; } special = jQuery.event.special[ type ] || {}; type = ( selector ? special.delegateType : special.bindType ) || type; handlers = events[ type ] || []; tmp = tmp[ 2 ] && new RegExp( "(^|\\.)" + namespaces.join( "\\.(?:.*\\.|)" ) + "(\\.|$)" ); // Remove matching events origCount = j = handlers.length; while ( j-- ) { handleObj = handlers[ j ]; if ( ( mappedTypes || origType === handleObj.origType ) && ( !handler || handler.guid
{ "pile_set_name": "Github" }
package structure;

import java.util.AbstractSet;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;

/** A simple Map implementation, implemented in terms of a
 * pair of ArrayLists just to show what a Map has to do (it would
 * have been easier, but less informative, to subclass AbstractMap).
 * This Map implementation, like TreeSet, guarantees that the
 * Map's contents will be kept in ascending element order,
 * sorted according to the natural order of the elements;
 * see Comparable. This does not (yet) allow you to specify your own
 * Comparator.
 * <p>
 * It is a requirement that all objects inserted be able to
 * call compareTo on all other objects, i.e., they must all
 * be of the same or related classes.
 * <p>
 * Be warned that the entrySet() method is <b>not implemented</b> yet.
 */
public class MyMap<K,V> implements Map<K,V> {

	// Parallel lists: keys.get(i) maps to values.get(i);
	// "keys" is kept in ascending natural order at all times.
	private ArrayList<K> keys;
	private ArrayList<V> values;

	public MyMap() {
		keys = new ArrayList<K>();
		values = new ArrayList<V>();
	}

	/** Return the number of mappings in this Map. */
	public int size() {
		return keys.size();
	}

	/** Return true if this map is empty. */
	public boolean isEmpty() {
		return size() == 0;
	}

	/** Return true if o is contained as a Key in this Map. */
	public boolean containsKey(Object o) {
		return keys.contains(o);
	}

	/** Return true if o is contained as a Value in this Map. */
	public boolean containsValue(Object o) {
		return values.contains(o);
	}

	/** Get the object value corresponding to key k, or null if absent. */
	public V get(Object k) {
		int i = keys.indexOf(k);
		if (i == -1)
			return null;
		return values.get(i);
	}

	/** Put the given pair (k, v) into this map, keeping "keys" in
	 * ascending sorted order.
	 * @return the previous value mapped to k, or null if k was absent
	 *         (per the java.util.Map contract).
	 */
	@SuppressWarnings("unchecked")
	public V put(Object k, Object v) {
		for (int i = 0; i < keys.size(); i++) {
			int cmp = ((Comparable<K>) k).compareTo(keys.get(i));
			if (cmp == 0) {
				// Key already present: replace its value. List.set returns
				// the displaced element, which is exactly the previous
				// value Map.put must return. (The original returned the
				// NEW value, violating the Map contract.)
				return values.set(i, (V) v);
			}
			if (cmp < 0) {
				// First existing key greater than k: insert just before it
				// to preserve ascending order. (The original tested
				// compareTo(...) == +1, but compareTo only guarantees the
				// sign of its result, and it inserted at index i-1, which
				// broke the sort invariant.)
				keys.add(i, (K) k);
				values.add(i, (V) v);
				return null;
			}
		}
		// k is greater than every existing key: append at the end.
		keys.add((K) k);
		values.add((V) v);
		return null;
	}

	/** Put all the pairs from oldMap into this map. */
	@Override
	public void putAll(@SuppressWarnings("rawtypes") Map oldMap) {
		@SuppressWarnings("unchecked")
		Iterator<K> keysIter = oldMap.keySet().iterator();
		while (keysIter.hasNext()) {
			Object k = keysIter.next();
			Object v = oldMap.get(k);
			put(k, v);
		}
	}

	/** Remove the mapping for key k, returning the old value or null. */
	public V remove(Object k) {
		int i = keys.indexOf(k);
		if (i == -1)
			return null;
		V old = values.get(i);
		keys.remove(i);
		values.remove(i);
		return old;
	}

	/** Remove all mappings. */
	public void clear() {
		keys.clear();
		values.clear();
	}

	/** Returns a snapshot (not a live view) of the keys, sorted. */
	public java.util.Set<K> keySet() {
		return new TreeSet<K>(keys);
	}

	/** Returns the values collection. NOTE: this is the internal list,
	 * not a defensive copy. */
	public java.util.Collection<V> values() {
		return values;
	}

	/** The Map.Entry objects contained in the Set returned by entrySet(). */
	@SuppressWarnings("rawtypes")
	private class MyMapEntry implements Map.Entry<K,V>, Comparable {
		private K key;
		private V value;

		MyMapEntry(K k, V v) {
			key = k;
			value = v;
		}

		public K getKey() { return key; }

		public V getValue() { return value; }

		// Entries are read-only snapshots; writing through them is not
		// supported.
		public V setValue(V nv) {
			throw new UnsupportedOperationException("setValue");
		}

		@SuppressWarnings("unchecked")
		public int compareTo(Object o2) {
			// Orders entries by their keys' natural order.
			Object otherKey = ((MyMapEntry)o2).getKey();
			return ((Comparable)key).compareTo((Comparable)otherKey);
		}
	}

	/** The set of Map.Entry objects returned from entrySet(). */
	private class MyMapSet<T> extends AbstractSet<T> {
		List<T> list;

		MyMapSet(List<T> al) {
			list = al;
		}

		public Iterator<T> iterator() {
			return list.iterator();
		}

		public int size() {
			return list.size();
		}
	}

	/** Returns a set view of the mappings contained in this Map.
	 * Each element in the returned set is a Map.Entry.
	 * NOT guaranteed fully to implement the contract of entrySet
	 * declared in java.util.Map.
	 */
	@SuppressWarnings({ "unchecked", "rawtypes" })
	public Set entrySet() {
		if (keys.size() != values.size())
			throw new IllegalStateException(
				"InternalError: keys and values out of sync");
		ArrayList<MyMapEntry> al = new ArrayList<MyMapEntry>();
		for (int i=0; i<keys.size(); i++) {
			al.add(new MyMapEntry(keys.get(i), values.get(i)));
		}
		return new MyMapSet<MyMapEntry>(al);
	}
}
{ "pile_set_name": "Github" }
# さらに詳しく知るために <!-- 参考書 --> <!-- ビットコイン関連 --> <!-- mastering bitcoin, blockchain -->
{ "pile_set_name": "Github" }
//
//  MediaObserver.swift
//  RCTAgora
//
//  Created by LXH on 2020/4/10.
//  Copyright © 2020 Syan. All rights reserved.
//

import Foundation
import AgoraRtcKit

/// Bridges Agora media-metadata callbacks to a generic event emitter.
class MediaObserver: NSObject {
    /// Callback used to forward received metadata events upstream.
    private let emit: (_ data: Dictionary<String, Any?>?) -> Void
    /// Size limit reported to the SDK via `metadataMaxSize()`.
    private var sizeLimit = 0
    /// FIFO queue of metadata strings waiting to be sent.
    /// NOTE(review): mutated from SDK callbacks and from the host app
    /// without synchronization — confirm the call sites are serialized.
    private var pending = [String]()

    init(_ emitter: @escaping (_ data: Dictionary<String, Any?>?) -> Void) {
        self.emit = emitter
    }

    /// Queues one metadata string for the next send opportunity.
    func addMetadata(_ metadata: String) {
        pending.append(metadata)
    }

    /// Updates the maximum metadata size reported to the SDK.
    func setMaxMetadataSize(_ size: Int) {
        sizeLimit = size
    }
}

extension MediaObserver: AgoraMediaMetadataDataSource {
    func metadataMaxSize() -> Int {
        return sizeLimit
    }

    /// Dequeues the oldest pending metadata string, UTF-8 encoded,
    /// or nil when nothing is queued.
    func readyToSendMetadata(atTimestamp timestamp: TimeInterval) -> Data? {
        guard let next = pending.first else {
            return nil
        }
        pending.removeFirst()
        return next.data(using: .utf8)
    }
}

extension MediaObserver: AgoraMediaMetadataDelegate {
    /// Forwards a received metadata frame to the emitter as a dictionary.
    func receiveMetadata(_ data: Data, fromUser uid: Int, atTimestamp timestamp: TimeInterval) {
        let payload: Dictionary<String, Any?> = [
            "buffer": String(data: data, encoding: .utf8),
            "uid": uid,
            "timeStampMs": timestamp
        ]
        emit(payload)
    }
}
{ "pile_set_name": "Github" }
--- title: JSON Protocol menu: influxdb_09: weight: 20 parent: write_protocols --- The JSON write protocol is deprecated as of InfluxDB 0.9.1. It is still present but it will be removed when InfluxDB 1.0 is released. The [line protocol](/influxdb/v0.9/write_protocols/line/) is the primary write protocol for InfluxDB 0.9.1+. For reasons behind the deprecation, please see the comments on the line protocol pull request, particularly the comments on JSON serialization [CPU costs](https://github.com/influxdb/influxdb/pull/2696#issuecomment-106968181) and on the [ease of use](https://github.com/influxdb/influxdb/pull/2696#issuecomment-107043910) concerns.
{ "pile_set_name": "Github" }
<!DOCTYPE RCC><RCC version="1.0"> <qresource> <file>images/copy.png</file> <file>images/cut.png</file> <file>images/new.png</file> <file>images/open.png</file> <file>images/paste.png</file> <file>images/save.png</file> </qresource> </RCC>
{ "pile_set_name": "Github" }
{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "Title: Hyperparameter Tuning Using Random Search \n", "Slug: hyperparameter_tuning_using_random_search \n", "Summary: How to conduct random search for hyperparameter tuning in scikit-learn for machine learning in Python. \n", "Date: 2017-09-18 12:00 \n", "Category: Machine Learning \n", "Tags: Model Selection\n", "Authors: Chris Albon" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Preliminaries" ] }, { "cell_type": "code", "execution_count": 9, "metadata": { "collapsed": true }, "outputs": [], "source": [ "# Load libraries\n", "from scipy.stats import uniform\n", "from sklearn import linear_model, datasets\n", "from sklearn.model_selection import RandomizedSearchCV" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Load Iris Dataset" ] }, { "cell_type": "code", "execution_count": 10, "metadata": { "collapsed": true }, "outputs": [], "source": [ "# Load data\n", "iris = datasets.load_iris()\n", "X = iris.data\n", "y = iris.target" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Create Logistic Regression" ] }, { "cell_type": "code", "execution_count": 11, "metadata": { "collapsed": true }, "outputs": [], "source": [ "# Create logistic regression\n", "logistic = linear_model.LogisticRegression()" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Create Hyperparameter Search Space" ] }, { "cell_type": "code", "execution_count": 12, "metadata": { "collapsed": true }, "outputs": [], "source": [ "# Create regularization penalty space\n", "penalty = ['l1', 'l2']\n", "\n", "# Create regularization hyperparameter distribution using uniform distribution\n", "C = uniform(loc=0, scale=4)\n", "\n", "# Create hyperparameter options\n", "hyperparameters = dict(C=C, penalty=penalty)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Create Random Search" ] }, { "cell_type": "code", "execution_count": 13, "metadata": { "collapsed": true }, 
"outputs": [], "source": [ "# Create randomized search 5-fold cross validation and 100 iterations\n", "clf = RandomizedSearchCV(logistic, hyperparameters, random_state=1, n_iter=100, cv=5, verbose=0, n_jobs=-1)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Conduct Random Search" ] }, { "cell_type": "code", "execution_count": 14, "metadata": { "collapsed": true }, "outputs": [], "source": [ "# Fit randomized search\n", "best_model = clf.fit(X, y)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## View Hyperparameter Values Of Best Model" ] }, { "cell_type": "code", "execution_count": 15, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Best Penalty: l1\n", "Best C: 1.66808801881\n" ] } ], "source": [ "# View best hyperparameters\n", "print('Best Penalty:', best_model.best_estimator_.get_params()['penalty'])\n", "print('Best C:', best_model.best_estimator_.get_params()['C'])" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Predict Using Best Model" ] }, { "cell_type": "code", "execution_count": 16, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "array([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", " 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", " 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 1, 1, 1, 1, 1,\n", " 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,\n", " 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2,\n", " 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2])" ] }, "execution_count": 16, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# Predict target vector\n", "best_model.predict(X)" ] } ], "metadata": { "anaconda-cloud": {}, "kernelspec": { "display_name": "Python [default]", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", 
"version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.5.3" } }, "nbformat": 4, "nbformat_minor": 1 }
{ "pile_set_name": "Github" }
fileFormatVersion: 2 guid: d16c92d59af8a9a4a9a0db824132b067 timeCreated: 1467670142 licenseType: Pro MonoImporter: serializedVersion: 2 defaultReferences: [] executionOrder: 0 icon: {instanceID: 0} userData: assetBundleName: assetBundleVariant:
{ "pile_set_name": "Github" }