code
stringlengths
3
1.05M
repo_name
stringlengths
4
116
path
stringlengths
4
991
language
stringclasses
9 values
license
stringclasses
15 values
size
int32
3
1.05M
require 'chef/knife/base_sync'
require 'chef/knife/core/object_loader'

class Chef
  class Knife
    # Knife plugin that loads ("syncs") OneOps pack definitions from local
    # *.rb files into the CMS under the namespace
    # "#{Chef::Config[:nspath]}/<register>/packs".
    #
    # Two sync strategies exist:
    #   * semver mode (--semver or pack.semver?): every change produces a new
    #     immutable patch version (see #sync_pack_semver).
    #   * non-semver mode: a single major-version CI is updated in place and
    #     guarded by an MD5 signature (see #sync_pack_no_semver).
    #
    # NOTE(review): helpers such as +save+, +build+, +sync_docs?+ and
    # +sync_doc_file+ are not defined in this file — presumably they come from
    # ::BaseSync; confirm against chef/knife/base_sync.
    class PackSync < Chef::Knife
      include ::BaseSync

      # altNs tag used on mgmt.Version CIs to carry org-level visibility.
      VISIBILITY_ALT_NS_TAG = 'enableForOrg'

      banner "Loads packs into OneOps.\nUsage:\n circuit pack [OPTIONS] [PACKS...]"

      option :all,
             :short => "-a",
             :long => "--all",
             :description => "Sync all packs"

      option :register,
             :short => "-r REGISTER",
             :long => "--register REGISTER",
             :description => "Specify the source register name to use during sync"

      option :version,
             :short => "-v VERSION",
             :long => "--version VERSION",
             :description => "Specify the source register version to use during sync"

      option :pack_path,
             :short => "-o PATH:PATH",
             :long => "--pack-path PATH:PATH",
             :description => "A colon-separated path to look for packs in",
             :proc => lambda {|o| o.split(":")}

      option :reload,
             :long => "--reload",
             :description => "Force pack sync even if digest signatue has not changed (not applicable for packs with semantic versioning)"

      option :clean,
             :long => "--clean",
             :description => "Remove the current pack (and corresponding namespace) and then sync - 'fresh start' (not applicable for packs with semantic versioning)"

      option :semver,
             :long => "--semver",
             :description => "Creates new patch version for each change"

      # Entry point. Resolves config from knife defaults, validates that pack
      # names/versions do not conflict, then syncs either all packs found on
      # the pack path (--all) or the ones named on the command line.
      # Exits the process (status 1) when the register namespace is missing or
      # no packs were specified.
      def run
        t1 = Time.now
        ENV['CMS_TRACE'] = 'true' if config[:cms_trace]
        # CLI options win; fall back to knife config values.
        config[:pack_path] ||= Chef::Config[:pack_path]
        config[:register] ||= Chef::Config[:register]
        config[:version] ||= Chef::Config[:version]
        config[:semver] ||= ENV['SEMVER'].present?
        Chef::Pack.config = config
        @packs_loader ||= Knife::Core::ObjectLoader.new(Chef::Pack, ui)

        validate_packs # safety measure: make sure no packs conflict in scope

        circuit_ns_path = get_packs_ns
        unless Cms::Namespace.first(:params => {:nsPath => circuit_ns_path})
          ui.error("Can't find namespace #{circuit_ns_path}. Please register your source first with the register command.")
          exit 1
        end

        if config[:all]
          # Every *.rb under each pack-path dir, per-dir sorted.
          files = config[:pack_path].inject([]) {|a, dir| a + Dir.glob("#{dir}/*.rb").sort}
        else
          files = @name_args.inject([]) {|a, pack| a << "#{pack}.rb"}
        end
        if files.blank?
          ui.error 'You must specify pack name(s) or use the --all option to sync all.'
          exit(1)
        end

        comments = "#{ENV['USER']}:#{$0} #{config[:msg]}"
        # Keep only files whose sync actually loaded a pack (sync_pack is truthy).
        loaded_files = files.inject([]) {|a, f| a << f if sync_pack(f, comments); a}
        t2 = Time.now
        ui.info("\nProcessed #{files.size} files, loaded #{loaded_files.size} packs.\nDone at #{t2} in #{(t2 - t1).round(1)}sec")
      end

      # Pre-flight check: loads every pack file on the pack path and fails
      # (exit 1) if two files resolve to the same "source**name**major-version"
      # key, which would make them collide in the CMS namespace.
      def validate_packs
        pack_map = {}
        config[:pack_path].each do |dir|
          Dir.glob("#{dir}/*.rb").each do |file|
            pack = @packs_loader.load_from(config[:pack_path], file)
            # Major version only: pack's own version if set, else global --version.
            key = "#{get_source}**#{pack.name.downcase}**#{pack.version.presence || config[:version].split('.').first}"
            if pack_map.has_key?(key)
              ui.error("Conflict of pack source-name-version: #{key} is defined in #{file} and #{pack_map[key]}")
              exit 1
            else
              pack_map[key] = file
            end
          end
        end
      end

      private

      # Source register name (from --register / knife config).
      def get_source
        config[:register]
      end

      # Root namespace for all packs of this source register.
      def get_packs_ns
        "#{Chef::Config[:nspath]}/#{get_source}/packs"
      end

      # Namespace of one specific pack version.
      def get_pack_ns(pack)
        "#{get_packs_ns}/#{pack.name}/#{pack.version}"
      end

      # Syncs a single pack file. Returns false when the pack is ignored or
      # unchanged; otherwise returns the pack signature (truthy) after the
      # appropriate semver / non-semver sync and a docs sync.
      def sync_pack(file, comments)
        # Lazily build a name(downcased) -> mgmt.Pack CI lookup, shared across calls.
        @existing_pack_ci_map ||= Cms::Ci.all(:params => {:nsPath => get_packs_ns, :ciClassName => 'mgmt.Pack'}).
          inject({}) {|h, p| h[p.ciName.downcase] = p; h}
        pack = @packs_loader.load_from(config[:pack_path], file)
        pack_ci = @existing_pack_ci_map[pack.name.downcase]
        pack.name(pack_ci ? pack_ci.ciName : pack.name.downcase) # This kludge is deal with legacy problem of some existing packs loaded but not converted to down case.

        if pack.ignore
          ui.info("Ignoring pack #{pack.name} version #{pack.version.presence || config[:version]}")
          return false
        elsif config[:semver] || pack.semver?
          signature = sync_pack_semver(pack, comments)
        else
          signature = sync_pack_no_semver(pack, comments)
        end

        sync_docs(pack)

        ui.info("Successfully synched pack #{pack.name} version #{pack.version} #{"[signature: #{signature}]" if signature}".green)
        return signature
      end

      # Semver-mode sync: resolves the target patch version (may bump it),
      # creates the version CI and uploads design + per-environment manifest
      # templates. On any failure the freshly created version CI is destroyed
      # (best effort) and the error re-raised. Returns the signature, or false
      # when nothing changed.
      def sync_pack_semver(pack, comments)
        ui.info("\n--------------------------------------------------")
        ui.info(" #{pack.name} #{pack.version} ".blue(true))
        ui.info('--------------------------------------------------')
        if config[:reload]
          ui.warn('Reload option is not available in semver mode, all pack versions are '\
                  'immutable. If you need to force a new patch version, make a change in '\
                  'the pack (i.e. pack description) or specify patch version explicitly.')
        end

        signature = check_pack_version_ver_update(pack)
        return false unless signature # If pack signature matches nothing to do.

        Log.debug(pack.to_yaml) if Log.debug?

        version_ci = setup_pack_version(pack, comments, signature)
        begin
          ns = get_pack_ns(pack)
          # Upload design template
          sync_env(ns, 'mgmt.catalog', pack, '_default', pack.design_resources, comments)
          # Upload manifest templates
          pack.environments.each do |env, _|
            setup_mode(pack, env, comments)
            sync_env("#{ns}/#{env}", 'mgmt.manifest', pack, env, pack.environment_resources(env), comments)
          end
        rescue Exception => e
          ui.error(e.message)
          ui.info('Attempting to clean up...')
          begin
            version_ci.destroy
          rescue Exception
            ui.warn("Failed to clean up pack #{pack.name} version #{pack.version}!")
          end
          raise e
        end
        return signature
      end

      # Non-semver sync: one mutable major-version CI guarded by an MD5
      # signature. Skips when the stored signature matches (unless --reload),
      # optionally wipes the pack first (--clean), reconciles stale CMS state,
      # then uploads templates and stamps the new signature. Returns the
      # signature, or false when skipped.
      def sync_pack_no_semver(pack, comments)
        signature = Digest::MD5.hexdigest(pack.signature)
        pack.version((pack.version.presence || config[:version]).split('.').first) # default to the global knife version if not specified

        ui.info("\n--------------------------------------------------")
        ui.info(" #{pack.name} ver.#{pack.version} ".blue(true))
        ui.info('--------------------------------------------------')

        pack_ci = @existing_pack_ci_map[pack.name.downcase]
        if pack_ci && config[:clean]
          @existing_pack_ci_map.delete(pack.name.downcase)
          pack_ci.destroy
        end

        # If pack signature matches but reload option is not set - bail
        return false if !config[:reload] && check_pack_version_no_ver_update(pack, signature)

        Log.debug(pack.to_yaml) if Log.debug?

        # First, check to see if anything from CMS need to flip to pending_deletion
        fix_delta_cms(pack)

        version_ci = setup_pack_version(pack, comments, '')
        ns = get_pack_ns(pack)
        # Upload design template
        sync_env(ns, 'mgmt.catalog', pack, '_default', pack.design_resources, comments)
        # Upload manifest templates
        pack.environments.each do |env, _|
          setup_mode(pack, env, comments)
          sync_env("#{ns}/#{env}", 'mgmt.manifest', pack, env, pack.environment_resources(env), comments)
        end

        # Stamp signature only after all uploads succeeded.
        version_ci.ciAttributes.commit = signature
        unless save(version_ci)
          ui.warn("Failed to update signature for pack #{pack.name} version #{pack.version}")
        end
        return signature
      end

      # Reconciles CMS vs. pack file for '_default' plus every mgmt.Mode env:
      # flips removed relations/CIs to pending_deletion and resurrects ones
      # that reappeared in the pack.
      def fix_delta_cms(pack)
        nsPath = get_pack_ns(pack)
        cmsEnvs = ['_default'] + Cms::Ci.all(:params => {:nsPath => nsPath, :ciClassName => 'mgmt.Mode'}).map(&:ciName)
        cmsEnvs.each do |env|
          relations = fix_rels_from_cms(pack, env)
          fix_ci_from_cms(pack, env, relations, cmsEnvs)
        end
      end

      # Walks all CMS relations in the pack/env namespace and toggles their
      # relationState ('default' <-> 'pending_deletion') based on whether the
      # relation still exists in the pack definition. Returns the list of
      # to-side ciNames that are still referenced (used by fix_ci_from_cms to
      # keep their CIs alive).
      def fix_rels_from_cms(pack, env = '_default')
        pack_rels = pack.relations
        target_rels = []
        scope = (env == '_default') ? '' : "/#{env}"
        Cms::Relation.all(:params => {:nsPath => "#{get_pack_ns(pack)}#{scope}", :includeToCi => true, :includeFromCi => true}).each do |r|
          new_state = nil
          fromCiName = r.fromCi.ciName
          toCiName = r.toCi.ciName
          relationShort = r.relationName.split('.').last
          # Key format must match pack.relations, e.g. "from::watched_by::to".
          key = "#{fromCiName}::#{relationShort.scan(/[A-Z][a-z]+/).join('_').downcase}::#{toCiName}"
          exists_in_pack = pack_rels.include?(key)

          # Search through resource to determine if relation exists or not
          unless exists_in_pack
            case relationShort
            when 'Payload'
              exists_in_pack = pack.resources[fromCiName] && pack.resources[fromCiName].include?('payloads') && pack.resources[fromCiName]['payloads'].include?(toCiName)
            when 'WatchedBy'
              exists_in_pack = pack.resources[fromCiName] && pack.resources[fromCiName].include?('monitors') && pack.resources[fromCiName]['monitors'].include?(toCiName)
            when 'Requires'
              exists_in_pack = pack.resources[fromCiName] && pack.resources[toCiName]
            when 'Entrypoint'
              exists_in_pack = pack.entrypoints.include?(toCiName)
            end
          end

          target_rels.push(toCiName) if exists_in_pack && !target_rels.include?(toCiName)

          if exists_in_pack && r.relationState == 'pending_deletion'
            new_state = 'default'
          elsif !exists_in_pack && r.relationState != 'pending_deletion'
            new_state = 'pending_deletion'
          end

          if new_state
            r.relationState = new_state
            if save(r)
              ui.debug("Successfuly updated ciRelationState to #{new_state} #{r.relationName} #{r.fromCi.ciName} <-> #{r.toCi.ciName} for #{env}")
            else
              ui.error("Failed to update ciRelationState to #{new_state} #{r.relationName} #{r.fromCi.ciName} <-> #{r.toCi.ciName} for #{env}")
            end
          end
        end
        target_rels
      end

      # Companion of fix_rels_from_cms: toggles ciState on CIs in the pack/env
      # namespace. A CI is kept if it is a pack resource, a relation target
      # (+relations+ from fix_rels_from_cms), or an environment mode name.
      def fix_ci_from_cms(pack, env, relations, environments)
        scope = (env == '_default') ? '' : "/#{env}"
        pack_resources = pack.resources
        Cms::Ci.all(:params => {:nsPath => "#{get_pack_ns(pack)}#{scope}"}).each do |resource|
          new_state = nil
          exists_in_pack = pack_resources.include?(resource.ciName) || relations.include?(resource.ciName) || environments.include?(resource.ciName)
          if exists_in_pack && resource.ciState == 'pending_deletion'
            new_state = 'default'
          elsif !exists_in_pack && resource.ciState != 'pending_deletion'
            new_state = 'pending_deletion'
          end

          if new_state
            resource.ciState = new_state
            if save(resource)
              ui.debug("Successfuly updated ciState to #{new_state} for #{resource.ciName} for #{env}")
            else
              ui.error("Failed to update ciState to #{new_state} for #{resource.ciName} for #{env}")
            end
          end
        end
      end

      # Semver version resolution. Decides the exact version to load into
      # (possibly bumping the patch number) and sets it on +pack+.
      # Returns pack.signature when loading should proceed, or nil to skip
      # (version already exists / signature unchanged). Also carries over the
      # 'enabled' attribute and visibility altNs from the latest known version.
      def check_pack_version_ver_update(pack)
        all_versions = Cms::Ci.all(:params => {:nsPath => "#{get_packs_ns}/#{pack.name}", :ciClassName => 'mgmt.Version', :includeAltNs => VISIBILITY_ALT_NS_TAG})
        major, minor, patch = (pack.version.blank? ? config[:version] : pack.version).split('.')
        minor = '0' if minor.blank?

        # Need to filter version for the same major and find latest patch version for the same minor.
        latest_patch = nil
        latest_patch_number = -1
        versions = all_versions.select do |ci_v|
          split = ci_v.ciName.split('.')
          if major == split[0] && minor == split[1] && split[2].to_i > latest_patch_number
            latest_patch = ci_v
            latest_patch_number = split[2].to_i
          end
          major == split[0]
        end

        if versions.size > 0
          version_ci = latest_patch || versions.sort_by(&:ciName).last
          # Carry over 'enable' and 'visibility' from the latest patch or latest version overall.
          pack.enabled(version_ci.ciAttributes.attributes['enabled'] != 'false')
          pack.visibility(version_ci.altNs.attributes[VISIBILITY_ALT_NS_TAG])
        end

        if patch.present?
          # Check to make sure version does not already exist.
          version = "#{major}.#{minor}.#{patch}"
          if versions.find {|ci_v| ci_v.ciName == version}
            ui.warn("Pack #{pack.name} version #{pack.version} explicitly specified but it already exists, ignore it - will SKIP pack loading, but will try to update docs.")
            return nil
          else
            pack.version(version)
            ui.info("Pack #{pack.name} version #{pack.version} explicitly specified and it does not exist yet, will load.")
            return pack.signature
          end
        else
          ui.info("Pack #{pack.name} version #{pack.version} - patch version is not explicitly specified, continue with checking for latest patch version for it.")
        end

        if latest_patch
          pack.version(latest_patch.ciName)
          signature = pack.signature
          if latest_patch.ciAttributes.attributes['commit'] == signature
            ui.info("Pack #{pack.name} latest patch version #{latest_patch.ciName} matches signature (#{signature}), will skip pack loading, but will try to update docs.")
            return nil
          else
            ui.info("Pack #{pack.name} latest patch version #{latest_patch.ciName} signature is different from new pack signature #{signature}, will increment patch version and load.")
            pack.version("#{major}.#{minor}.#{latest_patch.ciName.split('.')[2].to_i + 1}")
            return pack.signature
          end
        else
          ui.info("No patches found for #{pack.name} version #{major}.#{minor}, start at patch 0 and load.")
          pack.version("#{major}.#{minor}.0")
          return pack.signature
        end
      end

      # Non-semver signature check. Returns true when the existing version CI's
      # 'commit' attribute equals +signature+ (i.e. nothing to sync), false
      # when the version is missing or the signature differs.
      def check_pack_version_no_ver_update(pack, signature)
        pack_version = Cms::Ci.first(:params => {:nsPath => "#{get_packs_ns}/#{pack.name}", :ciClassName => 'mgmt.Version', :ciName => pack.version})
        if pack_version.nil?
          ui.info("Pack #{pack.name} version #{pack.version} not found")
          return false
        else
          if pack_version.ciAttributes.attributes.key?('commit') && pack_version.ciAttributes.commit == signature
            ui.info("Pack #{pack.name} version #{pack.version} matches signature #{signature}, use --reload to force load.")
            return true
          else
            ui.warn("Pack #{pack.name} version #{pack.version} signature is different from file signature #{signature}")
            return false
          end
        end
      end

      # Creates or updates the mgmt.Pack CI and its mgmt.Version child CI
      # (stamping +signature+ into the version's 'commit' attribute).
      # Returns the saved version CI; raises on any save failure.
      def setup_pack_version(pack, comments, signature)
        pack_ci = @existing_pack_ci_map[pack.name.downcase]
        packs_ns = get_packs_ns
        if pack_ci
          ui.debug("Updating pack #{pack.name}")
        else
          ui.info("Creating pack CI #{pack.name}")
          pack_ci = build('Cms::Ci', :nsPath => packs_ns, :ciClassName => 'mgmt.Pack', :ciName => pack.name)
        end

        pack_ci.comments = comments
        pack_ci.ciAttributes.pack_type = pack.type
        pack_ci.ciAttributes.description = pack.description
        pack_ci.ciAttributes.category = pack.category
        pack_ci.ciAttributes.owner = pack.owner

        if save(pack_ci)
          ui.debug("Successfuly saved pack CI #{pack.name}")
          @existing_pack_ci_map[pack.name.downcase] = pack_ci
          pack_version = Cms::Ci.first(:params => {:nsPath => "#{packs_ns}/#{pack.name}", :ciClassName => 'mgmt.Version', :ciName => pack.version})
          if pack_version
            ui.debug("Updating pack CI #{pack.name} version #{pack.version}")
          else
            ui.info("Creating pack CI #{pack.name} version #{pack.version}")
            pack_version = build('Cms::Ci', :nsPath => "#{packs_ns}/#{pack.name}", :ciClassName => 'mgmt.Version', :ciName => pack.version, :ciAttributes => {:enabled => pack.enabled}, :altNs => {VISIBILITY_ALT_NS_TAG => pack.visibility})
          end

          pack_version.comments = comments
          pack_version.ciAttributes.description = pack.description
          pack_version.ciAttributes.commit = signature
          if save(pack_version)
            ui.debug("Successfuly saved pack version CI for: #{pack.name} #{pack.version}")
            return pack_version
          else
            ui.error("Could not save pack version CI for: #{pack.name} #{pack.version}")
          end
        else
          ui.error("Could not save pack CI #{pack.name}")
        end
        message = "Unable to setup namespace for pack #{pack.name} version #{pack.version}"
        raise Exception.new(message)
      end

      # Creates or updates the mgmt.Mode CI for environment +env+ under the
      # pack-version namespace. Returns the mode CI; raises on save failure.
      def setup_mode(pack, env, comments)
        ns = get_pack_ns(pack)
        mode = Cms::Ci.first(:params => {:nsPath => ns, :ciClassName => 'mgmt.Mode', :ciName => env})
        if mode
          ui.debug("Updating pack #{pack.name} version #{pack.version} environment mode #{env}")
        else
          ui.info("Creating pack #{pack.name} version #{pack.version} environment mode #{env}")
          mode = build('Cms::Ci', :nsPath => ns, :ciClassName => 'mgmt.Mode', :ciName => env)
        end

        mode.comments = comments
        mode.ciAttributes.description = pack.description
        if save(mode)
          ui.debug("Successfuly saved pack mode CI #{env}")
          return mode
        else
          message = "Unable to setup environment namespace for pack #{pack.name} version #{pack.version} environment mode #{env}"
          ui.error(message)
          raise Exception.new(message)
        end
      end

      # Uploads one environment's worth of template data: platform, components,
      # relations, entrypoints, procedures, variables, policies, monitors and
      # (manifest only) payloads. +package+ is 'mgmt.catalog' for design or
      # 'mgmt.manifest' for environment manifests; +env+ is '_default' for design.
      def sync_env(ns_path, package, pack, env, resources, comments)
        ui.info("======> #{env == '_default' ? 'design' : env}")
        Log.debug([pack.name, pack.version, package, ns_path, resources, comments].to_yaml) if Log.debug?
        platform = sync_platform(ns_path, package, pack, comments)
        if platform
          components = sync_components(package, ns_path, platform, resources, comments)
          %w(DependsOn ManagedVia SecuredBy).each do |relation_name|
            sync_relations(relation_name, package, ns_path, pack.env_relations(env, relation_name), components)
          end
          upload_template_entrypoint(ns_path, pack, resources, components, platform, env)
          upload_template_procedures(ns_path, pack, platform, env)
          upload_template_variables(ns_path, pack, package, platform, env)
          upload_template_policies(ns_path, pack, package, env)
          sync_monitors(package, ns_path, resources, components)
          sync_payloads(ns_path, resources, components) if package == 'mgmt.manifest'
        end
      end

      # Creates or updates the platform CI ("<package>.<Type>") for this pack,
      # copying over any platform attributes declared in the pack file.
      # Returns the platform CI, or false if it could not be saved.
      def sync_platform(nspath, package, pack, comments)
        ci_class_name = "#{package}.#{pack.type.capitalize}"
        platform = Cms::Ci.first(:params => {:nsPath => nspath, :ciClassName => ci_class_name, :ciName => pack.name})
        if platform
          ui.debug("Updating #{ci_class_name}")
        else
          ui.info("Creating #{ci_class_name}")
          platform = build('Cms::Ci', :nsPath => nspath, :ciClassName => ci_class_name, :ciName => pack.name)
        end

        plat_attrs = pack.platform && pack.platform[:attributes]
        if plat_attrs
          attrs = platform.ciAttributes.attributes
          # Only overwrite attributes the pack explicitly declares.
          attrs.each {|name, _| attrs[name] = plat_attrs[name] if plat_attrs.has_key?(name)}
        end

        platform.comments = comments
        platform.ciAttributes.description = pack.description
        platform.ciAttributes.source = get_source
        platform.ciAttributes.pack = pack.name
        platform.ciAttributes.version = pack.version
        if save(platform)
          ui.debug("Successfuly saved #{ci_class_name}")
          return platform
        else
          ui.error("Could not save #{ci_class_name}, skipping pack")
          return false
        end
      end

      # Bulk-creates/updates the 'Requires' relation (and target component CI)
      # for every resource in the pack. Raises when the bulk save fails.
      # Returns a map of component ciName => ciId for use by later relation syncs.
      def sync_components(package, ns_path, platform, resources, comments)
        relations = []
        existing = Cms::Relation.all(:params => {:ciId => platform.ciId, :direction => 'from', :relationShortName => 'Requires', :includeToCi => true})
        resources.each do |resource_name, resource|
          # Build the CI class name: <package>[.<source>].<Cookbook-capitalized>.
          class_name_parts = resource[:cookbook].split('.')
          class_name_parts[-1] = class_name_parts[-1].capitalize
          class_name_parts = class_name_parts.unshift(resource[:source]) if resource[:source]
          class_name_parts = class_name_parts.unshift(package)
          ci_class_name = class_name_parts.join('.')
          relation = existing.find {|r| r.toCi.ciName == resource_name && r.toCi.ciClassName == ci_class_name}
          if relation
            ui.debug("Updating resource #{resource_name}")
          else
            ui.info("Creating resource #{resource_name}")
            relation = build('Cms::Relation', :relationName => 'mgmt.Requires', :nsPath => ns_path, :fromCiId => platform.ciId, :toCiId => 0, :toCi => build('Cms::Ci', :nsPath => ns_path, :ciClassName => ci_class_name, :ciName => resource_name))
          end

          relation.comments = comments
          relation.toCi.comments = comments
          relation.relationAttributes.template = resource_name # default value for template attribute is the resource name

          requires_attrs = resource[:requires]
          if requires_attrs
            attrs = relation.relationAttributes.attributes
            attrs.each {|name, _| attrs[name] = requires_attrs[name] if requires_attrs[name]}
          end

          component_attrs = resource[:attributes]
          if component_attrs
            attrs = relation.toCi.ciAttributes.attributes
            attrs.each {|name, _| attrs[name] = component_attrs[name] if component_attrs.has_key?(name)}
          end

          relations << relation
        end

        relations, error = Cms::Relation.bulk(relations)
        unless relations
          ui.error("Could not save components: #{error}")
          raise(error)
        end
        ui.info("synced #{relations.size} components")
        return relations.inject({}) {|h, r| h[r.toCi.ciName] = r.toCiId; h}
      end

      # Bulk-creates/updates component-to-component relations (DependsOn,
      # ManagedVia, SecuredBy) declared in the pack, skipping (with a warning)
      # any whose endpoints were not synced as components. Raises when the
      # bulk save fails.
      def sync_relations(short_name, package, ns_path, pack_rels, components)
        relation_name = "#{package}.#{short_name}"
        existing_rels = Cms::Relation.all(:params => {:nsPath => ns_path, :relationName => relation_name})
        relations = pack_rels.inject([]) do |rels_to_save, pack_rel|
          from = pack_rel[:from_resource]
          to = pack_rel[:to_resource]
          from_id = components[from]
          to_id = components[to]
          problems = []
          problems << "component #{from} not found" unless from_id
          problems << "component #{to} not found" unless to_id
          if problems.present?
            ui.warn("Can't process #{short_name} from #{from} to #{to}: #{problems.join('; ')}")
            next rels_to_save
          end

          # A pack may declare the same edge twice; reuse the pending relation.
          relation = rels_to_save.find {|d| d.fromCiId == from_id && d.toCiId == to_id}
          if relation
            ui.debug("Updating again #{short_name} from #{from} to #{to}")
          else
            relation = existing_rels.find {|d| d.fromCiId == from_id && d.toCiId == to_id}
            if relation
              ui.debug("Updating #{short_name} from #{from} to #{to}")
            else
              ui.info("Creating #{short_name} between #{from} to #{to}")
              relation = build('Cms::Relation', :relationName => relation_name, :nsPath => ns_path, :fromCiId => from_id, :toCiId => to_id)
            end
            rels_to_save << relation
          end

          relation.merge_attributes(pack_rel[:attributes])
          rels_to_save
        end

        if relations.present?
          relations, error = Cms::Relation.bulk(relations)
          unless relations
            ui.error("Could not save #{short_name} relations: #{error}")
            raise(error)
          end
          ui.info("synched #{relations.size} #{short_name} relations")
        end
      end

      # Creates or updates the mgmt.Entrypoint relation from the platform to
      # each resource marked as an entrypoint for this environment.
      def upload_template_entrypoint(nspath, pack, resources, components, platform, env)
        relation_name = 'mgmt.Entrypoint'
        relations = Cms::Relation.all(:params => {:ciId => platform.ciId, :nsPath => nspath, :direction => 'from', :relationName => relation_name})
        resources.each do |resource_name, _|
          next unless pack.environment_entrypoints(env)[resource_name]
          entrypoint = relations.find {|r| r.toCi.ciId == components[resource_name]}
          if entrypoint
            ui.debug("Updating entrypoint between platform and #{resource_name}")
          else
            ui.info("Creating entrypoint between platform and #{resource_name}")
            entrypoint = build('Cms::Relation', :relationName => relation_name, :nsPath => nspath, :fromCiId => platform.ciId, :toCiId => components[resource_name])
          end

          entrypoint_attrs = pack.entrypoints[resource_name]['attributes']
          attrs = entrypoint.relationAttributes.attributes
          attrs.each {|name, __| attrs[name] = entrypoint_attrs[name] if entrypoint_attrs[name]}
          if save(entrypoint)
            ui.debug("Successfuly saved entrypoint between platform and #{resource_name}")
          else
            ui.error("Could not save entrypoint between platform and #{resource_name}, skipping it")
          end
        end
      end

      # Creates or updates WatchedBy relations plus Monitor CIs for every
      # monitor declared on each resource. Duplicate monitor names across
      # components share a single monitor CI (legacy behavior).
      def sync_monitors(package, ns_path, resources, components)
        relation_name = "#{package}.WatchedBy"
        ci_class_name = "#{package}.Monitor"
        relations = Cms::Relation.all(:params => {:nsPath => ns_path, :relationName => relation_name, :includeToCi => true}).to_a
        resources.each do |resource_name, resource|
          next unless resource[:monitors]
          resource[:monitors].each do |monitor_name, monitor|
            relation = relations.find {|r| r.fromCiId == components[resource_name] && r.toCi.ciName == monitor_name}
            if relation
              ui.debug("Updating monitor #{monitor_name} for #{resource_name} in #{package}")
            else
              ui.info("Creating monitor #{monitor_name} for #{resource_name}")
              relation = build('Cms::Relation', :relationName => relation_name, :nsPath => ns_path, :fromCiId => components[resource_name])
              # For legacy reasons, we might have monitors with same name, so several components
              # link (via relation) to the same CI in the pack template. Therefore,
              # monitor CI may already exists.
              duplicate_ci_name_rel = relations.find {|r| r.toCi.ciName == monitor_name}
              if duplicate_ci_name_rel
                ui.warn("Monitor #{monitor_name} for component #{resource_name} is not uniquely named, will re-use existing monitor CI with the same name")
                relation.toCiId = duplicate_ci_name_rel.toCiId
                if save(relation)
                  relation.toCi = duplicate_ci_name_rel.toCi
                else
                  ui.error("Could not create WatchedBy relation #{monitor_name} for #{resource_name}, skipping it")
                  next
                end
              else
                relation.toCiId = 0
                relation.toCi = build('Cms::Ci', :nsPath => ns_path, :ciClassName => ci_class_name, :ciName => monitor_name)
              end
              relations << relation
            end

            attrs = relation.toCi.ciAttributes.attributes
            attrs.each do |name, _|
              if monitor[name]
                # Hash-valued monitor attributes (e.g. thresholds) are stored as JSON.
                monitor[name] = monitor[name].to_json if monitor[name].is_a?(Hash)
                attrs[name] = monitor[name]
              end
            end

            if save(relation)
              ui.debug("Successfuly saved monitor #{monitor_name} for #{resource_name} in #{package}")
            else
              ui.error("Could not save monitor #{monitor_name} for #{resource_name}, skipping it")
            end
          end
        end
      end

      # Creates or updates Payload relations plus Qpath CIs for every payload
      # declared on each resource (manifest only). Like monitors, duplicate
      # payload names across components share one CI (legacy behavior).
      def sync_payloads(ns_path, resources, components)
        relation_name = 'mgmt.manifest.Payload'
        ci_class_name = 'mgmt.manifest.Qpath'
        relations = Cms::Relation.all(:params => {:nsPath => ns_path, :relationName => relation_name, :targetClassName => ci_class_name, :includeToCi => true})
        existing_rels = relations.inject({}) {|h, r| h[r.toCi.ciName.downcase] = r; h}
        resources.each do |resource_name, resource|
          next unless resource[:payloads]
          resource[:payloads].each do |payload_name, payload|
            relation = relations.find {|r| r.toCi.ciName == payload_name && r.fromCiId == components[resource_name]}
            # For legacy reasons, we might have payloads with same name, so several components
            # link (via relation) to the same pyaload CI in the pack template. Therefore,
            # payload CI may already exists.
            duplicate_ci_name_rel = existing_rels[payload_name.downcase]
            if duplicate_ci_name_rel && (!relation || relation.fromCiId != duplicate_ci_name_rel.fromCiId)
              ui.warn("Payload #{payload_name} for component #{resource_name} is not uniquely named, will re-use existing payload CI with the same name")
            end
            if relation
              ui.debug("Updating payload #{payload_name} for #{resource_name}")
            else
              ui.info("Creating payload #{payload_name} for #{resource_name}")
              relation = build('Cms::Relation', :relationName => relation_name, :nsPath => ns_path, :fromCiId => components[resource_name])
              if duplicate_ci_name_rel
                relation.toCiId = duplicate_ci_name_rel.toCiId
                unless save(relation)
                  ui.error("Could not create Payload relation #{payload_name} for #{resource_name}, skipping it")
                  next
                end
                relation.toCi = duplicate_ci_name_rel.toCi
              else
                relation.toCiId = 0
                relation.toCi = build('Cms::Ci', :nsPath => ns_path, :ciClassName => ci_class_name, :ciName => payload_name)
              end
            end

            attrs = relation.toCi.ciAttributes.attributes
            attrs.each {|name, _| attrs[name] = payload[name] if payload[name]}
            if save(relation)
              existing_rels[payload_name.downcase] = relation unless duplicate_ci_name_rel
              ui.debug("Successfuly saved payload #{payload_name} for #{resource_name}")
            else
              ui.error("Could not save payload #{payload_name} for #{resource_name}, skipping it")
            end
          end
        end
      end

      # Creates or updates ControlledBy relations plus Procedure CIs for each
      # procedure declared for the environment. Hash-valued 'arguments' are
      # serialized to JSON before storing.
      def upload_template_procedures(nspath, pack, platform, env)
        relation_name = 'mgmt.manifest.ControlledBy'
        ci_class_name = 'mgmt.manifest.Procedure'
        relations = Cms::Relation.all(:params => {:ciId => platform.ciId, :nsPath => nspath, :direction => 'from', :relationName => relation_name, :targetClassName => ci_class_name, :includeToCi => true})
        pack.environment_procedures(env).each do |procedure_name, procedure_attributes|
          relation = relations.find {|r| r.toCi.ciName == procedure_name}
          if relation
            ui.debug("Updating procedure #{procedure_name} for environment #{env}")
          else
            ui.info("Creating procedure #{procedure_name} for environment #{env}")
            relation = build('Cms::Relation', :relationName => relation_name, :nsPath => nspath, :fromCiId => platform.ciId, :toCiId => 0, :toCi => build('Cms::Ci', :nsPath => nspath, :ciClassName => ci_class_name, :ciName => procedure_name))
          end

          attrs = relation.toCi.ciAttributes.attributes
          attrs.each do |name, _|
            if procedure_attributes[name]
              if name == 'arguments' && procedure_attributes[name].is_a?(Hash)
                procedure_attributes[name] = procedure_attributes[name].to_json
              end
              attrs[name] = procedure_attributes[name]
            end
          end

          if save(relation)
            ui.debug("Successfuly saved procedure #{procedure_name} for environment #{env}")
          else
            ui.error("Could not save procedure #{procedure_name} for environment #{env}, skipping it")
          end
        end
      end

      # Creates or updates ValueFor relations plus Localvar CIs for each
      # template variable declared for the environment. Note the relation
      # direction is inverted: the variable CI is the 'from' side.
      def upload_template_variables(nspath, pack, package, platform, env)
        relation_name = "#{package}.ValueFor"
        ci_class_name = "#{package}.Localvar"
        relations = Cms::Relation.all(:params => {:ciId => platform.ciId, :direction => 'to', :relationName => relation_name, :targetClassName => ci_class_name, :includeFromCi => true})
        pack.environment_variables(env).each do |variable_name, var_attrs|
          relation = relations.find {|r| r.fromCi.ciName == variable_name}
          if relation
            ui.debug("Updating variable #{variable_name} for environment #{env}")
          else
            ui.info("Creating variable #{variable_name} for environment #{env}")
            relation = build('Cms::Relation', :relationName => relation_name, :nsPath => nspath, :toCiId => platform.ciId, :fromCiId => 0, :fromCi => build('Cms::Ci', :nsPath => nspath, :ciClassName => ci_class_name, :ciName => variable_name))
          end

          attrs = relation.fromCi.ciAttributes.attributes
          attrs.each {|name, _| attrs[name] = var_attrs[name] if var_attrs[name]}
          if save(relation)
            ui.debug("Successfuly saved variable #{variable_name} for environment #{env}")
          else
            ui.error("Could not save variable #{variable_name} for environment #{env}, skipping it")
          end
        end
      end

      # Creates or updates standalone Policy CIs (no relation to the platform)
      # for each policy declared for the environment.
      def upload_template_policies(nspath, pack, package, env)
        ci_class_name = "#{package}.Policy"
        policies = Cms::Ci.all(:params => {:nsPath => nspath, :ciClassName => ci_class_name})
        pack.environment_policies(env).each do |policy_name, policy_attrs|
          policy = policies.find {|p| p.ciName == policy_name}
          unless policy
            policy = build('Cms::Ci', :nsPath => nspath, :ciClassName => ci_class_name, :ciName => policy_name)
          end

          attrs = policy.ciAttributes.attributes
          attrs.each {|name, _| attrs[name] = policy_attrs[name] if policy_attrs[name]}
          if save(policy)
            ui.debug("Successfuly saved policy #{policy_name} attributes for environment #{env} and #{pack}")
          else
            ui.error("Could not save policy #{policy_name} attributes for environment #{env} and #{pack}, skipping it")
          end
        end
      end

      # Uploads documentation/image files found next to the pack file in a
      # 'doc' directory (files named "<pack>.*"). No-op unless sync_docs?
      # (NOTE(review): sync_docs? and sync_doc_file are presumably BaseSync
      # helpers — confirm.)
      def sync_docs(pack)
        return unless sync_docs?
        doc_dir = File.expand_path('doc', File.dirname(pack.filename))
        files = Dir.glob("#{doc_dir}/#{pack.name}.*")
        if files.present?
          ui.info('docs and images:')
          files.each {|file| sync_doc_file(file, file.gsub(doc_dir, "#{get_source}/packs/#{pack.name}/#{pack.version}"))}
        end
      end
    end
  end
end
oneops/OneOps
oneops-admin/lib/chef/knife/pack_sync.rb
Ruby
apache-2.0
40,395
// Copyright 2014 Docker authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the DOCKER-LICENSE file. package archive import ( "archive/tar" "bufio" "compress/gzip" "fmt" "io" "os" "path/filepath" "strings" "github.com/docker/docker/pkg/fileutils" "github.com/docker/docker/pkg/idtools" "github.com/docker/docker/pkg/pools" "github.com/docker/docker/pkg/system" "github.com/sirupsen/logrus" ) const ( // Uncompressed represents the uncompressed. Uncompressed Compression = iota // Bzip2 is bzip2 compression algorithm. Bzip2 // Gzip is gzip compression algorithm. Gzip // Xz is xz compression algorithm. Xz ) const ( modeISDIR = 040000 // Directory modeISFIFO = 010000 // FIFO modeISREG = 0100000 // Regular file modeISLNK = 0120000 // Symbolic link modeISBLK = 060000 // Block special file modeISCHR = 020000 // Character special file modeISSOCK = 0140000 // Socket ) // Compression is the state represents if compressed or not. type Compression int // Extension returns the extension of a file that uses the specified compression algorithm. func (compression *Compression) Extension() string { switch *compression { case Uncompressed: return "tar" case Bzip2: return "tar.bz2" case Gzip: return "tar.gz" case Xz: return "tar.xz" } return "" } // WhiteoutFormat is the format of whiteouts unpacked type WhiteoutFormat int // TarOptions wraps the tar options. type TarOptions struct { IncludeFiles []string ExcludePatterns []string Compression Compression NoLchown bool UIDMaps []idtools.IDMap GIDMaps []idtools.IDMap ChownOpts *idtools.Identity IncludeSourceDir bool // WhiteoutFormat is the expected on disk format for whiteout files. // This format will be converted to the standard format on pack // and from the standard format on unpack. WhiteoutFormat WhiteoutFormat // When unpacking, specifies whether overwriting a directory with a // non-directory is allowed and vice versa. 
NoOverwriteDirNonDir bool // For each include when creating an archive, the included name will be // replaced with the matching name from this map. RebaseNames map[string]string InUserNS bool } // TarWithOptions creates an archive from the directory at `path`, only including files whose relative // paths are included in `options.IncludeFiles` (if non-nil) or not in `options.ExcludePatterns`. func TarWithOptions(srcPath string, options *TarOptions) (io.ReadCloser, error) { // Fix the source path to work with long path names. This is a no-op // on platforms other than Windows. srcPath = fixVolumePathPrefix(srcPath) pm, err := fileutils.NewPatternMatcher(options.ExcludePatterns) if err != nil { return nil, err } pipeReader, pipeWriter := io.Pipe() compressWriter, err := CompressStream(pipeWriter, options.Compression) if err != nil { return nil, err } go func() { ta := newTarAppender( idtools.NewIDMappingsFromMaps(options.UIDMaps, options.GIDMaps), compressWriter, options.ChownOpts, ) ta.WhiteoutConverter = getWhiteoutConverter(options.WhiteoutFormat) defer func() { // Make sure to check the error on Close. if err := ta.TarWriter.Close(); err != nil { logrus.Errorf("Can't close tar writer: %s", err) } if err := compressWriter.Close(); err != nil { logrus.Errorf("Can't close compress writer: %s", err) } if err := pipeWriter.Close(); err != nil { logrus.Errorf("Can't close pipe writer: %s", err) } }() // this buffer is needed for the duration of this piped stream defer pools.BufioWriter32KPool.Put(ta.Buffer) // In general we log errors here but ignore them because // during e.g. a diff operation the container can continue // mutating the filesystem and we can see transient errors // from this stat, err := os.Lstat(srcPath) if err != nil { return } if !stat.IsDir() { // We can't later join a non-dir with any includes because the // 'walk' will error if "file/." is stat-ed and "file" is not a // directory. 
So, we must split the source path and use the // basename as the include. if len(options.IncludeFiles) > 0 { logrus.Warn("Tar: Can't archive a file with includes") } dir, base := SplitPathDirEntry(srcPath) srcPath = dir options.IncludeFiles = []string{base} } if len(options.IncludeFiles) == 0 { options.IncludeFiles = []string{"."} } seen := make(map[string]bool) for _, include := range options.IncludeFiles { rebaseName := options.RebaseNames[include] walkRoot := getWalkRoot(srcPath, include) filepath.Walk(walkRoot, func(filePath string, f os.FileInfo, err error) error { if err != nil { logrus.Errorf("Tar: Can't stat file %s to tar: %s", srcPath, err) return nil } relFilePath, err := filepath.Rel(srcPath, filePath) if err != nil || (!options.IncludeSourceDir && relFilePath == "." && f.IsDir()) { // Error getting relative path OR we are looking // at the source directory path. Skip in both situations. return nil } if options.IncludeSourceDir && include == "." && relFilePath != "." { relFilePath = strings.Join([]string{".", relFilePath}, string(filepath.Separator)) } skip := false // If "include" is an exact match for the current file // then even if there's an "excludePatterns" pattern that // matches it, don't skip it. IOW, assume an explicit 'include' // is asking for that file no matter what - which is true // for some files, like .dockerignore and Dockerfile (sometimes) if include != relFilePath { skip, err = pm.Matches(relFilePath) if err != nil { logrus.Errorf("Error matching %s: %v", relFilePath, err) return err } } if skip { // If we want to skip this file and its a directory // then we should first check to see if there's an // excludes pattern (e.g. !dir/file) that starts with this // dir. If so then we can't skip this dir. // Its not a dir then so we can just return/skip. if !f.IsDir() { return nil } // No exceptions (!...) 
in patterns so just skip dir if !pm.Exclusions() { return filepath.SkipDir } dirSlash := relFilePath + string(filepath.Separator) for _, pat := range pm.Patterns() { if !pat.Exclusion() { continue } if strings.HasPrefix(pat.String()+string(filepath.Separator), dirSlash) { // found a match - so can't skip this dir return nil } } // No matching exclusion dir so just skip dir return filepath.SkipDir } if seen[relFilePath] { return nil } seen[relFilePath] = true // Rename the base resource. if rebaseName != "" { var replacement string if rebaseName != string(filepath.Separator) { // Special case the root directory to replace with an // empty string instead so that we don't end up with // double slashes in the paths. replacement = rebaseName } relFilePath = strings.Replace(relFilePath, include, replacement, 1) } if err := ta.addTarFile(filePath, relFilePath); err != nil { logrus.Errorf("Can't add file %s to tar: %s", filePath, err) // if pipe is broken, stop writing tar stream to it if err == io.ErrClosedPipe { return err } } return nil }) } }() return pipeReader, nil } // CompressStream compresses the dest with specified compression algorithm. 
func CompressStream(dest io.Writer, compression Compression) (io.WriteCloser, error) { p := pools.BufioWriter32KPool buf := p.Get(dest) switch compression { case Uncompressed: writeBufWrapper := p.NewWriteCloserWrapper(buf, buf) return writeBufWrapper, nil case Gzip: gzWriter := gzip.NewWriter(dest) writeBufWrapper := p.NewWriteCloserWrapper(buf, gzWriter) return writeBufWrapper, nil case Bzip2, Xz: // archive/bzip2 does not support writing, and there is no xz support at all // However, this is not a problem as docker only currently generates gzipped tars return nil, fmt.Errorf("Unsupported compression format %s", (&compression).Extension()) default: return nil, fmt.Errorf("Unsupported compression format %s", (&compression).Extension()) } } type tarWhiteoutConverter interface { ConvertWrite(*tar.Header, string, os.FileInfo) (*tar.Header, error) ConvertRead(*tar.Header, string) (bool, error) } type tarAppender struct { TarWriter *tar.Writer Buffer *bufio.Writer // for hardlink mapping SeenFiles map[uint64]string IdentityMapping *idtools.IdentityMapping ChownOpts *idtools.Identity // For packing and unpacking whiteout files in the // non standard format. The whiteout files defined // by the AUFS standard are used as the tar whiteout // standard. 
WhiteoutConverter tarWhiteoutConverter } func newTarAppender(idMapping *idtools.IdentityMapping, writer io.Writer, chownOpts *idtools.Identity) *tarAppender { return &tarAppender{ SeenFiles: make(map[uint64]string), TarWriter: tar.NewWriter(writer), Buffer: pools.BufioWriter32KPool.Get(nil), IdentityMapping: idMapping, ChownOpts: chownOpts, } } // addTarFile adds to the tar archive a file from `path` as `name` func (ta *tarAppender) addTarFile(path, name string) error { fi, err := os.Lstat(path) if err != nil { return err } var link string if fi.Mode()&os.ModeSymlink != 0 { var err error link, err = os.Readlink(path) if err != nil { return err } } hdr, err := FileInfoHeader(name, fi, link) if err != nil { return err } if err := ReadSecurityXattrToTarHeader(path, hdr); err != nil { return err } // if it's not a directory and has more than 1 link, // it's hard linked, so set the type flag accordingly if !fi.IsDir() && hasHardlinks(fi) { inode, err := getInodeFromStat(fi.Sys()) if err != nil { return err } // a link should have a name that it links too // and that linked name should be first in the tar archive if oldpath, ok := ta.SeenFiles[inode]; ok { hdr.Typeflag = tar.TypeLink hdr.Linkname = oldpath hdr.Size = 0 // This Must be here for the writer math to add up! } else { ta.SeenFiles[inode] = name } } //check whether the file is overlayfs whiteout //if yes, skip re-mapping container ID mappings. isOverlayWhiteout := fi.Mode()&os.ModeCharDevice != 0 && hdr.Devmajor == 0 && hdr.Devminor == 0 //handle re-mapping container ID mappings back to host ID mappings before //writing tar headers/files. 
We skip whiteout files because they were written //by the kernel and already have proper ownership relative to the host if !isOverlayWhiteout && !strings.HasPrefix(filepath.Base(hdr.Name), WhiteoutPrefix) && !ta.IdentityMapping.Empty() { fileIdentity, err := getFileIdentity(fi.Sys()) if err != nil { return err } hdr.Uid, hdr.Gid, err = ta.IdentityMapping.ToContainer(fileIdentity) if err != nil { return err } } // explicitly override with ChownOpts if ta.ChownOpts != nil { hdr.Uid = ta.ChownOpts.UID hdr.Gid = ta.ChownOpts.GID } if ta.WhiteoutConverter != nil { wo, err := ta.WhiteoutConverter.ConvertWrite(hdr, path, fi) if err != nil { return err } // If a new whiteout file exists, write original hdr, then // replace hdr with wo to be written after. Whiteouts should // always be written after the original. Note the original // hdr may have been updated to be a whiteout with returning // a whiteout header if wo != nil { if err := ta.TarWriter.WriteHeader(hdr); err != nil { return err } if hdr.Typeflag == tar.TypeReg && hdr.Size > 0 { return fmt.Errorf("tar: cannot use whiteout for non-empty file") } hdr = wo } } if err := ta.TarWriter.WriteHeader(hdr); err != nil { return err } if hdr.Typeflag == tar.TypeReg && hdr.Size > 0 { // We use system.OpenSequential to ensure we use sequential file // access on Windows to avoid depleting the standby list. // On Linux, this equates to a regular os.Open. 
file, err := system.OpenSequential(path) if err != nil { return err } ta.Buffer.Reset(ta.TarWriter) defer ta.Buffer.Reset(nil) _, err = io.Copy(ta.Buffer, file) file.Close() if err != nil { return err } err = ta.Buffer.Flush() if err != nil { return err } } return nil } // ReadSecurityXattrToTarHeader reads security.capability xattr from filesystem // to a tar header func ReadSecurityXattrToTarHeader(path string, hdr *tar.Header) error { capability, _ := system.Lgetxattr(path, "security.capability") if capability != nil { hdr.Xattrs = make(map[string]string) hdr.Xattrs["security.capability"] = string(capability) } return nil } // FileInfoHeader creates a populated Header from fi. // Compared to archive pkg this function fills in more information. // Also, regardless of Go version, this function fills file type bits (e.g. hdr.Mode |= modeISDIR), // which have been deleted since Go 1.9 archive/tar. func FileInfoHeader(name string, fi os.FileInfo, link string) (*tar.Header, error) { hdr, err := tar.FileInfoHeader(fi, link) if err != nil { return nil, err } hdr.Mode = fillGo18FileTypeBits(int64(chmodTarEntry(os.FileMode(hdr.Mode))), fi) name, err = canonicalTarName(name, fi.IsDir()) if err != nil { return nil, fmt.Errorf("tar: cannot canonicalize path: %v", err) } hdr.Name = name if err := setHeaderForSpecialDevice(hdr, name, fi.Sys()); err != nil { return nil, err } return hdr, nil } // fillGo18FileTypeBits fills type bits which have been removed on Go 1.9 archive/tar // https://github.com/golang/go/commit/66b5a2f func fillGo18FileTypeBits(mode int64, fi os.FileInfo) int64 { fm := fi.Mode() switch { case fm.IsRegular(): mode |= modeISREG case fi.IsDir(): mode |= modeISDIR case fm&os.ModeSymlink != 0: mode |= modeISLNK case fm&os.ModeDevice != 0: if fm&os.ModeCharDevice != 0 { mode |= modeISCHR } else { mode |= modeISBLK } case fm&os.ModeNamedPipe != 0: mode |= modeISFIFO case fm&os.ModeSocket != 0: mode |= modeISSOCK } return mode } // canonicalTarName provides a 
platform-independent and consistent posix-style //path for files and directories to be archived regardless of the platform. func canonicalTarName(name string, isDir bool) (string, error) { name, err := CanonicalTarNameForPath(name) if err != nil { return "", err } // suffix with '/' for directories if isDir && !strings.HasSuffix(name, "/") { name += "/" } return name, nil }
rhatdan/cri-o
vendor/github.com/fsouza/go-dockerclient/internal/archive/archive.go
GO
apache-2.0
14,629
/** * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.openwire.v4; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import org.apache.activemq.openwire.*; import org.apache.activemq.command.*; /** * Test case for the OpenWire marshalling for RemoveSubscriptionInfo * * * NOTE!: This file is auto generated - do not modify! * if you need to make a change, please see the modify the groovy scripts in the * under src/gram/script and then use maven openwire:generate to regenerate * this file. * * */ public class RemoveSubscriptionInfoTest extends BaseCommandTestSupport { public static RemoveSubscriptionInfoTest SINGLETON = new RemoveSubscriptionInfoTest(); public Object createObject() throws Exception { RemoveSubscriptionInfo info = new RemoveSubscriptionInfo(); populateObject(info); return info; } protected void populateObject(Object object) throws Exception { super.populateObject(object); RemoveSubscriptionInfo info = (RemoveSubscriptionInfo) object; info.setConnectionId(createConnectionId("ConnectionId:1")); info.setSubscriptionName("SubcriptionName:2"); info.setClientId("ClientId:3"); } }
ryanemerson/activemq-artemis
tests/activemq5-unit-tests/src/test/java/org/apache/activemq/openwire/v4/RemoveSubscriptionInfoTest.java
Java
apache-2.0
2,049
/* * Copyright 2002-2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cache.annotation; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** * {@code @CacheConfig} provides a mechanism for sharing common cache-related * settings at the class level. * * <p>When this annotation is present on a given class, it provides a set * of default settings for any cache operation defined in that class. * * @author Stephane Nicoll * @author Sam Brannen * @since 4.1 */ @Target(ElementType.TYPE) @Retention(RetentionPolicy.RUNTIME) @Documented public @interface CacheConfig { /** * Names of the default caches to consider for caching operations defined * in the annotated class. * <p>If none is set at the operation level, these are used instead of the default. * <p>May be used to determine the target cache (or caches), matching the * qualifier value or the bean names of a specific bean definition. */ String[] cacheNames() default {}; /** * The bean name of the default {@link org.springframework.cache.interceptor.KeyGenerator} to * use for the class. * <p>If none is set at the operation level, this one is used instead of the default. * <p>The key generator is mutually exclusive with the use of a custom key. 
When such key is * defined for the operation, the value of this key generator is ignored. */ String keyGenerator() default ""; /** * The bean name of the custom {@link org.springframework.cache.CacheManager} to use to * create a default {@link org.springframework.cache.interceptor.CacheResolver} if none * is set already. * <p>If no resolver and no cache manager are set at the operation level, and no cache * resolver is set via {@link #cacheResolver}, this one is used instead of the default. * @see org.springframework.cache.interceptor.SimpleCacheResolver */ String cacheManager() default ""; /** * The bean name of the custom {@link org.springframework.cache.interceptor.CacheResolver} to use. * <p>If no resolver and no cache manager are set at the operation level, this one is used * instead of the default. */ String cacheResolver() default ""; }
shivpun/spring-framework
spring-context/src/main/java/org/springframework/cache/annotation/CacheConfig.java
Java
apache-2.0
2,865
/* * Copyright 2014 Avanza Bank AB * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.avanza.astrix.beans.ft; import com.avanza.astrix.beans.core.AstrixBeanKey; import com.avanza.astrix.beans.core.BeanProxy; import com.avanza.astrix.beans.service.ServiceBeanProxyFactory; /** * * @author Elias Lindholm * */ final class FaultToleranceServiceBeanProxyFactory implements ServiceBeanProxyFactory { private final BeanFaultToleranceFactory ftFactory; public FaultToleranceServiceBeanProxyFactory(BeanFaultToleranceFactory ftFactory) { this.ftFactory = ftFactory; } @Override public BeanProxy create(AstrixBeanKey<?> beanKey) { return ftFactory.createFaultToleranceProxy(beanKey); } @Override public int order() { return 1; } }
jensim/astrix
astrix-context/src/main/java/com/avanza/astrix/beans/ft/FaultToleranceServiceBeanProxyFactory.java
Java
apache-2.0
1,277
// Copyright (C) 2017 Google Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package f32_test import ( "testing" "github.com/google/gapid/core/assert" "github.com/google/gapid/core/math/f32" ) func TestV4DSqrMagnitude(t *testing.T) { assert := assert.To(t) for _, test := range []struct { v f32.Vec4 r float32 }{ {f32.Vec4{0, 0, 0, 0}, 0}, {f32.Vec4{1, 0, 0, 0}, 1}, {f32.Vec4{0, 2, 0, 0}, 4}, {f32.Vec4{0, 0, -3, 0}, 9}, {f32.Vec4{0, 0, 0, -4}, 16}, {f32.Vec4{1, 1, 1, 1}, 4}, } { assert.For("%v.SqrMagnitude", test.v).That(test.v.SqrMagnitude()).Equals(test.r) } } func TestV4DMagnitude(t *testing.T) { assert := assert.To(t) for _, test := range []struct { v f32.Vec4 r float32 }{ {f32.Vec4{0, 0, 0, 0}, 0}, {f32.Vec4{1, 0, 0, 0}, 1}, {f32.Vec4{0, 2, 0, 0}, 2}, {f32.Vec4{0, 0, -3, 0}, 3}, {f32.Vec4{0, 0, 0, -4}, 4}, {f32.Vec4{1, 1, 1, 1}, 2}, } { assert.For("%v.Magnitude", test.v).That(test.v.Magnitude()).Equals(test.r) } } func TestV4DScale(t *testing.T) { assert := assert.To(t) for _, test := range []struct { v f32.Vec4 s float32 r f32.Vec4 }{ {f32.Vec4{1, 0, 0, 0}, -1, f32.Vec4{-1, 0, 0, 0}}, {f32.Vec4{0, 2, 0, 0}, -2, f32.Vec4{0, -4, 0, 0}}, {f32.Vec4{0, 0, 3, 0}, -3, f32.Vec4{0, 0, -9, 0}}, {f32.Vec4{0, 0, 0, 4}, -4, f32.Vec4{0, 0, 0, -16}}, {f32.Vec4{1, 1, 1, 1}, 0, f32.Vec4{0, 0, 0, 0}}, } { assert.For("%v.Scale", test.v).That(test.v.Scale(test.s)).Equals(test.r) } } func TestV4DNormalize(t *testing.T) { assert := assert.To(t) for _, test := range []struct { v 
f32.Vec4 r f32.Vec4 }{ {f32.Vec4{1, 0, 0, 0}, f32.Vec4{1, 0, 0, 0}}, {f32.Vec4{0, -2, 0, 0}, f32.Vec4{0, -1, 0, 0}}, {f32.Vec4{0, 0, 3, 0}, f32.Vec4{0, 0, 1, 0}}, {f32.Vec4{0, 0, 0, -4}, f32.Vec4{0, 0, 0, -1}}, {f32.Vec4{1, 2, -2, 4}, f32.Vec4{1. / 5, 2. / 5, -2. / 5, 4. / 5}}, } { assert.For("%v.Normalize", test.v).That(test.v.Normalize()).Equals(test.r) } } func TestV4DXYZ(t *testing.T) { assert := assert.To(t) for _, test := range []struct { v f32.Vec4 r f32.Vec3 }{ {f32.Vec4{0, 0, 0, 0}, f32.Vec3{0, 0, 0}}, {f32.Vec4{1, 2, 3, 4}, f32.Vec3{1, 2, 3}}, } { assert.For("%v.V3D", test.v).That(test.v.XYZ()).Equals(test.r) } } func TestAdd4D(t *testing.T) { assert := assert.To(t) for _, test := range []struct { a f32.Vec4 b f32.Vec4 r f32.Vec4 }{ {f32.Vec4{0, 0, 0, 0}, f32.Vec4{0, 0, 0, 0}, f32.Vec4{0, 0, 0, 0}}, {f32.Vec4{1, 2, 3, 4}, f32.Vec4{0, 0, 0, 0}, f32.Vec4{1, 2, 3, 4}}, {f32.Vec4{0, 0, 0, 0}, f32.Vec4{4, 3, 2, 1}, f32.Vec4{4, 3, 2, 1}}, {f32.Vec4{1, 2, 3, 4}, f32.Vec4{-1, -2, -3, -4}, f32.Vec4{0, 0, 0, 0}}, } { assert.For("Add4D(%v, %v)", test.a, test.b). That(f32.Add4D(test.a, test.b)).Equals(test.r) } } func TestSub4D(t *testing.T) { assert := assert.To(t) for _, test := range []struct { a f32.Vec4 b f32.Vec4 r f32.Vec4 }{ {f32.Vec4{0, 0, 0, 0}, f32.Vec4{0, 0, 0, 0}, f32.Vec4{0, 0, 0, 0}}, {f32.Vec4{1, 2, 3, 4}, f32.Vec4{0, 0, 0, 0}, f32.Vec4{1, 2, 3, 4}}, {f32.Vec4{0, 0, 0, 0}, f32.Vec4{4, 3, 2, 1}, f32.Vec4{-4, -3, -2, -1}}, {f32.Vec4{1, 2, 3, 4}, f32.Vec4{-1, -2, -3, -4}, f32.Vec4{2, 4, 6, 8}}, } { assert.For("Sub4D(%v, %v)", test.a, test.b). That(f32.Sub4D(test.a, test.b)).Equals(test.r) } }
Qining/gapid
core/math/f32/vec4_test.go
GO
apache-2.0
3,798
using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; // General Information about an assembly is controlled through the following // set of attributes. Change these attribute values to modify the information // associated with an assembly. [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] // Setting ComVisible to false makes the types in this assembly not visible // to COM components. If you need to access a type in this assembly from // COM, set the ComVisible attribute to true on that type. [assembly: ComVisible(false)] // The following GUID is for the ID of the typelib if this project is exposed to COM [assembly: Guid("535a68d1-d545-4e49-95e2-a5db29b76f20")] // Version information for an assembly consists of the following four values: // // Major Version // Minor Version // Build Number // Revision // // You can specify all the values or you can default the Build and Revision Numbers // by using the '*' as shown below: // [assembly: AssemblyVersion("1.0.*")]
halcwb/FSharp.Formatting
src/CSharpFormat/Properties/LocalAssemblyInfo.cs
C#
apache-2.0
1,144
/* * Copyright (C) 2010 Toni Menzel * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.ops4j.pax.exam.container.remote; import static org.ops4j.pax.exam.OptionUtils.filter; import org.ops4j.pax.exam.Option; import org.ops4j.pax.exam.container.remote.options.RBCLookupTimeoutOption; import org.ops4j.pax.exam.container.remote.options.RBCPortOption; /** * Minimal parser for the rbcremote fragment. */ public class Parser { private String host; private Integer port; private long timeout; public Parser(Option[] options) { extractArguments(filter(RBCPortOption.class, options)); extractArguments(filter(RBCLookupTimeoutOption.class, options)); assert port != null : "Port should never be null."; assert host != null : "Host should never be null."; } private void extractArguments(RBCLookupTimeoutOption[] options) { for (RBCLookupTimeoutOption op : options) { timeout = op.getTimeout(); } } private void extractArguments(RBCPortOption[] rbcPortOptions) { for (RBCPortOption op : rbcPortOptions) { host = op.getHost(); port = op.getPort(); } } public String getHost() { return host; } public Integer getRMIPort() { return port; } public long getRMILookupTimpout() { return timeout; } public Integer getPort() { return port; } }
ops4j/org.ops4j.pax.exam2
containers/pax-exam-container-remote/src/main/java/org/ops4j/pax/exam/container/remote/Parser.java
Java
apache-2.0
1,966
/* * Copyright (c) 2012, salesforce.com, inc. * All rights reserved. * Redistribution and use of this software in source and binary forms, with or * without modification, are permitted provided that the following conditions * are met: * - Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * - Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * - Neither the name of salesforce.com, inc. nor the names of its contributors * may be used to endorse or promote products derived from this software without * specific prior written permission of salesforce.com, inc. * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/ package com.salesforce.androidsdk.smartstore.store; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import com.salesforce.androidsdk.smartstore.store.SmartStore.Type; /** * Simple class to represent index spec */ public class IndexSpec { public final String path; public final Type type; public final String columnName; public IndexSpec(String path, Type type) { this.path = path; this.type = type; this.columnName = null; // undefined } public IndexSpec(String path, Type type, String columnName) { this.path = path; this.type = type; this.columnName = columnName; } @Override public int hashCode() { int result = 17; result = 31 * result + path.hashCode(); result = 31 * result + type.hashCode(); if (columnName != null) result = 31 * result + columnName.hashCode(); return result; } @Override public boolean equals(Object obj) { if (obj == null) return false; if (obj == this) return true; if (!(obj instanceof IndexSpec)) return false; IndexSpec rhs = (IndexSpec) obj; boolean result = true; result = result && path.equals(rhs.path); result = result && type.equals(rhs.type); if (columnName == null) result = result && (columnName == rhs.columnName); else result = result && columnName.equals(rhs.columnName); return result; } /** * @return path | type */ public String getPathType() { return path + "|" + type; } /** * @return JSONObject for this IndexSpec * @throws JSONException */ public JSONObject toJSON() throws JSONException { JSONObject json = new JSONObject(); json.put("path", path); json.put("type", type); json.put("columnName", columnName); return json; } /** * @param indexSpecs * @return JSONArray for the array of IndexSpec's * @throws JSONException */ public static JSONArray toJSON(IndexSpec[] indexSpecs) throws JSONException { JSONArray json = new JSONArray(); for(IndexSpec indexSpec : indexSpecs) { json.put(indexSpec.toJSON()); } 
return json; } /** * @param jsonArray * @return IndexSpec[] from a JSONArray * @throws JSONException */ public static IndexSpec[] fromJSON(JSONArray jsonArray) throws JSONException { List<IndexSpec> list = new ArrayList<IndexSpec>(); for(int i=0; i<jsonArray.length(); i++) { list.add(IndexSpec.fromJSON(jsonArray.getJSONObject(i))); } return list.toArray(new IndexSpec[0]); } /** * Return IndexSpec given JSONObject * @param json * @return * @throws JSONException */ public static IndexSpec fromJSON(JSONObject json) throws JSONException { return new IndexSpec(json.getString("path"), Type.valueOf(json.getString("type")), json.optString("columnName")); } /** * @param indexSpecs * @return map index spec path to index spec */ public static Map<String, IndexSpec> mapForIndexSpecs(IndexSpec[] indexSpecs) { Map<String, IndexSpec> map = new HashMap<String, IndexSpec>(); for (IndexSpec indexSpec : indexSpecs) { map.put(indexSpec.path, indexSpec); } return map; } /** * @param indexSpecs * @return true if at least one of the indexSpec is of type full_text */ public static boolean hasFTS(IndexSpec[] indexSpecs) { for (IndexSpec indexSpec : indexSpecs) { if (indexSpec.type == Type.full_text) { return true; } } return false; } }
huminzhi/SalesforceMobileSDK-Android
libs/SmartStore/src/com/salesforce/androidsdk/smartstore/store/IndexSpec.java
Java
apache-2.0
5,296
/* * author: the5fire * blog: the5fire.com * date: 2014-03-16 * */ $(function(){ WEB_SOCKET_SWF_LOCATION = "/static/WebSocketMain.swf"; WEB_SOCKET_DEBUG = true; var socket = io.connect(); socket.on('connect', function(){ console.log('connected'); }); $(window).bind("beforeunload", function() { socket.disconnect(); }); var User = Backbone.Model.extend({ urlRoot: '/user', }); var Topic = Backbone.Model.extend({ urlRoot: '/topic', }); var Message = Backbone.Model.extend({ urlRoot: '/message', sync: function(method, model, options) { if (method === 'create') { socket.emit('message', model.attributes); // 错误处理没做 $('#comment').val(''); } else { return Backbone.sync(method, model, options); }; }, }); var Topics = Backbone.Collection.extend({ url: '/topic', model: Topic, }); var Messages = Backbone.Collection.extend({ url: '/message', model: Message, }); var topics = new Topics; var TopicView = Backbone.View.extend({ tagName: "div class='column'", templ: _.template($('#topic-template').html()), // 渲染列表页模板 render: function() { $(this.el).html(this.templ(this.model.toJSON())); return this; }, }); var messages = new Messages; var MessageView = Backbone.View.extend({ tagName: "div class='comment'", templ: _.template($('#message-template').html()), // 渲染列表页模板 render: function() { $(this.el).html(this.templ(this.model.toJSON())); return this; }, }); var AppView = Backbone.View.extend({ el: "#main", topic_list: $("#topic_list"), topic_section: $("#topic_section"), message_section: $("#message_section"), message_list: $("#message_list"), message_head: $("#message_head"), events: { 'click .submit': 'saveMessage', 'click .submit_topic': 'saveTopic', 'keypress #comment': 'saveMessageEvent', }, initialize: function() { _.bindAll(this, 'addTopic', 'addMessage'); topics.bind('add', this.addTopic); // 定义消息列表池,每个topic有自己的message collection // 这样保证每个主题下得消息不冲突 this.message_pool = {}; this.socket = null; this.message_list_div = document.getElementById('message_list'); }, addTopic: 
function(topic) { var view = new TopicView({model: topic}); this.topic_list.append(view.render().el); }, addMessage: function(message) { var view = new MessageView({model: message}); this.message_list.append(view.render().el); self.message_list.scrollTop(self.message_list_div.scrollHeight); }, saveMessageEvent: function(evt) { if (evt.keyCode == 13) { this.saveMessage(evt); } }, saveMessage: function(evt) { var comment_box = $('#comment') var content = comment_box.val(); if (content == '') { alert('内容不能为空'); return false; } var topic_id = comment_box.attr('topic_id'); var message = new Message({ content: content, topic_id: topic_id, }); var messages = this.message_pool[topic_id]; message.save(); // 依赖上面对sync的重载 }, saveTopic: function(evt) { var topic_title = $('#topic_title'); if (topic_title.val() == '') { alert('主题不能为空!'); return false } var topic = new Topic({ title: topic_title.val(), }); self = this; topic.save(null, { success: function(model, response, options){ topics.add(response); topic_title.val(''); }, error: function(model, resp, options) { alert(resp.responseText); } }); }, showTopic: function(){ topics.fetch(); this.topic_section.show(); this.message_section.hide(); this.message_list.html(''); this.goOut() }, goOut: function(){ // 退出房间 socket.emit('go_out'); socket.removeAllListeners('message'); }, initMessage: function(topic_id) { var messages = new Messages; messages.bind('add', this.addMessage); this.message_pool[topic_id] = messages; }, showMessage: function(topic_id) { this.initMessage(topic_id); this.message_section.show(); this.topic_section.hide(); this.showMessageHead(topic_id); $('#comment').attr('topic_id', topic_id); var messages = this.message_pool[topic_id]; // 进入房间 socket.emit('topic', topic_id); // 监听message事件,添加对话到messages中 socket.on('message', function(response) { messages.add(response); }); messages.fetch({ data: {topic_id: topic_id}, success: function(resp) { self.message_list.scrollTop(self.message_list_div.scrollHeight) }, error: 
function(model, resp, options) { alert(resp.responseText); } }); }, showMessageHead: function(topic_id) { var topic = new Topic({id: topic_id}); self = this; topic.fetch({ success: function(resp, model, options){ self.message_head.html(model.title); }, error: function(model, resp, options) { alert(resp.responseText); } }); }, }); var LoginView = Backbone.View.extend({ el: "#login", wrapper: $('#wrapper'), events: { 'keypress #login_pwd': 'loginEvent', 'click .login_submit': 'login', 'keypress #reg_pwd_repeat': 'registeEvent', 'click .registe_submit': 'registe', }, hide: function() { this.wrapper.hide(); }, show: function() { this.wrapper.show(); }, loginEvent: function(evt) { if (evt.keyCode == 13) { this.login(evt); } }, login: function(evt){ var username_input = $('#login_username'); var pwd_input = $('#login_pwd'); var u = new User({ username: username_input.val(), password: pwd_input.val(), }); u.save(null, { url: '/login', success: function(model, resp, options){ g_user = resp; // 跳转到index appRouter.navigate('index', {trigger: true}); }, error: function(model, resp, options) { alert(resp.responseText); } }); }, registeEvent: function(evt) { if (evt.keyCode == 13) { this.registe(evt); } }, registe: function(evt){ var reg_username_input = $('#reg_username'); var reg_pwd_input = $('#reg_pwd'); var reg_pwd_repeat_input = $('#reg_pwd_repeat'); var u = new User({ username: reg_username_input.val(), password: reg_pwd_input.val(), password_repeat: reg_pwd_repeat_input.val(), }); u.save(null, { success: function(model, resp, options){ g_user = resp; // 跳转到index appRouter.navigate('index', {trigger: true}); }, error: function(model, resp, options) { alert(resp.responseText); } }); }, }); var UserView = Backbone.View.extend({ el: "#user_info", username: $('#username'), show: function(username) { this.username.html(username); this.$el.show(); }, }); var AppRouter = Backbone.Router.extend({ routes: { "login": "login", "index": "index", "topic/:id" : "topic", }, initialize: 
function(){ // 初始化项目, 显示首页 this.appView = new AppView(); this.loginView = new LoginView(); this.userView = new UserView(); this.indexFlag = false; }, login: function(){ this.loginView.show(); }, index: function(){ if (g_user && g_user.id != undefined) { this.appView.showTopic(); this.userView.show(g_user.username); this.loginView.hide(); this.indexFlag = true; // 标志已经到达主页了 } }, topic: function(topic_id) { if (g_user && g_user.id != undefined) { this.appView.showMessage(topic_id); this.userView.show(g_user.username); this.loginView.hide(); this.indexFlag = true; // 标志已经到达主页了 } }, }); var appRouter = new AppRouter(); var g_user = new User; g_user.fetch({ success: function(model, resp, options){ g_user = resp; Backbone.history.start({pustState: true}); if(g_user === null || g_user.id === undefined) { // 跳转到登录页面 appRouter.navigate('login', {trigger: true}); } else if (appRouter.indexFlag == false){ // 跳转到首页 appRouter.navigate('index', {trigger: true}); } }, error: function(model, resp, options) { alert(resp.responseText); } }); // 获取当前用户 });
yhbyun/wechat-1
src/static/js/chat.js
JavaScript
apache-2.0
10,736
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.registry.ui; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.openqa.selenium.Alert; import org.openqa.selenium.By; import org.openqa.selenium.JavascriptExecutor; import org.openqa.selenium.NoAlertPresentException; import org.openqa.selenium.NoSuchElementException; import org.openqa.selenium.WebDriver; import org.openqa.selenium.WebElement; import org.openqa.selenium.chrome.ChromeDriver; import org.openqa.selenium.interactions.Actions; import org.openqa.selenium.support.ui.ExpectedConditions; import org.openqa.selenium.support.ui.WebDriverWait; import io.github.bonigarcia.wdm.WebDriverManager; import java.util.List; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; public class ITCreateDuplicateBucket { private WebDriver driver; private String baseUrl; private boolean acceptNextAlert = true; private WebDriverWait wait; private StringBuffer verificationErrors = new StringBuffer(); @Before public void setUp() throws Exception { WebDriverManager.chromedriver().setup(); driver = new ChromeDriver(); baseUrl = "http://localhost:18080/nifi-registry"; wait = new WebDriverWait(driver, 30); } @Test public void 
testCreateDuplicateBucket() throws Exception { // go directly to settings by URL driver.get(baseUrl + "/#/administration/workflow"); // wait for administration route to load wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("[data-automation-id='no-buckets-message']"))); // confirm new bucket button exists wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("[data-automation-id='new-bucket-button']"))); // select new bucket button WebElement newBucketButton = driver.findElement(By.cssSelector("[data-automation-id='new-bucket-button']")); newBucketButton.click(); // wait for new bucket dialog wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("#nifi-registry-admin-create-bucket-dialog"))); // confirm bucket name field exists wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("#nifi-registry-admin-create-bucket-dialog input"))); // place cursor in bucket name field WebElement bucketNameInput = driver.findElement(By.cssSelector("#nifi-registry-admin-create-bucket-dialog input")); bucketNameInput.clear(); // name the bucket ABC bucketNameInput.sendKeys("ABC"); // confirm create bucket button exists wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("[data-automation-id='create-new-bucket-button']"))); // select create bucket button WebElement createNewBucketButton = driver.findElement(By.cssSelector("[data-automation-id='create-new-bucket-button']")); createNewBucketButton.click(); // wait for create bucket dialog to close wait.until(ExpectedConditions.invisibilityOfElementLocated(By.cssSelector("#nifi-registry-admin-create-bucket-dialog"))); // verify bucket added List<WebElement> bucketCount = driver.findElements(By.cssSelector("#nifi-registry-workflow-administration-buckets-list-container > div")); assertEquals(1, bucketCount.size()); // confirm new bucket button exists 
wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("[data-automation-id='new-bucket-button']"))); // select new bucket button newBucketButton = driver.findElement(By.cssSelector("[data-automation-id='new-bucket-button']")); newBucketButton.click(); // wait for new bucket dialog wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("#nifi-registry-admin-create-bucket-dialog"))); // confirm bucket name field exists wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("#nifi-registry-admin-create-bucket-dialog input"))); // place cursor in bucket name field bucketNameInput = wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("#nifi-registry-admin-create-bucket-dialog input"))); bucketNameInput.clear(); // name the bucket ABC again bucketNameInput.sendKeys("ABC"); // confirm create bucket button exists wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("[data-automation-id='create-new-bucket-button']"))); // select create bucket button createNewBucketButton = driver.findElement(By.cssSelector("[data-automation-id='create-new-bucket-button']")); createNewBucketButton.click(); // wait for the new bucket dialog to close wait.until(ExpectedConditions.invisibilityOfElementLocated(By.cssSelector("#nifi-registry-admin-create-bucket-dialog"))); // wait for error dialog wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("div.cdk-overlay-pane"))); // confirm the duplicate bucket error WebElement selectOKButton = wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("div.cdk-overlay-pane"))); Actions actions = new Actions(driver); actions.moveToElement(selectOKButton).click().build().perform(); // wait for the confirm dialog to close wait.until(ExpectedConditions.invisibilityOfElementLocated(By.cssSelector("div.cdk-overlay-pane"))); // verify bucket ABC still there bucketCount = 
driver.findElements(By.cssSelector("#nifi-registry-workflow-administration-buckets-list-container > div")); assertEquals(1, bucketCount.size()); } @After public void tearDown() throws Exception { // bucket cleanup // confirm all buckets checkbox exists wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("#nifi-registry-workflow-administration-buckets-list-container-column-header div.mat-checkbox-inner-container"))); // select all buckets checkbox WebElement selectAllCheckbox = driver.findElement(By.cssSelector("#nifi-registry-workflow-administration-buckets-list-container-column-header div.mat-checkbox-inner-container")); selectAllCheckbox.click(); // confirm actions drop down menu exists wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("#nifi-registry-workflow-administration-perspective-buckets-container button.mat-fds-primary"))); // select actions drop down WebElement selectActions = driver.findElement(By.cssSelector("#nifi-registry-workflow-administration-perspective-buckets-container button.mat-fds-primary")); selectActions.click(); // select delete WebElement selectDeleteBucket = driver.findElement(By.cssSelector("div.mat-menu-content button.mat-menu-item")); JavascriptExecutor executor = (JavascriptExecutor)driver; executor.executeScript("arguments[0].click();", selectDeleteBucket); // verify bucket deleted WebElement confirmDelete = wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("div.fds-dialog-actions button.mat-fds-warn"))); confirmDelete.click(); wait.until(ExpectedConditions.visibilityOfElementLocated(By.cssSelector("[data-automation-id='no-buckets-message']"))); driver.quit(); String verificationErrorString = verificationErrors.toString(); if (!"".equals(verificationErrorString)) { fail(verificationErrorString); } } private boolean isElementPresent(By by) { try { driver.findElement(by); return true; } catch (NoSuchElementException e) { return false; } } private boolean isAlertPresent() 
{ try { driver.switchTo().alert(); return true; } catch (NoAlertPresentException e) { return false; } } private String closeAlertAndGetItsText() { try { Alert alert = driver.switchTo().alert(); String alertText = alert.getText(); if (acceptNextAlert) { alert.accept(); } else { alert.dismiss(); } return alertText; } finally { acceptNextAlert = true; } } }
MikeThomsen/nifi
nifi-registry/nifi-registry-core/nifi-registry-web-ui/src/test/java/org/apache/nifi/registry/ui/ITCreateDuplicateBucket.java
Java
apache-2.0
9,295
/* * Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wso2.siddhi.core.util.extension.holder; import org.wso2.siddhi.core.config.ExecutionPlanContext; import org.wso2.siddhi.core.query.processor.stream.window.WindowProcessor; public class WindowProcessorExtensionHolder extends AbstractExtensionHolder { private static WindowProcessorExtensionHolder instance; private WindowProcessorExtensionHolder(ExecutionPlanContext executionPlanContext) { super(WindowProcessor.class, executionPlanContext); } public static WindowProcessorExtensionHolder getInstance(ExecutionPlanContext executionPlanContext) { if (instance == null) { instance = new WindowProcessorExtensionHolder(executionPlanContext); } return instance; } }
keizer619/siddhi
modules/siddhi-core/src/main/java/org/wso2/siddhi/core/util/extension/holder/WindowProcessorExtensionHolder.java
Java
apache-2.0
1,383
package com.vaadin.tests.elements.abstracttextfield; import com.vaadin.server.VaadinRequest; import com.vaadin.tests.components.AbstractTestUI; import com.vaadin.ui.AbstractField; import com.vaadin.ui.AbstractMultiSelect; import com.vaadin.ui.AbstractSingleSelect; import com.vaadin.ui.CheckBox; import com.vaadin.ui.CheckBoxGroup; import com.vaadin.ui.ComboBox; import com.vaadin.ui.DateField; import com.vaadin.ui.ListSelect; import com.vaadin.ui.NativeSelect; import com.vaadin.ui.PasswordField; import com.vaadin.ui.RadioButtonGroup; import com.vaadin.ui.RichTextArea; import com.vaadin.ui.Slider; import com.vaadin.ui.TextArea; import com.vaadin.ui.TextField; import com.vaadin.ui.TwinColSelect; public class AbstractFieldElementSetValueReadOnly extends AbstractTestUI { private AbstractField<?>[] fields = { new TextArea(), new TextField(), new DateField(), new PasswordField(), new CheckBox(), new RichTextArea(), new Slider() }; private AbstractMultiSelect<?>[] multiSelects = { new ListSelect(), new CheckBoxGroup(), new TwinColSelect() }; private AbstractSingleSelect<?>[] singleSelects = { new ComboBox(), new NativeSelect(), new RadioButtonGroup() }; @Override protected void setup(VaadinRequest request) { for (AbstractField field : fields) { field.setReadOnly(true); addComponent(field); } for (AbstractMultiSelect multiSelect : multiSelects) { multiSelect.setReadOnly(true); addComponent(multiSelect); } for (AbstractSingleSelect singleSelect : singleSelects) { singleSelect.setReadOnly(true); addComponent(singleSelect); } } @Override protected String getTestDescription() { return "When vaadin element is set ReadOnly, setValue() method should raise an exception"; } @Override protected Integer getTicketNumber() { return 14068; } }
peterl1084/framework
uitest/src/main/java/com/vaadin/tests/elements/abstracttextfield/AbstractFieldElementSetValueReadOnly.java
Java
apache-2.0
1,989
/* * Copyright 2017-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.rules.query; import com.facebook.buck.query.CachingQueryEvaluator; import com.facebook.buck.query.QueryEvaluator; import com.facebook.buck.query.QueryException; import com.facebook.buck.query.QueryExpression; import com.facebook.buck.rules.TargetGraph; import com.google.common.annotations.VisibleForTesting; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; import java.util.Objects; import java.util.concurrent.ExecutionException; /** Cache that evaluates and stores the result of a dependency {@link Query}. */ public class QueryCache { private final LoadingCache<TargetGraph, CachingQueryEvaluator> evaluators; public QueryCache() { evaluators = CacheBuilder.newBuilder().build(CacheLoader.from(CachingQueryEvaluator::new)); } QueryEvaluator getQueryEvaluator(TargetGraph targetGraph) { try { return evaluators.get(targetGraph); } catch (ExecutionException e) { throw new RuntimeException("Failed to obtain query evaluator", e); } } @VisibleForTesting boolean isPresent(TargetGraph targetGraph, GraphEnhancementQueryEnvironment env, Query query) throws ExecutionException, QueryException { CachingQueryEvaluator evaluator = evaluators.getIfPresent(targetGraph); return Objects.nonNull(evaluator) && evaluator.isPresent(QueryExpression.parse(query.getQuery(), env)); } }
dsyang/buck
src/com/facebook/buck/rules/query/QueryCache.java
Java
apache-2.0
2,062
/* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.wso2.andes.server.management; import java.lang.reflect.InvocationHandler; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.security.AccessControlContext; import java.security.AccessController; import java.util.Set; import javax.management.Attribute; import javax.management.JMException; import javax.management.MBeanInfo; import javax.management.MBeanOperationInfo; import javax.management.MBeanServer; import javax.management.Notification; import javax.management.NotificationListener; import javax.management.ObjectName; import javax.management.remote.JMXConnectionNotification; import javax.management.remote.JMXPrincipal; import javax.management.remote.MBeanServerForwarder; import javax.security.auth.Subject; import org.apache.log4j.Logger; import org.wso2.andes.server.logging.actors.ManagementActor; import org.wso2.andes.server.logging.messages.ManagementConsoleMessages; import org.wso2.andes.server.registry.ApplicationRegistry; import org.wso2.andes.server.security.SecurityManager; import org.wso2.andes.server.security.access.Operation; /** * This class can be used by the JMXConnectorServer as an 
InvocationHandler for the mbean operations. It delegates * JMX access decisions to the SecurityPlugin. */ public class MBeanInvocationHandlerImpl implements InvocationHandler, NotificationListener { private static final Logger _logger = Logger.getLogger(MBeanInvocationHandlerImpl.class); private final static String DELEGATE = "JMImplementation:type=MBeanServerDelegate"; private MBeanServer _mbs; private static ManagementActor _logActor; public static MBeanServerForwarder newProxyInstance() { final InvocationHandler handler = new MBeanInvocationHandlerImpl(); final Class<?>[] interfaces = new Class[] { MBeanServerForwarder.class }; _logActor = new ManagementActor(ApplicationRegistry.getInstance().getRootMessageLogger()); Object proxy = Proxy.newProxyInstance(MBeanServerForwarder.class.getClassLoader(), interfaces, handler); return MBeanServerForwarder.class.cast(proxy); } public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { final String methodName = getMethodName(method, args); if (methodName.equals("getMBeanServer")) { return _mbs; } if (methodName.equals("setMBeanServer")) { if (args[0] == null) { throw new IllegalArgumentException("Null MBeanServer"); } if (_mbs != null) { throw new IllegalArgumentException("MBeanServer object already initialized"); } _mbs = (MBeanServer) args[0]; return null; } // Retrieve Subject from current AccessControlContext AccessControlContext acc = AccessController.getContext(); Subject subject = Subject.getSubject(acc); try { // Allow operations performed locally on behalf of the connector server itself if (subject == null) { return method.invoke(_mbs, args); } if (args == null || DELEGATE.equals(args[0])) { return method.invoke(_mbs, args); } // Restrict access to "createMBean" and "unregisterMBean" to any user if (methodName.equals("createMBean") || methodName.equals("unregisterMBean")) { _logger.debug("User trying to create or unregister an MBean"); throw new SecurityException("Access denied: " + 
methodName); } // Allow querying available object names if (methodName.equals("queryNames")) { return method.invoke(_mbs, args); } // Retrieve JMXPrincipal from Subject Set<JMXPrincipal> principals = subject.getPrincipals(JMXPrincipal.class); if (principals == null || principals.isEmpty()) { throw new SecurityException("Access denied: no JMX principal"); } // Save the subject SecurityManager.setThreadSubject(subject); // Get the component, type and impact, which may be null String type = getType(method, args); String vhost = getVirtualHost(method, args); int impact = getImpact(method, args); // Get the security manager for the virtual host (if set) SecurityManager security; if (vhost == null) { security = ApplicationRegistry.getInstance().getSecurityManager(); } else { security = ApplicationRegistry.getInstance().getVirtualHostRegistry().getVirtualHost(vhost).getSecurityManager(); } if (isAccessMethod(methodName) || impact == MBeanOperationInfo.INFO) { // Check for read-only method invocation permission if (!security.authoriseMethod(Operation.ACCESS, type, methodName)) { throw new SecurityException("Permission denied: Access " + methodName); } } else if (isUpdateMethod(methodName)) { // Check for setting properties permission if (!security.authoriseMethod(Operation.UPDATE, type, methodName)) { throw new SecurityException("Permission denied: Update " + methodName); } } else { // Check for invoking/executing method action/operation permission if (!security.authoriseMethod(Operation.EXECUTE, type, methodName)) { throw new SecurityException("Permission denied: Execute " + methodName); } } // Actually invoke the method return method.invoke(_mbs, args); } catch (InvocationTargetException e) { throw e.getTargetException(); } } private String getType(Method method, Object[] args) { if (args[0] instanceof ObjectName) { ObjectName object = (ObjectName) args[0]; String type = object.getKeyProperty("type"); return type; } return null; } private String getVirtualHost(Method 
method, Object[] args) { if (args[0] instanceof ObjectName) { ObjectName object = (ObjectName) args[0]; String vhost = object.getKeyProperty("VirtualHost"); if(vhost != null) { try { //if the name is quoted in the ObjectName, unquote it vhost = ObjectName.unquote(vhost); } catch(IllegalArgumentException e) { //ignore, this just means the name is not quoted //and can be left unchanged } } return vhost; } return null; } private String getMethodName(Method method, Object[] args) { String methodName = method.getName(); // if arguments are set, try and work out real method name if (args != null && args.length >= 1 && args[0] instanceof ObjectName) { if (methodName.equals("getAttribute")) { methodName = "get" + (String) args[1]; } else if (methodName.equals("setAttribute")) { methodName = "set" + ((Attribute) args[1]).getName(); } else if (methodName.equals("invoke")) { methodName = (String) args[1]; } } return methodName; } private int getImpact(Method method, Object[] args) { //handle invocation of other methods on mbeans if ((args[0] instanceof ObjectName) && (method.getName().equals("invoke"))) { //get invoked method name String mbeanMethod = (args.length > 1) ? 
(String) args[1] : null; if (mbeanMethod == null) { return -1; } try { //Get the impact attribute MBeanInfo mbeanInfo = _mbs.getMBeanInfo((ObjectName) args[0]); if (mbeanInfo != null) { MBeanOperationInfo[] opInfos = mbeanInfo.getOperations(); for (MBeanOperationInfo opInfo : opInfos) { if (opInfo.getName().equals(mbeanMethod)) { return opInfo.getImpact(); } } } } catch (JMException ex) { _logger.error("Unable to determine mbean impact for method : " + mbeanMethod, ex); } } return -1; } private boolean isAccessMethod(String methodName) { //handle standard get/query/is methods from MBeanServer return (methodName.startsWith("query") || methodName.startsWith("get") || methodName.startsWith("is")); } private boolean isUpdateMethod(String methodName) { //handle standard set methods from MBeanServer return methodName.startsWith("set"); } public void handleNotification(Notification notification, Object handback) { assert notification instanceof JMXConnectionNotification; // only RMI Connections are serviced here, Local API atta // rmi://169.24.29.116 guest 3 String[] connectionData = ((JMXConnectionNotification) notification).getConnectionId().split(" "); String user = connectionData[1]; if (notification.getType().equals(JMXConnectionNotification.OPENED)) { _logActor.message(ManagementConsoleMessages.OPEN(user)); } else if (notification.getType().equals(JMXConnectionNotification.CLOSED) || notification.getType().equals(JMXConnectionNotification.FAILED)) { _logActor.message(ManagementConsoleMessages.CLOSE()); } } }
akalankapagoda/andes
modules/andes-core/broker/src/main/java/org/wso2/andes/server/management/MBeanInvocationHandlerImpl.java
Java
apache-2.0
11,273
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # coding: utf-8 # pylint: disable=wildcard-import """Cauchy distribution""" __all__ = ['Cauchy'] from numbers import Number from numpy import nan, pi from .constraint import Real from .distribution import Distribution from .utils import sample_n_shape_converter from .... import np class Cauchy(Distribution): r"""Create a relaxed Cauchy distribution object. Parameters ---------- loc : Tensor or scalar, default 0 mode or median of the distribution scale : Tensor or scalar, default 1 half width at half maximum """ # pylint: disable=abstract-method has_grad = True support = Real() arg_constraints = {'loc': Real(), 'scale': Real()} def __init__(self, loc=0.0, scale=1.0, validate_args=None): self.loc = loc self.scale = scale super(Cauchy, self).__init__( event_dim=0, validate_args=validate_args) @property def mean(self): return nan @property def variance(self): return nan def sample(self, size=None): # TODO: Implement sampling op in the backend. # `np.zeros_like` does not support scalar at this moment. 
if (isinstance(self.loc, Number), isinstance(self.scale, Number)) == (True, True): u = np.random.uniform(size=size) else: u = np.random.uniform(np.zeros_like( # pylint: disable=too-many-function-args self.loc + self.scale), size=size) return self.icdf(u) def sample_n(self, size=None): return self.sample(sample_n_shape_converter(size)) def log_prob(self, value): if self._validate_args: self._validate_samples(value) return (-np.log(pi) - np.log(self.scale) - np.log(1 + ((value - self.loc) / self.scale) ** 2)) def cdf(self, value): if self._validate_args: self._validate_samples(value) return np.arctan((value - self.loc) / self.scale) / pi + 0.5 def icdf(self, value): return np.tan(pi * (value - 0.5)) * self.scale + self.loc def entropy(self): return np.log(4 * pi) + np.log(self.scale)
szha/mxnet
python/mxnet/gluon/probability/distributions/cauchy.py
Python
apache-2.0
2,935
package org.batfish.datamodel.vendor_family.juniper; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.annotations.VisibleForTesting; import java.io.Serializable; import java.util.Collections; import java.util.SortedMap; import java.util.TreeMap; import org.batfish.datamodel.AaaAuthenticationLoginList; import org.batfish.datamodel.AuthenticationMethod; import org.batfish.datamodel.Line; public class JuniperFamily implements Serializable { private static final String PROP_LINES = "lines"; private static final String PROP_ROOT_AUTHENTICATION_ENCRYPTED_PASSWORD = "rootAuthenticationEncryptedPassword"; private static final String PROP_SYSTEM_AUTHENTICATION_ORDER = "systemAuthenticationOrder"; private static final String PROP_TACPLUS_SERVERS = "tacplusServers"; @VisibleForTesting public static final String CONSOLE_LINE_NAME = "console"; @VisibleForTesting public static final String AUXILIARY_LINE_NAME = "auxiliary"; private SortedMap<String, Line> _lines; private String _rootAuthenticationEncryptedPassword; private AaaAuthenticationLoginList _systemAuthenticationOrder; private SortedMap<String, TacplusServer> _tacplusServers; public JuniperFamily() { _lines = new TreeMap<>(); _tacplusServers = new TreeMap<>(); _systemAuthenticationOrder = // default authentication order is just password authentication new AaaAuthenticationLoginList( Collections.singletonList(AuthenticationMethod.PASSWORD), true); // Juniper has by default the console and aux lines enabled Line console = new Line(CONSOLE_LINE_NAME); console.setAaaAuthenticationLoginList(_systemAuthenticationOrder); _lines.put(CONSOLE_LINE_NAME, console); Line aux = new Line(AUXILIARY_LINE_NAME); aux.setAaaAuthenticationLoginList(_systemAuthenticationOrder); _lines.put(AUXILIARY_LINE_NAME, aux); } @JsonProperty(PROP_LINES) public SortedMap<String, Line> getLines() { return _lines; } @JsonProperty(PROP_ROOT_AUTHENTICATION_ENCRYPTED_PASSWORD) public String 
getRootAuthenticationEncryptedPassword() { return _rootAuthenticationEncryptedPassword; } @JsonProperty(PROP_SYSTEM_AUTHENTICATION_ORDER) public AaaAuthenticationLoginList getSystemAuthenticationOrder() { return _systemAuthenticationOrder; } @JsonProperty(PROP_TACPLUS_SERVERS) public SortedMap<String, TacplusServer> getTacplusServers() { return _tacplusServers; } @JsonProperty(PROP_LINES) public void setLines(SortedMap<String, Line> lines) { _lines = lines; } @JsonProperty(PROP_ROOT_AUTHENTICATION_ENCRYPTED_PASSWORD) public void setRootAuthenticationEncryptedPassword(String rootAuthenticationEncryptedPassword) { _rootAuthenticationEncryptedPassword = rootAuthenticationEncryptedPassword; } @JsonProperty(PROP_SYSTEM_AUTHENTICATION_ORDER) public void setSystemAuthenticationOrder(AaaAuthenticationLoginList authenticationOrder) { _systemAuthenticationOrder = authenticationOrder; } @JsonProperty(PROP_TACPLUS_SERVERS) public void setTacplusServers(SortedMap<String, TacplusServer> tacplusServers) { _tacplusServers = tacplusServers; } }
batfish/batfish
projects/batfish-common-protocol/src/main/java/org/batfish/datamodel/vendor_family/juniper/JuniperFamily.java
Java
apache-2.0
3,192
package client import ( "runtime" "text/template" "time" "github.com/docker/docker/api" "github.com/docker/docker/api/types" Cli "github.com/docker/docker/cli" "github.com/docker/docker/dockerversion" flag "github.com/docker/docker/pkg/mflag" "github.com/docker/docker/utils" ) var versionTemplate = `Client: Version: {{.Client.Version}} API version: {{.Client.APIVersion}} Go version: {{.Client.GoVersion}} Git commit: {{.Client.GitCommit}} Built: {{.Client.BuildTime}} OS/Arch: {{.Client.Os}}/{{.Client.Arch}}{{if .Client.Experimental}} Experimental: {{.Client.Experimental}}{{end}}{{if .ServerOK}} Server: Version: {{.Server.Version}} API version: {{.Server.APIVersion}} Go version: {{.Server.GoVersion}} Git commit: {{.Server.GitCommit}} Built: {{.Server.BuildTime}} OS/Arch: {{.Server.Os}}/{{.Server.Arch}}{{if .Server.Experimental}} Experimental: {{.Server.Experimental}}{{end}}{{end}}` // CmdVersion shows Docker version information. // // Available version information is shown for: client Docker version, client API version, client Go version, client Git commit, client OS/Arch, server Docker version, server API version, server Go version, server Git commit, and server OS/Arch. 
// // Usage: docker version func (cli *DockerCli) CmdVersion(args ...string) (err error) { cmd := Cli.Subcmd("version", nil, Cli.DockerCommands["version"].Description, true) tmplStr := cmd.String([]string{"f", "#format", "-format"}, "", "Format the output using the given go template") cmd.Require(flag.Exact, 0) cmd.ParseFlags(args, true) templateFormat := versionTemplate if *tmplStr != "" { templateFormat = *tmplStr } var tmpl *template.Template if tmpl, err = template.New("").Funcs(funcMap).Parse(templateFormat); err != nil { return Cli.StatusError{StatusCode: 64, Status: "Template parsing error: " + err.Error()} } vd := types.VersionResponse{ Client: &types.Version{ Version: dockerversion.Version, APIVersion: api.Version, GoVersion: runtime.Version(), GitCommit: dockerversion.GitCommit, BuildTime: dockerversion.BuildTime, Os: runtime.GOOS, Arch: runtime.GOARCH, Experimental: utils.ExperimentalBuild(), }, } serverVersion, err := cli.client.ServerVersion() if err == nil { vd.Server = &serverVersion } // first we need to make BuildTime more human friendly t, errTime := time.Parse(time.RFC3339Nano, vd.Client.BuildTime) if errTime == nil { vd.Client.BuildTime = t.Format(time.ANSIC) } if vd.ServerOK() { t, errTime = time.Parse(time.RFC3339Nano, vd.Server.BuildTime) if errTime == nil { vd.Server.BuildTime = t.Format(time.ANSIC) } } if err2 := tmpl.Execute(cli.out, vd); err2 != nil && err == nil { err = err2 } cli.out.Write([]byte{'\n'}) return err }
mauidev/docker
api/client/version.go
GO
apache-2.0
2,847
/* */ "format cjs"; /** * Determines whether two sequences are equal by comparing the elements pairwise using a specified equality comparer. * * @example * var res = res = source.sequenceEqual([1,2,3]); * var res = res = source.sequenceEqual([{ value: 42 }], function (x, y) { return x.value === y.value; }); * 3 - res = source.sequenceEqual(Rx.Observable.returnValue(42)); * 4 - res = source.sequenceEqual(Rx.Observable.returnValue({ value: 42 }), function (x, y) { return x.value === y.value; }); * @param {Observable} second Second observable sequence or array to compare. * @param {Function} [comparer] Comparer used to compare elements of both sequences. * @returns {Observable} An observable sequence that contains a single element which indicates whether both sequences are of equal length and their corresponding elements are equal according to the specified equality comparer. */ observableProto.sequenceEqual = function (second, comparer) { var first = this; comparer || (comparer = defaultComparer); return new AnonymousObservable(function (o) { var donel = false, doner = false, ql = [], qr = []; var subscription1 = first.subscribe(function (x) { var equal, v; if (qr.length > 0) { v = qr.shift(); try { equal = comparer(v, x); } catch (e) { o.onError(e); return; } if (!equal) { o.onNext(false); o.onCompleted(); } } else if (doner) { o.onNext(false); o.onCompleted(); } else { ql.push(x); } }, function(e) { o.onError(e); }, function () { donel = true; if (ql.length === 0) { if (qr.length > 0) { o.onNext(false); o.onCompleted(); } else if (doner) { o.onNext(true); o.onCompleted(); } } }); (isArrayLike(second) || isIterable(second)) && (second = observableFrom(second)); isPromise(second) && (second = observableFromPromise(second)); var subscription2 = second.subscribe(function (x) { var equal; if (ql.length > 0) { var v = ql.shift(); try { equal = comparer(v, x); } catch (exception) { o.onError(exception); return; } if (!equal) { o.onNext(false); o.onCompleted(); } } else if 
(donel) { o.onNext(false); o.onCompleted(); } else { qr.push(x); } }, function(e) { o.onError(e); }, function () { doner = true; if (qr.length === 0) { if (ql.length > 0) { o.onNext(false); o.onCompleted(); } else if (donel) { o.onNext(true); o.onCompleted(); } } }); return new CompositeDisposable(subscription1, subscription2); }, first); };
cfraz89/moonrock-js-starter
jspm_packages/npm/rx@2.5.3/src/core/linq/observable/sequenceequal.js
JavaScript
apache-2.0
3,046
/* * JBoss, Home of Professional Open Source * Copyright 2010 Red Hat Inc. and/or its affiliates and other * contributors as indicated by the @author tags. All rights reserved. * See the copyright.txt in the distribution for a full listing of * individual contributors. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. */ /** * Entries which are stored in data containers. This package contains different implementations of * entries based on the information needed to store an entry. Certain entries need more information - such as timestamps * and lifespans, if they are used - than others, and the appropriate implementation is selected dynamically. This * helps minimize Infinispan's memory requirements without storing unnecessary metadata. */ package org.infinispan.container.entries;
nmldiegues/stibt
infinispan/core/src/main/java/org/infinispan/container/entries/package-info.java
Java
apache-2.0
1,532
from django.conf.urls import url from admin.nodes import views app_name = 'admin' urlpatterns = [ url(r'^$', views.NodeFormView.as_view(), name='search'), url(r'^flagged_spam$', views.NodeFlaggedSpamList.as_view(), name='flagged-spam'), url(r'^known_spam$', views.NodeKnownSpamList.as_view(), name='known-spam'), url(r'^known_ham$', views.NodeKnownHamList.as_view(), name='known-ham'), url(r'^(?P<guid>[a-z0-9]+)/$', views.NodeView.as_view(), name='node'), url(r'^(?P<guid>[a-z0-9]+)/logs/$', views.AdminNodeLogView.as_view(), name='node-logs'), url(r'^registration_list/$', views.RegistrationListView.as_view(), name='registrations'), url(r'^stuck_registration_list/$', views.StuckRegistrationListView.as_view(), name='stuck-registrations'), url(r'^(?P<guid>[a-z0-9]+)/update_embargo/$', views.RegistrationUpdateEmbargoView.as_view(), name='update_embargo'), url(r'^(?P<guid>[a-z0-9]+)/remove/$', views.NodeDeleteView.as_view(), name='remove'), url(r'^(?P<guid>[a-z0-9]+)/restore/$', views.NodeDeleteView.as_view(), name='restore'), url(r'^(?P<guid>[a-z0-9]+)/confirm_spam/$', views.NodeConfirmSpamView.as_view(), name='confirm-spam'), url(r'^(?P<guid>[a-z0-9]+)/confirm_ham/$', views.NodeConfirmHamView.as_view(), name='confirm-ham'), url(r'^(?P<guid>[a-z0-9]+)/reindex_share_node/$', views.NodeReindexShare.as_view(), name='reindex-share-node'), url(r'^(?P<guid>[a-z0-9]+)/reindex_elastic_node/$', views.NodeReindexElastic.as_view(), name='reindex-elastic-node'), url(r'^(?P<guid>[a-z0-9]+)/restart_stuck_registrations/$', views.RestartStuckRegistrationsView.as_view(), name='restart-stuck-registrations'), url(r'^(?P<guid>[a-z0-9]+)/remove_stuck_registrations/$', views.RemoveStuckRegistrationsView.as_view(), name='remove-stuck-registrations'), url(r'^(?P<guid>[a-z0-9]+)/remove_user/(?P<user_id>[a-z0-9]+)/$', views.NodeRemoveContributorView.as_view(), name='remove_user'), ]
pattisdr/osf.io
admin/nodes/urls.py
Python
apache-2.0
2,100
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.asterix.external.classad; import java.nio.charset.StandardCharsets; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.Calendar; import java.util.Date; import java.util.Random; import java.util.TimeZone; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.asterix.om.base.AMutableInt32; public class Util { // convert escapes in-place // the string can only shrink while converting escapes so we can safely convert in-place. 
private static final Pattern OCTAL = Pattern.compile("\\\\([0-3][0-7]{0,2})"); public static boolean convertEscapes(AMutableCharArrayString text) { boolean validStr = true; if (text.getLength() == 0) { return true; } int dest = 0; boolean hasOctal = false; for (int source = 0; source < text.getLength(); ++source) { char ch = text.charAt(source); // scan for escapes, a terminating slash cannot be an escape if (ch == '\\' && source < text.getLength() - 1) { ++source; // skip the \ character ch = text.charAt(source); // The escape part should be re-validated switch (ch) { case 'b': ch = '\b'; break; case 'f': ch = '\f'; break; case 'n': ch = '\n'; break; case 'r': ch = '\r'; break; case 't': ch = '\t'; break; case '\\': ch = '\\'; break; default: if (Lexer.isodigit(ch)) { hasOctal = true; ++dest; } break; } } if (dest == source) { // no need to assign ch to text when we haven't seen any escapes yet. // text[dest] = ch; ++dest; } else { try { text.erase(dest); text.setChar(dest, ch); ++dest; --source; } catch (Throwable th) { th.printStackTrace(); } } } if (dest < text.getLength()) { text.erase(dest); text.setLength(dest); } // silly, but to fulfull the original contract for this function // we need to remove the last character in the string if it is a '\0' // (earlier logic guaranteed that a '\0' can ONLY be the last character) if (text.getLength() > 0 && (text.charAt(text.getLength() - 1) == '\0')) { text.erase(text.getLength() - 1); } if (hasOctal) { Matcher m = OCTAL.matcher(text.toString()); StringBuffer out = new StringBuffer(); while (m.find()) { int octet = Integer.parseInt(m.group(1), 8); if (octet == 0 || octet > 255) { return false; } m.appendReplacement(out, String.valueOf((char) octet)); } m.appendTail(out); text.setValue(new String(out.toString().getBytes(StandardCharsets.ISO_8859_1), StandardCharsets.UTF_8)); } return validStr; } public static Random initialized = new Random((new Date()).getTime()); public static int getRandomInteger() { return 
initialized.nextInt(); } public static double getRandomReal() { return initialized.nextDouble(); } public static int timezoneOffset(ClassAdTime clock) { return clock.getOffset(); } public static void getLocalTime(ClassAdTime now, ClassAdTime localtm) { localtm.setValue(Calendar.getInstance(), now); localtm.isAbsolute(true); } public static void absTimeToString(ClassAdTime atime, AMutableCharArrayString buffer) { DateFormat formatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss"); //"yyyy-MM-dd'T'HH:mm:ss" //2004-01-01T00:00:00+11:00 formatter.setTimeZone(TimeZone.getTimeZone("GMT")); buffer.appendString(formatter.format(atime.getCalendar().getTime())); buffer.appendString( (atime.getOffset() >= 0 ? "+" : "-") + String.format("%02d", (Math.abs(atime.getOffset()) / 3600000)) + ":" + String.format("%02d", ((Math.abs(atime.getOffset() / 60) % 60)))); } public static void relTimeToString(long rsecs, AMutableCharArrayString buffer) { double fractional_seconds; int days, hrs, mins; double secs; if (rsecs < 0) { buffer.appendChar('-'); rsecs = -rsecs; } fractional_seconds = rsecs % 1000; days = (int) (rsecs / 1000); hrs = days % 86400; mins = hrs % 3600; secs = (mins % 60) + (fractional_seconds / 1000.0); days = days / 86400; hrs = hrs / 3600; mins = mins / 60; if (days != 0) { if (fractional_seconds == 0) { buffer.appendString(String.format("%d+%02d:%02d:%02d", days, hrs, mins, (int) secs)); } else { buffer.appendString(String.format("%d+%02d:%02d:%g", days, hrs, mins, secs)); } } else if (hrs != 0) { if (fractional_seconds == 0) { buffer.appendString(String.format("%02d:%02d:%02d", hrs, mins, (int) secs)); } else { buffer.appendString(String.format("%02d:%02d:%02g", hrs, mins, secs)); } } else if (mins != 0) { if (fractional_seconds == 0) { buffer.appendString(String.format("%02d:%02d", mins, (int) secs)); } else { buffer.appendString(String.format("%02d:%02g", mins, secs)); } return; } else { if (fractional_seconds == 0) { buffer.appendString(String.format("%02d", 
(int) secs)); } else { buffer.appendString(String.format("%02g", secs)); } } } public static void dayNumbers(int year, int month, int day, AMutableInt32 weekday, AMutableInt32 yearday) { int fixed = fixedFromGregorian(year, month, day); int jan1_fixed = fixedFromGregorian(year, 1, 1); weekday.setValue(fixed % 7); yearday.setValue(fixed - jan1_fixed); return; } public static int fixedFromGregorian(int year, int month, int day) { int fixed; int month_adjustment; if (month <= 2) { month_adjustment = 0; } else if (isLeapYear(year)) { month_adjustment = -1; } else { month_adjustment = -2; } fixed = 365 * (year - 1) + ((year - 1) / 4) - ((year - 1) / 100) + ((year - 1) / 400) + ((367 * month - 362) / 12) + month_adjustment + day; return fixed; } public static boolean isLeapYear(int year) { int mod4; int mod400; boolean leap_year; mod4 = year % 4; mod400 = year % 400; if (mod4 == 0 && mod400 != 100 && mod400 != 200 && mod400 != 300) { leap_year = true; } else { leap_year = false; } return leap_year; } public static int isInf(double x) { if (Double.isInfinite(x)) { return (x < 0.0) ? (-1) : 1; } return 0; } public static boolean isNan(double x) { return Double.isNaN(x); } }
ty1er/incubator-asterixdb
asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/classad/Util.java
Java
apache-2.0
8,750
// Copyright 2016 LINE Corporation // // LINE Corporation licenses this file to you under the Apache License, // version 2.0 (the "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at: // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, WITHOUT // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the // License for the specific language governing permissions and limitations // under the License. package linebot import ( "context" "fmt" ) // IssueLinkToken method // https://developers.line.me/en/reference/messaging-api/#issue-link-token func (client *Client) IssueLinkToken(userID string) *IssueLinkTokenCall { return &IssueLinkTokenCall{ c: client, userID: userID, } } // IssueLinkTokenCall type type IssueLinkTokenCall struct { c *Client ctx context.Context userID string } // WithContext method func (call *IssueLinkTokenCall) WithContext(ctx context.Context) *IssueLinkTokenCall { call.ctx = ctx return call } // Do method func (call *IssueLinkTokenCall) Do() (*LinkTokenResponse, error) { endpoint := fmt.Sprintf(APIEndpointLinkToken, call.userID) res, err := call.c.post(call.ctx, endpoint, nil) if err != nil { return nil, err } defer closeResponse(res) return decodeToLinkTokenResponse(res) }
kkdai/LineBotTemplate
vendor/github.com/line/line-bot-sdk-go/v7/linebot/account_link.go
GO
apache-2.0
1,468
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from airflow.models import BaseOperator from airflow.utils.decorators import apply_defaults class DummyOperator(BaseOperator): """ Operator that does literally nothing. It can be used to group tasks in a DAG. """ ui_color = '#e8f7e4' @apply_defaults def __init__(self, *args, **kwargs) -> None: super().__init__(*args, **kwargs) def execute(self, context): pass
wileeam/airflow
airflow/operators/dummy_operator.py
Python
apache-2.0
1,203
/* * Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wso2.carbon.identity.entitlement.common.dto; public class ElementCountDTO { private int subElementCount; private int attributeDesignatorsElementCount; private int attributeValueElementCount; private int attributeSelectorElementCount; public int getSubElementCount() { return subElementCount; } public void setSubElementCount(int subElementCount) { this.subElementCount = subElementCount; } public int getAttributeSelectorElementCount() { return attributeSelectorElementCount; } public void setAttributeSelectorElementCount(int attributeSelectorElementCount) { this.attributeSelectorElementCount = attributeSelectorElementCount; } public int getAttributeValueElementCount() { return attributeValueElementCount; } public void setAttributeValueElementCount(int attributeValueElementCount) { this.attributeValueElementCount = attributeValueElementCount; } public int getAttributeDesignatorsElementCount() { return attributeDesignatorsElementCount; } public void setAttributeDesignatorsElementCount(int attributeDesignatorsElementCount) { this.attributeDesignatorsElementCount = attributeDesignatorsElementCount; } }
dharshanaw/carbon-identity-framework
components/entitlement/org.wso2.carbon.identity.entitlement.common/src/main/java/org/wso2/carbon/identity/entitlement/common/dto/ElementCountDTO.java
Java
apache-2.0
1,931
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.distributedlog.common.concurrent; import java.util.LinkedList; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.RejectedExecutionException; import java.util.function.Supplier; import javax.annotation.concurrent.GuardedBy; import org.apache.bookkeeper.common.concurrent.FutureUtils; import org.apache.distributedlog.common.util.Permit; /** * An AsyncSemaphore is a traditional semaphore but with asynchronous * execution. * * <p>Grabbing a permit returns a `Future[Permit]`. * * <p>Basic usage: * {{{ * val semaphore = new AsyncSemaphore(n) * ... * semaphore.acquireAndRun() { * somethingThatReturnsFutureT() * } * }}} * * <p>Calls to acquire() and acquireAndRun are serialized, and tickets are * given out fairly (in order of arrival). 
*/ public class AsyncSemaphore { private final Optional<Integer> maxWaiters; private final Permit semaphorePermit = new Permit() { @Override public void release() { releasePermit(this); } }; @GuardedBy("this") private Optional<Throwable> closed = Optional.empty(); @GuardedBy("this") private final LinkedList<CompletableFuture<Permit>> waitq; @GuardedBy("this") private int availablePermits; public AsyncSemaphore(int initialPermits, Optional<Integer> maxWaiters) { this.availablePermits = initialPermits; this.waitq = new LinkedList<>(); this.maxWaiters = maxWaiters; } private synchronized void releasePermit(Permit permit) { CompletableFuture<Permit> next = waitq.pollFirst(); if (null != next) { next.complete(permit); } else { availablePermits += 1; } } private CompletableFuture<Permit> newFuturePermit() { return FutureUtils.value(semaphorePermit); } /** * Acquire a [[Permit]], asynchronously. * * <p>Be sure to `permit.release()` in a * - `finally` block of your `onSuccess` callback * - `ensure` block of your future chain * * <p>Interrupting this future is only advisory, and will not release the permit * if the future has already been satisfied. * * @note This method always return the same instance of [[Permit]]. * @return a `Future[Permit]` when the `Future` is satisfied, computation can proceed, * or a Future.Exception[RejectedExecutionException]` if the configured maximum * number of waiters would be exceeded. 
*/ public synchronized CompletableFuture<Permit> acquire() { if (closed.isPresent()) { return FutureUtils.exception(closed.get()); } if (availablePermits > 0) { availablePermits -= 1; return newFuturePermit(); } else { if (maxWaiters.isPresent() && waitq.size() >= maxWaiters.get()) { return FutureUtils.exception(new RejectedExecutionException("Max waiters exceeded")); } else { CompletableFuture<Permit> future = FutureUtils.createFuture(); future.whenComplete((value, cause) -> { synchronized (AsyncSemaphore.this) { waitq.remove(future); } }); waitq.addLast(future); return future; } } } /** * Fail the semaphore and stop it from distributing further permits. Subsequent * attempts to acquire a permit fail with `exc`. This semaphore's queued waiters * are also failed with `exc`. */ public synchronized void fail(Throwable exc) { closed = Optional.of(exc); for (CompletableFuture<Permit> future : waitq) { future.cancel(true); } waitq.clear(); } /** * Execute the function asynchronously when a permit becomes available. * * <p>If the function throws a non-fatal exception, the exception is returned as part of the Future. * For all exceptions, the permit would be released before returning. * * @return a Future[T] equivalent to the return value of the input function. If the configured * maximum value of waitq is reached, Future.Exception[RejectedExecutionException] is * returned. */ public <T> CompletableFuture<T> acquireAndRun(Supplier<CompletableFuture<T>> func) { return acquire().thenCompose(permit -> { CompletableFuture<T> future; try { future = func.get(); future.whenComplete((value, cause) -> permit.release()); return future; } catch (Throwable cause) { permit.release(); throw cause; } }); } }
sijie/bookkeeper
stream/distributedlog/common/src/main/java/org/apache/distributedlog/common/concurrent/AsyncSemaphore.java
Java
apache-2.0
5,678
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.vfs.encoding; import com.intellij.AppTopics; import com.intellij.openapi.Disposable; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.fileEditor.FileDocumentManager; import com.intellij.openapi.fileEditor.FileDocumentManagerAdapter; import com.intellij.openapi.fileEditor.impl.LoadTextUtil; import com.intellij.openapi.fileTypes.FileType; import com.intellij.openapi.fileTypes.FileTypes; import com.intellij.openapi.fileTypes.StdFileTypes; import com.intellij.openapi.project.Project; import com.intellij.openapi.project.ProjectLocator; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.ThrowableComputable; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.*; import com.intellij.refactoring.util.CommonRefactoringUtil; import com.intellij.util.ArrayUtil; import com.intellij.util.messages.MessageBusConnection; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.io.IOException; import java.nio.charset.Charset; import java.util.Arrays; public class EncodingUtil { enum Magic8 { ABSOLUTELY, WELL_IF_YOU_INSIST, NO_WAY } // check if file can be loaded in the encoding 
correctly: // returns true if bytes on disk, converted to text with the charset, converted back to bytes matched static Magic8 isSafeToReloadIn(@NotNull VirtualFile virtualFile, @NotNull String text, @NotNull byte[] bytes, @NotNull Charset charset) { // file has BOM but the charset hasn't byte[] bom = virtualFile.getBOM(); if (bom != null && !CharsetToolkit.canHaveBom(charset, bom)) return Magic8.NO_WAY; // the charset has mandatory BOM (e.g. UTF-xx) but the file hasn't or has wrong byte[] mandatoryBom = CharsetToolkit.getMandatoryBom(charset); if (mandatoryBom != null && !ArrayUtil.startsWith(bytes, mandatoryBom)) return Magic8.NO_WAY; String loaded = LoadTextUtil.getTextByBinaryPresentation(bytes, charset).toString(); String separator = FileDocumentManager.getInstance().getLineSeparator(virtualFile, null); String toSave = StringUtil.convertLineSeparators(loaded, separator); String failReason = LoadTextUtil.wasCharsetDetectedFromBytes(virtualFile); if (failReason != null && CharsetToolkit.UTF8_CHARSET.equals(virtualFile.getCharset()) && !CharsetToolkit.UTF8_CHARSET.equals(charset)) { return Magic8.NO_WAY; // can't reload utf8-autodetected file in another charset } byte[] bytesToSave; try { bytesToSave = toSave.getBytes(charset); } catch (UnsupportedOperationException e) { return Magic8.NO_WAY; } if (bom != null && !ArrayUtil.startsWith(bytesToSave, bom)) { bytesToSave = ArrayUtil.mergeArrays(bom, bytesToSave); // for 2-byte encodings String.getBytes(Charset) adds BOM automatically } return !Arrays.equals(bytesToSave, bytes) ? Magic8.NO_WAY : loaded.equals(text) ? Magic8.ABSOLUTELY : Magic8.WELL_IF_YOU_INSIST; } static Magic8 isSafeToConvertTo(@NotNull VirtualFile virtualFile, @NotNull String text, @NotNull byte[] bytesOnDisk, @NotNull Charset charset) { try { String lineSeparator = FileDocumentManager.getInstance().getLineSeparator(virtualFile, null); String textToSave = lineSeparator.equals("\n") ? 
text : StringUtil.convertLineSeparators(text, lineSeparator); Pair<Charset, byte[]> chosen = LoadTextUtil.chooseMostlyHarmlessCharset(virtualFile.getCharset(), charset, textToSave); byte[] saved = chosen.second; CharSequence textLoadedBack = LoadTextUtil.getTextByBinaryPresentation(saved, charset); return !text.equals(textLoadedBack.toString()) ? Magic8.NO_WAY : Arrays.equals(saved, bytesOnDisk) ? Magic8.ABSOLUTELY : Magic8.WELL_IF_YOU_INSIST; } catch (UnsupportedOperationException e) { // unsupported encoding return Magic8.NO_WAY; } } static void saveIn(@NotNull final Document document, final Editor editor, @NotNull final VirtualFile virtualFile, @NotNull final Charset charset) { FileDocumentManager documentManager = FileDocumentManager.getInstance(); documentManager.saveDocument(document); final Project project = ProjectLocator.getInstance().guessProjectForFile(virtualFile); boolean writable = project == null ? virtualFile.isWritable() : ReadonlyStatusHandler.ensureFilesWritable(project, virtualFile); if (!writable) { CommonRefactoringUtil.showErrorHint(project, editor, "Cannot save the file " + virtualFile.getPresentableUrl(), "Unable to Save", null); return; } // first, save the file in the new charset and then mark the file as having the correct encoding try { ApplicationManager.getApplication().runWriteAction(new ThrowableComputable<Object, IOException>() { @Override public Object compute() throws IOException { virtualFile.setCharset(charset); LoadTextUtil.write(project, virtualFile, virtualFile, document.getText(), document.getModificationStamp()); return null; } }); } catch (IOException io) { Messages.showErrorDialog(project, io.getMessage(), "Error Writing File"); } EncodingProjectManagerImpl.suppressReloadDuring(() -> EncodingManager.getInstance().setEncoding(virtualFile, charset)); } static void reloadIn(@NotNull final VirtualFile virtualFile, @NotNull final Charset charset) { final FileDocumentManager documentManager = FileDocumentManager.getInstance(); 
//Project project = ProjectLocator.getInstance().guessProjectForFile(myFile); //if (documentManager.isFileModified(myFile)) { // int result = Messages.showDialog(project, "File is modified. Reload file anyway?", "File is Modified", new String[]{"Reload", "Cancel"}, 0, AllIcons.General.WarningDialog); // if (result != 0) return; //} if (documentManager.getCachedDocument(virtualFile) == null) { // no need to reload document EncodingManager.getInstance().setEncoding(virtualFile, charset); return; } final Disposable disposable = Disposer.newDisposable(); MessageBusConnection connection = ApplicationManager.getApplication().getMessageBus().connect(disposable); connection.subscribe(AppTopics.FILE_DOCUMENT_SYNC, new FileDocumentManagerAdapter() { @Override public void beforeFileContentReload(VirtualFile file, @NotNull Document document) { if (!file.equals(virtualFile)) return; Disposer.dispose(disposable); // disconnect EncodingManager.getInstance().setEncoding(file, charset); LoadTextUtil.setCharsetWasDetectedFromBytes(file, null); } }); // if file was modified, the user will be asked here try { EncodingProjectManagerImpl.suppressReloadDuring(() -> ((VirtualFileListener)documentManager).contentsChanged( new VirtualFileEvent(null, virtualFile, virtualFile.getName(), virtualFile.getParent()))); } finally { Disposer.dispose(disposable); } } // returns (hardcoded charset from the file type, explanation) or (null, null) if file type does not restrict encoding @NotNull private static Pair<Charset, String> checkHardcodedCharsetFileType(@NotNull VirtualFile virtualFile) { FileType fileType = virtualFile.getFileType(); if (fileType.isBinary()) return Pair.create(null, "binary file"); // in lesser IDEs all special file types are plain text so check for that first if (fileType == FileTypes.PLAIN_TEXT) return Pair.create(null, null); if (fileType == StdFileTypes.GUI_DESIGNER_FORM) return Pair.create(CharsetToolkit.UTF8_CHARSET, "IDEA GUI Designer form"); if (fileType == 
StdFileTypes.IDEA_MODULE) return Pair.create(CharsetToolkit.UTF8_CHARSET, "IDEA module file"); if (fileType == StdFileTypes.IDEA_PROJECT) return Pair.create(CharsetToolkit.UTF8_CHARSET, "IDEA project file"); if (fileType == StdFileTypes.IDEA_WORKSPACE) return Pair.create(CharsetToolkit.UTF8_CHARSET, "IDEA workspace file"); if (fileType == StdFileTypes.PROPERTIES) return Pair.create(virtualFile.getCharset(), ".properties file"); if (fileType == StdFileTypes.XML || fileType == StdFileTypes.JSPX) { return Pair.create(virtualFile.getCharset(), "XML file"); } return Pair.create(null, null); } @NotNull // returns pair (existing charset (null means N/A); failReason: null means enabled, notnull means disabled and contains error message) public static Pair<Charset, String> checkCanReload(@NotNull VirtualFile virtualFile) { if (virtualFile.isDirectory()) { return Pair.create(null, "file is a directory"); } FileDocumentManager documentManager = FileDocumentManager.getInstance(); Document document = documentManager.getDocument(virtualFile); if (document == null) return Pair.create(null, "binary file"); Charset charsetFromContent = ((EncodingManagerImpl)EncodingManager.getInstance()).computeCharsetFromContent(virtualFile); Charset existing = charsetFromContent; String failReason = LoadTextUtil.wasCharsetDetectedFromBytes(virtualFile); if (failReason != null) { // no point changing encoding if it was auto-detected existing = virtualFile.getCharset(); } else if (charsetFromContent != null) { failReason = "hard coded in text"; } else { Pair<Charset, String> fileTypeCheck = checkHardcodedCharsetFileType(virtualFile); if (fileTypeCheck.second != null) { failReason = fileTypeCheck.second; existing = fileTypeCheck.first; } } if (failReason != null) { return Pair.create(existing, failReason); } return Pair.create(virtualFile.getCharset(), null); } @Nullable("null means enabled, notnull means disabled and contains error message") static String checkCanConvert(@NotNull VirtualFile 
virtualFile) { if (virtualFile.isDirectory()) { return "file is a directory"; } String failReason = null; Charset charsetFromContent = ((EncodingManagerImpl)EncodingManager.getInstance()).computeCharsetFromContent(virtualFile); if (charsetFromContent != null) { failReason = "Encoding is hard-coded in the text"; } else { Pair<Charset, String> check = checkHardcodedCharsetFileType(virtualFile); if (check.second != null) { failReason = check.second; } } if (failReason != null) { return failReason; } return null; } // null means enabled, (current charset, error description) otherwise @Nullable public static Pair<Charset, String> checkSomeActionEnabled(@NotNull VirtualFile selectedFile) { String saveError = checkCanConvert(selectedFile); if (saveError == null) return null; Pair<Charset, String> reloadError = checkCanReload(selectedFile); if (reloadError.second == null) return null; return Pair.create(reloadError.first, saveError); } }
idea4bsd/idea4bsd
platform/platform-impl/src/com/intellij/openapi/vfs/encoding/EncodingUtil.java
Java
apache-2.0
11,775
#!/usr/bin/env python # Licensed to Cloudera, Inc. under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. Cloudera, Inc. licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import json from lxml import objectify, etree from django.contrib.auth.models import Group, User from useradmin.models import HuePermission, GroupPermission, get_default_user_group from hadoop import cluster from desktop.lib import fsmanager def grant_access(username, groupname, appname): add_permission(username, groupname, 'access', appname) def add_permission(username, groupname, permname, appname): user = User.objects.get(username=username) group, created = Group.objects.get_or_create(name=groupname) perm, created = HuePermission.objects.get_or_create(app=appname, action=permname) GroupPermission.objects.get_or_create(group=group, hue_permission=perm) if not user.groups.filter(name=group.name).exists(): user.groups.add(group) user.save() def add_to_group(username, groupname=None): if groupname is None: group = get_default_user_group() assert group is not None groupname = group.name user = User.objects.get(username=username) group, created = Group.objects.get_or_create(name=groupname) if not user.groups.filter(name=group.name).exists(): user.groups.add(group) user.save() def remove_from_group(username, groupname): user = User.objects.get(username=username) group, created = 
Group.objects.get_or_create(name=groupname) if user.groups.filter(name=group.name).exists(): user.groups.remove(group) user.save() def reformat_json(json_obj): if isinstance(json_obj, basestring): return json.dumps(json.loads(json_obj)) else: return json.dumps(json_obj) def reformat_xml(xml_obj): if isinstance(xml_obj, basestring): return etree.tostring(objectify.fromstring(xml_obj, etree.XMLParser(strip_cdata=False, remove_blank_text=True))) else: return etree.tostring(xml_obj) def clear_sys_caches(): return cluster.clear_caches(), fsmanager.clear_cache() def restore_sys_caches(old_caches): cluster.restore_caches(old_caches[0]) fsmanager.restore_cache(old_caches[1])
Peddle/hue
desktop/core/src/desktop/lib/test_utils.py
Python
apache-2.0
2,861
function getUrlVars() { var vars = [], hash; var hashes = window.location.href.slice(window.location.href.indexOf('?') + 1).split('&'); for (var i = 0; i < hashes.length; i++) { hash = hashes[i].split('='); vars.push(hash[0]); vars[hash[0]] = hash[1]; } return vars; }
SreejithNS/com.sreejithn
www/js/geturi.js
JavaScript
apache-2.0
312
export const CREATE_COURSE = 'CREATE_COURSE';
bluSCALE4/react-hello-world
src/actions/actionTypes.js
JavaScript
apache-2.0
46
/*** Wrapper/Helper Class for datagrid based on jQuery Datatable Plugin ***/ var Datatable = function () { var tableOptions; // main options var dataTable; // datatable object var table; // actual table jquery object var tableContainer; // actual table container object var tableWrapper; // actual table wrapper jquery object var tableInitialized = false; var ajaxParams = {}; // set filter mode var countSelectedRecords = function() { var selected = $('tbody > tr > td:nth-child(1) input[type="checkbox"]:checked', table).size(); var text = tableOptions.dataTable.oLanguage.sGroupActions; if (selected > 0) { $('.table-group-actions > span', tableWrapper).text(text.replace("_TOTAL_", selected)); } else { $('.table-group-actions > span', tableWrapper).text(""); } } return { //main function to initiate the module init: function (options) { if (!$().dataTable) { return; } var the = this; // default settings options = $.extend(true, { src: "", // actual table filterApplyAction: "filter", filterCancelAction: "filter_cancel", resetGroupActionInputOnSuccess: true, dataTable: { "sDom" : "<'row'<'col-md-8 col-sm-12'pli><'col-md-4 col-sm-12'<'table-group-actions pull-right'>>r><'table-scrollable't><'row'<'col-md-8 col-sm-12'pli><'col-md-4 col-sm-12'>r>>", // datatable layout "aLengthMenu": [ // set available records per page [10, 25, 50, 100, -1], [10, 25, 50, 100, "All"] ], "iDisplayLength": 10, // default records per page "oLanguage": { // language settings "sProcessing": '<img src="' + Metronic.getGlobalImgPath() + 'loading-spinner-grey.gif"/><span>&nbsp;&nbsp;Loading...</span>', "sLengthMenu": "<span class='seperator'>|</span>View _MENU_ records", "sInfo": "<span class='seperator'>|</span>Found total _TOTAL_ records", "sInfoEmpty": "No records found to show", "sGroupActions": "_TOTAL_ records selected: ", "sAjaxRequestGeneralError": "Could not complete request. 
Please check your internet connection", "sEmptyTable": "No data available in table", "sZeroRecords": "No matching records found", "oPaginate": { "sPrevious": "Prev", "sNext": "Next", "sPage": "Page", "sPageOf": "of" } }, "aoColumnDefs" : [{ // define columns sorting options(by default all columns are sortable extept the first checkbox column) 'bSortable' : false, 'aTargets' : [ 0 ] }], "bAutoWidth": false, // disable fixed width and enable fluid table "bSortCellsTop": true, // make sortable only the first row in thead "sPaginationType": "bootstrap_extended", // pagination type(bootstrap, bootstrap_full_number or bootstrap_extended) "bProcessing": true, // enable/disable display message box on record load "bServerSide": true, // enable/disable server side ajax loading "sAjaxSource": "", // define ajax source URL "sServerMethod": "POST", // handle ajax request "fnServerData": function ( sSource, aoData, fnCallback, oSettings ) { oSettings.jqXHR = $.ajax( { "dataType": 'json', "type": "POST", "url": sSource, "data": aoData, "success": function(res, textStatus, jqXHR) { if (res.sMessage) { Metronic.alert({type: (res.sStatus == 'OK' ? 'success' : 'danger'), icon: (res.sStatus == 'OK' ? 
'check' : 'warning'), message: res.sMessage, container: tableWrapper, place: 'prepend'}); } if (res.sStatus) { if (tableOptions.resetGroupActionInputOnSuccess) { $('.table-group-action-input', tableWrapper).val(""); } } if ($('.group-checkable', table).size() === 1) { $('.group-checkable', table).attr("checked", false); $.uniform.update($('.group-checkable', table)); } if (tableOptions.onSuccess) { tableOptions.onSuccess.call(undefined, the); } fnCallback(res, textStatus, jqXHR); }, "error": function() { if (tableOptions.onError) { tableOptions.onError.call(undefined, the); } Metronic.alert({type: 'danger', icon: 'warning', message: tableOptions.dataTable.oLanguage.sAjaxRequestGeneralError, container: tableWrapper, place: 'prepend'}); $('.dataTables_processing', tableWrapper).remove(); } } ); }, // pass additional parameter "fnServerParams": function ( aoData ) { //here can be added an external ajax request parameters. $.each(ajaxParams, function( key, value ) { aoData.push({"name" : key, "value": value}); }); }, "fnDrawCallback": function( oSettings ) { // run some code on table redraw if (tableInitialized === false) { // check if table has been initialized tableInitialized = true; // set table initialized table.show(); // display table } Metronic.initUniform($('input[type="checkbox"]', table)); // reinitialize uniform checkboxes on each table reload countSelectedRecords(); // reset selected records indicator } } }, options); tableOptions = options; // create table's jquery object table = $(options.src); tableContainer = table.parents(".table-container"); // apply the special class that used to restyle the default datatable $.fn.dataTableExt.oStdClasses.sWrapper = $.fn.dataTableExt.oStdClasses.sWrapper + " dataTables_extended_wrapper"; // initialize a datatable dataTable = table.dataTable(options.dataTable); tableWrapper = table.parents('.dataTables_wrapper'); // modify table per page dropdown input by appliying some classes $('.dataTables_length select', 
tableWrapper).addClass("form-control input-xsmall input-sm"); // build table group actions panel if ($('.table-actions-wrapper', tableContainer).size() === 1) { $('.table-group-actions', tableWrapper).html($('.table-actions-wrapper', tableContainer).html()); // place the panel inside the wrapper $('.table-actions-wrapper', tableContainer).remove(); // remove the template container } // handle group checkboxes check/uncheck $('.group-checkable', table).change(function () { var set = $('tbody > tr > td:nth-child(1) input[type="checkbox"]', table); var checked = $(this).is(":checked"); $(set).each(function () { $(this).attr("checked", checked); }); $.uniform.update(set); countSelectedRecords(); }); // handle row's checkbox click table.on('change', 'tbody > tr > td:nth-child(1) input[type="checkbox"]', function(){ countSelectedRecords(); }); // handle filter submit button click table.on('click', '.filter-submit', function(e){ e.preventDefault(); the.setAjaxParam("sAction", tableOptions.filterApplyAction); // get all typeable inputs $('textarea.form-filter, select.form-filter, input.form-filter:not([type="radio"],[type="checkbox"])', table).each(function(){ the.setAjaxParam($(this).attr("name"), $(this).val()); }); // get all checkable inputs $('input.form-filter[type="checkbox"]:checked, input.form-filter[type="radio"]:checked', table).each(function(){ the.setAjaxParam($(this).attr("name"), $(this).val()); }); dataTable.fnDraw(); }); // handle filter cancel button click table.on('click', '.filter-cancel', function(e){ e.preventDefault(); $('textarea.form-filter, select.form-filter, input.form-filter', table).each(function(){ $(this).val(""); }); $('input.form-filter[type="checkbox"]', table).each(function(){ $(this).attr("checked", false); }); the.clearAjaxParams(); the.setAjaxParam("sAction", tableOptions.filterCancelAction); dataTable.fnDraw(); }); }, getSelectedRowsCount: function() { return $('tbody > tr > td:nth-child(1) input[type="checkbox"]:checked', 
table).size(); }, getSelectedRows: function() { var rows = []; $('tbody > tr > td:nth-child(1) input[type="checkbox"]:checked', table).each(function(){ rows.push({name: $(this).attr("name"), value: $(this).val()}); }); return rows; }, addAjaxParam: function(name, value) { ajaxParams[name] = value; }, setAjaxParam: function(name, value) { ajaxParams[name] = value; }, clearAjaxParams: function(name, value) { ajaxParams = []; }, getDataTable: function() { return dataTable; }, getTableWrapper: function() { return tableWrapper; }, gettableContainer: function() { return tableContainer; }, getTable: function() { return table; } }; };
zeickan/Infected-Engine
static/global/scripts/datatable.js
JavaScript
apache-2.0
11,607
"""Support for the Foobot indoor air quality monitor.""" import asyncio from datetime import timedelta import logging import aiohttp from foobot_async import FoobotClient import voluptuous as vol from homeassistant.const import ( ATTR_TEMPERATURE, ATTR_TIME, CONF_TOKEN, CONF_USERNAME, TEMP_CELSIUS, ) from homeassistant.exceptions import PlatformNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.config_validation import PLATFORM_SCHEMA from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle _LOGGER = logging.getLogger(__name__) ATTR_HUMIDITY = "humidity" ATTR_PM2_5 = "PM2.5" ATTR_CARBON_DIOXIDE = "CO2" ATTR_VOLATILE_ORGANIC_COMPOUNDS = "VOC" ATTR_FOOBOT_INDEX = "index" SENSOR_TYPES = { "time": [ATTR_TIME, "s"], "pm": [ATTR_PM2_5, "µg/m3", "mdi:cloud"], "tmp": [ATTR_TEMPERATURE, TEMP_CELSIUS, "mdi:thermometer"], "hum": [ATTR_HUMIDITY, "%", "mdi:water-percent"], "co2": [ATTR_CARBON_DIOXIDE, "ppm", "mdi:periodic-table-co2"], "voc": [ATTR_VOLATILE_ORGANIC_COMPOUNDS, "ppb", "mdi:cloud"], "allpollu": [ATTR_FOOBOT_INDEX, "%", "mdi:percent"], } SCAN_INTERVAL = timedelta(minutes=10) PARALLEL_UPDATES = 1 TIMEOUT = 10 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( {vol.Required(CONF_TOKEN): cv.string, vol.Required(CONF_USERNAME): cv.string} ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the devices associated with the account.""" token = config.get(CONF_TOKEN) username = config.get(CONF_USERNAME) client = FoobotClient( token, username, async_get_clientsession(hass), timeout=TIMEOUT ) dev = [] try: devices = await client.get_devices() _LOGGER.debug("The following devices were found: %s", devices) for device in devices: foobot_data = FoobotData(client, device["uuid"]) for sensor_type in SENSOR_TYPES: if sensor_type == "time": continue foobot_sensor = FoobotSensor(foobot_data, device, 
sensor_type) dev.append(foobot_sensor) except ( aiohttp.client_exceptions.ClientConnectorError, asyncio.TimeoutError, FoobotClient.TooManyRequests, FoobotClient.InternalError, ): _LOGGER.exception("Failed to connect to foobot servers.") raise PlatformNotReady except FoobotClient.ClientError: _LOGGER.error("Failed to fetch data from foobot servers.") return async_add_entities(dev, True) class FoobotSensor(Entity): """Implementation of a Foobot sensor.""" def __init__(self, data, device, sensor_type): """Initialize the sensor.""" self._uuid = device["uuid"] self.foobot_data = data self._name = "Foobot {} {}".format(device["name"], SENSOR_TYPES[sensor_type][0]) self.type = sensor_type self._unit_of_measurement = SENSOR_TYPES[sensor_type][1] @property def name(self): """Return the name of the sensor.""" return self._name @property def icon(self): """Icon to use in the frontend.""" return SENSOR_TYPES[self.type][2] @property def state(self): """Return the state of the device.""" try: data = self.foobot_data.data[self.type] except (KeyError, TypeError): data = None return data @property def unique_id(self): """Return the unique id of this entity.""" return f"{self._uuid}_{self.type}" @property def unit_of_measurement(self): """Return the unit of measurement of this entity.""" return self._unit_of_measurement async def async_update(self): """Get the latest data.""" await self.foobot_data.async_update() class FoobotData(Entity): """Get data from Foobot API.""" def __init__(self, client, uuid): """Initialize the data object.""" self._client = client self._uuid = uuid self.data = {} @Throttle(SCAN_INTERVAL) async def async_update(self): """Get the data from Foobot API.""" interval = SCAN_INTERVAL.total_seconds() try: response = await self._client.get_last_data( self._uuid, interval, interval + 1 ) except ( aiohttp.client_exceptions.ClientConnectorError, asyncio.TimeoutError, self._client.TooManyRequests, self._client.InternalError, ): _LOGGER.debug("Couldn't fetch data") 
return False _LOGGER.debug("The data response is: %s", response) self.data = {k: round(v, 1) for k, v in response[0].items()} return True
leppa/home-assistant
homeassistant/components/foobot/sensor.py
Python
apache-2.0
4,894
/* Copyright 2016 - 2017 Huawei Technologies Co., Ltd. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ import { Component, OnInit } from '@angular/core'; @Component({ selector: 'app-git', templateUrl: './git.component.html', styleUrls: ['./git.component.scss'] }) export class GitComponent implements OnInit { constructor() { } ngOnInit() { } }
victorwangyang/containerops
tenant/angular4/src/app/repository/git/git.component.ts
TypeScript
apache-2.0
863
import six from hamcrest.core.base_matcher import Matcher from hamcrest.core.core.isequal import equal_to __author__ = "Jon Reid" __copyright__ = "Copyright 2011 hamcrest.org" __license__ = "BSD, see License.txt" import types def wrap_matcher(x): """Wraps argument in a matcher, if necessary. :returns: the argument as-is if it is already a matcher, otherwise wrapped in an :py:func:`~hamcrest.core.core.isequal.equal_to` matcher. """ if isinstance(x, Matcher): return x else: return equal_to(x) def is_matchable_type(expected_type): if isinstance(expected_type, type): return True if isinstance(expected_type, six.class_types): return True if isinstance(expected_type, tuple) and \ expected_type and \ all(map(is_matchable_type, expected_type)): return True return False
axbaretto/beam
sdks/python/.tox/py27gcp/lib/python2.7/site-packages/hamcrest/core/helpers/wrap_matcher.py
Python
apache-2.0
880
// Copyright 2012 the V8 project authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "src/parsing/rewriter.h" #include "src/ast/ast.h" #include "src/ast/scopes.h" #include "src/parsing/parse-info.h" #include "src/parsing/parser.h" namespace v8 { namespace internal { class Processor final : public AstVisitor<Processor> { public: Processor(Isolate* isolate, DeclarationScope* closure_scope, Variable* result, AstValueFactory* ast_value_factory) : result_(result), result_assigned_(false), replacement_(nullptr), is_set_(false), breakable_(false), zone_(ast_value_factory->zone()), closure_scope_(closure_scope), factory_(ast_value_factory) { DCHECK_EQ(closure_scope, closure_scope->GetClosureScope()); InitializeAstVisitor(isolate); } Processor(Parser* parser, DeclarationScope* closure_scope, Variable* result, AstValueFactory* ast_value_factory) : result_(result), result_assigned_(false), replacement_(nullptr), is_set_(false), breakable_(false), zone_(ast_value_factory->zone()), closure_scope_(closure_scope), factory_(ast_value_factory) { DCHECK_EQ(closure_scope, closure_scope->GetClosureScope()); InitializeAstVisitor(parser->stack_limit()); } void Process(ZoneList<Statement*>* statements); bool result_assigned() const { return result_assigned_; } Zone* zone() { return zone_; } DeclarationScope* closure_scope() { return closure_scope_; } AstNodeFactory* factory() { return &factory_; } // Returns ".result = value" Expression* SetResult(Expression* value) { result_assigned_ = true; VariableProxy* result_proxy = factory()->NewVariableProxy(result_); return factory()->NewAssignment(Token::ASSIGN, result_proxy, value, kNoSourcePosition); } // Inserts '.result = undefined' in front of the given statement. 
Statement* AssignUndefinedBefore(Statement* s); private: Variable* result_; // We are not tracking result usage via the result_'s use // counts (we leave the accurate computation to the // usage analyzer). Instead we simple remember if // there was ever an assignment to result_. bool result_assigned_; // When visiting a node, we "return" a replacement for that node in // [replacement_]. In many cases this will just be the original node. Statement* replacement_; // To avoid storing to .result all the time, we eliminate some of // the stores by keeping track of whether or not we're sure .result // will be overwritten anyway. This is a bit more tricky than what I // was hoping for. bool is_set_; bool breakable_; class BreakableScope final { public: explicit BreakableScope(Processor* processor, bool breakable = true) : processor_(processor), previous_(processor->breakable_) { processor->breakable_ = processor->breakable_ || breakable; } ~BreakableScope() { processor_->breakable_ = previous_; } private: Processor* processor_; bool previous_; }; Zone* zone_; DeclarationScope* closure_scope_; AstNodeFactory factory_; // Node visitors. 
#define DEF_VISIT(type) void Visit##type(type* node); AST_NODE_LIST(DEF_VISIT) #undef DEF_VISIT void VisitIterationStatement(IterationStatement* stmt); DEFINE_AST_VISITOR_SUBCLASS_MEMBERS(); }; Statement* Processor::AssignUndefinedBefore(Statement* s) { Expression* result_proxy = factory()->NewVariableProxy(result_); Expression* undef = factory()->NewUndefinedLiteral(kNoSourcePosition); Expression* assignment = factory()->NewAssignment(Token::ASSIGN, result_proxy, undef, kNoSourcePosition); Block* b = factory()->NewBlock(NULL, 2, false, kNoSourcePosition); b->statements()->Add( factory()->NewExpressionStatement(assignment, kNoSourcePosition), zone()); b->statements()->Add(s, zone()); return b; } void Processor::Process(ZoneList<Statement*>* statements) { // If we're in a breakable scope (named block, iteration, or switch), we walk // all statements. The last value producing statement before the break needs // to assign to .result. If we're not in a breakable scope, only the last // value producing statement in the block assigns to .result, so we can stop // early. for (int i = statements->length() - 1; i >= 0 && (breakable_ || !is_set_); --i) { Visit(statements->at(i)); statements->Set(i, replacement_); } } void Processor::VisitBlock(Block* node) { // An initializer block is the rewritten form of a variable declaration // with initialization expressions. The initializer block contains the // list of assignments corresponding to the initialization expressions. // While unclear from the spec (ECMA-262, 3rd., 12.2), the value of // a variable declaration with initialization expression is 'undefined' // with some JS VMs: For instance, using smjs, print(eval('var x = 7')) // returns 'undefined'. To obtain the same behavior with v8, we need // to prevent rewriting in that case. 
if (!node->ignore_completion_value()) { BreakableScope scope(this, node->labels() != nullptr); Process(node->statements()); } replacement_ = node; } void Processor::VisitExpressionStatement(ExpressionStatement* node) { // Rewrite : <x>; -> .result = <x>; if (!is_set_) { node->set_expression(SetResult(node->expression())); is_set_ = true; } replacement_ = node; } void Processor::VisitIfStatement(IfStatement* node) { // Rewrite both branches. bool set_after = is_set_; Visit(node->then_statement()); node->set_then_statement(replacement_); bool set_in_then = is_set_; is_set_ = set_after; Visit(node->else_statement()); node->set_else_statement(replacement_); replacement_ = set_in_then && is_set_ ? node : AssignUndefinedBefore(node); is_set_ = true; } void Processor::VisitIterationStatement(IterationStatement* node) { // The statement may have to produce a value, so always assign undefined // before. // TODO(verwaest): Omit it if we know that there's no break/continue leaving // it early. DCHECK(breakable_ || !is_set_); BreakableScope scope(this); Visit(node->body()); node->set_body(replacement_); replacement_ = AssignUndefinedBefore(node); is_set_ = true; } void Processor::VisitDoWhileStatement(DoWhileStatement* node) { VisitIterationStatement(node); } void Processor::VisitWhileStatement(WhileStatement* node) { VisitIterationStatement(node); } void Processor::VisitForStatement(ForStatement* node) { VisitIterationStatement(node); } void Processor::VisitForInStatement(ForInStatement* node) { VisitIterationStatement(node); } void Processor::VisitForOfStatement(ForOfStatement* node) { VisitIterationStatement(node); } void Processor::VisitTryCatchStatement(TryCatchStatement* node) { // Rewrite both try and catch block. 
bool set_after = is_set_; Visit(node->try_block()); node->set_try_block(static_cast<Block*>(replacement_)); bool set_in_try = is_set_; is_set_ = set_after; Visit(node->catch_block()); node->set_catch_block(static_cast<Block*>(replacement_)); replacement_ = is_set_ && set_in_try ? node : AssignUndefinedBefore(node); is_set_ = true; } void Processor::VisitTryFinallyStatement(TryFinallyStatement* node) { // Only rewrite finally if it could contain 'break' or 'continue'. Always // rewrite try. if (breakable_) { // Only set result before a 'break' or 'continue'. is_set_ = true; Visit(node->finally_block()); node->set_finally_block(replacement_->AsBlock()); // Save .result value at the beginning of the finally block and restore it // at the end again: ".backup = .result; ...; .result = .backup" // This is necessary because the finally block does not normally contribute // to the completion value. CHECK_NOT_NULL(closure_scope()); Variable* backup = closure_scope()->NewTemporary( factory()->ast_value_factory()->dot_result_string()); Expression* backup_proxy = factory()->NewVariableProxy(backup); Expression* result_proxy = factory()->NewVariableProxy(result_); Expression* save = factory()->NewAssignment( Token::ASSIGN, backup_proxy, result_proxy, kNoSourcePosition); Expression* restore = factory()->NewAssignment( Token::ASSIGN, result_proxy, backup_proxy, kNoSourcePosition); node->finally_block()->statements()->InsertAt( 0, factory()->NewExpressionStatement(save, kNoSourcePosition), zone()); node->finally_block()->statements()->Add( factory()->NewExpressionStatement(restore, kNoSourcePosition), zone()); } Visit(node->try_block()); node->set_try_block(replacement_->AsBlock()); replacement_ = is_set_ ? node : AssignUndefinedBefore(node); is_set_ = true; } void Processor::VisitSwitchStatement(SwitchStatement* node) { // The statement may have to produce a value, so always assign undefined // before. 
// TODO(verwaest): Omit it if we know that there's no break/continue leaving // it early. DCHECK(breakable_ || !is_set_); BreakableScope scope(this); // Rewrite statements in all case clauses. ZoneList<CaseClause*>* clauses = node->cases(); for (int i = clauses->length() - 1; i >= 0; --i) { CaseClause* clause = clauses->at(i); Process(clause->statements()); } replacement_ = AssignUndefinedBefore(node); is_set_ = true; } void Processor::VisitContinueStatement(ContinueStatement* node) { is_set_ = false; replacement_ = node; } void Processor::VisitBreakStatement(BreakStatement* node) { is_set_ = false; replacement_ = node; } void Processor::VisitWithStatement(WithStatement* node) { Visit(node->statement()); node->set_statement(replacement_); replacement_ = is_set_ ? node : AssignUndefinedBefore(node); is_set_ = true; } void Processor::VisitSloppyBlockFunctionStatement( SloppyBlockFunctionStatement* node) { Visit(node->statement()); node->set_statement(replacement_); replacement_ = node; } void Processor::VisitEmptyStatement(EmptyStatement* node) { replacement_ = node; } void Processor::VisitReturnStatement(ReturnStatement* node) { is_set_ = true; replacement_ = node; } void Processor::VisitDebuggerStatement(DebuggerStatement* node) { replacement_ = node; } // Expressions are never visited. #define DEF_VISIT(type) \ void Processor::Visit##type(type* expr) { UNREACHABLE(); } EXPRESSION_NODE_LIST(DEF_VISIT) #undef DEF_VISIT // Declarations are never visited. #define DEF_VISIT(type) \ void Processor::Visit##type(type* expr) { UNREACHABLE(); } DECLARATION_NODE_LIST(DEF_VISIT) #undef DEF_VISIT // Assumes code has been parsed. Mutates the AST, so the AST should not // continue to be used in the case of failure. 
bool Rewriter::Rewrite(ParseInfo* info) { FunctionLiteral* function = info->literal(); DCHECK_NOT_NULL(function); Scope* scope = function->scope(); DCHECK_NOT_NULL(scope); if (!scope->is_script_scope() && !scope->is_eval_scope()) return true; DeclarationScope* closure_scope = scope->GetClosureScope(); ZoneList<Statement*>* body = function->body(); if (!body->is_empty()) { Variable* result = closure_scope->NewTemporary( info->ast_value_factory()->dot_result_string()); // The name string must be internalized at this point. info->ast_value_factory()->Internalize(info->isolate()); DCHECK(!result->name().is_null()); Processor processor(info->isolate(), closure_scope, result, info->ast_value_factory()); processor.Process(body); // Internalize any values created during rewriting. info->ast_value_factory()->Internalize(info->isolate()); if (processor.HasStackOverflow()) return false; if (processor.result_assigned()) { int pos = kNoSourcePosition; VariableProxy* result_proxy = processor.factory()->NewVariableProxy(result, pos); Statement* result_statement = processor.factory()->NewReturnStatement(result_proxy, pos); body->Add(result_statement, info->zone()); } } return true; } bool Rewriter::Rewrite(Parser* parser, DeclarationScope* closure_scope, DoExpression* expr, AstValueFactory* factory) { Block* block = expr->block(); DCHECK_EQ(closure_scope, closure_scope->GetClosureScope()); DCHECK(block->scope() == nullptr || block->scope()->GetClosureScope() == closure_scope); ZoneList<Statement*>* body = block->statements(); VariableProxy* result = expr->result(); Variable* result_var = result->var(); if (!body->is_empty()) { Processor processor(parser, closure_scope, result_var, factory); processor.Process(body); if (processor.HasStackOverflow()) return false; if (!processor.result_assigned()) { AstNodeFactory* node_factory = processor.factory(); Expression* undef = node_factory->NewUndefinedLiteral(kNoSourcePosition); Statement* completion = 
node_factory->NewExpressionStatement( processor.SetResult(undef), expr->position()); body->Add(completion, factory->zone()); } } return true; } } // namespace internal } // namespace v8
zero-rp/miniblink49
v8_5_7/src/parsing/rewriter.cc
C++
apache-2.0
13,412
/** * Copyright (C) 2009-2014 Dell, Inc. * See annotations for authorship information * * ==================================================================== * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ==================================================================== */ package org.dasein.cloud; /** * Defines what level data gets cached at. * <p>Created by AndyLyall: 02/25/14 13:35 PM</p> * @author Andy Lyall * @version 2014.03 initial version * @since 2014.03 */ public enum VisibleScope { /** * Resource is visibile across the entire account */ ACCOUNT_GLOBAL, /** * Resource is visible across one whole region */ ACCOUNT_REGION, /** * Resource is visible across one whole datacenter */ ACCOUNT_DATACENTER }
OSS-TheWeatherCompany/dasein-cloud-core
src/main/java/org/dasein/cloud/VisibleScope.java
Java
apache-2.0
1,298
/* * Copyright © 2014 Cask Data, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License */ package co.cask.cdap.cli.completer.element; import co.cask.cdap.api.service.http.ServiceHttpEndpoint; import co.cask.cdap.cli.CLIConfig; import co.cask.cdap.cli.ProgramIdArgument; import co.cask.cdap.cli.util.ArgumentParser; import co.cask.cdap.client.ServiceClient; import co.cask.cdap.common.NotFoundException; import co.cask.cdap.common.UnauthorizedException; import co.cask.cdap.proto.Id; import co.cask.common.cli.completers.PrefixCompleter; import com.google.common.collect.Lists; import java.io.IOException; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; /** * Prefix completer for Http methods. 
*/ public class HttpMethodPrefixCompleter extends PrefixCompleter { private static final String PROGRAM_ID = "programId"; private static final String PATTERN = String.format("call service <%s>", PROGRAM_ID); private final ServiceClient serviceClient; private final EndpointCompleter completer; private final CLIConfig cliConfig; public HttpMethodPrefixCompleter(final ServiceClient serviceClient, final CLIConfig cliConfig, String prefix, EndpointCompleter completer) { super(prefix, completer); this.cliConfig = cliConfig; this.serviceClient = serviceClient; this.completer = completer; } @Override public int complete(String buffer, int cursor, List<CharSequence> candidates) { Map<String, String> arguments = ArgumentParser.getArguments(buffer, PATTERN); ProgramIdArgument programIdArgument = ArgumentParser.parseProgramId(arguments.get(PROGRAM_ID)); if (programIdArgument != null) { Id.Service service = Id.Service.from(cliConfig.getCurrentNamespace(), programIdArgument.getAppId(), programIdArgument.getProgramId()); completer.setEndpoints(getMethods(service)); } else { completer.setEndpoints(Collections.<String>emptyList()); } return super.complete(buffer, cursor, candidates); } public Collection<String> getMethods(Id.Service serviceId) { Collection<String> httpMethods = Lists.newArrayList(); try { for (ServiceHttpEndpoint endpoint : serviceClient.getEndpoints(serviceId)) { String method = endpoint.getMethod(); if (!httpMethods.contains(method)) { httpMethods.add(method); } } } catch (IOException | UnauthorizedException | NotFoundException ignored) { } return httpMethods; } }
chtyim/cdap
cdap-cli/src/main/java/co/cask/cdap/cli/completer/element/HttpMethodPrefixCompleter.java
Java
apache-2.0
3,087
/* * Copyright 2000-2013 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.plugins.javaFX.fxml.refs; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Computable; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.*; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.search.LocalSearchScope; import com.intellij.psi.search.SearchScope; import com.intellij.psi.search.searches.ReferencesSearch; import com.intellij.psi.util.PsiUtilCore; import com.intellij.psi.xml.XmlAttribute; import com.intellij.psi.xml.XmlAttributeValue; import com.intellij.util.Processor; import com.intellij.util.QueryExecutor; import org.jetbrains.annotations.NotNull; import org.jetbrains.plugins.javaFX.fxml.FxmlConstants; import org.jetbrains.plugins.javaFX.indexing.JavaFxControllerClassIndex; import java.util.List; /** * User: anna * Date: 3/29/13 */ public class JavaFxControllerFieldSearcher implements QueryExecutor<PsiReference, ReferencesSearch.SearchParameters>{ @Override public boolean execute(@NotNull final ReferencesSearch.SearchParameters queryParameters, @NotNull final Processor<PsiReference> consumer) { final PsiElement elementToSearch = queryParameters.getElementToSearch(); if (elementToSearch instanceof PsiField) { final PsiField field = (PsiField)elementToSearch; final PsiClass containingClass = 
ApplicationManager.getApplication().runReadAction(new Computable<PsiClass>() { @Override public PsiClass compute() { return field.getContainingClass(); } }); if (containingClass != null) { final String qualifiedName = ApplicationManager.getApplication().runReadAction(new Computable<String>() { @Override public String compute() { return containingClass.getQualifiedName(); } }); if (qualifiedName != null) { Project project = PsiUtilCore.getProjectInReadAction(containingClass); final List<PsiFile> fxmlWithController = JavaFxControllerClassIndex.findFxmlWithController(project, qualifiedName); for (final PsiFile file : fxmlWithController) { ApplicationManager.getApplication().runReadAction(() -> { final String fieldName = field.getName(); if (fieldName == null) return; final VirtualFile virtualFile = file.getViewProvider().getVirtualFile(); final SearchScope searchScope = queryParameters.getEffectiveSearchScope(); boolean contains = searchScope instanceof LocalSearchScope ? ((LocalSearchScope)searchScope).isInScope(virtualFile) : ((GlobalSearchScope)searchScope).contains(virtualFile); if (contains) { file.accept(new XmlRecursiveElementVisitor() { @Override public void visitXmlAttributeValue(final XmlAttributeValue value) { final PsiReference reference = value.getReference(); if (reference != null) { final PsiElement resolve = reference.resolve(); if (resolve instanceof XmlAttributeValue) { final PsiElement parent = resolve.getParent(); if (parent instanceof XmlAttribute) { final XmlAttribute attribute = (XmlAttribute)parent; if (FxmlConstants.FX_ID.equals(attribute.getName()) && fieldName.equals(attribute.getValue())) { consumer.process(reference); } } } } } }); } }); } } } } return true; } }
hurricup/intellij-community
plugins/javaFX/src/org/jetbrains/plugins/javaFX/fxml/refs/JavaFxControllerFieldSearcher.java
Java
apache-2.0
4,457
########################################################################## # Copyright 2015 ThoughtWorks, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ########################################################################## module ApiV1 module Dashboard class PipelineGroupRepresenter < ApiV1::BaseRepresenter alias_method :pipeline_dashboard, :represented link :self do |opts| opts[:url_builder].pipeline_group_config_list_api_url end link :doc do 'https://api.gocd.org/#pipeline-groups' end property :getName, as: :name collection :pipelines, embedded: true, exec_context: :decorator, decorator: PipelineRepresenter def pipelines pipeline_dashboard.getPipelineModels() end end end end
stephen-murby/gocd
server/webapp/WEB-INF/rails.new/app/presenters/api_v1/dashboard/pipeline_group_representer.rb
Ruby
apache-2.0
1,291
#!/usr/bin/env ruby # This example demonstrates creating a server image with the Rackpace Open Cloud require 'rubygems' #required for Ruby 1.8.x require 'fog' def get_user_input(prompt) print "#{prompt}: " gets.chomp end def select_server(servers) abort "\nThere are not any servers available to image in the Chicago region. Try running create_server.rb\n\n" if servers.empty? puts "\nSelect Server To Image:\n\n" servers.each_with_index do |server, i| puts "\t #{i}. #{server.name} [#{server.public_ip_address}]" end selected_str = get_user_input "\nEnter Server Number" servers[selected_str.to_i] end # Use username defined in ~/.fog file, if absent prompt for username. # For more details on ~/.fog refer to http://fog.io/about/getting_started.html def rackspace_username Fog.credentials[:rackspace_username] || get_user_input("Enter Rackspace Username") end # Use api key defined in ~/.fog file, if absent prompt for api key # For more details on ~/.fog refer to http://fog.io/about/getting_started.html def rackspace_api_key Fog.credentials[:rackspace_api_key] || get_user_input("Enter Rackspace API key") end # create Next Generation Cloud Server service service = Fog::Compute.new({ :provider => 'rackspace', :rackspace_username => rackspace_username, :rackspace_api_key => rackspace_api_key, :version => :v2, # Use Next Gen Cloud Servers :rackspace_region => :ord #Use Chicago Region }) # retrieve list of servers servers = service.servers # prompt user for server server = select_server(servers) # prompt user for image name image_name = get_user_input "Enter Image Name" # creates image for server server.create_image image_name puts "\nImage #{image_name} is being created for server #{server.name}.\n\n" puts "To delete the image please execute the delete_image.rb script\n\n"
luna1x/chef-server
vendor/ruby/1.9.1/gems/fog-1.15.0/lib/fog/rackspace/examples/compute_v2/create_image.rb
Ruby
apache-2.0
1,864
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.twitter.springboot; import org.springframework.boot.context.properties.ConfigurationProperties; /** * This component integrates with Twitter to send tweets or search for tweets * and more. * * Generated by camel-package-maven-plugin - do not edit this file! */ @ConfigurationProperties(prefix = "camel.component.twitter") public class TwitterComponentConfiguration { /** * The access token */ private String accessToken; /** * The access token secret */ private String accessTokenSecret; /** * The consumer key */ private String consumerKey; /** * The consumer secret */ private String consumerSecret; /** * The http proxy host which can be used for the camel-twitter. */ private String httpProxyHost; /** * The http proxy user which can be used for the camel-twitter. */ private String httpProxyUser; /** * The http proxy password which can be used for the camel-twitter. */ private String httpProxyPassword; /** * The http proxy port which can be used for the camel-twitter. 
*/ private int httpProxyPort; public String getAccessToken() { return accessToken; } public void setAccessToken(String accessToken) { this.accessToken = accessToken; } public String getAccessTokenSecret() { return accessTokenSecret; } public void setAccessTokenSecret(String accessTokenSecret) { this.accessTokenSecret = accessTokenSecret; } public String getConsumerKey() { return consumerKey; } public void setConsumerKey(String consumerKey) { this.consumerKey = consumerKey; } public String getConsumerSecret() { return consumerSecret; } public void setConsumerSecret(String consumerSecret) { this.consumerSecret = consumerSecret; } public String getHttpProxyHost() { return httpProxyHost; } public void setHttpProxyHost(String httpProxyHost) { this.httpProxyHost = httpProxyHost; } public String getHttpProxyUser() { return httpProxyUser; } public void setHttpProxyUser(String httpProxyUser) { this.httpProxyUser = httpProxyUser; } public String getHttpProxyPassword() { return httpProxyPassword; } public void setHttpProxyPassword(String httpProxyPassword) { this.httpProxyPassword = httpProxyPassword; } public int getHttpProxyPort() { return httpProxyPort; } public void setHttpProxyPort(int httpProxyPort) { this.httpProxyPort = httpProxyPort; } }
jmandawg/camel
components/camel-twitter/src/main/java/org/apache/camel/component/twitter/springboot/TwitterComponentConfiguration.java
Java
apache-2.0
3,496
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.hive; import com.facebook.presto.hive.HdfsEnvironment.HdfsContext; import com.facebook.presto.hive.metastore.Database; import com.facebook.presto.hive.metastore.Partition; import com.facebook.presto.hive.metastore.SemiTransactionalHiveMetastore; import com.facebook.presto.hive.metastore.Storage; import com.facebook.presto.hive.metastore.Table; import com.facebook.presto.hive.s3.PrestoS3FileSystem; import com.facebook.presto.spi.PrestoException; import com.facebook.presto.spi.SchemaNotFoundException; import com.facebook.presto.spi.SchemaTableName; import com.facebook.presto.spi.StandardErrorCode; import com.facebook.presto.spi.block.Block; import com.facebook.presto.spi.type.BigintType; import com.facebook.presto.spi.type.BooleanType; import com.facebook.presto.spi.type.CharType; import com.facebook.presto.spi.type.DateType; import com.facebook.presto.spi.type.DecimalType; import com.facebook.presto.spi.type.Decimals; import com.facebook.presto.spi.type.DoubleType; import com.facebook.presto.spi.type.IntegerType; import com.facebook.presto.spi.type.RealType; import com.facebook.presto.spi.type.SmallintType; import com.facebook.presto.spi.type.TimestampType; import com.facebook.presto.spi.type.TinyintType; import com.facebook.presto.spi.type.Type; import com.facebook.presto.spi.type.VarbinaryType; import com.facebook.presto.spi.type.VarcharType; import com.google.common.base.Throwables; import 
com.google.common.collect.ImmutableList; import com.google.common.primitives.Shorts; import com.google.common.primitives.SignedBytes; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FilterFileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.ProtectMode; import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter; import org.apache.hadoop.hive.ql.io.HiveOutputFormat; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.Serializer; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; import org.apache.hadoop.hive.serde2.io.ShortWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; import org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.StructField; import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.io.BooleanWritable; import org.apache.hadoop.io.ByteWritable; import org.apache.hadoop.io.BytesWritable; import 
org.apache.hadoop.io.FloatWritable; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.Reporter; import org.joda.time.DateTimeZone; import java.io.IOException; import java.math.BigInteger; import java.sql.Date; import java.sql.Timestamp; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Properties; import java.util.concurrent.TimeUnit; import static com.facebook.presto.hive.HiveErrorCode.HIVE_DATABASE_LOCATION_ERROR; import static com.facebook.presto.hive.HiveErrorCode.HIVE_FILESYSTEM_ERROR; import static com.facebook.presto.hive.HiveErrorCode.HIVE_WRITER_DATA_ERROR; import static com.facebook.presto.hive.HiveUtil.checkCondition; import static com.facebook.presto.hive.HiveUtil.isArrayType; import static com.facebook.presto.hive.HiveUtil.isMapType; import static com.facebook.presto.hive.HiveUtil.isRowType; import static com.facebook.presto.hive.metastore.MetastoreUtil.getProtectMode; import static com.facebook.presto.hive.metastore.MetastoreUtil.verifyOnline; import static com.facebook.presto.spi.StandardErrorCode.NOT_SUPPORTED; import static com.facebook.presto.spi.type.Chars.isCharType; import static com.google.common.base.Strings.padEnd; import static java.lang.Float.intBitsToFloat; import static java.lang.Math.toIntExact; import static java.lang.String.format; import static java.util.Objects.requireNonNull; import static java.util.UUID.randomUUID; import static java.util.stream.Collectors.toList; import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.COMPRESSRESULT; import static org.apache.hadoop.hive.metastore.TableType.MANAGED_TABLE; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector; import static 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaBooleanObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaByteArrayObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaByteObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaDateObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaDoubleObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaFloatObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaIntObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaLongObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaShortObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaTimestampObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableBinaryObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableBooleanObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableByteObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableDateObjectInspector; import static 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableDoubleObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableFloatObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableHiveCharObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableIntObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableLongObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableShortObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableStringObjectInspector; import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableTimestampObjectInspector; import static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.getCharTypeInfo; import static org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.getVarcharTypeInfo; import static org.joda.time.DateTimeZone.UTC; public final class HiveWriteUtils { @SuppressWarnings("OctalInteger") private static final FsPermission ALL_PERMISSIONS = new FsPermission((short) 0777); private HiveWriteUtils() { } public static RecordWriter createRecordWriter(Path target, JobConf conf, Properties properties, String outputFormatName) { try { boolean compress = HiveConf.getBoolVar(conf, COMPRESSRESULT); Object writer = Class.forName(outputFormatName).getConstructor().newInstance(); return ((HiveOutputFormat<?, ?>) writer).getHiveRecordWriter(conf, target, Text.class, compress, properties, Reporter.NULL); } catch (IOException | ReflectiveOperationException e) { throw new PrestoException(HIVE_WRITER_DATA_ERROR, e); } } @SuppressWarnings("deprecation") 
public static Serializer initializeSerializer(Configuration conf, Properties properties, String serializerName)
{
    // Reflectively instantiates and initializes the Hive SerDe named by serializerName.
    try {
        Serializer result = (Serializer) Class.forName(serializerName).getConstructor().newInstance();
        result.initialize(conf, properties);
        return result;
    }
    catch (SerDeException | ReflectiveOperationException e) {
        // Throwables.propagate(e) is deprecated; this is Guava's documented replacement
        // and is behaviorally identical here: both caught types are checked exceptions,
        // so they are always wrapped in a RuntimeException.
        Throwables.throwIfUnchecked(e);
        throw new RuntimeException(e);
    }
}

/**
 * Maps a Presto {@link Type} to the ObjectInspector used for the "java" (standard
 * object) representation of values produced by {@code getField}.
 * NOTE(review): varchar/char intentionally(?) return writable inspectors here,
 * unlike the other primitive branches — matches getField producing Text for them.
 */
public static ObjectInspector getJavaObjectInspector(Type type)
{
    if (type.equals(BooleanType.BOOLEAN)) {
        return javaBooleanObjectInspector;
    }
    else if (type.equals(BigintType.BIGINT)) {
        return javaLongObjectInspector;
    }
    else if (type.equals(IntegerType.INTEGER)) {
        return javaIntObjectInspector;
    }
    else if (type.equals(SmallintType.SMALLINT)) {
        return javaShortObjectInspector;
    }
    else if (type.equals(TinyintType.TINYINT)) {
        return javaByteObjectInspector;
    }
    else if (type.equals(RealType.REAL)) {
        return javaFloatObjectInspector;
    }
    else if (type.equals(DoubleType.DOUBLE)) {
        return javaDoubleObjectInspector;
    }
    else if (type instanceof VarcharType) {
        return writableStringObjectInspector;
    }
    else if (type instanceof CharType) {
        return writableHiveCharObjectInspector;
    }
    else if (type.equals(VarbinaryType.VARBINARY)) {
        return javaByteArrayObjectInspector;
    }
    else if (type.equals(DateType.DATE)) {
        return javaDateObjectInspector;
    }
    else if (type.equals(TimestampType.TIMESTAMP)) {
        return javaTimestampObjectInspector;
    }
    else if (type instanceof DecimalType) {
        DecimalType decimalType = (DecimalType) type;
        return getPrimitiveJavaObjectInspector(new DecimalTypeInfo(decimalType.getPrecision(), decimalType.getScale()));
    }
    else if (isArrayType(type)) {
        // recurse on the element type
        return ObjectInspectorFactory.getStandardListObjectInspector(getJavaObjectInspector(type.getTypeParameters().get(0)));
    }
    else if (isMapType(type)) {
        ObjectInspector keyObjectInspector = getJavaObjectInspector(type.getTypeParameters().get(0));
        ObjectInspector valueObjectInspector = getJavaObjectInspector(type.getTypeParameters().get(1));
        return
ObjectInspectorFactory.getStandardMapObjectInspector(keyObjectInspector, valueObjectInspector);
    }
    else if (isRowType(type)) {
        return ObjectInspectorFactory.getStandardStructObjectInspector(
                type.getTypeSignature().getParameters().stream()
                        .map(parameter -> parameter.getNamedTypeSignature().getName())
                        .collect(toList()),
                type.getTypeParameters().stream()
                        .map(HiveWriteUtils::getJavaObjectInspector)
                        .collect(toList()));
    }
    throw new IllegalArgumentException("unsupported type: " + type);
}

/**
 * Converts the value at {@code position} of {@code block} into the Java object
 * representation expected by Hive serializers for {@code type}.
 * Returns {@code null} for SQL NULL; nested arrays/maps/rows are converted recursively
 * into unmodifiable collections.
 */
public static Object getField(Type type, Block block, int position)
{
    if (block.isNull(position)) {
        return null;
    }
    if (BooleanType.BOOLEAN.equals(type)) {
        return type.getBoolean(block, position);
    }
    if (BigintType.BIGINT.equals(type)) {
        return type.getLong(block, position);
    }
    if (IntegerType.INTEGER.equals(type)) {
        return (int) type.getLong(block, position);
    }
    if (SmallintType.SMALLINT.equals(type)) {
        return (short) type.getLong(block, position);
    }
    if (TinyintType.TINYINT.equals(type)) {
        return (byte) type.getLong(block, position);
    }
    if (RealType.REAL.equals(type)) {
        // REAL is stored as the int bit pattern of the float
        return intBitsToFloat((int) type.getLong(block, position));
    }
    if (DoubleType.DOUBLE.equals(type)) {
        return type.getDouble(block, position);
    }
    if (type instanceof VarcharType) {
        return new Text(type.getSlice(block, position).getBytes());
    }
    if (type instanceof CharType) {
        CharType charType = (CharType) type;
        // CHAR values are space-padded to the declared length
        return new Text(padEnd(type.getSlice(block, position).toStringUtf8(), charType.getLength(), ' '));
    }
    if (VarbinaryType.VARBINARY.equals(type)) {
        return type.getSlice(block, position).getBytes();
    }
    if (DateType.DATE.equals(type)) {
        // DATE is epoch days; shift into the default zone keeping the local fields so
        // java.sql.Date renders the same calendar date
        long days = type.getLong(block, position);
        return new Date(UTC.getMillisKeepLocal(DateTimeZone.getDefault(), TimeUnit.DAYS.toMillis(days)));
    }
    if (TimestampType.TIMESTAMP.equals(type)) {
        long millisUtc = type.getLong(block, position);
        return new Timestamp(millisUtc);
    }
    if (type instanceof DecimalType) {
        DecimalType decimalType = (DecimalType) type;
        return getHiveDecimal(decimalType, block,
position);
    }
    if (isArrayType(type)) {
        Type elementType = type.getTypeParameters().get(0);
        Block arrayBlock = block.getObject(position, Block.class);
        List<Object> list = new ArrayList<>(arrayBlock.getPositionCount());
        for (int i = 0; i < arrayBlock.getPositionCount(); i++) {
            Object element = getField(elementType, arrayBlock, i);
            list.add(element);
        }
        return Collections.unmodifiableList(list);
    }
    if (isMapType(type)) {
        Type keyType = type.getTypeParameters().get(0);
        Type valueType = type.getTypeParameters().get(1);
        Block mapBlock = block.getObject(position, Block.class);
        Map<Object, Object> map = new HashMap<>();
        // map blocks interleave key/value pairs, hence the step of 2
        for (int i = 0; i < mapBlock.getPositionCount(); i += 2) {
            Object key = getField(keyType, mapBlock, i);
            Object value = getField(valueType, mapBlock, i + 1);
            map.put(key, value);
        }
        return Collections.unmodifiableMap(map);
    }
    if (isRowType(type)) {
        Block rowBlock = block.getObject(position, Block.class);
        List<Type> fieldTypes = type.getTypeParameters();
        checkCondition(fieldTypes.size() == rowBlock.getPositionCount(), StandardErrorCode.GENERIC_INTERNAL_ERROR, "Expected row value field count does not match type field count");
        List<Object> row = new ArrayList<>(rowBlock.getPositionCount());
        for (int i = 0; i < rowBlock.getPositionCount(); i++) {
            Object element = getField(fieldTypes.get(i), rowBlock, i);
            row.add(element);
        }
        return Collections.unmodifiableList(row);
    }
    throw new PrestoException(NOT_SUPPORTED, "unsupported type: " + type);
}

/**
 * Rejects writes to non-managed (e.g. external) tables unless explicitly enabled,
 * then applies the common writability checks (online, not read-only, not sorted/skewed).
 */
public static void checkTableIsWritable(Table table, boolean writesToNonManagedTablesEnabled)
{
    if (!writesToNonManagedTablesEnabled && !table.getTableType().equals(MANAGED_TABLE.toString())) {
        throw new PrestoException(NOT_SUPPORTED, "Cannot write to non-managed Hive table");
    }

    checkWritable(
            new SchemaTableName(table.getDatabaseName(), table.getTableName()),
            Optional.empty(),
            getProtectMode(table),
            table.getParameters(),
            table.getStorage());
}

/** Applies the common writability checks to a single partition. */
public static void checkPartitionIsWritable(String partitionName, Partition partition)
{
checkWritable(
            new SchemaTableName(partition.getDatabaseName(), partition.getTableName()),
            Optional.of(partitionName),
            getProtectMode(partition),
            partition.getParameters(),
            partition.getStorage());
}

/**
 * Shared writability validation for tables and partitions: must be online,
 * not protected read-only, and not bucketed-sorted or skewed (unsupported for insert).
 */
private static void checkWritable(
        SchemaTableName tableName,
        Optional<String> partitionName,
        ProtectMode protectMode,
        Map<String, String> parameters,
        Storage storage)
{
    String tablePartitionDescription = "Table '" + tableName + "'";
    if (partitionName.isPresent()) {
        tablePartitionDescription += " partition '" + partitionName.get() + "'";
    }

    // verify online
    verifyOnline(tableName, partitionName, protectMode, parameters);

    // verify not read only
    if (protectMode.readOnly) {
        throw new HiveReadOnlyException(tableName, partitionName);
    }

    // verify sorting
    if (storage.isSorted()) {
        throw new PrestoException(NOT_SUPPORTED, format("Inserting into bucketed sorted tables is not supported. %s", tablePartitionDescription));
    }

    // verify skew info
    if (storage.isSkewed()) {
        throw new PrestoException(NOT_SUPPORTED, format("Inserting into bucketed tables with skew is not supported. %s", tablePartitionDescription));
    }
}

/**
 * Resolves {@code <database location>/<tableName>} for a new table, validating that the
 * database location exists and is a directory (existence checks are skipped on S3).
 */
public static Path getTableDefaultLocation(HdfsContext context, SemiTransactionalHiveMetastore metastore, HdfsEnvironment hdfsEnvironment, String schemaName, String tableName)
{
    Optional<String> location = getDatabase(metastore, schemaName).getLocation();
    if (!location.isPresent() || location.get().isEmpty()) {
        throw new PrestoException(HIVE_DATABASE_LOCATION_ERROR, format("Database '%s' location is not set", schemaName));
    }

    Path databasePath = new Path(location.get());
    if (!isS3FileSystem(context, hdfsEnvironment, databasePath)) {
        if (!pathExists(context, hdfsEnvironment, databasePath)) {
            throw new PrestoException(HIVE_DATABASE_LOCATION_ERROR, format("Database '%s' location does not exist: %s", schemaName, databasePath));
        }
        if (!isDirectory(context, hdfsEnvironment, databasePath)) {
            throw new PrestoException(HIVE_DATABASE_LOCATION_ERROR, format("Database '%s' location is not a directory: %s", schemaName, databasePath));
        }
    }

    return new Path(databasePath, tableName);
}

private static Database getDatabase(SemiTransactionalHiveMetastore metastore, String database)
{
    return metastore.getDatabase(database).orElseThrow(() -> new SchemaNotFoundException(database));
}

public static boolean pathExists(HdfsContext context, HdfsEnvironment hdfsEnvironment, Path path)
{
    try {
        return hdfsEnvironment.getFileSystem(context, path).exists(path);
    }
    catch (IOException e) {
        throw new PrestoException(HIVE_FILESYSTEM_ERROR, "Failed checking path: " + path, e);
    }
}

/** True when the raw (unwrapped) filesystem backing {@code path} is PrestoS3FileSystem. */
public static boolean isS3FileSystem(HdfsContext context, HdfsEnvironment hdfsEnvironment, Path path)
{
    try {
        return getRawFileSystem(hdfsEnvironment.getFileSystem(context, path)) instanceof PrestoS3FileSystem;
    }
    catch (IOException e) {
        throw new PrestoException(HIVE_FILESYSTEM_ERROR, "Failed checking path: " + path, e);
    }
}

public static boolean isViewFileSystem(HdfsContext context, HdfsEnvironment hdfsEnvironment, Path path)
{
    try {
        // Hadoop 1.x does not have the ViewFileSystem class
return getRawFileSystem(hdfsEnvironment.getFileSystem(context, path))
                .getClass().getName().equals("org.apache.hadoop.fs.viewfs.ViewFileSystem");
    }
    catch (IOException e) {
        throw new PrestoException(HIVE_FILESYSTEM_ERROR, "Failed checking path: " + path, e);
    }
}

// unwraps FilterFileSystem layers to reach the concrete filesystem implementation
private static FileSystem getRawFileSystem(FileSystem fileSystem)
{
    if (fileSystem instanceof FilterFileSystem) {
        return getRawFileSystem(((FilterFileSystem) fileSystem).getRawFileSystem());
    }
    return fileSystem;
}

private static boolean isDirectory(HdfsContext context, HdfsEnvironment hdfsEnvironment, Path path)
{
    try {
        return hdfsEnvironment.getFileSystem(context, path).isDirectory(path);
    }
    catch (IOException e) {
        throw new PrestoException(HIVE_FILESYSTEM_ERROR, "Failed checking path: " + path, e);
    }
}

/** Creates a unique (random UUID) staging directory on the same filesystem as {@code targetPath}. */
public static Path createTemporaryPath(HdfsContext context, HdfsEnvironment hdfsEnvironment, Path targetPath)
{
    // use a per-user temporary directory to avoid permission problems
    String temporaryPrefix = "/tmp/presto-" + context.getIdentity().getUser();

    // use relative temporary directory on ViewFS
    if (isViewFileSystem(context, hdfsEnvironment, targetPath)) {
        temporaryPrefix = ".hive-staging";
    }

    // create a temporary directory on the same filesystem
    Path temporaryRoot = new Path(targetPath, temporaryPrefix);
    Path temporaryPath = new Path(temporaryRoot, randomUUID().toString());

    createDirectory(context, hdfsEnvironment, temporaryPath);

    return temporaryPath;
}

/** Creates {@code path} (and parents) with 777 permissions, setting permissions explicitly afterwards. */
public static void createDirectory(HdfsContext context, HdfsEnvironment hdfsEnvironment, Path path)
{
    try {
        if (!hdfsEnvironment.getFileSystem(context, path).mkdirs(path, ALL_PERMISSIONS)) {
            throw new IOException("mkdirs returned false");
        }
    }
    catch (IOException e) {
        throw new PrestoException(HIVE_FILESYSTEM_ERROR, "Failed to create directory: " + path, e);
    }

    // explicitly set permission since the default umask overrides it on creation
    try {
        hdfsEnvironment.getFileSystem(context, path).setPermission(path, ALL_PERMISSIONS);
    }
    catch (IOException e) {
        throw
new PrestoException(HIVE_FILESYSTEM_ERROR, "Failed to set permission on directory: " + path, e);
    }
}

public static boolean isWritableType(HiveType hiveType)
{
    return isWritableType(hiveType.getTypeInfo());
}

// recursively checks that every leaf primitive of the Hive type is one the writer can produce
private static boolean isWritableType(TypeInfo typeInfo)
{
    switch (typeInfo.getCategory()) {
        case PRIMITIVE:
            PrimitiveCategory primitiveCategory = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
            return isWritablePrimitiveType(primitiveCategory);
        case MAP:
            MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
            return isWritableType(mapTypeInfo.getMapKeyTypeInfo()) && isWritableType(mapTypeInfo.getMapValueTypeInfo());
        case LIST:
            ListTypeInfo listTypeInfo = (ListTypeInfo) typeInfo;
            return isWritableType(listTypeInfo.getListElementTypeInfo());
        case STRUCT:
            StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
            return structTypeInfo.getAllStructFieldTypeInfos().stream().allMatch(HiveWriteUtils::isWritableType);
    }
    // UNION and any future categories are not writable
    return false;
}

private static boolean isWritablePrimitiveType(PrimitiveCategory primitiveCategory)
{
    switch (primitiveCategory) {
        case BOOLEAN:
        case LONG:
        case INT:
        case SHORT:
        case BYTE:
        case FLOAT:
        case DOUBLE:
        case STRING:
        case DATE:
        case TIMESTAMP:
        case BINARY:
        case DECIMAL:
        case VARCHAR:
        case CHAR:
            return true;
    }
    return false;
}

public static List<ObjectInspector> getRowColumnInspectors(List<Type> types)
{
    return types.stream()
            .map(HiveWriteUtils::getRowColumnInspector)
            .collect(toList());
}

/**
 * Maps a Presto {@link Type} to the writable ObjectInspector used for top-level
 * row columns (writable inspectors reuse Writable instances; see FieldSetter).
 */
public static ObjectInspector getRowColumnInspector(Type type)
{
    if (type.equals(BooleanType.BOOLEAN)) {
        return writableBooleanObjectInspector;
    }

    if (type.equals(BigintType.BIGINT)) {
        return writableLongObjectInspector;
    }

    if (type.equals(IntegerType.INTEGER)) {
        return writableIntObjectInspector;
    }

    if (type.equals(SmallintType.SMALLINT)) {
        return writableShortObjectInspector;
    }

    if (type.equals(TinyintType.TINYINT)) {
        return writableByteObjectInspector;
    }

    if (type.equals(RealType.REAL)) {
        return writableFloatObjectInspector;
    }

    if
(type.equals(DoubleType.DOUBLE)) {
        return writableDoubleObjectInspector;
    }

    if (type instanceof VarcharType) {
        VarcharType varcharType = (VarcharType) type;
        int varcharLength = varcharType.getLength();
        // VARCHAR columns with the length less than or equal to 65535 are supported natively by Hive
        if (varcharLength <= HiveVarchar.MAX_VARCHAR_LENGTH) {
            return getPrimitiveWritableObjectInspector(getVarcharTypeInfo(varcharLength));
        }
        // Unbounded VARCHAR is not supported by Hive.
        // Values for such columns must be stored as STRING in Hive
        else if (varcharLength == VarcharType.UNBOUNDED_LENGTH) {
            return writableStringObjectInspector;
        }
        // NOTE(review): a bounded VARCHAR longer than MAX_VARCHAR_LENGTH falls through
        // to the "unsupported type" exception below — confirm this is intentional
    }

    if (isCharType(type)) {
        CharType charType = (CharType) type;
        int charLength = charType.getLength();
        return getPrimitiveWritableObjectInspector(getCharTypeInfo(charLength));
    }

    if (type.equals(VarbinaryType.VARBINARY)) {
        return writableBinaryObjectInspector;
    }

    if (type.equals(DateType.DATE)) {
        return writableDateObjectInspector;
    }

    if (type.equals(TimestampType.TIMESTAMP)) {
        return writableTimestampObjectInspector;
    }

    if (type instanceof DecimalType) {
        DecimalType decimalType = (DecimalType) type;
        return getPrimitiveWritableObjectInspector(new DecimalTypeInfo(decimalType.getPrecision(), decimalType.getScale()));
    }

    if (isArrayType(type) || isMapType(type) || isRowType(type)) {
        // nested types use the standard (java) inspectors
        return getJavaObjectInspector(type);
    }

    throw new IllegalArgumentException("unsupported type: " + type);
}

/** Builds the {@link FieldSetter} that copies values of {@code type} from a Block into a Hive row object field. */
public static FieldSetter createFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, Type type)
{
    if (type.equals(BooleanType.BOOLEAN)) {
        return new BooleanFieldSetter(rowInspector, row, field);
    }

    if (type.equals(BigintType.BIGINT)) {
        return new BigintFieldBuilder(rowInspector, row, field);
    }

    if (type.equals(IntegerType.INTEGER)) {
        return new IntFieldSetter(rowInspector, row, field);
    }

    if (type.equals(SmallintType.SMALLINT)) {
        return new SmallintFieldSetter(rowInspector, row, field);
    }

    if (type.equals(TinyintType.TINYINT)) {
        return
new TinyintFieldSetter(rowInspector, row, field);
    }

    if (type.equals(RealType.REAL)) {
        return new FloatFieldSetter(rowInspector, row, field);
    }

    if (type.equals(DoubleType.DOUBLE)) {
        return new DoubleFieldSetter(rowInspector, row, field);
    }

    if (type instanceof VarcharType) {
        return new VarcharFieldSetter(rowInspector, row, field, type);
    }

    if (type instanceof CharType) {
        return new CharFieldSetter(rowInspector, row, field, type);
    }

    if (type.equals(VarbinaryType.VARBINARY)) {
        return new BinaryFieldSetter(rowInspector, row, field);
    }

    if (type.equals(DateType.DATE)) {
        return new DateFieldSetter(rowInspector, row, field);
    }

    if (type.equals(TimestampType.TIMESTAMP)) {
        return new TimestampFieldSetter(rowInspector, row, field);
    }

    if (type instanceof DecimalType) {
        DecimalType decimalType = (DecimalType) type;
        return new DecimalFieldSetter(rowInspector, row, field, decimalType);
    }

    if (isArrayType(type)) {
        return new ArrayFieldSetter(rowInspector, row, field, type.getTypeParameters().get(0));
    }

    if (isMapType(type)) {
        return new MapFieldSetter(rowInspector, row, field, type.getTypeParameters().get(0), type.getTypeParameters().get(1));
    }

    if (isRowType(type)) {
        return new RowFieldSetter(rowInspector, row, field, type.getTypeParameters());
    }

    throw new IllegalArgumentException("unsupported type: " + type);
}

/**
 * Copies the value at one Block position into a fixed field of a reusable Hive row object.
 * Primitive subclasses reuse a single Writable instance to avoid per-value allocation.
 */
public abstract static class FieldSetter
{
    protected final SettableStructObjectInspector rowInspector;
    protected final Object row;
    protected final StructField field;

    protected FieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
    {
        this.rowInspector = requireNonNull(rowInspector, "rowInspector is null");
        this.row = requireNonNull(row, "row is null");
        this.field = requireNonNull(field, "field is null");
    }

    public abstract void setField(Block block, int position);
}

private static class BooleanFieldSetter
        extends FieldSetter
{
    private final BooleanWritable value = new BooleanWritable();

    public BooleanFieldSetter(SettableStructObjectInspector
rowInspector, Object row, StructField field)
    {
        super(rowInspector, row, field);
    }

    @Override
    public void setField(Block block, int position)
    {
        value.set(BooleanType.BOOLEAN.getBoolean(block, position));
        rowInspector.setStructFieldData(row, field, value);
    }
}

// NOTE(review): named "Builder" unlike its sibling "...Setter" classes; kept as-is
private static class BigintFieldBuilder
        extends FieldSetter
{
    private final LongWritable value = new LongWritable();

    public BigintFieldBuilder(SettableStructObjectInspector rowInspector, Object row, StructField field)
    {
        super(rowInspector, row, field);
    }

    @Override
    public void setField(Block block, int position)
    {
        value.set(BigintType.BIGINT.getLong(block, position));
        rowInspector.setStructFieldData(row, field, value);
    }
}

private static class IntFieldSetter
        extends FieldSetter
{
    private final IntWritable value = new IntWritable();

    public IntFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
    {
        super(rowInspector, row, field);
    }

    @Override
    public void setField(Block block, int position)
    {
        value.set(toIntExact(IntegerType.INTEGER.getLong(block, position)));
        rowInspector.setStructFieldData(row, field, value);
    }
}

private static class SmallintFieldSetter
        extends FieldSetter
{
    private final ShortWritable value = new ShortWritable();

    public SmallintFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
    {
        super(rowInspector, row, field);
    }

    @Override
    public void setField(Block block, int position)
    {
        value.set(Shorts.checkedCast(SmallintType.SMALLINT.getLong(block, position)));
        rowInspector.setStructFieldData(row, field, value);
    }
}

private static class TinyintFieldSetter
        extends FieldSetter
{
    private final ByteWritable value = new ByteWritable();

    public TinyintFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
    {
        super(rowInspector, row, field);
    }

    @Override
    public void setField(Block block, int position)
    {
        value.set(SignedBytes.checkedCast(TinyintType.TINYINT.getLong(block, position)));
rowInspector.setStructFieldData(row, field, value);
    }
}

private static class DoubleFieldSetter
        extends FieldSetter
{
    private final DoubleWritable value = new DoubleWritable();

    public DoubleFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
    {
        super(rowInspector, row, field);
    }

    @Override
    public void setField(Block block, int position)
    {
        value.set(DoubleType.DOUBLE.getDouble(block, position));
        rowInspector.setStructFieldData(row, field, value);
    }
}

private static class FloatFieldSetter
        extends FieldSetter
{
    private final FloatWritable value = new FloatWritable();

    public FloatFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
    {
        super(rowInspector, row, field);
    }

    @Override
    public void setField(Block block, int position)
    {
        // REAL is stored as the int bit pattern of the float
        value.set(intBitsToFloat((int) RealType.REAL.getLong(block, position)));
        rowInspector.setStructFieldData(row, field, value);
    }
}

private static class VarcharFieldSetter
        extends FieldSetter
{
    private final Text value = new Text();
    private final Type type;

    public VarcharFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, Type type)
    {
        super(rowInspector, row, field);
        this.type = type;
    }

    @Override
    public void setField(Block block, int position)
    {
        value.set(type.getSlice(block, position).getBytes());
        rowInspector.setStructFieldData(row, field, value);
    }
}

private static class CharFieldSetter
        extends FieldSetter
{
    private final Text value = new Text();
    private final Type type;

    public CharFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, Type type)
    {
        super(rowInspector, row, field);
        this.type = type;
    }

    @Override
    public void setField(Block block, int position)
    {
        value.set(type.getSlice(block, position).getBytes());
        rowInspector.setStructFieldData(row, field, value);
    }
}

private static class BinaryFieldSetter
        extends FieldSetter
{
    private final BytesWritable value = new BytesWritable();

    public
BinaryFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
    {
        super(rowInspector, row, field);
    }

    @Override
    public void setField(Block block, int position)
    {
        byte[] bytes = VarbinaryType.VARBINARY.getSlice(block, position).getBytes();
        value.set(bytes, 0, bytes.length);
        rowInspector.setStructFieldData(row, field, value);
    }
}

private static class DateFieldSetter
        extends FieldSetter
{
    private final DateWritable value = new DateWritable();

    public DateFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
    {
        super(rowInspector, row, field);
    }

    @Override
    public void setField(Block block, int position)
    {
        // DATE is stored as days since the epoch
        value.set(toIntExact(DateType.DATE.getLong(block, position)));
        rowInspector.setStructFieldData(row, field, value);
    }
}

private static class TimestampFieldSetter
        extends FieldSetter
{
    private final TimestampWritable value = new TimestampWritable();

    public TimestampFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
    {
        super(rowInspector, row, field);
    }

    @Override
    public void setField(Block block, int position)
    {
        long millisUtc = TimestampType.TIMESTAMP.getLong(block, position);
        value.setTime(millisUtc);
        rowInspector.setStructFieldData(row, field, value);
    }
}

private static class DecimalFieldSetter
        extends FieldSetter
{
    private final HiveDecimalWritable value = new HiveDecimalWritable();
    private final DecimalType decimalType;

    public DecimalFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, DecimalType decimalType)
    {
        super(rowInspector, row, field);
        this.decimalType = decimalType;
    }

    @Override
    public void setField(Block block, int position)
    {
        value.set(getHiveDecimal(decimalType, block, position));
        rowInspector.setStructFieldData(row, field, value);
    }
}

/** Decodes the unscaled decimal value from the Block and reattaches the type's scale. */
private static HiveDecimal getHiveDecimal(DecimalType decimalType, Block block, int position)
{
    BigInteger unscaledValue;
    if (decimalType.isShort()) {
        // short decimals store the unscaled value directly as a long
        unscaledValue =
BigInteger.valueOf(decimalType.getLong(block, position));
    }
    else {
        // long decimals store the unscaled value as a Slice
        unscaledValue = Decimals.decodeUnscaledValue(decimalType.getSlice(block, position));
    }
    return HiveDecimal.create(unscaledValue, decimalType.getScale());
}

private static class ArrayFieldSetter
        extends FieldSetter
{
    private final Type elementType;

    public ArrayFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, Type elementType)
    {
        super(rowInspector, row, field);
        this.elementType = requireNonNull(elementType, "elementType is null");
    }

    @Override
    public void setField(Block block, int position)
    {
        Block arrayBlock = block.getObject(position, Block.class);

        List<Object> list = new ArrayList<>(arrayBlock.getPositionCount());
        for (int i = 0; i < arrayBlock.getPositionCount(); i++) {
            Object element = getField(elementType, arrayBlock, i);
            list.add(element);
        }

        rowInspector.setStructFieldData(row, field, list);
    }
}

private static class MapFieldSetter
        extends FieldSetter
{
    private final Type keyType;
    private final Type valueType;

    public MapFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, Type keyType, Type valueType)
    {
        super(rowInspector, row, field);
        this.keyType = requireNonNull(keyType, "keyType is null");
        this.valueType = requireNonNull(valueType, "valueType is null");
    }

    @Override
    public void setField(Block block, int position)
    {
        Block mapBlock = block.getObject(position, Block.class);
        Map<Object, Object> map = new HashMap<>(mapBlock.getPositionCount() * 2);
        // map blocks interleave key/value pairs, hence the step of 2
        for (int i = 0; i < mapBlock.getPositionCount(); i += 2) {
            Object key = getField(keyType, mapBlock, i);
            Object value = getField(valueType, mapBlock, i + 1);
            map.put(key, value);
        }

        rowInspector.setStructFieldData(row, field, map);
    }
}

private static class RowFieldSetter
        extends FieldSetter
{
    private final List<Type> fieldTypes;

    public RowFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, List<Type> fieldTypes)
    {
        super(rowInspector, row, field);
this.fieldTypes = ImmutableList.copyOf(fieldTypes);
    }

    @Override
    public void setField(Block block, int position)
    {
        Block rowBlock = block.getObject(position, Block.class);

        // TODO reuse row object and use FieldSetters, like we do at the top level
        // Ideally, we'd use the same recursive structure starting from the top, but
        // this requires modeling row types in the same way we model table rows
        // (multiple blocks vs all fields packed in a single block)
        List<Object> value = new ArrayList<>(fieldTypes.size());
        for (int i = 0; i < fieldTypes.size(); i++) {
            Object element = getField(fieldTypes.get(i), rowBlock, i);
            value.add(element);
        }

        rowInspector.setStructFieldData(row, field, value);
    }
}
}
gh351135612/presto
presto-hive/src/main/java/com/facebook/presto/hive/HiveWriteUtils.java
Java
apache-2.0
42,741
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.siyeh.ig.logging; import com.intellij.codeInspection.CommonQuickFixBundle; import com.intellij.codeInspection.ProblemDescriptor; import com.intellij.codeInspection.ui.ListTable; import com.intellij.codeInspection.ui.ListWrappingTableModel; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.InvalidDataException; import com.intellij.openapi.util.WriteExternalException; import com.intellij.psi.*; import com.intellij.psi.util.PsiUtil; import com.intellij.util.xmlb.Accessor; import com.intellij.util.xmlb.SerializationFilterBase; import com.intellij.util.xmlb.XmlSerializer; import com.siyeh.InspectionGadgetsBundle; import com.siyeh.ig.BaseInspection; import com.siyeh.ig.BaseInspectionVisitor; import com.siyeh.ig.InspectionGadgetsFix; import com.siyeh.ig.PsiReplacementUtil; import com.siyeh.ig.psiutils.ClassUtils; import com.siyeh.ig.psiutils.CommentTracker; import com.siyeh.ig.ui.UiUtils; import org.jdom.Element; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.util.ArrayList; import java.util.Arrays; import java.util.List; public class LoggerInitializedWithForeignClassInspection extends BaseInspection { @NonNls private static final String DEFAULT_FACTORY_CLASS_NAMES = // Log4J 1 "org.apache.log4j.Logger," + // SLF4J "org.slf4j.LoggerFactory," + // Apache Commons Logging "org.apache.commons.logging.LogFactory," + // Java Util Logging "java.util.logging.Logger," + // Log4J 2 "org.apache.logging.log4j.LogManager"; @NonNls private static final String DEFAULT_FACTORY_METHOD_NAMES = //Log4J 1 "getLogger," + // SLF4J "getLogger," + // Apache Commons Logging "getLog," + // Java Util Logging "getLogger," + // Log4J 2 "getLogger"; protected final List<String> loggerFactoryClassNames = new 
ArrayList<>(); protected final List<String> loggerFactoryMethodNames = new ArrayList<>(); @SuppressWarnings("PublicField") public String loggerClassName = DEFAULT_FACTORY_CLASS_NAMES; @SuppressWarnings("PublicField") public @NonNls String loggerFactoryMethodName = DEFAULT_FACTORY_METHOD_NAMES; { parseString(loggerClassName, loggerFactoryClassNames); parseString(loggerFactoryMethodName, loggerFactoryMethodNames); } @Override public JComponent createOptionsPanel() { final ListTable table = new ListTable( new ListWrappingTableModel(Arrays.asList(loggerFactoryClassNames, loggerFactoryMethodNames), InspectionGadgetsBundle.message("logger.factory.class.name"), InspectionGadgetsBundle.message("logger.factory.method.name"))); final String title = InspectionGadgetsBundle.message("logger.initialized.with.foreign.options.title"); return UiUtils.createAddRemoveTreeClassChooserPanel(table, title); } @Override @NotNull protected String buildErrorString(Object... infos) { return InspectionGadgetsBundle.message("logger.initialized.with.foreign.class.problem.descriptor"); } @Override @Nullable protected InspectionGadgetsFix buildFix(Object... 
infos) { return new LoggerInitializedWithForeignClassFix((String)infos[0]); } @Override public BaseInspectionVisitor buildVisitor() { return new LoggerInitializedWithForeignClassVisitor(); } @Override public void readSettings(@NotNull Element element) throws InvalidDataException { super.readSettings(element); parseString(loggerClassName, loggerFactoryClassNames); parseString(loggerFactoryMethodName, loggerFactoryMethodNames); if (loggerFactoryClassNames.size() != loggerFactoryMethodNames.size() || loggerFactoryClassNames.isEmpty()) { parseString(DEFAULT_FACTORY_CLASS_NAMES, loggerFactoryClassNames); parseString(DEFAULT_FACTORY_METHOD_NAMES, loggerFactoryMethodNames); } } @Override public void writeSettings(@NotNull Element element) throws WriteExternalException { loggerClassName = formatString(loggerFactoryClassNames); loggerFactoryMethodName = formatString(loggerFactoryMethodNames); if (loggerFactoryMethodName.equals(DEFAULT_FACTORY_METHOD_NAMES) && loggerClassName.equals(DEFAULT_FACTORY_CLASS_NAMES)) { // to prevent changing inspection profile with new default, which is mistakenly always written because of bug in serialization below. 
loggerFactoryMethodName = "getLogger," + "getLogger," + "getLog," + "getLogger"; // these broken settings are restored correctly in readSettings() } XmlSerializer.serializeInto(this, element, new SerializationFilterBase() { @Override protected boolean accepts(@NotNull Accessor accessor, @NotNull Object bean, @Nullable Object beanValue) { final @NonNls String factoryName = accessor.getName(); if ("loggerClassName".equals(factoryName) && DEFAULT_FACTORY_CLASS_NAMES.equals(beanValue)) { return false; } if ("loggerFactoryMethodNames".equals(factoryName) && DEFAULT_FACTORY_METHOD_NAMES.equals(beanValue)) { return false; } return true; } }); } private static final class LoggerInitializedWithForeignClassFix extends InspectionGadgetsFix { private final String newClassName; private LoggerInitializedWithForeignClassFix(String newClassName) { this.newClassName = newClassName; } @Override @NotNull public String getName() { return CommonQuickFixBundle.message("fix.replace.with.x", newClassName+".class"); } @NotNull @Override public String getFamilyName() { return InspectionGadgetsBundle.message("logger.initialized.with.foreign.class.fix.family.name"); } @Override protected void doFix(Project project, ProblemDescriptor descriptor) { final PsiElement element = descriptor.getPsiElement(); if (!(element instanceof PsiClassObjectAccessExpression)) { return; } final PsiClassObjectAccessExpression classObjectAccessExpression = (PsiClassObjectAccessExpression)element; PsiReplacementUtil.replaceExpression(classObjectAccessExpression, newClassName + ".class", new CommentTracker()); } } private class LoggerInitializedWithForeignClassVisitor extends BaseInspectionVisitor { @Override public void visitClassObjectAccessExpression(PsiClassObjectAccessExpression expression) { super.visitClassObjectAccessExpression(expression); PsiElement parent = expression.getParent(); if (parent instanceof PsiReferenceExpression) { final PsiReferenceExpression referenceExpression = 
(PsiReferenceExpression)parent; if (!expression.equals(referenceExpression.getQualifierExpression())) { return; } @NonNls final String name = referenceExpression.getReferenceName(); if (!"getName".equals(name)) { return; } final PsiElement grandParent = referenceExpression.getParent(); if (!(grandParent instanceof PsiMethodCallExpression)) { return; } final PsiMethodCallExpression methodCallExpression = (PsiMethodCallExpression)grandParent; final PsiExpressionList list = methodCallExpression.getArgumentList(); if (!list.isEmpty()) { return; } parent = methodCallExpression.getParent(); } if (!(parent instanceof PsiExpressionList)) { return; } final PsiElement grandParent = parent.getParent(); if (!(grandParent instanceof PsiMethodCallExpression)) { return; } final PsiMethodCallExpression methodCallExpression = (PsiMethodCallExpression)grandParent; final PsiExpressionList argumentList = methodCallExpression.getArgumentList(); final PsiExpression[] expressions = argumentList.getExpressions(); if (expressions.length != 1) { return; } PsiClass containingClass = ClassUtils.getContainingClass(expression); while (containingClass instanceof PsiAnonymousClass) { containingClass = ClassUtils.getContainingClass(containingClass); } if (containingClass == null) { return; } final String containingClassName = containingClass.getName(); if (containingClassName == null) { return; } final PsiMethod method = methodCallExpression.resolveMethod(); if (method == null) { return; } final PsiClass aClass = method.getContainingClass(); if (aClass == null) { return; } final String className = aClass.getQualifiedName(); final int index = loggerFactoryClassNames.indexOf(className); if (index < 0) { return; } final PsiReferenceExpression methodExpression = methodCallExpression.getMethodExpression(); final String referenceName = methodExpression.getReferenceName(); final String loggerFactoryMethodName = loggerFactoryMethodNames.get(index); if (!loggerFactoryMethodName.equals(referenceName)) { 
return; } final PsiTypeElement operand = expression.getOperand(); final PsiClass initializerClass = PsiUtil.resolveClassInClassTypeOnly(operand.getType()); if (initializerClass == null) { return; } if (containingClass.equals(initializerClass)) { return; } registerError(expression, containingClassName); } } }
dahlstrom-g/intellij-community
plugins/InspectionGadgets/src/com/siyeh/ig/logging/LoggerInitializedWithForeignClassInspection.java
Java
apache-2.0
9,735
#pragma once #include "indexer/index.hpp" #include "geometry/point2d.hpp" #include "geometry/rect2d.hpp" #include "geometry/tree4d.hpp" #include "std/set.hpp" class Index; namespace search { struct LocalityItem { m2::RectD m_rect; string m_name; uint32_t m_population; typedef uint32_t ID; ID m_id; LocalityItem(m2::RectD const & rect, uint32_t population, ID id, string const & name); m2::RectD const & GetLimitRect() const { return m_rect; } }; class LocalityFinder { struct Cache { m4::Tree<LocalityItem> m_tree; set<LocalityItem::ID> m_loaded; mutable uint32_t m_usage; m2::RectD m_rect; Cache() : m_usage(0) {} void Clear(); void GetLocality(m2::PointD const & pt, string & name) const; }; public: LocalityFinder(Index const * pIndex); void SetLanguage(int8_t lang) { if (m_lang != lang) { ClearCacheAll(); m_lang = lang; } } void SetViewportByIndex(m2::RectD const & viewport, size_t idx); /// Set new viewport for the reserved slot only if it's no a part of the previous one. void SetReservedViewportIfNeeded(m2::RectD const & viewport); /// Check for localities in pre-cached viewports only. void GetLocalityInViewport(m2::PointD const & pt, string & name) const; /// Check for localities in all Index and make new cache if needed. void GetLocalityCreateCache(m2::PointD const & pt, string & name); void ClearCacheAll(); void ClearCache(size_t idx); protected: void CorrectMinimalRect(m2::RectD & rect) const; void RecreateCache(Cache & cache, m2::RectD rect) const; private: friend class DoLoader; Index const * m_pIndex; enum { MAX_VIEWPORT_COUNT = 3 }; Cache m_cache[MAX_VIEWPORT_COUNT]; int8_t m_lang; }; } // namespace search
programming086/omim
search/locality_finder.hpp
C++
apache-2.0
1,775
class ErrorsController < ApplicationController def error_404 respond_to do |format| format.html { render template: 'errors/error_404', layout: 'layouts/application', status: 404 } format.all { render nothing: true, status: 404 } end end def error_500 end end
mariaro/cypress
app/controllers/errors_controller.rb
Ruby
apache-2.0
287
# Cloud Foundry Java Buildpack # Copyright 2013-2017 the original author or authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. require 'spec_helper' require 'component_helper' require 'java_buildpack/framework/introscope_agent' describe JavaBuildpack::Framework::IntroscopeAgent do include_context 'component_helper' let(:configuration) do { 'default_agent_name' => "$(expr \"$VCAP_APPLICATION\" : '.*application_name[\": ]*\\([A-Za-z0-9_-]*\\).*')" } end let(:vcap_application) do { 'application_name' => 'test-application-name', 'application_uris' => %w[test-application-uri-0 test-application-uri-1] } end it 'does not detect without introscope-n/a service' do expect(component.detect).to be_nil end context do let(:credentials) { {} } before do allow(services).to receive(:one_service?).with(/introscope/, 'host-name').and_return(true) allow(services).to receive(:find_service).and_return('credentials' => credentials) end it 'detects with introscope-n/a service' do expect(component.detect).to eq("introscope-agent=#{version}") end it 'expands Introscope agent zip', cache_fixture: 'stub-introscope-agent.tar' do component.compile expect(sandbox + 'Agent.jar').to exist end it 'raises error if host-name not specified' do expect { component.release }.to raise_error(/'host-name' credential must be set/) end context do let(:credentials) { { 'host-name' => 'test-host-name' } } it 'updates JAVA_OPTS' do component.release expect(java_opts).to 
include('-javaagent:$PWD/.java-buildpack/introscope_agent/Agent.jar') expect(java_opts).to include('-Dcom.wily.introscope.agentProfile=$PWD/.java-buildpack/introscope_agent/core' \ '/config/IntroscopeAgent.profile') expect(java_opts).to include('-Dintroscope.agent.defaultProcessName=test-application-name') expect(java_opts).to include('-Dintroscope.agent.hostName=test-application-uri-0') expect(java_opts).to include('-Dintroscope.agent.enterprisemanager.transport.tcp.host.DEFAULT=test-host-name') expect(java_opts).to include('-Dcom.wily.introscope.agent.agentName=$(expr "$VCAP_APPLICATION" : ' \ '\'.*application_name[": ]*\\([A-Za-z0-9_-]*\\).*\')') end context do let(:credentials) { super().merge 'agent-name' => 'another-test-agent-name' } it 'adds agent-name from credentials to JAVA_OPTS if specified' do component.release expect(java_opts).to include('-Dcom.wily.introscope.agent.agentName=another-test-agent-name') end end context do let(:credentials) { super().merge 'port' => 'test-port' } it 'adds port from credentials to JAVA_OPTS if specified' do component.release expect(java_opts).to include('-Dintroscope.agent.enterprisemanager.transport.tcp.port.DEFAULT=test-port') end end context do let(:credentials) { super().merge 'ssl' => 'true' } it 'adds ssl socket factory from credentials to JAVA_OPTS if specified' do component.release expect(java_opts).to include('-Dintroscope.agent.enterprisemanager.transport.tcp.socketfactory.DEFAULT=' \ 'com.wily.isengard.postofficehub.link.net.SSLSocketFactory') end end end end end
afalak/java-buildpack
spec/java_buildpack/framework/introscope_agent_spec.rb
Ruby
apache-2.0
4,002
// ---------------------------------------------------------------------------------- // // Copyright Microsoft Corporation // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // ---------------------------------------------------------------------------------- using Microsoft.Azure; using Microsoft.Azure.Commands.Common.Authentication; using Microsoft.Azure.Commands.Common.Authentication.Models; using Microsoft.Rest; using System; using System.Security; namespace Microsoft.WindowsAzure.Commands.Common.Test.Mocks { public class MockTokenAuthenticationFactory : IAuthenticationFactory { public IAccessToken Token { get; set; } public Func<AzureAccount, AzureEnvironment, string, IAccessToken> TokenProvider { get; set; } public MockTokenAuthenticationFactory() { Token = new MockAccessToken { UserId = "Test", LoginType = LoginType.OrgId, AccessToken = "abc" }; TokenProvider = (account, environment, tenant) => Token = new MockAccessToken { UserId = account.Id, LoginType = LoginType.OrgId, AccessToken = Token.AccessToken }; } public MockTokenAuthenticationFactory(string userId, string accessToken) { Token = new MockAccessToken { UserId = userId, LoginType = LoginType.OrgId, AccessToken = accessToken, }; TokenProvider = ((account, environment, tenant) => Token); } public MockTokenAuthenticationFactory(string userId, string accessToken, string tenantId) { Token = new MockAccessToken { UserId = userId, LoginType = LoginType.OrgId, AccessToken = accessToken, TenantId = tenantId }; 
TokenProvider = ((account, environment, tenant) => Token); } public IAccessToken Authenticate( AzureAccount account, AzureEnvironment environment, string tenant, SecureString password, ShowDialog promptBehavior, IdentityModel.Clients.ActiveDirectory.TokenCache tokenCache, AzureEnvironment.Endpoint resourceId = AzureEnvironment.Endpoint.ActiveDirectoryServiceEndpointResourceId) { if (account.Id == null) { account.Id = "test"; } if (TokenProvider == null) { return new MockAccessToken() { AccessToken = account.Id, LoginType = LoginType.OrgId, UserId = account.Id }; } else { return TokenProvider(account, environment, tenant); } } public IAccessToken Authenticate( AzureAccount account, AzureEnvironment environment, string tenant, SecureString password, ShowDialog promptBehavior, AzureEnvironment.Endpoint resourceId = AzureEnvironment.Endpoint.ActiveDirectoryServiceEndpointResourceId) { return Authenticate(account, environment, tenant, password, promptBehavior, AzureSession.TokenCache, resourceId); } public SubscriptionCloudCredentials GetSubscriptionCloudCredentials(AzureContext context) { return new AccessTokenCredential(context.Subscription.Id, Token); } public Microsoft.Rest.ServiceClientCredentials GetServiceClientCredentials(AzureContext context) { return new Microsoft.Rest.TokenCredentials(Token.AccessToken); } public SubscriptionCloudCredentials GetSubscriptionCloudCredentials(AzureContext context, AzureEnvironment.Endpoint targetEndpoint) { return new AccessTokenCredential(context.Subscription.Id, Token); } public ServiceClientCredentials GetServiceClientCredentials(AzureContext context, AzureEnvironment.Endpoint targetEndpoint) { throw new NotImplementedException(); } } }
hovsepm/azure-powershell
src/ResourceManager/Common/Commands.ScenarioTests.ResourceManager.Common/Mocks/MockTokenAuthenticationFactory.cs
C#
apache-2.0
4,798
/* * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.client.spi; import com.hazelcast.core.Partition; import com.hazelcast.nio.Address; import com.hazelcast.nio.serialization.Data; /** * Partition service for Hazelcast clients. * * Allows to retrieve information about the partition count, the partition owner or the partitionId of a key. */ public interface ClientPartitionService { Address getPartitionOwner(int partitionId); int getPartitionId(Data key); int getPartitionId(Object key); int getPartitionCount(); Partition getPartition(int partitionId); }
tombujok/hazelcast
hazelcast-client/src/main/java/com/hazelcast/client/spi/ClientPartitionService.java
Java
apache-2.0
1,185
/** * @file * <a href="https://travis-ci.org/Xotic750/has-to-string-tag-x" * title="Travis status"> * <img * src="https://travis-ci.org/Xotic750/has-to-string-tag-x.svg?branch=master" * alt="Travis status" height="18"> * </a> * <a href="https://david-dm.org/Xotic750/has-to-string-tag-x" * title="Dependency status"> * <img src="https://david-dm.org/Xotic750/has-to-string-tag-x.svg" * alt="Dependency status" height="18"/> * </a> * <a * href="https://david-dm.org/Xotic750/has-to-string-tag-x#info=devDependencies" * title="devDependency status"> * <img src="https://david-dm.org/Xotic750/has-to-string-tag-x/dev-status.svg" * alt="devDependency status" height="18"/> * </a> * <a href="https://badge.fury.io/js/has-to-string-tag-x" title="npm version"> * <img src="https://badge.fury.io/js/has-to-string-tag-x.svg" * alt="npm version" height="18"> * </a> * * hasToStringTag tests if @@toStringTag is supported. `true` if supported. * * <h2>ECMAScript compatibility shims for legacy JavaScript engines</h2> * `es5-shim.js` monkey-patches a JavaScript context to contain all EcmaScript 5 * methods that can be faithfully emulated with a legacy JavaScript engine. * * `es5-sham.js` monkey-patches other ES5 methods as closely as possible. * For these methods, as closely as possible to ES5 is not very close. * Many of these shams are intended only to allow code to be written to ES5 * without causing run-time errors in older engines. In many cases, * this means that these shams cause many ES5 methods to silently fail. * Decide carefully whether this is what you want. Note: es5-sham.js requires * es5-shim.js to be able to work properly. * * `json3.js` monkey-patches the EcmaScript 5 JSON implimentation faithfully. * * `es6.shim.js` provides compatibility shims so that legacy JavaScript engines * behave as closely as possible to ECMAScript 6 (Harmony). 
* * @version 1.1.0 * @author Xotic750 <Xotic750@gmail.com> * @copyright Xotic750 * @license {@link <https://opensource.org/licenses/MIT> MIT} * @module has-to-string-tag-x */ /* jslint maxlen:80, es6:true, white:true */ /* jshint bitwise:true, camelcase:true, curly:true, eqeqeq:true, forin:true, freeze:true, futurehostile:true, latedef:true, newcap:true, nocomma:true, nonbsp:true, singleGroups:true, strict:true, undef:true, unused:true, es3:false, esnext:true, plusplus:true, maxparams:1, maxdepth:1, maxstatements:3, maxcomplexity:2 */ /* eslint strict: 1, max-statements: 1 */ /* global module */ ;(function () { // eslint-disable-line no-extra-semi 'use strict'; /** * Indicates if `Symbol.toStringTag`exists and is the correct type. * `true`, if it exists and is the correct type, otherwise `false`. * * @type boolean */ module.exports = require('has-symbol-support-x') && typeof Symbol.toStringTag === 'symbol'; }());
BluelabUnifor/sunny-messeger
node_modules/watson-developer-cloud/node_modules/buffer-from/node_modules/is-array-buffer-x/node_modules/has-to-string-tag-x/index.js
JavaScript
apache-2.0
2,875
#pragma once #include <Register/Utility.hpp> namespace Kvasir { //Serial Peripheral Interface namespace Spi1Mcr{ ///<Module Configuration Register using Addr = Register::Address<0x4002d000,0x02e000fe,0x00000000,unsigned>; ///Halt enum class HaltVal { v0=0x00000000, ///<Start transfers. v1=0x00000001, ///<Stop transfers. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(0,0),Register::ReadWriteAccess,HaltVal> halt{}; namespace HaltValC{ constexpr Register::FieldValue<decltype(halt)::Type,HaltVal::v0> v0{}; constexpr Register::FieldValue<decltype(halt)::Type,HaltVal::v1> v1{}; } ///Sample Point enum class SmplptVal { v00=0x00000000, ///<0 protocol clock cycles between SCK edge and SIN sample v01=0x00000001, ///<1 protocol clock cycle between SCK edge and SIN sample v10=0x00000002, ///<2 protocol clock cycles between SCK edge and SIN sample }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(9,8),Register::ReadWriteAccess,SmplptVal> smplPt{}; namespace SmplptValC{ constexpr Register::FieldValue<decltype(smplPt)::Type,SmplptVal::v00> v00{}; constexpr Register::FieldValue<decltype(smplPt)::Type,SmplptVal::v01> v01{}; constexpr Register::FieldValue<decltype(smplPt)::Type,SmplptVal::v10> v10{}; } ///CLR_RXF enum class ClrrxfVal { v0=0x00000000, ///<Do not clear the RX FIFO counter. v1=0x00000001, ///<Clear the RX FIFO counter. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(10,10),Register::Access<Register::AccessType::writeOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,ClrrxfVal> clrRxf{}; namespace ClrrxfValC{ constexpr Register::FieldValue<decltype(clrRxf)::Type,ClrrxfVal::v0> v0{}; constexpr Register::FieldValue<decltype(clrRxf)::Type,ClrrxfVal::v1> v1{}; } ///Clear TX FIFO enum class ClrtxfVal { v0=0x00000000, ///<Do not clear the TX FIFO counter. v1=0x00000001, ///<Clear the TX FIFO counter. 
}; constexpr Register::FieldLocation<Addr,Register::maskFromRange(11,11),Register::Access<Register::AccessType::writeOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,ClrtxfVal> clrTxf{}; namespace ClrtxfValC{ constexpr Register::FieldValue<decltype(clrTxf)::Type,ClrtxfVal::v0> v0{}; constexpr Register::FieldValue<decltype(clrTxf)::Type,ClrtxfVal::v1> v1{}; } ///Disable Receive FIFO enum class DisrxfVal { v0=0x00000000, ///<RX FIFO is enabled. v1=0x00000001, ///<RX FIFO is disabled. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(12,12),Register::ReadWriteAccess,DisrxfVal> disRxf{}; namespace DisrxfValC{ constexpr Register::FieldValue<decltype(disRxf)::Type,DisrxfVal::v0> v0{}; constexpr Register::FieldValue<decltype(disRxf)::Type,DisrxfVal::v1> v1{}; } ///Disable Transmit FIFO enum class DistxfVal { v0=0x00000000, ///<TX FIFO is enabled. v1=0x00000001, ///<TX FIFO is disabled. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(13,13),Register::ReadWriteAccess,DistxfVal> disTxf{}; namespace DistxfValC{ constexpr Register::FieldValue<decltype(disTxf)::Type,DistxfVal::v0> v0{}; constexpr Register::FieldValue<decltype(disTxf)::Type,DistxfVal::v1> v1{}; } ///Module Disable enum class MdisVal { v0=0x00000000, ///<Enables the module clocks. v1=0x00000001, ///<Allows external logic to disable the module clocks. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(14,14),Register::ReadWriteAccess,MdisVal> mdis{}; namespace MdisValC{ constexpr Register::FieldValue<decltype(mdis)::Type,MdisVal::v0> v0{}; constexpr Register::FieldValue<decltype(mdis)::Type,MdisVal::v1> v1{}; } ///Doze Enable enum class DozeVal { v0=0x00000000, ///<Doze mode has no effect on the module. v1=0x00000001, ///<Doze mode disables the module. 
}; constexpr Register::FieldLocation<Addr,Register::maskFromRange(15,15),Register::ReadWriteAccess,DozeVal> doze{}; namespace DozeValC{ constexpr Register::FieldValue<decltype(doze)::Type,DozeVal::v0> v0{}; constexpr Register::FieldValue<decltype(doze)::Type,DozeVal::v1> v1{}; } ///Peripheral Chip Select x Inactive State constexpr Register::FieldLocation<Addr,Register::maskFromRange(20,16),Register::ReadWriteAccess,unsigned> pcsis{}; ///Receive FIFO Overflow Overwrite Enable enum class RooeVal { v0=0x00000000, ///<Incoming data is ignored. v1=0x00000001, ///<Incoming data is shifted into the shift register. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(24,24),Register::ReadWriteAccess,RooeVal> rooe{}; namespace RooeValC{ constexpr Register::FieldValue<decltype(rooe)::Type,RooeVal::v0> v0{}; constexpr Register::FieldValue<decltype(rooe)::Type,RooeVal::v1> v1{}; } ///Modified Timing Format Enable enum class MtfeVal { v0=0x00000000, ///<Modified SPI transfer format disabled. v1=0x00000001, ///<Modified SPI transfer format enabled. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(26,26),Register::ReadWriteAccess,MtfeVal> mtfe{}; namespace MtfeValC{ constexpr Register::FieldValue<decltype(mtfe)::Type,MtfeVal::v0> v0{}; constexpr Register::FieldValue<decltype(mtfe)::Type,MtfeVal::v1> v1{}; } ///Freeze enum class FrzVal { v0=0x00000000, ///<Do not halt serial transfers in Debug mode. v1=0x00000001, ///<Halt serial transfers in Debug mode. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(27,27),Register::ReadWriteAccess,FrzVal> frz{}; namespace FrzValC{ constexpr Register::FieldValue<decltype(frz)::Type,FrzVal::v0> v0{}; constexpr Register::FieldValue<decltype(frz)::Type,FrzVal::v1> v1{}; } ///SPI Configuration. 
enum class DconfVal { v00=0x00000000, ///<SPI }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(29,28),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,DconfVal> dconf{}; namespace DconfValC{ constexpr Register::FieldValue<decltype(dconf)::Type,DconfVal::v00> v00{}; } ///Continuous SCK Enable enum class ContsckeVal { v0=0x00000000, ///<Continuous SCK disabled. v1=0x00000001, ///<Continuous SCK enabled. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(30,30),Register::ReadWriteAccess,ContsckeVal> contScke{}; namespace ContsckeValC{ constexpr Register::FieldValue<decltype(contScke)::Type,ContsckeVal::v0> v0{}; constexpr Register::FieldValue<decltype(contScke)::Type,ContsckeVal::v1> v1{}; } ///Master/Slave Mode Select enum class MstrVal { v0=0x00000000, ///<Enables Slave mode v1=0x00000001, ///<Enables Master mode }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,31),Register::ReadWriteAccess,MstrVal> mstr{}; namespace MstrValC{ constexpr Register::FieldValue<decltype(mstr)::Type,MstrVal::v0> v0{}; constexpr Register::FieldValue<decltype(mstr)::Type,MstrVal::v1> v1{}; } } namespace Spi1Tcr{ ///<Transfer Count Register using Addr = Register::Address<0x4002d008,0x0000ffff,0x00000000,unsigned>; ///SPI Transfer Counter constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,16),Register::ReadWriteAccess,unsigned> spiTcnt{}; } namespace Spi1CtarSlave{ ///<Clock and Transfer Attributes Register (In Slave Mode) using Addr = Register::Address<0x4002d00c,0x01ffffff,0x00000000,unsigned>; ///Clock Phase enum class CphaVal { v0=0x00000000, ///<Data is captured on the leading edge of SCK and changed on the following edge. v1=0x00000001, ///<Data is changed on the leading edge of SCK and captured on the following edge. 
}; constexpr Register::FieldLocation<Addr,Register::maskFromRange(25,25),Register::ReadWriteAccess,CphaVal> cpha{}; namespace CphaValC{ constexpr Register::FieldValue<decltype(cpha)::Type,CphaVal::v0> v0{}; constexpr Register::FieldValue<decltype(cpha)::Type,CphaVal::v1> v1{}; } ///Clock Polarity enum class CpolVal { v0=0x00000000, ///<The inactive state value of SCK is low. v1=0x00000001, ///<The inactive state value of SCK is high. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(26,26),Register::ReadWriteAccess,CpolVal> cpol{}; namespace CpolValC{ constexpr Register::FieldValue<decltype(cpol)::Type,CpolVal::v0> v0{}; constexpr Register::FieldValue<decltype(cpol)::Type,CpolVal::v1> v1{}; } ///Frame Size constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,27),Register::ReadWriteAccess,unsigned> fmsz{}; } namespace Spi1Sr{ ///<Status Register using Addr = Register::Address<0x4002d02c,0x25f50000,0x00000000,unsigned>; ///Pop Next Pointer constexpr Register::FieldLocation<Addr,Register::maskFromRange(3,0),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> popnxtptr{}; ///RX FIFO Counter constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,4),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> rxctr{}; ///Transmit Next Pointer constexpr Register::FieldLocation<Addr,Register::maskFromRange(11,8),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> txnxtptr{}; ///TX FIFO Counter constexpr Register::FieldLocation<Addr,Register::maskFromRange(15,12),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> txctr{}; ///Receive FIFO Drain Flag enum class RfdfVal { v0=0x00000000, ///<RX FIFO is empty. 
v1=0x00000001, ///<RX FIFO is not empty. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(17,17),Register::ReadWriteAccess,RfdfVal> rfdf{}; namespace RfdfValC{ constexpr Register::FieldValue<decltype(rfdf)::Type,RfdfVal::v0> v0{}; constexpr Register::FieldValue<decltype(rfdf)::Type,RfdfVal::v1> v1{}; } ///Receive FIFO Overflow Flag enum class RfofVal { v0=0x00000000, ///<No Rx FIFO overflow. v1=0x00000001, ///<Rx FIFO overflow has occurred. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(19,19),Register::ReadWriteAccess,RfofVal> rfof{}; namespace RfofValC{ constexpr Register::FieldValue<decltype(rfof)::Type,RfofVal::v0> v0{}; constexpr Register::FieldValue<decltype(rfof)::Type,RfofVal::v1> v1{}; } ///Transmit FIFO Fill Flag enum class TfffVal { v0=0x00000000, ///<TX FIFO is full. v1=0x00000001, ///<TX FIFO is not full. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(25,25),Register::ReadWriteAccess,TfffVal> tfff{}; namespace TfffValC{ constexpr Register::FieldValue<decltype(tfff)::Type,TfffVal::v0> v0{}; constexpr Register::FieldValue<decltype(tfff)::Type,TfffVal::v1> v1{}; } ///Transmit FIFO Underflow Flag enum class TfufVal { v0=0x00000000, ///<No TX FIFO underflow. v1=0x00000001, ///<TX FIFO underflow has occurred. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(27,27),Register::ReadWriteAccess,TfufVal> tfuf{}; namespace TfufValC{ constexpr Register::FieldValue<decltype(tfuf)::Type,TfufVal::v0> v0{}; constexpr Register::FieldValue<decltype(tfuf)::Type,TfufVal::v1> v1{}; } ///End of Queue Flag enum class EoqfVal { v0=0x00000000, ///<EOQ is not set in the executing command. v1=0x00000001, ///<EOQ is set in the executing SPI command. 
}; constexpr Register::FieldLocation<Addr,Register::maskFromRange(28,28),Register::ReadWriteAccess,EoqfVal> eoqf{}; namespace EoqfValC{ constexpr Register::FieldValue<decltype(eoqf)::Type,EoqfVal::v0> v0{}; constexpr Register::FieldValue<decltype(eoqf)::Type,EoqfVal::v1> v1{}; } ///TX and RX Status enum class TxrxsVal { v0=0x00000000, ///<Transmit and receive operations are disabled (The module is in Stopped state). v1=0x00000001, ///<Transmit and receive operations are enabled (The module is in Running state). }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(30,30),Register::ReadWriteAccess,TxrxsVal> txrxs{}; namespace TxrxsValC{ constexpr Register::FieldValue<decltype(txrxs)::Type,TxrxsVal::v0> v0{}; constexpr Register::FieldValue<decltype(txrxs)::Type,TxrxsVal::v1> v1{}; } ///Transfer Complete Flag enum class TcfVal { v0=0x00000000, ///<Transfer not complete. v1=0x00000001, ///<Transfer complete. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,31),Register::ReadWriteAccess,TcfVal> tcf{}; namespace TcfValC{ constexpr Register::FieldValue<decltype(tcf)::Type,TcfVal::v0> v0{}; constexpr Register::FieldValue<decltype(tcf)::Type,TcfVal::v1> v1{}; } } namespace Spi1Rser{ ///<DMA/Interrupt Request Select and Enable Register using Addr = Register::Address<0x4002d030,0x64f4ffff,0x00000000,unsigned>; ///Receive FIFO Drain DMA or Interrupt Request Select enum class RfdfdirsVal { v0=0x00000000, ///<Interrupt request. v1=0x00000001, ///<DMA request. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(16,16),Register::ReadWriteAccess,RfdfdirsVal> rfdfDirs{}; namespace RfdfdirsValC{ constexpr Register::FieldValue<decltype(rfdfDirs)::Type,RfdfdirsVal::v0> v0{}; constexpr Register::FieldValue<decltype(rfdfDirs)::Type,RfdfdirsVal::v1> v1{}; } ///Receive FIFO Drain Request Enable enum class RfdfreVal { v0=0x00000000, ///<RFDF interrupt or DMA requests are disabled. v1=0x00000001, ///<RFDF interrupt or DMA requests are enabled. 
}; constexpr Register::FieldLocation<Addr,Register::maskFromRange(17,17),Register::ReadWriteAccess,RfdfreVal> rfdfRe{}; namespace RfdfreValC{ constexpr Register::FieldValue<decltype(rfdfRe)::Type,RfdfreVal::v0> v0{}; constexpr Register::FieldValue<decltype(rfdfRe)::Type,RfdfreVal::v1> v1{}; } ///Receive FIFO Overflow Request Enable enum class RfofreVal { v0=0x00000000, ///<RFOF interrupt requests are disabled. v1=0x00000001, ///<RFOF interrupt requests are enabled. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(19,19),Register::ReadWriteAccess,RfofreVal> rfofRe{}; namespace RfofreValC{ constexpr Register::FieldValue<decltype(rfofRe)::Type,RfofreVal::v0> v0{}; constexpr Register::FieldValue<decltype(rfofRe)::Type,RfofreVal::v1> v1{}; } ///Transmit FIFO Fill DMA or Interrupt Request Select enum class TfffdirsVal { v0=0x00000000, ///<TFFF flag generates interrupt requests. v1=0x00000001, ///<TFFF flag generates DMA requests. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(24,24),Register::ReadWriteAccess,TfffdirsVal> tfffDirs{}; namespace TfffdirsValC{ constexpr Register::FieldValue<decltype(tfffDirs)::Type,TfffdirsVal::v0> v0{}; constexpr Register::FieldValue<decltype(tfffDirs)::Type,TfffdirsVal::v1> v1{}; } ///Transmit FIFO Fill Request Enable enum class TfffreVal { v0=0x00000000, ///<TFFF interrupts or DMA requests are disabled. v1=0x00000001, ///<TFFF interrupts or DMA requests are enabled. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(25,25),Register::ReadWriteAccess,TfffreVal> tfffRe{}; namespace TfffreValC{ constexpr Register::FieldValue<decltype(tfffRe)::Type,TfffreVal::v0> v0{}; constexpr Register::FieldValue<decltype(tfffRe)::Type,TfffreVal::v1> v1{}; } ///Transmit FIFO Underflow Request Enable enum class TfufreVal { v0=0x00000000, ///<TFUF interrupt requests are disabled. v1=0x00000001, ///<TFUF interrupt requests are enabled. 
}; constexpr Register::FieldLocation<Addr,Register::maskFromRange(27,27),Register::ReadWriteAccess,TfufreVal> tfufRe{}; namespace TfufreValC{ constexpr Register::FieldValue<decltype(tfufRe)::Type,TfufreVal::v0> v0{}; constexpr Register::FieldValue<decltype(tfufRe)::Type,TfufreVal::v1> v1{}; } ///Finished Request Enable enum class EoqfreVal { v0=0x00000000, ///<EOQF interrupt requests are disabled. v1=0x00000001, ///<EOQF interrupt requests are enabled. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(28,28),Register::ReadWriteAccess,EoqfreVal> eoqfRe{}; namespace EoqfreValC{ constexpr Register::FieldValue<decltype(eoqfRe)::Type,EoqfreVal::v0> v0{}; constexpr Register::FieldValue<decltype(eoqfRe)::Type,EoqfreVal::v1> v1{}; } ///Transmission Complete Request Enable enum class TcfreVal { v0=0x00000000, ///<TCF interrupt requests are disabled. v1=0x00000001, ///<TCF interrupt requests are enabled. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,31),Register::ReadWriteAccess,TcfreVal> tcfRe{}; namespace TcfreValC{ constexpr Register::FieldValue<decltype(tcfRe)::Type,TcfreVal::v0> v0{}; constexpr Register::FieldValue<decltype(tcfRe)::Type,TcfreVal::v1> v1{}; } } namespace Spi1Pushr{ ///<PUSH TX FIFO Register In Master Mode using Addr = Register::Address<0x4002d034,0x03e00000,0x00000000,unsigned>; ///Transmit Data constexpr Register::FieldLocation<Addr,Register::maskFromRange(15,0),Register::ReadWriteAccess,unsigned> txdata{}; ///no description available constexpr Register::FieldLocation<Addr,Register::maskFromRange(20,16),Register::ReadWriteAccess,unsigned> pcs{}; ///Clear Transfer Counter enum class CtcntVal { v0=0x00000000, ///<Do not clear the TCR[TCNT] field. v1=0x00000001, ///<Clear the TCR[TCNT] field. 
}; constexpr Register::FieldLocation<Addr,Register::maskFromRange(26,26),Register::ReadWriteAccess,CtcntVal> ctcnt{}; namespace CtcntValC{ constexpr Register::FieldValue<decltype(ctcnt)::Type,CtcntVal::v0> v0{}; constexpr Register::FieldValue<decltype(ctcnt)::Type,CtcntVal::v1> v1{}; } ///End Of Queue enum class EoqVal { v0=0x00000000, ///<The SPI data is not the last data to transfer. v1=0x00000001, ///<The SPI data is the last data to transfer. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(27,27),Register::ReadWriteAccess,EoqVal> eoq{}; namespace EoqValC{ constexpr Register::FieldValue<decltype(eoq)::Type,EoqVal::v0> v0{}; constexpr Register::FieldValue<decltype(eoq)::Type,EoqVal::v1> v1{}; } ///Clock and Transfer Attributes Select constexpr Register::FieldLocation<Addr,Register::maskFromRange(30,28),Register::ReadWriteAccess,unsigned> ctas{}; ///Continuous Peripheral Chip Select Enable enum class ContVal { v0=0x00000000, ///<Return PCSn signals to their inactive state between transfers. v1=0x00000001, ///<Keep PCSn signals asserted between transfers. 
}; constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,31),Register::ReadWriteAccess,ContVal> cont{}; namespace ContValC{ constexpr Register::FieldValue<decltype(cont)::Type,ContVal::v0> v0{}; constexpr Register::FieldValue<decltype(cont)::Type,ContVal::v1> v1{}; } } namespace Spi1PushrSlave{ ///<PUSH TX FIFO Register In Slave Mode using Addr = Register::Address<0x4002d034,0x00000000,0x00000000,unsigned>; ///Transmit Data constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::ReadWriteAccess,unsigned> txdata{}; } namespace Spi1Popr{ ///<POP RX FIFO Register using Addr = Register::Address<0x4002d038,0x00000000,0x00000000,unsigned>; ///Received Data constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> rxdata{}; } namespace Spi1Ctar0{ ///<Clock and Transfer Attributes Register (In Master Mode) using Addr = Register::Address<0x4002d00c,0x00000000,0x00000000,unsigned>; ///Baud Rate Scaler constexpr Register::FieldLocation<Addr,Register::maskFromRange(3,0),Register::ReadWriteAccess,unsigned> br{}; ///Delay After Transfer Scaler constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,4),Register::ReadWriteAccess,unsigned> dt{}; ///After SCK Delay Scaler constexpr Register::FieldLocation<Addr,Register::maskFromRange(11,8),Register::ReadWriteAccess,unsigned> asc{}; ///PCS to SCK Delay Scaler constexpr Register::FieldLocation<Addr,Register::maskFromRange(15,12),Register::ReadWriteAccess,unsigned> cssck{}; ///Baud Rate Prescaler enum class PbrVal { v00=0x00000000, ///<Baud Rate Prescaler value is 2. v01=0x00000001, ///<Baud Rate Prescaler value is 3. v10=0x00000002, ///<Baud Rate Prescaler value is 5. v11=0x00000003, ///<Baud Rate Prescaler value is 7. 
}; constexpr Register::FieldLocation<Addr,Register::maskFromRange(17,16),Register::ReadWriteAccess,PbrVal> pbr{}; namespace PbrValC{ constexpr Register::FieldValue<decltype(pbr)::Type,PbrVal::v00> v00{}; constexpr Register::FieldValue<decltype(pbr)::Type,PbrVal::v01> v01{}; constexpr Register::FieldValue<decltype(pbr)::Type,PbrVal::v10> v10{}; constexpr Register::FieldValue<decltype(pbr)::Type,PbrVal::v11> v11{}; } ///Delay after Transfer Prescaler enum class PdtVal { v00=0x00000000, ///<Delay after Transfer Prescaler value is 1. v01=0x00000001, ///<Delay after Transfer Prescaler value is 3. v10=0x00000002, ///<Delay after Transfer Prescaler value is 5. v11=0x00000003, ///<Delay after Transfer Prescaler value is 7. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(19,18),Register::ReadWriteAccess,PdtVal> pdt{}; namespace PdtValC{ constexpr Register::FieldValue<decltype(pdt)::Type,PdtVal::v00> v00{}; constexpr Register::FieldValue<decltype(pdt)::Type,PdtVal::v01> v01{}; constexpr Register::FieldValue<decltype(pdt)::Type,PdtVal::v10> v10{}; constexpr Register::FieldValue<decltype(pdt)::Type,PdtVal::v11> v11{}; } ///After SCK Delay Prescaler enum class PascVal { v00=0x00000000, ///<Delay after Transfer Prescaler value is 1. v01=0x00000001, ///<Delay after Transfer Prescaler value is 3. v10=0x00000002, ///<Delay after Transfer Prescaler value is 5. v11=0x00000003, ///<Delay after Transfer Prescaler value is 7. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(21,20),Register::ReadWriteAccess,PascVal> pasc{}; namespace PascValC{ constexpr Register::FieldValue<decltype(pasc)::Type,PascVal::v00> v00{}; constexpr Register::FieldValue<decltype(pasc)::Type,PascVal::v01> v01{}; constexpr Register::FieldValue<decltype(pasc)::Type,PascVal::v10> v10{}; constexpr Register::FieldValue<decltype(pasc)::Type,PascVal::v11> v11{}; } ///PCS to SCK Delay Prescaler enum class PcssckVal { v00=0x00000000, ///<PCS to SCK Prescaler value is 1. 
v01=0x00000001, ///<PCS to SCK Prescaler value is 3. v10=0x00000002, ///<PCS to SCK Prescaler value is 5. v11=0x00000003, ///<PCS to SCK Prescaler value is 7. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(23,22),Register::ReadWriteAccess,PcssckVal> pcssck{}; namespace PcssckValC{ constexpr Register::FieldValue<decltype(pcssck)::Type,PcssckVal::v00> v00{}; constexpr Register::FieldValue<decltype(pcssck)::Type,PcssckVal::v01> v01{}; constexpr Register::FieldValue<decltype(pcssck)::Type,PcssckVal::v10> v10{}; constexpr Register::FieldValue<decltype(pcssck)::Type,PcssckVal::v11> v11{}; } ///LSB First enum class LsbfeVal { v0=0x00000000, ///<Data is transferred MSB first. v1=0x00000001, ///<Data is transferred LSB first. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(24,24),Register::ReadWriteAccess,LsbfeVal> lsbfe{}; namespace LsbfeValC{ constexpr Register::FieldValue<decltype(lsbfe)::Type,LsbfeVal::v0> v0{}; constexpr Register::FieldValue<decltype(lsbfe)::Type,LsbfeVal::v1> v1{}; } ///Clock Phase enum class CphaVal { v0=0x00000000, ///<Data is captured on the leading edge of SCK and changed on the following edge. v1=0x00000001, ///<Data is changed on the leading edge of SCK and captured on the following edge. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(25,25),Register::ReadWriteAccess,CphaVal> cpha{}; namespace CphaValC{ constexpr Register::FieldValue<decltype(cpha)::Type,CphaVal::v0> v0{}; constexpr Register::FieldValue<decltype(cpha)::Type,CphaVal::v1> v1{}; } ///Clock Polarity enum class CpolVal { v0=0x00000000, ///<The inactive state value of SCK is low. v1=0x00000001, ///<The inactive state value of SCK is high. 
}; constexpr Register::FieldLocation<Addr,Register::maskFromRange(26,26),Register::ReadWriteAccess,CpolVal> cpol{}; namespace CpolValC{ constexpr Register::FieldValue<decltype(cpol)::Type,CpolVal::v0> v0{}; constexpr Register::FieldValue<decltype(cpol)::Type,CpolVal::v1> v1{}; } ///Frame Size constexpr Register::FieldLocation<Addr,Register::maskFromRange(30,27),Register::ReadWriteAccess,unsigned> fmsz{}; ///Double Baud Rate enum class DbrVal { v0=0x00000000, ///<The baud rate is computed normally with a 50/50 duty cycle. v1=0x00000001, ///<The baud rate is doubled with the duty cycle depending on the Baud Rate Prescaler. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,31),Register::ReadWriteAccess,DbrVal> dbr{}; namespace DbrValC{ constexpr Register::FieldValue<decltype(dbr)::Type,DbrVal::v0> v0{}; constexpr Register::FieldValue<decltype(dbr)::Type,DbrVal::v1> v1{}; } } namespace Spi1Ctar1{ ///<Clock and Transfer Attributes Register (In Master Mode) using Addr = Register::Address<0x4002d010,0x00000000,0x00000000,unsigned>; ///Baud Rate Scaler constexpr Register::FieldLocation<Addr,Register::maskFromRange(3,0),Register::ReadWriteAccess,unsigned> br{}; ///Delay After Transfer Scaler constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,4),Register::ReadWriteAccess,unsigned> dt{}; ///After SCK Delay Scaler constexpr Register::FieldLocation<Addr,Register::maskFromRange(11,8),Register::ReadWriteAccess,unsigned> asc{}; ///PCS to SCK Delay Scaler constexpr Register::FieldLocation<Addr,Register::maskFromRange(15,12),Register::ReadWriteAccess,unsigned> cssck{}; ///Baud Rate Prescaler enum class PbrVal { v00=0x00000000, ///<Baud Rate Prescaler value is 2. v01=0x00000001, ///<Baud Rate Prescaler value is 3. v10=0x00000002, ///<Baud Rate Prescaler value is 5. v11=0x00000003, ///<Baud Rate Prescaler value is 7. 
}; constexpr Register::FieldLocation<Addr,Register::maskFromRange(17,16),Register::ReadWriteAccess,PbrVal> pbr{}; namespace PbrValC{ constexpr Register::FieldValue<decltype(pbr)::Type,PbrVal::v00> v00{}; constexpr Register::FieldValue<decltype(pbr)::Type,PbrVal::v01> v01{}; constexpr Register::FieldValue<decltype(pbr)::Type,PbrVal::v10> v10{}; constexpr Register::FieldValue<decltype(pbr)::Type,PbrVal::v11> v11{}; } ///Delay after Transfer Prescaler enum class PdtVal { v00=0x00000000, ///<Delay after Transfer Prescaler value is 1. v01=0x00000001, ///<Delay after Transfer Prescaler value is 3. v10=0x00000002, ///<Delay after Transfer Prescaler value is 5. v11=0x00000003, ///<Delay after Transfer Prescaler value is 7. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(19,18),Register::ReadWriteAccess,PdtVal> pdt{}; namespace PdtValC{ constexpr Register::FieldValue<decltype(pdt)::Type,PdtVal::v00> v00{}; constexpr Register::FieldValue<decltype(pdt)::Type,PdtVal::v01> v01{}; constexpr Register::FieldValue<decltype(pdt)::Type,PdtVal::v10> v10{}; constexpr Register::FieldValue<decltype(pdt)::Type,PdtVal::v11> v11{}; } ///After SCK Delay Prescaler enum class PascVal { v00=0x00000000, ///<Delay after Transfer Prescaler value is 1. v01=0x00000001, ///<Delay after Transfer Prescaler value is 3. v10=0x00000002, ///<Delay after Transfer Prescaler value is 5. v11=0x00000003, ///<Delay after Transfer Prescaler value is 7. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(21,20),Register::ReadWriteAccess,PascVal> pasc{}; namespace PascValC{ constexpr Register::FieldValue<decltype(pasc)::Type,PascVal::v00> v00{}; constexpr Register::FieldValue<decltype(pasc)::Type,PascVal::v01> v01{}; constexpr Register::FieldValue<decltype(pasc)::Type,PascVal::v10> v10{}; constexpr Register::FieldValue<decltype(pasc)::Type,PascVal::v11> v11{}; } ///PCS to SCK Delay Prescaler enum class PcssckVal { v00=0x00000000, ///<PCS to SCK Prescaler value is 1. 
v01=0x00000001, ///<PCS to SCK Prescaler value is 3. v10=0x00000002, ///<PCS to SCK Prescaler value is 5. v11=0x00000003, ///<PCS to SCK Prescaler value is 7. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(23,22),Register::ReadWriteAccess,PcssckVal> pcssck{}; namespace PcssckValC{ constexpr Register::FieldValue<decltype(pcssck)::Type,PcssckVal::v00> v00{}; constexpr Register::FieldValue<decltype(pcssck)::Type,PcssckVal::v01> v01{}; constexpr Register::FieldValue<decltype(pcssck)::Type,PcssckVal::v10> v10{}; constexpr Register::FieldValue<decltype(pcssck)::Type,PcssckVal::v11> v11{}; } ///LSB First enum class LsbfeVal { v0=0x00000000, ///<Data is transferred MSB first. v1=0x00000001, ///<Data is transferred LSB first. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(24,24),Register::ReadWriteAccess,LsbfeVal> lsbfe{}; namespace LsbfeValC{ constexpr Register::FieldValue<decltype(lsbfe)::Type,LsbfeVal::v0> v0{}; constexpr Register::FieldValue<decltype(lsbfe)::Type,LsbfeVal::v1> v1{}; } ///Clock Phase enum class CphaVal { v0=0x00000000, ///<Data is captured on the leading edge of SCK and changed on the following edge. v1=0x00000001, ///<Data is changed on the leading edge of SCK and captured on the following edge. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(25,25),Register::ReadWriteAccess,CphaVal> cpha{}; namespace CphaValC{ constexpr Register::FieldValue<decltype(cpha)::Type,CphaVal::v0> v0{}; constexpr Register::FieldValue<decltype(cpha)::Type,CphaVal::v1> v1{}; } ///Clock Polarity enum class CpolVal { v0=0x00000000, ///<The inactive state value of SCK is low. v1=0x00000001, ///<The inactive state value of SCK is high. 
}; constexpr Register::FieldLocation<Addr,Register::maskFromRange(26,26),Register::ReadWriteAccess,CpolVal> cpol{}; namespace CpolValC{ constexpr Register::FieldValue<decltype(cpol)::Type,CpolVal::v0> v0{}; constexpr Register::FieldValue<decltype(cpol)::Type,CpolVal::v1> v1{}; } ///Frame Size constexpr Register::FieldLocation<Addr,Register::maskFromRange(30,27),Register::ReadWriteAccess,unsigned> fmsz{}; ///Double Baud Rate enum class DbrVal { v0=0x00000000, ///<The baud rate is computed normally with a 50/50 duty cycle. v1=0x00000001, ///<The baud rate is doubled with the duty cycle depending on the Baud Rate Prescaler. }; constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,31),Register::ReadWriteAccess,DbrVal> dbr{}; namespace DbrValC{ constexpr Register::FieldValue<decltype(dbr)::Type,DbrVal::v0> v0{}; constexpr Register::FieldValue<decltype(dbr)::Type,DbrVal::v1> v1{}; } } namespace Spi1Txfr0{ ///<Transmit FIFO Registers using Addr = Register::Address<0x4002d03c,0x00000000,0x00000000,unsigned>; ///Transmit Data constexpr Register::FieldLocation<Addr,Register::maskFromRange(15,0),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> txdata{}; ///Transmit Command or Transmit Data constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,16),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> txcmdTxdata{}; } namespace Spi1Txfr1{ ///<Transmit FIFO Registers using Addr = Register::Address<0x4002d040,0x00000000,0x00000000,unsigned>; ///Transmit Data constexpr Register::FieldLocation<Addr,Register::maskFromRange(15,0),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> txdata{}; ///Transmit Command or Transmit Data constexpr 
Register::FieldLocation<Addr,Register::maskFromRange(31,16),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> txcmdTxdata{}; } namespace Spi1Txfr2{ ///<Transmit FIFO Registers using Addr = Register::Address<0x4002d044,0x00000000,0x00000000,unsigned>; ///Transmit Data constexpr Register::FieldLocation<Addr,Register::maskFromRange(15,0),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> txdata{}; ///Transmit Command or Transmit Data constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,16),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> txcmdTxdata{}; } namespace Spi1Txfr3{ ///<Transmit FIFO Registers using Addr = Register::Address<0x4002d048,0x00000000,0x00000000,unsigned>; ///Transmit Data constexpr Register::FieldLocation<Addr,Register::maskFromRange(15,0),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> txdata{}; ///Transmit Command or Transmit Data constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,16),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> txcmdTxdata{}; } namespace Spi1Rxfr0{ ///<Receive FIFO Registers using Addr = Register::Address<0x4002d07c,0x00000000,0x00000000,unsigned>; ///Receive Data constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> rxdata{}; } namespace Spi1Rxfr1{ ///<Receive FIFO Registers using Addr = Register::Address<0x4002d080,0x00000000,0x00000000,unsigned>; ///Receive Data constexpr 
Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> rxdata{}; } namespace Spi1Rxfr2{ ///<Receive FIFO Registers using Addr = Register::Address<0x4002d084,0x00000000,0x00000000,unsigned>; ///Receive Data constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> rxdata{}; } namespace Spi1Rxfr3{ ///<Receive FIFO Registers using Addr = Register::Address<0x4002d088,0x00000000,0x00000000,unsigned>; ///Receive Data constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> rxdata{}; } }
porkybrain/Kvasir
Lib/Chip/CM4/Freescale/MK21DA5/SPI1.hpp
C++
apache-2.0
40,340
// +build go1.10,codegen package api import ( "encoding/json" "testing" ) func buildAPI() *API { a := &API{} stringShape := &Shape{ API: a, ShapeName: "string", Type: "string", } stringShapeRef := &ShapeRef{ API: a, ShapeName: "string", Shape: stringShape, } intShape := &Shape{ API: a, ShapeName: "int", Type: "int", } intShapeRef := &ShapeRef{ API: a, ShapeName: "int", Shape: intShape, } nestedComplexShape := &Shape{ API: a, ShapeName: "NestedComplexShape", MemberRefs: map[string]*ShapeRef{ "NestedField": stringShapeRef, }, Type: "structure", } nestedComplexShapeRef := &ShapeRef{ API: a, ShapeName: "NestedComplexShape", Shape: nestedComplexShape, } nestedListShape := &Shape{ API: a, ShapeName: "NestedListShape", MemberRef: *nestedComplexShapeRef, Type: "list", } nestedListShapeRef := &ShapeRef{ API: a, ShapeName: "NestedListShape", Shape: nestedListShape, } complexShape := &Shape{ API: a, ShapeName: "ComplexShape", MemberRefs: map[string]*ShapeRef{ "Field": stringShapeRef, "List": nestedListShapeRef, }, Type: "structure", } complexShapeRef := &ShapeRef{ API: a, ShapeName: "ComplexShape", Shape: complexShape, } listShape := &Shape{ API: a, ShapeName: "ListShape", MemberRef: *complexShapeRef, Type: "list", } listShapeRef := &ShapeRef{ API: a, ShapeName: "ListShape", Shape: listShape, } listsShape := &Shape{ API: a, ShapeName: "ListsShape", MemberRef: *listShapeRef, Type: "list", } listsShapeRef := &ShapeRef{ API: a, ShapeName: "ListsShape", Shape: listsShape, } input := &Shape{ API: a, ShapeName: "FooInput", MemberRefs: map[string]*ShapeRef{ "BarShape": stringShapeRef, "ComplexField": complexShapeRef, "ListField": listShapeRef, "ListsField": listsShapeRef, }, Type: "structure", } output := &Shape{ API: a, ShapeName: "FooOutput", MemberRefs: map[string]*ShapeRef{ "BazShape": intShapeRef, "ComplexField": complexShapeRef, "ListField": listShapeRef, "ListsField": listsShapeRef, }, Type: "structure", } inputRef := ShapeRef{ API: a, ShapeName: "FooInput", Shape: input, } 
outputRef := ShapeRef{ API: a, ShapeName: "FooOutput", Shape: output, } operations := map[string]*Operation{ "Foo": { API: a, Name: "Foo", ExportedName: "Foo", InputRef: inputRef, OutputRef: outputRef, }, } a.Operations = operations a.Shapes = map[string]*Shape{ "FooInput": input, "FooOutput": output, "string": stringShape, "int": intShape, "NestedComplexShape": nestedComplexShape, "NestedListShape": nestedListShape, "ComplexShape": complexShape, "ListShape": listShape, "ListsShape": listsShape, } a.Metadata = Metadata{ ServiceAbbreviation: "FooService", } a.BaseImportPath = "github.com/aws/aws-sdk-go/service/" a.Setup() return a } func TestExampleGeneration(t *testing.T) { example := ` { "version": "1.0", "examples": { "Foo": [ { "input": { "BarShape": "Hello world", "ComplexField": { "Field": "bar", "List": [ { "NestedField": "qux" } ] }, "ListField": [ { "Field": "baz" } ], "ListsField": [ [ { "Field": "baz" } ] ] }, "output": { "BazShape": 1 }, "comments": { "input": { }, "output": { } }, "description": "Foo bar baz qux", "title": "I pity the foo" } ] } } ` a := buildAPI() def := &ExamplesDefinition{} err := json.Unmarshal([]byte(example), def) if err != nil { t.Error(err) } def.API = a def.setup() expected := ` import ( "fmt" "strings" "time" "` + SDKImportRoot + `/aws" "` + SDKImportRoot + `/aws/awserr" "` + SDKImportRoot + `/aws/session" "` + SDKImportRoot + `/service/fooservice" ) var _ time.Duration var _ strings.Reader var _ aws.Config func parseTime(layout, value string) *time.Time { t, err := time.Parse(layout, value) if err != nil { panic(err) } return &t } // I pity the foo // // Foo bar baz qux func ExampleFooService_Foo_shared00() { svc := fooservice.New(session.New()) input := &fooservice.FooInput{ BarShape: aws.String("Hello world"), ComplexField: &fooservice.ComplexShape{ Field: aws.String("bar"), List: []*fooservice.NestedComplexShape{ { NestedField: aws.String("qux"), }, }, }, ListField: []*fooservice.ComplexShape{ { Field: aws.String("baz"), 
}, }, ListsField: [][]*fooservice.ComplexShape{ { { Field: aws.String("baz"), }, }, }, } result, err := svc.Foo(input) if err != nil { if aerr, ok := err.(awserr.Error); ok { switch aerr.Code() { default: fmt.Println(aerr.Error()) } } else { // Print the error, cast err to awserr.Error to get the Code and // Message from an error. fmt.Println(err.Error()) } return } fmt.Println(result) } ` if expected != a.ExamplesGoCode() { t.Errorf("Expected:\n%s\nReceived:\n%s\n", expected, a.ExamplesGoCode()) } } func TestBuildShape(t *testing.T) { a := buildAPI() cases := []struct { defs map[string]interface{} expected string }{ { defs: map[string]interface{}{ "barShape": "Hello World", }, expected: "BarShape: aws.String(\"Hello World\"),\n", }, { defs: map[string]interface{}{ "BarShape": "Hello World", }, expected: "BarShape: aws.String(\"Hello World\"),\n", }, } for _, c := range cases { ref := a.Operations["Foo"].InputRef shapeStr := defaultExamplesBuilder{}.BuildShape(&ref, c.defs, false) if c.expected != shapeStr { t.Errorf("Expected:\n%s\nReceived:\n%s", c.expected, shapeStr) } } }
Miciah/origin
vendor/github.com/aws/aws-sdk-go/private/model/api/example_test.go
GO
apache-2.0
6,120
/* * Copyright 2016 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package io.netty.handler.ssl; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assume.assumeFalse; import static org.junit.Assume.assumeTrue; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.security.PrivateKey; import io.netty.buffer.UnpooledByteBufAllocator; import org.junit.Test; import io.netty.handler.ssl.util.SelfSignedCertificate; import io.netty.util.ReferenceCountUtil; public class PemEncodedTest { @Test public void testPemEncodedOpenSsl() throws Exception { testPemEncoded(SslProvider.OPENSSL); } @Test public void testPemEncodedOpenSslRef() throws Exception { testPemEncoded(SslProvider.OPENSSL_REFCNT); } private static void testPemEncoded(SslProvider provider) throws Exception { assumeTrue(OpenSsl.isAvailable()); assumeFalse(OpenSsl.useKeyManagerFactory()); PemPrivateKey pemKey; PemX509Certificate pemCert; SelfSignedCertificate ssc = new SelfSignedCertificate(); try { pemKey = PemPrivateKey.valueOf(toByteArray(ssc.privateKey())); pemCert = PemX509Certificate.valueOf(toByteArray(ssc.certificate())); } finally { ssc.delete(); } SslContext context = SslContextBuilder.forServer(pemKey, pemCert) .sslProvider(provider) .build(); assertEquals(1, pemKey.refCnt()); assertEquals(1, pemCert.refCnt()); try { assertTrue(context instanceof 
ReferenceCountedOpenSslContext); } finally { ReferenceCountUtil.release(context); assertRelease(pemKey); assertRelease(pemCert); } } @Test(expected = IllegalArgumentException.class) public void testEncodedReturnsNull() throws Exception { PemPrivateKey.toPEM(UnpooledByteBufAllocator.DEFAULT, true, new PrivateKey() { @Override public String getAlgorithm() { return null; } @Override public String getFormat() { return null; } @Override public byte[] getEncoded() { return null; } }); } private static void assertRelease(PemEncoded encoded) { assertTrue(encoded.release()); } private static byte[] toByteArray(File file) throws Exception { FileInputStream in = new FileInputStream(file); try { ByteArrayOutputStream baos = new ByteArrayOutputStream(); try { byte[] buf = new byte[1024]; int len; while ((len = in.read(buf)) != -1) { baos.write(buf, 0, len); } } finally { baos.close(); } return baos.toByteArray(); } finally { in.close(); } } }
zer0se7en/netty
handler/src/test/java/io/netty/handler/ssl/PemEncodedTest.java
Java
apache-2.0
3,656
/* * Licensed to Metamarkets Group Inc. (Metamarkets) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. Metamarkets licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package io.druid.query.filter; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.common.collect.Range; import com.google.common.collect.RangeSet; import com.google.common.collect.TreeRangeSet; import com.google.common.primitives.Floats; import io.druid.common.guava.GuavaUtils; import io.druid.java.util.common.StringUtils; import io.druid.query.extraction.ExtractionFn; import io.druid.segment.filter.DimensionPredicateFilter; import io.druid.segment.filter.SelectorFilter; import java.nio.ByteBuffer; import java.util.Objects; /** */ public class SelectorDimFilter implements DimFilter { private final String dimension; private final String value; private final ExtractionFn extractionFn; private final Object initLock = new Object(); private DruidLongPredicate longPredicate; private DruidFloatPredicate floatPredicate; @JsonCreator public SelectorDimFilter( @JsonProperty("dimension") String dimension, 
@JsonProperty("value") String value, @JsonProperty("extractionFn") ExtractionFn extractionFn ) { Preconditions.checkArgument(dimension != null, "dimension must not be null"); this.dimension = dimension; this.value = Strings.nullToEmpty(value); this.extractionFn = extractionFn; } @Override public byte[] getCacheKey() { byte[] dimensionBytes = StringUtils.toUtf8(dimension); byte[] valueBytes = (value == null) ? new byte[]{} : StringUtils.toUtf8(value); byte[] extractionFnBytes = extractionFn == null ? new byte[0] : extractionFn.getCacheKey(); return ByteBuffer.allocate(3 + dimensionBytes.length + valueBytes.length + extractionFnBytes.length) .put(DimFilterUtils.SELECTOR_CACHE_ID) .put(dimensionBytes) .put(DimFilterUtils.STRING_SEPARATOR) .put(valueBytes) .put(DimFilterUtils.STRING_SEPARATOR) .put(extractionFnBytes) .array(); } @Override public DimFilter optimize() { return new InDimFilter(dimension, ImmutableList.of(value), extractionFn).optimize(); } @Override public Filter toFilter() { if (extractionFn == null) { return new SelectorFilter(dimension, value); } else { final String valueOrNull = Strings.emptyToNull(value); final DruidPredicateFactory predicateFactory = new DruidPredicateFactory() { @Override public Predicate<String> makeStringPredicate() { return Predicates.equalTo(valueOrNull); } @Override public DruidLongPredicate makeLongPredicate() { initLongPredicate(); return longPredicate; } @Override public DruidFloatPredicate makeFloatPredicate() { initFloatPredicate(); return floatPredicate; } }; return new DimensionPredicateFilter(dimension, predicateFactory, extractionFn); } } @JsonProperty public String getDimension() { return dimension; } @JsonProperty public String getValue() { return value; } @JsonProperty public ExtractionFn getExtractionFn() { return extractionFn; } @Override public String toString() { if (extractionFn != null) { return String.format("%s(%s) = %s", extractionFn, dimension, value); } else { return String.format("%s = %s", dimension, 
value); } } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } SelectorDimFilter that = (SelectorDimFilter) o; if (!dimension.equals(that.dimension)) { return false; } if (value != null ? !value.equals(that.value) : that.value != null) { return false; } return extractionFn != null ? extractionFn.equals(that.extractionFn) : that.extractionFn == null; } @Override public RangeSet<String> getDimensionRangeSet(String dimension) { if (!Objects.equals(getDimension(), dimension) || getExtractionFn() != null) { return null; } RangeSet<String> retSet = TreeRangeSet.create(); retSet.add(Range.singleton(Strings.nullToEmpty(value))); return retSet; } @Override public int hashCode() { int result = dimension.hashCode(); result = 31 * result + (value != null ? value.hashCode() : 0); result = 31 * result + (extractionFn != null ? extractionFn.hashCode() : 0); return result; } private void initLongPredicate() { if (longPredicate != null) { return; } synchronized (initLock) { if (longPredicate != null) { return; } final Long valueAsLong = GuavaUtils.tryParseLong(value); if (valueAsLong == null) { longPredicate = DruidLongPredicate.ALWAYS_FALSE; } else { // store the primitive, so we don't unbox for every comparison final long unboxedLong = valueAsLong.longValue(); longPredicate = new DruidLongPredicate() { @Override public boolean applyLong(long input) { return input == unboxedLong; } }; } } } private void initFloatPredicate() { if (floatPredicate != null) { return; } synchronized (initLock) { if (floatPredicate != null) { return; } final Float valueAsFloat = Floats.tryParse(value); if (valueAsFloat == null) { floatPredicate = DruidFloatPredicate.ALWAYS_FALSE; } else { final int floatBits = Float.floatToIntBits(valueAsFloat); floatPredicate = new DruidFloatPredicate() { @Override public boolean applyFloat(float input) { return Float.floatToIntBits(input) == floatBits; } }; } } } }
zhihuij/druid
processing/src/main/java/io/druid/query/filter/SelectorDimFilter.java
Java
apache-2.0
6,989
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License */ package com.android.location.provider; import android.annotation.NonNull; import android.annotation.Nullable; import android.hardware.location.IActivityRecognitionHardware; import android.hardware.location.IActivityRecognitionHardwareWatcher; import android.os.Binder; import android.os.IBinder; import android.os.Process; import android.os.RemoteException; import android.util.Log; /** * A watcher class for Activity-Recognition instances. * * @deprecated use {@link ActivityRecognitionProviderClient} instead. 
*/ @Deprecated public class ActivityRecognitionProviderWatcher { private static final String TAG = "ActivityRecognitionProviderWatcher"; private static ActivityRecognitionProviderWatcher sWatcher; private static final Object sWatcherLock = new Object(); private ActivityRecognitionProvider mActivityRecognitionProvider; private ActivityRecognitionProviderWatcher() {} public static ActivityRecognitionProviderWatcher getInstance() { synchronized (sWatcherLock) { if (sWatcher == null) { sWatcher = new ActivityRecognitionProviderWatcher(); } return sWatcher; } } private IActivityRecognitionHardwareWatcher.Stub mWatcherStub = new IActivityRecognitionHardwareWatcher.Stub() { @Override public void onInstanceChanged(IActivityRecognitionHardware instance) { int callingUid = Binder.getCallingUid(); if (callingUid != Process.SYSTEM_UID) { Log.d(TAG, "Ignoring calls from non-system server. Uid: " + callingUid); return; } try { mActivityRecognitionProvider = new ActivityRecognitionProvider(instance); } catch (RemoteException e) { Log.e(TAG, "Error creating Hardware Activity-Recognition", e); } } }; /** * Gets the binder needed to interact with proxy provider in the platform. */ @NonNull public IBinder getBinder() { return mWatcherStub; } /** * Gets an object that supports the functionality of {@link ActivityRecognitionProvider}. * * @return Non-null value if the functionality is supported by the platform, false otherwise. */ @Nullable public ActivityRecognitionProvider getActivityRecognitionProvider() { return mActivityRecognitionProvider; } }
Ant-Droid/android_frameworks_base_OLD
location/lib/java/com/android/location/provider/ActivityRecognitionProviderWatcher.java
Java
apache-2.0
3,055
/* * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.internal.adapter; import javax.cache.processor.EntryProcessor; import javax.cache.processor.EntryProcessorException; import javax.cache.processor.MutableEntry; import java.io.Serializable; public class ICacheReplaceEntryProcessor implements EntryProcessor<Integer, String, String>, Serializable { private static final long serialVersionUID = -396575576353368113L; @Override public String process(MutableEntry<Integer, String> entry, Object... arguments) throws EntryProcessorException { String value = entry.getValue(); if (value == null) { return null; } String oldString = (String) arguments[0]; String newString = (String) arguments[1]; String result = value.replace(oldString, newString); entry.setValue(result); return result; } }
tombujok/hazelcast
hazelcast/src/test/java/com/hazelcast/internal/adapter/ICacheReplaceEntryProcessor.java
Java
apache-2.0
1,482
/*<license> Copyright 2005 - $Date$ by PeopleWare n.v.. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. </license>*/ package org.ppwcode.vernacular.persistence_III; import static org.ppwcode.metainfo_I.License.Type.APACHE_V2; import java.io.Serializable; import org.ppwcode.metainfo_I.Copyright; import org.ppwcode.metainfo_I.License; import org.ppwcode.metainfo_I.vcs.SvnInfo; import org.ppwcode.vernacular.semantics_VI.bean.AbstractRousseauBean; /** * A partial implementation of the interface {@link PersistentBean}. * * @author Nele Smeets * @author Ruben Vandeginste * @author Jan Dockx * @author PeopleWare n.v. * * @mudo We now have a dependency here on JPA via annotations. Also, the listener is defined in a subpackage, which * depends on this package. This introduces a cycle! This is a bad idea. Like this, you always need the JPA * libraries, even if they are annotations, because the annotations are loaded in the import statements too * (at least under 1.5). Thus, the annotations must go, and we need to use the xml files. 
*/ @Copyright("2004 - $Date$, PeopleWare n.v.") @License(APACHE_V2) @SvnInfo(revision = "$Revision$", date = "$Date$") public abstract class AbstractPersistentBean<_Id_ extends Serializable> extends AbstractRousseauBean implements PersistentBean<_Id_> { /*<property name="id">*/ //------------------------------------------------------------------ public final _Id_ getPersistenceId() { return $persistenceId; } public final boolean hasSamePersistenceId(final PersistentBean<_Id_> other) { return (other != null) && ((getPersistenceId() == null) ? other.getPersistenceId() == null : getPersistenceId().equals(other.getPersistenceId())); } public final void setPersistenceId(final _Id_ persistenceId) { $persistenceId = persistenceId; } // @Id // @GeneratedValue // @Column(name="persistenceId") private _Id_ $persistenceId; /*</property>*/ }
jandppw/ppwcode-recovered-from-google-code
java/vernacular/persistence/dev/d20081014-1359/src/main/java/org/ppwcode/vernacular/persistence_III/AbstractPersistentBean.java
Java
apache-2.0
2,465
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # # Copyright 2013 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # """NEC plugin sharednet Revision ID: 3b54bf9e29f7 Revises: 511471cc46b Create Date: 2013-02-17 09:21:48.287134 """ # revision identifiers, used by Alembic. revision = '3b54bf9e29f7' down_revision = '511471cc46b' # Change to ['*'] if this migration applies to all plugins migration_for_plugins = [ 'neutron.plugins.nec.nec_plugin.NECPluginV2' ] from alembic import op import sqlalchemy as sa from neutron.db import migration def upgrade(active_plugin=None, options=None): if not migration.should_run(active_plugin, migration_for_plugins): return op.create_table( 'ofctenantmappings', sa.Column('ofc_id', sa.String(length=255), nullable=False), sa.Column('quantum_id', sa.String(length=36), nullable=False), sa.PrimaryKeyConstraint('quantum_id'), sa.UniqueConstraint('ofc_id') ) op.create_table( 'ofcnetworkmappings', sa.Column('ofc_id', sa.String(length=255), nullable=False), sa.Column('quantum_id', sa.String(length=36), nullable=False), sa.PrimaryKeyConstraint('quantum_id'), sa.UniqueConstraint('ofc_id') ) op.create_table( 'ofcportmappings', sa.Column('ofc_id', sa.String(length=255), nullable=False), sa.Column('quantum_id', sa.String(length=36), nullable=False), sa.PrimaryKeyConstraint('quantum_id'), sa.UniqueConstraint('ofc_id') ) op.create_table( 'ofcfiltermappings', sa.Column('ofc_id', sa.String(length=255), nullable=False), sa.Column('quantum_id', 
sa.String(length=36), nullable=False), sa.PrimaryKeyConstraint('quantum_id'), sa.UniqueConstraint('ofc_id') ) def downgrade(active_plugin=None, options=None): if not migration.should_run(active_plugin, migration_for_plugins): return op.drop_table('ofcfiltermappings') op.drop_table('ofcportmappings') op.drop_table('ofcnetworkmappings') op.drop_table('ofctenantmappings')
Brocade-OpenSource/OpenStack-DNRM-Neutron
neutron/db/migration/alembic_migrations/versions/3b54bf9e29f7_nec_plugin_sharednet.py
Python
apache-2.0
2,645
/* * Copyright Terracotta, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.ehcache.config.builders; import org.ehcache.CacheManager; import org.ehcache.PersistentCacheManager; import org.ehcache.config.Builder; import org.ehcache.config.CacheConfiguration; import org.ehcache.config.Configuration; import org.ehcache.config.units.MemoryUnit; import org.ehcache.core.EhcacheManager; import org.ehcache.core.spi.store.heap.SizeOfEngine; import org.ehcache.impl.config.copy.DefaultCopyProviderConfiguration; import org.ehcache.impl.config.event.CacheEventDispatcherFactoryConfiguration; import org.ehcache.impl.config.loaderwriter.writebehind.WriteBehindProviderConfiguration; import org.ehcache.impl.config.persistence.CacheManagerPersistenceConfiguration; import org.ehcache.impl.config.serializer.DefaultSerializationProviderConfiguration; import org.ehcache.impl.config.store.heap.DefaultSizeOfEngineProviderConfiguration; import org.ehcache.impl.config.store.disk.OffHeapDiskStoreProviderConfiguration; import org.ehcache.spi.copy.Copier; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceCreationConfiguration; import java.io.File; import java.util.Collection; import java.util.HashSet; import java.util.Set; import static java.util.Collections.emptySet; import static java.util.Collections.unmodifiableSet; import static org.ehcache.config.builders.ConfigurationBuilder.newConfigurationBuilder; import 
static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_MAX_OBJECT_SIZE; import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_OBJECT_GRAPH_SIZE; import static org.ehcache.impl.config.store.heap.DefaultSizeOfEngineConfiguration.DEFAULT_UNIT; /** * The {@code CacheManagerBuilder} enables building cache managers using a fluent style. * <p> * As with all Ehcache builders, all instances are immutable and calling any method on the builder will return a new * instance without modifying the one on which the method was called. * This enables the sharing of builder instances without any risk of seeing them modified by code elsewhere. */ public class CacheManagerBuilder<T extends CacheManager> implements Builder<T> { private final ConfigurationBuilder configBuilder; private final Set<Service> services; /** * Builds a {@link CacheManager} or a subtype of it and initializes it if requested. * * @param init whether the returned {@code CacheManager} is to be initialized or not * @return a {@code CacheManager} or a subtype of it */ public T build(final boolean init) { final T cacheManager = newCacheManager(services, configBuilder.build()); if(init) { cacheManager.init(); } return cacheManager; } /** * Builds a {@link CacheManager} or a subtype of it uninitialized. * * @return a {@code CacheManager} or a subtype of it uninitialized */ @Override public T build() { return build(false); } private CacheManagerBuilder() { this.configBuilder = newConfigurationBuilder(); this.services = emptySet(); } private CacheManagerBuilder(CacheManagerBuilder<T> builder, Set<Service> services) { this.configBuilder = builder.configBuilder; this.services = unmodifiableSet(services); } private CacheManagerBuilder(CacheManagerBuilder<T> builder, ConfigurationBuilder configBuilder) { this.configBuilder = configBuilder; this.services = builder.services; } /** * Creates a new {@link CacheManager} based on the provided configuration. 
* The returned {@code CacheManager} is uninitialized. * * @param configuration the configuration to use * @return a {@code CacheManager} */ public static CacheManager newCacheManager(final Configuration configuration) { return new EhcacheManager(configuration); } T newCacheManager(Collection<Service> services, final Configuration configuration) { final EhcacheManager ehcacheManager = new EhcacheManager(configuration, services); return cast(ehcacheManager); } @SuppressWarnings("unchecked") T cast(EhcacheManager ehcacheManager) { return (T) ehcacheManager; } /** * Adds a {@link CacheConfiguration} linked to the specified alias to the returned builder. * * @param alias the cache alias * @param configuration the {@code CacheConfiguration} * @param <K> the cache key type * @param <V> the cache value type * @return a new builder with the added cache configuration * * @see CacheConfigurationBuilder */ public <K, V> CacheManagerBuilder<T> withCache(String alias, CacheConfiguration<K, V> configuration) { return new CacheManagerBuilder<>(this, configBuilder.addCache(alias, configuration)); } /** * Convenience method to add a {@link CacheConfiguration} linked to the specified alias to the returned builder by * building it from the provided {@link Builder}. * * @param alias the cache alias * @param configurationBuilder the {@code Builder} to get {@code CacheConfiguration} from * @param <K> the cache key type * @param <V> the cache value type * @return a new builder with the added cache configuration * * @see CacheConfigurationBuilder */ public <K, V> CacheManagerBuilder<T> withCache(String alias, Builder<? extends CacheConfiguration<K, V>> configurationBuilder) { return withCache(alias, configurationBuilder.build()); } /** * Specializes the returned {@link CacheManager} subtype through a specific {@link CacheManagerConfiguration} which * will optionally add configurations to the returned builder. 
* * @param cfg the {@code CacheManagerConfiguration} to use * @param <N> the subtype of {@code CacheManager} * @return a new builder ready to build a more specific subtype of cache manager * * @see #persistence(String) * @see PersistentCacheManager * @see CacheManagerPersistenceConfiguration */ public <N extends T> CacheManagerBuilder<N> with(CacheManagerConfiguration<N> cfg) { return cfg.builder(this); } /** * Convenience method to specialize the returned {@link CacheManager} subtype through a {@link CacheManagerConfiguration} * built using the provided {@link Builder}. * * @param cfgBuilder the {@code Builder} to get the {@code CacheManagerConfiguration} from * @return a new builder ready to build a more specific subtype of cache manager * * @see CacheConfigurationBuilder */ public <N extends T> CacheManagerBuilder<N> with(Builder<? extends CacheManagerConfiguration<N>> cfgBuilder) { return with(cfgBuilder.build()); } /** * Adds a {@link Service} instance to the returned builder. * <p> * The service instance will be used by the constructed {@link CacheManager}. * * @param service the {@code Service} to add * @return a new builder with the added service */ public CacheManagerBuilder<T> using(Service service) { Set<Service> newServices = new HashSet<>(services); newServices.add(service); return new CacheManagerBuilder<>(this, newServices); } /** * Adds a default {@link Copier} for the specified type to the returned builder. * * @param clazz the {@code Class} for which the copier is * @param copier the {@code Copier} instance * @param <C> the type which can be copied * @return a new builder with the added default copier */ public <C> CacheManagerBuilder<T> withCopier(Class<C> clazz, Class<? 
extends Copier<C>> copier) { DefaultCopyProviderConfiguration service = configBuilder.findServiceByClass(DefaultCopyProviderConfiguration.class); if (service == null) { service = new DefaultCopyProviderConfiguration(); service.addCopierFor(clazz, copier); return new CacheManagerBuilder<>(this, configBuilder.addService(service)); } else { DefaultCopyProviderConfiguration newConfig = new DefaultCopyProviderConfiguration(service); newConfig.addCopierFor(clazz, copier, true); return new CacheManagerBuilder<>(this, configBuilder.removeService(service).addService(newConfig)); } } /** * Adds a default {@link Serializer} for the specified type to the returned builder. * * @param clazz the {@code Class} for which the serializer is * @param serializer the {@code Serializer} instance * @param <C> the type which can be serialized * @return a new builder with the added default serializer */ public <C> CacheManagerBuilder<T> withSerializer(Class<C> clazz, Class<? extends Serializer<C>> serializer) { DefaultSerializationProviderConfiguration service = configBuilder.findServiceByClass(DefaultSerializationProviderConfiguration.class); if (service == null) { service = new DefaultSerializationProviderConfiguration(); service.addSerializerFor(clazz, serializer); return new CacheManagerBuilder<>(this, configBuilder.addService(service)); } else { DefaultSerializationProviderConfiguration newConfig = new DefaultSerializationProviderConfiguration(service); newConfig.addSerializerFor(clazz, serializer, true); return new CacheManagerBuilder<>(this, configBuilder.removeService(service).addService(newConfig)); } } /** * Adds a default {@link SizeOfEngine} configuration, that limits the max object graph to * size, to the returned builder. 
* * @param size the max object graph size * @return a new builder with the added configuration */ public CacheManagerBuilder<T> withDefaultSizeOfMaxObjectGraph(long size) { DefaultSizeOfEngineProviderConfiguration configuration = configBuilder.findServiceByClass(DefaultSizeOfEngineProviderConfiguration.class); if (configuration == null) { return new CacheManagerBuilder<>(this, configBuilder.addService(new DefaultSizeOfEngineProviderConfiguration(DEFAULT_MAX_OBJECT_SIZE, DEFAULT_UNIT, size))); } else { ConfigurationBuilder builder = configBuilder.removeService(configuration); return new CacheManagerBuilder<>(this, builder.addService(new DefaultSizeOfEngineProviderConfiguration(configuration .getMaxObjectSize(), configuration.getUnit(), size))); } } /** * Adds a default {@link SizeOfEngine} configuration, that limits the max object size, to * the returned builder. * * @param size the max object size * @param unit the max object size unit * @return a new builder with the added configuration */ public CacheManagerBuilder<T> withDefaultSizeOfMaxObjectSize(long size, MemoryUnit unit) { DefaultSizeOfEngineProviderConfiguration configuration = configBuilder.findServiceByClass(DefaultSizeOfEngineProviderConfiguration.class); if (configuration == null) { return new CacheManagerBuilder<>(this, configBuilder.addService(new DefaultSizeOfEngineProviderConfiguration(size, unit, DEFAULT_OBJECT_GRAPH_SIZE))); } else { ConfigurationBuilder builder = configBuilder.removeService(configuration); return new CacheManagerBuilder<>(this, builder.addService(new DefaultSizeOfEngineProviderConfiguration(size, unit, configuration .getMaxObjectGraphSize()))); } } /** * Adds a {@link WriteBehindProviderConfiguration}, that specifies the thread pool to use, to the returned builder. 
* * @param threadPoolAlias the thread pool alias * @return a new builder with the added configuration * * @see PooledExecutionServiceConfigurationBuilder */ public CacheManagerBuilder<T> withDefaultWriteBehindThreadPool(String threadPoolAlias) { WriteBehindProviderConfiguration config = configBuilder.findServiceByClass(WriteBehindProviderConfiguration.class); if (config == null) { return new CacheManagerBuilder<>(this, configBuilder.addService(new WriteBehindProviderConfiguration(threadPoolAlias))); } else { ConfigurationBuilder builder = configBuilder.removeService(config); return new CacheManagerBuilder<>(this, builder.addService(new WriteBehindProviderConfiguration(threadPoolAlias))); } } /** * Adds a {@link OffHeapDiskStoreProviderConfiguration}, that specifies the thread pool to use, to the returned * builder. * * @param threadPoolAlias the thread pool alias * @return a new builder with the added configuration * * @see PooledExecutionServiceConfigurationBuilder */ public CacheManagerBuilder<T> withDefaultDiskStoreThreadPool(String threadPoolAlias) { OffHeapDiskStoreProviderConfiguration config = configBuilder.findServiceByClass(OffHeapDiskStoreProviderConfiguration.class); if (config == null) { return new CacheManagerBuilder<>(this, configBuilder.addService(new OffHeapDiskStoreProviderConfiguration(threadPoolAlias))); } else { ConfigurationBuilder builder = configBuilder.removeService(config); return new CacheManagerBuilder<>(this, builder.addService(new OffHeapDiskStoreProviderConfiguration(threadPoolAlias))); } } /** * Adds a {@link CacheEventDispatcherFactoryConfiguration}, that specifies the thread pool to use, to the returned * builder. 
* * @param threadPoolAlias the thread pool alias * @return a new builder with the added configuration * * @see PooledExecutionServiceConfigurationBuilder */ public CacheManagerBuilder<T> withDefaultEventListenersThreadPool(String threadPoolAlias) { CacheEventDispatcherFactoryConfiguration config = configBuilder.findServiceByClass(CacheEventDispatcherFactoryConfiguration.class); if (config == null) { return new CacheManagerBuilder<>(this, configBuilder.addService(new CacheEventDispatcherFactoryConfiguration(threadPoolAlias))); } else { ConfigurationBuilder builder = configBuilder.removeService(config); return new CacheManagerBuilder<>(this, builder.addService(new CacheEventDispatcherFactoryConfiguration(threadPoolAlias))); } } /** * Adds a {@link ServiceCreationConfiguration} to the returned builder. * <p> * These configurations are used to load services and configure them at creation time. * * @param serviceConfiguration the {@code ServiceCreationConfiguration} to use * @return a new builder with the added configuration */ public CacheManagerBuilder<T> using(ServiceCreationConfiguration<?> serviceConfiguration) { return new CacheManagerBuilder<>(this, configBuilder.addService(serviceConfiguration)); } /** * Replaces an existing {@link ServiceCreationConfiguration} of the same type on the returned builder. * <p> * Duplicate service creation configuration will cause a cache manager to fail to initialize. 
* * @param overwriteServiceConfiguration the new {@code ServiceCreationConfiguration} to use * @return a new builder with the replaced configuration */ public CacheManagerBuilder<T> replacing(ServiceCreationConfiguration<?> overwriteServiceConfiguration) { ServiceCreationConfiguration<?> existingConfiguration = configBuilder.findServiceByClass(overwriteServiceConfiguration.getClass()); return new CacheManagerBuilder<>(this, configBuilder.removeService(existingConfiguration) .addService(overwriteServiceConfiguration)); } /** * Adds a {@link ClassLoader}, to use for non Ehcache types, to the returned builder * * @param classLoader the class loader to use * @return a new builder with the added class loader */ public CacheManagerBuilder<T> withClassLoader(ClassLoader classLoader) { return new CacheManagerBuilder<>(this, configBuilder.withClassLoader(classLoader)); } /** * Creates a new {@code CacheManagerBuilder} * * @return the cache manager builder */ public static CacheManagerBuilder<CacheManager> newCacheManagerBuilder() { return new CacheManagerBuilder<>(); } /** * Convenience method to get a {@link CacheManagerConfiguration} for a {@link PersistentCacheManager} stored on disk. The actual * level of persistence is configured on the disk resource pool per cache. * * @param rootDirectory the root directory to use for disk storage * @return a {@code CacheManagerConfiguration} * * @see ResourcePoolsBuilder#disk(long, MemoryUnit, boolean) * @see #with(CacheManagerConfiguration) * @see PersistentCacheManager */ public static CacheManagerConfiguration<PersistentCacheManager> persistence(String rootDirectory) { return persistence(new File(rootDirectory)); } /** * Convenience method to get a {@link CacheManagerConfiguration} for a {@link PersistentCacheManager} stored on disk. The actual * level of persistence is configured on the disk resource pool per cache. 
* * @param rootDirectory the root directory to use for disk storage * @return a {@code CacheManagerConfiguration} * * @see ResourcePoolsBuilder#disk(long, MemoryUnit, boolean) * @see #with(CacheManagerConfiguration) * @see PersistentCacheManager */ public static CacheManagerConfiguration<PersistentCacheManager> persistence(File rootDirectory) { return new CacheManagerPersistenceConfiguration(rootDirectory); } }
aurbroszniowski/ehcache3
impl/src/main/java/org/ehcache/config/builders/CacheManagerBuilder.java
Java
apache-2.0
17,637
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ package org.elasticsearch.xpack.core.ml.datafeed; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractSerializingTestCase; import java.io.IOException; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.core.Is.is; public class DelayedDataCheckConfigTests extends AbstractSerializingTestCase<DelayedDataCheckConfig> { @Override protected DelayedDataCheckConfig createTestInstance(){ return createRandomizedConfig(100); } @Override protected Writeable.Reader<DelayedDataCheckConfig> instanceReader() { return DelayedDataCheckConfig::new; } @Override protected DelayedDataCheckConfig doParseInstance(XContentParser parser) { return DelayedDataCheckConfig.STRICT_PARSER.apply(parser, null); } public void testConstructor() { expectThrows(IllegalArgumentException.class, () -> new DelayedDataCheckConfig(true, TimeValue.MINUS_ONE)); expectThrows(IllegalArgumentException.class, () -> new DelayedDataCheckConfig(true, TimeValue.timeValueHours(25))); } public void testEnabledDelayedDataCheckConfig() { DelayedDataCheckConfig delayedDataCheckConfig = DelayedDataCheckConfig.enabledDelayedDataCheckConfig(TimeValue.timeValueHours(5)); assertThat(delayedDataCheckConfig.isEnabled(), equalTo(true)); assertThat(delayedDataCheckConfig.getCheckWindow(), equalTo(TimeValue.timeValueHours(5))); } public void testDisabledDelayedDataCheckConfig() { DelayedDataCheckConfig delayedDataCheckConfig = DelayedDataCheckConfig.disabledDelayedDataCheckConfig(); assertThat(delayedDataCheckConfig.isEnabled(), equalTo(false)); 
assertThat(delayedDataCheckConfig.getCheckWindow(), equalTo(null)); } public void testDefaultDelayedDataCheckConfig() { DelayedDataCheckConfig delayedDataCheckConfig = DelayedDataCheckConfig.defaultDelayedDataCheckConfig(); assertThat(delayedDataCheckConfig.isEnabled(), equalTo(true)); assertThat(delayedDataCheckConfig.getCheckWindow(), is(nullValue())); } public static DelayedDataCheckConfig createRandomizedConfig(long bucketSpanMillis) { boolean enabled = randomBoolean(); TimeValue timeWindow = null; if (enabled || randomBoolean()) { // time span is required to be at least 1 millis, so we use a custom method to generate a time value here timeWindow = new TimeValue(randomLongBetween(bucketSpanMillis,bucketSpanMillis*2)); } return new DelayedDataCheckConfig(enabled, timeWindow); } @Override protected DelayedDataCheckConfig mutateInstance(DelayedDataCheckConfig instance) throws IOException { boolean enabled = instance.isEnabled(); TimeValue timeWindow = instance.getCheckWindow(); switch (between(0, 1)) { case 0: enabled = enabled == false; if (randomBoolean()) { timeWindow = TimeValue.timeValueMillis(randomLongBetween(1, 1000)); } else { timeWindow = null; } break; case 1: if (timeWindow == null) { timeWindow = TimeValue.timeValueMillis(randomLongBetween(1, 1000)); } else { timeWindow = new TimeValue(timeWindow.getMillis() + between(10, 100)); } enabled = true; break; default: throw new AssertionError("Illegal randomisation branch"); } return new DelayedDataCheckConfig(enabled, timeWindow); } }
ern/elasticsearch
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DelayedDataCheckConfigTests.java
Java
apache-2.0
4,017
class SwaggerCodegenAT2 < Formula desc "Generate clients, server stubs, and docs from an OpenAPI spec" homepage "https://swagger.io/swagger-codegen/" url "https://github.com/swagger-api/swagger-codegen/archive/v2.4.12.tar.gz" sha256 "52a61b28a95a250fdbe7ef38ef9ea1116878b6271e768e054404336ab19f1c14" bottle do cellar :any_skip_relocation sha256 "76cfaccf0ba7e190381d04b08078e14e27dcb46d572d85f6f4097d78563c6113" => :catalina sha256 "38d11eaecb8e3d0f555b8fdac370df7f5b09c41eacbc1ba70db7f51bf00cc9c9" => :mojave sha256 "a7a408013e8775c8cd959a716e0266f2c61bd595011135ee9d605c1d05765858" => :high_sierra end keg_only :versioned_formula depends_on "maven" => :build depends_on :java => "1.8" def install # Need to set JAVA_HOME manually since maven overrides 1.8 with 1.7+ cmd = Language::Java.java_home_cmd("1.8") ENV["JAVA_HOME"] = Utils.popen_read(cmd).chomp system "mvn", "clean", "package" libexec.install "modules/swagger-codegen-cli/target/swagger-codegen-cli.jar" bin.write_jar_script libexec/"swagger-codegen-cli.jar", "swagger-codegen" end test do (testpath/"minimal.yaml").write <<~EOS --- swagger: '2.0' info: version: 0.0.0 title: Simple API paths: /: get: responses: 200: description: OK EOS system "#{bin}/swagger-codegen", "generate", "-i", "minimal.yaml", "-l", "html2" assert_includes File.read(testpath/"index.html"), "<h1>Simple API</h1>" end end
BrewTestBot/homebrew-core
Formula/swagger-codegen@2.rb
Ruby
bsd-2-clause
1,550
// Copyright (C) 2016 the V8 project authors. All rights reserved. // This code is governed by the BSD license found in the LICENSE file. /*--- esid: sec-%typedarray%.prototype.copywithin description: > Set values with out of bounds negative target argument. info: | 22.2.3.5 %TypedArray%.prototype.copyWithin (target, start [ , end ] ) %TypedArray%.prototype.copyWithin is a distinct function that implements the same algorithm as Array.prototype.copyWithin as defined in 22.1.3.3 except that the this object's [[ArrayLength]] internal slot is accessed in place of performing a [[Get]] of "length" and the actual copying of values in step 12 must be performed in a manner that preserves the bit-level encoding of the source data. ... 22.1.3.3 Array.prototype.copyWithin (target, start [ , end ] ) ... 4. If relativeTarget < 0, let to be max((len + relativeTarget), 0); else let to be min(relativeTarget, len). ... includes: [compareArray.js, testBigIntTypedArray.js] features: [BigInt, TypedArray] ---*/ testWithBigIntTypedArrayConstructors(function(TA) { assert( compareArray( new TA([0n, 1n, 2n, 3n]).copyWithin(-10, 0), [0n, 1n, 2n, 3n] ), '[0, 1, 2, 3].copyWithin(-10, 0) -> [0, 1, 2, 3]' ); assert( compareArray( new TA([1n, 2n, 3n, 4n, 5n]).copyWithin(-Infinity, 0), [1n, 2n, 3n, 4n, 5n] ), '[1, 2, 3, 4, 5].copyWithin(-Infinity, 0) -> [1, 2, 3, 4, 5]' ); assert( compareArray( new TA([0n, 1n, 2n, 3n, 4n]).copyWithin(-10, 2), [2n, 3n, 4n, 3n, 4n] ), '[0, 1, 2, 3, 4].copyWithin(-10, 2) -> [2, 3, 4, 3, 4]' ); assert( compareArray( new TA([1n, 2n, 3n, 4n, 5n]).copyWithin(-Infinity, 2), [3n, 4n, 5n, 4n, 5n] ), '[1, 2, 3, 4, 5].copyWithin(-Infinity, 2) -> [3, 4, 5, 4, 5]' ); });
sebastienros/jint
Jint.Tests.Test262/test/built-ins/TypedArray/prototype/copyWithin/BigInt/negative-out-of-bounds-target.js
JavaScript
bsd-2-clause
1,835
class Travis < Formula desc "Command-line client for Travis CI" homepage "https://github.com/travis-ci/travis.rb/" url "https://github.com/travis-ci/travis.rb/archive/v1.8.9.tar.gz" sha256 "7a143bd0eb90e825370c808d38b70cca8c399c68bea8138442f40f09b6bbafc4" revision 3 bottle do sha256 "d695085c0886f6db8d5a1afeb6cae27e61d7fcc41d2e069c16ce0909048dba64" => :mojave sha256 "32634e86fdc04e7ca0df1834cf1ee6e8cdd3bfb0f89c0e4ed36a0d55b08483b3" => :high_sierra sha256 "1d21554de55885fd08e4433772663410044f088cf346b63533142019a2d865f5" => :sierra end depends_on "ruby" if MacOS.version <= :sierra resource "addressable" do url "https://rubygems.org/gems/addressable-2.4.0.gem" sha256 "7abfff765571b0a73549c9a9d2f7e143979cd0c252f7fa4c81e7102a973ef656" end resource "backports" do url "https://rubygems.org/gems/backports-3.11.3.gem" sha256 "57b04d4e2806c199bff3663d810db25e019cf88c42cacc0edbb36d3038d6a5ab" end resource "ethon" do url "https://rubygems.org/gems/ethon-0.11.0.gem" sha256 "88ec7960a8e00f76afc96ed15dcc8be0cb515f963fe3bb1d4e0b5c51f9d7e078" end resource "faraday" do url "https://rubygems.org/gems/faraday-0.15.2.gem" sha256 "affa23f5e5ee27170cbb5045c580af9b396bac525516c6583661c2bb08038f92" end resource "faraday_middleware" do url "https://rubygems.org/gems/faraday_middleware-0.12.2.gem" sha256 "2d90093c18c23e7f5a6f602ed3114d2c62abc3f7f959dd3046745b24a863f1dc" end resource "ffi" do url "https://rubygems.org/gems/ffi-1.9.25.gem" sha256 "f854f08f08190fec772a12e863f33761d02ad3efea3c3afcdeffc8a06313f54a" end resource "gh" do url "https://rubygems.org/gems/gh-0.15.1.gem" sha256 "ef733f81c17846f217f5ad9616105e9adc337775d41de1cc330133ad25708d3c" end resource "highline" do url "https://rubygems.org/gems/highline-1.7.10.gem" sha256 "1e147d5d20f1ad5b0e23357070d1e6d0904ae9f71c3c49e0234cf682ae3c2b06" end if MacOS.version <= :sierra resource "json" do url "https://rubygems.org/gems/json-2.1.0.gem" sha256 "b76fd09b881088c6c64a12721a1528f2f747a1c2ee52fab4c1f60db8af946607" end end resource 
"launchy" do url "https://rubygems.org/gems/launchy-2.4.3.gem" sha256 "42f52ce12c6fe079bac8a804c66522a0eefe176b845a62df829defe0e37214a4" end resource "multi_json" do url "https://rubygems.org/gems/multi_json-1.13.1.gem" sha256 "db8613c039b9501e6b2fb85efe4feabb02f55c3365bae52bba35381b89c780e6" end resource "multipart-post" do url "https://rubygems.org/gems/multipart-post-2.0.0.gem" sha256 "3dc44e50d3df3d42da2b86272c568fd7b75c928d8af3cc5f9834e2e5d9586026" end resource "net-http-persistent" do url "https://rubygems.org/gems/net-http-persistent-2.9.4.gem" sha256 "24274d207ffe66222ef70c78a052c7ea6e66b4ff21e2e8a99e3335d095822ef9" end resource "net-http-pipeline" do url "https://rubygems.org/gems/net-http-pipeline-1.0.1.gem" sha256 "6923ce2f28bfde589a9f385e999395eead48ccfe4376d4a85d9a77e8c7f0b22f" end resource "pusher-client" do url "https://rubygems.org/gems/pusher-client-0.6.2.gem" sha256 "c405c931090e126c056d99f6b69a01b1bcb6cbfdde02389c93e7d547c6efd5a3" end resource "typhoeus" do url "https://rubygems.org/gems/typhoeus-0.8.0.gem" sha256 "28b7cf3c7d915a06d412bddab445df94ab725252009aa409f5ea41ab6577a30f" end resource "websocket" do url "https://rubygems.org/gems/websocket-1.2.8.gem" sha256 "1d8155c1cdaab8e8e72587a60e08423c9dd84ee44e4e827358ce3d4c2ccb2138" end def install ENV["GEM_HOME"] = libexec resources.each do |r| r.verify_download_integrity(r.fetch) system "gem", "install", r.cached_download, "--ignore-dependencies", "--no-document", "--install-dir", libexec end system "gem", "build", "travis.gemspec" system "gem", "install", "--ignore-dependencies", "travis-#{version}.gem" bin.install libexec/"bin/travis" bin.env_script_all_files(libexec/"bin", :GEM_HOME => ENV["GEM_HOME"]) end test do (testpath/".travis.yml").write <<~EOS language: ruby sudo: true matrix: include: - os: osx rvm: system EOS output = shell_output("#{bin}/travis lint #{testpath}/.travis.yml") assert_match "valid", output output = shell_output("#{bin}/travis init 2>&1", 1) assert_match "Can't figure 
out GitHub repo name", output end end
adamliter/homebrew-core
Formula/travis.rb
Ruby
bsd-2-clause
4,429
# Sample code from Programing Ruby, page 18 line = 'abc' line.gsub(/Perl|Python/, 'Ruby')
wkoszek/book-programming-ruby
src/ex0035.rb
Ruby
bsd-2-clause
93
cask :v1 => 'jenkins' do version '1.639' sha256 'be0637aa4b0078d3998d12d1e51ae9a79343d948d21c803a1d33855e94dade36' url "http://mirrors.jenkins-ci.org/osx/jenkins-#{version}.pkg" name 'Jenkins' homepage 'https://jenkins-ci.org/' license :mit pkg "jenkins-#{version}.pkg" binary '/Library/Application Support/Jenkins/jenkins-runner.sh', :target => 'jenkins-runner' uninstall :script => '/Library/Application Support/Jenkins/Uninstall.command', :pkgutil => 'org.jenkins-ci.*pkg', :launchctl => 'org.jenkins-ci' zap :delete => '/Library/Preferences/org.jenkins-ci.plist' conflicts_with :formula => %w{ jenkins homebrew/versions/jenkins-lts }, :cask => 'caskroom/versions/jenkins-lts' caveats <<-EOS.undent #{token} requires Java. You can install the latest version with brew cask install java You can change the launch parameters for #{token} using "defaults", as described in https://wiki.jenkins-ci.org/display/JENKINS/Thanks+for+using+OSX+Installer Alternatively, you can directly run #{token} with custom parameters, eg java -jar /Applications/Jenkins/jenkins.war -XX:PermSize=$MIN_PERM_GEN --httpPort=$HTTP_PORT For more options, see https://wiki.jenkins-ci.org/display/JENKINS/Starting+and+Accessing+Jenkins EOS end
brianshumate/homebrew-cask
Casks/jenkins.rb
Ruby
bsd-2-clause
1,441
// Copyright (c) 2012 Ecma International. All rights reserved. // This code is governed by the BSD license found in the LICENSE file. /*--- es5id: 15.2.3.3-2-25 description: > Object.getOwnPropertyDescriptor - argument 'P' is a number that converts to a string (value is 1e-7) ---*/ var obj = { "1e-7": 1 }; var desc = Object.getOwnPropertyDescriptor(obj, 1e-7); assert.sameValue(desc.value, 1, 'desc.value');
sebastienros/jint
Jint.Tests.Test262/test/built-ins/Object/getOwnPropertyDescriptor/15.2.3.3-2-25.js
JavaScript
bsd-2-clause
425
// Copyright Neil Groves 2009. Use, modification and // distribution is subject to the Boost Software License, Version // 1.0. (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) // // // For more information, see http://www.boost.org/libs/range/ // #ifndef BOOST_RANGE_DETAIL_RANGE_RETURN_HPP_INCLUDED #define BOOST_RANGE_DETAIL_RANGE_RETURN_HPP_INCLUDED #include <boost/range/begin.hpp> #include <boost/range/end.hpp> #include <boost/range/iterator_range.hpp> namespace pdalboost { enum range_return_value { // (*) indicates the most common values return_found, // only the found resulting iterator (*) return_next, // next(found) iterator return_prior, // prior(found) iterator return_begin_found, // [begin, found) range (*) return_begin_next, // [begin, next(found)) range return_begin_prior, // [begin, prior(found)) range return_found_end, // [found, end) range (*) return_next_end, // [next(found), end) range return_prior_end, // [prior(found), end) range return_begin_end // [begin, end) range }; template< class SinglePassRange, range_return_value > struct range_return { typedef pdalboost::iterator_range< BOOST_DEDUCED_TYPENAME range_iterator<SinglePassRange>::type > type; static type pack(BOOST_DEDUCED_TYPENAME range_iterator<SinglePassRange>::type found, SinglePassRange& rng) { return type(found, pdalboost::end(rng)); } }; template< class SinglePassRange > struct range_return< SinglePassRange, return_found > { typedef BOOST_DEDUCED_TYPENAME range_iterator<SinglePassRange>::type type; static type pack(type found, SinglePassRange&) { return found; } }; template< class SinglePassRange > struct range_return< SinglePassRange, return_next > { typedef BOOST_DEDUCED_TYPENAME range_iterator<SinglePassRange>::type type; static type pack(type found, SinglePassRange& rng) { return found == pdalboost::end(rng) ? 
found : pdalboost::next(found); } }; template< class BidirectionalRange > struct range_return< BidirectionalRange, return_prior > { typedef BOOST_DEDUCED_TYPENAME range_iterator<BidirectionalRange>::type type; static type pack(type found, BidirectionalRange& rng) { return found == pdalboost::begin(rng) ? found : pdalboost::prior(found); } }; template< class SinglePassRange > struct range_return< SinglePassRange, return_begin_found > { typedef pdalboost::iterator_range< BOOST_DEDUCED_TYPENAME range_iterator<SinglePassRange>::type > type; static type pack(BOOST_DEDUCED_TYPENAME range_iterator<SinglePassRange>::type found, SinglePassRange& rng) { return type(pdalboost::begin(rng), found); } }; template< class SinglePassRange > struct range_return< SinglePassRange, return_begin_next > { typedef pdalboost::iterator_range< BOOST_DEDUCED_TYPENAME range_iterator<SinglePassRange>::type > type; static type pack(BOOST_DEDUCED_TYPENAME range_iterator<SinglePassRange>::type found, SinglePassRange& rng) { return type( pdalboost::begin(rng), found == pdalboost::end(rng) ? found : pdalboost::next(found) ); } }; template< class BidirectionalRange > struct range_return< BidirectionalRange, return_begin_prior > { typedef pdalboost::iterator_range< BOOST_DEDUCED_TYPENAME range_iterator<BidirectionalRange>::type > type; static type pack(BOOST_DEDUCED_TYPENAME range_iterator<BidirectionalRange>::type found, BidirectionalRange& rng) { return type( pdalboost::begin(rng), found == pdalboost::begin(rng) ? 
found : pdalboost::prior(found) ); } }; template< class SinglePassRange > struct range_return< SinglePassRange, return_found_end > { typedef pdalboost::iterator_range< BOOST_DEDUCED_TYPENAME range_iterator<SinglePassRange>::type > type; static type pack(BOOST_DEDUCED_TYPENAME range_iterator<SinglePassRange>::type found, SinglePassRange& rng) { return type(found, pdalboost::end(rng)); } }; template< class SinglePassRange > struct range_return< SinglePassRange, return_next_end > { typedef pdalboost::iterator_range< BOOST_DEDUCED_TYPENAME range_iterator<SinglePassRange>::type > type; static type pack(BOOST_DEDUCED_TYPENAME range_iterator<SinglePassRange>::type found, SinglePassRange& rng) { return type( found == pdalboost::end(rng) ? found : pdalboost::next(found), pdalboost::end(rng) ); } }; template< class BidirectionalRange > struct range_return< BidirectionalRange, return_prior_end > { typedef pdalboost::iterator_range< BOOST_DEDUCED_TYPENAME range_iterator<BidirectionalRange>::type > type; static type pack(BOOST_DEDUCED_TYPENAME range_iterator<BidirectionalRange>::type found, BidirectionalRange& rng) { return type( found == pdalboost::begin(rng) ? found : pdalboost::prior(found), pdalboost::end(rng) ); } }; template< class SinglePassRange > struct range_return< SinglePassRange, return_begin_end > { typedef pdalboost::iterator_range< BOOST_DEDUCED_TYPENAME range_iterator<SinglePassRange>::type > type; static type pack(BOOST_DEDUCED_TYPENAME range_iterator<SinglePassRange>::type, SinglePassRange& rng) { return type(pdalboost::begin(rng), pdalboost::end(rng)); } }; } #endif // include guard
lucadelu/PDAL
vendor/pdalboost/boost/range/detail/range_return.hpp
C++
bsd-3-clause
6,270
// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "webkit/media/webmediaplayer_params.h" #include "media/base/audio_renderer_sink.h" #include "media/base/media_log.h" namespace webkit_media { WebMediaPlayerParams::WebMediaPlayerParams( const scoped_refptr<media::AudioRendererSink>& audio_renderer_sink, const scoped_refptr<media::GpuVideoDecoder::Factories>& gpu_factories, const scoped_refptr<media::MediaLog>& media_log) : audio_renderer_sink_(audio_renderer_sink), gpu_factories_(gpu_factories), media_log_(media_log) { DCHECK(media_log_); } WebMediaPlayerParams::~WebMediaPlayerParams() {} } // namespace webkit_media
codenote/chromium-test
webkit/media/webmediaplayer_params.cc
C++
bsd-3-clause
788
// Copyright 2009 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package runtime import "unsafe" // The calls to nop are to keep these functions from being inlined. // If they are inlined we have no guarantee that later rewrites of the // code by optimizers will preserve the relative order of memory accesses. //go:nosplit func atomicload(ptr *uint32) uint32 { nop() return *ptr } //go:nosplit func atomicloadp(ptr unsafe.Pointer) unsafe.Pointer { nop() return *(*unsafe.Pointer)(ptr) } //go:nosplit func xadd64(ptr *uint64, delta int64) uint64 { for { old := *ptr if cas64(ptr, old, old+uint64(delta)) { return old + uint64(delta) } } } //go:nosplit func xchg64(ptr *uint64, new uint64) uint64 { for { old := *ptr if cas64(ptr, old, new) { return old } } } //go:noescape func xadd(ptr *uint32, delta int32) uint32 //go:noescape func xchg(ptr *uint32, new uint32) uint32 // NO go:noescape annotation; see atomic_pointer.go. func xchgp1(ptr unsafe.Pointer, new unsafe.Pointer) unsafe.Pointer //go:noescape func xchguintptr(ptr *uintptr, new uintptr) uintptr //go:noescape func atomicload64(ptr *uint64) uint64 //go:noescape func atomicor8(ptr *uint8, val uint8) //go:noescape func cas64(ptr *uint64, old, new uint64) bool //go:noescape func atomicstore(ptr *uint32, val uint32) //go:noescape func atomicstore64(ptr *uint64, val uint64) // NO go:noescape annotation; see atomic_pointer.go. func atomicstorep1(ptr unsafe.Pointer, val unsafe.Pointer)
frobware/go
src/runtime/atomic_386.go
GO
bsd-3-clause
1,585
/** * License and Terms of Use * * Copyright (c) 2011 SignpostMarv * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ (function(window, undefined){ var Array = window['Array'], EventTarget = window['EventTarget'], mapapi = window['mapapi'], gridPoint = mapapi['gridPoint'], bounds = mapapi['bounds'], ctype_digit = mapapi['utils']['ctype_digit'] ; if(mapapi == undefined){ throw 'mapapi.js is not loaded.'; }else if(EventTarget == undefined){ throw 'EventTarget is not loaded'; } function extend(a,b){ a.prototype = new b; a.prototype['constructor'] = a; } function shape(options){ EventTarget['call'](this); this['opts'] = {}; for(var i in this['defaultOpts']){ this['opts'][i] = this['defaultOpts'][i]; } if(options != undefined){ this['options'](options); } } extend(shape, EventTarget); shape.prototype['defaultOpts'] = {'fillStyle':'rgba(255,255,255,0.5)', 'strokeStyle':'rgb(255,255,255)', 'lineWidth':0}; shape.prototype['options'] = function(options){ options = options || {}; for(var i in options){ this['opts'] = options[i]; } } shape.prototype['withinShape'] = function(pos){ if(pos instanceof gridPoint){ return true; } return false; } shape.prototype['coords'] = function(value){ if(value != undefined){ this['options']({'coords':value}); } var coords = this['opts']['coords'] ; return coords != undefined ? coords : []; } shape.prototype['clickable'] = function(value){ if(value != undefined){ this['options']({'clickable':!!value}); } var clickable = this['opts']['clickable']; ; return clickable != undefined ? 
clickable : false; } shape.prototype['strokeStyle'] = function(value){ if(typeof value == 'string'){ this['options']({'strokeStyle':value}); } return this['opts']['strokeStyle']; } shape.prototype['lineWidth'] = function(value){ if(typeof value == 'number'){ this['options']({'lineWidth':Math.max(0,value)}); } return Math.max(0, this['opts']['lineWidth']); } shape.prototype['intersects'] = function(value){ if(value instanceof bounds && this['bounds'] instanceof bounds){ return this['bounds']['intersects'](value); } return false; } mapapi['shape'] = shape; function shapeManager(){ Array['call'](this); } extend(shapeManager, Array); shapeManager.prototype['push'] = function(){ for(var i=0;i<arguments['length'];++i){ if(!(arguments[i] instanceof shape)){ throw 'Arguments of mapapi.shapeManager::push() should be instances of mapapi.shape'; } } Array.prototype['push']['apply'](this, arguments); } shapeManager.prototype['intersects'] = function(value){ if(value instanceof bounds){ var shpmngr = new this['constructor'] ; for(var i=0;i<this['length'];++i){ if(this[i]['intersects'](value)){ shpmngr['push'](this[i]); } } return shpmngr; }else{ throw 'Intersection argument must be an instance of mapapi.bounds'; } } shapeManager.prototype['click'] = function(value){ var value = gridPoint['fuzzy'](value), ret ; for(var i=0;i<this['length'];++i){ if(this[i]['clickable']() && this[i]['withinShape'](value)){ ret = this[i]['fire']('click',{'pos':value}); if(ret != undefined && ret == false){ break; } } } } mapapi['shapeManager'] = shapeManager; function poly(options){ shape['call'](this, options); } extend(poly, shape); poly.prototype['options'] = function(options){ var options = options || {}, coords = options['coords'], fillStyle = options['fillStyle'], strokeStyle = options['strokeStyle'], lineWidth = options['lineWidth'] ; if(options['coords'] != undefined){ if(coords instanceof Array){ for(var i=0;i<coords['length'];++i){ coords[i] = gridPoint['fuzzy'](coords[i]); } var swx = 
coords[0]['x'], swy = coords[0]['y'], nex = coords[0]['x'], ney = coords[0]['y'] ; for(var i=1;i<coords['length'];++i){ swx = (coords[i]['x'] < swx) ? coords[i]['x'] : swx; swy = (coords[i]['y'] < swy) ? coords[i]['y'] : swy; nex = (coords[i]['x'] > nex) ? coords[i]['x'] : nex; ney = (coords[i]['y'] > ney) ? coords[i]['y'] : ney; } this['bounds'] = new bounds(new gridPoint(swx, swy), new gridPoint(nex, ney)); this['opts']['coords'] = coords; this['fire']('changedcoords'); }else{ throw 'coords must be array'; } } if(typeof fillStyle == 'string'){ var diff = this['opts']['fillStyle'] != fillStyle; this['opts']['fillStyle'] = fillStyle; if(diff){ this['fire']('changedfillstyle'); } } if(typeof strokeStyle == 'string'){ var diff = this['opts']['strokeStyle'] != strokeStyle; this['opts']['strokeStyle'] = strokeStyle; if(diff){ this['fire']('changedstrokestyle'); } } if(typeof lineWidth == 'number'){ var diff = this['opts']['lineWidth'] != Math.max(0,lineWidth); this['opts']['lineWidth'] = Math.max(0,lineWidth); if(diff){ this['fire']('changedlinewidth'); } } if(options['clickable'] != undefined){ this['opts']['clickable'] = !!options['clickable']; } } poly.prototype['fillStyle'] = function(value){ if(value != undefined){ this['options']({'fillStyle':value}); } return this['opts']['fillStyle']; } shape['polygon'] = poly; function rectangle(options){ poly['call'](this, options); } extend(rectangle, poly); rectangle.prototype['options'] = function(options){ var options = options || {}, coords = options['coords'] ; if(coords != undefined){ if(coords instanceof Array){ if(coords['length'] == 2){ for(var i=0;i<coords['length'];++i){ coords[i] = gridPoint['fuzzy'](coords[i]); } var sw = coords[0], ne = coords[1], foo,bar ; if(ne['y'] > sw['y']){ foo = new gridPoint(ne['x'], sw['y']); bar = new gridPoint(sw['x'], ne['y']); ne = foo; sw = bar; } if(sw['x'] > ne['x']){ foo = new gridPoint(ne['x'], sw['y']); bar = new gridPoint(sw['x'], ne['y']); sw = foo; ne = bar; } 
options['coords'] = [sw, ne]; }else{ throw 'When supplying mapapi.shape.rectangle::options with an Array for the coordinates, there should only be two entries'; } }else{ throw 'something other than array was given to mapapi.shape.rectangle::options'; } } poly.prototype['options']['call'](this, options); } rectangle.prototype['withinShape'] = function(value){ if(value == undefined){ throw 'Must specify an instance of mapapi.gridPoint'; }else if(!(this['bounds'] instanceof bounds)){ throw 'Coordinates not set'; } value = gridPoint['fuzzy'](value); return this['bounds']['isWithin'](value); } shape['rectangle'] = rectangle; function square(options){ rectangle['call'](this, options); } extend(square, rectangle); square.prototype['options'] = function(options){ options = options || {}; var coords = options['coords'] ; if(coords instanceof Array && coords['length'] <= 2){ var sw = coords[0], ne = coords[1] ; if(Math.abs(ne['x'] - sw['x']) != Math.abs(ne['y'] - sw['y'])){ throw 'coordinates should form a square'; } } rectangle.prototype['options']['call'](this, options); } shape['square'] = square; function line(options){ shape['call'](this, options); } extend(line, shape); line.prototype['defaultOpts'] = {'strokeStyle':'rgb(255,255,255)', 'lineWidth':1}; line.prototype['options'] = function(options){ var options = options || {}, coords = options['coords'], strokeStyle = options['strokeStyle'], lineWidth = options['lineWidth'] ; if(options['coords'] != undefined){ if(coords instanceof Array){ if(coords['length'] >= 2){ for(var i=0;i<coords['length'];++i){ coords[i] = gridPoint['fuzzy'](coords[i]); } this['opts']['coords'] = coords; this['fire']('changedcoords'); }else{ throw 'mapapi.shape.line requires two or more coordinates'; } }else{ throw 'mapapi.shape.line requires coordinates be passed as an array'; } } if(typeof strokeStyle == 'string'){ var diff = this['opts']['strokeStyle'] != strokeStyle; this['opts']['strokeStyle'] = strokeStyle; if(diff){ 
this['fire']('changedstrokestyle'); } } if(ctype_digit(lineWidth)){ lineWidth = Math.max(0,lineWidth * 1); var diff = this['opts']['lineWidth'] != lineWidth; this['opts']['lineWidth'] = lineWidth; if(diff){ this['fire']('changedlinewidth'); } } if(options['clickable'] != undefined){ this['opts']['clickable'] = !!options['clickable']; } } line.prototype['intersects'] = function(value){ if(value instanceof bounds){ var coords = this['coords']() ; for(var i=0;i<coords['length'];++i){ if(value['isWithin'](coords[i])){ return true; } } } return false; } shape['line'] = line; function circle(options){ shape['call'](this, options); } extend(circle, shape); circle.prototype['options'] = function(options){ var opts = this['opts'], options = options || {}, coords = options['coords'], radius = options['radius'], strokeStyle = options['strokeStyle'], lineWidth = options['lineWidth'], diffPos=false,diffRadius=false,diff ; if(coords != undefined){ coords[0] = gridPoint['fuzzy'](coords[0]); diffPos = opts['coords'] == undefined || !pos['equals'](opts['coords'][0]); opts['coords'] = [coords[0]]; } if(radius != undefined){ if(typeof radius != 'number'){ throw 'radius should be specified as a number'; }else if(radius <= 0){ throw 'radius should be greater than zero'; } diffRadius = radius != opts['radius']; opts['radius'] = radius; } if(diffPos || diffRadius){ this['fire']('changedcoords'); } if(typeof fillStyle == 'string'){ var diff = this['opts']['fillStyle'] != fillStyle; this['opts']['fillStyle'] = fillStyle; if(diff){ this['fire']('changedfillstyle'); } } if(typeof strokeStyle == 'string'){ var diff = this['opts']['strokeStyle'] != strokeStyle; this['opts']['strokeStyle'] = strokeStyle; if(diff){ this['fire']('changedstrokestyle'); } } if(typeof lineWidth == 'number'){ var diff = this['opts']['lineWidth'] != Math.max(0,lineWidth); this['opts']['lineWidth'] = Math.max(0,lineWidth); if(diff){ this['fire']('changedlinewidth'); } } if(options['clickable'] != undefined){ 
this['opts']['clickable'] = !!options['clickable']; } } circle.prototype['radius'] = function(value){ if(value != undefined){ this['options']({'radius':value}); } return this['opts']['radius']; } circle.prototype['fillStyle'] = function(value){ if(value != undefined){ this['options']({'fillStyle':value}); } return this['opts']['fillStyle']; } circle.prototype['withinShape'] = function(pos){ pos = gridPoint['fuzzy'](pos); return (this['coords']()[0] instanceof gridPoint && typeof this['radius']() == 'number') && (this['coords']()[0]['distance'](pos) <= this['radius']()); } circle.prototype['intersects'] = function(value){ if(value instanceof bounds && this['coords']()[0] instanceof gridPoint){ if(value['isWithin'](this['coords']()[0])){ return true; }else if(typeof this['radius']() == 'number'){ var sw = value['sw'], ne = value['ne'], distanceTests = [sw,ne,{'x':sw['x'], 'y':ne['y']}, {'x':ne['x'], 'y':sw['y']}] ; for(var i=0;i<distanceTests.length;++i){ if(this['withinShape'](distanceTests[i])){ return true; } } } } return false; } shape['circle'] = circle; })(window);
aurora-sim/Aurora-WebUI
www/worldmap/javascripts/mapapi.shape.js
JavaScript
bsd-3-clause
12,790
/** * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * * @format * @flow strict-local */ 'use strict'; import type {Node} from 'React'; import {ActivityIndicator, StyleSheet, View} from 'react-native'; import React, {Component} from 'react'; type State = {|animating: boolean|}; type Props = $ReadOnly<{||}>; type Timer = TimeoutID; class ToggleAnimatingActivityIndicator extends Component<Props, State> { _timer: Timer; constructor(props: Props) { super(props); this.state = { animating: true, }; } componentDidMount() { this.setToggleTimeout(); } componentWillUnmount() { clearTimeout(this._timer); } setToggleTimeout() { this._timer = setTimeout(() => { this.setState({animating: !this.state.animating}); this.setToggleTimeout(); }, 2000); } render(): Node { return ( <ActivityIndicator animating={this.state.animating} style={[styles.centering, {height: 80}]} size="large" /> ); } } const styles = StyleSheet.create({ centering: { alignItems: 'center', justifyContent: 'center', padding: 8, }, gray: { backgroundColor: '#cccccc', }, horizontal: { flexDirection: 'row', justifyContent: 'space-around', padding: 8, }, }); exports.displayName = (undefined: ?string); exports.category = 'UI'; exports.framework = 'React'; exports.title = 'ActivityIndicator'; exports.documentationURL = 'https://reactnative.dev/docs/activityindicator'; exports.description = 'Animated loading indicators.'; exports.examples = [ { title: 'Default (small, white)', render(): Node { return ( <ActivityIndicator style={[styles.centering, styles.gray]} color="white" /> ); }, }, { title: 'Gray', render(): Node { return ( <View> <ActivityIndicator style={[styles.centering]} /> <ActivityIndicator style={[styles.centering, styles.gray]} /> </View> ); }, }, { title: 'Custom colors', render(): Node { return ( <View style={styles.horizontal}> <ActivityIndicator color="#0000ff" /> 
<ActivityIndicator color="#aa00aa" /> <ActivityIndicator color="#aa3300" /> <ActivityIndicator color="#00aa00" /> </View> ); }, }, { title: 'Large', render(): Node { return ( <ActivityIndicator style={[styles.centering, styles.gray]} size="large" color="white" /> ); }, }, { title: 'Large, custom colors', render(): Node { return ( <View style={styles.horizontal}> <ActivityIndicator size="large" color="#0000ff" /> <ActivityIndicator size="large" color="#aa00aa" /> <ActivityIndicator size="large" color="#aa3300" /> <ActivityIndicator size="large" color="#00aa00" /> </View> ); }, }, { title: 'Start/stop', render(): Node { return <ToggleAnimatingActivityIndicator />; }, }, { title: 'Custom size', render(): Node { return ( <ActivityIndicator style={[styles.centering, {transform: [{scale: 1.5}]}]} size="large" /> ); }, }, { platform: 'android', title: 'Custom size (size: 75)', render(): Node { return <ActivityIndicator style={styles.centering} size={75} />; }, }, ];
hoangpham95/react-native
packages/rn-tester/js/examples/ActivityIndicator/ActivityIndicatorExample.js
JavaScript
bsd-3-clause
3,613
for astTuple in Query.input.tuples('ast'): if type(astTuple.ast) is Field: modifiers = astTuple.ast.modifiers nonFinalPublic = modifiers.isSet(Modifier.ModifierFlag.Public) and not modifiers.isSet(Modifier.ModifierFlag.Final) if not nonFinalPublic: Query.input.remove(astTuple) Query.result = Query.input
Vaishal-shah/Envision
InformationScripting/scripts/evaluation/nonFinalPublic.py
Python
bsd-3-clause
345
/** * Copyright (c) 2015-present, Facebook, Inc. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * * @providesModule takeSnapshot * @flow */ 'use strict'; const UIManager = require('UIManager'); const findNumericNodeHandle = require('findNumericNodeHandle'); /** * Capture an image of the screen, window or an individual view. The image * will be stored in a temporary file that will only exist for as long as the * app is running. * * The `view` argument can be the literal string `window` if you want to * capture the entire window, or it can be a reference to a specific * React Native component. * * The `options` argument may include: * - width/height (number) - the width and height of the image to capture. * - format (string) - either 'png' or 'jpeg'. Defaults to 'png'. * - quality (number) - the quality when using jpeg. 0.0 - 1.0 (default). * * Returns a Promise. * @platform ios */ function takeSnapshot( view?: 'window' | React$Element<any> | number, options?: { width?: number, height?: number, format?: 'png' | 'jpeg', quality?: number, }, ): Promise<any> { if (typeof view !== 'number' && view !== 'window') { view = findNumericNodeHandle(view) || 'window'; } // Call the hidden '__takeSnapshot' method; the main one throws an error to // prevent accidental backwards-incompatible usage. return UIManager.__takeSnapshot(view, options); } module.exports = takeSnapshot;
yangshun/react
src/renderers/native/takeSnapshot.js
JavaScript
bsd-3-clause
1,528
(function($, window, document) { var pluginName = 'fatNav', defaults = {}; function Plugin(options) { this.settings = $.extend({}, defaults, options); this._defaults = defaults; this._name = pluginName; this.init(); } $.extend(Plugin.prototype, { init: function() { var self = this; var $nav = this.$nav = $('.fat-nav'); var $hamburger = this.$hamburger = $('<a href="javascript:void(0)" class="hamburger"><div class="hamburger__icon"></div></a>'); this._bodyOverflow = $('body').css('overflow'); // Hack to prevent mobile safari scrolling the whole body when nav is open if (navigator.userAgent.match(/(iPad|iPhone|iPod)/g)) { $nav.children().css({ 'height': '110%', 'transform': 'translateY(-5%)' }); } $('body').append($hamburger); $().add($hamburger).add($nav.find('a')).on('click', function(e) { self.toggleNav(); }); }, toggleNav: function() { var self = this; this.$nav.fadeToggle(400); self.toggleBodyOverflow(); $().add(this.$hamburger).add(this.$nav).toggleClass('active'); }, toggleBodyOverflow: function() { var self = this; var $body = $('body'); $body.toggleClass('no-scroll'); var isNavOpen = $body.hasClass('no-scroll'); // $body.width($body.width()); $body.css('overflow', isNavOpen ? 'hidden' : self._bodyOverflow); } }); if (typeof $[pluginName] === 'undefined') { $[pluginName] = function(options) { return new Plugin(this, options); }; } }(jQuery, window, document));
wangrunxinyes/sby
frontend/plugin/include/extensions/fullscren.choices/src/js/jquery.fatNav.js
JavaScript
bsd-3-clause
2,044
/* * Copyright (C) 2009 Google Inc. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following disclaimer * in the documentation and/or other materials provided with the * distribution. * * Neither the name of Google Inc. nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ #include "core/loader/ThreadableLoader.h" #include "core/dom/Document.h" #include "core/dom/ExecutionContext.h" #include "core/loader/DocumentThreadableLoader.h" #include "core/loader/ThreadableLoaderClientWrapper.h" #include "core/loader/WorkerThreadableLoader.h" #include "core/workers/WorkerGlobalScope.h" #include "core/workers/WorkerThread.h" namespace blink { PassOwnPtr<ThreadableLoader> ThreadableLoader::create(ExecutionContext& context, ThreadableLoaderClient* client, const ThreadableLoaderOptions& options, const ResourceLoaderOptions& resourceLoaderOptions) { ASSERT(client); if (context.isWorkerGlobalScope()) { return WorkerThreadableLoader::create(toWorkerGlobalScope(context), client, options, resourceLoaderOptions); } return DocumentThreadableLoader::create(toDocument(context), client, options, resourceLoaderOptions); } void ThreadableLoader::loadResourceSynchronously(ExecutionContext& context, const ResourceRequest& request, ThreadableLoaderClient& client, const ThreadableLoaderOptions& options, const ResourceLoaderOptions& resourceLoaderOptions) { if (context.isWorkerGlobalScope()) { WorkerThreadableLoader::loadResourceSynchronously(toWorkerGlobalScope(context), request, client, options, resourceLoaderOptions); return; } DocumentThreadableLoader::loadResourceSynchronously(toDocument(context), request, client, options, resourceLoaderOptions); } } // namespace blink
axinging/chromium-crosswalk
third_party/WebKit/Source/core/loader/ThreadableLoader.cpp
C++
bsd-3-clause
3,022
/* * zClip :: jQuery ZeroClipboard v1.1.1 * http://steamdev.com/zclip * * Copyright 2011, SteamDev * Released under the MIT license. * http://www.opensource.org/licenses/mit-license.php * * Date: Wed Jun 01, 2011 */ (function ($) { $.fn.zclip = function (params) { if (typeof params == "object" && !params.length) { var settings = $.extend({ path: 'ZeroClipboard.swf', copy: null, beforeCopy: null, afterCopy: null, clickAfter: true, setHandCursor: true, setCSSEffects: true }, params); return this.each(function () { var o = $(this); if (o.is(':visible') && (typeof settings.copy == 'string' || $.isFunction(settings.copy))) { ZeroClipboard.setMoviePath(settings.path); var clip = new ZeroClipboard.Client(); if($.isFunction(settings.copy)){ o.bind('zClip_copy',settings.copy); } if($.isFunction(settings.beforeCopy)){ o.bind('zClip_beforeCopy',settings.beforeCopy); } if($.isFunction(settings.afterCopy)){ o.bind('zClip_afterCopy',settings.afterCopy); } clip.setHandCursor(settings.setHandCursor); clip.setCSSEffects(settings.setCSSEffects); clip.addEventListener('mouseOver', function (client) { o.trigger('mouseenter'); }); clip.addEventListener('mouseOut', function (client) { o.trigger('mouseleave'); }); clip.addEventListener('mouseDown', function (client) { o.trigger('mousedown'); if(!$.isFunction(settings.copy)){ clip.setText(settings.copy); } else { clip.setText(o.triggerHandler('zClip_copy')); } if ($.isFunction(settings.beforeCopy)) { o.trigger('zClip_beforeCopy'); } }); clip.addEventListener('complete', function (client, text) { if ($.isFunction(settings.afterCopy)) { o.trigger('zClip_afterCopy'); } else { if (text.length > 500) { text = text.substr(0, 500) + "...\n\n(" + (text.length - 500) + " characters not shown)"; } o.removeClass('hover'); alert("Copied text to clipboard:\n\n " + text); } if (settings.clickAfter) { o.trigger('click'); } }); clip.glue(o[0], o.parent()[0]); $(window).bind('load resize',function(){clip.reposition();}); } }); } else if (typeof params == 
"string") { return this.each(function () { var o = $(this); params = params.toLowerCase(); var zclipId = o.data('zclipId'); var clipElm = $('#' + zclipId + '.zclip'); if (params == "remove") { clipElm.remove(); o.removeClass('active hover'); } else if (params == "hide") { clipElm.hide(); o.removeClass('active hover'); } else if (params == "show") { clipElm.show(); } }); } } })(jQuery); // ZeroClipboard // Simple Set Clipboard System // Author: Joseph Huckaby var ZeroClipboard = { version: "1.0.7", clients: {}, // registered upload clients on page, indexed by id moviePath: 'ZeroClipboard.swf', // URL to movie nextId: 1, // ID of next movie $: function (thingy) { // simple DOM lookup utility function if (typeof(thingy) == 'string') thingy = document.getElementById(thingy); if (!thingy.addClass) { // extend element with a few useful methods thingy.hide = function () { this.style.display = 'none'; }; thingy.show = function () { this.style.display = ''; }; thingy.addClass = function (name) { this.removeClass(name); this.className += ' ' + name; }; thingy.removeClass = function (name) { var classes = this.className.split(/\s+/); var idx = -1; for (var k = 0; k < classes.length; k++) { if (classes[k] == name) { idx = k; k = classes.length; } } if (idx > -1) { classes.splice(idx, 1); this.className = classes.join(' '); } return this; }; thingy.hasClass = function (name) { return !!this.className.match(new RegExp("\\s*" + name + "\\s*")); }; } return thingy; }, setMoviePath: function (path) { // set path to ZeroClipboard.swf this.moviePath = path; }, dispatch: function (id, eventName, args) { // receive event from flash movie, send to client var client = this.clients[id]; if (client) { client.receiveEvent(eventName, args); } }, register: function (id, client) { // register new client to receive events this.clients[id] = client; }, getDOMObjectPosition: function (obj, stopObj) { // get absolute coordinates for dom element var info = { left: 0, top: 0, width: obj.width ? 
obj.width : obj.offsetWidth, height: obj.height ? obj.height : obj.offsetHeight }; if (obj && (obj != stopObj)) { info.left += obj.offsetLeft; info.top += obj.offsetTop; } return info; }, Client: function (elem) { // constructor for new simple upload client this.handlers = {}; // unique ID this.id = ZeroClipboard.nextId++; this.movieId = 'ZeroClipboardMovie_' + this.id; // register client with singleton to receive flash events ZeroClipboard.register(this.id, this); // create movie if (elem) this.glue(elem); } }; ZeroClipboard.Client.prototype = { id: 0, // unique ID for us ready: false, // whether movie is ready to receive events or not movie: null, // reference to movie object clipText: '', // text to copy to clipboard handCursorEnabled: true, // whether to show hand cursor, or default pointer cursor cssEffects: true, // enable CSS mouse effects on dom container handlers: null, // user event handlers glue: function (elem, appendElem, stylesToAdd) { // glue to DOM element // elem can be ID or actual DOM element object this.domElement = ZeroClipboard.$(elem); // float just above object, or zIndex 99 if dom element isn't set var zIndex = 99; if (this.domElement.style.zIndex) { zIndex = parseInt(this.domElement.style.zIndex, 10) + 1; } if (typeof(appendElem) == 'string') { appendElem = ZeroClipboard.$(appendElem); } else if (typeof(appendElem) == 'undefined') { appendElem = document.getElementsByTagName('body')[0]; } // find X/Y position of domElement var box = ZeroClipboard.getDOMObjectPosition(this.domElement, appendElem); // create floating DIV above element this.div = document.createElement('div'); this.div.className = "zclip"; this.div.id = "zclip-" + this.movieId; $(this.domElement).data('zclipId', 'zclip-' + this.movieId); var style = this.div.style; style.position = 'absolute'; style.left = '' + box.left + 'px'; style.top = '' + box.top + 'px'; style.width = '' + box.width + 'px'; style.height = '' + box.height + 'px'; style.zIndex = zIndex; if 
(typeof(stylesToAdd) == 'object') { for (addedStyle in stylesToAdd) { style[addedStyle] = stylesToAdd[addedStyle]; } } // style.backgroundColor = '#f00'; // debug appendElem.appendChild(this.div); this.div.innerHTML = this.getHTML(box.width, box.height); }, getHTML: function (width, height) { // return HTML for movie var html = ''; var flashvars = 'id=' + this.id + '&width=' + width + '&height=' + height; if (navigator.userAgent.match(/MSIE/)) { // IE gets an OBJECT tag var protocol = location.href.match(/^https/i) ? 'https://' : 'http://'; html += '<object classid="clsid:d27cdb6e-ae6d-11cf-96b8-444553540000" codebase="' + protocol + 'download.macromedia.com/pub/shockwave/cabs/flash/swflash.cab#version=9,0,0,0" width="' + width + '" height="' + height + '" id="' + this.movieId + '" align="middle"><param name="allowScriptAccess" value="always" /><param name="allowFullScreen" value="false" /><param name="movie" value="' + ZeroClipboard.moviePath + '" /><param name="loop" value="false" /><param name="menu" value="false" /><param name="quality" value="best" /><param name="bgcolor" value="#ffffff" /><param name="flashvars" value="' + flashvars + '"/><param name="wmode" value="transparent"/></object>'; } else { // all other browsers get an EMBED tag html += '<embed id="' + this.movieId + '" src="' + ZeroClipboard.moviePath + '" loop="false" menu="false" quality="best" bgcolor="#ffffff" width="' + width + '" height="' + height + '" name="' + this.movieId + '" align="middle" allowScriptAccess="always" allowFullScreen="false" type="application/x-shockwave-flash" pluginspage="http://www.macromedia.com/go/getflashplayer" flashvars="' + flashvars + '" wmode="transparent" />'; } return html; }, hide: function () { // temporarily hide floater offscreen if (this.div) { this.div.style.left = '-2000px'; } }, show: function () { // show ourselves after a call to hide() this.reposition(); }, destroy: function () { // destroy control and floater if (this.domElement && this.div) { 
this.hide(); this.div.innerHTML = ''; var body = document.getElementsByTagName('body')[0]; try { body.removeChild(this.div); } catch (e) {; } this.domElement = null; this.div = null; } }, reposition: function (elem) { // reposition our floating div, optionally to new container // warning: container CANNOT change size, only position if (elem) { this.domElement = ZeroClipboard.$(elem); if (!this.domElement) this.hide(); } if (this.domElement && this.div) { var box = ZeroClipboard.getDOMObjectPosition(this.domElement); var style = this.div.style; style.left = '' + box.left + 'px'; style.top = '' + box.top + 'px'; } }, setText: function (newText) { // set text to be copied to clipboard this.clipText = newText; if (this.ready) { this.movie.setText(newText); } }, addEventListener: function (eventName, func) { // add user event listener for event // event types: load, queueStart, fileStart, fileComplete, queueComplete, progress, error, cancel eventName = eventName.toString().toLowerCase().replace(/^on/, ''); if (!this.handlers[eventName]) { this.handlers[eventName] = []; } this.handlers[eventName].push(func); }, setHandCursor: function (enabled) { // enable hand cursor (true), or default arrow cursor (false) this.handCursorEnabled = enabled; if (this.ready) { this.movie.setHandCursor(enabled); } }, setCSSEffects: function (enabled) { // enable or disable CSS effects on DOM container this.cssEffects = !! enabled; }, receiveEvent: function (eventName, args) { // receive event from flash eventName = eventName.toString().toLowerCase().replace(/^on/, ''); // special behavior for certain events switch (eventName) { case 'load': // movie claims it is ready, but in IE this isn't always the case... 
// bug fix: Cannot extend EMBED DOM elements in Firefox, must use traditional function this.movie = document.getElementById(this.movieId); if (!this.movie) { var self = this; setTimeout(function () { self.receiveEvent('load', null); }, 1); return; } // firefox on pc needs a "kick" in order to set these in certain cases if (!this.ready && navigator.userAgent.match(/Firefox/) && navigator.userAgent.match(/Windows/)) { var self = this; setTimeout(function () { self.receiveEvent('load', null); }, 100); this.ready = true; return; } this.ready = true; try { this.movie.setText(this.clipText); } catch (e) {} try { this.movie.setHandCursor(this.handCursorEnabled); } catch (e) {} break; case 'mouseover': if (this.domElement && this.cssEffects) { this.domElement.addClass('hover'); if (this.recoverActive) { this.domElement.addClass('active'); } } break; case 'mouseout': if (this.domElement && this.cssEffects) { this.recoverActive = false; if (this.domElement.hasClass('active')) { this.domElement.removeClass('active'); this.recoverActive = true; } this.domElement.removeClass('hover'); } break; case 'mousedown': if (this.domElement && this.cssEffects) { this.domElement.addClass('active'); } break; case 'mouseup': if (this.domElement && this.cssEffects) { this.domElement.removeClass('active'); this.recoverActive = false; } break; } // switch eventName if (this.handlers[eventName]) { for (var idx = 0, len = this.handlers[eventName].length; idx < len; idx++) { var func = this.handlers[eventName][idx]; if (typeof(func) == 'function') { // actual function reference func(this, args); } else if ((typeof(func) == 'object') && (func.length == 2)) { // PHP style object + method, i.e. [myObject, 'myMethod'] func[0][func[1]](this, args); } else if (typeof(func) == 'string') { // name of function window[func](this, args); } } // foreach event handler defined } // user defined handler for event } };
xantage/code
vilya/static/js/lib/jquery.zclip.js
JavaScript
bsd-3-clause
16,750
using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using OrchardCore.Environment.Shell.Descriptor.Models; namespace OrchardCore.Environment.Shell.Descriptor.Settings { /// <summary> /// Implements <see cref="IShellDescriptorManager"/> by returning a single tenant with a specified set /// of features. This class can be registered as a singleton as its state never changes. /// </summary> public class SetFeaturesShellDescriptorManager : IShellDescriptorManager { private readonly IEnumerable<ShellFeature> _shellFeatures; private ShellDescriptor _shellDescriptor; public SetFeaturesShellDescriptorManager(IEnumerable<ShellFeature> shellFeatures) { _shellFeatures = shellFeatures; } public Task<ShellDescriptor> GetShellDescriptorAsync() { if (_shellDescriptor == null) { _shellDescriptor = new ShellDescriptor { Features = _shellFeatures.Distinct().ToList() }; } return Task.FromResult(_shellDescriptor); } public Task UpdateShellDescriptorAsync(int priorSerialNumber, IEnumerable<ShellFeature> enabledFeatures, IEnumerable<ShellParameter> parameters) { return Task.CompletedTask; } } }
OrchardCMS/Brochard
src/OrchardCore/OrchardCore/Shell/Descriptor/Settings/SetFeaturesShellDescriptorManager.cs
C#
bsd-3-clause
1,381
// mksyscall.pl -l32 syscall_bsd.go syscall_darwin.go syscall_darwin_arm.go // MACHINE GENERATED BY THE COMMAND ABOVE; DO NOT EDIT package syscall import "unsafe" // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func getgroups(ngid int, gid *_Gid_t) (n int, err error) { r0, _, e1 := RawSyscall(SYS_GETGROUPS, uintptr(ngid), uintptr(unsafe.Pointer(gid)), 0) n = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func setgroups(ngid int, gid *_Gid_t) (err error) { _, _, e1 := RawSyscall(SYS_SETGROUPS, uintptr(ngid), uintptr(unsafe.Pointer(gid)), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func wait4(pid int, wstatus *_C_int, options int, rusage *Rusage) (wpid int, err error) { r0, _, e1 := Syscall6(SYS_WAIT4, uintptr(pid), uintptr(unsafe.Pointer(wstatus)), uintptr(options), uintptr(unsafe.Pointer(rusage)), 0, 0) wpid = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error) { r0, _, e1 := Syscall(SYS_ACCEPT, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen))) fd = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func bind(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) { _, _, e1 := Syscall(SYS_BIND, uintptr(s), uintptr(addr), uintptr(addrlen)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func connect(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) { _, _, e1 := Syscall(SYS_CONNECT, uintptr(s), uintptr(addr), uintptr(addrlen)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func socket(domain int, typ int, proto int) (fd int, err error) { r0, _, e1 := 
RawSyscall(SYS_SOCKET, uintptr(domain), uintptr(typ), uintptr(proto)) fd = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func getsockopt(s int, level int, name int, val unsafe.Pointer, vallen *_Socklen) (err error) { _, _, e1 := Syscall6(SYS_GETSOCKOPT, uintptr(s), uintptr(level), uintptr(name), uintptr(val), uintptr(unsafe.Pointer(vallen)), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func setsockopt(s int, level int, name int, val unsafe.Pointer, vallen uintptr) (err error) { _, _, e1 := Syscall6(SYS_SETSOCKOPT, uintptr(s), uintptr(level), uintptr(name), uintptr(val), uintptr(vallen), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func getpeername(fd int, rsa *RawSockaddrAny, addrlen *_Socklen) (err error) { _, _, e1 := RawSyscall(SYS_GETPEERNAME, uintptr(fd), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen))) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func getsockname(fd int, rsa *RawSockaddrAny, addrlen *_Socklen) (err error) { _, _, e1 := RawSyscall(SYS_GETSOCKNAME, uintptr(fd), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen))) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Shutdown(s int, how int) (err error) { _, _, e1 := Syscall(SYS_SHUTDOWN, uintptr(s), uintptr(how), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func socketpair(domain int, typ int, proto int, fd *[2]int32) (err error) { _, _, e1 := RawSyscall6(SYS_SOCKETPAIR, uintptr(domain), uintptr(typ), uintptr(proto), uintptr(unsafe.Pointer(fd)), 0, 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func recvfrom(fd int, p 
[]byte, flags int, from *RawSockaddrAny, fromlen *_Socklen) (n int, err error) { var _p0 unsafe.Pointer if len(p) > 0 { _p0 = unsafe.Pointer(&p[0]) } else { _p0 = unsafe.Pointer(&_zero) } r0, _, e1 := Syscall6(SYS_RECVFROM, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(flags), uintptr(unsafe.Pointer(from)), uintptr(unsafe.Pointer(fromlen))) n = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func sendto(s int, buf []byte, flags int, to unsafe.Pointer, addrlen _Socklen) (err error) { var _p0 unsafe.Pointer if len(buf) > 0 { _p0 = unsafe.Pointer(&buf[0]) } else { _p0 = unsafe.Pointer(&_zero) } _, _, e1 := Syscall6(SYS_SENDTO, uintptr(s), uintptr(_p0), uintptr(len(buf)), uintptr(flags), uintptr(to), uintptr(addrlen)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func recvmsg(s int, msg *Msghdr, flags int) (n int, err error) { r0, _, e1 := Syscall(SYS_RECVMSG, uintptr(s), uintptr(unsafe.Pointer(msg)), uintptr(flags)) n = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func sendmsg(s int, msg *Msghdr, flags int) (n int, err error) { r0, _, e1 := Syscall(SYS_SENDMSG, uintptr(s), uintptr(unsafe.Pointer(msg)), uintptr(flags)) n = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func kevent(kq int, change unsafe.Pointer, nchange int, event unsafe.Pointer, nevent int, timeout *Timespec) (n int, err error) { r0, _, e1 := Syscall6(SYS_KEVENT, uintptr(kq), uintptr(change), uintptr(nchange), uintptr(event), uintptr(nevent), uintptr(unsafe.Pointer(timeout))) n = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func sysctl(mib []_C_int, old *byte, oldlen *uintptr, new *byte, newlen uintptr) (err error) { var _p0 unsafe.Pointer if len(mib) > 0 { _p0 
= unsafe.Pointer(&mib[0]) } else { _p0 = unsafe.Pointer(&_zero) } _, _, e1 := Syscall6(SYS___SYSCTL, uintptr(_p0), uintptr(len(mib)), uintptr(unsafe.Pointer(old)), uintptr(unsafe.Pointer(oldlen)), uintptr(unsafe.Pointer(new)), uintptr(newlen)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func utimes(path string, timeval *[2]Timeval) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_UTIMES, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(timeval)), 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func futimes(fd int, timeval *[2]Timeval) (err error) { _, _, e1 := Syscall(SYS_FUTIMES, uintptr(fd), uintptr(unsafe.Pointer(timeval)), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func fcntl(fd int, cmd int, arg int) (val int, err error) { r0, _, e1 := Syscall(SYS_FCNTL, uintptr(fd), uintptr(cmd), uintptr(arg)) val = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func ptrace(request int, pid int, addr uintptr, data uintptr) (err error) { _, _, e1 := Syscall6(SYS_PTRACE, uintptr(request), uintptr(pid), uintptr(addr), uintptr(data), 0, 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func pipe() (r int, w int, err error) { r0, r1, e1 := RawSyscall(SYS_PIPE, 0, 0, 0) r = int(r0) w = int(r1) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func kill(pid int, signum int, posix int) (err error) { _, _, e1 := Syscall(SYS_KILL, uintptr(pid), uintptr(signum), uintptr(posix)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Access(path string, mode uint32) 
(err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_ACCESS, uintptr(unsafe.Pointer(_p0)), uintptr(mode), 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Adjtime(delta *Timeval, olddelta *Timeval) (err error) { _, _, e1 := Syscall(SYS_ADJTIME, uintptr(unsafe.Pointer(delta)), uintptr(unsafe.Pointer(olddelta)), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Chdir(path string) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_CHDIR, uintptr(unsafe.Pointer(_p0)), 0, 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Chflags(path string, flags int) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_CHFLAGS, uintptr(unsafe.Pointer(_p0)), uintptr(flags), 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Chmod(path string, mode uint32) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_CHMOD, uintptr(unsafe.Pointer(_p0)), uintptr(mode), 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Chown(path string, uid int, gid int) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_CHOWN, uintptr(unsafe.Pointer(_p0)), uintptr(uid), uintptr(gid)) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Chroot(path string) (err error) { var _p0 *byte _p0, err = 
BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_CHROOT, uintptr(unsafe.Pointer(_p0)), 0, 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Close(fd int) (err error) { _, _, e1 := Syscall(SYS_CLOSE, uintptr(fd), 0, 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Dup(fd int) (nfd int, err error) { r0, _, e1 := Syscall(SYS_DUP, uintptr(fd), 0, 0) nfd = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Dup2(from int, to int) (err error) { _, _, e1 := Syscall(SYS_DUP2, uintptr(from), uintptr(to), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Exchangedata(path1 string, path2 string, options int) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path1) if err != nil { return } var _p1 *byte _p1, err = BytePtrFromString(path2) if err != nil { return } _, _, e1 := Syscall(SYS_EXCHANGEDATA, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(options)) use(unsafe.Pointer(_p0)) use(unsafe.Pointer(_p1)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Exit(code int) { Syscall(SYS_EXIT, uintptr(code), 0, 0) return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Fchdir(fd int) (err error) { _, _, e1 := Syscall(SYS_FCHDIR, uintptr(fd), 0, 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Fchflags(fd int, flags int) (err error) { _, _, e1 := Syscall(SYS_FCHFLAGS, uintptr(fd), uintptr(flags), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Fchmod(fd int, mode uint32) (err error) { _, _, e1 := Syscall(SYS_FCHMOD, 
uintptr(fd), uintptr(mode), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Fchown(fd int, uid int, gid int) (err error) { _, _, e1 := Syscall(SYS_FCHOWN, uintptr(fd), uintptr(uid), uintptr(gid)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Flock(fd int, how int) (err error) { _, _, e1 := Syscall(SYS_FLOCK, uintptr(fd), uintptr(how), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Fpathconf(fd int, name int) (val int, err error) { r0, _, e1 := Syscall(SYS_FPATHCONF, uintptr(fd), uintptr(name), 0) val = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Fstat(fd int, stat *Stat_t) (err error) { _, _, e1 := Syscall(SYS_FSTAT64, uintptr(fd), uintptr(unsafe.Pointer(stat)), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Fstatfs(fd int, stat *Statfs_t) (err error) { _, _, e1 := Syscall(SYS_FSTATFS64, uintptr(fd), uintptr(unsafe.Pointer(stat)), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Fsync(fd int) (err error) { _, _, e1 := Syscall(SYS_FSYNC, uintptr(fd), 0, 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Ftruncate(fd int, length int64) (err error) { _, _, e1 := Syscall(SYS_FTRUNCATE, uintptr(fd), uintptr(length), uintptr(length>>32)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Getdirentries(fd int, buf []byte, basep *uintptr) (n int, err error) { var _p0 unsafe.Pointer if len(buf) > 0 { _p0 = unsafe.Pointer(&buf[0]) } else { _p0 = unsafe.Pointer(&_zero) } r0, _, e1 := Syscall6(SYS_GETDIRENTRIES64, uintptr(fd), uintptr(_p0), 
uintptr(len(buf)), uintptr(unsafe.Pointer(basep)), 0, 0) n = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Getdtablesize() (size int) { r0, _, _ := Syscall(SYS_GETDTABLESIZE, 0, 0, 0) size = int(r0) return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Getegid() (egid int) { r0, _, _ := RawSyscall(SYS_GETEGID, 0, 0, 0) egid = int(r0) return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Geteuid() (uid int) { r0, _, _ := RawSyscall(SYS_GETEUID, 0, 0, 0) uid = int(r0) return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Getgid() (gid int) { r0, _, _ := RawSyscall(SYS_GETGID, 0, 0, 0) gid = int(r0) return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Getpgid(pid int) (pgid int, err error) { r0, _, e1 := RawSyscall(SYS_GETPGID, uintptr(pid), 0, 0) pgid = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Getpgrp() (pgrp int) { r0, _, _ := RawSyscall(SYS_GETPGRP, 0, 0, 0) pgrp = int(r0) return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Getpid() (pid int) { r0, _, _ := RawSyscall(SYS_GETPID, 0, 0, 0) pid = int(r0) return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Getppid() (ppid int) { r0, _, _ := RawSyscall(SYS_GETPPID, 0, 0, 0) ppid = int(r0) return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Getpriority(which int, who int) (prio int, err error) { r0, _, e1 := Syscall(SYS_GETPRIORITY, uintptr(which), uintptr(who), 0) prio = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Getrlimit(which int, lim *Rlimit) (err error) { _, _, e1 := RawSyscall(SYS_GETRLIMIT, uintptr(which), uintptr(unsafe.Pointer(lim)), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED 
BY THE COMMAND AT THE TOP; DO NOT EDIT func Getrusage(who int, rusage *Rusage) (err error) { _, _, e1 := RawSyscall(SYS_GETRUSAGE, uintptr(who), uintptr(unsafe.Pointer(rusage)), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Getsid(pid int) (sid int, err error) { r0, _, e1 := RawSyscall(SYS_GETSID, uintptr(pid), 0, 0) sid = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Getuid() (uid int) { r0, _, _ := RawSyscall(SYS_GETUID, 0, 0, 0) uid = int(r0) return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Issetugid() (tainted bool) { r0, _, _ := RawSyscall(SYS_ISSETUGID, 0, 0, 0) tainted = bool(r0 != 0) return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Kqueue() (fd int, err error) { r0, _, e1 := Syscall(SYS_KQUEUE, 0, 0, 0) fd = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Lchown(path string, uid int, gid int) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_LCHOWN, uintptr(unsafe.Pointer(_p0)), uintptr(uid), uintptr(gid)) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Link(path string, link string) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } var _p1 *byte _p1, err = BytePtrFromString(link) if err != nil { return } _, _, e1 := Syscall(SYS_LINK, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), 0) use(unsafe.Pointer(_p0)) use(unsafe.Pointer(_p1)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Listen(s int, backlog int) (err error) { _, _, e1 := Syscall(SYS_LISTEN, uintptr(s), uintptr(backlog), 0) if e1 != 0 { err = errnoErr(e1) } 
return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Lstat(path string, stat *Stat_t) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_LSTAT64, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(stat)), 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Mkdir(path string, mode uint32) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_MKDIR, uintptr(unsafe.Pointer(_p0)), uintptr(mode), 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Mkfifo(path string, mode uint32) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_MKFIFO, uintptr(unsafe.Pointer(_p0)), uintptr(mode), 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Mknod(path string, mode uint32, dev int) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_MKNOD, uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(dev)) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Mlock(b []byte) (err error) { var _p0 unsafe.Pointer if len(b) > 0 { _p0 = unsafe.Pointer(&b[0]) } else { _p0 = unsafe.Pointer(&_zero) } _, _, e1 := Syscall(SYS_MLOCK, uintptr(_p0), uintptr(len(b)), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Mlockall(flags int) (err error) { _, _, e1 := Syscall(SYS_MLOCKALL, uintptr(flags), 0, 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Mprotect(b 
[]byte, prot int) (err error) { var _p0 unsafe.Pointer if len(b) > 0 { _p0 = unsafe.Pointer(&b[0]) } else { _p0 = unsafe.Pointer(&_zero) } _, _, e1 := Syscall(SYS_MPROTECT, uintptr(_p0), uintptr(len(b)), uintptr(prot)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Munlock(b []byte) (err error) { var _p0 unsafe.Pointer if len(b) > 0 { _p0 = unsafe.Pointer(&b[0]) } else { _p0 = unsafe.Pointer(&_zero) } _, _, e1 := Syscall(SYS_MUNLOCK, uintptr(_p0), uintptr(len(b)), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Munlockall() (err error) { _, _, e1 := Syscall(SYS_MUNLOCKALL, 0, 0, 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Open(path string, mode int, perm uint32) (fd int, err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } r0, _, e1 := Syscall(SYS_OPEN, uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(perm)) use(unsafe.Pointer(_p0)) fd = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Pathconf(path string, name int) (val int, err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } r0, _, e1 := Syscall(SYS_PATHCONF, uintptr(unsafe.Pointer(_p0)), uintptr(name), 0) use(unsafe.Pointer(_p0)) val = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Pread(fd int, p []byte, offset int64) (n int, err error) { var _p0 unsafe.Pointer if len(p) > 0 { _p0 = unsafe.Pointer(&p[0]) } else { _p0 = unsafe.Pointer(&_zero) } r0, _, e1 := Syscall6(SYS_PREAD, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(offset), uintptr(offset>>32), 0) n = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Pwrite(fd 
int, p []byte, offset int64) (n int, err error) { var _p0 unsafe.Pointer if len(p) > 0 { _p0 = unsafe.Pointer(&p[0]) } else { _p0 = unsafe.Pointer(&_zero) } r0, _, e1 := Syscall6(SYS_PWRITE, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(offset), uintptr(offset>>32), 0) n = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func read(fd int, p []byte) (n int, err error) { var _p0 unsafe.Pointer if len(p) > 0 { _p0 = unsafe.Pointer(&p[0]) } else { _p0 = unsafe.Pointer(&_zero) } r0, _, e1 := Syscall(SYS_READ, uintptr(fd), uintptr(_p0), uintptr(len(p))) n = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Readlink(path string, buf []byte) (n int, err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } var _p1 unsafe.Pointer if len(buf) > 0 { _p1 = unsafe.Pointer(&buf[0]) } else { _p1 = unsafe.Pointer(&_zero) } r0, _, e1 := Syscall(SYS_READLINK, uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(buf))) use(unsafe.Pointer(_p0)) n = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Rename(from string, to string) (err error) { var _p0 *byte _p0, err = BytePtrFromString(from) if err != nil { return } var _p1 *byte _p1, err = BytePtrFromString(to) if err != nil { return } _, _, e1 := Syscall(SYS_RENAME, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), 0) use(unsafe.Pointer(_p0)) use(unsafe.Pointer(_p1)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Revoke(path string) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_REVOKE, uintptr(unsafe.Pointer(_p0)), 0, 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT 
func Rmdir(path string) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_RMDIR, uintptr(unsafe.Pointer(_p0)), 0, 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Seek(fd int, offset int64, whence int) (newoffset int64, err error) { r0, r1, e1 := Syscall6(SYS_LSEEK, uintptr(fd), uintptr(offset), uintptr(offset>>32), uintptr(whence), 0, 0) newoffset = int64(int64(r1)<<32 | int64(r0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Select(n int, r *FdSet, w *FdSet, e *FdSet, timeout *Timeval) (err error) { _, _, e1 := Syscall6(SYS_SELECT, uintptr(n), uintptr(unsafe.Pointer(r)), uintptr(unsafe.Pointer(w)), uintptr(unsafe.Pointer(e)), uintptr(unsafe.Pointer(timeout)), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Setegid(egid int) (err error) { _, _, e1 := Syscall(SYS_SETEGID, uintptr(egid), 0, 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Seteuid(euid int) (err error) { _, _, e1 := RawSyscall(SYS_SETEUID, uintptr(euid), 0, 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Setgid(gid int) (err error) { _, _, e1 := RawSyscall(SYS_SETGID, uintptr(gid), 0, 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Setlogin(name string) (err error) { var _p0 *byte _p0, err = BytePtrFromString(name) if err != nil { return } _, _, e1 := Syscall(SYS_SETLOGIN, uintptr(unsafe.Pointer(_p0)), 0, 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Setpgid(pid int, pgid int) (err error) { _, _, e1 := 
RawSyscall(SYS_SETPGID, uintptr(pid), uintptr(pgid), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Setpriority(which int, who int, prio int) (err error) { _, _, e1 := Syscall(SYS_SETPRIORITY, uintptr(which), uintptr(who), uintptr(prio)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Setprivexec(flag int) (err error) { _, _, e1 := Syscall(SYS_SETPRIVEXEC, uintptr(flag), 0, 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Setregid(rgid int, egid int) (err error) { _, _, e1 := RawSyscall(SYS_SETREGID, uintptr(rgid), uintptr(egid), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Setreuid(ruid int, euid int) (err error) { _, _, e1 := RawSyscall(SYS_SETREUID, uintptr(ruid), uintptr(euid), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Setrlimit(which int, lim *Rlimit) (err error) { _, _, e1 := RawSyscall(SYS_SETRLIMIT, uintptr(which), uintptr(unsafe.Pointer(lim)), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Setsid() (pid int, err error) { r0, _, e1 := RawSyscall(SYS_SETSID, 0, 0, 0) pid = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Settimeofday(tp *Timeval) (err error) { _, _, e1 := RawSyscall(SYS_SETTIMEOFDAY, uintptr(unsafe.Pointer(tp)), 0, 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Setuid(uid int) (err error) { _, _, e1 := RawSyscall(SYS_SETUID, uintptr(uid), 0, 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Stat(path string, stat *Stat_t) (err 
error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_STAT64, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(stat)), 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Statfs(path string, stat *Statfs_t) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_STATFS64, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(stat)), 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Symlink(path string, link string) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } var _p1 *byte _p1, err = BytePtrFromString(link) if err != nil { return } _, _, e1 := Syscall(SYS_SYMLINK, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), 0) use(unsafe.Pointer(_p0)) use(unsafe.Pointer(_p1)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Sync() (err error) { _, _, e1 := Syscall(SYS_SYNC, 0, 0, 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Truncate(path string, length int64) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_TRUNCATE, uintptr(unsafe.Pointer(_p0)), uintptr(length), uintptr(length>>32)) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Umask(newmask int) (oldmask int) { r0, _, _ := Syscall(SYS_UMASK, uintptr(newmask), 0, 0) oldmask = int(r0) return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Undelete(path string) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_UNDELETE, 
uintptr(unsafe.Pointer(_p0)), 0, 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Unlink(path string) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_UNLINK, uintptr(unsafe.Pointer(_p0)), 0, 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Unmount(path string, flags int) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_UNMOUNT, uintptr(unsafe.Pointer(_p0)), uintptr(flags), 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func write(fd int, p []byte) (n int, err error) { var _p0 unsafe.Pointer if len(p) > 0 { _p0 = unsafe.Pointer(&p[0]) } else { _p0 = unsafe.Pointer(&_zero) } r0, _, e1 := Syscall(SYS_WRITE, uintptr(fd), uintptr(_p0), uintptr(len(p))) n = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func mmap(addr uintptr, length uintptr, prot int, flag int, fd int, pos int64) (ret uintptr, err error) { r0, _, e1 := Syscall9(SYS_MMAP, uintptr(addr), uintptr(length), uintptr(prot), uintptr(flag), uintptr(fd), uintptr(pos), uintptr(pos>>32), 0, 0) ret = uintptr(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func munmap(addr uintptr, length uintptr) (err error) { _, _, e1 := Syscall(SYS_MUNMAP, uintptr(addr), uintptr(length), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func readlen(fd int, buf *byte, nbuf int) (n int, err error) { r0, _, e1 := Syscall(SYS_READ, uintptr(fd), uintptr(unsafe.Pointer(buf)), uintptr(nbuf)) n = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS 
FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func writelen(fd int, buf *byte, nbuf int) (n int, err error) { r0, _, e1 := Syscall(SYS_WRITE, uintptr(fd), uintptr(unsafe.Pointer(buf)), uintptr(nbuf)) n = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func gettimeofday(tp *Timeval) (sec int32, usec int32, err error) { r0, r1, e1 := RawSyscall(SYS_GETTIMEOFDAY, uintptr(unsafe.Pointer(tp)), 0, 0) sec = int32(r0) usec = int32(r1) if e1 != 0 { err = errnoErr(e1) } return }
AnuchitPrasertsang/go
src/syscall/zsyscall_darwin_arm.go
GO
bsd-3-clause
33,695
/** * Copyright (c) 2014, * Charles Prud'homme (TASC, INRIA Rennes, LINA CNRS UMR 6241), * Jean-Guillaume Fages (COSLING S.A.S.). * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the <organization> nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package org.chocosolver.solver.thread; /** * Slave born to be mastered and work in parallel * * @author Jean-Guillaume Fages */ public abstract class AbstractParallelSlave<P extends AbstractParallelMaster> { //*********************************************************************************** // VARIABLES //*********************************************************************************** public P master; public final int id; //*********************************************************************************** // CONSTRUCTORS //*********************************************************************************** /** * Create a slave born to be mastered and work in parallel * * @param master master solver * @param id slave unique name */ public AbstractParallelSlave(P master, int id) { this.master = master; this.id = id; } //*********************************************************************************** // SUB-PROBLEM SOLVING //*********************************************************************************** /** * Creates a new thread to work in parallel */ public void workInParallel() { Thread t = new Thread() { @Override public void run() { work(); master.wishGranted(); } }; t.start(); } /** * do something */ public abstract void work(); }
piyushsh/choco3
choco-solver/src/main/java/org/chocosolver/solver/thread/AbstractParallelSlave.java
Java
bsd-3-clause
3,193
import json import mock from sentry.plugins.helpers import get_option, set_option from sentry.testutils import TestCase from sentry.models import set_sentry_version, Option from sentry.tasks.check_update import check_update, PYPI_URL class CheckUpdateTest(TestCase): OLD = '5.0.0' CURRENT = '5.5.0-DEV' NEW = '1000000000.5.1' KEY = 'sentry:latest_version' def test_run_check_update_task(self): with mock.patch('sentry.tasks.check_update.fetch_url_content') as fetch: fetch.return_value = ( None, None, json.dumps({'info': {'version': self.NEW}}) ) check_update() # latest_version > current_version fetch.assert_called_once_with(PYPI_URL) self.assertEqual(get_option(key=self.KEY), self.NEW) def test_run_check_update_task_with_bad_response(self): with mock.patch('sentry.tasks.check_update.fetch_url_content') as fetch: fetch.return_value = (None, None, '') check_update() # latest_version == current_version fetch.assert_called_once_with(PYPI_URL) self.assertEqual(get_option(key=self.KEY), None) def test_set_sentry_version_empty_latest(self): set_sentry_version(latest=self.NEW) self.assertEqual(get_option(key=self.KEY), self.NEW) def test_set_sentry_version_new(self): set_option(self.KEY, self.OLD) with mock.patch('sentry.get_version') as get_version: get_version.return_value = self.CURRENT set_sentry_version(latest=self.NEW) self.assertEqual(Option.objects.get_value(key=self.KEY), self.NEW) def test_set_sentry_version_old(self): set_option(self.KEY, self.NEW) with mock.patch('sentry.get_version') as get_version: get_version.return_value = self.CURRENT set_sentry_version(latest=self.OLD) self.assertEqual(Option.objects.get_value(key=self.KEY), self.NEW)
beni55/sentry
tests/sentry/tasks/check_update/tests.py
Python
bsd-3-clause
1,970
<?php $this->title = 'Update Country Detail: ' . ' ' . $model->name; $this->params['breadcrumbs'][] = ['label' => 'Countries', 'url' => ['index']]; $this->params['breadcrumbs'][] = ['label' => $model->name, 'url' => ['view', 'id' => $model->id]]; $this->params['breadcrumbs'][] = 'Update'; ?> <div class="countries-update"> <?= $this->render('_form', [ 'model' => $model, ]) ?> </div>
61ds/aging
_protected/backend/views/countries/update.php
PHP
bsd-3-clause
405
package org.buildmlearn.toolkit.flashcardtemplate.data; import org.w3c.dom.Document; import org.xml.sax.SAXException; import java.io.File; import java.io.IOException; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; /** * Created by Anupam (opticod) on 10/8/16. */ /** * @brief Contains xml data utils for flash card template's simulator. */ public class DataUtils { public static String[] readTitleAuthor() { String result[] = new String[2]; DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); dbf.setValidating(false); DocumentBuilder db; Document doc; try { File fXmlFile = new File(org.buildmlearn.toolkit.flashcardtemplate.Constants.XMLFileName); db = dbf.newDocumentBuilder(); doc = db.parse(fXmlFile); doc.normalize(); result[0] = doc.getElementsByTagName("title").item(0).getChildNodes() .item(0).getNodeValue(); result[1] = doc.getElementsByTagName("name").item(0).getChildNodes() .item(0).getNodeValue(); } catch (ParserConfigurationException | SAXException | IOException e) { e.printStackTrace(); } return result; } }
opticod/BuildmLearn-Toolkit-Android
source-code/app/src/main/java/org/buildmlearn/toolkit/flashcardtemplate/data/DataUtils.java
Java
bsd-3-clause
1,360
var vows = require('vows'), assert = require('assert'), path = require('path'), fs = require('fs'), exec = require('child_process').exec, base = path.join(__dirname, 'assets/badmodule/'), buildBase = path.join(base, 'build'), srcBase = path.join(base, 'src/foo'), rimraf = require('rimraf'); var tests = { 'clean build': { topic: function() { rimraf(path.join(buildBase, 'foo'), this.callback); }, 'should not have build dir and': { topic: function() { var self = this; fs.stat(path.join(buildBase, 'foo'), function(err) { self.callback(null, err); }); }, 'should not have build/foo': function(foo, err) { assert.isNotNull(err); assert.equal(err.code, 'ENOENT'); }, 'should build foo and': { topic: function() { var self = this, child; process.chdir(path.resolve(base, srcBase)); child = exec('../../../../../bin/shifter --no-global-config', function (error, stdout, stderr) { self.callback(null, { error: error, stderr: stderr }); }); }, 'should fail with an error code 1': function (topic) { assert.equal(topic.error.code, 1); }, 'should fail with an error message': function(topic) { assert.isNotNull(topic.stderr); } } } } }; vows.describe('building badmodule with UglifyJS via command line').addBatch(tests).export(module);
wf2/shifter
tests/14-builder-uglify-badmodule-cmd.js
JavaScript
bsd-3-clause
1,833
/* jshint multistr:true */ /* jshint -W040 */ 'use strict'; var envify = require('envify/custom'); var es3ify = require('es3ify'); var grunt = require('grunt'); var UglifyJS = require('uglify-js'); var uglifyify = require('uglifyify'); var derequire = require('derequire'); var collapser = require('bundle-collapser/plugin'); var SIMPLE_TEMPLATE = '/**\n\ * @PACKAGE@ v@VERSION@\n\ */'; var LICENSE_TEMPLATE = '/**\n\ * @PACKAGE@ v@VERSION@\n\ *\n\ * Copyright 2013-2014, Facebook, Inc.\n\ * All rights reserved.\n\ *\n\ * This source code is licensed under the BSD-style license found in the\n\ * LICENSE file in the root directory of this source tree. An additional grant\n\ * of patent rights can be found in the PATENTS file in the same directory.\n\ *\n\ */'; function minify(src) { return UglifyJS.minify(src, { fromString: true }).code; } // TODO: move this out to another build step maybe. function bannerify(src) { var version = grunt.config.data.pkg.version; var packageName = this.data.packageName || this.data.standalone; return LICENSE_TEMPLATE.replace('@PACKAGE@', packageName) .replace('@VERSION@', version) + '\n' + src; } function simpleBannerify(src) { var version = grunt.config.data.pkg.version; var packageName = this.data.packageName || this.data.standalone; return SIMPLE_TEMPLATE.replace('@PACKAGE@', packageName) .replace('@VERSION@', version) + '\n' + src; } // Our basic config which we'll add to to make our other builds var basic = { entries: [ './build/modules/React.js' ], outfile: './build/react.js', debug: false, standalone: 'React', transforms: [envify({NODE_ENV: 'development'})], plugins: [collapser], after: [es3ify.transform, derequire, simpleBannerify] }; var min = { entries: [ './build/modules/React.js' ], outfile: './build/react.min.js', debug: false, standalone: 'React', transforms: [envify({NODE_ENV: 'production'}), uglifyify], plugins: [collapser], after: [es3ify.transform, derequire, minify, bannerify] }; var transformer = { entries:[ 
'./vendor/browser-transforms.js' ], outfile: './build/JSXTransformer.js', debug: false, standalone: 'JSXTransformer', transforms: [], plugins: [collapser], after: [es3ify.transform, derequire, simpleBannerify] }; var addons = { entries: [ './build/modules/ReactWithAddons.js' ], outfile: './build/react-with-addons.js', debug: false, standalone: 'React', packageName: 'React (with addons)', transforms: [envify({NODE_ENV: 'development'})], plugins: [collapser], after: [es3ify.transform, derequire, simpleBannerify] }; var addonsMin = { entries: [ './build/modules/ReactWithAddons.js' ], outfile: './build/react-with-addons.min.js', debug: false, standalone: 'React', packageName: 'React (with addons)', transforms: [envify({NODE_ENV: 'production'}), uglifyify], plugins: [collapser], after: [es3ify.transform, derequire, minify, bannerify] }; var withCodeCoverageLogging = { entries: [ './build/modules/React.js' ], outfile: './build/react.js', debug: true, standalone: 'React', transforms: [ envify({NODE_ENV: 'development'}), require('coverify') ], plugins: [collapser] }; module.exports = { basic: basic, min: min, transformer: transformer, addons: addons, addonsMin: addonsMin, withCodeCoverageLogging: withCodeCoverageLogging };
kchia/react
grunt/config/browserify.js
JavaScript
bsd-3-clause
3,473
/* * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source * tree. An additional intellectual property rights grant can be found * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ // testAPI.cpp : Defines the entry point for the console application. // // NOTES: // 1. MediaFile library and testAPI.cpp must be built in DEBUG mode for testing. // #include <iostream> #include <stdio.h> #include <assert.h> #ifdef WIN32 #include <windows.h> #include <tchar.h> #endif #include "common_types.h" #include "trace.h" #include "Engineconfigurations.h" #include "media_file.h" #include "file_player.h" #include "file_recorder.h" bool notify = false, playing = false, recording = false; // callback class for FileModule class MyFileModuleCallback : public FileCallback { public: virtual void PlayNotification( const WebRtc_Word32 id, const WebRtc_UWord32 durationMs ) { printf("\tReceived PlayNotification from module %ld, durationMs = %ld\n", id, durationMs); notify = true; }; virtual void RecordNotification( const WebRtc_Word32 id, const WebRtc_UWord32 durationMs ) { printf("\tReceived RecordNotification from module %ld, durationMs = %ld\n", id, durationMs); notify = true; }; virtual void PlayFileEnded(const WebRtc_Word32 id) { printf("\tReceived PlayFileEnded notification from module %ld.\n", id); playing = false; }; virtual void RecordFileEnded(const WebRtc_Word32 id) { printf("\tReceived RecordFileEnded notification from module %ld.\n", id); recording = false; } }; // main test app #ifdef WIN32 int _tmain(int argc, _TCHAR* argv[]) #else int main(int /*argc*/, char** /*argv*/) #endif { Trace::CreateTrace(); Trace::SetTraceFile("testTrace.txt"); Trace::SetEncryptedTraceFile("testTraceDebug.txt"); int playId = 1; int recordId = 2; printf("Welcome to test of FilePlayer and 
FileRecorder\n"); /////////////////////////////////////////////// // // avi test case 1 // /////////////////////////////////////////////// // todo PW we need more AVI tests Mp4 { FilePlayer& filePlayer(*FilePlayer::CreateFilePlayer(1, webrtc::kFileFormatAviFile)); FileRecorder& fileRecorder(*FileRecorder::CreateFileRecorder(1, webrtc::kFileFormatAviFile)); const char* KFileName = "./tmpAviFileTestCase1_audioI420CIF30fps.avi"; printf("\tReading from an avi file and writing the information to another \n"); printf("\tin the same format (I420 CIF 30fps) \n"); printf("\t\t check file named %s\n", KFileName); assert(filePlayer.StartPlayingVideoFile( "../../../MediaFile/main/test/files/aviTestCase1_audioI420CIF30fps.avi", false, false) == 0); // init codecs webrtc::VideoCodec videoCodec; webrtc::VideoCodec recVideoCodec; webrtc::CodecInst audioCodec; assert(filePlayer.VideoCodec( videoCodec ) == 0); assert(filePlayer.AudioCodec( audioCodec) == 0); recVideoCodec = videoCodec; assert( fileRecorder.StartRecordingVideoFile(KFileName, audioCodec, recVideoCodec) == 0); assert(fileRecorder.IsRecording()); webrtc::I420VideoFrame videoFrame; videoFrame.CreateEmptyFrame(videoCodec.width, videoCodec.height, videoCodec.width, (videoCodec.width + 1) / 2, (videoCodec.width + 1) / 2); int frameCount = 0; bool audioNotDone = true; bool videoNotDone = true; AudioFrame audioFrame; while( audioNotDone || videoNotDone) { if(filePlayer.TimeUntilNextVideoFrame() <= 0) { if(filePlayer.GetVideoFromFile( videoFrame) != 0) { // no more video frames break; } frameCount++; videoNotDone = !videoFrame.IsZeroSize(); videoFrame.SetRenderTime(TickTime::MillisecondTimestamp()); if( videoNotDone) { assert(fileRecorder.RecordVideoToFile(videoFrame) == 0); ::Sleep(10); } } WebRtc_UWord32 decodedDataLengthInSamples; if( 0 != filePlayer.Get10msAudioFromFile( audioFrame.data_, decodedDataLengthInSamples, audioCodec.plfreq)) { audioNotDone = false; } else { audioFrame.sample_rate_hz_ = filePlayer.Frequency(); 
audioFrame.samples_per_channel_ = (WebRtc_UWord16)decodedDataLengthInSamples; fileRecorder.RecordAudioToFile(audioFrame, &TickTime::Now()); } } ::Sleep(100); assert(fileRecorder.StopRecording() == 0); assert( !fileRecorder.IsRecording()); assert(frameCount == 135); printf("\tGenerated %s\n\n", KFileName); } /////////////////////////////////////////////// // // avi test case 2 // /////////////////////////////////////////////// { FilePlayer& filePlayer(*FilePlayer::CreateFilePlayer(2, webrtc::kFileFormatAviFile)); FileRecorder& fileRecorder(*FileRecorder::CreateFileRecorder(2, webrtc::kFileFormatAviFile)); const char* KFileName = "./tmpAviFileTestCase2_audioI420CIF20fps.avi"; printf("\tWriting information to a avi file and check the written file by \n"); printf("\treopening it and control codec information.\n"); printf("\t\t check file named %s all frames should be light green.\n", KFileName); // init codecs webrtc::VideoCodec videoCodec; webrtc::CodecInst audioCodec; memset(&videoCodec, 0, sizeof(videoCodec)); const char* KVideoCodecName = "I420"; strcpy(videoCodec.plName, KVideoCodecName); videoCodec.plType = 124; videoCodec.maxFramerate = 20; videoCodec.height = 288; videoCodec.width = 352; const char* KAudioCodecName = "PCMU"; strcpy(audioCodec.plname, KAudioCodecName); audioCodec.pltype = 0; audioCodec.plfreq = 8000; audioCodec.pacsize = 80; audioCodec.channels = 1; audioCodec.rate = 64000; assert( fileRecorder.StartRecordingVideoFile( KFileName, audioCodec, videoCodec) == 0); assert(fileRecorder.IsRecording()); const WebRtc_UWord32 KVideoWriteSize = static_cast< WebRtc_UWord32>( (videoCodec.width * videoCodec.height * 3) / 2); webrtc::VideoFrame videoFrame; // 10 ms AudioFrame audioFrame; audioFrame.samples_per_channel_ = audioCodec.plfreq/100; memset(audioFrame.data_, 0, 2*audioFrame.samples_per_channel_); audioFrame.sample_rate_hz_ = 8000; // prepare the video frame int half_width = (videoCodec.width + 1) / 2; int half_height = (videoCodec.height + 1) / 2; 
videoFrame.CreateEmptyFrame(videoCodec.width, videoCodec.height, videoCodec.width, half_width, half_width); memset(videoFrame.buffer(kYPlane), 127, videoCodec.width * videoCodec.height); memset(videoFrame.buffer(kUPlane), 0, half_width * half_height); memset(videoFrame.buffer(kVPlane), 0, half_width * half_height); // write avi file, with 20 video frames const int KWriteNumFrames = 20; int writeFrameCount = 0; while(writeFrameCount < KWriteNumFrames) { // add a video frame assert(fileRecorder.RecordVideoToFile(videoFrame) == 0); // add 50 ms of audio for(int i=0; i<5; i++) { assert( fileRecorder.RecordAudioToFile(audioFrame) == 0); }// for i writeFrameCount++; } ::Sleep(10); // enough tim eto write the queued data to the file assert(writeFrameCount == 20); assert(fileRecorder.StopRecording() == 0); assert( ! fileRecorder.IsRecording()); assert(filePlayer.StartPlayingVideoFile(KFileName,false, false) == 0); assert(filePlayer.IsPlayingFile( )); // compare codecs read from file to the ones used when writing the file webrtc::VideoCodec readVideoCodec; assert(filePlayer.VideoCodec( readVideoCodec ) == 0); assert(strcmp(readVideoCodec.plName, videoCodec.plName) == 0); assert(readVideoCodec.width == videoCodec.width); assert(readVideoCodec.height == videoCodec.height); assert(readVideoCodec.maxFramerate == videoCodec.maxFramerate); webrtc::CodecInst readAudioCodec; assert(filePlayer.AudioCodec( readAudioCodec) == 0); assert(strcmp(readAudioCodec.plname, audioCodec.plname) == 0); assert(readAudioCodec.pltype == audioCodec.pltype); assert(readAudioCodec.plfreq == audioCodec.plfreq); assert(readAudioCodec.pacsize == audioCodec.pacsize); assert(readAudioCodec.channels == audioCodec.channels); assert(readAudioCodec.rate == audioCodec.rate); assert(filePlayer.StopPlayingFile() == 0); assert( ! 
filePlayer.IsPlayingFile()); printf("\tGenerated %s\n\n", KFileName); } /////////////////////////////////////////////// // // avi test case 3 // /////////////////////////////////////////////// { FilePlayer& filePlayer(*FilePlayer::CreateFilePlayer(2, webrtc::kFileFormatAviFile)); FileRecorder& fileRecorder(*FileRecorder::CreateFileRecorder(3, webrtc::kFileFormatAviFile)); printf("\tReading from an avi file and writing the information to another \n"); printf("\tin a different format (H.263 CIF 30fps) \n"); printf("\t\t check file named tmpAviFileTestCase1_audioH263CIF30fps.avi\n"); assert(filePlayer.StartPlayingVideoFile( "../../../MediaFile/main/test/files/aviTestCase1_audioI420CIF30fps.avi", false, false) == 0); // init codecs webrtc::VideoCodec videoCodec; webrtc::VideoCodec recVideoCodec; webrtc::CodecInst audioCodec; assert(filePlayer.VideoCodec( videoCodec ) == 0); assert(filePlayer.AudioCodec( audioCodec) == 0); recVideoCodec = videoCodec; memcpy(recVideoCodec.plName, "H263",5); recVideoCodec.startBitrate = 1000; recVideoCodec.codecSpecific.H263.quality = 1; recVideoCodec.plType = 34; recVideoCodec.codecType = webrtc::kVideoCodecH263; assert( fileRecorder.StartRecordingVideoFile( "./tmpAviFileTestCase1_audioH263CIF30fps.avi", audioCodec, recVideoCodec) == 0); assert(fileRecorder.IsRecording()); webrtc::I420VideoFrame videoFrame; videoFrame.CreateEmptyFrame(videoCodec.width, videoCodec.height, videoCodec.width, half_width,half_width); int videoFrameCount = 0; int audioFrameCount = 0; bool audioNotDone = true; bool videoNotDone = true; AudioFrame audioFrame; while( audioNotDone || videoNotDone) { if(filePlayer.TimeUntilNextVideoFrame() <= 0) { if(filePlayer.GetVideoFromFile(videoFrame) != 0) { break; } videoFrameCount++; videoNotDone = !videoFrame.IsZeroSize(); if( videoNotDone) { assert(fileRecorder.RecordVideoToFile(videoFrame) == 0); } } WebRtc_UWord32 decodedDataLengthInSamples; if( 0 != filePlayer.Get10msAudioFromFile( audioFrame.data_, 
decodedDataLengthInSamples, audioCodec.plfreq)) { audioNotDone = false; } else { ::Sleep(5); audioFrame.sample_rate_hz_ = filePlayer.Frequency(); audioFrame.samples_per_channel_ = (WebRtc_UWord16)decodedDataLengthInSamples; assert(0 == fileRecorder.RecordAudioToFile(audioFrame)); audioFrameCount++; } } assert(videoFrameCount == 135); assert(audioFrameCount == 446); // we will start & stop with a video frame assert(fileRecorder.StopRecording() == 0); assert( !fileRecorder.IsRecording()); printf("\tGenerated ./tmpAviFileTestCase1_audioH263CIF30fps.avi\n\n"); } printf("\nTEST completed.\n"); Trace::ReturnTrace(); return 0; }
leighpauls/k2cro4
third_party/webrtc/modules/utility/test/testAPI.cc
C++
bsd-3-clause
13,298
#!/usr/bin/python # encoding: utf-8 # Jan 2011 (markus kossner) Cleaned up the code, added some documentation # somwhere around Aug 2008 (markus kossner) created # # This script extracts the molecular framework for a database of molecules. # You can use two modes (hard coded): # - Scaff: The molecular frame is extracted # - RedScaff: All linking chains between rings are deleted. The rings are directly connected. # # You can comment in/out the code snippets indicated by the comments # to force each atom of the frame to be a Carbon. # # Usage: Frames.py <database.sdf> # Output: # - sd files containing all molecules belonging to one frame (1.sdf, 2.sdf etc) # - frames.smi containing the (caninical) smiles and count of occurrence # from __future__ import print_function import os,sys from Chem import AllChem as Chem def flatten(x): """flatten(sequence) -> list Returns a single, flat list which contains all elements retrieved from the sequence and all nested sub-sequences (iterables). Examples: >>> [1, 2, [3,4], (5,6)] [1, 2, [3, 4], (5, 6)] >>> flatten([[[1,2,3], (42,None)], [4,5], [6], 7, MyVector(8,9,10)]) [1, 2, 3, 42, None, 4, 5, 6, 7, 8, 9, 10]""" result = [] for el in x: if hasattr(el, "__iter__") and not isinstance(el, basestring): result.extend(flatten(el)) else: result.append(el) return result def GetFrame(mol, mode='Scaff'): '''return a ganeric molecule defining the reduced scaffold of the input mol. 
mode can be 'Scaff' or 'RedScaff': Scaff -> chop off the side chains and return the scaffold RedScaff -> remove all linking chains and connect the rings directly at the atoms where the linker was ''' ring = mol.GetRingInfo() RingAtoms = flatten(ring.AtomRings()) NonRingAtoms = [ atom.GetIdx() for atom in mol.GetAtoms() if atom.GetIdx() not in RingAtoms ] RingNeighbors = [] Paths = [] for NonRingAtom in NonRingAtoms: for neighbor in mol.GetAtomWithIdx(NonRingAtom).GetNeighbors(): if neighbor.GetIdx() in RingAtoms: RingNeighbors.append(NonRingAtom) Paths.append([neighbor.GetIdx(),NonRingAtom]) #The ring Atoms having a non ring Nieghbor will be the start of a walk break PosConnectors = [x for x in NonRingAtoms if x not in RingNeighbors] #Only these Atoms are potential starting points of a Linker chain #print 'PosConnectors:' #print PosConnectors Framework = [ x for x in RingAtoms ] #Start a list of pathways which we will have to walk #print 'Path atoms:' #print Paths Linkers = [] while len(Paths)>0: NewPaths = [] for P in Paths: if P == None: print('ooh') else: for neighbor in mol.GetAtomWithIdx(P[-1]).GetNeighbors(): if neighbor.GetIdx() not in P: if neighbor.GetIdx() in NonRingAtoms: n = P[:] n.append(neighbor.GetIdx()) NewPaths.append(n[:]) elif neighbor.GetIdx() in RingAtoms: #print 'adding the following path to Framework:' #print P n = P[:] n.append(neighbor.GetIdx()) Linkers.append(n) Framework=Framework+P[:] Paths = NewPaths[:] #print 'Linkers:',Linkers #print 'RingAtoms:',RingAtoms #em.AddBond(3,4,Chem.BondType.SINGLE) if mode == 'RedScaff': Framework = list(set(Framework)) todel = [] NonRingAtoms.sort(reverse=True) em = Chem.EditableMol(mol) BondsToAdd = [ sorted([i[0],i[-1]]) for i in Linkers ] mem = [] for i in BondsToAdd: if i not in mem: em.AddBond(i[0],i[1],Chem.BondType.SINGLE) mem.append(i) for i in NonRingAtoms: todel.append(i) for i in todel: em.RemoveAtom(i) m = em.GetMol() #===================================# # Now do the flattening of atoms and 
bonds! # Any heavy atom will become a carbon and any bond will become a single bond! # #===================================# # for atom in m.GetAtoms(): # # atom.SetAtomicNum(6) # # atom.SetFormalCharge(0) # # for bond in m.GetBonds(): # # bond.SetBondType(Chem.BondType.SINGLE) # # Chem.SanitizeMol(m) # #===================================# return m if mode == 'Scaff': Framework = list(set(Framework)) todel = [] NonRingAtoms.sort(reverse=True) for i in NonRingAtoms: if i != None: if i not in Framework: todel.append(i) em = Chem.EditableMol(mol) for i in todel: em.RemoveAtom(i) m = em.GetMol() #===================================# # Now do the flattening of atoms and bonds! # Any heavy atom will become a carbon and any bond will become a single bond!! # #===================================# # for atom in m.GetAtoms(): # # atom.SetAtomicNum(6) # # atom.SetFormalCharge(0) # # for bond in m.GetBonds(): # # bond.SetBondType(Chem.BondType.SINGLE) # # Chem.SanitizeMol(m) # #===================================# return m if __name__=='__main__': if len(sys.argv) < 2: print("No input file provided: Frames.py filetosprocess.ext") sys.exit(1) suppl = Chem.SDMolSupplier(sys.argv[1]) FrameDict = {} for mol in suppl: m = GetFrame(mol) cansmiles = Chem.MolToSmiles(m, isomericSmiles=True) if FrameDict.has_key(cansmiles): FrameDict[cansmiles].append(mol) else: FrameDict[cansmiles]=[mol,] counter=0 w=open('frames.smi','w') for key,item in FrameDict.items(): counter+=1 d=Chem.SDWriter(str(counter)+'.sdf') for i in item: i.SetProp('Scaffold',key) i.SetProp('Cluster',str(counter)) d.write(i) print(key,len(item)) w.write(key+'\t'+str(len(item))+'\n') w.close print('number of Clusters: %d' %(counter))
soerendip42/rdkit
Contrib/M_Kossner/Frames.py
Python
bsd-3-clause
6,124
/*

Copyright (c) 2003-2014, Arvid Norberg
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

    * Redistributions of source code must retain the above copyright
      notice, this list of conditions and the following disclaimer.
    * Redistributions in binary form must reproduce the above copyright
      notice, this list of conditions and the following disclaimer in the
      documentation and/or other materials provided with the distribution.
    * Neither the name of the author nor the names of its
      contributors may be used to endorse or promote products derived
      from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.

*/

#ifndef TORRENT_BENCODE_HPP_INCLUDED
#define TORRENT_BENCODE_HPP_INCLUDED

// OVERVIEW
//
// Bencoding is a common representation in bittorrent used for
// for dictionary, list, int and string hierarchies. It's used
// to encode .torrent files and some messages in the network
// protocol. libtorrent also uses it to store settings, resume
// data and other state between sessions.
//
// Strings in bencoded structures are not necessarily representing
// text. Strings are raw byte buffers of a certain length. If a
// string is meant to be interpreted as text, it is required to
// be UTF-8 encoded. See `BEP 3`_.
//
// There are two mechanisms to *decode* bencoded buffers in libtorrent.
//
// The most flexible one is bdecode(), which returns a structure
// represented by entry. When a buffer is decoded with this function,
// it can be discarded. The entry does not contain any references back
// to it. This means that bdecode() actually copies all the data out
// of the buffer and into its own hierarchy. This makes this
// function potentially expensive, if you're parsing large amounts
// of data.
//
// Another consideration is that bdecode() is a recursive parser.
// For this reason, in order to avoid DoS attacks by triggering
// a stack overflow, there is a recursion limit. This limit is
// a sanity check to make sure it doesn't run the risk of
// busting the stack.
//
// The second mechanism is lazy_bdecode(), which returns a
// bencoded structure represented by lazy_entry. This function
// builds a tree that points back into the original buffer.
// The returned lazy_entry will not be valid once the buffer
// it was parsed out of is discarded.
//
// Not only is this function more efficient because of less
// memory allocation and data copy, the parser is also not
// recursive, which means it probably performs a little bit
// better and can have a higher recursion limit on the structures
// it's parsing.

#include <stdlib.h>
#include <string>
#include <exception>
#include <iterator> // for distance

#ifdef _MSC_VER
#pragma warning(push, 1)
#endif

#include <boost/static_assert.hpp>

#ifdef _MSC_VER
#pragma warning(pop)
#endif

#include "libtorrent/entry.hpp"
#include "libtorrent/config.hpp"
#include "libtorrent/assert.hpp"
#include "libtorrent/escape_string.hpp"
#include "libtorrent/io.hpp" // for write_string

namespace libtorrent
{

#ifndef TORRENT_NO_DEPRECATE
	// thrown by bdecode() if the provided bencoded buffer does not contain
	// valid encoding.
	struct TORRENT_EXPORT invalid_encoding: std::exception
	{
		// hidden
		virtual const char* what() const throw() { return "invalid bencoding"; }
	};
#endif

	namespace detail
	{
		// this is used in the template, so it must be available to the client
		TORRENT_EXPORT char const* integer_to_str(char* buf, int size
			, entry::integer_type val);

		// writes the decimal representation of val to the output iterator,
		// one character at a time. Returns the number of characters written.
		template <class OutIt>
		int write_integer(OutIt& out, entry::integer_type val)
		{
			// the stack allocated buffer for keeping the
			// decimal representation of the number can
			// not hold number bigger than this:
			BOOST_STATIC_ASSERT(sizeof(entry::integer_type) <= 8);
			char buf[21];
			int ret = 0;
			for (char const* str = integer_to_str(buf, 21, val);
				*str != 0; ++str)
			{
				*out = *str;
				++out;
				++ret;
			}
			return ret;
		}

		// writes a single character to the output iterator and advances it
		template <class OutIt>
		void write_char(OutIt& out, char c)
		{
			*out = c;
			++out;
		}

		// reads from 'in' until 'end_token' is found (the token itself is not
		// consumed) and returns the characters read so far. Sets err to true
		// (and returns the partial string) if the buffer ends first.
		template <class InIt>
		std::string read_until(InIt& in, InIt end, char end_token, bool& err)
		{
			std::string ret;
			if (in == end)
			{
				err = true;
				return ret;
			}
			while (*in != end_token)
			{
				ret += *in;
				++in;
				if (in == end)
				{
					err = true;
					return ret;
				}
			}
			return ret;
		}

		// appends exactly 'len' characters from 'in' to 'str'. Sets err to
		// true if the buffer is exhausted before 'len' characters are read.
		template<class InIt>
		void read_string(InIt& in, InIt end, int len, std::string& str, bool& err)
		{
			TORRENT_ASSERT(len >= 0);
			for (int i = 0; i < len; ++i)
			{
				if (in == end)
				{
					err = true;
					return;
				}
				str += *in;
				++in;
			}
		}

		// serializes 'e' in bencoded form to the output iterator. Returns the
		// number of characters written. Recurses into lists and dictionaries.
		template<class OutIt>
		int bencode_recursive(OutIt& out, const entry& e)
		{
			int ret = 0;
			switch(e.type())
			{
			case entry::int_t:
				write_char(out, 'i');
				ret += write_integer(out, e.integer());
				write_char(out, 'e');
				ret += 2;
				break;
			case entry::string_t:
				ret += write_integer(out, e.string().length());
				write_char(out, ':');
				ret += write_string(e.string(), out);
				ret += 1;
				break;
			case entry::list_t:
				write_char(out, 'l');
				for (entry::list_type::const_iterator i = e.list().begin();
					i != e.list().end(); ++i)
					ret += bencode_recursive(out, *i);
				write_char(out, 'e');
				ret += 2;
				break;
			case entry::dictionary_t:
				write_char(out, 'd');
				for (entry::dictionary_type::const_iterator i = e.dict().begin();
					i != e.dict().end(); ++i)
				{
					// write key
					ret += write_integer(out, i->first.length());
					write_char(out, ':');
					ret += write_string(i->first, out);
					// write value
					ret += bencode_recursive(out, i->second);
					ret += 1;
				}
				write_char(out, 'e');
				ret += 2;
				break;
			default:
				// trying to encode a structure with uninitialized values!
				TORRENT_ASSERT_VAL(false, e.type());
				// do nothing
				break;
			}
			return ret;
		}

		// parses one bencoded element starting at 'in' into 'ret', advancing
		// 'in' past it. Sets err on malformed input or when the recursion
		// depth limit (100) is exceeded. The TORRENT_DEBUG resets of
		// m_type_queried support entry's type-query debugging.
		template<class InIt>
		void bdecode_recursive(InIt& in, InIt end, entry& ret, bool& err, int depth)
		{
			if (depth >= 100)
			{
				err = true;
				return;
			}

			if (in == end)
			{
				err = true;
#ifdef TORRENT_DEBUG
				ret.m_type_queried = false;
#endif
				return;
			}
			switch (*in)
			{

			// ----------------------------------------------
			// integer
			case 'i':
				{
				++in; // 'i'
				std::string val = read_until(in, end, 'e', err);
				if (err) return;
				TORRENT_ASSERT(*in == 'e');
				++in; // 'e'
				ret = entry(entry::int_t);
				char* end_pointer;
				ret.integer() = strtoll(val.c_str(), &end_pointer, 10);
#ifdef TORRENT_DEBUG
				ret.m_type_queried = false;
#endif
				// strtoll consumed no digits: the payload was not a number
				if (end_pointer == val.c_str())
				{
					err = true;
					return;
				}
				} break;

			// ----------------------------------------------
			// list
			case 'l':
				{
				ret = entry(entry::list_t);
				++in; // 'l'
				while (*in != 'e')
				{
					ret.list().push_back(entry());
					entry& e = ret.list().back();
					bdecode_recursive(in, end, e, err, depth + 1);
					if (err)
					{
#ifdef TORRENT_DEBUG
						ret.m_type_queried = false;
#endif
						return;
					}
					if (in == end)
					{
						err = true;
#ifdef TORRENT_DEBUG
						ret.m_type_queried = false;
#endif
						return;
					}
				}
#ifdef TORRENT_DEBUG
				ret.m_type_queried = false;
#endif
				TORRENT_ASSERT(*in == 'e');
				++in; // 'e'
				} break;

			// ----------------------------------------------
			// dictionary
			case 'd':
				{
				ret = entry(entry::dictionary_t);
				++in; // 'd'
				while (*in != 'e')
				{
					entry key;
					bdecode_recursive(in, end, key, err, depth + 1);
					// dictionary keys must be strings per the bencoding spec
					if (err || key.type() != entry::string_t)
					{
#ifdef TORRENT_DEBUG
						ret.m_type_queried = false;
#endif
						return;
					}
					entry& e = ret[key.string()];
					bdecode_recursive(in, end, e, err, depth + 1);
					if (err)
					{
#ifdef TORRENT_DEBUG
						ret.m_type_queried = false;
#endif
						return;
					}
					if (in == end)
					{
						err = true;
#ifdef TORRENT_DEBUG
						ret.m_type_queried = false;
#endif
						return;
					}
				}
#ifdef TORRENT_DEBUG
				ret.m_type_queried = false;
#endif
				TORRENT_ASSERT(*in == 'e');
				++in; // 'e'
				} break;

			// ----------------------------------------------
			// string
			default:
				if (is_digit((unsigned char)*in))
				{
					std::string len_s = read_until(in, end, ':', err);
					if (err)
					{
#ifdef TORRENT_DEBUG
						ret.m_type_queried = false;
#endif
						return;
					}
					TORRENT_ASSERT(*in == ':');
					++in; // ':'
					int len = atoi(len_s.c_str());
					ret = entry(entry::string_t);
					read_string(in, end, len, ret.string(), err);
					if (err)
					{
#ifdef TORRENT_DEBUG
						ret.m_type_queried = false;
#endif
						return;
					}
				}
				else
				{
					err = true;
#ifdef TORRENT_DEBUG
					ret.m_type_queried = false;
#endif
					return;
				}
#ifdef TORRENT_DEBUG
				ret.m_type_queried = false;
#endif
			}
		}
	}

	// These functions will encode data to bencoded_ or decode bencoded_ data.
	//
	// If possible, lazy_bdecode() should be preferred over ``bdecode()``.
	//
	// The entry_ class is the internal representation of the bencoded data
	// and it can be used to retrieve information, an entry_ can also be build by
	// the program and given to ``bencode()`` to encode it into the ``OutIt``
	// iterator.
	//
	// The ``OutIt`` and ``InIt`` are iterators
	// (InputIterator_ and OutputIterator_ respectively). They
	// are templates and are usually instantiated as ostream_iterator_,
	// back_insert_iterator_ or istream_iterator_. These
	// functions will assume that the iterator refers to a character
	// (``char``). So, if you want to encode entry ``e`` into a buffer
	// in memory, you can do it like this::
	//
	//	std::vector<char> buffer;
	//	bencode(std::back_inserter(buf), e);
	//
	// .. _InputIterator: http://www.sgi.com/tech/stl/InputIterator.html
	// .. _OutputIterator: http://www.sgi.com/tech/stl/OutputIterator.html
	// .. _ostream_iterator: http://www.sgi.com/tech/stl/ostream_iterator.html
	// .. _back_insert_iterator: http://www.sgi.com/tech/stl/back_insert_iterator.html
	// .. _istream_iterator: http://www.sgi.com/tech/stl/istream_iterator.html
	//
	// If you want to decode a torrent file from a buffer in memory, you can do it like this::
	//
	//	std::vector<char> buffer;
	//	// ...
	//	entry e = bdecode(buf.begin(), buf.end());
	//
	// Or, if you have a raw char buffer::
	//
	//	const char* buf;
	//	// ...
	//	entry e = bdecode(buf, buf + data_size);
	//
	// Now we just need to know how to retrieve information from the entry.
	//
	// If ``bdecode()`` encounters invalid encoded data in the range given to it
	// it will return a default constructed ``entry`` object.

	// encodes 'e' to the output iterator; returns number of characters written
	template<class OutIt> int bencode(OutIt out, const entry& e)
	{
		return detail::bencode_recursive(out, e);
	}

	// decodes [start, end); returns a default-constructed entry on error
	template<class InIt> entry bdecode(InIt start, InIt end)
	{
		entry e;
		bool err = false;
		detail::bdecode_recursive(start, end, e, err, 0);
#ifdef TORRENT_DEBUG
		TORRENT_ASSERT(e.m_type_queried == false);
#endif
		if (err) return entry();
		return e;
	}

	// same as above, but also reports (via 'len') how many characters of the
	// input range were consumed by the decoded element
	template<class InIt> entry bdecode(InIt start, InIt end, int& len)
	{
		entry e;
		bool err = false;
		InIt s = start;
		detail::bdecode_recursive(start, end, e, err, 0);
		len = std::distance(s, start);
		TORRENT_ASSERT(len >= 0);
		if (err) return entry();
		return e;
	}
}

#endif // TORRENT_BENCODE_HPP_INCLUDED
mirror/libtorrent
include/libtorrent/bencode.hpp
C++
bsd-3-clause
12,312
/*
 * Copyright (c) Contributors, http://opensimulator.org/
 * See CONTRIBUTORS.TXT for a full list of copyright holders.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * Neither the name of the OpenSimulator Project nor the
 *       names of its contributors may be used to endorse or promote products
 *       derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

using System;
using System.Collections.Generic;
using System.Reflection;
using log4net;
using Nini.Config;
using OpenMetaverse;
using Mono.Addins;
using OpenSim.Framework;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
using OpenSim.Services.Interfaces;
using GridRegion = OpenSim.Services.Interfaces.GridRegion;

namespace OpenSim.Region.CoreModules.World.WorldMap
{
    /// <summary>
    /// Shared region module that answers viewer map-name search requests by
    /// querying the grid service and streaming the matches back as map blocks.
    /// </summary>
    [Extension(Path = "/OpenSim/RegionModules", NodeName = "RegionModule", Id = "MapSearchModule")]
    public class MapSearchModule : ISharedRegionModule
    {
        private static readonly ILog m_log =
            LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

        Scene m_scene = null; // only need one for communication with GridService
        List<Scene> m_scenes = new List<Scene>();
        // AgentIds with a search currently in flight; used to drop duplicate
        // requests from the same client. Guarded by lock (m_Clients).
        List<UUID> m_Clients;

        IWorldMapModule m_WorldMap;
        // Lazily resolved world-map module of the primary scene.
        IWorldMapModule WorldMap
        {
            get
            {
                if (m_WorldMap == null)
                    m_WorldMap = m_scene.RequestModuleInterface<IWorldMapModule>();
                return m_WorldMap;
            }
        }

        #region ISharedRegionModule Members
        public void Initialise(IConfigSource source)
        {
        }

        public void AddRegion(Scene scene)
        {
            // first region added becomes the one used to reach GridService
            if (m_scene == null)
            {
                m_scene = scene;
            }

            m_scenes.Add(scene);
            scene.EventManager.OnNewClient += OnNewClient;
            m_Clients = new List<UUID>();
        }

        public void RemoveRegion(Scene scene)
        {
            m_scenes.Remove(scene);
            // keep a usable primary scene if the current one is removed
            if (m_scene == scene && m_scenes.Count > 0)
                m_scene = m_scenes[0];

            scene.EventManager.OnNewClient -= OnNewClient;
        }

        public void PostInitialise()
        {
        }

        public void Close()
        {
            m_scene = null;
            m_scenes.Clear();
        }

        public string Name
        {
            get { return "MapSearchModule"; }
        }

        public Type ReplaceableInterface
        {
            get { return null; }
        }

        public void RegionLoaded(Scene scene)
        {
        }
        #endregion

        private void OnNewClient(IClientAPI client)
        {
            client.OnMapNameRequest += OnMapNameRequestHandler;
        }

        /// <summary>
        /// Entry point for the viewer's map-name request. Deduplicates
        /// concurrent requests per agent, then forwards to OnMapNameRequest.
        /// </summary>
        private void OnMapNameRequestHandler(IClientAPI remoteClient, string mapName, uint flags)
        {
            lock (m_Clients)
            {
                if (m_Clients.Contains(remoteClient.AgentId))
                    return;

                m_Clients.Add(remoteClient.AgentId);
            }

            OnMapNameRequest(remoteClient, mapName, flags);
        }

        /// <summary>
        /// Performs the actual search asynchronously and sends the resulting
        /// map blocks to the client. Always removes the agent from the
        /// in-flight list when done (finally block).
        /// </summary>
        private void OnMapNameRequest(IClientAPI remoteClient, string mapName, uint flags)
        {
            Util.FireAndForget(x =>
            {
                try
                {
                    List<MapBlockData> blocks = new List<MapBlockData>();
                    // reject searches shorter than 3 characters (a trailing
                    // '#' does not count toward the minimum)
                    if (mapName.Length < 3 || (mapName.EndsWith("#") && mapName.Length < 4))
                    {
                        // final block, closing the search result
                        AddFinalBlock(blocks,mapName);

                        // flags are agent flags sent from the viewer.
                        // they have different values depending on different viewers, apparently
                        remoteClient.SendMapBlock(blocks, flags);
                        remoteClient.SendAlertMessage("Use a search string with at least 3 characters");
                        return;
                    }

                    //m_log.DebugFormat("MAP NAME=({0})", mapName);

                    // Hack to get around the fact that ll V3 now drops the port from the
                    // map name. See https://jira.secondlife.com/browse/VWR-28570
                    //
                    // Caller, use this magic form instead:
                    // secondlife://http|!!mygrid.com|8002|Region+Name/128/128
                    // or url encode if possible.
                    // the hacks we do with this viewer...
                    //
                    bool needOriginalName = false;
                    string mapNameOrig = mapName;
                    if (mapName.Contains("|"))
                    {
                        mapName = mapName.Replace('|', ':');
                        needOriginalName = true;
                    }
                    if (mapName.Contains("+"))
                    {
                        mapName = mapName.Replace('+', ' ');
                        needOriginalName = true;
                    }
                    if (mapName.Contains("!"))
                    {
                        mapName = mapName.Replace('!', '/');
                        needOriginalName = true;
                    }
                    if (mapName.Contains("."))
                        needOriginalName = true;

                    // try to fetch from GridServer
                    List<GridRegion> regionInfos = m_scene.GridService.GetRegionsByName(m_scene.RegionInfo.ScopeID, mapName, 20);
//                    if (regionInfos.Count == 0)
//                        remoteClient.SendAlertMessage("Hyperlink could not be established.");

                    //m_log.DebugFormat("[MAPSEARCHMODULE]: search {0} returned {1} regions", mapName, regionInfos.Count);

                    // NOTE(review): 'data' is assigned below but never used in
                    // this loop ('block' carries the result) — looks like dead
                    // code left from a refactor; confirm before removing.
                    MapBlockData data;
                    if (regionInfos != null && regionInfos.Count > 0)
                    {
                        foreach (GridRegion info in regionInfos)
                        {
                            data = new MapBlockData();
                            data.Agents = 0;
                            data.Access = info.Access;
                            MapBlockData block = new MapBlockData();
                            WorldMap.MapBlockFromGridRegion(block, info, flags);

                            // viewer V3 (flags == 2) expects the original
                            // (un-de-mangled) name echoed back for a single hit
                            if (flags == 2 && regionInfos.Count == 1 && needOriginalName)
                                block.Name = mapNameOrig;
                            blocks.Add(block);
                        }
                    }

                    // final block, closing the search result
                    AddFinalBlock(blocks,mapNameOrig);

                    // flags are agent flags sent from the viewer.
                    // they have different values depending on different viewers, apparently
                    remoteClient.SendMapBlock(blocks, flags);

                    // send extra user messages for V3
                    // because the UI is very confusing
                    // while we don't fix the hard-coded urls
                    if (flags == 2)
                    {
                        if (regionInfos == null || regionInfos.Count == 0)
                            remoteClient.SendAgentAlertMessage("No regions found with that name.", true);
//                    else if (regionInfos.Count == 1)
//                        remoteClient.SendAgentAlertMessage("Region found!", false);
                    }
                }
                finally
                {
                    // allow this agent to search again
                    lock (m_Clients)
                        m_Clients.Remove(remoteClient.AgentId);
                }
            });
        }

        /// <summary>
        /// Appends the sentinel block that tells the viewer the search result
        /// list is complete (Access = NonExistent marks it as a terminator).
        /// </summary>
        private void AddFinalBlock(List<MapBlockData> blocks,string name)
        {
            // final block, closing the search result
            MapBlockData data = new MapBlockData();
            data.Agents = 0;
            data.Access = (byte)SimAccess.NonExistent;
            data.MapImageId = UUID.Zero;
            data.Name = name;
            data.RegionFlags = 0;
            data.WaterHeight = 0; // not used
            data.X = 0;
            data.Y = 0;
            blocks.Add(data);
        }

//        private Scene GetClientScene(IClientAPI client)
//        {
//            foreach (Scene s in m_scenes)
//            {
//                if (client.Scene.RegionInfo.RegionHandle == s.RegionInfo.RegionHandle)
//                    return s;
//            }
//            return m_scene;
//        }
    }
}
TomDataworks/opensim
OpenSim/Region/CoreModules/World/WorldMap/MapSearchModule.cs
C#
bsd-3-clause
9,856
// Copyright 2011 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "cc/layers/tiled_layer.h" #include <algorithm> #include <vector> #include "base/auto_reset.h" #include "base/basictypes.h" #include "build/build_config.h" #include "cc/layers/layer_impl.h" #include "cc/layers/tiled_layer_impl.h" #include "cc/resources/layer_updater.h" #include "cc/resources/prioritized_resource.h" #include "cc/resources/priority_calculator.h" #include "cc/trees/layer_tree_host.h" #include "cc/trees/occlusion_tracker.h" #include "third_party/khronos/GLES2/gl2.h" #include "ui/gfx/rect_conversions.h" namespace cc { // Maximum predictive expansion of the visible area. static const int kMaxPredictiveTilesCount = 2; // Number of rows/columns of tiles to pre-paint. // We should increase these further as all textures are // prioritized and we insure performance doesn't suffer. static const int kPrepaintRows = 4; static const int kPrepaintColumns = 2; class UpdatableTile : public LayerTilingData::Tile { public: static scoped_ptr<UpdatableTile> Create( scoped_ptr<LayerUpdater::Resource> updater_resource) { return make_scoped_ptr(new UpdatableTile(updater_resource.Pass())); } LayerUpdater::Resource* updater_resource() { return updater_resource_.get(); } PrioritizedResource* managed_resource() { return updater_resource_->texture(); } bool is_dirty() const { return !dirty_rect.IsEmpty(); } // Reset update state for the current frame. This should occur before painting // for all layers. Since painting one layer can invalidate another layer after // it has already painted, mark all non-dirty tiles as valid before painting // such that invalidations during painting won't prevent them from being // pushed. 
void ResetUpdateState() { update_rect = gfx::Rect(); occluded = false; partial_update = false; valid_for_frame = !is_dirty(); } // This promises to update the tile and therefore also guarantees the tile // will be valid for this frame. dirty_rect is copied into update_rect so we // can continue to track re-entrant invalidations that occur during painting. void MarkForUpdate() { valid_for_frame = true; update_rect = dirty_rect; dirty_rect = gfx::Rect(); } gfx::Rect dirty_rect; gfx::Rect update_rect; bool partial_update; bool valid_for_frame; bool occluded; private: explicit UpdatableTile(scoped_ptr<LayerUpdater::Resource> updater_resource) : partial_update(false), valid_for_frame(false), occluded(false), updater_resource_(updater_resource.Pass()) {} scoped_ptr<LayerUpdater::Resource> updater_resource_; DISALLOW_COPY_AND_ASSIGN(UpdatableTile); }; TiledLayer::TiledLayer() : ContentsScalingLayer(), texture_format_(RGBA_8888), skips_draw_(false), failed_update_(false), tiling_option_(AUTO_TILE) { tiler_ = LayerTilingData::Create(gfx::Size(), LayerTilingData::HAS_BORDER_TEXELS); } TiledLayer::~TiledLayer() {} scoped_ptr<LayerImpl> TiledLayer::CreateLayerImpl(LayerTreeImpl* tree_impl) { return TiledLayerImpl::Create(tree_impl, id()).PassAs<LayerImpl>(); } void TiledLayer::UpdateTileSizeAndTilingOption() { DCHECK(layer_tree_host()); gfx::Size default_tile_size = layer_tree_host()->settings().default_tile_size; gfx::Size max_untiled_layer_size = layer_tree_host()->settings().max_untiled_layer_size; int layer_width = content_bounds().width(); int layer_height = content_bounds().height(); gfx::Size tile_size(std::min(default_tile_size.width(), layer_width), std::min(default_tile_size.height(), layer_height)); // Tile if both dimensions large, or any one dimension large and the other // extends into a second tile but the total layer area isn't larger than that // of the largest possible untiled layer. This heuristic allows for long // skinny layers (e.g. 
scrollbars) that are Nx1 tiles to minimize wasted // texture space but still avoids creating very large tiles. bool any_dimension_large = layer_width > max_untiled_layer_size.width() || layer_height > max_untiled_layer_size.height(); bool any_dimension_one_tile = (layer_width <= default_tile_size.width() || layer_height <= default_tile_size.height()) && (layer_width * layer_height) <= (max_untiled_layer_size.width() * max_untiled_layer_size.height()); bool auto_tiled = any_dimension_large && !any_dimension_one_tile; bool is_tiled; if (tiling_option_ == ALWAYS_TILE) is_tiled = true; else if (tiling_option_ == NEVER_TILE) is_tiled = false; else is_tiled = auto_tiled; gfx::Size requested_size = is_tiled ? tile_size : content_bounds(); const int max_size = layer_tree_host()->GetRendererCapabilities().max_texture_size; requested_size.SetToMin(gfx::Size(max_size, max_size)); SetTileSize(requested_size); } void TiledLayer::UpdateBounds() { gfx::Size old_tiling_size = tiler_->tiling_size(); gfx::Size new_tiling_size = content_bounds(); if (old_tiling_size == new_tiling_size) return; tiler_->SetTilingSize(new_tiling_size); // Invalidate any areas that the new bounds exposes. 
Region new_region = SubtractRegions(gfx::Rect(new_tiling_size), gfx::Rect(old_tiling_size)); for (Region::Iterator new_rects(new_region); new_rects.has_rect(); new_rects.next()) InvalidateContentRect(new_rects.rect()); UpdateDrawsContent(HasDrawableContent()); } void TiledLayer::SetTileSize(const gfx::Size& size) { tiler_->SetTileSize(size); UpdateDrawsContent(HasDrawableContent()); } void TiledLayer::SetBorderTexelOption( LayerTilingData::BorderTexelOption border_texel_option) { tiler_->SetBorderTexelOption(border_texel_option); UpdateDrawsContent(HasDrawableContent()); } bool TiledLayer::HasDrawableContent() const { bool has_more_than_one_tile = (tiler_->num_tiles_x() > 1) || (tiler_->num_tiles_y() > 1); return !(tiling_option_ == NEVER_TILE && has_more_than_one_tile) && ContentsScalingLayer::HasDrawableContent(); } void TiledLayer::ReduceMemoryUsage() { if (Updater()) Updater()->ReduceMemoryUsage(); } void TiledLayer::SetIsMask(bool is_mask) { set_tiling_option(is_mask ? NEVER_TILE : AUTO_TILE); } void TiledLayer::PushPropertiesTo(LayerImpl* layer) { ContentsScalingLayer::PushPropertiesTo(layer); TiledLayerImpl* tiled_layer = static_cast<TiledLayerImpl*>(layer); tiled_layer->set_skips_draw(skips_draw_); tiled_layer->SetTilingData(*tiler_); std::vector<UpdatableTile*> invalid_tiles; for (LayerTilingData::TileMap::const_iterator iter = tiler_->tiles().begin(); iter != tiler_->tiles().end(); ++iter) { int i = iter->first.first; int j = iter->first.second; UpdatableTile* tile = static_cast<UpdatableTile*>(iter->second); // TODO(enne): This should not ever be null. if (!tile) continue; if (!tile->managed_resource()->have_backing_texture()) { // Evicted tiles get deleted from both layers invalid_tiles.push_back(tile); continue; } if (!tile->valid_for_frame) { // Invalidated tiles are set so they can get different debug colors. 
tiled_layer->PushInvalidTile(i, j); continue; } tiled_layer->PushTileProperties( i, j, tile->managed_resource()->resource_id(), tile->opaque_rect(), tile->managed_resource()->contents_swizzled()); } for (std::vector<UpdatableTile*>::const_iterator iter = invalid_tiles.begin(); iter != invalid_tiles.end(); ++iter) tiler_->TakeTile((*iter)->i(), (*iter)->j()); // TiledLayer must push properties every frame, since viewport state and // occlusion from anywhere in the tree can change what the layer decides to // push to the impl tree. needs_push_properties_ = true; } PrioritizedResourceManager* TiledLayer::ResourceManager() { if (!layer_tree_host()) return NULL; return layer_tree_host()->contents_texture_manager(); } const PrioritizedResource* TiledLayer::ResourceAtForTesting(int i, int j) const { UpdatableTile* tile = TileAt(i, j); if (!tile) return NULL; return tile->managed_resource(); } void TiledLayer::SetLayerTreeHost(LayerTreeHost* host) { if (host && host != layer_tree_host()) { for (LayerTilingData::TileMap::const_iterator iter = tiler_->tiles().begin(); iter != tiler_->tiles().end(); ++iter) { UpdatableTile* tile = static_cast<UpdatableTile*>(iter->second); // TODO(enne): This should not ever be null. if (!tile) continue; tile->managed_resource()->SetTextureManager( host->contents_texture_manager()); } } ContentsScalingLayer::SetLayerTreeHost(host); } UpdatableTile* TiledLayer::TileAt(int i, int j) const { return static_cast<UpdatableTile*>(tiler_->TileAt(i, j)); } UpdatableTile* TiledLayer::CreateTile(int i, int j) { CreateUpdaterIfNeeded(); scoped_ptr<UpdatableTile> tile( UpdatableTile::Create(Updater()->CreateResource(ResourceManager()))); tile->managed_resource()->SetDimensions(tiler_->tile_size(), texture_format_); UpdatableTile* added_tile = tile.get(); tiler_->AddTile(tile.PassAs<LayerTilingData::Tile>(), i, j); added_tile->dirty_rect = tiler_->TileRect(added_tile); // Temporary diagnostic crash. 
CHECK(added_tile); CHECK(TileAt(i, j)); return added_tile; } void TiledLayer::SetNeedsDisplayRect(const gfx::RectF& dirty_rect) { InvalidateContentRect(LayerRectToContentRect(dirty_rect)); ContentsScalingLayer::SetNeedsDisplayRect(dirty_rect); } void TiledLayer::InvalidateContentRect(const gfx::Rect& content_rect) { UpdateBounds(); if (tiler_->is_empty() || content_rect.IsEmpty() || skips_draw_) return; for (LayerTilingData::TileMap::const_iterator iter = tiler_->tiles().begin(); iter != tiler_->tiles().end(); ++iter) { UpdatableTile* tile = static_cast<UpdatableTile*>(iter->second); DCHECK(tile); // TODO(enne): This should not ever be null. if (!tile) continue; gfx::Rect bound = tiler_->TileRect(tile); bound.Intersect(content_rect); tile->dirty_rect.Union(bound); } } // Returns true if tile is dirty and only part of it needs to be updated. bool TiledLayer::TileOnlyNeedsPartialUpdate(UpdatableTile* tile) { return !tile->dirty_rect.Contains(tiler_->TileRect(tile)) && tile->managed_resource()->have_backing_texture(); } bool TiledLayer::UpdateTiles(int left, int top, int right, int bottom, ResourceUpdateQueue* queue, const OcclusionTracker<Layer>* occlusion, bool* updated) { CreateUpdaterIfNeeded(); bool ignore_occlusions = !occlusion; if (!HaveTexturesForTiles(left, top, right, bottom, ignore_occlusions)) { failed_update_ = true; return false; } gfx::Rect update_rect; gfx::Rect paint_rect; MarkTilesForUpdate( &update_rect, &paint_rect, left, top, right, bottom, ignore_occlusions); if (paint_rect.IsEmpty()) return true; *updated = true; UpdateTileTextures( update_rect, paint_rect, left, top, right, bottom, queue, occlusion); return true; } void TiledLayer::MarkOcclusionsAndRequestTextures( int left, int top, int right, int bottom, const OcclusionTracker<Layer>* occlusion) { int occluded_tile_count = 0; bool succeeded = true; for (int j = top; j <= bottom; ++j) { for (int i = left; i <= right; ++i) { UpdatableTile* tile = TileAt(i, j); DCHECK(tile); // Did 
SetTexturePriorities get skipped? // TODO(enne): This should not ever be null. if (!tile) continue; // Did ResetUpdateState get skipped? Are we doing more than one occlusion // pass? DCHECK(!tile->occluded); gfx::Rect visible_tile_rect = gfx::IntersectRects( tiler_->tile_bounds(i, j), visible_content_rect()); if (!draw_transform_is_animating() && occlusion && occlusion->Occluded( render_target(), visible_tile_rect, draw_transform())) { tile->occluded = true; occluded_tile_count++; } else { succeeded &= tile->managed_resource()->RequestLate(); } } } } bool TiledLayer::HaveTexturesForTiles(int left, int top, int right, int bottom, bool ignore_occlusions) { for (int j = top; j <= bottom; ++j) { for (int i = left; i <= right; ++i) { UpdatableTile* tile = TileAt(i, j); DCHECK(tile); // Did SetTexturePriorites get skipped? // TODO(enne): This should not ever be null. if (!tile) continue; // Ensure the entire tile is dirty if we don't have the texture. if (!tile->managed_resource()->have_backing_texture()) tile->dirty_rect = tiler_->TileRect(tile); // If using occlusion and the visible region of the tile is occluded, // don't reserve a texture or update the tile. if (tile->occluded && !ignore_occlusions) continue; if (!tile->managed_resource()->can_acquire_backing_texture()) return false; } } return true; } void TiledLayer::MarkTilesForUpdate(gfx::Rect* update_rect, gfx::Rect* paint_rect, int left, int top, int right, int bottom, bool ignore_occlusions) { for (int j = top; j <= bottom; ++j) { for (int i = left; i <= right; ++i) { UpdatableTile* tile = TileAt(i, j); DCHECK(tile); // Did SetTexturePriorites get skipped? // TODO(enne): This should not ever be null. if (!tile) continue; if (tile->occluded && !ignore_occlusions) continue; // Prepare update rect from original dirty rects. update_rect->Union(tile->dirty_rect); // TODO(reveman): Decide if partial update should be allowed based on cost // of update. 
https://bugs.webkit.org/show_bug.cgi?id=77376 if (tile->is_dirty() && !layer_tree_host()->AlwaysUsePartialTextureUpdates()) { // If we get a partial update, we use the same texture, otherwise return // the current texture backing, so we don't update visible textures // non-atomically. If the current backing is in-use, it won't be // deleted until after the commit as the texture manager will not allow // deletion or recycling of in-use textures. if (TileOnlyNeedsPartialUpdate(tile) && layer_tree_host()->RequestPartialTextureUpdate()) { tile->partial_update = true; } else { tile->dirty_rect = tiler_->TileRect(tile); tile->managed_resource()->ReturnBackingTexture(); } } paint_rect->Union(tile->dirty_rect); tile->MarkForUpdate(); } } } void TiledLayer::UpdateTileTextures(const gfx::Rect& update_rect, const gfx::Rect& paint_rect, int left, int top, int right, int bottom, ResourceUpdateQueue* queue, const OcclusionTracker<Layer>* occlusion) { // The update_rect should be in layer space. So we have to convert the // paint_rect from content space to layer space. float width_scale = paint_properties().bounds.width() / static_cast<float>(content_bounds().width()); float height_scale = paint_properties().bounds.height() / static_cast<float>(content_bounds().height()); update_rect_ = gfx::ScaleRect(update_rect, width_scale, height_scale); // Calling PrepareToUpdate() calls into WebKit to paint, which may have the // side effect of disabling compositing, which causes our reference to the // texture updater to be deleted. However, we can't free the memory backing // the SkCanvas until the paint finishes, so we grab a local reference here to // hold the updater alive until the paint completes. 
scoped_refptr<LayerUpdater> protector(Updater()); gfx::Rect painted_opaque_rect; Updater()->PrepareToUpdate(paint_rect, tiler_->tile_size(), 1.f / width_scale, 1.f / height_scale, &painted_opaque_rect); for (int j = top; j <= bottom; ++j) { for (int i = left; i <= right; ++i) { UpdatableTile* tile = TileAt(i, j); DCHECK(tile); // Did SetTexturePriorites get skipped? // TODO(enne): This should not ever be null. if (!tile) continue; gfx::Rect tile_rect = tiler_->tile_bounds(i, j); // Use update_rect as the above loop copied the dirty rect for this frame // to update_rect. gfx::Rect dirty_rect = tile->update_rect; if (dirty_rect.IsEmpty()) continue; // Save what was painted opaque in the tile. Keep the old area if the // paint didn't touch it, and didn't paint some other part of the tile // opaque. gfx::Rect tile_painted_rect = gfx::IntersectRects(tile_rect, paint_rect); gfx::Rect tile_painted_opaque_rect = gfx::IntersectRects(tile_rect, painted_opaque_rect); if (!tile_painted_rect.IsEmpty()) { gfx::Rect paint_inside_tile_opaque_rect = gfx::IntersectRects(tile->opaque_rect(), tile_painted_rect); bool paint_inside_tile_opaque_rect_is_non_opaque = !paint_inside_tile_opaque_rect.IsEmpty() && !tile_painted_opaque_rect.Contains(paint_inside_tile_opaque_rect); bool opaque_paint_not_inside_tile_opaque_rect = !tile_painted_opaque_rect.IsEmpty() && !tile->opaque_rect().Contains(tile_painted_opaque_rect); if (paint_inside_tile_opaque_rect_is_non_opaque || opaque_paint_not_inside_tile_opaque_rect) tile->set_opaque_rect(tile_painted_opaque_rect); } // source_rect starts as a full-sized tile with border texels included. gfx::Rect source_rect = tiler_->TileRect(tile); source_rect.Intersect(dirty_rect); // Paint rect not guaranteed to line up on tile boundaries, so // make sure that source_rect doesn't extend outside of it. 
source_rect.Intersect(paint_rect); tile->update_rect = source_rect; if (source_rect.IsEmpty()) continue; const gfx::Point anchor = tiler_->TileRect(tile).origin(); // Calculate tile-space rectangle to upload into. gfx::Vector2d dest_offset = source_rect.origin() - anchor; CHECK_GE(dest_offset.x(), 0); CHECK_GE(dest_offset.y(), 0); // Offset from paint rectangle to this tile's dirty rectangle. gfx::Vector2d paint_offset = source_rect.origin() - paint_rect.origin(); CHECK_GE(paint_offset.x(), 0); CHECK_GE(paint_offset.y(), 0); CHECK_LE(paint_offset.x() + source_rect.width(), paint_rect.width()); CHECK_LE(paint_offset.y() + source_rect.height(), paint_rect.height()); tile->updater_resource()->Update( queue, source_rect, dest_offset, tile->partial_update); } } } // This picks a small animated layer to be anything less than one viewport. This // is specifically for page transitions which are viewport-sized layers. The // extra tile of padding is due to these layers being slightly larger than the // viewport in some cases. bool TiledLayer::IsSmallAnimatedLayer() const { if (!draw_transform_is_animating() && !screen_space_transform_is_animating()) return false; gfx::Size viewport_size = layer_tree_host() ? layer_tree_host()->device_viewport_size() : gfx::Size(); gfx::Rect content_rect(content_bounds()); return content_rect.width() <= viewport_size.width() + tiler_->tile_size().width() && content_rect.height() <= viewport_size.height() + tiler_->tile_size().height(); } namespace { // TODO(epenner): Remove this and make this based on distance once distance can // be calculated for offscreen layers. For now, prioritize all small animated // layers after 512 pixels of pre-painting. 
void SetPriorityForTexture(const gfx::Rect& visible_rect, const gfx::Rect& tile_rect, bool draws_to_root, bool is_small_animated_layer, PrioritizedResource* texture) { int priority = PriorityCalculator::LowestPriority(); if (!visible_rect.IsEmpty()) { priority = PriorityCalculator::PriorityFromDistance( visible_rect, tile_rect, draws_to_root); } if (is_small_animated_layer) { priority = PriorityCalculator::max_priority( priority, PriorityCalculator::SmallAnimatedLayerMinPriority()); } if (priority != PriorityCalculator::LowestPriority()) texture->set_request_priority(priority); } } // namespace void TiledLayer::SetTexturePriorities(const PriorityCalculator& priority_calc) { UpdateBounds(); ResetUpdateState(); UpdateScrollPrediction(); if (tiler_->has_empty_bounds()) return; bool draws_to_root = !render_target()->parent(); bool small_animated_layer = IsSmallAnimatedLayer(); // Minimally create the tiles in the desired pre-paint rect. gfx::Rect create_tiles_rect = IdlePaintRect(); if (small_animated_layer) create_tiles_rect = gfx::Rect(content_bounds()); if (!create_tiles_rect.IsEmpty()) { int left, top, right, bottom; tiler_->ContentRectToTileIndices( create_tiles_rect, &left, &top, &right, &bottom); for (int j = top; j <= bottom; ++j) { for (int i = left; i <= right; ++i) { if (!TileAt(i, j)) CreateTile(i, j); } } } // Now update priorities on all tiles we have in the layer, no matter where // they are. for (LayerTilingData::TileMap::const_iterator iter = tiler_->tiles().begin(); iter != tiler_->tiles().end(); ++iter) { UpdatableTile* tile = static_cast<UpdatableTile*>(iter->second); // TODO(enne): This should not ever be null. 
if (!tile) continue; gfx::Rect tile_rect = tiler_->TileRect(tile); SetPriorityForTexture(predicted_visible_rect_, tile_rect, draws_to_root, small_animated_layer, tile->managed_resource()); } } Region TiledLayer::VisibleContentOpaqueRegion() const { if (skips_draw_) return Region(); if (contents_opaque()) return visible_content_rect(); return tiler_->OpaqueRegionInContentRect(visible_content_rect()); } void TiledLayer::ResetUpdateState() { skips_draw_ = false; failed_update_ = false; LayerTilingData::TileMap::const_iterator end = tiler_->tiles().end(); for (LayerTilingData::TileMap::const_iterator iter = tiler_->tiles().begin(); iter != end; ++iter) { UpdatableTile* tile = static_cast<UpdatableTile*>(iter->second); // TODO(enne): This should not ever be null. if (!tile) continue; tile->ResetUpdateState(); } } namespace { gfx::Rect ExpandRectByDelta(const gfx::Rect& rect, const gfx::Vector2d& delta) { int width = rect.width() + std::abs(delta.x()); int height = rect.height() + std::abs(delta.y()); int x = rect.x() + ((delta.x() < 0) ? delta.x() : 0); int y = rect.y() + ((delta.y() < 0) ? delta.y() : 0); return gfx::Rect(x, y, width, height); } } void TiledLayer::UpdateScrollPrediction() { // This scroll prediction is very primitive and should be replaced by a // a recursive calculation on all layers which uses actual scroll/animation // velocities. To insure this doesn't miss-predict, we only use it to predict // the visible_rect if: // - content_bounds() hasn't changed. // - visible_rect.size() hasn't changed. // These two conditions prevent rotations, scales, pinch-zooms etc. where // the prediction would be incorrect. 
gfx::Vector2d delta = visible_content_rect().CenterPoint() - previous_visible_rect_.CenterPoint(); predicted_scroll_ = -delta; predicted_visible_rect_ = visible_content_rect(); if (previous_content_bounds_ == content_bounds() && previous_visible_rect_.size() == visible_content_rect().size()) { // Only expand the visible rect in the major scroll direction, to prevent // massive paints due to diagonal scrolls. gfx::Vector2d major_scroll_delta = (std::abs(delta.x()) > std::abs(delta.y())) ? gfx::Vector2d(delta.x(), 0) : gfx::Vector2d(0, delta.y()); predicted_visible_rect_ = ExpandRectByDelta(visible_content_rect(), major_scroll_delta); // Bound the prediction to prevent unbounded paints, and clamp to content // bounds. gfx::Rect bound = visible_content_rect(); bound.Inset(-tiler_->tile_size().width() * kMaxPredictiveTilesCount, -tiler_->tile_size().height() * kMaxPredictiveTilesCount); bound.Intersect(gfx::Rect(content_bounds())); predicted_visible_rect_.Intersect(bound); } previous_content_bounds_ = content_bounds(); previous_visible_rect_ = visible_content_rect(); } bool TiledLayer::Update(ResourceUpdateQueue* queue, const OcclusionTracker<Layer>* occlusion) { DCHECK(!skips_draw_ && !failed_update_); // Did ResetUpdateState get skipped? // Tiled layer always causes commits to wait for activation, as it does // not support pending trees. SetNextCommitWaitsForActivation(); bool updated = false; { base::AutoReset<bool> ignore_set_needs_commit(&ignore_set_needs_commit_, true); updated |= ContentsScalingLayer::Update(queue, occlusion); UpdateBounds(); } if (tiler_->has_empty_bounds() || !DrawsContent()) return false; // Animation pre-paint. If the layer is small, try to paint it all // immediately whether or not it is occluded, to avoid paint/upload // hiccups while it is animating. 
if (IsSmallAnimatedLayer()) { int left, top, right, bottom; tiler_->ContentRectToTileIndices(gfx::Rect(content_bounds()), &left, &top, &right, &bottom); UpdateTiles(left, top, right, bottom, queue, NULL, &updated); if (updated) return updated; // This was an attempt to paint the entire layer so if we fail it's okay, // just fallback on painting visible etc. below. failed_update_ = false; } if (predicted_visible_rect_.IsEmpty()) return updated; // Visible painting. First occlude visible tiles and paint the non-occluded // tiles. int left, top, right, bottom; tiler_->ContentRectToTileIndices( predicted_visible_rect_, &left, &top, &right, &bottom); MarkOcclusionsAndRequestTextures(left, top, right, bottom, occlusion); skips_draw_ = !UpdateTiles( left, top, right, bottom, queue, occlusion, &updated); if (skips_draw_) tiler_->reset(); if (skips_draw_ || updated) return true; // If we have already painting everything visible. Do some pre-painting while // idle. gfx::Rect idle_paint_content_rect = IdlePaintRect(); if (idle_paint_content_rect.IsEmpty()) return updated; // Prepaint anything that was occluded but inside the layer's visible region. if (!UpdateTiles(left, top, right, bottom, queue, NULL, &updated) || updated) return updated; int prepaint_left, prepaint_top, prepaint_right, prepaint_bottom; tiler_->ContentRectToTileIndices(idle_paint_content_rect, &prepaint_left, &prepaint_top, &prepaint_right, &prepaint_bottom); // Then expand outwards one row/column at a time until we find a dirty // row/column to update. Increment along the major and minor scroll directions // first. gfx::Vector2d delta = -predicted_scroll_; delta = gfx::Vector2d(delta.x() == 0 ? 1 : delta.x(), delta.y() == 0 ? 1 : delta.y()); gfx::Vector2d major_delta = (std::abs(delta.x()) > std::abs(delta.y())) ? gfx::Vector2d(delta.x(), 0) : gfx::Vector2d(0, delta.y()); gfx::Vector2d minor_delta = (std::abs(delta.x()) <= std::abs(delta.y())) ? 
gfx::Vector2d(delta.x(), 0) : gfx::Vector2d(0, delta.y()); gfx::Vector2d deltas[4] = { major_delta, minor_delta, -major_delta, -minor_delta }; for (int i = 0; i < 4; i++) { if (deltas[i].y() > 0) { while (bottom < prepaint_bottom) { ++bottom; if (!UpdateTiles( left, bottom, right, bottom, queue, NULL, &updated) || updated) return updated; } } if (deltas[i].y() < 0) { while (top > prepaint_top) { --top; if (!UpdateTiles( left, top, right, top, queue, NULL, &updated) || updated) return updated; } } if (deltas[i].x() < 0) { while (left > prepaint_left) { --left; if (!UpdateTiles( left, top, left, bottom, queue, NULL, &updated) || updated) return updated; } } if (deltas[i].x() > 0) { while (right < prepaint_right) { ++right; if (!UpdateTiles( right, top, right, bottom, queue, NULL, &updated) || updated) return updated; } } } return updated; } void TiledLayer::OnOutputSurfaceCreated() { // Ensure that all textures are of the right format. for (LayerTilingData::TileMap::const_iterator iter = tiler_->tiles().begin(); iter != tiler_->tiles().end(); ++iter) { UpdatableTile* tile = static_cast<UpdatableTile*>(iter->second); if (!tile) continue; PrioritizedResource* resource = tile->managed_resource(); resource->SetDimensions(resource->size(), texture_format_); } } bool TiledLayer::NeedsIdlePaint() { // Don't trigger more paints if we failed (as we'll just fail again). if (failed_update_ || visible_content_rect().IsEmpty() || tiler_->has_empty_bounds() || !DrawsContent()) return false; gfx::Rect idle_paint_content_rect = IdlePaintRect(); if (idle_paint_content_rect.IsEmpty()) return false; int left, top, right, bottom; tiler_->ContentRectToTileIndices( idle_paint_content_rect, &left, &top, &right, &bottom); for (int j = top; j <= bottom; ++j) { for (int i = left; i <= right; ++i) { UpdatableTile* tile = TileAt(i, j); DCHECK(tile); // Did SetTexturePriorities get skipped? 
if (!tile) continue; bool updated = !tile->update_rect.IsEmpty(); bool can_acquire = tile->managed_resource()->can_acquire_backing_texture(); bool dirty = tile->is_dirty() || !tile->managed_resource()->have_backing_texture(); if (!updated && can_acquire && dirty) return true; } } return false; } gfx::Rect TiledLayer::IdlePaintRect() { // Don't inflate an empty rect. if (visible_content_rect().IsEmpty()) return gfx::Rect(); gfx::Rect prepaint_rect = visible_content_rect(); prepaint_rect.Inset(-tiler_->tile_size().width() * kPrepaintColumns, -tiler_->tile_size().height() * kPrepaintRows); gfx::Rect content_rect(content_bounds()); prepaint_rect.Intersect(content_rect); return prepaint_rect; } } // namespace cc
sencha/chromium-spacewalk
cc/layers/tiled_layer.cc
C++
bsd-3-clause
32,370
(function ($) { $.Redactor.opts.langs['ua'] = { html: 'Код', video: 'Відео', image: 'Зображення', table: 'Таблиця', link: 'Посилання', link_insert: 'Вставити посилання ...', link_edit: 'Edit link', unlink: 'Видалити посилання', formatting: 'Стилі', paragraph: 'Звичайний текст', quote: 'Цитата', code: 'Код', header1: 'Заголовок 1', header2: 'Заголовок 2', header3: 'Заголовок 3', header4: 'Заголовок 4', bold: 'Жирний', italic: 'Похилий', fontcolor: 'Колір тексту', backcolor: 'Заливка тексту', unorderedlist: 'Звичайний список', orderedlist: 'Нумерований список', outdent: 'Зменшити відступ', indent: 'Збільшити відступ', cancel: 'Скасувати', insert: 'Вставити', save: 'Зберегти', _delete: 'Видалити', insert_table: 'Вставити таблицю', insert_row_above: 'Додати рядок зверху', insert_row_below: 'Додати рядок знизу', insert_column_left: 'Додати стовпець ліворуч', insert_column_right: 'Додати стовпець праворуч', delete_column: 'Видалити стовпець', delete_row: 'Видалити рядок', delete_table: 'Видалити таблицю', rows: 'Рядки', columns: 'Стовпці', add_head: 'Додати заголовок', delete_head: 'Видалити заголовок', title: 'Підказка', image_view: 'Завантажити зображення', image_position: 'Обтікання текстом', none: 'ні', left: 'ліворуч', right: 'праворуч', image_web_link: 'Посилання на зображення', text: 'Текст', mailto: 'Ел. пошта', web: 'URL', video_html_code: 'Код відео ролика', file: 'Файл', upload: 'Завантажити', download: 'Завантажити', choose: 'Вибрати', or_choose: 'Або виберіть', drop_file_here: 'Перетягніть файл сюди', align_left: 'По лівому краю', align_center: 'По центру', align_right: 'По правому краю', align_justify: 'Вирівняти текст по ширині', horizontalrule: 'Горизонтальная лінійка', fullscreen: 'На весь екран', deleted: 'Закреслений', anchor: 'Anchor', link_new_tab: 'Open link in new tab', underline: 'Underline', alignment: 'Alignment', filename: 'Name (optional)' }; })( jQuery );
elearninglondon/ematrix_2015
themes/third_party/editor/redactor/lang/ua.js
JavaScript
mit
2,697
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.

using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Text;
using System.Windows;
using System.Windows.Data;

namespace Microsoft.Management.UI.Internal
{
    /// <summary>
    /// Takes a value and returns the largest value which is a integral amount of the second value.
    /// </summary>
    [SuppressMessage("Microsoft.MSInternal", "CA903:InternalNamespaceShouldNotContainPublicTypes")]
    public class IntegralConverter : IMultiValueConverter
    {
        /// <summary>
        /// Takes a value and returns the largest value which is a integral amount of the second value.
        /// </summary>
        /// <param name="values">
        /// The first value is the source. The second is the factor.
        /// </param>
        /// <param name="targetType">The parameter is not used.</param>
        /// <param name="parameter">The padding to subtract from the first value.</param>
        /// <param name="culture">The parameter is not used.</param>
        /// <returns>
        /// The integral value.
        /// </returns>
        public object Convert(object[] values, Type targetType, object parameter, System.Globalization.CultureInfo culture)
        {
            if (values == null)
            {
                throw new ArgumentNullException("values");
            }

            if (values.Length != 2)
            {
                throw new ArgumentException("Two values expected", "values");
            }

            // Bindings that have not resolved yet surface as UnsetValue; propagate it
            // so WPF treats the result as "no value" rather than throwing on the cast.
            bool anyUnset = values[0] == DependencyProperty.UnsetValue ||
                            values[1] == DependencyProperty.UnsetValue;
            if (anyUnset)
            {
                return DependencyProperty.UnsetValue;
            }

            double input = (double)values[0];
            double step = (double)values[1];

            // Optional padding arrives as a string ConverterParameter; parse it
            // invariantly so XAML literals behave the same in every locale.
            double pad = parameter == null
                ? 0
                : double.Parse((string)parameter, CultureInfo.InvariantCulture);

            double padded = input - pad;

            // Too small to hold even one whole step: fall back to the raw input.
            if (padded < step)
            {
                return input;
            }

            // Round down to the nearest whole multiple of the step.
            return padded - (padded % step);
        }

        /// <summary>
        /// This method is not used.
        /// </summary>
        /// <param name="value">The parameter is not used.</param>
        /// <param name="targetTypes">The parameter is not used.</param>
        /// <param name="parameter">The parameter is not used.</param>
        /// <param name="culture">The parameter is not used.</param>
        /// <returns>The parameter is not used.</returns>
        public object[] ConvertBack(object value, Type[] targetTypes, object parameter, System.Globalization.CultureInfo culture)
        {
            throw new NotImplementedException();
        }
    }
}
JamesWTruher/PowerShell-1
src/Microsoft.Management.UI.Internal/ManagementList/Common/IntegralConverter.cs
C#
mit
2,970
// Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
// MIT License. See license.txt

// Extends the global frappe.model with document-synchronisation helpers.
// NOTE(review): relies on the framework globals `locals`, `cur_frm`,
// `frappe.model.new_names` and jQuery (`$`) being defined elsewhere.
$.extend(frappe.model, {
	// Per-doctype, per-name cache of docinfo (attachments, comments, assignments).
	docinfo: {},

	sync: function(r) {
		/* docs:
			extract docs, docinfo (attachments, comments, assignments)
			from incoming request and set in `locals` and `frappe.model.docinfo`
		*/
		var isPlain;
		// Bare payloads (not wrapped in {docs, docinfo}) are treated as docs.
		if(!r.docs && !r.docinfo) r = {docs:r};

		// A single plain object is normalised into a one-element array.
		isPlain = $.isPlainObject(r.docs);
		if(isPlain) r.docs = [r.docs];

		if(r.docs) {
			// NOTE(review): `last_parent_name` is assigned but never read below.
			var last_parent_name = null;

			for(var i=0, l=r.docs.length; i<l; i++) {
				var d = r.docs[i];

				frappe.model.add_to_locals(d);

				// Timestamp used elsewhere to decide staleness of the local copy.
				d.__last_sync_on = new Date();

				// DocType documents also refresh the cached meta definitions.
				if(d.doctype==="DocType") {
					frappe.meta.sync(d);
				}

				// Keep the currently open form pointing at the fresh object.
				if(cur_frm && cur_frm.doctype==d.doctype && cur_frm.docname==d.name) {
					cur_frm.doc = d;
				}

				// Server assigned a real name to a locally created doc:
				// record the rename, notify listeners and drop the stale entry.
				if(d.localname) {
					frappe.model.new_names[d.localname] = d.name;
					$(document).trigger('rename', [d.doctype, d.localname, d.name]);
					delete locals[d.doctype][d.localname];

					// update docinfo to new dict keys
					if(i===0) {
						frappe.model.docinfo[d.doctype][d.name] = frappe.model.docinfo[d.doctype][d.localname];
						frappe.model.docinfo[d.doctype][d.localname] = undefined;
					}
				}
			}
			// A single synced doc means the open form now has unsaved-state bookkeeping.
			if(cur_frm && isPlain) cur_frm.dirty();
		}

		// set docinfo (comments, assign, attachments)
		if(r.docinfo) {
			// Attach the docinfo to the first synced doc, or to the open form's doc.
			if(r.docs) {
				var doc = r.docs[0];
			} else {
				if(cur_frm) var doc = cur_frm.doc;
			}
			if(doc) {
				if(!frappe.model.docinfo[doc.doctype])
					frappe.model.docinfo[doc.doctype] = {};
				frappe.model.docinfo[doc.doctype][doc.name] = r.docinfo;
			}
		}

		return r.docs;
	},

	// Registers `doc` (and, recursively, its child-table rows) in the global
	// `locals` cache, assigning a local name to unsaved documents.
	add_to_locals: function(doc) {
		if(!locals[doc.doctype])
			locals[doc.doctype] = {};

		if(!doc.name && doc.__islocal) {
			// get name (local if required)
			if(!doc.parentfield) frappe.model.clear_doc(doc);
			doc.name = frappe.model.get_new_name(doc.doctype);
			if(!doc.parentfield) frappe.provide("frappe.model.docinfo." + doc.doctype + "." + doc.name);
		}

		locals[doc.doctype][doc.name] = doc;

		// add child docs to locals
		if(!doc.parentfield) {
			for(var i in doc) {
				var value = doc[i];
				// Any array-valued field is treated as a child table.
				if($.isArray(value)) {
					for (var x=0, y=value.length; x < y; x++) {
						var d = value[x];
						// Child rows inherit the parent's name if unset.
						if(!d.parent) d.parent = doc.name;
						frappe.model.add_to_locals(d);
					}
				}
			}
		}
	}
});
bcornwellmott/frappe
frappe/public/js/frappe/model/sync.js
JavaScript
mit
2,359
// Redux action-type identifiers for the counter module. Exported as named
// constants so action creators and reducers share a single definition.
export const INCREMENT_COUNTER = 'INCREMENT_COUNTER'
export const DECREMENT_COUNTER = 'DECREMENT_COUNTER'
niqdev/react-redux-bootstrap4
src/modules/counter/CounterActionTypes.js
JavaScript
mit
106
#include <iostream>
#include <cstdio>

using namespace std;

// Minimal online-judge scaffold: prints a newline via iostream and another
// via stdio, then exits successfully.
int main() {
	cout << "\n";
	// Bug fix: the original called printf('\n'), passing a char where printf
	// expects a const char* format string — that is ill-formed in C++ and
	// fails to compile. A string literal is the correct argument.
	printf("\n");
	// your code goes here
	return 0;
}
aqfaridi/Code-Online-Judge
web/env/Main1122/Main1122.cpp
C++
mit
152
package org.knowm.xchange.bitmarket; import static org.assertj.core.api.Assertions.assertThat; import java.math.BigDecimal; import java.util.List; import java.util.Map; import org.knowm.xchange.bitmarket.dto.account.BitMarketBalance; import org.knowm.xchange.bitmarket.dto.marketdata.BitMarketOrderBook; import org.knowm.xchange.bitmarket.dto.marketdata.BitMarketTicker; import org.knowm.xchange.bitmarket.dto.marketdata.BitMarketTrade; import org.knowm.xchange.bitmarket.dto.trade.BitMarketOrder; import org.knowm.xchange.dto.account.Balance; import org.knowm.xchange.dto.marketdata.OrderBook; import org.knowm.xchange.dto.marketdata.Ticker; import org.knowm.xchange.dto.marketdata.Trade; import org.knowm.xchange.dto.trade.LimitOrder; import org.knowm.xchange.dto.trade.UserTrade; public class BitMarketAssert { public static void assertEquals(Balance o1, Balance o2) { assertThat(o1.getCurrency()).isEqualTo(o2.getCurrency()); assertThat(o1.getTotal()).isEqualTo(o2.getTotal()); assertThat(o1.getAvailable()).isEqualTo(o2.getAvailable()); assertThat(o1.getFrozen()).isEqualTo(o2.getFrozen()); } public static void assertEquals(Trade o1, Trade o2) { assertThat(o1.getType()).isEqualTo(o2.getType()); assertThat(o1.getOriginalAmount()).isEqualTo(o2.getOriginalAmount()); assertThat(o1.getCurrencyPair()).isEqualTo(o2.getCurrencyPair()); assertThat(o1.getPrice()).isEqualTo(o2.getPrice()); assertThat(o1.getTimestamp()).isEqualTo(o2.getTimestamp()); assertThat(o1.getId()).isEqualTo(o2.getId()); } public static void assertEquals(UserTrade o1, UserTrade o2) { assertThat(o1.getType()).isEqualTo(o2.getType()); assertThat(o1.getOriginalAmount()).isEqualTo(o2.getOriginalAmount()); assertThat(o1.getCurrencyPair()).isEqualTo(o2.getCurrencyPair()); assertThat(o1.getPrice()).isEqualTo(o2.getPrice()); assertThat(o1.getTimestamp()).isEqualTo(o2.getTimestamp()); assertThat(o1.getId()).isEqualTo(o2.getId()); assertThat(o1.getOrderId()).isEqualTo(o2.getOrderId()); 
assertThat(o1.getFeeAmount()).isEqualTo(o2.getFeeAmount()); assertThat(o1.getFeeCurrency()).isEqualTo(o2.getFeeCurrency()); } public static void assertEquals(LimitOrder o1, LimitOrder o2) { assertThat(o1.getId()).isEqualTo(o2.getId()); assertThat(o1.getType()).isEqualTo(o2.getType()); assertThat(o1.getCurrencyPair()).isEqualTo(o2.getCurrencyPair()); assertThat(o1.getLimitPrice()).isEqualTo(o2.getLimitPrice()); assertThat(o1.getOriginalAmount()).isEqualTo(o2.getOriginalAmount()); assertThat(o1.getTimestamp()).isEqualTo(o2.getTimestamp()); } public static void assertEqualsWithoutTimestamp(LimitOrder o1, LimitOrder o2) { assertThat(o1.getId()).isEqualTo(o2.getId()); assertThat(o1.getType()).isEqualTo(o2.getType()); assertThat(o1.getCurrencyPair()).isEqualTo(o2.getCurrencyPair()); assertThat(o1.getLimitPrice()).isEqualTo(o2.getLimitPrice()); assertThat(o1.getOriginalAmount()).isEqualTo(o2.getOriginalAmount()); } public static void assertEquals(Ticker o1, Ticker o2) { assertThat(o1.getBid()).isEqualTo(o2.getBid()); assertThat(o1.getAsk()).isEqualTo(o2.getAsk()); assertThat(o1.getCurrencyPair()).isEqualTo(o2.getCurrencyPair()); assertThat(o1.getHigh()).isEqualTo(o2.getHigh()); assertThat(o1.getLast()).isEqualTo(o2.getLast()); assertThat(o1.getLow()).isEqualTo(o2.getLow()); assertThat(o1.getTimestamp()).isEqualTo(o2.getTimestamp()); assertThat(o1.getVolume()).isEqualTo(o2.getVolume()); assertThat(o1.getVwap()).isEqualTo(o2.getVwap()); } public static void assertEquals(OrderBook o1, OrderBook o2) { assertThat(o1.getTimeStamp()).isEqualTo(o2.getTimeStamp()); assertEquals(o1.getAsks(), o2.getAsks()); assertEquals(o1.getBids(), o2.getBids()); } public static void assertEquals(List<LimitOrder> o1, List<LimitOrder> o2) { assertThat(o1.size()).isEqualTo(o2.size()); for (int i = 0; i < o1.size(); i++) { assertEqualsWithoutTimestamp(o1.get(i), o2.get(i)); } } public static void assertEquals(BitMarketOrder o1, BitMarketOrder o2) { assertThat(o1.getId()).isEqualTo(o2.getId()); 
assertThat(o1.getMarket()).isEqualTo(o2.getMarket()); assertThat(o1.getAmount()).isEqualTo(o2.getAmount()); assertThat(o1.getRate()).isEqualTo(o2.getRate()); assertThat(o1.getFiat()).isEqualTo(o2.getFiat()); assertThat(o1.getType()).isEqualTo(o2.getType()); assertThat(o1.getTime()).isEqualTo(o2.getTime()); } public static void assertEquals(BitMarketOrderBook o1, BitMarketOrderBook o2) { assertEquals(o1.getAsks(), o2.getAsks()); assertEquals(o1.getBids(), o2.getBids()); assertThat(o1.toString()).isEqualTo(o2.toString()); } public static void assertEquals(BitMarketTicker o1, BitMarketTicker o2) { assertThat(o1.getAsk()).isEqualTo(o2.getAsk()); assertThat(o1.getBid()).isEqualTo(o2.getBid()); assertThat(o1.getLast()).isEqualTo(o2.getLast()); assertThat(o1.getLow()).isEqualTo(o2.getLow()); assertThat(o1.getHigh()).isEqualTo(o2.getHigh()); assertThat(o1.getVwap()).isEqualTo(o2.getVwap()); assertThat(o1.getVolume()).isEqualTo(o2.getVolume()); assertThat(o1.toString()).isEqualTo(o2.toString()); } public static void assertEquals(BitMarketTrade o1, BitMarketTrade o2) { assertThat(o1.getTid()).isEqualTo(o2.getTid()); assertThat(o1.getPrice()).isEqualTo(o2.getPrice()); assertThat(o1.getAmount()).isEqualTo(o2.getAmount()); assertThat(o1.getDate()).isEqualTo(o2.getDate()); assertThat(o1.toString()).isEqualTo(o2.toString()); } public static void assertEquals(BitMarketBalance o1, BitMarketBalance o2) { assertEquals(o1.getAvailable(), o2.getAvailable()); assertEquals(o1.getBlocked(), o2.getBlocked()); } private static void assertEquals(Map<String, BigDecimal> o1, Map<String, BigDecimal> o2) { assertThat(o1.size()).isEqualTo(o2.size()); for (String key : o1.keySet()) { assertThat(o1.get(key)).isEqualTo(o2.get(key)); } } private static void assertEquals(BigDecimal[][] o1, BigDecimal[][] o2) { assertThat(o1.length).isEqualTo(o2.length); for (int i = 0; i < o1.length; i++) { assertThat(o1[i].length).isEqualTo(o2[i].length); for (int j = 0; j < o1[i].length; j++) { 
assertThat(o1[i][j]).isEqualTo(o2[i][j]); } } } }
chrisrico/XChange
xchange-bitmarket/src/test/java/org/knowm/xchange/bitmarket/BitMarketAssert.java
Java
mit
6,414
package org.knowm.xchange.test.exx;

import java.io.IOException;

import org.knowm.xchange.Exchange;
import org.knowm.xchange.ExchangeFactory;
import org.knowm.xchange.ExchangeSpecification;
import org.knowm.xchange.exx.EXXExchange;
import org.knowm.xchange.service.account.AccountService;

/**
 * Manual integration check for the EXX account service: builds an exchange
 * pointed at the live trade.exx.com endpoint and prints the account info and
 * wallets to stdout.
 *
 * <p>kevinobamatheus@gmail.com
 *
 * @author kevingates
 */
public class AccountServiceIntegration {

  public static void main(String[] args) {
    try {
      getAssetInfo();
    } catch (IOException e) {
      e.printStackTrace();
    }
  }

  /** Connects to EXX with the configured credentials and dumps account data. */
  private static void getAssetInfo() throws IOException {
    // NOTE(review): credentials are deliberately blank; real API keys must be
    // supplied before this can succeed against the live endpoint.
    String apiKey = "";
    String secretKey = "";

    Exchange exchange = ExchangeFactory.INSTANCE.createExchange(EXXExchange.class.getName());

    ExchangeSpecification exchangeSpecification = exchange.getDefaultExchangeSpecification();
    // Override the SSL URI so requests go to the production trading host.
    exchangeSpecification.setSslUri("https://trade.exx.com");
    exchangeSpecification.setApiKey(apiKey);
    exchangeSpecification.setSecretKey(secretKey);

    exchange.applySpecification(exchangeSpecification);

    AccountService accountService = exchange.getAccountService();

    try {
      System.out.println("accountInfo");
      // NOTE(review): getAccountInfo() is called twice, so two round trips
      // are made to the remote service — confirm this is intentional.
      System.out.println(accountService.getAccountInfo());
      System.out.println(accountService.getAccountInfo().getWallets());
    } catch (IOException e) {
      e.printStackTrace();
    }
  }
}
chrisrico/XChange
xchange-exx/src/test/java/org/knowm/xchange/test/exx/AccountServiceIntegration.java
Java
mit
1,369
from django.shortcuts import render


def home(request):
    """Serve the site landing page from the ``home.html`` template."""
    context = {}
    return render(request, 'home.html', context)
Traviskn/django_starter_template
{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/views.py
Python
mit
101
// Time Complexity: O(n^2) // Space Complexity: O(1) class Solution { public: vector<vector<int> > threeSum(vector<int> &num) { vector<vector<int> > ans; const int target = 0; sort(num.begin(), num.end()); auto last = num.rend(); for(auto a = num.rbegin(); a < prev(last, 2); ++a) { if(a > num.rbegin() && *a == *(a - 1)) continue; auto b = next(a); auto c = prev(last); while(b < c) { if(b > next(a) && *b == *(b - 1)) { ++b; } else if(c < prev(last) && *c == *(c + 1)) { --c; } else { const int sum = *a + *b + *c; if(sum < target) --c; else if(sum > target) ++b; else { ans.push_back({ *c, *b, *a}); ++b; --c; } } } } return ans; } };
tudennis/LeetCode---kamyu104-11-24-2015
C++/threeSum2.cpp
C++
mit
1,277
package gueei.binding;

import java.util.Collection;
import java.util.ArrayList;

/**
 * An observable whose value is derived from one or more other observables.
 *
 * <p>Subscribes itself to every dependent; whenever any of them changes, this
 * observable is marked dirty and notifies its own observers. The derived value
 * is recomputed lazily on the next {@link #get()} via
 * {@link #calculateValue(Object...)}, which subclasses implement.
 */
public abstract class DependentObservable<T> extends Observable<T> implements Observer {
    /** The observables this value is calculated from, in registration order. */
    protected IObservable<?>[] mDependents;

    public DependentObservable(Class<T> type, IObservable<?>... dependents) {
        super(type);
        for (IObservable<?> o : dependents) {
            o.subscribe(this);
        }
        this.mDependents = dependents;
        // Mark dirty and notify so the first get() computes the initial value.
        this.onPropertyChanged(null, new ArrayList<Object>());
    }

    // This is provided in case the constructor can't be used.
    // Not intended for normal usage
    public void addDependents(IObservable<?>... dependents) {
        // Grow the array with arraycopy instead of hand-rolled index loops.
        IObservable<?>[] merged = new IObservable<?>[mDependents.length + dependents.length];
        System.arraycopy(mDependents, 0, merged, 0, mDependents.length);
        System.arraycopy(dependents, 0, merged, mDependents.length, dependents.length);
        for (IObservable<?> d : dependents) {
            d.subscribe(this);
        }
        mDependents = merged;
        this.onPropertyChanged(null, new ArrayList<Object>());
    }

    /**
     * Computes the derived value from the current values of all dependents.
     *
     * @param args the dependents' values, in the same order as {@code mDependents}
     * @return the new derived value
     * @throws Exception subclasses may fail; the failure is logged in get()
     */
    public abstract T calculateValue(Object... args) throws Exception;

    public final void onPropertyChanged(IObservable<?> prop, Collection<Object> initiators) {
        // Defer recomputation: just flag dirty and propagate the change.
        dirty = true;
        initiators.add(this);
        this.notifyChanged(initiators);
    }

    private boolean dirty = false;

    @Override
    public T get() {
        if (dirty) {
            // Snapshot all dependent values and recompute.
            int len = mDependents.length;
            Object[] values = new Object[len];
            for (int i = 0; i < len; i++) {
                values[i] = mDependents[i].get();
            }
            try {
                T value = this.calculateValue(values);
                // setWithoutNotify avoids a notification loop back into ourselves.
                this.setWithoutNotify(value);
            } catch (Exception e) {
                BindingLog.exception("DependentObservable.CalculateValue()", e);
            }
            dirty = false;
        }
        return super.get();
    }

    public boolean isDirty() {
        return dirty;
    }

    public void setDirty(boolean dirty) {
        this.dirty = dirty;
    }
}
yangqiang1223/AndroidBinding
Core/AndroidBinding/src/gueei/binding/DependentObservable.java
Java
mit
1,894
const expect = require('chai').expect;
const runner = require('../runner');

describe('nasm runner', () => {
  describe('.run', () => {
    // Raw syscall program: no C runtime, writes 25 bytes to stdout and exits.
    it('should handle basic code evaluation (no libc)', (done) => {
      const source = [
        ' global _start',
        ' section .text',
        '_start:',
        ' mov rax, 1',
        ' mov rdi, 1',
        ' mov rsi, message',
        ' mov rdx, 25',
        ' syscall',
        ' mov eax, 60',
        ' xor rdi, rdi',
        ' syscall',
        'message:',
        'db "Hello, Netwide Assembler!", 25'
      ].join('\n');
      runner.run({ language: 'nasm', code: source }, (buffer) => {
        expect(buffer.stdout).to.equal('Hello, Netwide Assembler!');
        done();
      });
    });

    // Same idea but linked against libc, printing via puts (adds the newline).
    it('should handle basic code evaluation (with libc)', (done) => {
      const source = [
        ' global main',
        ' extern puts',
        ' section .text',
        'main:',
        ' mov rdi, message',
        ' call puts',
        ' ret',
        'message:',
        'db "Netwide Assembler together with LIBC! Let\'s Port Codewars From Rails to THIS! \\m/", 0'
      ].join('\n');
      runner.run({ language: 'nasm', code: source }, (buffer) => {
        expect(buffer.stdout).to.equal('Netwide Assembler together with LIBC! Let\'s Port Codewars From Rails to THIS! \\m/\n');
        done();
      });
    });
  });
});
Codewars/codewars-runner
test/runners/nasm_spec.js
JavaScript
mit
1,513
/** * error handling middleware loosely based off of the connect/errorHandler code. This handler chooses * to render errors using Jade / Express instead of the manual templating used by the connect middleware * sample. This may or may not be a good idea :-) * @param options {object} array of options **/ exports = module.exports = function errorHandler(options) { options = options || {}; // defaults var showStack = options.showStack || options.stack , showMessage = options.showMessage || options.message , dumpExceptions = options.dumpExceptions || options.dump , formatUrl = options.formatUrl; return function errorHandler(err, req, res, next) { res.statusCode = 500; if(dumpExceptions) console.error(err.stack); var app = res.app; if(err instanceof exports.NotFound) { res.render('errors/404', { locals: { title: '404 - Not Found' }, status: 404 }); } else { res.render('errors/500', { locals: { title: 'The Server Encountered an Error' , error: showStack ? err : undefined }, status: 500 }); } }; }; exports.NotFound = function(msg) { this.name = 'NotFound'; Error.call(this, msg); Error.captureStackTrace(this, arguments.callee); }
brendan1mcmanus/PennApps-Fall-2015
middleware/errorHandler.js
JavaScript
mit
1,381