text
stringlengths
2
99.9k
meta
dict
/* *********************************************************************** *
 * project: org.matsim.*
 * CompressedRoute.java
 *                                                                         *
 * *********************************************************************** *
 *                                                                         *
 * copyright       : (C) 2008 by the members listed in the COPYING,        *
 *                   LICENSE and WARRANTY file.                            *
 * email           : info at matsim dot org                                *
 *                                                                         *
 * *********************************************************************** *
 *                                                                         *
 *   This program is free software; you can redistribute it and/or modify  *
 *   it under the terms of the GNU General Public License as published by  *
 *   the Free Software Foundation; either version 2 of the License, or     *
 *   (at your option) any later version.                                   *
 *   See also COPYING, LICENSE and WARRANTY file                           *
 *                                                                         *
 * *********************************************************************** */

package org.matsim.core.population.routes;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.apache.log4j.Logger;
import org.matsim.api.core.v01.Id;
import org.matsim.api.core.v01.network.Link;
import org.matsim.api.core.v01.network.Network;
import org.matsim.core.network.NetworkUtils;
import org.matsim.vehicles.Vehicle;

/**
 * Implementation of {@link NetworkRoute} that tries to minimize the amount of
 * data needed to be stored for each route. This will give some memory savings,
 * allowing for larger scenarios (=more agents), especially on detailed
 * networks, but is likely a bit slower due to the more complex access of the
 * route information internally.
 *
 * <p>Description of the compression algorithm:<br />
 * Given a map containing for each link a defined successor (subsequentLinks-map), this implementation
 * does not store the links in its route-information that are the same as the successor defined in the
 * subsequentLinks-map.<br />
 * Given a startLinkId, endLinkId and a list of linkIds to be stored, this implementation stores
 * first the startLinkId. Next, if the successor of the startLinkId is different from the first linkId
 * in the list, this linkId is stored, otherwise not. Then the successor of that linkId is compared to
 * the next linkId in the list. If the successor is different, the linkId is stored, otherwise not.
 * This procedure is repeated until the complete list of linkIds is processed.
 * </p>
 *
 * @author mrieser
 */
final class CompressedNetworkRouteImpl extends AbstractRoute implements NetworkRoute, Cloneable {

	private final static Logger log = Logger.getLogger(CompressedNetworkRouteImpl.class);

	// Only the linkIds that differ from the default successor in subsequentLinks
	// are stored here; the full route is reconstructed on demand in getLinkIds().
	private ArrayList<Id<Link>> route = new ArrayList<Id<Link>>(0);
	// Shared map: for each link, its "default" successor link used for compression.
	private final Map<Id<Link>, Id<Link>> subsequentLinks;
	private double travelCost = Double.NaN;
	/** number of links in uncompressed route */
	private int uncompressedLength = -1;
	// modCount/routeModCountState guard against using a compressed route whose
	// start/end was modified after setLinkIds() stored it.
	private int modCount = 0;
	private int routeModCountState = 0;
	private Id<Vehicle> vehicleId = null;
	private final Network network;

	/**
	 * @param startLinkId first link of the route
	 * @param endLinkId last link of the route
	 * @param network network used to resolve link ids during decompression
	 * @param subsequentLinks map of default successors used for compression;
	 *        kept by reference and shared between routes
	 */
	public CompressedNetworkRouteImpl(final Id<Link> startLinkId, final Id<Link> endLinkId, Network network, final Map<Id<Link>, Id<Link>> subsequentLinks) {
		super(startLinkId, endLinkId);
		this.network = network;
		this.subsequentLinks = subsequentLinks;
	}

	@Override
	public CompressedNetworkRouteImpl clone() {
		CompressedNetworkRouteImpl cloned = (CompressedNetworkRouteImpl) super.clone();
		ArrayList<Id<Link>> tmpRoute = cloned.route;
		cloned.route = new ArrayList<Id<Link>>(tmpRoute); // deep copy
		return cloned;
	}

	/**
	 * Reconstructs and returns the full (uncompressed) list of link ids
	 * between start and end link, excluding both of them.
	 * Returns an empty list when the route was never initialized, was
	 * modified after storing, or start equals end.
	 */
	@Override
	public List<Id<Link>> getLinkIds() {
		if (this.uncompressedLength < 0) { // it seems the route never got initialized correctly
			return new ArrayList<Id<Link>>(0);
		}
		ArrayList<Id<Link>> links = new ArrayList<Id<Link>>(this.uncompressedLength);
		if (this.modCount != this.routeModCountState) {
			log.error("Route was modified after storing it! modCount=" + this.modCount + " routeModCount=" + this.routeModCountState);
			return links;
		}
		Id<Link> previousLinkId = getStartLinkId();
		Id<Link> endLinkId = getEndLinkId();
		if ((previousLinkId == null) || (endLinkId == null)) {
			return links;
		}
		if (previousLinkId.equals(endLinkId)) {
			return links;
		}
		// expand each stored "exception" link by following default successors up to it
		for (Id<Link> linkId : this.route) {
			getLinksTillLink(links, linkId, previousLinkId);
			links.add(linkId);
			previousLinkId = linkId;
		}
		getLinksTillLink(links, endLinkId, previousLinkId);
		return links;
	}

	/**
	 * Follows the chain of default successors from {@code startLinkId} and
	 * appends every link to {@code links} until a link is reached whose toNode
	 * is the fromNode of {@code nextLinkId} (i.e. until {@code nextLinkId}
	 * could directly follow). Neither boundary link itself is added.
	 * NOTE(review): loops forever / NPEs if the successor chain never reaches
	 * nextLinkId or a link is missing from the map — relies on a consistent
	 * subsequentLinks map; confirm against the code building that map.
	 */
	private void getLinksTillLink(final List<Id<Link>> links, final Id<Link> nextLinkId, final Id<Link> startLinkId) {
		Id<Link> linkId = startLinkId;
		Link nextLink = this.network.getLinks().get(nextLinkId);
		while (true) { // loop until we hit "return;"
			Link link = this.network.getLinks().get(linkId);
			if (link.getToNode() == nextLink.getFromNode()) {
				return;
			}
			linkId = this.subsequentLinks.get(linkId);
			links.add(linkId);
		}
	}

//	@Override
//	public void setEndLinkId(final Id<Link> linkId) {
//		this.modCount++;
//		super.setEndLinkId(linkId);
//	}
//
//	@Override
//	public void setStartLinkId(final Id<Link> linkId) {
//		this.modCount++;
//		super.setStartLinkId(linkId);
//	}
	// AbstractRoute now implements Lockable and this concern is addressed via that feature. kai, sep/17

	/**
	 * Extracts the sub-route between {@code fromLinkId} and {@code toLinkId}
	 * (both inclusive as start/end of the new route).
	 *
	 * @throws IllegalArgumentException if either link is not part of this route
	 */
	@Override
	public NetworkRoute getSubRoute(Id<Link> fromLinkId, Id<Link> toLinkId) {
		List<Id<Link>> newLinkIds = new ArrayList<Id<Link>>(10);
		boolean foundFromLink = fromLinkId.equals(this.getStartLinkId());
		boolean collectLinks = foundFromLink;
		boolean equalFromTo = fromLinkId.equals(toLinkId);
		if (!foundFromLink || !equalFromTo) {
			for (Id<Link> linkId : getLinkIds()) {
				if (linkId.equals(toLinkId)) {
					collectLinks = false;
					if (equalFromTo) {
						foundFromLink = true;
					}
					if (foundFromLink) {
						// only break if from is also found, as endLink could be part of a loop/circle
						break; // we found start and end, stop looping
					}
				}
				if (collectLinks) {
					newLinkIds.add(linkId);
				}
				if (linkId.equals(fromLinkId)) {
					foundFromLink = true;
					collectLinks = true; // we found the start, start collecting
					newLinkIds.clear(); // in case of a loop, cut it out
				}
			}
			if (!foundFromLink) {
				foundFromLink = fromLinkId.equals(this.getEndLinkId());
				collectLinks = foundFromLink;
			}
			if (!foundFromLink) {
				throw new IllegalArgumentException("fromLinkId is not part of this route.");
			}
			if ((collectLinks) && (toLinkId.equals(this.getEndLinkId()))) {
				collectLinks = false;
			}
			if (collectLinks) {
				// still collecting means we never saw toLinkId (and it is not the end link)
				throw new IllegalArgumentException("toLinkId is not part of this route.");
			}
		}
		NetworkRoute subRoute = new CompressedNetworkRouteImpl(fromLinkId, toLinkId, this.network, this.subsequentLinks);
		subRoute.setLinkIds(fromLinkId, newLinkIds, toLinkId);
		return subRoute;
	}

	@Override
	public double getTravelCost() {
		return this.travelCost;
	}

	@Override
	public void setTravelCost(final double travelCost) {
		this.travelCost = travelCost;
	}

	/**
	 * Stores the given route in compressed form: only linkIds that differ from
	 * the default successor of their predecessor are kept. Locks the route
	 * afterwards (see Lockable in AbstractRoute).
	 */
	@Override
	public void setLinkIds(final Id<Link> startLinkId, final List<Id<Link>> srcRoute, final Id<Link> endLinkId) {
		this.route.clear();
		setStartLinkId(startLinkId);
		setEndLinkId(endLinkId);
		this.routeModCountState = this.modCount;
		if ((srcRoute == null) || (srcRoute.size() == 0)) {
			this.uncompressedLength = 0;
			return;
		}
		Id<Link> previousLinkId = startLinkId;
		for (Id<Link> linkId : srcRoute) {
			// store only the links that deviate from the default successor
			if (!this.subsequentLinks.get(previousLinkId).equals(linkId)) {
				this.route.add(linkId);
			}
			previousLinkId = linkId;
		}
		this.route.trimToSize();
		this.uncompressedLength = srcRoute.size();
//		System.out.println("uncompressed size: \t" + this.uncompressedLength + "\tcompressed size: \t" + this.route.size());
		this.setLocked() ;
	}

	@Override
	public Id<Vehicle> getVehicleId() {
		return this.vehicleId;
	}

	@Override
	public void setVehicleId(final Id<Vehicle> vehicleId) {
		this.vehicleId = vehicleId;
	}

	@Override
	public String getRouteType() {
		return "links";
	}

	/**
	 * Serializes the route as a blank-separated list of link ids
	 * (start link, intermediate links, end link).
	 */
	@Override
	public String getRouteDescription() {
		StringBuilder desc = new StringBuilder(100);
		desc.append(this.getStartLinkId().toString());
		for (Id<Link> linkId : this.getLinkIds()) {
			desc.append(" ");
			desc.append(linkId.toString());
		}
		// If the start link equals the end link, additionally check whether it is a round trip.
		if (!this.getEndLinkId().equals(this.getStartLinkId()) || this.getLinkIds().size() > 0) {
			desc.append(" ");
			desc.append(this.getEndLinkId().toString());
		}
		return desc.toString();
	}

	/**
	 * Parses a blank-separated list of link ids: the first id becomes the start
	 * link, the last id the end link, the rest the route in between.
	 */
	@Override
	public void setRouteDescription(String routeDescription) {
		List<Id<Link>> linkIds = NetworkUtils.getLinkIds(routeDescription);
		Id<Link> startLinkId = getStartLinkId();
		Id<Link> endLinkId = getEndLinkId();
		if (linkIds.size() > 0) {
			startLinkId = linkIds.remove(0);
			setStartLinkId(startLinkId);
		}
		if (linkIds.size() > 0) {
			endLinkId = linkIds.remove(linkIds.size() - 1);
			setEndLinkId(endLinkId);
		}
		this.setLinkIds(startLinkId, linkIds, endLinkId);
	}

}
{ "pile_set_name": "Github" }
/*
Copyright (c) 2014-2015, ArrayFire
Copyright (c) 2015 Gábor Mező aka unbornchikken (gabor.mezo@outlook.com)
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright notice, this
   list of conditions and the following disclaimer.
 * Redistributions in binary form must reproduce the above copyright notice,
   this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.
 * Neither the name of the ArrayFire nor the names of its contributors may be
   used to endorse or promote products derived from this software without
   specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

"use strict";

// Constants naming where array data resides. The `af`-prefixed names are
// aliases with the same numeric values as `device`/`host`.
const source = {
    device: 0,
    host: 1,
    afDevice: 0,
    afHost: 1
};

module.exports = source;
{ "pile_set_name": "Github" }
/////////////////////////////////////////////////////////////////////////////// // modifier.hpp // // Copyright 2008 Eric Niebler. Distributed under the Boost // Software License, Version 1.0. (See accompanying file // LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) #ifndef BOOST_XPRESSIVE_DETAIL_STATIC_MODIFIER_HPP_EAN_10_04_2005 #define BOOST_XPRESSIVE_DETAIL_STATIC_MODIFIER_HPP_EAN_10_04_2005 // MS compatible compilers support #pragma once #if defined(_MSC_VER) # pragma once # pragma warning(push) # pragma warning(disable : 4510) // default constructor could not be generated # pragma warning(disable : 4610) // user defined constructor required #endif #include <boost/xpressive/detail/detail_fwd.hpp> #include <boost/proto/traits.hpp> #include <boost/xpressive/regex_constants.hpp> namespace boost { namespace xpressive { namespace detail { /////////////////////////////////////////////////////////////////////////////// // modifier template<typename Modifier> struct modifier_op { typedef regex_constants::syntax_option_type opt_type; template<typename Expr> struct apply { typedef typename proto::binary_expr< modifier_tag , typename proto::terminal<Modifier>::type , typename proto::result_of::as_child<Expr const>::type >::type type; }; template<typename Expr> typename apply<Expr>::type const operator ()(Expr const &expr) const { typename apply<Expr>::type that = {{this->mod_}, proto::as_child(expr)}; return that; } operator opt_type() const { return this->opt_; } Modifier mod_; opt_type opt_; }; }}} #if defined(_MSC_VER) # pragma warning(pop) #endif #endif
{ "pile_set_name": "Github" }
485 260 518 290 1 180 253 211 289 3 150 74 196 111 5 372 127 414 156 6
{ "pile_set_name": "Github" }
function c = min(a,b)
% MIN for adiff objects. This selects the value of a which is a minimum.
% Both min(a) and min(a,b) will work. min(a,[],dim) is meaningless because
% the adiff object is a column vector; use min(a) instead.
%
% The returned adiff carries the derivative rows of whichever input
% attained the minimum; the logical mask `ok` zeroes out the rows of the
% non-selected argument via rowmult.

if nargin==1
    % One-argument form: minimum element of the adiff column vector,
    % together with the derivative row of that element.
    [y,i] = min(a.x);
    c = adiff(y,a.dx(i,:),a.root);
else
    switch [class(a),class(b)]
        case 'doubleadiff'
            % plain double vs adiff: derivative is nonzero only where b wins
            ok = b.x<a;
            c = adiff(min(a,b.x), rowmult(ok,b.dx), b.root);
        case 'adiffdouble'
            ok = a.x<b;
            c = adiff(min(a.x,b), rowmult(ok,a.dx), a.root);
        case 'adiffadiff'
            checkroot(a,b);
            % Broadcast a scalar derivative block so both row counts agree.
            if size(a.dx,1)~=size(b.dx,1)
                if size(a.dx,1)==1
                    a.dx = repmat(a.dx,size(b.dx,1),1);
                elseif size(b.dx,1)==1
                    b.dx = repmat(b.dx,size(a.dx,1),1);
                end
            end
            % On ties (ok==0) the derivative of b is used.
            ok = a.x<b.x;
            c = adiff(min(a.x,b.x), rowmult(ok,a.dx)+rowmult(1-ok,b.dx), a.root);
        otherwise
            % Fixed: message previously said "max" (copy-paste from max.m).
            error(['Can''t compute min of ',class(a),' and ',class(b)]);
    end
end
{ "pile_set_name": "Github" }
/*
 * OMAP4 specific common source file.
 *
 * Copyright (C) 2010 Texas Instruments, Inc.
 * Author:
 *	Santosh Shilimkar <santosh.shilimkar@ti.com>
 *
 *
 * This program is free software,you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#include <linux/kernel.h>
#include <linux/init.h>
#include <linux/io.h>
#include <linux/platform_device.h>
#include <linux/memblock.h>

#include <asm/hardware/gic.h>
#include <asm/hardware/cache-l2x0.h>
#include <asm/mach/map.h>
#include <asm/memblock.h>

#include <plat/irqs.h>
#include <plat/sram.h>
#include <plat/omap-secure.h>

#include <mach/hardware.h>
#include <mach/omap-wakeupgen.h>

#include "common.h"
#include "omap4-sar-layout.h"
#include <linux/export.h>

#ifdef CONFIG_CACHE_L2X0
static void __iomem *l2cache_base;
#endif

static void __iomem *sar_ram_base;

#ifdef CONFIG_OMAP4_ERRATA_I688
/* Used to implement memory barrier on DRAM path */
#define OMAP4_DRAM_BARRIER_VA 0xfe600000

void __iomem *dram_sync, *sram_sync;

static phys_addr_t paddr;
static u32 size;

/*
 * Force outstanding writes on both the DRAM and SRAM paths to complete
 * (errata i688 workaround): read-back-and-write on each sync region,
 * then an isb.
 */
void omap_bus_sync(void)
{
	if (dram_sync && sram_sync) {
		writel_relaxed(readl_relaxed(dram_sync), dram_sync);
		writel_relaxed(readl_relaxed(sram_sync), sram_sync);
		isb();
	}
}
EXPORT_SYMBOL(omap_bus_sync);

/* Steal one page physical memory for barrier implementation */
int __init omap_barrier_reserve_memblock(void)
{
	size = ALIGN(PAGE_SIZE, SZ_1M);
	paddr = arm_memblock_steal(size, SZ_1M);

	return 0;
}

/*
 * Map the stolen DRAM page as strongly-ordered memory at a fixed VA and
 * record the DRAM/SRAM sync addresses used by omap_bus_sync().
 */
void __init omap_barriers_init(void)
{
	struct map_desc dram_io_desc[1];

	dram_io_desc[0].virtual = OMAP4_DRAM_BARRIER_VA;
	dram_io_desc[0].pfn = __phys_to_pfn(paddr);
	dram_io_desc[0].length = size;
	dram_io_desc[0].type = MT_MEMORY_SO;
	iotable_init(dram_io_desc, ARRAY_SIZE(dram_io_desc));
	dram_sync = (void __iomem *) dram_io_desc[0].virtual;
	sram_sync = (void __iomem *) OMAP4_SRAM_VA;

	pr_info("OMAP4: Map 0x%08llx to 0x%08lx for dram barrier\n",
		(long long) paddr, dram_io_desc[0].virtual);

}
#else
void __init omap_barriers_init(void)
{}
#endif

/* Map and initialize the GIC distributor/CPU interfaces and the wakeupgen. */
void __init gic_init_irq(void)
{
	void __iomem *omap_irq_base;
	void __iomem *gic_dist_base_addr;

	/* Static mapping, never released */
	gic_dist_base_addr = ioremap(OMAP44XX_GIC_DIST_BASE, SZ_4K);
	BUG_ON(!gic_dist_base_addr);

	/* Static mapping, never released */
	omap_irq_base = ioremap(OMAP44XX_GIC_CPU_BASE, SZ_512);
	BUG_ON(!omap_irq_base);

	omap_wakeupgen_init();

	gic_init(0, 29, gic_dist_base_addr, omap_irq_base);
}

#ifdef CONFIG_CACHE_L2X0

void __iomem *omap4_get_l2cache_base(void)
{
	return l2cache_base;
}

static void omap4_l2x0_disable(void)
{
	/* Disable PL310 L2 Cache controller */
	omap_smc1(0x102, 0x0);
}

static void omap4_l2x0_set_debug(unsigned long val)
{
	/* Program PL310 L2 Cache controller debug register */
	omap_smc1(0x100, val);
}

/*
 * Map the PL310, build its aux-control value (revision dependent) and
 * enable it through the secure monitor, then register OMAP4-specific
 * outer-cache callbacks.
 */
static int __init omap_l2_cache_init(void)
{
	u32 aux_ctrl = 0;

	/*
	 * To avoid code running on other OMAPs in
	 * multi-omap builds
	 */
	if (!cpu_is_omap44xx())
		return -ENODEV;

	/* Static mapping, never released */
	l2cache_base = ioremap(OMAP44XX_L2CACHE_BASE, SZ_4K);
	if (WARN_ON(!l2cache_base))
		return -ENOMEM;

	/*
	 * 16-way associativity, parity disabled
	 * Way size - 32KB (es1.0)
	 * Way size - 64KB (es2.0 +)
	 */
	/* NOTE(review): bit 25 is set without a named macro — confirm its
	 * meaning against the PL310 TRM for this SoC. */
	aux_ctrl = ((1 << L2X0_AUX_CTRL_ASSOCIATIVITY_SHIFT) |
			(0x1 << 25) |
			(0x1 << L2X0_AUX_CTRL_NS_LOCKDOWN_SHIFT) |
			(0x1 << L2X0_AUX_CTRL_NS_INT_CTRL_SHIFT));

	if (omap_rev() == OMAP4430_REV_ES1_0) {
		aux_ctrl |= 0x2 << L2X0_AUX_CTRL_WAY_SIZE_SHIFT;
	} else {
		aux_ctrl |= ((0x3 << L2X0_AUX_CTRL_WAY_SIZE_SHIFT) |
			(1 << L2X0_AUX_CTRL_SHARE_OVERRIDE_SHIFT) |
			(1 << L2X0_AUX_CTRL_DATA_PREFETCH_SHIFT) |
			(1 << L2X0_AUX_CTRL_INSTR_PREFETCH_SHIFT) |
			(1 << L2X0_AUX_CTRL_EARLY_BRESP_SHIFT));
	}
	if (omap_rev() != OMAP4430_REV_ES1_0)
		omap_smc1(0x109, aux_ctrl);

	/* Enable PL310 L2 Cache controller */
	omap_smc1(0x102, 0x1);

	l2x0_init(l2cache_base, aux_ctrl, L2X0_AUX_CTRL_MASK);

	/*
	 * Override default outer_cache.disable with a OMAP4
	 * specific one
	 */
	outer_cache.disable = omap4_l2x0_disable;
	outer_cache.set_debug = omap4_l2x0_set_debug;

	return 0;
}
early_initcall(omap_l2_cache_init);
#endif

void __iomem *omap4_get_sar_ram_base(void)
{
	return sar_ram_base;
}

/*
 * SAR RAM used to save and restore the HW
 * context in low power modes
 */
static int __init omap4_sar_ram_init(void)
{
	/*
	 * To avoid code running on other OMAPs in
	 * multi-omap builds
	 */
	if (!cpu_is_omap44xx())
		return -ENOMEM;

	/* Static mapping, never released */
	sar_ram_base = ioremap(OMAP44XX_SAR_RAM_BASE, SZ_16K);
	if (WARN_ON(!sar_ram_base))
		return -ENOMEM;

	return 0;
}
early_initcall(omap4_sar_ram_init);
{ "pile_set_name": "Github" }
/* gmp_randinit_default -- initialize a random state with a default algorithm.

Copyright 2001, 2002 Free Software Foundation, Inc.

This file is part of the GNU MP Library.

The GNU MP Library is free software; you can redistribute it and/or modify
it under the terms of either:

  * the GNU Lesser General Public License as published by the Free
    Software Foundation; either version 3 of the License, or (at your
    option) any later version.

or

  * the GNU General Public License as published by the Free Software
    Foundation; either version 2 of the License, or (at your option) any
    later version.

or both in parallel, as here.

The GNU MP Library is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received copies of the GNU General Public License and the
GNU Lesser General Public License along with the GNU MP Library.  If not,
see https://www.gnu.org/licenses/.  */

#include "gmp-impl.h"

/* Initialize RSTATE with the library's default PRNG, which is currently
   the Mersenne Twister (gmp_randinit_mt).  Callers must eventually free
   the state with gmp_randclear.  */
void
gmp_randinit_default (gmp_randstate_t rstate)
{
  gmp_randinit_mt (rstate);
}
{ "pile_set_name": "Github" }
Description: Generates a config file, locale files, and a model/attributes translation file for Rails 3.x i18n. In other words, it executes i18n_locale and i18n_translation at once. Example: % rails g i18n locale_name (ja, pt-BR, etc.) This will create: config/locales/ja.yml config/locales/translation_ja.yml And update: config/application.rb
{ "pile_set_name": "Github" }
/*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <thrift/compiler/util.h>

#include <ostream>
#include <sstream>
#include <vector>

#include <boost/algorithm/string/join.hpp>
#include <boost/algorithm/string/split.hpp>

namespace apache {
namespace thrift {
namespace compiler {

// Removes the common leading whitespace ("left margin") from every line of s.
// A leading line and a trailing line consisting only of spaces/tabs are
// dropped/cleared first, so raw-string literals with surrounding newlines
// dedent cleanly. The margin is the minimum indent over all non-blank lines;
// if every line is blank, the longest blank line's length is used instead.
std::string strip_left_margin(std::string const& s) {
  constexpr auto const strippable = " \t";
  if (s.empty()) {
    return s;
  }

  // step: split
  std::vector<std::string> lines;
  boost::algorithm::split(lines, s, [](auto const c) { return c == '\n'; });

  // step: preprocess
  if (lines.back().find_first_not_of(strippable) == std::string::npos) {
    lines.back().clear();
  }
  if (lines.front().find_first_not_of(strippable) == std::string::npos) {
    lines.erase(lines.begin());
  }

  // step: find the left margin
  constexpr auto const sentinel = std::numeric_limits<size_t>::max();
  auto indent = sentinel;
  size_t max_length = 0;
  for (auto& line : lines) {
    auto const needle = line.find_first_not_of(strippable);
    if (needle == std::string::npos) {
      // blank line: track its length as a fallback margin
      max_length = std::max(max_length, line.size());
    } else {
      indent = std::min(indent, needle);
    }
  }
  indent = indent == sentinel ? max_length : indent;

  // step: strip the left margin
  for (auto& line : lines) {
    line = line.substr(std::min(indent, line.size()));
  }

  // step: join
  return boost::algorithm::join(lines, "\n");
}

// Convenience overload: returns the JSON-quoted form of s as a string.
std::string json_quote_ascii(std::string const& s) {
  std::ostringstream o;
  json_quote_ascii(o, s);
  return o.str();
}

// Writes s to o as a double-quoted JSON string. The named escapes are used
// for quote, backslash and common control characters; every other byte
// outside printable ASCII (0x20..0x7f) is emitted as a \u00XX escape, so the
// output is pure ASCII regardless of input encoding.
std::ostream& json_quote_ascii(std::ostream& o, std::string const& s) {
  o << "\"";
  for (char const c : s) {
    switch (c) {
      // clang-format off
      case '"':  o << "\\\""; break;
      case '\\': o << "\\\\"; break;
      case '\b': o << "\\b";  break;
      case '\f': o << "\\f";  break;
      case '\n': o << "\\n";  break;
      case '\r': o << "\\r";  break;
      // clang-format on
      default: {
        uint8_t const b = uint8_t(c);
        if (!(b >= 0x20 && b < 0x80)) {
          constexpr auto const hex = "0123456789abcdef";
          auto const c1 = char(hex[(b >> 4) & 0x0f]);
          auto const c0 = char(hex[(b >> 0) & 0x0f]);
          o << "\\u00" << c1 << c0;
        } else {
          o << c;
        }
      }
    }
  }
  o << "\"";
  return o;
}

} // namespace compiler
} // namespace thrift
} // namespace apache
{ "pile_set_name": "Github" }
<?php

/**
 * Add methods for dispatching events
 *
 * @author     Ushahidi Team <team@ushahidi.com>
 * @package    Ushahidi\Application
 * @copyright  2014 Ushahidi
 * @license    https://www.gnu.org/licenses/agpl-3.0.html GNU Affero General Public License Version 3 (AGPL3)
 */

namespace Ushahidi\Core\Traits\Events;

use Illuminate\Contracts\Events\Dispatcher;

trait DispatchesEvents
{
    /**
     * The event dispatcher instance.
     * @var Dispatcher|null
     */
    protected $events;

    /**
     * Set the event dispatcher used by dispatch().
     *
     * @param Dispatcher $events
     * @return void
     */
    public function setDispatcher(Dispatcher $events)
    {
        $this->events = $events;
    }

    /**
     * Trigger event
     *
     * @param  string|object  $event
     * @param  mixed  $payload
     * @param  bool  $halt
     * @return array|null
     */
    protected function dispatch($event, $payload = [], $halt = false)
    {
        // Fixed: the dispatcher's result was previously discarded, so this
        // method never honored its documented array|null return (needed
        // e.g. when $halt is true and a listener's response matters).
        return $this->events->dispatch($event, $payload, $halt);
    }
}
{ "pile_set_name": "Github" }
/*
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */

#include <xen/mm.h>
#include <xen/sched.h>
#include <xen/acpi.h>
#include <xen/event.h>
#include <xen/iocap.h>
#include <xen/device_tree.h>
#include <xen/libfdt/libfdt.h>
#include <acpi/actables.h>
#include <asm/kernel.h>
#include <asm/domain_build.h>

/* Override macros from asm/page.h to make them work with mfn_t */
#undef virt_to_mfn
#define virt_to_mfn(va) _mfn(__virt_to_mfn(va))

#define ACPI_DOM0_FDT_MIN_SIZE 4096

/*
 * Restrict the hardware domain's MMIO capabilities: permit everything,
 * then carve out the regions Xen keeps for itself (UART described by
 * SPCR, GIC regions).
 */
static int __init acpi_iomem_deny_access(struct domain *d)
{
    acpi_status status;
    struct acpi_table_spcr *spcr = NULL;
    unsigned long mfn;
    int rc;

    /* Firstly permit full MMIO capabilities. */
    rc = iomem_permit_access(d, 0UL, ~0UL);
    if ( rc )
        return rc;

    /* TODO: Deny MMIO access for SMMU, GIC ITS */
    status = acpi_get_table(ACPI_SIG_SPCR, 0,
                            (struct acpi_table_header **)&spcr);
    if ( ACPI_FAILURE(status) )
    {
        printk("Failed to get SPCR table\n");
        return -EINVAL;
    }

    mfn = spcr->serial_port.address >> PAGE_SHIFT;
    /* Deny MMIO access for UART */
    rc = iomem_deny_access(d, mfn, mfn + 1);
    if ( rc )
        return rc;

    /* Deny MMIO access for GIC regions */
    return gic_iomem_deny_access(d);
}

/*
 * Route all unclaimed SPIs to the hardware domain and permit access.
 */
static int __init acpi_route_spis(struct domain *d)
{
    int i, res;
    struct irq_desc *desc;

    /*
     * Route the IRQ to hardware domain and permit the access.
     * The interrupt type will be set by the hardware domain.
     */
    for( i = NR_LOCAL_IRQS; i < vgic_num_irqs(d); i++ )
    {
        /*
         * TODO: Exclude the SPIs SMMU uses which should not be routed to
         * the hardware domain.
         */
        desc = irq_to_desc(i);
        /* Skip IRQs already claimed by Xen itself. */
        if ( desc->action != NULL)
            continue;

        /* XXX: Shall we use a proper devname? */
        res = map_irq_to_domain(d, i, true, "ACPI");
        if ( res )
            return res;
    }

    return 0;
}

/*
 * Add the Xen "hypervisor" node (compatible string + EFI table
 * descriptions) to the minimal Dom0 DTB being built in kinfo->fdt.
 */
static int __init acpi_make_hypervisor_node(const struct kernel_info *kinfo,
                                            struct membank tbl_add[])
{
    const char compat[] =
        "xen,xen-"__stringify(XEN_VERSION)"."__stringify(XEN_SUBVERSION)"\0"
        "xen,xen";
    int res;
    /* Convenience alias */
    void *fdt = kinfo->fdt;

    dt_dprintk("Create hypervisor node\n");

    /* See linux Documentation/devicetree/bindings/arm/xen.txt */
    res = fdt_begin_node(fdt, "hypervisor");
    if ( res )
        return res;

    /* Cannot use fdt_property_string due to embedded nulls */
    res = fdt_property(fdt, "compatible", compat, sizeof(compat));
    if ( res )
        return res;

    res = acpi_make_efi_nodes(fdt, tbl_add);
    if ( res )
        return res;

    res = fdt_end_node(fdt);

    return res;
}

/*
 * Prepare a minimal DTB for Dom0 which contains bootargs, initrd, memory
 * information, EFI table.
 */
static int __init create_acpi_dtb(struct kernel_info *kinfo,
                                  struct membank tbl_add[])
{
    int new_size;
    int ret;

    dt_dprintk("Prepare a min DTB for DOM0\n");

    /* Allocate min size for DT */
    new_size = ACPI_DOM0_FDT_MIN_SIZE;
    kinfo->fdt = xmalloc_bytes(new_size);

    if ( kinfo->fdt == NULL )
        return -ENOMEM;

    /* Create a new empty DT for DOM0 */
    ret = fdt_create(kinfo->fdt, new_size);
    if ( ret < 0 )
        goto err;

    ret = fdt_finish_reservemap(kinfo->fdt);
    if ( ret < 0 )
        goto err;

    ret = fdt_begin_node(kinfo->fdt, "");
    if ( ret < 0 )
        goto err;

    /*
     * BUG FIX: these two error paths previously did "return ret;",
     * leaking kinfo->fdt and skipping the error report.  All failures
     * in this function must go through the err label.  libfdt reports
     * errors as negative codes, so check "ret < 0" like the other calls.
     */
    ret = fdt_property_cell(kinfo->fdt, "#address-cells", 2);
    if ( ret < 0 )
        goto err;

    ret = fdt_property_cell(kinfo->fdt, "#size-cells", 1);
    if ( ret < 0 )
        goto err;

    /* Create a chosen node for DOM0 */
    ret = make_chosen_node(kinfo);
    if ( ret )
        goto err;

    ret = acpi_make_hypervisor_node(kinfo, tbl_add);
    if ( ret )
        goto err;

    ret = fdt_end_node(kinfo->fdt);
    if ( ret < 0 )
        goto err;

    ret = fdt_finish(kinfo->fdt);
    if ( ret < 0 )
        goto err;

    return 0;

  err:
    printk("Device tree generation failed (%d).\n", ret);
    xfree(kinfo->fdt);
    /* Avoid a dangling pointer in kinfo after freeing. */
    kinfo->fdt = NULL;
    return -EINVAL;
}

/* Map every table referenced by the ACPI root table list 1:1 into Dom0. */
static void __init acpi_map_other_tables(struct domain *d)
{
    int i;
    unsigned long res;
    u64 addr, size;

    /* Map all ACPI tables to Dom0 using 1:1 mappings. */
    for( i = 0; i < acpi_gbl_root_table_list.count; i++ )
    {
        addr = acpi_gbl_root_table_list.tables[i].address;
        size = acpi_gbl_root_table_list.tables[i].length;
        res = map_regions_p2mt(d,
                               gaddr_to_gfn(addr),
                               PFN_UP(size),
                               maddr_to_mfn(addr),
                               p2m_mmio_direct_c);
        if ( res )
        {
             panic(XENLOG_ERR "Unable to map ACPI region 0x%"PRIx64
                   " - 0x%"PRIx64" in domain\n",
                   addr & PAGE_MASK, PAGE_ALIGN(addr + size) - 1);
        }
    }
}

/*
 * Copy the firmware RSDP into Dom0's ACPI blob and point its
 * xsdt_physical_address at the domain's private XSDT copy.
 */
static int __init acpi_create_rsdp(struct domain *d, struct membank tbl_add[])
{

    struct acpi_table_rsdp *rsdp = NULL;
    u64 addr;
    u64 table_size = sizeof(struct acpi_table_rsdp);
    u8 *base_ptr;
    u8 checksum;

    addr = acpi_os_get_root_pointer();
    if ( !addr )
    {
        printk("Unable to get acpi root pointer\n");
        return -EINVAL;
    }

    rsdp = acpi_os_map_memory(addr, table_size);
    /* BUG FIX: the mapping was dereferenced without a NULL check. */
    if ( !rsdp )
    {
        printk("Unable to map RSDP table\n");
        return -EINVAL;
    }
    base_ptr = d->arch.efi_acpi_table
               + acpi_get_table_offset(tbl_add, TBL_RSDP);
    memcpy(base_ptr, rsdp, table_size);
    acpi_os_unmap_memory(rsdp, table_size);

    rsdp = (struct acpi_table_rsdp *)base_ptr;
    /* Replace xsdt_physical_address */
    rsdp->xsdt_physical_address = tbl_add[TBL_XSDT].start;
    /* Re-checksum after the modification (byte sum must be 0). */
    checksum = acpi_tb_checksum(ACPI_CAST_PTR(u8, rsdp), table_size);
    rsdp->checksum = rsdp->checksum - checksum;

    tbl_add[TBL_RSDP].start = d->arch.efi_acpi_gpa
                              + acpi_get_table_offset(tbl_add, TBL_RSDP);
    tbl_add[TBL_RSDP].size = table_size;

    return 0;
}

/*
 * Find the XSDT entry whose table carries @signature and replace it with
 * @addr.  Only the first match is rewritten.
 */
static void __init acpi_xsdt_modify_entry(u64 entry[],
                                          unsigned long entry_count,
                                          char *signature, u64 addr)
{
    int i;
    struct acpi_table_header *table;
    u64 size = sizeof(struct acpi_table_header);

    for( i = 0; i < entry_count; i++ )
    {
        table = acpi_os_map_memory(entry[i], size);
        if ( ACPI_COMPARE_NAME(table->signature, signature) )
        {
            entry[i] = addr;
            acpi_os_unmap_memory(table, size);
            break;
        }
        acpi_os_unmap_memory(table, size);
    }
}

/*
 * Build Dom0's private XSDT: copy the firmware XSDT, redirect the FADT
 * and MADT entries to the domain copies, and append a new STAO entry.
 */
static int __init acpi_create_xsdt(struct domain *d, struct membank tbl_add[])
{
    struct acpi_table_header *table = NULL;
    struct acpi_table_rsdp *rsdp_tbl;
    struct acpi_table_xsdt *xsdt = NULL;
    u64 table_size, addr;
    unsigned long entry_count;
    u8 *base_ptr;
    u8 checksum;

    addr = acpi_os_get_root_pointer();
    if ( !addr )
    {
        printk("Unable to get acpi root pointer\n");
        return -EINVAL;
    }

    rsdp_tbl = acpi_os_map_memory(addr, sizeof(struct acpi_table_rsdp));
    table = acpi_os_map_memory(rsdp_tbl->xsdt_physical_address,
                               sizeof(struct acpi_table_header));

    /* Add place for STAO table in XSDT table */
    table_size = table->length + sizeof(u64);
    entry_count = (table->length - sizeof(struct acpi_table_header))
                  / sizeof(u64);
    base_ptr = d->arch.efi_acpi_table
               + acpi_get_table_offset(tbl_add, TBL_XSDT);
    /*
     * NOTE(review): only the header is mapped above, yet table->length
     * bytes are copied from it.  This relies on the whole XSDT being
     * accessible through the same mapping — confirm against the
     * acpi_os_map_memory implementation.
     */
    memcpy(base_ptr, table, table->length);
    acpi_os_unmap_memory(table, sizeof(struct acpi_table_header));
    acpi_os_unmap_memory(rsdp_tbl, sizeof(struct acpi_table_rsdp));

    xsdt = (struct acpi_table_xsdt *)base_ptr;
    acpi_xsdt_modify_entry(xsdt->table_offset_entry, entry_count,
                           ACPI_SIG_FADT, tbl_add[TBL_FADT].start);
    acpi_xsdt_modify_entry(xsdt->table_offset_entry, entry_count,
                           ACPI_SIG_MADT, tbl_add[TBL_MADT].start);
    /* Appended slot reserved by the "+ sizeof(u64)" above. */
    xsdt->table_offset_entry[entry_count] = tbl_add[TBL_STAO].start;

    xsdt->header.length = table_size;
    checksum = acpi_tb_checksum(ACPI_CAST_PTR(u8, xsdt), table_size);
    xsdt->header.checksum -= checksum;

    tbl_add[TBL_XSDT].start = d->arch.efi_acpi_gpa
                              + acpi_get_table_offset(tbl_add, TBL_XSDT);
    tbl_add[TBL_XSDT].size = table_size;

    return 0;
}

/*
 * Build the STAO (Status Override) table telling Dom0 to ignore the
 * UART Xen owns.  OEM fields are cloned from the MADT header.
 */
static int __init acpi_create_stao(struct domain *d, struct membank tbl_add[])
{
    struct acpi_table_header *table = NULL;
    struct acpi_table_stao *stao = NULL;
    u32 table_size = sizeof(struct acpi_table_stao);
    u32 offset = acpi_get_table_offset(tbl_add, TBL_STAO);
    acpi_status status;
    u8 *base_ptr, checksum;

    /* Copy OEM and ASL compiler fields from another table, use MADT */
    status = acpi_get_table(ACPI_SIG_MADT, 0, &table);
    if ( ACPI_FAILURE(status) )
    {
        const char *msg = acpi_format_exception(status);

        printk("STAO: Failed to get MADT table, %s\n", msg);
        return -EINVAL;
    }

    base_ptr = d->arch.efi_acpi_table + offset;
    memcpy(base_ptr, table, sizeof(struct acpi_table_header));

    stao = (struct acpi_table_stao *)base_ptr;
    memcpy(stao->header.signature, ACPI_SIG_STAO, 4);
    stao->header.revision = 1;
    stao->header.length = table_size;
    /* Tell the hardware domain not to drive the Xen-owned UART. */
    stao->ignore_uart = 1;
    checksum = acpi_tb_checksum(ACPI_CAST_PTR(u8, stao), table_size);
    stao->header.checksum -= checksum;

    tbl_add[TBL_STAO].start = d->arch.efi_acpi_gpa + offset;
    tbl_add[TBL_STAO].size = table_size;

    return 0;
}

/*
 * Build Dom0's MADT from scratch: firmware MADT header + a copy of the
 * GICD subtable + the GIC driver's per-vCPU subtables.
 */
static int __init acpi_create_madt(struct domain *d, struct membank tbl_add[])
{
    struct acpi_table_header *table = NULL;
    struct acpi_table_madt *madt = NULL;
    struct acpi_subtable_header *header;
    struct acpi_madt_generic_distributor *gicd;
    u32 table_size = sizeof(struct acpi_table_madt);
    u32 offset = acpi_get_table_offset(tbl_add, TBL_MADT);
    int ret;
    acpi_status status;
    u8 *base_ptr, checksum;

    status = acpi_get_table(ACPI_SIG_MADT, 0, &table);

    if ( ACPI_FAILURE(status) )
    {
        const char *msg = acpi_format_exception(status);

        printk("Failed to get MADT table, %s\n", msg);
        return -EINVAL;
    }

    base_ptr = d->arch.efi_acpi_table + offset;
    memcpy(base_ptr, table, table_size);

    /* Add Generic Distributor. */
    header = acpi_table_get_entry_madt(ACPI_MADT_TYPE_GENERIC_DISTRIBUTOR, 0);
    if ( !header )
    {
        printk("Can't get GICD entry\n");
        return -EINVAL;
    }
    gicd = container_of(header, struct acpi_madt_generic_distributor, header);
    memcpy(base_ptr + table_size, gicd,
           sizeof(struct acpi_madt_generic_distributor));
    table_size += sizeof(struct acpi_madt_generic_distributor);

    /* Add other subtables. */
    ret = gic_make_hwdom_madt(d, offset + table_size);
    if ( ret < 0 )
    {
        printk("Failed to get other subtables\n");
        return -EINVAL;
    }
    table_size += ret;

    madt = (struct acpi_table_madt *)base_ptr;
    madt->header.length = table_size;
    checksum = acpi_tb_checksum(ACPI_CAST_PTR(u8, madt), table_size);
    madt->header.checksum -= checksum;

    tbl_add[TBL_MADT].start = d->arch.efi_acpi_gpa + offset;
    tbl_add[TBL_MADT].size = table_size;

    return 0;
}

/*
 * Copy the firmware FADT into Dom0's blob and force the PSCI boot flags
 * (Dom0 must boot secondary CPUs through Xen via PSCI/HVC).
 */
static int __init acpi_create_fadt(struct domain *d, struct membank tbl_add[])
{
    struct acpi_table_header *table = NULL;
    struct acpi_table_fadt *fadt = NULL;
    u64 table_size;
    acpi_status status;
    u8 *base_ptr;
    u8 checksum;

    status = acpi_get_table(ACPI_SIG_FADT, 0, &table);

    if ( ACPI_FAILURE(status) )
    {
        const char *msg = acpi_format_exception(status);

        printk("Failed to get FADT table, %s\n", msg);
        return -EINVAL;
    }

    table_size = table->length;
    base_ptr = d->arch.efi_acpi_table
               + acpi_get_table_offset(tbl_add, TBL_FADT);
    memcpy(base_ptr, table, table_size);
    fadt = (struct acpi_table_fadt *)base_ptr;

    /* Set PSCI_COMPLIANT and PSCI_USE_HVC */
    fadt->arm_boot_flags |= (ACPI_FADT_PSCI_COMPLIANT |
                             ACPI_FADT_PSCI_USE_HVC);
    checksum = acpi_tb_checksum(ACPI_CAST_PTR(u8, fadt), table_size);
    fadt->header.checksum -= checksum;

    tbl_add[TBL_FADT].start = d->arch.efi_acpi_gpa
                              + acpi_get_table_offset(tbl_add, TBL_FADT);
    tbl_add[TBL_FADT].size = table_size;

    return 0;
}

/*
 * Compute d->arch.efi_acpi_len: the page-aligned size of the combined
 * EFI + rebuilt-ACPI blob (FADT + STAO + MADT + enlarged XSDT + RSDP),
 * each table rounded up to 8 bytes.
 */
static int __init estimate_acpi_efi_size(struct domain *d,
                                         struct kernel_info *kinfo)
{
    size_t efi_size, acpi_size, madt_size;
    u64 addr;
    struct acpi_table_rsdp *rsdp_tbl;
    struct acpi_table_header *table;

    efi_size = estimate_efi_size(kinfo->mem.nr_banks);

    acpi_size = ROUNDUP(sizeof(struct acpi_table_fadt), 8);
    acpi_size += ROUNDUP(sizeof(struct acpi_table_stao), 8);

    madt_size = gic_get_hwdom_madt_size(d);
    acpi_size += ROUNDUP(madt_size, 8);

    addr = acpi_os_get_root_pointer();
    if ( !addr )
    {
        printk("Unable to get acpi root pointer\n");
        return -EINVAL;
    }

    rsdp_tbl = acpi_os_map_memory(addr, sizeof(struct acpi_table_rsdp));
    if ( !rsdp_tbl )
    {
        printk("Unable to map RSDP table\n");
        return -EINVAL;
    }

    table = acpi_os_map_memory(rsdp_tbl->xsdt_physical_address,
                               sizeof(struct acpi_table_header));
    acpi_os_unmap_memory(rsdp_tbl, sizeof(struct acpi_table_rsdp));
    if ( !table )
    {
        printk("Unable to map XSDT table\n");
        return -EINVAL;
    }

    /* Add place for STAO table in XSDT table */
    acpi_size += ROUNDUP(table->length + sizeof(u64), 8);
    acpi_os_unmap_memory(table, sizeof(struct acpi_table_header));

    acpi_size += ROUNDUP(sizeof(struct acpi_table_rsdp), 8);
    d->arch.efi_acpi_len = PAGE_ALIGN(ROUNDUP(efi_size, 8)
                                      + ROUNDUP(acpi_size, 8));

    return 0;
}

/*
 * Entry point: build the complete EFI/ACPI environment for an ACPI
 * hardware domain — rebuild the tables, map them into the guest,
 * generate the minimal DTB, route SPIs and restrict MMIO access.
 */
int __init prepare_acpi(struct domain *d, struct kernel_info *kinfo)
{
    int rc = 0;
    int order;
    struct membank tbl_add[TBL_MMAX] = {};

    rc = estimate_acpi_efi_size(d, kinfo);
    if ( rc != 0 )
        return rc;

    order = get_order_from_bytes(d->arch.efi_acpi_len);
    d->arch.efi_acpi_table = alloc_xenheap_pages(order, 0);
    if ( d->arch.efi_acpi_table == NULL )
    {
        printk("unable to allocate memory!\n");
        return -ENOMEM;
    }
    memset(d->arch.efi_acpi_table, 0, d->arch.efi_acpi_len);

    /*
     * For ACPI, Dom0 doesn't use kinfo->gnttab_start to get the grant table
     * region. So we use it as the ACPI table mapped address. Also it needs to
     * check if the size of grant table region is enough for those ACPI tables.
     */
    d->arch.efi_acpi_gpa = kinfo->gnttab_start;
    if ( kinfo->gnttab_size < d->arch.efi_acpi_len )
    {
        printk("The grant table region is not enough to fit the ACPI tables!\n");
        return -EINVAL;
    }

    rc = acpi_create_fadt(d, tbl_add);
    if ( rc != 0 )
        return rc;

    rc = acpi_create_madt(d, tbl_add);
    if ( rc != 0 )
        return rc;

    rc = acpi_create_stao(d, tbl_add);
    if ( rc != 0 )
        return rc;

    /* XSDT must come after FADT/MADT/STAO: it references their copies. */
    rc = acpi_create_xsdt(d, tbl_add);
    if ( rc != 0 )
        return rc;

    rc = acpi_create_rsdp(d, tbl_add);
    if ( rc != 0 )
        return rc;

    acpi_map_other_tables(d);
    acpi_create_efi_system_table(d, tbl_add);
    acpi_create_efi_mmap_table(d, &kinfo->mem, tbl_add);

    /* Map the EFI and ACPI tables to Dom0 */
    rc = map_regions_p2mt(d,
                          gaddr_to_gfn(d->arch.efi_acpi_gpa),
                          PFN_UP(d->arch.efi_acpi_len),
                          virt_to_mfn(d->arch.efi_acpi_table),
                          p2m_mmio_direct_c);
    if ( rc != 0 )
    {
        printk(XENLOG_ERR "Unable to map EFI/ACPI table 0x%"PRIx64
               " - 0x%"PRIx64" in domain %d\n",
               d->arch.efi_acpi_gpa & PAGE_MASK,
               PAGE_ALIGN(d->arch.efi_acpi_gpa + d->arch.efi_acpi_len) - 1,
               d->domain_id);
        return rc;
    }

    /*
     * Flush the cache for this region, otherwise DOM0 may read wrong data when
     * the cache is disabled.
     */
    clean_and_invalidate_dcache_va_range(d->arch.efi_acpi_table,
                                         d->arch.efi_acpi_len);

    rc = create_acpi_dtb(kinfo, tbl_add);
    if ( rc != 0 )
        return rc;

    rc = acpi_route_spis(d);
    if ( rc != 0 )
        return rc;

    rc = acpi_iomem_deny_access(d);
    if ( rc != 0 )
        return rc;

    /*
     * All PPIs have been registered, allocate the event channel
     * interrupts.
     */
    evtchn_allocate(d);

    return 0;
}
{ "pile_set_name": "Github" }
<testcase> <info> <keywords> SFTP SFTP put FAILURE </keywords> </info> # # Server-side <reply> </reply> # # Client-side <client> <server> sftp </server> <name> SFTP put failure </name> <command> --key curl_client_key --pubkey curl_client_key.pub -u %USER: -T log/file622.txt sftp://%HOSTIP:%SSHPORT%PWD/log/nonexistent-directory/nonexistent-file --insecure </command> <file name="log/file622.txt"> Test data for ssh upload test </file> </client> # # Verify data after the test has been "shot" <verify> <errorcode> 78 </errorcode> <valgrind> disable </valgrind> </verify> </testcase>
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="iso-8859-1"?> <Workspace> <ConfigDictionary> <CurrentConfigs><Project>RTOSDemo/Debug</Project></CurrentConfigs></ConfigDictionary> <Desktop> <Static> <Workspace> <ColumnWidths> <Column0>241</Column0><Column1>27</Column1><Column2>27</Column2><Column3>27</Column3></ColumnWidths> </Workspace> <Find-in-Files><ColumnWidth0>482</ColumnWidth0><ColumnWidth1>68</ColumnWidth1><ColumnWidth2>826</ColumnWidth2></Find-in-Files><Build><ColumnWidth0>19</ColumnWidth0><ColumnWidth1>1007</ColumnWidth1><ColumnWidth2>268</ColumnWidth2><ColumnWidth3>67</ColumnWidth3></Build><TerminalIO/><Debug-Log><ColumnWidth0>20</ColumnWidth0><ColumnWidth1>1622</ColumnWidth1></Debug-Log></Static> <Windows> <Wnd0> <Tabs> <Tab> <Identity>TabID-18883-22024</Identity> <TabName>Workspace</TabName> <Factory>Workspace</Factory> <Session> <NodeDict><ExpandedNode>RTOSDemo</ExpandedNode></NodeDict></Session> </Tab> </Tabs> <SelectedTab>0</SelectedTab></Wnd0><Wnd1><Tabs><Tab><Identity>TabID-29040-7360</Identity><TabName>Find in Files</TabName><Factory>Find-in-Files</Factory><Session/></Tab><Tab><Identity>TabID-19024-10413</Identity><TabName>Build</TabName><Factory>Build</Factory><Session/></Tab><Tab><Identity>TabID-19202-19100</Identity><TabName>Debug Log</TabName><Factory>Debug-Log</Factory><Session/></Tab></Tabs><SelectedTab>1</SelectedTab></Wnd1></Windows> <Editor> <Pane><Tab><Factory>TextEditor</Factory><Filename>$WS_DIR$\main.c</Filename><XPos>0</XPos><YPos>0</YPos><SelStart>0</SelStart><SelEnd>0</SelEnd><XPos2>0</XPos2><YPos2>61</YPos2><SelStart2>10549</SelStart2><SelEnd2>10549</SelEnd2></Tab><ActiveTab>0</ActiveTab></Pane><ActivePane>0</ActivePane><Sizes><Pane><X>1000000</X><Y>1000000</Y></Pane></Sizes><SplitMode>1</SplitMode></Editor> <Positions> 
<Top><Row0><Sizes><Toolbar-013BAC30><key>iaridepm.enu1</key></Toolbar-013BAC30></Sizes></Row0><Row1><Sizes/></Row1><Row2><Sizes/></Row2></Top><Left><Row0><Sizes><Wnd0><Rect><Top>-2</Top><Left>-2</Left><Bottom>719</Bottom><Right>315</Right><x>-2</x><y>-2</y><xscreen>200</xscreen><yscreen>200</yscreen><sizeHorzCX>119048</sizeHorzCX><sizeHorzCY>203666</sizeHorzCY><sizeVertCX>188690</sizeVertCX><sizeVertCY>734216</sizeVertCY></Rect></Wnd0></Sizes></Row0></Left><Right><Row0><Sizes/></Row0></Right><Bottom><Row0><Sizes><Wnd1><Rect><Top>-2</Top><Left>-2</Left><Bottom>219</Bottom><Right>1682</Right><x>-2</x><y>-2</y><xscreen>1684</xscreen><yscreen>221</yscreen><sizeHorzCX>1002381</sizeHorzCX><sizeHorzCY>225051</sizeHorzCY><sizeVertCX>119048</sizeVertCX><sizeVertCY>203666</sizeVertCY></Rect></Wnd1></Sizes></Row0></Bottom><Float><Sizes/></Float></Positions> </Desktop> </Workspace>
{ "pile_set_name": "Github" }
using System;
using BepuUtilities.Collections;
using BepuUtilities.Memory;
using BepuPhysics;
using System.Numerics;
using System.Threading.Tasks;
using System.Threading;
using BepuUtilities;
using BepuPhysics.CollisionDetection;
using BepuPhysics.Trees;
using System.Runtime.CompilerServices;

namespace DemoRenderer.Constraints
{
    /// <summary>
    /// Extracts renderable line segments for the axis-aligned bounding boxes stored in a
    /// broad phase. Work is split into per-thread jobs; each leaf contributes the 12 edges
    /// of its AABB. Active and static (inactive) collidables get different tint colors.
    /// </summary>
    public class BoundingBoxLineExtractor
    {
        // Jobs created per processor; more jobs than threads helps load balancing.
        const int jobsPerThread = 4;
        QuickList<ThreadJob> jobs;
        // Transient per-extraction state, set by AddInstances and read by Work.
        BroadPhase broadPhase;
        // Shared write cursor into masterLinesSpan; advanced atomically by workers.
        int masterLinesCount;
        Buffer<LineInstance> masterLinesSpan;

        // One contiguous run of broad phase leaves processed by a single worker call.
        struct ThreadJob
        {
            public int LeafStart;
            public int LeafCount;
            // True when the leaves live in the active tree; false for the static tree.
            public bool CoversActiveCollidables;
        }

        BufferPool pool;
        // Cached delegate so looper.For does not allocate per call.
        Action<int> workDelegate;
        public BoundingBoxLineExtractor(BufferPool pool)
        {
            this.pool = pool;
            jobs = new QuickList<ThreadJob>(Environment.ProcessorCount * jobsPerThread, pool);
            workDelegate = Work;
        }

        /// <summary>
        /// Writes the 12 edges of the AABB spanning min..max as LineInstances.
        /// The target must have room for 12 consecutive instances starting at targetLines.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public static void WriteBoundsLines(in Vector3 min, in Vector3 max, uint packedColor, uint packedBackgroundColor, ref LineInstance targetLines)
        {
            // Corner naming: vXYZ, where a 1 selects the max component on that axis.
            var v001 = new Vector3(min.X, min.Y, max.Z);
            var v010 = new Vector3(min.X, max.Y, min.Z);
            var v011 = new Vector3(min.X, max.Y, max.Z);
            var v100 = new Vector3(max.X, min.Y, min.Z);
            var v101 = new Vector3(max.X, min.Y, max.Z);
            var v110 = new Vector3(max.X, max.Y, min.Z);
            Unsafe.Add(ref targetLines, 0) = new LineInstance(min, v001, packedColor, packedBackgroundColor);
            Unsafe.Add(ref targetLines, 1) = new LineInstance(min, v010, packedColor, packedBackgroundColor);
            Unsafe.Add(ref targetLines, 2) = new LineInstance(min, v100, packedColor, packedBackgroundColor);
            Unsafe.Add(ref targetLines, 3) = new LineInstance(v001, v011, packedColor, packedBackgroundColor);
            Unsafe.Add(ref targetLines, 4) = new LineInstance(v001, v101, packedColor, packedBackgroundColor);
            Unsafe.Add(ref targetLines, 5) = new LineInstance(v010, v011, packedColor, packedBackgroundColor);
            Unsafe.Add(ref targetLines, 6) = new LineInstance(v010, v110, packedColor, packedBackgroundColor);
            Unsafe.Add(ref targetLines, 7) = new LineInstance(v011, max, packedColor, packedBackgroundColor);
            Unsafe.Add(ref targetLines, 8) = new LineInstance(v100, v101, packedColor, packedBackgroundColor);
            Unsafe.Add(ref targetLines, 9) = new LineInstance(v100, v110, packedColor, packedBackgroundColor);
            Unsafe.Add(ref targetLines, 10) = new LineInstance(v101, max, packedColor, packedBackgroundColor);
            Unsafe.Add(ref targetLines, 11) = new LineInstance(v110, max, packedColor, packedBackgroundColor);
        }

        /// <summary>
        /// Convenience overload taking unpacked colors; packs them and forwards to the uint overload.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public static void WriteBoundsLines(in Vector3 min, in Vector3 max, in Vector3 color, in Vector3 backgroundColor, ref LineInstance targetLines)
        {
            WriteBoundsLines(min, max, Helpers.PackColor(color), Helpers.PackColor(backgroundColor), ref targetLines);
        }

        // Worker body executed by the ParallelLooper for one job index.
        private unsafe void Work(int jobIndex)
        {
            ref var job = ref jobs[jobIndex];
            // NOTE(review): 'end' is computed but never used below.
            var end = job.LeafStart + job.LeafCount;
            var lineCount = 12 * job.LeafCount;
            // Atomically reserve this job's contiguous output slice; the subtraction
            // recovers the slice's start index from the post-add counter value.
            var masterStart = Interlocked.Add(ref masterLinesCount, lineCount) - lineCount;
            var color = new Vector3(0, 1, 0);
            var backgroundColor = new Vector3(0, 0, 0);
            if (!job.CoversActiveCollidables)
            {
                // Tint static/inactive bounds toward blue to distinguish them visually.
                var inactiveTint = new Vector3(0.3f, 0.3f, 0.7f);
                color *= inactiveTint;
                backgroundColor *= inactiveTint;
            }
            var packedColor = Helpers.PackColor(color);
            var packedBackgroundColor = Helpers.PackColor(backgroundColor);
            for (int i = 0; i < job.LeafCount; ++i)
            {
                var broadPhaseIndex = job.LeafStart + i;
                Vector3* min, max;
                if (job.CoversActiveCollidables)
                    broadPhase.GetActiveBoundsPointers(broadPhaseIndex, out min, out max);
                else
                    broadPhase.GetStaticBoundsPointers(broadPhaseIndex, out min, out max);
                var outputStartIndex = masterStart + i * 12;
                WriteBoundsLines(*min, *max, packedColor, packedBackgroundColor, ref masterLinesSpan[outputStartIndex]);
            }
        }

        // Partitions a tree's leaves into up to jobsPerThread * ProcessorCount jobs,
        // spreading the remainder one extra leaf at a time over the first jobs.
        void CreateJobsForTree(in Tree tree, bool active, ref QuickList<ThreadJob> jobs)
        {
            var maximumJobCount = jobsPerThread * Environment.ProcessorCount;
            var possibleLeavesPerJob = tree.LeafCount / maximumJobCount;
            var remainder = tree.LeafCount - possibleLeavesPerJob * maximumJobCount;
            int jobbedLeafCount = 0;
            jobs.EnsureCapacity(jobs.Count + maximumJobCount, pool);
            for (int i = 0; i < maximumJobCount; ++i)
            {
                var jobLeafCount = i < remainder ? possibleLeavesPerJob + 1 : possibleLeavesPerJob;
                if (jobLeafCount > 0)
                {
                    ref var job = ref jobs.AllocateUnsafely();
                    job.LeafCount = jobLeafCount;
                    job.LeafStart = jobbedLeafCount;
                    job.CoversActiveCollidables = active;
                    jobbedLeafCount += jobLeafCount;
                }
                else
                    // Once a zero-leaf job appears, all subsequent jobs would be empty too.
                    break;
            }
        }

        /// <summary>
        /// Appends AABB edge lines for every leaf in the broad phase (active and static trees)
        /// to <paramref name="lines"/>, extracting in parallel via <paramref name="looper"/>.
        /// Not reentrant: instance fields carry state between this method and the workers.
        /// </summary>
        internal unsafe void AddInstances(BroadPhase broadPhase, ref QuickList<LineInstance> lines, ParallelLooper looper, BufferPool pool)
        {
            //For now, we only pull the bounding boxes of objects that are active.
            lines.EnsureCapacity(lines.Count + 12 * (broadPhase.ActiveTree.LeafCount + broadPhase.StaticTree.LeafCount), pool);
            CreateJobsForTree(broadPhase.ActiveTree, true, ref jobs);
            CreateJobsForTree(broadPhase.StaticTree, false, ref jobs);
            masterLinesSpan = lines.Span;
            masterLinesCount = lines.Count;
            this.broadPhase = broadPhase;
            looper.For(0, jobs.Count, workDelegate);
            // Workers advanced masterLinesCount; publish the final count back to the list.
            lines.Count = masterLinesCount;
            this.broadPhase = null;
            jobs.Count = 0;
        }

        /// <summary>Returns the job list's backing memory to the pool.</summary>
        public void Dispose()
        {
            jobs.Dispose(pool);
        }
    }
}
{ "pile_set_name": "Github" }
/*
 *
 * Copyright 2017 gRPC authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package grpc

import (
	"fmt"
	"net"
	"reflect"
	"time"

	"golang.org/x/net/context"
	"google.golang.org/grpc/balancer"
	"google.golang.org/grpc/channelz"
	"google.golang.org/grpc/connectivity"
	lbpb "google.golang.org/grpc/grpclb/grpc_lb_v1/messages"
	"google.golang.org/grpc/grpclog"
	"google.golang.org/grpc/metadata"
	"google.golang.org/grpc/resolver"
)

// processServerList updates the balancer's internal state, creates/removes
// SubConns and regenerates the picker using the received serverList.
// Drop-marked servers are filtered out; the LB token of each remaining server
// is attached to its address as metadata.
func (lb *lbBalancer) processServerList(l *lbpb.ServerList) {
	grpclog.Infof("lbBalancer: processing server list: %+v", l)
	lb.mu.Lock()
	defer lb.mu.Unlock()

	// Set serverListReceived to true so fallback will not take effect if it has
	// not hit timeout.
	lb.serverListReceived = true

	// If the new server list == old server list, do nothing.
	if reflect.DeepEqual(lb.fullServerList, l.Servers) {
		grpclog.Infof("lbBalancer: new serverlist same as the previous one, ignoring")
		return
	}
	lb.fullServerList = l.Servers

	var backendAddrs []resolver.Address
	for _, s := range l.Servers {
		// Servers marked for drop are not dialed; drops are applied in the picker.
		if s.DropForLoadBalancing || s.DropForRateLimiting {
			continue
		}

		md := metadata.Pairs(lbTokeyKey, s.LoadBalanceToken)
		ip := net.IP(s.IpAddress)
		ipStr := ip.String()
		if ip.To4() == nil {
			// Add square brackets to ipv6 addresses, otherwise net.Dial() and
			// net.SplitHostPort() will return too many colons error.
			ipStr = fmt.Sprintf("[%s]", ipStr)
		}
		addr := resolver.Address{
			Addr:     fmt.Sprintf("%s:%d", ipStr, s.Port),
			Metadata: &md,
		}

		backendAddrs = append(backendAddrs, addr)
	}

	// Call refreshSubConns to create/remove SubConns.
	lb.refreshSubConns(backendAddrs)
	// Regenerate and update picker no matter if there's update on backends (if
	// any SubConn will be newed/removed). Because since the full serverList was
	// different, there might be updates in drops or pick weights(different
	// number of duplicates). We need to update picker with the full list.
	//
	// Now with cache, even if SubConn was newed/removed, there might be no
	// state changes.
	lb.regeneratePicker()
	lb.cc.UpdateBalancerState(lb.state, lb.picker)
}

// refreshSubConns creates/removes SubConns with backendAddrs. It returns a bool
// indicating whether the backendAddrs are different from the cached
// backendAddrs (whether any SubConn was newed/removed).
// Caller must hold lb.mu.
func (lb *lbBalancer) refreshSubConns(backendAddrs []resolver.Address) bool {
	lb.backendAddrs = nil
	var backendsUpdated bool
	// addrsSet is the set converted from backendAddrs, it's used to quick
	// lookup for an address.
	addrsSet := make(map[resolver.Address]struct{})
	// Create new SubConns.
	for _, addr := range backendAddrs {
		// SubConns are keyed by address WITHOUT metadata so that a token change
		// alone does not churn connections.
		addrWithoutMD := addr
		addrWithoutMD.Metadata = nil
		addrsSet[addrWithoutMD] = struct{}{}
		lb.backendAddrs = append(lb.backendAddrs, addrWithoutMD)

		if _, ok := lb.subConns[addrWithoutMD]; !ok {
			backendsUpdated = true

			// Use addrWithMD to create the SubConn.
			sc, err := lb.cc.NewSubConn([]resolver.Address{addr}, balancer.NewSubConnOptions{})
			if err != nil {
				grpclog.Warningf("roundrobinBalancer: failed to create new SubConn: %v", err)
				continue
			}
			lb.subConns[addrWithoutMD] = sc // Use the addr without MD as key for the map.
			if _, ok := lb.scStates[sc]; !ok {
				// Only set state of new sc to IDLE. The state could already be
				// READY for cached SubConns.
				lb.scStates[sc] = connectivity.Idle
			}
			sc.Connect()
		}
	}

	for a, sc := range lb.subConns {
		// a was removed by resolver.
		if _, ok := addrsSet[a]; !ok {
			backendsUpdated = true

			lb.cc.RemoveSubConn(sc)
			delete(lb.subConns, a)
			// Keep the state of this sc in b.scStates until sc's state becomes Shutdown.
			// The entry will be deleted in HandleSubConnStateChange.
		}
	}

	return backendsUpdated
}

// readServerList blocks receiving server lists from the balancer stream and
// applies each one via processServerList. It only returns on stream error.
func (lb *lbBalancer) readServerList(s *balanceLoadClientStream) error {
	for {
		reply, err := s.Recv()
		if err != nil {
			return fmt.Errorf("grpclb: failed to recv server list: %v", err)
		}
		if serverList := reply.GetServerList(); serverList != nil {
			lb.processServerList(serverList)
		}
	}
}

// sendLoadReport periodically sends client load stats on the balancer stream
// at the given interval, until the stream's context is done.
func (lb *lbBalancer) sendLoadReport(s *balanceLoadClientStream, interval time.Duration) {
	ticker := time.NewTicker(interval)
	defer ticker.Stop()
	for {
		select {
		case <-ticker.C:
		case <-s.Context().Done():
			return
		}
		stats := lb.clientStats.toClientStats()
		t := time.Now()
		stats.Timestamp = &lbpb.Timestamp{
			Seconds: t.Unix(),
			Nanos:   int32(t.Nanosecond()),
		}
		if err := s.Send(&lbpb.LoadBalanceRequest{
			LoadBalanceRequestType: &lbpb.LoadBalanceRequest_ClientStats{
				ClientStats: stats,
			},
		}); err != nil {
			return
		}
	}
}

// callRemoteBalancer opens a BalanceLoad stream to the remote balancer,
// performs the initial handshake, starts load reporting (if requested by the
// balancer) in a goroutine, then blocks reading server lists. Returns when
// the stream breaks; the load-report goroutine exits via stream-context
// cancellation (deferred cancel).
func (lb *lbBalancer) callRemoteBalancer() error {
	lbClient := &loadBalancerClient{cc: lb.ccRemoteLB}
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()
	stream, err := lbClient.BalanceLoad(ctx, FailFast(false))
	if err != nil {
		return fmt.Errorf("grpclb: failed to perform RPC to the remote balancer %v", err)
	}

	// grpclb handshake on the stream.
	initReq := &lbpb.LoadBalanceRequest{
		LoadBalanceRequestType: &lbpb.LoadBalanceRequest_InitialRequest{
			InitialRequest: &lbpb.InitialLoadBalanceRequest{
				Name: lb.target,
			},
		},
	}
	if err := stream.Send(initReq); err != nil {
		return fmt.Errorf("grpclb: failed to send init request: %v", err)
	}
	reply, err := stream.Recv()
	if err != nil {
		return fmt.Errorf("grpclb: failed to recv init response: %v", err)
	}
	initResp := reply.GetInitialResponse()
	if initResp == nil {
		return fmt.Errorf("grpclb: reply from remote balancer did not include initial response")
	}
	if initResp.LoadBalancerDelegate != "" {
		return fmt.Errorf("grpclb: Delegation is not supported")
	}

	go func() {
		if d := convertDuration(initResp.ClientStatsReportInterval); d > 0 {
			lb.sendLoadReport(stream, d)
		}
	}()
	return lb.readServerList(stream)
}

// watchRemoteBalancer re-establishes the balancer stream whenever it breaks,
// until the balancer is closed (doneCh).
// NOTE(review): retries immediately with no backoff — each failure loops
// straight into a new callRemoteBalancer; verify this is intended.
func (lb *lbBalancer) watchRemoteBalancer() {
	for {
		err := lb.callRemoteBalancer()
		select {
		case <-lb.doneCh:
			return
		default:
			if err != nil {
				grpclog.Error(err)
			}
		}
	}
}

// dialRemoteLB creates the ClientConn to the remote balancer (dialed through
// the manual resolver with pick_first) and starts the watch goroutine.
// Falls back to an insecure connection if server-name override fails or no
// credentials were configured.
func (lb *lbBalancer) dialRemoteLB(remoteLBName string) {
	var dopts []DialOption
	if creds := lb.opt.DialCreds; creds != nil {
		if err := creds.OverrideServerName(remoteLBName); err == nil {
			dopts = append(dopts, WithTransportCredentials(creds))
		} else {
			grpclog.Warningf("grpclb: failed to override the server name in the credentials: %v, using Insecure", err)
			dopts = append(dopts, WithInsecure())
		}
	} else {
		dopts = append(dopts, WithInsecure())
	}
	if lb.opt.Dialer != nil {
		// WithDialer takes a different type of function, so we instead use a
		// special DialOption here.
		dopts = append(dopts, withContextDialer(lb.opt.Dialer))
	}
	// Explicitly set pickfirst as the balancer.
	dopts = append(dopts, WithBalancerName(PickFirstBalancerName))
	dopts = append(dopts, withResolverBuilder(lb.manualResolver))
	if channelz.IsOn() {
		dopts = append(dopts, WithChannelzParentID(lb.opt.ChannelzParentID))
	}

	// DialContext using manualResolver.Scheme, which is a random scheme generated
	// when init grpclb. The target name is not important.
	cc, err := DialContext(context.Background(), "grpclb:///grpclb.server", dopts...)
	if err != nil {
		grpclog.Fatalf("failed to dial: %v", err)
	}
	lb.ccRemoteLB = cc
	go lb.watchRemoteBalancer()
}
{ "pile_set_name": "Github" }
msgid "" msgstr "" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" #: frmselectdataset.scancelbtn msgid "Cancel" msgstr "" #: frmselectdataset.scaption msgid "Select dataset to copy from" msgstr "" #: frmselectdataset.scopymeta msgid "Copy only metadata" msgstr "" #: frmselectdataset.sdataset msgid "&Dataset to copy from:" msgstr "" #: frmselectdataset.serrcomponentnotfound #, object-pascal-format msgid "Error: Component \"%s\" not found" msgstr "" #: frmselectdataset.serrselectdataset msgid "Please select a dataset first" msgstr "" #: frmselectdataset.smenucopydataset msgid "Copy data from Dataset" msgstr "" #: frmselectdataset.smenucreatedataset msgid "Create dataset" msgstr "" #: frmselectdataset.sokbtn msgid "OK" msgstr ""
{ "pile_set_name": "Github" }
{ "randomStatetest" : { "env" : { "currentCoinbase" : "945304eb96065b2a98b57a48a06ae28d285a71b5", "currentDifficulty" : "5623894562375", "currentGasLimit" : "115792089237316195423570985008687907853269984665640564039457584007913129639935", "currentNumber" : "0", "currentTimestamp" : "1", "previousHash" : "5e20a0453cecd065ea59c37ac63e079ee08998b6045136a8ce6635c7912ec0b6" }, "logs" : [ ], "out" : "0x", "post" : { "095e7baea6a6c7c4c2dfeb977efac326af552d87" : { "balance" : "1958353085", "code" : "0x41417ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe7f00000000000000000000000100000000000000000000000000000000000000007ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe7f00000000000000000000000100000000000000000000000000000000000000007fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff066f9e9092673a8f430b6ba115209018165560005155", "nonce" : "0", "storage" : { "0x" : "0x9e9092673a8f430b6ba1152090181655" } }, "945304eb96065b2a98b57a48a06ae28d285a71b5" : { "balance" : "49626", "code" : "0x6000355415600957005b60203560003555", "nonce" : "0", "storage" : { } }, "a94f5374fce5edbc8e2a8697c15331677e6ebf0b" : { "balance" : "999999998041597335", "code" : "0x", "nonce" : "1", "storage" : { } } }, "postStateRoot" : "2c988fb474af7ae7265836009e4e3c0ccbfeed0e3a88d3b83a47d33740ab9528", "pre" : { "095e7baea6a6c7c4c2dfeb977efac326af552d87" : { "balance" : "0", "code" : "0x41417ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe7f00000000000000000000000100000000000000000000000000000000000000007ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe7f00000000000000000000000100000000000000000000000000000000000000007fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff066f9e9092673a8f430b6ba115209018165560005155", "nonce" : "0", "storage" : { } }, "945304eb96065b2a98b57a48a06ae28d285a71b5" : { "balance" : "46", "code" : "0x6000355415600957005b60203560003555", "nonce" : "0", "storage" : { } }, 
"a94f5374fce5edbc8e2a8697c15331677e6ebf0b" : { "balance" : "1000000000000000000", "code" : "0x", "nonce" : "0", "storage" : { } } }, "transaction" : { "data" : "0x41417ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe7f00000000000000000000000100000000000000000000000000000000000000007ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe7f00000000000000000000000100000000000000000000000000000000000000007fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff066f9e9092673a8f430b6ba11520901816", "gasLimit" : "0x5369d744", "gasPrice" : "1", "nonce" : "0", "secretKey" : "45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8", "to" : "095e7baea6a6c7c4c2dfeb977efac326af552d87", "value" : "1958353085" } } }
{ "pile_set_name": "Github" }
--- Description: Bitmap Header Types ms.assetid: 6df4655a-f707-4893-b6e6-f7e4d7f67b4e title: Bitmap Header Types ms.topic: article ms.date: 05/31/2018 --- # Bitmap Header Types The bitmap has four basic header types: - [**BITMAPCOREHEADER**](/windows/win32/api/wingdi/ns-wingdi-bitmapcoreheader) - [**BITMAPINFOHEADER**](/previous-versions//dd183376(v=vs.85)) - [**BITMAPV4HEADER**](/windows/desktop/api/Wingdi/ns-wingdi-bitmapv4header) - [**BITMAPV5HEADER**](/windows/desktop/api/Wingdi/ns-wingdi-bitmapv5header) The four types of bitmap headers are differentiated by the **Size** member, which is the first **DWORD** in each of the structures. The [**BITMAPV5HEADER**](/windows/desktop/api/Wingdi/ns-wingdi-bitmapv5header) structure is an extended [**BITMAPV4HEADER**](/windows/desktop/api/Wingdi/ns-wingdi-bitmapv4header) structure, which is an extended [**BITMAPINFOHEADER**](/previous-versions//dd183376(v=vs.85)) structure. However, the **BITMAPINFOHEADER** and [**BITMAPCOREHEADER**](/windows/win32/api/wingdi/ns-wingdi-bitmapcoreheader) have only the **Size** member in common with other bitmap header structures. The [**BITMAPCOREHEADER**](/windows/win32/api/wingdi/ns-wingdi-bitmapcoreheader) and [**BITMAPV4HEADER**](/windows/desktop/api/Wingdi/ns-wingdi-bitmapv4header) formats have been superseded by [**BITMAPINFOHEADER**](/previous-versions//dd183376(v=vs.85)) and [**BITMAPV5HEADER**](/windows/desktop/api/Wingdi/ns-wingdi-bitmapv5header) formats, respectively. The **BITMAPCOREHEADER** and **BITMAPV4HEADER** formats are presented for completeness and backward compatibility. 
The format for a DIB is the following (for more information, see [Bitmap Storage](bitmap-storage.md) ): - a [**BITMAPFILEHEADER**](/windows/win32/api/wingdi/ns-wingdi-bitmapfileheader) structure - either a [**BITMAPCOREHEADER**](/windows/win32/api/wingdi/ns-wingdi-bitmapcoreheader), a [**BITMAPINFOHEADER**](/previous-versions//dd183376(v=vs.85)), a [**BITMAPV4HEADER**](/windows/desktop/api/Wingdi/ns-wingdi-bitmapv4header), or a [**BITMAPV5HEADER**](/windows/desktop/api/Wingdi/ns-wingdi-bitmapv5header) structure. - an optional color table, which is either a set of [**RGBQUAD**](/windows/win32/api/wingdi/ns-wingdi-rgbquad) structures or a set of [**RGBTRIPLE**](/windows/win32/api/wingdi/ns-wingdi-rgbtriple) structures. - the bitmap data - optional Profile data A color table describes how pixel values correspond to RGB color values. RGB is a model for describing colors that are produced by emitting light. *Profile data* refers to either the profile file name (linked profile) or the actual profile bits (embedded profile). The file format places the profile data at the end of the file. The profile data is placed just after the color table (if present). However, if the function receives a packed DIB, the profile data comes after the bitmap bits, like in the file format. Profile data will only exist for [**BITMAPV5HEADER**](/windows/desktop/api/Wingdi/ns-wingdi-bitmapv5header) structures where **bV5CSType** is PROFILE\_LINKED or PROFILE\_EMBEDDED. For functions that receive packed DIBs, the profile data comes after the bitmap data. A palettized device is any device that uses palettes to assign colors. The classic example of a palettized device is a display running in 8 bit color depth (that is, 256 colors). The display in this mode uses a small color table to assign colors to a bitmap. The colors in a bitmap are assigned to the closest color in the palette that the device is using. 
The palettized device does not create an optimal palette for displaying the bitmap; it simply uses whatever is in the current palette. Applications are responsible for creating a palette and selecting it into the system. In general, 16-, 24-, and 32-bits-per-pixel (bpp) bitmaps do not contain color tables (a.k.a. optimal palettes for the bitmap); the application is responsible for generating an optimal palette in this case. However, 16-, 24-, and 32-bpp bitmaps can contain such optimal color tables for displaying on palettized devices; in this case the application just needs to create a palette based on the color table present in the bitmap file. Bitmaps that are 1, 4, or 8 bpp must have a color table with a maximum size based on the bpp. The maximum size for 1, 4, and 8 bpp bitmaps is 2 to the power of the bpp. Thus, a 1 bpp bitmap has a maximum of two colors, the 4 bpp bitmap has a maximum of 16 colors, and the 8 bpp bitmap has a maximum of 256 colors. Bitmaps that are 16-, 24-, or 32-bpp do not require color tables, but may have them to specify colors for palettized devices. If a color table is present for a 16-, 24-, or 32-bpp bitmap, the **biClrUsed** member specifies the size of the color table and the color table must have that many colors in it. If **biClrUsed** is zero, there is no color table. The red, green, and blue bitfield masks for BI\_BITFIELD bitmaps immediately follow the [**BITMAPINFOHEADER**](/previous-versions//dd183376(v=vs.85)), [**BITMAPV4HEADER**](/windows/desktop/api/Wingdi/ns-wingdi-bitmapv4header), and [**BITMAPV5HEADER**](/windows/desktop/api/Wingdi/ns-wingdi-bitmapv5header) structures. The **BITMAPV4HEADER** and **BITMAPV5HEADER** structures contain additional members for red, green, and blue masks as follows. 
| Member | Meaning | |---------------|--------------------------------------------------------------------------------------------------------------------------------| | **RedMask** | Color mask that specifies the red component of each pixel, valid only if the **Compression** member is set to BI\_BITFIELDS. | | **GreenMask** | Color mask that specifies the green component of each pixel, valid only if the **Compression** member is set to BI\_BITFIELDS. | | **BlueMask** | Color mask that specifies the blue component of each pixel, valid only if the **Compression** member is set to BI\_BITFIELDS. |   When the **biCompression** member of [**BITMAPINFOHEADER**](/previous-versions//dd183376(v=vs.85)) is set to BI\_BITFIELDS and the function receives an argument of type **LPBITMAPINFO**, the color masks will immediately follow the header. The color table, if present, will follow the color masks. [**BITMAPCOREHEADER**](/windows/win32/api/wingdi/ns-wingdi-bitmapcoreheader) bitmaps do not support color masks. By default, bitmap data is bottom-up in its format. Bottom-up means that the first scan line in the bitmap data is the last scan line to be displayed. For example, the 0<sup>th</sup> pixel of the 0<sup>th</sup> scan line of the bitmap data of a 10-pixel by 10-pixel bitmap will be the 0<sup>th</sup> pixel of the 9<sup>th</sup> scan line of the displayed or printed image. Run-length encoded (RLE) format bitmaps and [**BITMAPCOREHEADER**](/windows/win32/api/wingdi/ns-wingdi-bitmapcoreheader) bitmaps cannot be top-down bitmaps. The scan lines are **DWORD** aligned, except for RLE-compressed bitmaps. They must be padded for scan line widths, in bytes, that are not evenly divisible by four, except for RLE compressed bitmaps. For example, a 10- by 10-pixel 24-bpp bitmap will have two padding bytes at the end of each scan line.    
{ "pile_set_name": "Github" }
#!/usr/bin/env python
#
# Process a list of Python and/or XML files containing SCons documentation.
#
# This script creates formatted lists of the Builders, functions, Tools
# or construction variables documented in the specified XML files.
#
# Depending on the options, the lists are output in either
# DocBook-formatted generated XML files containing the summary text
# and/or .mod files containing the ENTITY definitions for each item.
#
import getopt
import os
import sys

import SConsDoc
from SConsDoc import tf as stf

base_sys_path = [os.getcwd() + '/build/test-tar-gz/lib/scons'] + sys.path

helpstr = """\
Usage: scons-proc.py [-b file(s)] [-f file(s)] [-t file(s)] [-v file(s)] [infile ...]
Options:
  -b file(s)        dump builder information to the specified file(s)
  -f file(s)        dump function information to the specified file(s)
  -t file(s)        dump tool information to the specified file(s)
  -v file(s)        dump variable information to the specified file(s)
The "files" argument following a -[bftv] argument is expected to be
a comma-separated pair of names like: foo.gen,foo.mod
"""

# FIX: the long-option list previously omitted 'functions=', so the
# documented --functions spelling was rejected by getopt even though the
# option loop below handles it.  -f/--functions are now consistent.
opts, args = getopt.getopt(sys.argv[1:], "b:f:ht:v:",
                           ['builders=', 'functions=', 'help',
                            'tools=', 'variables='])

buildersfiles = None
functionsfiles = None
toolsfiles = None
variablesfiles = None

for o, a in opts:
    if o in ['-b', '--builders']:
        buildersfiles = a
    elif o in ['-f', '--functions']:
        functionsfiles = a
    elif o in ['-h', '--help']:
        sys.stdout.write(helpstr)
        sys.exit(0)
    elif o in ['-t', '--tools']:
        toolsfiles = a
    elif o in ['-v', '--variables']:
        variablesfiles = a


def parse_docs(args, include_entities=True):
    """Parse the given documentation files into an SConsDocHandler.

    When include_entities is true the files are parsed directly (with
    XML entities resolved); otherwise the raw file content is read and
    handed to parseContent so entity definitions are left untouched.
    Exits the process with status 1 on any parse error.
    """
    h = SConsDoc.SConsDocHandler()
    for f in args:
        if include_entities:
            try:
                h.parseXmlFile(f)
            except Exception as e:
                print("error parsing %s\n" % f, file=sys.stderr)
                print(str(e), file=sys.stderr)
                sys.exit(1)
        else:
            # mode we read (text/bytes) has to match handling in SConsDoc
            with open(f, 'r') as fp:
                content = fp.read()
            if content:
                try:
                    h.parseContent(content, include_entities)
                except Exception as e:
                    print("error parsing %s\n" % f, file=sys.stderr)
                    print(str(e), file=sys.stderr)
                    sys.exit(1)
    return h

Warning = """\
<!--
THIS IS AN AUTOMATICALLY-GENERATED FILE.  DO NOT EDIT.
-->
"""

Regular_Entities_Header = """\
<!--

  Regular %s entities.

-->
"""

Link_Entities_Header = """\
<!--

  Entities that are links to the %s entries

-->
"""


class SCons_XML:
    """Write the collected doc entries as DocBook .gen and ENTITY .mod files."""

    def __init__(self, entries, **kw):
        self.values = entries
        for k, v in kw.items():
            setattr(self, k, v)

    def fopen(self, name, mode='w'):
        """Open an output file, treating '-' as standard output."""
        if name == '-':
            return sys.stdout
        return open(name, mode)

    def fclose(self, f):
        """Close an output file opened by fopen().

        FIX: never close sys.stdout -- previously writing to '-' closed
        the process's standard output, silencing all subsequent output.
        """
        if f is not sys.stdout:
            f.close()

    def write(self, files):
        """Write both halves of a 'gen,mod' comma-separated file pair."""
        gen, mod = files.split(',')
        self.write_gen(gen)
        self.write_mod(mod)

    def write_gen(self, filename):
        """Write the DocBook variablelist summary (.gen) file."""
        if not filename:
            return
        # Try to split off .gen filename
        if filename.count(','):
            fl = filename.split(',')
            filename = fl[0]

        # Start new XML file
        root = stf.newXmlTree("variablelist")

        for v in self.values:
            ve = stf.newNode("varlistentry")
            stf.setAttribute(ve, 'id', '%s%s' % (v.prefix, v.idfunc()))
            for t in v.xml_terms():
                stf.appendNode(ve, t)
            vl = stf.newNode("listitem")
            added = False
            if v.summary is not None:
                for s in v.summary:
                    added = True
                    stf.appendNode(vl, stf.copyNode(s))

            if v.sets:
                added = True
                vp = stf.newNode("para")
                stf.setText(vp, 'Sets: ')
                for x in v.sets[:-1]:
                    stf.appendCvLink(vp, x, ', ')
                stf.appendCvLink(vp, v.sets[-1], '.')
                stf.appendNode(vl, vp)

            if v.uses:
                added = True
                vp = stf.newNode("para")
                stf.setText(vp, 'Uses: ')
                for x in v.uses[:-1]:
                    stf.appendCvLink(vp, x, ', ')
                stf.appendCvLink(vp, v.uses[-1], '.')
                stf.appendNode(vl, vp)

            # Still nothing added to this list item?
            if not added:
                # Append an empty para
                vp = stf.newNode("para")
                stf.appendNode(vl, vp)

            stf.appendNode(ve, vl)
            stf.appendNode(root, ve)

        # Write file
        f = self.fopen(filename)
        stf.writeGenTree(root, f)
        self.fclose(f)

    def write_mod(self, filename):
        """Write the ENTITY definition (.mod) file for these entries."""
        try:
            description = self.values[0].description
        # FIX: narrowed from a bare except; only "no entries" or "entry
        # without a description attribute" are expected here.
        except (IndexError, AttributeError):
            description = ""
        if not filename:
            return
        # Try to split off .mod filename
        if filename.count(','):
            fl = filename.split(',')
            filename = fl[1]
        f = self.fopen(filename)
        f.write(Warning)
        f.write('\n')
        f.write(Regular_Entities_Header % description)
        f.write('\n')
        for v in self.values:
            f.write('<!ENTITY %s%s "<%s xmlns=\'%s\'>%s</%s>">\n' %
                    (v.prefix, v.idfunc(), v.tag, SConsDoc.dbxsd,
                     v.entityfunc(), v.tag))
        if self.env_signatures:
            f.write('\n')
            for v in self.values:
                f.write('<!ENTITY %senv-%s "<%s xmlns=\'%s\'>env.%s</%s>">\n' %
                        (v.prefix, v.idfunc(), v.tag, SConsDoc.dbxsd,
                         v.entityfunc(), v.tag))
        f.write('\n')
        f.write(Link_Entities_Header % description)
        f.write('\n')
        for v in self.values:
            f.write('<!ENTITY %slink-%s "<link linkend=\'%s%s\' xmlns=\'%s\'><%s>%s</%s></link>">\n' %
                    (v.prefix, v.idfunc(), v.prefix, v.idfunc(),
                     SConsDoc.dbxsd, v.tag, v.entityfunc(), v.tag))
        if self.env_signatures:
            f.write('\n')
            for v in self.values:
                f.write('<!ENTITY %slink-env-%s "<link linkend=\'%s%s\' xmlns=\'%s\'><%s>env.%s</%s></link>">\n' %
                        (v.prefix, v.idfunc(), v.prefix, v.idfunc(),
                         SConsDoc.dbxsd, v.tag, v.entityfunc(), v.tag))
        self.fclose(f)


class Proxy:
    def __init__(self, subject):
        """Wrap an object as a Proxy object"""
        self.__subject = subject

    def __getattr__(self, name):
        """Retrieve an attribute from the wrapped object.

        If the named attribute doesn't exist, AttributeError is raised
        """
        return getattr(self.__subject, name)

    def get(self):
        """Retrieve the entire wrapped object"""
        return self.__subject

    def __eq__(self, other):
        if issubclass(other.__class__, self.__subject.__class__):
            return self.__subject == other
        return self.__dict__ == other.__dict__

##    def __lt__(self, other):
##        if issubclass(other.__class__, self.__subject.__class__):
##            return self.__subject < other
##        return self.__dict__ < other.__dict__


class SConsThing(Proxy):
    """Base class for the SConsDoc special elements"""

    def idfunc(self):
        # Default DocBook id component: the element's own name.
        return self.name

    def xml_terms(self):
        # Default term rendering: the bare element name.
        e = stf.newNode("term")
        stf.setText(e, self.name)
        return [e]


class Builder(SConsThing):
    """Generate the descriptions and entities for <builder> elements"""
    description = 'builder'
    prefix = 'b-'
    tag = 'function'

    def xml_terms(self):
        """emit xml for an scons builder

        builders don't show a full signature, just func()
        """
        # build term for global function
        gterm = stf.newNode("term")
        func = stf.newSubNode(gterm, Builder.tag)
        stf.setText(func, self.name)
        stf.setTail(func, '()')

        # build term for env. method
        mterm = stf.newNode("term")
        inst = stf.newSubNode(mterm, "replaceable")
        stf.setText(inst, "env")
        stf.setTail(inst, ".")
        # we could use <function> here, but it's a "method"
        meth = stf.newSubNode(mterm, "methodname")
        stf.setText(meth, self.name)
        stf.setTail(meth, '()')

        return [gterm, mterm]

    def entityfunc(self):
        return self.name


class Function(SConsThing):
    """Generate the descriptions and entities for <scons_function> elements"""
    description = 'function'
    prefix = 'f-'
    tag = 'function'

    def xml_terms(self):
        """emit xml for an scons function

        The signature attribute controls whether to emit the
        global function, the environment method, or both.
        """
        if self.arguments is None:
            # No <arguments> element: render an empty call signature.
            a = stf.newNode("arguments")
            stf.setText(a, '()')
            arguments = [a]
        else:
            arguments = self.arguments
        tlist = []
        for arg in arguments:
            signature = 'both'
            if stf.hasAttribute(arg, 'signature'):
                signature = stf.getAttribute(arg, 'signature')
            sig = stf.getText(arg).strip()[1:-1]  # strip (), temporarily
            if signature in ('both', 'global'):
                # build term for global function
                gterm = stf.newNode("term")
                func = stf.newSubNode(gterm, Function.tag)
                stf.setText(func, self.name)
                if sig:
                    # if there are parameters, use that entity
                    stf.setTail(func, "(")
                    s = stf.newSubNode(gterm, "parameter")
                    stf.setText(s, sig)
                    stf.setTail(s, ")")
                else:
                    stf.setTail(func, "()")
                tlist.append(gterm)
            if signature in ('both', 'env'):
                # build term for env. method
                mterm = stf.newNode("term")
                inst = stf.newSubNode(mterm, "replaceable")
                stf.setText(inst, "env")
                stf.setTail(inst, ".")
                # we could use <function> here, but it's a "method"
                meth = stf.newSubNode(mterm, "methodname")
                stf.setText(meth, self.name)
                if sig:
                    # if there are parameters, use that entity
                    stf.setTail(meth, "(")
                    s = stf.newSubNode(mterm, "parameter")
                    stf.setText(s, sig)
                    stf.setTail(s, ")")
                else:
                    stf.setTail(meth, "()")
                tlist.append(mterm)
        if not tlist:
            tlist.append(stf.newNode("term"))
        return tlist

    def entityfunc(self):
        return self.name


class Tool(SConsThing):
    """Generate the descriptions and entities for <tool> elements"""
    description = 'tool'
    prefix = 't-'
    tag = 'literal'

    def idfunc(self):
        # '+' is not valid in a DocBook id (e.g. the c++ tool), so map it.
        return self.name.replace('+', 'X')

    def entityfunc(self):
        return self.name


class Variable(SConsThing):
    """Generate the descriptions and entities for <cvar> elements"""
    description = 'construction variable'
    prefix = 'cv-'
    tag = 'envar'

    def xml_terms(self):
        term = stf.newNode("term")
        var = stf.newSubNode(term, Variable.tag)
        stf.setText(var, self.name)
        return [term]

    def entityfunc(self):
        # Construction variables are referenced with a leading '$'.
        return '$' + self.name


def write_output_files(h, buildersfiles, functionsfiles, toolsfiles,
                       variablesfiles, write_func):
    """Dump every requested category of entries via write_func.

    write_func is an unbound SCons_XML method (write_mod or write), so it
    is called as write_func(instance, filename_pair).
    """
    if buildersfiles:
        g = processor_class([Builder(b) for b in sorted(h.builders.values())],
                            env_signatures=True)
        write_func(g, buildersfiles)

    if functionsfiles:
        g = processor_class([Function(b) for b in sorted(h.functions.values())],
                            env_signatures=True)
        write_func(g, functionsfiles)

    if toolsfiles:
        g = processor_class([Tool(t) for t in sorted(h.tools.values())],
                            env_signatures=False)
        write_func(g, toolsfiles)

    if variablesfiles:
        g = processor_class([Variable(v) for v in sorted(h.cvars.values())],
                            env_signatures=False)
        write_func(g, variablesfiles)

processor_class = SCons_XML

# Step 1: Creating entity files for builders, functions,...
print("Generating entity files...")
h = parse_docs(args, include_entities=False)
write_output_files(h, buildersfiles, functionsfiles, toolsfiles,
                   variablesfiles, SCons_XML.write_mod)

# Step 2: Validating all input files
print("Validating files against SCons XSD...")
if SConsDoc.validate_all_xml(['SCons']):
    print("OK")
else:
    print("Validation failed! Please correct the errors above and try again.")
    sys.exit(1)

# Step 3: Creating actual documentation snippets, using the
# fully resolved and updated entities from the *.mod files.
print("Updating documentation for builders, tools and functions...")
h = parse_docs(args, include_entities=True)
write_output_files(h, buildersfiles, functionsfiles, toolsfiles,
                   variablesfiles, SCons_XML.write)
print("Done")

# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
{ "pile_set_name": "Github" }
// Demonstrates Gouraud shading of a triangle: each vertex carries its own
// pen color and the fill is interpolated between them.
size(200);

// Per-vertex pens, vertex positions, and the edge-flag array consumed by
// gouraudshade (meaning of each flag per the Asymptote documentation).
pen[] p={red,green,blue,magenta};
pair[] z={(-1,0),(0,0),(0,1),(1,0)};
int[] edges={0,0,0,1};

// Shade only the right-hand triangle z0-z2-z3.
gouraudshade(z[0]--z[2]--z[3]--cycle,p,z,edges);

// Outline the left triangle solid, the shared/right edges dashed.
draw(z[0]--z[1]--z[2]--cycle);
draw(z[1]--z[3]--z[2],dashed);

// Mark and label each vertex on its outward side.
dot(Label,z[0],W);
dot(Label,z[1],S);
dot(Label,z[2],N);
dot(Label,z[3],E);

// Number the edges of the left triangle.
label("0",z[0]--z[1],S,red);
label("1",z[1]--z[2],E,red);
label("2",z[2]--z[0],NW,red);
{ "pile_set_name": "Github" }
// CocoaPods-generated build settings for a target consuming the RKTagsView pod.
// Each setting extends the inherited value rather than replacing it.

// Define COCOAPODS=1 so source code can detect a CocoaPods build.
GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
// Make the pod's public headers visible to the compiler.
HEADER_SEARCH_PATHS = $(inherited) "${PODS_ROOT}/Headers/Public" "${PODS_ROOT}/Headers/Public/RKTagsView"
// -isystem suppresses warnings originating from the pod headers.
OTHER_CFLAGS = $(inherited) -isystem "${PODS_ROOT}/Headers/Public" -isystem "${PODS_ROOT}/Headers/Public/RKTagsView"
// Link the static library; -ObjC keeps its Objective-C categories from being stripped.
OTHER_LDFLAGS = $(inherited) -ObjC -l"RKTagsView"
// Root of the Pods checkout, relative to the project.
PODS_ROOT = ${SRCROOT}/Pods
{ "pile_set_name": "Github" }
/**
 * MegaMek - Copyright (C) 2005 Ben Mazur (bmazur@sev.org)
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * for more details.
 */
package megamek.common.weapons;

import megamek.common.IGame;
import megamek.common.ToHitData;
import megamek.common.actions.WeaponAttackAction;
import megamek.server.Server;

/**
 * Attack handler for the Swordfish weapon.
 * <p>
 * NOTE(review): the only specialization over {@code AmmoWeaponHandler} visible
 * here is the capital-missile modifier override below; all other attack
 * resolution is inherited — confirm against the weapon's rules entry.
 *
 * @author Jay Lawson
 */
public class SwordfishHandler extends AmmoWeaponHandler {

    /** Serialization version identifier. */
    private static final long serialVersionUID = -2536312899903153911L;

    /**
     * Creates a handler for a single Swordfish attack.
     *
     * @param t the to-hit data for this attack
     * @param w the weapon attack action being resolved
     * @param g the game in which the attack takes place
     * @param s the server resolving the attack
     */
    public SwordfishHandler(ToHitData t, WeaponAttackAction w, IGame g, Server s) {
        super(t, w, g, s);
    }

    @Override
    protected int getCapMisMod() {
        // Fixed modifier consumed by the superclass; 11 presumably encodes the
        // Swordfish's capital-missile characteristics — TODO confirm semantics
        // against AmmoWeaponHandler's use of getCapMisMod().
        return 11;
    }
}
{ "pile_set_name": "Github" }
# Build step: generate the CMake build system for the linux64-wayland target.

# Resolve the workspace root: honor a caller-provided WORKSPACE, otherwise
# derive it from this script's location (three directories up).
# FIX: quote $0/dirname and all $WORKSPACE expansions so paths containing
# spaces do not undergo word splitting or globbing.
WORKSPACE=${WORKSPACE:-$( cd "$(dirname "$0")"/../../.. ; pwd -P )}

# Platform identifier consumed by the shared build-step environment
# (presumably read by defaultenv — confirm against that script).
XBMC_PLATFORM_DIR=linux64-wayland

# Pull in the common build environment definitions.
. "$WORKSPACE/tools/buildsteps/defaultenv"

# Generate the CMake build system via the depends target.
make -C "$WORKSPACE/tools/depends/target/cmakebuildsys"
{ "pile_set_name": "Github" }
# Unity asset .meta file (auto-generated). The guid binds this asset to every
# reference in the project — do not edit or regenerate it by hand.
fileFormatVersion: 2
guid: e2c34acb208a3324586159903f01b109
NativeFormatImporter:
  externalObjects: {}
  mainObjectFileID: 11400000
  userData: 
  assetBundleName: 
  assetBundleVariant: 
{ "pile_set_name": "Github" }
//////////////////////////////////////////////////////////// // // SFML - Simple and Fast Multimedia Library // Copyright (C) 2007-2018 Laurent Gomila (laurent@sfml-dev.org) // // This software is provided 'as-is', without any express or implied warranty. // In no event will the authors be held liable for any damages arising from the use of this software. // // Permission is granted to anyone to use this software for any purpose, // including commercial applications, and to alter it and redistribute it freely, // subject to the following restrictions: // // 1. The origin of this software must not be misrepresented; // you must not claim that you wrote the original software. // If you use this software in a product, an acknowledgment // in the product documentation would be appreciated but is not required. // // 2. Altered source versions must be plainly marked as such, // and must not be misrepresented as being the original software. // // 3. This notice may not be removed or altered from any source distribution. 
// //////////////////////////////////////////////////////////// #ifndef SFML_STRING_HPP #define SFML_STRING_HPP //////////////////////////////////////////////////////////// // Headers //////////////////////////////////////////////////////////// #include <SFML/System/Export.hpp> #include <SFML/System/Utf.hpp> #include <iterator> #include <locale> #include <string> namespace sf { //////////////////////////////////////////////////////////// /// \brief Utility string class that automatically handles /// conversions between types and encodings /// //////////////////////////////////////////////////////////// class SFML_SYSTEM_API String { public: //////////////////////////////////////////////////////////// // Types //////////////////////////////////////////////////////////// typedef std::basic_string<Uint32>::iterator Iterator; ///< Iterator type typedef std::basic_string<Uint32>::const_iterator ConstIterator; ///< Read-only iterator type //////////////////////////////////////////////////////////// // Static member data //////////////////////////////////////////////////////////// static const std::size_t InvalidPos; ///< Represents an invalid position in the string //////////////////////////////////////////////////////////// /// \brief Default constructor /// /// This constructor creates an empty string. /// //////////////////////////////////////////////////////////// String(); //////////////////////////////////////////////////////////// /// \brief Construct from a single ANSI character and a locale /// /// The source character is converted to UTF-32 according /// to the given locale. 
/// /// \param ansiChar ANSI character to convert /// \param locale Locale to use for conversion /// //////////////////////////////////////////////////////////// String(char ansiChar, const std::locale& locale = std::locale()); //////////////////////////////////////////////////////////// /// \brief Construct from single wide character /// /// \param wideChar Wide character to convert /// //////////////////////////////////////////////////////////// String(wchar_t wideChar); //////////////////////////////////////////////////////////// /// \brief Construct from single UTF-32 character /// /// \param utf32Char UTF-32 character to convert /// //////////////////////////////////////////////////////////// String(Uint32 utf32Char); //////////////////////////////////////////////////////////// /// \brief Construct from a null-terminated C-style ANSI string and a locale /// /// The source string is converted to UTF-32 according /// to the given locale. /// /// \param ansiString ANSI string to convert /// \param locale Locale to use for conversion /// //////////////////////////////////////////////////////////// String(const char* ansiString, const std::locale& locale = std::locale()); //////////////////////////////////////////////////////////// /// \brief Construct from an ANSI string and a locale /// /// The source string is converted to UTF-32 according /// to the given locale. 
/// /// \param ansiString ANSI string to convert /// \param locale Locale to use for conversion /// //////////////////////////////////////////////////////////// String(const std::string& ansiString, const std::locale& locale = std::locale()); //////////////////////////////////////////////////////////// /// \brief Construct from null-terminated C-style wide string /// /// \param wideString Wide string to convert /// //////////////////////////////////////////////////////////// String(const wchar_t* wideString); //////////////////////////////////////////////////////////// /// \brief Construct from a wide string /// /// \param wideString Wide string to convert /// //////////////////////////////////////////////////////////// String(const std::wstring& wideString); //////////////////////////////////////////////////////////// /// \brief Construct from a null-terminated C-style UTF-32 string /// /// \param utf32String UTF-32 string to assign /// //////////////////////////////////////////////////////////// String(const Uint32* utf32String); //////////////////////////////////////////////////////////// /// \brief Construct from an UTF-32 string /// /// \param utf32String UTF-32 string to assign /// //////////////////////////////////////////////////////////// String(const std::basic_string<Uint32>& utf32String); //////////////////////////////////////////////////////////// /// \brief Copy constructor /// /// \param copy Instance to copy /// //////////////////////////////////////////////////////////// String(const String& copy); //////////////////////////////////////////////////////////// /// \brief Create a new sf::String from a UTF-8 encoded string /// /// \param begin Forward iterator to the beginning of the UTF-8 sequence /// \param end Forward iterator to the end of the UTF-8 sequence /// /// \return A sf::String containing the source string /// /// \see fromUtf16, fromUtf32 /// //////////////////////////////////////////////////////////// template <typename T> static String 
fromUtf8(T begin, T end); //////////////////////////////////////////////////////////// /// \brief Create a new sf::String from a UTF-16 encoded string /// /// \param begin Forward iterator to the beginning of the UTF-16 sequence /// \param end Forward iterator to the end of the UTF-16 sequence /// /// \return A sf::String containing the source string /// /// \see fromUtf8, fromUtf32 /// //////////////////////////////////////////////////////////// template <typename T> static String fromUtf16(T begin, T end); //////////////////////////////////////////////////////////// /// \brief Create a new sf::String from a UTF-32 encoded string /// /// This function is provided for consistency, it is equivalent to /// using the constructors that takes a const sf::Uint32* or /// a std::basic_string<sf::Uint32>. /// /// \param begin Forward iterator to the beginning of the UTF-32 sequence /// \param end Forward iterator to the end of the UTF-32 sequence /// /// \return A sf::String containing the source string /// /// \see fromUtf8, fromUtf16 /// //////////////////////////////////////////////////////////// template <typename T> static String fromUtf32(T begin, T end); //////////////////////////////////////////////////////////// /// \brief Implicit conversion operator to std::string (ANSI string) /// /// The current global locale is used for conversion. If you /// want to explicitly specify a locale, see toAnsiString. /// Characters that do not fit in the target encoding are /// discarded from the returned string. /// This operator is defined for convenience, and is equivalent /// to calling toAnsiString(). 
/// /// \return Converted ANSI string /// /// \see toAnsiString, operator std::wstring /// //////////////////////////////////////////////////////////// operator std::string() const; //////////////////////////////////////////////////////////// /// \brief Implicit conversion operator to std::wstring (wide string) /// /// Characters that do not fit in the target encoding are /// discarded from the returned string. /// This operator is defined for convenience, and is equivalent /// to calling toWideString(). /// /// \return Converted wide string /// /// \see toWideString, operator std::string /// //////////////////////////////////////////////////////////// operator std::wstring() const; //////////////////////////////////////////////////////////// /// \brief Convert the Unicode string to an ANSI string /// /// The UTF-32 string is converted to an ANSI string in /// the encoding defined by \a locale. /// Characters that do not fit in the target encoding are /// discarded from the returned string. /// /// \param locale Locale to use for conversion /// /// \return Converted ANSI string /// /// \see toWideString, operator std::string /// //////////////////////////////////////////////////////////// std::string toAnsiString(const std::locale& locale = std::locale()) const; //////////////////////////////////////////////////////////// /// \brief Convert the Unicode string to a wide string /// /// Characters that do not fit in the target encoding are /// discarded from the returned string. 
/// /// \return Converted wide string /// /// \see toAnsiString, operator std::wstring /// //////////////////////////////////////////////////////////// std::wstring toWideString() const; //////////////////////////////////////////////////////////// /// \brief Convert the Unicode string to a UTF-8 string /// /// \return Converted UTF-8 string /// /// \see toUtf16, toUtf32 /// //////////////////////////////////////////////////////////// std::basic_string<Uint8> toUtf8() const; //////////////////////////////////////////////////////////// /// \brief Convert the Unicode string to a UTF-16 string /// /// \return Converted UTF-16 string /// /// \see toUtf8, toUtf32 /// //////////////////////////////////////////////////////////// std::basic_string<Uint16> toUtf16() const; //////////////////////////////////////////////////////////// /// \brief Convert the Unicode string to a UTF-32 string /// /// This function doesn't perform any conversion, since the /// string is already stored as UTF-32 internally. /// /// \return Converted UTF-32 string /// /// \see toUtf8, toUtf16 /// //////////////////////////////////////////////////////////// std::basic_string<Uint32> toUtf32() const; //////////////////////////////////////////////////////////// /// \brief Overload of assignment operator /// /// \param right Instance to assign /// /// \return Reference to self /// //////////////////////////////////////////////////////////// String& operator =(const String& right); //////////////////////////////////////////////////////////// /// \brief Overload of += operator to append an UTF-32 string /// /// \param right String to append /// /// \return Reference to self /// //////////////////////////////////////////////////////////// String& operator +=(const String& right); //////////////////////////////////////////////////////////// /// \brief Overload of [] operator to access a character by its position /// /// This function provides read-only access to characters. 
/// Note: the behavior is undefined if \a index is out of range. /// /// \param index Index of the character to get /// /// \return Character at position \a index /// //////////////////////////////////////////////////////////// Uint32 operator [](std::size_t index) const; //////////////////////////////////////////////////////////// /// \brief Overload of [] operator to access a character by its position /// /// This function provides read and write access to characters. /// Note: the behavior is undefined if \a index is out of range. /// /// \param index Index of the character to get /// /// \return Reference to the character at position \a index /// //////////////////////////////////////////////////////////// Uint32& operator [](std::size_t index); //////////////////////////////////////////////////////////// /// \brief Clear the string /// /// This function removes all the characters from the string. /// /// \see isEmpty, erase /// //////////////////////////////////////////////////////////// void clear(); //////////////////////////////////////////////////////////// /// \brief Get the size of the string /// /// \return Number of characters in the string /// /// \see isEmpty /// //////////////////////////////////////////////////////////// std::size_t getSize() const; //////////////////////////////////////////////////////////// /// \brief Check whether the string is empty or not /// /// \return True if the string is empty (i.e. contains no character) /// /// \see clear, getSize /// //////////////////////////////////////////////////////////// bool isEmpty() const; //////////////////////////////////////////////////////////// /// \brief Erase one or more characters from the string /// /// This function removes a sequence of \a count characters /// starting from \a position. 
/// /// \param position Position of the first character to erase /// \param count Number of characters to erase /// //////////////////////////////////////////////////////////// void erase(std::size_t position, std::size_t count = 1); //////////////////////////////////////////////////////////// /// \brief Insert one or more characters into the string /// /// This function inserts the characters of \a str /// into the string, starting from \a position. /// /// \param position Position of insertion /// \param str Characters to insert /// //////////////////////////////////////////////////////////// void insert(std::size_t position, const String& str); //////////////////////////////////////////////////////////// /// \brief Find a sequence of one or more characters in the string /// /// This function searches for the characters of \a str /// in the string, starting from \a start. /// /// \param str Characters to find /// \param start Where to begin searching /// /// \return Position of \a str in the string, or String::InvalidPos if not found /// //////////////////////////////////////////////////////////// std::size_t find(const String& str, std::size_t start = 0) const; //////////////////////////////////////////////////////////// /// \brief Replace a substring with another string /// /// This function replaces the substring that starts at index \a position /// and spans \a length characters with the string \a replaceWith. /// /// \param position Index of the first character to be replaced /// \param length Number of characters to replace. You can pass InvalidPos to /// replace all characters until the end of the string. /// \param replaceWith String that replaces the given substring. 
/// //////////////////////////////////////////////////////////// void replace(std::size_t position, std::size_t length, const String& replaceWith); //////////////////////////////////////////////////////////// /// \brief Replace all occurrences of a substring with a replacement string /// /// This function replaces all occurrences of \a searchFor in this string /// with the string \a replaceWith. /// /// \param searchFor The value being searched for /// \param replaceWith The value that replaces found \a searchFor values /// //////////////////////////////////////////////////////////// void replace(const String& searchFor, const String& replaceWith); //////////////////////////////////////////////////////////// /// \brief Return a part of the string /// /// This function returns the substring that starts at index \a position /// and spans \a length characters. /// /// \param position Index of the first character /// \param length Number of characters to include in the substring (if /// the string is shorter, as many characters as possible /// are included). \ref InvalidPos can be used to include all /// characters until the end of the string. /// /// \return String object containing a substring of this object /// //////////////////////////////////////////////////////////// String substring(std::size_t position, std::size_t length = InvalidPos) const; //////////////////////////////////////////////////////////// /// \brief Get a pointer to the C-style array of characters /// /// This functions provides a read-only access to a /// null-terminated C-style representation of the string. /// The returned pointer is temporary and is meant only for /// immediate use, thus it is not recommended to store it. 
/// /// \return Read-only pointer to the array of characters /// //////////////////////////////////////////////////////////// const Uint32* getData() const; //////////////////////////////////////////////////////////// /// \brief Return an iterator to the beginning of the string /// /// \return Read-write iterator to the beginning of the string characters /// /// \see end /// //////////////////////////////////////////////////////////// Iterator begin(); //////////////////////////////////////////////////////////// /// \brief Return an iterator to the beginning of the string /// /// \return Read-only iterator to the beginning of the string characters /// /// \see end /// //////////////////////////////////////////////////////////// ConstIterator begin() const; //////////////////////////////////////////////////////////// /// \brief Return an iterator to the end of the string /// /// The end iterator refers to 1 position past the last character; /// thus it represents an invalid character and should never be /// accessed. /// /// \return Read-write iterator to the end of the string characters /// /// \see begin /// //////////////////////////////////////////////////////////// Iterator end(); //////////////////////////////////////////////////////////// /// \brief Return an iterator to the end of the string /// /// The end iterator refers to 1 position past the last character; /// thus it represents an invalid character and should never be /// accessed. 
/// /// \return Read-only iterator to the end of the string characters /// /// \see begin /// //////////////////////////////////////////////////////////// ConstIterator end() const; private: friend SFML_SYSTEM_API bool operator ==(const String& left, const String& right); friend SFML_SYSTEM_API bool operator <(const String& left, const String& right); //////////////////////////////////////////////////////////// // Member data //////////////////////////////////////////////////////////// std::basic_string<Uint32> m_string; ///< Internal string of UTF-32 characters }; //////////////////////////////////////////////////////////// /// \relates String /// \brief Overload of == operator to compare two UTF-32 strings /// /// \param left Left operand (a string) /// \param right Right operand (a string) /// /// \return True if both strings are equal /// //////////////////////////////////////////////////////////// SFML_SYSTEM_API bool operator ==(const String& left, const String& right); //////////////////////////////////////////////////////////// /// \relates String /// \brief Overload of != operator to compare two UTF-32 strings /// /// \param left Left operand (a string) /// \param right Right operand (a string) /// /// \return True if both strings are different /// //////////////////////////////////////////////////////////// SFML_SYSTEM_API bool operator !=(const String& left, const String& right); //////////////////////////////////////////////////////////// /// \relates String /// \brief Overload of < operator to compare two UTF-32 strings /// /// \param left Left operand (a string) /// \param right Right operand (a string) /// /// \return True if \a left is lexicographically before \a right /// //////////////////////////////////////////////////////////// SFML_SYSTEM_API bool operator <(const String& left, const String& right); //////////////////////////////////////////////////////////// /// \relates String /// \brief Overload of > operator to compare two UTF-32 strings 
/// /// \param left Left operand (a string) /// \param right Right operand (a string) /// /// \return True if \a left is lexicographically after \a right /// //////////////////////////////////////////////////////////// SFML_SYSTEM_API bool operator >(const String& left, const String& right); //////////////////////////////////////////////////////////// /// \relates String /// \brief Overload of <= operator to compare two UTF-32 strings /// /// \param left Left operand (a string) /// \param right Right operand (a string) /// /// \return True if \a left is lexicographically before or equivalent to \a right /// //////////////////////////////////////////////////////////// SFML_SYSTEM_API bool operator <=(const String& left, const String& right); //////////////////////////////////////////////////////////// /// \relates String /// \brief Overload of >= operator to compare two UTF-32 strings /// /// \param left Left operand (a string) /// \param right Right operand (a string) /// /// \return True if \a left is lexicographically after or equivalent to \a right /// //////////////////////////////////////////////////////////// SFML_SYSTEM_API bool operator >=(const String& left, const String& right); //////////////////////////////////////////////////////////// /// \relates String /// \brief Overload of binary + operator to concatenate two strings /// /// \param left Left operand (a string) /// \param right Right operand (a string) /// /// \return Concatenated string /// //////////////////////////////////////////////////////////// SFML_SYSTEM_API String operator +(const String& left, const String& right); #include <SFML/System/String.inl> } // namespace sf #endif // SFML_STRING_HPP //////////////////////////////////////////////////////////// /// \class sf::String /// \ingroup system /// /// sf::String is a utility string class defined mainly for /// convenience. 
It is a Unicode string (implemented using /// UTF-32), thus it can store any character in the world /// (European, Chinese, Arabic, Hebrew, etc.). /// /// It automatically handles conversions from/to ANSI and /// wide strings, so that you can work with standard string /// classes and still be compatible with functions taking a /// sf::String. /// /// \code /// sf::String s; /// /// std::string s1 = s; // automatically converted to ANSI string /// std::wstring s2 = s; // automatically converted to wide string /// s = "hello"; // automatically converted from ANSI string /// s = L"hello"; // automatically converted from wide string /// s += 'a'; // automatically converted from ANSI string /// s += L'a'; // automatically converted from wide string /// \endcode /// /// Conversions involving ANSI strings use the default user locale. However /// it is possible to use a custom locale if necessary: /// \code /// std::locale locale; /// sf::String s; /// ... /// std::string s1 = s.toAnsiString(locale); /// s = sf::String("hello", locale); /// \endcode /// /// sf::String defines the most important functions of the /// standard std::string class: removing, random access, iterating, /// appending, comparing, etc. However it is a simple class /// provided for convenience, and you may have to consider using /// a more optimized class if your program requires complex string /// handling. The automatic conversion functions will then take /// care of converting your string to sf::String whenever SFML /// requires it. /// /// Please note that SFML also defines a low-level, generic /// interface for Unicode handling, see the sf::Utf classes. /// ////////////////////////////////////////////////////////////
{ "pile_set_name": "Github" }
<?php return [ 'plugin' => [ 'name' => 'Блог', 'description' => 'Надежная блоговая-платформа.' ], 'blog' => [ 'menu_label' => 'Блог', 'menu_description' => 'Управление Блогом', 'posts' => 'Записи', 'create_post' => 'записи', 'categories' => 'Категории', 'create_category' => 'категории', 'tab' => 'Блог', 'access_posts' => 'Управление записями блога', 'access_categories' => 'Управление категориями блога', 'access_other_posts' => 'Управление записями других пользователей', 'access_import_export' => 'Разрешено импортировать и экспортировать записи', 'access_publish' => 'Разрешено публиковать записи', 'manage_settings' => 'Управление настройками блога', 'delete_confirm' => 'Вы уверены, что хотите сделать это?', 'chart_published' => 'Опубликовано', 'chart_drafts' => 'Черновики', 'chart_total' => 'Всего', 'settings_description' => 'Управление настройками блога', 'show_all_posts_label' => 'Показывать все записи для внутренних (бэкенд) пользователей', 'show_all_posts_comment' => 'Показывать опубликованные и неопубликованные записи на фронтенде для внутренних (бэкенд) пользователей', 'tab_general' => 'Основные' ], 'posts' => [ 'list_title' => 'Управление записями блога', 'filter_category' => 'Категория', 'filter_published' => 'Скрыть опубликованные', 'filter_date' => 'Дата', 'new_post' => 'Новая запись', 'export_post' => 'Экспорт записей', 'import_post' => 'Импорт записей' ], 'post' => [ 'title' => 'Заголовок', 'title_placeholder' => 'Новый заголовок записи', 'content' => 'Контент', 'content_html' => 'HTML Контент', 'slug' => 'URL записи', 'slug_placeholder' => 'new-post-slug', 'categories' => 'Категории', 'author_email' => 'Email автора', 'created' => 'Создано', 'created_date' => 'Дата создания', 'updated' => 'Обновлено', 'updated_date' => 'Дата обновления', 'published' => 'Опубликовано', 'published_date' => 'Дата публикации', 'published_validation' => 'Пожалуйста, укажите дату публикации.', 'tab_edit' => 'Редактор', 'tab_categories' => 'Категории', 'categories_comment' => 
'Выберите категории, к которым относится эта запись', 'categories_placeholder' => 'Не найдено ни одной категории, создайте хотя бы одну!', 'tab_manage' => 'Управление', 'published_on' => 'Опубликовано', 'excerpt' => 'Отрывок', 'summary' => 'Резюме', 'featured_images' => 'Тематические изображения', 'delete_confirm' => 'Вы действительно хотите удалить эту запись?', 'delete_success' => 'Эти записи успешно удалены.', 'close_confirm' => 'Запись не была сохранена.', 'return_to_posts' => 'Вернуться к списку записей' ], 'categories' => [ 'list_title' => 'Управление категориями блога', 'new_category' => 'Новая категория', 'uncategorized' => 'Без категории' ], 'category' => [ 'name' => 'Название', 'name_placeholder' => 'Новое имя категории', 'description' => 'Описание', 'slug' => 'URL адрес', 'slug_placeholder' => 'new-category-slug', 'posts' => 'Записи', 'delete_confirm' => 'Вы действительно хотите удалить эту категорию?', 'delete_success' => 'Эти категории успешно удалены.', 'return_to_categories' => 'Вернуться к списку категорий', 'reorder' => 'Порядок категорий' ], 'menuitem' => [ 'blog_category' => 'Категория блога', 'all_blog_categories' => 'Все категории блога', 'blog_post' => 'Запись блога', 'all_blog_posts' => 'Все записи блога', 'category_blog_posts' => 'Записи категории блога' ], 'settings' => [ 'category_title' => 'Список категорий блога', 'category_description' => 'Отображает список категорий на странице.', 'category_slug' => 'Параметр URL', 'category_slug_description' => 'Параметр маршрута, используемый для поиска в текущей категории по URL. Это свойство используется по умолчанию компонентом Фрагменты для маркировки активной категории.', 'category_display_empty' => 'Пустые категории', 'category_display_empty_description' => 'Отображать категории, которые не имеют записей.', 'category_page' => 'Страница категорий', 'category_page_description' => 'Название страницы категорий. 
Это свойство используется по умолчанию компонентом Фрагменты.', 'post_title' => 'Запись блога', 'post_description' => 'Отображение записи блога', 'post_slug' => 'Параметр URL', 'post_slug_description' => 'Параметр маршрута, необходимый для выбора конкретной записи.', 'post_category' => 'Страница категорий', 'post_category_description' => 'Название страницы категорий. Это свойство используется по умолчанию компонентом Фрагменты.', 'posts_title' => 'Список записей блога', 'posts_description' => 'Отображает список последних записей блога на странице.', 'posts_pagination' => 'Параметр постраничной навигации', 'posts_pagination_description' => 'Параметр, необходимый для постраничной навигации.', 'posts_filter' => 'Фильтр категорий', 'posts_filter_description' => 'Введите URL категории или параметр URL-адреса для фильтрации записей. Оставьте пустым, чтобы посмотреть все записи.', 'posts_per_page' => 'Записей на странице', 'posts_per_page_validation' => 'Недопустимый Формат. Ожидаемый тип данных - действительное число.', 'posts_no_posts' => 'Отсутствие записей', 'posts_no_posts_description' => 'Сообщение, отображаемое в блоге, если отсутствуют записи. Это свойство используется по умолчанию компонентом Фрагменты.', 'posts_no_posts_default' => 'Записей не найдено', 'posts_order' => 'Сортировка', 'posts_order_description' => 'Атрибут, по которому будут сортироваться записи.', 'posts_category' => 'Страница категорий', 'posts_category_description' => 'Название категории на странице записи "размещена в категории". Это свойство используется по умолчанию компонентом Фрагменты.', 'posts_post' => 'Страница записи', 'posts_post_description' => 'Название страницы для ссылки "подробнее". 
Это свойство используется по умолчанию компонентом Фрагменты.', 'posts_except_post' => 'Кроме записи', 'posts_except_post_description' => 'Введите ID/URL или переменную с ID/URL записи, которую вы хотите исключить', 'posts_except_categories' => 'Кроме категорий', 'posts_except_categories_description' => 'Введите разделенный запятыми список URL категорий или переменную со списком категорий, которые вы хотите исключить', 'rssfeed_blog' => 'Страница блога', 'rssfeed_blog_description' => 'Имя основного файла страницы блога для генерации ссылок. Это свойство используется по умолчанию компонентом Фрагменты.', 'rssfeed_title' => 'RSS Feed', 'rssfeed_description' => 'Создает RSS-канал, содержащий записи из блога.', 'group_links' => 'Ссылки', 'group_exceptions' => 'Исключения' ], 'sorting' => [ 'title_asc' => 'Заголовок (по возрастанию)', 'title_desc' => 'Заголовок (по убыванию)', 'created_asc' => 'Создано (по возрастанию)', 'created_desc' => 'Создано (по убыванию)', 'updated_asc' => 'Обновлено (по возрастанию)', 'updated_desc' => 'Обновлено (по убыванию)', 'published_asc' => 'Опубликовано (по возрастанию)', 'published_desc' => 'Опубликовано (по убыванию)', 'random' => 'Случайно' ], 'import' => [ 'update_existing_label' => 'Обновить существующие записи', 'update_existing_comment' => 'Установите этот флажок, чтобы обновлять записи имеющие одинаковый ID, title или URL.', 'auto_create_categories_label' => 'Создать категории указанные в импортируемом файле', 'auto_create_categories_comment' => 'Вы должны сопоставить столбец Категории, чтобы использовать эту функцию. 
В противном случае выберите для назначения категорию по умолчанию из пунктов ниже.', 'categories_label' => 'Категории', 'categories_comment' => 'Выберите категории, к которым будут принадлежать импортированные записи (необязательно).', 'default_author_label' => 'Автор записи по умолчанию (необязательно)', 'default_author_comment' => 'Импорт попытается использовать существующего автора, если он соответствуете столбцу Email автора, в противном случае используется указанный выше автор.', 'default_author_placeholder' => '-- выберите автора --' ] ];
{ "pile_set_name": "Github" }
<div class="column"> {{ sylius_template_event('sylius.shop.layout.header.content') }} </div>
{ "pile_set_name": "Github" }
package de.danoeh.antennapod.core.feed.util;

import de.danoeh.antennapod.core.asynctask.ImageResource;
import de.danoeh.antennapod.core.feed.FeedItem;
import de.danoeh.antennapod.core.feed.FeedMedia;
import de.danoeh.antennapod.core.preferences.UserPreferences;

/**
 * Utility class to use the appropriate image resource based on {@link UserPreferences}.
 */
public final class ImageResourceUtils {

    /** Utility class; not instantiable. */
    private ImageResourceUtils() {
    }

    /**
     * Returns the image location to use for {@code resource}: the episode's own
     * image when the user preference requests episode covers, otherwise the
     * image of the show the resource belongs to.
     *
     * @param resource the resource whose image location is requested
     * @return the resolved image location, or {@code null} if none is available
     */
    public static String getImageLocation(ImageResource resource) {
        return UserPreferences.getUseEpisodeCoverSetting()
                ? resource.getImageLocation()
                : getShowImageLocation(resource);
    }

    /**
     * Resolves the image of the show (feed) that {@code resource} belongs to.
     * Falls back to the resource's own image location for resource types other
     * than {@link FeedItem} and {@link FeedMedia}; returns {@code null} when
     * the owning feed cannot be determined.
     */
    private static String getShowImageLocation(ImageResource resource) {
        FeedItem item;
        if (resource instanceof FeedItem) {
            item = (FeedItem) resource;
        } else if (resource instanceof FeedMedia) {
            item = ((FeedMedia) resource).getItem();
        } else {
            // Not an item or its media: there is no feed to consult.
            return resource.getImageLocation();
        }
        if (item == null || item.getFeed() == null) {
            return null;
        }
        return item.getFeed().getImageLocation();
    }
}
{ "pile_set_name": "Github" }
// (C) Copyright John Maddock 2007. // Use, modification and distribution are subject to the // Boost Software License, Version 1.0. (See accompanying file // LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) // // This file is machine generated, do not edit by hand // Polynomial evaluation using Horners rule #ifndef BOOST_MATH_TOOLS_POLY_RAT_5_HPP #define BOOST_MATH_TOOLS_POLY_RAT_5_HPP namespace boost{ namespace math{ namespace tools{ namespace detail{ template <class T, class U, class V> inline V evaluate_rational_c_imp(const T*, const U*, const V&, const mpl::int_<0>*) { return static_cast<V>(0); } template <class T, class U, class V> inline V evaluate_rational_c_imp(const T* a, const U* b, const V&, const mpl::int_<1>*) { return static_cast<V>(a[0]) / static_cast<V>(b[0]); } template <class T, class U, class V> inline V evaluate_rational_c_imp(const T* a, const U* b, const V& x, const mpl::int_<2>*) { if(x <= 1) return static_cast<V>((a[1] * x + a[0]) / (b[1] * x + b[0])); else { V z = 1 / x; return static_cast<V>((a[0] * z + a[1]) / (b[0] * z + b[1])); } } template <class T, class U, class V> inline V evaluate_rational_c_imp(const T* a, const U* b, const V& x, const mpl::int_<3>*) { if(x <= 1) return static_cast<V>(((a[2] * x + a[1]) * x + a[0]) / ((b[2] * x + b[1]) * x + b[0])); else { V z = 1 / x; return static_cast<V>(((a[0] * z + a[1]) * z + a[2]) / ((b[0] * z + b[1]) * z + b[2])); } } template <class T, class U, class V> inline V evaluate_rational_c_imp(const T* a, const U* b, const V& x, const mpl::int_<4>*) { if(x <= 1) return static_cast<V>((((a[3] * x + a[2]) * x + a[1]) * x + a[0]) / (((b[3] * x + b[2]) * x + b[1]) * x + b[0])); else { V z = 1 / x; return static_cast<V>((((a[0] * z + a[1]) * z + a[2]) * z + a[3]) / (((b[0] * z + b[1]) * z + b[2]) * z + b[3])); } } template <class T, class U, class V> inline V evaluate_rational_c_imp(const T* a, const U* b, const V& x, const mpl::int_<5>*) { if(x <= 1) return static_cast<V>(((((a[4] * 
x + a[3]) * x + a[2]) * x + a[1]) * x + a[0]) / ((((b[4] * x + b[3]) * x + b[2]) * x + b[1]) * x + b[0])); else { V z = 1 / x; return static_cast<V>(((((a[0] * z + a[1]) * z + a[2]) * z + a[3]) * z + a[4]) / ((((b[0] * z + b[1]) * z + b[2]) * z + b[3]) * z + b[4])); } } }}}} // namespaces #endif // include guard
{ "pile_set_name": "Github" }
//===-- llvm/CodeGen/RegAllocRegistry.h -------------------------*- C++ -*-===// // // The LLVM Compiler Infrastructure // // This file is distributed under the University of Illinois Open Source // License. See LICENSE.TXT for details. // //===----------------------------------------------------------------------===// // // This file contains the implementation for register allocator function // pass registry (RegisterRegAlloc). // //===----------------------------------------------------------------------===// #ifndef LLVM_CODEGENREGALLOCREGISTRY_H #define LLVM_CODEGENREGALLOCREGISTRY_H #include "llvm/CodeGen/MachinePassRegistry.h" namespace llvm { //===----------------------------------------------------------------------===// /// /// RegisterRegAlloc class - Track the registration of register allocators. /// //===----------------------------------------------------------------------===// class RegisterRegAlloc : public MachinePassRegistryNode { public: typedef FunctionPass *(*FunctionPassCtor)(); static MachinePassRegistry Registry; RegisterRegAlloc(const char *N, const char *D, FunctionPassCtor C) : MachinePassRegistryNode(N, D, (MachinePassCtor)C) { Registry.Add(this); } ~RegisterRegAlloc() { Registry.Remove(this); } // Accessors. // RegisterRegAlloc *getNext() const { return (RegisterRegAlloc *)MachinePassRegistryNode::getNext(); } static RegisterRegAlloc *getList() { return (RegisterRegAlloc *)Registry.getList(); } static FunctionPassCtor getDefault() { return (FunctionPassCtor)Registry.getDefault(); } static void setDefault(FunctionPassCtor C) { Registry.setDefault((MachinePassCtor)C); } static void setListener(MachinePassRegistryListener *L) { Registry.setListener(L); } }; } // end namespace llvm #endif
{ "pile_set_name": "Github" }
/*
 * Copyright 2015 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.dataflow.core.dsl;

import java.util.List;

/**
 * AST node for a complete stream definition: an optional name, an optional
 * source destination, a pipeline of apps, and an optional sink destination.
 *
 * @author Andy Clement
 */
public class StreamNode extends AstNode {

	protected final String streamText;

	protected final String streamName;

	protected final List<AppNode> appNodes;

	protected SourceDestinationNode sourceDestinationNode;

	protected SinkDestinationNode sinkDestinationNode;

	public StreamNode(String streamText, String streamName, List<AppNode> appNodes,
			SourceDestinationNode sourceDestinationNode, SinkDestinationNode sinkDestinationNode) {
		super(appNodes.get(0).getStartPos(), appNodes.get(appNodes.size() - 1).getEndPos());
		this.streamText = streamText;
		this.streamName = streamName;
		this.appNodes = appNodes;
		this.sourceDestinationNode = sourceDestinationNode;
		this.sinkDestinationNode = sinkDestinationNode;
	}

	/**
	 * Renders the stream as {@code [name = source apps sink]}, delegating to each
	 * child node's own {@code stringify}.
	 */
	@Override
	public String stringify(boolean includePositionalInfo) {
		StringBuilder out = new StringBuilder("[");
		if (getStreamName() != null) {
			out.append(getStreamName()).append(" = ");
		}
		if (sourceDestinationNode != null) {
			out.append(sourceDestinationNode.stringify(includePositionalInfo));
		}
		for (AppNode appNode : appNodes) {
			out.append(appNode.stringify(includePositionalInfo));
		}
		if (sinkDestinationNode != null) {
			out.append(sinkDestinationNode.stringify(includePositionalInfo));
		}
		return out.append("]").toString();
	}

	/**
	 * Reconstructs the DSL text of the stream, joining apps with {@code " || "}
	 * when the preceding app is an unbound stream app and {@code " | "} otherwise.
	 */
	@Override
	public String toString() {
		StringBuilder out = new StringBuilder();
		if (getStreamName() != null) {
			out.append(getStreamName()).append(" = ");
		}
		if (sourceDestinationNode != null) {
			out.append(sourceDestinationNode.toString());
		}
		int lastIndex = appNodes.size() - 1;
		for (int i = 0; i <= lastIndex; i++) {
			AppNode appNode = appNodes.get(i);
			out.append(appNode.toString());
			if (i < lastIndex) {
				// Separator style depends on the app *before* the separator.
				out.append(appNode.isUnboundStreamApp() ? " || " : " | ");
			}
		}
		if (sinkDestinationNode != null) {
			out.append(sinkDestinationNode.toString());
		}
		return out.toString();
	}

	public List<AppNode> getAppNodes() {
		return appNodes;
	}

	public SourceDestinationNode getSourceDestinationNode() {
		return sourceDestinationNode;
	}

	public SinkDestinationNode getSinkDestinationNode() {
		return sinkDestinationNode;
	}

	public String getStreamName() {
		return streamName;
	}

	/**
	 * Find the first reference to the named app in the stream. If the same app is
	 * referred to multiple times the secondary references cannot be accessed via this
	 * method.
	 *
	 * @param appName the name of the app
	 * @return the first occurrence of the named app in the stream, or {@code null}
	 */
	public AppNode getApp(String appName) {
		return appNodes.stream()
				.filter(appNode -> appNode.getName().equals(appName))
				.findFirst()
				.orElse(null);
	}

	/**
	 * Returns the pipeline index of the first app whose label matches the given
	 * name, or {@code -1} when no app matches.
	 */
	public int getIndexOfLabel(String labelOrAppName) {
		int index = 0;
		for (AppNode appNode : appNodes) {
			if (appNode.getLabelName().equals(labelOrAppName)) {
				return index;
			}
			index++;
		}
		return -1;
	}

	/** Alias for {@link #toString()}: the reconstructed stream DSL text. */
	public String getStreamData() {
		return toString();
	}

	/** The original DSL text this node was parsed from. */
	public String getStreamText() {
		return this.streamText;
	}

	/** The stream's name (may be {@code null} for unnamed streams). */
	public String getName() {
		return this.streamName;
	}
}
{ "pile_set_name": "Github" }
//------------------------------------------------------------------------------ // <auto-generated> // This code was generated by a tool. // // Changes to this file may cause incorrect behavior and will be lost if // the code is regenerated. // </auto-generated> //------------------------------------------------------------------------------ namespace Aspose.Cells.GridWeb.Examples.CSharp { public partial class Site1 { /// <summary> /// HeaderContent control. /// </summary> /// <remarks> /// Auto-generated field. /// To modify move field declaration from designer file to code-behind file. /// </remarks> protected global::System.Web.UI.WebControls.ContentPlaceHolder HeaderContent; /// <summary> /// form1 control. /// </summary> /// <remarks> /// Auto-generated field. /// To modify move field declaration from designer file to code-behind file. /// </remarks> protected global::System.Web.UI.HtmlControls.HtmlForm form1; /// <summary> /// MainContent control. /// </summary> /// <remarks> /// Auto-generated field. /// To modify move field declaration from designer file to code-behind file. /// </remarks> protected global::System.Web.UI.WebControls.ContentPlaceHolder MainContent; } }
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8"?> <!-- Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --> <plugin xmlns="http://apache.org/cordova/ns/plugins/1.0" id="org.apache.cordova.console" version="0.2.11"> <name>Console</name> <description>Cordova Console Plugin</description> <license>Apache 2.0</license> <keywords>cordova,console</keywords> <repo>https://git-wip-us.apache.org/repos/asf/cordova-plugin-console.git</repo> <issue>https://issues.apache.org/jira/browse/CB/component/12320644</issue> <!-- ios --> <platform name="ios"> <config-file target="config.xml" parent="/*"> <feature name="Console"> <param name="ios-package" value="CDVLogger"/> </feature> </config-file> <js-module src="www/console-via-logger.js" name="console"> <clobbers target="console" /> </js-module> <js-module src="www/logger.js" name="logger"> <clobbers target="cordova.logger" /> </js-module> <header-file src="src/ios/CDVLogger.h" /> <source-file src="src/ios/CDVLogger.m" /> </platform> <!-- ubuntu --> <platform name="ubuntu"> <js-module src="www/console-via-logger.js" name="console"> <clobbers target="console" /> </js-module> <js-module src="www/logger.js" name="logger"> <clobbers target="cordova.logger" /> </js-module> <header-file src="src/ubuntu/console.h" /> <source-file src="src/ubuntu/console.cpp" /> 
</platform> <!-- wp7 --> <platform name="wp7"> <config-file target="config.xml" parent="/*"> <feature name="Console"> <param name="wp-package" value="DebugConsole"/> </feature> </config-file> <js-module src="www/console-via-logger.js" name="console"> <clobbers target="console" /> </js-module> <js-module src="www/logger.js" name="logger"> <clobbers target="cordova.logger" /> </js-module> <source-file src="src/wp/DebugConsole.cs" /> </platform> <!-- wp8 --> <platform name="wp8"> <config-file target="config.xml" parent="/*"> <feature name="Console"> <param name="wp-package" value="DebugConsole"/> </feature> </config-file> <js-module src="www/console-via-logger.js" name="console"> <clobbers target="console" /> </js-module> <js-module src="www/logger.js" name="logger"> <clobbers target="cordova.logger" /> </js-module> <source-file src="src/wp/DebugConsole.cs" /> </platform> <!-- windows8 --> <platform name="windows8"> <js-module src="www/logger.js" name="logger"> <clobbers target="cordova.logger" /> </js-module> <js-module src="www/console-via-logger.js" name="console"> <clobbers target="console" /> </js-module> </platform> </plugin>
{ "pile_set_name": "Github" }
<?xml version='1.0' encoding='utf-8'?> <document xmlns:cache="https://code.dccouncil.us/schemas/cache" xmlns:citations="https://code.dccouncil.us/schemas/citations" xmlns:codified="https://code.dccouncil.us/schemas/codified" xmlns:codify="https://code.dccouncil.us/schemas/codify" xmlns:macro="https://code.dccouncil.us/schemas/macro" xmlns:xi="http://www.w3.org/2001/XInclude" xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns="https://code.dccouncil.us/schemas/dc-library" id="D.C. Law 23-88"> <num type="law">23-88</num> <num type="bill">23-228</num> <num type="act">23-256</num> <heading type="short">Legitimate Theater Sidewalk Café Authorization Amendment Act of 2020</heading> <heading type="long">To amend the District of Columbia Public Space Rental Act to authorize the use of certain public space by a legitimate theater as a sidewalk café; and to amend Chapter 3 of Title 24 of the District of Columbia Municipal Regulations to allow a legitimate theater to operate a sidewalk café, and reconcile the general requirements for a sidewalk café permit and the application procedures for a sidewalk café permit.</heading> <meta> <introduced>2019-04-01</introduced> <effective>2020-05-06</effective> <citations> <citation type="law" url="http://lims.dccouncil.us/Download/42204/B23-0228-SignedAct.pdf">D.C. Law 23-88</citation> <citation type="act" url="http://lims.dccouncil.us/Download/42204/B23-0228-SignedAct.pdf">D.C. Act 23-256</citation> <citation type="register">67 DCR 3516</citation> </citations> <history url="http://lims.dccouncil.us/Legislation/B23-0228"> <vote date="2020-02-04" reading="First"/> <vote date="2020-03-03" reading="Final"/> <enacted>2020-03-17</enacted> <summary>BILL SUMMARY - As introduced it requires that the Department of Transportation review applications of a Legitimate Theater for a sidewalk café' and requires that they notify the Department of Consumer and Regulatory Affairs of its recommendation. 
Upon an affirmative recommendation the Department of Consumer and Regulatory Affairs must issue a Sidewalk café' Permit and Certificate of Use to the applicant.</summary> <committee>Committee on Transportation and the Environment and Committee of the Whole</committee> </history> </meta> <text>BE IT ENACTED BY THE COUNCIL OF THE DISTRICT OF COLUMBIA, That this act may be cited as the "Legitimate Theater Sidewalk Café Authorization Amendment Act of 2020".</text> <section> <num>2</num> <text>The District of Columbia Public Space Rental Act, approved October 17, 1968 (82 Stat. 1156; D.C. Official Code § 10-1101.01 <em>et seq.</em>), is amended by adding a new section 201b to read as follows:</text> <include> <section> <codify:insert doc="D.C. Code" path="10|11|I|B" after="§10-1102.01a" num-value="10-1102.01b" history-prefix="as added"/> <num>201b</num> <heading>Legitimate theater sidewalk café authorization.</heading> <para> <num>(a)</num> <text>The Mayor shall allow the use by a legitimate theater of public space abutting the legitimate theater as a sidewalk café; provided, that the applicant:</text> <para> <num>(1)</num> <text>Meets the administrative procedures for a sidewalk café as set forth in Chapter 3 of Title 24 of the District of Columbia Municipal Regulations (24 DCMR § 300 <em>et seq.</em>); and</text> </para> <para> <num>(2)</num> <text>Obtains the necessary licenses and license endorsements required by the Alcoholic Beverage Control Board to sell, serve, or permit the consumption of alcoholic beverages in a sidewalk café pursuant to <code-cite doc="D.C. Code" path="§25-113a|(c)">D.C. 
Official Code § 25-113a(c)</code-cite>.</text> </para> </para> <para> <num>(b)</num> <text>For the purposes of this section, the term:</text> <para> <num>(1)</num> <text>"Legitimate theater" shall have the same meaning as in section 399.1 of Title 24 of the District of Columbia Municipal Regulations (24 DCMR § 399.1).</text> </para> <para> <num>(2)</num> <text>"Sidewalk café" shall have the same meaning as in section 399.1 of Title 24 of the District of Columbia Municipal Regulations (24 DCMR § 399.1).</text> </para> </para> <annotation doc="Pub. L. 90-596" type="History" path="§201b">Oct. 17, 1968, 82 Stat. 1156, Pub. L. 90-596, title II, § 201b</annotation> </section> </include> <aftertext>.</aftertext> </section> <section> <num>3</num> <text>Chapter 3 of Title 24 of the District of Columbia Municipal Regulations (24 DCMR § 300 <em>et seq.</em>), is amended as follows:</text> <para> <num>(a)</num> <text>Section 301.3 is amended by striking the phrase "restaurant, grocery store, brewery, winery, or distillery" both times it appears and inserting the phrase "legitimate theater, restaurant, distillery, brewery, winery, grocery store, fast food establishment, or prepared food shop" in its place.</text> </para> <para> <num>(b)</num> <text>Section 303.13(h) is amended by striking the phrase "abutting restaurant" and inserting the phrase "abutting legitimate theater, restaurant," in its place.</text> </para> <para> <num>(c)</num> <text>Section 399.1 is amended by adding a new definition to read as follows:</text> <include> <text>Legitimate theater - a building, or a part of a building, that is designed and used for the presentation of live plays and other forms of dramatic performance. 
The facility typically has a stage or other performing area plus tiers of seats for the audience, or other arrangements for the audience to sit or stand to view the performance.</text> </include> <aftertext>.</aftertext> </para> </section> <section> <num>4</num> <heading>Fiscal impact statement.</heading> <text>The Council adopts the fiscal impact statement in the committee report as the fiscal impact statement required by section 4a of the General Legislative Procedures Act of 1975, approved October 16, 2006 (120 Stat. 2038; D.C. Official Code § 1-301.47a).</text> </section> <section> <num>5</num> <heading>Effective date.</heading> <text> <ul proof="needed"> <li>This act shall take effect following approval by the Mayor (or in the event of veto by the Mayor, action by the Council to override the veto), a 30-day period of congressional review as provided in section 602(c)(1) of the District of Columbia Home Rule Act, approved December 24, 1973 (87 Stat. 813; D.C. Official Code § 1-206.02(c)(1)), and publication in the District of Columbia Register.</li> </ul> </text> </section> </document>
{ "pile_set_name": "Github" }
# - Try to find Simmetrix SimModSuite # Once done this will define # SIMMODSUITE_FOUND - System has SimModSuite # SIMMODSUITE_INCLUDE_DIR - The SimModSuite include directories # SIMMODSUITE_LIBS - The libraries needed to use SimModSuite # SIMMODSUITE_<library>_FOUND - System has <library> # SIMMODSUITE_MAJOR_VERSION - the leading integer of the version string # SIMMODSUITE_MINOR_VERSION - the date code from the version string # # Based on input variables: # SIM_MPI # SIMMETRIX_LIB_DIR # SIMMETRIX_INCLUDE_DIR # And environment variable: # CMAKE_PREFIX_PATH # # This implementation assumes a simmetrix install has the following structure # VERSION/ # include/*.h # lib/ARCHOS/*.a set(SIM_MPI "" CACHE STRING "MPI implementation used for SimPartitionWrapper") if(SIM_MPI MATCHES "^$") message(FATAL_ERROR "SIM_MPI is not defined... libSimPartitionWrapper-$SIM_MPI.a should exist in the SimModSuite lib directory") endif() macro(simLibCheck libs isRequired) foreach(lib ${libs}) unset(simlib CACHE) find_library(simlib "${lib}" PATHS ${SIMMETRIX_LIB_DIR}) if(simlib MATCHES "^simlib-NOTFOUND$") if(${isRequired}) message(FATAL_ERROR "simmetrix library ${lib} not found in ${SIMMETRIX_LIB_DIR}") else() message("simmetrix library ${lib} not found in ${SIMMETRIX_LIB_DIR}") endif() else() set("SIMMODSUITE_${lib}_FOUND" TRUE CACHE INTERNAL "SimModSuite library present") set(SIMMODSUITE_LIBS ${SIMMODSUITE_LIBS} ${simlib}) endif() endforeach() endmacro(simLibCheck) macro(getSimCadLib searchPath libName lib check) file(GLOB cadLib RELATIVE ${searchPath}/ ${searchPath}/lib${libName}*) if( check AND NOT cadLib ) message(FATAL_ERROR "lib${libName} not found") endif() set(${lib} "${cadLib}") endmacro(getSimCadLib) find_path(SIMMODSUITE_INCLUDE_DIR NAMES SimUtil.h SimError.h SimModel.h PATHS ${SIMMETRIX_INCLUDE_DIR}) if(NOT EXISTS "${SIMMODSUITE_INCLUDE_DIR}") message(FATAL_ERROR "simmetrix include dir not found") endif() string(REGEX REPLACE "/include$" "" SIMMODSUITE_INSTALL_DIR 
"${SIMMODSUITE_INCLUDE_DIR}") string(REGEX MATCH "[0-9]+.[0-9]+-[0-9]+" SIM_VERSION "${SIMMODSUITE_INCLUDE_DIR}") #VERSION_LESS and VERSION_GREATER need '.' delimited version strings. string(REGEX REPLACE "([0-9]+.[0-9]+)-([0-9]+)" "\\1.\\2" SIM_DOT_VERSION "${SIM_VERSION}") string(REGEX REPLACE "([0-9]+).([0-9]+)-([0-9]+)" "\\1" SIMMODSUITE_MAJOR_VERSION "${SIM_VERSION}") string(REGEX REPLACE "([0-9]+).([0-9]+)-([0-9]+)" "\\3" SIMMODSUITE_MINOR_VERSION "${SIM_VERSION}") set(MIN_VALID_SIM_VERSION 12.0.190225) set(MAX_VALID_SIM_VERSION 15.0.200714) if( ${SKIP_SIMMETRIX_VERSION_CHECK} ) message(STATUS "Skipping Simmetrix SimModSuite version check." " This may result in undefined behavior") elseif( (SIM_DOT_VERSION VERSION_LESS MIN_VALID_SIM_VERSION) OR (SIM_DOT_VERSION VERSION_GREATER MAX_VALID_SIM_VERSION) ) MESSAGE(FATAL_ERROR "invalid Simmetrix version: ${SIM_DOT_VERSION}, \ valid versions are ${MIN_VALID_SIM_VERSION} to ${MAX_VALID_SIM_VERSION}") endif() message(STATUS "Building with SimModSuite ${SIM_DOT_VERSION}") set(SIMMODSUITE_LIBS "") set(SIM_BOOTSTRAP_LIB_NAME SimPartitionedMesh-mpi) simLibCheck("${SIM_BOOTSTRAP_LIB_NAME}" TRUE) string(FIND "${SIMMODSUITE_LIBS}" "/lib/" archStart) string(FIND "${SIMMODSUITE_LIBS}" "/libSim" archEnd) math(EXPR archStart "${archStart}+5") math(EXPR len "${archEnd}-${archStart}") string(SUBSTRING "${SIMMODSUITE_LIBS}" "${archStart}" "${len}" SIM_ARCHOS) message(STATUS "SIM_ARCHOS ${SIM_ARCHOS}") option(SIM_PARASOLID "Use Parasolid through Simmetrix" OFF) if (SIM_PARASOLID) set(MIN_SIM_PARASOLID_VERSION 290) set(MAX_SIM_PARASOLID_VERSION 310) foreach(version RANGE ${MAX_SIM_PARASOLID_VERSION} ${MIN_SIM_PARASOLID_VERSION} -10) set(SIM_PARASOLID_VERSION ${version}) getSimCadLib("${SIMMODSUITE_INSTALL_DIR}/lib/${SIM_ARCHOS}" SimParasolid${SIM_PARASOLID_VERSION} simParaLib FALSE) if(simParaLib) break() endif() endforeach() if(NOT simParaLib) message(FATAL_ERROR "libSimParasolid<#>.a " 
"${MIN_SIM_PARASOLID_VERSION}-${MAX_SIM_PARASOLID_VERSION} " "not found - check the version installed with SimModSuite") endif() set(SIM_CAD_LIB_NAMES ${simParaLib} pskernel) endif() option(SIM_ACIS "Use Acis through Simmetrix" OFF) if (SIM_ACIS) getSimCadLib("${SIMMODSUITE_INSTALL_DIR}/lib/${SIM_ARCHOS}" SimAcis simAcisLib TRUE) set(SIM_CAD_LIB_NAMES ${simAcisLib} ${SIM_CAD_LIB_NAMES} SpaACIS) endif() option(SIM_DISCRETE "Use Simmetrix discrete modeling" ON) if (SIM_DISCRETE) set(SIM_CAD_LIB_NAMES SimDiscrete ${SIM_CAD_LIB_NAMES}) endif() simLibCheck("${SIM_CAD_LIB_NAMES}" TRUE) set(SIM_OPT_LIB_NAMES SimField SimAdvMeshing) simLibCheck("${SIM_OPT_LIB_NAMES}" FALSE) set(SIM_CORE_LIB_NAMES SimPartitionedMesh-mpi SimMeshing SimMeshTools SimModel SimPartitionWrapper-${SIM_MPI}) simLibCheck("${SIM_CORE_LIB_NAMES}" TRUE) if (UNIX AND NOT APPLE) find_package(Threads REQUIRED) set(SIMMODSUITE_LIBS ${SIMMODSUITE_LIBS} ${CMAKE_THREAD_LIBS_INIT}) endif() include(FindPackageHandleStandardArgs) # handle the QUIETLY and REQUIRED arguments and set SIMMODSUITE_FOUND to TRUE # if all listed variables are TRUE find_package_handle_standard_args(SIMMODSUITE DEFAULT_MSG SIMMODSUITE_LIBS SIMMODSUITE_INCLUDE_DIR SIMMODSUITE_MAJOR_VERSION SIMMODSUITE_MINOR_VERSION) mark_as_advanced(SIMMODSUITE_INCLUDE_DIR SIMMODSUITE_LIBS SIMMODSUITE_MAJOR_VERSION SIMMODSUITE_MINOR_VERSION)
{ "pile_set_name": "Github" }
// SPDX-License-Identifier: LGPL-2.1+ /* * Copyright (C) 2018 Red Hat, Inc. */ #include "nm-default.h" #include "nm-glib-aux/nm-logging-fwd.h" /*****************************************************************************/ gboolean _nm_log_enabled_impl (gboolean mt_require_locking, NMLogLevel level, NMLogDomain domain) { return FALSE; } void _nm_log_impl (const char *file, guint line, const char *func, gboolean mt_require_locking, NMLogLevel level, NMLogDomain domain, int error, const char *ifname, const char *con_uuid, const char *fmt, ...) { } void _nm_utils_monotonic_timestamp_initialized (const struct timespec *tp, gint64 offset_sec, gboolean is_boottime) { }
{ "pile_set_name": "Github" }
// 代码地址: https://github.com/CoderMJLee/MJRefresh // 代码地址: http://code4app.com/ios/%E5%BF%AB%E9%80%9F%E9%9B%86%E6%88%90%E4%B8%8B%E6%8B%89%E4%B8%8A%E6%8B%89%E5%88%B7%E6%96%B0/52326ce26803fabc46000000 #import <UIKit/UIKit.h> #import <objc/message.h> // 弱引用 #define MJWeakSelf __weak typeof(self) weakSelf = self; // 日志输出 #ifdef DEBUG #define MJRefreshLog(...) NSLog(__VA_ARGS__) #else #define MJRefreshLog(...) #endif // 过期提醒 #define MJRefreshDeprecated(instead) NS_DEPRECATED(2_0, 2_0, 2_0, 2_0, instead) // 运行时objc_msgSend #define MJRefreshMsgSend(...) ((void (*)(void *, SEL, UIView *))objc_msgSend)(__VA_ARGS__) #define MJRefreshMsgTarget(target) (__bridge void *)(target) // RGB颜色 #define MJRefreshColor(r, g, b) [UIColor colorWithRed:(r)/255.0 green:(g)/255.0 blue:(b)/255.0 alpha:1.0] // 文字颜色 #define MJRefreshLabelTextColor MJRefreshColor(90, 90, 90) // 字体大小 #define MJRefreshLabelFont [UIFont boldSystemFontOfSize:14] // 常量 UIKIT_EXTERN const CGFloat MJRefreshLabelLeftInset; UIKIT_EXTERN const CGFloat MJRefreshHeaderHeight; UIKIT_EXTERN const CGFloat MJRefreshFooterHeight; UIKIT_EXTERN const CGFloat MJRefreshFastAnimationDuration; UIKIT_EXTERN const CGFloat MJRefreshSlowAnimationDuration; UIKIT_EXTERN NSString *const MJRefreshKeyPathContentOffset; UIKIT_EXTERN NSString *const MJRefreshKeyPathContentSize; UIKIT_EXTERN NSString *const MJRefreshKeyPathContentInset; UIKIT_EXTERN NSString *const MJRefreshKeyPathPanState; UIKIT_EXTERN NSString *const MJRefreshHeaderLastUpdatedTimeKey; UIKIT_EXTERN NSString *const MJRefreshHeaderIdleText; UIKIT_EXTERN NSString *const MJRefreshHeaderPullingText; UIKIT_EXTERN NSString *const MJRefreshHeaderRefreshingText; UIKIT_EXTERN NSString *const MJRefreshAutoFooterIdleText; UIKIT_EXTERN NSString *const MJRefreshAutoFooterRefreshingText; UIKIT_EXTERN NSString *const MJRefreshAutoFooterNoMoreDataText; UIKIT_EXTERN NSString *const MJRefreshBackFooterIdleText; UIKIT_EXTERN NSString *const MJRefreshBackFooterPullingText; UIKIT_EXTERN NSString 
*const MJRefreshBackFooterRefreshingText; UIKIT_EXTERN NSString *const MJRefreshBackFooterNoMoreDataText; UIKIT_EXTERN NSString *const MJRefreshHeaderLastTimeText; UIKIT_EXTERN NSString *const MJRefreshHeaderDateTodayText; UIKIT_EXTERN NSString *const MJRefreshHeaderNoneLastDateText; // 状态检查 #define MJRefreshCheckState \ MJRefreshState oldState = self.state; \ if (state == oldState) return; \ [super setState:state]; // 异步主线程执行,不强持有Self #define MJRefreshDispatchAsyncOnMainQueue(x) \ __weak typeof(self) weakSelf = self; \ dispatch_async(dispatch_get_main_queue(), ^{ \ typeof(weakSelf) self = weakSelf; \ {x} \ });
{ "pile_set_name": "Github" }
This Makefile requires devkitARM (http://www.devkitpro.org/category/devkitarm/) and works inside "contrib/nds". It is based on a devkitARM template. Eduardo Costa <eduardo.m.costa@gmail.com> January 3, 2009
{ "pile_set_name": "Github" }
//------------------------------------------------------------------------------ /* This file is part of Beast: https://github.com/vinniefalco/Beast Copyright 2013, Vinnie Falco <vinnie.falco@gmail.com> Portions of this file are from JUCE. Copyright (c) 2013 - Raw Material Software Ltd. Please visit http://www.juce.com Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL , DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */ //============================================================================== namespace beast { #if BEAST_DEBUG struct DanglingStreamChecker { DanglingStreamChecker() {} ~DanglingStreamChecker() { /* It's always a bad idea to leak any object, but if you're leaking output streams, then there's a good chance that you're failing to flush a file to disk properly, which could result in corrupted data and other similar nastiness.. 
*/ bassert (activeStreams.size() == 0); } Array<void*, CriticalSection> activeStreams; }; static DanglingStreamChecker danglingStreamChecker; #endif //============================================================================== OutputStream::OutputStream() : newLineString (NewLine::getDefault()) { #if BEAST_DEBUG danglingStreamChecker.activeStreams.add (this); #endif } OutputStream::~OutputStream() { #if BEAST_DEBUG danglingStreamChecker.activeStreams.removeFirstMatchingValue (this); #endif } //============================================================================== bool OutputStream::writeBool (const bool b) { return writeByte (b ? (char) 1 : (char) 0); } bool OutputStream::writeByte (char byte) { return write (&byte, 1); } bool OutputStream::writeRepeatedByte (std::uint8_t byte, size_t numTimesToRepeat) { for (size_t i = 0; i < numTimesToRepeat; ++i) if (! writeByte ((char) byte)) return false; return true; } bool OutputStream::writeShort (short value) { const unsigned short v = ByteOrder::swapIfBigEndian ((unsigned short) value); return write (&v, 2); } bool OutputStream::writeShortBigEndian (short value) { const unsigned short v = ByteOrder::swapIfLittleEndian ((unsigned short) value); return write (&v, 2); } bool OutputStream::writeInt32 (std::int32_t value) { static_bassert (sizeof (std::int32_t) == 4); const unsigned int v = ByteOrder::swapIfBigEndian ((std::uint32_t) value); return write (&v, 4); } bool OutputStream::writeInt (int value) { static_bassert (sizeof (int) == 4); const unsigned int v = ByteOrder::swapIfBigEndian ((unsigned int) value); return write (&v, 4); } bool OutputStream::writeInt32BigEndian (int value) { static_bassert (sizeof (std::int32_t) == 4); const std::uint32_t v = ByteOrder::swapIfLittleEndian ((std::uint32_t) value); return write (&v, 4); } bool OutputStream::writeIntBigEndian (int value) { static_bassert (sizeof (int) == 4); const unsigned int v = ByteOrder::swapIfLittleEndian ((unsigned int) value); return write (&v, 
4); } bool OutputStream::writeCompressedInt (int value) { unsigned int un = (value < 0) ? (unsigned int) -value : (unsigned int) value; std::uint8_t data[5]; int num = 0; while (un > 0) { data[++num] = (std::uint8_t) un; un >>= 8; } data[0] = (std::uint8_t) num; if (value < 0) data[0] |= 0x80; return write (data, (size_t) num + 1); } bool OutputStream::writeInt64 (std::int64_t value) { const std::uint64_t v = ByteOrder::swapIfBigEndian ((std::uint64_t) value); return write (&v, 8); } bool OutputStream::writeInt64BigEndian (std::int64_t value) { const std::uint64_t v = ByteOrder::swapIfLittleEndian ((std::uint64_t) value); return write (&v, 8); } bool OutputStream::writeFloat (float value) { union { int asInt; float asFloat; } n; n.asFloat = value; return writeInt (n.asInt); } bool OutputStream::writeFloatBigEndian (float value) { union { int asInt; float asFloat; } n; n.asFloat = value; return writeIntBigEndian (n.asInt); } bool OutputStream::writeDouble (double value) { union { std::int64_t asInt; double asDouble; } n; n.asDouble = value; return writeInt64 (n.asInt); } bool OutputStream::writeDoubleBigEndian (double value) { union { std::int64_t asInt; double asDouble; } n; n.asDouble = value; return writeInt64BigEndian (n.asInt); } bool OutputStream::writeString (const String& text) { // (This avoids using toUTF8() to prevent the memory bloat that it would leave behind // if lots of large, persistent strings were to be written to streams). const size_t numBytes = text.getNumBytesAsUTF8() + 1; HeapBlock<char> temp (numBytes); text.copyToUTF8 (temp, numBytes); return write (temp, numBytes); } bool OutputStream::writeText (const String& text, const bool asUTF16, const bool writeUTF16ByteOrderMark) { if (asUTF16) { if (writeUTF16ByteOrderMark) write ("\x0ff\x0fe", 2); String::CharPointerType src (text.getCharPointer()); bool lastCharWasReturn = false; for (;;) { const beast_wchar c = src.getAndAdvance(); if (c == 0) break; if (c == '\n' && ! 
lastCharWasReturn) writeShort ((short) '\r'); lastCharWasReturn = (c == L'\r'); if (! writeShort ((short) c)) return false; } } else { const char* src = text.toUTF8(); const char* t = src; for (;;) { if (*t == '\n') { if (t > src) if (! write (src, (size_t) (t - src))) return false; if (! write ("\r\n", 2)) return false; src = t + 1; } else if (*t == '\r') { if (t[1] == '\n') ++t; } else if (*t == 0) { if (t > src) if (! write (src, (size_t) (t - src))) return false; break; } ++t; } } return true; } int OutputStream::writeFromInputStream (InputStream& source, std::int64_t numBytesToWrite) { if (numBytesToWrite < 0) numBytesToWrite = std::numeric_limits<std::int64_t>::max(); int numWritten = 0; while (numBytesToWrite > 0) { char buffer [8192]; const int num = source.read (buffer, (int) bmin (numBytesToWrite, (std::int64_t) sizeof (buffer))); if (num <= 0) break; write (buffer, (size_t) num); numBytesToWrite -= num; numWritten += num; } return numWritten; } //============================================================================== void OutputStream::setNewLineString (const String& newLineString_) { newLineString = newLineString_; } //============================================================================== OutputStream& operator<< (OutputStream& stream, const int number) { return stream << String (number); } OutputStream& operator<< (OutputStream& stream, const std::int64_t number) { return stream << String (number); } OutputStream& operator<< (OutputStream& stream, const double number) { return stream << String (number); } OutputStream& operator<< (OutputStream& stream, const char character) { stream.writeByte (character); return stream; } OutputStream& operator<< (OutputStream& stream, const char* const text) { stream.write (text, strlen (text)); return stream; } OutputStream& operator<< (OutputStream& stream, const MemoryBlock& data) { if (data.getSize() > 0) stream.write (data.getData(), data.getSize()); return stream; } OutputStream& operator<< 
(OutputStream& stream, const File& fileToRead) { FileInputStream in (fileToRead); if (in.openedOk()) return stream << in; return stream; } OutputStream& operator<< (OutputStream& stream, InputStream& streamToRead) { stream.writeFromInputStream (streamToRead, -1); return stream; } OutputStream& operator<< (OutputStream& stream, const NewLine&) { return stream << stream.getNewLineString(); } //------------------------------------------------------------------------------ // Unfortunately, putting these in the header causes duplicate // definition linker errors, even with the inline keyword! template <> bool OutputStream::writeType <char> (char v) { return writeByte (v); } template <> bool OutputStream::writeType <short> (short v) { return writeShort (v); } template <> bool OutputStream::writeType <std::int32_t> (std::int32_t v) { return writeInt32 (v); } template <> bool OutputStream::writeType <std::int64_t> (std::int64_t v) { return writeInt64 (v); } template <> bool OutputStream::writeType <unsigned char> (unsigned char v) { return writeByte (static_cast <char> (v)); } template <> bool OutputStream::writeType <unsigned short> (unsigned short v) { return writeShort (static_cast <short> (v)); } template <> bool OutputStream::writeType <std::uint32_t> (std::uint32_t v) { return writeInt32 (static_cast <std::int32_t> (v)); } template <> bool OutputStream::writeType <std::uint64_t> (std::uint64_t v) { return writeInt64 (static_cast <std::int64_t> (v)); } template <> bool OutputStream::writeType <float> (float v) { return writeFloat (v); } template <> bool OutputStream::writeType <double> (double v) { return writeDouble (v); } //------------------------------------------------------------------------------ template <> bool OutputStream::writeTypeBigEndian <char> (char v) { return writeByte (v); } template <> bool OutputStream::writeTypeBigEndian <short> (short v) { return writeShortBigEndian (v); } template <> bool OutputStream::writeTypeBigEndian <std::int32_t> 
(std::int32_t v) { return writeInt32BigEndian (v); } template <> bool OutputStream::writeTypeBigEndian <std::int64_t> (std::int64_t v) { return writeInt64BigEndian (v); } template <> bool OutputStream::writeTypeBigEndian <unsigned char> (unsigned char v) { return writeByte (static_cast <char> (v)); } template <> bool OutputStream::writeTypeBigEndian <unsigned short> (unsigned short v) { return writeShortBigEndian (static_cast <short> (v)); } template <> bool OutputStream::writeTypeBigEndian <std::uint32_t> (std::uint32_t v) { return writeInt32BigEndian (static_cast <std::int32_t> (v)); } template <> bool OutputStream::writeTypeBigEndian <std::uint64_t> (std::uint64_t v) { return writeInt64BigEndian (static_cast <std::int64_t> (v)); } template <> bool OutputStream::writeTypeBigEndian <float> (float v) { return writeFloatBigEndian (v); } template <> bool OutputStream::writeTypeBigEndian <double> (double v) { return writeDoubleBigEndian (v); } OutputStream& operator<< (OutputStream& stream, const String& text) { const size_t numBytes = text.getNumBytesAsUTF8(); #if (BEAST_STRING_UTF_TYPE == 8) stream.write (text.getCharPointer().getAddress(), numBytes); #else // (This avoids using toUTF8() to prevent the memory bloat that it would leave behind // if lots of large, persistent strings were to be written to streams). HeapBlock<char> temp (numBytes + 1); CharPointer_UTF8 (temp).writeAll (text.getCharPointer()); stream.write (temp, numBytes); #endif return stream; } } // beast
{ "pile_set_name": "Github" }
import { CosmosDBManagementClient } from "azure-arm-cosmosdb"; import { DatabaseAccount } from "azure-arm-cosmosdb/lib/models"; import { ServiceClientCredentials } from "ms-rest"; import { SubscriptionItem, ResourceGroupItem } from "../azure-auth/azureAuth"; import * as path from "path"; import { SubscriptionError, AuthorizationError, DeploymentError, } from "../../errors"; import { ResourceManagementClient, ResourceManagementModels } from "azure-arm-resource/lib/resource/resourceManagementClient"; import { ResourceManager } from "../azure-arm/resourceManager"; import { ARMFileHelper } from "../azure-arm/armFileHelper"; import { CONSTANTS } from "../../constants/constants"; import fs = require("fs-extra"); import { ConnectionString } from "../utils/connectionString"; import { Controller } from "../../controller"; import { MESSAGES } from "../../constants/messages"; export interface CosmosDBSelections { cosmosDBResourceName: string; location: string; cosmosAPI: API; subscriptionItem: SubscriptionItem; resourceGroupItem: ResourceGroupItem; } /* * Database Object - tuple to return to caller */ export interface DatabaseObject { databaseAccount: DatabaseAccount; connectionString: string; } /* * Azure Cosmos DB for Mongo API | Gremlin | Azure Table | Core (SQL) | Cassandra */ export type API = "MongoDB" | "Graph" | "Table" | "SQL" | "Cassandra"; /* * Implemented API selections * value: the API which should be returned as selection * label: String to display to user */ export interface APIObject { value: API; label: string; } const COSMOS_DEPLOYMENT_SUFFIX = "-cosmos"; /* * Returns an array of available/implemented APIObjects for cosmos */ export function GetAvailableAPIs(): APIObject[] { return [ { value: "MongoDB", label: "Azure Cosmos DB for MongoDB API" }, { value: "Graph", label: "Gremlin (graph)" }, { value: "Table", label: "Azure Table" }, { value: "SQL", label: "Core (SQL)" }, { value: "Cassandra", label: "Cassandra" } ]; } /* * ARM template definitions for Cosmos 
APIs */ interface APIdefinition { readonly kind: string; readonly defaultExperience: string; readonly capabilities: any[]; } export class CosmosDBDeploy { /* * Map of Cosmos API type to its definitions for ARM templates */ private APIdefinitionMap = new Map<API, APIdefinition>([ [ "MongoDB", { kind: "MongoDB", defaultExperience: "Azure Cosmos DB for MongoDB API", capabilities: [] } ], [ "Graph", { kind: "GlobalDocumentDB", defaultExperience: "Gremlin (graph)", capabilities: [{ name: "EnableGremlin" }] } ], [ "Table", { kind: "GlobalDocumentDB", defaultExperience: "Azure Table", capabilities: [{ name: "EnableTable" }] } ], [ "SQL", { kind: "GlobalDocumentDB", defaultExperience: "Core (SQL)", capabilities: [] } ], [ "Cassandra", { kind: "GlobalDocumentDB", defaultExperience: "Cassandra", capabilities: [{ name: "EnableCassandra" }] } ] ]); private SubscriptionItemCosmosClient: CosmosDBManagementClient | undefined; public async createCosmosDB( userCosmosDBSelection: CosmosDBSelections, genPath: string ): Promise<DatabaseObject> { /* * Create Cosmos Client with users credentials and selected subscription * */ let userSubscriptionItem: SubscriptionItem; try { userSubscriptionItem = userCosmosDBSelection.subscriptionItem; this.setCosmosClient(userSubscriptionItem); } catch (error) { throw new AuthorizationError(error.message); } const resourceGroup = userCosmosDBSelection.resourceGroupItem.name; const databaseName = userCosmosDBSelection.cosmosDBResourceName; const location = userCosmosDBSelection.location; const experience = userCosmosDBSelection.cosmosAPI; const template = JSON.parse( fs.readFileSync( path.join( Controller.vsContext.extensionPath, "src", "azure", "azure-cosmosDB", "arm-templates", "template.json" ), "utf8" ) ); const parameters = JSON.parse( fs.readFileSync( path.join( Controller.vsContext.extensionPath, "src", "azure", "azure-cosmosDB", "arm-templates", "parameters.json" ), "utf8" ) ); const definitions: APIdefinition = 
this.APIdefinitionMap.get(experience)!; parameters.parameters = { name: { value: databaseName }, location: { value: location .split(" ") .join("") .toLowerCase() }, locationName: { value: location }, defaultExperience: { value: definitions.defaultExperience }, capabilities: { value: definitions.capabilities }, kind: { value: definitions.kind } }; const deploymentParams = parameters.parameters; const options: ResourceManagementModels.Deployment = { properties: { mode: "Incremental", parameters: deploymentParams, template: template } }; try { if (this.SubscriptionItemCosmosClient === undefined) { throw new AuthorizationError( MESSAGES.ERRORS.COSMOS_CLIENT_NOT_DEFINED ); } const azureResourceClient: ResourceManagementClient = new ResourceManager().getResourceManagementClient( userSubscriptionItem ); ARMFileHelper.createDirIfNonExistent(path.join(genPath, "arm-templates")); ARMFileHelper.writeObjectToJsonFile( path.join(genPath, "arm-templates", "cosmos-template.json"), template ); ARMFileHelper.writeObjectToJsonFile( path.join(genPath, "arm-templates", "cosmos-parameters.json"), parameters ); /* * Cosmos Client to generate a cosmos DB resource using resource group name, database name, and options * */ await azureResourceClient.deployments.createOrUpdate( resourceGroup, databaseName + COSMOS_DEPLOYMENT_SUFFIX, options ); const databaseAccount: DatabaseAccount = await this.SubscriptionItemCosmosClient.databaseAccounts.get( resourceGroup, databaseName ); const connectionString = await this.getConnectionString( this.SubscriptionItemCosmosClient, resourceGroup, databaseName ); /* * Returning a tuple which includes databaseAccount from callback and its connection string */ const db: DatabaseObject = { databaseAccount, connectionString }; return db; } catch (error) { throw new DeploymentError(error.message); } } /* * Set internal cosmos client using a user's selected subscription item */ private setCosmosClient(userSubscriptionItem: SubscriptionItem): void { if ( 
this.SubscriptionItemCosmosClient === undefined || this.SubscriptionItemCosmosClient.subscriptionId !== userSubscriptionItem.subscriptionId ) { this.SubscriptionItemCosmosClient = this.createCosmosClient( userSubscriptionItem ); } } private createCosmosClient( userSubscriptionItem: SubscriptionItem ): CosmosDBManagementClient { const userCredentials: ServiceClientCredentials = userSubscriptionItem.session.credentials; if ( userSubscriptionItem === undefined || userSubscriptionItem.subscription === undefined || userSubscriptionItem.subscriptionId === undefined ) { throw new SubscriptionError(MESSAGES.ERRORS.SUBSCRIPTION_NOT_DEFINED); } return new CosmosDBManagementClient( userCredentials, userSubscriptionItem.subscriptionId, userSubscriptionItem.session.environment.resourceManagerEndpointUrl ); } /* * Validating the given string name is unique * @return Return `undefined`, `null`, or the empty string when 'value' is valid and string message when 'value' is not valid. */ public async validateCosmosDBAccountName( name: string, userSubscriptionItem: SubscriptionItem ): Promise<string | undefined> { this.setCosmosClient(userSubscriptionItem); return await this.validateUniqueCosmosDBAccountName(name); } /* * Validating the given string name is unique * @return Return `undefined`, `null`, or the empty string when 'value' is valid and string message when 'value' is not valid. */ private async validateUniqueCosmosDBAccountName( name: string ): Promise<string | undefined> { if (this.SubscriptionItemCosmosClient === undefined) { throw new AuthorizationError(MESSAGES.ERRORS.COSMOS_CLIENT_NOT_DEFINED); } name = name ? 
name.trim() : ""; const min = CONSTANTS.COSMOS_DB_NAME.MIN_LENGTH; const max = CONSTANTS.COSMOS_DB_NAME.MAX_LENGTH; if (name.length < min || name.length > max) { return MESSAGES.ERRORS.NAME_MIN_MAX(min, max); } else if (name.match(/[^a-z0-9-]/)) { return MESSAGES.ERRORS.COSMOS_VALID_CHARACTERS; } else if ( await this.SubscriptionItemCosmosClient.databaseAccounts.checkNameExists( name ) ) { return MESSAGES.ERRORS.COSMOS_ACCOUNT_NOT_AVAILABLE(name); } else { return undefined; } } /* * Returns Azure Cosmos DB connection string for user's deployed database instance. * This is what the user will use to connect to the database. * * Overload on getConnectionString; one for providing creating the Cosmos Client */ public async getConnectionString( userSubscriptionItem: SubscriptionItem, resourceGroup: string, dataBaseName: string ): Promise<string>; public async getConnectionString( cosmosDBManagementClient: CosmosDBManagementClient, resourceGroup: string, dataBaseName: string ): Promise<string>; public async getConnectionString( cosmosClientOrSubscriptionItem: CosmosDBManagementClient | SubscriptionItem, resourceGroup: string, dataBaseName: string ): Promise<string> { let cosmosClient: CosmosDBManagementClient; if (cosmosClientOrSubscriptionItem instanceof CosmosDBManagementClient) { cosmosClient = cosmosClientOrSubscriptionItem; } else { try { cosmosClient = this.createCosmosClient(cosmosClientOrSubscriptionItem); } catch (error) { throw new AuthorizationError( MESSAGES.ERRORS.CONNECTION_STRING_FAILED + error.message ); } } const result = await cosmosClient.databaseAccounts.listConnectionStrings( resourceGroup, dataBaseName ); return result!.connectionStrings![0].connectionString!; } public static updateConnectionStringInEnvFile( filePath: string, connectionString: string ): void { /** * Updates .env file in generated project directory once the connection string is received. 
* Throws an error if the user deleted the project directory * @filePath: path of .env file */ let envText; if (ConnectionString.isCosmosSQLConnectionString(connectionString)) { const sqlData = ConnectionString.getConnectionStringSqlData(connectionString); envText = `${CONSTANTS.COSMOSDB_SQL.URI}=${sqlData.origin} ${CONSTANTS.COSMOSDB_SQL.PRIMARY_KEY}=${sqlData.primaryKey}`; } else { const mongoData = ConnectionString.getConnectionStringMongoData(connectionString); envText = `${CONSTANTS.COSMOSDB_MONGO.CONNSTR}=${mongoData.origin}/${mongoData.username} ${CONSTANTS.COSMOSDB_MONGO.USER}=${mongoData.username} ${CONSTANTS.COSMOSDB_MONGO.PASSWORD}=${mongoData.password}`; } const envPath = path.join(filePath, "backend", ".env"); try { if (fs.existsSync(filePath)) { fs.writeFileSync(envPath, envText); } } catch (err) { throw new Error(err); } } public static updateConnectionStringInAppSettingsFile( filePath: string, connectionString: string ): void { try { const appSettingsPath = path.join(filePath, "backend", "appsettings.json"); const appsettings = fs.readJSONSync(appSettingsPath); if (ConnectionString.isCosmosSQLConnectionString(connectionString)) { const sqlData = ConnectionString.getConnectionStringSqlData(connectionString); appsettings.COSMOSDB_URI = sqlData.origin; appsettings.COSMOSDB_PRIMARY_KEY = sqlData.primaryKey; } else { const mongoData = ConnectionString.getConnectionStringMongoData(connectionString); appsettings.COSMOSDB_CONNSTR = `${mongoData.origin}/${mongoData.username}`; appsettings.COSMOSDB_USER = mongoData.username; appsettings.COSMOSDB_PASSWORD = mongoData.password; } fs.writeJSONSync(appSettingsPath, appsettings, { spaces: 2 }); } catch (err) { throw new Error(err); } } }
{ "pile_set_name": "Github" }
/* ***** BEGIN LICENSE BLOCK ***** * Distributed under the BSD license: * * Copyright (c) 2010, Ajax.org B.V. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of Ajax.org B.V. nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL AJAX.ORG B.V. BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
* * ***** END LICENSE BLOCK ***** */ if (typeof process !== "undefined") require("amd-loader"); define(function(require, exports, module) { "use strict"; var PythonMode = require("../python").Mode; var EditSession = require("../../edit_session").EditSession; var assert = require("../../test/assertions"); module.exports = { "test: bracket folding": function() { var session = new EditSession([ '[ ', 'stuff', ']', '[ ', '{ ', '[ #-' ]); var mode = new PythonMode(); session.setFoldStyle("markbeginend"); session.setMode(mode); assert.equal(session.getFoldWidget(0), "start"); assert.equal(session.getFoldWidget(1), ""); assert.equal(session.getFoldWidget(2), ""); assert.equal(session.getFoldWidget(3), "start"); assert.equal(session.getFoldWidget(4), "start"); assert.equal(session.getFoldWidget(5), ""); assert.range(session.getFoldWidgetRange(0), 0, 1, 2, 0); assert.equal(session.getFoldWidgetRange(3), null); assert.equal(session.getFoldWidgetRange(5), null); }, "test: indentation folding": function() { var session = new EditSession([ 'def a: #', '', ' b:', ' c', ' ', ' c', '', ' ', '' ]); var mode = new PythonMode(); session.setFoldStyle("markbeginend"); session.setMode(mode); assert.equal(session.getFoldWidget(0), "start"); assert.equal(session.getFoldWidget(1), ""); assert.equal(session.getFoldWidget(2), "start"); assert.range(session.getFoldWidgetRange(0), 0, 6, 5, 3); assert.range(session.getFoldWidgetRange(2), 2, 3, 5, 3); } }; }); if (typeof module !== "undefined" && module === require.main) require("asyncjs").test.testcase(module.exports).exec();
{ "pile_set_name": "Github" }
/*
 * Build metadata for this component, consumed by the surrounding build
 * system's preprocessor pass (not by the C compiler at runtime).
 */
/* Name of the subroutine this module provides. */
#define SUBROUTINE add
/* Source file(s) making up the module. */
#define MODULE_LIST add.c
/* Marker flags: part of the P3 sub-library, implemented in C. */
#define P3_SUBLIB
#define USES_C
{ "pile_set_name": "Github" }
package com.roncoo.education.user.service.api.pc.biz; import java.math.BigDecimal; import java.util.regex.Pattern; import com.roncoo.education.user.common.req.*; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import org.springframework.transaction.annotation.Transactional; import org.springframework.util.StringUtils; import com.roncoo.education.user.common.resq.LecturerAuditPageRESQ; import com.roncoo.education.user.common.resq.LecturerAuditViewRESQ; import com.roncoo.education.user.common.resq.LecturerExtViewRESQ; import com.roncoo.education.user.service.dao.LecturerAuditDao; import com.roncoo.education.user.service.dao.LecturerDao; import com.roncoo.education.user.service.dao.LecturerExtDao; import com.roncoo.education.user.service.dao.UserDao; import com.roncoo.education.user.service.dao.UserExtDao; import com.roncoo.education.user.service.dao.impl.mapper.entity.Lecturer; import com.roncoo.education.user.service.dao.impl.mapper.entity.LecturerAudit; import com.roncoo.education.user.service.dao.impl.mapper.entity.LecturerAuditExample; import com.roncoo.education.user.service.dao.impl.mapper.entity.LecturerAuditExample.Criteria; import com.roncoo.education.user.service.dao.impl.mapper.entity.LecturerExt; import com.roncoo.education.user.service.dao.impl.mapper.entity.User; import com.roncoo.education.user.service.dao.impl.mapper.entity.UserExt; import com.roncoo.education.util.base.BaseBiz; import com.roncoo.education.util.base.BaseException; import com.roncoo.education.util.base.Page; import com.roncoo.education.util.base.PageUtil; import com.roncoo.education.util.base.Result; import com.roncoo.education.util.enums.AuditStatusEnum; import com.roncoo.education.util.enums.ResultEnum; import com.roncoo.education.util.enums.UserTypeEnum; import com.roncoo.education.util.tools.BeanUtil; import com.roncoo.education.util.tools.NOUtil; import com.roncoo.education.util.tools.SignUtil; import 
com.roncoo.education.util.tools.StrUtil; import com.xiaoleilu.hutool.crypto.DigestUtil; import com.xiaoleilu.hutool.util.ObjectUtil; @Component public class PcApiLecturerAuditBiz extends BaseBiz { @Autowired private LecturerAuditDao lecturerAuditDao; @Autowired private LecturerDao lecturerDao; @Autowired private LecturerExtDao lecturerExtDao; @Autowired private UserDao userDao; @Autowired private UserExtDao userExtDao; public Result<Page<LecturerAuditPageRESQ>> listForPage(LecturerAuditPageREQ req) { LecturerAuditExample example = new LecturerAuditExample(); Criteria c = example.createCriteria(); if (StringUtils.hasText(req.getLecturerMobile())) { c.andLecturerMobileEqualTo(req.getLecturerMobile()); } if (StringUtils.hasText(req.getLecturerName())) { c.andLecturerNameLike(PageUtil.rightLike(req.getLecturerName())); } if (req.getAuditStatus() != null) { c.andAuditStatusEqualTo(req.getAuditStatus()); } else { c.andAuditStatusNotEqualTo(AuditStatusEnum.SUCCESS.getCode()); } if (req.getStatusId() != null) { c.andStatusIdEqualTo(req.getStatusId()); } example.setOrderByClause(" audit_status asc, status_id desc, sort desc, id desc "); Page<LecturerAudit> page = lecturerAuditDao.listForPage(req.getPageCurrent(), req.getPageSize(), example); return Result.success(PageUtil.transform(page, LecturerAuditPageRESQ.class)); } /** * 添加讲师 * * @param req * @return */ @Transactional public Result<Integer> save(LecturerAuditSaveREQ req) { if (StringUtils.isEmpty(req.getLecturerMobile())) { return Result.error("手机号不能为空"); } // 手机号去空处理 String mobile = req.getLecturerMobile().trim(); // 手机号码校验 if (!Pattern.compile(REGEX_MOBILE).matcher(mobile).matches()) { return Result.error("手机号码格式不正确"); } // 根据传入手机号获取用户信息(讲师的用户信息) UserExt userExt = userExtDao.getByMobile(mobile); // 1、用户不存在,注册用户 if (ObjectUtil.isNull(userExt)) { if (StringUtils.isEmpty(req.getMobilePsw())) { return Result.error("密码不能为空"); } if (!req.getConfirmPasswd().equals(req.getMobilePsw())) { return Result.error("两次密码不一致,请重试!"); 
} // 注册用户 userExt = register(req, mobile); } // 2、添加讲师 LecturerAudit lecturerAudit = lecturerAuditDao.getByLecturerUserNo(userExt.getUserNo()); // 校验讲师是否存在 if (ObjectUtil.isNotNull(lecturerAudit)) { // 讲师存在 if (AuditStatusEnum.SUCCESS.getCode().equals(lecturerAudit.getAuditStatus())) { // 审核成功 return Result.error(ResultEnum.LECTURER_REQUISITION_YET); } else if (AuditStatusEnum.WAIT.getCode().equals(lecturerAudit.getAuditStatus())) { // 待审核 return Result.error(ResultEnum.LECTURER_REQUISITION_WAIT); } else { return Result.error(ResultEnum.LECTURER_REQUISITION_FAIL); } } else { // 讲师不存在 int results = lecturerInfo(req, userExt); if (results < 0) { return Result.error(ResultEnum.USER_SAVE_FAIL); } return Result.success(results); } } /** * 讲师审核修改 * * @param req * @return */ public Result<Integer> update(LecturerAuditUpdateREQ req) { if (StringUtils.isEmpty(req.getId())) { return Result.error("ID不能为空"); } LecturerAudit lecturerAudit = lecturerAuditDao.getById(req.getId()); if (ObjectUtil.isNull(lecturerAudit)) { return Result.error("找不到讲师信息"); } LecturerAudit record = BeanUtil.copyProperties(req, LecturerAudit.class); record.setAuditStatus(AuditStatusEnum.WAIT.getCode()); int results = lecturerAuditDao.updateById(record); if (results < 0) { return Result.error(ResultEnum.USER_UPDATE_FAIL); } return Result.success(results); } @Transactional public Result<Integer> audit(LecturerAuditAuditREQ req) { if (StringUtils.isEmpty(req.getId())) { return Result.error("ID不能为空"); } if (StringUtils.isEmpty(req.getAuditStatus())) { return Result.error("auditStatus不能为空"); } LecturerAudit lecturerAudit = lecturerAuditDao.getById(req.getId()); if (ObjectUtil.isNull(lecturerAudit)) { return Result.error("找不到讲师信息"); } if (AuditStatusEnum.SUCCESS.getCode().equals(req.getAuditStatus())) { // 查找讲师信息表,是否存在该讲师 Lecturer lecturer = lecturerDao.getByLecturerUserNo(lecturerAudit.getLecturerUserNo()); if (ObjectUtil.isNull(lecturer)) { // 插入 lecturer = BeanUtil.copyProperties(lecturerAudit, 
Lecturer.class); lecturer.setGmtCreate(null); lecturer.setGmtModified(null); lecturerDao.save(lecturer); } else { // 更新 lecturer = BeanUtil.copyProperties(lecturerAudit, Lecturer.class); lecturer.setGmtCreate(null); lecturer.setGmtModified(null); lecturerDao.updateById(lecturer); } // 查找用户信息是否存在 UserExt userExt = userExtDao.getByUserNo(lecturer.getLecturerUserNo()); if (ObjectUtil.isNull(userExt)) { return Result.error("获取不到用户信息"); } // 存在更新为讲师类型 userExt.setUserType(UserTypeEnum.LECTURER.getCode()); userExtDao.updateById(userExt); } LecturerAudit record = BeanUtil.copyProperties(req, LecturerAudit.class); int results = lecturerAuditDao.updateById(record); if (results < 0) { return Result.error(ResultEnum.USER_LECTURER_AUDIT); } return Result.success(results); } public Result<LecturerAuditViewRESQ> view(LecturerAuditViewREQ req) { if (StringUtils.isEmpty(req.getId())) { return Result.error("ID不能为空"); } LecturerAudit record = lecturerAuditDao.getById(req.getId()); if (ObjectUtil.isNull(record)) { return Result.error("找不到讲师编号"); } LecturerAuditViewRESQ resq = BeanUtil.copyProperties(record, LecturerAuditViewRESQ.class); // 查找讲师账户信息 LecturerExt lecturerExt = lecturerExtDao.getByLecturerUserNo(resq.getLecturerUserNo()); resq.setLecturerExt(BeanUtil.copyProperties(lecturerExt, LecturerExtViewRESQ.class)); return Result.success(resq); } /** * 添加用户信息 */ private UserExt register(LecturerAuditSaveREQ req, String mobile) { // 用户基本信息 User user = new User(); user.setUserNo(NOUtil.getUserNo()); user.setMobile(mobile); user.setMobileSalt(StrUtil.get32UUID()); user.setMobilePsw(DigestUtil.sha1Hex(user.getMobileSalt() + req.getMobilePsw())); userDao.save(user); // 用户教育信息 UserExt userExt = new UserExt(); userExt.setUserNo(user.getUserNo()); userExt.setMobile(user.getMobile()); userExt.setNickname(req.getLecturerName()); userExtDao.save(userExt); return userExt; } /** * 添加讲师信息 */ private int lecturerInfo(LecturerAuditSaveREQ req, UserExt userExt) { // 插入讲师信息 LecturerAudit infoAudit = 
BeanUtil.copyProperties(req, LecturerAudit.class); if (!StringUtils.isEmpty(userExt.getHeadImgUrl())) { infoAudit.setHeadImgUrl(userExt.getHeadImgUrl()); } infoAudit.setLecturerUserNo(userExt.getUserNo()); infoAudit.setLecturerProportion(LECTURER_DEFAULT_PROPORTION);// 设置讲师默认分成百分之70 int infoAuditNum = lecturerAuditDao.save(infoAudit); if (infoAuditNum < 1) { throw new BaseException("讲师信息表新增失败"); } // 插入讲师账户 LecturerExt lecturerExt = new LecturerExt(); lecturerExt.setLecturerUserNo(infoAudit.getLecturerUserNo()); lecturerExt.setTotalIncome(BigDecimal.ZERO); lecturerExt.setHistoryMoney(BigDecimal.ZERO); lecturerExt.setEnableBalances(BigDecimal.ZERO); lecturerExt.setFreezeBalances(BigDecimal.ZERO); lecturerExt.setSign(SignUtil.getByLecturer(lecturerExt.getTotalIncome(), lecturerExt.getHistoryMoney(), lecturerExt.getEnableBalances(), lecturerExt.getFreezeBalances())); int lecturerExtNum = lecturerExtDao.save(lecturerExt); if (lecturerExtNum < 1) { throw new BaseException("讲师账户表新增失败"); } return lecturerExtNum; } public Result<Integer> check(LecturerAuditCheckMobileREQ req) { if (StringUtils.isEmpty(req.getLecturerMobile())) { return Result.error("手机号不能为空"); } // 手机号去空处理 String mobile = req.getLecturerMobile().trim(); // 手机号码校验 if (!Pattern.compile(REGEX_MOBILE).matcher(mobile).matches()) { return Result.error("手机号码格式不正确"); } // 根据传入手机号获取用户信息(讲师的用户信息) UserExt userExt = userExtDao.getByMobile(mobile); // 1、用户不存在,注册用户 if (ObjectUtil.isNull(userExt)) { return Result.success(501); } // 2、添加讲师 LecturerAudit lecturerAudit = lecturerAuditDao.getByLecturerUserNo(userExt.getUserNo()); // 校验讲师是否存在 if (ObjectUtil.isNotNull(lecturerAudit)) { // 讲师存在 if (AuditStatusEnum.SUCCESS.getCode().equals(lecturerAudit.getAuditStatus())) { // 审核成功 return Result.success(503); } else if (AuditStatusEnum.WAIT.getCode().equals(lecturerAudit.getAuditStatus())) { // 待审核 return Result.success(502); } else { return Result.success(506); } } // 讲师不存在 return Result.success(1); } }
{ "pile_set_name": "Github" }
ROLE_SYSTEM_DOCUMENT READONLY FOCUSABLE ++IA2_ROLE_FORM ++++ROLE_SYSTEM_TEXT FOCUSABLE ++++ROLE_SYSTEM_PUSHBUTTON name='Submit' FOCUSABLE
{ "pile_set_name": "Github" }
#!/bin/sh
# SPDX-License-Identifier: GPL-2.0
#
# Detect Qt for 'make xconfig' via pkg-config and print the build
# variables (cflags, libs, moc). Prefers Qt 5, falls back to Qt 4.

PKG="Qt5Core Qt5Gui Qt5Widgets"
PKG2="QtCore QtGui"

# pkg-config itself is a hard requirement.
if [ -z "$(command -v pkg-config)" ]; then
	echo >&2 "*"
	echo >&2 "* 'make xconfig' requires 'pkg-config'. Please install it."
	echo >&2 "*"
	exit 1
fi

# Qt 5: needs C++11 and position-independent code.
if pkg-config --exists $PKG; then
	# Use $PKG here instead of repeating the module list, so the two
	# stay in sync if the list ever changes.
	echo cflags=\"-std=c++11 -fPIC $(pkg-config --cflags $PKG)\"
	echo libs=\"$(pkg-config --libs $PKG)\"
	# moc lives in Qt5's host binary directory.
	echo moc=\"$(pkg-config --variable=host_bins Qt5Core)/moc\"
	exit 0
fi

# Qt 4 fallback.
if pkg-config --exists $PKG2; then
	echo cflags=\"$(pkg-config --cflags $PKG2)\"
	echo libs=\"$(pkg-config --libs $PKG2)\"
	echo moc=\"$(pkg-config --variable=moc_location QtCore)\"
	exit 0
fi

echo >&2 "*"
echo >&2 "* Could not find Qt via pkg-config."
echo >&2 "* Please install either Qt 4.8 or 5.x. and make sure it's in PKG_CONFIG_PATH"
echo >&2 "*"
exit 1
{ "pile_set_name": "Github" }
// +build linux package mount import ( "bufio" "fmt" "io" "os" "strings" ) const ( /* 36 35 98:0 /mnt1 /mnt2 rw,noatime master:1 - ext3 /dev/root rw,errors=continue (1)(2)(3) (4) (5) (6) (7) (8) (9) (10) (11) (1) mount ID: unique identifier of the mount (may be reused after umount) (2) parent ID: ID of parent (or of self for the top of the mount tree) (3) major:minor: value of st_dev for files on filesystem (4) root: root of the mount within the filesystem (5) mount point: mount point relative to the process's root (6) mount options: per mount options (7) optional fields: zero or more fields of the form "tag[:value]" (8) separator: marks the end of the optional fields (9) filesystem type: name of filesystem of the form "type[.subtype]" (10) mount source: filesystem specific information or "none" (11) super options: per super block options*/ mountinfoFormat = "%d %d %d:%d %s %s %s %s" ) // Parse /proc/self/mountinfo because comparing Dev and ino does not work from // bind mounts func parseMountTable() ([]*Info, error) { f, err := os.Open("/proc/self/mountinfo") if err != nil { return nil, err } defer f.Close() return parseInfoFile(f) } func parseInfoFile(r io.Reader) ([]*Info, error) { var ( s = bufio.NewScanner(r) out = []*Info{} ) for s.Scan() { if err := s.Err(); err != nil { return nil, err } var ( p = &Info{} text = s.Text() optionalFields string ) if _, err := fmt.Sscanf(text, mountinfoFormat, &p.ID, &p.Parent, &p.Major, &p.Minor, &p.Root, &p.Mountpoint, &p.Opts, &optionalFields); err != nil { return nil, fmt.Errorf("Scanning '%s' failed: %s", text, err) } // Safe as mountinfo encodes mountpoints with spaces as \040. 
index := strings.Index(text, " - ") postSeparatorFields := strings.Fields(text[index+3:]) if len(postSeparatorFields) < 3 { return nil, fmt.Errorf("Error found less than 3 fields post '-' in %q", text) } if optionalFields != "-" { p.Optional = optionalFields } p.Fstype = postSeparatorFields[0] p.Source = postSeparatorFields[1] p.VfsOpts = strings.Join(postSeparatorFields[2:], " ") out = append(out, p) } return out, nil } // PidMountInfo collects the mounts for a specific process ID. If the process // ID is unknown, it is better to use `GetMounts` which will inspect // "/proc/self/mountinfo" instead. func PidMountInfo(pid int) ([]*Info, error) { f, err := os.Open(fmt.Sprintf("/proc/%d/mountinfo", pid)) if err != nil { return nil, err } defer f.Close() return parseInfoFile(f) }
{ "pile_set_name": "Github" }
;;; yaml-mode.el --- Major mode for editing YAML files ;; Copyright (C) 2006 Yoshiki Kurihara ;; Author: Yoshiki Kurihara <kurihara@cpan.org> ;; Marshall T. Vandegrift <llasram@gmail.com> ;; Keywords: data yaml ;; Version: 0.0.3 ;; This file is not part of Emacs ;; This file is free software; you can redistribute it and/or modify ;; it under the terms of the GNU General Public License as published by ;; the Free Software Foundation; either version 2, or (at your option) ;; any later version. ;; This file is distributed in the hope that it will be useful, ;; but WITHOUT ANY WARRANTY; without even the implied warranty of ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ;; GNU General Public License for more details. ;; You should have received a copy of the GNU General Public License ;; along with GNU Emacs; see the file COPYING. If not, write to ;; the Free Software Foundation, Inc., 59 Temple Place - Suite 330, ;; Boston, MA 02111-1307, USA. ;;; Commentary: ;; This is a major mode for editing files in the YAML data ;; serialization format. It was initially developed by Yoshiki ;; Kurihara and many features were added by Marshall Vandegrift. As ;; YAML and Python share the fact that indentation determines ;; structure, this mode provides indentation and indentation command ;; behavior very similar to that of python-mode. ;;; Installation: ;; To install, just drop this file into a directory in your ;; `load-path' and (optionally) byte-compile it. To automatically ;; handle files ending in '.yml', add something like: ;; ;; (require 'yaml-mode) ;; (add-to-list 'auto-mode-alist '("\\.yml$" . yaml-mode)) ;; ;; to your .emacs file. ;; ;; Unlike python-mode, this mode follows the Emacs convention of not ;; binding the ENTER key to `newline-and-indent'. 
To get this ;; behavior, add the key definition to `yaml-mode-hook': ;; ;; (add-hook 'yaml-mode-hook ;; '(lambda () ;; (define-key yaml-mode-map "\C-m" 'newline-and-indent))) ;;; Known Bugs: ;; YAML is easy to write but complex to parse, and this mode doesn't ;; even really try. Indentation and highlighting will break on ;; abnormally complicated structures. ;;; Code: ;; User definable variables (defgroup yaml nil "Support for the YAML serialization format" :group 'languages :prefix "yaml-") (defcustom yaml-mode-hook nil "*Hook run by `yaml-mode'." :type 'hook :group 'yaml) (defcustom yaml-indent-offset 2 "*Amount of offset per level of indentation." :type 'integer :group 'yaml) (defcustom yaml-backspace-function 'backward-delete-char-untabify "*Function called by `yaml-electric-backspace' when deleting backwards." :type 'function :group 'yaml) (defcustom yaml-block-literal-search-lines 100 "*Maximum number of lines to search for start of block literals." :type 'integer :group 'yaml) (defcustom yaml-block-literal-electric-alist '((?| . "") (?> . "-")) "*Characters for which to provide electric behavior. The association list key should be a key code and the associated value should be a string containing additional characters to insert when that key is pressed to begin a block literal." :type 'alist :group 'yaml) (defface yaml-tab-face '((((class color)) (:background "red" :foreground "red" :bold t)) (t (:reverse-video t))) "Face to use for highlighting tabs in YAML files." :group 'faces :group 'yaml) ;; Constants (defconst yaml-mode-version "0.0.3" "Version of `yaml-mode.'") (defconst yaml-blank-line-re "^ *$" "Regexp matching a line containing only (valid) whitespace.") (defconst yaml-comment-re "\\(#*.*\\)" "Regexp matching a line containing a YAML comment or delimiter.") (defconst yaml-directive-re "^\\(?:--- \\)? 
*%\\(\\w+\\)" "Regexp matching a line contatining a YAML directive.") (defconst yaml-document-delimiter-re "^ *\\(?:---\\|[.][.][.]\\)" "Rexexp matching a YAML document delimiter line.") (defconst yaml-node-anchor-alias-re "[&*]\\w+" "Regexp matching a YAML node anchor or alias.") (defconst yaml-tag-re "!!?[^ \n]+" "Rexexp matching a YAML tag.") (defconst yaml-bare-scalar-re "\\(?:[^-:,#!\n{\\[ ]\\|[^#!\n{\\[ ]\\S-\\)[^#\n]*?" "Rexexp matching a YAML bare scalar.") (defconst yaml-hash-key-re (concat "\\(?:^\\(?:--- \\)?\\|{\\|\\(?:[-,] +\\)+\\) *" "\\(?:" yaml-tag-re " +\\)?" "\\(" yaml-bare-scalar-re "\\) *:" "\\(?: +\\|$\\)") "Regexp matching a single YAML hash key.") (defconst yaml-scalar-context-re (concat "\\(?:^\\(?:--- \\)?\\|{\\|\\(?:[-,] +\\)+\\) *" "\\(?:" yaml-bare-scalar-re " *: \\)?") "Regexp indicating the begininng of a scalar context.") (defconst yaml-nested-map-re (concat ".*: *\\(?:&.*\\|{ *\\|" yaml-tag-re " *\\)?$") "Regexp matching a line beginning a YAML nested structure.") (defconst yaml-block-literal-base-re " *[>|][-+0-9]* *\\(?:\n\\|\\'\\)" "Regexp matching the substring start of a block literal.") (defconst yaml-block-literal-re (concat yaml-scalar-context-re "\\(?:" yaml-tag-re "\\)?" 
yaml-block-literal-base-re) "Regexp matching a line beginning a YAML block literal") (defconst yaml-nested-sequence-re (concat "^\\(?: *- +\\)+" "\\(?:" yaml-bare-scalar-re " *:\\(?: +.*\\)?\\)?$") "Regexp matching a line containing one or more nested YAML sequences") (defconst yaml-constant-scalars-re (concat "\\(?:^\\|\\(?::\\|-\\|,\\|{\\|\\[\\) +\\) *" (regexp-opt '("~" "null" "Null" "NULL" ".nan" ".NaN" ".NAN" ".inf" ".Inf" ".INF" "-.inf" "-.Inf" "-.INF" "y" "Y" "yes" "Yes" "YES" "n" "N" "no" "No" "NO" "true" "True" "TRUE" "false" "False" "FALSE" "on" "On" "ON" "off" "Off" "OFF") t) " *$") "Regexp matching certain scalar constants in scalar context") ;; Mode setup (defvar yaml-mode-map () "Keymap used in `yaml-mode' buffers.") (if yaml-mode-map nil (setq yaml-mode-map (make-sparse-keymap)) (define-key yaml-mode-map "|" 'yaml-electric-bar-and-angle) (define-key yaml-mode-map ">" 'yaml-electric-bar-and-angle) (define-key yaml-mode-map "-" 'yaml-electric-dash-and-dot) (define-key yaml-mode-map "." 'yaml-electric-dash-and-dot) (define-key yaml-mode-map [backspace] 'yaml-electric-backspace) (define-key yaml-mode-map "\C-j" 'newline-and-indent)) (defvar yaml-mode-syntax-table nil "Syntax table in use in yaml-mode buffers.") (if yaml-mode-syntax-table nil (setq yaml-mode-syntax-table (make-syntax-table)) (modify-syntax-entry ?\' "\"" yaml-mode-syntax-table) (modify-syntax-entry ?\" "\"" yaml-mode-syntax-table) (modify-syntax-entry ?# "<" yaml-mode-syntax-table) (modify-syntax-entry ?\n ">" yaml-mode-syntax-table) (modify-syntax-entry ?\\ "\\" yaml-mode-syntax-table) (modify-syntax-entry ?- "." yaml-mode-syntax-table) (modify-syntax-entry ?_ "_" yaml-mode-syntax-table) (modify-syntax-entry ?\( "." yaml-mode-syntax-table) (modify-syntax-entry ?\) "." 
yaml-mode-syntax-table) (modify-syntax-entry ?\{ "(}" yaml-mode-syntax-table) (modify-syntax-entry ?\} "){" yaml-mode-syntax-table) (modify-syntax-entry ?\[ "(]" yaml-mode-syntax-table) (modify-syntax-entry ?\] ")[" yaml-mode-syntax-table)) (define-derived-mode yaml-mode fundamental-mode "YAML" "Simple mode to edit YAML. \\{yaml-mode-map}" (set (make-local-variable 'comment-start) "# ") (set (make-local-variable 'comment-start-skip) "#+ *") (set (make-local-variable 'indent-line-function) 'yaml-indent-line) (set (make-local-variable 'font-lock-defaults) '(yaml-font-lock-keywords nil nil nil nil (font-lock-syntactic-keywords . yaml-font-lock-syntactic-keywords)))) ;; Font-lock support (defvar yaml-font-lock-keywords (list (cons yaml-comment-re '(1 font-lock-comment-face)) (cons yaml-constant-scalars-re '(1 font-lock-constant-face)) (cons yaml-tag-re '(0 font-lock-type-face)) (cons yaml-node-anchor-alias-re '(0 font-lock-function-name-face t)) (cons yaml-hash-key-re '(1 font-lock-variable-name-face t)) (cons yaml-document-delimiter-re '(0 font-lock-comment-face)) (cons yaml-directive-re '(1 font-lock-builtin-face)) '(yaml-font-lock-block-literals 0 font-lock-string-face t) '("^[\t]+" 0 'yaml-tab-face t)) "Additional expressions to highlight in YAML mode.") (defvar yaml-font-lock-syntactic-keywords (list '(yaml-syntactic-block-literals 0 "." t)) "Additional syntax features to highlight in YAML mode.") (defun yaml-font-lock-block-literals (bound) "Find lines within block literals. Find the next line of the first (if any) block literal after point and prior to BOUND. Returns the beginning and end of the block literal line in the match data, as consumed by `font-lock-keywords' matcher functions. The function begins by searching backwards to determine whether or not the current line is within a block literal. This could be time-consuming in large buffers, so the number of lines searched is artificially limitted to the value of `yaml-block-literal-search-lines'." 
(if (eolp) (goto-char (1+ (point)))) (unless (or (eobp) (>= (point) bound)) (let ((begin (point)) (end (min (1+ (point-at-eol)) bound))) (goto-char (point-at-bol)) (while (and (looking-at yaml-blank-line-re) (not (bobp))) (forward-line -1)) (let ((nlines yaml-block-literal-search-lines) (min-level (current-indentation))) (forward-line -1) (while (and (/= nlines 0) (/= min-level 0) (not (looking-at yaml-block-literal-re)) (not (bobp))) (set 'nlines (1- nlines)) (unless (looking-at yaml-blank-line-re) (set 'min-level (min min-level (current-indentation)))) (forward-line -1)) (cond ((and (< (current-indentation) min-level) (looking-at yaml-block-literal-re)) (goto-char end) (set-match-data (list begin end)) t) ((progn (goto-char begin) (re-search-forward (concat yaml-block-literal-re " *\\(.*\\)\n") bound t)) (set-match-data (nthcdr 2 (match-data))) t)))))) (defun yaml-syntactic-block-literals (bound) "Find quote characters within block literals. Finds the first quote character within a block literal (if any) after point and prior to BOUND. Returns the position of the quote character in the match data, as consumed by matcher functions in `font-lock-syntactic-keywords'. This allows the mode to treat ['\"] characters in block literals as punctuation syntax instead of string syntax, preventing unmatched quotes in block literals from painting the entire buffer in `font-lock-string-face'." (let ((found nil)) (while (and (not found) (/= (point) bound) (yaml-font-lock-block-literals bound)) (let ((begin (match-beginning 0)) (end (match-end 0))) (goto-char begin) (cond ((re-search-forward "['\"]" end t) (setq found t)) ((goto-char end))))) found)) ;; Indentation and electric keys (defun yaml-compute-indentation () "Calculate the maximum sensible indentation for the current line." 
(save-excursion (beginning-of-line) (if (looking-at yaml-document-delimiter-re) 0 (forward-line -1) (while (and (looking-at yaml-blank-line-re) (> (point) (point-min))) (forward-line -1)) (+ (current-indentation) (if (looking-at yaml-nested-map-re) yaml-indent-offset 0) (if (looking-at yaml-nested-sequence-re) yaml-indent-offset 0) (if (looking-at yaml-block-literal-re) yaml-indent-offset 0))))) (defun yaml-indent-line () "Indent the current line. The first time this command is used, the line will be indented to the maximum sensible indentation. Each immediately subsequent usage will back-dent the line by `yaml-indent-offset' spaces. On reaching column 0, it will cycle back to the maximum sensible indentation." (interactive "*") (let ((ci (current-indentation)) (cc (current-column)) (need (yaml-compute-indentation))) (save-excursion (beginning-of-line) (delete-horizontal-space) (if (and (equal last-command this-command) (/= ci 0)) (indent-to (* (/ (- ci 1) yaml-indent-offset) yaml-indent-offset)) (indent-to need))) (if (< (current-column) (current-indentation)) (forward-to-indentation 0)))) (defun yaml-electric-backspace (arg) "Delete characters or back-dent the current line. If invoked following only whitespace on a line, will back-dent to the immediately previous multiple of `yaml-indent-offset' spaces." (interactive "*p") (if (or (/= (current-indentation) (current-column)) (bolp)) (funcall yaml-backspace-function arg) (let ((ci (current-column))) (beginning-of-line) (delete-horizontal-space) (indent-to (* (/ (- ci (* arg yaml-indent-offset)) yaml-indent-offset) yaml-indent-offset))))) (defun yaml-electric-bar-and-angle (arg) "Insert the bound key and possibly begin a block literal. Inserts the bound key. If inserting the bound key causes the current line to match the initial line of a block literal, then inserts the matching string from `yaml-block-literal-electric-alist', a newline, and indents appropriately." 
(interactive "*P") (self-insert-command (prefix-numeric-value arg)) (let ((extra-chars (assoc last-command-char yaml-block-literal-electric-alist))) (cond ((and extra-chars (not arg) (eolp) (save-excursion (beginning-of-line) (looking-at yaml-block-literal-re))) (insert (cdr extra-chars)) (newline-and-indent))))) (defun yaml-electric-dash-and-dot (arg) "Insert the bound key and possibly de-dent line. Inserts the bound key. If inserting the bound key causes the current line to match a document delimiter, de-dent the line to the left margin." (interactive "*P") (self-insert-command (prefix-numeric-value arg)) (save-excursion (beginning-of-line) (if (and (not arg) (looking-at yaml-document-delimiter-re)) (delete-horizontal-space)))) (defun yaml-mode-version () "Diplay version of `yaml-mode'." (interactive) (message "yaml-mode %s" yaml-mode-version) yaml-mode-version) (provide 'yaml-mode) ;;; yaml-mode.el ends here
{ "pile_set_name": "Github" }
/* The contents of this file are subject to the Netscape Public * License Version 1.1 (the "License"); you may not use this file * except in compliance with the License. You may obtain a copy of * the License at http://www.mozilla.org/NPL/ * * Software distributed under the License is distributed on an "AS * IS" basis, WITHOUT WARRANTY OF ANY KIND, either express or * implied. See the License for the specific language governing * rights and limitations under the License. * * The Original Code is Mozilla Communicator client code, released March * 31, 1998. * * The Initial Developer of the Original Code is Netscape Communications * Corporation. Portions created by Netscape are * Copyright (C) 1998 Netscape Communications Corporation. All * Rights Reserved. * * Contributor(s): * */ /** File Name: 10.1.4-9.js ECMA Section: 10.1.4 Scope Chain and Identifier Resolution Description: Every execution context has associated with it a scope chain. This is logically a list of objects that are searched when binding an Identifier. When control enters an execution context, the scope chain is created and is populated with an initial set of objects, depending on the type of code. When control leaves the execution context, the scope chain is destroyed. During execution, the scope chain of the execution context is affected only by WithStatement. When execution enters a with block, the object specified in the with statement is added to the front of the scope chain. When execution leaves a with block, whether normally or via a break or continue statement, the object is removed from the scope chain. The object being removed will always be the first object in the scope chain. During execution, the syntactic production PrimaryExpression : Identifier is evaluated using the following algorithm: 1. Get the next object in the scope chain. If there isn't one, go to step 5. 2. Call the [[HasProperty]] method of Result(l), passing the Identifier as the property. 3. 
If Result(2) is true, return a value of type Reference whose base object is Result(l) and whose property name is the Identifier. 4. Go to step 1. 5. Return a value of type Reference whose base object is null and whose property name is the Identifier. The result of binding an identifier is always a value of type Reference with its member name component equal to the identifier string. Author: christine@netscape.com Date: 12 november 1997 */ var SECTION = "10.1.4-9"; var VERSION = "ECMA_2"; startTest(); var testcases = getTestCases(); writeHeaderToLog( SECTION + " Scope Chain and Identifier Resolution"); test(); function test() { for ( tc=0; tc < testcases.length; tc++ ) { var MYOBJECT = new MyObject(); var RESULT = "hello"; with ( MYOBJECT ) { NEW_PROPERTY = RESULT; } testcases[tc].actual = NEW_PROPERTY; testcases[tc].expect = RESULT; testcases[tc].passed = writeTestCaseResult( testcases[tc].expect, testcases[tc].actual, testcases[tc].description +" = "+ testcases[tc].actual ); testcases[tc].reason += ( testcases[tc].passed ) ? "" : "wrong value "; } stopTest(); return ( testcases ); } function getTestCases() { var array = new Array(); var item = 0; array[item++] = new TestCase( SECTION, "NEW_PROPERTY = " ); return ( array ); } function MyObject( n ) { this.__proto__ = Number.prototype; }
{ "pile_set_name": "Github" }
open Dune_action_plugin.V1

(* Action that writes a greeting into [some_target]. *)
let action =
  let path = Path.of_string "some_target" in
  write_file ~path ~data:"Hello from some_target!"

(* Entry point: execute the action under the dune action-plugin runtime. *)
let () = run action
{ "pile_set_name": "Github" }
/* Class = "NSComboBox"; ibShadowedToolTip = "<key>Inject</key>\n<string>Detect</string>\nProperty injection for the sound chip. Only works when the DSDT defines Device (HDEF). If you renamed it, you can also inject the other properties differently. Do not use this parameter with VoodooHDA. Possible options are:\n\nNo - Injection is turned off\nDetect - Automatic detection of the sound chip and injection of its ID as layout ID. Actually this is nonsense but still very popular. Does not do any harm and affects the display of sound properties in System Profiler.\n883 - Decimal number representing. Refers to Realtek ALC883 in this case.\n0x373 - Same as above in hexadecimal.\nThese numbers are incorrect, you will need to find the correct value and possibly replace the layout file bundled with AppleHDA to get the chip working."; ObjectID = "0Ex-U9-H5a"; */ "0Ex-U9-H5a.ibShadowedToolTip" = "<key>Inject</key>\n<string>Detect</string>\nProperty injection for the sound chip. Only works when the DSDT defines Device (HDEF). If you renamed it, you can also inject the other properties differently. Do not use this parameter with VoodooHDA. Possible options are:\n\nNo - Injection is turned off\nDetect - Automatic detection of the sound chip and injection of its ID as layout ID. Actually this is nonsense but still very popular. Does not do any harm and affects the display of sound properties in System Profiler.\n883 - Decimal number representing. 
Refers to Realtek ALC883 in this case.\n0x373 - Same as above in hexadecimal.\nThese numbers are incorrect, you will need to find the correct value and possibly replace the layout file bundled with AppleHDA to get the chip working."; /* Class = "NSTextFieldCell"; title = "Inject"; ObjectID = "1B4-NT-MXV"; */ "1B4-NT-MXV.title" = "Inject"; /* Class = "NSButton"; ibShadowedToolTip = "<key>FixOwnership</key>\n<true/>\nIt is possible to leave USB injection enabled and only turn off the ownership fix.\n\nThis fix is not relevant for UEFI booting."; ObjectID = "2I2-ep-g7I"; */ "2I2-ep-g7I.ibShadowedToolTip" = "<key>FixOwnership</key>\n<true/>\nIt is possible to leave USB injection enabled and only turn off the ownership fix.\n\nThis fix is not relevant for UEFI booting."; /* Class = "NSTabViewItem"; label = "Properties"; ObjectID = "3qf-0a-EIY"; */ "3qf-0a-EIY.label" = "Properties"; /* Class = "NSButton"; ibShadowedToolTip = "<key>ResetHDA</key>\n<true/>\nAudio controller initialization. Some users have non-working sound after cold boot but works after restart or wake (even on Windows!). This is a workaround that works at early boot, so it will affect Windows as well."; ObjectID = "4ga-hk-hZY"; */ "4ga-hk-hZY.ibShadowedToolTip" = "<key>ResetHDA</key>\n<true/>\nAudio controller initialization. Some users have non-working sound after cold boot but works after restart or wake (even on Windows!). 
This is a workaround that works at early boot, so it will affect Windows as well."; /* Class = "NSTableColumn"; headerCell.title = "Key*"; ObjectID = "5Yq-DI-Ikq"; */ "5Yq-DI-Ikq.headerCell.title" = "Key*"; /* Class = "NSComboBoxCell"; 6wG-aR-1wy.ibShadowedObjectValues[1] = "No"; ObjectID = "6wG-aR-1wy"; */ "6wG-aR-1wy.ibShadowedObjectValues[1]" = "No"; /* Class = "NSComboBoxCell"; 6wG-aR-1wy.ibShadowedObjectValues[2] = "Detect"; ObjectID = "6wG-aR-1wy"; */ "6wG-aR-1wy.ibShadowedObjectValues[2]" = "Detect"; /* Class = "NSBox"; title = "Audio"; ObjectID = "7jP-8O-5Hn"; */ "7jP-8O-5Hn.title" = "Audio"; /* Class = "NSTextFieldCell"; placeholderString = "PciRoot(0x0)/Pci(0x1c,0x3)/Pci(0x0,0x0)"; ObjectID = "7ub-J8-a2r"; */ "7ub-J8-a2r.placeholderString" = "PciRoot(0x0)/Pci(0x1c,0x3)/Pci(0x0,0x0)"; /* Class = "NSTextFieldCell"; title = "PciRoot(0x0)/Pci(0x1c,0x3)/Pci(0x0,0x0)"; ObjectID = "7ub-J8-a2r"; */ "7ub-J8-a2r.title" = "PciRoot(0x0)/Pci(0x1c,0x3)/Pci(0x0,0x0)"; /* Class = "NSTextFieldCell"; title = "NVidia"; ObjectID = "8ip-p9-5tY"; */ "8ip-p9-5tY.title" = "NVidia"; /* Class = "CocoaBindingsConnection"; ibShadowedIsNilPlaceholder = "HEX in DATA type; 0 = \"NO\", 1 = \"YES\" in BOOLEAN type "; ObjectID = "8r1-Ie-cZD"; */ "8r1-Ie-cZD.ibShadowedIsNilPlaceholder" = "HEX in DATA type; 0 = \"NO\", 1 = \"YES\" in BOOLEAN type "; /* Class = "NSButtonCell"; title = "UseIntelHDMI"; ObjectID = "9J8-8s-0O0"; */ "9J8-8s-0O0.title" = "UseIntelHDMI"; /* Class = "NSButtonCell"; title = "Inject"; ObjectID = "9Nj-te-UVe"; */ "9Nj-te-UVe.title" = "Inject"; /* Class = "NSButton"; ibShadowedToolTip = "Some tuning about C4/C6 States and SpeedStep"; ObjectID = "9QK-wm-bPF"; */ "9QK-wm-bPF.ibShadowedToolTip" = "Some tuning about C4/C6 States and SpeedStep"; /* Class = "NSComboBoxCell"; 9oF-ez-2XI.ibShadowedObjectValues[0] = "STRING"; ObjectID = "9oF-ez-2XI"; */ "9oF-ez-2XI.ibShadowedObjectValues[0]" = "STRING"; /* Class = "NSComboBoxCell"; 9oF-ez-2XI.ibShadowedObjectValues[1] = 
"NUMBER"; ObjectID = "9oF-ez-2XI"; */ "9oF-ez-2XI.ibShadowedObjectValues[1]" = "NUMBER"; /* Class = "NSComboBoxCell"; 9oF-ez-2XI.ibShadowedObjectValues[2] = "DATA"; ObjectID = "9oF-ez-2XI"; */ "9oF-ez-2XI.ibShadowedObjectValues[2]" = "DATA"; /* Class = "NSBox"; title = "USB"; ObjectID = "9yR-xy-e2p"; */ "9yR-xy-e2p.title" = "USB"; /* Class = "NSButtonCell"; title = "NameEH00"; ObjectID = "DfW-yh-0AF"; */ "DfW-yh-0AF.title" = "NameEH00"; /* Class = "NSTableColumn"; headerCell.title = "Value Type"; ObjectID = "E6H-GQ-ELu"; */ "E6H-GQ-ELu.headerCell.title" = "Value Type"; /* Class = "NSBox"; title = "Box"; ObjectID = "EB8-mt-NcG"; */ "EB8-mt-NcG.title" = "Box"; /* Class = "NSButton"; ibShadowedToolTip = "<key>Inject</key>\n<true/>\nInjects USB properties. You can turn it off for whatever reason, if you need. It is also disabled if the DSDT patch mask matches 0x1000 to prevent data duplication."; ObjectID = "Eq8-eZ-fBy"; */ "Eq8-eZ-fBy.ibShadowedToolTip" = "<key>Inject</key>\n<true/>\nInjects USB properties. You can turn it off for whatever reason, if you need. It is also disabled if the DSDT patch mask matches 0x1000 to prevent data duplication."; /* Class = "CocoaBindingsConnection"; ibShadowedIsNilPlaceholder = "0xXXXX14E4"; ObjectID = "EuO-kz-UKB"; */ "EuO-kz-UKB.ibShadowedIsNilPlaceholder" = "0xXXXX14E4"; /* Class = "NSTableColumn"; headerCell.title = "Comment"; ObjectID = "F5e-zV-Jze"; */ "F5e-zV-Jze.headerCell.title" = "Comment"; /* Class = "NSTextFieldCell"; title = "Text Cell"; ObjectID = "FWa-mo-uK1"; */ "FWa-mo-uK1.title" = "Text Cell"; /* Class = "NSButton"; ibShadowedToolTip = "<key>NoDefaultProperties</key>\n<false/>\nThis key will affect DSDT fixes and force them to generate an empty _DSM. For example, if you enable FIX_DISPLAY Clover will create a device for the graphics card but with an empty _DSM. 
AddProperties and FakeID values will still be injected.\n\nThis works only for Display, Sound, LAN and WiFi."; ObjectID = "Fbo-vD-sOq"; */ "Fbo-vD-sOq.ibShadowedToolTip" = "<key>NoDefaultProperties</key>\n<false/>\nThis key will affect DSDT fixes and force them to generate an empty _DSM. For example, if you enable FIX_DISPLAY Clover will create a device for the graphics card but with an empty _DSM. AddProperties and FakeID values will still be injected.\n\nThis works only for Display, Sound, LAN and WiFi."; /* Class = "NSTableColumn"; headerCell.title = "PciAddr*"; ObjectID = "GiF-gc-xsF"; */ "GiF-gc-xsF.headerCell.title" = "PciAddr*"; /* Class = "NSBox"; title = "Properties [HEX]"; ObjectID = "Gr2-u0-gLN"; */ "Gr2-u0-gLN.title" = "Properties [HEX]"; /* Class = "NSButton"; ibShadowedToolTip = "<key>HighCurrent</key>\n<true/>\nMore power needed to charge iPad from USB ports."; ObjectID = "H9i-56-9Ga"; */ "H9i-56-9Ga.ibShadowedToolTip" = "<key>HighCurrent</key>\n<true/>\nMore power needed to charge iPad from USB ports."; /* Class = "NSView"; ibShadowedToolTip = "Now Devices->Properties can be written into config.plist in format as DarwinDumper do\n\nFor example\n<dict>\n<key>PciRoot(0x0)/Pci(0x1b,0x0)</key>\n\t<dict>\n\t\t<key>MaximumBootBeepVolume</key>\n\t\t<string>M</string>\n\t\t<key>PinConfigurations</key>\n\t\t<data>\n\t\tUEArAUABEJBCARCQIDCLARABoJAw4MsBYOBLAQ==\n\t\t</data>\n\t\t<key>layout-id</key>\n\t\t<data>\n\t\tEgAAAA==\n\t\t</data>\n\t\t<key>platformFamily</key>\n\t\t<data>\n\t\tAA==\n\t\t</data>\n\t</dict>\n\t<key>PciRoot(0x0)/Pci(0x1c,0x2)/Pci(0x0,0x0)/Pci(0x0,0x0)</key>\n\t<dict>\n\t\t<key>fwswappedbib</key>\n\t\t<data>\n\t\tAQAAAA==\n\t\t</data>\n\t</dict>\n\nThis way we can deprecate Arbitrary section."; ObjectID = "HE5-9l-mik"; */ "HE5-9l-mik.ibShadowedToolTip" = "Now Devices->Properties can be written into config.plist in format as DarwinDumper do\n\nFor 
example\n<dict>\n<key>PciRoot(0x0)/Pci(0x1b,0x0)</key>\n\t<dict>\n\t\t<key>MaximumBootBeepVolume</key>\n\t\t<string>M</string>\n\t\t<key>PinConfigurations</key>\n\t\t<data>\n\t\tUEArAUABEJBCARCQIDCLARABoJAw4MsBYOBLAQ==\n\t\t</data>\n\t\t<key>layout-id</key>\n\t\t<data>\n\t\tEgAAAA==\n\t\t</data>\n\t\t<key>platformFamily</key>\n\t\t<data>\n\t\tAA==\n\t\t</data>\n\t</dict>\n\t<key>PciRoot(0x0)/Pci(0x1c,0x2)/Pci(0x0,0x0)/Pci(0x0,0x0)</key>\n\t<dict>\n\t\t<key>fwswappedbib</key>\n\t\t<data>\n\t\tAQAAAA==\n\t\t</data>\n\t</dict>\n\nThis way we can deprecate Arbitrary section."; /* Class = "NSTableColumn"; headerCell.title = "Devices*"; ObjectID = "HEH-g8-L3d"; */ "HEH-g8-L3d.headerCell.title" = "Devices*"; /* Class = "NSButton"; ibShadowedToolTip = "<key>Inject</key>\n<false/>\nEnabling it will cause the automatic injection to turn off and allow the injection of a custom string from Properties"; ObjectID = "HY8-2T-aia"; */ "HY8-2T-aia.ibShadowedToolTip" = "<key>Inject</key>\n<false/>\nEnabling it will cause the automatic injection to turn off and allow the injection of a custom string from Properties"; /* Class = "NSButton"; ibShadowedToolTip = "Try to force enable HPET if it isn't."; ObjectID = "Hfo-IB-7nr"; */ "Hfo-IB-7nr.ibShadowedToolTip" = "Try to force enable HPET if it isn't."; /* Class = "NSButtonCell"; title = "HighCurrent"; ObjectID = "JU3-mD-ZWl"; */ "JU3-mD-ZWl.title" = "HighCurrent"; /* Class = "NSTableColumn"; headerCell.title = "Value Type"; ObjectID = "Jke-Be-RCo"; */ "Jke-Be-RCo.headerCell.title" = "Value Type"; /* Class = "NSButtonCell"; title = "Lpc Tune"; ObjectID = "Jlv-i8-A4y"; */ "Jlv-i8-A4y.title" = "Lpc Tune"; /* Class = "NSButtonCell"; title = "LANInjection"; ObjectID = "LBL-HV-p17"; */ "LBL-HV-p17.title" = "LANInjection"; /* Class = "NSButton"; ibShadowedToolTip = "Enable or disable HDMI injection, disabled by default."; ObjectID = "Mi8-oT-4ye"; */ "Mi8-oT-4ye.ibShadowedToolTip" = "Enable or disable HDMI injection, disabled by default."; /* Class = 
"CocoaBindingsConnection"; ibShadowedIsNilPlaceholder = "0xXXXX10DE"; ObjectID = "NDY-K4-95p"; */ "NDY-K4-95p.ibShadowedIsNilPlaceholder" = "0xXXXX10DE"; /* Class = "NSComboBoxCell"; NaD-ja-1qu.ibShadowedObjectValues[0] = "0x0710"; ObjectID = "NaD-ja-1qu"; */ "NaD-ja-1qu.ibShadowedObjectValues[0]" = "0x0710"; /* Class = "NSComboBoxCell"; NaD-ja-1qu.ibShadowedObjectValues[1] = "0x056C"; ObjectID = "NaD-ja-1qu"; */ "NaD-ja-1qu.ibShadowedObjectValues[1]" = "0x056C"; /* Class = "NSComboBoxCell"; NaD-ja-1qu.ibShadowedObjectValues[2] = "0x07A1"; ObjectID = "NaD-ja-1qu"; */ "NaD-ja-1qu.ibShadowedObjectValues[2]" = "0x07A1"; /* Class = "NSComboBoxCell"; NaD-ja-1qu.ibShadowedObjectValues[3] = "0x0AD9"; ObjectID = "NaD-ja-1qu"; */ "NaD-ja-1qu.ibShadowedObjectValues[3]" = "0x0AD9"; /* Class = "NSComboBoxCell"; NaD-ja-1qu.ibShadowedObjectValues[4] = "0x1499"; ObjectID = "NaD-ja-1qu"; */ "NaD-ja-1qu.ibShadowedObjectValues[4]" = "0x1499"; /* Class = "NSComboBoxCell"; NaD-ja-1qu.ibShadowedObjectValues[5] = "0x056C"; ObjectID = "NaD-ja-1qu"; */ "NaD-ja-1qu.ibShadowedObjectValues[5]" = "0x056C"; /* Class = "NSComboBoxCell"; NaD-ja-1qu.ibShadowedObjectValues[6] = "0xFFFF"; ObjectID = "NaD-ja-1qu"; */ "NaD-ja-1qu.ibShadowedObjectValues[6]" = "0xFFFF"; /* Class = "NSTableView"; ibShadowedToolTip = "<key>AddProperties</key>\n<array>\n <dict>\n <key>Device</key>\n <string>NVidia</string>\n <key>Key</key>\n <string>AAPL,HasPanel</string>\n <key>Value</key>\n <data>AQAAAA==</data>\n </dict>\n <dict>\n <key>Device</key>\n <string>NVidia</string>\n <key>Key</key>\n <string>AAPL,Haslid</string>\n <key>Value</key>\n <data>AQAAAA==</data>\n </dict>\n</array>\nUsing device properties injection may be insufficient for some cases (new device, new standard, new OS), so this function lets you customize these injected properties.\n\nYou may write as many properties as you want for these devices: * ATI * NVidia * IntelGFX * LAN * WIFI * Firewire * SATA * IDE * HDA * HDMI * LPC * SmBUS * USB"; 
ObjectID = "Ob7-UX-r0p"; */ "Ob7-UX-r0p.ibShadowedToolTip" = "<key>AddProperties</key>\n<array>\n <dict>\n <key>Device</key>\n <string>NVidia</string>\n <key>Key</key>\n <string>AAPL,HasPanel</string>\n <key>Value</key>\n <data>AQAAAA==</data>\n </dict>\n <dict>\n <key>Device</key>\n <string>NVidia</string>\n <key>Key</key>\n <string>AAPL,Haslid</string>\n <key>Value</key>\n <data>AQAAAA==</data>\n </dict>\n</array>\nUsing device properties injection may be insufficient for some cases (new device, new standard, new OS), so this function lets you customize these injected properties.\n\nYou may write as many properties as you want for these devices: * ATI * NVidia * IntelGFX * LAN * WIFI * Firewire * SATA * IDE * HDA * HDMI * LPC * SmBUS * USB"; /* Class = "NSTextFieldCell"; title = "IntelMaxValue"; ObjectID = "P1p-BB-Jhs"; */ "P1p-BB-Jhs.title" = "IntelMaxValue"; /* Class = "NSTextField"; ibShadowedToolTip = "Replace Airport device name inside your DSDT"; ObjectID = "PQZ-Qh-lD1"; */ "PQZ-Qh-lD1.ibShadowedToolTip" = "Replace Airport device name inside your DSDT"; /* Class = "NSTextFieldCell"; title = "LAN"; ObjectID = "QOy-nA-OJ1"; */ "QOy-nA-OJ1.title" = "LAN"; /* Class = "NSTextFieldCell"; title = "Text Cell"; ObjectID = "Qec-jB-ZN6"; */ "Qec-jB-ZN6.title" = "Text Cell"; /* Class = "NSView"; ibShadowedToolTip = "First you should look your boot.log or preboot.log (obtained by press F2 in Clover GUI). There is a list of your PCI devices.\n\n4:432 0:000 PCI (00|02:05.00) : 10EC 8167 class=020000\n\nThis is LAN device\n\nVendorID= 10EC - this is Realtek\n\nDeviceID= 8167 - This is Realtek 8167/8169/8110 Gigabyte Ethernet Controller\n\nIt is located on the PCI bus (green digits)\n\nBus = 02\n\nDevice = 05 \n\nFunction = 00\n\nThis location will be unique for every device in your computer. You may have two equal graphics cards with the same IDs and same model. 
But their locations will be different.\n\nWe have to write into config.plist in section \"Devices\", array \"Arbitrary\" of items one per device you want to inject properties.\n\n\t<key>Devices</key>\n\t<dict>\n\t\t<key>Arbitrary</key>\n\t\t<array>\n\t\t\t<dict>\n\t\t\t\t<key>PciAddr</key>\n\t\t\t\t<string>02:05.00</string>\n\t\t\t\t<key>Comment</key>\n\t\t\t\t<string>Realtek LAN 8167</string>\n\t\t\t\t<key>CustomProperties</key>\n\t\t\t\t<array>\n\t\t\t\t\t<dict>\n\t\t\t\t\t\t<key>Key</key>\n\t\t\t\t\t\t<string>model</string>\n\t\t\t\t\t\t<key>Value</key>\n\t\t\t\t\t\t<string>Realtek 8169 Gigabit Ethernet Controller</string>\n\t\t\t\t\t</dict>\n\t\t\t\t\t<dict>\n\t\t\t\t\t\t<key>Key</key>\n\t\t\t\t\t\t<string>built-in</string>\n\t\t\t\t\t\t<key>Value</key>\n\t\t\t\t\t\t<data>AQAAAA==</data>\n\t\t\t\t\t</dict>\n\t\t\t\t</array>\n\t\t\t</dict>\n\t\t</array>"; ObjectID = "Qxp-DC-Voo"; */ "Qxp-DC-Voo.ibShadowedToolTip" = "First you should look your boot.log or preboot.log (obtained by press F2 in Clover GUI). There is a list of your PCI devices.\n\n4:432 0:000 PCI (00|02:05.00) : 10EC 8167 class=020000\n\nThis is LAN device\n\nVendorID= 10EC - this is Realtek\n\nDeviceID= 8167 - This is Realtek 8167/8169/8110 Gigabyte Ethernet Controller\n\nIt is located on the PCI bus (green digits)\n\nBus = 02\n\nDevice = 05 \n\nFunction = 00\n\nThis location will be unique for every device in your computer. You may have two equal graphics cards with the same IDs and same model. 
But their locations will be different.\n\nWe have to write into config.plist in section \"Devices\", array \"Arbitrary\" of items one per device you want to inject properties.\n\n\t<key>Devices</key>\n\t<dict>\n\t\t<key>Arbitrary</key>\n\t\t<array>\n\t\t\t<dict>\n\t\t\t\t<key>PciAddr</key>\n\t\t\t\t<string>02:05.00</string>\n\t\t\t\t<key>Comment</key>\n\t\t\t\t<string>Realtek LAN 8167</string>\n\t\t\t\t<key>CustomProperties</key>\n\t\t\t\t<array>\n\t\t\t\t\t<dict>\n\t\t\t\t\t\t<key>Key</key>\n\t\t\t\t\t\t<string>model</string>\n\t\t\t\t\t\t<key>Value</key>\n\t\t\t\t\t\t<string>Realtek 8169 Gigabit Ethernet Controller</string>\n\t\t\t\t\t</dict>\n\t\t\t\t\t<dict>\n\t\t\t\t\t\t<key>Key</key>\n\t\t\t\t\t\t<string>built-in</string>\n\t\t\t\t\t\t<key>Value</key>\n\t\t\t\t\t\t<data>AQAAAA==</data>\n\t\t\t\t\t</dict>\n\t\t\t\t</array>\n\t\t\t</dict>\n\t\t</array>"; /* Class = "NSComboBoxCell"; RPg-NS-WkE.ibShadowedObjectValues[0] = "ATI"; ObjectID = "RPg-NS-WkE"; */ "RPg-NS-WkE.ibShadowedObjectValues[0]" = "ATI"; /* Class = "NSComboBoxCell"; RPg-NS-WkE.ibShadowedObjectValues[1] = "NVidia"; ObjectID = "RPg-NS-WkE"; */ "RPg-NS-WkE.ibShadowedObjectValues[1]" = "NVidia"; /* Class = "NSComboBoxCell"; RPg-NS-WkE.ibShadowedObjectValues[2] = "IntelGFX"; ObjectID = "RPg-NS-WkE"; */ "RPg-NS-WkE.ibShadowedObjectValues[2]" = "IntelGFX"; /* Class = "NSComboBoxCell"; RPg-NS-WkE.ibShadowedObjectValues[3] = "LAN"; ObjectID = "RPg-NS-WkE"; */ "RPg-NS-WkE.ibShadowedObjectValues[3]" = "LAN"; /* Class = "NSComboBoxCell"; RPg-NS-WkE.ibShadowedObjectValues[4] = "WIFI"; ObjectID = "RPg-NS-WkE"; */ "RPg-NS-WkE.ibShadowedObjectValues[4]" = "WIFI"; /* Class = "NSComboBoxCell"; RPg-NS-WkE.ibShadowedObjectValues[5] = "Firewire"; ObjectID = "RPg-NS-WkE"; */ "RPg-NS-WkE.ibShadowedObjectValues[5]" = "Firewire"; /* Class = "NSComboBoxCell"; RPg-NS-WkE.ibShadowedObjectValues[6] = "SATA"; ObjectID = "RPg-NS-WkE"; */ "RPg-NS-WkE.ibShadowedObjectValues[6]" = "SATA"; /* Class = "NSComboBoxCell"; 
RPg-NS-WkE.ibShadowedObjectValues[7] = "IDE"; ObjectID = "RPg-NS-WkE"; */ "RPg-NS-WkE.ibShadowedObjectValues[7]" = "IDE"; /* Class = "NSComboBoxCell"; RPg-NS-WkE.ibShadowedObjectValues[8] = "HDA"; ObjectID = "RPg-NS-WkE"; */ "RPg-NS-WkE.ibShadowedObjectValues[8]" = "HDA"; /* Class = "NSComboBoxCell"; RPg-NS-WkE.ibShadowedObjectValues[9] = "HDMI"; ObjectID = "RPg-NS-WkE"; */ "RPg-NS-WkE.ibShadowedObjectValues[9]" = "HDMI"; /* Class = "NSComboBoxCell"; RPg-NS-WkE.ibShadowedObjectValues[10] = "LPC"; ObjectID = "RPg-NS-WkE"; */ "RPg-NS-WkE.ibShadowedObjectValues[10]" = "LPC"; /* Class = "NSComboBoxCell"; RPg-NS-WkE.ibShadowedObjectValues[11] = "SmBUS"; ObjectID = "RPg-NS-WkE"; */ "RPg-NS-WkE.ibShadowedObjectValues[11]" = "SmBUS"; /* Class = "NSComboBoxCell"; RPg-NS-WkE.ibShadowedObjectValues[12] = "USB"; ObjectID = "RPg-NS-WkE"; */ "RPg-NS-WkE.ibShadowedObjectValues[12]" = "USB"; /* Class = "NSTextFieldCell"; title = "Text Cell"; ObjectID = "RW2-NS-GZb"; */ "RW2-NS-GZb.title" = "Text Cell"; /* Class = "NSTableColumn"; headerCell.title = "Disabled"; ObjectID = "TGL-3O-5P1"; */ "TGL-3O-5P1.headerCell.title" = "Disabled"; /* Class = "NSTextFieldCell"; title = "XHCI"; ObjectID = "TPv-Br-7ix"; */ "TPv-Br-7ix.title" = "XHCI"; /* Class = "NSTextFieldCell"; title = "Text Cell"; ObjectID = "TgZ-ki-C6v"; */ "TgZ-ki-C6v.title" = "Text Cell"; /* Class = "NSTableColumn"; headerCell.title = "Value*"; ObjectID = "Txa-kI-pAD"; */ "Txa-kI-pAD.headerCell.title" = "Value*"; /* Class = "NSButtonCell"; title = "NoDefaultProperties"; ObjectID = "Txs-Zo-9Xd"; */ "Txs-Zo-9Xd.title" = "NoDefaultProperties"; /* Class = "NSTextField"; ibShadowedToolTip = "<key>Properties</key>\n<string>0207364862FA54HG345</string>\nTo create your own string you need the tool gfxutil that, for example, is bundled with DarwinDumper. You will need to create an xml file using a device path in standard notation as header and list properties, which will be injected. 
The example shows a file used to enable a video card:\n\n<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">\n<plist version=\"1.0\">\n<dict>\n <key>PciRoot(0x0)/Pci(0x2,0x0)</key>\n <dict>\n <key>AAPL,HasPanel</key>\n <data>AQAAAA== </data>\n <key>built-in</key>\n <data>AA==</data>\n <key>class-code</key>\n <data>AAADAA==</data>\n <key>device_type</key>\n <data>ZGlzcGxheQA=</data>\n <key>model</key>\n <data>R01BIDk1MAA=</data>\n </dict>\n</dict>\n</plist>\nIt is possible to obtain the currently used plist with DarwinDumper, to modify it and convert it into string form with the command:\n\n./gfxutil –i xml –o hex devprop.plist devprop.hex\n.. resulting in:\n\nd30000000100000001000000c70000000500000002010c00d041030a000000000101060000027fff0400100000006d006f00640065006c0000000c000000474d4120393530001c0000006400650076006900630065005f00740079007000650000000c000000646973706c617900200000004100410050004c002c00480061007300500061006e0065006c0000000800000001000000160000006200750069006c0074002d0069006e00000005000000001a00000063006c006100730073002d0063006f006400650000000800000000000300\nThe result needs to be used as a value for DeviceProperties. The same effect can be achieved by adding a _DMS method into the according section of your DSDT. It may be practical to use the string method when no DSDT is available yet."; ObjectID = "UDA-0b-dPf"; */ "UDA-0b-dPf.ibShadowedToolTip" = "<key>Properties</key>\n<string>0207364862FA54HG345</string>\nTo create your own string you need the tool gfxutil that, for example, is bundled with DarwinDumper. You will need to create an xml file using a device path in standard notation as header and list properties, which will be injected. 
The example shows a file used to enable a video card:\n\n<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">\n<plist version=\"1.0\">\n<dict>\n <key>PciRoot(0x0)/Pci(0x2,0x0)</key>\n <dict>\n <key>AAPL,HasPanel</key>\n <data>AQAAAA== </data>\n <key>built-in</key>\n <data>AA==</data>\n <key>class-code</key>\n <data>AAADAA==</data>\n <key>device_type</key>\n <data>ZGlzcGxheQA=</data>\n <key>model</key>\n <data>R01BIDk1MAA=</data>\n </dict>\n</dict>\n</plist>\nIt is possible to obtain the currently used plist with DarwinDumper, to modify it and convert it into string form with the command:\n\n./gfxutil –i xml –o hex devprop.plist devprop.hex\n.. resulting in:\n\nd30000000100000001000000c70000000500000002010c00d041030a000000000101060000027fff0400100000006d006f00640065006c0000000c000000474d4120393530001c0000006400650076006900630065005f00740079007000650000000c000000646973706c617900200000004100410050004c002c00480061007300500061006e0065006c0000000800000001000000160000006200750069006c0074002d0069006e00000005000000001a00000063006c006100730073002d0063006f006400650000000800000000000300\nThe result needs to be used as a value for DeviceProperties. The same effect can be achieved by adding a _DMS method into the according section of your DSDT. 
It may be practical to use the string method when no DSDT is available yet."; /* Class = "NSButtonCell"; title = "FixOwnership"; ObjectID = "ULp-vx-Ug8"; */ "ULp-vx-Ug8.title" = "FixOwnership"; /* Class = "NSButtonCell"; title = "Inject"; ObjectID = "UeE-Gb-QK7"; */ "UeE-Gb-QK7.title" = "Inject"; /* Class = "NSTableColumn"; headerCell.title = "Device*"; ObjectID = "Vl8-Nf-teZ"; */ "Vl8-Nf-teZ.headerCell.title" = "Device*"; /* Class = "NSTextFieldCell"; title = "DisableFunctions"; ObjectID = "WVR-X1-3oF"; */ "WVR-X1-3oF.title" = "DisableFunctions"; /* Class = "CocoaBindingsConnection"; ibShadowedIsNilPlaceholder = "PciRoot(0x0)/Pci(0x1f,0x3)"; ObjectID = "Wom-0Y-x3O"; */ "Wom-0Y-x3O.ibShadowedIsNilPlaceholder" = "PciRoot(0x0)/Pci(0x1f,0x3)"; /* Class = "CocoaBindingsConnection"; ibShadowedIsNilPlaceholder = "1808 or 0x710"; ObjectID = "Xt7-nw-zTh"; */ "Xt7-nw-zTh.ibShadowedIsNilPlaceholder" = "1808 or 0x710"; /* Class = "NSTextField"; ibShadowedToolTip = "Set Hex value for disable a function.\n\n/* Function Disable 1 RCBA 0x3418 */\n#define PCH_DISABLE_ALWAYS\t(1 << 0)\n#define PCH_DISABLE_ADSPD\t(1 << 1)\n#define PCH_DISABLE_SATA1\t(1 << 2)\n#define PCH_DISABLE_SMBUS\t(1 << 3)\n#define PCH_DISABLE_HD_AUDIO\t(1 << 4)\n#define PCH_DISABLE_EHCI2\t(1 << 13)\n#define PCH_DISABLE_LPC\t\t(1 << 14)\n#define PCH_DISABLE_EHCI1\t(1 << 15)\n#define PCH_DISABLE_PCIE(x)\t(1 << (16 + x))\n#define PCH_DISABLE_THERMAL\t(1 << 24)\n#define PCH_DISABLE_SATA2\t(1 << 25)\n#define PCH_DISABLE_XHCI\t(1 << 27)\n\n0x1 (to disable ALWAYS)\n0x2 (to disable ADSPD)\n0x4 (to disable SATA1)\n0x8 (to disable SMBUS)\n0x10 (to disable HDEF)\n0x2000 (to disable EHCI2)\n0x4000 (to disable LPC)\n0x8000 (to disable EHCI1)\n0x1000000 (to disable THERMAL)\n0x2000000 (to disable SATA2)\n0x8000000 (to disable XHCI)"; ObjectID = "Zk2-oy-DHb"; */ "Zk2-oy-DHb.ibShadowedToolTip" = "Set Hex value for disable a function.\n\n/* Function Disable 1 RCBA 0x3418 */\n#define PCH_DISABLE_ALWAYS\t(1 << 0)\n#define 
PCH_DISABLE_ADSPD\t(1 << 1)\n#define PCH_DISABLE_SATA1\t(1 << 2)\n#define PCH_DISABLE_SMBUS\t(1 << 3)\n#define PCH_DISABLE_HD_AUDIO\t(1 << 4)\n#define PCH_DISABLE_EHCI2\t(1 << 13)\n#define PCH_DISABLE_LPC\t\t(1 << 14)\n#define PCH_DISABLE_EHCI1\t(1 << 15)\n#define PCH_DISABLE_PCIE(x)\t(1 << (16 + x))\n#define PCH_DISABLE_THERMAL\t(1 << 24)\n#define PCH_DISABLE_SATA2\t(1 << 25)\n#define PCH_DISABLE_XHCI\t(1 << 27)\n\n0x1 (to disable ALWAYS)\n0x2 (to disable ADSPD)\n0x4 (to disable SATA1)\n0x8 (to disable SMBUS)\n0x10 (to disable HDEF)\n0x2000 (to disable EHCI2)\n0x4000 (to disable LPC)\n0x8000 (to disable EHCI1)\n0x1000000 (to disable THERMAL)\n0x2000000 (to disable SATA2)\n0x8000000 (to disable XHCI)"; /* Class = "NSButtonCell"; title = "ForceHPET"; ObjectID = "a4f-La-NY3"; */ "a4f-La-NY3.title" = "ForceHPET"; /* Class = "NSComboBoxCell"; aBO-8H-kPO.ibShadowedObjectValues[0] = "DATA"; ObjectID = "aBO-8H-kPO"; */ "aBO-8H-kPO.ibShadowedObjectValues[0]" = "DATA"; /* Class = "NSComboBoxCell"; aBO-8H-kPO.ibShadowedObjectValues[1] = "STRING"; ObjectID = "aBO-8H-kPO"; */ "aBO-8H-kPO.ibShadowedObjectValues[1]" = "STRING"; /* Class = "NSComboBoxCell"; aBO-8H-kPO.ibShadowedObjectValues[2] = "NUMBER"; ObjectID = "aBO-8H-kPO"; */ "aBO-8H-kPO.ibShadowedObjectValues[2]" = "NUMBER"; /* Class = "CocoaBindingsConnection"; ibShadowedIsNilPlaceholder = "0xXXXX8086"; ObjectID = "amy-9Z-PoQ"; */ "amy-9Z-PoQ.ibShadowedIsNilPlaceholder" = "0xXXXX8086"; /* Class = "CocoaBindingsConnection"; ibShadowedIsNilPlaceholder = "0xXXXX8086"; ObjectID = "av3-rR-WHU"; */ "av3-rR-WHU.ibShadowedIsNilPlaceholder" = "0xXXXX8086"; /* Class = "NSTextFieldCell"; title = "AirportBridgeDeviceName"; ObjectID = "b5c-a9-ktv"; */ "b5c-a9-ktv.title" = "AirportBridgeDeviceName"; /* Class = "NSTextFieldCell"; title = "IMEI"; ObjectID = "bjr-FJ-q4H"; */ "bjr-FJ-q4H.title" = "IMEI"; /* Class = "NSButton"; ibShadowedToolTip = "Renaming of Device name EH. 
This helps to bypass port restrictions in El Capitan onward."; ObjectID = "bzm-UW-nA9"; */ "bzm-UW-nA9.ibShadowedToolTip" = "Renaming of Device name EH. This helps to bypass port restrictions in El Capitan onward."; /* Class = "NSTextFieldCell"; title = "Text Cell"; ObjectID = "cmq-a8-msM"; */ "cmq-a8-msM.title" = "Text Cell"; /* Class = "CocoaBindingsConnection"; ibShadowedIsNilPlaceholder = "0xXXXX8086"; ObjectID = "d4G-ss-TXN"; */ "d4G-ss-TXN.ibShadowedIsNilPlaceholder" = "0xXXXX8086"; /* Class = "NSButton"; ibShadowedToolTip = "<key>AFGLowPowerState</key>\n<false/>\nThis helps remove crackling sounds at audio output after idle mode, so sound card is always on."; ObjectID = "dn8-Pw-elB"; */ "dn8-Pw-elB.ibShadowedToolTip" = "<key>AFGLowPowerState</key>\n<false/>\nThis helps remove crackling sounds at audio output after idle mode, so sound card is always on."; /* Class = "NSButton"; ibShadowedToolTip = "Fix MaxBrightness for Intel"; ObjectID = "eMI-Yt-Luy"; */ "eMI-Yt-Luy.ibShadowedToolTip" = "Fix MaxBrightness for Intel"; /* Class = "CocoaBindingsConnection"; ibShadowedIsNilPlaceholder = "hda-gfx"; ObjectID = "efq-d0-Dvy"; */ "efq-d0-Dvy.ibShadowedIsNilPlaceholder" = "hda-gfx"; /* Class = "NSButton"; ibShadowedToolTip = "<key>AddClockID</key>\n<true/>\n<true/> - Enables a good, deep sleep, which cannot be exited by keyboard or mouse input.\n<false/> - The PC will possibly sleep and it can be woken up by keyboard or mouse; or it will be automatically woken up by some attached device\nInjects the property \"AAPL,clock-id\" with a unique identifier for each device. 
Set it to your liking.\n\nRequires USBInjection to be enabled.\n\nDefault value is set to disabled."; ObjectID = "f2J-nt-Amm"; */ "f2J-nt-Amm.ibShadowedToolTip" = "<key>AddClockID</key>\n<true/>\n<true/> - Enables a good, deep sleep, which cannot be exited by keyboard or mouse input.\n<false/> - The PC will possibly sleep and it can be woken up by keyboard or mouse; or it will be automatically woken up by some attached device\nInjects the property \"AAPL,clock-id\" with a unique identifier for each device. Set it to your liking.\n\nRequires USBInjection to be enabled.\n\nDefault value is set to disabled."; /* Class = "NSButton"; ibShadowedToolTip = "A possibility to Set Intel Backlight"; ObjectID = "fBL-sm-PZ3"; */ "fBL-sm-PZ3.ibShadowedToolTip" = "A possibility to Set Intel Backlight"; /* Class = "NSTextFieldCell"; title = "SATA"; ObjectID = "fxj-bd-HA4"; */ "fxj-bd-HA4.title" = "SATA"; /* Class = "NSButtonCell"; title = "SetIntelBacklight"; ObjectID = "gB8-7Z-Bif"; */ "gB8-7Z-Bif.title" = "SetIntelBacklight"; /* Class = "NSButtonCell"; title = "ResetHDA"; ObjectID = "hYo-Wd-WFN"; */ "hYo-Wd-WFN.title" = "ResetHDA"; /* Class = "NSButtonCell"; title = "SetIntelMaxBacklight"; ObjectID = "ib6-Ng-Sgh"; */ "ib6-Ng-Sgh.title" = "SetIntelMaxBacklight"; /* Class = "NSTableColumn"; headerCell.title = "Key*"; ObjectID = "jWS-lq-rVF"; */ "jWS-lq-rVF.headerCell.title" = "Key*"; /* Class = "NSTableColumn"; headerCell.title = "Value*"; ObjectID = "jlN-zF-QfO"; */ "jlN-zF-QfO.headerCell.title" = "Value*"; /* Class = "NSButton"; ibShadowedToolTip = "If TRUE, hda-gfx=onboard-1 will be injected into the GFX0 and HDEF devices. Also, if an ATI or Nvidia HDMI device is present, they'll be assigned to onboard-2. If FALSE, then ATI or Nvidia devices will get onboard-1 as well as the HDAU device if present."; ObjectID = "jnb-OG-XlG"; */ "jnb-OG-XlG.ibShadowedToolTip" = "If TRUE, hda-gfx=onboard-1 will be injected into the GFX0 and HDEF devices. 
Also, if an ATI or Nvidia HDMI device is present, they'll be assigned to onboard-2. If FALSE, then ATI or Nvidia devices will get onboard-1 as well as the HDAU device if present."; /* Class = "NSTableColumn"; headerCell.title = "Disabled"; ObjectID = "kQG-4c-nJZ"; */ "kQG-4c-nJZ.headerCell.title" = "Disabled"; /* Class = "NSButtonCell"; title = "AFGLowPowerState"; ObjectID = "lE2-SD-jA4"; */ "lE2-SD-jA4.title" = "AFGLowPowerState"; /* Class = "NSTextFieldCell"; title = "ATI"; ObjectID = "m8Z-5b-HHv"; */ "m8Z-5b-HHv.title" = "ATI"; /* Class = "NSComboBox"; ibShadowedToolTip = "Set Intel Max Backlight Value"; ObjectID = "m9s-s9-Kw6"; */ "m9s-s9-Kw6.ibShadowedToolTip" = "Set Intel Max Backlight Value"; /* Class = "CocoaBindingsConnection"; ibShadowedIsNilPlaceholder = "0xXXXX8086"; ObjectID = "mdE-cW-z7s"; */ "mdE-cW-z7s.ibShadowedIsNilPlaceholder" = "0xXXXX8086"; /* Class = "NSButtonCell"; title = "Add ClockID"; ObjectID = "nKB-UW-G26"; */ "nKB-UW-G26.title" = "Add ClockID"; /* Class = "NSTabViewItem"; label = "Arbitrary"; ObjectID = "pj1-PC-aNN"; */ "pj1-PC-aNN.label" = "Arbitrary"; /* Class = "NSTableColumn"; headerCell.title = "Value Type"; ObjectID = "pxi-J0-2Fb"; */ "pxi-J0-2Fb.headerCell.title" = "Value Type"; /* Class = "NSTableColumn"; headerCell.title = "Properties Key*"; ObjectID = "sUY-rc-CzA"; */ "sUY-rc-CzA.headerCell.title" = "Properties Key*"; /* Class = "NSButton"; ibShadowedToolTip = "Enable or disable LAN injection, enabled by default."; ObjectID = "sap-3V-U9E"; */ "sap-3V-U9E.ibShadowedToolTip" = "Enable or disable LAN injection, enabled by default."; /* Class = "CocoaBindingsConnection"; ibShadowedIsNilPlaceholder = "0xXXXX8086"; ObjectID = "tVF-Tb-5jd"; */ "tVF-Tb-5jd.ibShadowedIsNilPlaceholder" = "0xXXXX8086"; /* Class = "NSTextFieldCell"; title = "IntelGFX"; ObjectID = "vDd-pS-Kg2"; */ "vDd-pS-Kg2.title" = "IntelGFX"; /* Class = "NSTextFieldCell"; title = "Text Cell"; ObjectID = "vF1-NB-Tga"; */ "vF1-NB-Tga.title" = "Text Cell"; /* Class = 
"NSTextFieldCell"; title = "Text Cell"; ObjectID = "vha-iy-O49"; */ "vha-iy-O49.title" = "Text Cell"; /* Class = "CocoaBindingsConnection"; ibShadowedIsNilPlaceholder = "0xXXXX1002"; ObjectID = "xjE-Wt-AST"; */ "xjE-Wt-AST.ibShadowedIsNilPlaceholder" = "0xXXXX1002"; /* Class = "NSBox"; title = "Add Properties"; ObjectID = "xkr-rE-wcR"; */ "xkr-rE-wcR.title" = "Add Properties"; /* Class = "NSTextFieldCell"; title = "Text Cell"; ObjectID = "yOf-aC-GdU"; */ "yOf-aC-GdU.title" = "Text Cell"; /* Class = "NSBox"; ibShadowedToolTip = "<key>FakeID</key>\n <dict>\n <key>ATI</key>\n <string>0x68181002</string>\n <key>IntelGFX</key>\n <string>0x01268086</string>\n <key>NVidia</key>\n <string>0x0</string>\n <key>LAN</key>\n <string>0x436311ab</string>\n <key>SATA</key>\n <string>0x25628086</string>\n <key>WIFI</key>\n <string>0x431214E4</string>\n <key>XHCI</key>\n <string>0x0</string>\n <key>IMEI</key>\n <string>0x1E3A8086</string>\n </dict>\nThis is a method to change PCI properties DeviceID and VendorID for the device will work with native drivers. In the example above: - AMDRadeonHD7850 has unsupported DeviceID=0x6819. Change to 0x6818 - Dell Wireless 1595, DeviceID=0x4315 is not supported. Change to 0x4312 - Marvell Yukon 8056, DeviceID=0x4353. Change to 0x4363.\nThere are other known substitutions for unsupported devices.\n\nThis substitution will work if InjectATI (Nvidia, Intel) is set. 
Or if FixDsdtMask set for the device.\n\nLikewise, the IMEI fix will only work if the DSDT Patch AddMCHC_0008 is enabled."; ObjectID = "yvB-P5-wy5"; */ "yvB-P5-wy5.ibShadowedToolTip" = "<key>FakeID</key>\n <dict>\n <key>ATI</key>\n <string>0x68181002</string>\n <key>IntelGFX</key>\n <string>0x01268086</string>\n <key>NVidia</key>\n <string>0x0</string>\n <key>LAN</key>\n <string>0x436311ab</string>\n <key>SATA</key>\n <string>0x25628086</string>\n <key>WIFI</key>\n <string>0x431214E4</string>\n <key>XHCI</key>\n <string>0x0</string>\n <key>IMEI</key>\n <string>0x1E3A8086</string>\n </dict>\nThis is a method to change PCI properties DeviceID and VendorID for the device will work with native drivers. In the example above: - AMDRadeonHD7850 has unsupported DeviceID=0x6819. Change to 0x6818 - Dell Wireless 1595, DeviceID=0x4315 is not supported. Change to 0x4312 - Marvell Yukon 8056, DeviceID=0x4353. Change to 0x4363.\nThere are other known substitutions for unsupported devices.\n\nThis substitution will work if InjectATI (Nvidia, Intel) is set. 
Or if FixDsdtMask set for the device.\n\nLikewise, the IMEI fix will only work if the DSDT Patch AddMCHC_0008 is enabled."; /* Class = "NSBox"; title = "Fake ID"; ObjectID = "yvB-P5-wy5"; */ "yvB-P5-wy5.title" = "Fake ID"; /* Class = "NSTableColumn"; headerCell.title = "Properties Value"; ObjectID = "zDl-Sr-4Fb"; */ "zDl-Sr-4Fb.headerCell.title" = "Properties Value"; /* Class = "NSTextFieldCell"; title = "WIFI"; ObjectID = "zEn-Md-gTE"; */ "zEn-Md-gTE.title" = "WIFI"; /* Class = "NSButtonCell"; title = "HDMIInjection"; ObjectID = "zQM-Rc-Ud8"; */ "zQM-Rc-Ud8.title" = "HDMIInjection"; /* Class = "NSTextFieldCell"; title = "CustomProperties"; ObjectID = "zlX-sr-gGp"; */ "zlX-sr-gGp.title" = "CustomProperties"; /* Class = "NSComboBoxCell"; zpk-Vh-PQx.ibShadowedObjectValues[0] = "STRING"; ObjectID = "zpk-Vh-PQx"; */ "zpk-Vh-PQx.ibShadowedObjectValues[0]" = "STRING"; /* Class = "NSComboBoxCell"; zpk-Vh-PQx.ibShadowedObjectValues[1] = "NUMBER"; ObjectID = "zpk-Vh-PQx"; */ "zpk-Vh-PQx.ibShadowedObjectValues[1]" = "NUMBER"; /* Class = "NSComboBoxCell"; zpk-Vh-PQx.ibShadowedObjectValues[2] = "DATA"; ObjectID = "zpk-Vh-PQx"; */ "zpk-Vh-PQx.ibShadowedObjectValues[2]" = "DATA";
{ "pile_set_name": "Github" }
package wangdaye.com.geometricweather.db.entity;

import org.greenrobot.greendao.annotation.Convert;
import org.greenrobot.greendao.annotation.Entity;
import wangdaye.com.geometricweather.basic.model.option.provider.WeatherSource;
import wangdaye.com.geometricweather.db.propertyConverter.TimeZoneConverter;
import wangdaye.com.geometricweather.db.propertyConverter.WeatherSourceConverter;
import org.greenrobot.greendao.annotation.Id;
import org.greenrobot.greendao.annotation.Generated;

import java.util.TimeZone;

/**
 * greenDAO persistence entity for a saved location.
 *
 * <p>Database-layer counterpart of
 * {@link wangdaye.com.geometricweather.basic.model.location.Location}.
 * Fields are public and the constructors carry {@code @Generated(hash = ...)}
 * because greenDAO's code generator maintains them — do not hand-edit the
 * generated bodies, or the generator's hash check will fail at build time.
 * The {@code getX()} (rather than {@code isX()}) naming on boolean accessors
 * also follows greenDAO's generation convention.
 */
@Entity
public class LocationEntity {

    // Primary key of the table (greenDAO @Id on a String column).
    @Id public String formattedId;

    // Provider-specific city identifier — presumably; verify against the weather-source APIs.
    public String cityId;

    public float latitude;
    public float longitude;

    // Persisted as a String column through TimeZoneConverter.
    @Convert(converter = TimeZoneConverter.class, columnType = String.class)
    public TimeZone timeZone;

    public String country;
    public String province;
    public String city;
    public String district;

    // Weather data provider; persisted as a String column through WeatherSourceConverter.
    @Convert(converter = WeatherSourceConverter.class, columnType = String.class)
    public WeatherSource weatherSource;

    // NOTE(review): flag semantics inferred from names only — confirm with callers:
    // currentPosition: row represents the device's current (located) position;
    // residentPosition: location marked as a "resident" location;
    // china: location lies in China (may select a different provider/UI path).
    public boolean currentPosition;
    public boolean residentPosition;
    public boolean china;

    // Ordering index for the saved-location list — presumably; verify against DAO queries.
    public long sequence;

    // Generated all-args constructor; the hash is greenDAO's fingerprint of this body.
    @Generated(hash = 212187184)
    public LocationEntity(String formattedId, String cityId, float latitude,
            float longitude, TimeZone timeZone, String country, String province,
            String city, String district, WeatherSource weatherSource,
            boolean currentPosition, boolean residentPosition, boolean china,
            long sequence) {
        this.formattedId = formattedId;
        this.cityId = cityId;
        this.latitude = latitude;
        this.longitude = longitude;
        this.timeZone = timeZone;
        this.country = country;
        this.province = province;
        this.city = city;
        this.district = district;
        this.weatherSource = weatherSource;
        this.currentPosition = currentPosition;
        this.residentPosition = residentPosition;
        this.china = china;
        this.sequence = sequence;
    }

    // Generated no-args constructor required by greenDAO.
    @Generated(hash = 1723987110)
    public LocationEntity() {
    }

    // Plain generated-style accessors; no logic beyond field access.

    public String getFormattedId() {
        return this.formattedId;
    }

    public void setFormattedId(String formattedId) {
        this.formattedId = formattedId;
    }

    public String getCityId() {
        return this.cityId;
    }

    public void setCityId(String cityId) {
        this.cityId = cityId;
    }

    public float getLatitude() {
        return this.latitude;
    }

    public void setLatitude(float latitude) {
        this.latitude = latitude;
    }

    public float getLongitude() {
        return this.longitude;
    }

    public void setLongitude(float longitude) {
        this.longitude = longitude;
    }

    public TimeZone getTimeZone() {
        return this.timeZone;
    }

    public void setTimeZone(TimeZone timeZone) {
        this.timeZone = timeZone;
    }

    public String getCountry() {
        return this.country;
    }

    public void setCountry(String country) {
        this.country = country;
    }

    public String getProvince() {
        return this.province;
    }

    public void setProvince(String province) {
        this.province = province;
    }

    public String getCity() {
        return this.city;
    }

    public void setCity(String city) {
        this.city = city;
    }

    public String getDistrict() {
        return this.district;
    }

    public void setDistrict(String district) {
        this.district = district;
    }

    public WeatherSource getWeatherSource() {
        return this.weatherSource;
    }

    public void setWeatherSource(WeatherSource weatherSource) {
        this.weatherSource = weatherSource;
    }

    public boolean getCurrentPosition() {
        return this.currentPosition;
    }

    public void setCurrentPosition(boolean currentPosition) {
        this.currentPosition = currentPosition;
    }

    public boolean getResidentPosition() {
        return this.residentPosition;
    }

    public void setResidentPosition(boolean residentPosition) {
        this.residentPosition = residentPosition;
    }

    public boolean getChina() {
        return this.china;
    }

    public void setChina(boolean china) {
        this.china = china;
    }

    public long getSequence() {
        return this.sequence;
    }

    public void setSequence(long sequence) {
        this.sequence = sequence;
    }
}
{ "pile_set_name": "Github" }
PCBNEW-LibModule-V1 10/21/2014 8:27:10 PM # encoding utf-8 Units mm $INDEX PCM_C1 $EndINDEX $MODULE PCM_C1 Po 0 0 0 15 5446F8BE 00000000 ~~ Li PCM_C1 Cd PCM_CON_4x38_C1 Kw PCM_CON_4x38_C1 Sc 0 AR Op 0 0 0 T0 7.5 -10.25 1.5 1 0 0.15 N V 21 N "PCM_CON_4x38_C1" T1 -2.25 0 1.5 1 0 0.15 N V 21 N "Val**" $PAD Sh "1" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 0 -7.5 .ZoneConnection 2 $EndPAD $PAD Sh "2" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 3 -7.5 $EndPAD $PAD Sh "3" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 6 -7.5 $EndPAD $PAD Sh "4" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 9 -7.5 $EndPAD $PAD Sh "5" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 12 -7.5 $EndPAD $PAD Sh "6" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 15 -7.5 $EndPAD $PAD Sh "7" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 18 -7.5 $EndPAD $PAD Sh "8" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 21 -7.5 $EndPAD $PAD Sh "9" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 24 -7.5 $EndPAD $PAD Sh "10" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 27 -7.5 $EndPAD $PAD Sh "29" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 0 7.5 $EndPAD $PAD Sh "30" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 3 7.5 $EndPAD $PAD Sh "31" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 6 7.5 $EndPAD $PAD Sh "32" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 9 7.5 $EndPAD $PAD Sh "33" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 12 7.5 $EndPAD $PAD Sh "34" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 15 7.5 $EndPAD $PAD Sh "35" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 18 7.5 $EndPAD $PAD Sh "36" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 21 7.5 $EndPAD $PAD Sh "37" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 24 7.5 $EndPAD $PAD Sh "38" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 27 7.5 $EndPAD $PAD Sh 
"11" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 1.5 -3 $EndPAD $PAD Sh "12" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 4.508 -3 $EndPAD $PAD Sh "13" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 7.508 -3 $EndPAD $PAD Sh "14" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 10.508 -3 $EndPAD $PAD Sh "15" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 13.508 -3 $EndPAD $PAD Sh "16" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 16.508 -3 $EndPAD $PAD Sh "17" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 19.508 -3 $EndPAD $PAD Sh "18" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 22.508 -3 $EndPAD $PAD Sh "19" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 25.508 -3 $EndPAD $PAD Sh "20" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 1.5 3 $EndPAD $PAD Sh "21" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 4.508 3 $EndPAD $PAD Sh "22" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 7.508 3 $EndPAD $PAD Sh "23" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 10.508 3 $EndPAD $PAD Sh "24" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 13.508 3 $EndPAD $PAD Sh "25" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 16.508 3 $EndPAD $PAD Sh "26" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 19.508 3 $EndPAD $PAD Sh "27" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 22.508 3 $EndPAD $PAD Sh "28" O 1.7 2.7 0 0 0 Dr 1.5 0 0 At STD N 00E0FFFF Ne 0 "" Po 25.508 3 $EndPAD $EndMODULE PCM_C1 $EndLIBRARY
{ "pile_set_name": "Github" }
// Mixture-of-topics model over documents.
// NOTE(review): gamma is indexed by document (gamma[doc[n],k]), so each document
// gets a SINGLE latent topic that is marginalized out with log_sum_exp — this is
// the one-topic-per-document (naive-Bayes-like clustering) variant rather than
// per-word LDA; confirm against the intended model.
// NOTE(review): `<-` assignment, categorical_log(), and increment_log_prob() are
// deprecated (and removed in modern Stan). Kept as-is for the toolchain this file
// targets; modern spellings would be `=`, categorical_lpmf(k | theta), and
// `target += ...`.
data {
  int<lower=2> K;               // number of topics
  int<lower=2> V;               // vocabulary size (number of distinct words)
  int<lower=1> M;               // number of documents
  int<lower=1> N;               // total word instances across all documents
  int<lower=1,upper=V> w[N];    // word id of instance n
  int<lower=1,upper=M> doc[N];  // document id of instance n
  vector<lower=0>[K] alpha;     // Dirichlet prior on topic prevalence
  vector<lower=0>[V] beta;      // Dirichlet prior on per-topic word distributions
}
parameters {
  simplex[K] theta;             // global topic prevalence
  simplex[V] phi[K];            // word distribution for topic k
}
model {
  // gamma[m,k]: running log Pr[topic(m)=k, words of doc m]
  real gamma[M,K];
  theta ~ dirichlet(alpha);
  for (k in 1:K)
    phi[k] ~ dirichlet(beta);
  // initialize with the log prior of assigning topic k to document m
  for (m in 1:M)
    for (k in 1:K)
      gamma[m,k] <- categorical_log(k,theta);
  // add each word's log likelihood under its document's candidate topic
  for (n in 1:N)
    for (k in 1:K)
      gamma[doc[n],k] <- gamma[doc[n],k] + categorical_log(w[n],phi[k]);
  // marginalize the discrete per-document topic out of the joint density
  for (m in 1:M)
    increment_log_prob(log_sum_exp(gamma[m]));
  // to normalize s.t. gamma[m,k] = log Pr[Z2[m] = k|data]
  // gamma[m] <- gamma[m] - log_sum_exp(gamma[m]);
}
{ "pile_set_name": "Github" }
/* This file is part of the OWL API. * The contents of this file are subject to the LGPL License, Version 3.0. * Copyright 2014, The University of Manchester * * This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. * You should have received a copy of the GNU General Public License along with this program. If not, see http://www.gnu.org/licenses/. * * Alternatively, the contents of this file may be used under the terms of the Apache License, Version 2.0 in which case, the provisions of the Apache License Version 2.0 are applicable instead of those above. * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ package org.semanticweb.owlapi.util; import static org.semanticweb.owlapi.util.OWLAPIPreconditions.checkNotNull; import static org.semanticweb.owlapi.util.OWLAPIPreconditions.verifyNotNull; import static org.semanticweb.owlapi.util.OWLAPIStreamUtils.asList; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.stream.Stream; import javax.annotation.Nullable; import org.semanticweb.owlapi.model.EntityType; import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.model.OWLAnnotation; import org.semanticweb.owlapi.model.OWLAnnotationAssertionAxiom; import org.semanticweb.owlapi.model.OWLAnnotationProperty; import org.semanticweb.owlapi.model.OWLAnnotationPropertyDomainAxiom; import org.semanticweb.owlapi.model.OWLAnnotationPropertyRangeAxiom; import org.semanticweb.owlapi.model.OWLAnnotationSubject; import org.semanticweb.owlapi.model.OWLAnnotationValue; import org.semanticweb.owlapi.model.OWLAnonymousIndividual; import org.semanticweb.owlapi.model.OWLAsymmetricObjectPropertyAxiom; import org.semanticweb.owlapi.model.OWLAxiom; import org.semanticweb.owlapi.model.OWLClass; import org.semanticweb.owlapi.model.OWLClassAssertionAxiom; import org.semanticweb.owlapi.model.OWLClassExpression; import org.semanticweb.owlapi.model.OWLDataAllValuesFrom; import org.semanticweb.owlapi.model.OWLDataComplementOf; import org.semanticweb.owlapi.model.OWLDataExactCardinality; import org.semanticweb.owlapi.model.OWLDataFactory; import org.semanticweb.owlapi.model.OWLDataHasValue; import org.semanticweb.owlapi.model.OWLDataIntersectionOf; import org.semanticweb.owlapi.model.OWLDataMaxCardinality; import org.semanticweb.owlapi.model.OWLDataMinCardinality; import org.semanticweb.owlapi.model.OWLDataOneOf; import org.semanticweb.owlapi.model.OWLDataProperty; import org.semanticweb.owlapi.model.OWLDataPropertyAssertionAxiom; import org.semanticweb.owlapi.model.OWLDataPropertyDomainAxiom; import 
org.semanticweb.owlapi.model.OWLDataPropertyRangeAxiom; import org.semanticweb.owlapi.model.OWLDataRange; import org.semanticweb.owlapi.model.OWLDataSomeValuesFrom; import org.semanticweb.owlapi.model.OWLDataUnionOf; import org.semanticweb.owlapi.model.OWLDatatype; import org.semanticweb.owlapi.model.OWLDatatypeDefinitionAxiom; import org.semanticweb.owlapi.model.OWLDatatypeRestriction; import org.semanticweb.owlapi.model.OWLDeclarationAxiom; import org.semanticweb.owlapi.model.OWLDifferentIndividualsAxiom; import org.semanticweb.owlapi.model.OWLDisjointClassesAxiom; import org.semanticweb.owlapi.model.OWLDisjointDataPropertiesAxiom; import org.semanticweb.owlapi.model.OWLDisjointObjectPropertiesAxiom; import org.semanticweb.owlapi.model.OWLDisjointUnionAxiom; import org.semanticweb.owlapi.model.OWLEntity; import org.semanticweb.owlapi.model.OWLEquivalentClassesAxiom; import org.semanticweb.owlapi.model.OWLEquivalentDataPropertiesAxiom; import org.semanticweb.owlapi.model.OWLEquivalentObjectPropertiesAxiom; import org.semanticweb.owlapi.model.OWLFacetRestriction; import org.semanticweb.owlapi.model.OWLFunctionalDataPropertyAxiom; import org.semanticweb.owlapi.model.OWLFunctionalObjectPropertyAxiom; import org.semanticweb.owlapi.model.OWLHasKeyAxiom; import org.semanticweb.owlapi.model.OWLIndividual; import org.semanticweb.owlapi.model.OWLInverseFunctionalObjectPropertyAxiom; import org.semanticweb.owlapi.model.OWLInverseObjectPropertiesAxiom; import org.semanticweb.owlapi.model.OWLIrreflexiveObjectPropertyAxiom; import org.semanticweb.owlapi.model.OWLLiteral; import org.semanticweb.owlapi.model.OWLNamedIndividual; import org.semanticweb.owlapi.model.OWLNegativeDataPropertyAssertionAxiom; import org.semanticweb.owlapi.model.OWLNegativeObjectPropertyAssertionAxiom; import org.semanticweb.owlapi.model.OWLObject; import org.semanticweb.owlapi.model.OWLObjectAllValuesFrom; import org.semanticweb.owlapi.model.OWLObjectComplementOf; import 
org.semanticweb.owlapi.model.OWLObjectExactCardinality; import org.semanticweb.owlapi.model.OWLObjectHasSelf; import org.semanticweb.owlapi.model.OWLObjectHasValue; import org.semanticweb.owlapi.model.OWLObjectIntersectionOf; import org.semanticweb.owlapi.model.OWLObjectInverseOf; import org.semanticweb.owlapi.model.OWLObjectMaxCardinality; import org.semanticweb.owlapi.model.OWLObjectMinCardinality; import org.semanticweb.owlapi.model.OWLObjectOneOf; import org.semanticweb.owlapi.model.OWLObjectProperty; import org.semanticweb.owlapi.model.OWLObjectPropertyAssertionAxiom; import org.semanticweb.owlapi.model.OWLObjectPropertyDomainAxiom; import org.semanticweb.owlapi.model.OWLObjectPropertyRangeAxiom; import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom; import org.semanticweb.owlapi.model.OWLObjectUnionOf; import org.semanticweb.owlapi.model.OWLObjectVisitor; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLProperty; import org.semanticweb.owlapi.model.OWLReflexiveObjectPropertyAxiom; import org.semanticweb.owlapi.model.OWLSameIndividualAxiom; import org.semanticweb.owlapi.model.OWLSubAnnotationPropertyOfAxiom; import org.semanticweb.owlapi.model.OWLSubClassOfAxiom; import org.semanticweb.owlapi.model.OWLSubDataPropertyOfAxiom; import org.semanticweb.owlapi.model.OWLSubObjectPropertyOfAxiom; import org.semanticweb.owlapi.model.OWLSubPropertyChainOfAxiom; import org.semanticweb.owlapi.model.OWLSymmetricObjectPropertyAxiom; import org.semanticweb.owlapi.model.OWLTransitiveObjectPropertyAxiom; import org.semanticweb.owlapi.model.SWRLBuiltInAtom; import org.semanticweb.owlapi.model.SWRLClassAtom; import org.semanticweb.owlapi.model.SWRLDataPropertyAtom; import org.semanticweb.owlapi.model.SWRLDataRangeAtom; import org.semanticweb.owlapi.model.SWRLDifferentIndividualsAtom; import org.semanticweb.owlapi.model.SWRLIndividualArgument; import org.semanticweb.owlapi.model.SWRLLiteralArgument; import 
org.semanticweb.owlapi.model.SWRLObjectPropertyAtom; import org.semanticweb.owlapi.model.SWRLObjectVisitor; import org.semanticweb.owlapi.model.SWRLRule; import org.semanticweb.owlapi.model.SWRLSameIndividualAtom; import org.semanticweb.owlapi.model.SWRLVariable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * @author Matthew Horridge, The University Of Manchester, Bio-Health Informatics Group * @since 2.0.0 */ public class OWLAnnotationPropertyTransformer implements OWLObjectVisitor, SWRLObjectVisitor { private static final Logger LOGGER = LoggerFactory.getLogger(OWLAnnotationPropertyTransformer.class); private final OWLDataFactory df; private final Map<OWLEntity, OWLEntity> replacementMap; @Nullable private Object obj; /** * Creates an object duplicator that duplicates objects using the specified data factory. * * @param dataFactory The data factory to be used for the duplication. */ public OWLAnnotationPropertyTransformer(OWLDataFactory dataFactory) { this(new HashMap<OWLEntity, OWLEntity>(), dataFactory); } /** * Creates an object duplicator that duplicates objects using the specified data factory and uri * replacement map. * * @param dataFactory The data factory to be used for the duplication. * @param iriReplacementMap The map to use for the replacement of URIs. Any uris the appear in * the map will be replaced as objects are duplicated. This can be used to "rename" * entities. */ public OWLAnnotationPropertyTransformer(OWLDataFactory dataFactory, Map<OWLEntity, OWLEntity> iriReplacementMap) { df = checkNotNull(dataFactory, "dataFactory cannot be null"); checkNotNull(iriReplacementMap, "iriReplacementMap cannot be null"); replacementMap = new HashMap<>(iriReplacementMap); } /** * Creates an object duplicator that duplicates objects using the specified data factory and uri * replacement map. * * @param dataFactory The data factory to be used for the duplication. * @param entityIRIReplacementMap The map to use for the replacement of URIs. 
Any uris the * appear in the map will be replaced as objects are duplicated. This can be used to * "rename" entities. */ public OWLAnnotationPropertyTransformer(Map<OWLEntity, OWLEntity> entityIRIReplacementMap, OWLDataFactory dataFactory) { df = checkNotNull(dataFactory, "dataFactory cannot be null"); replacementMap = new HashMap<>( checkNotNull(entityIRIReplacementMap, "entityIRIReplacementMap cannot be null")); } /** * @param object the object to duplicate * @param <O> return type * @return the duplicate */ public <O extends OWLObject> O transformObject(O object) { checkNotNull(object, "object cannot be null"); try { return dup(object); } catch (ClassCastException e) { LOGGER.error( "Attempt to transform an axiom to correct misuse of properties failed. Property replacement: {}, axiom: {}, error: {}", replacementMap, object, e.getMessage()); obj = object; return object; } } protected void setLastObject(Object obj) { this.obj = obj; } @SuppressWarnings({"unchecked",}) protected <O extends OWLObject> O dup(O o) { o.accept(this); return (O) verifyNotNull(obj); } /** * Given an IRI belonging to an entity, returns a IRI. This may be the same IRI that the entity * has, or an alternative IRI if a replacement has been specified. 
* * @param entity The entity * @return The IRI */ private OWLEntity getIRI(OWLEntity entity) { OWLEntity replacement = replacementMap.get(entity); if (replacement != null) { return replacement; } return entity; } private Collection<OWLAnnotation> anns(OWLAxiom ax) { return set(ax.annotations()); } @Override public void visit(OWLAsymmetricObjectPropertyAxiom ax) { obj = df.getOWLAsymmetricObjectPropertyAxiom(dup(ax.getProperty()), anns(ax)); } @Override public void visit(OWLClassAssertionAxiom ax) { obj = df.getOWLClassAssertionAxiom(dup(ax.getClassExpression()), dup(ax.getIndividual()), anns(ax)); } @Override public void visit(OWLDataPropertyAssertionAxiom ax) { obj = df.getOWLDataPropertyAssertionAxiom(dup(ax.getProperty()), dup(ax.getSubject()), dup(ax.getObject()), anns(ax)); } @Override public void visit(OWLDataPropertyDomainAxiom ax) { obj = df.getOWLDataPropertyDomainAxiom(dup(ax.getProperty()), dup(ax.getDomain()), anns(ax)); } @Override public void visit(OWLDataPropertyRangeAxiom ax) { obj = df.getOWLDataPropertyRangeAxiom(dup(ax.getProperty()), dup(ax.getRange()), anns(ax)); } @Override public void visit(OWLSubDataPropertyOfAxiom ax) { obj = df.getOWLSubDataPropertyOfAxiom(dup(ax.getSubProperty()), dup(ax.getSuperProperty()), anns(ax)); } @Override public void visit(OWLDeclarationAxiom ax) { obj = df.getOWLDeclarationAxiom(dup(ax.getEntity()), anns(ax)); } @Override public void visit(OWLDifferentIndividualsAxiom ax) { obj = df.getOWLDifferentIndividualsAxiom(set(ax.individuals()), anns(ax)); } @Override public void visit(OWLDisjointClassesAxiom ax) { obj = df.getOWLDisjointClassesAxiom(set(ax.classExpressions()), anns(ax)); } @Override public void visit(OWLDisjointDataPropertiesAxiom ax) { obj = df.getOWLDisjointDataPropertiesAxiom(set(ax.properties()), anns(ax)); } @Override public void visit(OWLDisjointObjectPropertiesAxiom ax) { obj = df.getOWLDisjointObjectPropertiesAxiom(set(ax.properties()), anns(ax)); } @Override public void 
visit(OWLDisjointUnionAxiom ax) { obj = df.getOWLDisjointUnionAxiom(dup(ax.getOWLClass()), set(ax.classExpressions()), anns(ax)); } @Override public void visit(OWLAnnotationAssertionAxiom ax) { OWLAnnotationSubject subject = dup(ax.getSubject()); OWLProperty prop = dup(ax.getProperty()); OWLAnnotationValue value = dup(ax.getValue()); if (prop.isObjectPropertyExpression()) { // turn to object property assertion OWLIndividual individual; OWLIndividual relatedIndividual; if (subject instanceof OWLAnonymousIndividual) { individual = (OWLIndividual) subject; } else { individual = df.getOWLNamedIndividual((IRI) subject); } if (value instanceof OWLIndividual) { relatedIndividual = (OWLIndividual) value; } else { relatedIndividual = df.getOWLNamedIndividual((IRI) value); } obj = df.getOWLObjectPropertyAssertionAxiom(prop.asOWLObjectProperty(), individual, relatedIndividual, asList(ax.annotations())); return; } else if (prop.isDataPropertyExpression()) { // turn to data property assertion OWLIndividual individual; if (subject instanceof OWLAnonymousIndividual) { individual = (OWLIndividual) subject; } else { individual = df.getOWLNamedIndividual((IRI) subject); } obj = df.getOWLDataPropertyAssertionAxiom(prop.asOWLDataProperty(), individual, (OWLLiteral) value, asList(ax.annotations())); return; } obj = df.getOWLAnnotationAssertionAxiom(prop.asOWLAnnotationProperty(), subject, value, anns(ax)); } @Override public void visit(OWLEquivalentClassesAxiom ax) { obj = df.getOWLEquivalentClassesAxiom(set(ax.classExpressions()), anns(ax)); } @Override public void visit(OWLEquivalentDataPropertiesAxiom ax) { obj = df.getOWLEquivalentDataPropertiesAxiom(set(ax.properties()), anns(ax)); } @Override public void visit(OWLEquivalentObjectPropertiesAxiom ax) { obj = df.getOWLEquivalentObjectPropertiesAxiom(set(ax.properties()), anns(ax)); } @Override public void visit(OWLFunctionalDataPropertyAxiom ax) { obj = df.getOWLFunctionalDataPropertyAxiom(dup(ax.getProperty()), anns(ax)); } 
@Override public void visit(OWLFunctionalObjectPropertyAxiom ax) { obj = df.getOWLFunctionalObjectPropertyAxiom(dup(ax.getProperty()), anns(ax)); } @Override public void visit(OWLInverseFunctionalObjectPropertyAxiom ax) { obj = df.getOWLInverseFunctionalObjectPropertyAxiom(dup(ax.getProperty()), anns(ax)); } @Override public void visit(OWLInverseObjectPropertiesAxiom ax) { obj = df.getOWLInverseObjectPropertiesAxiom(dup(ax.getFirstProperty()), dup(ax.getSecondProperty()), anns(ax)); } @Override public void visit(OWLIrreflexiveObjectPropertyAxiom ax) { obj = df.getOWLIrreflexiveObjectPropertyAxiom(dup(ax.getProperty()), anns(ax)); } @Override public void visit(OWLNegativeDataPropertyAssertionAxiom ax) { obj = df.getOWLNegativeDataPropertyAssertionAxiom(dup(ax.getProperty()), dup(ax.getSubject()), dup(ax.getObject()), anns(ax)); } @Override public void visit(OWLNegativeObjectPropertyAssertionAxiom ax) { obj = df.getOWLNegativeObjectPropertyAssertionAxiom(dup(ax.getProperty()), dup(ax.getSubject()), dup(ax.getObject()), anns(ax)); } @Override public void visit(OWLObjectPropertyAssertionAxiom ax) { obj = df.getOWLObjectPropertyAssertionAxiom(dup(ax.getProperty()), dup(ax.getSubject()), dup(ax.getObject()), anns(ax)); } @Override public void visit(OWLSubPropertyChainOfAxiom ax) { obj = df.getOWLSubPropertyChainOfAxiom(asList(ax.getPropertyChain().stream().map(this::dup)), dup(ax.getSuperProperty()), anns(ax)); } @Override public void visit(OWLObjectPropertyDomainAxiom ax) { obj = df.getOWLObjectPropertyDomainAxiom(dup(ax.getProperty()), dup(ax.getDomain()), anns(ax)); } @Override public void visit(OWLObjectPropertyRangeAxiom ax) { obj = df.getOWLObjectPropertyRangeAxiom(dup(ax.getProperty()), dup(ax.getRange()), anns(ax)); } @Override public void visit(OWLSubObjectPropertyOfAxiom ax) { obj = df.getOWLSubObjectPropertyOfAxiom(dup(ax.getSubProperty()), dup(ax.getSuperProperty()), anns(ax)); } @Override public void visit(OWLReflexiveObjectPropertyAxiom ax) { obj = 
df.getOWLReflexiveObjectPropertyAxiom(dup(ax.getProperty()), anns(ax)); } @Override public void visit(OWLSameIndividualAxiom ax) { obj = df.getOWLSameIndividualAxiom(set(ax.individuals()), anns(ax)); } @Override public void visit(OWLSubClassOfAxiom ax) { obj = df.getOWLSubClassOfAxiom(dup(ax.getSubClass()), dup(ax.getSuperClass()), anns(ax)); } @Override public void visit(OWLSymmetricObjectPropertyAxiom ax) { obj = df.getOWLSymmetricObjectPropertyAxiom(dup(ax.getProperty()), anns(ax)); } @Override public void visit(OWLTransitiveObjectPropertyAxiom ax) { obj = df.getOWLTransitiveObjectPropertyAxiom(dup(ax.getProperty()), anns(ax)); } @Override public void visit(OWLClass ce) { obj = getIRI(ce); } @Override public void visit(OWLDataAllValuesFrom ce) { obj = df.getOWLDataAllValuesFrom(dup(ce.getProperty()), dup(ce.getFiller())); } @Override public void visit(OWLDataExactCardinality ce) { obj = df.getOWLDataExactCardinality(ce.getCardinality(), dup(ce.getProperty()), dup(ce.getFiller())); } @Override public void visit(OWLDataMaxCardinality ce) { obj = df.getOWLDataMaxCardinality(ce.getCardinality(), dup(ce.getProperty()), dup(ce.getFiller())); } @Override public void visit(OWLDataMinCardinality ce) { obj = df.getOWLDataMinCardinality(ce.getCardinality(), dup(ce.getProperty()), dup(ce.getFiller())); } @Override public void visit(OWLDataSomeValuesFrom ce) { obj = df.getOWLDataSomeValuesFrom(dup(ce.getProperty()), dup(ce.getFiller())); } @Override public void visit(OWLDataHasValue ce) { obj = df.getOWLDataHasValue(dup(ce.getProperty()), dup(ce.getFiller())); } @Override public void visit(OWLObjectAllValuesFrom ce) { obj = df.getOWLObjectAllValuesFrom(dup(ce.getProperty()), dup(ce.getFiller())); } @Override public void visit(OWLObjectComplementOf ce) { obj = df.getOWLObjectComplementOf(dup(ce.getOperand())); } @Override public void visit(OWLObjectExactCardinality ce) { obj = df.getOWLObjectExactCardinality(ce.getCardinality(), dup(ce.getProperty()), dup(ce.getFiller())); } 
@Override public void visit(OWLObjectIntersectionOf ce) { obj = df.getOWLObjectIntersectionOf(set(ce.operands())); } @Override public void visit(OWLObjectMaxCardinality ce) { obj = df.getOWLObjectMaxCardinality(ce.getCardinality(), dup(ce.getProperty()), dup(ce.getFiller())); } @Override public void visit(OWLObjectMinCardinality ce) { obj = df.getOWLObjectMinCardinality(ce.getCardinality(), dup(ce.getProperty()), dup(ce.getFiller())); } @Override public void visit(OWLObjectOneOf ce) { obj = df.getOWLObjectOneOf(set(ce.individuals())); } @Override public void visit(OWLObjectHasSelf ce) { obj = df.getOWLObjectHasSelf(dup(ce.getProperty())); } @Override public void visit(OWLObjectSomeValuesFrom ce) { obj = df.getOWLObjectSomeValuesFrom(dup(ce.getProperty()), dup(ce.getFiller())); } @Override public void visit(OWLObjectUnionOf ce) { obj = df.getOWLObjectUnionOf(set(ce.operands())); } @Override public void visit(OWLObjectHasValue ce) { obj = df.getOWLObjectHasValue(dup(ce.getProperty()), dup(ce.getFiller())); } @Override public void visit(OWLDataComplementOf node) { obj = df.getOWLDataComplementOf(dup(node.getDataRange())); } @Override public void visit(OWLDataOneOf node) { obj = df.getOWLDataOneOf(set(node.values())); } @Override public void visit(OWLDatatype node) { obj = getIRI(node); } @Override public void visit(OWLDatatypeRestriction node) { obj = df.getOWLDatatypeRestriction(dup(node.getDatatype()), asList(node.facetRestrictions().map(this::dup))); } @Override public void visit(OWLFacetRestriction node) { obj = df.getOWLFacetRestriction(node.getFacet(), dup(node.getFacetValue())); } @Override public void visit(OWLLiteral node) { if (node.hasLang()) { obj = df.getOWLLiteral(node.getLiteral(), node.getLang()); } else { obj = df.getOWLLiteral(node.getLiteral(), dup(node.getDatatype())); } } @Override public void visit(OWLDataProperty property) { obj = getIRI(property); } @Override public void visit(OWLObjectProperty property) { obj = getIRI(property); } @Override 
public void visit(OWLObjectInverseOf property) { obj = df.getOWLObjectInverseOf(dup(property.getInverse()).asOWLObjectProperty()); } @Override public void visit(OWLNamedIndividual individual) { obj = getIRI(individual); } @Override public void visit(OWLOntology ontology) { // Should we duplicate ontologies here? Probably not. obj = ontology; } @Override public void visit(SWRLRule rule) { obj = df.getSWRLRule(asList(rule.body().map(this::dup)), asList(rule.head().map(this::dup))); } @Override public void visit(SWRLClassAtom node) { obj = df.getSWRLClassAtom(dup(node.getPredicate()), dup(node.getArgument())); } @Override public void visit(SWRLDataRangeAtom node) { obj = df.getSWRLDataRangeAtom(dup(node.getPredicate()), dup(node.getArgument())); } @Override public void visit(SWRLObjectPropertyAtom node) { obj = df.getSWRLObjectPropertyAtom(dup(node.getPredicate()), dup(node.getFirstArgument()), dup(node.getSecondArgument())); } @Override public void visit(SWRLDataPropertyAtom node) { obj = df.getSWRLDataPropertyAtom(dup(node.getPredicate()), dup(node.getFirstArgument()), dup(node.getSecondArgument())); } @Override public void visit(SWRLBuiltInAtom node) { obj = df.getSWRLBuiltInAtom(node.getPredicate(), asList(node.arguments().map(this::dup))); } @Override public void visit(SWRLDifferentIndividualsAtom node) { obj = df.getSWRLDifferentIndividualsAtom(dup(node.getFirstArgument()), dup(node.getSecondArgument())); } @Override public void visit(SWRLSameIndividualAtom node) { obj = df.getSWRLSameIndividualAtom(dup(node.getFirstArgument()), dup(node.getSecondArgument())); } @Override public void visit(SWRLVariable node) { obj = df.getSWRLVariable(dup(node.getIRI())); } @Override public void visit(SWRLIndividualArgument node) { obj = df.getSWRLIndividualArgument(dup(node.getIndividual())); } @Override public void visit(SWRLLiteralArgument node) { obj = df.getSWRLLiteralArgument(dup(node.getLiteral())); } @Override public void visit(OWLHasKeyAxiom ax) { obj = 
df.getOWLHasKeyAxiom(dup(ax.getClassExpression()), set(ax.propertyExpressions()), anns(ax)); } @Override public void visit(OWLDataIntersectionOf node) { obj = df.getOWLDataIntersectionOf(set(node.operands())); } @Override public void visit(OWLDataUnionOf node) { obj = df.getOWLDataUnionOf(set(node.operands())); } @Override public void visit(OWLAnnotationProperty property) { obj = getIRI(property); } @Override public void visit(OWLAnnotationPropertyDomainAxiom ax) { OWLProperty prop = dup(ax.getProperty()); IRI domain = dup(ax.getDomain()); if (prop.isObjectPropertyExpression()) { // turn to object property domain OWLClassExpression d = df.getOWLClass(domain); LOGGER.warn( "Annotation property domain axiom turned to object property domain after parsing. This could introduce errors if the original domain was an anonymous expression: {} is the new domain.", domain); obj = df.getOWLObjectPropertyDomainAxiom(prop.asOWLObjectProperty(), d, asList(ax.annotations())); return; } else if (prop.isDataPropertyExpression()) { // turn to data property domain OWLClassExpression d = df.getOWLClass(domain); LOGGER.warn( "Annotation property domain axiom turned to data property domain after parsing. This could introduce errors if the original domain was an anonymous expression: {} is the new domain.", domain); obj = df.getOWLDataPropertyDomainAxiom(prop.asOWLDataProperty(), d, asList(ax.annotations())); return; } obj = df.getOWLAnnotationPropertyDomainAxiom(prop.asOWLAnnotationProperty(), domain, anns(ax)); } @Override public void visit(OWLAnnotationPropertyRangeAxiom ax) { OWLProperty prop = dup(ax.getProperty()); IRI range = dup(ax.getRange()); if (prop.isObjectPropertyExpression()) { // turn to object property domain OWLClassExpression d = df.getOWLClass(range); LOGGER.warn( "Annotation property range axiom turned to object property range after parsing. 
This could introduce errors if the original range was an anonymous expression: {} is the new domain.", range); obj = df.getOWLObjectPropertyRangeAxiom(prop.asOWLObjectProperty(), d, asList(ax.annotations())); return; } else if (prop.isDataPropertyExpression()) { // turn to data property domain OWLDataRange d = df.getOWLDatatype(range); LOGGER.warn( "Annotation property range axiom turned to data property range after parsing. This could introduce errors if the original range was an anonymous expression: {} is the new domain.", range); obj = df.getOWLDataPropertyRangeAxiom(prop.asOWLDataProperty(), d, asList(ax.annotations())); return; } obj = df.getOWLAnnotationPropertyRangeAxiom(prop.asOWLAnnotationProperty(), range, anns(ax)); } @Override public void visit(OWLSubAnnotationPropertyOfAxiom ax) { OWLProperty sub = dup(ax.getSubProperty()); OWLProperty sup = dup(ax.getSuperProperty()); if (sub.isObjectPropertyExpression() || sup.isObjectPropertyExpression()) { // check: it is possible that the properties represent an actual // illegal punning, where this fix cannot be applied if (sub.isOWLObjectProperty() && sup.isOWLObjectProperty()) { obj = df.getOWLSubObjectPropertyOfAxiom(sub.asOWLObjectProperty(), sup.asOWLObjectProperty(), asList(ax.annotations())); } else { // cannot repair: leave unchanged obj = ax; } return; } else if (sub.isDataPropertyExpression() || sup.isDataPropertyExpression()) { if (sub.isOWLDataProperty() && sup.isOWLDataProperty()) { obj = df.getOWLSubDataPropertyOfAxiom(sub.asOWLDataProperty(), sup.asOWLDataProperty(), asList(ax.annotations())); } else { // cannot repair: leave unchanged obj = ax; } return; } if (sub.isOWLAnnotationProperty() && sup.isOWLAnnotationProperty()) { obj = df.getOWLSubAnnotationPropertyOfAxiom(sub.asOWLAnnotationProperty(), sup.asOWLAnnotationProperty(), anns(ax)); } else { // cannot repair: leave unchanged obj = ax; } } @Override public void visit(OWLAnnotation node) { obj = df.getOWLAnnotation(dup(node.getProperty()), 
dup(node.getValue())); } @Override public void visit(OWLAnonymousIndividual individual) { obj = individual; } @Override public void visit(IRI iri) { obj = iri; for (EntityType<?> entityType : EntityType.values()) { assert entityType != null; OWLEntity entity = df.getOWLEntity(entityType, iri); OWLEntity replacementIRI = replacementMap.get(entity); if (replacementIRI != null) { obj = replacementIRI.getIRI(); break; } } } @Override public void visit(OWLDatatypeDefinitionAxiom ax) { obj = df.getOWLDatatypeDefinitionAxiom(dup(ax.getDatatype()), dup(ax.getDataRange()), anns(ax)); } /** * A utility function that duplicates a set of objects. * * @param <O> type * @param objects The set of object to be duplicated * @return The set of duplicated objects */ private <O extends OWLObject> Collection<O> set(Stream<O> objects) { return asList(objects.map(this::dup)); } }
{ "pile_set_name": "Github" }
/******************** (C) COPYRIGHT 2003 STMicroelectronics ********************
* File Name          : 71x_type.h
* Author             : MCD Application Team
* Date First Issued  : 05/16/2003
* Description        : Common data types
********************************************************************************
* History:
*  30/11/2004 : V2.0
*  14/07/2004 : V1.3
*  01/01/2004 : V1.2
*******************************************************************************
 THE PRESENT SOFTWARE WHICH IS FOR GUIDANCE ONLY AIMS AT PROVIDING CUSTOMERS WITH
 CODING INFORMATION REGARDING THEIR PRODUCTS IN ORDER FOR THEM TO SAVE TIME. AS A
 RESULT, STMICROELECTRONICS SHALL NOT BE HELD LIABLE FOR ANY DIRECT, INDIRECT OR
 CONSEQUENTIAL DAMAGES WITH RESPECT TO ANY CLAIMS ARISING FROM THE CONTENT OF SUCH
 SOFTWARE AND/OR THE USE MADE BY CUSTOMERS OF THE CODING INFORMATION CONTAINED
 HEREIN IN CONNECTION WITH THEIR PRODUCTS.
*******************************************************************************/
#ifndef _71x_type_H
#define _71x_type_H

/* Fixed-width integer aliases. NOTE(review): widths assume an ILP32 target
   (long = 32 bit, short = 16 bit, char = 8 bit), which holds for the STR71x
   ARM toolchains this header was written for — confirm before reusing
   elsewhere. */
typedef unsigned long   u32;
typedef unsigned short  u16;
typedef unsigned char   u8;

typedef signed long     s32;
typedef signed short    s16;
typedef signed char     s8;

/* Volatile variants, intended for memory-mapped peripheral registers so the
   compiler never caches or elides accesses. */
typedef volatile unsigned long  vu32;
typedef volatile unsigned short vu16;
typedef volatile unsigned char  vu8;

typedef volatile signed long    vs32;
typedef volatile signed short   vs16;
typedef volatile signed char    vs8;

/*===================================================================*/
/* Generic boolean type (this header predates use of C99 <stdbool.h>;
   including both in one translation unit will conflict). */
typedef enum { FALSE = 0, TRUE  = !FALSE } bool;
/*===================================================================*/
/* Two-state status of a flag/bit: cleared (RESET) or raised (SET). */
typedef enum { RESET = 0, SET = !RESET } FlagStatus;
/*===================================================================*/
/* On/off switch passed to peripheral configuration functions. */
typedef enum { DISABLE = 0, ENABLE = !DISABLE} FunctionalState;
/*===================================================================*/
/* Register access mode selector. NOTE(review): presumably distinguishes
   directly vs. indirectly addressed peripheral registers — confirm against
   the peripheral driver headers that consume it. */
typedef enum { INDIRECT = 0, DIRECT = !INDIRECT} RegisterAccess;
/*===================================================================*/

#endif /* _71x_type_H */

/******************* (C) COPYRIGHT 2003 STMicroelectronics *****END OF FILE****/
{ "pile_set_name": "Github" }
<?php
/**
 * API object representing a system-partner package: a numeric id plus a
 * display name, mapped 1:1 onto the underlying core object.
 *
 * @package plugins.systemPartner
 * @subpackage api.objects
 */
class KalturaSystemPartnerPackage extends KalturaObject
{
	/**
	 * Numeric identifier of the package.
	 * @var int
	 */
	public $id;

	/**
	 * Display name of the package.
	 * @var string
	 */
	public $name;

	// Properties copied verbatim between this API object and the core object.
	private static $map_between_objects = array(
		"id",
		"name",
	);

	/**
	 * Merges this object's property map into the map inherited from the
	 * parent class.
	 */
	public function getMapBetweenObjects()
	{
		$inherited = parent::getMapBetweenObjects();
		return array_merge($inherited, self::$map_between_objects);
	}
}
{ "pile_set_name": "Github" }
/*
 *
 *  Licensed to the Apache Software Foundation (ASF) under one or more
 *  contributor license agreements.  See the NOTICE file distributed with
 *  this work for additional information regarding copyright ownership.
 *  The ASF licenses this file to You under the Apache License, Version 2.0
 *  (the "License"); you may not use this file except in compliance with
 *  the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 */

package com.adobe.internal.fxg.dom.types;

/**
 * The Font Style enumeration.
 *
 * <pre>
 *   0 = normal
 *   1 = italic
 * </pre>
 *
 */
public enum FontStyle
{
    /**
     * The enum representing a 'normal' Font Style.
     */
    NORMAL,

    /**
     * The enum representing an 'italic' Font Style.
     */
    ITALIC;
}
/**
 * Licensed to Apereo under one or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information regarding copyright ownership. Apereo
 * licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use
 * this file except in compliance with the License. You may obtain a copy of the License at the
 * following location:
 *
 * <p>http://www.apache.org/licenses/LICENSE-2.0
 *
 * <p>Unless required by applicable law or agreed to in writing, software distributed under the
 * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apereo.portal.events.aggr.groups;

import java.util.Set;

/**
 * Used to map portal group keys to a more static group mapping object.
 *
 * <p>NOTE(review): whether the lookup methods return {@code null} or create a
 * mapping for unknown keys is not visible from this interface — confirm with
 * the implementations before relying on either behavior.
 */
public interface AggregatedGroupLookupDao {
    /**
     * Get the group mapping object for the specified group mapping id.
     *
     * @param groupMappingId id of the {@link AggregatedGroupMapping} to load
     * @return the mapping for the given id
     */
    AggregatedGroupMapping getGroupMapping(long groupMappingId);

    /**
     * Get the group mapping object for the specified group key.
     *
     * @param portalGroupKey the portal group key to resolve
     * @return the mapping for the given key
     */
    AggregatedGroupMapping getGroupMapping(String portalGroupKey);

    /**
     * Get the group mapping for the specified service and name.
     *
     * @param groupService the group service that owns the group
     * @param groupName the name of the group within that service
     * @return the mapping for the given service/name pair
     */
    AggregatedGroupMapping getGroupMapping(String groupService, String groupName);

    /**
     * Get the set of all groups that have been aggregated.
     *
     * @return every known {@link AggregatedGroupMapping}
     */
    Set<AggregatedGroupMapping> getGroupMappings();
}
/*
 * Copyright (c) 2018-2020 "Graph Foundation"
 * Graph Foundation, Inc. [https://graphfoundation.org]
 *
 * Copyright (c) 2002-2018 "Neo4j,"
 * Neo4j Sweden AB [http://neo4j.com]
 *
 * This file is part of ONgDB Enterprise Edition. The included source
 * code can be redistributed and/or modified under the terms of the
 * GNU AFFERO GENERAL PUBLIC LICENSE Version 3
 * (http://www.fsf.org/licensing/licenses/agpl-3.0.html) as found
 * in the associated LICENSE.txt file.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 */
package org.neo4j.management;

import org.neo4j.jmx.Description;
import org.neo4j.jmx.ManagementInterface;

/**
 * JMX management interface exposing the branched stores present on an HA
 * cluster member.
 *
 * @deprecated this management interface is deprecated; no replacement is
 *     visible from this file — check the project's deprecation notes.
 */
@Deprecated
@ManagementInterface( name = BranchedStore.NAME )
@Description( "Information about the branched stores present in this HA cluster member" )
public interface BranchedStore
{
    /** Name under which this management bean is registered. */
    String NAME = "Branched Store";

    /**
     * @return one descriptor per branched store found on this member
     */
    @Description( "A list of the branched stores" )
    BranchedStoreInfo[] getBranchedStores();
}
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
// Use of this source code is governed by a MIT license found in the LICENSE file.

// +build !go1.5

package codec

// genCheckVendor is permanently false on toolchains older than Go 1.5
// (selected by the build tag above). NOTE(review): presumably this relates
// to the vendor-directory support introduced in Go 1.5 and a counterpart
// go1.5+ file sets it differently — confirm against that file.
var genCheckVendor = false
// CodeMirror, copyright (c) by Marijn Haverbeke and others // Distributed under an MIT license: http://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS mod(require("../../lib/codemirror")); else if (typeof define == "function" && define.amd) // AMD define(["../../lib/codemirror"], mod); else // Plain browser env mod(CodeMirror); })(function(CodeMirror) { "use strict"; CodeMirror.defineMode("vb", function(conf, parserConf) { var ERRORCLASS = 'error'; function wordRegexp(words) { return new RegExp("^((" + words.join(")|(") + "))\\b", "i"); } var singleOperators = new RegExp("^[\\+\\-\\*/%&\\\\|\\^~<>!]"); var singleDelimiters = new RegExp('^[\\(\\)\\[\\]\\{\\}@,:`=;\\.]'); var doubleOperators = new RegExp("^((==)|(<>)|(<=)|(>=)|(<>)|(<<)|(>>)|(//)|(\\*\\*))"); var doubleDelimiters = new RegExp("^((\\+=)|(\\-=)|(\\*=)|(%=)|(/=)|(&=)|(\\|=)|(\\^=))"); var tripleDelimiters = new RegExp("^((//=)|(>>=)|(<<=)|(\\*\\*=))"); var identifiers = new RegExp("^[_A-Za-z][_A-Za-z0-9]*"); var openingKeywords = ['class','module', 'sub','enum','select','while','if','function', 'get','set','property', 'try']; var middleKeywords = ['else','elseif','case', 'catch']; var endKeywords = ['next','loop']; var operatorKeywords = ['and', 'or', 'not', 'xor', 'in']; var wordOperators = wordRegexp(operatorKeywords); var commonKeywords = ['as', 'dim', 'break', 'continue','optional', 'then', 'until', 'goto', 'byval','byref','new','handles','property', 'return', 'const','private', 'protected', 'friend', 'public', 'shared', 'static', 'true','false']; var commontypes = ['integer','string','double','decimal','boolean','short','char', 'float','single']; var keywords = wordRegexp(commonKeywords); var types = wordRegexp(commontypes); var stringPrefixes = '"'; var opening = wordRegexp(openingKeywords); var middle = wordRegexp(middleKeywords); var closing = wordRegexp(endKeywords); var doubleClosing = wordRegexp(['end']); var doOpening = 
wordRegexp(['do']); var indentInfo = null; CodeMirror.registerHelper("hintWords", "vb", openingKeywords.concat(middleKeywords).concat(endKeywords) .concat(operatorKeywords).concat(commonKeywords).concat(commontypes)); function indent(_stream, state) { state.currentIndent++; } function dedent(_stream, state) { state.currentIndent--; } // tokenizers function tokenBase(stream, state) { if (stream.eatSpace()) { return null; } var ch = stream.peek(); // Handle Comments if (ch === "'") { stream.skipToEnd(); return 'comment'; } // Handle Number Literals if (stream.match(/^((&H)|(&O))?[0-9\.a-f]/i, false)) { var floatLiteral = false; // Floats if (stream.match(/^\d*\.\d+F?/i)) { floatLiteral = true; } else if (stream.match(/^\d+\.\d*F?/)) { floatLiteral = true; } else if (stream.match(/^\.\d+F?/)) { floatLiteral = true; } if (floatLiteral) { // Float literals may be "imaginary" stream.eat(/J/i); return 'number'; } // Integers var intLiteral = false; // Hex if (stream.match(/^&H[0-9a-f]+/i)) { intLiteral = true; } // Octal else if (stream.match(/^&O[0-7]+/i)) { intLiteral = true; } // Decimal else if (stream.match(/^[1-9]\d*F?/)) { // Decimal literals may be "imaginary" stream.eat(/J/i); // TODO - Can you have imaginary longs? intLiteral = true; } // Zero by itself with no other piece of number. 
else if (stream.match(/^0(?![\dx])/i)) { intLiteral = true; } if (intLiteral) { // Integer literals may be "long" stream.eat(/L/i); return 'number'; } } // Handle Strings if (stream.match(stringPrefixes)) { state.tokenize = tokenStringFactory(stream.current()); return state.tokenize(stream, state); } // Handle operators and Delimiters if (stream.match(tripleDelimiters) || stream.match(doubleDelimiters)) { return null; } if (stream.match(doubleOperators) || stream.match(singleOperators) || stream.match(wordOperators)) { return 'operator'; } if (stream.match(singleDelimiters)) { return null; } if (stream.match(doOpening)) { indent(stream,state); state.doInCurrentLine = true; return 'keyword'; } if (stream.match(opening)) { if (! state.doInCurrentLine) indent(stream,state); else state.doInCurrentLine = false; return 'keyword'; } if (stream.match(middle)) { return 'keyword'; } if (stream.match(doubleClosing)) { dedent(stream,state); dedent(stream,state); return 'keyword'; } if (stream.match(closing)) { dedent(stream,state); return 'keyword'; } if (stream.match(types)) { return 'keyword'; } if (stream.match(keywords)) { return 'keyword'; } if (stream.match(identifiers)) { return 'variable'; } // Handle non-detected items stream.next(); return ERRORCLASS; } function tokenStringFactory(delimiter) { var singleline = delimiter.length == 1; var OUTCLASS = 'string'; return function(stream, state) { while (!stream.eol()) { stream.eatWhile(/[^'"]/); if (stream.match(delimiter)) { state.tokenize = tokenBase; return OUTCLASS; } else { stream.eat(/['"]/); } } if (singleline) { if (parserConf.singleLineStringErrors) { return ERRORCLASS; } else { state.tokenize = tokenBase; } } return OUTCLASS; }; } function tokenLexer(stream, state) { var style = state.tokenize(stream, state); var current = stream.current(); // Handle '.' 
connected identifiers if (current === '.') { style = state.tokenize(stream, state); current = stream.current(); if (style === 'variable') { return 'variable'; } else { return ERRORCLASS; } } var delimiter_index = '[({'.indexOf(current); if (delimiter_index !== -1) { indent(stream, state ); } if (indentInfo === 'dedent') { if (dedent(stream, state)) { return ERRORCLASS; } } delimiter_index = '])}'.indexOf(current); if (delimiter_index !== -1) { if (dedent(stream, state)) { return ERRORCLASS; } } return style; } var external = { electricChars:"dDpPtTfFeE ", startState: function() { return { tokenize: tokenBase, lastToken: null, currentIndent: 0, nextLineIndent: 0, doInCurrentLine: false }; }, token: function(stream, state) { if (stream.sol()) { state.currentIndent += state.nextLineIndent; state.nextLineIndent = 0; state.doInCurrentLine = 0; } var style = tokenLexer(stream, state); state.lastToken = {style:style, content: stream.current()}; return style; }, indent: function(state, textAfter) { var trueText = textAfter.replace(/^\s+|\s+$/g, '') ; if (trueText.match(closing) || trueText.match(doubleClosing) || trueText.match(middle)) return conf.indentUnit*(state.currentIndent-1); if(state.currentIndent < 0) return 0; return state.currentIndent * conf.indentUnit; }, lineComment: "'" }; return external; }); CodeMirror.defineMIME("text/x-vb", "vb"); });
{ "pile_set_name": "Github" }
# this file is managed by dry-rb/devtools project name: docsite on: push: paths: - docsite/** - .github/workflows/docsite.yml branches: - master - release-** tags: jobs: update-docs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 with: fetch-depth: 0 - run: | git fetch --no-tags --prune --depth=1 origin +refs/heads/*:refs/remotes/origin/* - name: Set up Ruby uses: actions/setup-ruby@v1 with: ruby-version: "2.6.x" - name: Set up git user run: | git config --local user.email "dry-bot@dry-rb.org" git config --local user.name "dry-bot" - name: Install dependencies run: gem install ossy --no-document - name: Update release branches run: | branches=`git log --format=%B -n 1 $GITHUB_SHA | grep "docsite:release-" || echo "nothing"` if [[ ! $branches -eq "nothing" ]]; then for b in $branches do name=`echo $b | ruby -e 'puts gets[/:(.+)/, 1].gsub(/\s+/, "")'` echo "merging $GITHUB_SHA to $name" git checkout -b $name --track origin/$name echo `git log -n 1` git cherry-pick $GITHUB_SHA -m 1 done git push --all "https://dry-bot:${{secrets.GH_PAT}}@github.com/$GITHUB_REPOSITORY.git" git checkout master else echo "no need to update branches" fi - name: Trigger dry-rb.org deploy env: GITHUB_LOGIN: dry-bot GITHUB_TOKEN: ${{secrets.GH_PAT}} run: ossy github workflow dry-rb/dry-rb.org ci
{ "pile_set_name": "Github" }
@import "../scrollbars/mixins"; @import "variables"; .mdl-datepicker { display : block; width : $_datepicker_width; .mdl-datepicker__year, .mdl-datepicker__date { opacity : 0.8; cursor: pointer; &.is-active { opacity : 1; cursor: default; } } .mdl-datepicker__day_view { height: $_datepicker_content_height; @at-root .mdl-datepicker.show-year-view { .mdl-datepicker__day_view { display: none; } } .mdl-datepicker__month_selection { display : flex; flex-direction : row; flex-wrap : nowrap; justify-content : flex-start; // ⇾ align-content : space-between; // || align-items : center; // ↓ margin-bottom : 8px; i { cursor: pointer; } .mdl-datepicker__month_selection--month { flex-grow : 1; text-align : center; } } .mdl-datepicker__dow { display : flex; flex-direction : row; flex-wrap : nowrap; justify-content : space-between; // ⇾ align-content : space-around; // || align-items : center; // ↓ height : $_datepicker_line_height; font-size : $_datepicker_day_font-size; opacity : 0.6; > * { text-align : center; width : calc(100% / 7); } } .mdl-datepicker__dom { display : flex; flex-direction : column; flex-wrap : wrap; justify-content : flex-start; // ↓ align-content : space-between; // = align-items : stretch; // ⇾ .mdl-datepicker__dom__row { display : flex; flex-direction : row; flex-wrap : wrap; justify-content : space-between; // ⇾ align-content : space-around; // || align-items : center; // ↓ width: 100%; height : $_datepicker_line_height; > .mdl-datepicker__dom--day { display : flex; flex-direction : column; flex-wrap : wrap; justify-content : center; // ↓ align-content : space-around; // = align-items : center; // ⇾ text-align : center; width : calc(100% / 7); font-size : $_datepicker_day_font-size; line-height : $_datepicker_line_height; cursor: pointer; // Selection &.mdl-color--accent { border-radius : 100%; vertical-align : middle; } } } } } .mdl-datepicker__year_view { @include hide_scrollbar(false); display: none; height: $_datepicker_content_height; overflow : 
scroll; @at-root .mdl-datepicker.show-year-view { .mdl-datepicker__year_view { display : flex; flex-direction : column; flex-wrap : wrap; justify-content : flex-start; // ↓ align-content : space-between; // = align-items : center; // ⇾ } } ul { width: 100%; li { display : flex; flex-direction : row; flex-wrap : wrap; justify-content : center; // ⇾ align-content : space-between; // || align-items : center; // ↓ cursor : pointer; &.mdl-color-text--accent { font-size : 26px; } } } } } // Special Date-Picker-Dialog settings .mdl-dialog.mdl-datepicker { left: calc(50% - (#{$_datepicker_width}) / 2); .mdl-dialog-container & { max-height : 90%; } .mdl-dialog__toolbar { min-height : 60px; // Padding top+bottom 18px x 2 + 60 = 96 } .mdl-dialog__content { padding : 18px 18px 0 18px; } .mdl-dialog__actions { padding-top : 0; padding-right : 0; margin-right : -8px; margin-bottom : 2px; } }
{ "pile_set_name": "Github" }
import pandas as pd
from sklearn.preprocessing import LabelEncoder  # NOTE(review): appears unused in this module; kept for notebook compatibility — confirm before removing
import janitor as jn  # imported for its side effect: registers pandas-flavor methods (clean_names, rename_column, label_encode)
import numpy as np


def load_finches_2012():
    """Load and clean the 2012 finch beak measurements."""
    path = '../data/finch_beaks_2012.csv'
    return load_finches(path)


def load_finches_1975():
    """Load and clean the 1975 finch beak measurements.

    The 1975 file carries ``*_mm``-suffixed column names, so they are renamed
    here to match the 2012 schema (``beak_length`` / ``beak_depth``).
    """
    path = '../data/finch_beaks_1975.csv'
    df = load_finches(path)
    df = df.rename_column('beak_length_mm', 'beak_length').rename_column('beak_depth_mm', 'beak_depth')
    return df


def load_finches(path):
    """Read a finch beak CSV and apply the shared cleaning pipeline.

    :param path: path to the CSV file to load.
    :return: DataFrame with cleaned column names, ``beak_length`` /
        ``beak_depth`` columns, and a numeric ``species_enc`` column.
    """
    df = (
        pd.read_csv(path)
        .clean_names()  # clean column names
        .rename_column('blength', 'beak_length')  # readability fix
        .rename_column('bdepth', 'beak_depth')  # readability fix
        .label_encode('species')  # adds a `species_enc` column with the species encoded numerically
    )
    return df


def load_baseball():
    """Load 2016+ batting records and derive a batting average.

    Zero at-bat counts are converted to NaN and dropped so ``H / AB`` is
    always well defined; only the first 15 qualifying rows are returned.
    """
    df = pd.read_csv('../data/baseballdb/core/Batting.csv')
    df['AB'] = df['AB'].replace(0, np.nan)
    df = df.dropna()
    df['batting_avg'] = df['H'] / df['AB']
    df = df[df['yearID'] >= 2016]
    df = df.iloc[0:15]
    # (removed a no-op `df.head(5)` statement whose result was discarded)
    return df


def load_sterilization():
    """Load the sterilization data with a label-encoded treatment column.

    :return: tuple ``(df, mapping)`` where ``mapping`` maps each treatment
        name to its encoded ``treatment_enc`` value.
    """
    df = (
        pd.read_csv('../data/sterilization.csv',
                    na_filter=True, na_values=['#DIV/0!'])
        .clean_names()
        .label_encode('treatment')
    )
    mapping = dict(zip(df['treatment'], df['treatment_enc']))
    return df, mapping


def load_kruschke():
    """Load the Kruschke IQ data with a label-encoded treatment column."""
    df = (
        pd.read_csv('../data/iq.csv', index_col=0)  # comment out the path to the file for students.
        .label_encode('treatment')
    )
    return df


# Constants for load_decay
tau = 71.9  # indium decay half life
A = 42  # starting magnitude
C = 21  # constant background offset ("measurement error" baseline)
noise_scale = 1  # std. dev. of the additive Gaussian noise


def load_decay():
    """Simulate a noisy exponential (radioactive decay) time series.

    :return: DataFrame with columns ``t`` (0..999) and ``activity``
        (A * exp(-t/tau) + C plus Gaussian noise).
    """
    t = np.arange(0, 1000)

    def decay_func(ts, noise):
        # Bug fix: use the `ts` parameter instead of silently closing over
        # the outer `t` (the original ignored its first argument). Behavior
        # is unchanged because the only call site passes `t` as `ts`.
        return A * np.exp(-ts / tau) + C + np.random.normal(0, noise, size=len(ts))

    data = {'t': t, 'activity': decay_func(t, noise_scale)}
    df = pd.DataFrame(data)
    return df
{ "pile_set_name": "Github" }
<?php

/*
 * This file is part of SwiftMailer.
 * (c) 2004-2009 Chris Corbyn
 *
 * For the full copyright and license information, please view the LICENSE
 * file that was distributed with this source code.
 */

/**
 * Handles binary/7/8-bit Transfer Encoding in Swift Mailer.
 *
 * @author Chris Corbyn
 */
class Swift_Mime_ContentEncoder_PlainContentEncoder implements Swift_Mime_ContentEncoder
{
    /**
     * The name of this encoding scheme (probably 7bit or 8bit).
     *
     * @var string
     */
    private $_name;

    /**
     * True if canonical transformations should be done.
     *
     * @var bool
     */
    private $_canonical;

    /**
     * Creates a new PlainContentEncoder with $name (probably 7bit or 8bit).
     *
     * @param string $name
     * @param bool   $canonical If canonicalization transformation should be done.
     */
    public function __construct($name, $canonical = false)
    {
        $this->_name = $name;
        $this->_canonical = $canonical;
    }

    /**
     * Encode a given string to produce an encoded string.
     *
     * @param string $string
     * @param int    $firstLineOffset ignored
     * @param int    $maxLineLength   - 0 means no wrapping will occur
     *
     * @return string
     */
    public function encodeString($string, $firstLineOffset = 0, $maxLineLength = 0)
    {
        if ($this->_canonical) {
            $string = $this->_canonicalize($string);
        }

        return $this->_safeWordWrap($string, $maxLineLength, "\r\n");
    }

    /**
     * Encode stream $os into stream $is.
     *
     * Reads in 8 KiB chunks; the (possibly partial) last line of each wrapped
     * chunk is carried over to the next iteration so wrapping never splits a
     * line at a chunk boundary.
     *
     * @param Swift_OutputByteStream $os
     * @param Swift_InputByteStream  $is
     * @param int                    $firstLineOffset ignored
     * @param int                    $maxLineLength   optional, 0 means no wrapping will occur
     */
    public function encodeByteStream(Swift_OutputByteStream $os, Swift_InputByteStream $is, $firstLineOffset = 0, $maxLineLength = 0)
    {
        $leftOver = '';
        while (false !== $bytes = $os->read(8192)) {
            $toencode = $leftOver . $bytes;
            if ($this->_canonical) {
                $toencode = $this->_canonicalize($toencode);
            }
            $wrapped = $this->_safeWordWrap($toencode, $maxLineLength, "\r\n");
            // Hold back everything after the final CRLF for the next chunk.
            $lastLinePos = strrpos($wrapped, "\r\n");
            $leftOver = substr($wrapped, $lastLinePos);
            $wrapped = substr($wrapped, 0, $lastLinePos);

            $is->write($wrapped);
        }
        if (strlen($leftOver)) {
            $is->write($leftOver);
        }
    }

    /**
     * Get the name of this encoding scheme.
     *
     * @return string
     */
    public function getName()
    {
        return $this->_name;
    }

    /**
     * Not used.
     */
    public function charsetChanged($charset)
    {
    }

    /**
     * A safer (but weaker) wordwrap for unicode.
     *
     * Splits $string on $le, then greedily packs whitespace-terminated chunks
     * into lines of at most $length characters (a single over-long chunk is
     * never broken).
     *
     * Note: the method name's case was normalized to match its call sites
     * (PHP method names are case-insensitive, so this is behavior-neutral),
     * and the result is now rejoined with the caller-supplied $le instead of
     * a hard-coded CRLF — all callers in this class pass CRLF, so their
     * output is unchanged.
     *
     * @param string $string
     * @param int    $length
     * @param string $le
     *
     * @return string
     */
    private function _safeWordWrap($string, $length = 75, $le = "\r\n")
    {
        if (0 >= $length) {
            return $string;
        }

        $originalLines = explode($le, $string);

        $lines = array();
        $lineCount = 0;

        foreach ($originalLines as $originalLine) {
            $lines[] = '';
            $currentLine =& $lines[$lineCount++];

            $chunks = preg_split('/(?<=\s)/', $originalLine);

            foreach ($chunks as $chunk) {
                if (0 != strlen($currentLine)
                    && strlen($currentLine . $chunk) > $length) {
                    $lines[] = '';
                    $currentLine =& $lines[$lineCount++];
                }
                $currentLine .= $chunk;
            }
        }

        return implode($le, $lines);
    }

    /**
     * Canonicalize string input (fix CRLF).
     *
     * Normalizes any of CRLF / bare CR / bare LF to CRLF.
     *
     * @param string $string
     *
     * @return string
     */
    private function _canonicalize($string)
    {
        return str_replace(
            array("\r\n", "\r", "\n"),
            array("\n", "\n", "\r\n"),
            $string
        );
    }
}
#include <algorithm>
#include <map>
#include <utility>
#include <vector>

#include "caffe/layers/multibox_loss_layer.hpp"
#include "caffe/util/math_functions.hpp"

namespace caffe {

// Parses MultiBoxLossParameter, validates the configuration, and constructs
// the internal localization and confidence loss sub-layers through which
// matched (prediction, ground-truth) pairs are forwarded.
// bottom[0]: location predictions, bottom[1]: confidence predictions,
// bottom[2]: prior boxes, bottom[3]: ground-truth annotations (see the
// propagate_down defaults set below: gradients flow only to bottom[0]/[1]).
template <typename Dtype>
void MultiBoxLossLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
      const vector<Blob<Dtype>*>& top) {
  LossLayer<Dtype>::LayerSetUp(bottom, top);
  if (this->layer_param_.propagate_down_size() == 0) {
    // Default: backprop to loc/conf predictions only, never to priors/labels.
    this->layer_param_.add_propagate_down(true);
    this->layer_param_.add_propagate_down(true);
    this->layer_param_.add_propagate_down(false);
    this->layer_param_.add_propagate_down(false);
  }
  const MultiBoxLossParameter& multibox_loss_param =
      this->layer_param_.multibox_loss_param();

  num_ = bottom[0]->num();
  // Each prior contributes 4 box coordinates, so height == 4 * num_priors.
  num_priors_ = bottom[2]->height() / 4;
  // Get other parameters.
  CHECK(multibox_loss_param.has_num_classes()) << "Must provide num_classes.";
  num_classes_ = multibox_loss_param.num_classes();
  CHECK_GE(num_classes_, 1) << "num_classes should not be less than 1.";
  share_location_ = multibox_loss_param.share_location();
  // With shared locations all classes use one set of box regressors.
  loc_classes_ = share_location_ ? 1 : num_classes_;
  match_type_ = multibox_loss_param.match_type();
  overlap_threshold_ = multibox_loss_param.overlap_threshold();
  use_prior_for_matching_ = multibox_loss_param.use_prior_for_matching();
  background_label_id_ = multibox_loss_param.background_label_id();
  use_difficult_gt_ = multibox_loss_param.use_difficult_gt();
  do_neg_mining_ = multibox_loss_param.do_neg_mining();
  neg_pos_ratio_ = multibox_loss_param.neg_pos_ratio();
  neg_overlap_ = multibox_loss_param.neg_overlap();
  code_type_ = multibox_loss_param.code_type();
  encode_variance_in_target_ = multibox_loss_param.encode_variance_in_target();
  map_object_to_agnostic_ = multibox_loss_param.map_object_to_agnostic();
  if (map_object_to_agnostic_) {
    // Class-agnostic mode: exactly one foreground class (plus optional bg).
    if (background_label_id_ >= 0) {
      CHECK_EQ(num_classes_, 2);
    } else {
      CHECK_EQ(num_classes_, 1);
    }
  }
  if (!this->layer_param_.loss_param().has_normalization() &&
      this->layer_param_.loss_param().has_normalize()) {
    // Honor the deprecated boolean "normalize" flag when the newer
    // "normalization" enum is absent.
    normalization_ = this->layer_param_.loss_param().normalize() ?
                     LossParameter_NormalizationMode_VALID :
                     LossParameter_NormalizationMode_BATCH_SIZE;
  } else {
    normalization_ = this->layer_param_.loss_param().normalization();
  }
  if (do_neg_mining_) {
    CHECK(share_location_)
        << "Currently only support negative mining if share_location is true.";
    CHECK_GT(neg_pos_ratio_, 0);
  }
  vector<int> loss_shape(1, 1);
  // Set up localization loss layer.
  loc_weight_ = multibox_loss_param.loc_weight();
  loc_loss_type_ = multibox_loss_param.loc_loss_type();
  // fake shape. (Re-shaped per batch in Forward_cpu once matches are known.)
  vector<int> loc_shape(1, 1);
  loc_shape.push_back(4);
  loc_pred_.Reshape(loc_shape);
  loc_gt_.Reshape(loc_shape);
  loc_bottom_vec_.push_back(&loc_pred_);
  loc_bottom_vec_.push_back(&loc_gt_);
  loc_loss_.Reshape(loss_shape);
  loc_top_vec_.push_back(&loc_loss_);
  if (loc_loss_type_ == MultiBoxLossParameter_LocLossType_L2) {
    LayerParameter layer_param;
    layer_param.set_name(this->layer_param_.name() + "_l2_loc");
    layer_param.set_type("EuclideanLoss");
    layer_param.add_loss_weight(loc_weight_);
    loc_loss_layer_ = LayerRegistry<Dtype>::CreateLayer(layer_param);
    loc_loss_layer_->SetUp(loc_bottom_vec_, loc_top_vec_);
  } else if (loc_loss_type_ == MultiBoxLossParameter_LocLossType_SMOOTH_L1) {
    LayerParameter layer_param;
    layer_param.set_name(this->layer_param_.name() + "_smooth_L1_loc");
    layer_param.set_type("SmoothL1Loss");
    layer_param.add_loss_weight(loc_weight_);
    loc_loss_layer_ = LayerRegistry<Dtype>::CreateLayer(layer_param);
    loc_loss_layer_->SetUp(loc_bottom_vec_, loc_top_vec_);
  } else {
    LOG(FATAL) << "Unknown localization loss type.";
  }
  // Set up confidence loss layer.
  conf_loss_type_ = multibox_loss_param.conf_loss_type();
  conf_bottom_vec_.push_back(&conf_pred_);
  conf_bottom_vec_.push_back(&conf_gt_);
  conf_loss_.Reshape(loss_shape);
  conf_top_vec_.push_back(&conf_loss_);
  if (conf_loss_type_ == MultiBoxLossParameter_ConfLossType_SOFTMAX) {
    LayerParameter layer_param;
    layer_param.set_name(this->layer_param_.name() + "_softmax_conf");
    layer_param.set_type("SoftmaxWithLoss");
    layer_param.add_loss_weight(Dtype(1.));
    // Normalization is applied by this layer, not the sub-layer.
    layer_param.mutable_loss_param()->set_normalization(
        LossParameter_NormalizationMode_NONE);
    SoftmaxParameter* softmax_param = layer_param.mutable_softmax_param();
    softmax_param->set_axis(1);
    // Fake reshape.
    vector<int> conf_shape(1, 1);
    conf_gt_.Reshape(conf_shape);
    conf_shape.push_back(num_classes_);
    conf_pred_.Reshape(conf_shape);
    conf_loss_layer_ = LayerRegistry<Dtype>::CreateLayer(layer_param);
    conf_loss_layer_->SetUp(conf_bottom_vec_, conf_top_vec_);
  } else if (conf_loss_type_ == MultiBoxLossParameter_ConfLossType_LOGISTIC) {
    LayerParameter layer_param;
    layer_param.set_name(this->layer_param_.name() + "_logistic_conf");
    layer_param.set_type("SigmoidCrossEntropyLoss");
    layer_param.add_loss_weight(Dtype(1.));
    // Fake reshape.
    vector<int> conf_shape(1, 1);
    conf_shape.push_back(num_classes_);
    conf_gt_.Reshape(conf_shape);
    conf_pred_.Reshape(conf_shape);
    conf_loss_layer_ = LayerRegistry<Dtype>::CreateLayer(layer_param);
    conf_loss_layer_->SetUp(conf_bottom_vec_, conf_top_vec_);
  } else {
    LOG(FATAL) << "Unknown confidence loss type.";
  }
}

// Re-reads batch/prior/ground-truth counts each iteration and validates that
// the prediction blobs agree with the number of priors.
template <typename Dtype>
void MultiBoxLossLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
      const vector<Blob<Dtype>*>& top) {
  LossLayer<Dtype>::Reshape(bottom, top);
  num_ = bottom[0]->num();
  num_priors_ = bottom[2]->height() / 4;
  num_gt_ = bottom[3]->height();
  CHECK_EQ(bottom[0]->num(), bottom[1]->num());
  CHECK_EQ(num_priors_ * loc_classes_ * 4, bottom[0]->channels())
      << "Number of priors must match number of location predictions.";
  CHECK_EQ(num_priors_ * num_classes_, bottom[1]->channels())
      << "Number of priors must match number of confidence predictions.";
}

// Forward pass:
// 1. Match priors/predictions against ground truth per image.
// 2. Optionally mine hard negatives (highest-scoring unmatched priors).
// 3. Gather matched loc/conf data into internal blobs and forward them
//    through the loc/conf sub-loss layers.
// Match results are stored in all_match_indices_/all_neg_indices_ for reuse
// in Backward_cpu (cleared there).
template <typename Dtype>
void MultiBoxLossLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
    const vector<Blob<Dtype>*>& top) {
  const Dtype* loc_data = bottom[0]->cpu_data();
  const Dtype* conf_data = bottom[1]->cpu_data();
  const Dtype* prior_data = bottom[2]->cpu_data();
  const Dtype* gt_data = bottom[3]->cpu_data();

  // Retrieve all ground truth.
  map<int, vector<NormalizedBBox> > all_gt_bboxes;
  GetGroundTruth(gt_data, num_gt_, background_label_id_, use_difficult_gt_,
                 &all_gt_bboxes);

  // Retrieve all prior bboxes. It is same within a batch since we assume all
  // images in a batch are of same dimension.
  vector<NormalizedBBox> prior_bboxes;
  vector<vector<float> > prior_variances;
  GetPriorBBoxes(prior_data, num_priors_, &prior_bboxes, &prior_variances);

  // Retrieve all predictions.
  vector<LabelBBox> all_loc_preds;
  GetLocPredictions(loc_data, num_, num_priors_, loc_classes_, share_location_,
                    &all_loc_preds);

  // Retrieve max scores for each prior. Used in negative mining.
  vector<vector<float> > all_max_scores;
  if (do_neg_mining_) {
    GetMaxConfidenceScores(conf_data, num_, num_priors_, num_classes_,
                           background_label_id_, conf_loss_type_,
                           &all_max_scores);
  }

  num_matches_ = 0;
  int num_negs = 0;
  for (int i = 0; i < num_; ++i) {
    map<int, vector<int> > match_indices;
    vector<int> neg_indices;
    // Check if there is ground truth for current image.
    if (all_gt_bboxes.find(i) == all_gt_bboxes.end()) {
      // There is no gt for current image. All predictions are negative.
      all_match_indices_.push_back(match_indices);
      all_neg_indices_.push_back(neg_indices);
      continue;
    }
    // Find match between predictions and ground truth.
    const vector<NormalizedBBox>& gt_bboxes = all_gt_bboxes.find(i)->second;
    map<int, vector<float> > match_overlaps;
    if (!use_prior_for_matching_) {
      // Match decoded predictions (not raw priors) against ground truth.
      for (int c = 0; c < loc_classes_; ++c) {
        int label = share_location_ ? -1 : c;
        if (!share_location_ && label == background_label_id_) {
          // Ignore background loc predictions.
          continue;
        }
        // Decode the prediction into bbox first.
        vector<NormalizedBBox> loc_bboxes;
        DecodeBBoxes(prior_bboxes, prior_variances,
                     code_type_, encode_variance_in_target_,
                     all_loc_preds[i][label], &loc_bboxes);
        MatchBBox(gt_bboxes, loc_bboxes, label, match_type_,
                  overlap_threshold_, &match_indices[label],
                  &match_overlaps[label]);
      }
    } else {
      // Use prior bboxes to match against all ground truth.
      vector<int> temp_match_indices;
      vector<float> temp_match_overlaps;
      const int label = -1;
      MatchBBox(gt_bboxes, prior_bboxes, label, match_type_, overlap_threshold_,
                &temp_match_indices, &temp_match_overlaps);
      if (share_location_) {
        match_indices[label] = temp_match_indices;
        match_overlaps[label] = temp_match_overlaps;
      } else {
        // Get ground truth label for each ground truth bbox.
        vector<int> gt_labels;
        for (int g = 0; g < gt_bboxes.size(); ++g) {
          gt_labels.push_back(gt_bboxes[g].label());
        }
        // Distribute the matching results to different loc_class.
        for (int c = 0; c < loc_classes_; ++c) {
          if (c == background_label_id_) {
            // Ignore background loc predictions.
            continue;
          }
          match_indices[c].resize(temp_match_indices.size(), -1);
          match_overlaps[c] = temp_match_overlaps;
          for (int m = 0; m < temp_match_indices.size(); ++m) {
            if (temp_match_indices[m] != -1) {
              const int gt_idx = temp_match_indices[m];
              CHECK_LT(gt_idx, gt_labels.size());
              // A class-specific regressor only claims matches whose ground
              // truth carries that class label.
              if (c == gt_labels[gt_idx]) {
                match_indices[c][m] = gt_idx;
              }
            }
          }
        }
      }
    }
    // Record matching statistics.
    for (map<int, vector<int> >::iterator it = match_indices.begin();
         it != match_indices.end(); ++it) {
      const int label = it->first;
      // Get positive indices.
      int num_pos = 0;
      for (int m = 0; m < match_indices[label].size(); ++m) {
        if (match_indices[label][m] != -1) {
          ++num_pos;
        }
      }
      num_matches_ += num_pos;
      if (do_neg_mining_) {
        // Get max scores for all the non-matched priors.
        vector<pair<float, int> > scores_indices;
        int num_neg = 0;
        for (int m = 0; m < match_indices[label].size(); ++m) {
          if (match_indices[label][m] == -1 &&
              match_overlaps[label][m] < neg_overlap_) {
            scores_indices.push_back(std::make_pair(all_max_scores[i][m], m));
            ++num_neg;
          }
        }
        // Pick top num_neg negatives.
        num_neg = std::min(static_cast<int>(num_pos * neg_pos_ratio_), num_neg);
        std::sort(scores_indices.begin(), scores_indices.end(),
                  SortScorePairDescend<int>);
        for (int n = 0; n < num_neg; ++n) {
          neg_indices.push_back(scores_indices[n].second);
        }
        num_negs += num_neg;
      }
    }
    all_match_indices_.push_back(match_indices);
    all_neg_indices_.push_back(neg_indices);
  }

  if (num_matches_ >= 1) {
    // Form data to pass on to loc_loss_layer_.
    vector<int> loc_shape(2);
    loc_shape[0] = 1;
    loc_shape[1] = num_matches_ * 4;
    loc_pred_.Reshape(loc_shape);
    loc_gt_.Reshape(loc_shape);
    Dtype* loc_pred_data = loc_pred_.mutable_cpu_data();
    Dtype* loc_gt_data = loc_gt_.mutable_cpu_data();
    int count = 0;
    for (int i = 0; i < num_; ++i) {
      for (map<int, vector<int> >::iterator it = all_match_indices_[i].begin();
           it != all_match_indices_[i].end(); ++it) {
        const int label = it->first;
        const vector<int>& match_index = it->second;
        CHECK(all_loc_preds[i].find(label) != all_loc_preds[i].end());
        const vector<NormalizedBBox>& loc_pred = all_loc_preds[i][label];
        for (int j = 0; j < match_index.size(); ++j) {
          if (match_index[j] == -1) {
            continue;
          }
          // Store location prediction.
          CHECK_LT(j, loc_pred.size());
          loc_pred_data[count * 4] = loc_pred[j].xmin();
          loc_pred_data[count * 4 + 1] = loc_pred[j].ymin();
          loc_pred_data[count * 4 + 2] = loc_pred[j].xmax();
          loc_pred_data[count * 4 + 3] = loc_pred[j].ymax();
          // Store encoded ground truth.
          const int gt_idx = match_index[j];
          CHECK(all_gt_bboxes.find(i) != all_gt_bboxes.end());
          CHECK_LT(gt_idx, all_gt_bboxes[i].size());
          const NormalizedBBox& gt_bbox = all_gt_bboxes[i][gt_idx];
          NormalizedBBox gt_encode;
          CHECK_LT(j, prior_bboxes.size());
          EncodeBBox(prior_bboxes[j], prior_variances[j], code_type_,
                     encode_variance_in_target_, gt_bbox, &gt_encode);
          loc_gt_data[count * 4] = gt_encode.xmin();
          loc_gt_data[count * 4 + 1] = gt_encode.ymin();
          loc_gt_data[count * 4 + 2] = gt_encode.xmax();
          loc_gt_data[count * 4 + 3] = gt_encode.ymax();
          if (encode_variance_in_target_) {
            // Scale both prediction and target by the prior variance so the
            // sub-loss operates in variance-normalized space.
            for (int k = 0; k < 4; ++k) {
              CHECK_GT(prior_variances[j][k], 0);
              loc_pred_data[count * 4 + k] /= prior_variances[j][k];
              loc_gt_data[count * 4 + k] /= prior_variances[j][k];
            }
          }
          ++count;
        }
      }
    }
    loc_loss_layer_->Reshape(loc_bottom_vec_, loc_top_vec_);
    loc_loss_layer_->Forward(loc_bottom_vec_, loc_top_vec_);
  }

  // Form data to pass on to conf_loss_layer_.
  if (do_neg_mining_) {
    num_conf_ = num_matches_ + num_negs;
  } else {
    num_conf_ = num_ * num_priors_;
  }
  if (num_conf_ >= 1) {
    // Reshape the confidence data.
    vector<int> conf_shape;
    if (conf_loss_type_ == MultiBoxLossParameter_ConfLossType_SOFTMAX) {
      conf_shape.push_back(num_conf_);
      conf_gt_.Reshape(conf_shape);
      conf_shape.push_back(num_classes_);
      conf_pred_.Reshape(conf_shape);
    } else if (conf_loss_type_ == MultiBoxLossParameter_ConfLossType_LOGISTIC) {
      conf_shape.push_back(1);
      conf_shape.push_back(num_conf_);
      conf_shape.push_back(num_classes_);
      conf_gt_.Reshape(conf_shape);
      conf_pred_.Reshape(conf_shape);
    } else {
      LOG(FATAL) << "Unknown confidence loss type.";
    }
    if (!do_neg_mining_) {
      // Consider all scores.
      // Share data and diff with bottom[1].
      CHECK_EQ(conf_pred_.count(), bottom[1]->count());
      conf_pred_.ShareData(*(bottom[1]));
    }
    Dtype* conf_pred_data = conf_pred_.mutable_cpu_data();
    Dtype* conf_gt_data = conf_gt_.mutable_cpu_data();
    // Default every target to background; positives/negatives overwrite below.
    caffe_set(conf_gt_.count(), Dtype(background_label_id_), conf_gt_data);
    int count = 0;
    for (int i = 0; i < num_; ++i) {
      if (all_gt_bboxes.find(i) != all_gt_bboxes.end()) {
        // Save matched (positive) bboxes scores and labels.
        const map<int, vector<int> >& match_indices = all_match_indices_[i];
        for (int j = 0; j < num_priors_; ++j) {
          for (map<int, vector<int> >::const_iterator it =
               match_indices.begin(); it != match_indices.end(); ++it) {
            const vector<int>& match_index = it->second;
            CHECK_EQ(match_index.size(), num_priors_);
            if (match_index[j] == -1) {
              continue;
            }
            const int gt_label = map_object_to_agnostic_ ?
                background_label_id_ + 1 :
                all_gt_bboxes[i][match_index[j]].label();
            // With mining, targets are written compactly; without, in place.
            int idx = do_neg_mining_ ? count : j;
            switch (conf_loss_type_) {
              case MultiBoxLossParameter_ConfLossType_SOFTMAX:
                conf_gt_data[idx] = gt_label;
                break;
              case MultiBoxLossParameter_ConfLossType_LOGISTIC:
                conf_gt_data[idx * num_classes_ + gt_label] = 1;
                break;
              default:
                LOG(FATAL) << "Unknown conf loss type.";
            }
            if (do_neg_mining_) {
              // Copy scores for matched bboxes.
              caffe_copy<Dtype>(num_classes_, conf_data + j * num_classes_,
                                conf_pred_data + count * num_classes_);
              ++count;
            }
          }
        }
        if (do_neg_mining_) {
          // Save negative bboxes scores and labels.
          for (int n = 0; n < all_neg_indices_[i].size(); ++n) {
            int j = all_neg_indices_[i][n];
            CHECK_LT(j, num_priors_);
            caffe_copy<Dtype>(num_classes_, conf_data + j * num_classes_,
                              conf_pred_data + count * num_classes_);
            switch (conf_loss_type_) {
              case MultiBoxLossParameter_ConfLossType_SOFTMAX:
                conf_gt_data[count] = background_label_id_;
                break;
              case MultiBoxLossParameter_ConfLossType_LOGISTIC:
                conf_gt_data[count * num_classes_ + background_label_id_] = 1;
                break;
              default:
                LOG(FATAL) << "Unknown conf loss type.";
            }
            ++count;
          }
        }
      }
      // Go to next image.
      if (do_neg_mining_) {
        conf_data += bottom[1]->offset(1);
      } else {
        conf_gt_data += num_priors_;
      }
    }
    conf_loss_layer_->Reshape(conf_bottom_vec_, conf_top_vec_);
    conf_loss_layer_->Forward(conf_bottom_vec_, conf_top_vec_);
  }

  top[0]->mutable_cpu_data()[0] = 0;
  if (this->layer_param_.propagate_down(0)) {
    // TODO(weiliu89): Understand why it needs to divide 2.
    Dtype normalizer = LossLayer<Dtype>::GetNormalizer(
        normalization_, num_, num_priors_, num_matches_);
    top[0]->mutable_cpu_data()[0] +=
        loc_weight_ * loc_loss_.cpu_data()[0] / normalizer;
  }
  if (this->layer_param_.propagate_down(1)) {
    // TODO(weiliu89): Understand why it needs to divide 2.
    Dtype normalizer = LossLayer<Dtype>::GetNormalizer(
        normalization_, num_, num_priors_, num_matches_);
    top[0]->mutable_cpu_data()[0] += conf_loss_.cpu_data()[0] / normalizer;
  }
}

// Backward pass: runs the sub-loss backward passes, scales their gradients by
// the (normalized) top loss weight, and scatters the compact per-match diffs
// back to the prediction blobs using the indices recorded in Forward_cpu.
// Clears the recorded match statistics at the end.
template <typename Dtype>
void MultiBoxLossLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
    const vector<bool>& propagate_down,
    const vector<Blob<Dtype>*>& bottom) {
  if (propagate_down[2]) {
    LOG(FATAL) << this->type()
        << " Layer cannot backpropagate to prior inputs.";
  }
  if (propagate_down[3]) {
    LOG(FATAL) << this->type()
        << " Layer cannot backpropagate to label inputs.";
  }

  // Back propagate on location prediction.
  if (propagate_down[0]) {
    Dtype* loc_bottom_diff = bottom[0]->mutable_cpu_diff();
    caffe_set(bottom[0]->count(), Dtype(0), loc_bottom_diff);
    if (num_matches_ >= 1) {
      vector<bool> loc_propagate_down;
      // Only back propagate on prediction, not ground truth.
      loc_propagate_down.push_back(true);
      loc_propagate_down.push_back(false);
      loc_loss_layer_->Backward(loc_top_vec_, loc_propagate_down,
                                loc_bottom_vec_);
      // Scale gradient.
      Dtype normalizer = LossLayer<Dtype>::GetNormalizer(
          normalization_, num_, num_priors_, num_matches_);
      Dtype loss_weight = top[0]->cpu_diff()[0] / normalizer;
      caffe_scal(loc_pred_.count(), loss_weight,
                 loc_pred_.mutable_cpu_diff());
      // Copy gradient back to bottom[0].
      const Dtype* loc_pred_diff = loc_pred_.cpu_diff();
      int count = 0;
      for (int i = 0; i < num_; ++i) {
        for (map<int, vector<int> >::iterator it =
             all_match_indices_[i].begin();
             it != all_match_indices_[i].end(); ++it) {
          const int label = share_location_ ? 0 : it->first;
          const vector<int>& match_index = it->second;
          for (int j = 0; j < match_index.size(); ++j) {
            if (match_index[j] == -1) {
              continue;
            }
            // Copy the diff to the right place.
            int start_idx = loc_classes_ * 4 * j + label * 4;
            caffe_copy<Dtype>(4, loc_pred_diff + count * 4,
                              loc_bottom_diff + start_idx);
            ++count;
          }
        }
        // Advance to the next image's diff region.
        loc_bottom_diff += bottom[0]->offset(1);
      }
    }
  }

  // Back propagate on confidence prediction.
  if (propagate_down[1]) {
    Dtype* conf_bottom_diff = bottom[1]->mutable_cpu_diff();
    caffe_set(bottom[1]->count(), Dtype(0), conf_bottom_diff);
    if (num_conf_ >= 1) {
      vector<bool> conf_propagate_down;
      // Only back propagate on prediction, not ground truth.
      conf_propagate_down.push_back(true);
      conf_propagate_down.push_back(false);
      conf_loss_layer_->Backward(conf_top_vec_, conf_propagate_down,
                                 conf_bottom_vec_);
      // Scale gradient.
      Dtype normalizer = LossLayer<Dtype>::GetNormalizer(
          normalization_, num_, num_priors_, num_matches_);
      Dtype loss_weight = top[0]->cpu_diff()[0] / normalizer;
      caffe_scal(conf_pred_.count(), loss_weight,
                 conf_pred_.mutable_cpu_diff());
      // Copy gradient back to bottom[1].
      const Dtype* conf_pred_diff = conf_pred_.cpu_diff();
      if (do_neg_mining_) {
        int count = 0;
        for (int i = 0; i < num_; ++i) {
          // Copy matched (positive) bboxes scores' diff.
          const map<int, vector<int> >& match_indices = all_match_indices_[i];
          for (int j = 0; j < num_priors_; ++j) {
            for (map<int, vector<int> >::const_iterator it =
                 match_indices.begin(); it != match_indices.end(); ++it) {
              const vector<int>& match_index = it->second;
              CHECK_EQ(match_index.size(), num_priors_);
              if (match_index[j] == -1) {
                continue;
              }
              // Copy the diff to the right place.
              caffe_copy<Dtype>(num_classes_,
                                conf_pred_diff + count * num_classes_,
                                conf_bottom_diff + j * num_classes_);
              ++count;
            }
          }
          // Copy negative bboxes scores' diff.
          for (int n = 0; n < all_neg_indices_[i].size(); ++n) {
            int j = all_neg_indices_[i][n];
            CHECK_LT(j, num_priors_);
            caffe_copy<Dtype>(num_classes_,
                              conf_pred_diff + count * num_classes_,
                              conf_bottom_diff + j * num_classes_);
            ++count;
          }
          conf_bottom_diff += bottom[1]->offset(1);
        }
      } else {
        // The diff is already computed and stored.
        bottom[1]->ShareDiff(conf_pred_);
      }
    }
  }

  // After backward, remove match statistics.
  all_match_indices_.clear();
  all_neg_indices_.clear();
}

INSTANTIATE_CLASS(MultiBoxLossLayer);
REGISTER_LAYER_CLASS(MultiBoxLoss);

}  // namespace caffe
{ "pile_set_name": "Github" }
/*
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 */

/** \file
 * \ingroup collada
 */

#include <string>

#include "COLLADASWColor.h"
#include "COLLADASWLight.h"

#include "BLI_math.h"

#include "LightExporter.h"
#include "collada_internal.h"

// Applies functor f to every lamp object (OB_LAMP with non-null light data)
// in the export set. Other object types are silently skipped.
template<class Functor>
void forEachLightObjectInExportSet(Scene *sce, Functor &f, LinkNode *export_set)
{
  LinkNode *node;
  for (node = export_set; node; node = node->next) {
    Object *ob = (Object *)node->link;

    if (ob->type == OB_LAMP && ob->data) {
      f(ob);
    }
  }
}

LightsExporter::LightsExporter(COLLADASW::StreamWriter *sw, BCExportSettings &export_settings)
    : COLLADASW::LibraryLights(sw), export_settings(export_settings)
{
}

// Writes the <library_lights> section for every lamp in the export set by
// invoking operator() on each.
void LightsExporter::exportLights(Scene *sce)
{
  openLibrary();

  forEachLightObjectInExportSet(sce, *this, this->export_settings.get_export_set());

  closeLibrary();
}

// Exports a single lamp object as the matching COLLADA light kind
// (directional / spot / point), including a Blender-specific extra profile.
void LightsExporter::operator()(Object *ob)
{
  Light *la = (Light *)ob->data;
  std::string la_id(get_light_id(ob));
  std::string la_name(id_name(la));
  // Bake the lamp energy into the exported color channels.
  COLLADASW::Color col(la->r * la->energy, la->g * la->energy, la->b * la->energy);
  float d, constatt, linatt, quadatt;

  d = la->dist;

  constatt = 1.0f;

  // Map the falloff type onto either a linear or a quadratic attenuation
  // term derived from the lamp distance.
  // NOTE(review): if la->dist is 0 these divisions produce inf -- confirm
  // upstream guarantees dist > 0.
  if (la->falloff_type == LA_FALLOFF_INVLINEAR) {
    linatt = 1.0f / d;
    quadatt = 0.0f;
  }
  else {
    linatt = 0.0f;
    quadatt = 1.0f / (d * d);
  }

  // sun
  if (la->type == LA_SUN) {
    COLLADASW::DirectionalLight cla(mSW, la_id, la_name);
    cla.setColor(col, false, "color");
    cla.setConstantAttenuation(constatt);
    exportBlenderProfile(cla, la);
    addLight(cla);
  }
  // spot
  else if (la->type == LA_SPOT) {
    COLLADASW::SpotLight cla(mSW, la_id, la_name);
    cla.setColor(col, false, "color");
    cla.setFallOffAngle(RAD2DEGF(la->spotsize), false, "fall_off_angle");
    cla.setFallOffExponent(la->spotblend, false, "fall_off_exponent");
    cla.setConstantAttenuation(constatt);
    cla.setLinearAttenuation(linatt);
    cla.setQuadraticAttenuation(quadatt);
    exportBlenderProfile(cla, la);
    addLight(cla);
  }
  // lamp
  else if (la->type == LA_LOCAL) {
    COLLADASW::PointLight cla(mSW, la_id, la_name);
    cla.setColor(col, false, "color");
    cla.setConstantAttenuation(constatt);
    cla.setLinearAttenuation(linatt);
    cla.setQuadraticAttenuation(quadatt);
    exportBlenderProfile(cla, la);
    addLight(cla);
  }
  // area light is not supported
  // it will be exported as a local lamp
  else {
    COLLADASW::PointLight cla(mSW, la_id, la_name);
    cla.setColor(col, false, "color");
    cla.setConstantAttenuation(constatt);
    cla.setLinearAttenuation(linatt);
    cla.setQuadraticAttenuation(quadatt);
    exportBlenderProfile(cla, la);
    addLight(cla);
  }
}

// Serializes raw Blender lamp settings into a "blender" extra-technique
// profile so a re-import can round-trip values COLLADA has no notion of.
bool LightsExporter::exportBlenderProfile(COLLADASW::Light &cla, Light *la)
{
  cla.addExtraTechniqueParameter("blender", "type", la->type);
  cla.addExtraTechniqueParameter("blender", "flag", la->flag);
  cla.addExtraTechniqueParameter("blender", "mode", la->mode);
  // Note: the "gamma" parameter is sourced from la->k.
  cla.addExtraTechniqueParameter("blender", "gamma", la->k, "blender_gamma");
  cla.addExtraTechniqueParameter("blender", "red", la->r);
  cla.addExtraTechniqueParameter("blender", "green", la->g);
  cla.addExtraTechniqueParameter("blender", "blue", la->b);
  cla.addExtraTechniqueParameter("blender", "shadow_r", la->shdwr, "blender_shadow_r");
  cla.addExtraTechniqueParameter("blender", "shadow_g", la->shdwg, "blender_shadow_g");
  cla.addExtraTechniqueParameter("blender", "shadow_b", la->shdwb, "blender_shadow_b");
  cla.addExtraTechniqueParameter("blender", "energy", la->energy, "blender_energy");
  cla.addExtraTechniqueParameter("blender", "dist", la->dist, "blender_dist");
  cla.addExtraTechniqueParameter("blender", "spotsize", RAD2DEGF(la->spotsize));
  cla.addExtraTechniqueParameter("blender", "spotblend", la->spotblend);
  cla.addExtraTechniqueParameter("blender", "att1", la->att1);
  cla.addExtraTechniqueParameter("blender", "att2", la->att2);
  // \todo figure out how we can have falloff curve supported here
  cla.addExtraTechniqueParameter("blender", "falloff_type", la->falloff_type);
  cla.addExtraTechniqueParameter("blender", "clipsta", la->clipsta);
  cla.addExtraTechniqueParameter("blender", "clipend", la->clipend);
  cla.addExtraTechniqueParameter("blender", "bias", la->bias);
  cla.addExtraTechniqueParameter("blender", "soft", la->soft);
  cla.addExtraTechniqueParameter("blender", "bufsize", la->bufsize);
  cla.addExtraTechniqueParameter("blender", "samp", la->samp);
  cla.addExtraTechniqueParameter("blender", "buffers", la->buffers);
  cla.addExtraTechniqueParameter("blender", "area_shape", la->area_shape);
  cla.addExtraTechniqueParameter("blender", "area_size", la->area_size);
  cla.addExtraTechniqueParameter("blender", "area_sizey", la->area_sizey);
  cla.addExtraTechniqueParameter("blender", "area_sizez", la->area_sizez);

  return true;
}
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> <plist version="1.0"> <dict> <key>CFBundleDevelopmentRegion</key> <string>en</string> <key>CFBundleExecutable</key> <string>$(EXECUTABLE_NAME)</string> <key>CFBundleIdentifier</key> <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string> <key>CFBundleInfoDictionaryVersion</key> <string>6.0</string> <key>CFBundleName</key> <string>$(PRODUCT_NAME)</string> <key>CFBundlePackageType</key> <string>APPL</string> <key>CFBundleShortVersionString</key> <string>1.0</string> <key>CFBundleSignature</key> <string>????</string> <key>CFBundleVersion</key> <string>1</string> <key>LSRequiresIPhoneOS</key> <true/> <key>UILaunchStoryboardName</key> <string>LaunchScreen</string> <key>UIMainStoryboardFile</key> <string>Main</string> <key>UIRequiredDeviceCapabilities</key> <array> <string>armv7</string> </array> <key>UISupportedInterfaceOrientations</key> <array> <string>UIInterfaceOrientationPortrait</string> <string>UIInterfaceOrientationLandscapeLeft</string> </array> </dict> </plist>
{ "pile_set_name": "Github" }
require File.expand_path('../../../spec_helper', __FILE__) require File.expand_path('../../../fixtures/constants', __FILE__) describe "Module#const_missing" do it "is called when an undefined constant is referenced via literal form" do ConstantSpecs::ClassA::CS_CONSTX.should == :CS_CONSTX end it "is called when an undefined constant is referenced via #const_get" do ConstantSpecs::ClassA.const_get(:CS_CONSTX).should == :CS_CONSTX end it "raises NameError and includes the name of the value that wasn't found" do lambda { ConstantSpecs.const_missing("HelloMissing") }.should raise_error(NameError, /ConstantSpecs::HelloMissing/) end it "raises NameError and does not include toplevel Object" do begin Object.const_missing("HelloMissing") rescue NameError => e e.message.should_not =~ / Object::/ end end end
{ "pile_set_name": "Github" }
package(default_visibility = ["//visibility:public"]) licenses(["notice"]) load( "@io_bazel_rules_go//go:def.bzl", "go_library", ) go_library( name = "go_default_library", srcs = ["storage.go"], tags = ["automanaged"], deps = [ "//pkg/api:go_default_library", "//pkg/apis/certificates:go_default_library", "//pkg/registry/cachesize:go_default_library", "//pkg/registry/certificates/certificates:go_default_library", "//vendor/k8s.io/apimachinery/pkg/runtime:go_default_library", "//vendor/k8s.io/apiserver/pkg/endpoints/request:go_default_library", "//vendor/k8s.io/apiserver/pkg/registry/generic:go_default_library", "//vendor/k8s.io/apiserver/pkg/registry/generic/registry:go_default_library", "//vendor/k8s.io/apiserver/pkg/registry/rest:go_default_library", ], ) filegroup( name = "package-srcs", srcs = glob(["**"]), tags = ["automanaged"], visibility = ["//visibility:private"], ) filegroup( name = "all-srcs", srcs = [":package-srcs"], tags = ["automanaged"], )
{ "pile_set_name": "Github" }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.lucene.index;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.CommandLineUtil;
import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.PrintStreamInfoStream;
import org.apache.lucene.util.SuppressForbidden;
import org.apache.lucene.util.Version;

import java.io.IOException;
import java.nio.file.Paths;
import java.nio.file.Path;
import java.util.Collection;

/**
  * This is an easy-to-use tool that upgrades all segments of an index from previous Lucene versions
  * to the current segment file format. It can be used from command line:
  * <pre>
  *  java -cp lucene-core.jar org.apache.lucene.index.IndexUpgrader [-delete-prior-commits] [-verbose] indexDir
  * </pre>
  * Alternatively this class can be instantiated and {@link #upgrade} invoked. It uses {@link UpgradeIndexMergePolicy}
  * and triggers the upgrade via a forceMerge request to {@link IndexWriter}.
  * <p>This tool keeps only the last commit in an index; for this
  * reason, if the incoming index has more than one commit, the tool
  * refuses to run by default. Specify {@code -delete-prior-commits}
  * to override this, allowing the tool to delete all but the last commit.
  * From Java code this can be enabled by passing {@code true} to
  * {@link #IndexUpgrader(Directory,InfoStream,boolean)}.
  * <p><b>Warning:</b> This tool may reorder documents if the index was partially
  * upgraded before execution (e.g., documents were added). If your application relies
  * on &quot;monotonicity&quot; of doc IDs (which means that the order in which the documents
  * were added to the index is preserved), do a full forceMerge instead.
  * The {@link MergePolicy} set by {@link IndexWriterConfig} may also reorder
  * documents.
  */
public final class IndexUpgrader {

  // Category under which progress messages are reported to the InfoStream.
  private static final String LOG_PREFIX = "IndexUpgrader";

  // Prints CLI usage and terminates the JVM; never returns.
  @SuppressForbidden(reason = "System.out required: command line tool")
  private static void printUsage() {
    System.err.println("Upgrades an index so all segments created with a previous Lucene version are rewritten.");
    System.err.println("Usage:");
    System.err.println("  java " + IndexUpgrader.class.getName() + " [-delete-prior-commits] [-verbose] [-dir-impl X] indexDir");
    System.err.println("This tool keeps only the last commit in an index; for this");
    System.err.println("reason, if the incoming index has more than one commit, the tool");
    System.err.println("refuses to run by default. Specify -delete-prior-commits to override");
    System.err.println("this, allowing the tool to delete all but the last commit.");
    System.err.println("Specify a " + FSDirectory.class.getSimpleName() +
        " implementation through the -dir-impl option to force its use. If no package is specified the "
        + FSDirectory.class.getPackage().getName() + " package will be used.");
    System.err.println("WARNING: This tool may reorder document IDs!");
    System.exit(1);
  }

  /** Main method to run {@code IndexUpgrader} from the
   *  command-line. */
  @SuppressWarnings("deprecation")
  public static void main(String[] args) throws IOException {
    parseArgs(args).upgrade();
  }

  // Parses command-line flags into a configured IndexUpgrader. Invalid or
  // missing arguments route through printUsage(), which exits the JVM --
  // that is why the code below may dereference 'path' after a null check
  // that only calls printUsage().
  @SuppressForbidden(reason = "System.out required: command line tool")
  static IndexUpgrader parseArgs(String[] args) throws IOException {
    String path = null;
    boolean deletePriorCommits = false;
    InfoStream out = null;
    String dirImpl = null;
    int i = 0;
    while (i<args.length) {
      String arg = args[i];
      if ("-delete-prior-commits".equals(arg)) {
        deletePriorCommits = true;
      } else if ("-verbose".equals(arg)) {
        out = new PrintStreamInfoStream(System.out);
      } else if ("-dir-impl".equals(arg)) {
        if (i == args.length - 1) {
          System.out.println("ERROR: missing value for -dir-impl option");
          System.exit(1);
        }
        i++;
        dirImpl = args[i];
      } else if (path == null) {
        path = arg;
      } else {
        printUsage();
      }
      i++;
    }
    if (path == null) {
      printUsage();
    }

    Path p = Paths.get(path);
    Directory dir = null;
    if (dirImpl == null) {
      dir = FSDirectory.open(p);
    } else {
      dir = CommandLineUtil.newFSDirectory(dirImpl, p);
    }
    return new IndexUpgrader(dir, out, deletePriorCommits);
  }

  private final Directory dir;
  private final IndexWriterConfig iwc;
  private final boolean deletePriorCommits;

  /** Creates index upgrader on the given directory, using an {@link IndexWriter} using the given
   * {@code matchVersion}. The tool refuses to upgrade indexes with multiple commit points. */
  public IndexUpgrader(Directory dir) {
    this(dir, new IndexWriterConfig(null), false);
  }

  /** Creates index upgrader on the given directory, using an {@link IndexWriter} using the given
   * {@code matchVersion}. You have the possibility to upgrade indexes with multiple commit points by removing
   * all older ones. If {@code infoStream} is not {@code null}, all logging output will be sent to this stream. */
  public IndexUpgrader(Directory dir, InfoStream infoStream, boolean deletePriorCommits) {
    this(dir, new IndexWriterConfig(null), deletePriorCommits);
    if (null != infoStream) {
      this.iwc.setInfoStream(infoStream);
    }
  }

  /** Creates index upgrader on the given directory, using an {@link IndexWriter} using the given
   * config. You have the possibility to upgrade indexes with multiple commit points by removing
   * all older ones. */
  public IndexUpgrader(Directory dir, IndexWriterConfig iwc, boolean deletePriorCommits) {
    this.dir = dir;
    this.iwc = iwc;
    this.deletePriorCommits = deletePriorCommits;
  }

  /** Perform the upgrade. */
  public void upgrade() throws IOException {
    if (!DirectoryReader.indexExists(dir)) {
      throw new IndexNotFoundException(dir.toString());
    }

    if (!deletePriorCommits) {
      final Collection<IndexCommit> commits = DirectoryReader.listCommits(dir);
      if (commits.size() > 1) {
        throw new IllegalArgumentException("This tool was invoked to not delete prior commit points, but the following commits were found: " + commits);
      }
    }

    // Wrap the configured merge policy so a forceMerge rewrites old-format
    // segments, and keep only the last commit afterwards.
    iwc.setMergePolicy(new UpgradeIndexMergePolicy(iwc.getMergePolicy()));
    iwc.setIndexDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());

    try (final IndexWriter w = new IndexWriter(dir, iwc)) {
      InfoStream infoStream = iwc.getInfoStream();
      if (infoStream.isEnabled(LOG_PREFIX)) {
        infoStream.message(LOG_PREFIX, "Upgrading all pre-" + Version.LATEST + " segments of index directory '" + dir + "' to version " + Version.LATEST + "...");
      }
      w.forceMerge(1);
      if (infoStream.isEnabled(LOG_PREFIX)) {
        infoStream.message(LOG_PREFIX, "All segments upgraded to version " + Version.LATEST);
        infoStream.message(LOG_PREFIX, "Enforcing commit to rewrite all index metadata...");
      }
      w.setLiveCommitData(w.getLiveCommitData()); // fake change to enforce a commit (e.g. if index has no segments)
      assert w.hasUncommittedChanges();
      w.commit();
      if (infoStream.isEnabled(LOG_PREFIX)) {
        infoStream.message(LOG_PREFIX, "Committed upgraded metadata to index.");
      }
    }
  }

}
{ "pile_set_name": "Github" }
/* * Copyright (c) 1998, 2017, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
 */

#ifndef JDWP_EVENTHELPER_H
#define JDWP_EVENTHELPER_H

#include "bag.h"
#include "invoker.h"

/*
 * JDWP event helper interface: collects debugger events into "bags"
 * and reports them to the debugger for the given session.
 */

/* One-time setup / teardown of the helper for a debugger session. */
void eventHelper_initialize(jbyte sessionID);
void eventHelper_reset(jbyte sessionID);

/* Creates an empty bag used to accumulate events before reporting. */
struct bag *eventHelper_createEventBag(void);

/* Queue a generic event (with suspend policy) into eventBag. */
void eventHelper_recordEvent(EventInfo *evinfo, jint id,
                             jbyte suspendPolicy, struct bag *eventBag);
/* Queue a class-unload event identified by the class signature. */
void eventHelper_recordClassUnload(jint id, char *signature, struct bag *eventBag);
/* Queue a frame (method entry/exit) event; returnValue is only
 * meaningful when needReturnValue is non-zero. */
void eventHelper_recordFrameEvent(jint id, jbyte suspendPolicy, EventIndex ei,
                                  jthread thread, jclass clazz,
                                  jmethodID method, jlocation location,
                                  int needReturnValue,
                                  jvalue returnValue,
                                  struct bag *eventBag);

/* Send the accumulated events to the debugger; returns a suspend policy
 * byte (NOTE(review): exact meaning of the return value is defined in
 * eventHelper.c -- confirm there). */
jbyte eventHelper_reportEvents(jbyte sessionID, struct bag *eventBag);
void eventHelper_reportInvokeDone(jbyte sessionID, jthread thread);
void eventHelper_reportVMInit(JNIEnv *env, jbyte sessionID, jthread thread, jbyte suspendPolicy);
void eventHelper_suspendThread(jbyte sessionID, jthread thread);

/* Temporarily hold back / resume event reporting. */
void eventHelper_holdEvents(void);
void eventHelper_releaseEvents(void);

void eventHelper_lock(void);
void eventHelper_unlock(void);

void commandLoop_sync(void); /* commandLoop sync with cbVMDeath */
void commandLoop_exitVmDeathLockOnError(void);

/*
 * Private interface for coordinating between eventHelper.c: commandLoop()
 * and ThreadReferenceImpl.c: resume() and VirtualMachineImpl.c: resume().
 */
void unblockCommandLoop(void);

#endif
{ "pile_set_name": "Github" }
-- Thumbnail URL helpers: build signed "/img/..." paths.
config = require"lapis.config".get!
db = require "lapis.db"

-- HMAC key for image signatures; falls back to the general app secret.
image_secret = config.image_secret or config.secret

-- NOTE(review): `db` and `filter_update` appear unused in this module.
import escape from require "socket.url"
import filter_update from require "helpers.model"

-- URL-safe base64: encode, optionally truncate to `len` characters,
-- then percent-encode the two URL-unsafe characters "+" and "/".
b64_for_url = do
  import encode_base64 from require "lapis.util.encoding"
  (str, len) ->
    str = encode_base64 str
    str = str\sub 1, len if len
    (str\gsub "[/+]", {
      "+": "%2B"
      "/": "%2F"
    })

-- Inverse of b64_for_url: undo the percent-encoding, then base64-decode.
unb64_from_url = do
  import decode_base64 from require "lapis.util.encoding"
  (str) ->
    str = str\gsub "%%2[BF]", {
      "%2B": "+"
      "%2F": "/"
    }
    decode_base64 str

-- First 6 URL-safe-base64 chars of HMAC-SHA1(image_secret, chunk).
-- NOTE(review): relies on the ngx (OpenResty) global being present.
image_signature = (chunk) ->
  b64_for_url ngx.hmac_sha1(image_secret, chunk), 6

-- Builds "/img/<b64(subpath)>/<size>/<signature>.<ext>".  If no
-- extension is passed, it is taken from the end of `subpath`.
thumb = do
  img_prefix = "/img"
  (subpath, size_str, extension) ->
    extension = subpath\match "%.([%w_]+)$" unless extension
    chunk = "#{b64_for_url subpath}/#{escape size_str}"
    "#{img_prefix}/#{chunk}/#{image_signature chunk}.#{extension}"

{ :thumb, :image_signature, :b64_for_url, :unb64_from_url }
{ "pile_set_name": "Github" }
--- layout: base title: 'Statistics of ADV in UD_Latvian' udver: '2' --- ## Treebank Statistics: UD_Latvian: POS Tags: `ADV` There are 691 `ADV` lemmas (6%), 716 `ADV` types (3%) and 5489 `ADV` tokens (6%). Out of 16 observed tags, the rank of `ADV` is: 5 in number of lemmas, 5 in number of types and 5 in number of tokens. The 10 most frequent `ADV` lemmas: <em>kā, kad, jau, vēl, tad, daudz, tā, kur, ļoti, tik</em> The 10 most frequent `ADV` types: <em>kā, kad, jau, vēl, tad, tā, kur, ļoti, tik, tur</em> The 10 most frequent ambiguous lemmas: <em>kā</em> (<tt><a href="lv-pos-SCONJ.html">SCONJ</a></tt> 266, <tt><a href="lv-pos-ADV.html">ADV</a></tt> 214, <tt><a href="lv-pos-CCONJ.html">CCONJ</a></tt> 53, <tt><a href="lv-pos-PART.html">PART</a></tt> 33, <tt><a href="lv-pos-PRON.html">PRON</a></tt> 1), <em>jau</em> (<tt><a href="lv-pos-ADV.html">ADV</a></tt> 199, <tt><a href="lv-pos-PART.html">PART</a></tt> 63), <em>vēl</em> (<tt><a href="lv-pos-ADV.html">ADV</a></tt> 188, <tt><a href="lv-pos-PART.html">PART</a></tt> 11), <em>tad</em> (<tt><a href="lv-pos-ADV.html">ADV</a></tt> 176, <tt><a href="lv-pos-PART.html">PART</a></tt> 36), <em>tā</em> (<tt><a href="lv-pos-PRON.html">PRON</a></tt> 303, <tt><a href="lv-pos-ADV.html">ADV</a></tt> 146, <tt><a href="lv-pos-DET.html">DET</a></tt> 91, <tt><a href="lv-pos-PART.html">PART</a></tt> 19, <tt><a href="lv-pos-CCONJ.html">CCONJ</a></tt> 4), <em>tik</em> (<tt><a href="lv-pos-ADV.html">ADV</a></tt> 114, <tt><a href="lv-pos-PART.html">PART</a></tt> 12), <em>tur</em> (<tt><a href="lv-pos-ADV.html">ADV</a></tt> 110, <tt><a href="lv-pos-PART.html">PART</a></tt> 2), <em>labi</em> (<tt><a href="lv-pos-ADV.html">ADV</a></tt> 89, <tt><a href="lv-pos-PART.html">PART</a></tt> 1), <em>te</em> (<tt><a href="lv-pos-ADV.html">ADV</a></tt> 55, <tt><a href="lv-pos-CCONJ.html">CCONJ</a></tt> 7, <tt><a href="lv-pos-PART.html">PART</a></tt> 1), <em>bieži</em> (<tt><a href="lv-pos-ADV.html">ADV</a></tt> 43, <tt><a 
href="lv-pos-ADJ.html">ADJ</a></tt> 1) The 10 most frequent ambiguous types: <em>kā</em> (<tt><a href="lv-pos-SCONJ.html">SCONJ</a></tt> 257, <tt><a href="lv-pos-ADV.html">ADV</a></tt> 144, <tt><a href="lv-pos-CCONJ.html">CCONJ</a></tt> 52, <tt><a href="lv-pos-PART.html">PART</a></tt> 33, <tt><a href="lv-pos-PRON.html">PRON</a></tt> 10, <tt><a href="lv-pos-DET.html">DET</a></tt> 2), <em>jau</em> (<tt><a href="lv-pos-ADV.html">ADV</a></tt> 187, <tt><a href="lv-pos-PART.html">PART</a></tt> 63), <em>vēl</em> (<tt><a href="lv-pos-ADV.html">ADV</a></tt> 176, <tt><a href="lv-pos-PART.html">PART</a></tt> 11), <em>tad</em> (<tt><a href="lv-pos-ADV.html">ADV</a></tt> 144, <tt><a href="lv-pos-PART.html">PART</a></tt> 31), <em>tā</em> (<tt><a href="lv-pos-PRON.html">PRON</a></tt> 126, <tt><a href="lv-pos-ADV.html">ADV</a></tt> 118, <tt><a href="lv-pos-DET.html">DET</a></tt> 53, <tt><a href="lv-pos-PART.html">PART</a></tt> 11, <tt><a href="lv-pos-CCONJ.html">CCONJ</a></tt> 4), <em>tik</em> (<tt><a href="lv-pos-ADV.html">ADV</a></tt> 110, <tt><a href="lv-pos-PART.html">PART</a></tt> 10), <em>tur</em> (<tt><a href="lv-pos-ADV.html">ADV</a></tt> 80, <tt><a href="lv-pos-VERB.html">VERB</a></tt> 3, <tt><a href="lv-pos-PART.html">PART</a></tt> 2), <em>labi</em> (<tt><a href="lv-pos-ADV.html">ADV</a></tt> 51, <tt><a href="lv-pos-ADJ.html">ADJ</a></tt> 2, <tt><a href="lv-pos-PART.html">PART</a></tt> 1), <em>te</em> (<tt><a href="lv-pos-ADV.html">ADV</a></tt> 43, <tt><a href="lv-pos-CCONJ.html">CCONJ</a></tt> 6, <tt><a href="lv-pos-PART.html">PART</a></tt> 1), <em>savukārt</em> (<tt><a href="lv-pos-ADV.html">ADV</a></tt> 16, <tt><a href="lv-pos-PART.html">PART</a></tt> 1) * <em>kā</em> * <tt><a href="lv-pos-SCONJ.html">SCONJ</a></tt> 257: <em>Bārmenis <b>kā</b> uzlaizīt uzlaizīja .</em> * <tt><a href="lv-pos-ADV.html">ADV</a></tt> 144: <em>Tu redzi , <b>kā</b> sarunājas ?</em> * <tt><a href="lv-pos-CCONJ.html">CCONJ</a></tt> 52: <em>Arī M. 
Bērzlaps aicināja saprast , ka tā , <b>kā</b> agrāk , vairs nebūs .</em> * <tt><a href="lv-pos-PART.html">PART</a></tt> 33: <em>Viss stāvs it <b>kā</b> strauji novīta - kā puķe vāzē .</em> * <tt><a href="lv-pos-PRON.html">PRON</a></tt> 10: <em>Nebaidos vairs ne no <b>kā</b> .</em> * <tt><a href="lv-pos-DET.html">DET</a></tt> 2: <em>Nesaudzīgas mežu izciršanas rezultātā Baltijas jūras un Rīgas līča piekrastē 18. un 19. gs. veidojās ceļojošās kāpas , <b>kā</b> rezultātā smiltis vēja iedarbības dēļ pārvietojās no krasta uz zemes iekšieni .</em> * <em>jau</em> * <tt><a href="lv-pos-ADV.html">ADV</a></tt> 187: <em>- Nu , teicu <b>jau</b> , rakstniece esmu .</em> * <tt><a href="lv-pos-PART.html">PART</a></tt> 63: <em>Pa šo laiku tur bija apsēdušies <b>jau</b> divi citi kafejnīcas apmeklētāji .</em> * <em>vēl</em> * <tt><a href="lv-pos-ADV.html">ADV</a></tt> 176: <em>Dabūšu pārējo , <b>vēl</b> atdošu .</em> * <tt><a href="lv-pos-PART.html">PART</a></tt> 11: <em>Ir , mīlīši , un kā <b>vēl</b> ir .</em> * <em>tad</em> * <tt><a href="lv-pos-ADV.html">ADV</a></tt> 144: <em>Miers pasaulē būs <b>tad</b> , kad nebūs vairs neviena suņa un nevienas nātres .</em> * <tt><a href="lv-pos-PART.html">PART</a></tt> 31: <em>- Un <b>tad</b> ?</em> * <em>tā</em> * <tt><a href="lv-pos-PRON.html">PRON</a></tt> 126: <em>Es zināju , ka <b>tā</b> ir nejauka provokācija .</em> * <tt><a href="lv-pos-ADV.html">ADV</a></tt> 118: <em>Kā tu _ <b>tā</b> , bračkiņ ?!</em> * <tt><a href="lv-pos-DET.html">DET</a></tt> 53: <em>- Klau , <b>tā</b> sieviete .</em> * <tt><a href="lv-pos-PART.html">PART</a></tt> 11: <em>- Nu nav taču kur , jau <b>tā</b> kājas slapjas .</em> * <tt><a href="lv-pos-CCONJ.html">CCONJ</a></tt> 4: <em>Lai izveidotu pabeigto tēlu , var noderēt nagi ar svētku dizainu , jo mūsdienīgās nail-art iespējas kā zīmējumos , <b>tā</b> arī materiālu izvēlē ir praktiski neierobežotas .</em> * <em>tik</em> * <tt><a href="lv-pos-ADV.html">ADV</a></tt> 110: <em>Kā tu _ tā , <b>tik</b> neuzmanīgi 
...</em> * <tt><a href="lv-pos-PART.html">PART</a></tt> 10: <em>" Pēc dižošanās visi ceļi ved <b>tik</b> vienam mērķim .</em> * <em>tur</em> * <tt><a href="lv-pos-ADV.html">ADV</a></tt> 80: <em>Pa šo laiku <b>tur</b> bija apsēdušies jau divi citi kafejnīcas apmeklētāji .</em> * <tt><a href="lv-pos-VERB.html">VERB</a></tt> 3: <em>Viņai liekas , viņu <b>tur</b> gaiss .</em> * <tt><a href="lv-pos-PART.html">PART</a></tt> 2: <em>Bet ne jau kādā <b>tur</b> filosofiskā vai metaforiskā nozīmē , nē .</em> * <em>labi</em> * <tt><a href="lv-pos-ADV.html">ADV</a></tt> 51: <em>Turklāt viņš ļoti <b>labi</b> mācēja “ pūderēt smadzenes ” .</em> * <tt><a href="lv-pos-ADJ.html">ADJ</a></tt> 2: <em>Tāpat viņi šaubījās , vai izdosies no sava vidus atrast mājas vecākā kandidatūru , un pieļāva iespēju , ka varētu lūgt talkā kāda kaimiņnama vecāko , kuram ir <b>labi</b> panākumi .</em> * <tt><a href="lv-pos-PART.html">PART</a></tt> 1: <em>- Aha , <b>labi</b> .</em> * <em>te</em> * <tt><a href="lv-pos-ADV.html">ADV</a></tt> 43: <em>- Viņa vispār <b>te</b> neiederas .</em> * <tt><a href="lv-pos-CCONJ.html">CCONJ</a></tt> 6: <em>Te šis , <b>te</b> tas .</em> * <tt><a href="lv-pos-PART.html">PART</a></tt> 1: <em>Ziņkāre jaucās ar vieglu nepatiku pret visu apkārtējo un nelāgu nojausmu , ka varbūt šī <b>te</b> , ko redzu , ir tā īstā dzīve , ko gadiem esmu meklējusi nepareizās vietās .</em> * <em>savukārt</em> * <tt><a href="lv-pos-ADV.html">ADV</a></tt> 16: <em>Tas <b>savukārt</b> varētu pavērt ceļu pat uz Austrumu paktu Strezas variantā .</em> * <tt><a href="lv-pos-PART.html">PART</a></tt> 1: <em>Kāds cits bērnunama audzēknis <b>savukārt</b> kā atbalsta ģimenē ticis ievietots sociālā riska ģimenē .</em> ## Morphology The form / lemma ratio of `ADV` is 1.036179 (the average of all parts of speech is 1.843235). The 1st highest number of forms (3) was observed with the lemma “bieži”: <em>bieži, biežāk, visbiežāk</em>. 
The 2nd highest number of forms (3) was observed with the lemma “daudz”: <em>daudz, vairāk, visvairāk</em>. The 3rd highest number of forms (3) was observed with the lemma “grūti”: <em>Visgrūtāk, grūti, grūtāk</em>. `ADV` occurs with 3 features: <tt><a href="lv-feat-PronType.html">PronType</a></tt> (1576; 29% instances), <tt><a href="lv-feat-Degree.html">Degree</a></tt> (886; 16% instances), <tt><a href="lv-feat-NumType.html">NumType</a></tt> (10; 0% instances) `ADV` occurs with 9 feature-value pairs: `Degree=Cmp`, `Degree=Pos`, `Degree=Sup`, `NumType=Mult`, `PronType=Dem`, `PronType=Int`, `PronType=Int,Neg`, `PronType=Neg`, `PronType=Tot` `ADV` occurs with 10 feature combinations. The most frequent feature combination is `_` (3017 tokens). Examples: <em>jau, vēl, ļoti, vairs, tāpēc, atkal, kopā, savukārt, vispār, gandrīz</em> ## Relations `ADV` nodes are attached to their parents using 20 different relations: <tt><a href="lv-dep-advmod.html">advmod</a></tt> (4811; 88% instances), <tt><a href="lv-dep-root.html">root</a></tt> (194; 4% instances), <tt><a href="lv-dep-conj.html">conj</a></tt> (127; 2% instances), <tt><a href="lv-dep-case.html">case</a></tt> (87; 2% instances), <tt><a href="lv-dep-advcl.html">advcl</a></tt> (56; 1% instances), <tt><a href="lv-dep-parataxis.html">parataxis</a></tt> (53; 1% instances), <tt><a href="lv-dep-cc.html">cc</a></tt> (37; 1% instances), <tt><a href="lv-dep-dep.html">dep</a></tt> (34; 1% instances), <tt><a href="lv-dep-ccomp.html">ccomp</a></tt> (31; 1% instances), <tt><a href="lv-dep-compound.html">compound</a></tt> (16; 0% instances), <tt><a href="lv-dep-acl.html">acl</a></tt> (7; 0% instances), <tt><a href="lv-dep-xcomp.html">xcomp</a></tt> (7; 0% instances), <tt><a href="lv-dep-obl.html">obl</a></tt> (6; 0% instances), <tt><a href="lv-dep-discourse.html">discourse</a></tt> (5; 0% instances), <tt><a href="lv-dep-mark.html">mark</a></tt> (5; 0% instances), <tt><a href="lv-dep-nsubj.html">nsubj</a></tt> (4; 0% instances), <tt><a 
href="lv-dep-csubj.html">csubj</a></tt> (3; 0% instances), <tt><a href="lv-dep-flat.html">flat</a></tt> (2; 0% instances), <tt><a href="lv-dep-iobj.html">iobj</a></tt> (2; 0% instances), <tt><a href="lv-dep-nmod.html">nmod</a></tt> (2; 0% instances) Parents of `ADV` nodes belong to 16 different parts of speech: <tt><a href="lv-pos-VERB.html">VERB</a></tt> (3680; 67% instances), <tt><a href="lv-pos-ADV.html">ADV</a></tt> (485; 9% instances), <tt><a href="lv-pos-ADJ.html">ADJ</a></tt> (429; 8% instances), <tt><a href="lv-pos-NOUN.html">NOUN</a></tt> (417; 8% instances), (194; 4% instances), <tt><a href="lv-pos-PRON.html">PRON</a></tt> (113; 2% instances), <tt><a href="lv-pos-NUM.html">NUM</a></tt> (75; 1% instances), <tt><a href="lv-pos-PUNCT.html">PUNCT</a></tt> (34; 1% instances), <tt><a href="lv-pos-DET.html">DET</a></tt> (23; 0% instances), <tt><a href="lv-pos-PROPN.html">PROPN</a></tt> (20; 0% instances), <tt><a href="lv-pos-PART.html">PART</a></tt> (11; 0% instances), <tt><a href="lv-pos-ADP.html">ADP</a></tt> (2; 0% instances), <tt><a href="lv-pos-CCONJ.html">CCONJ</a></tt> (2; 0% instances), <tt><a href="lv-pos-SCONJ.html">SCONJ</a></tt> (2; 0% instances), <tt><a href="lv-pos-SYM.html">SYM</a></tt> (1; 0% instances), <tt><a href="lv-pos-X.html">X</a></tt> (1; 0% instances) 4414 (80%) `ADV` nodes are leaves. 608 (11%) `ADV` nodes have one child. 147 (3%) `ADV` nodes have two children. 320 (6%) `ADV` nodes have three or more children. The highest child degree of a `ADV` node is 9. 
Children of `ADV` nodes are attached using 32 different relations: <tt><a href="lv-dep-punct.html">punct</a></tt> (505; 22% instances), <tt><a href="lv-dep-advmod.html">advmod</a></tt> (335; 15% instances), <tt><a href="lv-dep-discourse.html">discourse</a></tt> (212; 9% instances), <tt><a href="lv-dep-aux.html">aux</a></tt> (196; 9% instances), <tt><a href="lv-dep-cc.html">cc</a></tt> (140; 6% instances), <tt><a href="lv-dep-conj.html">conj</a></tt> (131; 6% instances), <tt><a href="lv-dep-advcl.html">advcl</a></tt> (119; 5% instances), <tt><a href="lv-dep-obl.html">obl</a></tt> (111; 5% instances), <tt><a href="lv-dep-nsubj.html">nsubj</a></tt> (92; 4% instances), <tt><a href="lv-dep-ccomp.html">ccomp</a></tt> (86; 4% instances), <tt><a href="lv-dep-iobj.html">iobj</a></tt> (81; 4% instances), <tt><a href="lv-dep-mark.html">mark</a></tt> (50; 2% instances), <tt><a href="lv-dep-acl.html">acl</a></tt> (39; 2% instances), <tt><a href="lv-dep-compound.html">compound</a></tt> (24; 1% instances), <tt><a href="lv-dep-_.html">_</a></tt> (22; 1% instances), <tt><a href="lv-dep-case.html">case</a></tt> (20; 1% instances), <tt><a href="lv-dep-fixed.html">fixed</a></tt> (20; 1% instances), <tt><a href="lv-dep-csubj.html">csubj</a></tt> (19; 1% instances), <tt><a href="lv-dep-dep.html">dep</a></tt> (15; 1% instances), <tt><a href="lv-dep-nmod.html">nmod</a></tt> (10; 0% instances), <tt><a href="lv-dep-obj.html">obj</a></tt> (8; 0% instances), <tt><a href="lv-dep-amod.html">amod</a></tt> (6; 0% instances), <tt><a href="lv-dep-parataxis.html">parataxis</a></tt> (6; 0% instances), <tt><a href="lv-dep-cop.html">cop</a></tt> (4; 0% instances), <tt><a href="lv-dep-det.html">det</a></tt> (4; 0% instances), <tt><a href="lv-dep-xcomp.html">xcomp</a></tt> (3; 0% instances), <tt><a href="lv-dep-flat.html">flat</a></tt> (2; 0% instances), <tt><a href="lv-dep-orphan.html">orphan</a></tt> (2; 0% instances), <tt><a href="lv-dep-root.html">root</a></tt> (2; 0% instances), <tt><a 
href="lv-dep-nsubj-pass.html">nsubj:pass</a></tt> (1; 0% instances), <tt><a href="lv-dep-nummod.html">nummod</a></tt> (1; 0% instances), <tt><a href="lv-dep-vocative.html">vocative</a></tt> (1; 0% instances) Children of `ADV` nodes belong to 15 different parts of speech: <tt><a href="lv-pos-PUNCT.html">PUNCT</a></tt> (505; 22% instances), <tt><a href="lv-pos-VERB.html">VERB</a></tt> (489; 22% instances), <tt><a href="lv-pos-ADV.html">ADV</a></tt> (427; 19% instances), <tt><a href="lv-pos-NOUN.html">NOUN</a></tt> (215; 9% instances), <tt><a href="lv-pos-PART.html">PART</a></tt> (211; 9% instances), <tt><a href="lv-pos-CCONJ.html">CCONJ</a></tt> (142; 6% instances), <tt><a href="lv-pos-PRON.html">PRON</a></tt> (136; 6% instances), <tt><a href="lv-pos-SCONJ.html">SCONJ</a></tt> (76; 3% instances), <tt><a href="lv-pos-ADJ.html">ADJ</a></tt> (21; 1% instances), <tt><a href="lv-pos-ADP.html">ADP</a></tt> (20; 1% instances), <tt><a href="lv-pos-PROPN.html">PROPN</a></tt> (12; 1% instances), <tt><a href="lv-pos-DET.html">DET</a></tt> (4; 0% instances), <tt><a href="lv-pos-NUM.html">NUM</a></tt> (4; 0% instances), <tt><a href="lv-pos-INTJ.html">INTJ</a></tt> (3; 0% instances), <tt><a href="lv-pos-SYM.html">SYM</a></tt> (2; 0% instances)
{ "pile_set_name": "Github" }
/*
  zip_source_close.c -- close zip_source (stop reading)
  Copyright (C) 2009-2019 Dieter Baron and Thomas Klausner

  This file is part of libzip, a library to manipulate ZIP archives.
  The authors can be contacted at <libzip@nih.at>

  Redistribution and use in source and binary forms, with or without
  modification, are permitted provided that the following conditions
  are met:
  1. Redistributions of source code must retain the above copyright
     notice, this list of conditions and the following disclaimer.
  2. Redistributions in binary form must reproduce the above copyright
     notice, this list of conditions and the following disclaimer in
     the documentation and/or other materials provided with the
     distribution.
  3. The names of the authors may not be used to endorse or promote
     products derived from this software without specific prior
     written permission.

  THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
  IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
  WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
  ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY
  DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
  DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
  GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
  INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
  IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
  OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
  IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/


#include "zipint.h"

/*
 * Stop reading from a source previously opened for reading.
 *
 * Decrements the source's open count; when it drops to zero, forwards
 * ZIP_SOURCE_CLOSE to the source callback and, for layered sources,
 * recursively closes the lower layer.
 *
 * Returns 0 on success; returns -1 and sets ZIP_ER_INVAL if the source
 * is not currently open for reading.  A failure closing the lower
 * layer is recorded as ZIP_ER_INTERNAL in src->error, but the call
 * still returns 0.
 */
int
zip_source_close(zip_source_t *src) {
    if (!ZIP_SOURCE_IS_OPEN_READING(src)) {
        zip_error_set(&src->error, ZIP_ER_INVAL, 0);
        return -1;
    }

    src->open_count--;
    if (src->open_count == 0) {
        /* NOTE(review): the return value of this call is ignored --
           presumably close is best-effort; confirm against upstream. */
        _zip_source_call(src, NULL, 0, ZIP_SOURCE_CLOSE);

        if (ZIP_SOURCE_IS_LAYERED(src)) {
            if (zip_source_close(src->src) < 0) {
                zip_error_set(&src->error, ZIP_ER_INTERNAL, 0);
            }
        }
    }

    return 0;
}
{ "pile_set_name": "Github" }
# WIN32OLE is Windows-only; skip the whole spec elsewhere.
platform_is :windows do
  require 'win32ole'

  describe "WIN32OLE_METHOD#return_type_detail" do
    before :each do
      # Look up the "BrowseForFolder" method on the Shell automation type.
      ole_type = WIN32OLE_TYPE.new("Microsoft Shell Controls And Automation", "Shell")
      @m_browse_for_folder = WIN32OLE_METHOD.new(ole_type, "BrowseForFolder")
    end

    it "raises ArgumentError if argument is given" do
      # return_type_detail takes no arguments.
      -> { @m_browse_for_folder.return_type_detail(1) }.should raise_error ArgumentError
    end

    it "returns expected value for Shell Control's 'BrowseForFolder' method" do
      @m_browse_for_folder.return_type_detail.should be_kind_of Array
      # Pointer to the user-defined 'Folder' automation type.
      @m_browse_for_folder.return_type_detail.should == ['PTR', 'USERDEFINED', 'Folder']
    end
  end
end
{ "pile_set_name": "Github" }
# frozen_string_literal: false
# $Id$
#
# scanf for Ruby
#
# Some not very comprehensive tests of block behavior.

require 'test/unit'
require 'scanf'
require 'tmpdir'

# Tests String#scanf and IO#scanf when called with a block: the block is
# invoked once per matched tuple and its return values are collected
# into the result array.
class TestScanfBlock < Test::Unit::TestCase
  def setup
    # One "<name> <year>" record per line, parsed repeatedly by "%s%d".
    @str = <<-EOS
Beethoven 1770
Bach 1685
Handel 1685
Scarlatti 1685
Brahms 1833
    EOS
  end

  # Compatibility alias for the ancient Test::Unit spelling.
  alias set_up setup

  def test_str1
    res = @str.scanf("%s%d") { |name, year| "#{name} was born in #{year}." }
    # Fixed: test/unit's assert_equal takes (expected, actual); the
    # original reversed them, producing misleading failure messages.
    assert_equal([
      "Beethoven was born in 1770.",
      "Bach was born in 1685.",
      "Handel was born in 1685.",
      "Scarlatti was born in 1685.",
      "Brahms was born in 1833."
    ], res)
  end

  def test_str2
    names = @str.scanf("%s%d") { |name, year| name.upcase }
    assert_equal(["BEETHOVEN", "BACH", "HANDEL", "SCARLATTI", "BRAHMS"], names)
  end

  # Empty input, non-matching input, or an empty format all yield [].
  def test_str3
    assert_equal([], "".scanf("%d%f%s") {})
  end

  def test_str4
    assert_equal([], "abc".scanf("%d%f%s") {})
  end

  def test_str5
    assert_equal([], "abc".scanf("") {})
  end

  def test_io1
    fn = "#{Dir.tmpdir}/iotest.dat.#{$$}"
    File.open(fn, "w") { |fh| fh.puts(@str) }
    fh = File.open(fn, "rb")
    res = fh.scanf("%s%d") { |name, year| "#{name} was born in #{year}." }
    assert_equal(
      [
        "Beethoven was born in 1770.",
        "Bach was born in 1685.",
        "Handel was born in 1685.",
        "Scarlatti was born in 1685.",
        "Brahms was born in 1833."
      ], res)
  ensure
    # Close even when an assertion fails (the original leaked the handle
    # on failure), and only delete what actually got created.
    fh.close if fh && !fh.closed?
    File.delete(fn) if File.exist?(fn)
  end

  def test_io2
    fn = "#{Dir.tmpdir}/iotest.dat.#{$$}"
    File.open(fn, "w").close
    fh = File.open(fn, "rb")
    assert_equal([], fh.scanf("") {})
    fh.seek(0)
    assert_equal([], fh.scanf("%d%f%s") {})
  ensure
    fh.close if fh && !fh.closed?
    File.delete(fn) if File.exist?(fn)
  end
end
{ "pile_set_name": "Github" }
;; Class and method declarations for the gui/path class (vector-path
;; geometry helpers).  Declarations only -- the implementations live
;; under gui/path/.
(include 'class/fixeds/class.inc)

(def-class 'path 'fixeds)
(dec-method :vtable 'gui/path/vtable)
(dec-method :create 'gui/path/create)
(dec-method :vcreate 'gui/path/create :final)
;; Static geometry operations; registers in the first list are inputs,
;; the second list the outputs.
(dec-method :filter_polyline 'gui/path/filter_polyline :static '(r0 r1 r2) '(r0))
(dec-method :filter_polygon 'gui/path/filter_polygon :static '(r0 r1 r2) '(r0))
(dec-method :transform 'gui/path/transform :static '(r0 r1 r2 r3 r4 r5 r6 r7) '(r0))
(dec-method :simplify 'gui/path/simplify :static '(r0 r1 r2 r3) '(r0))
(dec-method :gen_clerp 'gui/path/gen_clerp :static '(r0 r1 r2 r3 r4 r5 r6 r7 r8 r9) '(r0))
(dec-method :gen_arc 'gui/path/gen_arc :static '(r0 r1 r2 r3 r4 r5 r6 r7) '(r0))
(dec-method :gen_quadratic 'gui/path/gen_quadratic :static '(r0 r1 r2 r3 r4 r5 r6 r7 r8) '(r0))
(dec-method :gen_cubic 'gui/path/gen_cubic :static '(r0 r1 r2 r3 r4 r5 r6 r7 r8 r9 r10) '(r0))
(dec-method :stroke_joints 'gui/path/stroke_joints :static '(r0 r1 r2 r3 r4 r5 r6 r7 r8 r9 r10) '(r0))
(dec-method :stroke_polylines 'gui/path/stroke_polylines :static '(r0 r1 r2 r3 r4 r5 r6 r7) '(r0))
(dec-method :stroke_polygons 'gui/path/stroke_polygons :static '(r0 r1 r2 r3 r4 r5) '(r0))
;; Lisp-callable wrappers: each takes and returns (r0 r1).
(dec-method :lisp_transform 'gui/path/lisp_transform :static '(r0 r1) '(r0 r1))
(dec-method :lisp_simplify 'gui/path/lisp_simplify :static '(r0 r1) '(r0 r1))
(dec-method :lisp_filter 'gui/path/lisp_filter :static '(r0 r1) '(r0 r1))
(dec-method :lisp_gen_quadratic 'gui/path/lisp_gen_quadratic :static '(r0 r1) '(r0 r1))
(dec-method :lisp_gen_cubic 'gui/path/lisp_gen_cubic :static '(r0 r1) '(r0 r1))
(dec-method :lisp_gen_arc 'gui/path/lisp_gen_arc :static '(r0 r1) '(r0 r1))
(dec-method :lisp_stroke_polylines 'gui/path/lisp_stroke_polylines :static '(r0 r1) '(r0 r1))
(dec-method :lisp_stroke_polygons 'gui/path/lisp_stroke_polygons :static '(r0 r1) '(r0 r1))

;; Instance layout: no extra fields beyond the fixeds base.
(def-struct 'path 'fixeds)
(def-struct-end)

;; Line-join styles used when stroking.
(def-enum 'join)
	(enum 'miter 'bevel 'round)
(def-enum-end)

;; Line-cap styles used when stroking.
(def-enum 'cap)
	(enum 'butt 'square 'tri 'arrow 'round)
(def-enum-end)
{ "pile_set_name": "Github" }
#!/usr/bin/env python2
## -*- coding: utf-8 -*-

# Simplified, behavior-identical rewrite of a Triton-generated symbolic
# trace.  The generated file built the result through hundreds of
# redundant ref_### assignments (register MOVs); they collapse to the
# three-round multiply/xor-shift mix implemented in mix() below.
# Also made Python 2/3 compatible: print(...) with a single value
# behaves identically under both.

import sys

# 64-bit wrap-around mask for all arithmetic.
MASK64 = 0xFFFFFFFFFFFFFFFF
# Multiplier constant from the trace.  This is CityHash's kMul
# (0x9ddfea08eb382d69), used in its Hash128to64 finalization mix.
KMUL = 0x9DDFEA08EB382D69


def sx(bits, value):
    """Sign-extend `value` as a two's-complement integer of width `bits`.

    Kept for compatibility with the original generated script (same name
    and contract).  The original wrapped the IMUL operands in sx(); since
    every product is masked to 64 bits, signed and unsigned products are
    identical modulo 2**64, so mix() multiplies directly.
    """
    sign_bit = 1 << (bits - 1)
    return (value & (sign_bit - 1)) - (value & sign_bit)


def mix(value):
    """Collapse the generated ref_### chain into its actual computation.

    Derived step by step from the original assignments:

      lo = value & 0xFFFFFFFF           (bytes ref_743..ref_746 -> ref_6475)
      hi = (value >> 32) & 0xFFFFFFFF   (bytes ref_739..ref_742 -> ref_6658)
      a  = 8 + 8 * lo                   (the LEA: ref_6662 / ref_6666)
      x  = (hi ^ a) * K                 (ref_6828 / ref_6835)
      x  = (hi ^ x ^ (x >> 47)) * K     (ref_6875 / ref_6882)
      x  = (x ^ (x >> 47)) * K          (ref_6920 / ref_6922)

    All arithmetic is modulo 2**64; the result equals ref_8878 of the
    original (which was ref_6922 forwarded through MOVs).  Only the low
    64 bits of `value` are used, as in the original byte extraction.
    """
    lo = value & 0xFFFFFFFF
    hi = (value >> 32) & 0xFFFFFFFF
    a = (8 + 8 * lo) & MASK64
    x = ((hi ^ a) * KMUL) & MASK64
    x = ((hi ^ x ^ (x >> 47)) * KMUL) & MASK64
    x = ((x ^ (x >> 47)) * KMUL) & MASK64
    return x


if __name__ == '__main__':
    # Same CLI contract as the original: one integer argument on the
    # command line, the 64-bit mixed value on stdout.
    print(mix(int(sys.argv[1])))
{ "pile_set_name": "Github" }
// Demonstrates that no "match" events are emitted while the glob is paused.
var tap = require("tap")
var child_process = require("child_process")

// just some gnarly pattern with lots of matches
var pattern = "test/a/!(symlink)/**"

var bashResults = require("./bash-results.json")
var patterns = Object.keys(bashResults)
var glob = require("../")
var Glob = glob.Glob
var path = require("path")

// Run from the root of the project; this is usually where you're at
// anyway, but be sure.
process.chdir(path.resolve(__dirname, ".."))

// Case-insensitive lexicographic comparator used to canonicalize orderings.
function alphasort (x, y) {
  var lx = x.toLowerCase()
  var ly = y.toLowerCase()
  if (lx > ly) return 1
  if (lx < ly) return -1
  return 0
}

// Normalize discrepancies in ordering, duplication, ending slashes, and
// (on Windows) drive-letter prefixes so result sets compare equal.
function cleanResults (matches) {
  var normalized = matches.map(function (entry) {
    return entry.replace(/\/+/g, "/").replace(/\/$/, "")
  })
  normalized.sort(alphasort)
  var deduped = []
  normalized.forEach(function (entry) {
    if (entry !== deduped[deduped.length - 1]) deduped.push(entry)
  })
  deduped.sort(alphasort)
  return deduped.map(function (entry) {
    // de-windows
    if (process.platform !== 'win32') return entry
    return entry.replace(/^[a-zA-Z]:\\\\/, '/').replace(/\\/g, '/')
  })
}

var globResults = []

tap.test("use a Glob object, and pause/resume it", function (t) {
  var g = new Glob(pattern)
  var paused = false
  var res = []
  var expected = bashResults[pattern]

  g.on("pause", function () {
    console.error("pause")
  })

  g.on("resume", function () {
    console.error("resume")
  })

  // Every match must arrive while running; pause immediately and resume
  // shortly after, proving no matches are delivered in the paused state.
  g.on("match", function (file) {
    t.notOk(g.paused, "must not be paused")
    globResults.push(file)
    g.pause()
    t.ok(g.paused, "must be paused")
    setTimeout(g.resume.bind(g), 10)
  })

  g.on("end", function (matches) {
    t.pass("reached glob end")
    globResults = cleanResults(globResults)
    matches = cleanResults(matches)
    t.deepEqual(matches, globResults,
      "end event matches should be the same as match events")
    t.deepEqual(matches, expected,
      "glob matches should be the same as bash results")
    t.end()
  })
})
{ "pile_set_name": "Github" }
/***************************************************************************
 *                                                                         *
 *   This file was automatically generated using idlc.js                   *
 *   PLEASE DO NOT EDIT!!!!                                                *
 *                                                                         *
 ***************************************************************************/

#ifndef _BufferedStream_base_H_
#define _BufferedStream_base_H_

/**
 @author Leo Hoo <lion@9465.net>
 */

#include "../object.h"
#include "ifs/Stream.h"

namespace fibjs {

class Stream_base;

// Generated JS binding base for the "BufferedStream" class: declares the
// pure-virtual C++ interface plus the v8 glue (s_* thunks) that exposes it
// to JavaScript. Concrete behavior lives in the implementing subclass.
class BufferedStream_base : public Stream_base {
    DECLARE_CLASS(BufferedStream_base);

public:
    // BufferedStream_base
    // Factory invoked by the JS constructor; wraps an underlying Stream.
    static result_t _new(Stream_base* stm, obj_ptr<BufferedStream_base>& retVal, v8::Local<v8::Object> This = v8::Local<v8::Object>());
    // Async-capable operations take an AsyncEvent* and are implemented by
    // the subclass; retVal out-parameters carry the results.
    virtual result_t readText(int32_t size, exlib::string& retVal, AsyncEvent* ac) = 0;
    virtual result_t readLine(int32_t maxlen, exlib::string& retVal, AsyncEvent* ac) = 0;
    virtual result_t readLines(int32_t maxlines, v8::Local<v8::Array>& retVal) = 0;
    virtual result_t readUntil(exlib::string mk, int32_t maxlen, exlib::string& retVal, AsyncEvent* ac) = 0;
    virtual result_t writeText(exlib::string txt, AsyncEvent* ac) = 0;
    virtual result_t writeLine(exlib::string txt, AsyncEvent* ac) = 0;
    // Property accessors (stream is read-only; charset/EOL are read-write).
    virtual result_t get_stream(obj_ptr<Stream_base>& retVal) = 0;
    virtual result_t get_charset(exlib::string& retVal) = 0;
    virtual result_t set_charset(exlib::string newVal) = 0;
    virtual result_t get_EOL(exlib::string& retVal) = 0;
    virtual result_t set_EOL(exlib::string newVal) = 0;

public:
    template <typename T>
    static void __new(const T& args);

public:
    // v8 thunks: static entry points registered in class_info() below.
    static void s__new(const v8::FunctionCallbackInfo<v8::Value>& args);
    static void s_readText(const v8::FunctionCallbackInfo<v8::Value>& args);
    static void s_readLine(const v8::FunctionCallbackInfo<v8::Value>& args);
    static void s_readLines(const v8::FunctionCallbackInfo<v8::Value>& args);
    static void s_readUntil(const v8::FunctionCallbackInfo<v8::Value>& args);
    static void s_writeText(const v8::FunctionCallbackInfo<v8::Value>& args);
    static void s_writeLine(const v8::FunctionCallbackInfo<v8::Value>& args);
    static void s_get_stream(v8::Local<v8::Name> property, const v8::PropertyCallbackInfo<v8::Value>& args);
    static void s_get_charset(v8::Local<v8::Name> property, const v8::PropertyCallbackInfo<v8::Value>& args);
    static void s_set_charset(v8::Local<v8::Name> property, v8::Local<v8::Value> value, const v8::PropertyCallbackInfo<void>& args);
    static void s_get_EOL(v8::Local<v8::Name> property, const v8::PropertyCallbackInfo<v8::Value>& args);
    static void s_set_EOL(v8::Local<v8::Name> property, v8::Local<v8::Value> value, const v8::PropertyCallbackInfo<void>& args);

public:
    // Macro-generated async wrappers (acb_* callback form, ac_* sync form).
    ASYNC_MEMBERVALUE2(BufferedStream_base, readText, int32_t, exlib::string);
    ASYNC_MEMBERVALUE2(BufferedStream_base, readLine, int32_t, exlib::string);
    ASYNC_MEMBERVALUE3(BufferedStream_base, readUntil, exlib::string, int32_t, exlib::string);
    ASYNC_MEMBER1(BufferedStream_base, writeText, exlib::string);
    ASYNC_MEMBER1(BufferedStream_base, writeLine, exlib::string);
};
}

namespace fibjs {
// Registers the JS-visible method/property tables for "BufferedStream".
// Each async method is exposed twice: "name" and "nameSync".
inline ClassInfo& BufferedStream_base::class_info()
{
    static ClassData::ClassMethod s_method[] = {
        { "readText", s_readText, false },
        { "readTextSync", s_readText, false },
        { "readLine", s_readLine, false },
        { "readLineSync", s_readLine, false },
        { "readLines", s_readLines, false },
        { "readUntil", s_readUntil, false },
        { "readUntilSync", s_readUntil, false },
        { "writeText", s_writeText, false },
        { "writeTextSync", s_writeText, false },
        { "writeLine", s_writeLine, false },
        { "writeLineSync", s_writeLine, false }
    };

    static ClassData::ClassProperty s_property[] = {
        { "stream", s_get_stream, block_set, false },
        { "charset", s_get_charset, s_set_charset, false },
        { "EOL", s_get_EOL, s_set_EOL, false }
    };

    static ClassData s_cd = {
        "BufferedStream", false, s__new, NULL,
        ARRAYSIZE(s_method), s_method, 0, NULL, ARRAYSIZE(s_property), s_property, 0, NULL, NULL, NULL,
        &Stream_base::class_info()
    };

    static ClassInfo s_ci(s_cd);
    return s_ci;
}

inline void BufferedStream_base::s__new(const v8::FunctionCallbackInfo<v8::Value>& args)
{
    CONSTRUCT_INIT();
    __new(args);
}

// JS constructor: new BufferedStream(stream) — exactly one Stream argument.
template <typename T>
void BufferedStream_base::__new(const T& args)
{
    obj_ptr<BufferedStream_base> vr;

    METHOD_NAME("new BufferedStream()");
    CONSTRUCT_ENTER();

    METHOD_OVER(1, 1);

    ARG(obj_ptr<Stream_base>, 0);

    hr = _new(v0, vr, args.This());

    CONSTRUCT_RETURN();
}

// readText(size[, callback]): async when a callback is supplied.
inline void BufferedStream_base::s_readText(const v8::FunctionCallbackInfo<v8::Value>& args)
{
    exlib::string vr;

    METHOD_NAME("BufferedStream.readText");
    METHOD_INSTANCE(BufferedStream_base);
    METHOD_ENTER();

    ASYNC_METHOD_OVER(1, 1);

    ARG(int32_t, 0);

    if (!cb.IsEmpty()) {
        pInst->acb_readText(v0, cb);
        hr = CALL_RETURN_NULL;
    } else
        hr = pInst->ac_readText(v0, vr);

    METHOD_RETURN();
}

// readLine([maxlen][, callback]): maxlen defaults to -1 (no limit).
inline void BufferedStream_base::s_readLine(const v8::FunctionCallbackInfo<v8::Value>& args)
{
    exlib::string vr;

    METHOD_NAME("BufferedStream.readLine");
    METHOD_INSTANCE(BufferedStream_base);
    METHOD_ENTER();

    ASYNC_METHOD_OVER(1, 0);

    OPT_ARG(int32_t, 0, -1);

    if (!cb.IsEmpty()) {
        pInst->acb_readLine(v0, cb);
        hr = CALL_RETURN_NULL;
    } else
        hr = pInst->ac_readLine(v0, vr);

    METHOD_RETURN();
}

// readLines([maxlines]): synchronous only (no ASYNC_METHOD_OVER form).
inline void BufferedStream_base::s_readLines(const v8::FunctionCallbackInfo<v8::Value>& args)
{
    v8::Local<v8::Array> vr;

    METHOD_NAME("BufferedStream.readLines");
    METHOD_INSTANCE(BufferedStream_base);
    METHOD_ENTER();

    METHOD_OVER(1, 0);

    OPT_ARG(int32_t, 0, -1);

    hr = pInst->readLines(v0, vr);

    METHOD_RETURN();
}

// readUntil(mk[, maxlen][, callback]).
inline void BufferedStream_base::s_readUntil(const v8::FunctionCallbackInfo<v8::Value>& args)
{
    exlib::string vr;

    METHOD_NAME("BufferedStream.readUntil");
    METHOD_INSTANCE(BufferedStream_base);
    METHOD_ENTER();

    ASYNC_METHOD_OVER(2, 1);

    ARG(exlib::string, 0);
    OPT_ARG(int32_t, 1, -1);

    if (!cb.IsEmpty()) {
        pInst->acb_readUntil(v0, v1, cb);
        hr = CALL_RETURN_NULL;
    } else
        hr = pInst->ac_readUntil(v0, v1, vr);

    METHOD_RETURN();
}

// writeText(txt[, callback]): no return value (METHOD_VOID).
inline void BufferedStream_base::s_writeText(const v8::FunctionCallbackInfo<v8::Value>& args)
{
    METHOD_NAME("BufferedStream.writeText");
    METHOD_INSTANCE(BufferedStream_base);
    METHOD_ENTER();

    ASYNC_METHOD_OVER(1, 1);

    ARG(exlib::string, 0);

    if (!cb.IsEmpty()) {
        pInst->acb_writeText(v0, cb);
        hr = CALL_RETURN_NULL;
    } else
        hr = pInst->ac_writeText(v0);

    METHOD_VOID();
}

// writeLine(txt[, callback]): no return value (METHOD_VOID).
inline void BufferedStream_base::s_writeLine(const v8::FunctionCallbackInfo<v8::Value>& args)
{
    METHOD_NAME("BufferedStream.writeLine");
    METHOD_INSTANCE(BufferedStream_base);
    METHOD_ENTER();

    ASYNC_METHOD_OVER(1, 1);

    ARG(exlib::string, 0);

    if (!cb.IsEmpty()) {
        pInst->acb_writeLine(v0, cb);
        hr = CALL_RETURN_NULL;
    } else
        hr = pInst->ac_writeLine(v0);

    METHOD_VOID();
}

// Property getter: stream (read-only; "block_set" registered as setter).
inline void BufferedStream_base::s_get_stream(v8::Local<v8::Name> property, const v8::PropertyCallbackInfo<v8::Value>& args)
{
    obj_ptr<Stream_base> vr;

    METHOD_NAME("BufferedStream.stream");
    METHOD_INSTANCE(BufferedStream_base);
    PROPERTY_ENTER();

    hr = pInst->get_stream(vr);

    METHOD_RETURN();
}

inline void BufferedStream_base::s_get_charset(v8::Local<v8::Name> property, const v8::PropertyCallbackInfo<v8::Value>& args)
{
    exlib::string vr;

    METHOD_NAME("BufferedStream.charset");
    METHOD_INSTANCE(BufferedStream_base);
    PROPERTY_ENTER();

    hr = pInst->get_charset(vr);

    METHOD_RETURN();
}

inline void BufferedStream_base::s_set_charset(v8::Local<v8::Name> property, v8::Local<v8::Value> value, const v8::PropertyCallbackInfo<void>& args)
{
    METHOD_NAME("BufferedStream.charset");
    METHOD_INSTANCE(BufferedStream_base);
    PROPERTY_ENTER();
    PROPERTY_VAL(exlib::string);

    hr = pInst->set_charset(v0);

    PROPERTY_SET_LEAVE();
}

inline void BufferedStream_base::s_get_EOL(v8::Local<v8::Name> property, const v8::PropertyCallbackInfo<v8::Value>& args)
{
    exlib::string vr;

    METHOD_NAME("BufferedStream.EOL");
    METHOD_INSTANCE(BufferedStream_base);
    PROPERTY_ENTER();

    hr = pInst->get_EOL(vr);

    METHOD_RETURN();
}

inline void BufferedStream_base::s_set_EOL(v8::Local<v8::Name> property, v8::Local<v8::Value> value, const v8::PropertyCallbackInfo<void>& args)
{
    METHOD_NAME("BufferedStream.EOL");
    METHOD_INSTANCE(BufferedStream_base);
    PROPERTY_ENTER();
    PROPERTY_VAL(exlib::string);

    hr = pInst->set_EOL(v0);

    PROPERTY_SET_LEAVE();
}
}

#endif
{ "pile_set_name": "Github" }
<?php

declare(strict_types=1);

// Presumably a route/dispatch test fixture that echoes the identifier of the
// "about" controller's index GET action — verify against the test including it.
echo 'about.index.get';
{ "pile_set_name": "Github" }
// GENERATE BY ./scripts/generate.ts // DON NOT EDIT IT MANUALLY import * as React from 'react'; import SettingOutlinedSvg from "@ant-design/icons-svg/es/asn/SettingOutlined"; import AntdIcon from '../components/AntdIcon'; var SettingOutlined = function SettingOutlined(props, ref) { return React.createElement(AntdIcon, Object.assign({}, props, { ref: ref, icon: SettingOutlinedSvg })); }; SettingOutlined.displayName = 'SettingOutlined'; export default React.forwardRef(SettingOutlined);
{ "pile_set_name": "Github" }
<?php
/**
 * Session API: WP_Session_Tokens class
 *
 * @package WordPress
 * @subpackage Session
 * @since 4.7.0
 */

/**
 * Abstract class for managing user session tokens.
 *
 * @since 4.0.0
 */
abstract class WP_Session_Tokens {

	/**
	 * User ID.
	 *
	 * @since 4.0.0
	 * @var int User ID.
	 */
	protected $user_id;

	/**
	 * Protected constructor. Use the `get_instance()` method to get the instance.
	 *
	 * @since 4.0.0
	 *
	 * @param int $user_id User whose session to manage.
	 */
	protected function __construct( $user_id ) {
		$this->user_id = $user_id;
	}

	/**
	 * Retrieves a session manager instance for a user.
	 *
	 * This method contains a {@see 'session_token_manager'} filter, allowing a plugin to swap out
	 * the session manager for a subclass of `WP_Session_Tokens`.
	 *
	 * @since 4.0.0
	 *
	 * @param int $user_id User whose session to manage.
	 * @return WP_Session_Tokens The session object, which is by default an instance of
	 *                           the `WP_User_Meta_Session_Tokens` class.
	 */
	final public static function get_instance( $user_id ) {
		/**
		 * Filters the class name for the session token manager.
		 *
		 * @since 4.0.0
		 *
		 * @param string $session Name of class to use as the manager.
		 *                        Default 'WP_User_Meta_Session_Tokens'.
		 */
		$manager = apply_filters( 'session_token_manager', 'WP_User_Meta_Session_Tokens' );
		return new $manager( $user_id );
	}

	/**
	 * Hashes the given session token for storage.
	 *
	 * Note: previously declared `final private`; the `final` modifier was
	 * removed because PHP 8.0 warns that private methods cannot be final
	 * (they are never inherited, so `final` had no effect anyway).
	 *
	 * @since 4.0.0
	 *
	 * @param string $token Session token to hash.
	 * @return string A hash of the session token (a verifier).
	 */
	private function hash_token( $token ) {
		// If ext/hash is not present, use sha1() instead.
		if ( function_exists( 'hash' ) ) {
			return hash( 'sha256', $token );
		} else {
			return sha1( $token );
		}
	}

	/**
	 * Retrieves a user's session for the given token.
	 *
	 * @since 4.0.0
	 *
	 * @param string $token Session token.
	 * @return array|null The session, or null if it does not exist.
	 */
	final public function get( $token ) {
		$verifier = $this->hash_token( $token );
		return $this->get_session( $verifier );
	}

	/**
	 * Validates the given session token for authenticity and validity.
	 *
	 * Checks that the given token is present and hasn't expired.
	 *
	 * @since 4.0.0
	 *
	 * @param string $token Token to verify.
	 * @return bool Whether the token is valid for the user.
	 */
	final public function verify( $token ) {
		$verifier = $this->hash_token( $token );
		return (bool) $this->get_session( $verifier );
	}

	/**
	 * Generates a session token and attaches session information to it.
	 *
	 * A session token is a long, random string. It is used in a cookie
	 * to link that cookie to an expiration time and to ensure the cookie
	 * becomes invalidated when the user logs out.
	 *
	 * This function generates a token and stores it with the associated
	 * expiration time (and potentially other session information via the
	 * {@see 'attach_session_information'} filter).
	 *
	 * @since 4.0.0
	 *
	 * @param int $expiration Session expiration timestamp.
	 * @return string Session token.
	 */
	final public function create( $expiration ) {
		/**
		 * Filters the information attached to the newly created session.
		 *
		 * Can be used to attach further information to a session.
		 *
		 * @since 4.0.0
		 *
		 * @param array $session Array of extra data.
		 * @param int   $user_id User ID.
		 */
		$session               = apply_filters( 'attach_session_information', array(), $this->user_id );
		$session['expiration'] = $expiration;

		// IP address.
		if ( ! empty( $_SERVER['REMOTE_ADDR'] ) ) {
			$session['ip'] = $_SERVER['REMOTE_ADDR'];
		}

		// User-agent.
		if ( ! empty( $_SERVER['HTTP_USER_AGENT'] ) ) {
			$session['ua'] = wp_unslash( $_SERVER['HTTP_USER_AGENT'] );
		}

		// Timestamp.
		$session['login'] = time();

		$token = wp_generate_password( 43, false, false );

		$this->update( $token, $session );

		return $token;
	}

	/**
	 * Updates the data for the session with the given token.
	 *
	 * @since 4.0.0
	 *
	 * @param string $token   Session token to update.
	 * @param array  $session Session information.
	 */
	final public function update( $token, $session ) {
		$verifier = $this->hash_token( $token );
		$this->update_session( $verifier, $session );
	}

	/**
	 * Destroys the session with the given token.
	 *
	 * @since 4.0.0
	 *
	 * @param string $token Session token to destroy.
	 */
	final public function destroy( $token ) {
		$verifier = $this->hash_token( $token );
		$this->update_session( $verifier, null );
	}

	/**
	 * Destroys all sessions for this user except the one with the given token (presumably the one in use).
	 *
	 * @since 4.0.0
	 *
	 * @param string $token_to_keep Session token to keep.
	 */
	final public function destroy_others( $token_to_keep ) {
		$verifier = $this->hash_token( $token_to_keep );
		$session  = $this->get_session( $verifier );
		if ( $session ) {
			$this->destroy_other_sessions( $verifier );
		} else {
			// Unknown token: safest course is to drop every session.
			$this->destroy_all_sessions();
		}
	}

	/**
	 * Determines whether a session is still valid, based on its expiration timestamp.
	 *
	 * @since 4.0.0
	 *
	 * @param array $session Session to check.
	 * @return bool Whether session is valid.
	 */
	final protected function is_still_valid( $session ) {
		return $session['expiration'] >= time();
	}

	/**
	 * Destroys all sessions for a user.
	 *
	 * @since 4.0.0
	 */
	final public function destroy_all() {
		$this->destroy_all_sessions();
	}

	/**
	 * Destroys all sessions for all users.
	 *
	 * @since 4.0.0
	 */
	final public static function destroy_all_for_all_users() {
		/** This filter is documented in wp-includes/class-wp-session-tokens.php */
		$manager = apply_filters( 'session_token_manager', 'WP_User_Meta_Session_Tokens' );
		call_user_func( array( $manager, 'drop_sessions' ) );
	}

	/**
	 * Retrieves all sessions for a user.
	 *
	 * @since 4.0.0
	 *
	 * @return array Sessions for a user.
	 */
	final public function get_all() {
		return array_values( $this->get_sessions() );
	}

	/**
	 * Retrieves all sessions of the user.
	 *
	 * @since 4.0.0
	 *
	 * @return array Sessions of the user.
	 */
	abstract protected function get_sessions();

	/**
	 * Retrieves a session based on its verifier (token hash).
	 *
	 * @since 4.0.0
	 *
	 * @param string $verifier Verifier for the session to retrieve.
	 * @return array|null The session, or null if it does not exist.
	 */
	abstract protected function get_session( $verifier );

	/**
	 * Updates a session based on its verifier (token hash).
	 *
	 * Omitting the second argument destroys the session.
	 *
	 * @since 4.0.0
	 *
	 * @param string $verifier Verifier for the session to update.
	 * @param array  $session  Optional. Session. Omitting this argument destroys the session.
	 */
	abstract protected function update_session( $verifier, $session = null );

	/**
	 * Destroys all sessions for this user, except the single session with the given verifier.
	 *
	 * @since 4.0.0
	 *
	 * @param string $verifier Verifier of the session to keep.
	 */
	abstract protected function destroy_other_sessions( $verifier );

	/**
	 * Destroys all sessions for the user.
	 *
	 * @since 4.0.0
	 */
	abstract protected function destroy_all_sessions();

	/**
	 * Destroys all sessions for all users.
	 *
	 * @since 4.0.0
	 */
	public static function drop_sessions() {}
}
{ "pile_set_name": "Github" }
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild

from pkg_resources import parse_version
import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
import collections

# Generated parsers depend on the 0.9+ runtime API; fail fast on older runtimes.
if parse_version(kaitaistruct.__version__) < parse_version('0.9'):
    raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))

# Sibling generated parser for the individual PCX images referenced by offset.
import pcx


class PcxDcx(KaitaiStruct):
    """DCX is a simple extension of PCX image format allowing to bundle
    many PCX images (typically, pages of a document) in one file. It saw
    some limited use in DOS-era fax software, but was largely superseded
    with multi-page TIFFs and PDFs since then.
    """
    # Field names in sequence order; used by the debug/visualizer tooling.
    SEQ_FIELDS = ["magic", "files"]

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        # Per-field start/end stream positions, recorded for debug builds.
        self._debug = collections.defaultdict(dict)

    def _read(self):
        # Fixed 4-byte DCX signature.
        self._debug['magic']['start'] = self._io.pos()
        self.magic = self._io.read_bytes(4)
        self._debug['magic']['end'] = self._io.pos()
        if not self.magic == b"\xB1\x68\xDE\x3A":
            raise kaitaistruct.ValidationNotEqualError(b"\xB1\x68\xDE\x3A", self.magic, self._io, u"/seq/0")
        # Offset table: entries are read until a zero offset terminator
        # (the terminator entry itself is kept in `files`).
        self._debug['files']['start'] = self._io.pos()
        self.files = []
        i = 0
        while True:
            if not 'arr' in self._debug['files']:
                self._debug['files']['arr'] = []
            self._debug['files']['arr'].append({'start': self._io.pos()})
            _t_files = self._root.PcxOffset(self._io, self, self._root)
            _t_files._read()
            _ = _t_files
            self.files.append(_)
            self._debug['files']['arr'][len(self.files) - 1]['end'] = self._io.pos()
            if _.ofs_body == 0:
                break
            i += 1
        self._debug['files']['end'] = self._io.pos()

    class PcxOffset(KaitaiStruct):
        # A single table entry: absolute offset of one embedded PCX image.
        SEQ_FIELDS = ["ofs_body"]

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._debug = collections.defaultdict(dict)

        def _read(self):
            # Little-endian u4: 0 marks the end of the offset table.
            self._debug['ofs_body']['start'] = self._io.pos()
            self.ofs_body = self._io.read_u4le()
            self._debug['ofs_body']['end'] = self._io.pos()

        @property
        def body(self):
            """Lazily parsed PCX image at `ofs_body` (None for the terminator).

            The result is cached in `_m_body`; the stream position is saved
            and restored around the out-of-band seek.
            """
            if hasattr(self, '_m_body'):
                return self._m_body if hasattr(self, '_m_body') else None

            if self.ofs_body != 0:
                _pos = self._io.pos()
                self._io.seek(self.ofs_body)
                self._debug['_m_body']['start'] = self._io.pos()
                self._m_body = pcx.Pcx(self._io)
                self._m_body._read()
                self._debug['_m_body']['end'] = self._io.pos()
                self._io.seek(_pos)
            return self._m_body if hasattr(self, '_m_body') else None
{ "pile_set_name": "Github" }
<!DOCTYPE html> <html lang="en"> <head> <link rel="stylesheet" href="../../../../Styles/jqx.apireference.css" type="text/css" /> <script type="text/javascript" src="../../../../scripts/jquery-1.11.1.min.js"></script> <script type="text/javascript" src="../../../../scripts/documentation.js"></script> <meta name="keywords" content="jQuery, jQuery Validation, Validator, Vlidation Widget, jqxValidator" /> <meta name="description" content="This page represents the help documentation of the jqxValidator widget." /> <title>jqxValidator API Reference</title> <script type="text/javascript"> $(document).ready(function () { $(".documentation-option-type-click").click(function (event) { $(event.target).parents('tr').next().find(".property-content").toggle(); }); }); </script> </head> <body> <div id="properties"> <h2 class="documentation-top-header">Properties</h2> <table class="documentation-table"> <tr> <th>Name </th> <th>Type </th> <th>Default </th> </tr> <tr> <td class="documentation-option-type-click"> <span id='Span6'>arrow</span> </td> <td> <span>Boolean</span> </td> <td>true </td> </tr> <tr> <td colspan='3' style='width: 100%'> <div class="documentation-option-description property-content" style="display: none;"> <p> Sets or gets whether the arrow of the hints will be shown. </p> <h4>Code examples</h4> <p> Set the <code>arrow</code> property. </p> <pre><code>$('#jqxValidator').jqxValidator( { arrow: false } ); </code></pre> <p> Get the <code>arrow</code> property. 
</p> <pre><code>var arrow = $('#jqxValidator').jqxValidator('arrow'); </code></pre> <div style="padding-bottom: 5px;"> <em>Try it:</em> <a target="_blank" href="http://jsfiddle.net/jqwidgets/qeXG7/">arrow is set to false</a> </div> </div> </td> </tr> <tr> <td class="documentation-option-type-click"> <span id='Span7'>animation</span> </td> <td> <span>String</span> </td> <td>'fade' </td> </tr> <tr> <td colspan='3' style='width: 100%'> <div class="documentation-option-description property-content" style="display: none;"> <p> Sets or gets the animation of showing, hiding the hints. </p> <br /> <b>Possible Values:</b> <br /> <pre><code>'fade'</code></pre> <pre><code>'none'</code></pre> <h4>Code examples</h4> <p> Initialize a jqxValidator with the <code>animation</code> property specified. </p> <pre><code>$('#jqxValidator').jqxValidator({ animation: 'none' }); </code></pre> <p> Get the <code>animation</code> property. </p> <pre><code>var animation = $('#jqxValidator').jqxValidator('animation'); </code></pre> <div style="padding-bottom: 5px;"> <em>Try it:</em> <a target="_blank" href="http://jsfiddle.net/jqwidgets/zUafx/">animation is set to 'none'</a> </div> </div> </td> </tr> <tr> <td class="documentation-option-type-click"> <span id='Span8'>animationDuration</span> </td> <td> <span>Number</span> </td> <td>150 </td> </tr> <tr> <td colspan='3' style='width: 100%'> <div class="documentation-option-description property-content" style="display: none;"> <p> Sets or gets the duration of the animation used for showing/hiding the hints. </p> <h4>Code examples</h4> <p> Set the <code>animationDuration</code> property. </p> <pre><code>$('#jqxValidator').jqxValidator({ animationDuration: 300 }); </code></pre> <p> Get the <code>animationDuration</code> property. 
</p> <pre><code>var animationDuration = $('#jqxValidator').jqxValidator('animationDuration'); </code></pre> <div style="padding-bottom: 5px;"> <em>Try it:</em> <a target="_blank" href="http://jsfiddle.net/jqwidgets/WWbn6/">animationDuration is set to 3000</a> </div> </div> </td> </tr> <tr> <td class="documentation-option-type-click"> <span id='Span9'>closeOnClick</span> </td> <td> <span>Boolean</span> </td> <td>true </td> </tr> <tr> <td colspan='3' style='width: 100%'> <div class="documentation-option-description property-content" style="display: none;"> <p> Sets or gets whether the hints will be closed when the user click on them. </p> <h4>Code examples</h4> <p> Set the <code>closeOnClick</code> property. </p> <pre><code>$('#jqxValidator').jqxValidator({ closeOnClick: false }); </code></pre> <p> Get the <code>closeOnClick</code> property. </p> <pre><code>var closeOnClick = $('#jqxValidator').jqxValidator('closeOnClick'); </code></pre> <div style="padding-bottom: 5px;"> <em>Try it:</em> <a target="_blank" href="http://jsfiddle.net/jqwidgets/n4cRJ/">closeOnClick is set to false</a> </div> </div> </td> </tr> <tr> <td class="documentation-option-type-click"> <span id='Span2'>focus</span> </td> <td> <span>Boolean</span> </td> <td>true </td> </tr> <tr> <td colspan='3' style='width: 100%'> <div class="documentation-option-description property-content" style="display: none;"> <p> Sets or gets whether the jqxValidator will focus the first invalid input. </p> <h4>Code examples</h4> <p> Set the <code>focus</code> property. </p> <pre><code>$('#jqxValidator').jqxValidator( { focus: false } ); </code></pre> <p> Get the <code>focus</code> property. 
</p> <pre><code>var focus = $('#jqxValidator').jqxValidator('focus'); </code></pre> <div style="padding-bottom: 5px;"> <em>Try it:</em> <a target="_blank" href="http://jsfiddle.net/jqwidgets/hnVmz/">focus is set to false</a> </div> </div> </td> </tr> <tr> <td class="documentation-option-type-click"> <span id='Span12'>hintType</span> </td> <td> <span>String</span> </td> <td>"tooltip" </td> </tr> <tr> <td colspan='3' style='width: 100%'> <div class="documentation-option-description property-content" style="display: none;"> <p>Sets or gets the hint type. Possible values: 'tooltip' and 'label'.</p> <h4>Code example</h4> <p> Set the <code>hintType</code> property. </p> <pre><code>$('#jqxValidator').jqxValidator({hintType : 'label'}); </code></pre> <p> Get the <code>hintType</code> property. </p> <pre><code>var hintType = $('#jqxValidator').jqxValidator('hintType'); </code></pre> <div style="padding-bottom: 5px;"> <em>Try it:</em> <a target="_blank" href="http://jsfiddle.net/jqwidgets/FrpxJ/">hintType is set to 'label'</a> </div> </div> </td> </tr> <tr> <td class="documentation-option-type-click"> <span id='Span10'>onError</span> </td> <td> <span>Function</span> </td> <td>null </td> </tr> <tr> <td colspan='3' style='width: 100%'> <div class="documentation-option-description property-content" style="display: none;"> <p> Sets or gets callback which will be called on validation error. </p> <h4>Code examples</h4> <p> Set the <code>onError</code> property . </p> <pre><code>$('#jqxValidator').jqxValidator({ onError: function () { alert('You havent filled the form correctly!'); } }); </code></pre> <p> Get the <code>onError</code> property. 
</p> <pre><code>var onError = $('#jqxValidator').jqxValidator('onError'); </code></pre> <div style="padding-bottom: 5px;"> <em>Try it:</em> <a target="_blank" href="http://jsfiddle.net/jqwidgets/mGqWD/">onError is set to a custom function</a> </div> </div> </td> </tr> <tr> <td class="documentation-option-type-click"> <span id='Span11'>onSuccess</span> </td> <td> <span>Function</span> </td> <td>null </td> </tr> <tr> <td colspan='3' style='width: 100%'> <div class="documentation-option-description property-content" style="display: none;"> <p> Sets or gets the callback which will be executed on success. </p> <h4>Code examples</h4> <p> Set the <code>onSuccess</code> property. </p> <pre><code>$('#jqxValidator').jqxValidator({ onSuccess: function () { alert('Success!'); } }); </code></pre> <p> Get the <code>onSuccess</code> property. </p> <pre><code>var onSuccess = $('#jqxValidator').jqxValidator('onSuccess'); </code></pre> <div style="padding-bottom: 5px;"> <em>Try it:</em> <a target="_blank" href="http://jsfiddle.net/jqwidgets/8CtQU/">onSuccess is set to a custom function</a> </div> </div> </td> </tr> <tr> <td class="documentation-option-type-click"> <span id='Span5'>position</span> </td> <td> <span>String</span> </td> <td>'right' </td> </tr> <tr> <td colspan='3' style='width: 100%'> <div class="documentation-option-description property-content" style="display: none;"> <p> Sets or gets the default position of the hints. </p> <h4>Code examples</h4> <p> Set the <code>position</code> property. </p> <pre><code>$('#jqxValidator').jqxValidator({ position: 'topcenter' }); </code></pre> <p> Get the <code>position</code> property. 
</p> <pre><code>var position = $('#jqxValidator').jqxValidator('position'); </code></pre> <div style="padding-bottom: 5px;"> <em>Try it:</em> <a target="_blank" href="http://jsfiddle.net/jqwidgets/d3TyU/">position is set to 'left'</a> </div> </div> </td> </tr> <tr> <td class="documentation-option-type-click"> <span id='property-name-disabled'>rules</span> </td> <td> <span>Array</span> </td> <td>[] </td> </tr> <tr> <td colspan='3' style='width: 100%'> <div class="documentation-option-description property-content" style="display: none;"> <p> Sets jqxValidator rules. Format of a single rule is as follows: <p> <pre><code> { input: 'selector-of-the-input', message: 'Custom message on error', action: 'Custom action (keyup, change...etc)', rule: 'Build rule (ssn, phone, email...) or custom function', position: 'Position of the hint (format pos:x,y)', hintRender: 'Function for hint rendering' } </code></pre> </p> Let's look at all different properties of a single rule. <br /> The input property must be selector of the input you want to validate (we recommend to use ids - example: '#userInput'). <br /> The message property is the custom message which will popup, on validation error, for the current rule. <br /> Action is a string which is the event on which you want to validate the input (for example click, mouseup, blur, keyup...). <br /> The rule property is defining the way you want to validate the input. <br /> <br /> In jqxValidator there are built in rules like: 'ssn', 'email', 'required', 'phone', 'zipCode', 'maxLength=len', 'minLength=len', 'length=max,min'. In the last three validation rules the strings after the "=" are the rule parameters, for example: 'maxLength=13'. 
You can also write a function for a custom rule.<br /> <br /> 'ssn' - 'Social Security Number' Requires input like: ___-__-____<br /> 'email' - requires valid e-mail address.<br /> 'required' - requires a CheckBox or Radio Button to be checked or any value to be entered in an Input.<br /> 'phone' - requires input like: (___)___-____<br /> 'zipCode' - requires a valid zip code like: ___-__-____<br /> 'maxLength=len' - restricts the maximum input characters to 'len'.<br /> 'minLength=len' - restricts the minimum input characters to 'len'<br /> 'length=min,max' - restricts the input length to a specific range.<br /> <br /> <br /> Hint positions are as follows: 'left', 'right', 'top', 'bottom', 'bottomcenter', 'topcenter', 'topleft', 'topright', 'bottomleft', 'bottomright'. If you wish to set also an offset you can pass the position like: 'topleft:15,3'. This is going to position your message popup in top-left of the input with offset: left - 15px, top - 3px.<br /> <br /> The last property is hintRender. This is function used for hint rendering. If you don't pass one the default is going to be used. Notice that the position and hintRender are optional. If you don't set them the default values are going to be used. </p> <h4>Code examples</h4> <p> Initialize a jqxValidator with the <code>rules</code> property specified. </p> <pre><code> $('#form').jqxValidator( { rules: [{ input: '#passwordInput', message: 'The password is required!', action: 'keyup', rule: 'required' }, { input: '#passwordInput', message: 'Your password must be between 4 and 12 characters!', action: 'keyup', rule: 'length=4,12' }] } );</code></pre> <br /> Custom Rule Definition. The function returns true or false depending on whether the input is correct or not. 
<pre><code> { input: '#birthInput', message: 'Your birth date must be between 1/1/1900 and 1/1/2012.', action: 'valuechanged', rule: function () { var date = $('#birthInput').jqxDateTimeInput('value'); var result = date.dateTime.getFullYear() >= 1900 && date.dateTime.getFullYear() <= 2012; return result; } </code></pre> <p> Set the hintRender property of a rule. </p> <pre><code> $('#sendButton').on('click', function () { $('#testForm').jqxValidator('validate'); }); var that = this; var render = function (message, input) { if (that._message) { that._message.remove(); } that._message = $("<pre style='width: 700px; max-width: 700px; margin: 10px;' class='code'><span style="clear: both; padding: 0px; margin: 0px; color: #11a;">&lt;span style=<span style=" clear: both; padding: 0px; margin: 0px; color: #a24;">'background: red; color: white;'</span>&gt;</span><span style="clear: both; padding: 0px; margin: 0px; color: #a24;">" + message + "</span><span style="clear: both; padding: 0px; margin: 0px; color: #11a;">&lt;/span&gt;</span><div/></pre>") that._message.appendTo($(document.body)); return that._message; } $('#testForm').jqxValidator({ rules: [ { input: '#userInput', message: 'Username is required!', action: 'keyup, blur', rule: 'required', hintRender: render }, { input: '#userInput', message: 'Your username must be between 3 and 12 characters!', action: 'keyup, blur', rule: 'length=3,12', hintRender: render } ]}); </code></pre> <div style="padding-bottom: 5px;"> <em>Try it:</em> <a target="_blank" href="http://jsfiddle.net/jqwidgets/GQAfe/">rules is set to a custom function</a> </div> </div> </td> </tr> <tr> <td class="documentation-option-type-click"> <span id='Span51'>rtl</span> </td> <td> <span>Boolean</span> </td> <td>false </td> </tr> <tr> <td colspan='3' style='width: 100%'> <div class="documentation-option-description property-content" style="display: none;"> <p>Sets or gets a value indicating whether the validation messages are displayed from the left side 
instead of the right.</p> <h4>Code example</h4> <p> Set the <code>rtl</code> property. </p> <pre><code>$('#jqxValidator').jqxValidator({rtl : true}); </code></pre> <p> Get the <code>rtl</code> property. </p> <pre><code>var rtl = $('#jqxValidator').jqxValidator('rtl'); </code></pre> <div style="padding-bottom: 5px;"> <em>Try it:</em> <a target="_blank" href="http://jsfiddle.net/jqwidgets/dvhSq/">rtl is set to true</a> </div> </div> </td> </tr> <tr> <td colspan='3' style='width: 100%'> <h2 class="documentation-top-header">Events</h2> </td> </tr> <tr> <td class="documentation-option-type-click"> <span id='Span27'>validationError</span> </td> <td> <span>Event</span> </td> <td></td> </tr> <tr> <td colspan='3' style='width: 100%'> <div class="documentation-option-description property-content" style="display: none;"> <p> This is triggered when the form is validated with some errors. </p> <h4>Code examples</h4> <p> Bind to the <code>validationError</code> event. </p> <pre><code>$('#jqxValidator').on('validationError', function (event) { // Some code here. }); </code></pre> <div style="padding-bottom: 5px;"> <em>Try it:</em> <a target="_blank" href="http://jsfiddle.net/jqwidgets/AUGwP/">Bind to the validationError event by type jqxValidator.</a> </div> </div> </td> </tr> <tr> <td class="documentation-option-type-click"> <span id='Span13'>validationSuccess</span> </td> <td> <span>Event</span> </td> <td></td> </tr> <tr> <td colspan='3' style='width: 100%'> <div class="documentation-option-description property-content" style="display: none;"> <p> This is triggered when the form is validated whithout any errors. </p> <h4>Code examples</h4> <p> Bind to the <code>validationSuccess</code> event. </p> <pre><code>$('#jqxValidator').on('validationSuccess', function (event) { // Some code here. 
}); </code></pre> <div style="padding-bottom: 5px;"> <em>Try it:</em> <a target="_blank" href="http://jsfiddle.net/jqwidgets/tkUmx/">Bind to the validationSuccess event by type jqxValidator.</a> </div> </div> </td> </tr> <tr> <td colspan='3' style='width: 100%'> <h2 class="documentation-top-header">Methods</h2> </td> </tr> <tr> <td class="documentation-option-type-click"> <span id='Span20'>hideHint</span> </td> <td> <span>Method</span> </td> <td></td> </tr> <tr> <td colspan='3' style='width: 100%'> <div class="documentation-option-description property-content" style="display: none;"> <p>Hide all hints for a specific input.</p> <div class="methodArgs"> <table class="arguments"> <tbody> <tr> <th>Parameter</th> <th>Type</th> <th>Description</th> </tr> <tr> <td><em>id</em></td> <td>String</td> <td></td> </tr> </tbody> </table> <strong>Return Value</strong><br /> <em>None</em> </div> <h4>Code example</h4> <p> Invoke the <code>hideHint</code> method. </p> <pre><code>$('#jqxValidator').jqxValidator('hideHint', '#passwordInput'); </code></pre> <div style="padding-bottom: 5px;"> <em>Try it:</em> <a target="_blank" href="http://jsfiddle.net/jqwidgets/9WgYa/">hides all hints for a specific input in the jqxValidator.</a> </div> </div> </td> </tr> <tr> <td class="documentation-option-type-click"> <span id='Span21'>hide</span> </td> <td> <span>Method</span> </td> <td></td> </tr> <tr> <td colspan='3' style='width: 100%'> <div class="documentation-option-description property-content" style="display: none;"> <p>Hiding all hints for the current form.</p> <div class="methodArgs"> <table class="arguments"> <tbody> <tr> <th>Parameter</th> <th>Type</th> <th>Description</th> </tr> <tr> <td><em>None</em></td> <td></td> <td></td> </tr> </tbody> </table> <strong>Return Value</strong><br /> <em>None</em> </div> <h4>Code example</h4> <p> Invoke the <code>hide</code> method. 
</p> <pre><code>$('#jqxValidator').jqxValidator('hide'); </code></pre> <div style="padding-bottom: 5px;"> <em>Try it:</em> <a target="_blank" href="http://jsfiddle.net/jqwidgets/uA6Gt/">hides all hints in the jqxValidator.</a> </div> </div> </td> </tr> <tr> <td class="documentation-option-type-click"> <span id='Span22'>updatePosition</span> </td> <td> <span>Method</span> </td> <td></td> </tr> <tr> <td colspan='3' style='width: 100%'> <div class="documentation-option-description property-content" style="display: none;"> <p>Updating the positions of all hints. This is useful for example on window resize.</p> <div class="methodArgs"> <table class="arguments"> <tbody> <tr> <th>Parameter</th> <th>Type</th> <th>Description</th> </tr> <tr> <td><em>None</em></td> <td></td> <td></td> </tr> </tbody> </table> <strong>Return Value</strong><br /> <em>None</em> </div> <h4>Code example</h4> <p> Invoke the <code>checkItem</code> method. </p> <pre><code>$('#jqxValidator').jqxValidator('updatePosition'); </code></pre> <div style="padding-bottom: 5px;"> <em>Try it:</em> <a target="_blank" href="http://jsfiddle.net/jqwidgets/9Xpzn/">updates a position of all hints in the jqxValidator.</a> </div> </div> </td> </tr> <tr> <td class="documentation-option-type-click"> <span id='Span37'>validate</span> </td> <td> <span>Method</span> </td> <td></td> </tr> <tr> <td colspan='3' style='width: 100%'> <div class="documentation-option-description property-content" style="display: none;"> <p> Validating the whole form. </p> <div class="methodArgs"> <table class="arguments"> <tbody> <tr> <th>Parameter</th> <th>Type</th> <th>Description</th> </tr> <tr> <td><em>html element</em></td> <td>Object</td> <td></td> </tr> </tbody> </table> <strong>Return Value</strong><br /> <em>None</em> </div> <h4>Code example</h4> <p> Invoke the <code>validate</code> method. 
</p> <pre><code>$('#jqxValidator').jqxValidator('validate', element); </code></pre> <div style="padding-bottom: 5px;"> <em>Try it:</em> <a target="_blank" href="http://jsfiddle.net/jqwidgets/hQwmm/">validates the jqxValidator.</a> </div> </div> </td> </tr> <tr> <td class="documentation-option-type-click"> <span id='Span19'>validateInput</span> </td> <td> <span>Method</span> </td> <td></td> </tr> <tr> <td colspan='3' style='width: 100%'> <div class="documentation-option-description property-content" style="display: none;"> <p> Validates a single input. This method accepts a single parameter which is selector of the input you want to validate. Notice that this selector should be the same like the one you've passed in the rules array. </p> <div class="methodArgs"> <table class="arguments"> <tbody> <tr> <th>Parameter</th> <th>Type</th> <th>Description</th> </tr> <tr> <td><em>id</em></td> <td>String</td> <td></td> </tr> </tbody> </table> <strong>Return Value</strong><br /> <em>None</em> </div> <h4>Code example</h4> <p> Invoke the <code>validateInput</code> method. </p> <pre><code>$('#jqxValidator').jqxValidator('validateInput', '#passwordInput'); </code></pre> <div style="padding-bottom: 5px;"> <em>Try it:</em> <a target="_blank" href="http://jsfiddle.net/jqwidgets/hQwmm/">validates an input the jqxValidator.</a> </div> </div> </td> </tr> </table> <br /> </div> </div> </div> </body> </html>
{ "pile_set_name": "Github" }
/* Copyright (c) 2011, Open Knowledge Foundation Ltd. All rights reserved. HTTP Content-Type Autonegotiation. The functions in this package implement the behaviour specified in http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. Neither the name of the Open Knowledge Foundation Ltd. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package goautoneg import ( "sort" "strconv" "strings" ) // Structure to represent a clause in an HTTP Accept Header type Accept struct { Type, SubType string Q float64 Params map[string]string } // For internal use, so that we can use the sort interface type accept_slice []Accept func (accept accept_slice) Len() int { slice := []Accept(accept) return len(slice) } func (accept accept_slice) Less(i, j int) bool { slice := []Accept(accept) ai, aj := slice[i], slice[j] if ai.Q > aj.Q { return true } if ai.Type != "*" && aj.Type == "*" { return true } if ai.SubType != "*" && aj.SubType == "*" { return true } return false } func (accept accept_slice) Swap(i, j int) { slice := []Accept(accept) slice[i], slice[j] = slice[j], slice[i] } // Parse an Accept Header string returning a sorted list // of clauses func ParseAccept(header string) (accept []Accept) { parts := strings.Split(header, ",") accept = make([]Accept, 0, len(parts)) for _, part := range parts { part := strings.Trim(part, " ") a := Accept{} a.Params = make(map[string]string) a.Q = 1.0 mrp := strings.Split(part, ";") media_range := mrp[0] sp := strings.Split(media_range, "/") a.Type = strings.Trim(sp[0], " ") switch { case len(sp) == 1 && a.Type == "*": a.SubType = "*" case len(sp) == 2: a.SubType = strings.Trim(sp[1], " ") default: continue } if len(mrp) == 1 { accept = append(accept, a) continue } for _, param := range mrp[1:] { sp := strings.SplitN(param, "=", 2) if len(sp) != 2 { continue } token := strings.Trim(sp[0], " ") if token == "q" { a.Q, _ = strconv.ParseFloat(sp[1], 32) } else { a.Params[token] = strings.Trim(sp[1], " ") } } accept = append(accept, a) } slice := accept_slice(accept) sort.Sort(slice) return } // Negotiate the most appropriate content_type given the accept header // and a list of alternatives. 
func Negotiate(header string, alternatives []string) (content_type string) { asp := make([][]string, 0, len(alternatives)) for _, ctype := range alternatives { asp = append(asp, strings.SplitN(ctype, "/", 2)) } for _, clause := range ParseAccept(header) { for i, ctsp := range asp { if clause.Type == ctsp[0] && clause.SubType == ctsp[1] { content_type = alternatives[i] return } if clause.Type == ctsp[0] && clause.SubType == "*" { content_type = alternatives[i] return } if clause.Type == "*" && clause.SubType == "*" { content_type = alternatives[i] return } } } return }
{ "pile_set_name": "Github" }
using UnityEngine;

namespace HT.Framework
{
    /// <summary>
    /// Exception type used by the HTFramework; the message is prefixed
    /// with the name of the module that raised it.
    /// </summary>
    internal sealed class HTFrameworkException : UnityException
    {
        /// <summary>
        /// The framework module that raised this exception.
        /// </summary>
        public HTFrameworkModule Module;

        /// <summary>
        /// Creates a framework exception.
        /// </summary>
        /// <param name="module">module that raised the exception</param>
        /// <param name="message">exception message (prefixed with "[module]")</param>
        public HTFrameworkException(HTFrameworkModule module, string message)
            : base($"[{module}]{message}")
        {
            Module = module;
        }
    }
}
{ "pile_set_name": "Github" }
# Kbuild fragment for the AOA soundbus I2S module: builds snd-aoa-i2sbus
# from core, pcm and control objects when CONFIG_SND_AOA_SOUNDBUS_I2S is set.
obj-$(CONFIG_SND_AOA_SOUNDBUS_I2S) += snd-aoa-i2sbus.o
snd-aoa-i2sbus-objs := core.o pcm.o control.o
{ "pile_set_name": "Github" }
.a { error: (1px + 3em); }
{ "pile_set_name": "Github" }
{ "comment": "MyPaint brush file", "description": "", "group": "", "notes": "", "parent_brush_name": "classic/modelling2", "settings": { "anti_aliasing": { "base_value": 0.0, "inputs": {} }, "change_color_h": { "base_value": 0.0, "inputs": {} }, "change_color_hsl_s": { "base_value": 0.0, "inputs": {} }, "change_color_hsv_s": { "base_value": 0.0, "inputs": {} }, "change_color_l": { "base_value": 0.0, "inputs": {} }, "change_color_v": { "base_value": 0.0, "inputs": {} }, "color_h": { "base_value": 0.0, "inputs": {} }, "color_s": { "base_value": 0.0, "inputs": {} }, "color_v": { "base_value": 0.0, "inputs": {} }, "colorize": { "base_value": 0.0, "inputs": {} }, "custom_input": { "base_value": 0.0, "inputs": {} }, "custom_input_slowness": { "base_value": 0.0, "inputs": {} }, "dabs_per_actual_radius": { "base_value": 3.67, "inputs": {} }, "dabs_per_basic_radius": { "base_value": 0.0, "inputs": {} }, "dabs_per_second": { "base_value": 0.0, "inputs": {} }, "direction_filter": { "base_value": 2.0, "inputs": {} }, "elliptical_dab_angle": { "base_value": 90.0, "inputs": {} }, "elliptical_dab_ratio": { "base_value": 1.0, "inputs": {} }, "eraser": { "base_value": 0.0, "inputs": {} }, "hardness": { "base_value": 0.79, "inputs": { "pressure": [ [ 0.0, -0.25 ], [ 0.133929, -0.195312 ], [ 0.223214, -0.109375 ], [ 0.342262, -0.028646 ], [ 0.5, 0.0 ], [ 1.0, 0.0 ] ] } }, "lock_alpha": { "base_value": 0.0, "inputs": {} }, "offset_by_random": { "base_value": 0.0, "inputs": { "random": [ [ 0.0, 0.0 ], [ 0.164336, 0.0 ], [ 0.213287, 0.19 ], [ 0.265734, 0.0 ], [ 1.0, 0.0 ] ] } }, "offset_by_speed": { "base_value": 0.0, "inputs": {} }, "offset_by_speed_slowness": { "base_value": 1.0, "inputs": {} }, "opaque": { "base_value": 0.43, "inputs": {} }, "opaque_linearize": { "base_value": 0.0, "inputs": {} }, "opaque_multiply": { "base_value": 0.0, "inputs": { "pressure": [ [ 0.0, -0.78 ], [ 0.02381, 0.0 ], [ 0.061728, 0.2275 ], [ 0.732143, 0.706875 ], [ 1.0, 0.78 ] ], "speed1": [ [ 0.0, -0.31 
], [ 3.0, -0.2841666666666666 ] ], "speed2": [ [ 0.0, -0.43 ], [ 0.357143, -0.26875 ], [ 0.845238, -0.15229205405405402 ], [ 1.77381, -0.04031191891891896 ], [ 2.77381, 0.0 ], [ 4.0, 0.0 ] ] } }, "pressure_gain_log": { "base_value": 0.0, "inputs": {} }, "radius_by_random": { "base_value": 0.0, "inputs": {} }, "radius_logarithmic": { "base_value": 0.2, "inputs": { "pressure": [ [ 0.0, 0.0 ], [ 0.404321, -0.0875 ], [ 1.0, -0.42 ] ], "speed1": [ [ 0.0, -0.18571468085106385 ], [ 4.0, 1.3 ] ], "speed2": [ [ 0.0, 0.0 ], [ 1.0, 0.42 ] ], "viewzoom": [ [ -2.7699999809265137, 2.7699999172716274 ], [ 4.150000095367432, -4.15 ] ] } }, "restore_color": { "base_value": 0.0, "inputs": {} }, "slow_tracking": { "base_value": 0.0, "inputs": {} }, "slow_tracking_per_dab": { "base_value": 0.0, "inputs": {} }, "smudge": { "base_value": 0.53, "inputs": { "pressure": [ [ 0.0, -0.0 ], [ 0.299383, -0.0125 ], [ 0.5, -0.0375 ], [ 0.790123, -0.110417 ], [ 1.0, -0.2 ] ], "stroke": [ [ 0.0, -0.65 ], [ 0.132716, -0.561979 ], [ 0.358025, -0.473958 ], [ 0.648148, -0.24375 ], [ 0.87037, 0.067708 ], [ 1.0, 0.446875 ] ] } }, "smudge_length": { "base_value": 0.5, "inputs": {} }, "smudge_radius_log": { "base_value": 0.0, "inputs": {} }, "snap_to_pixel": { "base_value": 0.0, "inputs": {} }, "speed1_gamma": { "base_value": 4.63, "inputs": {} }, "speed1_slowness": { "base_value": 0.04, "inputs": {} }, "speed2_gamma": { "base_value": 0.98, "inputs": {} }, "speed2_slowness": { "base_value": 1.0, "inputs": {} }, "stroke_duration_logarithmic": { "base_value": 5.22, "inputs": {} }, "stroke_holdtime": { "base_value": 10.0, "inputs": {} }, "stroke_threshold": { "base_value": 0.0, "inputs": {} }, "tracking_noise": { "base_value": 0.0, "inputs": {} } }, "version": 3 }
{ "pile_set_name": "Github" }
/*------------------------------------------------------------------------------ * dumpssr.c : dump ssr messages in rtcm log * * 2010/06/10 new *-----------------------------------------------------------------------------*/ #include <stdio.h> #include "rtklib.h" /* print ssr messages --------------------------------------------------------*/ static void printhead(int topt, int mopt) { int i; printf("%% %s SAT ",topt?" DAY TIME ":" GPST "); if (mopt&1) { printf(" UDI IOD URA REF "); } if (mopt&2) { printf("%8s %8s %8s %8s %8s %8s ","DR","DA","DC","DDR","DDA","DDC"); } if (mopt&4) { printf("%8s %8s %8s %8s ","DCLK","DDCLK","DDDCLK","HRCLK"); } if (mopt&8) { for (i=0;i<12;i++) printf(" B%02d ",i+1); } printf("\n"); } /* print ssr messages --------------------------------------------------------*/ static void printssrmsg(int sat, const ssr_t *ssr, int topt, int mopt) { double tow; int week; char tstr[32],id[16]; if (topt) { time2str(ssr->t0,tstr,0); printf("%s ",tstr); } else { tow=time2gpst(ssr->t0,&week); printf("%4d %6.0f ",week,tow); } satno2id(sat,id); printf("%4s ",id); if (mopt&1) { printf("%4.0f %3d %3d %3d ",ssr->udint,ssr->iode,ssr->ura,ssr->refd); } if (mopt&2) { printf("%8.3f %8.3f %8.3f %8.3f %8.3f %8.3f ",ssr->deph[0],ssr->deph[1], ssr->deph[2],ssr->ddeph[0],ssr->ddeph[1],ssr->ddeph[2]); } if (mopt&4) { printf("%8.3f %8.3f %8.3f %8.3f ",ssr->dclk[0],ssr->dclk[1],ssr->dclk[2], ssr->hrclk); } if (mopt&8) { printf("%6.2f %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f ", ssr->cbias[0],ssr->cbias[1],ssr->cbias[2],ssr->cbias[3], ssr->cbias[4],ssr->cbias[5],ssr->cbias[6],ssr->cbias[7], ssr->cbias[8],ssr->cbias[9],ssr->cbias[10],ssr->cbias[11]); } printf("\n"); } /* dump ssr messages ---------------------------------------------------------*/ static void dumpssrmsg(FILE *fp, int sat, int topt, int mopt) { static rtcm_t rtcm; static gtime_t t0[MAXSAT]={{0}}; int i,stat; init_rtcm(&rtcm); while ((stat=input_rtcm3f(&rtcm,fp))>=0) { if (stat!=10) 
continue; /* ssr message */ for (i=0;i<MAXSAT;i++) { if (timediff(rtcm.ssr[i].t0,t0[i])==0.0) continue; t0[i]=rtcm.ssr[i].t0; if (!sat||i+1==sat) { printssrmsg(i+1,rtcm.ssr+i,topt,mopt); } } } } /* main ----------------------------------------------------------------------*/ int main(int argc, char **argv) { const char *usage="dumpssr [-t][-s sat][-i][-o][-c][-b][-h][-x tr] file"; FILE *fp; char *file=""; int i,sat=0,topt=0,mopt=0,trl=0; for (i=0;i<argc;i++) { if (!strcmp(argv[i],"-t")) topt =1; else if (!strcmp(argv[i],"-i")) mopt|=1; else if (!strcmp(argv[i],"-o")) mopt|=2; else if (!strcmp(argv[i],"-c")) mopt|=4; else if (!strcmp(argv[i],"-b")) mopt|=8; else if (!strcmp(argv[i],"-s")&&i+1<argc) sat=atoi(argv[++i]); else if (!strcmp(argv[i],"-x")&&i+1<argc) trl=atoi(argv[++i]); else if (!strcmp(argv[i],"-h")) { fprintf(stderr,"usage: %s\n",usage); return 0; } else file=argv[i]; } if (!mopt) mopt=0xFF; if (!(fp=fopen(file,"rb"))) { fprintf(stderr,"file open error: %s\n",file); return -1; } if (trl>0) { traceopen("dumpssr.trace"); tracelevel(trl); } printhead(topt,mopt); dumpssrmsg(fp,sat,topt,mopt); fclose(fp); traceclose(); return 0; }
{ "pile_set_name": "Github" }
/*
 Copyright (c) 2018-2020 Kevin McGill <kevin@mcgilldevtech.com>

 Permission is hereby granted, free of charge, to any person obtaining a copy of this
 software and associated documentation files (the "Software"), to deal in the Software
 without restriction, including without limitation the rights to use, copy, modify, merge,
 publish, distribute, sublicense, and/or sell copies of the Software, and to permit
 persons to whom the Software is furnished to do so, subject to the following conditions:

 The above copyright notice and this permission notice shall be included in all copies or
 substantial portions of the Software.

 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
 INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
 PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
 FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 DEALINGS IN THE SOFTWARE.
 */

/// A UITextField that uses an McPicker as its input view.
open class McTextField: UITextField {

    /// Handler invoked with the picker selections — presumably when the picker's
    /// Done button is tapped (invocation happens inside McPicker; confirm there).
    public var doneHandler: McPicker.DoneHandler = { _ in }
    /// Optional handler — presumably invoked when the picker is cancelled.
    public var cancelHandler: McPicker.CancelHandler?
    /// Optional handler — presumably invoked as the picker selection changes.
    public var selectionChangedHandler: McPicker.SelectionChangedHandler?
    /// Optional hook called with the current selections before editing begins
    /// (NOTE(review): caller/timing is in McPicker, not visible here — confirm).
    public var textFieldWillBeginEditingHandler: ((_ selections: [Int:String]) -> Void)?

    /// The picker used as this field's input view. Assigning it also makes the
    /// picker this text field's delegate (see didSet).
    public var inputViewMcPicker: McPicker? {
        didSet {
            self.delegate = inputViewMcPicker
        }
    }
}
{ "pile_set_name": "Github" }
package sqlite.kripton205;

import com.abubusoft.kripton.AbstractMapper;
import com.abubusoft.kripton.annotation.BindMap;
import com.abubusoft.kripton.common.PrimitiveUtils;
import com.abubusoft.kripton.escape.StringEscapeUtils;
import com.abubusoft.kripton.xml.EventType;
import com.abubusoft.kripton.xml.XMLParser;
import com.abubusoft.kripton.xml.XMLSerializer;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;

/**
 * This class is binder map for Bean205
 *
 * <p>NOTE(review): this looks like code emitted by the Kripton annotation
 * processor ({@code @BindMap}/{@code AbstractMapper}) — presumably regenerated
 * at build time, so fixes should go into the generator rather than be made by
 * hand here. TODO confirm.
 *
 * @see Bean205
 */
@BindMap(Bean205.class)
public class Bean205BindMap extends AbstractMapper<Bean205> {
  /**
   * Serializes {@code object} as a JSON object with native value types
   * ("id" as number; "name"/"surname" as strings, omitted when null).
   *
   * @return number of fields written (includes "id")
   */
  @Override
  public int serializeOnJackson(Bean205 object, JsonGenerator jacksonSerializer) throws Exception {
    jacksonSerializer.writeStartObject();
    int fieldCount=0;

    // Serialized Field:

    // field id (mapped with "id")
    fieldCount++;
    jacksonSerializer.writeNumberField("id", object.getId());

    // field name (mapped with "name")
    if (object.name!=null)  {
      fieldCount++;
      jacksonSerializer.writeStringField("name", object.name);
    }

    // field surname (mapped with "surname")
    if (object.surname!=null)  {
      fieldCount++;
      jacksonSerializer.writeStringField("surname", object.surname);
    }

    jacksonSerializer.writeEndObject();
    return fieldCount;
  }

  /**
   * Serializes {@code object} as a JSON object with every value rendered
   * as a string (the "id" long goes through PrimitiveUtils.writeLong).
   *
   * <p>NOTE(review): unlike serializeOnJackson, "id" is written here without
   * incrementing fieldCount, so the returned count excludes it — looks like a
   * generator quirk; confirm before relying on the return value.
   */
  @Override
  public int serializeOnJacksonAsString(Bean205 object, JsonGenerator jacksonSerializer) throws
      Exception {
    jacksonSerializer.writeStartObject();
    int fieldCount=0;

    // Serialized Field:

    // field id (mapped with "id")
    jacksonSerializer.writeStringField("id", PrimitiveUtils.writeLong(object.getId()));

    // field name (mapped with "name")
    if (object.name!=null)  {
      fieldCount++;
      jacksonSerializer.writeStringField("name", object.name);
    }

    // field surname (mapped with "surname")
    if (object.surname!=null)  {
      fieldCount++;
      jacksonSerializer.writeStringField("surname", object.surname);
    }

    jacksonSerializer.writeEndObject();
    return fieldCount;
  }

  /**
   * method for xml serialization
   *
   * <p>Writes a {@code <bean205>} root element only when starting a new
   * document; nested use writes just the field elements.
   */
  @Override
  public void serializeOnXml(Bean205 object, XMLSerializer xmlSerializer,
      EventType currentEventType) throws Exception {
    if (currentEventType == EventType.START_DOCUMENT) {
      xmlSerializer.writeStartElement("bean205");
    }

    // Persisted fields:

    // field id (mapped with "id")
    xmlSerializer.writeStartElement("id");
    xmlSerializer.writeLong(object.getId());
    xmlSerializer.writeEndElement();

    // field name (mapped with "name")
    if (object.name!=null) {
      xmlSerializer.writeStartElement("name");
      xmlSerializer.writeCharacters(StringEscapeUtils.escapeXml10(object.name));
      xmlSerializer.writeEndElement();
    }

    // field surname (mapped with "surname")
    if (object.surname!=null) {
      xmlSerializer.writeStartElement("surname");
      xmlSerializer.writeCharacters(StringEscapeUtils.escapeXml10(object.surname));
      xmlSerializer.writeEndElement();
    }

    if (currentEventType == EventType.START_DOCUMENT) {
      xmlSerializer.writeEndElement();
    }
  }

  /**
   * parse with jackson
   *
   * <p>Unknown fields are skipped; a non-object token yields a default
   * instance rather than an error.
   */
  @Override
  public Bean205 parseOnJackson(JsonParser jacksonParser) throws Exception {
    Bean205 instance = new Bean205();
    String fieldName;
    if (jacksonParser.currentToken() == null) {
      jacksonParser.nextToken();
    }
    if (jacksonParser.currentToken() != JsonToken.START_OBJECT) {
      jacksonParser.skipChildren();
      return instance;
    }
    while (jacksonParser.nextToken() != JsonToken.END_OBJECT) {
      fieldName = jacksonParser.getCurrentName();
      jacksonParser.nextToken();

      // Parse fields:
      switch (fieldName) {
          case "id":
            // field id (mapped with "id")
            instance.setId(jacksonParser.getLongValue());
          break;
          case "name":
            // field name (mapped with "name")
            if (jacksonParser.currentToken()!=JsonToken.VALUE_NULL) {
              instance.name=jacksonParser.getText();
            }
          break;
          case "surname":
            // field surname (mapped with "surname")
            if (jacksonParser.currentToken()!=JsonToken.VALUE_NULL) {
              instance.surname=jacksonParser.getText();
            }
          break;
          default:
            jacksonParser.skipChildren();
          break;}
    }
    return instance;
  }

  /**
   * parse with jackson
   *
   * <p>String-typed variant: "id" is parsed from its string form via
   * PrimitiveUtils.readLong with 0L as the fallback default.
   */
  @Override
  public Bean205 parseOnJacksonAsString(JsonParser jacksonParser) throws Exception {
    Bean205 instance = new Bean205();
    String fieldName;
    if (jacksonParser.getCurrentToken() == null) {
      jacksonParser.nextToken();
    }
    if (jacksonParser.getCurrentToken() != JsonToken.START_OBJECT) {
      jacksonParser.skipChildren();
      return instance;
    }
    while (jacksonParser.nextToken() != JsonToken.END_OBJECT) {
      fieldName = jacksonParser.getCurrentName();
      jacksonParser.nextToken();

      // Parse fields:
      switch (fieldName) {
          case "id":
            // field id (mapped with "id")
            instance.setId(PrimitiveUtils.readLong(jacksonParser.getText(), 0L));
          break;
          case "name":
            // field name (mapped with "name")
            if (jacksonParser.currentToken()!=JsonToken.VALUE_NULL) {
              instance.name=jacksonParser.getText();
            }
          break;
          case "surname":
            // field surname (mapped with "surname")
            if (jacksonParser.currentToken()!=JsonToken.VALUE_NULL) {
              instance.surname=jacksonParser.getText();
            }
          break;
          default:
            jacksonParser.skipChildren();
          break;}
    }
    return instance;
  }

  /**
   * parse xml
   *
   * <p>Reads sub-elements of the current element until its matching END_TAG;
   * unknown elements are skipped, text/CDATA outside fields is ignored.
   */
  @Override
  public Bean205 parseOnXml(XMLParser xmlParser, EventType currentEventType) throws Exception {
    Bean205 instance = new Bean205();
    EventType eventType = currentEventType;
    boolean read=true;

    if (currentEventType == EventType.START_DOCUMENT) {
      eventType = xmlParser.next();
    } else {
      eventType = xmlParser.getEventType();
    }
    String currentTag = xmlParser.getName().toString();
    String elementName = currentTag;

    // No attributes found

    //sub-elements
    while (xmlParser.hasNext() && elementName!=null) {
      if (read) {
        eventType = xmlParser.next();
      } else {
        eventType = xmlParser.getEventType();
      }
      read=true;
      switch(eventType) {
          case START_TAG:
            currentTag = xmlParser.getName().toString();
            switch(currentTag) {
                case "id":
                  // property id (mapped on "id")
                  instance.setId(PrimitiveUtils.readLong(xmlParser.getElementAsLong(), 0L));
                break;
                case "name":
                  // property name (mapped on "name")
                  instance.name=StringEscapeUtils.unescapeXml(xmlParser.getElementText());
                break;
                case "surname":
                  // property surname (mapped on "surname")
                  instance.surname=StringEscapeUtils.unescapeXml(xmlParser.getElementText());
                break;
                default:
                  xmlParser.skipChildren();
                break;
              }
          break;
          case END_TAG:
            // closing tag of the element we started in: stop the loop
            if (elementName.equals(xmlParser.getName())) {
              currentTag = elementName;
              elementName = null;
            }
          break;
          case CDSECT:
          case TEXT:
            // no property is binded to VALUE o CDATA
          break;
          default:
          break;
      }
    }
    return instance;
  }
}
{ "pile_set_name": "Github" }
.transaction {
  // Small round status indicator rendered next to a transaction entry.
  &__dot {
    display: inline-block;
    height: 10px;
    width: 10px;
    border-radius: 50%;
    margin-left: 5px;
    vertical-align: baseline;

    // Status modifiers: color the dot by transaction state.
    &--pending {
      background-color: $gray-500;
    }

    &--success {
      background-color: $success;
    }

    &--failed {
      background-color: $danger;
    }

    &--out_of_gas {
      background-color: $warning;
    }
  }
}
{ "pile_set_name": "Github" }
package org.n3r.eql.codedesc;

/**
 * Maps a raw code value to its human-readable description.
 *
 * <p>Single abstract method, so it can be implemented with a lambda or
 * method reference; annotated to let the compiler enforce that shape.
 */
@FunctionalInterface
public interface CodeDescMapper {
    /**
     * Returns the description for the given code.
     *
     * @param code raw code value to translate
     * @return the mapped description for {@code code}
     */
    String map(String code);
}
{ "pile_set_name": "Github" }
# 'boxed' can only be used with named types { 'command': 'foo', 'boxed': true, 'data': { 'string': 'str' } }
{ "pile_set_name": "Github" }
# Fuzzing pool configuration.
cloud: gcp
disk_size: 50g
cycle_time: 1h
max_run_time: 1h
cores_per_task: 10
metal: false            # false: virtualized instances are acceptable
name: Test Pool
tasks: 2                # number of parallel tasks in the pool
# Command run inside the container for each task.
command:
  - cmd1
  - arg1
container: MozillaSecurity/fuzzer:latest
minimum_memory_per_core: 1g
imageset: generic-worker-A
cpu: x64
platform: linux
preprocess: ""          # intentionally empty: no preprocess stage
# Environment macros exported to the task.
macros:
  ENVVAR1: "123456"
  ENVVAR2: 789abc
{ "pile_set_name": "Github" }
(* Modified by TrustInSoft *)

(**************************************************************************)
(*                                                                        *)
(*  This file is part of Frama-C.                                         *)
(*                                                                        *)
(*  Copyright (C) 2007-2015                                               *)
(*    CEA (Commissariat à l'énergie atomique et aux énergies              *)
(*         alternatives)                                                  *)
(*                                                                        *)
(*  you can redistribute it and/or modify it under the terms of the GNU   *)
(*  Lesser General Public License as published by the Free Software       *)
(*  Foundation, version 2.1.                                              *)
(*                                                                        *)
(*  It is distributed in the hope that it will be useful,                 *)
(*  but WITHOUT ANY WARRANTY; without even the implied warranty of        *)
(*  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the         *)
(*  GNU Lesser General Public License for more details.                   *)
(*                                                                        *)
(*  See the GNU Lesser General Public License version 2.1                 *)
(*  for more details (enclosed in the file licenses/LGPLv2.1).            *)
(*                                                                        *)
(**************************************************************************)

(* Command-line parameters of the spare-code plugin, built on the generic
   plugin parameter machinery. *)
include Plugin.S

module Analysis: Parameter_sig.Bool
(** Whether to perform spare code detection or not. *)

module Annot : Parameter_sig.Bool
(** Keep more of the code so that all reachable annotations are preserved. *)

module GlobDecl : Parameter_sig.Bool
(** Remove unused global types and variables. *)

(*
Local Variables:
compile-command: "make -C ../../.."
End:
*)
{ "pile_set_name": "Github" }
/*
 * (C) Copyright 2014
 *
 * SPDX-License-Identifier: GPL-2.0+
 */

#ifndef _FIT_COMMON_H_
#define _FIT_COMMON_H_

#include "imagetool.h"
#include "mkimage.h"
#include <image.h>

/**
 * Verify that the buffer at @ptr holds a valid FIT image header.
 * (Presumably returns 0 on success, non-zero on failure, matching the other
 * imagetool verify callbacks — confirm against the callers in imagetool.)
 *
 * @ptr: Pointer to the image data
 * @image_size: Size of the image data in bytes
 * @params: Tool parameters (for error reporting)
 */
int fit_verify_header(unsigned char *ptr, int image_size,
		struct image_tool_params *params);

/**
 * Check whether @type is an image type handled by FIT.
 * (Return convention presumably matches the check_image_type callback of
 * struct image_type_params — confirm in imagetool.h.)
 */
int fit_check_image_types(uint8_t type);

/**
 * Map an FDT into memory, optionally increasing its size
 *
 * @cmdname:	Tool name (for displaying with error messages)
 * @fname:	Filename containing FDT
 * @size_inc:	Amount to increase size by (0 = leave it alone)
 * @blobp:	Returns pointer to FDT blob
 * @sbuf:	File status information is stored here
 * @delete_on_error:	true to delete the file if we get an error
 * @return 0 if OK, -1 on error.
 */
int mmap_fdt(const char *cmdname, const char *fname, size_t size_inc,
	     void **blobp, struct stat *sbuf, bool delete_on_error);

#endif /* _FIT_COMMON_H_ */
{ "pile_set_name": "Github" }