repo_name
stringlengths
6
101
path
stringlengths
4
300
text
stringlengths
7
1.31M
creativeprogramming/chromium
chrome/browser/extensions/extension_message_service.cc
<filename>chrome/browser/extensions/extension_message_service.cc<gh_stars>1-10 // Copyright (c) 2009 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/extensions/extension_message_service.h" #include "base/json_writer.h" #include "base/singleton.h" #include "base/stl_util-inl.h" #include "base/values.h" #include "chrome/browser/child_process_security_policy.h" #include "chrome/browser/chrome_thread.h" #include "chrome/browser/extensions/extension_tabs_module.h" #include "chrome/browser/renderer_host/render_process_host.h" #include "chrome/browser/renderer_host/render_view_host.h" #include "chrome/browser/renderer_host/resource_message_filter.h" #include "chrome/browser/tab_contents/tab_contents.h" #include "chrome/browser/tab_contents/tab_util.h" #include "chrome/common/extensions/extension.h" #include "chrome/common/notification_service.h" #include "chrome/common/render_messages.h" // Since we have 2 ports for every channel, we just index channels by half the // port ID. #define GET_CHANNEL_ID(port_id) ((port_id) / 2) #define GET_CHANNEL_OPENER_ID(channel_id) ((channel_id) * 2) #define GET_CHANNEL_RECEIVERS_ID(channel_id) ((channel_id) * 2 + 1) // Port1 is always even, port2 is always odd. #define IS_OPENER_PORT_ID(port_id) (((port_id) & 1) == 0) // Change even to odd and vice versa, to get the other side of a given channel. 
#define GET_OPPOSITE_PORT_ID(source_port_id) ((source_port_id) ^ 1) namespace { typedef std::map<URLRequestContext*, ExtensionMessageService*> InstanceMap; struct SingletonData { ~SingletonData() { STLDeleteContainerPairSecondPointers(map.begin(), map.end()); } Lock lock; InstanceMap map; }; static void DispatchOnConnect(IPC::Message::Sender* channel, int source_port_id, const std::string& tab_json, const std::string& extension_id) { ListValue args; args.Set(0, Value::CreateIntegerValue(source_port_id)); args.Set(1, Value::CreateStringValue(tab_json)); args.Set(2, Value::CreateStringValue(extension_id)); channel->Send(new ViewMsg_ExtensionMessageInvoke( ExtensionMessageService::kDispatchOnConnect, args)); } static void DispatchOnDisconnect(IPC::Message::Sender* channel, int source_port_id) { ListValue args; args.Set(0, Value::CreateIntegerValue(source_port_id)); channel->Send(new ViewMsg_ExtensionMessageInvoke( ExtensionMessageService::kDispatchOnDisconnect, args)); } static void DispatchOnMessage(IPC::Message::Sender* channel, const std::string& message, int source_port_id) { ListValue args; args.Set(0, Value::CreateStringValue(message)); args.Set(1, Value::CreateIntegerValue(source_port_id)); channel->Send(new ViewMsg_ExtensionMessageInvoke( ExtensionMessageService::kDispatchOnMessage, args)); } static void DispatchEvent(IPC::Message::Sender* channel, const std::string& event_name, const std::string& event_args) { ListValue args; args.Set(0, Value::CreateStringValue(event_name)); args.Set(1, Value::CreateStringValue(event_args)); channel->Send(new ViewMsg_ExtensionMessageInvoke( ExtensionMessageService::kDispatchEvent, args)); } static std::string GetChannelConnectEvent(const std::string& extension_id) { return StringPrintf("channel-connect:%s", extension_id.c_str()); } } // namespace // Since ExtensionMessageService is a collection of Singletons, we don't need to // grab a reference to it when creating Tasks involving it. 
template <> struct RunnableMethodTraits<ExtensionMessageService> {
  static void RetainCallee(ExtensionMessageService*) {}
  static void ReleaseCallee(ExtensionMessageService*) {}
};

const char ExtensionMessageService::kDispatchOnConnect[] =
    "Port.dispatchOnConnect";
const char ExtensionMessageService::kDispatchOnDisconnect[] =
    "Port.dispatchOnDisconnect";
const char ExtensionMessageService::kDispatchOnMessage[] =
    "Port.dispatchOnMessage";
const char ExtensionMessageService::kDispatchEvent[] = "Event.dispatchJSON";

// static
// Lazily creates the per-context service instance. Thread-safe: the
// SingletonData lock guards the context->service map.
ExtensionMessageService* ExtensionMessageService::GetInstance(
    URLRequestContext* context) {
  SingletonData* data = Singleton<SingletonData>::get();
  AutoLock lock(data->lock);
  ExtensionMessageService* instance = data->map[context];
  if (!instance) {
    instance = new ExtensionMessageService();
    data->map[context] = instance;
  }
  return instance;
}

ExtensionMessageService::ExtensionMessageService()
    : ui_loop_(NULL),
      initialized_(false),
      next_port_id_(0) {
}

// Must be called on the UI thread before channels are opened; captures the
// UI message loop and starts watching for renderer-process teardown so that
// channels/listeners can be cleaned up in Observe().
void ExtensionMessageService::Init() {
  DCHECK_EQ(MessageLoop::current()->type(), MessageLoop::TYPE_UI);

  if (initialized_)
    return;
  initialized_ = true;

  ui_loop_ = MessageLoop::current();

  registrar_.Add(this, NotificationType::RENDERER_PROCESS_TERMINATED,
                 NotificationService::AllSources());
  registrar_.Add(this, NotificationType::RENDERER_PROCESS_CLOSED,
                 NotificationService::AllSources());
}

// Registers |render_process_id| as a listener for |event_name|. UI thread
// only. The DCHECK documents that double-registration is a caller bug.
// NOTE(review): |event_name| is passed by value; a const reference would
// avoid a string copy — confirm against the header before changing.
void ExtensionMessageService::AddEventListener(std::string event_name,
                                               int render_process_id) {
  DCHECK_EQ(MessageLoop::current()->type(), MessageLoop::TYPE_UI);
  DCHECK(listeners_[event_name].count(render_process_id) == 0);
  listeners_[event_name].insert(render_process_id);
}

// Inverse of AddEventListener; removing an unregistered listener is a bug.
void ExtensionMessageService::RemoveEventListener(std::string event_name,
                                                  int render_process_id) {
  DCHECK_EQ(MessageLoop::current()->type(), MessageLoop::TYPE_UI);
  DCHECK(listeners_[event_name].count(render_process_id) == 1);
  listeners_[event_name].erase(render_process_id);
}

// Hands out two consecutive port ids (even opener / odd receiver) that
// together form one channel. Lock-protected so it may be called from any
// thread (IO thread calls it in OpenChannelToExtension).
void ExtensionMessageService::AllocatePortIdPair(int* port1, int* port2) {
  AutoLock lock(next_port_id_lock_);

  // TODO(mpcomplete): what happens when this wraps?
  int port1_id = next_port_id_++;
  int port2_id = next_port_id_++;

  DCHECK(IS_OPENER_PORT_ID(port1_id));
  DCHECK(GET_OPPOSITE_PORT_ID(port1_id) == port2_id);
  DCHECK(GET_OPPOSITE_PORT_ID(port2_id) == port1_id);
  DCHECK(GET_CHANNEL_ID(port1_id) == GET_CHANNEL_ID(port2_id));

  int channel_id = GET_CHANNEL_ID(port1_id);
  DCHECK(GET_CHANNEL_OPENER_ID(channel_id) == port1_id);
  DCHECK(GET_CHANNEL_RECEIVERS_ID(channel_id) == port2_id);

  *port1 = port1_id;
  *port2 = port2_id;
}

// IO-thread entry point: allocates the port pair synchronously (so the
// caller's port id can be returned immediately) and bounces the actual
// channel setup to the UI thread.
int ExtensionMessageService::OpenChannelToExtension(
    int routing_id, const std::string& extension_id,
    ResourceMessageFilter* source) {
  DCHECK_EQ(MessageLoop::current(),
            ChromeThread::GetMessageLoop(ChromeThread::IO));
  DCHECK(initialized_);

  // Create a channel ID for both sides of the channel.
  int port1_id = -1;
  int port2_id = -1;
  AllocatePortIdPair(&port1_id, &port2_id);

  ui_loop_->PostTask(FROM_HERE,
      NewRunnableMethod(this,
          &ExtensionMessageService::OpenChannelOnUIThread,
          routing_id, port1_id, source->GetProcessId(), extension_id));

  return port2_id;
}

// UI-thread continuation of OpenChannelToExtension: resolves the process id
// back to a RenderProcessHost (which may have died in the meantime).
void ExtensionMessageService::OpenChannelOnUIThread(
    int source_routing_id, int source_port_id, int source_process_id,
    const std::string& extension_id) {
  RenderProcessHost* source = RenderProcessHost::FromID(source_process_id);
  OpenChannelOnUIThreadImpl(source_routing_id, source_port_id,
                            source_process_id, source, extension_id);
}

// Creates the channel record, collects all renderers listening for this
// extension's connect event as receivers, and broadcasts onConnect to them.
// |source| may be any IPC sender (renderer or automation).
void ExtensionMessageService::OpenChannelOnUIThreadImpl(
    int source_routing_id, int source_port_id, int source_process_id,
    IPC::Message::Sender* source, const std::string& extension_id) {
  DCHECK_EQ(MessageLoop::current()->type(), MessageLoop::TYPE_UI);

  if (!source)
    return;  // Source closed while task was in flight.

  linked_ptr<MessageChannel> channel(new MessageChannel);
  channel->opener.insert(source);

  // Get the list of processes that are listening for this extension's channel
  // connect event.
  std::string event_name = GetChannelConnectEvent(extension_id);
  std::set<int>& pids = listeners_[event_name];
  for (std::set<int>::iterator pid = pids.begin(); pid != pids.end(); ++pid) {
    RenderProcessHost* renderer = RenderProcessHost::FromID(*pid);
    if (!renderer)
      continue;
    channel->receivers.insert(renderer);
  }
  if (channel->receivers.empty()) {
    // Either no one is listening, or all listeners have since closed.
    // TODO(mpcomplete): should we notify the source?
    return;
  }

  channels_[GET_CHANNEL_ID(source_port_id)] = channel;

  // Include info about the opener's tab (if it was a tab).
  std::string tab_json = "null";
  TabContents* contents = tab_util::GetTabContentsByID(source_process_id,
                                                       source_routing_id);
  if (contents) {
    DictionaryValue* tab_value = ExtensionTabUtil::CreateTabValue(contents);
    JSONWriter::Write(tab_value, false, &tab_json);
  }

  // Broadcast the connect event to the receivers.  Give them the opener's
  // port ID (the opener has the opposite port ID).
  for (MessageChannel::Ports::iterator it = channel->receivers.begin();
       it != channel->receivers.end(); ++it) {
    DispatchOnConnect(*it, source_port_id, tab_json, extension_id);
  }
}

// Like OpenChannelToExtension, but for automation clients. Runs entirely on
// the UI thread, so no thread hop is needed before the Impl call.
int ExtensionMessageService::OpenAutomationChannelToExtension(
    int source_process_id, int routing_id, const std::string& extension_id,
    IPC::Message::Sender* source) {
  DCHECK_EQ(MessageLoop::current()->type(), MessageLoop::TYPE_UI);
  DCHECK(initialized_);

  int port1_id = -1;
  int port2_id = -1;
  // Create a channel ID for both sides of the channel.
  AllocatePortIdPair(&port1_id, &port2_id);

  // TODO(siggi): The source process- and routing ids are used to
  //    describe the originating tab to the target extension.
  //    This isn't really appropriate here, the originating tab
  //    information should be supplied by the caller for
  //    automation-initiated ports.
  OpenChannelOnUIThreadImpl(routing_id, port1_id, source_process_id,
                            source, extension_id);

  return port2_id;
}

// Closes the channel containing |port_id|, if it still exists (the other
// side may already have torn it down). UI thread only.
void ExtensionMessageService::CloseChannel(int port_id) {
  DCHECK_EQ(MessageLoop::current()->type(), MessageLoop::TYPE_UI);

  // Note: The channel might be gone already, if the other side closed first.
  MessageChannelMap::iterator it = channels_.find(GET_CHANNEL_ID(port_id));
  if (it != channels_.end())
    CloseChannelImpl(it, port_id);
}

// Notifies the ports on the *other* side of the channel that their peer
// closed, then erases the channel. |channel_iter| is invalidated.
void ExtensionMessageService::CloseChannelImpl(
    MessageChannelMap::iterator channel_iter, int closing_port_id) {
  DCHECK_EQ(MessageLoop::current()->type(), MessageLoop::TYPE_UI);

  // Notify the other side.
  MessageChannel::Ports* ports = IS_OPENER_PORT_ID(closing_port_id) ?
      &channel_iter->second->receivers : &channel_iter->second->opener;

  for (MessageChannel::Ports::iterator it = ports->begin();
       it != ports->end(); ++it) {
    DispatchOnDisconnect(*it, GET_OPPOSITE_PORT_ID(closing_port_id));
  }
  channels_.erase(channel_iter);
}

// Routes a message posted from a renderer to every port on the destination
// side of the channel, tagging it with the opposite (source) port id.
void ExtensionMessageService::PostMessageFromRenderer(
    int dest_port_id, const std::string& message) {
  DCHECK_EQ(MessageLoop::current()->type(), MessageLoop::TYPE_UI);

  MessageChannelMap::iterator iter =
      channels_.find(GET_CHANNEL_ID(dest_port_id));
  if (iter == channels_.end())
    return;

  // Figure out which port the ID corresponds to.
  MessageChannel::Ports* ports = IS_OPENER_PORT_ID(dest_port_id) ?
      &iter->second->opener : &iter->second->receivers;
  int source_port_id = GET_OPPOSITE_PORT_ID(dest_port_id);

  for (MessageChannel::Ports::iterator it = ports->begin();
       it != ports->end(); ++it) {
    DispatchOnMessage(*it, message, source_port_id);
  }
}

// Broadcasts a browser-side event to every registered (and still-alive,
// extension-privileged) listener process.
void ExtensionMessageService::DispatchEventToRenderers(
    const std::string& event_name, const std::string& event_args) {
  DCHECK_EQ(MessageLoop::current()->type(), MessageLoop::TYPE_UI);

  std::set<int>& pids = listeners_[event_name];

  // Send the event only to renderers that are listening for it.
  for (std::set<int>::iterator pid = pids.begin(); pid != pids.end(); ++pid) {
    RenderProcessHost* renderer = RenderProcessHost::FromID(*pid);
    if (!renderer)
      continue;
    if (!ChildProcessSecurityPolicy::GetInstance()->
            HasExtensionBindings(*pid)) {
      // Don't send browser-level events to unprivileged processes.
      continue;
    }

    DispatchEvent(renderer, event_name, event_args);
  }
}

// Renderer-process teardown: closes every channel the dead renderer was
// party to (notifying the surviving side) and scrubs it from the listener
// maps. Iterators are advanced before erasure to stay valid.
void ExtensionMessageService::Observe(NotificationType type,
                                      const NotificationSource& source,
                                      const NotificationDetails& details) {
  DCHECK_EQ(MessageLoop::current()->type(), MessageLoop::TYPE_UI);
  DCHECK(type.value == NotificationType::RENDERER_PROCESS_TERMINATED ||
         type.value == NotificationType::RENDERER_PROCESS_CLOSED);

  RenderProcessHost* renderer = Source<RenderProcessHost>(source).ptr();

  // Close any channels that share this renderer.  We notify the opposite
  // port that his pair has closed.
  for (MessageChannelMap::iterator it = channels_.begin();
       it != channels_.end(); ) {
    MessageChannelMap::iterator current = it++;
    if (current->second->opener.count(renderer) > 0) {
      CloseChannelImpl(current, GET_CHANNEL_OPENER_ID(current->first));
    } else if (current->second->receivers.count(renderer) > 0) {
      CloseChannelImpl(current, GET_CHANNEL_RECEIVERS_ID(current->first));
    }
  }

  // Remove this renderer from our listener maps.
  for (ListenerMap::iterator it = listeners_.begin();
       it != listeners_.end(); ) {
    ListenerMap::iterator current = it++;
    current->second.erase(renderer->pid());
    if (current->second.empty())
      listeners_.erase(current);
  }
}
ThanhNIT/LeetCode-in-Java
src/test/java/g2201_2300/s2269_find_the_k_beauty_of_a_number/SolutionTest.java
package g2201_2300.s2269_find_the_k_beauty_of_a_number;

import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;

import org.junit.jupiter.api.Test;

// Unit tests for Solution.divisorSubstrings (LeetCode 2269,
// "Find the K-Beauty of a Number").
class SolutionTest {
    @Test
    void divisorSubstrings() {
        int beauty = new Solution().divisorSubstrings(240, 2);
        assertThat(beauty, equalTo(2));
    }

    @Test
    void divisorSubstrings2() {
        int beauty = new Solution().divisorSubstrings(430043, 2);
        assertThat(beauty, equalTo(2));
    }
}
roikku/swift-explorer
src/main/java/org/swiftexplorer/gui/localization/LocalizedStringsImpl.java
/* * Copyright 2014 <NAME> * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.swiftexplorer.gui.localization; import org.swiftexplorer.gui.MainPanel; import java.io.UnsupportedEncodingException; import java.net.URL; import java.net.URLClassLoader; import java.util.Locale; import java.util.MissingResourceException; import java.util.ResourceBundle; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class LocalizedStringsImpl implements HasLocalizedStrings { final Logger logger = LoggerFactory.getLogger(LocalizedStringsImpl.class); private final ResourceBundle stringsBundle ; public LocalizedStringsImpl(Locale locale) { super(); stringsBundle = getStringsBundle (locale) ; } private static ResourceBundle getStringsBundle(Locale locale) { URL[] urls = new URL[]{MainPanel.class.getResource("/strings/")}; ClassLoader loader = new URLClassLoader(urls); return ResourceBundle.getBundle("StringsBundle", locale, loader); } @Override public String getLocalizedString(String key) { if (stringsBundle == null) { return key.replace("_", " ") ; } if (key == null) return null ; String ret = key ; try { ret = stringsBundle.getString(key) ; } catch (MissingResourceException e) { logger.error("Missing string in the StringsBundle: " + key, e); } // http://stackoverflow.com/questions/4659929/how-to-use-utf-8-in-resource-properties-with-resourcebundle if (ret != null) { try { return new String(ret.getBytes("ISO-8859-1"), "UTF-8"); } catch (UnsupportedEncodingException e) { 
logger.error("Error occurred while converting localized string encoding.", e); } } return ret ; } }
kishorkunal-raj/jung
jung-visualization/src/main/java/edu/uci/ics/jung/visualization/decorators/PickableNodePaintFunction.java
<filename>jung-visualization/src/main/java/edu/uci/ics/jung/visualization/decorators/PickableNodePaintFunction.java /* * Created on Mar 10, 2005 * * Copyright (c) 2005, The JUNG Authors * * All rights reserved. * * This software is open-source under the BSD license; see either * "license.txt" or * https://github.com/jrtom/jung/blob/master/LICENSE for a description. */ package edu.uci.ics.jung.visualization.decorators; import com.google.common.base.Preconditions; import edu.uci.ics.jung.visualization.picking.PickedInfo; import java.awt.Paint; import java.util.function.Function; /** * Paints each node according to the <code>Paint</code> parameters given in the constructor, so that * picked and non-picked nodes can be made to look different. */ public class PickableNodePaintFunction<N> implements Function<N, Paint> { protected Paint fill_paint; protected Paint picked_paint; protected PickedInfo<N> pi; /** * @param pi specifies which nodes report as "picked" * @param fill_paint <code>Paint</code> used to fill node shapes * @param picked_paint <code>Paint</code> used to fill picked node shapes */ public PickableNodePaintFunction(PickedInfo<N> pi, Paint fill_paint, Paint picked_paint) { this.pi = Preconditions.checkNotNull(pi); this.fill_paint = Preconditions.checkNotNull(fill_paint); this.picked_paint = Preconditions.checkNotNull(picked_paint); } public Paint apply(N v) { return pi.isPicked(v) ? picked_paint : fill_paint; } }
suluner/tencentcloud-sdk-cpp
cpdp/include/tencentcloud/cpdp/v20190820/model/QueryOpenBankPaymentOrderRequest.h
/*
 * Copyright (c) 2017-2019 THL A29 Limited, a Tencent company. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef TENCENTCLOUD_CPDP_V20190820_MODEL_QUERYOPENBANKPAYMENTORDERREQUEST_H_
#define TENCENTCLOUD_CPDP_V20190820_MODEL_QUERYOPENBANKPAYMENTORDERREQUEST_H_

#include <string>
#include <vector>
#include <map>
#include <tencentcloud/core/AbstractModel.h>


namespace TencentCloud
{
    namespace Cpdp
    {
        namespace V20190820
        {
            namespace Model
            {
                /**
                * QueryOpenBankPaymentOrder request structure.
                */
                class QueryOpenBankPaymentOrderRequest : public AbstractModel
                {
                public:
                    QueryOpenBankPaymentOrderRequest();
                    ~QueryOpenBankPaymentOrderRequest() = default;
                    std::string ToJsonString() const;


                    /**
                     * Get the channel merchant ID, issued when the external platform is onboarded to the cloud enterprise payment platform.
                     * @return ChannelMerchantId Channel merchant ID.
                     */
                    std::string GetChannelMerchantId() const;

                    /**
                     * Set the channel merchant ID, issued when the external platform is onboarded to the cloud enterprise payment platform.
                     * @param ChannelMerchantId Channel merchant ID.
                     */
                    void SetChannelMerchantId(const std::string& _channelMerchantId);

                    /**
                     * Check whether the ChannelMerchantId parameter has been set.
                     * @return ChannelMerchantId true if set
                     */
                    bool ChannelMerchantIdHasBeenSet() const;

                    /**
                     * Get the external merchant order ID. Either this or ChannelOrderId must be provided.
                     * @return OutOrderId External merchant order ID.
                     */
                    std::string GetOutOrderId() const;

                    /**
                     * Set the external merchant order ID. Either this or ChannelOrderId must be provided.
                     * @param OutOrderId External merchant order ID.
                     */
                    void SetOutOrderId(const std::string& _outOrderId);

                    /**
                     * Check whether the OutOrderId parameter has been set.
                     * @return OutOrderId true if set
                     */
                    bool OutOrderIdHasBeenSet() const;

                    /**
                     * Get the cloud platform order ID. Either this or OutOrderId must be provided.
                     * @return ChannelOrderId Cloud platform order ID.
                     */
                    std::string GetChannelOrderId() const;

                    /**
                     * Set the cloud platform order ID. Either this or OutOrderId must be provided.
                     * @param ChannelOrderId Cloud platform order ID.
                     */
                    void SetChannelOrderId(const std::string& _channelOrderId);

                    /**
                     * Check whether the ChannelOrderId parameter has been set.
                     * @return ChannelOrderId true if set
                     */
                    bool ChannelOrderIdHasBeenSet() const;

                    /**
                     * Get the access environment. Use "sandbox" for the sandbox environment; defaults to production if unset.
                     * @return Environment Access environment.
                     */
                    std::string GetEnvironment() const;

                    /**
                     * Set the access environment. Use "sandbox" for the sandbox environment; defaults to production if unset.
                     * @param Environment Access environment.
                     */
                    void SetEnvironment(const std::string& _environment);

                    /**
                     * Check whether the Environment parameter has been set.
                     * @return Environment true if set
                     */
                    bool EnvironmentHasBeenSet() const;

                private:

                    /**
                     * Channel merchant ID, issued when the external platform is onboarded to the cloud enterprise payment platform.
                     */
                    std::string m_channelMerchantId;
                    bool m_channelMerchantIdHasBeenSet;

                    /**
                     * External merchant order ID. Either this or ChannelOrderId must be provided.
                     */
                    std::string m_outOrderId;
                    bool m_outOrderIdHasBeenSet;

                    /**
                     * Cloud platform order ID. Either this or OutOrderId must be provided.
                     */
                    std::string m_channelOrderId;
                    bool m_channelOrderIdHasBeenSet;

                    /**
                     * Access environment. Use "sandbox" for the sandbox environment; defaults to production if unset.
                     */
                    std::string m_environment;
                    bool m_environmentHasBeenSet;

                };
            }
        }
    }
}
#endif // !TENCENTCLOUD_CPDP_V20190820_MODEL_QUERYOPENBANKPAYMENTORDERREQUEST_H_
tangzhangss/commonutils.github.io
src/main/java/com/tangzhangss/commonutils/datasource/service/DatasourceService.java
package com.tangzhangss.commonutils.datasource.service;

import com.tangzhangss.commonutils.base.SysBaseService;
import com.tangzhangss.commonutils.datasource.dao.DatasourceDao;
import com.tangzhangss.commonutils.datasource.entity.DatasourceEntity;
import org.springframework.stereotype.Service;

/**
 * Spring service for {@link DatasourceEntity} records. All CRUD behavior is
 * inherited unchanged from {@link SysBaseService}, parameterized with
 * {@link DatasourceDao} as the data-access layer; this class only exists to
 * register the concrete service bean.
 */
@Service
public class DatasourceService extends SysBaseService<DatasourceEntity, DatasourceDao> {
}
leekayden/jcodec
jplayer/src/main/java/org/jcodec/player/filters/http/HttpMedia.java
<filename>jplayer/src/main/java/org/jcodec/player/filters/http/HttpMedia.java package org.jcodec.player.filters.http; import static org.jcodec.player.filters.http.HttpUtils.getHttpClient; import static org.jcodec.player.filters.http.HttpUtils.privilegedExecute; import java.io.File; import java.io.IOException; import java.net.URL; import java.util.ArrayList; import java.util.List; import org.apache.http.HttpResponse; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.params.ClientPNames; import org.apache.http.client.params.CookiePolicy; import org.apache.http.util.EntityUtils; import org.jcodec.player.filters.MediaInfo; import org.jcodec.player.filters.MediaInfo.VideoInfo; /** * This class is part of JCodec ( www.jcodec.org ) This software is distributed * under FreeBSD License * * A media resource exposed via JCodec streaming * * @author The JCodec project * */ public class HttpMedia { private List<HttpPacketSource> tracks = new ArrayList<HttpPacketSource>(); private HttpPacketSource videoTrack; private List<HttpPacketSource> audioTracks = new ArrayList<HttpPacketSource>(); public HttpMedia(URL url, File cacheWhere) throws IOException { cacheWhere = new File(cacheWhere, url.getHost() + "_" + url.getPath().replace("/", "_")); String data = requestInfo(url, getHttpClient(url.toExternalForm())); MediaInfo[] mediaInfos = MediaInfoParser.parseMediaInfos(data); for (int i = 0; i < mediaInfos.length; i++) { if (mediaInfos[i] == null) continue; try { HttpPacketSource ps = new HttpPacketSource(url.toExternalForm() + "/" + i, new File(cacheWhere + "_" + i), mediaInfos[i]); tracks.add(ps); if (mediaInfos[i] instanceof VideoInfo) videoTrack = ps; else audioTracks.add(ps); } catch (IOException e) { e.printStackTrace(); } } } private String requestInfo(URL url, HttpClient client) throws IOException { HttpGet get = new HttpGet(url.toExternalForm()); get.getParams().setParameter(ClientPNames.COOKIE_POLICY, 
CookiePolicy.IGNORE_COOKIES); HttpResponse response = privilegedExecute(client, get); if (response.getStatusLine().getStatusCode() == 200) { return EntityUtils.toString(response.getEntity()); } else throw new IOException("Could not get the media info [" + url.toExternalForm() + "]:" + response.getStatusLine().getStatusCode() + " " + response.getStatusLine().getReasonPhrase()); } public HttpPacketSource getVideoTrack() { return videoTrack; } public List<HttpPacketSource> getAudioTracks() { return audioTracks; } }
loyio/leetcode
CodeInterviews-SwordOffer/Offer14-II-Cut-Ropes-II/solution1.cpp
class Solution {
public:
    // Cut a rope of integer length n (n >= 2) into at least two integer
    // pieces so the product of piece lengths is maximized; the answer is
    // returned modulo 1e9+7. Optimal strategy: use as many 3s as possible,
    // patching the remainder with a 4 (= 2*2) or a single 2.
    int cuttingRope(int n) {
        const int kMod = 1000000007;
        // For n in {2, 3} a cut is mandatory, so the best product is n - 1.
        if (n < 4) {
            return n - 1;
        }
        // Compute 3^(n/3 - 1) mod kMod by binary exponentiation; the last
        // factor (3, 4, or 6) is applied separately below.
        long long power = 1;
        long long base = 3;
        int exponent = n / 3 - 1;
        while (exponent > 0) {
            if (exponent & 1) {
                power = power * base % kMod;
            }
            base = base * base % kMod;
            exponent >>= 1;
        }
        switch (n % 3) {
            case 0:
                return static_cast<int>(power * 3 % kMod);  // all 3s
            case 1:
                return static_cast<int>(power * 4 % kMod);  // trade 3+1 for 2+2
            default:
                return static_cast<int>(power * 6 % kMod);  // trailing 3*2
        }
    }
};
aleasims/Peach
dependencies/src/4Suite-XML-1.0.2/test/Xml/Xslt/Borrowed/rvd_20030322.py
<filename>dependencies/src/4Suite-XML-1.0.2/test/Xml/Xslt/Borrowed/rvd_20030322.py # key()-less grouping tests, including divide-and-conquer (DVC) # From: "<NAME>" <<EMAIL>> # posted to xsl-list on 22-Mar-2003. from Xml.Xslt import test_harness # The author mistakenly thought that key() could not be used on # result tree fragments that had been converted to node-sets, so # he devised an efficient DVC-based algorithm and several examples # that led up to it. # Input XML (taken from Michael Kay's book) # source_1 = """<?xml version="1.0" encoding="utf-8"?> <cities> <city name="Barcelona" country="Espana"/> <city name="Paris" country="France"/> <city name="Roma" country="Italia"/> <city name="Madrid" country="Espana"/> <city name="Milano" country="Italia"/> <city name="Firenze" country="Italia"/> <city name="Napoli" country="Italia"/> <city name="Lyon" country="France"/> </cities>""" # The following example will give you an idea of grouping # tree-fragments without using the key() function. # (partly copied from Michael Kay's book) # # 1) First the cities are sorted on the @country attribute. After this, cities # that share the same @country value will be following each other, which is a # property we can exploit in step 2. # # 2) Then the template that matches city nodes will be called N times if there are # N cities to be grouped. For each city node in the sorted set the # 'following-sibling::*[1]' node(s) are matched. If they're not equal, the city # node will mark a new group. # # As Michael Kay already pointed out in his book, the efficiency of this approach # depends on the implementation of 'following-sibling::*[1]'. If this expression # has time complexity O(1) then the overall time complexity of getting all the # groups will be O(N) (leaving sorting out of the equation). # # 3) Strangely enough, the last step is actually the most problematic. Let's say # the second step gave us 3 groups. 
Then, for each group, the expression # '$sorted-tree-fragment[country = $country] will be evaluated with time # complexity O(N). # # So, does this mean the overall time complexity will be 3*N = O(N)? # The answer is definitely no! It does hold for a small number of groups, but if # we have N/2 groups then time complexity will be O(N^2). # Selecting nodes with XPATH expressions is usually OK, but in this example we # want to select the K cities that share the same @country value in O(K) time, not # O(N) time. # # So the question we really want to anwer is: 'how can we efficiently select a # subset of nodes without traversing them all?'. The anwser is: 'this all depends # on the selection criterium.' # Still, if the selection criterium isn't too complex, we can still hope for a # better solution. # One solution is that we don't use XPATH expressions to select nodes, but rather # walk through the nodes by using recursive calls. # sheet_1 = """<?xml version="1.0" encoding="utf-8"?> <xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns:exsl="http://exslt.org/common" exclude-result-prefixes="exsl"> <xsl:output method="xml" indent="yes"/> <xsl:template match="/"> <result> <xsl:apply-templates/> </result> </xsl:template> <xsl:template match="cities"> <xsl:variable name="sorted"> <xsl:for-each select="./city"> <xsl:sort select="@country"/> <xsl:copy-of select="."/> </xsl:for-each> </xsl:variable> <xsl:variable name="sorted-tree-fragment" select="exsl:node-set($sorted)/*"/> <!-- Gets the groups --> <xsl:variable name="groups"> <xsl:apply-templates select="$sorted-tree-fragment"/> </xsl:variable> <!-- Iterate through all the groups --> <xsl:for-each select="exsl:node-set($groups)/*"> <xsl:variable name="country" select="@id"/> <xsl:copy> <xsl:copy-of select="@*"/> <!-- Copy the nodes with the same country --> <xsl:copy-of select="$sorted-tree-fragment[@country = $country]"/> </xsl:copy> </xsl:for-each> </xsl:template> <xsl:template match="city"> 
<xsl:variable name="preceding" select="./preceding-sibling::*[1]"/> <xsl:if test="not(./@country = $preceding/@country)"> <group id="{./@country}"/> </xsl:if> </xsl:template> </xsl:stylesheet>""" expected_1 = """<?xml version="1.0" encoding="UTF-8"?> <result> <group id="Espana"> <city name="Barcelona" country="Espana"/> <city name="Madrid" country="Espana"/> </group> <group id="France"> <city name="Paris" country="France"/> <city name="Lyon" country="France"/> </group> <group id="Italia"> <city name="Roma" country="Italia"/> <city name="Milano" country="Italia"/> <city name="Firenze" country="Italia"/> <city name="Napoli" country="Italia"/> </group> </result>""" # GROUPING USING RECURSION # # One idea to reduce time complexity of the previous example is by slightly # modifying the match='city' template [...see original post for more...] # # The time complexity of the recursive solution can be proven to be O(N) but with # the recursion depth also to be O(N). # # Unfortunately, most XSLT implementations have a maximum recursion depth (~1000) # so this is not a general solution. 
# source_2 = source_1 sheet_2 = """<?xml version="1.0" encoding="utf-8"?> <xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns:exsl="http://exslt.org/common" exclude-result-prefixes="exsl"> <xsl:output method="xml" indent="yes"/> <xsl:template match="/"> <result> <xsl:apply-templates/> </result> </xsl:template> <xsl:template match="cities"> <xsl:variable name="sorted"> <xsl:for-each select="./city"> <xsl:sort select="@country"/> <xsl:copy-of select="."/> </xsl:for-each> </xsl:variable> <xsl:variable name="sorted-tree-fragment" select="exsl:node-set($sorted)/*"/> <xsl:variable name="groups"> <xsl:apply-templates select="exsl:node-set($sorted)/*[1]"/> </xsl:variable> <xsl:apply-templates select="exsl:node-set($groups)/*"/> </xsl:template> <xsl:template match="city"> <xsl:variable name="preceding" select="./preceding-sibling::*[1]"/> <xsl:choose> <xsl:when test="not(./@country = $preceding/@country)"> <group id="{./@country}"> <xsl:copy-of select="."/> <xsl:apply-templates select="./following-sibling::*[1]"/> </group> </xsl:when> <xsl:otherwise> <xsl:copy-of select="."/> <xsl:apply-templates select="./following-sibling::*[1]"/> </xsl:otherwise> </xsl:choose> </xsl:template> <!-- bad: unoptimizable <xsl:template match="group"> <xsl:copy> <xsl:copy-of select="@*"/> <xsl:copy-of select="./city"/> </xsl:copy> <xsl:apply-templates select="./group"/> </xsl:template> --> <xsl:template match="group"> <xsl:call-template name="process-group"> <xsl:with-param name="group-node" select="."/> </xsl:call-template> </xsl:template> <xsl:template name="process-group"> <xsl:param name="group-node"/> <xsl:if test="$group-node"> <xsl:copy> <xsl:copy-of select="$group-node/@*"/> <xsl:copy-of select="$group-node/city"/> </xsl:copy> <xsl:call-template name="process-group"> <xsl:with-param name="group-node" select="$group-node/group"/> </xsl:call-template> </xsl:if> </xsl:template> </xsl:stylesheet>""" expected_2 = expected_1 # DVC AND THE BINARY TREE # # 
<NAME> was one of the first to mention Divide and Conquer (DVC) # algorithms to reduce recursion depth. Because most XSLT implementations out # there still do not support tail-recursion elimination, DVC is the way to go if # you want to process a lot of nodes. # # The idea behind DVC is that to attack a big problem, you should divide it into a # number of smaller problems. # # Not surprisingly, dividing a problem into just 2 subproblems is enough to reduce # recursion depth to be O(log2(N)). # # The following example will give you an idea of how this works: # source_3 = """<nodes> <node v="1"/> <node v="2"/> <node v="3"/> <node v="4"/> <node v="5"/> <node v="6"/> <node v="7"/> <node v="8"/> </nodes>""" sheet_3 = """<?xml version="1.0" encoding="utf-8"?> <xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns:exsl="http://exslt.org/common" exclude-result-prefixes="exsl"> <xsl:output method="xml" indent="yes"/> <xsl:template match="/"> <xsl:call-template name="partition"> <xsl:with-param name="nodes" select="//node"/> </xsl:call-template> </xsl:template> <xsl:template name="partition"> <xsl:param name="nodes"/> <xsl:variable name="half" select="floor(count($nodes) div 2)"/> <b> <xsl:choose> <xsl:when test="count($nodes) &lt;= 1"> <!-- There is only one node left: stop dividing problem --> <xsl:copy-of select="$nodes"/> </xsl:when> <xsl:otherwise> <!-- divide in first half of nodes (left) --> <xsl:call-template name="partition"> <xsl:with-param name="nodes" select="$nodes[position() &lt;= $half]"/> </xsl:call-template> <!-- divide in second half of nodes (right) --> <xsl:call-template name="partition"> <xsl:with-param name="nodes" select="$nodes[position() &gt; $half]"/> </xsl:call-template> </xsl:otherwise> </xsl:choose> </b> </xsl:template> </xsl:stylesheet>""" # The result is what is called a binary tree representation. At first this # representation doesn't seem all that useful. 
Later we will see that specialised # binary trees can be (re-)used to implement almost any recursive function without # exceeding the maximum recursion depth. # expected_3 = """<?xml version="1.0" encoding="UTF-8"?> <b> <b> <b> <b> <node v="1"/> </b> <b> <node v="2"/> </b> </b> <b> <b> <node v="3"/> </b> <b> <node v="4"/> </b> </b> </b> <b> <b> <b> <node v="5"/> </b> <b> <node v="6"/> </b> </b> <b> <b> <node v="7"/> </b> <b> <node v="8"/> </b> </b> </b> </b>""" # Let's sum all the @v values with the use of the binary (fragment) tree: # # [...] the overall 'copy' complexity is O(log2(N)*N). # # Although the number of recursive calls is O(N) the XSLT processor still spends # at least O(log2(N)*N) time because it must copy (and select) half of the nodes # for the each recursive call (twice). # # Copying nodes should be avoided as much as possible because it slows down # recursion considerably. # source_4 = expected_3 sheet_4 = """<?xml version="1.0" encoding="utf-8"?> <xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns:exsl="http://exslt.org/common" exclude-result-prefixes="exsl"> <xsl:output method="text"/> <xsl:template match="/"> <xsl:variable name="btree"> <xsl:call-template name="partition"> <xsl:with-param name="nodes" select="//node"/> </xsl:call-template> </xsl:variable> <xsl:call-template name="sum-binary-tree"> <xsl:with-param name="bnode" select="exsl:node-set($btree)/*"/> </xsl:call-template> </xsl:template> <xsl:template name="sum-binary-tree"> <xsl:param name="bnode"/> <xsl:choose> <xsl:when test="$bnode/node"> <xsl:value-of select="$bnode/node/@v"/> </xsl:when> <xsl:otherwise> <xsl:variable name="first"> <xsl:call-template name="sum-binary-tree"> <xsl:with-param name="bnode" select="$bnode/b[1]"/> </xsl:call-template> </xsl:variable> <xsl:variable name="second"> <xsl:call-template name="sum-binary-tree"> <xsl:with-param name="bnode" select="$bnode/b[2]"/> </xsl:call-template> </xsl:variable> <xsl:value-of select="$first 
+ $second"/> </xsl:otherwise> </xsl:choose> </xsl:template> <xsl:template name="partition"> <xsl:param name="nodes"/> <xsl:variable name="half" select="floor(count($nodes) div 2)"/> <b> <xsl:choose> <xsl:when test="count($nodes) &lt;= 1"> <!-- There is only one node left: stop dividing problem --> <xsl:copy-of select="$nodes"/> </xsl:when> <xsl:otherwise> <!-- divide in first half of nodes (left) --> <xsl:call-template name="partition"> <xsl:with-param name="nodes" select="$nodes[position() &lt;= $half]"/> </xsl:call-template> <!-- divide in second half of nodes (right) --> <xsl:call-template name="partition"> <xsl:with-param name="nodes" select="$nodes[position() &gt; $half]"/> </xsl:call-template> </xsl:otherwise> </xsl:choose> </b> </xsl:template> </xsl:stylesheet>""" expected_4 = "36" # MODIFIED DVC ALGORITHM: RANGE PARTITIONING # # The following implementation of a binary partition doesn't copy a list of nodes # but just one node at each recursive call. It uses the so called 'sibling' axis # to walk through the list. Because there are O(N) recursive calls, this means # that O(N) nodes are copied. Does this mean that the overall time complexity will # be O(N) too? The answer is: probably yes, but at worst it will be O(N^2). # # Let's compare overall time complexity with the possible implementations of # 'following-sibling::[w]' # # following-sibling::*[w] | total time # _____________________________________ # O(1) | O(N) # O(w) | O(log2(N)*N) # O(N) | O(N^2) # # So at worst it will be quadratic. So the question still remains if it is # theoretically possible to do binary partitioning without copying to much nodes. # Nevertheless, experiments with XALAN have shown that the implementation is not # quadratic. 
# source_5 = source_3 sheet_5 = """<?xml version="1.0" encoding="utf-8"?> <xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns:exsl="http://exslt.org/common" exclude-result-prefixes="exsl"> <xsl:output method="xml" indent="yes"/> <xsl:template match="/"> <xsl:call-template name="partition-ranges"> <xsl:with-param name="node" select="//node[1]"/> </xsl:call-template> </xsl:template> <xsl:template name="partition-ranges"> <xsl:param name="node"/> <xsl:param name="s" select="(count($node/preceding-sibling::*)) + 1"/> <xsl:param name="e" select="(count($node/following-sibling::*)) + $s"/> <xsl:if test="$node"> <xsl:element name="r"> <xsl:attribute name="s"> <xsl:value-of select="$s"/> </xsl:attribute> <xsl:attribute name="e"> <xsl:value-of select="$e"/> </xsl:attribute> <xsl:choose> <xsl:when test="$s = $e"> <xsl:copy-of select="$node"/> </xsl:when> <xsl:otherwise> <xsl:variable name="w" select="floor(($e - $s + 1) div 2)"/> <xsl:variable name="m" select="$s + $w"/> <xsl:call-template name="partition-ranges"> <xsl:with-param name="node" select="$node"/> <xsl:with-param name="s" select="$s"/> <xsl:with-param name="e" select="$m - 1"/> </xsl:call-template> <xsl:call-template name="partition-ranges"> <xsl:with-param name="node" select="$node/following-sibling::*[$w]"/> <xsl:with-param name="s" select="$m"/> <xsl:with-param name="e" select="$e"/> </xsl:call-template> </xsl:otherwise> </xsl:choose> </xsl:element> </xsl:if> </xsl:template> </xsl:stylesheet>""" expected_5 = """<?xml version="1.0" encoding="UTF-8"?> <r s="1" e="8"> <r s="1" e="4"> <r s="1" e="2"> <r s="1" e="1"> <node v="1"/> </r> <r s="2" e="2"> <node v="2"/> </r> </r> <r s="3" e="4"> <r s="3" e="3"> <node v="3"/> </r> <r s="4" e="4"> <node v="4"/> </r> </r> </r> <r s="5" e="8"> <r s="5" e="6"> <r s="5" e="5"> <node v="5"/> </r> <r s="6" e="6"> <node v="6"/> </r> </r> <r s="7" e="8"> <r s="7" e="7"> <node v="7"/> </r> <r s="8" e="8"> <node v="8"/> </r> </r> </r> </r>""" # 
GROUPING WITH A BINARY TREE # # The new and improved grouping algorithm is more or less the same as the first # one except where using ranges to select nodes which are in the same group. # Thus: # # 1) we sort the nodes for a given key # 2) then compute the ranges of nodes which have the same key # 3) and then select the (sorted) nodes for each range. # # To efficiently select a range of nodes we will be using the binary tree. # # Here's the whole solution: # source_6 = source_1 sheet_6 = """<?xml version="1.0" encoding="utf-8"?> <xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns:exsl="http://exslt.org/common" exclude-result-prefixes="exsl"> <xsl:output method="xml" indent="yes"/> <!-- Group cities on country --> <xsl:template match="/"> <result> <xsl:call-template name="group-on-key"> <xsl:with-param name="nodes" select="//city"/> <xsl:with-param name="key" select="'country'"/> </xsl:call-template> </result> </xsl:template> <!-- Template: group-on-key Use this template to group <nodes> which share a common attribute <key> The result will be sub-sets of <nodes> surrounded by <group/> tags --> <xsl:template name="group-on-key"> <xsl:param name="nodes"/> <xsl:param name="key"/> <xsl:variable name="items"> <xsl:for-each select="$nodes"> <item> <key> <xsl:value-of select="./@*[name() = $key]"/> </key> <value> <xsl:copy-of select="."/> </value> </item> </xsl:for-each> </xsl:variable> <xsl:variable name="grouped-items"> <xsl:call-template name="group-on-item"> <xsl:with-param name="nodes" select="exsl:node-set($items)/*"/> <xsl:with-param name="key" select="$key"/> </xsl:call-template> </xsl:variable> <xsl:for-each select="exsl:node-set($grouped-items)/*"> <xsl:copy> <xsl:for-each select="./*"> <xsl:copy-of select="./value/*[1]"/> </xsl:for-each> </xsl:copy> </xsl:for-each> </xsl:template> <!-- Template: group-on-item Use this template to group <nodes> which share a common structure. 
You can build this structure yourself if you want to group on something else The structure is the <item> structure and has the following layout <item> <key> aKey (can be anything, preferrably a string) </key> <value> aValue (can be anything, probably a node(set)) </value> </item> <items> will we grouped on the string value of <key> The result will be sub-sets of <items> surrounded by <group/> tags --> <xsl:template name="group-on-item"> <xsl:param name="nodes"/> <!-- Step 1 --> <xsl:variable name="sorted"> <xsl:for-each select="$nodes"> <xsl:sort select="./key[1]"/> <xsl:copy-of select="."/> </xsl:for-each> </xsl:variable> <xsl:variable name="sorted-tree" select="exsl:node-set($sorted)/*"/> <!-- Step 2.1 --> <xsl:variable name="pivots"> <xsl:call-template name="pivots"> <xsl:with-param name="nodes" select="$sorted-tree"/> </xsl:call-template> </xsl:variable> <!-- Step 2.2 --> <xsl:variable name="ranges"> <xsl:call-template name="ranges"> <xsl:with-param name="pivots" select="exsl:node-set($pivots)/*"/> <xsl:with-param name="length" select="count($sorted-tree)"/> </xsl:call-template> </xsl:variable> <!-- Step 3.1 --> <xsl:variable name="partition-ranges"> <xsl:call-template name="partition-ranges"> <xsl:with-param name="node" select="$sorted-tree[1]"/> </xsl:call-template> </xsl:variable> <xsl:variable name="root-partition" select="exsl:node-set($partition-ranges)/*[1]"/> <!-- Step 3.2 --> <xsl:for-each select="exsl:node-set($ranges)/r"> <xsl:variable name="s" select="./@s"/> <xsl:variable name="e" select="./@e"/> <group> <xsl:call-template name="range-in-partition"> <xsl:with-param name="s" select="$s"/> <xsl:with-param name="e" select="$e"/> <xsl:with-param name="p" select="$root-partition"/> </xsl:call-template> </group> </xsl:for-each> </xsl:template> <xsl:template name="pivots"> <xsl:param name="nodes"/> <xsl:param name="key"/> <xsl:for-each select="$nodes"> <xsl:if test="not(string(./key[1]) = string(./following-sibling::*[1]/key[1]))"> <pivot> <xsl:value-of 
select="position()"/> </pivot> </xsl:if> </xsl:for-each> </xsl:template> <xsl:template name="ranges"> <xsl:param name="pivots" select=".."/> <xsl:param name="length" select="0"/> <xsl:choose> <xsl:when test="count($pivots) &gt;= 1"> <xsl:for-each select="$pivots"> <xsl:variable name="p" select="./preceding-sibling::*[1]"/> <r> <xsl:attribute name="s"> <xsl:choose> <xsl:when test="$p"> <xsl:value-of select="$p + 1"/> </xsl:when> <xsl:otherwise> <xsl:value-of select="1"/> </xsl:otherwise> </xsl:choose> </xsl:attribute> <xsl:attribute name="e"> <xsl:value-of select="string(.)"/> </xsl:attribute> </r> </xsl:for-each> </xsl:when> <xsl:otherwise> <r> <xsl:attribute name="s"> <xsl:value-of select="1"/> </xsl:attribute> <xsl:attribute name="e"> <xsl:value-of select="$length"/> </xsl:attribute> </r> </xsl:otherwise> </xsl:choose> </xsl:template> <!-- Template: range-in-partition Selects a RANGE of nodes using a binary tree XSLT isn't really helping to make things easy but try to do this in a DVC style directly without the help of a binary tree. 
--> <xsl:template name="range-in-partition"> <xsl:param name="p"/> <xsl:param name="s" select="$p/@s"/> <xsl:param name="e" select="$p/@e"/> <xsl:variable name="ps" select="number($p/@s)"/> <xsl:variable name="pe" select="number($p/@e)"/> <xsl:if test="$s &lt;= $pe and $e &gt;= $ps"> <xsl:if test="$ps = $pe"> <xsl:copy-of select="$p/*[1]"/> </xsl:if> <xsl:choose> <xsl:when test="$s &gt; $ps"> <xsl:variable name="s1" select="$s"/> <xsl:choose> <xsl:when test="$e &lt; $pe"> <xsl:variable name="e1" select="$e"/> <xsl:for-each select="$p/*"> <xsl:call-template name="range-in-partition"> <xsl:with-param name="s" select="$s1"/> <xsl:with-param name="e" select="$e1"/> <xsl:with-param name="p" select="."/> </xsl:call-template> </xsl:for-each> </xsl:when> <xsl:otherwise> <xsl:variable name="e1" select="$pe"/> <xsl:for-each select="$p/*"> <xsl:call-template name="range-in-partition"> <xsl:with-param name="s" select="$s1"/> <xsl:with-param name="e" select="$e1"/> <xsl:with-param name="p" select="."/> </xsl:call-template> </xsl:for-each> </xsl:otherwise> </xsl:choose> </xsl:when> <xsl:otherwise> <xsl:variable name="s1" select="$ps"/> <xsl:choose> <xsl:when test="$e &lt; $pe"> <xsl:variable name="e1" select="$e"/> <xsl:for-each select="$p/*"> <xsl:call-template name="range-in-partition"> <xsl:with-param name="s" select="$s1"/> <xsl:with-param name="e" select="$e1"/> <xsl:with-param name="p" select="."/> </xsl:call-template> </xsl:for-each> </xsl:when> <xsl:otherwise> <xsl:variable name="e1" select="$pe"/> <xsl:for-each select="$p/*"> <xsl:call-template name="range-in-partition"> <xsl:with-param name="s" select="$s1"/> <xsl:with-param name="e" select="$e1"/> <xsl:with-param name="p" select="."/> </xsl:call-template> </xsl:for-each> </xsl:otherwise> </xsl:choose> </xsl:otherwise> </xsl:choose> </xsl:if> </xsl:template> <xsl:template name="partition-ranges"> <xsl:param name="node"/> <xsl:param name="s" select="(count($node/preceding-sibling::*)) + 1"/> <xsl:param name="e" 
select="(count($node/following-sibling::*)) + $s"/> <xsl:if test="$node"> <xsl:element name="r"> <xsl:attribute name="s"> <xsl:value-of select="$s"/> </xsl:attribute> <xsl:attribute name="e"> <xsl:value-of select="$e"/> </xsl:attribute> <xsl:choose> <xsl:when test="$s = $e"> <xsl:copy-of select="$node"/> </xsl:when> <xsl:otherwise> <xsl:variable name="w" select="floor(($e - $s + 1) div 2)"/> <xsl:variable name="m" select="$s + $w"/> <xsl:call-template name="partition-ranges"> <xsl:with-param name="node" select="$node"/> <xsl:with-param name="s" select="$s"/> <xsl:with-param name="e" select="$m - 1"/> </xsl:call-template> <xsl:call-template name="partition-ranges"> <xsl:with-param name="node" select="$node/following-sibling::*[$w]"/> <xsl:with-param name="s" select="$m"/> <xsl:with-param name="e" select="$e"/> </xsl:call-template> </xsl:otherwise> </xsl:choose> </xsl:element> </xsl:if> </xsl:template> </xsl:stylesheet>""" # note this is missing the group ids as compared to the first example # expected_6 = """<?xml version="1.0" encoding="UTF-8"?> <result> <group> <city name="Barcelona" country="Espana"/> <city name="Madrid" country="Espana"/> </group> <group> <city name="Paris" country="France"/> <city name="Lyon" country="France"/> </group> <group> <city name="Roma" country="Italia"/> <city name="Milano" country="Italia"/> <city name="Firenze" country="Italia"/> <city name="Napoli" country="Italia"/> </group> </result>""" def Test(tester): source = test_harness.FileInfo(string=source_1) sheet = test_harness.FileInfo(string=sheet_1) test_harness.XsltTest(tester, source, [sheet], expected_1, title='grouping without keys') source = test_harness.FileInfo(string=source_2) sheet = test_harness.FileInfo(string=sheet_2) test_harness.XsltTest(tester, source, [sheet], expected_2, title='grouping using recursion') source = test_harness.FileInfo(string=source_3) sheet = test_harness.FileInfo(string=sheet_3) test_harness.XsltTest(tester, source, [sheet], expected_3, 
title='generate a binary tree') source = test_harness.FileInfo(string=source_4) sheet = test_harness.FileInfo(string=sheet_4) test_harness.XsltTest(tester, source, [sheet], expected_4, title='sum nodes in a binary tree') source = test_harness.FileInfo(string=source_5) sheet = test_harness.FileInfo(string=sheet_5) test_harness.XsltTest(tester, source, [sheet], expected_5, title='generate a binary tree with range partitioning') source = test_harness.FileInfo(string=source_6) sheet = test_harness.FileInfo(string=sheet_6) test_harness.XsltTest(tester, source, [sheet], expected_6, title='efficient divide-and-conquer based grouping') return
iitsoftware/swiftmq-ce
swiftlets/sys_mgmt/src/main/java/com/swiftmq/impl/mgmt/standard/jmx/JMXUtil.java
/*
 * Copyright 2019 IIT Software GmbH
 *
 * IIT Software GmbH licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package com.swiftmq.impl.mgmt.standard.jmx;

import com.swiftmq.impl.mgmt.standard.SwiftletContext;
import com.swiftmq.mgmt.Entity;
import com.swiftmq.swiftlet.SwiftletManager;
import com.swiftmq.util.SwiftUtilities;

import javax.management.MBeanServer;
import javax.management.MBeanServerFactory;
import javax.management.ObjectName;
import java.lang.management.ManagementFactory;
import java.util.ArrayList;

/**
 * Maps SwiftMQ management entities onto JMX MBeans.
 * <p>
 * Resolves the MBean server to use from the mgmt swiftlet configuration
 * (jmx/mbean-server), builds JMX ObjectNames from entity contexts, and
 * registers/unregisters {@link EntityMBean}s on that server.
 */
public class JMXUtil {
    private static final String DOMAIN = "com.swiftmq.";
    SwiftletContext ctx = null;
    MBeanServer mbs = null;
    // When true, each context element becomes its own ObjectName key
    // (type=..., name1=..., name2=...); otherwise the whole context is
    // flattened into a single "type" key.
    boolean groupNames = false;

    /**
     * Creates the util and immediately resolves the MBean server according
     * to the configured usage option.
     *
     * @param ctx        swiftlet context (configuration, logging, tracing)
     * @param groupNames whether to split the entity context into grouped
     *                   ObjectName keys
     */
    public JMXUtil(SwiftletContext ctx, boolean groupNames) {
        this.ctx = ctx;
        this.groupNames = groupNames;
        mbs = getMBeanServer();
    }

    // Reads the configured server name (jmx/mbean-server/server-name).
    private String getServerName() {
        return (String) ctx.root.getEntity("jmx").getEntity("mbean-server").getProperty("server-name").getValue();
    }

    /**
     * Resolves the MBean server per the "usage-option" property:
     * use-platform-server, use-named-server (looked up by default domain,
     * created with a warning if missing), or create-named-server.
     */
    private MBeanServer getMBeanServer() {
        MBeanServer mbeanServer = null;
        String option = (String) ctx.root.getEntity("jmx").getEntity("mbean-server").getProperty("usage-option").getValue();
        if (option.equals("use-platform-server"))
            mbeanServer = ManagementFactory.getPlatformMBeanServer();
        else if (option.equals("use-named-server")) {
            String serverName = getServerName();
            ArrayList<MBeanServer> list = MBeanServerFactory.findMBeanServer(null);
            if (list != null) {
                for (MBeanServer srv : list) {
                    if (srv.getDefaultDomain().equals(serverName)) {
                        mbeanServer = srv;
                        break;
                    }
                }
            }
            if (mbeanServer == null) {
                // Fall back to creating the named server so registration
                // still works, but warn the operator about the mismatch.
                ctx.logSwiftlet.logWarning(ctx.mgmtSwiftlet.getName(), "Unable to find MBean Server '" + serverName + "'. Create the MBean Server with this name!");
                mbeanServer = MBeanServerFactory.createMBeanServer(serverName);
            }
        } else if (option.equals("create-named-server"))
            mbeanServer = MBeanServerFactory.createMBeanServer(getServerName());
        return mbeanServer;
    }

    /** Switches between grouped and flattened ObjectName key generation. */
    public void setGroupNames(boolean groupNames) {
        this.groupNames = groupNames;
    }

    /**
     * Escapes characters that are illegal in JMX ObjectName values
     * (':', '=', ' ') by replacing them with '_'.
     */
    public static String toJMXString(String s) {
        return s.replace(':', '_').replace('=', '_').replace(' ', '_');
    }

    /**
     * Builds the ObjectName suffix (domain part excluded) for an entity:
     * "&lt;router&gt;:type=..." either grouped per context element or with the
     * whole context joined by '/'.
     *
     * @param entity entity whose context path is encoded
     * @return ObjectName string without the leading DOMAIN prefix
     */
    public String getContext(Entity entity) {
        String[] econtext = entity.getContext();
        String context = toJMXString(SwiftletManager.getInstance().getRouterName()) + ":";
        if (groupNames) {
            StringBuilder b = new StringBuilder();
            for (int i = 0; i < econtext.length; i++) {
                if (i > 0)
                    b.append(", ");
                if (i > 0) {
                    b.append("name");
                    b.append(i);
                } else
                    b.append("type");
                b.append("=");
                b.append(toJMXString(econtext[i]));
            }
            return context + b.toString();
        }
        return context + "type=" + toJMXString(SwiftUtilities.concat(econtext, "/"));
    }

    /**
     * Registers the MBean on the resolved server and remembers the
     * ObjectName on the bean so it can be unregistered later.
     */
    public void registerMBean(EntityMBean mbean) {
        try {
            ObjectName name = new ObjectName(DOMAIN + getContext(mbean.getEntity()));
            if (ctx.traceSpace.enabled)
                ctx.traceSpace.trace(ctx.mgmtSwiftlet.getName(), toString() + "/registerMBean, name=" + name);
            mbean.setObjectName(name);
            mbs.registerMBean(mbean, name);
        } catch (Exception e) {
            // TODO(review): consider routing through ctx.logSwiftlet instead
            // of stderr; kept as-is to preserve existing behavior.
            e.printStackTrace();
        }
    }

    /** Unregisters a previously registered MBean by its stored ObjectName. */
    public void unregisterMBean(EntityMBean mbean) {
        try {
            if (ctx.traceSpace.enabled)
                ctx.traceSpace.trace(ctx.mgmtSwiftlet.getName(), toString() + "/unregisterMBean, name=" + mbean.getObjectName());
            mbs.unregisterMBean(mbean.getObjectName());
        } catch (Exception e) {
            // TODO(review): same logging note as registerMBean.
            e.printStackTrace();
        }
    }

    public String toString() {
        return "JMXUtil";
    }
}
cdsc-github/parade-ara-simulator
src/modules/scratch-pad/scratch-pad.hh
/* scratch-pad.hh: Scratchpad used in PARADE */
#ifndef SCRATCH_PAD_H
#define SCRATCH_PAD_H

#include <stdint.h>

#include <vector>

// Byte-addressable scratchpad memory model.  Grows on demand; bytes that
// have never been written hold the 0xcd poison pattern and are flagged
// invalid.
class ScratchPad {
 public:
  std::vector<uint8_t> data;   // backing byte storage
  std::vector<bool> valid;     // per-byte "has been written" flag

  // Ensures byte index `size` is addressable.  Newly exposed bytes are
  // filled with 0xcd and marked invalid.  Never shrinks the pad.
  // (Single resize replaces the original one-byte-at-a-time push_back
  // loop: same resulting state, without O(n) repeated reallocation
  // checks.)
  void CheckSize(uint64_t size) {
    if (data.size() <= size) {
      data.resize(size + 1, 0xcd);
      valid.resize(size + 1, false);
    }
  }

  // Drops all contents and releases the backing storage (assigning a
  // fresh vector frees capacity, unlike clear()).
  void Clear() {
    data = std::vector<uint8_t>();
    valid = std::vector<bool>();
  }
};

// C-style handle wrapper so the scratchpad can be driven through the
// function-pointer interface below.
typedef struct ScratchPadHandle_t {
  ScratchPad* sp;
} ScratchPadHandle;

// Function-pointer interface for reading/writing/clearing a scratchpad.
typedef struct scratch_pad_interface {
  void (*read)(ScratchPadHandle* obj, uint64_t address, void* dataRd,
               unsigned int size);
  void (*write)(ScratchPadHandle* obj, uint64_t address, const void* dataWr,
                unsigned int size);
  void (*clear)(ScratchPadHandle* obj);
} scratch_pad_interface_t;

ScratchPadHandle* CreateNewScratchPad();
int DeleteScratchPad(ScratchPadHandle* obj);
scratch_pad_interface_t* CreateScratchPadInterface();

#endif /* SCRATCH_PAD_H */
aabtop/reify
src/idt/targets/pure_cpp/templates/CppImmutableRefCounted.stache.h
// {{! // clang-format off // }} #ifndef _{{namespace}}_CPP_IMMUT_REF_COUNTED_IST_GENERATED_H_ #define _{{namespace}}_CPP_IMMUT_REF_COUNTED_IST_GENERATED_H_ #include <array> #include <memory> #include <string> #include <tuple> #include <variant> #include <vector> #include <iostream> {{#enable_hashes}} #include "reify/pure_cpp/hashing.h" {{/enable_hashes}} namespace {{namespace}} { {{#declarationSequence}} {{{.}}} {{/declarationSequence}} } // {{namespace}} // This is the same regardless of domain, so we only want to define it once, // but it's nice to avoid a header dependency if we can by putting the // definition here. #ifndef CPP_IMMUT_REF_COUNTED_IST_GENERATED_H_GENERIC_NEW #define CPP_IMMUT_REF_COUNTED_IST_GENERATED_H_GENERIC_NEW namespace reify { template <typename T> inline std::shared_ptr<const T> New(T&& x) { return std::make_shared<T>(std::move(x)); } template <typename T> using Reference = decltype(New(std::declval<std::decay_t<T>>())); } // namespace reify #endif // CPP_IMMUT_REF_COUNTED_IST_GENERATED_H_GENERIC_NEW {{#enable_hashes}} #include "reify/pure_cpp/hashing_post_definitions.h" {{/enable_hashes}} #endif // _{{namespace}}_CPP_IMMUT_REF_COUNTED_IST_GENERATED_H_
gilles-leblanc/gameproject
UI/screens/map_explorer.rb
require 'gosu'
require_relative '../map_overview'
require_relative '../info_box'
require_relative '../view_port'

# The screen where we show the map and the basic controls. This is the main
# screen for everything that is exploration related. It includes a 2d first
# person view of what the character would see. A description of the current
# characters in the party, a mini-map and a compass.
class MapExplorer
  # Determine the speed at which new button presses are processed
  TICKS_PER_STEP = 15

  # Movement vector [dx, dy] for each facing, in map coordinates
  # (north decreases y, east increases x).  Replaces the four duplicated
  # case statements previously spread across step_forward, step_backward
  # and previous_tile.
  DIRECTION_DELTAS = {
    north: [0, -1],
    south: [0, 1],
    west: [-1, 0],
    east: [1, 0]
  }.freeze

  def initialize(parent_window)
    @parent_window = parent_window
    @model = parent_window.model
    @current_position = parent_window.model.generate_starting_position
    # Stack of [position, map] pairs so leaving a sub-map restores where
    # we entered from.
    @previous_map_position = []
    # Facing is the first element; rotate! turns left, rotate!(-1) right.
    @compass = [:north, :west, :south, :east]
    @key_countdown = 0
    @view_port = ViewPort.new(parent_window)
    @info_box = InfoBox.new(parent_window)
    @map_overview = MapOverview.new(650, 130, parent_window)
    @acted = false
  end

  # Called every tick: throttles held-down keys so a repeated action only
  # fires once every TICKS_PER_STEP ticks.
  def update
    return unless @key_countdown > 0

    @key_countdown -= 1
    if @key_countdown == 0
      @key_countdown = TICKS_PER_STEP
      button_presses
    end
  end

  def draw
    @view_port.draw(model.map, @current_position, @compass)
    @info_box.draw(model.map, @current_position, @compass)
    @map_overview.draw(model.map, @current_position, @compass)
  end

  def button_down(id)
    if @key_countdown == 0
      # First step
      @key_countdown = TICKS_PER_STEP
      button_presses
    end
  end

  private

  def model
    @parent_window.model
  end

  # Dispatches on the currently held arrow key: up/down move, left/right
  # rotate the compass.  Always re-evaluates the tile afterwards.
  def button_presses
    if @parent_window.button_down? Gosu::KbUp
      step_forward
      @acted = false
    elsif @parent_window.button_down? Gosu::KbDown
      step_backward
      @acted = false
    elsif @parent_window.button_down? Gosu::KbLeft
      @compass.rotate!
    elsif @parent_window.button_down? Gosu::KbRight
      @compass.rotate!(-1)
    end
    act_for_tile
  end

  # Handles tile side effects: entering a sub-map (:city/:cave), leaving
  # one (:entrance), and firing the tile's event at most once per visit.
  def act_for_tile
    tile_type = current_tile.type

    if tile_type == :city || tile_type == :cave
      # Remember where we came from so :entrance can take us back.
      @previous_map_position.push([previous_tile, model.map])
      model.map = model.world_map.get_map_at_position(@current_position[:x],
                                                      @current_position[:y])
      @current_position = @model.starting_position.get_sub_map_position(model.map)
    end

    if tile_type == :entrance
      old_position = @previous_map_position.pop
      @current_position = old_position[0]
      model.map = old_position[1]
    end

    unless current_tile.event.nil? || @acted
      current_tile.event.act
      @acted = true
    end
  end

  # Tile at the current position on the current map.  Re-evaluated on each
  # call because act_for_tile may switch maps mid-way through.
  def current_tile
    model.map.tile_at(@current_position[:x], @current_position[:y])
  end

  def step_forward
    dx, dy = DIRECTION_DELTAS[@compass[0]]
    move_by(dx, dy)
  end

  def step_backward
    dx, dy = DIRECTION_DELTAS[@compass[0]]
    move_by(-dx, -dy)
  end

  # Moves the party by (dx, dy) only if the destination tile is passable.
  def move_by(dx, dy)
    new_x = @current_position[:x] + dx
    new_y = @current_position[:y] + dy
    return unless model.map.tile_at(new_x, new_y).passable?

    @current_position[:x] = new_x
    @current_position[:y] = new_y
  end

  # The tile directly behind the party (one step opposite the facing);
  # saved before entering a sub-map so we can return to it.
  def previous_tile
    dx, dy = DIRECTION_DELTAS[@compass[0]]
    { x: @current_position[:x] - dx, y: @current_position[:y] - dy }
  end
end
ozdanborne/libcalico-go
lib/api/profile.go
// Copyright (c) 2016 Tigera, Inc. All rights reserved.

// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package api

import (
	. "github.com/tigera/libcalico-go/lib/api/unversioned"
)

// ProfileMetadata contains the metadata for a Profile resource: its name
// and an optional set of labels.
type ProfileMetadata struct {
	ObjectMetadata
	Name   string            `json:"name,omitempty" validate:"omitempty,name"`
	Labels map[string]string `json:"labels,omitempty" validate:"omitempty,labels"`
}

// ProfileSpec contains the specification for a Profile resource: the
// ingress and egress rules applied by the profile, plus its tags.
type ProfileSpec struct {
	IngressRules []Rule   `json:"ingress,omitempty" validate:"omitempty,dive"`
	EgressRules  []Rule   `json:"egress,omitempty" validate:"omitempty,dive"`
	Tags         []string `json:"tags,omitempty" validate:"omitempty,dive,tag"`
}

// Profile is a single profile resource, combining its metadata and spec.
type Profile struct {
	TypeMetadata
	Metadata ProfileMetadata `json:"metadata,omitempty"`
	Spec     ProfileSpec     `json:"spec,omitempty"`
}

// NewProfile returns an empty Profile with the TypeMetadata (kind and
// API version) pre-populated.
func NewProfile() *Profile {
	return &Profile{TypeMetadata: TypeMetadata{Kind: "profile", APIVersion: "v1"}}
}

// ProfileList is a list of Profile resources.
type ProfileList struct {
	TypeMetadata
	Metadata ListMetadata `json:"metadata,omitempty"`
	Items    []Profile    `json:"items" validate:"dive,omitempty"`
}

// NewProfileList returns an empty ProfileList with the TypeMetadata (kind
// and API version) pre-populated.
func NewProfileList() *ProfileList {
	return &ProfileList{TypeMetadata: TypeMetadata{Kind: "profileList", APIVersion: "v1"}}
}
antarikshray/websiterudra
node_modules/react-icons/fa/television.js
import React from 'react'
import Icon from 'react-icon-base'

// Auto-generated FontAwesome "television" glyph.  Wraps the SVG path in
// the shared react-icon-base <Icon> (40x40 viewBox) and forwards all
// props (size, color, style, ...) to it.
const FaTelevision = props => (
    <Icon viewBox="0 0 40 40" {...props}>
        <g><path d="m37.2 27.3v-19.9q0-0.3-0.2-0.5t-0.5-0.2h-33.2q-0.2 0-0.4 0.2t-0.2 0.5v19.9q0 0.3 0.2 0.5t0.4 0.2h33.2q0.3 0 0.5-0.2t0.2-0.5z m2.7-19.9v19.9q0 1.4-1 2.4t-2.4 0.9h-15.2v2.7h7.3q0.3 0 0.4 0.2t0.2 0.5v1.3q0 0.3-0.2 0.5t-0.4 0.1h-17.3q-0.3 0-0.5-0.1t-0.2-0.5v-1.3q0-0.3 0.2-0.5t0.5-0.2h7.3v-2.7h-15.3q-1.3 0-2.3-0.9t-1-2.4v-19.9q0-1.4 1-2.4t2.3-0.9h33.2q1.4 0 2.4 0.9t1 2.4z"/></g>
    </Icon>
)

export default FaTelevision
Allison12345/weather-report-wxapp
components/my-weather-search/index.js
// Search box component: resolves a typed address to a city via the
// Tencent Maps geocoder and navigates to the weather page for it.
Component({
  data: {
    // Search icon asset served from cloud storage.
    img: 'https://6d79-mywxapp-q4z0b-1301425530.tcb.qcloud.la/search-outline.png?sign=5fd5c85d0e4b46858a35d07b2726fcde&t=1584787271',
    // Current content of the search input.
    city: ''
  },
  methods: {
    // Clears the input when the cancel button is tapped.
    onCancel() {
      this.setData({
        city: ''
      })
    },
    // Keeps `city` in sync with the input field.
    onChange(e) {
      this.setData({
        city: e.detail.value
      })
    },
    // NOTE: name keeps the original spelling ("Comfirm") because the WXML
    // template binds to it; renaming would break the binding.
    onComfirm(e) {
      this.getCityInfo(e.detail.value)
    },
    // Geocodes `address` and, on success, navigates to the weather page
    // for "<province><city><district>"; otherwise shows the API message.
    getCityInfo(address) {
      wx.request({
        url: `https://apis.map.qq.com/ws/geocoder/v1/?address=${address}`,
        data: {
          // Tencent Maps API key; '<KEY>' is a redacted placeholder and
          // must be replaced with a real key for requests to succeed.
          key: '<KEY>'
        },
        header: {
          'content-type': 'application/json'
        },
        success: res => {
          const { status, message, result } = res.data
          // status 0 means the geocoder resolved the address.
          if (status === 0) {
            const { province, city, district } = result.address_components
            wx.navigateTo({
              url: `/pages/weather-page/index?city=${province + city + district}`
            })
            return
          }
          wx.showToast({
            title: message,
            icon: 'none'
          })
        },
        fail: res => {
          // Fix: wx.request's fail callback provides `errMsg`, not
          // `message` — the old code showed an empty toast on network
          // errors.
          wx.showToast({
            title: res.errMsg || 'request failed',
            icon: 'none'
          })
        }
      })
    }
  }
})
anhtrangg/PhotonChat
PhotonChat/Photon-objc/inc/Enums/EGConnectionProtocol.h
#pragma once #define EGConnectionProtocol_UDP ((nByte)0) ///<Use UDP to connect to Photon, which allows you to send operations reliable or unreliable on demand. #define EGConnectionProtocol_TCP ((nByte)1) ///<Use TCP to connect to Photon. #define EGConnectionProtocol_DEFAULT ((nByte)UDP)
shaunstoltz/wax-prosemirror
wax-prosemirror-services/src/WaxToolGroups/TableToolGroupService/TableToolGroupService.js
import Service from '../../Service';
import Tables from './Tables';

/**
 * Wax service wiring the table tool group into the editor.
 *
 * On registration it binds the `Tables` tool group into the dependency
 * injection container under the 'Tables' key so other services and the
 * toolbar can resolve it.
 */
export default class TableToolGroupService extends Service {
  register() {
    const binding = this.container.bind('Tables');
    binding.to(Tables);
  }
}
kristianmandrup/feathers-rest-koa
test/koa/routes.test.js
import { assert, configure, Routes, createRoutes } from '../config';

// Tests for the Koa REST provider's Routes helper: setting the service
// uri must derive both the base route and the ':id' route.
describe('REST provider', function () {
  // Bind the shared configure helper to this suite and build the app once.
  let config = configure.bind(this);
  let app = config();

  describe('Routes', function () {
    const routes = createRoutes(app, { logging: true })

    it('can set service uri and route paths for base and :id', done => {
      let serviceName = 'person'
      // Assigning `uri` is expected to populate the derived route paths.
      routes.uri = serviceName
      assert.equal(routes._uri, serviceName)
      assert.equal(routes.baseRoute, serviceName)
      // The id route uses feathers' internal id placeholder.
      assert.equal(routes.idRoute, `${serviceName}/:__feathersId`)
      done()
    });
  });
});
REANNZ/saml-service
spec/factories/mdrpi_registration_policies.rb
# frozen_string_literal: true

# FactoryBot factory for MDRPI::RegistrationPolicy.  Inherits the
# localized-uri attributes from the :localized_uri factory and attaches an
# associated :mdrpi_registration_info record.
FactoryBot.define do
  factory :mdrpi_registration_policy, class: 'MDRPI::RegistrationPolicy', parent: :localized_uri do
    registration_info factory: :mdrpi_registration_info
  end
end
julien6387/supvisors
supvisors/tests/test_viewhostaddress.py
#!/usr/bin/python # -*- coding: utf-8 -*- # ====================================================================== # Copyright 2020 <NAME> # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ====================================================================== import pytest from supervisor.web import StatusView from unittest.mock import call, Mock from supvisors.viewcontext import CPU, INTF, ViewContext from supvisors.viewhandler import ViewHandler from supvisors.viewhostaddress import HostAddressView from supvisors.viewimage import address_cpu_img, address_io_img, address_mem_img from supvisors.webutils import HOST_NODE_PAGE from .base import DummyHttpContext @pytest.fixture def http_context(): return DummyHttpContext('ui/hostaddress.html') @pytest.fixture def view(http_context): """ Fixture for the instance to test. """ # apply the forced inheritance done in supvisors.plugin StatusView.__bases__ = (ViewHandler,) # create the instance to be tested return HostAddressView(http_context) def test_init(view): """ Test the values set at construction. """ assert view.page_name == HOST_NODE_PAGE def test_write_contents_no_plot(mocker, view): """ Test the write_contents method. 
""" mocked_network = mocker.patch('supvisors.viewhostaddress.HostAddressView.write_network_statistics') mocked_memory = mocker.patch('supvisors.viewhostaddress.HostAddressView.write_memory_statistics') mocked_processor = mocker.patch('supvisors.viewhostaddress.HostAddressView.write_processor_statistics') mocked_export = mocker.patch('supvisors.plot.StatisticsPlot.export_image') # force import error on SupvisorsPlot mocker.patch.dict('sys.modules', {'supvisors.plot': None}) # set context (meant to be set through render) dummy_stats = Mock(cpu='cpu', mem='mem', io='io') view.view_ctx = Mock(**{'get_node_stats.return_value': dummy_stats}) # replace root structure mocked_root = Mock() # test call view.write_contents(mocked_root) assert mocked_processor.call_args_list == [call(mocked_root, 'cpu')] assert mocked_memory.call_args_list == [call(mocked_root, 'mem')] assert mocked_network.call_args_list == [call(mocked_root, 'io')] assert not mocked_export.called def test_write_contents(mocker, view): """ Test the write_contents method. 
""" # skip test if matplotlib is not installed pytest.importorskip('matplotlib', reason='cannot test as optional matplotlib is not installed') mocked_network = mocker.patch('supvisors.viewhostaddress.HostAddressView.write_network_statistics') mocked_memory = mocker.patch('supvisors.viewhostaddress.HostAddressView.write_memory_statistics') mocked_processor = mocker.patch('supvisors.viewhostaddress.HostAddressView.write_processor_statistics') mocked_io = mocker.patch('supvisors.viewhostaddress.HostAddressView._write_io_image') mocked_mem = mocker.patch('supvisors.viewhostaddress.HostAddressView._write_mem_image') mocked_cpu = mocker.patch('supvisors.viewhostaddress.HostAddressView._write_cpu_image') # set context (meant to be set through render) dummy_stats = Mock(cpu='cpu', mem='mem', io='io') view.view_ctx = Mock(**{'get_node_stats.return_value': dummy_stats}) # replace root structure mocked_root = Mock() # test call view.write_contents(mocked_root) assert mocked_processor.call_args_list == [call(mocked_root, 'cpu')] assert mocked_memory.call_args_list == [call(mocked_root, 'mem')] assert mocked_network.call_args_list == [call(mocked_root, 'io')] assert mocked_cpu.call_args_list == [call('cpu')] assert mocked_mem.call_args_list == [call('mem')] assert mocked_io.call_args_list == [call('io')] def test_write_processor_single_title(view): """ Test the _write_processor_single_title method. 
""" # set context (meant to be set through render) view.view_ctx = Mock(**{'format_url.return_value': 'http://addr:port/index.html', 'cpu_id_to_string.return_value': '1'}) # replace root structure mocked_title_mid = Mock(attrib={}) mocked_tr = Mock(**{'findmeld.return_value': mocked_title_mid}) # in first call, elt is not the selected element view._write_processor_single_title(mocked_tr, 1, 0) assert mocked_tr.findmeld.call_args_list == [call('cpunum_a_mid')] assert mocked_title_mid.attrib == {} assert mocked_title_mid.attributes.call_args_list == [call(href='http://addr:port/index.html')] assert mocked_title_mid.content.call_args_list == [call('cpu#1')] mocked_tr.findmeld.reset_mock() mocked_title_mid.attributes.reset_mock() # in first call, elt is the selected element view._write_processor_single_title(mocked_tr, 1, 1) assert mocked_tr.findmeld.call_args_list == [call('cpunum_a_mid')] assert mocked_title_mid.attrib == {'class': 'button off active'} assert not mocked_title_mid.attributes.called assert mocked_title_mid.content.call_args_list == [call('cpu#1')] def test_write_processor_single_statistics(mocker, view): """ Test the _write_processor_single_statistics method. """ mocked_common = mocker.patch('supvisors.viewhostaddress.HostAddressView._write_common_statistics') # replace root element mocked_root = Mock() # test method call view._write_processor_single_statistics(mocked_root, [1.523, 2.456]) assert mocked_common.call_args_list == [call(mocked_root, [1.523, 2.456], 'cpuval_td_mid', 'cpuavg_td_mid', 'cpuslope_td_mid', 'cpudev_td_mid')] def test_write_processor_statistics(mocker, view): """ Test the write_processor_statistics method. 
""" mocked_stats = mocker.patch('supvisors.viewhostaddress.HostAddressView._write_processor_single_statistics') mocked_title = mocker.patch('supvisors.viewhostaddress.HostAddressView._write_processor_single_title') # set context (meant to be set through render) view.view_ctx = Mock(parameters={CPU: 1}) # build root structure mocked_trs = [Mock(attrib={}) for _ in range(2)] mocked_mid = Mock(**{'repeat.return_value': [(mocked_trs[0], 'cpu stats 0'), (mocked_trs[1], 'cpu stats 1')]}) mocked_root = Mock(**{'findmeld.return_value': mocked_mid}) # test call view.write_processor_statistics(mocked_root, []) assert mocked_title.call_args_list == [call(mocked_trs[0], 1, 0), call(mocked_trs[1], 1, 1)] assert mocked_stats.call_args_list == [call(mocked_trs[0], 'cpu stats 0'), call(mocked_trs[1], 'cpu stats 1')] assert mocked_trs[0].attrib == {'class': 'brightened'} assert mocked_trs[1].attrib == {'class': 'shaded'} def test_write_memory_statistics(mocker, view): """ Test the write_memory_statistics method. """ mocked_common = mocker.patch('supvisors.viewhostaddress.HostAddressView._write_common_statistics') # replace root element mocked_root = Mock() # test method call view.write_memory_statistics(mocked_root, [1.523, 2.456]) assert mocked_common.call_args_list == [call(mocked_root, [1.523, 2.456], 'memval_td_mid', 'memavg_td_mid', 'memslope_td_mid', 'memdev_td_mid')] def test_write_network_single_title(view): """ Test the _write_network_single_title method. 
""" # set context (meant to be set through render) view.view_ctx = Mock(**{'format_url.return_value': 'http://addr:port/index.html'}) # replace root structure mocked_href_mid = Mock(attrib={}) mocked_title_mid = Mock(attrib={}, **{'findmeld.return_value': mocked_href_mid}) mocked_tr = Mock(**{'findmeld.return_value': mocked_title_mid}) # in first call, elt is not the first line (rowspan False) view._write_network_single_title(mocked_tr, 'eth0', 'lo', False, True) assert mocked_tr.findmeld.call_args_list == [call('intf_td_mid')] assert mocked_title_mid.attrib == {} assert not mocked_title_mid.findmeld.called assert mocked_href_mid.attrib == {} assert mocked_title_mid.replace.call_args_list == [call('')] mocked_tr.findmeld.reset_mock() mocked_title_mid.replace.reset_mock() # in second call, elt is the first line (rowspan True), shaded and is not the selected interface view._write_network_single_title(mocked_tr, 'eth0', 'lo', True, True) assert mocked_tr.findmeld.call_args_list == [call('intf_td_mid')] assert mocked_title_mid.attrib == {'class': 'shaded', 'rowspan': '2'} assert mocked_title_mid.findmeld.call_args_list == [call('intf_a_mid')] assert mocked_href_mid.attrib == {} assert mocked_href_mid.attributes.call_args_list == [call(href='http://addr:port/index.html')] assert not mocked_title_mid.replace.called # reset context mocked_tr.findmeld.reset_mock() mocked_href_mid.attributes.reset_mock() mocked_title_mid.findmeld.reset_mock() del mocked_title_mid.attrib['class'] # in third call, elt is the first line (rowspan True), not shaded and is the selected interface view._write_network_single_title(mocked_tr, 'lo', 'lo', True, False) assert mocked_tr.findmeld.call_args_list == [call('intf_td_mid')] assert mocked_title_mid.attrib == {'class': 'brightened', 'rowspan': '2'} assert mocked_title_mid.findmeld.call_args_list == [call('intf_a_mid')] assert mocked_href_mid.attrib == {'class': 'button off active'} assert not mocked_href_mid.attributes.called assert not 
mocked_title_mid.replace.called def test_write_network_single_statistics(mocker, view): """ Test the _write_network_single_statistics method. """ mocked_common = mocker.patch('supvisors.viewhostaddress.HostAddressView._write_common_statistics') # replace root structure mocked_title_mid = Mock() mocked_tr = Mock(**{'findmeld.return_value': mocked_title_mid}) # in first call, test no rate, slope and standard deviation view._write_network_single_statistics(mocked_tr, [1.523, 2.456], False) assert mocked_tr.findmeld.call_args_list == [call('intfrxtx_td_mid')] assert mocked_title_mid.content.call_args_list == [call('Tx')] assert mocked_common.call_args_list == [call(mocked_tr, [1.523, 2.456], 'intfval_td_mid', 'intfavg_td_mid', 'intfslope_td_mid', 'intfdev_td_mid')] mocked_tr.reset_mock() mocked_title_mid.content.reset_mock() mocked_common.reset_mock() # in second call, test no rate, slope and standard deviation view._write_network_single_statistics(mocked_tr, [1.523, 2.456], True) assert mocked_tr.findmeld.call_args_list == [call('intfrxtx_td_mid')] assert mocked_title_mid.content.call_args_list == [call('Rx')] assert mocked_common.call_args_list == [call(mocked_tr, [1.523, 2.456], 'intfval_td_mid', 'intfavg_td_mid', 'intfslope_td_mid', 'intfdev_td_mid')] def test_write_network_statistics(mocker, view): """ Test the write_network_statistics method. 
""" mocked_stats = mocker.patch('supvisors.viewhostaddress.HostAddressView._write_network_single_statistics') mocked_title = mocker.patch('supvisors.viewhostaddress.HostAddressView._write_network_single_title') # set context (meant to be set through render) view.view_ctx = Mock(parameters={INTF: 'eth0'}) # build root structure mocked_trs = [Mock(attrib={}) for _ in range(4)] mocked_mid = Mock(**{'repeat.return_value': [(mocked_trs[0], ('lo', 'lo recv')), (mocked_trs[1], ('lo', 'lo sent')), (mocked_trs[2], ('eth0', 'eth0 recv')), (mocked_trs[3], ('eth0', 'eth0 sent'))]}) mocked_root = Mock(**{'findmeld.return_value': mocked_mid}) # test method with dummy stats dummy_stats = {'lo': ['lo recv', 'lo sent'], 'eth0': ['eth0 recv', 'eth0 sent']} view.write_network_statistics(mocked_root, dummy_stats) # check calls assert mocked_root.findmeld.call_args_list == [call('intf_tr_mid')] assert mocked_mid.repeat.call_args_list == [call([('lo', 'lo recv'), ('lo', 'lo sent'), ('eth0', 'eth0 recv'), ('eth0', 'eth0 sent')])] assert mocked_trs[0].attrib['class'] == 'brightened' assert mocked_trs[1].attrib['class'] == 'brightened' assert mocked_trs[2].attrib['class'] == 'shaded' assert mocked_trs[3].attrib['class'] == 'shaded' assert mocked_title.call_args_list == [call(mocked_trs[0], 'eth0', 'lo', True, False), call(mocked_trs[1], 'eth0', 'lo', False, False), call(mocked_trs[2], 'eth0', 'eth0', True, True), call(mocked_trs[3], 'eth0', 'eth0', False, True)] assert mocked_stats.call_args_list == [call(mocked_trs[0], 'lo recv', True), call(mocked_trs[1], 'lo sent', False), call(mocked_trs[2], 'eth0 recv', True), call(mocked_trs[3], 'eth0 sent', False)] def test_write_common_statistics(mocker, view): """ Test the _write_common_statistics method. 
""" mocked_class = mocker.patch('supvisors.viewhostaddress.HostAddressView.set_slope_class') mocked_stats = mocker.patch('supvisors.viewhostaddress.get_stats', side_effect=[(10.231, None, (None, 2), None), (8.999, 2, (-1.1, 4), 5.72)]) # replace root structure mocked_val_mid = Mock() mocked_avg_mid = Mock() mocked_slope_mid = Mock() mocked_dev_mid = Mock() mocked_tr = Mock(**{'findmeld.side_effect': [mocked_val_mid, mocked_avg_mid, mocked_val_mid, mocked_avg_mid, mocked_slope_mid, mocked_dev_mid]}) # in first call, test empty stats view._write_common_statistics(mocked_tr, [], 'val_mid', 'avg_mid', 'slope_mid', 'dev_mid') assert not mocked_tr.findmeld.called assert not mocked_stats.called assert not mocked_class.called assert not mocked_val_mid.called assert not mocked_avg_mid.called assert not mocked_slope_mid.called assert not mocked_dev_mid.called # in second call, test no rate, slope and standard deviation view._write_common_statistics(mocked_tr, [1.523, 2.456], 'val_mid', 'avg_mid', 'slope_mid', 'dev_mid') assert mocked_tr.findmeld.call_args_list == [call('val_mid'), call('avg_mid')] assert mocked_stats.call_args_list == [call([1.523, 2.456])] assert not mocked_class.called assert mocked_val_mid.content.call_args_list == [call('2.46')] assert mocked_avg_mid.content.call_args_list == [call('10.23')] assert not mocked_slope_mid.called assert not mocked_dev_mid.called mocked_stats.reset_mock() mocked_val_mid.content.reset_mock() mocked_avg_mid.content.reset_mock() # in third call, test no rate, slope and standard deviation view._write_common_statistics(mocked_tr, [1.523, 2.456], 'val_mid', 'avg_mid', 'slope_mid', 'dev_mid') assert mocked_stats.call_args_list == [call([1.523, 2.456])] assert mocked_class.call_args_list == [call(mocked_val_mid, 2)] assert mocked_tr.findmeld.call_args_list == [call('val_mid'), call('avg_mid'), call('val_mid'), call('avg_mid'), call('slope_mid'), call('dev_mid')] assert mocked_val_mid.content.call_args_list == [call('2.46')] assert 
mocked_avg_mid.content.call_args_list == [call('9.00')] assert mocked_slope_mid.content.call_args_list == [call('-1.10')] assert mocked_dev_mid.content.call_args_list == [call('5.72')] def test_write_cpu_image(mocker, view): """ Test the _write_cpu_image method. """ mocked_export = mocker.patch('supvisors.plot.StatisticsPlot.export_image') mocked_add = mocker.patch('supvisors.plot.StatisticsPlot.add_plot') # set context (meant to be set through render) view.view_ctx = Mock(parameters={CPU: 0}, **{'cpu_id_to_string.return_value': ViewContext.cpu_id_to_string(0)}) # just test calls to StatisticsPlot dummy_stats = ['#all stats', '#0 stats', '#1 stats'] view._write_cpu_image(dummy_stats) assert mocked_add.call_args_list == [call('CPU #all', '%', '#all stats')] assert mocked_export.call_args_list == [call(address_cpu_img)] def test_write_mem_image(mocker, view): """ Test the _write_mem_image method. """ mocked_export = mocker.patch('supvisors.plot.StatisticsPlot.export_image') mocked_add = mocker.patch('supvisors.plot.StatisticsPlot.add_plot') # just test calls to StatisticsPlot dummy_stats = ['mem 1', 'mem 2'] view._write_mem_image(dummy_stats) assert mocked_add.call_args_list == [call('MEM', '%', dummy_stats)] assert mocked_export.call_args_list == [call(address_mem_img)] def test_write_io_image(mocker, view): """ Test the _write_io_image method. """ mocked_export = mocker.patch('supvisors.plot.StatisticsPlot.export_image') mocked_add = mocker.patch('supvisors.plot.StatisticsPlot.add_plot') # set context (meant to be set through render) view.view_ctx = Mock(parameters={INTF: 'eth0'}) # just test calls to StatisticsPlot dummy_stats = {'lo': ['lo recv', 'lo sent'], 'eth0': ['eth0 recv', 'eth0 sent']} view._write_io_image(dummy_stats) assert mocked_add.call_args_list == [call('eth0 recv', 'kbits/s', 'eth0 recv'), call('eth0 sent', 'kbits/s', 'eth0 sent')] assert mocked_export.call_args_list == [call(address_io_img)]
naetherm/PixelLight
Plugins/PLCompositing/src/Shaders/Deferred/SRPDeferred.cpp
<filename>Plugins/PLCompositing/src/Shaders/Deferred/SRPDeferred.cpp /*********************************************************\ * File: SRPDeferred.cpp * * * Copyright (C) 2002-2013 The PixelLight Team (http://www.pixellight.org/) * * This file is part of PixelLight. * * Permission is hereby granted, free of charge, to any person obtaining a copy of this software * and associated documentation files (the "Software"), to deal in the Software without * restriction, including without limitation the rights to use, copy, modify, merge, publish, * distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the * Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all copies or * substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING * BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
\*********************************************************/ //[-------------------------------------------------------] //[ Includes ] //[-------------------------------------------------------] #include <PLMath/Vector2.h> #include <PLMath/Vector4.h> #include <PLRenderer/Renderer/Renderer.h> #include <PLRenderer/Renderer/VertexBuffer.h> #include "PLCompositing/Shaders/Deferred/SRPDeferred.h" //[-------------------------------------------------------] //[ Namespace ] //[-------------------------------------------------------] using namespace PLCore; using namespace PLMath; using namespace PLRenderer; namespace PLCompositing { //[-------------------------------------------------------] //[ RTTI interface ] //[-------------------------------------------------------] pl_class_metadata(SRPDeferred, "PLCompositing", PLScene::SceneRendererPass, "Abstract scene renderer pass for deferred rendering") pl_class_metadata_end(SRPDeferred) //[-------------------------------------------------------] //[ Public functions ] //[-------------------------------------------------------] /** * @brief * Returns the first found instance of a SRPDeferredGBuffer scene renderer pass instance within the scene renderer pipeline */ SRPDeferredGBuffer *SRPDeferred::GetGBuffer() const { static const String sClassName = "PLCompositing::SRPDeferredGBuffer"; return reinterpret_cast<SRPDeferredGBuffer*>(GetFirstInstanceOfSceneRendererPassClass(sClassName)); } /** * @brief * Returns the vertex buffer of this deferred scene renderer pass */ VertexBuffer *SRPDeferred::GetVertexBuffer() { // Initialize vertex buffer if (!m_pVertexBuffer) { // Get the renderer instance Renderer *pRenderer = GetRenderer(); if (pRenderer) { // Create the vertex buffer m_pVertexBuffer = pRenderer->CreateVertexBuffer(); // Add vertex position attribute to the vertex buffer m_pVertexBuffer->AddVertexAttribute(VertexBuffer::Position, 0, VertexBuffer::Float4); // Add texture coordinate attribute to the vertex buffer 
m_pVertexBuffer->AddVertexAttribute(VertexBuffer::TexCoord, 0, VertexBuffer::Float2); // Allocate m_pVertexBuffer->Allocate(4); // Fill if (m_pVertexBuffer->Lock(Lock::WriteOnly)) { // Vertex 0 - lower/left corner // Position float *pfVertex = static_cast<float*>(m_pVertexBuffer->GetData(0, VertexBuffer::Position)); pfVertex[Vector4::X] = -1.0f; pfVertex[Vector4::Y] = -1.0f; pfVertex[Vector4::Z] = 0.0f; pfVertex[Vector4::W] = 1.0f; // Texture coordinate 0 pfVertex = static_cast<float*>(m_pVertexBuffer->GetData(0, VertexBuffer::TexCoord, 0)); pfVertex[Vector2::X] = 0.0f; pfVertex[Vector2::Y] = 0.0f; // Vertex 1 - lower/right corner // Position pfVertex = static_cast<float*>(m_pVertexBuffer->GetData(1, VertexBuffer::Position)); pfVertex[Vector4::X] = 1.0f; pfVertex[Vector4::Y] = -1.0f; pfVertex[Vector4::Z] = 0.0f; pfVertex[Vector4::W] = 1.0f; // Texture coordinate 0 pfVertex = static_cast<float*>(m_pVertexBuffer->GetData(1, VertexBuffer::TexCoord, 0)); pfVertex[Vector2::X] = 1.0f; pfVertex[Vector2::Y] = 0.0f; // Vertex 2 - upper/left corner // Position pfVertex = static_cast<float*>(m_pVertexBuffer->GetData(2, VertexBuffer::Position)); pfVertex[Vector4::X] = -1.0f; pfVertex[Vector4::Y] = 1.0f; pfVertex[Vector4::Z] = 0.0f; pfVertex[Vector4::W] = 1.0f; // Texture coordinate 0 pfVertex = static_cast<float*>(m_pVertexBuffer->GetData(2, VertexBuffer::TexCoord, 0)); pfVertex[Vector2::X] = 0.0f; pfVertex[Vector2::Y] = 1.0f; // Vertex 3 - upper/right corner // Position pfVertex = static_cast<float*>(m_pVertexBuffer->GetData(3, VertexBuffer::Position)); pfVertex[Vector4::X] = 1.0f; pfVertex[Vector4::Y] = 1.0f; pfVertex[Vector4::Z] = 0.0f; pfVertex[Vector4::W] = 1.0f; // Texture coordinate 0 pfVertex = static_cast<float*>(m_pVertexBuffer->GetData(3, VertexBuffer::TexCoord, 0)); pfVertex[Vector2::X] = 1.0f; pfVertex[Vector2::Y] = 1.0f; // Unlock the vertex buffer m_pVertexBuffer->Unlock(); } } } // Return the vertex buffer return m_pVertexBuffer; } 
//[-------------------------------------------------------] //[ Protected functions ] //[-------------------------------------------------------] /** * @brief * Constructor */ SRPDeferred::SRPDeferred() : m_pVertexBuffer(nullptr) { } /** * @brief * Destructor */ SRPDeferred::~SRPDeferred() { // Destroy the vertex buffer if (m_pVertexBuffer) delete m_pVertexBuffer; } //[-------------------------------------------------------] //[ Namespace ] //[-------------------------------------------------------] } // PLCompositing
rikkuness/ebiten
internal/shader/testdata/issue1248.go
package main func Foo() vec2 { var a, b vec2 a, b = b, a var c, d, e vec2 c, d, e = d, e, c return a }
larrydiamond/hazelcast
hazelcast/src/test/java/com/hazelcast/client/config/XmlYamlClientConfigBuilderEqualsTest.java
/* * Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.client.config; import com.hazelcast.config.helpers.DeclarativeConfigFileHelper; import com.hazelcast.internal.nio.IOUtil; import com.hazelcast.test.HazelcastParallelClassRunner; import com.hazelcast.test.annotation.ParallelJVMTest; import com.hazelcast.test.annotation.QuickTest; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import static org.junit.Assert.assertEquals; @RunWith(HazelcastParallelClassRunner.class) @Category({QuickTest.class, ParallelJVMTest.class}) public class XmlYamlClientConfigBuilderEqualsTest { @Test public void testDefaultClientConfig() { ClientConfig xmlConfig = new ClientClasspathXmlConfig("hazelcast-client-default.xml"); ClientConfig yamlConfig = new ClientClasspathYamlConfig("hazelcast-client-default.yaml"); String xmlConfigFromXml = ClientConfigXmlGenerator.generate(xmlConfig); String xmlConfigFromYaml = ClientConfigXmlGenerator.generate(yamlConfig); assertEquals(xmlConfigFromXml, xmlConfigFromYaml); } @Test public void testFullExampleClientConfig() throws IOException { String fullExampleXml = readResourceToString("hazelcast-client-full-example.xml"); String fullExampleYaml = readResourceToString("hazelcast-client-full-example.yaml"); // remove imports to prevent the 
test from failing with importing non-existing files fullExampleXml = fullExampleXml.replace("<import resource=\"your-client-configuration-XML-file\"/>", ""); fullExampleYaml = fullExampleYaml .replace("\r", "") .replace("import:\n - your-client-configuration-YAML-file", ""); ClientConfig xmlConfig = buildConfigFromXml(fullExampleXml); ClientConfig yamlConfig = buildConfigFromYaml(fullExampleYaml); String xmlConfigFromXml = ClientConfigXmlGenerator.generate(xmlConfig, 4); String xmlConfigFromYaml = ClientConfigXmlGenerator.generate(yamlConfig, 4); assertEquals(xmlConfigFromXml, xmlConfigFromYaml); } @Test public void testFullClientConfig() throws IOException { String fullConfigXml = readResourceToString("hazelcast-client-full.xml"); String fullConfigYaml = readResourceToString("hazelcast-client-full.yaml"); // remove imports to prevent the test from failing with importing non-existing files fullConfigXml = fullConfigXml.replace("<import resource=\"your-client-configuration-XML-file\"/>", ""); fullConfigYaml = fullConfigYaml .replace("\r", "") .replace("import:\n - your-client-configuration-YAML-file", ""); ClientConfig xmlConfig = buildConfigFromXml(fullConfigXml); ClientConfig yamlConfig = buildConfigFromYaml(fullConfigYaml); String xmlConfigFromXml = ClientConfigXmlGenerator.generate(xmlConfig, 4); String xmlConfigFromYaml = ClientConfigXmlGenerator.generate(yamlConfig, 4); assertEquals(xmlConfigFromXml, xmlConfigFromYaml); } @Test public void testFullClientFailoverConfig() throws Exception { DeclarativeConfigFileHelper helper = new DeclarativeConfigFileHelper(); helper.givenXmlClientConfigFileOnClasspath("your-first-hazelcast-client-configuration.xml", "instance0"); helper.givenXmlClientConfigFileOnClasspath("your-second-hazelcast-client-configuration.xml", "instance1"); helper.givenYamlClientConfigFileOnClasspath("your-first-hazelcast-client-configuration.yaml", "instance0"); 
helper.givenYamlClientConfigFileOnClasspath("your-second-hazelcast-client-configuration.yaml", "instance1"); try { ClientFailoverConfig xmlConfig = new ClientFailoverClasspathXmlConfig("hazelcast-client-failover-full-example.xml"); ClientFailoverConfig yamlConfig = new ClientFailoverClasspathYamlConfig( "hazelcast-client-failover-full-example.yaml"); assertEquals(xmlConfig.getTryCount(), yamlConfig.getTryCount()); ClientConfig xmlClientConfig0 = xmlConfig.getClientConfigs().get(0); ClientConfig xmlClientConfig1 = xmlConfig.getClientConfigs().get(1); ClientConfig yamlClientConfig0 = yamlConfig.getClientConfigs().get(0); ClientConfig yamlClientConfig1 = yamlConfig.getClientConfigs().get(1); assertEquals("instance0", xmlClientConfig0.getInstanceName()); assertEquals("instance1", xmlClientConfig1.getInstanceName()); assertEquals("instance0", yamlClientConfig0.getInstanceName()); assertEquals("instance1", yamlClientConfig1.getInstanceName()); } finally { helper.ensureTestConfigDeleted(); } } private String readResourceToString(String resource) throws IOException { InputStream xmlInputStream = getClass().getClassLoader().getResourceAsStream(resource); return new String(IOUtil.toByteArray(xmlInputStream)); } private static ClientConfig buildConfigFromXml(String xml) { ByteArrayInputStream bis = new ByteArrayInputStream(xml.getBytes()); return new XmlClientConfigBuilder(bis).build(); } private static ClientConfig buildConfigFromYaml(String yaml) { ByteArrayInputStream bis = new ByteArrayInputStream(yaml.getBytes()); return new YamlClientConfigBuilder(bis).build(); } }
thewtex/RTK
include/rtkDownsampleImageFilter.h
<filename>include/rtkDownsampleImageFilter.h /*========================================================================= * * Copyright RTK Consortium * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0.txt * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * *=========================================================================*/ #ifndef rtkDownsampleImageFilter_h #define rtkDownsampleImageFilter_h #include "itkImageToImageFilter.h" namespace rtk { /** \class DownsampleImageFilter * \brief Downsamples an image by a factor in each dimension. * * This filter is inspired from Dan Mueller's GIFT package * http://www.insight-journal.org/browse/publication/103 * * \author <NAME> * * \ingroup RTK */ template <class TInputImage, class TOutputImage = TInputImage> class ITK_EXPORT DownsampleImageFilter: public itk::ImageToImageFilter<TInputImage,TOutputImage> { public: /** Standard class typedefs. */ typedef DownsampleImageFilter Self; typedef itk::ImageToImageFilter<TInputImage,TOutputImage> Superclass; typedef itk::SmartPointer<Self> Pointer; typedef itk::SmartPointer<const Self> ConstPointer; /** Method for creation through the object factory. */ itkNewMacro(Self); /** Run-time type information (and related methods). 
*/ itkTypeMacro(DownsampleImageFilter, ImageToImageFilter); /** Typedef to images */ typedef TOutputImage OutputImageType; typedef TInputImage InputImageType; typedef typename OutputImageType::Pointer OutputImagePointer; typedef typename InputImageType::Pointer InputImagePointer; typedef typename InputImageType::ConstPointer InputImageConstPointer; /** Typedef to describe the output image region type. */ typedef typename TOutputImage::RegionType OutputImageRegionType; /** ImageDimension enumeration. */ itkStaticConstMacro(ImageDimension, unsigned int, TInputImage::ImageDimension ); /** Set the downsample factors. Values are clamped to * a minimum value of 1.*/ void SetFactors(unsigned int factors[]); /** Sets the downsample factor for the given dimension. * All other dimensions are set to 1 */ void SetFactor(unsigned int dimension, unsigned int factor); /** DownsampleImageFilter produces an image which is a different * resolution and with a different pixel spacing than its input * image. As such, DownsampleImageFilter needs to provide an * implementation for GenerateOutputInformation() in order to inform * the pipeline execution model. The original documentation of this * method is below. * \sa ProcessObject::GenerateOutputInformaton() */ void GenerateOutputInformation() ITK_OVERRIDE; /** DownsampleImageFilter needs a larger input requested region than the output * requested region. As such, DownsampleImageFilter needs to provide an * implementation for GenerateInputRequestedRegion() in order to inform the * pipeline execution model. * \sa ProcessObject::GenerateInputRequestedRegion() */ void GenerateInputRequestedRegion() ITK_OVERRIDE; protected: DownsampleImageFilter(); virtual ~DownsampleImageFilter() ITK_OVERRIDE {} /** DownsampleImageFilter can be implemented as a multithreaded filter. * Therefore, this implementation provides a ThreadedGenerateData() routine * which is called for each processing thread. 
The output image data is * allocated automatically by the superclass prior to calling * ThreadedGenerateData(). ThreadedGenerateData can only write to the * portion of the output image specified by the parameter * "outputRegionForThread" * * \sa ImageToImageFilter::ThreadedGenerateData(), * ImageToImageFilter::GenerateData() */ // virtual void BeforeThreadedGenerateData(); #if ITK_VERSION_MAJOR<5 void ThreadedGenerateData(const OutputImageRegionType& outputRegionForThread, itk::ThreadIdType itkNotUsed(threadId)) ITK_OVERRIDE; #else void DynamicThreadedGenerateData(const OutputImageRegionType& outputRegionForThread) ITK_OVERRIDE; #endif // virtual void AfterThreadedGenerateData(); private: DownsampleImageFilter(const Self&); //purposely not implemented void operator=(const Self&); //purposely not implemented unsigned int m_Factors[ImageDimension]; int m_Offsets[ImageDimension]; }; } // end namespace rtk #ifndef rtk_MANUAL_INSTANTIATION #include "rtkDownsampleImageFilter.hxx" #endif #endif
hariPrasad525/Nitya_Annaccounting
src/com/nitya/accounter/web/client/ui/win8portlets/PortletData.java
<reponame>hariPrasad525/Nitya_Annaccounting package com.nitya.accounter.web.client.ui.win8portlets; import java.io.Serializable; import com.google.gwt.user.client.rpc.IsSerializable; public class PortletData implements Serializable, IsSerializable { /** * */ private static final long serialVersionUID = 1L; public PortletData() { } }
WhoBrokeTheBuild/Dusk-mk2
src/Dusk/Tracking/TrackedObject.hpp
<reponame>WhoBrokeTheBuild/Dusk-mk2<filename>src/Dusk/Tracking/TrackedObject.hpp #ifndef DUSK_TRACKING_TRACKED_OBJECT_HPP #define DUSK_TRACKING_TRACKED_OBJECT_HPP #include <Dusk/Config.hpp> #include <Dusk/Platform.hpp> #include <Dusk/Types.hpp> namespace dusk { #define DUSK_CLASSNAME(NAME) virtual inline string GetClassName() const override { return NAME; } class TrackedObject { public: virtual string GetClassName() const = 0; // Normal new operator void* operator new(size_t size); void* operator new[](size_t size); // Operator for placement new, takes in the filename and line number void* operator new(size_t size, int lineNumber, const char* filename); void* operator new[](size_t size, int lineNumber, const char* filename); // Normal delete operator void operator delete(void* ptr); void operator delete[](void* ptr); // Required because of the placement new operator, should not be used inline void operator delete(void* ptr, int lineNumber, const char* filename) { ::operator delete(ptr); } inline void operator delete[](void* ptr, int lineNumber, const char* filename) { ::operator delete[](ptr); } }; // class MemoryTrackedObject #ifdef DUSK_DEBUG_BUILD #define New new (__LINE__, __FILE__) #else #define New new #endif } // namespace Tracking #endif // DUSK_TRACKING_TRACKED_OBJECT_HPP
Nic30/hwtLib
hwtLib/amba/axis_comp/strformat_test.py
<filename>hwtLib/amba/axis_comp/strformat_test.py import unittest from hwt.interfaces.utils import addClkRstn, propagateClkRstn from hwt.simulator.simTestCase import SimTestCase from hwt.synthesizer.param import Param from hwt.synthesizer.unit import Unit from hwtLib.amba.axis import AxiStream, axis_recieve_bytes, axis_send_bytes from hwtLib.amba.axis_comp.strformat_fn import axiS_strFormat from hwtLib.types.ctypes import uint8_t from hwtSimApi.constants import CLK_PERIOD class _example_AxiS_strFormat_no_args(Unit): def _config(self): self.DATA_WIDTH = Param(8) def _declr(self): addClkRstn(self) with self._paramsShared(): self.out = AxiStream()._m() def _impl(self): o = axiS_strFormat(self, "f0", self.DATA_WIDTH, "test 1234") self.out(o) propagateClkRstn(self) class _example_AxiS_strFormat_args_numbers(_example_AxiS_strFormat_no_args): def _config(self): _example_AxiS_strFormat_no_args._config(self) self.FORMAT = Param("0b{0:08b}, 0o{0:04o}, {0:03d}, 0x{0:02x}, 0x{0:02X}") def _impl(self): n = self._sig("n", dtype=uint8_t) n(13) o = axiS_strFormat( self, "f0", self.DATA_WIDTH, self.FORMAT, n) self.out(o) propagateClkRstn(self) class _example_AxiS_strFormat_kwargs_numbers(_example_AxiS_strFormat_no_args): def _config(self): _example_AxiS_strFormat_no_args._config(self) self.FORMAT = "0b{arg0:08b}, 0o{arg0:04o}, {arg0:03d}, 0x{arg0:02x}, 0x{arg0:02X}" def _impl(self): n = self._sig("n", dtype=uint8_t) n(13) o = axiS_strFormat( self, "f0", self.DATA_WIDTH, self.FORMAT, arg0=n) self.out(o) propagateClkRstn(self) class _example_AxiS_strFormat_1x_str(Unit): def _config(self): self.DATA_WIDTH = Param(8) def _declr(self): addClkRstn(self) with self._paramsShared(): self.out = AxiStream()._m() self.str0 = AxiStream() def _impl(self): o = axiS_strFormat(self, "f0", self.DATA_WIDTH, "str0:{0:s}", self.str0) self.out(o) propagateClkRstn(self) class _example_AxiS_strFormat_3x_str(_example_AxiS_strFormat_1x_str): def _declr(self): super(_example_AxiS_strFormat_3x_str, 
self)._declr() with self._paramsShared(): self.str1 = AxiStream() self.str2 = AxiStream() def _impl(self): o = axiS_strFormat(self, "f0", self.DATA_WIDTH, "{0:s}{1:s}xyz{str2:s}", self.str0, self.str1, str2=self.str2) self.out(o) propagateClkRstn(self) class AxiS_strFormat_TC(SimTestCase): def tearDown(self): self.rmSim() SimTestCase.tearDown(self) def test_args_numbers(self): u = self.compileSimAndStart(_example_AxiS_strFormat_args_numbers()) self.runSim(200 * CLK_PERIOD) for _ in range(3): frame = axis_recieve_bytes(u.out) s = bytes(frame[1]).decode("utf-8") self.assertEqual(s, "0b{0:08b}, 0o{0:04o}, {0:03d}, 0x{0:02x}, 0x{0:02X}".format(13)) def test_kwargs_numbers(self): u = self.compileSimAndStart(_example_AxiS_strFormat_kwargs_numbers()) self.runSim(200 * CLK_PERIOD) for _ in range(3): frame = axis_recieve_bytes(u.out) s = bytes(frame[1]).decode("utf-8") self.assertEqual(s, "0b{0:08b}, 0o{0:04o}, {0:03d}, 0x{0:02x}, 0x{0:02X}".format(13)) def test_no_args(self): u = self.compileSimAndStart(_example_AxiS_strFormat_no_args()) self.randomize(u.out) self.runSim(50 * CLK_PERIOD) for _ in range(3): frame = axis_recieve_bytes(u.out) s = bytes(frame[1]).decode("utf-8") self.assertEqual(s, 'test 1234') def test_1x_str(self): u = self.compileSimAndStart(_example_AxiS_strFormat_1x_str()) self.randomize(u.out) self.randomize(u.str0) strings = ["test0", "x", "1234567890"] for s in strings: axis_send_bytes(u.str0, s.encode("utf-8")) self.runSim(200 * CLK_PERIOD) for s_ref in strings: frame = axis_recieve_bytes(u.out) s = bytes(frame[1]).decode("utf-8") self.assertEqual(s, "str0:{0:s}".format(s_ref)) def test_3x_str(self): u = self.compileSimAndStart(_example_AxiS_strFormat_3x_str()) self.randomize(u.out) self.randomize(u.str0) self.randomize(u.str1) self.randomize(u.str2) strings = [("test0", "str1", "str3"), ("x", "y", "z"), ("1234567890", "abc", "\t\n")] for s0, s1, s2 in strings: axis_send_bytes(u.str0, s0.encode("utf-8")) axis_send_bytes(u.str1, s1.encode("utf-8")) 
axis_send_bytes(u.str2, s2.encode("utf-8")) self.runSim(200 * CLK_PERIOD) for s_ref in strings: frame = axis_recieve_bytes(u.out) s = bytes(frame[1]).decode("utf-8") self.assertEqual(s, "{0:s}{1:s}xyz{2:s}".format(*s_ref)) if __name__ == "__main__": suite = unittest.TestSuite() # suite.addTest(AxiS_strFormat_TC('test_args_numbers')) suite.addTest(unittest.makeSuite(AxiS_strFormat_TC)) runner = unittest.TextTestRunner(verbosity=3) runner.run(suite) # from hwt.synthesizer.utils import to_rtl_str # u = _example_AxiS_strFormat_1x_str() # print(to_rtl_str(u))
aruniiird/zig
lib/libc/include/x86_64-linux-any/asm/mman.h
/* SPDX-License-Identifier: GPL-2.0 WITH Linux-syscall-note */
#ifndef _ASM_X86_MMAN_H
#define _ASM_X86_MMAN_H

#define MAP_32BIT	0x40		/* only give out 32bit addresses */

/* NOTE: vendored Linux UAPI header — keep byte-compatible with upstream.
 * The arch_* helpers below are only compiled when the kernel was built
 * with Intel Memory Protection Keys support. */
#ifdef CONFIG_X86_INTEL_MEMORY_PROTECTION_KEYS
/*
 * Take the 4 protection key bits out of the vma->vm_flags
 * value and turn them in to the bits that we can put in
 * to a pte.
 *
 * Only override these if Protection Keys are available
 * (which is only on 64-bit).
 */
#define arch_vm_get_page_prot(vm_flags)	__pgprot(	\
		((vm_flags) & VM_PKEY_BIT0 ? _PAGE_PKEY_BIT0 : 0) |	\
		((vm_flags) & VM_PKEY_BIT1 ? _PAGE_PKEY_BIT1 : 0) |	\
		((vm_flags) & VM_PKEY_BIT2 ? _PAGE_PKEY_BIT2 : 0) |	\
		((vm_flags) & VM_PKEY_BIT3 ? _PAGE_PKEY_BIT3 : 0))

#define arch_calc_vm_prot_bits(prot, key) (		\
		((key) & 0x1 ? VM_PKEY_BIT0 : 0) |	\
		((key) & 0x2 ? VM_PKEY_BIT1 : 0) |	\
		((key) & 0x4 ? VM_PKEY_BIT2 : 0) |	\
		((key) & 0x8 ? VM_PKEY_BIT3 : 0))
#endif

#include <asm-generic/mman.h>

#endif /* _ASM_X86_MMAN_H */
penhauer-xiao/jiebago
finalseg/prob_trans.go
<reponame>penhauer-xiao/jiebago package finalseg var probTrans = make(map[byte]map[byte]float64) func init() { probTrans['B'] = map[byte]float64{'E': -0.510825623765990, 'M': -0.916290731874155} probTrans['E'] = map[byte]float64{'B': -0.5897149736854513, 'S': -0.8085250474669937} probTrans['M'] = map[byte]float64{'E': -0.33344856811948514, 'M': -1.2603623820268226} probTrans['S'] = map[byte]float64{'B': -0.7211965654669841, 'S': -0.6658631448798212} }
celsiustx/metaflow
setup.py
from os.path import dirname, join

from setuptools import setup, find_packages

version = '2.3.5'


def _read_requirements(path):
    """Read a pip requirements file into a list of requirement strings.

    Blank lines and comment lines are skipped so they do not end up as
    empty/invalid entries in install_requires.
    """
    with open(path, 'r') as f:
        return [
            line.strip()
            for line in f
            if line.strip() and not line.lstrip().startswith('#')
        ]


# Resolve requirement files relative to this setup.py, not the CWD.
# (Renamed from `dir`, which shadowed the builtin.)
base_dir = dirname(__file__)
install_requires = _read_requirements(join(base_dir, 'requirements.txt'))
tests_require = _read_requirements(join(base_dir, 'requirements-test.txt'))

setup(
    name='metaflow',
    version=version,
    description='Metaflow: More Data Science, Less Engineering',
    author='Machine Learning Infrastructure Team at Netflix',
    author_email='<EMAIL>',
    license='Apache License 2.0',
    packages=find_packages(exclude=['metaflow_test']),
    py_modules=['metaflow', ],
    package_data={'metaflow': ['tutorials/*/*']},
    entry_points='''
        [console_scripts]
        metaflow=metaflow.main_cli:main
    ''',
    install_requires=install_requires,
    extras_require={
        'test': tests_require
    },
    tests_require=tests_require,
)
Sean10/Algorithm_code
leetcode_archived_cpp/LeetCode_61.cpp
/**
 * Definition for singly-linked list.
 * struct ListNode {
 *     int val;
 *     ListNode *next;
 *     ListNode(int x) : val(x), next(NULL) {}
 * };
 */
class Solution {
public:
    /**
     * Rotate the list to the right by k places and return the new head.
     *
     * Single-pass O(n) approach: measure the length while locating the old
     * tail, reduce k modulo the length, then split the (conceptual) ring at
     * position len - k. Replaces the original O(k * n) scheme that rotated
     * one node at a time, rescanning the list for each of the k steps.
     */
    ListNode* rotateRight(ListNode* head, int k) {
        if (k == 0 || !head || !head->next) return head;

        // Walk to the tail, counting nodes.
        int len = 1;
        ListNode* tail = head;
        while (tail->next) {
            tail = tail->next;
            ++len;
        }

        // Rotating by a multiple of the length is a no-op.
        k %= len;
        if (k == 0) return head;

        // The node at index len - k - 1 (0-based) becomes the new tail.
        ListNode* newTail = head;
        for (int i = 0; i < len - k - 1; ++i) {
            newTail = newTail->next;
        }

        // Split there and stitch the old tail onto the old head.
        ListNode* newHead = newTail->next;
        newTail->next = nullptr;
        tail->next = head;
        return newHead;
    }
};
tgonzales/zuen
src/h/reflow/elementsIsCustomNode.js
import * as f from '@f'
import isCustomElement from './isCustomElement'

/**
 * Valida se o tagName do elemento e vElemento sao um elemento customizado
 *
 * @name elementsIsCustomNode
 * @function
 * @access private
 * @param {HTMLElement} element Elemento html que esta no DOM
 * @param {HTMLElement} vElement Elemento virtual
 * @return {Boolean} Verdadeiro se os elementos forem um elemento customizado
 */
export default (element, vElement) => {
  const elementIsCustom = isCustomElement(element.tagName)
  const vElementIsCustom = isCustomElement(vElement.tagName)
  return f.and(elementIsCustom, vElementIsCustom)
}
xlmentx/Parallel-Computing
src/layer/softmax.h
#ifndef SRC_LAYER_SOFTMAX_H_
#define SRC_LAYER_SOFTMAX_H_

#include "../layer.h"

// Softmax activation layer.
// Declarations only — implementations live in the matching .cc/.cu file.
class Softmax: public Layer {
 public:
  // Forward pass over the layer input `bottom`
  // (presumably computes softmax activations into the layer's top blob —
  // confirm against the implementation file).
  void forward(const Matrix& bottom);

  // Backward pass: propagate `grad_top` (gradient w.r.t. this layer's
  // output) back through the softmax, given the forward input `bottom`.
  void backward(const Matrix& bottom, const Matrix& grad_top);
};

#endif  // SRC_LAYER_SOFTMAX_H_
jgbyrne/sway
sway/base64.c
<filename>sway/base64.c /* * Adapted from https://github.com/littlstar/b64.c * License under the MIT License: * Copyright (c) 2014 Little Star Media, Inc. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ #include <ctype.h> #include <stdlib.h> #include "util.h" static const char b64_table[] = { 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '+', '/' }; char *b64_encode(const char *src, size_t len, size_t *flen) { int i = 0; int j = 0; char *enc = NULL; size_t size = len * 4 / 3; size_t idx = 0; unsigned char buf[4]; char tmp[3]; // alloc enc = (char *) malloc(size + 1); if (NULL == enc) { return NULL; } // parse until end of source while (len--) { // read up to 3 bytes at a time into `tmp' tmp[i++] = *(src++); // if 3 bytes read then encode into `buf' if (3 == i) { buf[0] = (tmp[0] & 0xfc) >> 2; buf[1] = ((tmp[0] & 0x03) << 4) + ((tmp[1] & 0xf0) >> 4); buf[2] = ((tmp[1] & 0x0f) << 2) + ((tmp[2] & 0xc0) >> 6); buf[3] = tmp[2] & 0x3f; // shouldn't really happen if (idx + 4 > size) { size += 16; enc = (char *) realloc(enc, size + 1); } for (i = 0; i < 4; ++i) { enc[idx++] = b64_table[buf[i]]; } // reset index i = 0; } } // remainder if (i > 0) { // fill `tmp' with `\0' at most 3 times for (j = i; j < 3; ++j) { tmp[j] = '\0'; } // perform same codec as above buf[0] = (tmp[0] & 0xfc) >> 2; buf[1] = ((tmp[0] & 0x03) << 4) + ((tmp[1] & 0xf0) >> 4); buf[2] = ((tmp[1] & 0x0f) << 2) + ((tmp[2] & 0xc0) >> 6); buf[3] = tmp[2] & 0x3f; // perform same write to `enc` with new allocation size_t delta = (i > 3 ? 0 : 3 - i) + (j > i + 1 ? 
0 : i + 1 - j); if (idx + delta > size) { size += delta; enc = (char *) realloc(enc, size + 1); } for (j = 0; (j < i + 1); ++j) { enc[idx++] = b64_table[buf[j]]; } // while there is still a remainder // append `=' to `enc' while ((i++ < 3)) { enc[idx++] = '='; } } enc[idx] = '\0'; if (flen) *flen = size; return enc; } unsigned char *b64_decode(const char *src, size_t len, size_t *decsize) { int i = 0; int j = 0; int l = 0; // max size estimate size_t size = len * 3 / 4; size_t idx = 0; unsigned char *dec = NULL; unsigned char buf[3]; unsigned char tmp[4]; // alloc dec = (unsigned char *) malloc(size + 1); if (NULL == dec) { return NULL; } // parse until end of source while (len--) { if (isspace(src[j])) { j++; continue; } // break if char is `=' or not base64 char if ('=' == src[j]) { break; } if (!(isalnum(src[j]) || '+' == src[j] || '/' == src[j])) { break; } // read up to 4 bytes at a time into `tmp' tmp[i++] = src[j++]; // if 4 bytes read then decode into `buf' if (4 == i) { // translate values in `tmp' from table for (i = 0; i < 4; ++i) { // find translation char in `b64_table' for (l = 0; l < 64; ++l) { if (tmp[i] == b64_table[l]) { tmp[i] = l; break; } } } // decode buf[0] = (tmp[0] << 2) + ((tmp[1] & 0x30) >> 4); buf[1] = ((tmp[1] & 0xf) << 4) + ((tmp[2] & 0x3c) >> 2); buf[2] = ((tmp[2] & 0x3) << 6) + tmp[3]; // unlikely if (idx + 3 > size) { size += 16; dec = (unsigned char *) realloc(dec, size + 1); } if (dec != NULL){ for (i = 0; i < 3; ++i) { dec[idx++] = buf[i]; } } else { return NULL; } // reset i = 0; } } // remainder if (i > 0) { // fill `tmp' with `\0' at most 4 times for (j = i; j < 4; ++j) { tmp[j] = '\0'; } // translate remainder for (j = 0; j < 4; ++j) { // find translation char in `b64_table' for (l = 0; l < 64; ++l) { if (tmp[j] == b64_table[l]) { tmp[j] = l; break; } } } // decode remainder buf[0] = (tmp[0] << 2) + ((tmp[1] & 0x30) >> 4); buf[1] = ((tmp[1] & 0xf) << 4) + ((tmp[2] & 0x3c) >> 2); buf[2] = ((tmp[2] & 0x3) << 6) + tmp[3]; // 
write remainer decoded buffer to `dec' if (idx + (i - 1) > size) { size += 16; dec = (unsigned char *) realloc(dec, size + 1); } if (dec != NULL){ for (j = 0; (j < i - 1); ++j) { dec[idx++] = buf[j]; } } else { return NULL; } } dec[idx] = '\0'; // Return back the size of decoded string if demanded. if (decsize != NULL) { *decsize = size; } return dec; }
Rinqt/stock
frontend/src/app/main/companies/widgets/CompanyItem.js
import React, { Component } from "react"; import { withStyles } from "@material-ui/core/styles"; import ListItem from "@material-ui/core/ListItem"; import ListItemSecondaryAction from "@material-ui/core/ListItemSecondaryAction"; import ListItemText from "@material-ui/core/ListItemText"; import Checkbox from "@material-ui/core/Checkbox"; import { withRouter } from "react-router-dom"; import { updateHistory } from "../../../api/history.api"; import { bindActionCreators } from "redux"; import { connect } from "react-redux"; const styles = theme => ({ layoutRoot: {} }); class CompanyItem extends Component { state = { checked: false }; /** * Method to redirect user to single company details page */ loadCompany = () => { const { company, updateHistory } = this.props; this.props.history.push(`/app/company/${company.symbol}`); updateHistory(company); }; /** * Method to manage checkbox click */ handleToggle = () => () => { const { clickHandle, company } = this.props; this.setState({ checked: !this.state.checked }); clickHandle(company, !this.state.checked); }; render() { const { company } = this.props; return ( <ListItem button onClick={this.loadCompany}> <ListItemText primary={company.name} /> <ListItemSecondaryAction> <Checkbox edge="end" onChange={this.handleToggle()} checked={this.state.checked} /> </ListItemSecondaryAction> </ListItem> ); } } const mapDispatchToProps = dispatch => bindActionCreators( { updateHistory: updateHistory }, dispatch ); export default withStyles(styles, { withTheme: true })( withRouter(connect(null, mapDispatchToProps)(CompanyItem)) );
shuigedeng/taotao-cloud-paren
taotao-cloud-microservice/taotao-cloud-sys/taotao-cloud-sys-biz/src/main/java/com/taotao/cloud/sys/biz/mybatis/service/impl/SysDictItemServiceImpl.java
// package com.taotao.cloud.sys.biz.service.impl; // // import com.taotao.cloud.data.mybatis.plus.service.impl.SuperServiceImpl; // import com.taotao.cloud.sys.biz.entity.SysDictItem; // import com.taotao.cloud.sys.biz.mapper.SysDictItemMapper; // import com.taotao.cloud.sys.biz.service.ISysDictItemService; // import org.springframework.stereotype.Service; // // /** // * SysDictItemServiceImpl // * // * @author shuigedeng // * @since 2020/4/30 11:24 // */ // @Service // public class SysDictItemServiceImpl extends SuperServiceImpl<SysDictItemMapper, SysDictItem> implements ISysDictItemService { // // }
AndreyIg/debezium-incubator
debezium-connector-cassandra/src/main/java/io/debezium/connector/cassandra/transforms/type/deserializer/BasicTypeDeserializer.java
<reponame>AndreyIg/debezium-incubator /* * Copyright Debezium Authors. * * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0 */ package io.debezium.connector.cassandra.transforms.type.deserializer; import org.apache.avro.Schema; import org.apache.cassandra.db.marshal.AbstractType; public class BasicTypeDeserializer extends TypeDeserializer { private Schema schema; public BasicTypeDeserializer(Schema schema) { this.schema = schema; } @Override public Schema getSchema(AbstractType<?> abstractType) { return schema; } }
kambiz-aghaiepour/rally
tests/benchmark/scenarios/test_authenticate.py
# Copyright (C) 2014 Yahoo! Inc. All Rights Reserved. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.benchmark.scenarios.authenticate import authenticate from rally.benchmark.scenarios import base from tests import fakes from tests import test class AuthenticateTestCase(test.TestCase): @mock.patch("rally.osclients") def test_keystone(self, mock_osclients): fc = fakes.FakeClients() mock_osclients.Clients.return_value = fc scenario = authenticate.Authenticate(admin_clients=mock_osclients, clients=mock_osclients) scenario.keystone() self.assertEqual(scenario._clients.keystone.call_count, 1) @mock.patch("rally.osclients") @mock.patch("rally.osclients") def test_validate_glance(self, mock_admin_clients, mock_users_clients): images_list = [mock.Mock(), mock.Mock()] fc = fakes.FakeClients() mock_admin_clients.Clients.return_value = fc mock_users_clients.Clients.return_value = fc scenario = authenticate.Authenticate(admin_clients=mock_admin_clients, clients=mock_users_clients) scenario._clients.glance.images.list = mock.MagicMock( return_value=images_list) image_name = "__intentionally_non_existent_image___" with base.AtomicAction(scenario, "authenticate.validate_glance"): scenario.validate_glance(5) scenario._clients.glance().images.list.assert_called_with( name=image_name) self.assertEqual(scenario._clients.glance().images.list.call_count, 5) @mock.patch("rally.osclients") @mock.patch("rally.osclients") def test_validate_nova(self, 
mock_admin_clients, mock_users_clients): flavors_list = [mock.Mock(), mock.Mock()] fc = fakes.FakeClients() mock_admin_clients.clients.return_value = fc mock_users_clients.clients.return_value = fc scenario = authenticate.Authenticate(admin_clients=mock_admin_clients, clients=mock_users_clients) scenario._clients.nova.flavors.list = mock.MagicMock( return_value=flavors_list) with base.AtomicAction(scenario, "authenticate.validate_nova"): scenario.validate_nova(5) self.assertEqual(scenario._clients.nova().flavors.list.call_count, 5) @mock.patch("rally.osclients") @mock.patch("rally.osclients") def test_validate_cinder(self, mock_admin_clients, mock_users_clients): volume_types_list = [mock.Mock(), mock.Mock()] fc = fakes.FakeClients() mock_admin_clients.clients.return_value = fc mock_users_clients.clients.return_value = fc scenario = authenticate.Authenticate(admin_clients=mock_admin_clients, clients=mock_users_clients) scenario._clients.cinder.volume_types.list = mock.MagicMock( return_value=volume_types_list) with base.AtomicAction(scenario, "authenticate.validate_cinder"): scenario.validate_cinder(5) self.assertEqual(scenario._clients.cinder().volume_types. 
list.call_count, 5) @mock.patch("rally.osclients") @mock.patch("rally.osclients") def test_validate_neutron(self, mock_admin_clients, mock_users_clients): fc = fakes.FakeClients() mock_admin_clients.clients.return_value = fc mock_users_clients.clients.return_value = fc scenario = authenticate.Authenticate(admin_clients=mock_admin_clients, clients=mock_users_clients) scenario._clients.neutron.get_auth_info = mock.MagicMock() with base.AtomicAction(scenario, "authenticate.validate_neutron"): scenario.validate_neutron(5) self.assertEqual(scenario._clients.neutron().get_auth_info.call_count, 5) @mock.patch("rally.osclients") @mock.patch("rally.osclients") def test_validate_heat(self, mock_admin_clients, mock_users_clients): stacks_list = [mock.Mock(), mock.Mock()] fc = fakes.FakeClients() mock_admin_clients.clients.return_value = fc mock_users_clients.clients.return_value = fc scenario = authenticate.Authenticate(admin_clients=mock_admin_clients, clients=mock_users_clients) scenario._clients.heat.stacks.list = mock.MagicMock( return_value=stacks_list) with base.AtomicAction(scenario, "authenticate.validate_heat"): scenario.validate_heat(5) scenario._clients.heat().stacks.list.assert_called_with(limit=0) self.assertEqual(scenario._clients.heat().stacks.list.call_count, 5)
arbindo/mimock
mimock-ui/src/components/mockManagement/MockDetails/detailtoolbar/DetailToolbar.test.js
<gh_stars>1-10 import React from 'react'; import { render } from '@testing-library/react'; import DetailToolbar from './DetailToolbar'; import { BrowserRouter } from 'react-router-dom'; describe('DetailToolbar', () => { it('should render detail toolbar component', async () => { const tree = await render(<DetailToolbar />, { wrapper: BrowserRouter }); const { container, getByTestId } = tree; expect(getByTestId('detail-toolbar-container')).toBeInTheDocument(); expect(container).toMatchSnapshot(); }); });
FreeApophis/arduino-christmas-lights
ChristmasLightsController/clearance/ClearFade.h
#pragma once

#include "Clearance.h"

// Clearance strategy that clears the strip by fading
// (presumably decrementing brightness each frame by _decrement —
// confirm against the implementation file).
class ClearFade final : public Clearance
{
public:
    // strip: the LED strip to clear; ownership stays with the caller.
    explicit ClearFade(AbstractLedStrip* strip);

    auto Init() -> void override;
    auto Show() -> void override;

private:
    byte _decrement;  // per-frame fade step used by Show()
};
JULIELab/jcore-dependencies
dragontool/src/main/java/dragon/ml/seqmodel/data/BasicDataset.java
package dragon.ml.seqmodel.data;

import java.util.Vector;

/**
 * <p>Basic data structure of a set of sequence data</p>
 * <p></p>
 * <p>Copyright: Copyright (c) 2005</p>
 * <p>Company: IST, Drexel University</p>
 * @author <NAME>
 * @version 1.0
 */
public class BasicDataset implements Dataset{
    // Backing storage for the DataSequence elements.
    private Vector vector;
    // labelNum = originalLabelNum ^ markovOrder (label space of the expanded model).
    private int originalLabelNum, labelNum, markovOrder;
    // Cursor for the startScan()/hasNext()/next() iteration protocol.
    private int curPos;

    public BasicDataset(int originalLabelNum, int markovOrder) {
        vector=new Vector();
        this.originalLabelNum=originalLabelNum;
        this.markovOrder=markovOrder;
        labelNum=1;
        for(int i=0;i<markovOrder; i++)
            labelNum*=originalLabelNum;
    }

    /**
     * Deep-copies this dataset (each sequence is copied via DataSequence.copy()).
     *
     * Bug fix: the original implementation iterated with this dataset's own
     * shared scan cursor (startScan()/next()), so calling copy() silently
     * reset and consumed any in-progress traversal by other code. Iterating
     * by index leaves curPos untouched.
     */
    public Dataset copy(){
        BasicDataset dataset=new BasicDataset(originalLabelNum,markovOrder);
        for(int i=0;i<vector.size();i++){
            dataset.add(((DataSequence)vector.get(i)).copy());
        }
        return dataset;
    }

    public int size(){
        return vector.size();
    }

    // Reset the scan cursor to the first sequence.
    public void startScan(){
        curPos=0;
    }

    public boolean hasNext(){
        return curPos<vector.size();
    }

    public DataSequence next(){
        curPos++;
        return (DataSequence)vector.get(curPos-1);
    }

    // Adds a sequence and adopts it (sets this dataset as its parent).
    public boolean add(DataSequence seq){
        seq.setParent(this);
        vector.add(seq);
        return true;
    }

    public int getLabelNum(){
        return labelNum;
    }

    public int getOriginalLabelNum() {
        return originalLabelNum;
    }

    public int getMarkovOrder() {
        return markovOrder;
    }
}
deltacat/dbstress
cmd/root.go
package cmd import ( "fmt" "os" "strings" "github.com/deltacat/dbstress/config" "github.com/sirupsen/logrus" "github.com/spf13/cobra" ) var rootCmd = &cobra.Command{ Use: "dbstress", Short: "Create artificial load on an InfluxDB/MySQL instance", Long: "This application create stress test on influxdb or mysql.\nPlease rename dbstress.sample.toml to dbstress.toml then make necessary change", PersistentPreRun: runRootPersistentPre, } var ( cfg config.Config // global configure holder pps uint64 quiet bool strict, kapacitorMode bool tlsSkipVerify bool measurement, seriesKey, fieldStr string ) // Execute run root cmd func Execute(v VersionInfo) { version = v if err := rootCmd.Execute(); err != nil { fmt.Println(err) os.Exit(-1) } } func init() { cobra.OnInitialize(initConfig) setDefaultConfig() rootCmd.PersistentFlags().Uint64VarP(&pps, "pps", "", 200000, "Points Per Second") rootCmd.PersistentFlags().BoolVarP(&quiet, "quiet", "q", false, "Only print the write throughput") rootCmd.PersistentFlags().BoolVarP(&kapacitorMode, "kapacitor", "k", false, "Use Kapacitor mode, namely do not try to run any queries.") rootCmd.PersistentFlags().BoolVarP(&strict, "strict", "", false, "Strict mode will exit as soon as an error or unexpected status is encountered") loggerFormatter := new(logrus.TextFormatter) loggerFormatter.TimestampFormat = "2006-01-02 15:04:05" loggerFormatter.FullTimestamp = true loggerFormatter.PadLevelText = true logrus.SetFormatter(loggerFormatter) } func runRootPersistentPre(cmd *cobra.Command, args []string) { cfg = config.Cfg measurement = cfg.Points.Measurement seriesKey = cfg.Points.SeriesKey fieldStr = cfg.Points.FieldsStr if !strings.Contains(seriesKey, ",") && !strings.Contains(seriesKey, "=") { logrus.Warnf("expect series like 'ctr,some=tag', got '%s'", seriesKey) os.Exit(1) return } }
jzonthemtn/nlp-building-blocks
idyl-e3-entity-extraction-engine/idyl-e3-system-initializer/src/main/java/com/mtnfog/idyl/e3/system/initializer/NativeSystemInitializerService.java
/*******************************************************************************
 * Copyright 2019 Mountain Fog, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy
 * of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 ******************************************************************************/
package com.mtnfog.idyl.e3.system.initializer;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.stereotype.Component;

import com.mtnfog.idyl.e3.model.Backend;
import com.mtnfog.idyl.e3.model.services.SystemInitializerService;

/**
 * Initializes a CPU system.
 *
 * Native (CPU) backend: initialization only logs the available logical
 * thread count — there is no device setup to perform.
 *
 * @author Mountain Fog, Inc.
 *
 */
@Component
public class NativeSystemInitializerService implements SystemInitializerService {

	private static final Logger LOGGER = LogManager.getLogger(NativeSystemInitializerService.class);

	@Override
	public void initialize() {
		LOGGER.info("Initializing native system with {} logical thread(s).", getProcessors());
	}

	/**
	 * {@inheritDoc}
	 * This will give you the number of logical threads.
	 * If you have hyper-threading on, this will be double the number of cores.
	 */
	@Override
	public int getProcessors() {
		return Runtime.getRuntime().availableProcessors();
	}

	@Override
	public Backend getBackend() {
		return Backend.NATIVE;
	}

}
martin-jordan/service-manual-publisher
spec/helpers/redirect_destination_helper_spec.rb
require "rails_helper" RSpec.describe RedirectDestinationHelper, "#redirect_destination_select_options", type: :helper do it "should include all published guides ordered by slug" do create( :guide, :with_published_edition, slug: "/service-manual/agile-delivery/team-wall", ) create( :guide, :with_published_edition, slug: "/service-manual/agile-delivery/core-principles-agile", ) create(:guide, :has_been_unpublished) create(:guide, :with_draft_edition) expect(helper.redirect_destination_select_options).to include( "Guides" => [ "/service-manual/agile-delivery/core-principles-agile", "/service-manual/agile-delivery/team-wall", ], ) end it "should include the homepage and the service standard" do expect(helper.redirect_destination_select_options).to include( "Other" => ["/service-manual", "/service-manual/service-standard"], ) end it "should include all topics with sub sections" do topic = create(:topic, path: "/service-manual/agile-delivery") create(:topic_section, title: "Working with agile methods", topic: topic) create(:topic_section, title: "Governing agile services", topic: topic) expect(helper.redirect_destination_select_options).to include( "Topics" => [ "/service-manual/agile-delivery", ["/service-manual/agile-delivery → Governing agile services", "/service-manual/agile-delivery#governing-agile-services"], ["/service-manual/agile-delivery → Working with agile methods", "/service-manual/agile-delivery#working-with-agile-methods"], ], ) end it "should exclude topic sections without titles" do topic = create(:topic, path: "/service-manual/agile-delivery") create(:topic_section, title: "", topic: topic) expect(helper.redirect_destination_select_options).to include( "Topics" => ["/service-manual/agile-delivery"], ) end end
abagusetty/Uintah
src/StandAlone/tools/extractors/lineextract.cc
/* * The MIT License * * Copyright (c) 1997-2021 The University of Utah * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to * deal in the Software without restriction, including without limitation the * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or * sell copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS * IN THE SOFTWARE. 
*/ /* * lineextract.cc: Print out a uintah data archive * * Written by: * <NAME> * Department of Mechancial Engineering * by stealing timeextract from: * <NAME> * <NAME> * Department of Computer Science * University of Utah * June 2004 * */ #include <Core/DataArchive/DataArchive.h> #include <Core/Disclosure/TypeDescription.h> #include <Core/Geometry/Point.h> #include <Core/Geometry/Vector.h> #include <Core/Grid/Box.h> #include <Core/Grid/Grid.h> #include <Core/Grid/Level.h> #include <Core/Grid/Variables/CellIterator.h> #include <Core/Grid/Variables/NodeIterator.h> #include <Core/Grid/Variables/ParticleVariable.h> #include <Core/Grid/Variables/SFCXVariable.h> #include <Core/Grid/Variables/SFCYVariable.h> #include <Core/Grid/Variables/SFCZVariable.h> //#include <Core/Grid/Variables/ShareAssignParticleVariable.h> #include <Core/Math/Matrix3.h> #include <Core/Math/MinMax.h> #include <Core/OS/Dir.h> #include <Core/Parallel/Parallel.h> #include <algorithm> #include <cstdio> #include <fstream> #include <iomanip> #include <iostream> #include <sstream> #include <string> #include <vector> using namespace std; using namespace Uintah; bool verbose = false; bool quiet = false; bool pad = false; bool d_printCell_coords = false; bool d_printNode_coords = false; void usage( const std::string & badarg, const std::string & progname ) { if(badarg != "") { cerr << "Error parsing argument: " << badarg << endl; } cerr << "Usage: " << progname << " [options] " << "-uda <archive file>\n\n"; cerr << "Valid options are:\n"; cerr << " -h, --help\n"; cerr << " -v, --variable: <variable name>\n"; cerr << " -m, --material: <material number> [defaults to 0]\n"; cerr << " -tlow, --timesteplow: [int] (sets start output timestep to int) [defaults to 0]\n"; cerr << " -thigh, --timestephigh: [int] (sets end output timestep to int) [defaults to last timestep]\n"; cerr << " -timestep, --timestep: [int] (only outputs from timestep int) [defaults to 0]\n"; cerr << " -istart, --indexs: <i> <j> <k> [ints] 
starting point cell index [defaults to 0 0 0]\n"; cerr << " -iend, --indexe: <i> <j> <k> [ints] end-point cell index [defaults to 0 0 0]\n"; cerr << " -startPt <x> <y> <z> [doubles] starting point of line in physical coordinates\n"; cerr << " -endPt <x> <y> <z> [doubles] end-point of line in physical coordinates\n"; cerr << " -pr, --precision: [int] (specify precision of output data) [defaults to 16. maximum 32]\n"; cerr << " -l, --level: [int] (level index to query range from) [defaults to 0]\n"; cerr << " -o, --out: <outputfilename> [defaults to stdout]\n"; cerr << " -vv, --verbose: (prints status of output)\n"; cerr << " -ni, --noindex: Do not print out the cell indices. Only print the value."; cerr << " -q, --quiet: (only print data values)\n"; cerr << " -pad, --pad: (print zero values for cell locations not currently in the specified level)\n"; cerr << " -cellCoords: (prints the cell centered coordinates on that level)\n"; cerr << " -nodeCoords: (prints the node centered coordinates on that level)\n"; cerr << " --cellIndexFile: <filename> (file that contains a list of cell indices)\n"; cerr << " [int 100, 43, 0]\n"; cerr << " [int 101, 43, 0]\n"; cerr << " [int 102, 44, 0]\n"; cerr << "----------------------------------------------------------------------------------------\n"; cerr << " For particle variables the average over all particles in a cell is returned.\n"; exit(1); } // arguments are the dataarchive, the successive arguments are the same as // the arguments to archive->query for data values. Then comes a type // dexcription of the variable being queried, and last is an output stream. 
//______________________________________________________________________ // template<class T> void printData( DataArchive * archive, string & variable_name, const Uintah::TypeDescription * variable_type, int material, const bool use_cellIndex_file, int levelIndex, IntVector & var_start, IntVector & var_end, vector<IntVector> cells, unsigned long time_start, unsigned long time_end, unsigned long output_precision, const bool printValueOnly, ostream & out ) { // Query time info from dataarchive. vector<int> index; vector<double> times; archive->queryTimesteps(index, times); ASSERTEQ(index.size(), times.size()); if( !quiet ){ cout << "There are " << index.size() << " timesteps\n"; } // set default max time value if (time_end == (unsigned long)-1) { if (verbose) { cout <<"Initializing time_step_upper to "<<times.size()-1<<"\n"; } time_end = times.size() - 1; } //__________________________________ // bullet proofing if (time_end >= times.size() || time_end < time_start) { cerr << "timestephigh("<<time_end<<") must be greater than " << time_start << " and less than " << times.size()-1 << endl; exit(1); } if (time_start >= times.size() || time_end > times.size()) { cerr << "timestep must be between 0 and " << times.size()-1 << endl; exit(1); } //__________________________________ // make sure the user knows it could be really slow if he // tries to output a big range of data... 
IntVector var_range = var_end - var_start; if (var_range.x() && var_range.y() && var_range.z()) { cerr << "PERFORMANCE WARNING: Outputting over 3 dimensions!\n"; } else if ((var_range.x() && var_range.y()) || (var_range.x() && var_range.z()) || (var_range.y() && var_range.z())){ cerr << "PERFORMANCE WARNING: Outputting over 2 dimensions\n"; } // set defaults for output stream out.setf(ios::scientific,ios::floatfield); out.precision(output_precision); //__________________________________ // loop over timesteps for (unsigned long time_step = time_start; time_step <= time_end; time_step++) { cerr << "%outputting for times["<<time_step<<"] = " << times[time_step]<< endl; //__________________________________ // does the requested level exist bool levelExists = false; GridP grid = archive->queryGrid(time_step); int numLevels = grid->numLevels(); for (int L = 0;L < numLevels; L++) { const LevelP level = grid->getLevel(L); if (level->getIndex() == levelIndex){ levelExists = true; } } if (!levelExists){ cerr<< " Level " << levelIndex << " does not exist at this timestep " << time_step << endl; } if(levelExists){ // only extract data if the level exists const LevelP level = grid->getLevel(levelIndex); //__________________________________ // User input starting and ending indicies if(!use_cellIndex_file) { // find the corresponding patches Level::selectType patches; level->selectPatches(var_start, var_end + IntVector(1,1,1), patches,true); if( patches.size() == 0){ cerr << " Could not find any patches on Level " << level->getIndex() << " that contain cells along line: " << var_start << " and " << var_end << " Double check the starting and ending indices "<< endl; exit(1); } // query all the data up front vector<Variable*> vars(patches.size()); for (unsigned int p = 0; p < patches.size(); p++) { if (patches[p]->isVirtual()) continue; switch (variable_type->getType()) { case Uintah::TypeDescription::CCVariable: vars[p] = scinew CCVariable<T>; archive->query( 
*(CCVariable<T>*)vars[p], variable_name, material, patches[p], time_step); break; case Uintah::TypeDescription::NCVariable: vars[p] = scinew NCVariable<T>; archive->query( *(NCVariable<T>*)vars[p], variable_name, material, patches[p], time_step); break; case Uintah::TypeDescription::SFCXVariable: vars[p] = scinew SFCXVariable<T>; archive->query( *(SFCXVariable<T>*)vars[p], variable_name, material, patches[p], time_step); break; case Uintah::TypeDescription::SFCYVariable: vars[p] = scinew SFCYVariable<T>; archive->query( *(SFCYVariable<T>*)vars[p], variable_name, material, patches[p], time_step); break; case Uintah::TypeDescription::SFCZVariable: vars[p] = scinew SFCZVariable<T>; archive->query( *(SFCZVariable<T>*)vars[p], variable_name, material, patches[p], time_step); break; default: cerr << "Unknown variable type: " << variable_type->getName() << endl; } } for (CellIterator ci(var_start, var_end + IntVector(1,1,1)); !ci.done(); ci++) { IntVector c = *ci; // find out which patch the variable is on unsigned int p = 0; bool foundCell = false; Vector dx = level->dCell(); Vector shift(0,0,0); // shift the cellPosition if it's a (X,Y,Z)FC variable T val = T(); for (; p < patches.size(); p++) { const Patch* patch = patches[p]; if(patch->isVirtual()){ continue; } switch (variable_type->getType()) { case Uintah::TypeDescription::CCVariable: if(patch->containsCell(c)){ val = (*dynamic_cast<CCVariable<T>*>(vars[p]))[c]; foundCell = true; } break; case Uintah::TypeDescription::NCVariable: if(patch->containsNode(c)){ val = (*dynamic_cast<NCVariable<T>*>(vars[p]))[c]; foundCell = true; } break; case Uintah::TypeDescription::SFCXVariable: if(patch->containsSFCX(c)){ val = (*dynamic_cast<SFCXVariable<T>*>(vars[p]))[c]; shift.x(-dx.x()/2.0); foundCell = true; } break; case Uintah::TypeDescription::SFCYVariable: if(patch->containsSFCY(c)){ val = (*dynamic_cast<SFCYVariable<T>*>(vars[p]))[c]; shift.y(-dx.y()/2.0); foundCell = true; } break; case 
Uintah::TypeDescription::SFCZVariable: if(patch->containsSFCY(c)){ val = (*dynamic_cast<SFCZVariable<T>*>(vars[p]))[c]; shift.z(-dx.z()/2.0); foundCell = true; } break; default: break; } } // patch loop if(foundCell){ if(d_printCell_coords){ Point point = level->getCellPosition(c); Vector here = point.asVector() + shift; out << here.x() << " "<< here.y() << " " << here.z() << " "<<val << endl;; } else if(d_printNode_coords){ Point point = level->getNodePosition(c); Vector here = point.asVector() + shift; out << here.x() << " "<< here.y() << " " << here.z() << " "<<val << endl;; } else if (printValueOnly) { out << val << endl; } else{ out << c.x() << " "<< c.y() << " " << c.z() << " "<< val << endl;; } } else{ if(pad){ if(d_printCell_coords){ Point point = level->getCellPosition(c); Vector here = point.asVector() + shift; out << here.x() << " "<< here.y() << " " << here.z() << " "<< val << endl;; }else if(d_printNode_coords){ Point point = level->getNodePosition(c); Vector here = point.asVector() + shift; out << here.x() << " "<< here.y() << " " << here.z() << " "<< val << endl;; } else if (printValueOnly) { out << val << endl;; } else{ out << c.x() << " "<< c.y() << " " << c.z() << " "<< val << endl;; } }// if pad with zeros } } // cell iterator for (unsigned i = 0; i < vars.size(); i++) delete vars[i]; } //__________________________________ // If the cell indicies were read from a file. 
if(use_cellIndex_file) { for (unsigned int i = 0; i< cells.size(); i++) { IntVector c = cells[i]; vector<T> values; try { archive->query(values, variable_name, material, c, times[time_step], times[time_step], levelIndex); } catch (const VariableNotFoundInGrid& exception) { cerr << "Caught VariableNotFoundInGrid Exception: " << exception.message() << endl; exit(1); } if(d_printCell_coords){ Point p = level->getCellPosition(c); out << p.x() << " "<< p.y() << " " << p.z() << " "<< values[0] << endl; }else if(d_printCell_coords){ Point p = level->getCellPosition(c); out << p.x() << " "<< p.y() << " " << p.z() << " "<< values[0] << endl; }else{ out << c.x() << " "<< c.y() << " " << c.z() << " "<< values[0] << endl; } } } out << endl; } // if level exists } // timestep loop } //______________________________________________________________________ // compute the average of all particles. template<class T> void compute_ave(ParticleVariable<T>& var, CCVariable<T>& ave, ParticleVariable<Point>& pos, const Patch* patch) { IntVector lo = patch->getExtraCellLowIndex(); IntVector hi = patch->getExtraCellHighIndex(); ave.allocate(lo,hi); T zero(0); ave.initialize(zero); CCVariable<double> count; count.allocate(lo,hi); count.initialize(0.0); ParticleSubset* pset = var.getParticleSubset(); if(pset->numParticles() > 0){ ParticleSubset::iterator iter = pset->begin(); for( ;iter != pset->end(); iter++ ){ IntVector c; patch->findCell(pos[*iter], c); ave[c] = ave[c] + var[*iter]; count[c] += 1; } for(CellIterator iter=patch->getCellIterator(); !iter.done(); iter++){ IntVector c = *iter; ave[c] = ave[c]/(count[c] + 1e-100); } } } //______________________________________________________________________ // Used for Particle Variables template<class T> void printData_PV( DataArchive * archive, string & variable_name, const Uintah::TypeDescription * variable_type, int material, const bool use_cellIndex_file, int levelIndex, IntVector & var_start, IntVector & var_end, vector<IntVector> 
cells, unsigned long time_start, unsigned long time_end, unsigned long output_precision, ostream & out ) { // query time info from dataarchive vector<int> index; vector<double> times; archive->queryTimesteps(index, times); ASSERTEQ(index.size(), times.size()); if (!quiet){ cout << "There are " << index.size() << " timesteps\n"; } // set default max time value if (time_end == (unsigned long)-1) { if (verbose) { cout <<"Initializing time_step_upper to "<<times.size()-1<<"\n"; } time_end = times.size() - 1; } //__________________________________ // bullet proofing if (time_end >= times.size() || time_end < time_start) { cerr << "timestephigh("<<time_end<<") must be greater than " << time_start << " and less than " << times.size()-1 << endl; exit(1); } if (time_start >= times.size() || time_end > times.size()) { cerr << "timestep must be between 0 and " << times.size()-1 << endl; exit(1); } //__________________________________IntVector c = cells[i]; // make sure the user knows it could be really slow if he // tries to output a big range of data... 
IntVector var_range = var_end - var_start; if (var_range.x() && var_range.y() && var_range.z()) { cerr << "PERFORMANCE WARNING: Outputting over 3 dimensions!\n"; } else if ((var_range.x() && var_range.y()) || (var_range.x() && var_range.z()) || (var_range.y() && var_range.z())){ cerr << "PERFORMANCE WARNING: Outputting over 2 dimensions\n"; } // set defaults for output stream out.setf(ios::scientific,ios::floatfield); out.precision(output_precision); //__________________________________ // loop over timesteps for (unsigned long time_step = time_start; time_step <= time_end; time_step++) { cerr << "%outputting for times["<<time_step<<"] = " << times[time_step]<< endl; //__________________________________ // does the requested level exist bool levelExists = false; GridP grid = archive->queryGrid(time_step); int numLevels = grid->numLevels(); for (int L = 0;L < numLevels; L++) { const LevelP level = grid->getLevel(L); if (level->getIndex() == levelIndex){ levelExists = true; } } if (!levelExists){ cerr<< " Level " << levelIndex << " does not exist at this timestep " << time_step << endl; } if(levelExists){ // only extract data if the level exists const LevelP level = grid->getLevel(levelIndex); // find the corresponding patches Level::selectType patches; level->selectPatches(var_start, var_end + IntVector(1,1,1), patches,true); if( patches.size() == 0){ cerr << " Could not find any patches on Level " << level->getIndex() << " that contain cells along line: " << var_start << " and " << var_end << " Double check the starting and ending indices "<< endl; exit(1); } // query all the data and compute the average up front vector<Variable*> vars(patches.size()); vector<Variable*> ave(patches.size()); for (unsigned int p = 0; p < patches.size(); p++) { vars[p] = scinew ParticleVariable<T>; ave[p] = scinew CCVariable<T>; archive->query( *(ParticleVariable<T>*)vars[p], variable_name, material, patches[p], time_step); Variable* pos; pos = scinew ParticleVariable<Point>; 
archive->query( *(ParticleVariable<Point>*)pos, "p.x", material, patches[p], time_step); compute_ave<T>(*(ParticleVariable<T>*)vars[p], *(CCVariable<T>*)ave[p], *(ParticleVariable<Point>*)pos, patches[p]); } //__________________________________ // User input starting and ending indicies if(!use_cellIndex_file) { for (CellIterator ci(var_start, var_end + IntVector(1,1,1)); !ci.done(); ci++) { IntVector c = *ci; // find out which patch it's on (to keep the printing in sorted order. // alternatively, we could just iterate through the patches) unsigned int p = 0; for (; p < patches.size(); p++) { IntVector low = patches[p]->getExtraCellLowIndex(); IntVector high = patches[p]->getExtraCellHighIndex(); if (c.x() >= low.x() && c.y() >= low.y() && c.z() >= low.z() && c.x() < high.x() && c.y() < high.y() && c.z() < high.z()) break; } if (p == patches.size()) { continue; } T val; val = (*dynamic_cast<CCVariable<T>*>(ave[p]))[c]; if(d_printCell_coords){ Point point = level->getCellPosition(c); out << point.x() << " "<< point.y() << " " << point.z() << " "<<val << endl;; } else if(d_printNode_coords){ Point point = level->getNodePosition(c); out << point.x() << " "<< point.y() << " " << point.z() << " "<<val << endl;; }else{ out << c.x() << " "<< c.y() << " " << c.z() << " "<< val << endl;; } } for (unsigned i = 0; i < vars.size(); i++) delete vars[i]; } //__________________________________ // If the cell indicies were read from a file. 
if(use_cellIndex_file) { for (unsigned int i = 0; i< cells.size(); i++) { IntVector c = cells[i]; unsigned int p = 0; for (; p < patches.size(); p++) { IntVector low = patches[p]->getExtraCellLowIndex(); IntVector high = patches[p]->getExtraCellHighIndex(); if (c.x() >= low.x() && c.y() >= low.y() && c.z() >= low.z() && c.x() < high.x() && c.y() < high.y() && c.z() < high.z()) break; } if (p == patches.size()) { continue; } T val; val = (*dynamic_cast<CCVariable<T>*>(ave[p]))[c]; if(d_printCell_coords){ Point point = level->getCellPosition(c); out << point.x() << " "<< point.y() << " " << point.z() << " "<< val << endl; } else if(d_printNode_coords){ Point point = level->getNodePosition(c); out << point.x() << " "<< point.y() << " " << point.z() << " "<< val << endl; }else{ out << c.x() << " "<< c.y() << " " << c.z() << " "<< val << endl; } } } // if cell index file out << endl; } // if level exists } // timestep loop } /*_______________________________________________________________________ Function: readCellIndicies-- Purpose: reads in a list of cell indicies _______________________________________________________________________ */ void readCellIndicies( const string& filename, vector<IntVector>& cells ) { // open the file ifstream fp(filename.c_str()); if (!fp){ cerr << "Couldn't open the file that contains the cell indicies " << filename<< endl; } char c; int i,j,k; string text, comma; while (fp >> c) { fp >> text>>i >> comma >> j >> comma >> k; IntVector indx(i,j,k); cells.push_back(indx); fp.get(c); } // We should do some bullet proofing here //for (int i = 0; i<(int) cells.size(); i++) { // cout << cells[i] << endl; //} } #ifdef __bgq__ //////////////////////////////////////////////////////////////////////// // // On BGQ machines (eg: vulcan@llnl), and on many other strange // architectures, static variables don't construct correctly. 
This is // a particular problem with our type system as the types are // registered when they are first constructed and then placed into a // lookup table. The following hack just creates dummy variables of // the types that we need which forces the types to be registered. // const Uintah::TypeDescription * bgq_hack() { SFCXVariable<float> sfcxvar; SFCYVariable<float> sfcyvar; SFCZVariable<float> sfczvar; CCVariable<float> ccfloatvar; CCVariable<Vector> ccvectorvar; const Uintah::TypeDescription * td; td = sfcxvar.getTypeDescription(); td = sfcyvar.getTypeDescription(); td = sfczvar.getTypeDescription(); td = ccfloatvar.getTypeDescription(); td = ccvectorvar.getTypeDescription(); return td; } #endif // //////////////////////////////////////////////////////////////////////// //______________________________________________________________________ // Notes: // Now the material index is kind of a hard thing. There is no way // to reliably determine a default material. Materials are defined // on the patch for each varialbe, so this subset of materials could // change over patches. We can guess, that there will be a material // 0. This shouldn't cause the program to crash. It will spit out // an exception and exit gracefully. 
int main( int argc, char** argv ) { Uintah::Parallel::initializeManager(argc, argv); //__________________________________ // Default Values bool use_cellIndex_file = false; bool findCellIndices = false; bool printValueOnly = false; unsigned long time_start = 0; unsigned long time_end = (unsigned long)-1; unsigned long output_precision = 16; string input_uda_name; string input_file_cellIndices; string output_file_name("-"); IntVector var_start(0,0,0); IntVector var_end(0,0,0); Point start_pt(-9,-9,-9); Point end_pt(-9,-9,-9); int levelIndex = 0; vector<IntVector> cells; string variable_name; int material = 0; //__________________________________ // Parse arguments for( int i = 1; i < argc; i++ ){ const string s = argv[i]; if(s == "-v" || s == "--variable") { variable_name = string(argv[++i]); } else if (s == "-m" || s == "--material") { material = atoi(argv[++i]); } else if (s == "-vv" || s == "--verbose") { verbose = true; } else if (s == "-q" || s == "--quiet") { quiet = true; } else if (s == "-pad" || s == "--pad") { pad = true; } else if (s == "-tlow" || s == "--timesteplow") { time_start = strtoul(argv[++i],(char**)nullptr,10); } else if (s == "-thigh" || s == "--timestephigh") { time_end = strtoul(argv[++i],(char**)nullptr,10); } else if (s == "-pr" || s == "--precision") { output_precision = strtoul(argv[++i],(char**)nullptr,10); if (output_precision > 32) { std::cout << "Output precision cannot be larger than 32. Setting precision to 32 \n"; output_precision = 32; } if (output_precision < 1 ) { std::cout << "Output precision cannot be less than 1. 
Setting precision to 16 \n"; output_precision = 16; } } else if (s == "-timestep" || s == "--timestep") { int val = strtoul(argv[++i],(char**)nullptr,10); time_start = val; time_end = val; } else if (s == "-istart" || s == "--indexs") { int x = atoi(argv[++i]); int y = atoi(argv[++i]); int z = atoi(argv[++i]); var_start = IntVector(x,y,z); } else if (s == "-iend" || s == "--indexe") { int x = atoi(argv[++i]); int y = atoi(argv[++i]); int z = atoi(argv[++i]); var_end = IntVector(x,y,z); } else if (s == "-startPt" ) { double x = atof(argv[++i]); double y = atof(argv[++i]); double z = atof(argv[++i]); start_pt = Point(x,y,z); } else if (s == "-endPt" ) { double x = atof(argv[++i]); double y = atof(argv[++i]); double z = atof(argv[++i]); end_pt = Point(x,y,z); findCellIndices = true; } else if (s == "-l" || s == "--level") { levelIndex = atoi(argv[++i]); } else if( (s == "-h") || (s == "--help") ) { usage( "", argv[0] ); } else if (s == "-uda") { input_uda_name = string(argv[++i]); } else if (s == "-o" || s == "--out") { output_file_name = string(argv[++i]); } else if (s == "-ni" || s == "--noindex") { printValueOnly = true; } else if (s == "--cellIndexFile") { use_cellIndex_file = true; input_file_cellIndices = string(argv[++i]); } else if (s == "--cellCoords" || s == "-cellCoords" ) { d_printCell_coords = true; } else if (s == "--nodeCoords" || s == "-nodeCoords" ) { d_printNode_coords = true; }else { usage( s, argv[0] ); } } if( input_uda_name == "" ){ cerr << "No archive file specified\n"; usage("", argv[0]); } #ifdef __bgq__ bgq_hack(); #endif try { DataArchive* archive = scinew DataArchive(input_uda_name); vector<string> vars; vector<int> num_matls; vector<const Uintah::TypeDescription*> types; archive->queryVariables( vars, num_matls, types ); ASSERTEQ( vars.size(), types.size() ); if (verbose) { cout << "There are " << vars.size() << " variables registered with types:\n"; for( unsigned int index = 0; index < vars.size(); index++ ) { cout << index << ": " << 
vars[ index ] << " - " << types[ index ]->getName() << "\n"; } } int var_index = -1; for( unsigned int index = 0; index < vars.size(); index++ ) { if( variable_name == vars[ index ] ) { var_index = index; break; } } //__________________________________ // bulletproofing if( var_index == -1 ) { cerr << "\n"; cerr << "Variable \"" << variable_name << "\" was not found.\n"; cerr << "If you did not specify the variable name, use: --variable [name].\n"; cerr << "\n"; cerr << "Possible variable names are:\n"; for( unsigned int index = 0; index < vars.size(); index++ ) { cout << "vars[" << index << "] = " << vars[ index ] << "\n"; } cerr << "\n"; cerr << "Exiting!!\n\n"; cerr << "\n"; exit(-1); } //__________________________________ // get type and subtype of data const Uintah::TypeDescription * td = types[var_index]; const Uintah::TypeDescription * subtype = td->getSubType(); if( subtype == nullptr ) { cout << "\n"; cout << "An ERROR occurred. Subtype is nullptr. Most likely this means that the automatic\n"; cout << "type instantiation is not working... Are you running on a strange architecture?\n"; cout << "Types should be constructed when global static variables of each type are instantiated\n"; cout << "automatically when the program loads. The registering of the types occurs in:\n"; cout << "src/Core/Disclosure/TypeDescription.cc in register_type() (called from the\n"; cout << "TypeDescription() constructor(s). However, I'm not quite sure where the variables\n"; cout << "are initially (or in this case not initially) instantiated... Need to track\n"; cout << "that down and force them to be created... 
Dd.\n"; cout << "\n"; cout << "NOTE: You can try adding the type of the variable you are trying to extract into\n"; cout << " the bgq_hack() function in lineextract.cc.\n"; cout << "\n"; exit( 1 ); } //__________________________________ // Open output file, call printData with it's ofstream // if no output file, call with cout ostream *output_stream = &cout; if (output_file_name != "-") { if (verbose) cout << "Opening \""<<output_file_name<<"\" for writing.\n"; ofstream *output = new ofstream(); output->open(output_file_name.c_str()); if (!(*output)) { // bullet proofing cerr << "Could not open "<<output_file_name<<" for writing.\n"; exit(1); } output_stream = output; } //__________________________________ // find the cell index if( findCellIndices ){ vector<int> index; vector<double> times; archive->queryTimesteps(index, times); ASSERTEQ(index.size(), times.size()); GridP grid = archive->queryGrid(time_start); const LevelP level = grid->getLevel(levelIndex); if (level){ var_start=level->getCellIndex(start_pt); var_end =level->getCellIndex(end_pt); } } if (!quiet) { cout << vars[var_index] << ": " << types[var_index]->getName() << " being extracted for material "<<material << ", Level: " << levelIndex << ", at index "<<var_start << " to " << var_end <<endl; } //__________________________________ // read in cell indices from a file if ( use_cellIndex_file) { readCellIndicies(input_file_cellIndices, cells); } //__________________________________ // print data // N C / C C V A R I A B L E S if(td->getType() != Uintah::TypeDescription::ParticleVariable){ switch (subtype->getType()) { case Uintah::TypeDescription::double_type: printData<double>(archive, variable_name, td, material, use_cellIndex_file, levelIndex, var_start, var_end, cells, time_start, time_end, output_precision, printValueOnly, *output_stream); break; case Uintah::TypeDescription::float_type: printData<float>(archive, variable_name, td, material, use_cellIndex_file, levelIndex, var_start, var_end, 
cells, time_start, time_end, output_precision, printValueOnly, *output_stream); break; case Uintah::TypeDescription::int_type: printData<int>(archive, variable_name, td, material, use_cellIndex_file, levelIndex, var_start, var_end, cells, time_start, time_end, output_precision, printValueOnly, *output_stream); break; case Uintah::TypeDescription::Vector: printData<Vector>(archive, variable_name, td, material, use_cellIndex_file, levelIndex, var_start, var_end, cells, time_start, time_end, output_precision, printValueOnly, *output_stream); break; case Uintah::TypeDescription::Matrix3: printData<Matrix3>(archive, variable_name, td, material, use_cellIndex_file, levelIndex, var_start, var_end, cells, time_start, time_end, output_precision, printValueOnly, *output_stream); break; case Uintah::TypeDescription::Stencil7: printData<Stencil7>(archive, variable_name, td, material, use_cellIndex_file, levelIndex, var_start, var_end, cells, time_start, time_end, output_precision, printValueOnly, *output_stream); break; // don't break on else - flow to the error statement case Uintah::TypeDescription::bool_type: case Uintah::TypeDescription::short_int_type: case Uintah::TypeDescription::long_type: case Uintah::TypeDescription::long64_type: cerr << "Subtype is not implemented\n"; exit(1); break; default: cerr << "Unknown subtype\n"; exit(1); } } //__________________________________ // P A R T I C L E V A R I A B L E if(td->getType() == Uintah::TypeDescription::ParticleVariable){ switch (subtype->getType()) { case Uintah::TypeDescription::double_type: printData_PV<double>(archive, variable_name, td, material, use_cellIndex_file, levelIndex, var_start, var_end, cells, time_start, time_end, output_precision, *output_stream); break; case Uintah::TypeDescription::float_type: printData_PV<float>(archive, variable_name, td, material, use_cellIndex_file, levelIndex, var_start, var_end, cells, time_start, time_end, output_precision, *output_stream); break; case 
Uintah::TypeDescription::int_type: printData_PV<int>(archive, variable_name, td, material, use_cellIndex_file, levelIndex, var_start, var_end, cells, time_start, time_end, output_precision, *output_stream); break; case Uintah::TypeDescription::Vector: printData_PV<Vector>(archive, variable_name, td, material, use_cellIndex_file, levelIndex, var_start, var_end, cells, time_start, time_end, output_precision, *output_stream); break; case Uintah::TypeDescription::Other: // don't break on else - flow to the error statement case Uintah::TypeDescription::Matrix3: case Uintah::TypeDescription::bool_type: case Uintah::TypeDescription::short_int_type: case Uintah::TypeDescription::long_type: case Uintah::TypeDescription::long64_type: cerr << "Subtype is not implemented\n"; exit(1); break; default: cerr << "Unknown subtype\n"; exit(1); } } // Delete the output file if it was created. if (output_file_name != "-") { delete((ofstream*)output_stream); } } catch (Exception& e) { cerr << "Caught exception: " << e.message() << endl; exit(1); } catch(...){ cerr << "Caught unknown exception\n"; exit(1); } }
thomz12/h5
H5/H5/Resources/.generated/System/Collections/IEnumerator.js
<reponame>thomz12/h5 H5.define("System.Collections.IEnumerator", { $kind: "interface" });
pbasting/cactus
api/impl/cactusSerialisation.c
/* * Copyright (C) 2009-2011 by <NAME> (<EMAIL>) * * Released under the MIT license, see LICENSE.txt */ #include "cactusGlobalsPrivate.h" //////////////////////////////////////////////// //////////////////////////////////////////////// //////////////////////////////////////////////// //Functions for serialising the objects. //////////////////////////////////////////////// //////////////////////////////////////////////// //////////////////////////////////////////////// void binaryRepresentation_writeElementType(char elementCode, void (*writeFn)(const void * ptr, size_t size, size_t count)) { writeFn(&elementCode, sizeof(char), 1); } void binaryRepresentation_writeString(const char *name, void (*writeFn)(const void * ptr, size_t size, size_t count)) { int64_t i = strlen(name); writeFn(&i, sizeof(int64_t), 1); writeFn(name, sizeof(char), i); } void binaryRepresentation_writeInteger(int64_t i, void (*writeFn)(const void * ptr, size_t size, size_t count)) { writeFn(&i, sizeof(int64_t), 1); } void binaryRepresentation_writeName(Name name, void (*writeFn)(const void * ptr, size_t size, size_t count)) { binaryRepresentation_writeInteger(name, writeFn); } void binaryRepresentation_writeFloat(float f, void (*writeFn)(const void * ptr, size_t size, size_t count)) { writeFn(&f, sizeof(float), 1); } void binaryRepresentation_writeBool(bool i, void (*writeFn)(const void * ptr, size_t size, size_t count)) { writeFn(&i, sizeof(bool), 1); } char binaryRepresentation_peekNextElementType(void *binaryString) { return *((char *)binaryString); } char binaryRepresentation_popNextElementType(void **binaryString) { char *c; c = *binaryString; *binaryString = c + 1; return *c; } char *binaryRepresentation_getString(void **binaryString) { int64_t i; char *cA; i = binaryRepresentation_getInteger(binaryString); cA = st_malloc(sizeof(char)*(i+1)); memcpy(cA, *binaryString, sizeof(char)*i); cA[i] = '\0'; *binaryString = *((char **)binaryString) + i; return cA; } char 
*binaryRepresentation_getStringStatic_cA = NULL; const char *binaryRepresentation_getStringStatic(void **binaryString) { if(binaryRepresentation_getStringStatic_cA != NULL) { free(binaryRepresentation_getStringStatic_cA); } binaryRepresentation_getStringStatic_cA = binaryRepresentation_getString(binaryString); return binaryRepresentation_getStringStatic_cA; } int64_t binaryRepresentation_getInteger(void **binaryString) { int64_t *i; i = *binaryString; *binaryString = i + 1; return *i; } Name binaryRepresentation_getName(void **binaryString) { return binaryRepresentation_getInteger(binaryString); } float binaryRepresentation_getFloat(void **binaryString) { float *i; i = *binaryString; *binaryString = i + 1; return *i; } bool binaryRepresentation_getBool(void **binaryString) { bool *i; i = *binaryString; *binaryString = i + 1; return *i; } int64_t binaryRepresentation_makeBinaryRepresentationP_i = 0; void binaryRepresentation_makeBinaryRepresentationP(const void * ptr, size_t size, size_t count) { /* * Records the cummulative size of the substrings written out in creating the flower. 
*/ assert(ptr != NULL); binaryRepresentation_makeBinaryRepresentationP_i += size * count; } char *binaryRepresentation_makeBinaryRepresentationP2_vA = NULL; void binaryRepresentation_makeBinaryRepresentationP2(const void * ptr, size_t size, size_t count) { /* * Cummulates all the binary data into one array */ memcpy(binaryRepresentation_makeBinaryRepresentationP2_vA, ptr, size*count); binaryRepresentation_makeBinaryRepresentationP2_vA += size * count; } void *binaryRepresentation_makeBinaryRepresentation(void *object, void (*writeBinaryRepresentation)(void *, void (*writeFn)(const void * ptr, size_t size, size_t count)), int64_t *recordSize) { void *vA; binaryRepresentation_makeBinaryRepresentationP_i = 0; writeBinaryRepresentation(object, binaryRepresentation_makeBinaryRepresentationP); assert(binaryRepresentation_makeBinaryRepresentationP_i < INT64_MAX); vA = st_malloc(binaryRepresentation_makeBinaryRepresentationP_i); binaryRepresentation_makeBinaryRepresentationP2_vA = vA; writeBinaryRepresentation(object, binaryRepresentation_makeBinaryRepresentationP2); *recordSize = binaryRepresentation_makeBinaryRepresentationP_i; return vA; } void *binaryRepresentation_resizeObjectAsPowerOf2(void *vA, int64_t *recordSize) { if(*recordSize == 0) { *recordSize = 1; } int64_t finalSize = pow(2, log(*recordSize * 2)/log(2.0)); assert(finalSize >= *recordSize); vA = realloc(vA, finalSize); if(vA == NULL) { st_errAbort("Could not realloc memory\n"); } *recordSize = finalSize; return vA; }
AriCheng/flare
flare/base/monitoring/monitoring_system.h
<filename>flare/base/monitoring/monitoring_system.h<gh_stars>100-1000 // Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved. // // Licensed under the BSD 3-Clause License (the "License"); you may not use this // file except in compliance with the License. You may obtain a copy of the // License at // // https://opensource.org/licenses/BSD-3-Clause // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, WITHOUT // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the // License for the specific language governing permissions and limitations under // the License. #ifndef FLARE_BASE_MONITORING_MONITORING_SYSTEM_H_ #define FLARE_BASE_MONITORING_MONITORING_SYSTEM_H_ #include <memory> #include <string> #include <vector> #include "flare/base/dependency_registry.h" #include "flare/base/monitoring/event.h" namespace flare::monitoring { // Interface of monitoring system. Each monitoring system should provide their // own implementation. // // The implementation should be thread-safe (for obvious reasons.). class MonitoringSystem { public: virtual ~MonitoringSystem() = default; struct Personality { std::chrono::nanoseconds minimum_report_interval; }; struct EventBuffers { std::vector<Event> discrete_events; std::vector<CoalescedCounterEvent> counter_events; std::vector<CoalescedGaugeEvent> gauge_events; std::vector<CoalescedTimerEvent> timer_events; }; // Get personality of this monitoring system. Personality determines how // should Flare interact with the provider. virtual const Personality& GetPersonality() const = 0; // Report events to the monitoring system. 
virtual void Report(const EventBuffers& events) = 0; }; FLARE_DECLARE_OBJECT_DEPENDENCY_REGISTRY(monitoring_system_registry, MonitoringSystem); } // namespace flare::monitoring #define FLARE_MONITORING_REGISTER_MONITORING_SYSTEM(Name, Implementation) \ FLARE_REGISTER_OBJECT_DEPENDENCY( \ flare::monitoring::monitoring_system_registry, Name, \ [] { return std::make_unique<Implementation>(); }) #endif // FLARE_BASE_MONITORING_MONITORING_SYSTEM_H_
BrassGoggledCoders/ReEngineered-Toolbox
src/main/java/xyz/brassgoggledcoders/reengineeredtoolbox/api/face/FaceInstance.java
<filename>src/main/java/xyz/brassgoggledcoders/reengineeredtoolbox/api/face/FaceInstance.java
package xyz.brassgoggledcoders.reengineeredtoolbox.api.face;

import com.google.common.collect.Maps;
import net.minecraft.entity.player.PlayerEntity;
import net.minecraft.nbt.CompoundNBT;
import net.minecraft.util.Hand;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.math.BlockRayTraceResult;
import net.minecraft.util.text.ITextComponent;
import net.minecraft.world.World;
import net.minecraftforge.common.capabilities.Capability;
import net.minecraftforge.common.util.INBTSerializable;
import net.minecraftforge.common.util.LazyOptional;
import xyz.brassgoggledcoders.reengineeredtoolbox.api.conduit.ConduitClient;
import xyz.brassgoggledcoders.reengineeredtoolbox.api.container.face.IFaceContainer;
import xyz.brassgoggledcoders.reengineeredtoolbox.api.screen.face.IFaceScreen;
import xyz.brassgoggledcoders.reengineeredtoolbox.api.socket.ISocket;
import xyz.brassgoggledcoders.reengineeredtoolbox.api.socket.SocketContext;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.annotation.ParametersAreNonnullByDefault;
import java.util.Map;
import java.util.UUID;

/**
 * Runtime instance of a {@link Face} mounted in a socket.  Subclasses
 * override the hook methods (tick, activation, capabilities, redstone,
 * container/screen) to give a face behavior; the base class provides
 * identity (UUID), NBT persistence of that identity, and conduit-client
 * registration.
 */
public class FaceInstance implements INBTSerializable<CompoundNBT> {
    // Immutable binding of this instance to its face type and socket.
    private final SocketContext socketContext;
    // Conduit clients registered by subclasses, keyed by client name.
    private final Map<String, ConduitClient<?, ?, ?>> conduitClients;
    // Stable identity; persisted via serializeNBT/deserializeNBT.
    private UUID uuid;

    public FaceInstance(SocketContext socketContext) {
        this.socketContext = socketContext;
        this.uuid = UUID.randomUUID();
        this.conduitClients = Maps.newHashMap();
    }

    /** Per-tick hook; no-op by default. */
    public void onTick() {

    }

    /** Capability hook; the base face exposes no capabilities. */
    @Nonnull
    public <T> LazyOptional<T> getCapability(@Nonnull Capability<T> cap) {
        return LazyOptional.empty();
    }

    /** Persists only the UUID; subclasses extend as needed. */
    @Override
    public CompoundNBT serializeNBT() {
        CompoundNBT nbt = new CompoundNBT();
        nbt.putUniqueId("uuid", this.uuid);
        return nbt;
    }

    @Override
    public void deserializeNBT(CompoundNBT nbt) {
        // Keep the randomly generated UUID if the tag is missing.
        if (nbt.contains("uuid")) {
            this.uuid = nbt.getUniqueId("uuid");
        }
    }

    // --- Context accessors (delegate through the socket context) ---

    public SocketContext getSocketContext() {
        return this.socketContext;
    }

    public Face getFace() {
        return this.getSocketContext().getFace();
    }

    public ISocket getSocket() {
        return this.getSocketContext().getSocket();
    }

    public World getWorld() {
        return this.getSocket().getWorld();
    }

    /** Texture to render; defaults to the face type's default sprite. */
    public ResourceLocation getSpriteLocation() {
        return this.getFace().getDefaultSpriteLocation();
    }

    // --- Client sync hooks; default to an empty payload ---

    public CompoundNBT getUpdateTag() {
        return new CompoundNBT();
    }

    public void handleUpdateTag(CompoundNBT updateNBT) {

    }

    /** Right-click hook; returns true when the interaction was handled. */
    @ParametersAreNonnullByDefault
    public boolean onActivated(PlayerEntity player, Hand hand, BlockRayTraceResult hit) {
        return false;
    }

    /** Container factory; null means this face has no GUI container. */
    @Nullable
    public IFaceContainer getContainer() {
        return null;
    }

    /** Screen factory; null means this face has no GUI screen. */
    @Nullable
    public IFaceScreen getScreen() {
        return null;
    }

    /**
     * This call isn't side specific so the block will output based on highest strength through all faces
     *
     * @return The Strength of signal that a Comparator should output
     */
    public int getComparatorStrength() {
        return 0;
    }

    // --- Redstone hooks; inert by default ---

    public int getStrongPower() {
        return 0;
    }

    public boolean canConnectRedstone() {
        return false;
    }

    public Map<String, ConduitClient<?, ?, ?>> getConduitClients() {
        return conduitClients;
    }

    /** Registers a conduit client under an explicit name. */
    protected void registerClient(String name, ConduitClient<?, ?, ?> conduitClient) {
        this.conduitClients.put(name, conduitClient);
    }

    /** Registers a conduit client under its conduit type's default name. */
    protected void registerClient(ConduitClient<?, ?, ?> conduitClient) {
        this.registerClient(conduitClient.getConduitType().getDefaultClientName(), conduitClient);
    }

    /** Asks the socket to open this face's screen for the given player. */
    protected void openScreen(PlayerEntity playerEntity) {
        this.getSocket().openScreen(playerEntity, this);
    }

    public UUID getUuid() {
        return uuid;
    }

    public void setUuid(UUID uuid) {
        this.uuid = uuid;
    }

    public ITextComponent getName() {
        return this.getFace().getName();
    }
}
denim2x/wikia-is-answered
engine.py
"""Q&A engine: routes incoming messages through Dialogflow, backed by a
Fandom knowledge base mirrored into Redis (via the `rom` ORM), and caches
accepted answers in the `_answers` Redis hash."""
import re
from collections import defaultdict
from _bareasgi import text_reader, text_response, json_response
#from redis import StrictRedis
#from redis.exceptions import ResponseError
import _rom as rom
from config import dialogflow as _dialogflow, redis, answers as _answers, kb as _kb
from document import Document
from search import search
from dialogflow import Dialogflow, KnowledgeBase
from uuid import uuid1

# Random token used to verify the Redis connection actually round-trips.
ping = bytes(str(uuid1()), 'utf-8')
#ping = str(uuid1())

# Try each configured Redis server in order; keep the first one that echoes
# our ping token back.  On success `db` holds the live connection.
for server in redis:
    host, port = server['host'], int(server.get('port', '6379'))
    try:
        rom.init(host=host, port=port, password=server.get('auth'), decode_responses=False)
        db = rom.get_connection()
        #db = StrictRedis(host=server['host'], port=server['port'], password=server.get('pass'), db=0, decode_responses=True)
        if db is None:
            raise Exception
        if ping == db.execute_command('ECHO', ping):
            server = db.connection_pool.connection_kwargs
            print('[INFO] Redis connection:', f"{server['host']}:{server['port']}")
            break
    except Exception as e:
        print("[WARN] Redis connection failed:", e if str(e) else f'{host}:{port}')

# NOTE(review): the starred base `*rom.Model` implies rom.Model is an
# iterable of base classes — confirm against the `_rom` wrapper; a plain
# `rom.Model` base would be the conventional form.
class _Fragment(*rom.Model):
    # One uploaded knowledge-base fragment, keyed by its KB path.
    path = rom.String(required=True, unique=True)
    name = rom.String(required=True, unique=True)
    document = rom.ManyToOne('_Document', required=True, on_delete='no action')

class _Document(*rom.Model):
    # Source document a fragment was extracted from.
    name = rom.String(required=True, unique=True)
    url = rom.String(unique=True)
    caption = rom.String(required=True)
    site = rom.String(default='<unknown>')
    fragments = rom.OneToMany('_Fragment')

dialogflow = Dialogflow()
fandom = KnowledgeBase(_dialogflow['fandom'])
# Base URL used when (re)constructing documents from fragment names.
_url = fandom.caption

# TODO: Delete database entries not present in Fandom KB
# When configured with kb == 'blank', wipe every key except the answer cache.
_keys = [e for e in db.keys() if e != '_answers']
if _kb == 'blank' and _keys:
    db.unlink(*_keys)
    rom.bgsave()
    print('[INFO] Redis: Dropped KB entries')

# Mirror KB fragments that Redis does not know about yet, grouped by
# their parsed document name.
docs = defaultdict(list)
for fragment in fandom:
    if not _Fragment.get_by(name=fragment.display_name):
        name, heads = Document.parse_name(fragment.display_name)
        docs[name].append(fragment)

for name, fragments in docs.items():
    _doc = _Document.get_by(name=name)
    if not _doc:
        doc = Document(_url, name)
        _doc = _Document(name=name, url=doc.url, caption=doc.caption, site=doc.site)
    for fragment in fragments:
        _fragment = _Fragment(path=fragment.name, name=fragment.display_name, document=_doc)
rom.session.flush()

# When configured with answers == 'blank', clear the cached-answer hash.
if _answers == 'blank' and db.hlen('_answers'):
    db.hdel('_answers', *db.hkeys('_answers'))
    rom.bgsave()
    print('[INFO] Redis: Dropped answer entries')

# NOTE(review): module-level cache shared across requests — it only ever
# grows; confirm that is intended.
sites = defaultdict(dict)

async def knowledge(scope, info, matches, content):
    """HTTP handler: returns all known documents grouped by site as JSON."""
    for _doc in _Document.query.all():
        sites[_doc['site']].setdefault(_doc['url'], _doc['caption'])
    res = []
    for site, docs in sites.items():
        _docs = ({'caption': caption, 'url': url} for url, caption in docs.items())
        res.append({
            'caption': site,
            'documents': sorted(_docs, key=lambda e: e['caption'])
        })
    return json_response(sorted(res, key=lambda e: e['caption']))

from util import PriorityQueue
from phrase_metric import similarity, validate

# FIXME
def _search(self, text, threshold=0.8):
    # Fuzzy lookup of `text` among the field names of Redis hash `self`
    # (despite the name, this is a plain function; `self` is the hash key).
    # Returns the best-matching field when its similarity exceeds
    # `threshold`, else None (implicitly).
    keys = (key.decode() for key in db.hkeys(self))
    keys = ((similarity(text, key), key) for key in keys)
    s, key = max(keys, key=lambda k: k[0], default=(0, None))
    if s > threshold:
        return key

_save = db.hset

def find_answer(query):
    """Return a cached answer whose stored query fuzzily matches `query`."""
    ret = _search('_answers', query)
    return db.hget('_answers', ret).decode() if ret else None

def save_answer(query, answer):
    """Cache `answer` under `query` and return it unchanged."""
    _save('_answers', query, answer)
    rom.bgsave()
    return answer

async def message(scope, info, matches, content):
    """HTTP handler: answers a user message.

    Pipeline: normalize text -> small-talk intents -> cached answers ->
    on miss, fetch candidate fragments from web search, upload the best
    ones to the Fandom KB, re-query Dialogflow, and cache the winner.
    """
    # Collapse whitespace and strip leading dots from the raw body.
    text = re.sub(r'\s+', ' ', (await text_reader(content)).strip().lstrip('.').strip())
    if text == '':
        return text_response(dialogflow.event('WELCOME'))
    # First try intent matching without the knowledge base (small talk).
    answers = dialogflow.get_answers(text, kb=False)
    if answers:
        return text_response(answers[0])
    query = text.strip('?!').strip()
    if not validate(query):
        return text_response(dialogflow.event('fallback'))
    answer = find_answer(query)
    if answer:
        return text_response(answer)
    # Collect up to `total` best fragments by similarity to the query.
    total = 4
    fragments = PriorityQueue(total, lambda f, r: 1 - r)
    for url in search(query)[:1]:
        doc = Document(url)
        if not doc:
            print('[WARN] URL request failed:', doc.url)
            continue
        for fragment_name in doc:
            if _Fragment.get_by(name=fragment_name):
                # Already uploaded previously; counts against the budget.
                print('[INFO] Found fragment:', fragment_name)
                total -= 1
                if total == 0:
                    break
                continue
            print('[INFO] Generating fragment:', fragment_name)
            fragment = doc[fragment_name]
            if not fragment:
                print('[INFO] Skipping empty fragment:', fragment_name)
                continue
            fragments.add((doc, fragment_name, fragment), similarity(query, fragment))
    # Upload the best new fragments to the KB and mirror them in Redis.
    for doc, name, fragment in fragments[:total]:
        print('[INFO] Uploading fragment:', name)
        res = fandom.create(name, fragment)
        if res is None:
            print('[WARN] Fragment upload failed:', name)
            continue
        _doc = _Document.get_by(name=doc.name)
        if not _doc:
            _doc = _Document(name=doc.name, url=doc.url, caption=doc.caption, site=doc.site)
        _fragment = _Fragment(path=res.name, name=name, document=_doc)
        print('[INFO] Fragment uploaded:', name)
    rom.session.flush()
    #lambda a: _Fragment.get_by(path=a.source).document['url'] in urls)
    # Re-query with the (possibly just-extended) knowledge base enabled.
    answers = dialogflow.get_answers(query)
    if not answers:
        return text_response(dialogflow.event('fallback'))
    answer = max(answers, key=lambda a: a.match_confidence * similarity(query, a.answer))
    return text_response(save_answer(query, answer.answer))
sallyf/sallyf
src/main/java/com/sallyf/sallyf/Form/FormManager.java
<reponame>sallyf/sallyf<gh_stars>1-10
package com.sallyf.sallyf.Form;

import com.sallyf.sallyf.Container.Container;
import com.sallyf.sallyf.Container.ServiceInterface;
import com.sallyf.sallyf.Form.Renderer.*;
import com.sallyf.sallyf.Utils.ClassUtils;

import java.util.ArrayList;
import java.util.Collection;

/**
 * Service that turns {@link FormView} trees into markup by delegating to a
 * list of registered renderers, first match wins.
 */
public class FormManager implements ServiceInterface
{
    private ArrayList<RendererInterface<?, ?>> renderers = new ArrayList<>();

    @Override
    public void initialize(Container container)
    {
        // Built-in renderers; registration order defines lookup priority.
        addRenderer(FormRenderer.class);
        addRenderer(SubmitRenderer.class);
        addRenderer(TextRenderer.class);
        addRenderer(PasswordRenderer.class);
        addRenderer(HiddenRenderer.class);
        addRenderer(CheckboxRenderer.class);
        addRenderer(TextareaRenderer.class);
        addRenderer(ChoiceRenderer.class);
        addRenderer(RadioRenderer.class);
    }

    /**
     * Renders a view with the first renderer supporting its form type;
     * when none matches, falls back to rendering the view's children.
     */
    public String render(FormView formView)
    {
        for (RendererInterface candidate : renderers) {
            if (candidate.supports(formView.getForm().getBuilder().getFormType())) {
                return candidate.renderRow(formView);
            }
        }

        return renderChildren(formView);
    }

    /**
     * Concatenates the rendered markup of every child of the given view.
     */
    public String renderChildren(FormView<?, ?, ?> formView)
    {
        StringBuilder output = new StringBuilder();

        Collection<FormView> children = formView.getChildren();

        for (FormView child : children) {
            output.append(render(child));
        }

        return output.toString();
    }

    /**
     * Instantiates and registers a renderer class.
     */
    public void addRenderer(Class<? extends RendererInterface<?, ?>> rendererClass)
    {
        // Try the (FormManager) constructor first, swallowing any failure,
        // then fall back on the parameterless constructor.
        RendererInterface<?, ?> instance = ClassUtils.newInstance(rendererClass, e -> {}, this);

        if (null == instance) {
            instance = ClassUtils.newInstance(rendererClass);
        }

        renderers.add(instance);
    }

    public ArrayList<RendererInterface<?, ?>> getRenderers()
    {
        return renderers;
    }
}
HaroldMills/NFC
setup.py
<reponame>HaroldMills/NFC """ Setup.py for Vesper pip package. All of the commands below should be issued from the directory containing this file. To build the Vesper package: python setup.py sdist bdist_wheel To upload the Vesper package to the test Python package index: python -m twine upload --repository-url https://test.pypi.org/legacy/ dist/* To upload the Vesper package to the real Python package index: python -m twine upload dist/* To create a conda environment using a local Vesper package: conda create -n test python=3.10 conda activate test pip install dist/vesper-<version>.tar.gz To create a conda environment using a Vesper package from the test PyPI: conda create -n test python=3.10 conda activate test pip install --extra-index-url https://test.pypi.org/simple/ vesper To create a conda environment using a Vesper package from the real PyPI: conda create -n test python=3.10 conda activate test pip install vesper==<version> To create a conda environment for Vesper development: conda create -n vesper-dev python=3.10 conda activate vesper-dev conda install pyaudio pip install bokeh django jsonschema matplotlib pytz resampy ruamel_yaml skyfield sphinx sphinx_rtd_theme tensorflow To create a conda environment using the latest, local Vesper source code: conda create -n vesper-latest python=3.10 conda activate vesper-latest conda install pyaudio pip install -e /Users/harold/Documents/Code/Python/Vesper Whenever you modify plugin entry points, you must run: python setup.py develop for the plugin manager to be able to see the changes. If you don't do this, you will see ImportError exceptions when the plugin manager tries to load entry points that no longer exist. 
To run Django unit tests: cd "Desktop/Test Archive" conda activate vesper-latest vesper_admin test -p "dtest_*.py" vesper.django To run non-Django unit tests: cd /Users/harold/Documents/Code/Python/Vesper/vesper conda activate vesper-latest python -m unittest discover -s /Users/harold/Documents/Code/Python/Vesper/vesper """ from importlib.machinery import SourceFileLoader from pathlib import Path from setuptools import find_packages, setup def load_version_module(package_name): module_name = f'{package_name}.version' file_path = Path(f'{package_name}/version.py') loader = SourceFileLoader(module_name, str(file_path)) return loader.load_module() version = load_version_module('vesper') setup( name='vesper', version=version.full_version, description=( 'Software for acoustical monitoring of nocturnal bird migration.'), url='https://github.com/HaroldMills/Vesper', author='<NAME>', author_email='<EMAIL>', license='MIT', # TODO: Consider making the `vesper` Python package a native # namespace package, allowing it to be split across multiple, # separate distribution packages to allow optional ones (e.g. # ones containing optional plugins) to be omitted from an # installation. See # https://packaging.python.org/guides/packaging-namespace-packages/ # for a discussion of namespace packages. # # Two important points from that discussion are that: # # 1. Every distribution package that is part of a `vesper` # namespace package must omit `__init__.py` from its `vesper` # package directory. Note that this will affect where the # `__version__` package attribute is defined, pushing it down # one level of the package hierarchy, into the `__init__.py` # of each subpackage. See PEP 396 for more about `__version__` # for namespace packages. # # 2. The `setup.py` file of every distribution package must use # `setuptools.find_namespace_packages` rather than # `setuptools.find_packages` to find its packages. 
packages=find_packages( # We exclude the unit test packages since some of them contain a # lot of data, for example large audio files. exclude=['tests', 'tests.*', '*.tests.*', '*.tests'] ), classifiers=[ 'Programming Language :: Python :: 3', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', ], install_requires=[ 'django~=4.0.0', 'jsonschema~=4.5.0', 'pytz', 'resampy', 'ruamel_yaml', 'skyfield~=1.42.0', 'tensorflow~=2.9.0', ], entry_points={ 'console_scripts': [ 'vesper_admin=vesper.django.manage:main', 'vesper_recorder=vesper.scripts.vesper_recorder:_main', 'vesper_play_recorder_test_signal=vesper.scripts.play_recorder_test_signal:_main', 'vesper_show_audio_input_devices=vesper.scripts.show_audio_input_devices:_main', ] }, include_package_data=True, zip_safe=False )
emartech/xplenty.jar
xplenty.jar-core/src/main/java/com/xplenty/api/model/Job.java
<gh_stars>0
/**
 *
 */
package com.xplenty.api.model;

import java.util.Date;
import java.util.List;
import java.util.Map;

import javax.xml.bind.annotation.XmlRootElement;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.xplenty.api.Xplenty.JobStatus;
import com.xplenty.api.exceptions.XplentyAPIException;

/**
 * Data model for Xplenty job
 *
 * @author <NAME>
 *
 */
@XmlRootElement
@JsonInclude(Include.NON_NULL)
@JsonIgnoreProperties(ignoreUnknown = true)
public class Job extends XplentyObject<Job> {

	public Job() {
		super(Job.class);
	}

	protected Long id;
	protected JobStatus status;
	protected Map<String, String> variables;
	// for backwards compatibility
	@JsonProperty("dynamic_variables")
	protected Map<String, String> dynamicVariables;
	@JsonProperty("owner_id")
	protected Long ownerId;
	protected Double progress;
	@JsonProperty("outputs_count")
	protected Integer outputsCount;
	protected List<JobOutput> outputs;
	@JsonProperty("started_at")
	protected Date startedAt;
	@JsonProperty("created_at")
	protected Date createdAt;
	@JsonProperty("completed_at")
	protected Date completedAt;
	@JsonProperty("failed_at")
	protected Date failedAt;
	@JsonProperty("updated_at")
	protected Date updatedAt;
	@JsonProperty("cluster_id")
	protected Long clusterId;
	@JsonProperty("package_id")
	protected Long packageId;
	protected String errors;
	protected String url;
	@JsonProperty("runtime_in_seconds")
	protected Long runtimeInSeconds;

	/**
	 * Shorthand method for {@code waitForStatus(null, JobStatus...)} Will wait forever until the required status is received.
	 * @param statuses see {@link #waitForStatus(Long, JobStatus...)}
	 */
	public void waitForStatus(JobStatus... statuses) {
		waitForStatus(null, statuses);
	}

	/**
	 * Blocks execution until required status is received from the Xplenty server, or until timeout occurs.
	 * @param timeout time in seconds before terminating the wait, {@code null} to wait forever
	 * @param statuses list of statuses to wait for, see {@link JobStatus} for the list of supported statuses
	 */
	public void waitForStatus(Long timeout, JobStatus... statuses) {
		if (getParentApiInstance() == null)
			throw new XplentyAPIException("The parent API instance is not set");
		long start = System.currentTimeMillis();
		statusWait:
		while (true) {
			try {
				Thread.sleep(XplentyObject.StatusRefreshInterval);
			} catch (InterruptedException e) {
				// Preserve the caller's interruption status before bailing out.
				Thread.currentThread().interrupt();
				throw new XplentyAPIException("Error sleeping", e);
			}
			Job c = getParentApiInstance().jobInformation(id);
			for (JobStatus status: statuses) {
				if (c.getStatus() == status)
					break statusWait;
			}
			// Bug fix: the deadline check previously unboxed `timeout`
			// unconditionally, so the documented "wait forever" case
			// (timeout == null) threw a NullPointerException. Only enforce
			// the deadline when a timeout was actually supplied.
			if (timeout != null && System.currentTimeMillis() - timeout * 1000 > start)
				throw new XplentyAPIException("Timeout occurred while waiting for required job status");
		}
	}

	// --- Fluent builder-style mutators ---

	public Job withId(long id) {
		this.id = id;
		return this;
	}

	public Job withPackage(long packageId) {
		this.packageId = packageId;
		return this;
	}

	public Job onCluster(long clusterId) {
		this.clusterId = clusterId;
		return this;
	}

	public Job withVariables(Map<String, String> vars) {
		this.variables = vars;
		return this;
	}

	public Job withDynamicVariables(Map<String, String> dynVars) {
		this.dynamicVariables = dynVars;
		return this;
	}

	// --- Read accessors ---

	public Long getId() {
		return id;
	}

	public JobStatus getStatus() {
		return status;
	}

	public Map<String, String> getVariables() {
		return variables;
	}

	public Long getOwnerId() {
		return ownerId;
	}

	public Double getProgress() {
		return progress;
	}

	public Integer getOutputsCount() {
		return outputsCount;
	}

	public List<JobOutput> getOutputs() {
		return outputs;
	}

	public Date getStartedAt() {
		return startedAt;
	}

	public Date getCreatedAt() {
		return createdAt;
	}

	public Date getFailedAt() {
		return failedAt;
	}

	public Date getCompletedAt() {
		return completedAt;
	}

	public Date getUpdatedAt() {
		return updatedAt;
	}

	public Long getClusterId() {
		return clusterId;
	}

	public Long getPackageId() {
		return packageId;
	}

	public String getErrors() {
		return errors;
	}

	public String getUrl() {
		return url;
	}

	public Long getRuntimeInSeconds() {
		return runtimeInSeconds;
	}

	// --- Private setters used by Jackson deserialization ---

	@SuppressWarnings("unused")
	private void setId(long id) {
		this.id = id;
	}

	@SuppressWarnings("unused")
	private void setOwnerId(long ownerId) {
		this.ownerId = ownerId;
	}

	@SuppressWarnings("unused")
	private void setStatus(JobStatus status) {
		this.status = status;
	}

	@SuppressWarnings("unused")
	private void setVariables(Map<String, String> variables) {
		this.variables = variables;
	}

	@SuppressWarnings("unused")
	private void setProgress(double progress) {
		this.progress = progress;
	}

	@SuppressWarnings("unused")
	private void setOutputsCount(int outputsCount) {
		this.outputsCount = outputsCount;
	}

	@SuppressWarnings("unused")
	private void setOutputs(List<JobOutput> outputs) {
		this.outputs = outputs;
	}

	@SuppressWarnings("unused")
	private void setStartedAt(Date startedAt) {
		this.startedAt = startedAt;
	}

	@SuppressWarnings("unused")
	private void setCreatedAt(Date createdAt) {
		this.createdAt = createdAt;
	}

	@SuppressWarnings("unused")
	private void setFailedAt(Date failedAt) {
		this.failedAt = failedAt;
	}

	@SuppressWarnings("unused")
	private void setCompletedAt(Date completedAt) {
		this.completedAt = completedAt;
	}

	@SuppressWarnings("unused")
	private void setUpdatedAt(Date updatedAt) {
		this.updatedAt = updatedAt;
	}

	@SuppressWarnings("unused")
	private void setClusterId(long clusterId) {
		this.clusterId = clusterId;
	}

	@SuppressWarnings("unused")
	private void setPackageId(long packageId) {
		this.packageId = packageId;
	}

	@SuppressWarnings("unused")
	private void setErrors(String errors) {
		this.errors = errors;
	}

	@SuppressWarnings("unused")
	private void setUrl(String url) {
		this.url = url;
	}

	@SuppressWarnings("unused")
	private void setRuntimeInSeconds(long runtimeInSeconds) {
		this.runtimeInSeconds = runtimeInSeconds;
	}
}
PAXANDDOS/UCODE-Track-C
ushell/libmx/src/mx_bubble_sort.c
#include "libmx.h"

// Bubble-sorts an array of strings into ascending lexicographic order
// (ordering defined by mx_strcmp) and reports how many swaps were made.
//
// arr  - array of string pointers, reordered in place
// size - number of elements in arr
// Returns the total number of swaps performed (0 when already sorted).
int mx_bubble_sort(char **arr, int size) {
    int swaps = 0;

    for (int pass = 0; pass < size - 1; ++pass) {
        // After each pass the largest remaining element has bubbled to
        // the end, so the unsorted region shrinks by one.
        for (int j = 0; j + 1 < size - pass; ++j) {
            if (mx_strcmp(arr[j], arr[j + 1]) <= 0)
                continue;

            char *tmp = arr[j];
            arr[j] = arr[j + 1];
            arr[j + 1] = tmp;
            ++swaps;
        }
    }

    return swaps;
}
wuxiaohua17/mycloud-demo
mycloud-order/src/test/java/cn/com/ut/ApplicationTests.java
<reponame>wuxiaohua17/mycloud-demo
package cn.com.ut;

import javax.persistence.EntityManager;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.core.env.Environment;
import org.springframework.test.context.junit4.SpringRunner;

import cn.com.ut.demo.entity.Order;
import cn.com.ut.demo.repository.OrderRepository;

import java.math.BigDecimal;
//import org.hibernate.query.NativeQuery;

/**
 * Smoke tests for the order service: one exercises the Spring environment,
 * the other basic JPA entity loading/mutation.  Neither test asserts
 * anything; they are manual inspection aids.
 */
@RunWith(SpringRunner.class)
@SpringBootTest
public class ApplicationTests {

	@Autowired
	private Environment environment;

	@Autowired
	private OrderRepository orderRepository;

	@Autowired
	private EntityManager entityManager;

	@Test
	public void testEnv() {
//		String s = environment.getProperty("server.port");
//		System.out.println("==" + s);
		// NOTE(review): new BigDecimal(0.01) uses the double constructor,
		// which carries binary floating-point error (prints a long tail);
		// BigDecimal.valueOf(0.01) would be exact — confirm intent.
		System.out.println(new BigDecimal(0.01).multiply(new BigDecimal(0.8)));
	}

	@Test
	public void testJpa() {
		// Loads two orders and mutates them in the persistence context;
		// changes are not explicitly saved here.
		Order order1 = orderRepository.getOne("31");
		order1.setGoodsNum(2);
		Order order2 = orderRepository.getByOrderId("68");
		order2.setGoodsNum(3);
		System.out.println("=======");
	}
}
daysgobye/ccm
src/components/laxheader/laxheader.js
<gh_stars>0 import React, { Component } from 'react'; import Content from "../utility/Content/Content" import { Parallax, Background } from 'react-parallax'; class LaxHeader extends Component { constructor(props) { super(props); this.state = {} } render() { return ( <div className="hero"> <Parallax blur={0} bgImage={this.props.bgimg} bgImageAlt="the cat" strength={300} > <Content> {this.props.children} </Content> </Parallax> </div > ); } } export default LaxHeader;
LotteWong/giotto
http_proxy_middleware/http_rate_limit.go
package http_proxy_middleware

import (
	"fmt"
	"net/http"
	"time"

	"github.com/LotteWong/giotto-gateway-core/common_middleware"
	"github.com/LotteWong/giotto-gateway-core/constants"
	"github.com/LotteWong/giotto-gateway-core/models/po"
	"github.com/LotteWong/giotto-gateway-core/service"
	"github.com/gin-gonic/gin"
	"github.com/pkg/errors"
)

// HttpRateLimitMiddleware enforces up to three independent flow limits on a
// proxied HTTP request: per service host, per client IP, and per app QPS.
// A zero limit disables the corresponding check.  It expects earlier
// middleware to have stored the matched *po.ServiceDetail under "service"
// and the *po.App under "app" on the gin context, and aborts with a 500
// response when either is missing or a limit is exceeded.
func HttpRateLimitMiddleware() gin.HandlerFunc {
	return func(c *gin.Context) {
		httpServiceInterface, ok := c.Get("service")
		if !ok {
			common_middleware.ResponseError(c, http.StatusInternalServerError, errors.New("service not found"))
			c.Abort()
			return
		}
		httpServiceDetail := httpServiceInterface.(*po.ServiceDetail)

		appInterface, ok := c.Get("app")
		if !ok {
			common_middleware.ResponseError(c, http.StatusInternalServerError, errors.New("app not found"))
			c.Abort()
			return
		}
		app := appInterface.(*po.App)

		// 1) Service-wide limit, keyed by service name.
		if httpServiceDetail.AccessControl.ServiceHostFlowLimit != 0 {
			svrServiceName := constants.ServiceFlowCountPrefix + httpServiceDetail.Info.ServiceName
			svrRateLimit, err := service.GetRateLimitService().GetRateLimit(svrServiceName)
			if err != nil {
				common_middleware.ResponseError(c, http.StatusInternalServerError, err)
				c.Abort()
				return
			}
			_, _, svrAllow := svrRateLimit.Allow(
				svrServiceName,
				httpServiceDetail.AccessControl.ServiceHostFlowLimit,
				time.Duration(httpServiceDetail.AccessControl.ServiceHostFlowInterval)*time.Second,
			)
			// log.Printf("svr name:%s, count:%d\n", svrServiceName, svrCount)
			if !svrAllow {
				common_middleware.ResponseError(c, http.StatusInternalServerError, errors.New(fmt.Sprintf("service host flow limit is %d, rate limit exceeds", httpServiceDetail.AccessControl.ServiceHostFlowLimit)))
				c.Abort()
				return
			}
		}

		// 2) Per-client limit, keyed by service name plus client IP.
		if httpServiceDetail.AccessControl.ClientIpFlowLimit != 0 {
			cltServiceName := constants.ServiceFlowCountPrefix + httpServiceDetail.Info.ServiceName + "_" + c.ClientIP()
			cltRateLimit, err := service.GetRateLimitService().GetRateLimit(cltServiceName)
			if err != nil {
				common_middleware.ResponseError(c, http.StatusInternalServerError, err)
				c.Abort()
				return
			}
			_, _, cltAllow := cltRateLimit.Allow(
				cltServiceName,
				httpServiceDetail.AccessControl.ClientIpFlowLimit,
				time.Duration(httpServiceDetail.AccessControl.ClientIpFlowInterval)*time.Second,
			)
			// log.Printf("clt name:%s, count:%d\n", cltServiceName, cltCount)
			if !cltAllow {
				common_middleware.ResponseError(c, http.StatusInternalServerError, errors.New(fmt.Sprintf("client ip flow limit is %d, rate limit exceeds", httpServiceDetail.AccessControl.ClientIpFlowLimit)))
				c.Abort()
				return
			}
		}

		// 3) Per-app QPS limit over a fixed one-second window.
		if app.Qps != 0 {
			appServiceName := constants.AppFlowCountPrefix + app.AppId
			appRateLimit, err := service.GetRateLimitService().GetRateLimit(appServiceName)
			if err != nil {
				common_middleware.ResponseError(c, http.StatusInternalServerError, err)
				c.Abort()
				return
			}
			_, _, appAllow := appRateLimit.Allow(appServiceName, app.Qps, 1*time.Second)
			// log.Printf("app name:%s, count:%d\n", appServiceName, appCount)
			if !appAllow {
				common_middleware.ResponseError(c, http.StatusInternalServerError, errors.New(fmt.Sprintf("app flow limit is %d, rate limit exceeds", app.Qps)))
				c.Abort()
				return
			}
		}

		c.Next()
	}
}
greezybacon/clox
src/Parse/stream.h
<filename>src/Parse/stream.h
#ifndef STREAM_H
#define STREAM_H

#include <stdio.h>

struct stream;

/*
 * Virtual operation table for a character stream implementation
 * (file-backed or buffer-backed).
 */
typedef struct stream_ops {
    char (*next)(struct stream*);                            // Consume and return the next char
    char (*peek)(struct stream*);                            // Return the next char without consuming
    const char* (*read)(struct stream*, int start, int length); // Random-access read of a span
    void (*cleanup)(struct stream*);                         // Release implementation resources
} StreamOps;

/*
 * A position-tracking character stream over a file or memory buffer.
 *
 * NOTE(review): next/peek/read appear both as direct members and inside
 * `ops` — presumably the inline pointers are cached copies of the ops
 * table for call-site convenience; confirm which one call sites use.
 */
typedef struct stream {
    void* context;          // Implementation-private state (e.g. FILE* or buffer)
    int line;               // Current line number
    int offset;             // Current char of current line
    int pos;                // Position in stream
    char current;           // Most recently consumed character
    char* name;             // Name of the stream (STDIN or filename)

    char (*next)(struct stream*);
    char (*peek)(struct stream*);
    const char* (*read)(struct stream*, int start, int length);

    StreamOps* ops;         // Full operation table, incl. cleanup
} Stream;

/* Common-field initialization shared by the concrete initializers. */
void stream_init(Stream*);
/* Initialize over an open file; returns nonzero status. */
int stream_init_file(Stream*, FILE* restrict);
/* Initialize over an in-memory buffer of the given length. */
int stream_init_buffer(Stream*, const char*, size_t);
/* Tear down via ops->cleanup and release owned fields. */
void stream_uninit(Stream *);

#endif
tvang8394/danneckersWebRedone
assets/jss/nextjs-material-kit-pro/pages/ecommerceStyle.js
import { title, main, mainRaised, mrAuto, mlAuto, container, description, blackColor, whiteColor, grayColor, hexToRgb, } from "assets/jss/nextjs-material-kit-pro.js"; import imagesStyles from "assets/jss/nextjs-material-kit-pro/imagesStyles.js"; const styles = { ...imagesStyles, title, main, mainRaised, mrAuto, mlAuto, description, textCenter: { textAlign: "center !important", }, container: { ...container, zIndex: "2", }, brand: { "& h1, & h4": { color: whiteColor, }, }, card: {}, subscribeButton: {}, cardBody: { padding: "15px", "& form": { marginBottom: "0", }, }, cardForm: { margin: "0 0 0 14px", padding: 0, top: 10, }, subscribeLine: { padding: "1.875rem 0px", "& $card": { marginTop: "30px", }, "& form": { margin: "0px" }, "&$subscribeLineImage:after": { position: "absolute", zIndex: 1, width: "100%", height: "100%", display: "block", left: 0, top: 0, content: "''", backgroundColor: "rgba(" + hexToRgb(blackColor) + ",0.66)", }, }, subscribeLineImage: { position: "relative", backgroundPosition: "top center", backgroundSize: "cover", "& $container": { zIndex: 2, position: "relative", }, "& $title": { color: whiteColor, }, "& $description": { color: grayColor[0], }, }, socialFeed: { "& p": { display: "table-cell", verticalAlign: "top", overflow: "hidden", paddingBottom: "10px", maxWidth: 300, }, "& i": { fontSize: "20px", display: "table-cell", paddingRight: "10px", }, }, img: { width: "20%", marginRight: "5%", marginBottom: "5%", float: "left", }, list: { marginBottom: "0", padding: "0", marginTop: "0", }, inlineBlock: { display: "inline-block", padding: "0px", width: "auto", }, left: { float: "left!important", display: "block", }, right: { padding: "15px 0", margin: "0", float: "right", }, aClass: { textDecoration: "none", backgroundColor: "transparent", }, rightLinks: { float: "right!important", "& ul": { marginBottom: 0, marginTop: 10, padding: 0, listStyle: "none", height: 38, "& li": { display: "inline-block", }, }, "& i": { fontSize: "20px", }, }, 
pullCenter: { display: "inline-block", float: "none" }, block: { color: "inherit", padding: "0.9375rem", fontWeight: "500", fontSize: "12px", textTransform: "uppercase", borderRadius: "3px", textDecoration: "none", position: "relative", display: "block", }, }; export default styles;
gregakinman/atlasdb
atlasdb-ete-tests/src/test/java/com/palantir/atlasdb/ete/LockWithTimelockEteTest.java
/*
 * (c) Copyright 2019 Palantir Technologies Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.palantir.atlasdb.ete;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;

import org.junit.Test;

import com.palantir.atlasdb.lock.LockResource;

/**
 * End-to-end tests taking locks through both the legacy (v1) lock API and the
 * timelock (v2) API at increasing request sizes. Small and large requests are
 * expected to succeed; the huge requests are expected to be rejected on the
 * client side with an INVALID_ARGUMENT error before reaching the server.
 */
public class LockWithTimelockEteTest {
    // Client bound to a single ETE node; arguments to the lock calls are
    // (number of locks, size parameter) as defined by LockResource.
    private LockResource lockResource = EteSetup.createClientToSingleNode(LockResource.class);

    @Test
    public void smallV1LockSucceeds() throws InterruptedException {
        assertThat(lockResource.lockUsingLegacyLockApi(1, 100)).isTrue();
    }

    @Test
    public void smallV2LockSucceeds() {
        assertThat(lockResource.lockUsingTimelockApi(1, 100)).isTrue();
    }

    @Test
    public void largeV1LockSucceeds() throws InterruptedException {
        assertThat(lockResource.lockUsingLegacyLockApi(50, 100_000)).isTrue();
    }

    @Test
    public void largeV2LockSucceeds() {
        assertThat(lockResource.lockUsingTimelockApi(50, 100_000)).isTrue();
    }

    @Test
    public void hugeV1LockThrowsOnClientSide() throws InterruptedException {
        assertThatThrownBy(() -> lockResource.lockUsingLegacyLockApi(100, 500_000))
                .isInstanceOf(RuntimeException.class)
                .hasMessageContaining("INVALID_ARGUMENT");
    }

    @Test
    public void hugeV2ThrowsOnClientSide() {
        assertThatThrownBy(() -> lockResource.lockUsingTimelockApi(100, 500_000))
                .isInstanceOf(RuntimeException.class)
                .hasMessageContaining("INVALID_ARGUMENT");
    }
}
delchev/xsk
modules/parsers/parser-hana/src/main/java/models/TableFunctionDefinitionModel.java
/*
 * Copyright (c) 2022 SAP SE or an SAP affiliate company and XSK contributors
 *
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Apache License, v2.0
 * which accompanies this distribution, and is available at
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * SPDX-FileCopyrightText: 2022 SAP SE or an SAP affiliate company and XSK contributors
 * SPDX-License-Identifier: Apache-2.0
 */
package models;

/**
 * Immutable holder for a parsed table-function definition: the schema it
 * belongs to and its name. Only the name is validated as mandatory.
 */
public class TableFunctionDefinitionModel {

  private final String schema;
  private final String name;

  public TableFunctionDefinitionModel(String schema, String name) {
    this.schema = schema;
    this.name = name;
  }

  public String getSchema() {
    return schema;
  }

  public String getName() {
    return name;
  }

  /**
   * Verifies that every mandatory field was supplied.
   *
   * @throws TableFunctionMissingPropertyException when the name is missing
   */
  public void checkForAllMandatoryFieldsPresence() {
    checkPresence(name, "name");
  }

  // Fail fast with a descriptive message when a mandatory value is null.
  private <F> void checkPresence(F value, String label) {
    if (value == null) {
      throw new TableFunctionMissingPropertyException("Missing mandatory field " + label);
    }
  }
}
nilium/libsnow-common
include/snow/types/range_set.hh
<filename>include/snow/types/range_set.hh /* * Copyright <NAME> 2013. * * Distributed under the Boost Software License, Version 1.0. * (See accompanying file LICENSE_1_0.txt or copy at * http://www.boost.org/LICENSE_1_0.txt) */ #pragma once #include <snow/config.hh> #include <stdexcept> #include <list> namespace snow { template <typename T> struct S_EXPORT range_set_t { using range_type = T; using value_type = typename range_type::value_type; // Currently backed by a linked list. Could probably be backed by a set with // some changes to the routines below. using range_list_t = std::list<range_type>; using iterator = typename range_list_t::iterator; using const_iterator = typename range_list_t::const_iterator; using const_reverse_iterator = typename range_list_t::const_reverse_iterator; auto add(const range_type &range) -> range_set_t& { if (range.length == 0) return *this; iterator iter = ranges_.begin(); range_type morphed = range; iterator insert_pos = iter; while (iter != ranges_.end()) { if (morphed.max() < iter->min() || iter->max() < morphed.min()) { insert_pos = iter; break; } if (iter->contains(morphed)) { // if the set already contains the range, end early return *this; } else if (morphed.contains(*iter)) { // remove the old range if it's smaller than the new one insert_pos = iter = ranges_.erase(iter); } else if (morphed.intersects(*iter)) { // create a union of the ranges and remove the old range if the two // intersect or touch morphed = morphed.joined(*iter); insert_pos = iter = ranges_.erase(iter); } else { ++iter; } } ranges_.insert(insert_pos, morphed); return *this; } auto subtract(const range_type &range) -> range_set_t& { if (range.length == 0) return *this; iterator iter = ranges_.begin(); while (iter != ranges_.end()) { if (range.max() < iter->min() || iter->max() < range.min()) break; if (range.contains(*iter)) { // if the set's range is equal to the subtracted range, remove it entirely ranges_.erase(iter); break; } else if 
(iter->contains(range)) { range_type left, right; if (iter->split(range, &left, &right)) { if (right.length == 0) { std::swap(left, right); } if (right.length != 0) { *iter = right; if (left.length != 0) { ranges_.insert(iter, left); } } } else { s_throw(std::runtime_error, "Failed to split range"); } break; } else if (range.intersects(*iter)) { range_type subbed = iter->difference(range); std::cout << subbed << std::endl; if (subbed.length != 0) { *iter = subbed; } else { iter = ranges_.erase(iter); } } else { ++iter; } } return *this; } auto clear() -> range_set_t& { ranges_.clear(); return *this; } auto empty() const -> bool { return ranges_.empty(); } auto intersects(const range_type &range) const -> bool { for (auto &subrange : *this) if (subrange.intersects(range)) return false; } // Returns true if any portion of range is contained by a range in the set. template <typename Q> auto contains(const Q &inner) const -> bool { for (auto &subrange : *this) if (subrange.contains(inner)) return true; return false; } auto begin() const -> const_iterator { return ranges_.cbegin(); } auto end() const -> const_iterator { return ranges_.cend(); } auto cbegin() const -> const_iterator { return ranges_.cbegin(); } auto cend() const -> const_iterator { return ranges_.cend(); } auto rbegin() const -> const_reverse_iterator { return ranges_.crbegin(); } auto rend() const -> const_reverse_iterator { return ranges_.crend(); } auto crbegin() const -> const_reverse_iterator { return ranges_.crbegin(); } auto crend() const -> const_reverse_iterator { return ranges_.crend(); } private: range_list_t ranges_; }; } // namespace snow
yujiecong/Qt-Zhku-Client
Qt-Zhku-Client/StrProcessing.h
#ifndef STRPROCESSING_H
#define STRPROCESSING_H

#include <QWidget>
#include <QtNetwork>

// Grab-bag of string helpers: GBK/UTF-8 transcoding, URL encoding, MD5
// hashing, and JSON <-> QString conversion.
// NOTE(review): behavior below is inferred from the names only — the
// implementations live in StrProcessing.cpp; confirm there.
class StrProcessing
{
public:
    StrProcessing();
    // Presumably decodes a GBK-encoded byte array into a QString.
    QString gbk2Utf8(const QByteArray &inStr);
    // Presumably decodes a UTF-8 byte array toward GBK text; note the
    // QString return type despite the name — verify in the .cpp.
    QString utf82Gbk(const QByteArray &inStr);
    // Presumably percent-encodes the bytes for use in a URL.
    QByteArray toUrlEncode(const QByteArray &inStr);
    // Presumably returns the MD5 digest of the given string.
    QString getMd5( QString );
    // Parse a JSON document held in a string into a QJsonObject.
    QJsonObject qString2Json(QString jsonString);
    // Serialize a QJsonObject back to its string form.
    QString qJson2QString(QJsonObject jsonObject);
};

#endif // STRPROCESSING_H
fabiodarice/Python
PyCharm/Exercicios/Aula16/ex074.py
# Library imports
from random import randint

# Program title (ANSI colors: bold blue on black)
print('\033[1;34;40mMAIOR E MENOR VALORES EM TUPLA\033[m')

# Draw five random values in [1, 10] into an immutable tuple.
n = tuple(randint(1, 10) for _ in range(5))

# Print the drawn values on a single line, separated by spaces.
print('Os valores sorteados foram:', '', end='')
for value in n:
    print(value, '', end='')

# Report the extremes using the built-ins max()/min().
print(f'\nO maior valor sorteado foi {max(n)}')
print(f'O menor valor sorteado foi {min(n)}')  # fixed typo: "manor" -> "menor"
tfnick/FEBS-Shiro
src/main/java/cc/mrbird/febs/approve/mapper/DatasetMapper.java
package cc.mrbird.febs.approve.mapper;

import cc.mrbird.febs.approve.entity.Dataset;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;

/**
 * MyBatis-Plus mapper for the {@link Dataset} entity. Inherits the generic
 * CRUD operations from {@link BaseMapper} and declares no custom queries.
 *
 * @author YangXiao
 * @date 2019-09-27 15:50:29
 */
public interface DatasetMapper extends BaseMapper<Dataset> {

}
Nedson202/Knowledge-store-mobile
src/components/Books/BookMeta.js
// Presentational component rendering a book's metadata (title, genre,
// authors, optional rating) beside its cover image. `mainBookMeta` toggles
// between the full detail layout and a compact two-line list layout.
import React from 'react';
import {
  Image, Rating,
} from 'react-native-elements';
import {
  View, TouchableWithoutFeedback,
} from 'react-native';

import { appRootStyle } from '../../styles';
import { CONTAIN, ICON_SMALL } from '../../settings';
import Spinner from '../common/Spinner';
import CustomText from '../common/CustomText';
import { toHTTPS } from '../../utils';

// NOTE(review): navigateToRecommended and toggleImageEnlarger are invoked at
// render time — presumably curried handler factories; confirm in the callers.
const BookMeta = ({
  mainBookMeta, book, toggleImageEnlarger, renderAuthors, navigateToRecommended
}) => {
  const {
    id, name, image: uri, genre, authors,
    googleAverageRating, averageRating,
  } = book;
  const displayAuthors = renderAuthors(authors);
  // Compact layout truncates long titles to two lines; detail layout does not.
  const numberOfLines = mainBookMeta ? null : 2;

  return (
    <View style={appRootStyle.bookMetaView}>
      <View style={appRootStyle.bookMetaText}>
        <TouchableWithoutFeedback
          onPress={navigateToRecommended(id, mainBookMeta)}
        >
          <CustomText
            style={[mainBookMeta && { fontSize: 18 }, appRootStyle.bookTitle]}
            numberOfLines={numberOfLines}
          >
            {name}
          </CustomText>
        </TouchableWithoutFeedback>
        <CustomText
          style={appRootStyle.bookAuthor}
        >
          {genre}
        </CustomText>
        <CustomText
          style={appRootStyle.bookAuthor}
        >
          {displayAuthors}
        </CustomText>
        {/* Rating only shown on the detail layout; Google rating wins. */}
        {mainBookMeta
          && <Rating
            readonly
            ratingCount={5}
            imageSize={20}
            startingValue={googleAverageRating || averageRating}
            style={{
              marginTop: 20,
              marginLeft: 0,
            }}
          />
        }
      </View>

      <TouchableWithoutFeedback
        onPress={toggleImageEnlarger(uri)}
      >
        <View style={[{
          width: '30%',
          marginLeft: -15,
        }, !mainBookMeta && {
          marginLeft: 15,
        }]}>
          {/* Cover image upgraded to https; spinner while loading. */}
          <Image
            resizeMode={CONTAIN}
            source={{ uri: toHTTPS(uri) }}
            style={[appRootStyle.bookMetaImage, !mainBookMeta && {
              width: '100%',
              height: 100,
            }]}
            PlaceholderContent={<Spinner size={ICON_SMALL} />}
          />
        </View>
      </TouchableWithoutFeedback>
    </View>
  );
};

export default BookMeta;
pop1234o/BestPracticeApp
Algorithm/src/main/java/com/liyafeng/algorithm/sword2offer/N16_ReverseList.java
package com.liyafeng.algorithm.sword2offer;

public class N16_ReverseList {

    /**
     * Reverse a linked list.
     *
     * We can reverse iteratively, caching the `next` pointer in a loop.
     *
     * We can also reverse recursively, since recursion inherently caches
     * the parameters for us.
     *
     * Recursion carries stack overhead, though, so it is unsuitable for
     * long lists — but the recursive code is more concise.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        List list = new List();
        list.add(new List.Node(1));
        list.add(new List.Node(2));
        list.add(new List.Node(3));
        list.add(new List.Node(4));
        list.add(new List.Node(5));
        list.add(new List.Node(7));
        List.Node head = reverse2(list);
        while (head != null) {
            System.out.println(head.value);
            head = head.next;
        }
    }

    /**
     * Iterative reversal. Walkthrough for 1 2 3 4 5 6:
     * <p>
     * `next` is a temporary.
     * <p>
     * Initially `pre` is null and `node` points at the head.
     * <p>
     * Cache node.next,
     * <p>
     * then point the current node.next at pre,
     * <p>
     * then advance the pointers: pre -> node, node -> next.
     * <p>
     * Pattern: cache first, then mutate, then read the cache.
     * =============================
     * 1->2->3
     * currentNode = 1;   // caches the node being processed
     * pre = null;        // caches the previous node so next can point at it
     * while (node != null) {
     *     next = node.next;        // cache the current node's next
     *     if (next == null) {      // traversal complete
     *         head = node;
     *     }
     *     currentNode.next = pre;  // re-point the current node's next
     *     pre = currentNode;       // advance: current becomes previous
     *     node = next;             // advance: next becomes current
     * }
     *
     * @param list list to reverse in place
     * @return the new head (also written back to list.head)
     */
    private static List.Node reverse(List list) {
        // cache the current node and the previous node
        List.Node currentNode = list.head;
        List.Node pre = null;
        while (currentNode != null) {
            // temporarily cache the next node so we can still advance after
            // the current node has been re-pointed
            List.Node next = currentNode.next;
            if (next == null) {
                list.head = currentNode;
            }
            // re-point the current node backwards
            currentNode.next = pre;
            // advance the pre and currentNode pointers
            pre = currentNode;
            currentNode = next;
        }
        return list.head;
    }

    /**
     * Recursive reversal entry point: relinks the nodes, clears the old
     * head's next pointer, and updates list.head to the new head.
     */
    public static List.Node reverse2(List list) {
        List.Node node = reverseInner(list.head, list.head.next);
        list.head.next = null;
        list.head = node;
        return list.head;
    }

    // Recurse to the tail, then on unwind point each successor back at its
    // predecessor; returns the tail, which becomes the new head.
    private static List.Node reverseInner(List.Node head, List.Node next) {
        if (next != null) {
            List.Node node = reverseInner(next, next.next);
            next.next = head;
            return node;
        } else {
            return head;
        }
        // last node -> previous one
    }

    public static class List {
        Node head;

        static class Node {
            public Node(int value) {
                this.value = value;
            }

            int value;
            Node next;
        }

        // NOTE: inserts at the head, so elements end up in reverse insertion
        // order (main() relies on this when printing the reversed result).
        public void add(Node node) {
            if (head == null) {
                head = node;
            } else {
                node.next = head;
                head = node;
            }
        }
    }
}
fredmorcos/attic
Projects/PlantMaker/archive/20100517/src/utils.py
def bestSolution(solutions):
    """Return the solution with the highest fitness.

    Ties are broken in favor of the earliest candidate (strict ``>``
    comparison). An empty sequence raises ``IndexError``.
    """
    best = solutions[0]
    for candidate in solutions[1:]:
        if candidate.fitness > best.fitness:
            best = candidate
    return best
UKHomeOffice/removals_integration
api/lib/ModelHelpers.js
<filename>api/lib/ModelHelpers.js 'use strict'; const _ = require('lodash'); module.exports = { findAndUpdateOrCreate: function findAndUpdateOrCreate (criteria, values) { return this.update(criteria, values) .then(result => _.isEmpty(result) ? this.create(values) : result[0]); }, normalizeGender: (gender) => { switch (gender) { case 'f': case 'female': gender = 'female'; break; case 'm': case 'male': gender = 'male'; break; default: gender = null; } return gender; }, mixin: function (model) { model.normalizeGender = this.normalizeGender; model.findAndUpdateOrCreate = this.findAndUpdateOrCreate; return model; } };
OCEO-YJ/Synapse-Repository-Services
services/repository/src/main/java/org/sagebionetworks/repo/web/PaginatedParameters.java
package org.sagebionetworks.repo.web; /** * Parameters for paginated results. * * @author jmhill * */ public class PaginatedParameters { long offset = 0; long limit = 10; String sortBy = null; boolean ascending = true;; public PaginatedParameters(){ } public PaginatedParameters(long offset, long limit, String sortBy, boolean ascending) { super(); this.offset = offset; this.limit = limit; this.sortBy = sortBy; this.ascending = ascending; } public long getOffset() { return offset; } public void setOffset(long offset) { this.offset = offset; } public long getLimit() { return limit; } public void setLimit(long limit) { this.limit = limit; } public String getSortBy() { return sortBy; } public void setSortBy(String sortBy) { this.sortBy = sortBy; } public boolean getAscending() { return ascending; } public void setAscending(boolean ascending) { this.ascending = ascending; } }
mehditeymorian/SmartTask
app/src/main/java/ir/timurid/smarttask/db/TodoDao.java
package ir.timurid.smarttask.db;

import androidx.lifecycle.LiveData;
import androidx.room.Dao;
import androidx.room.Delete;
import androidx.room.Insert;
import androidx.room.Query;
import androidx.room.Update;

import java.util.List;

import ir.timurid.smarttask.model.Todo;
import ir.timurid.smarttask.model.TodoInfo;

/**
 * Room DAO for todos. Read queries join TodoInfo with its Category so every
 * returned Todo carries the category's title and color; writes operate on the
 * raw TodoInfo rows. A null doneDate marks a todo as not yet done.
 */
@Dao
public interface TodoDao {
    // Shared SELECT prefix: join each TodoInfo row with its Category and
    // expose the category's title/color under aliased column names. Callers
    // append their WHERE condition after the trailing "AND ".
    String GET_BASE_QUERY = "SELECT TodoInfo.*, Category.title AS categoryTitle, Category.color AS categoryColor "
            + "FROM TodoInfo INNER JOIN Category ON TodoInfo.categoryId == Category.categoryId AND ";

    /** Todos without a doneDate (still pending), as observable LiveData. */
    @Query(GET_BASE_QUERY + "TodoInfo.doneDate IS NULL")
    LiveData<List<Todo>> getAllUndone();

    /** Completed todos, most recently finished first. */
    @Query(GET_BASE_QUERY + "TodoInfo.doneDate IS NOT NULL ORDER BY TodoInfo.doneDate DESC")
    LiveData<List<Todo>> getAllDone();

    /** Single todo (with category info) looked up by its id. */
    @Query(GET_BASE_QUERY + "TodoInfo.todoId == :todoId")
    LiveData<Todo> getTodoById(long todoId);

    /** Synchronous lookup of a raw TodoInfo row by exact title (no join). */
    @Query("SELECT * FROM TodoInfo WHERE title == :title")
    TodoInfo getTodoInfoByTitle(String title);

    /** @return the rowId of the newly inserted TodoInfo */
    @Insert
    long insert(TodoInfo todoInfo);

    @Update
    void update(TodoInfo info);

    @Delete
    void delete(TodoInfo info);
}
jiachen247/JLite-Compiler
src/main/java/parsetree/statement/CallStatement.java
package main.java.parsetree.statement;

import java.util.ArrayList;
import java.util.List;

import main.java.ir3.VarDecl3;
import main.java.ir3.exp.Exp3Result;
import main.java.ir3.stmt.CallStatement3;
import main.java.ir3.stmt.Stmt3;
import main.java.ir3.stmt.Stmt3Result;
import main.java.parsetree.expression.CallExpression;
import main.java.staticcheckers.CheckError;
import main.java.staticcheckers.type.BasicType;
import main.java.staticcheckers.type.Environment;

/**
 * AST node for a statement consisting solely of a method call, i.e. a
 * CallExpression used in statement position such as {@code foo(a, b);}.
 */
public class CallStatement extends Statement {
    private CallExpression callExpression;

    /** x/y are the source coordinates passed through to Statement. */
    public CallStatement(int x, int y, CallExpression callExpression) {
        super(x, y);
        this.callExpression = callExpression;
    }

    @Override
    public String toString() {
        return callExpression.toString() + ";";
    }

    /** Type-checks by delegating entirely to the wrapped call expression. */
    @Override
    public BasicType typeCheck(Environment env, List<CheckError> errors) {
        return callExpression.typeCheck(env, errors);
    }

    /**
     * Lowers to IR3: the expression's setup statements and temporaries,
     * followed by a CallStatement3 discarding the call's result.
     */
    @Override
    public Stmt3Result toIR() {
        List<VarDecl3> temps = new ArrayList<>();
        List<Stmt3> stmts = new ArrayList<>();

        Exp3Result callResult = callExpression.toIR();
        temps.addAll(callResult.getTempVars());
        stmts.addAll(callResult.getStatements());
        stmts.add(new CallStatement3(callResult.getResult()));

        return new Stmt3Result(temps, stmts);
    }
}
yuqingchen/Leetcode
106_Construct_Binary_Tree_from_Inorder_and_Postorder_Traversal.py
from typing import List  # required: annotations are evaluated at def time


class Solution:
    def buildTree(self, inorder: List[int], postorder: List[int]) -> "TreeNode":
        """LeetCode 106: rebuild a binary tree from inorder + postorder.

        The last postorder element is the root; its position in the inorder
        list splits both lists into the left and right subtrees. `TreeNode`
        is supplied by the judging harness, hence the quoted annotation.
        Assumes the tree contains no duplicate values (index() must be
        unambiguous). Returns None for empty input.
        """
        if not postorder:
            return
        root = TreeNode(postorder[-1])
        rootpos = inorder.index(postorder[-1])
        # left subtree: first rootpos entries of both traversals
        root.left = self.buildTree(inorder[:rootpos], postorder[:rootpos])
        # right subtree: remainder of inorder, postorder minus the root
        root.right = self.buildTree(inorder[rootpos + 1:], postorder[rootpos:-1])
        return root
shadiga/Client
app/src/main/java/com/freecoders/photobook/db/SQLiteHelper.java
package com.freecoders.photobook.db;

import android.content.Context;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.util.Log;

/**
 * SQLite helper owning the photobook database: a "friends" table (contact
 * info plus status/type flags) and an "images" table (MediaStore references
 * and server-side ids). Upgrades are destructive: both tables are dropped
 * and recreated.
 *
 * Created by Alex on 2014-11-27.
 */
public class SQLiteHelper extends SQLiteOpenHelper {
    public static final String TABLE_FRIENDS = "friends";
    public static final String TABLE_IMAGES = "images";
    // Columns shared or per-table; COLUMN_ID/COLUMN_STATUS are used by both.
    public static final String COLUMN_ID = "_id";
    public static final String COLUMN_NAME = "Name";
    public static final String COLUMN_CONTACT_KEY = "ContactKey";
    public static final String COLUMN_USER_ID = "UserId";
    public static final String COLUMN_AVATAR = "Avatar";
    public static final String COLUMN_STATUS = "Status";
    public static final String COLUMN_TYPE = "Type";
    public static final String COLUMN_MEDIASTORE_ID = "mediaStoreId";
    public static final String COLUMN_ORIG_URI = "origUri";
    public static final String COLUMN_THUMB_URI = "thumbUri";
    public static final String COLUMN_SERVER_ID = "serverId";
    public static final String COLUMN_TITLE = "title";
    public static final String COLUMN_BUCKET_ID = "bucketId";

    private static final String DATABASE_NAME = "photobook.db";
    // Bump to trigger onUpgrade (which wipes both tables).
    private static final int DATABASE_VERSION = 6;

    // Database creation sql statement
    private static final String DATABASE_CREATE_FRIENDS_TABLE = "create table "
            + TABLE_FRIENDS + "(" + COLUMN_ID
            + " integer primary key autoincrement, "
            + COLUMN_NAME + " varchar(1000), "
            + COLUMN_CONTACT_KEY + " varchar(500), "
            + COLUMN_USER_ID + " varchar(100), "
            + COLUMN_AVATAR + " varchar(3000), "
            + COLUMN_STATUS + " int, "
            + COLUMN_TYPE + " int);";

    private static final String DATABASE_CREATE_IMAGES_TABLE = "create table "
            + TABLE_IMAGES + "(" + COLUMN_ID
            + " integer primary key autoincrement, "
            + COLUMN_MEDIASTORE_ID + " integer, "
            + COLUMN_ORIG_URI + " text, "
            + COLUMN_THUMB_URI + " text, "
            + COLUMN_SERVER_ID + " text, "
            + COLUMN_TITLE + " text, "
            + COLUMN_BUCKET_ID + " text, "
            + COLUMN_STATUS + " int);";

    public SQLiteHelper(Context context) {
        super(context, DATABASE_NAME, null, DATABASE_VERSION);
    }

    /** Creates both tables on first open. */
    @Override
    public void onCreate(SQLiteDatabase database) {
        database.execSQL(DATABASE_CREATE_FRIENDS_TABLE);
        database.execSQL(DATABASE_CREATE_IMAGES_TABLE);
    }

    /** Destructive upgrade: drops both tables and recreates the schema. */
    @Override
    public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
        Log.w(SQLiteHelper.class.getName(),
                "Upgrading database from version " + oldVersion + " to "
                        + newVersion + ", which will destroy all old data");
        db.execSQL("DROP TABLE IF EXISTS " + TABLE_FRIENDS);
        db.execSQL("DROP TABLE IF EXISTS " + TABLE_IMAGES);
        onCreate(db);
    }
}
andyglick/openclover-git
clover-idea/src/com/atlassian/clover/idea/treetables/ProjectTreeCellRenderer.java
package com.atlassian.clover.idea.treetables;

import com.atlassian.clover.api.registry.MethodInfo;
import com.atlassian.clover.registry.entities.BaseClassInfo;
import com.atlassian.clover.idea.util.ui.CloverIcons;
import com.atlassian.clover.idea.coverage.CoverageTreeModel;
import com.atlassian.clover.idea.testexplorer.SourceFolderDescription;
import com.atlassian.clover.api.registry.HasMetrics;
import com.atlassian.clover.registry.entities.PackageFragment;
import com.atlassian.clover.registry.entities.FullPackageInfo;
import com.atlassian.clover.registry.entities.TestCaseInfo;

import javax.swing.JTree;
import javax.swing.tree.DefaultMutableTreeNode;
import java.awt.Component;
import java.awt.Graphics;

/**
 * Tree cell renderer for the Clover project view: picks an icon and display
 * text based on the concrete type of each node's user object (test case,
 * package, class, source folder, or method), falling back to toString().
 */
public class ProjectTreeCellRenderer extends javax.swing.tree.DefaultTreeCellRenderer {
    @Override
    public Component getTreeCellRendererComponent(JTree tree, Object value, boolean sel, boolean expanded, boolean leaf,
                                                  int row, boolean hasFocus) {
        super.getTreeCellRendererComponent(tree, value, sel, expanded, leaf, row, hasFocus);
        // remembered so paint() can clamp the label to the tree's width
        this.tree = tree;
        final DefaultMutableTreeNode node = (DefaultMutableTreeNode) value;
        Object userObject = node.getUserObject();
        // unwrap coverage-model nodes to the underlying HasMetrics object
        if (userObject instanceof CoverageTreeModel.NodeWrapper) {
            userObject = ((CoverageTreeModel.NodeWrapper) userObject).getHasMetrics();
        }
        if (userObject instanceof TestCaseInfo) {
            final TestCaseInfo tci = (TestCaseInfo) userObject;
            setIcon(CellRendererUtil.getIconForTestCaseInfo(tci));
            setText(tci.getTestName());
        } else if (userObject instanceof PackageFragment || userObject instanceof FullPackageInfo) {
            final HasMetrics packageInfo = (HasMetrics) userObject;
            setIcon(expanded ? CloverIcons.PACKAGE_OPEN : CloverIcons.PACKAGE_CLOSED);
            //LeftToRightOverride - fool JLabel to put ellipsis on the left hand side of text
            // (the name is reversed and suffixed with U+202E RIGHT-TO-LEFT
            // OVERRIDE so truncation visually drops the leading packages)
            final String mangledName = new StringBuilder(packageInfo.getName()).append('\u202e').reverse().toString();
            setText(mangledName);
        } else if (userObject instanceof BaseClassInfo) {
            final BaseClassInfo classInfo = (BaseClassInfo) userObject;
            setIcon(CellRendererUtil.getIconForClassInfo(classInfo));
            setText(classInfo.getName());
        } else if (userObject instanceof SourceFolderDescription) {
            final SourceFolderDescription sourceFolder = (SourceFolderDescription) userObject;
            setText(sourceFolder.getName());
            // distinguish test roots from production source roots
            if (sourceFolder.isTestFolder()) {
                setIcon(expanded ? CloverIcons.TEST_ROOT_FOLDER_OPEN : CloverIcons.TEST_ROOT_FOLDER);
            } else {
                setIcon(expanded ? CloverIcons.SOURCE_ROOT_FOLDER_OPEN : CloverIcons.SOURCE_ROOT_FOLDER);
            }
        } else if (userObject instanceof MethodInfo) {
            final MethodInfo methodInfo = (MethodInfo) userObject;
            setIcon(CellRendererUtil.getIconForMethodInfo(methodInfo));
            setText(methodInfo.getName());
        } else {
            // unknown node type: best-effort text, no icon
            setText(userObject != null ? userObject.toString() : "ERROR");
            setIcon(null);
        }

        return this;
    }

    private JTree tree;

    /**
     * Adjust JLabel size.
     */
    @Override
    public void paint(Graphics g) {
        // never paint wider than the space remaining in the tree component
        final int maxWidth = tree.getWidth() - getBounds().x;
        if (maxWidth < getWidth()) {
            setSize(maxWidth, getHeight());
        }
        super.paint(g);
    }
}
vidkidz/crossbridge
llvm-gcc-4.2-2.9/gcc/testsuite/gcc.dg/dremf-type-compat-4.c
/* Test for bogus diagnostics for dremf definition, as in bug 16666. The GNU extension permitting a prototype to override the promotion of old-style parameter declarations should only apply when the prototype is visible, not for a built-in prototype. */ /* { dg-do compile } */ /* { dg-options "" } */ float dremf(x, y) float x, y; /* { dg-warning "warning: conflicting types for built-in function 'dremf'" } */ { return x + y; }
PacificBiosciences/pbcopper
include/pbcopper/align/AffineAlignment.h
#ifndef PBCOPPER_ALIGN_AFFINEALIGNMENT_H
#define PBCOPPER_ALIGN_AFFINEALIGNMENT_H

#include <string>

namespace PacBio {
namespace Align {

//
// Support for pairwise alignment with an affine gap penalty.
//

class PairwiseAlignment;

// Scoring parameters for affine-gap alignment. GapOpen/GapExtend are the
// affine penalty components; PartialMatchScore is only meaningful for the
// IUPAC-aware variant below.
struct AffineAlignmentParams
{
    float MatchScore;
    float MismatchScore;
    float GapOpen;
    float GapExtend;
    float PartialMatchScore;

    AffineAlignmentParams(float matchScore, float mismatchScore, float gapOpen, float gapExtend,
                          float partialMatchScore = 0);
};

AffineAlignmentParams DefaultAffineAlignmentParams();
AffineAlignmentParams IupacAwareAffineAlignmentParams();

//
// Affine gap-penalty alignment.
//
// NOTE(review): returns a raw pointer — presumably the caller owns and must
// delete the PairwiseAlignment; confirm against the implementation.
//
PairwiseAlignment* AlignAffine(
    const std::string& target, const std::string& query,
    AffineAlignmentParams params = DefaultAffineAlignmentParams());  // NOLINT

//
// Affine gap-penalty alignment with partial awareness of IUPAC ambiguous bases---
// half-penalizes partial mismatches.  For example: (M = IUPAC A/C)
//    T->A = -1,
//    T->M = -1,
//    A->M = -0.5
//
PairwiseAlignment* AlignAffineIupac(
    const std::string& target, const std::string& query,
    AffineAlignmentParams params = IupacAwareAffineAlignmentParams());  // NOLINT

}  // namespace Align
}  // namespace PacBio

#endif  // PBCOPPER_ALIGN_AFFINEALIGNMENT_H
portlandrisk/cytoscape
src/extensions/renderer/canvas/drawing-edges.js
'use strict';

// Canvas-renderer methods for drawing edges: the main path, arrowheads,
// and the selection overlay pass. Mixed into the renderer prototype.
var CRp = {};

// Draws one edge. When drawOverlayInstead is true, the same routine re-runs
// to paint the wider translucent overlay on top of the normal stroke.
CRp.drawEdge = function( context, edge, shiftToOriginWithBb, drawLabel, drawOverlayInstead ){
  var rs = edge._private.rscratch;
  var usePaths = this.usePaths();

  // if bezier ctrl pts can not be calculated, then die
  if( rs.badLine || isNaN(rs.allpts[0]) ){ // isNaN in case edge is impossible and browser bugs (e.g. safari)
    return;
  }

  if( !edge.visible() ){ return; }

  // optionally shift the drawing origin to the given bounding box
  var bb;
  if( shiftToOriginWithBb ){
    bb = shiftToOriginWithBb;
    context.translate( -bb.x1, -bb.y1 );
  }

  var overlayPadding = edge.pstyle( 'overlay-padding' ).pfValue;
  var overlayOpacity = edge.pstyle( 'overlay-opacity' ).value;
  var overlayColor = edge.pstyle( 'overlay-color' ).value;

  // Edge color & opacity
  if( drawOverlayInstead ){
    if( overlayOpacity === 0 ){ // exit early if no overlay
      return;
    }

    this.strokeStyle( context, overlayColor[0], overlayColor[1], overlayColor[2], overlayOpacity );
    context.lineCap = 'round';

    if( rs.edgeType == 'self' && !usePaths ){
      context.lineCap = 'butt';
    }

  } else {
    var lineColor = edge.pstyle( 'line-color' ).value;

    this.strokeStyle( context, lineColor[0], lineColor[1], lineColor[2], edge.pstyle( 'opacity' ).value );

    context.lineCap = 'butt';
  }

  context.lineJoin = 'round';

  // overlay is drawn wider than the edge by the padding on both sides
  var edgeWidth = edge.pstyle( 'width' ).pfValue + (drawOverlayInstead ? 2 * overlayPadding : 0);
  var lineStyle = drawOverlayInstead ? 'solid' : edge.pstyle( 'line-style' ).value;
  context.lineWidth = edgeWidth;

  var shadowBlur = edge.pstyle( 'shadow-blur' ).pfValue;
  var shadowOpacity = edge.pstyle( 'shadow-opacity' ).value;
  var shadowColor = edge.pstyle( 'shadow-color' ).value;
  var shadowOffsetX = edge.pstyle( 'shadow-offset-x' ).pfValue;
  var shadowOffsetY = edge.pstyle( 'shadow-offset-y' ).pfValue;

  // no shadow for the overlay pass
  this.shadowStyle( context, shadowColor, drawOverlayInstead ? 0 : shadowOpacity, shadowBlur, shadowOffsetX, shadowOffsetY );

  this.drawEdgePath( edge, context, rs.allpts, lineStyle, edgeWidth );

  this.drawArrowheads( context, edge, drawOverlayInstead );

  this.shadowStyle( context, 'transparent', 0 ); // reset for next guy

  // after the normal pass, recurse once to draw the overlay pass
  if( !drawOverlayInstead ){
    this.drawEdge( context, edge, false, drawLabel, true );
  }

  this.drawElementText( context, edge, drawLabel );

  if( shiftToOriginWithBb ){
    context.translate( bb.x1, bb.y1 );
  }
};

// Strokes the edge's path; caches a Path2D keyed on the point list so
// unchanged edges can be re-stroked without rebuilding the path.
CRp.drawEdgePath = function( edge, context, pts, type, width ){
  var rs = edge._private.rscratch;
  var canvasCxt = context;
  var path;
  var pathCacheHit = false;
  var usePaths = this.usePaths();

  if( usePaths ){
    var pathCacheKey = pts.join( '$' );
    var keyMatches = rs.pathCacheKey && rs.pathCacheKey === pathCacheKey;

    if( keyMatches ){
      path = context = rs.pathCache;
      pathCacheHit = true;
    } else {
      path = context = new Path2D(); // eslint-disable-line no-undef
      rs.pathCacheKey = pathCacheKey;
      rs.pathCache = path;
    }
  }

  if( canvasCxt.setLineDash ){ // for very outofdate browsers
    switch( type ){
      case 'dotted':
        canvasCxt.setLineDash( [ 1, 1 ] );
        break;

      case 'dashed':
        canvasCxt.setLineDash( [ 6, 3 ] );
        break;

      case 'solid':
        canvasCxt.setLineDash( [ ] );
        break;
    }
  }

  if( !pathCacheHit && !rs.badLine ){
    if( context.beginPath ){ context.beginPath(); }

    context.moveTo( pts[0], pts[1] );

    // bezier-family edges use quad curves (4 values per segment);
    // straight-family edges use line segments (2 values per point)
    switch( rs.edgeType ){
      case 'bezier':
      case 'self':
      case 'compound':
      case 'multibezier':
        for( var i = 2; i + 3 < pts.length; i += 4 ){
          context.quadraticCurveTo( pts[ i ], pts[ i + 1], pts[ i + 2], pts[ i + 3] );
        }
        break;

      case 'straight':
      case 'segments':
      case 'haystack':
        for( var i = 2; i + 1 < pts.length; i += 2 ){
          context.lineTo( pts[ i ], pts[ i + 1] );
        }
        break;
    }
  }

  context = canvasCxt;

  if( usePaths ){
    context.stroke( path );
  } else {
    context.stroke();
  }

  // reset any line dashes
  if( context.setLineDash ){ // for very outofdate browsers
    context.setLineDash( [ ] );
  }
};

// Draws source/target and mid-edge arrowheads; haystack edges have no
// endpoint arrows.
CRp.drawArrowheads = function( context, edge, drawOverlayInstead ){
  if( drawOverlayInstead ){ return; } // don't do anything for overlays

  var rs = edge._private.rscratch;
  var isHaystack = rs.edgeType === 'haystack';

  if( !isHaystack ){
    this.drawArrowhead( context, edge, 'source', rs.arrowStartX, rs.arrowStartY, rs.srcArrowAngle );
  }

  this.drawArrowhead( context, edge, 'mid-target', rs.midX, rs.midY, rs.midtgtArrowAngle );

  this.drawArrowhead( context, edge, 'mid-source', rs.midX, rs.midY, rs.midsrcArrowAngle );

  if( !isHaystack ){
    this.drawArrowhead( context, edge, 'target', rs.arrowEndX, rs.arrowEndY, rs.tgtArrowAngle );
  }
};

// Draws one arrowhead of the given prefix ('source', 'target', 'mid-*')
// at (x, y) rotated by angle. Hollow/translucent fills first punch out the
// underlying edge stroke with destination-out compositing.
CRp.drawArrowhead = function( context, edge, prefix, x, y, angle ){
  if( isNaN( x ) || x == null || isNaN( y ) || y == null || isNaN( angle ) || angle == null ){ return; }

  var self = this;
  var arrowShape = edge.pstyle( prefix + '-arrow-shape' ).value;
  if( arrowShape === 'none' ){ return; }

  var gco = context.globalCompositeOperation;

  var arrowClearFill = edge.pstyle( prefix + '-arrow-fill' ).value === 'hollow' ? 'both' : 'filled';
  var arrowFill = edge.pstyle( prefix + '-arrow-fill' ).value;
  var opacity = edge.pstyle( 'opacity' ).value;

  if( arrowShape === 'half-triangle-overshot' ){
    arrowFill = 'hollow';
    arrowClearFill = 'hollow';
  }

  if( opacity !== 1 || arrowFill === 'hollow' ){ // then extra clear is needed
    context.globalCompositeOperation = 'destination-out';

    self.fillStyle( context, 255, 255, 255, 1 );
    self.strokeStyle( context, 255, 255, 255, 1 );

    self.drawArrowShape( edge, prefix, context,
      arrowClearFill, edge.pstyle( 'width' ).pfValue, edge.pstyle( prefix + '-arrow-shape' ).value,
      x, y, angle
    );

    context.globalCompositeOperation = gco;
  } // otherwise, the opaque arrow clears it for free :)

  var color = edge.pstyle( prefix + '-arrow-color' ).value;
  self.fillStyle( context, color[0], color[1], color[2], opacity );
  self.strokeStyle( context, color[0], color[1], color[2], opacity );

  self.drawArrowShape( edge, prefix, context,
    arrowFill, edge.pstyle( 'width' ).pfValue, edge.pstyle( prefix + '-arrow-shape' ).value,
    x, y, angle
  );
};

// Fills/strokes one arrow shape, caching its Path2D per arrow type keyed on
// size, shape, position and angle.
CRp.drawArrowShape = function( edge, arrowType, context, fill, edgeWidth, shape, x, y, angle ){
  var r = this;
  var usePaths = this.usePaths();
  var rs = edge._private.rscratch;
  var pathCacheHit = false;
  var path;
  var canvasContext = context;
  var translation = { x: x, y: y };

  var size = this.getArrowWidth( edgeWidth );
  var shapeImpl = r.arrowShapes[ shape ];

  if( usePaths ){
    var pathCacheKey = size + '$' + shape + '$' + angle + '$' + x + '$' + y;

    rs.arrowPathCacheKey = rs.arrowPathCacheKey || {};
    rs.arrowPathCache = rs.arrowPathCache || {};

    var alreadyCached = rs.arrowPathCacheKey[ arrowType ] === pathCacheKey;

    if( alreadyCached ){
      path = context = rs.arrowPathCache[ arrowType ];
      pathCacheHit = true;
    } else {
      path = context = new Path2D(); // eslint-disable-line no-undef

      rs.arrowPathCacheKey[ arrowType ] = pathCacheKey;
      rs.arrowPathCache[ arrowType ] = path;
    }
  }

  if( context.beginPath ){ context.beginPath(); }

  if( !pathCacheHit ){
    shapeImpl.draw( context, size, angle, translation );
  }

  if( !shapeImpl.leavePathOpen && context.closePath ){
    context.closePath();
  }

  context = canvasContext;

  if( fill === 'filled' || fill === 'both' ){
    if( usePaths ){
      context.fill( path );
    } else {
      context.fill();
    }
  }

  if( fill === 'hollow' || fill === 'both' ){
    context.lineWidth = ( shapeImpl.matchEdgeWidth ? edgeWidth : 1 );
    context.lineJoin = 'miter';

    if( usePaths ){
      context.stroke( path );
    } else {
      context.stroke();
    }
  }
};

module.exports = CRp;
TommyLike/mindspore
mindspore/nn/metrics/topk.py
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Topk."""
import numpy as np
from .metric import Metric


class TopKCategoricalAccuracy(Metric):
    """
    Calculates the top-k categorical accuracy.

    A sample counts as correct when its true class index appears among the
    ``k`` highest-scored predictions for that sample.

    Note:
        The method `update` must receive input of the form :math:`(y_{pred}, y)`.
        If some samples have the same accuracy, the first sample will be chosen.

    Args:
        k (int): Specifies the top-k categorical accuracy to compute.

    Raises:
        TypeError: If `k` is not int.
        ValueError: If `k` is less than 1.

    Examples:
        >>> x = mindspore.Tensor(np.array([[0.2, 0.5, 0.3, 0.6, 0.2], [0.1, 0.35, 0.5, 0.2, 0.],
        ...                                [0.9, 0.6, 0.2, 0.01, 0.3]]), mindspore.float32)
        >>> y = mindspore.Tensor(np.array([2, 0, 1]), mindspore.float32)
        >>> topk = nn.TopKCategoricalAccuracy(3)
        >>> topk.clear()
        >>> topk.update(x, y)
        >>> result = topk.eval()
        0.6666666666666666
    """
    def __init__(self, k):
        super(TopKCategoricalAccuracy, self).__init__()
        if not isinstance(k, int):
            raise TypeError('k should be integer type, but got {}'.format(type(k)))
        if k < 1:
            raise ValueError('k must be at least 1, but got {}'.format(k))
        self.k = k
        self.clear()

    def clear(self):
        """Clear the internal evaluation result."""
        # _correct_num: count of samples whose label was in the top-k predictions.
        self._correct_num = 0
        # _samples_num: total number of samples accumulated over all updates.
        self._samples_num = 0

    def update(self, *inputs):
        """
        Updates the internal evaluation result y_pred and y.

        Args:
            inputs: Input y_pred and y. y_pred and y are Tensor, list or numpy.ndarray.
                y_pred is in most cases (not strictly) a list of floating numbers in range :math:`[0, 1]`
                and the shape is :math:`(N, C)`, where :math:`N` is the number of cases and :math:`C`
                is the number of categories. y contains values of integers. The shape is :math:`(N, C)`
                if one-hot encoding is used. Shape can also be :math:`(N, 1)` if category index is used.

        Raises:
            ValueError: If the number of inputs is not 2.
        """
        if len(inputs) != 2:
            raise ValueError('Topk need 2 inputs (y_pred, y), but got {}'.format(len(inputs)))
        y_pred = self._convert_data(inputs[0])
        y = self._convert_data(inputs[1])
        # When y is one-hot encoded (same rank as y_pred), reduce it to class indices.
        if y_pred.ndim == y.ndim and self._check_onehot_data(y):
            y = y.argmax(axis=1)
        # Indices of the k highest-scored classes per sample (descending order).
        indices = np.argsort(-y_pred, axis=1)[:, :self.k]
        # Broadcast each label across k columns so it can be compared element-wise.
        repeated_y = y.reshape(-1, 1).repeat(self.k, axis=1)
        correct = np.equal(indices, repeated_y).sum(axis=1)
        self._correct_num += correct.sum()
        self._samples_num += repeated_y.shape[0]

    def eval(self):
        """
        Computes the top-k categorical accuracy.

        Returns:
            Float, computed result.

        Raises:
            RuntimeError: If no samples have been accumulated via `update`.
        """
        if self._samples_num == 0:
            raise RuntimeError('Total samples num must not be 0.')
        return self._correct_num / self._samples_num


class Top1CategoricalAccuracy(TopKCategoricalAccuracy):
    """
    Calculates the top-1 categorical accuracy. This class is a specialized class for TopKCategoricalAccuracy.
    Refer to class 'TopKCategoricalAccuracy' for more details.

    Examples:
        >>> x = mindspore.Tensor(np.array([[0.2, 0.5, 0.3, 0.6, 0.2], [0.1, 0.35, 0.5, 0.2, 0.],
        ...                                [0.9, 0.6, 0.2, 0.01, 0.3]]), mindspore.float32)
        >>> y = mindspore.Tensor(np.array([2, 0, 1]), mindspore.float32)
        >>> topk = nn.Top1CategoricalAccuracy()
        >>> topk.clear()
        >>> topk.update(x, y)
        >>> result = topk.eval()
    """
    def __init__(self):
        super(Top1CategoricalAccuracy, self).__init__(1)


class Top5CategoricalAccuracy(TopKCategoricalAccuracy):
    """
    Calculates the top-5 categorical accuracy. This class is a specialized class for TopKCategoricalAccuracy.
    Refer to class 'TopKCategoricalAccuracy' for more details.

    Examples:
        >>> x = mindspore.Tensor(np.array([[0.2, 0.5, 0.3, 0.6, 0.2], [0.1, 0.35, 0.5, 0.2, 0.],
        ...                                [0.9, 0.6, 0.2, 0.01, 0.3]]), mindspore.float32)
        >>> y = mindspore.Tensor(np.array([2, 0, 1]), mindspore.float32)
        >>> topk = nn.Top5CategoricalAccuracy()
        >>> topk.clear()
        >>> topk.update(x, y)
        >>> result = topk.eval()
    """
    def __init__(self):
        super(Top5CategoricalAccuracy, self).__init__(5)
rekbun/leetcode
src/leetcode/NextPermutation.java
<gh_stars>0 package leetcode; import java.util.Arrays; public class NextPermutation { private int findRightLow(int[] src) { int low=-1; for(int i=1;i<src.length;i++) { if(src[i]>src[i-1]) { low=i-1; } } return low; } private int findLeftLow(int[] src,int si) { int high=si+1; for(int i=si+1;i<src.length;i++) { if(src[i]<=src[high] && src[i]>src[si]) { high=i; } } return high; } private void swap(int[] src,int fi,int ei) { int temp=src[fi]; src[fi]=src[ei]; src[ei]=temp; } private void reverse(int[] src,int si) { int j=src.length-1; for(int i=si;i<j;i++,j--) { swap(src, i, j); } } public void nextPermutation(int[] src) { if(src==null || src.length==0) { return; } int fi=findRightLow(src); if(fi==-1) { Arrays.sort(src); return; } int ei=findLeftLow(src,fi); swap(src,fi,ei); reverse(src,fi+1); } }
wowonrails/qae
spec/lib/sic_code_spec.rb
<filename>spec/lib/sic_code_spec.rb<gh_stars>1-10 require "rails_helper" describe SICCode do describe "regex" do let(:regex) { described_class::REGEX } it "matches the sic codes with correct format" do expect("1020").to match(regex) expect("1020/1").to match(regex) expect("1120/2").to match(regex) end it "does not match the wrong format codes" do expect("10201").to_not match(regex) expect("1020/12").to_not match(regex) expect("111/2").to_not match(regex) expect("111").to_not match(regex) expect("11111/1").to_not match(regex) end end describe "#by_year" do subject { SICCode.first } it "gets average growth by year" do expect(subject.by_year(1)).to eq(subject.year1) end end end
chs6558/chs6558.github.io
node_modules/styled-icons/remix-line/InputMethod/InputMethod.esm.js
export * from '@styled-icons/remix-line/InputMethod';
snazy/mockito
src/main/java/org/mockito/internal/junit/VerificationCollectorImpl.java
/*
 * Copyright (c) 2016 Mockito contributors
 * This program is made available under the terms of the MIT License.
 */
package org.mockito.internal.junit;

import static org.mockito.internal.progress.ThreadSafeMockingProgress.mockingProgress;

import org.junit.runner.Description;
import org.junit.runners.model.Statement;
import org.mockito.exceptions.base.MockitoAssertionError;
import org.mockito.internal.progress.MockingProgressImpl;
import org.mockito.internal.verification.api.VerificationData;
import org.mockito.junit.VerificationCollector;
import org.mockito.verification.VerificationMode;
import org.mockito.verification.VerificationStrategy;

/**
 * Mockito implementation of VerificationCollector.
 *
 * <p>While active (see {@link #assertLazily()}), individual verification
 * failures are not thrown immediately; they are accumulated in a buffer and
 * reported together by {@link #collectAndReport()} as one
 * {@link MockitoAssertionError}.
 */
public class VerificationCollectorImpl implements VerificationCollector {

    // Accumulates the numbered failure messages; (re)created by resetBuilder().
    private StringBuilder builder;
    private int numberOfFailures;

    public VerificationCollectorImpl() {
        this.resetBuilder();
    }

    /**
     * JUnit rule entry point: enables lazy verification around the test
     * statement and reports collected failures after it finishes.
     */
    public Statement apply(final Statement base, final Description description) {
        return new Statement() {
            @Override
            public void evaluate() throws Throwable {
                try {
                    VerificationCollectorImpl.this.assertLazily();
                    base.evaluate();
                    VerificationCollectorImpl.this.collectAndReport();
                } finally {
                    // If base.evaluate() throws an error, we must explicitly reset the VerificationStrategy
                    // to prevent subsequent tests to be assert lazily
                    mockingProgress().setVerificationStrategy(MockingProgressImpl.getDefaultVerificationStrategy());
                }
            }
        };
    }

    /**
     * Restores the default (eager) verification strategy, then throws a single
     * MockitoAssertionError aggregating all collected failures, if any.
     */
    public void collectAndReport() throws MockitoAssertionError {
        mockingProgress().setVerificationStrategy(MockingProgressImpl.getDefaultVerificationStrategy());

        if (this.numberOfFailures > 0) {
            String error = this.builder.toString();

            this.resetBuilder();

            throw new MockitoAssertionError(error);
        }
    }

    /**
     * Switches the global mocking progress to a strategy that wraps every
     * verification mode so its failures are collected instead of thrown.
     */
    public VerificationCollector assertLazily() {
        mockingProgress().setVerificationStrategy(new VerificationStrategy() {
            public VerificationMode maybeVerifyLazily(VerificationMode mode) {
                return new VerificationWrapper(mode);
            }
        });
        return this;
    }

    // Re-seeds the failure buffer with its header line and zeroes the counter.
    private void resetBuilder() {
        this.builder = new StringBuilder()
            .append("There were multiple verification failures:");
        this.numberOfFailures = 0;
    }

    // Appends one numbered failure message (leading character of the original
    // message is dropped, i.e. the leading newline of Mockito's messages).
    private void append(String message) {
        this.numberOfFailures++;
        this.builder.append('\n')
            .append(this.numberOfFailures).append(". ")
            .append(message.substring(1, message.length()));
    }

    /**
     * Decorates a VerificationMode so a verification failure is recorded in
     * the enclosing collector rather than propagated.
     */
    private class VerificationWrapper implements VerificationMode {

        private final VerificationMode delegate;

        private VerificationWrapper(VerificationMode delegate) {
            this.delegate = delegate;
        }

        public void verify(VerificationData data) {
            try {
                this.delegate.verify(data);
            } catch (MockitoAssertionError error) {
                VerificationCollectorImpl.this.append(error.getMessage());
            }
        }

        public VerificationMode description(String description) {
            throw new IllegalStateException("Should not fail in this mode");
        }
    }
}
XiongMr/XOJ_Backend
src/main/java/me/xiongxuan/xoj/entity/Problem.java
package me.xiongxuan.xoj.entity;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

import javax.persistence.*;
import java.time.LocalDateTime;
import java.util.Objects;

/**
 * JPA entity for an online-judge problem, mapped to table "oj_problem".
 * Long texts (descriptions, samples, hints) are stored both as rendered HTML
 * and as the source Markdown, all lazily loaded Text columns.
 *
 * @author XiongXuan
 * @date 2018/4/25
 */
@Entity
@Table(name = "oj_problem")
@JsonIgnoreProperties(value={"hibernateLazyInitializer"})
public class Problem {

    // Table-based id generator starting at 1000, incrementing by 1.
    @Id
    @TableGenerator(
        name = "AppSeqStore",
        initialValue = 1000,
        allocationSize = 1
    )
    @GeneratedValue(
        strategy = GenerationType.TABLE,
        generator = "AppSeqStore"
    )
    private Integer problemId;

    /**
     * The user who created this problem.
     */
    @ManyToOne(optional = false)
    @JoinColumn(name = "create_by")
    @JsonIgnoreProperties(value = { "inviter" })
    private User createBy;

    @Column(nullable = false, length = 50)
    private String title;

    /**
     * Problem description, stored as rendered HTML text.
     */
    @Lob
    @Basic(fetch = FetchType.LAZY)
    @Column(columnDefinition = "Text")
    private String description;

    /**
     * Problem description, stored as Markdown source text.
     */
    @Lob
    @Basic(fetch = FetchType.LAZY)
    @Column(columnDefinition = "Text")
    private String descriptionMarkdown;

    /**
     * Input description, stored as rendered HTML text.
     */
    @Lob
    @Basic(fetch = FetchType.LAZY)
    @Column(columnDefinition = "Text")
    private String inputDescription;

    /**
     * Input description, stored as Markdown source text.
     */
    @Lob
    @Basic(fetch = FetchType.LAZY)
    @Column(columnDefinition = "Text")
    private String inputDescriptionMarkdown;

    /**
     * Output description, stored as rendered HTML text.
     */
    @Lob
    @Basic(fetch = FetchType.LAZY)
    @Column(columnDefinition = "Text")
    private String outputDescription;

    /**
     * Output description, stored as Markdown source text.
     */
    @Lob
    @Basic(fetch = FetchType.LAZY)
    @Column(columnDefinition = "Text")
    private String outputDescriptionMarkdown;

    /**
     * Sample cases stored as a JSON array, for example:
     * [{"input": "1 1", "output": "2"},{"input": "2 1", "output": "3"},{"input": "2 2", "output": "4"}]
     */
    @Lob
    @Basic(fetch = FetchType.LAZY)
    @Column(columnDefinition = "Text")
    private String samples;

    /**
     * Total number of submissions.
     */
    private Integer submit = 0;

    /**
     * Number of accepted submissions.
     */
    private Integer accept = 0;

    /**
     * Time limit.
     */
    private Integer timeLimit;

    /**
     * Memory limit.
     */
    private Integer memoryLimit;

    /**
     * MD5 checksum of the test-case data.
     */
    private String testCaseMd5;

    /**
     * Hint, stored as Markdown source text.
     */
    @Lob
    @Basic(fetch = FetchType.LAZY)
    @Column(columnDefinition = "Text")
    private String hintMarkdown;

    /**
     * Hint, stored as rendered HTML text.
     */
    @Lob
    @Basic(fetch = FetchType.LAZY)
    @Column(columnDefinition = "Text")
    private String hint;

    /**
     * Problem type; default 0 means an original problem of this OJ.
     * Foreign key to problem_type_id.
     */
    @ManyToOne(optional = false)
    @JoinColumn(name = "problem_type_id")
    private ProblemType problemType = new ProblemType(0);

    /**
     * Submission problem number on the OJ this problem belongs to (string).
     * For this OJ it corresponds to problem_id; for remote judges such as HDU
     * it is that judge's own problem number.
     */
    @Column(length = 15)
    private String remoteProblemId;

    /**
     * Whether the problem is visible in the problem list; default 1 (visible).
     */
    private Integer problemVisiable = 1;

    private LocalDateTime createTime = LocalDateTime.now();

    public Problem() {
    }

    // Lightweight constructor used for list views (no descriptions loaded).
    public Problem(Integer problemId, String title, Integer submit, Integer accept, ProblemType problemType) {
        this.problemId = problemId;
        this.title = title;
        this.submit = submit;
        this.accept = accept;
        this.problemType = problemType;
    }

    // Full constructor with the rendered-HTML fields.
    public Problem(Integer problemId, User createBy, String title, String description, String inputDescription,
                   String outputDescription, String samples, Integer submit, Integer accept, Integer timeLimit,
                   Integer memoryLimit, String hint, ProblemType problemType, LocalDateTime createTime) {
        this.problemId = problemId;
        this.createBy = createBy;
        this.title = title;
        this.description = description;
        this.inputDescription = inputDescription;
        this.outputDescription = outputDescription;
        this.samples = samples;
        this.submit = submit;
        this.accept = accept;
        this.timeLimit = timeLimit;
        this.memoryLimit = memoryLimit;
        this.hint = hint;
        this.problemType = problemType;
        this.createTime = createTime;
    }

    // Constructor with the Markdown-source fields (used when editing/creating).
    public Problem(Integer problemId, User createBy, String title, String descriptionMarkdown,
                   String inputDescriptionMarkdown, String outputDescriptionMarkdown, String samples,
                   Integer timeLimit, Integer memoryLimit, String hintMarkdown, ProblemType problemType,
                   String remoteProblemId, Integer problemVisiable) {
        this.problemId = problemId;
        this.createBy = createBy;
        this.title = title;
        this.descriptionMarkdown = descriptionMarkdown;
        this.inputDescriptionMarkdown = inputDescriptionMarkdown;
        this.outputDescriptionMarkdown = outputDescriptionMarkdown;
        this.samples = samples;
        this.timeLimit = timeLimit;
        this.memoryLimit = memoryLimit;
        this.hintMarkdown = hintMarkdown;
        this.problemType = problemType;
        this.remoteProblemId = remoteProblemId;
        this.problemVisiable = problemVisiable;
    }

    // Id-only constructor, for use as an association reference.
    public Problem(Integer problemId) {
        this.problemId = problemId;
    }

    public String getTestCaseMd5() { return testCaseMd5; }

    public void setTestCaseMd5(String testCaseMd5) { this.testCaseMd5 = testCaseMd5; }

    public Integer getProblemId() { return problemId; }

    public void setProblemId(Integer problemId) { this.problemId = problemId; }

    public String getTitle() { return title; }

    public void setTitle(String title) { this.title = title; }

    public String getDescription() { return description; }

    public void setDescription(String description) { this.description = description; }

    public String getInputDescription() { return inputDescription; }

    public void setInputDescription(String inputDescription) { this.inputDescription = inputDescription; }

    public String getOutputDescription() { return outputDescription; }

    public void setOutputDescription(String outputDescription) { this.outputDescription = outputDescription; }

    public String getSamples() { return samples; }

    public void setSamples(String samples) { this.samples = samples; }

    public Integer getSubmit() { return submit; }

    public void setSubmit(Integer submit) { this.submit = submit; }

    public Integer getAccept() { return accept; }

    public void setAccept(Integer accept) { this.accept = accept; }

    public Integer getTimeLimit() { return timeLimit; }

    public void setTimeLimit(Integer timeLimit) { this.timeLimit = timeLimit; }

    public Integer getMemoryLimit() { return memoryLimit; }

    public void setMemoryLimit(Integer memoryLimit) { this.memoryLimit = memoryLimit; }

    public String getHint() { return hint; }

    public void setHint(String hint) { this.hint = hint; }

    public String getRemoteProblemId() { return remoteProblemId; }

    public void setRemoteProblemId(String remoteProblemId) { this.remoteProblemId = remoteProblemId; }

    public Integer getProblemVisiable() { return problemVisiable; }

    public void setProblemVisiable(Integer problemVisiable) { this.problemVisiable = problemVisiable; }

    public LocalDateTime getCreateTime() { return createTime; }

    public void setCreateTime(LocalDateTime createTime) { this.createTime = createTime; }

    public User getCreateBy() { return createBy; }

    public void setCreateBy(User createBy) { this.createBy = createBy; }

    public String getDescriptionMarkdown() { return descriptionMarkdown; }

    public void setDescriptionMarkdown(String descriptionMarkdown) { this.descriptionMarkdown = descriptionMarkdown; }

    public String getInputDescriptionMarkdown() { return inputDescriptionMarkdown; }

    public void setInputDescriptionMarkdown(String inputDescriptionMarkdown) { this.inputDescriptionMarkdown = inputDescriptionMarkdown; }

    public String getOutputDescriptionMarkdown() { return outputDescriptionMarkdown; }

    public void setOutputDescriptionMarkdown(String outputDescriptionMarkdown) { this.outputDescriptionMarkdown = outputDescriptionMarkdown; }

    public String getHintMarkdown() { return hintMarkdown; }

    public void setHintMarkdown(String hintMarkdown) { this.hintMarkdown = hintMarkdown; }

    public ProblemType getProblemType() { return problemType; }

    public void setProblemType(ProblemType problemType) { this.problemType = problemType; }

    @Override
    public String toString() {
        return "Problem{" +
                "problemId=" + problemId +
                ", createBy=" + createBy +
                ", title='" + title + '\'' +
                ", description='" + description + '\'' +
                ", descriptionMarkdown='" + descriptionMarkdown + '\'' +
                ", inputDescription='" + inputDescription + '\'' +
                ", inputDescriptionMarkdown='" + inputDescriptionMarkdown + '\'' +
                ", outputDescription='" + outputDescription + '\'' +
                ", outputDescriptionMarkdown='" + outputDescriptionMarkdown + '\'' +
                ", samples='" + samples + '\'' +
                ", submit=" + submit +
                ", accept=" + accept +
                ", timeLimit=" + timeLimit +
                ", memoryLimit=" + memoryLimit +
                ", hintMarkdown='" + hintMarkdown + '\'' +
                ", hint='" + hint + '\'' +
                ", problemType=" + problemType +
                ", remoteProblemId='" + remoteProblemId + '\'' +
                ", problemVisiable=" + problemVisiable +
                ", createTime=" + createTime +
                '}';
    }

    // Identity is based solely on problemId.
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof Problem)) return false;
        Problem problem = (Problem) o;
        return problemId.equals(problem.problemId);
    }

    @Override
    public int hashCode() {
        return Objects.hash(problemId);
    }
}
safecass/safecass
libs/db/MongoDB.h
<reponame>safecass/safecass<gh_stars>0 //------------------------------------------------------------------------ // // SAFECASS: Safety Architecture For Engineering Computer-Assisted Surgical Systems // // Copyright (C) 2012-2014 <NAME> and <NAME> // //------------------------------------------------------------------------ // // Created on : Aug 14, 2012 // Last revision: May 8, 2014 // Author : <NAME> (<EMAIL>) // Github : https://github.com/minyang/casros // #ifndef _MongoDB_h #define _MongoDB_h #include "common.h" #include "jsonSerializer.h" namespace SC { class SCLIB_EXPORT MongoDB { protected: //! Convert Monitor topic messages from JSON to MongoDB entry format static const std::string ConvertTopicMessageToDBEntry_Monitor(JSONSerializer & jsonSerializer); //! Convert Fault topic messages from JSON to MongoDB entry format static const std::string ConvertTopicMessageToDBEntry_Event(JSONSerializer & jsonSerializer); public: //! Constructor MongoDB(void); //! Destructor ~MongoDB(); //! Convert topic messages from JSON to MongoDB entry format static const std::string ConvertTopicMessageToDBEntry( const Topic::Type topic, JSONSerializer & jsonSerializer); }; }; #endif // _MongoDB_h
Therealdomacio/tiktok
core/web/p2p_keys_controller_test.go
package web_test

import (
	"net/http"
	"testing"

	"github.com/libp2p/go-libp2p-core/peer"
	"github.com/smartcontractkit/chainlink/core/internal/cltest"
	"github.com/smartcontractkit/chainlink/core/services/offchainreporting"
	"github.com/smartcontractkit/chainlink/core/store/models/p2pkey"
	"github.com/smartcontractkit/chainlink/core/utils"
	"github.com/smartcontractkit/chainlink/core/web"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

// Verifies that GET /v2/p2p_keys lists the keys currently in the OCR keystore.
func TestP2PKeysController_Index_HappyPath(t *testing.T) {
	t.Parallel()

	client, OCRKeyStore, cleanup := setupP2PKeysControllerTests(t)
	defer cleanup()

	p2pKeys := []p2pkey.EncryptedP2PKey{}
	keys, _ := OCRKeyStore.FindEncryptedP2PKeys()

	response, cleanup := client.Get("/v2/p2p_keys")
	defer cleanup()
	cltest.AssertServerResponse(t, response, http.StatusOK)

	err := web.ParseJSONAPIResponse(cltest.ParseResponseBody(t, response), &p2pKeys)
	assert.NoError(t, err)

	require.Len(t, p2pKeys, len(keys))

	assert.Equal(t, keys[0].ID, p2pKeys[0].ID)
	assert.Equal(t, keys[0].PubKey, p2pKeys[0].PubKey)
	assert.Equal(t, keys[0].PeerID, p2pKeys[0].PeerID)
}

// Verifies that POST /v2/p2p_keys creates one new key, returns it, and that
// the created key is decryptable from the keystore.
func TestP2PKeysController_Create_HappyPath(t *testing.T) {
	t.Parallel()

	client, OCRKeyStore, cleanup := setupP2PKeysControllerTests(t)
	defer cleanup()

	keys, _ := OCRKeyStore.FindEncryptedP2PKeys()
	initialLength := len(keys)

	response, cleanup := client.Post("/v2/p2p_keys", nil)
	defer cleanup()
	cltest.AssertServerResponse(t, response, http.StatusOK)

	keys, _ = OCRKeyStore.FindEncryptedP2PKeys()
	require.Len(t, keys, initialLength+1)

	encryptedP2PKey := p2pkey.EncryptedP2PKey{}
	err := web.ParseJSONAPIResponse(cltest.ParseResponseBody(t, response), &encryptedP2PKey)
	assert.NoError(t, err)

	// The newly created key is appended last.
	lastKeyIndex := len(keys) - 1
	assert.Equal(t, keys[lastKeyIndex].ID, encryptedP2PKey.ID)
	assert.Equal(t, keys[lastKeyIndex].PubKey, encryptedP2PKey.PubKey)
	assert.Equal(t, keys[lastKeyIndex].PeerID, encryptedP2PKey.PeerID)

	_, exists := OCRKeyStore.DecryptedP2PKey(peer.ID(encryptedP2PKey.PeerID))
	assert.Equal(t, exists, true)
}

// A malformed key id should yield 422 Unprocessable Entity.
func TestP2PKeysController_Delete_InvalidP2PKey(t *testing.T) {
	t.Parallel()

	client, _, cleanup := setupP2PKeysControllerTests(t)
	defer cleanup()

	invalidP2PKeyID := "bad_key_id"
	response, cleanup := client.Delete("/v2/p2p_keys/" + invalidP2PKeyID)
	defer cleanup()
	assert.Equal(t, http.StatusUnprocessableEntity, response.StatusCode)
}

// A well-formed but unknown key id should yield 404 Not Found.
func TestP2PKeysController_Delete_NonExistentP2PKeyID(t *testing.T) {
	t.Parallel()

	client, _, cleanup := setupP2PKeysControllerTests(t)
	defer cleanup()

	nonExistentP2PKeyID := "1234567890"
	response, cleanup := client.Delete("/v2/p2p_keys/" + nonExistentP2PKeyID)
	defer cleanup()
	assert.Equal(t, http.StatusNotFound, response.StatusCode)
}

// Deleting a freshly generated key removes it and restores the initial count.
func TestP2PKeysController_Delete_HappyPath(t *testing.T) {
	t.Parallel()

	client, OCRKeyStore, cleanup := setupP2PKeysControllerTests(t)
	defer cleanup()

	require.NoError(t, OCRKeyStore.Unlock(cltest.Password))

	keys, _ := OCRKeyStore.FindEncryptedP2PKeys()
	initialLength := len(keys)
	_, encryptedKeyBundle, _ := OCRKeyStore.GenerateEncryptedP2PKey()

	response, cleanup := client.Delete("/v2/p2p_keys/" + encryptedKeyBundle.GetID())
	defer cleanup()
	assert.Equal(t, http.StatusOK, response.StatusCode)
	assert.Error(t, utils.JustError(OCRKeyStore.FindEncryptedP2PKeyByID(encryptedKeyBundle.ID)))

	keys, _ = OCRKeyStore.FindEncryptedP2PKeys()
	assert.Equal(t, initialLength, len(keys))
}

// Boots a test application and returns an HTTP client plus the OCR keystore.
func setupP2PKeysControllerTests(t *testing.T) (cltest.HTTPClientCleaner, *offchainreporting.KeyStore, func()) {
	t.Helper()

	app, cleanup := cltest.NewApplication(t, cltest.LenientEthMock)
	require.NoError(t, app.Start())
	client := app.NewHTTPClient()
	OCRKeyStore := app.GetStore().OCRKeyStore

	return client, OCRKeyStore, cleanup
}
onezens/QQTweak
qqtw/qqheaders7.2/QQSMItemElementVideo.h
<gh_stars>1-10 // // Generated by class-dump 3.5 (64 bit). // // class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by <NAME>. // #import "QQSMItemElementBase.h" @class NSString, UIImage; @interface QQSMItemElementVideo : QQSMItemElementBase { NSString *_src; NSString *_cover; long long _load; NSString *_vInfo; NSString *_tInfo; long long _preTime; long long _preWidth; long long _preHeight; long long _fullTime; NSString *_summary; long long _busiType; NSString *_aID; long long _preStartPosi; UIImage *_currentFrame; double _currentTime; _Bool _isPlaying; _Bool _isPreviewEnd; _Bool _isReported; _Bool _hasPlayed; } @property(nonatomic) long long busiType; // @synthesize busiType=_busiType; @property(retain, nonatomic) UIImage *currentFrame; // @synthesize currentFrame=_currentFrame; @property(nonatomic) double currentTime; // @synthesize currentTime=_currentTime; - (void)dealloc; - (id)getElementName; @property(nonatomic) _Bool hasPlayed; // @synthesize hasPlayed=_hasPlayed; @property(nonatomic) _Bool isPlaying; // @synthesize isPlaying=_isPlaying; @property(nonatomic) _Bool isReported; // @synthesize isReported=_isReported; // Remaining properties @property(retain, nonatomic) NSString *aID; // @dynamic aID; @property(retain, nonatomic) NSString *cover; // @dynamic cover; @property(nonatomic) long long fullTime; // @dynamic fullTime; @property(nonatomic) long long load; // @dynamic load; @property(nonatomic) long long preHeight; // @dynamic preHeight; @property(nonatomic) long long preStartPosi; // @dynamic preStartPosi; @property(nonatomic) long long preTime; // @dynamic preTime; @property(nonatomic) long long preWidth; // @dynamic preWidth; @property(retain, nonatomic) NSString *src; // @dynamic src; @property(retain, nonatomic) NSString *summary; // @dynamic summary; @property(retain, nonatomic) NSString *tInfo; // @dynamic tInfo; @property(retain, nonatomic) NSString *vInfo; // @dynamic vInfo; @end
xlasers/spring-modules
spring-boot/spring-boot-basic/convert/src/main/java/com/xlaser4j/demo/config/DateConverter.java
<reponame>xlasers/spring-modules package com.xlaser4j.demo.config; import java.text.SimpleDateFormat; import java.util.Date; import lombok.SneakyThrows; import org.springframework.context.annotation.Configuration; import org.springframework.core.convert.converter.Converter; /** * @package: com.xlaser4j.demo.config * @author: Elijah.D * @time: 2020/1/26 12:54 * @description: * @modified: Elijah.D */ @Configuration public class DateConverter implements Converter<String, Date> { /** * convert sting to date * * @param s source * @return target */ @SneakyThrows @Override public Date convert(String s) { SimpleDateFormat format = new SimpleDateFormat("yyyy-mm-dd"); return format.parse(s); } }
diekhans/kent
src/hg/lib/delHinds2.c
/* delHinds2.c was originally generated by the autoSql program, which also
 * generated delHinds2.h and delHinds2.sql.  This module links the database and
 * the RAM representation of objects.
 *
 * NOTE: generated code — regenerate from the .as schema rather than editing
 * the logic by hand. */

/* Copyright (C) 2014 The Regents of the University of California
 * See README in this or parent directory for licensing information. */

#include "common.h"
#include "linefile.h"
#include "dystring.h"
#include "jksql.h"
#include "delHinds2.h"


void delHinds2StaticLoad(char **row, struct delHinds2 *ret)
/* Load a row from delHinds2 table into ret.  The contents of ret will
 * be replaced at the next call to this function.
 * Note: string fields alias the row memory; they are NOT copied. */
{
ret->chrom = row[0];
ret->chromStart = sqlUnsigned(row[1]);
ret->chromEnd = sqlUnsigned(row[2]);
ret->name = row[3];
ret->frequency = sqlFloat(row[4]);
}

struct delHinds2 *delHinds2Load(char **row)
/* Load a delHinds2 from row fetched with select * from delHinds2
 * from database.  Dispose of this with delHinds2Free(). */
{
struct delHinds2 *ret;

AllocVar(ret);
ret->chrom = cloneString(row[0]);
ret->chromStart = sqlUnsigned(row[1]);
ret->chromEnd = sqlUnsigned(row[2]);
ret->name = cloneString(row[3]);
ret->frequency = sqlFloat(row[4]);
return ret;
}

struct delHinds2 *delHinds2LoadAll(char *fileName)
/* Load all delHinds2 from a whitespace-separated file.
 * Dispose of this with delHinds2FreeList(). */
{
struct delHinds2 *list = NULL, *el;
struct lineFile *lf = lineFileOpen(fileName, TRUE);
char *row[5];

while (lineFileRow(lf, row))
    {
    el = delHinds2Load(row);
    slAddHead(&list, el);
    }
lineFileClose(&lf);
/* slAddHead builds the list in reverse, so restore file order. */
slReverse(&list);
return list;
}

struct delHinds2 *delHinds2LoadAllByChar(char *fileName, char chopper)
/* Load all delHinds2 from a chopper separated file.
 * Dispose of this with delHinds2FreeList(). */
{
struct delHinds2 *list = NULL, *el;
struct lineFile *lf = lineFileOpen(fileName, TRUE);
char *row[5];

while (lineFileNextCharRow(lf, chopper, row, ArraySize(row)))
    {
    el = delHinds2Load(row);
    slAddHead(&list, el);
    }
lineFileClose(&lf);
slReverse(&list);
return list;
}

struct delHinds2 *delHinds2CommaIn(char **pS, struct delHinds2 *ret)
/* Create a delHinds2 out of a comma separated string.
 * This will fill in ret if non-null, otherwise will
 * return a new delHinds2 */
{
char *s = *pS;

if (ret == NULL)
    AllocVar(ret);
ret->chrom = sqlStringComma(&s);
ret->chromStart = sqlUnsignedComma(&s);
ret->chromEnd = sqlUnsignedComma(&s);
ret->name = sqlStringComma(&s);
ret->frequency = sqlFloatComma(&s);
*pS = s;
return ret;
}

void delHinds2Free(struct delHinds2 **pEl)
/* Free a single dynamically allocated delHinds2 such as created
 * with delHinds2Load(). */
{
struct delHinds2 *el;

if ((el = *pEl) == NULL) return;
freeMem(el->chrom);
freeMem(el->name);
freez(pEl);
}

void delHinds2FreeList(struct delHinds2 **pList)
/* Free a list of dynamically allocated delHinds2's */
{
struct delHinds2 *el, *next;

for (el = *pList; el != NULL; el = next)
    {
    next = el->next;
    delHinds2Free(&el);
    }
*pList = NULL;
}

void delHinds2Output(struct delHinds2 *el, FILE *f, char sep, char lastSep)
/* Print out delHinds2.  Separate fields with sep. Follow last field with lastSep.
 * String fields are quoted when sep is ',' (CSV-style output). */
{
if (sep == ',') fputc('"',f);
fprintf(f, "%s", el->chrom);
if (sep == ',') fputc('"',f);
fputc(sep,f);
fprintf(f, "%u", el->chromStart);
fputc(sep,f);
fprintf(f, "%u", el->chromEnd);
fputc(sep,f);
if (sep == ',') fputc('"',f);
fprintf(f, "%s", el->name);
if (sep == ',') fputc('"',f);
fputc(sep,f);
fprintf(f, "%g", el->frequency);
fputc(lastSep,f);
}

/* -------------------------------- End autoSql Generated Code -------------------------------- */
RichardBradley/scapegoat
src/test/scala/com/sksamuel/scapegoat/inspections/ParameterlessMethodReturnsUnitTest.scala
<gh_stars>0 package com.sksamuel.scapegoat.inspections import com.sksamuel.scapegoat.PluginRunner import org.scalatest.{FreeSpec, Matchers} /** @author <NAME> */ class ParameterlessMethodReturnsUnitTest extends FreeSpec with ASTSugar with Matchers with PluginRunner { override val inspections = Seq(new ParameterlessMethodReturnsUnit) "ParameterlessMethodReturnsUnit" - { "should report warning" in { val code = """object Test { | def paramless: Unit = () | def paramless2 : Int = 4 | def params(): Unit = () | def params2() : Int = 4 } """.stripMargin compileCodeSnippet(code) compiler.scapegoat.reporter.warnings.size shouldBe 1 } } }
alancnet/artifactory
web/angular-web/src/main/webapp/specs/dao/artifact/artifact_views_dao.spec.js
describe('unit test:pom view tab dao', function () { var artifactViewsDao; var RESOURCE; var pomViewTabDataMock = { "view":"pom", "path": "DecodedBase64/DecodedBase64/DecodedBase64/DecodedBase64-DecodedBase64.pom", "repoKey": "ext-releases-local" }; // inject the main module beforeEach(m('artifactory.dao')); // run this code before each case beforeEach(inject(function (ArtifactViewsDao, _RESOURCE_, $httpBackend) { artifactViewsDao = ArtifactViewsDao; RESOURCE = _RESOURCE_; server = $httpBackend; })); afterEach(function () { server.flush(); }); it('fetch should send a put request to serve', function () { server.expectPOST(RESOURCE.API_URL + RESOURCE.VIEWS+"/pom").respond(200); artifactViewsDao.fetch(pomViewTabDataMock); }); });
marquinhusgoncalves/js-tdd
es6/5_3-novos-metodos-para-strings.js
<filename>es6/5_3-novos-metodos-para-strings.js
// Demo of the ES6 string methods startsWith/endsWith/repeat/includes.
// NOTE(review): '<NAME>' looks like a redacted placeholder -- the original
// value was presumably a lorem-ipsum style sample string; confirm.
let text = '<NAME>';

// Checks whether the string starts with 'rem', beginning the comparison
// at index 2.
console.log(text.startsWith('rem', 2));

// Checks whether the string ends with 'ane', treating only the first
// 25 characters as the string's end.
console.log(text.endsWith('ane', 25));

// Repeats the string the given number of times.
console.log('test'.repeat(10));

// Checks whether the substring occurs anywhere in the string.
console.log(text.includes('ipsum'));
google-ar/chromium
third_party/WebKit/LayoutTests/http/tests/fetch/script-tests/response-content.js
// Web-platform tests for the Fetch API Response constructor: how each body
// type (string, ArrayBuffer, ArrayBufferView, FormData, URLSearchParams,
// null/absent) determines the inferred Content-Type and the bytes read back,
// plus bodyUsed/clone semantics.  Runs in both window and worker scopes.
if (self.importScripts) {
  // Worker scope: pull in the shared promise_test/assert helpers
  // (presumably also getContentType, used below -- confirm in helpers file).
  importScripts('../resources/fetch-test-helpers.js');
}

promise_test(function() {
    var response = new Response;
    return response.text()
      .then(function(text) {
          assert_equals(text, '',
                        'response.text() must return an empty string' +
                        'if body is null');
        });
  }, 'Behavior of Response with no constructor arguments.');

promise_test(function() {
    var response = new Response('test string');
    // A USVString body implies a text/plain UTF-8 Content-Type.
    assert_equals(
      response.headers.get('Content-Type'),
      'text/plain;charset=UTF-8',
      'A Response constructed with a string should have a Content-Type.');
    return response.text()
      .then(function(text) {
          assert_equals(text, 'test string',
                        'Response body text should match the string on ' +
                        'construction.');
        });
  }, 'Behavior of Response with string content.');

promise_test(function() {
    var intView = new Int32Array([0, 1, 2, 3, 4, 55, 6, 7, 8, 9]);
    var buffer = intView.buffer;
    var response = new Response(buffer);
    // Binary bodies carry no implicit Content-Type.
    assert_false(response.headers.has('Content-Type'),
                 'A Response constructed with ArrayBuffer should not have a ' +
                 'content type.');
    return response.arrayBuffer()
      .then(function(buffer) {
          var resultIntView = new Int32Array(buffer);
          assert_array_equals(
            resultIntView, [0, 1, 2, 3, 4, 55, 6, 7, 8, 9],
            'Response body ArrayBuffer should match ArrayBuffer ' +
            'it was constructed with.');
        });
  }, 'Behavior of Response with ArrayBuffer content.');

promise_test(function() {
    var intView = new Int32Array([0, 1, 2, 3, 4, 55, 6, 7, 8, 9]);
    var response = new Response(intView);
    assert_false(response.headers.has('Content-Type'),
                 'A Response constructed with ArrayBufferView ' +
                 'should not have a content type.');
    return response.arrayBuffer()
      .then(function(buffer) {
          var resultIntView = new Int32Array(buffer);
          assert_array_equals(
            resultIntView, [0, 1, 2, 3, 4, 55, 6, 7, 8, 9],
            'Response body ArrayBuffer should match ArrayBufferView ' +
            'it was constructed with.');
        });
  }, 'Behavior of Response with ArrayBufferView content without a slice.');

promise_test(function() {
    var intView = new Int32Array([0, 1, 2, 3, 4, 55, 6, 7, 8, 9]);
    var slice = intView.subarray(1, 4);  // Should be [1, 2, 3]
    var response = new Response(slice);
    assert_false(response.headers.has('Content-Type'),
                 'A Response constructed with ArrayBufferView ' +
                 'should not have a content type.');
    return response.arrayBuffer()
      .then(function(buffer) {
          // Only the view's window, not the whole backing buffer, must be used.
          var resultIntView = new Int32Array(buffer);
          assert_array_equals(
            resultIntView, [1, 2, 3],
            'Response body ArrayBuffer should match ArrayBufferView ' +
            'slice it was constructed with.');
        });
  }, 'Behavior of Response with ArrayBufferView content with a slice.');

promise_test(function() {
    var formData = new FormData();
    formData.append('sample string', '1234567890');
    formData.append('sample blob', new Blob(['blob content']));
    formData.append('sample file', new File(['file content'], 'file.dat'));
    var response = new Response(formData);
    return response.text()
      .then(function(result) {
          // The multipart boundary is generated per-Response, so recover it
          // from the Content-Type header before building the expected body.
          var reg = new RegExp('multipart\/form-data; boundary=(.*)');
          var regResult = reg.exec(getContentType(response.headers));
          var boundary = regResult[1];
          var expected_body =
            '--' + boundary + '\r\n' +
            'Content-Disposition: form-data; name="sample string"\r\n' +
            '\r\n' +
            '1234567890\r\n' +
            '--' + boundary + '\r\n' +
            'Content-Disposition: form-data; name="sample blob"; ' +
            'filename="blob"\r\n' +
            'Content-Type: application/octet-stream\r\n' +
            '\r\n' +
            'blob content\r\n' +
            '--' + boundary + '\r\n' +
            'Content-Disposition: form-data; name="sample file"; ' +
            'filename="file.dat"\r\n' +
            'Content-Type: application/octet-stream\r\n' +
            '\r\n' +
            'file content\r\n' +
            '--' + boundary + '--\r\n';
          assert_equals(
            result, expected_body,
            'Creating a Response with FormData body must succeed.');
        });
  }, 'Behavior of Response with FormData content');

promise_test(function() {
    const urlSearchParams = new URLSearchParams();
    urlSearchParams.append('sample string', '1234567890');
    urlSearchParams.append('sample string 2', '1234567890 & 2');
    var response = new Response(urlSearchParams);
    assert_equals(
      response.headers.get('Content-Type'),
      'application/x-www-form-urlencoded;charset=UTF-8',
      'A Response constructed with a URLSearchParams should have a Content-Type.');
    return response.text()
      .then(function(result) {
          assert_equals(
            result,
            'sample+string=1234567890&sample+string+2=1234567890+%26+2',
            'Creating a Response with URLSearchParams body must succeed.');
        });
  }, 'Behavior of Response with URLSearchParams content');

promise_test(function() {
    var headers = new Headers;
    headers.set('Content-Language', 'ja');
    var response = new Response(
      'test string', {method: 'GET', headers: headers});
    assert_false(response.bodyUsed);
    var response2 = response.clone();
    assert_false(response.bodyUsed, 'bodyUsed is not set by clone().');
    assert_false(response2.bodyUsed, 'bodyUsed is not set by clone().');
    // Clones must have independent header lists.
    response.headers.set('Content-Language', 'en');
    assert_equals(
      response2.headers.get('Content-Language'), 'ja', 'Headers of cloned ' +
      'response should not change when original response headers are changed.');
    var p = response.text();
    assert_true(response.bodyUsed, 'bodyUsed should be true when locked.');
    assert_false(response2.bodyUsed,
                 'Cloned bodies should not share bodyUsed.');
    // NOTE(review): response3 below is assigned without declaration, creating
    // an implicit global when the callback runs -- presumably intentional for
    // the throw check, but confirm.
    assert_throws({name: 'TypeError'}, function() {
        response3 = response.clone();
      },
      'Response.clone() should throw if the body was used.');
    return p.then(function(text) {
        assert_true(response.bodyUsed);
        assert_false(response2.bodyUsed);
        return response2.text();
      }).then(function(text) {
        assert_equals(text, 'test string',
                      'Response clone response body text should match.');
        assert_true(response2.bodyUsed);
      });
  }, 'Behavior of bodyUsed in Response and clone behavior.');

promise_test(function() {
    var response = new Response(null);
    assert_equals(
      response.headers.get('Content-Type'), null,
      'A Response constructed with null body should have no Content-Type.');
    return response.text()
      .then(function(text) {
          assert_equals(text, '',
                        'Response with null body accessed as text should ' +
                        'resolve to the empty string.');
        });
  }, 'Behavior of Response passed null for body.');

promise_test(function() {
    var response = new Response();
    assert_equals(
      response.headers.get('Content-Type'), null,
      'A Response constructed with no body should have no Content-Type.');
    return response.text()
      .then(function(text) {
          assert_equals(text, '',
                        'Response with no body accessed as text should ' +
                        'resolve to the empty string.');
        });
  }, 'Behavior of Response with no body.');

done();
pps-lab/fl-analysis
src/data/leaf_loader.py
"""Loads LEAF federated-learning datasets and provides Shakespeare text utilities."""
import os
import numpy as np
import pathlib


def load_leaf_dataset(dataset, use_val_set=False):
    """Load the train and eval splits of a LEAF dataset bundled under src/data/leaf.

    Args:
        dataset: name of the LEAF dataset directory (e.g. 'shakespeare').
        use_val_set: when True, read the 'val' split instead of 'test'.

    Returns:
        (users, train_data, test_data) as produced by leaf's read_data; the
        group list returned by read_data is discarded.
    """
    # Imported lazily so this module stays importable (e.g. for the text
    # helpers below) without the full leaf package being on the path.
    from src.data.leaf.model_utils import read_data

    eval_set = 'val' if use_val_set else 'test'
    base_dir = pathlib.Path(__file__).parent.resolve()
    train_data_dir = os.path.join(base_dir, 'leaf', dataset, 'data', 'train')
    test_data_dir = os.path.join(base_dir, 'leaf', dataset, 'data', eval_set)

    users, groups, train_data, test_data = read_data(train_data_dir, test_data_dir)
    return users, train_data, test_data


# ------------------------
# utils for shakespeare dataset

# Character vocabulary of the LEAF Shakespeare task; a character's integer id
# is its index in this string.
ALL_LETTERS = "\n !\"&'(),-.0123456789:;>?ABCDEFGHIJKLMNOPQRSTUVWXYZ[]abcdefghijklmnopqrstuvwxyz}"
NUM_LETTERS = len(ALL_LETTERS)


def word_to_indices(word):
    """Map each character of `word` to its index in ALL_LETTERS.

    Characters outside the vocabulary map to -1 (str.find convention).

    Args:
        word: string.

    Returns:
        list of ints with length len(word).
    """
    return [ALL_LETTERS.find(c) for c in word]


def _one_hot(index, size):
    """Return a one-hot list of length `size` with a 1 at `index`."""
    vec = [0] * size
    vec[int(index)] = 1
    return vec


def letter_to_vec(letter):
    """Return the integer index of `letter` in ALL_LETTERS (-1 if absent).

    Note: despite the historical name, this returns a scalar index, not a
    one-hot vector; see _one_hot for that encoding.
    """
    return ALL_LETTERS.find(letter)


def process_text_input_indices(x_batch: list):
    """Convert a batch of strings to a 2-D array of vocabulary indices.

    Assumes all strings share the same length -- ragged input would produce
    an object-dtype array instead of a 2-D int array.
    """
    return np.array([word_to_indices(word) for word in x_batch])


def process_char_output_indices(y_batch: list):
    """Convert a batch of single characters to a uint8 array of indices.

    Note: an out-of-vocabulary character yields -1, which wraps to 255
    under the uint8 dtype.
    """
    return np.array([letter_to_vec(c) for c in y_batch], dtype=np.uint8)
haleyjd/vib
vibconsole/vc2010/jslib/symbolExtension.js
<filename>vibconsole/vc2010/jslib/symbolExtension.js // // Symbol // // Best possible approximation of ECMAScript 6 Symbol type; not perfect, but highly functional. // if(!this.Symbol) { (function (global) { // unique symbol generator var sid = 0; var randStr = function () { return (Utils ? Utils.GenerateUUID() : (++sid + Math.random()).toString(36)); }; var uid = function (key) { return 'Symbol(' + key + ')_' + randStr(); }; // global symbol map var globalMap = Object.create(null); // internal value and key property symbols var VALUE = uid('value'); var KEY = uid('key'); var REF = uid('ref'); // regexp which matches Symbol names var symRegExp = /^Symbol\(.*\)_[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i; // original value of Object.getOwnPropertyNames var origGetOwnPropertyNames = Object.getOwnPropertyNames; // setter which creates a closure var setter = function (key) { return function(value) { // Store the received value and mark it as non-enumerable Object.defineProperty(this, key, { enumerable: false, configurable: true, writable: true, value: value }); }; }; var createSymbolInternal = function (internalID) { var newSymbol = Object.create(Symbol.prototype); Object.defineProperty(newSymbol, VALUE, { enumerable: false, configurable: false, writable: false, value: internalID }); // Create a dummy property on Object.prototype, so that when a Symbol // is used to assign a property, it will become non-enumerable. 
if(!Object.prototype[internalID]) { Object.defineProperty(Object.prototype, internalID, { enumerable: false, configurable: true, get: function () {}, set: setter(internalID) }); } // Define a reference on this Symbol to a natively implemented // reference counter which deletes the Object.prototype dummy // properties once there are no longer any references to a // related Symbol object (this is accomplished via SpiderMonkey's // JSFinalizeOp callback in NativeSymbolRef's JSClass) Object.defineProperty(newSymbol, REF, { enumerable: false, configurable: false, writable: false, value: new NativeSymbolRef(internalID, Object.prototype) }); return newSymbol; }; // // Symbol // function Symbol(name) { if(this instanceof Symbol && !this[VALUE]) throw new TypeError('Symbol is not a constructor'); return createSymbolInternal(uid(name)); } // // Symbol.prototype.toString // // This is supposed to return "Symbol(name)", which is only descriptive // and cannot be used to obtain the value of the property, but we do not // have native support for lookup of properties by anything but names or // indices, so this WILL actually return the internal value of the symbol. // You're not supposed to use it for anything evil, by contract :P // Object.defineProperty(Symbol.prototype, 'toString', { enumerable: false, configurable: true, writable: true, value: function toString() { return this[VALUE]; } }); // // Symbol.prototype.valueOf - same deal as toString // Object.defineProperty(Symbol.prototype, 'valueOf', { enumerable: false, configurable: true, writable: true, value: function valueOf() { return this[VALUE]; } }); // // Symbol.for // // Add a Symbol to the global registry. 
// Object.defineProperty(Symbol, 'for', { enumerable: false, configurable: true, writable: true, value: function for(key) { var sym; if(!(sym = globalMap[key])) { sym = globalMap[key] = Symbol(key); Object.defineProperty(sym, KEY, { enumerable: false, configurable: false, writable: false, value: key }); } return sym; } }); // // Symbol.keyFor // // Retrieve a Symbol stored in the global registry by name. // Object.defineProperty(Symbol, 'keyFor', { enumerable: false, configurable: true, writable: true, value: function keyFor(sym) { return sym[KEY]; } }); // // Symbol.iterator // // Create the "well-known" Symbol for the ECMAScript 6 iterator protocol. // Object.defineProperty(Symbol, 'iterator', { enumerable: false, configurable: true, writable: true, value: Symbol('iterator') }); // // Expose global object Symbol // Object.defineProperty(global, 'Symbol', { enumerable: false, configurable: true, writable: true, value: Symbol }); // // Overrides // // // Object.getOwnPropertyNames: // Shall not return names of Symbol properties. // if(origGetOwnPropertyNames) { let testFunc = function (elem) { return !symRegExp.test(elem); }; // NOT symbols Object.getOwnPropertyNames = function getOwnPropertyNames(obj) { return origGetOwnPropertyNames(obj).filter(testFunc); }; } // // Additional exports // // // Object.getOwnPropertySymbols // NB: Non-compliant to the extent the objects returned will not // compare equal to the original Symbol objects constructed to add // the properties to the object - this is impossible to accomplish // without weak maps, as otherwise there would be a severe garbage // collection issue with Symbol instances. 
// if(origGetOwnPropertyNames && !Object.getOwnPropertySymbols) { let testFunc = function (elem) { return symRegExp.test(elem); }; // ONLY symbols Object.defineProperty(Object, 'getOwnPropertySymbols', { enumerable: false, configurable: true, writable: true, value: function getOwnPropertySymbols(obj) { var symNames = origGetOwnPropertyNames(obj).filter(testFunc); var symbols = []; symNames.forEach(function (symName) { symbols.push(createSymbolInternal(symName)); }); return symbols; } }); } })(this); }
Team-OctOS/host_gerrit
gerrit-gwtui/src/main/java/com/google/gerrit/client/change/Actions.java
<reponame>Team-OctOS/host_gerrit<filename>gerrit-gwtui/src/main/java/com/google/gerrit/client/change/Actions.java
// Copyright (C) 2013 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.gerrit.client.change;

import com.google.gerrit.client.Gerrit;
import com.google.gerrit.client.actions.ActionButton;
import com.google.gerrit.client.actions.ActionInfo;
import com.google.gerrit.client.changes.ChangeInfo;
import com.google.gerrit.client.changes.ChangeInfo.CommitInfo;
import com.google.gerrit.client.changes.ChangeInfo.RevisionInfo;
import com.google.gerrit.client.rpc.NativeMap;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gwt.core.client.GWT;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.uibinder.client.UiHandler;
import com.google.gwt.user.client.ui.Button;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwtexpui.safehtml.client.SafeHtmlBuilder;

import java.util.TreeSet;

/**
 * Action-button bar for the change screen.  Shows the core change- and
 * revision-level buttons (abandon, restore, revert, submit, ...) that the
 * signed-in user may perform, plus one generic {@link ActionButton} for every
 * non-core action the server advertises.
 */
class Actions extends Composite {
  // Action ids handled by dedicated @UiField buttons below; everything else
  // the server advertises is rendered as a generic ActionButton.
  private static final String[] CORE = {
    "abandon", "restore", "revert", "topic",
    "cherrypick", "submit", "rebase", "message",
    "publish", "/"};

  interface Binder extends UiBinder<FlowPanel, Actions> {}
  private static final Binder uiBinder = GWT.create(Binder.class);

  @UiField Button cherrypick;
  @UiField Button deleteChange;
  @UiField Button deleteRevision;
  @UiField Button publish;
  @UiField Button rebase;
  @UiField Button revert;
  @UiField Button submit;

  @UiField Button abandon;
  private AbandonAction abandonAction;

  @UiField Button restore;
  private RestoreAction restoreAction;

  // State captured by display() and consumed by the click handlers below.
  private Change.Id changeId;
  private ChangeInfo changeInfo;
  private String revision;
  private String project;
  private String subject;
  private String message;
  private boolean canSubmit;

  Actions() {
    initWidget(uiBinder.createAndBindUi(this));
    getElement().setId("change_actions");
  }

  /**
   * Populate the bar for the given change and revision, wiring both
   * change-level and revision-level actions.
   */
  void display(ChangeInfo info, String revision) {
    this.revision = revision;
    boolean hasUser = Gerrit.isSignedIn();
    RevisionInfo revInfo = info.revision(revision);
    CommitInfo commit = revInfo.commit();
    changeId = info.legacy_id();
    project = info.project();
    subject = commit.subject();
    message = commit.message();
    changeInfo = info;

    initChangeActions(info, hasUser);
    initRevisionActions(info, revInfo, hasUser);
  }

  // Show the change-scoped buttons the server permits; anonymous users see
  // no actions at all.
  private void initChangeActions(ChangeInfo info, boolean hasUser) {
    NativeMap<ActionInfo> actions = info.has_actions()
        ? info.actions()
        : NativeMap.<ActionInfo> create();
    actions.copyKeysIntoChildren("id");

    if (hasUser) {
      // "/" is the DELETE-on-change action id.
      a2b(actions, "/", deleteChange);
      a2b(actions, "abandon", abandon);
      a2b(actions, "restore", restore);
      a2b(actions, "revert", revert);
      for (String id : filterNonCore(actions)) {
        add(new ActionButton(info, actions.get(id)));
      }
    }
  }

  // Show the revision-scoped buttons; also records whether submit is
  // currently permitted (visibility is applied later by setSubmitEnabled()).
  private void initRevisionActions(ChangeInfo info, RevisionInfo revInfo,
      boolean hasUser) {
    NativeMap<ActionInfo> actions = revInfo.has_actions()
        ? revInfo.actions()
        : NativeMap.<ActionInfo> create();
    actions.copyKeysIntoChildren("id");

    canSubmit = false;
    if (hasUser) {
      canSubmit = actions.containsKey("submit");
      if (canSubmit) {
        // Server supplies the submit label/tooltip (e.g. merge strategy).
        ActionInfo action = actions.get("submit");
        submit.setTitle(action.title());
        submit.setHTML(new SafeHtmlBuilder()
            .openDiv()
            .append(action.label())
            .closeDiv());
      }
      a2b(actions, "/", deleteRevision);
      a2b(actions, "cherrypick", cherrypick);
      a2b(actions, "publish", publish);
      a2b(actions, "rebase", rebase);
      for (String id : filterNonCore(actions)) {
        add(new ActionButton(info, revInfo, actions.get(id)));
      }
    }
  }

  private void add(ActionButton b) {
    ((FlowPanel) getWidget()).add(b);
  }

  // Sorted ids of advertised actions that have no dedicated button.
  private static TreeSet<String> filterNonCore(NativeMap<ActionInfo> m) {
    TreeSet<String> ids = new TreeSet<>(m.keySet());
    for (String id : CORE) {
      ids.remove(id);
    }
    return ids;
  }

  void setSubmitEnabled() {
    submit.setVisible(canSubmit);
  }

  boolean isSubmitEnabled() {
    return submit.isVisible() && submit.isEnabled();
  }

  // Click handlers: dialogs (AbandonAction/RestoreAction) are created lazily
  // on first use and then reused.
  @UiHandler("abandon")
  void onAbandon(ClickEvent e) {
    if (abandonAction == null) {
      abandonAction = new AbandonAction(abandon, changeId);
    }
    abandonAction.show();
  }

  @UiHandler("publish")
  void onPublish(ClickEvent e) {
    DraftActions.publish(changeId, revision);
  }

  @UiHandler("deleteRevision")
  void onDeleteRevision(ClickEvent e) {
    DraftActions.delete(changeId, revision);
  }

  @UiHandler("deleteChange")
  void onDeleteChange(ClickEvent e) {
    DraftActions.delete(changeId);
  }

  @UiHandler("restore")
  void onRestore(ClickEvent e) {
    if (restoreAction == null) {
      restoreAction = new RestoreAction(restore, changeId);
    }
    restoreAction.show();
  }

  @UiHandler("rebase")
  void onRebase(ClickEvent e) {
    RebaseAction.call(changeId, revision);
  }

  @UiHandler("submit")
  void onSubmit(ClickEvent e) {
    SubmitAction.call(changeInfo, changeInfo.revision(revision));
  }

  @UiHandler("cherrypick")
  void onCherryPick(ClickEvent e) {
    CherryPickAction.call(cherrypick, changeInfo, revision, project, message);
  }

  @UiHandler("revert")
  void onRevert(ClickEvent e) {
    RevertAction.call(revert, changeId, revision, project, subject);
  }

  // Make button b visible (with server-provided tooltip) iff action `a`
  // is present in the advertised action map.
  private static void a2b(NativeMap<ActionInfo> actions, String a, Button b) {
    if (actions.containsKey(a)) {
      b.setVisible(true);
      b.setTitle(actions.get(a).title());
    }
  }
}
IkkiKing/SkillboxDiplom
src/main/java/com/ikkiking/api/response/tag/Tag.java
package com.ikkiking.api.response.tag;

import lombok.AllArgsConstructor;
import lombok.Data;

/**
 * API response DTO describing a single tag.
 * Lombok generates getters/setters, equals/hashCode, toString ({@code @Data})
 * and the all-arguments constructor ({@code @AllArgsConstructor}).
 */
@Data
@AllArgsConstructor
public class Tag {
    // Tag text as shown to the client.
    private String name;
    // Relative weight of the tag -- presumably a normalized value for
    // tag-cloud sizing; confirm against the producer of this DTO.
    private double weight;
}
junedkazi/paper-handlebars
helpers/limit.js
<filename>helpers/limit.js 'use strict'; const _ = require('lodash'); /** * Limit an array to the second argument * * @example * {{limit array 4}} */ const factory = () => { return function(data, limit) { if (_.isString(data)) { return data.substring(0, limit); } if (!_.isArray(data)) { return []; } return data.slice(0, limit); }; }; module.exports = [{ name: 'limit', factory: factory, }];
jdpigeon/dharmadefender
public/js/util.js
/* global */
var Util = {};

(function () {
    "use strict";

    /**
     * Generate a short pseudo-random identifier.
     * @return {string} identifier of length 4 (lowercase hex digits)
     */
    Util.guid = function () {
        // (1 + random) * 0x10000 lies in [0x10000, 0x20000); rendering it in
        // hex and dropping the leading "1" leaves exactly four, zero-padded,
        // hex digits.
        var n = Math.floor((1 + Math.random()) * 0x10000);
        return n.toString(16).substring(1);
    };
})();

module.exports = Util;