repo_name
stringlengths
6
101
path
stringlengths
4
300
text
stringlengths
7
1.31M
kskinoue0612/iriclib_v4
h5cgnsflowsolution.cpp
#include "error_macros.h" #include "h5cgnsflowsolution.h" #include "h5util.h" #include "iriclib_errorcodes.h" #include "internal/iric_logger.h" #include "private/h5cgnsflowsolution_impl.h" using namespace iRICLib; #define LABEL "FlowSolution_t" std::string H5CgnsFlowSolution::label() { return LABEL; } H5CgnsFlowSolution::H5CgnsFlowSolution(const std::string& name, hid_t groupId, H5CgnsZone* zone) : impl {new Impl {}} { impl->m_name = name; impl->m_groupId = groupId; impl->m_zone = zone; } H5CgnsFlowSolution::~H5CgnsFlowSolution() { H5Gclose(impl->m_groupId); delete impl; } std::string H5CgnsFlowSolution::name() const { return impl->m_name; } int H5CgnsFlowSolution::readValueNames(std::vector<std::string>* names) const { _IRIC_LOGGER_TRACE_CALL_START("H5Util::getGroupNames"); int ier = H5Util::getGroupNames(impl->m_groupId, names); _IRIC_LOGGER_TRACE_CALL_END_WITHVAL("H5Util::getGroupNames", ier); RETURN_IF_ERR; return IRIC_NO_ERROR; } int H5CgnsFlowSolution::readValueNames(std::set<std::string>* names) const { _IRIC_LOGGER_TRACE_CALL_START("H5Util::getGroupNames"); int ier = H5Util::getGroupNames(impl->m_groupId, names); _IRIC_LOGGER_TRACE_CALL_END_WITHVAL("H5Util::getGroupNames", ier); RETURN_IF_ERR; return IRIC_NO_ERROR; } int H5CgnsFlowSolution::readValueType(const std::string& name, H5Util::DataArrayValueType *type) const { _IRIC_LOGGER_TRACE_CALL_START("H5Util::readDataArrayValueType"); int ier = H5Util::readDataArrayValueType(impl->m_groupId, name, type); _IRIC_LOGGER_TRACE_CALL_END_WITHVAL("H5Util::readDataArrayValueType", ier); RETURN_IF_ERR; return IRIC_NO_ERROR; } int H5CgnsFlowSolution::readValue(const std::string& name, std::vector<int>* values) const { _IRIC_LOGGER_TRACE_CALL_START("H5Util::readDataArrayValue"); int ier = H5Util::readDataArrayValue(impl->m_groupId, name, values); _IRIC_LOGGER_TRACE_CALL_END_WITHVAL("H5Util::readDataArrayValue", ier); RETURN_IF_ERR; return IRIC_NO_ERROR; } int H5CgnsFlowSolution::readValue(const std::string& name, 
std::vector<double>* values) const { _IRIC_LOGGER_TRACE_CALL_START("H5Util::readDataArrayValue"); int ier = H5Util::readDataArrayValue(impl->m_groupId, name, values); _IRIC_LOGGER_TRACE_CALL_END_WITHVAL("H5Util::readDataArrayValue", ier); RETURN_IF_ERR; return IRIC_NO_ERROR; } int H5CgnsFlowSolution::readValueAsDouble(const std::string& name, std::vector<double>* values) const { H5Util::DataArrayValueType type; int ier = readValueType(name, &type); RETURN_IF_ERR; if (type == H5Util::DataArrayValueType::RealDouble) { return readValue(name, values); } else if (type == H5Util::DataArrayValueType::Int) { std::vector<int> buffer; ier = readValue(name, &buffer); RETURN_IF_ERR; values->assign(buffer.size(), 0); for (unsigned int i = 0; i < buffer.size(); ++i) { (*values)[i] = buffer.at(i); } } return IRIC_NO_ERROR; } int H5CgnsFlowSolution::writeValue(const std::string& name, const std::vector<int>& values) const { std::vector<hsize_t> dims; int ier = dataDims(&dims); RETURN_IF_ERR; _IRIC_LOGGER_TRACE_CALL_START("H5Util::createDataArray"); ier = H5Util::createDataArray(impl->m_groupId, name, values, dims); _IRIC_LOGGER_TRACE_CALL_END_WITHVAL("H5Util::createDataArray", ier); RETURN_IF_ERR; return IRIC_NO_ERROR; } int H5CgnsFlowSolution::writeValue(const std::string& name, const std::vector<double>& values) const { std::vector<hsize_t> dims; int ier = dataDims(&dims); RETURN_IF_ERR; _IRIC_LOGGER_TRACE_CALL_START("H5Util::createDataArray"); ier = H5Util::createDataArray(impl->m_groupId, name, values, dims); _IRIC_LOGGER_TRACE_CALL_END_WITHVAL("H5Util::createDataArray", ier); RETURN_IF_ERR; return IRIC_NO_ERROR; } H5CgnsZone* H5CgnsFlowSolution::zone() const { return impl->m_zone; }
h-crisis/assistant
Webpage/test/komori/template_test/csvt.js
/**
 * Created by komori on 2016/07/12.
 */

// CSV loader for Shift_JIS-encoded files (d3 v3 dsv API).
var getCSV = d3.dsv(',', 'text/csv; charset=shift_jis');

var tdfkNum;
var mareaNum;
var lat;
var lon;
var hospLatLon;
var cont = 0;

// 47 prefecture names, indexed by JIS prefecture code - 1.
var tdfk = ['北海道', '青森県', '岩手県', '宮城県', '秋田県', '山形県', '福島県', '茨城県', '栃木県', '群馬県', '埼玉県', '千葉県', '東京都', '神奈川県', '新潟県', '富山県', '石川県', '福井県', '山梨県', '長野県', '岐阜県', '静岡県', '愛知県', '三重県', '滋賀県', '京都府', '大阪府', '兵庫県', '奈良県', '和歌山県', '鳥取県', '島根県', '岡山県', '広島県', '山口県', '徳島県', '香川県', '愛媛県', '高知県', '福岡県', '佐賀県', '長崎県', '熊本県', '大分県', '宮崎県', '鹿児島県', '沖縄県'];

// Secondary medical areas per prefecture; only Hokkaido (index 0) is filled in.
var marea = new Array(47);
marea[0] = ['101:南渡島','102:南檜山','103:北渡島檜山','104:札幌','105:後志','106:南空知','107:中空知','108:北空知','109:西胆振','110:東胆振','111:日高','112:上川中部','113:上川北部','114:富良野','115:留萌','116:宗谷','117:北網','118:遠紋','119:十勝','120:釧路','121:根室'];

// Toggles the prefecture list panel; builds one table row per prefecture.
function hoIndexButton() {
    if (document.getElementById('tdfkinfo').style.display == 'none') {
        document.getElementById('vishospinfo').style.display = 'none';
        var i = 0;
        for (i = 0; i < 47; i++) {
            // NOTE(review): every row reuses id=tdfkBtn (duplicate ids), so the
            // return below always yields the FIRST button's value — confirm intent.
            tdfkinfo.innerHTML = tdfkinfo.innerHTML + "<tr><td style='font-size:24px;color:white;background-color:#888888;text-align:center' type=button id=tdfkBtn value=" + i + " onclick=choiceTdfk(this)>" + tdfk[i] + "</td></tr>";
        }
        // fixed: the original had a bare `...style.height` expression here — a
        // property read whose result was discarded (a lost assignment); removed.
        document.getElementById('tdfkinfo').style.display = 'block';
        cont++;
        return document.getElementById('tdfkBtn').value;
    } else {
        tdfkinfo.innerHTML = "";
        document.getElementById('tdfkinfo').style.display = 'none';
        document.getElementById('hospinfo').style.display = 'none';
        cont++;
    }
}

// Click handler for a prefecture row: loads the medical-status CSV and shows
// the country/city selection form in the hospinfo panel.
function choiceTdfk(obj) {
    tdfkNum = obj.getAttribute('value');
    getCSV('geojson/hcrisis_medical_status.csv', function (data) {
        // process the loaded rows
        hospinfo.innerHTML = "";
        var code = "";
        var aa = 0;
        var bb = 0;
        var text = "";
        /*
        if(tdfkNum == 0) {
            for (var i = 0; i < 21; i++) {
                code = marea[0][i].substr(0,3);
                hospinfo.innerHTML = hospinfo.innerHTML + "<tr><td style='font-size:24px;color:white;background-color:#888888;text-align:center' type=button id=mdsBtn value=" + code + " onclick=choiceMds(this)>" + marea[0][i] + "</td></tr>";
            }
        }
        */
        hospinfo.innerHTML =
            // fixed: the `+` between form tag and <p> was INSIDE the string
            // literal, leaking a literal " + " into the rendered page
            "<form action='' method='post'>" +
            "<p>国を選択して下さい</p>" +
            "<select name='country'>" +
            "<option value='Japan' selected='selected' class='msg'>1番目の要素を選択して下さい</option>" +
            "<option value='Japan' class='japan'>日本</option>" +
            "<option value='America' class='America'>アメリカ</option>" +
            "<option value='Australia' class='Australia'>オーストラリア</option>" +
            "</select>" +
            "<p>都市を選択して下さい</p>" +
            "<select name='city'>" +
            "<option value='Japan' selected='selected' class='msg'>都市を選択して下さい</option>" +
            "<option value='Tokyo' class='japan'>東京</option>" +
            "<option value='Kyoto' class='japan'>京都</option>" +
            "<option value='Osaka' class='japan'>大阪</option>" +
            "<option value='NY' class='America'>ニューヨーク</option>" +
            "<option value='LA' class='America'>ロサンゼルス</option>" +
            "<option value='Sydney' class='Australia'>シドニー</option>" +
            "</select>" +
            "</form>" +
            // fixed: value=' was unterminated in the original, corrupting the tag
            "<button id='abb' style='' value=''>送信</button>" +
            "<label><input id='aff' value=''></label>";
        document.getElementById('hospinfo').style.display = 'block';
    });
}

// Click handler for a medical-area row: logs matching facility names.
function choiceMds(obj) {
    mareaNum = obj.getAttribute('value');
    getCSV('geojson/hcrisis_medical_status.csv', function (data) {
        for (var i = 0; i < data.length; i++) {
            if (data[i].marea_code == mareaNum) {
                console.log(data[i].name1);
            }
        }
    });
}

// Toggles the "visible hospitals" panel: lists every facility whose lat/lon
// falls inside the current OpenLayers map viewport.
function visHoButton() {
    if (document.getElementById('vishospinfo').style.display == 'none') {
        document.getElementById('tdfkinfo').style.display = 'none';
        document.getElementById('hospinfo').style.display = 'none';
        var extent = map.getView().calculateExtent(map.getSize());
        var bottomLeft = ol.proj.transform(ol.extent.getBottomLeft(extent), 'EPSG:3857', 'EPSG:4326');
        var topRight = ol.proj.transform(ol.extent.getTopRight(extent), 'EPSG:3857', 'EPSG:4326');
        getCSV('geojson/hcrisis_medical_status.csv', function (data) {
            for (var i = 0; i < data.length; i++) {
                var emisLat = data[i].緯度;
                var emisLon = data[i].経度;
                // NOTE(review): CSV fields are strings; <=/>= against the numeric
                // extent relies on JS coercion — works, but parseFloat is clearer.
                if (emisLat <= topRight[1] && emisLat >= bottomLeft[1]) {
                    if (emisLon <= topRight[0] && emisLon >= bottomLeft[0]) {
                        vishospinfo.innerHTML = vishospinfo.innerHTML + "<tr><td style='font-size:24px;color:white;background-color:#888888;text-align:center' type=button id=tdkBtn value=" + i + " onclick=choiceHosp(this)>" + data[i].医療機関名 + "</td></tr>";
                    }
                }
            }
        });
        document.getElementById('vishospinfo').style.display = 'block';
    } else {
        document.getElementById('vishospinfo').style.display = 'none';
        vishospinfo.innerHTML = "";
    }
}
IThawk/learnCode
mq/springboot-amqp/src/main/java/com/ithawk/demo/rabbitmq/springboot/v1/amqp/admin/AmqpConfig.java
package com.ithawk.demo.rabbitmq.springboot.v1.amqp.admin; import com.ithawk.demo.rabbitmq.springboot.v1.util.ResourceUtil; import org.springframework.amqp.rabbit.connection.CachingConnectionFactory; import org.springframework.amqp.rabbit.connection.ConnectionFactory; import org.springframework.amqp.rabbit.core.RabbitAdmin; import org.springframework.amqp.rabbit.listener.SimpleMessageListenerContainer; import org.springframework.amqp.support.ConsumerTagStrategy; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @Configuration public class AmqpConfig { @Bean public ConnectionFactory connectionFactory() throws Exception { CachingConnectionFactory cachingConnectionFactory = new CachingConnectionFactory(); cachingConnectionFactory.setUri(ResourceUtil.getKey("rabbitmq.uri")); return cachingConnectionFactory; } @Bean public RabbitAdmin amqpAdmin(ConnectionFactory connectionFactory) { RabbitAdmin admin = new RabbitAdmin(connectionFactory); // admin.setAutoStartup(true); return admin; } @Bean public SimpleMessageListenerContainer container(ConnectionFactory connectionFactory) { SimpleMessageListenerContainer container = new SimpleMessageListenerContainer(connectionFactory); container.setConsumerTagStrategy(new ConsumerTagStrategy() { public String createConsumerTag(String queue) { return null; } }); return container; } }
ferrarimarco/open-scissor
docker/d-streamon-master/d-streamon/streamon/core/block/DBQueue.hpp
#ifndef __DB_QUEUE_H__
#define __DB_QUEUE_H__

// gcc 4.4 shipped the pre-standard <cstdatomic> header instead of <atomic>.
#if __GNUC__ == 4 && __GNUC_MINOR__ == 4
#include <cstdatomic>
#else
#include <atomic>
#endif
#include <array>
#include <stdexcept>
#include <algorithm>
#include <cassert>
#include <chrono>
#include <thread>

// Double-buffered bounded queue: multiple producers push into the "active"
// buffer while a single consumer drains the other; pop() flips the buffers
// when the consumer side runs dry. m_pointer packs the active-queue index in
// bits 31-30 and the next free slot index in the low bits.
template<typename T,unsigned int Qlen>
class DBQueue
{
    // One queue cell: payload plus a "published" flag the producer sets last.
    struct slot
    {
        std::atomic_bool valid;
        T payload;
        slot(): valid(false),payload() {}
    };
    struct buffer
    {
        std::array<slot,Qlen> vec;
        buffer(): vec() {}
    };
    // Returns the index of the *other* buffer (0 <-> 1).
    static int exchange_queue(int q)
    {
        assert(q == 0 || q == 1);
        return q == 0 ? 1 : 0;
    }
    std::array<buffer,2> m_queues;
    std::atomic_uint m_pointer;      // packed: queue index (bits 31-30) | next slot
    unsigned int m_read;             // consumer cursor into the drained buffer
    unsigned int m_consumer_q;       // which buffer the consumer currently owns
    unsigned int m_full_slots;       // slots published in the consumer buffer
    std::atomic_bool m_full;         // producer buffer exhausted
    // Mask selecting the queue-index bits of m_pointer.
    // NOTE(review): (1<<31) shifts into the sign bit of int — formally UB;
    // 1u<<31 would be well-defined. Kept byte-identical here.
    static const unsigned int MASK=0|(1<<31)|(1<<30);

    // Consumer side: invalidate the drained buffer, point producers at it, and
    // take over the other buffer, capturing how many slots were claimed there.
    void swap_queues()
    {
        for(unsigned int i=0; i<m_full_slots; ++i)
        {
            m_queues[m_consumer_q].vec[i].valid.store(false,std::memory_order_release);
        }
        unsigned int newpoint=m_consumer_q<<30;
        m_consumer_q=exchange_queue(m_consumer_q);
        unsigned int old_pointer=m_pointer.exchange(newpoint,std::memory_order_release);
        m_full.store(false,std::memory_order_release);
        old_pointer&=~MASK;
        // claimed count may exceed Qlen when producers over-shot; clamp it
        m_full_slots=std::min(old_pointer,Qlen);
        m_read=0;
    }

    // Tries to take the next published slot from the consumer buffer.
    // Spins up to 100 x 10us waiting for a claimed-but-unpublished slot,
    // then skips it; returns false when the buffer is exhausted.
    bool consume(T& in)
    {
        while(m_read < m_full_slots)
        {
            for (int i=0; i<100; ++i)
            {
                if(m_queues[m_consumer_q].vec[m_read].valid.load(std::memory_order_relaxed))
                {
                    int index = m_read++;
                    in = std::move(m_queues[m_consumer_q].vec[index].payload);
                    return true;
                }
                std::this_thread::sleep_for(std::chrono::microseconds(10));
            }
            m_read++;
        }
        return false;
    }

public:
    DBQueue():m_queues(),m_pointer(0),m_read(0),m_consumer_q(1),m_full_slots(0),m_full(false) {}

    DBQueue(const DBQueue&)=delete;

    // Move: copies counters, moves only the slots marked valid in the source.
    // NOTE(review): not safe to call while other threads use `other`.
    DBQueue(DBQueue&& other):m_queues(),m_pointer(other.m_pointer.load()),m_read(other.m_read),m_consumer_q(other.m_consumer_q),m_full_slots(other.m_full_slots),m_full(other.m_full.load())
    {
        for (int q=0; q<2; ++q)
            for (unsigned int i=0; i<Qlen; ++i)
            {
                if(other.m_queues[q].vec[i].valid.load())
                {
                    m_queues[q].vec[i].payload=std::move(other.m_queues[q].vec[i].payload);
                    m_queues[q].vec[i].valid.store(true);
                }
            }
    }

    DBQueue& operator=(const DBQueue&)=delete;

    DBQueue& operator=(DBQueue&& other)
    {
        m_pointer=other.m_pointer.load();
        m_read=other.m_read;
        m_consumer_q=other.m_consumer_q;
        m_full_slots=other.m_full_slots;
        m_full=other.m_full.load();
        for (int q=0; q<2; ++q)
            for (unsigned int i=0; i<Qlen; ++i)
            {
                if(other.m_queues[q].vec[i].valid.load())
                {
                    m_queues[q].vec[i].payload=std::move(other.m_queues[q].vec[i].payload);
                    m_queues[q].vec[i].valid.store(true);
                }
            }
        return *this;
    }

    // Single-consumer pop: drain the current buffer, else swap and retry once.
    bool pop(T& in)
    {
        if(consume(in)) return true;
        swap_queues();
        return consume(in);
    }

    // Multi-producer push (perfect forwarding). Claims a slot via fetch_add,
    // writes the payload, then publishes it by setting `valid` last.
    template <typename Tp>
    bool push(Tp&& in)
    {
        if(m_full.load(std::memory_order_acquire)) return false;
        unsigned int cur_pointer = m_pointer.fetch_add(1,std::memory_order_acquire);
        unsigned int cur_queue=(cur_pointer&MASK)>>30;
        assert((cur_queue==0)||(cur_queue==1));
        unsigned int my_slot = cur_pointer&(~MASK);
        if (my_slot >= Qlen - 1)
        {
            // last slot: mark the buffer full; past-the-end claims are rejected
            if (my_slot == Qlen -1)
                m_full.store(true,std::memory_order_release);
            else
                return false;
        }
        m_queues[cur_queue].vec[my_slot].payload=std::forward<Tp>(in);
        m_queues[cur_queue].vec[my_slot].valid.store(true,std::memory_order_release);
        return true;
    }

    // Destroys all pending payloads and restores the initial state.
    // This has to be executed outside the critical session (no concurrent use).
    void reset()
    {
        for (auto qit=m_queues.begin(); qit!=m_queues.end(); ++qit)
            for (auto it=qit->vec.begin(); it!=qit->vec.end(); ++it)
            {
                if(it->valid.load())
                {
                    // move payload into a temporary so its dtor runs here
                    T to_del(std::move(it->payload));
                    it->valid.store(false,std::memory_order_relaxed);
                }
            }
        m_full.store(false,std::memory_order_relaxed);
        m_pointer.store(0,std::memory_order_relaxed);
        m_consumer_q=1;
        m_read=0;
        m_full_slots=0;
    }
};

#endif //__DB_QUEUE_H__
fideoman/geronimo
framework/modules/geronimo-management/src/main/java/org/apache/geronimo/management/geronimo/JCAManagedConnectionFactory.java
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.geronimo.management.geronimo;

import java.util.Map;
import java.util.List; // NOTE(review): unused in this interface — candidate for removal

/**
 * Geronimo-specific management view of a JCA managed connection factory,
 * exposing its class/interface names and its configuration properties.
 *
 * @version $Rev$ $Date$
 */
public interface JCAManagedConnectionFactory extends org.apache.geronimo.management.JCAManagedConnectionFactory {

    /** Fully-qualified class name of the resource adapter's ManagedConnectionFactory. */
    public String getManagedConnectionFactoryClass();

    /** Fully-qualified name of the connection factory interface. */
    public String getConnectionFactoryInterface() ;

    /** Fully-qualified names of all interfaces the connection factory implements. */
    public String[] getImplementedInterfaces();

    /** Fully-qualified class name of the connection factory implementation. */
    public String getConnectionFactoryImplClass();

    /** Fully-qualified name of the connection interface. */
    public String getConnectionInterface();

    /** Fully-qualified class name of the connection implementation. */
    public String getConnectionImplClass();

    /**
     * Gets the config properties in the form of a map where the key is the
     * property name and the value is property type (as a Class).
     *
     * @return map of config property name to config property type name
     */
    public Map<String, Class> getConfigProperties();

    /**
     * Sets one configuration property on the underlying factory.
     *
     * @param property property name
     * @param value    new value
     * @throws Exception if the property cannot be set
     */
    public void setConfigProperty(String property, Object value) throws Exception;

    /**
     * Reads one configuration property from the underlying factory.
     *
     * @param property property name
     * @return the current value
     * @throws Exception if the property cannot be read
     */
    public Object getConfigProperty(String property) throws Exception;
}
kostiushko-vlad/QT-BPMN
xmldom/xmltag_p.h
/*****************************************************************************
 * xmltag_p.h
 *
 * Created: 26.03.2019 2019 by <NAME>
 *
 * Copyright 2019 Xooo. All rights reserved.
 *
 *****************************************************************************/
#ifndef XMLTAG_P_H
#define XMLTAG_P_H

// NOTE(review): intentionally empty placeholder — this private header carries
// only its include guard; the xmltag private declarations are yet to be added.

#endif // XMLTAG_P_H
rsadasiv/oop_nlp
similarity/src/main/java/com/outofprintmagazine/nlp/Ta.java
package com.outofprintmagazine.nlp;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import java.util.Set;

import edu.stanford.nlp.pipeline.CoreDocument;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import edu.mit.jverbnet.data.FrameType;
import edu.mit.jverbnet.data.IFrame;
import edu.mit.jverbnet.data.IMember;
import edu.mit.jverbnet.data.IVerbClass;
import edu.mit.jverbnet.data.IWordnetKey;
import edu.mit.jverbnet.index.IVerbIndex;
import edu.mit.jverbnet.index.VerbIndex;
import edu.mit.jwi.Dictionary;
import edu.mit.jwi.IDictionary;
import edu.stanford.nlp.paragraphs.ParagraphAnnotator;

/**
 * Text-analysis facade: wires up two Stanford CoreNLP pipelines (full and a
 * light "wiki" one), a WordNet dictionary, a lazily-built VerbNet index, and
 * file-backed name/value dictionaries and word lists.
 *
 * NOTE(review): several absolute Windows paths (C:\Users\rsada\...) are
 * hard-coded below — this class only works on that machine; the paths should
 * be externalized to configuration.
 */
public class Ta {

	private StanfordCoreNLP pipeline;      // full annotation pipeline
	private StanfordCoreNLP wikiParser;    // lightweight tokenize/pos/lemma pipeline
	private IDictionary wordnet = null;    // WordNet 3.1 dictionary
	// sense key (WordNet) -> VerbNet class ids; built lazily by initVerbnet()
	private HashMap<String, ArrayList<String>> verbnet = new HashMap<String, ArrayList<String>>();
	// dictionary name (file name) -> word/value map loaded from disk
	private HashMap<String, HashMap<String, String>> dictionaries = new HashMap<String, HashMap<String, String>>();
	// list name (file name) -> lines loaded from disk
	private HashMap<String, List<String>> lists = new HashMap<String, List<String>>();

	/** Properties for the minimal pipeline (tokenize/ssplit/pos/lemma only). */
	public static Properties getWikiProps() {
		Properties props = new Properties();
		props.setProperty("annotators","tokenize,ssplit,pos,lemma");
		return props;
	}

	/** Properties for the full pipeline, including the custom OOP annotators. */
	public static Properties getDefaultProps() {
		// set up pipeline properties
		Properties props = new Properties();
		// register custom annotators
		props.put("customAnnotatorClass.oop_paragraphs", "com.outofprintmagazine.nlp.annotators.ParagraphAnnotator");
		props.put("customAnnotatorClass.oop_gender", "com.outofprintmagazine.nlp.annotators.GenderAnnotator");
		props.put("paragraphs.paragraphBreak", "two");
		// configure pipeline
		// edu.stanford.nlp.paragraphs.ParagraphAnnotator x = new edu.stanford.nlp.paragraphs.ParagraphAnnotator(props, verbose)
		// NOTE(review): the next line is immediately overwritten by the
		// following setProperty with the same key — the first value is dead.
		props.setProperty("annotators","tokenize,ssplit,pos,lemma,ner,parse,depparse,sentiment,oop_paragraphs,oop_gender,coref");
		props.setProperty("annotators", "tokenize,ssplit,pos,ner,lemma,gender,oop_paragraphs,oop_gender,parse,depparse,natlog,coref,openie,quote");
		//props.setProperty("annotators","tokenize,ssplit,pos,lemma,ner,parse,sentiment,oop_paragraphs,oop_gender");
		props.setProperty("ner.applyFineGrained", "false");
		//props.setProperty("dcoref.maxdist", "2");
		props.setProperty("coref.algorithm", "statistical");
		props.setProperty("coref.maxMentionDistance", "15");
		props.setProperty("coref.maxMentionDistanceWithStringMatch", "300");
		// NOTE(review): "statisical" looks like a typo for "statistical" in this
		// property key — verify against the CoreNLP coref property names.
		props.setProperty("coref.statisical.pairwiseScoreThresholds", ".5");
		props.setProperty("openie.resolve_coref", "true");
		props.setProperty("parse.maxlen", "70");
		props.setProperty("pos.maxlen", "70");
		props.setProperty("ner.maxlen", "70");
		props.setProperty("ner.useSUTime", "false");
		props.setProperty("ner.applyNumericClassifiers", "false");
		props.setProperty("quote.maxLength", "70");
		return props;
	}

	public StanfordCoreNLP getPipeline() {
		return pipeline;
	}

	public StanfordCoreNLP getWikiPipeline() {
		return wikiParser;
	}

	public IDictionary getWordnet() {
		return wordnet;
	}

	/** Lazily builds and returns the sense-key -> VerbNet class-id index. */
	public HashMap<String, ArrayList<String>> getVerbnet() throws IOException {
		if (verbnet.size() == 0) {
			initVerbnet();
		}
		return verbnet;
	}

	// Walks every root verb class in the VerbNet index and records, for each
	// WordNet sense key of each member, the ids of the classes it appears in.
	private void initVerbnet() throws IOException {
		// NOTE(review): hard-coded local path — externalize.
		String pathToVerbnet = "C:\\Users\\rsada\\git\\oop_nlp\\similarity\\resources\\new_vn\\";
		URL url = new URL("file", null, pathToVerbnet);
		IVerbIndex index = new VerbIndex(url);
		index.open();
		Iterator<IVerbClass> verbClassIter = index.iteratorRoots();
		while (verbClassIter.hasNext()) {
			IVerbClass verb = verbClassIter.next();
			for (IMember member : verb.getMembers()) {
				for (IWordnetKey senseKey : member.getWordnetTypes().keySet()) {
					ArrayList<String> senseKeyList = verbnet.get(senseKey.toString());
					if (senseKeyList == null) {
						senseKeyList = new ArrayList<String>();
					}
					senseKeyList.add(verb.getID());
					//System.err.println("adding: " + senseKey.toString() + " " + senseKeyList.size());
					verbnet.put(senseKey.toString(), senseKeyList);
				}
			}
		}
	}

	public HashMap<String, HashMap<String, String>> getDictionaries() {
		return dictionaries;
	}

	/**
	 * Returns the named dictionary, loading it from disk on first access.
	 * The name doubles as the file path passed to setDictionary(String).
	 */
	public HashMap<String, String> getDictionary(String name) throws IOException {
		HashMap<String, String> retval = dictionaries.get(name);
		if (retval == null) {
			setDictionary(name);
			retval = dictionaries.get(name);
		}
		return retval;
	}

	public void setDictionary(String name, HashMap<String, String> dictionary) {
		dictionaries.put(name, dictionary);
	}

	/**
	 * Loads a space-separated "key value" file and registers it under its
	 * file name. NOTE(review): assumes every line has at least two fields —
	 * a blank or single-token line throws ArrayIndexOutOfBoundsException.
	 */
	public void setDictionary(String fileName) throws IOException {
		HashMap<String, String> dict = new HashMap<String, String>();
		//FileReader fileReader = new FileReader("C:\\Users\\rsada\\git\\oop_nlp\\similarity\\resources\\COCA\\en_uncommon.txt");
		BufferedReader bufferedReader = new BufferedReader(new FileReader(fileName));
		String line;
		while ((line = bufferedReader.readLine()) != null) {
			String[] name_value = line.split(" ");
			dict.put(name_value[0], name_value[1]);
		}
		bufferedReader.close();
		setDictionary(fileName, dict);
	}

	public HashMap<String, List<String>> getLists() {
		return lists;
	}

	public List<String> getList(String name) {
		return lists.get(name);
	}

	public void setList(String name, List<String> list) {
		lists.put(name, list);
	}

	/** Loads a UTF-8 text file as a list of lines, keyed by its file name. */
	public void setList(String fileName) throws IOException {
		setList(fileName, Files.readAllLines(new File(fileName).toPath(), StandardCharsets.UTF_8));
	}

	public Ta() throws IOException {
		this(Ta.getDefaultProps());
	}

	/**
	 * Builds both pipelines and opens the WordNet dictionary.
	 * NOTE(review): hard-coded local WordNet path — externalize.
	 */
	public Ta(Properties props) throws IOException {
		super();
		pipeline = new StanfordCoreNLP(props);
		wikiParser = new StanfordCoreNLP(getWikiProps());
		wordnet = new Dictionary(new URL("file", null, "C:\\Users\\rsada\\git\\oop_nlp\\similarity\\resources\\wn3.1.dict\\dict"));
		wordnet.open();
	}

	/** Annotates text with the full pipeline. */
	public CoreDocument annotate(String text) {
		return annotate(pipeline, text);
	}

	/** Annotates text with the given pipeline and returns the document. */
	public CoreDocument annotate(StanfordCoreNLP nlp, String text) {
		// create a document object
		CoreDocument document = new CoreDocument(text);
		// annotate the document
		nlp.annotate(document);
		return document;
	}
}
altuntepe/drv_mei_cpe
src/drv_mei_cpe_nucleus.h
#ifndef _DRV_MEI_CPE_NUCLEUS_H
#define _DRV_MEI_CPE_NUCLEUS_H

/******************************************************************************

                          Copyright (c) 2007-2015
                     <NAME>iligungs-GmbH & Co. KG

  For licensing information, see the file 'LICENSE' in the root folder of
  this software module.

******************************************************************************/

/* ============================================================================
   Description : This file contains the includes and the defines specific
                 to the Nucleus OS
   Remarks     : Please use the compiler switches here if you have more than
                 one OS.
========================================================================= */

/* ============================================================================
   Global Includes
========================================================================= */

/* NUCLEUS Includes*/

/* ============================================================================
   typedefs interrupt wrapping (Nucleus)
========================================================================= */

/* No signal-pending concept on Nucleus; always reports "not pending". */
#define MEI_DRVOS_SIGNAL_PENDING 0

/** Function typedef for the NUCLEUS intConnect() */
typedef int (*MEI_IntConnect_WrapNUCLEUS_t)(void *pIntVector, void* pISRRoutine, int ISRParams );

/** Function typedef for the NUCLEUS Interrupt enable intEnable() */
typedef int (*MEI_IntEnable_WrapNUCLEUS_t)(int IRQNum);

/** Function typedef for the NUCLEUS Interrupt disable intDisable() */
typedef int (*MEI_IntDisable_WrapNUCLEUS_t)(int IRQNum);

/** Function typedef for the NUCLEUS Interrupt Service Routine */
typedef void (*MEI_IntServRoutine_WrapNUCLEUS_t)(int ISRParams);

/* ============================================================================
   global function (Nucleus) - declarations
========================================================================= */

/* Create / destroy the MEI driver devices (implemented elsewhere). */
extern int MEI_DevCreate(void);
extern int MEI_DevDelete(void);

/* set wrapper functions for the interrupt handling */
extern int MEI_FctIntConnectSet(MEI_IntConnect_WrapNUCLEUS_t pIntConnectFct);
extern int MEI_FctIntEnableSet(MEI_IntEnable_WrapNUCLEUS_t pIntEnableFct);
extern int MEI_FctIntDisableSet(MEI_IntDisable_WrapNUCLEUS_t pIntDisableFct);

#endif /* _DRV_MEI_CPE_NUCLEUS_H */
tqrg-bot/finagle
finagle-core/src/test/scala/com/twitter/finagle/builder/ClientBuilderTest.scala
package com.twitter.finagle.builder

import com.twitter.finagle._
import com.twitter.finagle.integration.IntegrationBase
import com.twitter.finagle.param.ProtocolLibrary
import com.twitter.finagle.stats.NullStatsReceiver
import com.twitter.finagle.service.{RetryPolicy, FailureAccrualFactory}
import com.twitter.finagle.stats.InMemoryStatsReceiver
import com.twitter.util._
import com.twitter.util.registry.{GlobalRegistry, SimpleRegistry}
import java.util.concurrent.atomic.{AtomicInteger, AtomicBoolean}
import org.junit.runner.RunWith
import org.mockito.Mockito.{verify, when}
import org.mockito.Matchers
import org.mockito.Matchers._
import org.scalatest.FunSuite
import org.scalatest.mock.MockitoSugar
import org.scalatest.junit.JUnitRunner
import org.scalatest.concurrent.{Eventually, IntegrationPatience}

@RunWith(classOf[JUnitRunner])
class ClientBuilderTest
  extends FunSuite
  with Eventually
  with IntegrationPatience
  with MockitoSugar
  with IntegrationBase {

  /**
   * Shared mock wiring: a prepared ServiceFactory whose Service is delivered
   * asynchronously through `preparedServicePromise`, hooked into a
   * MockChannel codec so tests can observe `prepareConnFactory`.
   */
  trait ClientBuilderHelper {
    val preparedFactory = mock[ServiceFactory[String, String]]
    val preparedServicePromise = new Promise[Service[String, String]]
    when(preparedFactory.status) thenReturn Status.Open
    when(preparedFactory()) thenReturn preparedServicePromise
    when(preparedFactory.close(any[Time])) thenReturn Future.Done
    when(preparedFactory.map(Matchers.any())) thenReturn
      preparedFactory.asInstanceOf[ServiceFactory[Any, Nothing]]

    val m = new MockChannel
    when(m.codec.prepareConnFactory(any[ServiceFactory[String, String]], any[Stack.Params]))
      .thenReturn(preparedFactory)
  }

  test("ClientBuilder should invoke prepareConnFactory on connection") {
    new ClientBuilderHelper {
      val client = m.build()
      val requestFuture = client("123")

      verify(m.codec).prepareConnFactory(any[ServiceFactory[String, String]], any[Stack.Params])
      verify(preparedFactory)()

      // Request must not complete before the prepared service is delivered.
      assert(!requestFuture.isDefined)
      val service = mock[Service[String, String]]
      when(service("123")) thenReturn Future.value("321")
      when(service.close(any[Time])) thenReturn Future.Done
      preparedServicePromise() = Return(service)
      verify(service)("123")
      assert(requestFuture.poll == Some(Return("321")))
    }
  }

  /**
   * Builds a client via `build` inside a scratch registry and asserts that
   * the expected protocol-library key (and no "not-specified" key) was
   * registered.
   */
  def verifyProtocolRegistry(name: String, expected: String)(build: => Service[String, String]) = {
    test(name + " registers protocol library") {
      val simple = new SimpleRegistry()
      GlobalRegistry.withRegistry(simple) {
        build

        val entries = GlobalRegistry.get.toSet
        val unspecified = entries.count(_.key.startsWith(Seq("client", "not-specified")))
        assert(unspecified == 0, "saw registry keys with 'not-specified' protocol")
        val specified = entries.count(_.key.startsWith(Seq("client", expected)))
        assert(specified > 0, "did not see expected protocol registry keys")
      }
    }
  }

  verifyProtocolRegistry("#codec(Codec)", expected = "fancy") {
    val ctx = new ClientBuilderHelper {}
    when(ctx.m.codec.protocolLibraryName).thenReturn("fancy")

    ClientBuilder()
      .name("test")
      .hostConnectionLimit(1)
      .codec(ctx.m.codec)
      .hosts("")
      .build()
  }

  verifyProtocolRegistry("#codec(CodecFactory)", expected = "fancy") {
    val ctx = new ClientBuilderHelper {}
    val cf = new CodecFactory[String, String] {
      def client: Client = (_: ClientCodecConfig) => ctx.m.codec
      def server: Server = ???
      override def protocolLibraryName = "fancy"
    }

    ClientBuilder()
      .name("test")
      .hostConnectionLimit(1)
      .codec(cf)
      .hosts("")
      .build()
  }

  verifyProtocolRegistry("#codec(CodecFactory#Client)", expected = "fancy") {
    val ctx = new ClientBuilderHelper {}
    when(ctx.m.codec.protocolLibraryName).thenReturn("fancy")
    val cfClient: CodecFactory[String, String]#Client = { (_: ClientCodecConfig) =>
      ctx.m.codec
    }

    ClientBuilder()
      .name("test")
      .hostConnectionLimit(1)
      .codec(cfClient)
      .hosts("")
      .build()
  }

  verifyProtocolRegistry("configured protocol", expected = "extra fancy") {
    val ctx = new ClientBuilderHelper {}
    when(ctx.m.codec.protocolLibraryName).thenReturn("fancy")
    val cfClient: CodecFactory[String, String]#Client = { (_: ClientCodecConfig) =>
      ctx.m.codec
    }

    // An explicitly configured ProtocolLibrary must win over the codec's name.
    val stk = ClientBuilder.stackClientOfCodec(cfClient)
    ClientBuilder()
      .name("test")
      .hostConnectionLimit(1)
      .hosts("")
      .stack(stk.configured(ProtocolLibrary("extra fancy")))
      .build()
  }

  test("ClientBuilder should close properly") {
    new ClientBuilderHelper {
      val svc = ClientBuilder().hostConnectionLimit(1).codec(m.codec).hosts("").build()
      val f = svc.close()
      // FIX: `eventually { f.isDefined }` was a no-op -- `eventually` only
      // retries on exception, and a bare false boolean never throws. Wrap in
      // assert so the check actually fails when close() never completes.
      eventually { assert(f.isDefined) }
    }
  }

  private class MyException extends Exception

  private val retryMyExceptionOnce = RetryPolicy.tries[Try[Nothing]](
    2, // 2 tries == 1 attempt + 1 retry
    { case Throw(_: MyException) => true })

  test("ClientBuilder should collect stats on 'tries' for retrypolicy") {
    new ClientBuilderHelper {
      val inMemory = new InMemoryStatsReceiver
      val builder = ClientBuilder()
        .name("test")
        .hostConnectionLimit(1)
        .codec(m.codec)
        .daemon(true) // don't create an exit guard
        .hosts(Seq(m.clientAddress))
        .retryPolicy(retryMyExceptionOnce)
        .reportTo(inMemory)
      val client = builder.build()

      val service = mock[Service[String, String]]
      when(service("123")) thenReturn Future.exception(new MyException())
      when(service.close(any[Time])) thenReturn Future.Done
      preparedServicePromise() = Return(service)

      val f = client("123")

      eventually { assert(f.isDefined) }
      assert(inMemory.counters(Seq("test", "tries", "requests")) == 1)
      assert(
        // 1 request and 1 retry
        inMemory.counters(Seq("test", "requests")) == 2
      )
    }
  }

  test("ClientBuilder should collect stats on 'tries' with no retrypolicy") {
    new ClientBuilderHelper {
      val inMemory = new InMemoryStatsReceiver
      val builder = ClientBuilder()
        .name("test")
        .hostConnectionLimit(1)
        .codec(m.codec)
        .daemon(true) // don't create an exit guard
        .hosts(Seq(m.clientAddress))
        .reportTo(inMemory)
      val client = builder.build()
      val numFailures = 5

      val service = mock[Service[String, String]]
      when(service("123")) thenReturn Future.exception(WriteException(new Exception()))
      when(service.close(any[Time])) thenReturn Future.Done
      preparedServicePromise() = Return(service)

      val f = client("123")
      assert(f.isDefined)
      assert(inMemory.counters(Seq("test", "tries", "requests")) == 1)
      // failure accrual marks the only node in the balancer as Busy which in turn caps requeues
      // this relies on a retry budget that allows for `numFailures` requeues
      assert(inMemory.counters(Seq("test", "requests")) == numFailures)
    }
  }

  test("ClientBuilder with stack should collect stats on 'tries' for retrypolicy") {
    new ClientBuilderHelper {
      val inMemory = new InMemoryStatsReceiver
      val builder = ClientBuilder()
        .name("test")
        .hostConnectionLimit(1)
        .stack(m.client)
        .daemon(true) // don't create an exit guard
        .hosts(Seq(m.clientAddress))
        .retryPolicy(retryMyExceptionOnce)
        .reportTo(inMemory)
      val client = builder.build()

      val service = mock[Service[String, String]]
      when(service("123")) thenReturn Future.exception(new MyException())
      when(service.close(any[Time])) thenReturn Future.Done
      preparedServicePromise() = Return(service)

      val f = client("123")
      eventually { assert(f.isDefined) }
      assert(inMemory.counters(Seq("test", "tries", "requests")) == 1)
      // 1 request + 1 retry
      assert(inMemory.counters(Seq("test", "requests")) == 2)
    }
  }

  test("ClientBuilder with stack should collect stats on 'tries' with no retrypolicy") {
    new ClientBuilderHelper {
      val inMemory = new InMemoryStatsReceiver
      val numFailures = 21 // There will be 20 requeues by default
      val builder = ClientBuilder()
        .name("test")
        .hostConnectionLimit(1)
        .stack(m.client)
        .daemon(true) // don't create an exit guard
        .hosts(Seq(m.clientAddress))
        .failureAccrualParams(25 -> Duration.fromSeconds(10))
        .reportTo(inMemory)
      val client = builder.build()

      val service = mock[Service[String, String]]
      when(service("123")) thenReturn Future.exception(WriteException(new Exception()))
      when(service.close(any[Time])) thenReturn Future.Done
      preparedServicePromise() = Return(service)

      val f = client("123")
      assert(f.isDefined)
      assert(inMemory.counters(Seq("test", "tries", "requests")) == 1)
      // failure accrual marks the only node in the balancer as Busy which in turn caps requeues
      // this relies on a retry budget that allows for `numFailures` requeues
      assert(inMemory.counters(Seq("test", "requests")) == numFailures)
    }
  }

  private class SpecificException extends RuntimeException

  test("Retries have locals propagated") {
    new ClientBuilderHelper {
      val specificExceptionRetry: PartialFunction[Try[Nothing], Boolean] = {
        case Throw(e: SpecificException) => true
      }
      val builder = ClientBuilder()
        .name("test")
        .hostConnectionLimit(1)
        .stack(m.client)
        .daemon(true) // don't create an exit guard
        .hosts(Seq(m.clientAddress))
        .retryPolicy(RetryPolicy.tries(2, specificExceptionRetry))
        .reportTo(NullStatsReceiver)
      val client = builder.build()

      val aLocal = new Local[Int]
      val first = new AtomicBoolean(false)
      val localOnRetry = new AtomicInteger(0)

      // 1st call fails and triggers a retry which
      // captures the value of the local
      val service = Service.mk[String, String] { str: String =>
        if (first.compareAndSet(false, true)) {
          Future.exception(new SpecificException())
        } else {
          localOnRetry.set(aLocal().getOrElse(-1))
          Future(str)
        }
      }
      preparedServicePromise() = Return(service)

      aLocal.let(999) {
        val rep = client("hi")
        assert("hi" == Await.result(rep, Duration.fromSeconds(5)))
      }
      assert(999 == localOnRetry.get)
    }
  }
}
SanjoSolutions/unnamed
grid_error.js
// ***MASONRY*** for Lazy Grids - 24kb - https://masonry.desandro.com/
import Masonry from "masonry-layout";
// ***IMAGES LOADED*** for Lazy Grids - 10kb - https://imagesloaded.desandro.com/
import imagesLoaded from "imagesloaded";
// ***INFINITE SCROLL*** for Lazy Grids - 26kb - https://infinite-scroll.com/
import InfiniteScroll from "infinite-scroll";

// InfiniteScroll looks for imagesLoaded on itself — make it available.
InfiniteScroll.imagesLoaded = imagesLoaded;

// Only wire up the lazy grid when a `.grid` element exists on the page.
const gridElement = document.querySelector(".grid");

if (gridElement) {
  // Start Masonry with a non-matching item selector ("none") so nothing is
  // laid out until the images below have finished loading.
  const masonryGrid = new Masonry(gridElement, {
    percentPosition: true,
    itemSelector: "none",
    columnWidth: ".grid-sizer",
    gutter: ".gutter-sizer",
    // nicer reveal transition
    visibleStyle: { transform: "translateY(0)", opacity: 1 },
    hiddenStyle: { transform: "translateY(100px)", opacity: 0 },
  });

  // Once all images are loaded, switch to the real item selector and hand
  // the existing items to Masonry for layout.
  imagesLoaded(gridElement, () => {
    gridElement.classList.remove("are-images-unloaded");
    masonryGrid.options.itemSelector = ".grid-item";
    const loadedItems = gridElement.querySelectorAll(".grid-item");
    masonryGrid.appended(loadedItems);
  });

  // Initialize Infinite Scroll, delegating layout of appended items to Masonry.
  const infiniteScroll = new InfiniteScroll(gridElement, {
    path: ".pagination__next",
    append: ".grid-item",
    outlayer: masonryGrid,
    prefill: true,
    status: ".page-load-status",
    hideNav: ".pagination",
    scrollThreshold: 800,
    history: false,
  });
}
Dedederek/samples
cplusplus/level1_single_api/5_200dk_peripheral/gpio/src/gpio.cpp
/** * Copyright 2020 Huawei Technologies Co., Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include <memory> #include <fstream> #include <sstream> #include <stdio.h> #include <string.h> #include <unistd.h> #include <sys/ioctl.h> #include <sys/types.h> #include <sys/stat.h> #include <fcntl.h> #include <sys/select.h> #include <sys/time.h> #include <errno.h> #include <string.h> #include "gpio.h" //ascend310 pin: GPIO0 GPIO1 #define ASCEND310_GPIO_0_DIR "/sys/class/gpio/gpio504/direction" #define ASCEND310_GPIO_1_DIR "/sys/class/gpio/gpio444/direction" #define ASCEND310_GPIO_0_VAL "/sys/class/gpio/gpio504/value" #define ASCEND310_GPIO_1_VAL "/sys/class/gpio/gpio444/value" /* I2C Device*/ #define I2C1_DEV_NAME "/dev/i2c-1" #define I2C_RETRIES 0x0701 #define I2C_TIMEOUT 0x0702 #define I2C_SLAVE 0x0703 #define I2C_RDWR 0x0707 #define I2C_M_RD 0x01 #define PCA6416_SLAVE_ADDR 0x20 #define PCA6416_GPIO_CFG_REG 0x07 #define PCA6416_GPIO_PORARITY_REG 0x05 #define PCA6416_GPIO_OUT_REG 0x03 #define PCA6416_GPIO_IN_REG 0x01 //GPIO MASK #define GPIO3_MASK 0x10 #define GPIO4_MASK 0x20 #define GPIO5_MASK 0x40 #define GPIO6_MASK 0x80 #define GPIO7_MASK 0x08 /* * i2c_write, for configure PCA6416 register. 
*/ int gpio::i2c_write(unsigned char slave, unsigned char reg, unsigned char value) { int ret; struct i2c_rdwr_ioctl_data ssm_msg = {0}; unsigned char buf[2] = {0}; ssm_msg.nmsgs = 1; ssm_msg.msgs = (struct i2c_msg *)malloc(ssm_msg.nmsgs * sizeof(struct i2c_msg)); if (ssm_msg.msgs == NULL) { ERROR_LOG("Memory alloc error!\n"); return -1; } buf[0] = reg; buf[1] = value; (ssm_msg.msgs[0]).flags = 0; (ssm_msg.msgs[0]).addr = (unsigned short)slave; (ssm_msg.msgs[0]).buf = buf; (ssm_msg.msgs[0]).len = 2; ret = ioctl(fd, I2C_RDWR, &ssm_msg); if (ret < 0) { ERROR_LOG("write error, ret=%#x, errorno=%#x, %s!\n", ret, errno, strerror(errno)); free(ssm_msg.msgs); ssm_msg.msgs = NULL; return -1; } free(ssm_msg.msgs); ssm_msg.msgs = NULL; return 0; } /* * i2c_read, for reading PCA6416 register. */ int gpio::i2c_read(unsigned char slave, unsigned char reg, unsigned char *buf) { int ret; struct i2c_rdwr_ioctl_data ssm_msg = {0}; unsigned char regs[2] = {0}; regs[0] = reg; regs[1] = reg; ssm_msg.nmsgs = 2; ssm_msg.msgs = (struct i2c_msg *)malloc(ssm_msg.nmsgs * sizeof(struct i2c_msg)); if (ssm_msg.msgs == NULL) { ERROR_LOG("Memory alloc error!\n"); return -1; } (ssm_msg.msgs[0]).flags = 0; (ssm_msg.msgs[0]).addr = slave; (ssm_msg.msgs[0]).buf = regs; (ssm_msg.msgs[0]).len = 1; (ssm_msg.msgs[1]).flags = I2C_M_RD; (ssm_msg.msgs[1]).addr = slave; (ssm_msg.msgs[1]).buf = buf; (ssm_msg.msgs[1]).len = 1; ret = ioctl(fd, I2C_RDWR, &ssm_msg); if (ret < 0) { ERROR_LOG("read data error,ret=%#x !\n", ret); free(ssm_msg.msgs); ssm_msg.msgs = NULL; return -1; } free(ssm_msg.msgs); ssm_msg.msgs = NULL; return 0; } /* * i2c_init, for access i2c device. 
*/ int gpio::i2c_1_init() { // open i2c-1 device fd = open(I2C1_DEV_NAME, O_RDWR); if (fd < 0) { ERROR_LOG("i2c-1 Can't open !\n"); return -1; } // set i2c-1 retries time if (ioctl(fd, I2C_RETRIES, 1) < 0) { close(fd); fd = 0; ERROR_LOG("set i2c-1 retry fail!\n"); return -1; } // set i2c-1 timeout time, 10ms as unit if (ioctl(fd, I2C_TIMEOUT, 1) < 0) { close(fd); fd = 0; ERROR_LOG("set i2c-1 timeout fail!\n"); return -1; } return 0; } gpio::gpio(void) { i2c_1_init(); } gpio::~gpio(void) { } int gpio::PCA6416_gpio_set_direction(int pin,int dir) { unsigned char slave; unsigned char reg; unsigned char data; int ret; if((pin!=3)&&(pin!=4)&&(pin!=5)&&(pin!=6)&&(pin!=7)){ ERROR_LOG("PCA6416 pin not right ,pin param must be 3,4,5,6,7\n"); return -1; } // set GPIO as output slave = PCA6416_SLAVE_ADDR; reg = PCA6416_GPIO_CFG_REG; data = 0; ret = i2c_read(slave, reg, &data); if (ret != 0) { close(fd); fd = 0; ERROR_LOG("GPIO read %#x %#x to %#x fail!\n", slave, data, reg); return -1; } if(dir == 0) { if(pin == 3) data |= GPIO3_MASK; else if(pin == 4) data |= GPIO4_MASK; else if(pin == 5) data |= GPIO5_MASK; else if(pin == 6) data |= GPIO6_MASK; else if(pin == 7) data |= GPIO7_MASK; } else { if(pin == 3) data &= ~GPIO3_MASK; else if(pin == 4) data &= ~GPIO4_MASK; else if(pin == 5) data &= ~GPIO5_MASK; else if(pin == 6) data &= ~GPIO6_MASK; else if(pin == 7) data &= ~GPIO7_MASK; } ret = i2c_write(slave, reg, data); if (ret != 0) { close(fd); fd = 0; ERROR_LOG("GPIO write %#x %#x to %#x fail!\n", slave, data, reg); return -1; } return 0; } int gpio::PCA6416_gpio_set_value(int pin,int val) { unsigned char slave; unsigned char reg; unsigned char data; int ret; if((pin!=3)&&(pin!=4)&&(pin!=5)&&(pin!=6)&&(pin!=7)){ ERROR_LOG("PCA6416 pin not right ,pin param must be 3,4,5,6,7\n"); return -1; } // Set GPIO output level slave = PCA6416_SLAVE_ADDR; reg = PCA6416_GPIO_OUT_REG; data = 0; ret = i2c_read(slave, reg, &data); if (ret != 0) { close(fd); fd = 0; ERROR_LOG("GPIO read %#x %#x 
to %#x fail!\n", slave, data, reg); return -1; } if(val == 0) { if(pin == 3) data &= ~GPIO3_MASK; else if(pin == 4) data &= ~GPIO4_MASK; else if(pin == 5) data &= ~GPIO5_MASK; else if(pin == 6) data &= ~GPIO6_MASK; else if(pin == 7) data &= ~GPIO7_MASK; } else{ if(pin == 3) data |= GPIO3_MASK; else if(pin == 4) data |= GPIO4_MASK; else if(pin == 5) data |= GPIO5_MASK; else if(pin == 6) data |= GPIO6_MASK; else if(pin == 7) data |= GPIO7_MASK; } ret = i2c_write(slave, reg, data); if (ret != 0) { close(fd); fd = 0; ERROR_LOG("GPIO write %#x %#x to %#x fail!\n", slave, data, reg); return -1; } return 0; } int gpio::PCA6416_gpio_get_value(int pin,int *val) { unsigned char slave; unsigned char reg; unsigned char data; int ret; if((pin!=3)&&(pin!=4)&&(pin!=5)&&(pin!=6)&&(pin!=7)){ ERROR_LOG("PCA6416 pin not right ,pin param must be 3,4,5,6,7\n"); return -1; } // get GPIO inputput level slave = PCA6416_SLAVE_ADDR; reg = PCA6416_GPIO_IN_REG; data = 0; ret = i2c_read(slave, reg, &data); if (ret != 0) { close(fd); fd = 0; ERROR_LOG("GPIO read %#x %#x to %#x fail!\n", slave, data, reg); return -1; } if(pin == 3) data &= GPIO3_MASK; else if(pin == 4) data &= GPIO4_MASK; else if(pin == 5) data &= GPIO5_MASK; else if(pin == 6) data &= GPIO6_MASK; else if(pin == 7) data &= GPIO7_MASK; if(data>0) *val = 1; else *val = 0; return 0; } int gpio::ASCEND310_gpio_set_direction(int pin,int dir) { int fd_direction; if(pin == 0) fd_direction = open(ASCEND310_GPIO_0_DIR, O_WRONLY); else if(pin == 1) fd_direction = open(ASCEND310_GPIO_1_DIR, O_WRONLY); else { ERROR_LOG("ASCEND310 pin not right ,pin param must be 0,1\n"); return -1; } if(-1 == fd_direction) { ERROR_LOG("open gpio DIR file error pin=%d",pin); return(-1); } if(dir == 0) { if(-1 == write(fd_direction, "in", sizeof("in"))) { ERROR_LOG("gpio write operation error pin=%d",pin); close(fd_direction); return(-1); } } else { if(-1 == write(fd_direction, "out", sizeof("out"))) { ERROR_LOG("gpio write operation error pin=%d",pin); 
close(fd_direction); return(-1); } } close(fd_direction); return 0; } int gpio::ASCEND310_gpio_set_value(int pin,int val) { int fd_gpio_value; unsigned char value; if(pin == 0) fd_gpio_value = open(ASCEND310_GPIO_0_VAL, O_WRONLY); else if(pin == 1) fd_gpio_value = open(ASCEND310_GPIO_1_VAL, O_WRONLY); else { ERROR_LOG("ASCEND310 pin not right ,pin param must be 0,1\n"); return -1; } if(-1 == fd_gpio_value) { ERROR_LOG("open gpio VAL file error pin=%d",pin); return(-1); } if(val == 0) { value = '0'; if(-1 == write(fd_gpio_value, &value, sizeof(value))) { ERROR_LOG("gpio write operation error pin=%d",pin); close(fd_gpio_value); return(1); } } else { value = '1'; if(-1 == write(fd_gpio_value, &value, sizeof(value))) { ERROR_LOG("gpio write operation error pin=%d",pin); close(fd_gpio_value); return(-1); } } close(fd_gpio_value); return 0; } int gpio::ASCEND310_gpio_get_value(int pin,int *val) { int fd_gpio_value; char value_str[3]; if(pin == 0) fd_gpio_value = open(ASCEND310_GPIO_0_VAL, O_RDONLY); else if(pin == 1) fd_gpio_value = open(ASCEND310_GPIO_1_VAL, O_RDONLY); else { ERROR_LOG("ASCEND310 pin not right ,pin param must be 0,1\n"); return -1; } if(-1 == fd_gpio_value) { ERROR_LOG("open gpio VAL file error pin=%d",pin); return(-1); } if (-1 == read(fd_gpio_value, value_str, 3)) { ERROR_LOG("Failed to read value pin=%d",pin); return -1; } *val = atoi(value_str); close(fd_gpio_value); return 0; } int gpio::gpio_set_direction(int pin,int direction) { if((pin == 0)||(pin == 1)) return ASCEND310_gpio_set_direction(pin,direction); else return PCA6416_gpio_set_direction(pin,direction); } int gpio::gpio_set_value(int pin,int val) { if((pin == 0)||(pin == 1)) return ASCEND310_gpio_set_value(pin,val); else return PCA6416_gpio_set_value(pin,val); } int gpio::gpio_get_value(int pin,int *val) { if((pin == 0)||(pin == 1)) return ASCEND310_gpio_get_value(pin,val); else return PCA6416_gpio_get_value(pin,val); }
Teemotj/developer-community
dc-recruit/src/main/java/cn/treeshell/recruit/service/impl/RecruitServiceImpl.java
<reponame>Teemotj/developer-community package cn.treeshell.recruit.service.impl; import cn.hutool.core.util.StrUtil; import cn.treeshell.recruit.model.Recruit; import cn.treeshell.recruit.mapper.RecruitMapper; import cn.treeshell.recruit.service.RecruitService; import com.alicp.jetcache.anno.CacheInvalidate; import com.alicp.jetcache.anno.CacheUpdate; import com.alicp.jetcache.anno.Cached; import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import java.util.List; /** * 职位 服务实现类 * * @author panjing * @since 2020-03-23 */ @Service @Transactional public class RecruitServiceImpl extends ServiceImpl<RecruitMapper, Recruit> implements RecruitService { /** * 推荐职位 * @return */ @Override @Cached(name = "dc-recruit:recruits:recommend", expire = 3600) public List<Recruit> recommend() { QueryWrapper<Recruit> wrapper = new QueryWrapper<>(); wrapper.eq("state", "2"); wrapper.orderByAsc("create_time"); wrapper.last("limit 10"); return this.baseMapper.selectList(wrapper); } /** * 最新职位 * @return */ @Override public List<Recruit> newList() { QueryWrapper<Recruit> wrapper = new QueryWrapper<>(); wrapper.eq("state", "1"); wrapper.orderByDesc("create_time"); wrapper.last("limit 10"); return this.baseMapper.selectList(wrapper); } /** * 查询所有数据 * @return */ @Override @Cached(name = "dc-recruit:recruits:", expire = 3600) public List<Recruit> findAll() { return this.baseMapper.selectList(null); } /** * 根据 ID 查询 * @param id * @return */ @Override @Cached(name = "dc-recruit:recruit:", key = "#id", expire = 3600) public Recruit findById(String id) { return this.baseMapper.selectById(id); } /** * 分页 + 多条件查询 * @param recruit * @param page * @param size * @return */ @Override public 
IPage<Recruit> findSearch(Recruit recruit, int page, int size) { return this.baseMapper.selectPage(new Page<>(page, size), createWrapper(recruit)); } /** * 多条件查询 * @param recruit * @return */ @Override public List<Recruit> findSearch(Recruit recruit) { return this.baseMapper.selectList(createWrapper(recruit)); } /** * 新增 * @param recruit */ @Override public void add(Recruit recruit) { this.baseMapper.insert(recruit); } /** * 修改 * @param recruit */ @Override @CacheUpdate(name = "dc-recruit:recruit:", key = "#recruit.id", value ="#recruit") public void modify(Recruit recruit) { this.baseMapper.updateById(recruit); } /** * 删除 * @param id */ @Override @CacheInvalidate(name = "dc-recruit:recruit:", key = "#id") public void remove(String id) { this.baseMapper.deleteById(id); } /** * 动态条件构造 * @param recruit * @return */ private QueryWrapper<Recruit> createWrapper(Recruit recruit) { QueryWrapper<Recruit> wrapper = new QueryWrapper<>(); wrapper.like(StrUtil.isNotBlank(recruit.getId()), "id", recruit.getId()); wrapper.like(StrUtil.isNotBlank(recruit.getJobName()), "job_name", recruit.getJobName()); wrapper.like(StrUtil.isNotBlank(recruit.getSalary()), "salary", recruit.getSalary()); wrapper.like(StrUtil.isNotBlank(recruit.getConditions()), "condition", recruit.getConditions()); wrapper.like(StrUtil.isNotBlank(recruit.getEducation()), "education", recruit.getEducation() ); wrapper.like(StrUtil.isNotBlank(recruit.getAddress()), "address", recruit.getAddress()); wrapper.like(StrUtil.isNotBlank(recruit.getEnterpriseId()), "enterprise_id", recruit.getEnterpriseId()); wrapper.like(StrUtil.isNotBlank(recruit.getLabel()), "label", recruit.getLabel()); wrapper.like(StrUtil.isNotBlank(recruit.getContent1()), "content1", recruit.getContent1()); wrapper.like(StrUtil.isNotBlank(recruit.getContent2()), "content2", recruit.getContent2()); wrapper.eq(StrUtil.isNotBlank(recruit.getType()), "type", recruit.getType()); wrapper.eq(StrUtil.isNotBlank(recruit.getState()), "state", 
recruit.getState()); return wrapper; } }
lPrimemaster/DCScan-ModulesAPI
DCS_Core/src/threading.cpp
<filename>DCS_Core/src/threading.cpp #pragma once #include <thread> #include "../include/DCS_ModuleCore.h" #include "../include/internal.h" const DCS::u16 DCS::Threading::GetMaxHardwareConcurrency() { return std::thread::hardware_concurrency(); } DCS::Threading::TPool* DCS::Threading::CreatePersistentPool(u16 size, std::vector<std::function<void(std::mutex*, std::condition_variable*, std::array<std::atomic_int, 16>*)>> workers) { u16 isize = size; TPool* pool = new TPool(); if (size > GetMaxHardwareConcurrency()) { isize = GetMaxHardwareConcurrency(); LOG_WARNING("Attempt to create a thread_pool with size > max_physical_threads"); LOG_WARNING("Creating %d threads instead.", isize); LOG_CRITICAL("Discarded last %d workers from TPool %x!", workers.size() - isize, pool); } LOG_DEBUG("Initialized persistent threads. (size=%d)", isize); for (u16 i = 0; i < isize; i++) pool->workers.push_back(std::thread(workers[i], &pool->lock, &pool->signal, &pool->flags)); return pool; } void DCS::Threading::JoinPool(TPool* pool) { for (std::thread& w : pool->workers) { w.join(); } pool->workers.clear(); } DCS::u64 DCS::Threading::GetPoolWorkCount(TPool* pool) { return pool->workers.size(); } void DCS::Threading::DestroyPool(TPool* pool) { if (!pool->workers.size()) { LOG_DEBUG("Deleted pool object %x.", pool); delete pool; } else LOG_WARNING("Cannot destroy TPool object (%x) while working async. Maybe missing a DCS::Threading::JoinPool(TPool*) call.", pool); }
stellar-expert/albedo
frontend/src/hw-signer/hw-signer.js
import EventEmitter from 'events'
import {DEVICE_CONNECTED, DEVICE_DISCONNECTED} from './adapters/adapter-events'
import {ACCOUNT_TYPES} from '../state/account'
import LedgerAdapter from './adapters/ledger-adapter'

/**
 * Event-forwarding facade over a hardware-wallet adapter.
 * Re-emits the adapter's connect/disconnect events and delegates all
 * signing/key operations to the currently configured adapter.
 */
export default class HwSigner extends EventEmitter {
    /**
     * @param {String} accountType - one of ACCOUNT_TYPES; currently only LEDGER_ACCOUNT is supported
     */
    constructor(accountType) {
        super()
        this.adapter = null
        switch (accountType) {
            case ACCOUNT_TYPES.LEDGER_ACCOUNT:
                this.setAdapter(LedgerAdapter)
                break
            default:
                throw new Error(`Unsupported hardware wallet type: ${accountType}.`)
        }
    }

    /**
     * Replace the active adapter and re-wire device event forwarding.
     * @param {Object} adapter - hardware wallet adapter (EventEmitter-like)
     */
    setAdapter(adapter) {
        if (this.adapter) {
            this.adapter.removeAllListeners()
        }
        this.adapter = adapter
        this.adapter.on(DEVICE_CONNECTED, () => {
            this.emit(DEVICE_CONNECTED)
        })
        this.adapter.on(DEVICE_DISCONNECTED, () => {
            // FIX: previously re-emitted DEVICE_CONNECTED on disconnect,
            // so listeners never saw the disconnect event.
            this.emit(DEVICE_DISCONNECTED)
        })
    }

    // Returns the active adapter or throws if none was configured.
    getAdapter() {
        if (!this.adapter) throw new Error('No adapter was set')
        return this.adapter
    }

    async init(params) {
        return await this.getAdapter().init(params)
    }

    async getPublicKey(params) {
        return await this.getAdapter().getPublicKey(params)
    }

    async signTransaction(...args) {
        return await this.getAdapter().signTransaction(...args)
    }

    async signMessage(...args) {
        return await this.getAdapter().signMessage(...args)
    }

    async getDeviceId() {
        return await this.getAdapter().getDeviceId()
    }
}
pep-pig/grafana
pkg/services/alerting/notifiers/threema_test.go
package notifiers import ( "errors" "testing" "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/alerting" "github.com/grafana/grafana/pkg/services/encryption/ossencryption" "github.com/stretchr/testify/require" ) func TestThreemaNotifier(t *testing.T) { t.Run("Parsing alert notification from settings", func(t *testing.T) { t.Run("empty settings should return error", func(t *testing.T) { json := `{ }` settingsJSON, _ := simplejson.NewJson([]byte(json)) model := &models.AlertNotification{ Name: "threema_testing", Type: "threema", Settings: settingsJSON, } _, err := NewThreemaNotifier(model, ossencryption.ProvideService().GetDecryptedValue) require.Error(t, err) }) t.Run("valid settings should be parsed successfully", func(t *testing.T) { json := ` { "gateway_id": "*3MAGWID", "recipient_id": "ECHOECHO", "api_secret": "1234" }` settingsJSON, _ := simplejson.NewJson([]byte(json)) model := &models.AlertNotification{ Name: "threema_testing", Type: "threema", Settings: settingsJSON, } not, err := NewThreemaNotifier(model, ossencryption.ProvideService().GetDecryptedValue) require.Nil(t, err) threemaNotifier := not.(*ThreemaNotifier) require.Nil(t, err) require.Equal(t, "threema_testing", threemaNotifier.Name) require.Equal(t, "threema", threemaNotifier.Type) require.Equal(t, "*3MAGWID", threemaNotifier.GatewayID) require.Equal(t, "ECHOECHO", threemaNotifier.RecipientID) require.Equal(t, "1234", threemaNotifier.APISecret) }) t.Run("invalid Threema Gateway IDs should be rejected (prefix)", func(t *testing.T) { json := ` { "gateway_id": "ECHOECHO", "recipient_id": "ECHOECHO", "api_secret": "1234" }` settingsJSON, _ := simplejson.NewJson([]byte(json)) model := &models.AlertNotification{ Name: "threema_testing", Type: "threema", Settings: settingsJSON, } not, err := NewThreemaNotifier(model, ossencryption.ProvideService().GetDecryptedValue) require.Nil(t, not) var valErr 
alerting.ValidationError require.True(t, errors.As(err, &valErr)) require.Equal(t, "Invalid Threema Gateway ID: Must start with a *", valErr.Reason) }) t.Run("invalid Threema Gateway IDs should be rejected (length)", func(t *testing.T) { json := ` { "gateway_id": "*ECHOECHO", "recipient_id": "ECHOECHO", "api_secret": "1234" }` settingsJSON, _ := simplejson.NewJson([]byte(json)) model := &models.AlertNotification{ Name: "threema_testing", Type: "threema", Settings: settingsJSON, } not, err := NewThreemaNotifier(model, ossencryption.ProvideService().GetDecryptedValue) require.Nil(t, not) var valErr alerting.ValidationError require.True(t, errors.As(err, &valErr)) require.Equal(t, "Invalid Threema Gateway ID: Must be 8 characters long", valErr.Reason) }) t.Run("invalid Threema Recipient IDs should be rejected (length)", func(t *testing.T) { json := ` { "gateway_id": "*3MAGWID", "recipient_id": "ECHOECH", "api_secret": "1234" }` settingsJSON, _ := simplejson.NewJson([]byte(json)) model := &models.AlertNotification{ Name: "threema_testing", Type: "threema", Settings: settingsJSON, } not, err := NewThreemaNotifier(model, ossencryption.ProvideService().GetDecryptedValue) require.Nil(t, not) var valErr alerting.ValidationError require.True(t, errors.As(err, &valErr)) require.Equal(t, "Invalid Threema Recipient ID: Must be 8 characters long", valErr.Reason) }) }) }
txl0591/grpc
test/core/transport/chttp2/stream_map_test.c
/*
 *
 * Copyright 2015 gRPC authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

#include "src/core/ext/transport/chttp2/transport/stream_map.h"
#include <grpc/support/log.h>
#include "test/core/util/test_config.h"

#define LOG_TEST(x) gpr_log(GPR_INFO, "%s", x)

/* test creation & destruction */
static void test_no_op(void) {
  grpc_chttp2_stream_map map;

  LOG_TEST("test_no_op");

  grpc_chttp2_stream_map_init(&map, 8);
  grpc_chttp2_stream_map_destroy(&map);
}

/* test lookup on an empty map */
static void test_empty_find(void) {
  grpc_chttp2_stream_map map;

  LOG_TEST("test_empty_find");

  grpc_chttp2_stream_map_init(&map, 8);
  GPR_ASSERT(NULL == grpc_chttp2_stream_map_find(&map, 39128));
  grpc_chttp2_stream_map_destroy(&map);
}

/* test it's safe to delete twice */
static void test_double_deletion(void) {
  grpc_chttp2_stream_map map;

  LOG_TEST("test_double_deletion");

  grpc_chttp2_stream_map_init(&map, 8);
  GPR_ASSERT(0 == grpc_chttp2_stream_map_size(&map));
  grpc_chttp2_stream_map_add(&map, 1, (void *)1);
  GPR_ASSERT((void *)1 == grpc_chttp2_stream_map_find(&map, 1));
  GPR_ASSERT(1 == grpc_chttp2_stream_map_size(&map));
  GPR_ASSERT((void *)1 == grpc_chttp2_stream_map_delete(&map, 1));
  GPR_ASSERT(0 == grpc_chttp2_stream_map_size(&map));
  /* repeated delete/find cycles must keep returning NULL */
  GPR_ASSERT(NULL == grpc_chttp2_stream_map_find(&map, 1));
  GPR_ASSERT(NULL == grpc_chttp2_stream_map_delete(&map, 1));
  GPR_ASSERT(NULL == grpc_chttp2_stream_map_find(&map, 1));
  GPR_ASSERT(NULL == grpc_chttp2_stream_map_delete(&map, 1));
  GPR_ASSERT(NULL == grpc_chttp2_stream_map_find(&map, 1));
  GPR_ASSERT(NULL == grpc_chttp2_stream_map_delete(&map, 1));
  GPR_ASSERT(NULL == grpc_chttp2_stream_map_find(&map, 1));
  grpc_chttp2_stream_map_destroy(&map);
}

/* test add & lookup */
static void test_basic_add_find(uint32_t n) {
  grpc_chttp2_stream_map map;
  uint32_t i;
  size_t got;

  LOG_TEST("test_basic_add_find");
  /* FIX: n is uint32_t, so use %u (the original passed it to %d) */
  gpr_log(GPR_INFO, "n = %u", n);

  grpc_chttp2_stream_map_init(&map, 8);
  GPR_ASSERT(0 == grpc_chttp2_stream_map_size(&map));
  for (i = 1; i <= n; i++) {
    grpc_chttp2_stream_map_add(&map, i, (void *)(uintptr_t)i);
  }
  GPR_ASSERT(n == grpc_chttp2_stream_map_size(&map));
  GPR_ASSERT(NULL == grpc_chttp2_stream_map_find(&map, 0));
  GPR_ASSERT(NULL == grpc_chttp2_stream_map_find(&map, n + 1));
  for (i = 1; i <= n; i++) {
    got = (uintptr_t)grpc_chttp2_stream_map_find(&map, i);
    GPR_ASSERT(i == got);
  }
  grpc_chttp2_stream_map_destroy(&map);
}

/* verify that for_each gets the right values during test_delete_evens_XXX */
static void verify_for_each(void *user_data, uint32_t stream_id, void *ptr) {
  uint32_t *for_each_check = user_data;
  GPR_ASSERT(ptr);
  GPR_ASSERT(*for_each_check == stream_id);
  *for_each_check += 2;
}

/* after deleting the even keys of 1..n, validate that only odd keys remain
   and that for_each visits exactly those keys in order */
static void check_delete_evens(grpc_chttp2_stream_map *map, uint32_t n) {
  uint32_t for_each_check = 1;
  uint32_t i;
  size_t got;

  GPR_ASSERT(NULL == grpc_chttp2_stream_map_find(map, 0));
  GPR_ASSERT(NULL == grpc_chttp2_stream_map_find(map, n + 1));
  for (i = 1; i <= n; i++) {
    if (i & 1) {
      got = (uintptr_t)grpc_chttp2_stream_map_find(map, i);
      GPR_ASSERT(i == got);
    } else {
      GPR_ASSERT(NULL == grpc_chttp2_stream_map_find(map, i));
    }
  }
  grpc_chttp2_stream_map_for_each(map, verify_for_each, &for_each_check);
  if (n & 1) {
    GPR_ASSERT(for_each_check == n + 2);
  } else {
    GPR_ASSERT(for_each_check == n + 1);
  }
}

/* add a bunch of keys, delete the even ones, and make sure the map is
   consistent */
static void test_delete_evens_sweep(uint32_t n) {
  grpc_chttp2_stream_map map;
  uint32_t i;

  LOG_TEST("test_delete_evens_sweep");
  /* FIX: %u for uint32_t */
  gpr_log(GPR_INFO, "n = %u", n);

  grpc_chttp2_stream_map_init(&map, 8);
  for (i = 1; i <= n; i++) {
    grpc_chttp2_stream_map_add(&map, i, (void *)(uintptr_t)i);
  }
  for (i = 1; i <= n; i++) {
    if ((i & 1) == 0) {
      GPR_ASSERT((void *)(uintptr_t)i == grpc_chttp2_stream_map_delete(&map, i));
    }
  }
  check_delete_evens(&map, n);
  grpc_chttp2_stream_map_destroy(&map);
}

/* add a bunch of keys, delete the even ones immediately, and make sure the map
   is consistent */
static void test_delete_evens_incremental(uint32_t n) {
  grpc_chttp2_stream_map map;
  uint32_t i;

  LOG_TEST("test_delete_evens_incremental");
  /* FIX: %u for uint32_t */
  gpr_log(GPR_INFO, "n = %u", n);

  grpc_chttp2_stream_map_init(&map, 8);
  for (i = 1; i <= n; i++) {
    grpc_chttp2_stream_map_add(&map, i, (void *)(uintptr_t)i);
    if ((i & 1) == 0) {
      grpc_chttp2_stream_map_delete(&map, i);
    }
  }
  check_delete_evens(&map, n);
  grpc_chttp2_stream_map_destroy(&map);
}

/* add a bunch of keys, delete old ones after some time, ensure the
   backing array does not grow */
static void test_periodic_compaction(uint32_t n) {
  grpc_chttp2_stream_map map;
  uint32_t i;
  uint32_t del;

  LOG_TEST("test_periodic_compaction");
  /* FIX: %u for uint32_t */
  gpr_log(GPR_INFO, "n = %u", n);

  grpc_chttp2_stream_map_init(&map, 16);
  GPR_ASSERT(map.capacity == 16);
  for (i = 1; i <= n; i++) {
    grpc_chttp2_stream_map_add(&map, i, (void *)(uintptr_t)i);
    if (i > 8) {
      del = i - 8;
      GPR_ASSERT((void *)(uintptr_t)del ==
                 grpc_chttp2_stream_map_delete(&map, del));
    }
  }
  GPR_ASSERT(map.capacity == 16);
  grpc_chttp2_stream_map_destroy(&map);
}

int main(int argc, char **argv) {
  /* test sizes grow in a Fibonacci sequence up to 100000 */
  uint32_t n = 1;
  uint32_t prev = 1;
  uint32_t tmp;

  grpc_test_init(argc, argv);

  test_no_op();
  test_empty_find();
  test_double_deletion();

  while (n < 100000) {
    test_basic_add_find(n);
    test_delete_evens_sweep(n);
    test_delete_evens_incremental(n);
    test_periodic_compaction(n);

    tmp = n;
    n += prev;
    prev = tmp;
  }

  return 0;
}
IBMStreams/OSStreams
src/java/platform/com.ibm.streams.platform/src/main/java/com/ibm/streams/instance/sam/model/topology/TopologyApplication.java
<reponame>IBMStreams/OSStreams /* * Copyright 2021 IBM Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.ibm.streams.instance.sam.model.topology; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.ibm.streams.instance.sam.model.ValidatorHelper.checkContentsNotNullAndValidate; import static com.ibm.streams.instance.sam.model.ValidatorHelper.checkContentsPositionIndex; import static com.ibm.streams.instance.sam.model.ValidatorHelper.validateIfPresent; import com.google.common.collect.ImmutableSortedMap; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.Sets; import com.ibm.streams.admin.internal.api.StreamsException; import com.ibm.streams.admin.internal.api.trace.Trace; import com.ibm.streams.admin.internal.api.trace.TraceLogger.Level; import com.ibm.streams.instance.sam.SAMStaticContext; import com.ibm.streams.instance.sam.model.ModelDumper; import com.ibm.streams.instance.sam.model.Validator; import com.ibm.streams.instance.sam.model.logical.spl.SubmissionTimeValue; import com.ibm.streams.instance.sam.persistence.IDotGraph; import com.ibm.streams.messages.StreamsRuntimeMessagesKey; import com.ibm.streams.sch.composer.Composer_helper; import java.math.BigInteger; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.IdentityHashMap; import java.util.Iterator; import 
java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.SortedMap; import java.util.SortedSet; import java.util.TreeMap; import java.util.TreeSet; import java.util.stream.Collectors; public class TopologyApplication extends TopologyObject implements IDotGraph, Validator { private final BigInteger id; private final String name; private final String applicationScope; private final String version; private final String dataDirectory; private final String applicationDirectory; private final String outputDirectory; private final String checkpointDirectory; private final String adlPath; private final String buildId; private final String bundlePath; private final String productVersion; private final ToolkitMap toolkitMap; private RuntimeConstant display; private SortedMap<BigInteger, CustomMetric> customMetrics; private SortedMap<BigInteger, Library> libraries; private SortedMap<BigInteger, Hostpool> hostpools; private SortedMap<BigInteger, ParallelRegion> parallelRegions; private SortedMap<String, BigInteger> parallelRegionNames; // value of this map is key in parallelRegions map private SortedMap<BigInteger, TopologyTupleTypeType> tupleTypes; private SortedSet<BigInteger> nodeIndexes; private transient SortedSet<TopologyNode> nodes; private Map<BigInteger, SubmissionTimeValue> submissionTimeValues; private final YieldBehaviour yieldBehaviour; public TopologyApplication( String name, String applicationScope, String version, String dataDirectory, String applicationDirectory, String outputDirectory, String checkpointDirectory, String adlPath, String buildId, String bundlePath, ToolkitMap toolkitMap, String productVersion, YieldBehaviour yieldBehaviour) { this.tracker = SAMStaticContext.getTopologyTracker(); this.name = name; this.applicationScope = applicationScope; this.version = version; this.dataDirectory = dataDirectory; this.applicationDirectory = applicationDirectory; this.outputDirectory = outputDirectory; 
this.checkpointDirectory = checkpointDirectory; this.adlPath = adlPath; this.buildId = buildId; this.bundlePath = bundlePath; this.productVersion = productVersion; this.toolkitMap = toolkitMap; this.yieldBehaviour = yieldBehaviour; customMetrics = new TreeMap<>(); libraries = new TreeMap<>(); hostpools = new TreeMap<>(); parallelRegions = new TreeMap<>(); tupleTypes = new TreeMap<>(); nodeIndexes = new TreeSet<>(); nodes = new TreeSet<>(); submissionTimeValues = new HashMap<>(); // for oleta, topology application id = fused application id = job id this.id = SAMStaticContext.getIDGeneratorFactory().newTopologyApplicationId(); build(); } @Override public void build() { tracker.registerTopologyApplication(id, this); } public String getProductVersion() { return productVersion; } // product version without "." public String getBundleVersion() { return productVersion.replaceAll("\\.", ""); } public ToolkitMap getToolkitMap() { return toolkitMap; } public BigInteger getId() { return id; } public String getName() { return name; } public String getApplicationScope() { return applicationScope; } public String getVersion() { return version; } public String getDataDirectory() { return dataDirectory; } public String getApplicationDirectory() { return applicationDirectory; } public String getOutputDirectory() { return outputDirectory; } public String getCheckpointDirectory() { return checkpointDirectory; } public String getAdlPath() { return adlPath; } public ImmutableSortedMap<BigInteger, CustomMetric> getCustomMetrics() { return ImmutableSortedMap.copyOf(customMetrics); } public void setCustomMetrics(SortedMap<BigInteger, CustomMetric> customMetrics) { this.customMetrics = customMetrics; } public ImmutableSortedMap<BigInteger, Library> getLibraries() { return ImmutableSortedMap.copyOf(libraries); } public void setLibraries(SortedMap<BigInteger, Library> libraries) { this.libraries = libraries; } public ImmutableSortedMap<BigInteger, Hostpool> getHostpools() { return 
ImmutableSortedMap.copyOf(hostpools); } public SortedMap<BigInteger, Hostpool> getHostpools_forUpdate() { return (hostpools); } public void setHostpools(SortedMap<BigInteger, Hostpool> hostpools) { this.hostpools = hostpools; } public ImmutableSortedMap<BigInteger, ParallelRegion> getParallelRegions() { return ImmutableSortedMap.copyOf(parallelRegions); } public void setParallelRegions(SortedMap<BigInteger, ParallelRegion> parallelRegions) { this.parallelRegions = parallelRegions; } public void setParallelRegionNames(SortedMap<String, BigInteger> parallelRegionNames) { this.parallelRegionNames = parallelRegionNames; } public ImmutableSortedMap<BigInteger, TopologyTupleTypeType> getTupleTypes() { return ImmutableSortedMap.copyOf(tupleTypes); } public void setTupleTypes(SortedMap<BigInteger, TopologyTupleTypeType> tupleTypes) { this.tupleTypes = tupleTypes; } public ImmutableSortedSet<BigInteger> getNodeIndexes() { return ImmutableSortedSet.copyOf(nodeIndexes); } public void setNodes(List<TopologyNode> nodes) { this.nodes.clear(); this.nodes.addAll(nodes); for (TopologyNode node : nodes) { nodeIndexes.add(node.getIndex()); } if (Trace.isEnabled(Level.TRACE)) { Trace.logTrace( "TopologyApplication.setNodes() nodeIndexes for id=" + id + ", tracker=" + tracker + ": " + nodeIndexes); } } private void removeNodes(SortedSet<TopologyNode> nodes) { this.nodes.removeAll(nodes); for (TopologyNode node : nodes) { nodeIndexes.remove(node.getIndex()); node.deepUnbuild(); } } public RuntimeConstant getDisplay() { return display; } public void setDisplay(RuntimeConstant display) { this.display = display; } public Map<BigInteger, SubmissionTimeValue> getSubmissionTimeValues() { return submissionTimeValues; } public void setSubmissionTimeValues(Map<BigInteger, SubmissionTimeValue> values) { this.submissionTimeValues = values; } public String getBuildId() { return buildId; } public String getBundlePath() { return bundlePath; } // for recovery public void populateTransientFields() { 
this.tracker = SAMStaticContext.getTopologyTracker(); nodes = null; getNodes(); } // refresh operator cache, retrieve operators from registry private void refreshNodes() { nodes = new TreeSet<>(); if (Trace.isEnabled(Level.TRACE)) { Trace.logTrace( "TopologyApplication.refreshNodes() nodeIndexes for id=" + id + ", tracker=" + tracker + ": " + nodeIndexes); } for (BigInteger index : nodeIndexes) { TopologyNode tn = tracker.getTopologyNode(id, index); if (tn == null) { throw new IllegalStateException("TopologyNode id=" + id + " index=" + index + " is null"); } nodes.add(tn); } } public SortedSet<TopologyNode> getNodes() { if (nodes == null || nodes.size() != nodeIndexes.size()) { refreshNodes(); } return nodes; } public String toDotGraph() { // Initialization Stuff String graph = "digraph " + name + " {\n"; graph += "\tnode [shape=record];"; // Nodes for (BigInteger index : nodeIndexes) graph += tracker.getTopologyNode(id, index).toDotGraph() + "\n"; graph += "\n}"; return graph; } @Override public String toString() { ModelDumper dumper = new ModelDumper(); dumper.startLine("<topologyModel"); dumper.append(" name=\"" + name); dumper.append("\" applicationScope=\"" + applicationScope); dumper.append("\" dataDirectory=\"" + dataDirectory); dumper.append("\" adlPath=\"" + adlPath); dumper.append("\" checkpointDirectory=\"" + checkpointDirectory); dumper.append("\" outputDirectory=\"" + outputDirectory); dumper.append("\" version=\"" + version); dumper.append("\" productVersion=\"" + productVersion); dumper.append("\">").end(); dumper.indent(); dumper.line("<libraries>"); dumper.indent(); dumper.append(libraries); dumper.outdent(); dumper.line("</libraries>"); dumper.line("<hostpools>"); dumper.indent(); dumper.append(hostpools); dumper.outdent(); dumper.line("</hostpools>"); dumper.line("<parallelRegions>"); dumper.indent(); dumper.append(parallelRegions); dumper.outdent(); dumper.append("</parallelRegions>"); dumper.line("<tupleTypes>"); dumper.indent(); 
dumper.append(tupleTypes); dumper.outdent(); dumper.line("</tupleTypes>"); dumper.line("<nodes>"); dumper.indent(); dumper.append(getNodes()); dumper.outdent(); dumper.line("</nodes>"); dumper.line("<customMetrics>"); dumper.indent(); dumper.append(customMetrics); dumper.outdent(); dumper.line("</customMetrics>"); dumper.outdent(); dumper.line("</topologyModel>"); return dumper.toString(); } public void validate() throws Exception { checkNotNull(name); checkNotNull(applicationScope); // Version can be null // Data Directory can be null checkNotNull(applicationDirectory); checkNotNull(outputDirectory); // Checkpoint Directory can be null // ADL Path can be null validateIfPresent(display); checkContentsPositionIndex(customMetrics.keySet()); checkContentsNotNullAndValidate(customMetrics.values()); checkContentsPositionIndex(libraries.keySet()); checkContentsNotNullAndValidate(libraries.values()); checkContentsPositionIndex(hostpools.keySet()); checkContentsNotNullAndValidate(hostpools.values()); checkContentsPositionIndex(tupleTypes.keySet()); // TODO: Re-enable when tuple things are sorted out // checkContentsNotNullAndValidate(tupleTypes.values()); // We need at least 1 node per application checkArgument(nodeIndexes.size() > 0, "At least 1 Topology Node required per application"); checkContentsNotNullAndValidate(getNodes()); } public YieldBehaviour getYieldBehaviour() { return yieldBehaviour; } // for change detection @Override public int hashCode() { return Objects.hash(id, hostpools, nodeIndexes, parallelRegions); } private void transferOwnership( Set<TopologyNode> nodes, Map<BigInteger, BigInteger> nodeIndexFix, Map<TopologyNodeOutputPort, Map<BigInteger, BigInteger>> splitterIndexFix, Map<BigInteger, BigInteger> hostpoolIndexFix) { // Transfering the ownership of a set of nodes from another TopologyApplication to this // TopologyApplication is tricky. We must do it in the order we do it below. We also must // transfer all connected nodes at the same time. 
That is, if we have a bunch of nodes // that are connected, and we want to transfer them, we need to collect them all into a // single set and pass that set to transferOwership. Connected nodes will share // TopologyNodeConnection objects, and we must handle them all at once. // // Transfering ownership is tricky because we have to change node indexes, change the // application ID, fixup connections (because of the changed node indexes), unregister // the nodes, ports and connections with the old application's object tracker, and then // re-register the nodes, ports and connections with this application's object tracker. // The tricky part is that application IDs and node indexes are how much of this is // stored in the topology tracker. In principle, we have to do this in four phases: // // 1. Load all connections from the topology tracker. // 2. Unregister all connections from the topology tracker. // 3. Change all connection node indexes and application IDs, while fixing // connection indexes. // 4. Re-register all connections using their new information with the tracker, // while handling all ports and nodes at the same time. // // Because TopologyNodeConnections are shared objects between TopologyNodePorts, we have // to do all of each step at once, which means we must iterate over all nodes four times. // We cannot iterate over the nodes once, and try to do these steps individually for // each node in isolation. // 1. Load all connections from the topology tracker. Note that getOperConnections will // retrieve the TopologyNodeConnections from the tracker if they are not already stored // locally, which is why we call it without using its result. for (TopologyNode node : nodes) { for (TopologyNodeInputPort iport : node.getInputPorts().values()) { iport.getOperConnections(); } for (TopologyNodeOutputPort oport : node.getOutputPorts().values()) { oport.getOperConnections(); } } // 2. Unregister all connections from the topology tracker. 
These are shared among // TopologyNodePorts, so we have to do all at once. It is safe to do this now // because we can be certain we've loaded all of them from the tracker. for (TopologyNode node : nodes) { for (TopologyNodeInputPort iport : node.getInputPorts().values()) { for (TopologyNodeConnection tnc : iport.getOperConnections()) { tnc.unbuild(); } } for (TopologyNodeOutputPort oport : node.getOutputPorts().values()) { for (TopologyNodeConnection tnc : oport.getOperConnections()) { tnc.unbuild(); } } } // 3. Change all connection node indexes and application IDs, while fixing // connection indexes. It is safe to do this because we can be certain none // are registered with the tracker. // Tracks which ConnectionIds have already been fixed - since they are shared // by TopologyNodeConnections and TopologyNode objects, we run into the danger of // double-fixing a ConnectionId when we encounter it through a different object. IdentityHashMap<TopologyNodeConnectionId, Boolean> fixed = new IdentityHashMap<>(); for (TopologyNode node : nodes) { for (TopologyNodeInputPort iport : node.getInputPorts().values()) { iport.fixConnections(id, nodeIndexFix, fixed); } for (TopologyNodeOutputPort oport : node.getOutputPorts().values()) { oport.fixConnections(id, nodeIndexFix, fixed); } } // 4. Re-register all connections using their new information with the tracker, // while handling all ports and nodes at the same time. 
for (TopologyNode node : nodes) { BigInteger nodeIndex = node.getIndex(); if (nodeIndexFix.containsKey(nodeIndex)) { nodeIndex = nodeIndexFix.get(nodeIndex); } for (TopologyNodeInputPort iport : node.getInputPorts().values()) { iport.transferOwnership(id, nodeIndex); for (TopologyNodeConnection tnc : iport.getOperConnections()) { tnc.build(); } } for (TopologyNodeOutputPort oport : node.getOutputPorts().values()) { oport.transferOwnership(id, nodeIndex, splitterIndexFix.get(oport)); for (TopologyNodeConnection tnc : oport.getOperConnections()) { if (splitterIndexFix.get(oport) != null) { if (splitterIndexFix.get(oport).containsKey(tnc.getSplitterIndex())) { tnc.setSplitterIndex(splitterIndexFix.get(oport).get(tnc.getSplitterIndex())); } } tnc.build(); } } // Note that we can only do this AFTER building our connections and transferring // our ports. If we try to do it before, we will not be properly connected // with our ports. node.transferOwnership(id, nodeIndex, hostpoolIndexFix); this.nodeIndexes.add(node.getIndex()); } this.nodes.addAll(nodes); } private static void checkString(String a, String b, String msg) throws StreamsException { if (a == null && b == null) { return; } if (a != null) { if (b != null) { if (!a.equals(b)) { throw new StreamsException( "TopologyApplications have different " + msg + "; " + a + ", " + b); } } else { throw new StreamsException( "TopologyApplications have different " + msg + "; " + a + ", null"); } } else { throw new StreamsException("TopologyApplications have different " + msg + "; null, " + b); } } public BigInteger getParallelRegionWidth(String regionName) { return parallelRegions.get(parallelRegionNames.get(regionName)).getLocalMaxChannels(); } public Map<String, BigInteger> getParallelRegionWidths() { Map<String, BigInteger> nameToWidth = new TreeMap<>(); for (ParallelRegion pr : parallelRegions.values()) { nameToWidth.put(pr.getName(), pr.getLocalMaxChannels()); } return nameToWidth; } private static boolean 
localChannelEquals(List<ParallelRegion> a, List<ParallelRegion> b) { if (a == null && b == null) { return true; } if (a == null || b == null) { return false; } Iterator<ParallelRegion> aIt = a.iterator(); Iterator<ParallelRegion> bIt = b.iterator(); while (aIt.hasNext() && bIt.hasNext()) { if (!aIt.next().getLocalChannelIndex().equals(bIt.next().getLocalChannelIndex())) { return false; } } return true; } private static TopologyNode findLogicalNode(TopologyNode node, Set<TopologyNode> otherNodes) { for (TopologyNode otherNode : otherNodes) { if (node.getLogicalIndex().equals(otherNode.getLogicalIndex()) && localChannelEquals(node.getParallelRegions(), otherNode.getParallelRegions())) { return otherNode; } } return null; } public String resolveParallelRegionName(String pattern) throws StreamsException { if (Trace.isEnabled(Level.TRACE)) { Trace.logTrace( "ENTER TopologyApplication.resolveParallelRegionName with pattern=" + pattern + ", parallelRegionNames=" + parallelRegionNames); } List<String> matches = new ArrayList<>(); for (String name : parallelRegionNames.keySet()) { if (name.matches(Composer_helper.prepareConditionString(pattern))) { matches.add(name); } } if (matches.size() == 0) { throw new StreamsException( StreamsRuntimeMessagesKey.Key.SAMNoMatchingParallelRegion, pattern, getId()); } if (matches.size() > 1) { throw new StreamsException( StreamsRuntimeMessagesKey.Key.SAMConfigOverlayMultipleParallelRegionsSpecified, pattern, matches.toString(), getId()); } return matches.get(0); } public void transferParallelRegionDifference( TopologyApplication updated, String regionPattern, SortedSet<TopologyNode> changedNodes, SortedSet<TopologyNode> addedNodes, SortedSet<TopologyNode> removedNodes) throws StreamsException { // We only allow diffs of applications that differ in nodes only - they must derive from the // same ADL, // which means their names, toolkits, product version, etc. must be the same. 
In theory, we're // leaving // out a bunch of fields that should be equal, but I think it's okay to just check the strings, // which // are easy to check, and the most important ones are names and versions. checkString(this.getProductVersion(), updated.getProductVersion(), "ProductVersion"); checkString(this.getName(), updated.getName(), "Name"); checkString(this.getApplicationScope(), updated.getApplicationScope(), "ApplicationScope"); checkString(this.getVersion(), updated.getVersion(), "Version"); checkString( this.getApplicationDirectory(), updated.getApplicationDirectory(), "ApplicationDirectory"); checkString(this.getOutputDirectory(), updated.getOutputDirectory(), "OutputDirectory"); checkString( this.getCheckpointDirectory(), updated.getCheckpointDirectory(), "CheckpointDirectory"); checkString(this.getAdlPath(), updated.getAdlPath(), "AdlPath"); checkString(this.getBuildId(), updated.getBuildId(), "BuildId"); checkString(this.getBundlePath(), updated.getBundlePath(), "BundlePath"); SortedSet<TopologyNode> originalNodes = this.getNodes(); SortedSet<TopologyNode> updatedNodes = updated.getNodes(); // should never happen as TopologyApplications always have one node, but // it still makes logical sense for a diff if (originalNodes.isEmpty() && updatedNodes.isEmpty()) { return; } String regionName = resolveParallelRegionName(regionPattern); BigInteger originalWidth = this.getParallelRegionWidth(regionName); BigInteger updatedWidth = updated.getParallelRegionWidth(regionName); if (originalWidth.equals(updatedWidth)) { return; } // We accomplish the diff of the two TopologyApplications and the transfer in four phases: // // 1. Determine which nodes were added, deleted or changed in all affected parallel regions. // 2. Determine which nodes have splitters or mergers that feed affected parallel regions. // 3. Figure out all splitter indexes that have changed in connections between ports, // assigning new // indexes as needed. // 4. 
Figure out all of the hostpool indexes that have changed, assigning new indexes are // needed. // 5. Apply all changes to this TopologyApplication. // updated node index -> fixed node index (original or new) Map<BigInteger, BigInteger> nodeIndexFix = new TreeMap<>(); // while we're only changing one parallel region, if it's part of a parallel region nesting, // then we // need to keep track of all affected regions Map<String, ParallelRegion> affectedParallelRegions = new HashMap<>(); // keeps track of the nodes in the original application which are the logical // equivalent of the "changed" nodes; these nodes to be replaced with the changed set. SortedSet<TopologyNode> toReplace = new TreeSet<>(); // 1. Determine which nodes were added, deleted or changed in all affected parallel regions. // During // this phase, we also have to figure out if the nodes from the updated TopologyApplication // have // indexes that conflict with this one. If so, we need to track the conflicting indexes, // and // come up with new indexes to assign to them later. if (originalWidth.compareTo(updatedWidth) < 0) { // we're adding nodes // We will use nextIndex to assign new, "corrected" indexes to the added nodes. We need to // make sure it does // not clash with either the original or updated nodes. int nextIndex = Math.max( originalNodes.last().getIndex().intValue(), updatedNodes.last().getIndex().intValue()) + 1; for (TopologyNode node : updatedNodes) { // Note that at this outer-level, we have to check for parallel regions in general; it is // not correct to // just ask if the node is in the parallel region we care about. That's because we need to // do index fixups // on parallel nodes outside of the target parallel region. 
if (node.getParallelRegions() != null) { TopologyNode originalNode = findLogicalNode(node, originalNodes); if (originalNode == null) { SortedSet<TopologyNode> greaterThanOrEqual = originalNodes.tailSet(node); if (greaterThanOrEqual.size() > 0 && greaterThanOrEqual.first().getIndex().equals(node.getIndex())) { nodeIndexFix.put(node.getIndex(), BigInteger.valueOf(nextIndex)); ++nextIndex; } addedNodes.add(node); affectedParallelRegions.putAll( node.getParallelRegions().stream() .collect(Collectors.toMap(e -> e.getName(), e -> e))); } else { if (!originalNode.getIndex().equals(node.getIndex())) { nodeIndexFix.put(node.getIndex(), originalNode.getIndex()); } // We need to replace this node if: // 1. Its in the targetted parallel region. This means, at least, that // we need to change its maxChannels number. // 2. Its name does not match with its logical equivalent. If that's // the case, then we need to change its name, but also we need to // change at least one of its global channel numbers. if (node.isInParallelRegion(regionName) || !node.getName().equals(originalNode.getName())) { toReplace.add(originalNode); changedNodes.add(node); } } } } } else { // we're removing nodes for (TopologyNode node : originalNodes) { if (node.getParallelRegions() != null) { TopologyNode updatedNode = findLogicalNode(node, updatedNodes); if (updatedNode == null) { removedNodes.add(node); affectedParallelRegions.putAll( node.getParallelRegions().stream() .collect(Collectors.toMap(e -> e.getName(), e -> e))); } else { if (!updatedNode.getIndex().equals(node.getIndex())) { nodeIndexFix.put(updatedNode.getIndex(), node.getIndex()); } if (updatedNode.isInParallelRegion(regionName) || !updatedNode.getName().equals(node.getName())) { toReplace.add(node); changedNodes.add(updatedNode); } } } } } // 2. Determine which nodes have splitters or mergers that feed affected parallel regions. // These // nodes are changed, and need to also be replaced. 
for (TopologyNode node : updatedNodes) { if (node.isInParallelRegion(regionName)) { for (TopologyNodeOutputPort oport : node.getOutputPorts().values()) { for (TopologyNodeConnection conn : oport.getOperConnections()) { // the current node IS in the parallel region, so if the node its output port connects // to // IS NOT in the parallel region, then it's not in any of our sets yet and we need to // add // it; these are the merge points TopologyNode dest = conn.getDestination().getOwningOperator(); if (!dest.isInParallelRegion(regionName)) { changedNodes.add(dest); toReplace.add(findLogicalNode(dest, originalNodes)); } } } for (TopologyNodeInputPort iport : node.getInputPorts().values()) { for (TopologyNodeConnection conn : iport.getOperConnections()) { // the current node IS in the parallel region, so if the node its input port connects to // IS NOT in the parallel region, then it's not in any of our sets yet and we need to // add // it; these are splitters TopologyNode src = conn.getSource().getOwningOperator(); if (!src.isInParallelRegion(regionName)) { changedNodes.add(src); toReplace.add(findLogicalNode(src, originalNodes)); } } } } } // {updated output port -> {updated splitter index -> fixed splitter index (original or new)} } Map<TopologyNodeOutputPort, Map<BigInteger, BigInteger>> splitterIndexFix = new TreeMap<>(); // 3. Figure out all splitter indexes that have changed in connections between ports, // assigning new // indexes as needed. This only happens in the nested UDP case. In nested UDP, it is // possible // for the total number of splitters on an node's output port to change. When that happens, // the // connections between the original and updated nodes will disagree on splitter indexes. // Similar // to the node index fixup, we need to track these to apply later. // // Note that we have to do this AFTER phase 2 because we need the full nodeIndexFix map. 
We // need to // compare connections paying attenion to the node indexes that updated connections will // have. for (TopologyNode node : updatedNodes) { for (TopologyNodeOutputPort oport : node.getOutputPorts().values()) { TopologyNode originalNode = null; if (oport.getSplitters() != null && (originalNode = findLogicalNode(node, originalNodes)) != null) { TopologyNodeOutputPort originalOPort = originalNode.getOutputPorts().get(oport.getIndex()); for (TopologyNodeConnection tnc : oport.getOperConnections()) { for (TopologyNodeConnection originalTnc : originalOPort.getOperConnections()) { if (tnc.getSplitterIndex() != null && !tnc.getSplitterIndex().equals(originalTnc.getSplitterIndex()) && tnc.getId().equalsWithNodeIndexFix(originalTnc.getId(), nodeIndexFix)) { if (splitterIndexFix.get(oport) == null) { splitterIndexFix.put(oport, new TreeMap<>()); } splitterIndexFix .get(oport) .put(tnc.getSplitterIndex(), originalTnc.getSplitterIndex()); } } } if (splitterIndexFix.get(oport) != null) { long nextIndex = Math.max( oport.maxSplitterIndex().longValue(), Collections.max(splitterIndexFix.get(oport).values()).longValue()) + 1; Map<BigInteger, Splitter> updatedSplitters = oport.getAllSplitters(); Map<BigInteger, BigInteger> moreFixes = new TreeMap<>(); for (BigInteger value : splitterIndexFix.get(oport).values()) { if (updatedSplitters.containsKey(value) && !splitterIndexFix.get(oport).containsKey(value)) { moreFixes.put(value, BigInteger.valueOf(nextIndex)); ++nextIndex; } } splitterIndexFix.get(oport).putAll(moreFixes); } } } } // updated hostpool index -> fixed hostpool index (original or new) Map<BigInteger, BigInteger> hostpoolIndexFix = new TreeMap<>(); // 4. Figure out all hostpool indexes that have changed, assigning new indexes as needed. This // only happens in // when the replicatedHostTags option is used in multiple parallel regions. 
int nextIndex = Math.max(this.hostpools.lastKey().intValue(), updated.hostpools.lastKey().intValue()) + 1; for (Hostpool originalHp : this.hostpools.values()) { for (Hostpool updatedHp : updated.hostpools.values()) { // There are two situations where we might encounter hostpool collisions: in the index, and // in the name. When // we encounter a name collision, we MUST map the updated hostpool to the original one. // That's an identity // issue; // the updated hostpool IS the original hostpool. When the indexes collide, they are // different hostpools that // just // have an index collision, so we must create a new index for the updated hostpool. But we // have to be careful // when // we do this, as we need to make sure that we remap all name collisions, and that if we // already mapped an index // collision, we don't miss a name collision. Hence, we check the existing fixup map before // doing an index // remap, // but do the name remap whenever we see it. if (!originalHp.getIndex().equals(updatedHp.getIndex()) && originalHp.getName().equals(updatedHp.getName())) { hostpoolIndexFix.put(updatedHp.getIndex(), originalHp.getIndex()); } else if (!hostpoolIndexFix.containsKey(updatedHp.getIndex()) && originalHp.getIndex().equals(updatedHp.getIndex()) && !originalHp.getName().equals(updatedHp.getName())) { hostpoolIndexFix.put(updatedHp.getIndex(), BigInteger.valueOf(nextIndex)); ++nextIndex; } } } // 5. Apply all changes to this TopologyApplication: remove deleted and original changed // nodes, add // new and updated changed nodes, transfer ownership of nodes to this TopologyApplication, // apply // all of the node, splitter and hostpool index fixups, add or remove hostpools, and update // the // parallel region sizes. 
removeNodes(toReplace); removeNodes(removedNodes); transferOwnership( Sets.union(addedNodes, changedNodes), nodeIndexFix, splitterIndexFix, hostpoolIndexFix); for (ParallelRegion pr : affectedParallelRegions.values()) { parallelRegions .get(pr.getRegionIndex()) .setLocalMaxChannels( updated.parallelRegions.get(pr.getRegionIndex()).getLocalMaxChannels()); parallelRegions .get(pr.getRegionIndex()) .setGlobalMaxChannels( updated.parallelRegions.get(pr.getRegionIndex()).getGlobalMaxChannels()); } // if we're decreasing channels, we may need to remove hostpools - but we need to go through our // index fixup this.hostpools .entrySet() .removeIf( entry -> !updated.hostpools.containsKey( hostpoolIndexFix.getOrDefault(entry.getKey(), entry.getKey()))); // if we're adding channels, we may need to add hostpools - but we need to apply the index // fixups first for (Hostpool updatedHp : updated.hostpools.values()) { BigInteger updatedIndex = updatedHp.getIndex(); if (hostpoolIndexFix.containsKey(updatedIndex)) { updatedIndex = hostpoolIndexFix.get(updatedIndex); } updatedHp.setIndex(updatedIndex); this.hostpools.putIfAbsent(updatedIndex, updatedHp); } } }
Kohze/SpatialMapDev
node_modules/office-ui-fabric-react/lib/components/Tooltip/Tooltip.scss.js
<filename>node_modules/office-ui-fabric-react/lib/components/Tooltip/Tooltip.scss.js
"use strict";
/* tslint:disable */
// NOTE(review): this file appears to be build-generated from Tooltip.scss
// (hashed class-name suffixes, load-themed-styles registration, source map
// reference) — avoid hand edits; regenerate from the .scss source instead.
var load_themed_styles_1 = require("@microsoft/load-themed-styles");
// CSS-module class-name map exported to consumers of the Tooltip styles.
var styles = {
    root: 'root_3e796555',
    hasMediumDelay: 'hasMediumDelay_3e796555',
    content: 'content_3e796555',
    subText: 'subText_3e796555',
};
// Register the themed CSS with the loader. Each "theme" slot is resolved to
// the active theme's color at load time, falling back to "defaultValue".
load_themed_styles_1.loadStyles([{ "rawString": ".root_3e796555{max-width:364px;background:" }, { "theme": "white", "defaultValue": "#ffffff" }, { "rawString": ";padding:8px;pointer-events:none}.root_3e796555.hasMediumDelay_3e796555{-webkit-animation-delay:.3s;animation-delay:.3s}.content_3e796555{font-size:12px;color:" }, { "theme": "neutralPrimary", "defaultValue": "#333333" }, { "rawString": ";word-wrap:break-word;overflow-wrap:break-word}.subText_3e796555{margin:0}" }]);
module.exports = styles;
/* tslint:enable */
//# sourceMappingURL=Tooltip.scss.js.map
Consult2016/super-devops
dopaas-common/src/main/java/com/wl4g/dopaas/common/bean/cmdb/Host.java
/*
 * Copyright 2017 ~ 2050 the original author or authors <<EMAIL>, <EMAIL>>.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.wl4g.dopaas.common.bean.cmdb;

import java.util.List;

import com.wl4g.component.core.bean.BaseBean;

/**
 * CMDB host entity: a managed machine identified by a display name and a
 * network hostname, assigned to an IDC and reachable through zero or more
 * SSH credential records.
 */
public class Host extends BaseBean {
    private static final long serialVersionUID = -7546448616357790576L;

    // Display name of the host (whitespace-trimmed on set).
    private String name;
    // Network hostname or address (whitespace-trimmed on set).
    private String hostname;
    // Identifier of the IDC (data center) this host belongs to.
    private Long idcId;
    // Host status code.
    private Integer status;
    // Identifiers of the SSH credential records usable for this host.
    private List<Long> sshIds;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = trimOrNull(name);
    }

    public String getHostname() {
        return hostname;
    }

    public void setHostname(String hostname) {
        this.hostname = trimOrNull(hostname);
    }

    public Long getIdcId() {
        return idcId;
    }

    public void setIdcId(Long idcId) {
        this.idcId = idcId;
    }

    public Integer getStatus() {
        return status;
    }

    public void setStatus(Integer status) {
        this.status = status;
    }

    public List<Long> getSshIds() {
        return sshIds;
    }

    public void setSshIds(List<Long> sshIds) {
        this.sshIds = sshIds;
    }

    /**
     * Returns {@code value} with surrounding whitespace removed, preserving
     * {@code null} as {@code null}.
     */
    private static String trimOrNull(String value) {
        return (value != null) ? value.trim() : null;
    }
}
webfirmframework/wffweb
wffweb/src/test/java/com/webfirmframework/wffweb/tag/html/WhenURIUseCaseTest.java
<reponame>webfirmframework/wffweb
package com.webfirmframework.wffweb.tag.html;

import static org.junit.Assert.assertEquals;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;

import org.junit.Before;
import org.junit.Test;

import com.webfirmframework.wffweb.server.page.BrowserPage;
import com.webfirmframework.wffweb.tag.html.attribute.Name;
import com.webfirmframework.wffweb.tag.html.stylesandsemantics.Div;
import com.webfirmframework.wffweb.tag.htmlwff.NoTag;

/**
 * Use-case tests for {@code AbstractHtml.whenURI(...)}: tags whose content is
 * (re)built when the {@link BrowserPage} URI matches a predicate. Each test
 * records callback invocations in a StringBuilder ("control flow") and asserts
 * both the callback order and the rendered HTML after each URI change.
 */
public class WhenURIUseCaseTest {

    private Html html;
    // NOTE(review): never populated or read in this class — presumably a
    // leftover fixture; confirm before removing.
    private Set<AbstractHtml> expectedTagsForURIChange;
    private Div mainDiv;
    // URI fixtures: initialUri matches none of the nested-region URIs below;
    // uri1 ⊂ uri11 ⊂ uri111/uri112 form a prefix hierarchy for nesting tests.
    private String initialUri = "/someuri";
    private String uri1 = "/user";
    private String uri11 = "/user/dashboard";
    private String uri111 = "/user/dashboard/items";
    private String uri112 = "/user/dashboard/otheritems";
    private BrowserPage browserPage;

    @Before
    public void setup() {
        expectedTagsForURIChange = new HashSet<>();
        html = new Html(null);
        mainDiv = new Div(html);
        // Anonymous BrowserPage whose render() sets the initial URI before
        // returning the root tag; toHtmlString() triggers the initial render.
        browserPage = new BrowserPage() {

            @Override
            public String webSocketUrl() {
                // TODO Auto-generated method stub
                return "wss://wffweb";
            }

            @Override
            public AbstractHtml render() {
                super.setURI(initialUri);
                return html;
            }
        };
        browserPage.toHtmlString();
    }

    /**
     * Supplier form of whenURI: content is produced once while the predicate
     * matches, and removed (but the supplier NOT re-invoked) when the URI
     * stops matching.
     */
    @Test
    public void testUsage1() {
        StringBuilder controlFlow = new StringBuilder();
        AbstractHtml div1 = new Div(null).whenURI((uri) -> uri.startsWith(initialUri), () -> {
            controlFlow.append("div1.whenURI\n");
            return new AbstractHtml[] {new NoTag(null, "somecontent1")};
        }).currentAs();
        mainDiv.appendChild(div1);
        // Appending under the rendered page evaluates the predicate once.
        assertEquals("div1.whenURI\n", controlFlow.toString());
        assertEquals("<div data-wff-id=\"S3\">somecontent1</div>", div1.toBigHtmlString());

        // URI no longer matches: content is cleared, supplier not called again.
        browserPage.setURI(uri1);
        assertEquals("<div data-wff-id=\"S3\"></div>", div1.toBigHtmlString());
        assertEquals("div1.whenURI\n", controlFlow.toString());
    }

    /**
     * Event-consumer form of whenURI: the consumer mutates the tag itself, so
     * content added this way is NOT removed when the URI stops matching.
     */
    @Test
    public void testUsage2() {
        StringBuilder controlFlow = new StringBuilder();
        AbstractHtml div1 = new Div(null).whenURI(uri -> uri.startsWith(initialUri), (event) -> {
            controlFlow.append("div1.whenURI\n");
            event.sourceTag().addInnerHtmls(new AbstractHtml[] {new NoTag(null, "somecontent1")});
        }).currentAs();
        mainDiv.appendChild(div1);
        assertEquals("div1.whenURI\n", controlFlow.toString());
        assertEquals("<div data-wff-id=\"S3\">somecontent1</div>", div1.toBigHtmlString());

        // Unlike testUsage1, the inner content survives a non-matching URI.
        browserPage.setURI(uri1);
        assertEquals("div1.whenURI\n", controlFlow.toString());
        assertEquals("<div data-wff-id=\"S3\">somecontent1</div>", div1.toBigHtmlString());
    }

    /**
     * Nested whenURI regions (div1 > div2 > div3, matching the uri1/uri11/
     * uri111-uri112 prefix hierarchy), including repeated/shuffled URI
     * switches to verify the callbacks and HTML stay deterministic.
     */
    @Test
    public void testUsage3() {
        AtomicInteger counter = new AtomicInteger();
        StringBuilder controlFlow = new StringBuilder();

        // div3 carries TWO whenURI predicates (uri111 vs uri112) with
        // different content suppliers.
        AbstractHtml div3 = new Div(null, new Name("div3-" + counter.incrementAndGet()));
        div3.whenURI(uri -> uri.startsWith(uri111), () -> {
            controlFlow.append("div3.whenURI1+");
            return new AbstractHtml[] {new NoTag(null, "somecontent1 uri111")};
        }).currentAs();
        div3.whenURI(uri -> uri.startsWith(uri112), () -> {
            controlFlow.append("div3.whenURI2+");
            return new AbstractHtml[] {new NoTag(null, "somecontent2 uri112")};
        }).currentAs();

        AbstractHtml div2 = new Div(null, new Name("div2-" + counter.incrementAndGet())).whenURI(uri -> uri.startsWith(uri11), () -> {
            controlFlow.append("div2.whenURI+");
            return new AbstractHtml[] {div3};
        }).currentAs();

        AbstractHtml div1 = new Div(null, new Name("div1-" + counter.incrementAndGet())).whenURI(uri -> uri.startsWith(uri1), () -> {
            controlFlow.append("div1.whenURI+");
            return new AbstractHtml[] {div2};
        }).currentAs();

        mainDiv.appendChild(div1);

        // initialUri matches none of the predicates: no callbacks, empty div1.
        String expectedCFForInitialURI = "";
        assertEquals(expectedCFForInitialURI, controlFlow.toString());
        String expectedForInitialURI = "<div data-wff-id=\"S3\" name=\"div1-3\"></div>";
        assertEquals(expectedForInitialURI, div1.toBigHtmlString());

        // uri1 matches only the outermost region.
        controlFlow.delete(0, controlFlow.length());
        browserPage.setURI(uri1);
        String expectedCFForURI1 = "div1.whenURI+";
        assertEquals(expectedCFForURI1, controlFlow.toString());
        String expectedForURI1 = "<div data-wff-id=\"S3\" name=\"div1-3\"><div data-wff-id=\"S4\" name=\"div2-2\"></div></div>";
        assertEquals(expectedForURI1, div1.toBigHtmlString());

        // uri11 matches the outer two regions (callbacks fire outermost-first).
        controlFlow.delete(0, controlFlow.length());
        browserPage.setURI(uri11);
        String expectedCFForURI11 = "div1.whenURI+div2.whenURI+";
        assertEquals(expectedCFForURI11, controlFlow.toString());
        String expectedForURI11 = "<div data-wff-id=\"S3\" name=\"div1-3\"><div data-wff-id=\"S4\" name=\"div2-2\"><div data-wff-id=\"S5\" name=\"div3-1\"></div></div></div>";
        assertEquals(expectedForURI11, div1.toBigHtmlString());

        // uri111 additionally triggers div3's FIRST predicate.
        controlFlow.delete(0, controlFlow.length());
        browserPage.setURI(uri111);
        String expectedCFForURI111 = "div1.whenURI+div2.whenURI+div3.whenURI1+";
        assertEquals(expectedCFForURI111, controlFlow.toString());
        String expectedForURI111 = "<div data-wff-id=\"S3\" name=\"div1-3\"><div data-wff-id=\"S4\" name=\"div2-2\"><div data-wff-id=\"S5\" name=\"div3-1\">somecontent1 uri111</div></div></div>";
        assertEquals(expectedForURI111, div1.toBigHtmlString());

        // uri112 triggers div3's SECOND predicate instead.
        controlFlow.delete(0, controlFlow.length());
        browserPage.setURI(uri112);
        String expectedCFForURI112 = "div1.whenURI+div2.whenURI+div3.whenURI2+";
        assertEquals(expectedCFForURI112, controlFlow.toString());
        String expectedForURI112 = "<div data-wff-id=\"S3\" name=\"div1-3\"><div data-wff-id=\"S4\" name=\"div2-2\"><div data-wff-id=\"S5\" name=\"div3-1\">somecontent2 uri112</div></div></div>";
        assertEquals(expectedForURI112, div1.toBigHtmlString());

        // Bundle (html, control-flow, uri) expectations for repeated switching.
        record ResultCombination(String expectedHtml, String expectedControlFlow, String uri) {
        }

        List<ResultCombination> resultCombinations = new ArrayList<>();
        resultCombinations.add(new ResultCombination(expectedForURI111, expectedCFForURI111, uri111));
        resultCombinations.add(new ResultCombination(expectedForURI112, expectedCFForURI112, uri112));

        // Repeatedly toggle between uri111/uri112 (shuffled each round): the
        // control flow is only reset when the URI actually changes, and the
        // expectations must hold every time.
        String prevURI = "";
        for(int i = 0; i < 250; i++) {
            for (var each: resultCombinations) {
                if (!prevURI.equals(each.uri)) {
                    controlFlow.delete(0, controlFlow.length());
                }
                browserPage.setURI(each.uri);
                prevURI = each.uri;
                assertEquals(each.expectedControlFlow, controlFlow.toString());
                assertEquals(each.expectedHtml, div1.toBigHtmlString());
            }
            Collections.shuffle(resultCombinations);
        }

        // Second round with all five URIs; only the control flow is asserted.
        resultCombinations.add(new ResultCombination(expectedForInitialURI, expectedCFForInitialURI, initialUri));
        resultCombinations.add(new ResultCombination(expectedForURI1, expectedCFForURI1, uri1));
        resultCombinations.add(new ResultCombination(expectedForURI11, expectedCFForURI11, uri11));
        for(int i = 0; i < 250; i++) {
            for (var each: resultCombinations) {
                if (!prevURI.equals(each.uri)) {
                    controlFlow.delete(0, controlFlow.length());
                }
                browserPage.setURI(each.uri);
                prevURI = each.uri;
                assertEquals(each.expectedControlFlow, controlFlow.toString());
            }
        }
    }
}
Andreas237/AndroidPolicyAutomation
ExtractedJars/Ibotta_com.ibotta.android/javafiles/com/google/android/gms/common/internal/ClientSettings$OptionalApiSettings.java
// Decompiled by Jad v1.5.8g. Copyright 2001 <NAME>. // Jad home page: http://www.kpdus.com/jad.html // Decompiler options: packimports(3) annotate safe package com.google.android.gms.common.internal; import java.util.Collections; import java.util.Set; // Referenced classes of package com.google.android.gms.common.internal: // ClientSettings, Preconditions public static final class ClientSettings$OptionalApiSettings { public final Set mScopes; public ClientSettings$OptionalApiSettings(Set set) { // 0 0:aload_0 // 1 1:invokespecial #15 <Method void Object()> Preconditions.checkNotNull(((Object) (set))); // 2 4:aload_1 // 3 5:invokestatic #21 <Method Object Preconditions.checkNotNull(Object)> // 4 8:pop mScopes = Collections.unmodifiableSet(set); // 5 9:aload_0 // 6 10:aload_1 // 7 11:invokestatic #27 <Method Set Collections.unmodifiableSet(Set)> // 8 14:putfield #29 <Field Set mScopes> // 9 17:return } }
tanzle-aames/a-simple-triangle
part-17-vulkan-android/main/src/core/graphics-wrapper.hpp
#pragma once

// Platform-dispatch header: pulls in the graphics API headers for the current
// target and defines USING_GLES on targets that use OpenGL ES 2.

#if defined(__EMSCRIPTEN__)
// Browser (WebAssembly) build: OpenGL ES 2 via Emscripten.
#include <GLES2/gl2.h>
#define USING_GLES
#elif __APPLE__
// Apple builds get Vulkan (via MoltenVK's vulkan.hpp) plus a GL flavor that
// depends on the device class below.
#include <vulkan/vulkan.hpp>
#define GL_SILENCE_DEPRECATION
#include "TargetConditionals.h"
#if TARGET_OS_IPHONE
// iOS: OpenGL ES 2.
#include <OpenGLES/ES2/gl.h>
#define USING_GLES
#else
// macOS: desktop OpenGL 3.
#include <OpenGL/gl3.h>
#endif
#elif __ANDROID__
// Android: Vulkan loaded dynamically through vulkan_wrapper, plus OpenGL ES 2.
#include <vulkan_wrapper.h>
#include <vulkan/vulkan.hpp>
#include <GLES2/gl2.h>
#define USING_GLES
#elif WIN32
// Windows: desktop OpenGL through statically linked GLEW.
#define GLEW_STATIC
#include <GL/glew.h>
#endif
abrunan/job4j
chapter_002/src/main/java/ru/job4j/tracker/Input.java
package ru.job4j.tracker;

/**
 * Abstraction over the source of user input for the tracker application,
 * allowing console input to be swapped for a stubbed implementation in tests.
 */
public interface Input {
    /**
     * Shows {@code message} to the user and reads a string answer.
     *
     * @param message prompt shown to the user
     * @return the string entered by the user
     */
    String askString(String message);

    /**
     * Shows {@code message} to the user and reads an integer answer.
     *
     * @param message prompt shown to the user
     * @return the number entered by the user
     */
    int askInt(String message);

    /**
     * Shows {@code message} to the user and reads an integer answer bounded
     * by {@code max} (e.g. a menu selection).
     *
     * @param message prompt shown to the user
     * @param max upper bound for the accepted value
     * @return the number entered by the user
     */
    int askInt(String message, int max);
}
drecuk/JavaQuadtreeAgentFlocking
TrophicNetworkFlock/src/utilities/NodeType.java
<reponame>drecuk/JavaQuadtreeAgentFlocking
package utilities;

/**
 * Position of a node within the quadtree hierarchy:
 * {@code root} (the single top node), {@code branch} (internal node with
 * children), or {@code leaf} (terminal node).
 */
public enum NodeType {
    root, branch, leaf
}
bartholomews/spotify4s
modules/play/src/main/scala/io/bartholomews/spotify4s/playJson/SimpleTrackPlayJson.scala
<reponame>bartholomews/spotify4s
package io.bartholomews.spotify4s.playJson

import io.bartholomews.iso.CountryCodeAlpha2
import io.bartholomews.spotify4s.core.entities._
import play.api.libs.functional.syntax.toFunctionalBuilderOps
import play.api.libs.json.{Format, JsPath, Json, Reads}
import sttp.model.Uri

/**
 * Play-JSON codec for [[SimpleTrack]].
 *
 * The `reads` below is built with the applicative `.and(...)` combinator, so
 * the field order MUST match the parameter order of `SimpleTrack.apply` —
 * do not reorder lines without updating the entity.
 */
object SimpleTrackPlayJson {
  import codecs._
  val reads: Reads[SimpleTrack] = (JsPath \ "artists")
    .read[List[SimpleArtist]](Reads.list(SimpleArtistPlayJson.reads))
    // "available_markets" may be absent entirely; default to an empty list.
    .and((JsPath \ "available_markets").read[List[CountryCodeAlpha2]].orElse(Reads.pure(List.empty)))
    .and((JsPath \ "disc_number").read[Int])
    .and((JsPath \ "duration_ms").read[Int])
    .and((JsPath \ "explicit").read[Boolean])
    .and((JsPath \ "external_urls").readNullable[ExternalResourceUrl])
    .and((JsPath \ "href").readNullable[Uri])
    .and((JsPath \ "id").readNullable[SpotifyId])
    .and((JsPath \ "is_playable").readNullable[Boolean])
    .and((JsPath \ "linked_from").readNullable[LinkedTrack])
    .and((JsPath \ "restrictions").readNullable[Restrictions])
    .and((JsPath \ "name").read[String])
    .and((JsPath \ "preview_url").readNullable[Uri])
    .and((JsPath \ "track_number").read[Int])
    .and((JsPath \ "uri").read[SpotifyUri])
    .and((JsPath \ "is_local").read[Boolean])(SimpleTrack.apply _)

  // Writes side is derived; only the Reads needs the custom defaults above.
  val format: Format[SimpleTrack] = Format[SimpleTrack](reads, Json.writes[SimpleTrack])
}
sjj3086786/aliyun-openapi-java-sdk
aliyun-java-sdk-smartag-inner/src/main/java/com/aliyuncs/smartag_inner/transform/v20180313/InnerListCloudConnectNetworksResponseUnmarshaller.java
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.aliyuncs.smartag_inner.transform.v20180313;

import java.util.ArrayList;
import java.util.List;

import com.aliyuncs.smartag_inner.model.v20180313.InnerListCloudConnectNetworksResponse;
import com.aliyuncs.smartag_inner.model.v20180313.InnerListCloudConnectNetworksResponse.CloudConnectNetwork;
import java.util.Map;

import com.aliyuncs.transform.UnmarshallerContext;

/**
 * Populates an {@link InnerListCloudConnectNetworksResponse} from the flat
 * key/value pairs exposed by an {@link UnmarshallerContext}.
 */
public class InnerListCloudConnectNetworksResponseUnmarshaller {

	public static InnerListCloudConnectNetworksResponse unmarshall(InnerListCloudConnectNetworksResponse innerListCloudConnectNetworksResponse, UnmarshallerContext context) {
		// Common key root shared by every field of this response.
		final String root = "InnerListCloudConnectNetworksResponse";

		// Top-level scalar fields.
		innerListCloudConnectNetworksResponse.setRequestId(context.stringValue(root + ".RequestId"));
		innerListCloudConnectNetworksResponse.setTotal(context.integerValue(root + ".Total"));
		innerListCloudConnectNetworksResponse.setPageNumber(context.integerValue(root + ".PageNumber"));
		innerListCloudConnectNetworksResponse.setPageSize(context.integerValue(root + ".PageSize"));
		innerListCloudConnectNetworksResponse.setAliUid(context.longValue(root + ".AliUid"));
		innerListCloudConnectNetworksResponse.setBid(context.stringValue(root + ".Bid"));

		// Repeated CloudConnectNetwork entries, addressed by indexed keys.
		List<CloudConnectNetwork> networks = new ArrayList<CloudConnectNetwork>();
		for (int i = 0; i < context.lengthValue(root + ".CloudConnectNetworks.Length"); i++) {
			// Key prefix for the i-th entry, e.g. "...CloudConnectNetworks[0].".
			final String item = root + ".CloudConnectNetworks[" + i + "].";

			CloudConnectNetwork network = new CloudConnectNetwork();
			network.setInstanceId(context.stringValue(item + "InstanceId"));
			network.setName(context.stringValue(item + "Name"));
			network.setAssociatedCloudBoxCount(context.integerValue(item + "AssociatedCloudBoxCount"));
			network.setAvailableCloudBoxCount(context.integerValue(item + "AvailableCloudBoxCount"));
			network.setAssociatedCenId(context.stringValue(item + "AssociatedCenId"));
			network.setAssociatedCenOwnerId(context.longValue(item + "AssociatedCenOwnerId"));
			network.setDescription(context.stringValue(item + "Description"));
			network.setCreateTime(context.longValue(item + "CreateTime"));
			network.setIsDefault(context.booleanValue(item + "IsDefault"));
			networks.add(network);
		}
		innerListCloudConnectNetworksResponse.setCloudConnectNetworks(networks);

		return innerListCloudConnectNetworksResponse;
	}
}
chcbaram/Teensy41
sdk_fw/hw/hw_def.h
<reponame>chcbaram/Teensy41
/*
 * hw_def.h
 *
 * Board-level hardware configuration: RTOS task priorities/stack sizes,
 * feature enable switches (_USE_HW_*), per-peripheral channel counts and
 * memory-map constants for the Teensy 4.1 firmware.
 *
 *  Created on: 2020. 3. 11.
 *      Author: Baram
 */

#ifndef SRC_HW_HW_DEF_H_
#define SRC_HW_HW_DEF_H_

#include "def.h"
#include "bsp.h"

/* RTOS stack sizes are expressed in words (bytes / 4). */
#define _HW_DEF_RTOS_MEM_SIZE(x) ((x)/4)

/* Task priorities; I2S runs above normal to keep audio fed. */
#define _HW_DEF_RTOS_THREAD_PRI_MAIN osPriorityNormal
#define _HW_DEF_RTOS_THREAD_PRI_CMDIF osPriorityNormal
#define _HW_DEF_RTOS_THREAD_PRI_LCD osPriorityNormal
#define _HW_DEF_RTOS_THREAD_PRI_I2S osPriorityAboveNormal
#define _HW_DEF_RTOS_THREAD_PRI_AUDIO osPriorityNormal
#define _HW_DEF_RTOS_THREAD_PRI_UPDATE osPriorityNormal

/* Task stack sizes (argument is in bytes, macro converts to words). */
#define _HW_DEF_RTOS_THREAD_MEM_MAIN _HW_DEF_RTOS_MEM_SIZE(12*1024)
#define _HW_DEF_RTOS_THREAD_MEM_CMDIF _HW_DEF_RTOS_MEM_SIZE( 6*1024)
#define _HW_DEF_RTOS_THREAD_MEM_LCD _HW_DEF_RTOS_MEM_SIZE( 1*1024)
#define _HW_DEF_RTOS_THREAD_MEM_I2S _HW_DEF_RTOS_MEM_SIZE( 1*1024)
#define _HW_DEF_RTOS_THREAD_MEM_AUDIO _HW_DEF_RTOS_MEM_SIZE( 1*1024)
#define _HW_DEF_RTOS_THREAD_MEM_UPDATE _HW_DEF_RTOS_MEM_SIZE( 1*1024)

/* Feature enable switches — defining _USE_HW_X compiles in driver X. */
#define _USE_HW_MICROS
#define _USE_HW_VCP
#define _USE_HW_CLOCKS
#define _USE_HW_JOYPAD
#define _USE_HW_RTOS
#define _USE_HW_I2S
#define _USE_HW_FLASH
#define _USE_HW_AUDIO
#define _USE_HW_PXP
#define _USE_HW_BATTERY
#define _USE_HW_RESET
#define _USE_HW_SPEAKER

/* Per-peripheral channel counts and buffer sizes. */
#define _USE_HW_LED
#define HW_LED_MAX_CH 1

#define _USE_HW_UART
#define HW_UART_MAX_CH 1

#define _USE_HW_SWTIMER
#define HW_SWTIMER_MAX_CH 8

#define _USE_HW_CMDIF
#define HW_CMDIF_LIST_MAX 32
#define HW_CMDIF_CMD_STR_MAX 16
#define HW_CMDIF_CMD_BUF_LENGTH 128

#define _USE_HW_GPIO
#define HW_GPIO_MAX_CH 4

/* External PSRAM window (16 MiB mapped at 0x70000000). */
#define _USE_HW_PSRAM
#define HW_PSRAM_ADDR 0x70000000
#define HW_PSRAM_LENGTH (16*1024*1024)

#define _USE_HW_FILES
#define _USE_HW_FATFS

/* SD card: detect pin index 0, no power-enable pin (-1 = unused). */
#define _USE_HW_SD
#define HW_SD_PIN_DETECTED 0
#define HW_SD_PIN_PWREN -1

#define _USE_HW_BUTTON
#define HW_BUTTON_MAX_CH 12

/* LCD panel: ILI9341, landscape 320x240. */
#define _USE_HW_LCD
#define _USE_HW_ILI9341
#define HW_LCD_WIDTH 320
#define HW_LCD_HEIGHT 240

#define _USE_HW_ADC
#define HW_ADC_MAX_CH 3

#define _USE_HW_PWM
#define HW_PWM_MAX_CH 1

/* Audio mixer: 8 channels, buffer length in samples. */
#define _USE_HW_MIXER
#define HW_MIXER_MAX_CH 8
#define HW_MIXER_MAX_BUF_LEN (16*4*8)

/* General-purpose memory region inside PSRAM (8 MiB at 0x70800000). */
#define _USE_HW_MEM
#define HW_MEM_ADDR 0x70800000
#define HW_MEM_LENGTH (8*1024*1024)

#define _USE_HW_CMD
#define HW_CMD_MAX_DATA_LENGTH 2048

/* Firmware image layout: tag header at FLASH_ADDR_TAG, image right after;
   4 MiB window ending at FLASH_ADDR_END. */
#define FLASH_ADDR_TAG 0x70400000
#define FLASH_ADDR_FW 0x70400400

#define FLASH_ADDR_START 0x70400000
#define FLASH_ADDR_END (FLASH_ADDR_START + 4*1024*1024)

/* Button pin indexes (into the button driver's channel table). */
#define _PIN_BUTTON_A 0
#define _PIN_BUTTON_B 1
#define _PIN_BUTTON_X 2
#define _PIN_BUTTON_Y 3
#define _PIN_BUTTON_START 4
#define _PIN_BUTTON_SELECT 5
#define _PIN_BUTTON_HOME 6
#define _PIN_BUTTON_MENU 7
#define _PIN_BUTTON_LEFT 8
#define _PIN_BUTTON_RIGHT 9
#define _PIN_BUTTON_UP 10
#define _PIN_BUTTON_DOWN 11

/* GPIO channel assignments. */
#define _PIN_GPIO_LCD_BKT_EN 1
#define _PIN_GPIO_LCD_RST 2
#define _PIN_GPIO_BAT_CHG 3
#define _PIN_GPIO_SPK_EN 4

#endif /* SRC_HW_HW_DEF_H_ */
astri-isns/skygear-SDK-Android
skygear/src/androidTest/java/io/skygear/skygear/UserSaveRequestUnitTest.java
package io.skygear.skygear;

import android.support.test.runner.AndroidJUnit4;

import org.junit.Test;
import org.junit.runner.RunWith;

import java.util.Arrays;
import java.util.List;
import java.util.Map;

import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertTrue;

/**
 * Instrumented tests for {@link UserSaveRequest}: verifies the request action
 * and the payload fields derived from a {@link User}.
 */
@RunWith(AndroidJUnit4.class)
public class UserSaveRequestUnitTest {
    @Test
    public void testUserSaveRequestCreationFlow() throws Exception {
        // User with two roles attached before building the request.
        User user = new User("123", "<PASSWORD>", "user123", "<EMAIL>");
        user.addRole(new Role("Citizen"));
        user.addRole(new Role("Programmer"));

        UserSaveRequest request = new UserSaveRequest(user);

        // The save request must target the "user:update" server action.
        assertEquals("user:update", request.action);

        Map<String, Object> data = request.data;
        assertEquals("123", data.get("_id"));
        assertEquals("<EMAIL>", data.get("email"));

        // Roles serialize to a String[]; membership (not order) is asserted.
        List<String> roleNameList = Arrays.asList((String[]) data.get("roles"));
        assertTrue(roleNameList.contains("Citizen"));
        assertTrue(roleNameList.contains("Programmer"));
    }
}
iguerra94/WeatherNow
app/src/main/java/com/iguerra94/weathernow/views/signup_screens/UserRegisteredActivity.java
package com.iguerra94.weathernow.views.signup_screens; import android.content.Context; import android.content.Intent; import android.os.Bundle; import android.view.MenuItem; import android.view.View; import android.widget.Button; import android.widget.TextView; import androidx.annotation.NonNull; import androidx.appcompat.app.AppCompatActivity; import com.iguerra94.weathernow.R; import com.iguerra94.weathernow.utils.LocaleHelper; import com.iguerra94.weathernow.utils.sharedPrefs.SharedPrefsKeys; import com.iguerra94.weathernow.utils.sharedPrefs.SharedPrefsManager; import com.iguerra94.weathernow.utils.sharedPrefs.SharedPrefsValues; import com.iguerra94.weathernow.views.login_screens.LoginActivity; import com.iguerra94.weathernow.views.splash_screen.SplashActivity; import com.iguerra94.weathernow.views.toolbar.SimpleToolbar; public class UserRegisteredActivity extends AppCompatActivity implements View.OnClickListener { @Override protected void attachBaseContext(Context newBase) { super.attachBaseContext(LocaleHelper.onAttach(newBase)); } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_user_registered); setupToolbar(); String userFirstName = getIntent().getStringExtra("USER_FIRST_NAME"); TextView userRegisteredRSuccessTextView = findViewById(R.id.user_registered_success_text_view); String currentLocale = SharedPrefsManager.getInstance(this).readString(SharedPrefsKeys.APP_LANGUAGE_LOCALE); String message = (currentLocale.equals(SharedPrefsValues.APP_LANGUAGE_LOCALE.SPANISH)) ? "¡Exito! Bienvenido " + userFirstName + " a WeatherNow." : "¡Success! 
Welcome " + userFirstName + " to WeatherNow."; userRegisteredRSuccessTextView.setText(message); Button btnSigninAfterRegister = findViewById(R.id.btnSigninAfterRegister); btnSigninAfterRegister.setOnClickListener(this); } private void setupToolbar() { SimpleToolbar userRegisteredToolbar = findViewById(R.id.user_registered_toolbar); userRegisteredToolbar.setBackgroundColor(getResources().getColor(android.R.color.transparent)); userRegisteredToolbar.setTitleTextColor(getResources().getColor(R.color.colorBlack)); setSupportActionBar(userRegisteredToolbar); getSupportActionBar().setTitle(getResources().getString(R.string.action_back_splash)); getSupportActionBar().setDisplayHomeAsUpEnabled(true); } @Override public boolean onOptionsItemSelected(@NonNull MenuItem item) { if (item.getItemId() == android.R.id.home) { Intent intent = new Intent(this, SplashActivity.class); intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NEW_TASK); startActivity(intent); finish(); return true; } return super.onOptionsItemSelected(item); } @Override public void onClick(View view) { if (view.getId() == R.id.btnSigninAfterRegister) { Intent intent = new Intent(this, LoginActivity.class); intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NEW_TASK); startActivity(intent); finish(); } } @Override public void onBackPressed() { super.onBackPressed(); Intent intent = new Intent(this, SplashActivity.class); intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NEW_TASK); startActivity(intent); finish(); } }
yingqi0607/tp
src/test/java/seedu/tr4cker/logic/commands/DeleteExpiredCommandTest.java
<filename>src/test/java/seedu/tr4cker/logic/commands/DeleteExpiredCommandTest.java package seedu.tr4cker.logic.commands; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; import static seedu.tr4cker.logic.commands.CommandTestUtil.assertCommandFailure; import static seedu.tr4cker.logic.commands.CommandTestUtil.assertCommandSuccess; import static seedu.tr4cker.testutil.TypicalIndexes.INDEX_FIRST_TASK; import static seedu.tr4cker.testutil.TypicalIndexes.INDEX_SECOND_TASK; import static seedu.tr4cker.testutil.TypicalTasks.getTypicalTr4cker; import org.junit.jupiter.api.Test; import seedu.tr4cker.commons.core.Messages; import seedu.tr4cker.commons.core.index.Index; import seedu.tr4cker.model.Model; import seedu.tr4cker.model.ModelManager; import seedu.tr4cker.model.UserPrefs; import seedu.tr4cker.model.task.Task; /** * Contains integration tests (interaction with the Model, UndoCommand and RedoCommand) and unit tests for * {@code DeleteExpiredCommand}. 
*/ public class DeleteExpiredCommandTest { private final Model model = new ModelManager(getTypicalTr4cker(), new UserPrefs()); @Test public void execute_validIndexExpiredUList_success() { Task taskToDelete = model.getFilteredExpiredTaskList().get(INDEX_FIRST_TASK.getZeroBased()); DeleteExpiredCommand deleteExpiredCommand = new DeleteExpiredCommand(INDEX_FIRST_TASK); String expectedMessage = String.format(DeleteExpiredCommand.MESSAGE_DELETE_TASK_SUCCESS, taskToDelete); ModelManager expectedModel = new ModelManager(model.getTr4cker(), new UserPrefs()); expectedModel.deleteTask(taskToDelete); assertCommandSuccess(deleteExpiredCommand, model, expectedMessage, expectedModel); } @Test public void execute_invalidIndexExpiredList_throwsCommandException() { Index outOfBoundIndex = Index.fromOneBased(model.getFilteredExpiredTaskList().size() + 1); DeleteExpiredCommand deleteExpiredCommand = new DeleteExpiredCommand(outOfBoundIndex); assertCommandFailure(deleteExpiredCommand, model, Messages.MESSAGE_INVALID_TASK_DISPLAYED_INDEX); } @Test public void equals() { DeleteExpiredCommand deleteFirstCommand = new DeleteExpiredCommand(INDEX_FIRST_TASK); DeleteExpiredCommand deleteSecondCommand = new DeleteExpiredCommand(INDEX_SECOND_TASK); // same object -> returns true assertTrue(deleteFirstCommand.equals(deleteFirstCommand)); // same values -> returns true DeleteCommand deleteFirstCommandCopy = new DeleteCommand(INDEX_FIRST_TASK); assertTrue(deleteFirstCommand.equals(deleteFirstCommandCopy)); // different types -> returns false assertFalse(deleteFirstCommand.equals(1)); // null -> returns false assertFalse(deleteFirstCommand.equals(null)); // different task -> returns false assertFalse(deleteFirstCommand.equals(deleteSecondCommand)); } }
syazwanirahimin/dotcom
node_modules/@react-icons/all-files/bs/BsFileEarmarkMinus.js
// THIS FILE IS AUTO GENERATED
var GenIcon = require('../lib').GenIcon

// SVG data for Bootstrap's "file-earmark-minus" glyph (16x16 viewBox).
var iconData = {
  "tag": "svg",
  "attr": { "viewBox": "0 0 16 16", "fill": "currentColor" },
  "child": [
    { "tag": "path", "attr": { "d": "M9 1H4a2 2 0 00-2 2v10a2 2 0 002 2h5v-1H4a1 1 0 01-1-1V3a1 1 0 011-1h5v2.5A1.5 1.5 0 0010.5 6H13v2h1V6L9 1z" } },
    { "tag": "path", "attr": { "fillRule": "evenodd", "d": "M11 11.5a.5.5 0 01.5-.5h4a.5.5 0 010 1h-4a.5.5 0 01-.5-.5z", "clipRule": "evenodd" } }
  ]
}

// React component: renders the icon via the shared GenIcon factory.
module.exports.BsFileEarmarkMinus = function BsFileEarmarkMinus (props) {
  return GenIcon(iconData)(props)
}
codacy-badger/Inci_e5b
InciDashboard_e5b/src/test/java/uo/asw/tests/cucumber/steps/AniadirComentarioIncidenciaSteps.java
package uo.asw.tests.cucumber.steps;

import static org.junit.Assert.assertEquals;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.SpringApplicationContextLoader;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.ContextConfiguration;

import cucumber.api.java.en.Given;
import cucumber.api.java.en.Then;
import cucumber.api.java.en.When;
import uo.asw.InciDashboardE5bApplication;
import uo.asw.dbManagement.DBManagementFacade;
import uo.asw.dbManagement.model.Incidence;

/**
 * Cucumber step definitions for the "add a comment to an incidence" scenario.
 * State (operator identifier and the incidence under test) is shared between
 * steps through instance fields, so steps must run in scenario order.
 */
@ContextConfiguration(classes=InciDashboardE5bApplication.class, loader=SpringApplicationContextLoader.class)
@SpringBootTest
public class AniadirComentarioIncidenciaSteps {

	@Autowired
	private DBManagementFacade dbManagement;

	// Operator identifier captured by the first Given step.
	private String operatorIdentifier;
	// Incidence under test; loaded and later reloaded from the database.
	private Incidence incidence;

	// Given: the operator with identifier "...".
	@Given("^el operario con el identificador \"([^\"]*)\"$")
	public void el_operario_con_el_identificador(String operatorIdentifier) throws Throwable {
		this.operatorIdentifier = operatorIdentifier;
	}

	// Given: the first of the operator's assigned incidences.
	@Given("^la primera de sus incidencias asignadas$")
	public void la_primera_de_sus_incidencias_asignadas() throws Throwable {
		incidence = dbManagement.getOperatorIncidences(operatorIdentifier).get(0);
	}

	// When: the operator adds the comment "..." to the incidence (in memory only).
	@When("^el operario añade a la incidencia el comentario \"([^\"]*)\"$")
	public void el_operario_añade_a_la_incidencia_el_comentario(String comentario) throws Throwable {
		incidence.setOperatorComments(comentario);
	}

	// When: the incidence is persisted to the database.
	@When("^la incidencia se actualiza en base de datos$")
	public void la_incidencia_se_actualiza_en_base_de_datos() throws Throwable {
		dbManagement.updateIncidence(incidence);
	}

	// When: the incidence is read back from the database (round-trip check).
	@When("^la incidencia se recupera de la base de datos$")
	public void la_incidencia_se_recupera_de_la_base_de_datos() throws Throwable {
		incidence = dbManagement.getOperatorIncidences(operatorIdentifier).get(0);
	}

	// Then: the reloaded incidence carries the expected comment.
	@Then("^la incidencia tiene el comentario \"([^\"]*)\"$")
	public void la_incidencia_tiene_el_comentario(String comentario) throws Throwable {
		assertEquals(comentario, incidence.getOperatorComments());
	}
}
ajchdev/outside-event
node_modules/@wordpress/block-editor/src/components/color-palette/test/control.js
/**
 * External dependencies
 */
import { create, act } from 'react-test-renderer';
import { noop } from 'lodash';

/**
 * Internal dependencies
 */
import ColorPaletteControl from '../control';

describe( 'ColorPaletteControl', () => {
	it( 'matches the snapshot', async () => {
		let root;

		// Render inside act() so pending effects settle before snapshotting.
		await act( async () => {
			root = create(
				<ColorPaletteControl
					label="Test Color"
					value="#f00"
					colors={ [ { color: '#f00', name: 'red' } ] }
					disableCustomColors={ false }
					onChange={ noop }
				/>
			);
		} );

		// Guard against unintended markup changes in the control's output.
		expect( root.toJSON() ).toMatchSnapshot();
	} );
} );
MrAwesomeRocks/caelus-cml
src/libraries/fvOptions/sources/derived/explicitPorositySource/explicitPorositySource.cpp
/*---------------------------------------------------------------------------*\
Copyright (C) 2012-2015 OpenFOAM Foundation
-------------------------------------------------------------------------------
License
    This file is part of Caelus.

    Caelus is free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by the
    Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    Caelus is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
    FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
    for more details.

    You should have received a copy of the GNU General Public License
    along with Caelus.  If not, see <http://www.gnu.org/licenses/>.

\*---------------------------------------------------------------------------*/

#include "explicitPorositySource.hpp"
#include "fvMesh.hpp"
#include "fvMatrices.hpp"
#include "porosityModel.hpp"
#include "addToRunTimeSelectionTable.hpp"

// * * * * * * * * * * * * * * Static Data Members * * * * * * * * * * * * * //

namespace CML
{
namespace fv
{
    // Registers this source in the run-time selection table under the
    // "explicitPorositySource" fvOption type.
    defineTypeNameAndDebug(explicitPorositySource, 0);
    addToRunTimeSelectionTable
    (
        option,
        explicitPorositySource,
        dictionary
    );
}
}

// * * * * * * * * * * * * * * * * Constructors  * * * * * * * * * * * * * * //

// Constructs the source from its fvOptions dictionary entry.  The porosity
// region must be selected as a cellZone; any other selection mode is a
// fatal input error.  The actual resistance law is delegated to a run-time
// selected porosityModel built from this source's coefficients.
CML::fv::explicitPorositySource::explicitPorositySource
(
    const word& name,
    const word& modelType,
    const dictionary& dict,
    const fvMesh& mesh
)
:
    option(name, modelType, dict, mesh),
    porosityPtr_(nullptr)
{
    // read() resolves field names and applies the base option settings.
    read(dict);

    if (selectionMode_ != smCellZone)
    {
        FatalErrorInFunction
            << "The porosity region must be specified as a cellZone. Current "
            << "selection mode is " << selectionModeTypeNames_[selectionMode_]
            << exit(FatalError);
    }

    porosityPtr_.reset
    (
        porosityModel::New
        (
            name_,
            mesh_,
            coeffs_,
            cellSetName_
        ).ptr()
    );
}

// * * * * * * * * * * * * * * * Member Functions  * * * * * * * * * * * * * //

// Incompressible momentum equation: build the porosity resistance in a
// scratch matrix and subtract it, so the porosity acts as a momentum sink.
// fieldI is unused — the same resistance applies to every registered field.
void CML::fv::explicitPorositySource::addSup
(
    fvMatrix<vector>& eqn,
    const label fieldI
)
{
    fvMatrix<vector> porosityEqn(eqn.psi(), eqn.dimensions());
    porosityPtr_->addResistance(porosityEqn);
    eqn -= porosityEqn;
}

// Compressible variant.  rho is not referenced here — NOTE(review):
// presumably the density dependence lives inside addResistance's
// coefficients; confirm against the porosityModel implementation.
void CML::fv::explicitPorositySource::addSup
(
    const volScalarField& rho,
    fvMatrix<vector>& eqn,
    const label fieldI
)
{
    fvMatrix<vector> porosityEqn(eqn.psi(), eqn.dimensions());
    porosityPtr_->addResistance(porosityEqn);
    eqn -= porosityEqn;
}

// Multiphase variant: the resistance is weighted by the local phase
// fraction alpha before being applied.
void CML::fv::explicitPorositySource::addSup
(
    const volScalarField& alpha,
    const volScalarField& rho,
    fvMatrix<vector>& eqn,
    const label fieldI
)
{
    fvMatrix<vector> porosityEqn(eqn.psi(), eqn.dimensions());
    porosityPtr_->addResistance(porosityEqn);
    eqn -= alpha*porosityEqn;
}

// Writes the source name and its defining dictionary (restart/inspection).
void CML::fv::explicitPorositySource::writeData(Ostream& os) const
{
    os << indent << name_ << endl;

    dict_.write(os);
}

// Reads the velocity field name(s) this source applies to.  Accepts either
// "UNames" (a list) or "UName" (a single word); defaults to "U" when
// neither is present.
bool CML::fv::explicitPorositySource::read(const dictionary& dict)
{
    if (option::read(dict))
    {
        if (coeffs_.found("UNames"))
        {
            coeffs_.lookup("UNames") >> fieldNames_;
        }
        else if (coeffs_.found("UName"))
        {
            word UName(coeffs_.lookup("UName"));
            fieldNames_ = wordList(1, UName);
        }
        else
        {
            fieldNames_ = wordList(1, "U");
        }

        // Track per-field application state for the base option machinery.
        applied_.setSize(fieldNames_.size(), false);

        return true;
    }
    else
    {
        return false;
    }
}

// ************************************************************************* //
jmacwhitesource/cloud-pipeline
api/src/main/java/com/epam/pipeline/manager/metadata/MetadataEntityManager.java
<filename>api/src/main/java/com/epam/pipeline/manager/metadata/MetadataEntityManager.java<gh_stars>0 /* * Copyright 2017-2019 EPAM Systems, Inc. (https://www.epam.com/) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.epam.pipeline.manager.metadata; import com.epam.pipeline.common.MessageConstants; import com.epam.pipeline.common.MessageHelper; import com.epam.pipeline.controller.PagedResult; import com.epam.pipeline.controller.vo.metadata.MetadataEntityVO; import com.epam.pipeline.dao.metadata.MetadataClassDao; import com.epam.pipeline.dao.metadata.MetadataEntityDao; import com.epam.pipeline.entity.AbstractSecuredEntity; import com.epam.pipeline.entity.metadata.FireCloudClass; import com.epam.pipeline.entity.metadata.MetadataClass; import com.epam.pipeline.entity.metadata.MetadataClassDescription; import com.epam.pipeline.entity.metadata.MetadataEntity; import com.epam.pipeline.entity.metadata.MetadataField; import com.epam.pipeline.entity.metadata.MetadataFilter; import com.epam.pipeline.entity.metadata.PipeConfValue; import com.epam.pipeline.entity.security.acl.AclClass; import com.epam.pipeline.manager.datastorage.DataStorageManager; import com.epam.pipeline.manager.metadata.parser.EntityTypeField; import com.epam.pipeline.manager.metadata.parser.MetadataEntityConverter; import com.epam.pipeline.manager.metadata.parser.MetadataParsingResult; import com.epam.pipeline.manager.pipeline.FolderManager; import com.epam.pipeline.manager.security.SecuredEntityManager; 
import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang3.math.NumberUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; import org.springframework.util.Assert; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.UUID; import java.util.function.Function; import java.util.stream.Collectors; @Service public class MetadataEntityManager implements SecuredEntityManager { private static final Logger LOGGER = LoggerFactory.getLogger(MetadataEntityManager.class); @Autowired private MetadataEntityDao metadataEntityDao; @Autowired private MetadataClassDao metadataClassDao; @Autowired private MessageHelper messageHelper; @Autowired private FolderManager folderManager; @Autowired private DataStorageManager storageManager; public Map<String, Integer> loadRootMetadataEntities() { Map<String, Integer> countEntities = new HashMap<>(); List<MetadataEntity> entities = metadataEntityDao.loadRootMetadataEntities(); entities.forEach(e -> countEntities.merge(e.getClassEntity().getName(), 1, Integer::sum)); return countEntities; } @Transactional(propagation = Propagation.REQUIRED) public MetadataClass createMetadataClass(final String className) { if (StringUtils.isEmpty(className)) { throw new IllegalArgumentException("User entity class name must be not empty."); } MetadataClass metadataClass = new MetadataClass(); metadataClass.setName(className); metadataClassDao.createMetadataClass(metadataClass); return metadataClass; } public List<MetadataClass> loadAllMetadataClasses() 
{ return metadataClassDao.loadAllMetadataClasses(); } public MetadataClass loadClass(String name) { MetadataClass metadataClass = metadataClassDao.loadMetadataClass(name); Assert.notNull(metadataClass, messageHelper.getMessage(MessageConstants.ERROR_METADATA_ENTITY_CLASS_NOT_FOUND, name)); return metadataClass; } public MetadataClass loadClass(Long id) { MetadataClass metadataClass = metadataClassDao.loadMetadataClass(id); Assert.notNull(metadataClass, messageHelper.getMessage(MessageConstants.ERROR_METADATA_ENTITY_CLASS_NOT_FOUND, id)); return metadataClass; } @Transactional(propagation = Propagation.REQUIRED) public MetadataClass deleteMetadataClass(Long id) { Assert.notNull(id, messageHelper.getMessage(MessageConstants.ERROR_INVALID_METADATA_ENTITY_CLASS_ID, id)); MetadataClass metadataClass = loadClass(id); metadataClassDao.deleteMetadataClass(id); return metadataClass; } @Transactional(propagation = Propagation.REQUIRED) public MetadataClass updateExternalClassName(Long id, FireCloudClass externalClassName) { Assert.notNull(id, messageHelper.getMessage(MessageConstants.ERROR_INVALID_METADATA_ENTITY_CLASS_ID, id)); MetadataClass metadataClass = loadClass(id); Assert.notNull(metadataClass, messageHelper.getMessage(MessageConstants.ERROR_METADATA_ENTITY_CLASS_NOT_FOUND, id)); metadataClass.setFireCloudClassName(externalClassName); metadataClassDao.updateMetadataClass(metadataClass); return metadataClass; } @Transactional(propagation = Propagation.REQUIRED) public MetadataEntity updateMetadataEntity(MetadataEntityVO metadataEntityVO) { Assert.notNull(metadataEntityVO.getParentId(), messageHelper.getMessage(MessageConstants.ERROR_PARENT_REQUIRED)); MetadataEntity metadataEntity = metadataEntityVO.convertToMetadataEntity(); folderManager.load(metadataEntity.getParent().getId()); Long entityId = metadataEntity.getId(); if (entityId != null) { MetadataEntity existingMetadataEntity = existingMetadataItem(entityId, false); if (existingMetadataEntity != null) { 
metadataEntityDao.updateMetadataEntity(metadataEntity); return metadataEntity; } LOGGER.debug("Metadata entity with id %d was not found. A new one will be created.", entityId); } String externalId = metadataEntity.getExternalId(); if (StringUtils.isNotBlank(externalId)) { Optional<MetadataEntity> existingMetadataEntity = metadataEntityDao.loadByExternalId( metadataEntity.getParent().getId(), metadataEntity.getClassEntity().getName(), externalId); Assert.isTrue(!existingMetadataEntity.isPresent(), messageHelper.getMessage(MessageConstants.ERROR_METADATA_ENTITY_ALREADY_EXIST, externalId)); } else { metadataEntity.setExternalId(UUID.randomUUID().toString()); } metadataEntityDao.createMetadataEntity(metadataEntity); return metadataEntity; } @Override public MetadataEntity load(Long id) { Assert.notNull(id, messageHelper.getMessage(MessageConstants.ERROR_INVALID_METADATA_ENTITY_ID, id)); return metadataEntityDao.loadMetadataEntityById(id); } @Override public Integer loadTotalCount() { throw new UnsupportedOperationException(); } @Override public Collection<? 
extends AbstractSecuredEntity> loadAllWithParents(Integer page, Integer pageSize) { throw new UnsupportedOperationException(); } @Override public MetadataEntity loadWithParents(final Long id) { return metadataEntityDao.loadMetadataEntityWithParents(id); } public List<MetadataEntity> loadMetadataEntityByClassNameAndFolderId(Long id, String className) { Assert.notNull(className, messageHelper.getMessage(MessageConstants.ERROR_METADATA_ENTITY_CLASS_NOT_FOUND)); return metadataEntityDao.loadMetadataEntityByClassNameAndFolderId(id, className); } @Transactional(propagation = Propagation.REQUIRED) public MetadataEntity updateMetadataItemKey(MetadataEntityVO metadataEntityVO) { MetadataEntity metadataEntity = metadataEntityVO.convertToMetadataEntity(); Long entityId = metadataEntity.getId(); MetadataEntity dbEntity = load(entityId); Assert.notNull(dbEntity, messageHelper.getMessage(MessageConstants.ERROR_METADATA_ENTITY_NOT_FOUND, dbEntity)); Assert.notNull(metadataEntity.getData(), messageHelper.getMessage(MessageConstants.ERROR_METADATA_UPDATE_KEY_NOT_FOUND, 0)); Assert.isTrue(metadataEntity.getData().size() == 1, messageHelper.getMessage(MessageConstants.ERROR_METADATA_UPDATE_KEY_NOT_FOUND, metadataEntity.getData().size())); Map.Entry<String, PipeConfValue> metadataEntry = metadataEntity.getData().entrySet().iterator().next(); metadataEntityDao.updateMetadataEntityDataKey(metadataEntity, metadataEntry.getKey(), metadataEntry.getValue().getValue(), metadataEntry.getValue().getType()); return metadataEntityDao.loadMetadataEntityById(metadataEntity.getId()); } @Transactional(propagation = Propagation.REQUIRED) public void insertCopiesOfExistentMetadataEntities(Long existentParentId, Long parentIdToAdd) { metadataEntityDao.insertCopiesOfExistentMetadataEntities(existentParentId, parentIdToAdd); } @Transactional(propagation = Propagation.REQUIRED) public MetadataEntity deleteMetadataEntity(Long id) { Assert.notNull(id, 
messageHelper.getMessage(MessageConstants.ERROR_INVALID_METADATA_ENTITY_ID, id)); MetadataEntity metadataEntity = metadataEntityDao.loadMetadataEntityById(id); Assert.notNull(metadataEntity, messageHelper .getMessage(MessageConstants.ERROR_METADATA_ENTITY_NOT_FOUND, id)); metadataEntityDao.deleteMetadataEntity(metadataEntity.getId()); return metadataEntity; } @Transactional(propagation = Propagation.REQUIRED) public MetadataEntity deleteMetadataItemKey(Long id, String key) { MetadataEntity existingMetadataEntity = existingMetadataItem(id, true); existingMetadataEntity.getData().keySet().remove(key); metadataEntityDao.deleteMetadataItemKey(existingMetadataEntity.getId(), key); return existingMetadataEntity; } @Transactional(propagation = Propagation.REQUIRED) public Set<Long> deleteMetadataEntities(Set<Long> entitiesIds) { if (CollectionUtils.isEmpty(entitiesIds)) { throw new IllegalArgumentException( messageHelper.getMessage(MessageConstants.ERROR_METADATA_ENTITIES_NOT_FOUND)); } metadataEntityDao.deleteMetadataEntities(entitiesIds); return entitiesIds; } /** * Deletes all metadata entities, present in project {@link com.epam.pipeline.entity.pipeline.Folder}. 
* Optionally supports deletion only of specified {@link MetadataClass} * @param projectId specifies {@link com.epam.pipeline.entity.pipeline.Folder} to delete metadata from * @param entityClassName optional name of {@link MetadataClass}, if it is specified only entities of * this class are deleted */ @Transactional(propagation = Propagation.REQUIRED) public void deleteMetadataEntitiesInProject(Long projectId, String entityClassName) { Objects.requireNonNull(projectId); if (StringUtils.isNotBlank(entityClassName)) { MetadataClass metadataClass = loadClass(entityClassName); metadataEntityDao.deleteMetadataClassFromProject(projectId, metadataClass.getId()); } else { metadataEntityDao.deleteMetadataFromFolder(projectId); } } /** * Deletes all {@link MetadataEntity} instances * from a {@link com.epam.pipeline.entity.pipeline.Folder} * specified by {@param folderId} * @param folderId to delete metadata entities */ @Transactional(propagation = Propagation.REQUIRED) public void deleteMetadataFromFolder(Long folderId) { metadataEntityDao.deleteMetadataFromFolder(folderId); } public MetadataEntity loadByExternalId(String id, String className, Long folderId) { Set<MetadataEntity> entities = getExistingEntities(Collections.singleton(id), folderId, className); Assert.isTrue(CollectionUtils.isNotEmpty(entities), messageHelper .getMessage(MessageConstants.ERROR_METADATA_ENTITY_NOT_FOUND, id)); return entities.iterator().next(); } public PagedResult<List<MetadataEntity>> filterMetadata(MetadataFilter filter) { Assert.notNull(filter.getFolderId(), messageHelper.getMessage(MessageConstants.ERROR_INVALID_METADATA_FILTER, "folderId", filter.getFolderId())); folderManager.load(filter.getFolderId()); Assert.notNull(filter.getMetadataClass(), messageHelper.getMessage(MessageConstants.ERROR_INVALID_METADATA_FILTER, "class", filter.getFolderId())); loadClass(filter.getMetadataClass()); Assert.isTrue(filter.getPage() > 0, messageHelper.getMessage(MessageConstants.ERROR_PAGE_INDEX)); 
Assert.isTrue(filter.getPageSize() > 0, messageHelper.getMessage(MessageConstants.ERROR_PAGE_SIZE)); List<MetadataEntity> result = metadataEntityDao.filterEntities(filter); List<PipeConfValue> paths = result.stream() .map(entry -> entry.getData().values()) .flatMap(Collection::stream) .filter(param -> param.getType() != null && param.getType().equals(EntityTypeField.PATH_TYPE)) .collect(Collectors.toList()); storageManager.analyzePaths(paths); return new PagedResult<>(result, metadataEntityDao.countEntities(filter)); } public List<MetadataField> getMetadataKeys(Long folderId, String className) { Assert.notNull(folderId, messageHelper.getMessage(MessageConstants.ERROR_INVALID_METADATA_FILTER, "folderId", folderId)); folderManager.load(folderId); Assert.notNull(className, messageHelper.getMessage(MessageConstants.ERROR_INVALID_METADATA_FILTER, "class", className)); MetadataClass metadataClass = loadClass(className); return metadataEntityDao.getMetadataKeys(folderId, metadataClass.getId()); } public Collection<MetadataClassDescription> getMetadataFields(Long folderId) { Assert.notNull(folderId, messageHelper.getMessage(MessageConstants.ERROR_INVALID_METADATA_FILTER, "folderId", folderId)); folderManager.load(folderId); return metadataEntityDao.getMetadataFields(folderId); } public Set<MetadataEntity> getExistingEntities(Set<String> externalIds, Long folderId, String className) { return metadataEntityDao.loadExisting(folderId, className, externalIds); } public Set<MetadataEntity> loadEntitiesByIds(Set<Long> ids) { if (CollectionUtils.isEmpty(ids)) { return Collections.emptySet(); } return metadataEntityDao.loadByIds(ids); } /** * Converts list of list of {@link MetadataEntity}s to map that represents entities data to upload to the FireCloud * workspace. 
* @param ids list of {@link MetadataEntity}s to be converted * @return entities data content represented with the following form: * key - file name to be uploaded * value - data content that ready for upload */ public Map<String, String> loadEntitiesData(Set<Long> ids) { Set<MetadataEntity> metadataEntities = loadEntitiesByIds(ids); Long folderId = getCommonFolderForEntities(metadataEntities); List<MetadataEntity> entities = loadReferencesForEntities(new ArrayList<>(ids), folderId); return MetadataEntityConverter.convert(entities); } @Transactional(propagation = Propagation.REQUIRED) public List<MetadataEntity> createAndUpdateEntities(Long parentId, MetadataParsingResult parsedData) { checkUploadIntegrity(parsedData.getReferences(), parentId); Map<String, MetadataEntity> existing = getExistingEntities( parsedData.getEntities().keySet(), parentId, parsedData.getMetadataClass().getName()) .stream() .collect(Collectors.toMap(MetadataEntity::getExternalId, Function.identity())); List<MetadataEntity> entitiesToUpdate = new ArrayList<>(); List<MetadataEntity> entitiesToCreate = new ArrayList<>(); parsedData.getEntities().values().forEach(e -> { if (existing.containsKey(e.getExternalId())) { MetadataEntity current = existing.get(e.getExternalId()); if (StringUtils.isNotBlank(e.getName())) { current.setName(e.getName()); } current.getData().putAll(e.getData()); entitiesToUpdate.add(current); } else { entitiesToCreate.add(e); } }); List<MetadataEntity> result = new ArrayList<>(entitiesToCreate.size() + entitiesToUpdate.size()); result.addAll(metadataEntityDao.batchInsert(entitiesToCreate)); result.addAll(metadataEntityDao.batchUpdate(entitiesToUpdate)); return result; } @Transactional(propagation = Propagation.REQUIRED) public void updateMetadataEntities(List<MetadataEntity> metadataEntities) { metadataEntityDao.batchUpdate(metadataEntities); } public List<MetadataEntity> loadReferencesForEntities(List<Long> entityIds, Long parentId) { return 
metadataEntityDao.loadAllReferences(entityIds, parentId); } /** * @param references Class name to external IDs map * @param folderId */ void checkUploadIntegrity(Map<String, Set<String>> references, Long folderId) { references.entrySet().forEach(ref -> { loadClass(ref.getKey()); Set<MetadataEntity> existing = getExistingEntities(ref.getValue(), folderId, ref.getKey()); Assert.isTrue(existing.size() == ref.getValue().size(), "Not all required references are present"); }); } private Long getCommonFolderForEntities(Set<MetadataEntity> metadataEntities) { if (CollectionUtils.isEmpty(metadataEntities)) { return null; } MetadataEntity metadataEntity = metadataEntities.stream().findFirst() .orElseThrow(() -> new IllegalArgumentException("Cannot determine folder for entities.")); Long folderId = metadataEntity.getParent().getId(); Assert.isTrue(metadataEntities.stream() .allMatch(entity -> Objects.equals(folderId, entity.getParent().getId())), messageHelper.getMessage(MessageConstants.ERROR_FOLDER_INVALID_ID)); return folderId; } private MetadataEntity existingMetadataItem(Long id, boolean checkExistence) { MetadataEntity entity = metadataEntityDao.loadMetadataEntityById(id); if (checkExistence) { Assert.notNull(entity, messageHelper .getMessage(MessageConstants.ERROR_METADATA_ENTITY_NOT_FOUND, id)); } return entity; } @Override public AbstractSecuredEntity loadByNameOrId(String identifier) { if (NumberUtils.isDigits(identifier)) { MetadataEntity metadataEntity = metadataEntityDao.loadMetadataEntityById(Long.parseLong(identifier)); if (metadataEntity != null) { return metadataEntity; } } //Search by name is not supported for metadata throw new UnsupportedOperationException(messageHelper .getMessage(MessageConstants.ERROR_UNSUPPORTED_OPERATION, "metadata entity")); } @Override public AbstractSecuredEntity changeOwner(Long id, String owner) { throw new UnsupportedOperationException("Can not perform operation with metadata entity."); } @Override public AclClass 
getSupportedClass() { return AclClass.METADATA_ENTITY; } }
javakf/RDF
src/main/java/com/yoya/rdf/log/LogManager.java
/*
 *  Copyright (c) 2016, baihw (<EMAIL>).
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and limitations under the License.
 *
 */
package com.yoya.rdf.log;

/**
 * Created by baihw on 16-3-4.
 *
 * Log manager: the single entry point for obtaining {@link ILog} instances
 * from the currently installed {@link ILogFactory}.
 */
public class LogManager{

	/**
	 * Configurable parameter name: the log factory implementation class.
	 */
	public static final String	CNF_KEY_LOGFACTORY	= "logFactory";

	/**
	 * Default log factory implementation class used when the user has not configured one.
	 */
	public static final String	DEF_LOGFACTORY		= "com.yoya.rdf.log.impl.SimpleLogFactory";

	// The log factory instance currently in use.
	private static ILogFactory	_factory;

	/**
	 * Initializes the current log factory to the framework's built-in implementation by default.
	 */
	static{
		// Reading the factory class name from framework configuration is currently disabled:
		// String factoryName = Rdf.me().getProperty( CNF_KEY_LOGFACTORY );
		// if( null == factoryName || 0 == factoryName.length() ){
		// factoryName = DEF_LOGFACTORY;
		// }
		String factoryName = DEF_LOGFACTORY;
		try{
			Class<?> lfCla = Class.forName( factoryName );
			Object lfObj = lfCla.newInstance();
			_factory = ( ILogFactory )lfObj;
		}catch( ClassNotFoundException | InstantiationException | IllegalAccessException e ){
			// Fail fast: without a working log factory, logging cannot proceed.
			throw new RuntimeException( e );
		}
	}

	/**
	 * Sets the log factory to use.
	 *
	 * @param factory the log factory
	 */
	public synchronized static void setLogFactory( ILogFactory factory ){
		_factory = factory;
	}

	/**
	 * Obtains a logger.
	 *
	 * @param category the log category (the class name is used)
	 * @return the logger
	 */
	public static ILog getLog( Class<?> category ){
		return _factory.getLog( category.getName() );
	}

	/**
	 * Obtains a logger.
	 *
	 * @param category the log category name
	 * @return the logger
	 */
	public static ILog getLog( String category ){
		return _factory.getLog( category );
	}

}
tonyastolfi/batteries
src/batteries/seq/loop_control.hpp
// Copyright 2021 <NAME>
//
#pragma once
#ifndef BATTERIES_SEQ_LOOP_CONTROL_HPP
#define BATTERIES_SEQ_LOOP_CONTROL_HPP

#include <type_traits>
#include <utility>

namespace batt {
namespace seq {

// Result of one iteration of a seq loop body: keep going or stop.
enum LoopControl {
    kContinue = 0,
    kBreak = 1,
};

// Invokes `fn(args...)` and normalizes the result to a LoopControl.
//
// If the callable's result is convertible to LoopControl, that value is
// returned as-is; otherwise the result (if any) is discarded and kContinue is
// returned.
//
// Replaces the previous pair of SFINAE-selected overloads with a single
// C++17 `if constexpr` dispatch: one entry point, clearer intent, and better
// compiler diagnostics, with identical behavior at every call site.
template <typename Fn, typename... Args>
LoopControl run_loop_fn(Fn&& fn, Args&&... args)
{
    if constexpr (std::is_convertible_v<std::invoke_result_t<Fn&&, Args&&...>, LoopControl>) {
        return std::forward<Fn>(fn)(std::forward<Args>(args)...);
    } else {
        std::forward<Fn>(fn)(std::forward<Args>(args)...);
        return kContinue;
    }
}

}  // namespace seq
}  // namespace batt

#endif  // BATTERIES_SEQ_LOOP_CONTROL_HPP
xmarcosx/edfi-notebook
src/v5.1/resources/swagger_client/models/tpdm_employment_event.py
# coding: utf-8

"""
    Ed-Fi Operational Data Store API

    The Ed-Fi ODS / API enables applications to read and write education data
    stored in an Ed-Fi ODS through a secure REST interface. Consumers of
    ODS / API information should sanitize all data for display and storage.

    OpenAPI spec version: 3
    Generated by: https://github.com/swagger-api/swagger-codegen.git
"""


import pprint
import re  # noqa: F401

import six  # noqa: F401

from swagger_client.configuration import Configuration


class TpdmEmploymentEvent(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """

    # Attribute name -> swagger type (as declared in the OpenAPI spec).
    swagger_types = {
        'id': 'str',
        'employment_event_type_descriptor': 'str',
        'open_staff_position_reference': 'EdFiOpenStaffPositionReference',
        'early_hire': 'bool',
        'hire_date': 'date',
        'internal_external_hire_descriptor': 'str',
        'mutual_consent': 'bool',
        'restricted_choice': 'bool',
        'etag': 'str'
    }

    # Attribute name -> JSON key in the API payload.
    attribute_map = {
        'id': 'id',
        'employment_event_type_descriptor': 'employmentEventTypeDescriptor',
        'open_staff_position_reference': 'openStaffPositionReference',
        'early_hire': 'earlyHire',
        'hire_date': 'hireDate',
        'internal_external_hire_descriptor': 'internalExternalHireDescriptor',
        'mutual_consent': 'mutualConsent',
        'restricted_choice': 'restrictedChoice',
        'etag': '_etag'
    }

    def __init__(self, id=None, employment_event_type_descriptor=None, open_staff_position_reference=None, early_hire=None, hire_date=None, internal_external_hire_descriptor=None, mutual_consent=None, restricted_choice=None, etag=None, _configuration=None):  # noqa: E501
        """TpdmEmploymentEvent - a model defined in Swagger"""  # noqa: E501
        self._configuration = _configuration if _configuration is not None else Configuration()

        # Create one backing field per declared attribute.
        for attr_name in self.swagger_types:
            setattr(self, '_' + attr_name, None)
        self.discriminator = None

        if id is not None:
            self.id = id
        # Required members always go through their setters so validation runs.
        self.employment_event_type_descriptor = employment_event_type_descriptor
        self.open_staff_position_reference = open_staff_position_reference
        # Optional members are assigned only when a value was supplied.
        optional_values = (
            ('early_hire', early_hire),
            ('hire_date', hire_date),
            ('internal_external_hire_descriptor', internal_external_hire_descriptor),
            ('mutual_consent', mutual_consent),
            ('restricted_choice', restricted_choice),
            ('etag', etag),
        )
        for attr_name, supplied in optional_values:
            if supplied is not None:
                setattr(self, attr_name, supplied)

    @property
    def id(self):
        """str: Unique resource identifier of this TpdmEmploymentEvent."""
        return self._id

    @id.setter
    def id(self, value):
        self._id = value

    @property
    def employment_event_type_descriptor(self):
        """str: The type of the employment event (e.g., transfer, new hire,
        title change). Required; at most 306 characters."""
        return self._employment_event_type_descriptor

    @employment_event_type_descriptor.setter
    def employment_event_type_descriptor(self, value):
        validate = self._configuration.client_side_validation
        if validate and value is None:
            raise ValueError("Invalid value for `employment_event_type_descriptor`, must not be `None`")  # noqa: E501
        if validate and value is not None and len(value) > 306:
            raise ValueError("Invalid value for `employment_event_type_descriptor`, length must be less than or equal to `306`")  # noqa: E501
        self._employment_event_type_descriptor = value

    @property
    def open_staff_position_reference(self):
        """EdFiOpenStaffPositionReference: Reference to the open staff
        position this event applies to. Required."""
        return self._open_staff_position_reference

    @open_staff_position_reference.setter
    def open_staff_position_reference(self, value):
        if self._configuration.client_side_validation and value is None:
            raise ValueError("Invalid value for `open_staff_position_reference`, must not be `None`")  # noqa: E501
        self._open_staff_position_reference = value

    @property
    def early_hire(self):
        """bool: Indicator of whether this was an early hire."""
        return self._early_hire

    @early_hire.setter
    def early_hire(self, value):
        self._early_hire = value

    @property
    def hire_date(self):
        """date: The month, day, and year on which an individual was hired
        for a position."""
        return self._hire_date

    @hire_date.setter
    def hire_date(self, value):
        self._hire_date = value

    @property
    def internal_external_hire_descriptor(self):
        """str: Indicates whether the hire was an internal or external
        person. At most 306 characters."""
        return self._internal_external_hire_descriptor

    @internal_external_hire_descriptor.setter
    def internal_external_hire_descriptor(self, value):
        if (self._configuration.client_side_validation
                and value is not None and len(value) > 306):
            raise ValueError("Invalid value for `internal_external_hire_descriptor`, length must be less than or equal to `306`")  # noqa: E501
        self._internal_external_hire_descriptor = value

    @property
    def mutual_consent(self):
        """bool: Indicator of whether this was a mutual consent hire."""
        return self._mutual_consent

    @mutual_consent.setter
    def mutual_consent(self, value):
        self._mutual_consent = value

    @property
    def restricted_choice(self):
        """bool: Indicator of whether this was a restricted choice hire."""
        return self._restricted_choice

    @restricted_choice.setter
    def restricted_choice(self, value):
        self._restricted_choice = value

    @property
    def etag(self):
        """str: A unique system-generated value that identifies the version
        of the resource."""
        return self._etag

    @etag.setter
    def etag(self, value):
        self._etag = value

    def to_dict(self):
        """Returns the model properties as a dict"""
        def convert(value):
            # Element-level conversion only, mirroring the swagger template:
            # nested containers are not recursed into.
            if isinstance(value, list):
                return [element.to_dict() if hasattr(element, "to_dict") else element
                        for element in value]
            if hasattr(value, "to_dict"):
                return value.to_dict()
            if isinstance(value, dict):
                return {key: (val.to_dict() if hasattr(val, "to_dict") else val)
                        for key, val in value.items()}
            return value

        result = {attr: convert(getattr(self, attr))
                  for attr in self.swagger_types}
        if issubclass(TpdmEmploymentEvent, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, TpdmEmploymentEvent):
            return False
        return self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, TpdmEmploymentEvent):
            return True
        return self.to_dict() != other.to_dict()
sarincr/JavaScript-for-Front-End-Development-
JS LIBRARIES/MarkoJS/node_modules/@marko/translator-default/dist/taglib/core/conditional/util.js
"use strict";

var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");

exports.__esModule = true;
exports.buildIfStatement = buildIfStatement;

var _compiler = require("@marko/compiler");
var _withPreviousLocation = _interopRequireDefault(require("../../../util/with-previous-location"));

/**
 * Translates an `<if(...)>` Marko tag into a babel IfStatement and hands that
 * statement to a following `<else>`/`<else-if>` sibling so the chain can be
 * stitched together later.
 */
function buildIfStatement(path, args) {
  // An <if> tag without arguments is an authoring error.
  if (!args || !args.length) {
    const tagName = path.get("name");
    throw tagName.buildCodeFrameError(
      `Invalid '<${tagName.node.value}>' tag, expected arguments like '<${tagName.node.value}(test)>'.`);
  }

  // Multiple arguments collapse into a comma (sequence) expression.
  const test =
    args.length === 1 ? args[0] : _compiler.types.sequenceExpression(args);
  const consequent = _compiler.types.blockStatement(path.node.body.body);
  const ifStatement = _compiler.types.ifStatement(test, consequent);

  // Provide the if statement to the next part of the if chain.
  const siblingPath = path.getNextSibling();
  if (siblingPath.isMarkoTag()) {
    const siblingName = siblingPath.get("name");
    if (
      siblingName.isStringLiteral({ value: "else" }) ||
      siblingName.isStringLiteral({ value: "else-if" })
    ) {
      siblingPath.node.ifStatement = ifStatement;
    }
  }

  return (0, _withPreviousLocation.default)(ifStatement, path.node);
}
//# sourceMappingURL=util.js.map
amvb/GUCEF
dependencies/agar/include/agar/gui/hbox.h
/* Public domain */

/*
 * AG_HBox: compatibility wrappers that express a horizontal AG_Box.
 * Every macro below simply forwards to the generic AG_Box API with
 * AG_BOX_HORIZ; no independent widget logic lives here.
 */
#ifndef _AGAR_WIDGET_HBOX_H_
#define _AGAR_WIDGET_HBOX_H_

#include <agar/gui/box.h>

#include <agar/gui/begin.h>

/* An AG_HBox is just an AG_Box; the struct adds no members of its own. */
typedef struct ag_hbox {
	struct ag_box box;
} AG_HBox;

/* Flag aliases for the underlying AG_Box flags. */
#define AG_HBOX_HOMOGENOUS AG_BOX_HOMOGENOUS
#define AG_HBOX_HFILL	AG_BOX_HFILL
#define AG_HBOX_VFILL	AG_BOX_VFILL
#define AG_HBOX_EXPAND (AG_BOX_HFILL|AG_BOX_VFILL)

/* Constructor / initializer / setter wrappers around the AG_Box API. */
#define AG_HBoxNew(p, fl) (AG_HBox *)AG_BoxNew((p), AG_BOX_HORIZ, (fl))
#define AG_HBoxInit(b, fl) AG_BoxInit((AG_Box *)(b), AG_BOX_HORIZ, (fl))
#define AG_HBoxSetHomogenous(b, fl) AG_BoxSetHomogenous((AG_Box *)(b), (fl))
#define AG_HBoxSetPadding(b, pad) AG_BoxSetPadding((AG_Box *)(b), (pad))
#define AG_HBoxSetSpacing(b, sp) AG_BoxSetSpacing((AG_Box *)(b), (sp))

#include <agar/gui/close.h>
#endif /* _AGAR_WIDGET_HBOX_H_ */
karlmattsmith/SocatLAS
JavaSource/gov/noaa/pmel/tmap/addxml/ReadESG.java
package gov.noaa.pmel.tmap.addxml;

import gov.noaa.pmel.tmap.jdom.LASDocument;

import java.io.UnsupportedEncodingException;
import java.util.Iterator;
import java.util.List;
import java.util.Vector;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.jdom.Document;
import org.jdom.Element;

import thredds.catalog.CollectionType;
import thredds.catalog.InvAccess;
import thredds.catalog.InvCatalog;
import thredds.catalog.InvCatalogFactory;
import thredds.catalog.InvDataset;
import thredds.catalog.ServiceType;

/**
 * Walks an ESG THREDDS catalog and prints each dataset name together with its
 * top-level child datasets. Most of the original LAS category / XML generation
 * logic is retained below as commented-out code.
 */
public class ReadESG {

    private static final Logger log = LoggerFactory.getLogger(ReadESG.class);

    /**
     * Reads the THREDDS catalog at {@code src}, validates it, and prints the
     * name of each dataset and of each dataset's top-level children.
     *
     * @param src location (URL or file path) of the THREDDS catalog XML
     */
    public void read(String src) {
        // addXML ax = new addXML();
        InvCatalogFactory factory = new InvCatalogFactory("default", false);
        InvCatalog catalog = (InvCatalog) factory.readXML(src);
        StringBuilder buff = new StringBuilder();
        int count = catalog.getDatasets().size();
        if (!catalog.check(buff, true)) {
            log.error("Invalid catalog <" + src + ">\n" + buff.toString());
        } else {
            for (int index = 0; index < count; index++ ) {
                // NOTE(review): the catalog is re-read from `src` on every
                // iteration — presumably to work on a fresh object graph per
                // dataset; confirm before simplifying.
                factory = new InvCatalogFactory("default", false);
                catalog = (InvCatalog) factory.readXML(src);
                InvDataset invDataset = catalog.getDatasets().get(index);
                System.out.println(invDataset.getName());
                // Output file name is derived from an MD5 of the dataset name.
                String file = "/home/rhs/NCAR/las_categories_";
                try {
                    file = file+JDOMUtils.MD5Encode(invDataset.getName())+".xml";
                } catch (UnsupportedEncodingException e) {
                    e.printStackTrace();
                }
                // Root element for the generated categories document; currently
                // only referenced by the commented-out code below.
                Element las_categories = new Element("las_categories");
                // CategoryBean cb = new CategoryBean();
                // cb.setName(invDataset.getName());
                // cb.setID(invDataset.getID());
                // // This is the top level...
                // //cb.setContributors(getContributors(invDataset));
                // Vector topCats = new Vector();
                for (Iterator topLevelIt = invDataset.getDatasets().iterator(); topLevelIt.hasNext(); ) {
                    InvDataset topDS = (InvDataset) topLevelIt.next();
                    // CategoryBean topCB = new CategoryBean();
                    // topCB.setName(topDS.getName());
                    String id = topDS.getID();
                    if ( id == null ) {
                        // Fall back to a synthetic id when the dataset has none.
                        try {
                            id = "id_"+JDOMUtils.MD5Encode(topDS.getName());
                        } catch (UnsupportedEncodingException e) {
                            id = "id_"+String.valueOf(Math.random());
                        }
                    }
                    System.out.println("top: "+topDS.getName()+", "+topDS.getID());
                    //topCB.setID(id);
                    // for (Iterator subDatasetsIt = topDS.getDatasets().iterator(); subDatasetsIt.hasNext(); ) {
                    // InvDataset subDataset = (InvDataset) subDatasetsIt.next();
                    // topCB.addCatID(subDataset.getID());
                    // // These will be the catalog containers that will contain the aggregations...
                    // for (Iterator grandChildrenIt = subDataset.getDatasets().iterator(); grandChildrenIt.hasNext(); ) {
                    // InvDataset grandChild = (InvDataset) grandChildrenIt.next();
                    // if ( grandChild.hasAccess() && grandChild.getName().contains("aggregation")) {
                    // // We are done.
                    // String url = null;
                    // InvAccess access = null;
                    // for (Iterator ait = grandChild.getAccess().iterator(); ait.hasNext(); ) {
                    // access = (InvAccess) ait.next();
                    // if (access.getService().getServiceType() == ServiceType.DODS ||
                    // access.getService().getServiceType() == ServiceType.OPENDAP ||
                    // access.getService().getServiceType() == ServiceType.NETCDF) {
                    // url = access.getStandardUrlName();
                    // }
                    // }
                    // if ( url != null && url.contains("aggregation") ){
                    // FilterBean filter = new FilterBean();
                    // filter.setAction("apply-variable");
                    // String tag = grandChild.getID();
                    // filter.setContainstag(tag);
                    // topCB.addFilter(filter);
                    // topCB.addCatID(grandChild.getID());
                    // }
                    // }
                    // }
                    // }
                    // if ( topCB.getFilters().size() > 0 ) {
                    // topCats.add(topCB);
                    // }
                }
                // if ( topCats.size() > 0 ) {
                // cb.setCategories(topCats);
                // }
                // las_categories.addContent(cb.toXml());
                // ax.processESGCategories(invDataset, las_categories);
                // LASDocument document = new LASDocument();
                // document.setRootElement(las_categories);
                // System.out.println("Writing "+file+" for "+invDataset.getName());
                // document.write(file);
                // document = null;
            }
        }
    }
}
guilhermejccavalcanti/immutables
mongo/src/org/immutables/mongo/repository/internal/BsonEncoding.java
/* Copyright 2013-2015 Immutables Authors and Contributors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.immutables.mongo.repository.internal; import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonParser; import com.google.common.base.Preconditions; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.io.ByteStreams; import com.google.common.primitives.Ints; import com.google.common.primitives.UnsignedBytes; import com.google.gson.TypeAdapter; import com.google.gson.stream.JsonWriter; import com.mongodb.DBCallback; import com.mongodb.DBCollection; import com.mongodb.DBDecoder; import com.mongodb.DBDecoderFactory; import com.mongodb.DBEncoder; import com.mongodb.DBObject; import com.mongodb.DefaultDBEncoder; import com.mongodb.LazyDBCallback; import de.undercouch.bson4jackson.BsonFactory; import de.undercouch.bson4jackson.BsonGenerator; import de.undercouch.bson4jackson.BsonParser; import java.io.BufferedInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.ListIterator; import java.util.Map; import java.util.Set; import javax.annotation.Nullable; import org.bson.BSONCallback; import org.bson.BSONObject; import org.bson.BasicBSONDecoder; 
import org.bson.io.BasicOutputBuffer; import org.bson.io.OutputBuffer; /** * MongoDB driver specific encoding and jumping hoops. */ @SuppressWarnings("resource") public final class BsonEncoding { private static final BsonFactory BSON_FACTORY = new BsonFactory() .enable(BsonParser.Feature.HONOR_DOCUMENT_LENGTH); private static final JsonFactory JSON_FACTORY = new JsonFactory() .enable(JsonParser.Feature.ALLOW_SINGLE_QUOTES) .enable(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES); /** * This field name will cause an MongoDB confuse if not unwrapped correctly so it may be a good * choice. */ private static final String PREENCODED_VALUE_WRAPPER_FIELD_NAME = "$"; private BsonEncoding() {} /** * Although it may seem that re-parsing is bizarre, but it is one [of not so many] ways to do * proper marshaling. This kind of inefficiency will only hit query constraints that have many * object with custom marshaling, which considered to be a rare case. * @param adapted adapted value that know how to write itself to {@link JsonWriter} * @return object converted to MongoDB driver's {@link BSONObject}. 
*/ public static Object unwrapBsonable(Support.Adapted<?> adapted) { try { ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); BsonGenerator generator = BSON_FACTORY.createGenerator(outputStream); BsonWriter writer = new BsonWriter(generator); writer.beginObject().name(PREENCODED_VALUE_WRAPPER_FIELD_NAME); adapted.write(writer); writer.endObject(); writer.close(); BSONObject object = new BasicBSONDecoder().readObject(outputStream.toByteArray()); return object.get(PREENCODED_VALUE_WRAPPER_FIELD_NAME); } catch (IOException ex) { throw Throwables.propagate(ex); } } public static DBObject unwrapJsonable(String json) { try { JsonParser parser = JSON_FACTORY.createParser(json); parser.nextToken(); ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); BsonGenerator generator = BSON_FACTORY.createGenerator(outputStream); generator.copyCurrentStructure(parser); generator.close(); parser.close(); byte[] data = outputStream.toByteArray(); return (DBObject) new LazyDBCallback(null).createObject(data, 0); } catch (IOException ex) { throw Throwables.propagate(ex); } } public static <T> T unmarshalDbObject(DBObject dbObject, TypeAdapter<T> adaper) throws IOException { BasicOutputBuffer buffer = new BasicOutputBuffer(); encoder().writeObject(buffer, dbObject); BsonParser parser = BSON_FACTORY.createParser(buffer.toByteArray()); BsonReader reader = new BsonReader(parser); T instance = adaper.read(reader); reader.close(); return instance; } private static class CountingOutputBufferStream extends OutputStream { final OutputBuffer buffer; int count; CountingOutputBufferStream(OutputBuffer buffer) { this.buffer = buffer; } @Override public void write(byte[] bytes, int offset, int length) throws IOException { buffer.write(bytes, offset, length); count += length; } @Override public void write(int byteValue) throws IOException { buffer.write(byteValue); count++; } } public static DBEncoder encoder() { return Encoder.ENCODER; } enum Encoder implements DBEncoder { 
ENCODER; @Override public int writeObject(OutputBuffer buffer, BSONObject object) { try { if (object instanceof WritableObjectPosition) { return ((WritableObjectPosition) object).writePlainCurrent(buffer); } return DefaultDBEncoder.FACTORY.create().writeObject(buffer, object); } catch (IOException ex) { throw Throwables.propagate(ex); } } } public static <T> DBObject wrapUpdateObject(T instance, TypeAdapter<T> adaper) { return new UpdateObject<>(instance, adaper); } public static <T> List<DBObject> wrapInsertObjectList(ImmutableList<T> list, TypeAdapter<T> adaper) { return new InsertObjectList<>(list, adaper); } interface WritableObjectPosition { int writeCurrent(OutputBuffer buffer) throws IOException; int writePlainCurrent(OutputBuffer buffer) throws IOException; } private static class UpdateObject<T> implements DBObject, WritableObjectPosition { private final T instance; private final TypeAdapter<T> adaper; UpdateObject(T instance, TypeAdapter<T> adaper) { this.instance = instance; this.adaper = adaper; } @Override public int writeCurrent(OutputBuffer buffer) throws IOException { CountingOutputBufferStream outputStream = new CountingOutputBufferStream(buffer); BsonWriter writer = new BsonWriter(BSON_FACTORY.createGenerator(outputStream)); adaper.write(writer, instance); writer.close(); return outputStream.count; } @Override public int writePlainCurrent(OutputBuffer buffer) throws IOException { return writeCurrent(buffer); } @Override public Object put(String key, Object v) { throw new UnsupportedOperationException(); } @Override public void putAll(BSONObject o) { throw new UnsupportedOperationException(); } @Override public void putAll(Map m) { throw new UnsupportedOperationException(); } @Override public Object get(String key) { throw new UnsupportedOperationException(); } @Override public Map toMap() { throw new UnsupportedOperationException(); } @Override public Object removeField(String key) { throw new UnsupportedOperationException(); } @Deprecated 
@Override public boolean containsKey(String s) { throw new UnsupportedOperationException(); } @Override public boolean containsField(String s) { throw new UnsupportedOperationException(); } @Override public Set<String> keySet() { return ImmutableSet.of(); } @Override public void markAsPartialObject() {} @Override public boolean isPartialObject() { return false; } } private static class InsertObjectList<T> implements DBObject, List<DBObject>, WritableObjectPosition { private final ImmutableList<T> list; private int position; @Nullable private JsonWriter writer; private CountingOutputBufferStream outputStream; private final TypeAdapter<T> adaper; InsertObjectList(ImmutableList<T> list, TypeAdapter<T> adaper) { this.list = list; this.adaper = adaper; } @Override public int writeCurrent(OutputBuffer buffer) throws IOException { createGeneratorIfNecessary(buffer); int previousByteCount = outputStream.count; adaper.write(writer, list.get(position)); if (isLastPosition()) { closeWriter(); } return outputStream.count - previousByteCount; } @Override public int writePlainCurrent(OutputBuffer buffer) throws IOException { CountingOutputBufferStream outputStream = new CountingOutputBufferStream(buffer); BsonWriter writer = new BsonWriter(BSON_FACTORY.createGenerator(outputStream)); adaper.write(writer, list.get(position)); writer.close(); return outputStream.count; } private void closeWriter() throws IOException { if (writer != null) { writer.close(); writer = null; } } private void createGeneratorIfNecessary(OutputBuffer buffer) throws IOException { if (writer == null) { outputStream = new CountingOutputBufferStream(buffer); writer = new BsonWriter(BSON_FACTORY.createGenerator(outputStream)); } } private boolean isLastPosition() { return position == list.size() - 1; } @Override public DBObject get(int index) { position = index; return this; } @Override public int size() { return list.size(); } @Override public Object put(String key, Object v) { throw new 
UnsupportedOperationException(); } @Override public void putAll(BSONObject o) { throw new UnsupportedOperationException(); } @Override public void putAll(Map m) { throw new UnsupportedOperationException(); } @Override public Object get(String key) { throw new UnsupportedOperationException(); } @Override public Map toMap() { throw new UnsupportedOperationException(); } @Override public Object removeField(String key) { throw new UnsupportedOperationException(); } @Deprecated @Override public boolean containsKey(String s) { throw new UnsupportedOperationException(); } @Override public boolean containsField(String s) { throw new UnsupportedOperationException(); } @Override public Set<String> keySet() { throw new UnsupportedOperationException(); } @Override public void markAsPartialObject() { throw new UnsupportedOperationException(); } @Override public boolean isPartialObject() { return false; } @Override public boolean add(DBObject e) { throw new UnsupportedOperationException(); } @Override public void add(int index, DBObject element) { throw new UnsupportedOperationException(); } @Override public boolean addAll(Collection<? extends DBObject> c) { throw new UnsupportedOperationException(); } @Override public boolean addAll(int index, Collection<? 
extends DBObject> c) { throw new UnsupportedOperationException(); } @Override public void clear() { throw new UnsupportedOperationException(); } @Override public boolean contains(Object o) { throw new UnsupportedOperationException(); } @Override public boolean containsAll(Collection<?> c) { throw new UnsupportedOperationException(); } @Override public int indexOf(Object o) { throw new UnsupportedOperationException(); } @Override public boolean isEmpty() { return false; } @Override public Iterator<DBObject> iterator() { return ImmutableSet.<DBObject>of().iterator(); } @Override public int lastIndexOf(Object o) { throw new UnsupportedOperationException(); } @Override public ListIterator<DBObject> listIterator() { return ImmutableList.<DBObject>of().listIterator(); } @Override public ListIterator<DBObject> listIterator(int index) { return ImmutableList.<DBObject>of().listIterator(); } @Override public DBObject set(int index, DBObject element) { throw new UnsupportedOperationException(); } @Override public boolean remove(Object o) { throw new UnsupportedOperationException(); } @Override public DBObject remove(int index) { throw new UnsupportedOperationException(); } @Override public boolean removeAll(Collection<?> c) { throw new UnsupportedOperationException(); } @Override public boolean retainAll(Collection<?> c) { throw new UnsupportedOperationException(); } @Override public List<DBObject> subList(int fromIndex, int toIndex) { throw new UnsupportedOperationException(); } @Override public Object[] toArray() { throw new UnsupportedOperationException(); } @Override public <V> V[] toArray(V[] a) { throw new UnsupportedOperationException(); } } public static <T> ImmutableList<T> unwrapResultObjectList(List<DBObject> result) { if (result.isEmpty()) { return ImmutableList.of(); } // Safe as long as caller will use same T for decoder and unwrap @SuppressWarnings("unchecked") List<T> results = ((ResultDecoder<T>) result.get(0)).results; return ImmutableList.copyOf(results); } 
/**
 * Creates a {@link DBDecoderFactory} whose decoder reads each BSON result with
 * {@code adapter} and accumulates the decoded values; retrieve them afterwards
 * via {@code unwrapResultObjectList}.
 *
 * @param adapter type adapter used to decode each result object
 * @param expectedSize hint for the expected number of results (list pre-sizing)
 */
public static <T> DBDecoderFactory newResultDecoderFor(TypeAdapter<T> adapter, int expectedSize) {
  return new ResultDecoder<>(adapter, expectedSize);
}

/**
 * Special input stream that operates on a writable byte buffer that is filled with one
 * BSON object from another input stream ({@link #resetObjectFrom(InputStream)}).
 * Extends {@link BufferedInputStream} to prevent excessive wrapping in another buffered
 * stream by {@link BsonParser}.
 */
private static final class ObjectBufferInputStream extends BufferedInputStream {
  private byte[] buffer;
  private int position;
  private int limit;

  ObjectBufferInputStream(int capacity) {
    // The inherited buffering is unused (null source, 1-byte buffer); every read is
    // served from our own {@code buffer}.
    super(null, 1);
    ensureBufferWithCapacity(capacity);
  }

  private void ensureBufferWithCapacity(int capacity) {
    if (buffer == null || buffer.length < capacity) {
      // A BSON document is at least 4 bytes: its little-endian int32 length prefix.
      Preconditions.checkArgument(capacity >= 4);
      byte[] temp = buffer;
      this.buffer = new byte[capacity];
      if (temp != null) {
        // Preserve bytes already read (e.g. the 4-byte length prefix) when growing.
        System.arraycopy(temp, 0, buffer, 0, temp.length);
      }
    }
  }

  /**
   * Reads exactly one BSON object from {@code inputStream} into the internal buffer
   * and rewinds this stream to the object's first byte.
   */
  void resetObjectFrom(InputStream inputStream) throws IOException {
    ByteStreams.readFully(inputStream, buffer, 0, Ints.BYTES);
    // BSON prefixes every document with its total size as a little-endian int32;
    // Ints.fromBytes takes big-endian arguments, hence the reversed byte order.
    int objectSize = Ints.fromBytes(
        buffer[3],
        buffer[2],
        buffer[1],
        buffer[0]);
    ensureBufferWithCapacity(objectSize);
    ByteStreams.readFully(inputStream, buffer, Ints.BYTES, objectSize - Ints.BYTES);
    position = 0;
    limit = objectSize;
  }

  @Override
  public int available() throws IOException {
    return limit - position;
  }

  @Override
  public int read(byte[] b, int off, int len) throws IOException {
    int remaining = available();
    if (remaining <= 0) {
      // Honor the InputStream contract: at end of data return -1 instead of 0,
      // which could make a caller loop forever. Zero-length requests return 0.
      return len == 0 ? 0 : -1;
    }
    len = Math.min(len, remaining);
    System.arraycopy(buffer, position, b, off, len);
    position += len;
    return len;
  }

  @Override
  public int read() throws IOException {
    if (available() > 0) {
      return UnsignedBytes.toInt(buffer[position++]);
    }
    return -1;
  }
}

/**
 * Decoder that adapts each incoming BSON object with a {@link TypeAdapter} and collects
 * the results. It implements {@link DBObject} only so {@link #decode} can hand the driver
 * a placeholder; the real decoded values live in {@link #results}, hence almost every
 * {@code DBObject} method throws {@link UnsupportedOperationException}.
 */
private static final class ResultDecoder<T> implements DBDecoderFactory, DBDecoder, DBObject {

  final List<T> results;

  private final TypeAdapter<T> adapter;
  @Nullable
  private BsonReader parser;
  // Reusable per-object buffer. 2012 is presumably "about 2 KiB" — confirm whether
  // 2048 was intended; functionally only a starting capacity either way.
  private final ObjectBufferInputStream bufferStream = new ObjectBufferInputStream(2012);

  ResultDecoder(TypeAdapter<T> adapter, int expectedSize) {
    this.adapter = adapter;
    this.results = Lists.newArrayListWithExpectedSize(expectedSize);
  }

  private BsonReader createParserIfNecessary() throws IOException {
    // The previous parser (if any) is bound to the prior object's buffer state;
    // close it and build a fresh one over the refilled buffer stream.
    if (parser != null) {
      parser.close();
    }
    parser = new BsonReader(BSON_FACTORY.createParser(bufferStream));
    return parser;
  }

  @Override
  public DBObject decode(InputStream inputStream, DBCollection collection) throws IOException {
    bufferStream.resetObjectFrom(inputStream);
    createParserIfNecessary();
    T object = adapter.read(parser);
    results.add(object);
    // Return self as a placeholder DBObject; callers unwrap #results separately.
    return this;
  }

  @Override
  public DBDecoder create() {
    return this;
  }

  @Override
  public BSONObject readObject(byte[] b) {
    throw new UnsupportedOperationException();
  }

  @Override
  public BSONObject readObject(InputStream in) throws IOException {
    throw new UnsupportedOperationException();
  }

  @Override
  public int decode(byte[] b, BSONCallback callback) {
    throw new UnsupportedOperationException();
  }

  @Override
  public int decode(InputStream in, BSONCallback callback) throws IOException {
    throw new UnsupportedOperationException();
  }

  @Override
  public DBCallback getDBCallback(DBCollection collection) {
    throw new UnsupportedOperationException();
  }

  @Override
  public DBObject decode(byte[] b, DBCollection collection) {
    throw new UnsupportedOperationException();
  }

  @Override
  public Object put(String key, Object v) {
    throw new UnsupportedOperationException();
  }

  @Override
  public void putAll(BSONObject o) {
    throw new UnsupportedOperationException();
  }

  @Override
  public void putAll(Map m) {
    throw new UnsupportedOperationException();
  }

  @Override
  public Object get(String key) {
    return null;
  }

  @Override
  public Map toMap() {
    throw new UnsupportedOperationException();
  }

  @Override
  public Object removeField(String key) {
    throw new UnsupportedOperationException();
  }

  @Deprecated
  @Override
  public boolean containsKey(String s) {
    throw new UnsupportedOperationException();
  }

  @Override
  public boolean containsField(String s) {
    throw new UnsupportedOperationException();
  }

  @Override
  public Set<String> keySet() {
    throw new UnsupportedOperationException();
  }

  @Override
  public void markAsPartialObject() {}

  @Override
  public boolean isPartialObject() {
    return false;
  }
}
}
mmakaay/flask_micron
tests/test_flask_integration.py
# -*- coding: utf-8 -*- """The tests in this module are used to make sure that Micron integrates correctly with the Flask environment and its testing features. The code does not extensively test all available Micron features, but mainly checks if the plumbing isn't leaky. """ from tests import MicronTestCase class Tests(MicronTestCase): def setUp(self): super(Tests, self).setUp() @self.micron.method() def greet(who='World'): return 'Hello, %s' % who @self.micron.method() def ping(): return 'pong' def test_GivenMicronWrappedApp_PingReturnsPong(self): response = self.request('/ping') self.assertEqual('pong', response.output) def test_GivenMicronMethod_MethodGetIsNotAllowed(self): response = self.client.get('/ping') self.assertEqual('405 METHOD NOT ALLOWED', response.status) def test_GivenMicronMethod_ResponseIsReturned(self): response = self.request('/greet', "you") self.assertEqual('Hello, you', response.output)
scala-steward/csw
csw-services/src/main/scala/csw/services/cli/Command.scala
package csw.services.cli

import caseapp.{CommandName, HelpMessage, ExtraName => Short}

// Root of the CLI command ADT; subcommands are parsed by case-app from the
// annotations on each case class.
sealed trait Command

object Command {

  // `csw-services start` subcommand: each flag selects one service to launch.
  @CommandName("start")
  @HelpMessage("starts all the CSW services by default if no other option is provided")
  final case class Start(
      @HelpMessage("start config server")
      @Short("c")
      config: Boolean = false,
      @HelpMessage("start event server")
      @Short("e")
      event: Boolean = false,
      @HelpMessage("start alarm server")
      @Short("a")
      alarm: Boolean = false,
      @HelpMessage(
        "start database service, set 'PGDATA' env variable where postgres is installed e.g. for mac: /usr/local/var/postgres"
      )
      @Short("d")
      database: Boolean = false,
      @HelpMessage("start auth/aas service")
      @Short("k")
      auth: Boolean = false,
      @HelpMessage("name of the interface")
      @Short("i")
      interfaceName: Option[String]
  ) extends Command

  object Start {
    // Companion factory used instead of the generated apply so that a bare
    // `start` (no service flags) launches every service.
    def apply(
        config: Boolean = false,
        event: Boolean = false,
        alarm: Boolean = false,
        database: Boolean = false,
        auth: Boolean = false,
        interfaceName: Option[String] = None
    ): Start =
      // mark all flags=true when no option is provided to start command
      if (config || event || alarm || database || auth) new Start(config, event, alarm, database, auth, interfaceName)
      else new Start(true, true, true, true, true, interfaceName)
  }
}
simapetr/uart_terminal
docs/uart_terminal_doc/html/annotated_dup.js
// Doxygen-generated data for the HTML "annotated class list" page
// (docs/.../html/annotated_dup.js) — presumably produced by the docs build;
// do not edit by hand.
var annotated_dup =
[
    [ "auto_comp_c", "classauto__comp__c.html", "classauto__comp__c" ],
    [ "autocomp_buffer_t", "structautocomp__buffer__t.html", "structautocomp__buffer__t" ],
    [ "button_buffer_t", "structbutton__buffer__t.html", "structbutton__buffer__t" ],
    [ "button_js_c", "classbutton__js__c.html", "classbutton__js__c" ],
    [ "check_box_buffer_t", "structcheck__box__buffer__t.html", "structcheck__box__buffer__t" ],
    [ "check_box_js_c", "classcheck__box__js__c.html", "classcheck__box__js__c" ],
    [ "config_ini", "classconfig__ini.html", "classconfig__ini" ],
    [ "console_buffer_t", "structconsole__buffer__t.html", "structconsole__buffer__t" ],
    [ "data_plot", "classdata__plot.html", "classdata__plot" ],
    [ "data_plot_buffer", "structdata__plot__buffer.html", "structdata__plot__buffer" ],
    [ "editor", "classeditor.html", "classeditor" ],
    [ "editor_event_buffer_t", "structeditor__event__buffer__t.html", "structeditor__event__buffer__t" ],
    [ "editor_frame", "classeditor__frame.html", "classeditor__frame" ],
    [ "event_buffer_t", "structevent__buffer__t.html", "structevent__buffer__t" ],
    [ "file_js_c", "classfile__js__c.html", "classfile__js__c" ],
    [ "gauge_buffer_t", "structgauge__buffer__t.html", "structgauge__buffer__t" ],
    [ "gauge_js_c", "classgauge__js__c.html", "classgauge__js__c" ],
    [ "graph_buffer_t", "structgraph__buffer__t.html", "structgraph__buffer__t" ],
    [ "graph_js_c", "classgraph__js__c.html", "classgraph__js__c" ],
    [ "gui_frame", "classgui__frame.html", "classgui__frame" ],
    [ "jerry_context_data_manager_t", "structjerry__context__data__manager__t.html", "structjerry__context__data__manager__t" ],
    [ "jerry_heap_stats_t", "structjerry__heap__stats__t.html", "structjerry__heap__stats__t" ],
    [ "jerry_object_native_info_t", "structjerry__object__native__info__t.html", "structjerry__object__native__info__t" ],
    [ "jerry_property_descriptor_t", "structjerry__property__descriptor__t.html", "structjerry__property__descriptor__t" ],
    [ "jerryscript_c", "classjerryscript__c.html", "classjerryscript__c" ],
    [ "led_buffer_t", "structled__buffer__t.html", "structled__buffer__t" ],
    [ "led_js_c", "classled__js__c.html", "classled__js__c" ],
    [ "main_app", "classmain__app.html", "classmain__app" ],
    [ "main_frame", "classmain__frame.html", "classmain__frame" ],
    [ "main_frame_js_c", "classmain__frame__js__c.html", "classmain__frame__js__c" ],
    [ "method_t", "structmethod__t.html", "structmethod__t" ],
    [ "object_t", "structobject__t.html", "structobject__t" ],
    [ "packet_buffer_t", "structpacket__buffer__t.html", "structpacket__buffer__t" ],
    [ "panel_buffer_t", "structpanel__buffer__t.html", "structpanel__buffer__t" ],
    [ "panel_js_c", "classpanel__js__c.html", "classpanel__js__c" ],
    [ "project_js_c", "classproject__js__c.html", "classproject__js__c" ],
    [ "send_buffer_t", "structsend__buffer__t.html", "structsend__buffer__t" ],
    [ "signal_buffer_t", "structsignal__buffer__t.html", "structsignal__buffer__t" ],
    [ "sizer_buffer_t", "structsizer__buffer__t.html", "structsizer__buffer__t" ],
    [ "sizer_js_c", "classsizer__js__c.html", "classsizer__js__c" ],
    [ "slider_buffer_t", "structslider__buffer__t.html", "structslider__buffer__t" ],
    [ "slider_js_c", "classslider__js__c.html", "classslider__js__c" ],
    [ "static_text_buffer_t", "structstatic__text__buffer__t.html", "structstatic__text__buffer__t" ],
    [ "static_text_js_c", "classstatic__text__js__c.html", "classstatic__text__js__c" ],
    [ "text_edit_c", "classtext__edit__c.html", "classtext__edit__c" ],
    [ "textctrl_buffer_t", "structtextctrl__buffer__t.html", "structtextctrl__buffer__t" ],
    [ "textctrl_js_c", "classtextctrl__js__c.html", "classtextctrl__js__c" ],
    [ "thread_c", "classthread__c.html", "classthread__c" ],
    [ "time_js_c", "classtime__js__c.html", "classtime__js__c" ],
    [ "timer_buffer_t", "structtimer__buffer__t.html", "structtimer__buffer__t" ],
    [ "timer_js_c", "classtimer__js__c.html", "classtimer__js__c" ],
    [ "uart_cfg_t", "structuart__cfg__t.html", "structuart__cfg__t" ],
    [ "uart_event_buffer_t", "structuart__event__buffer__t.html", "structuart__event__buffer__t" ],
    [ "uart_js_c", "classuart__js__c.html", "classuart__js__c" ],
    [ "uart_port", "classuart__port.html", "classuart__port" ],
    [ "var_t", "structvar__t.html", "structvar__t" ]
];
FlySkyBear/ERMaster-N
org.insightech.er/src/org/insightech/er/editor/view/action/outline/index/CreateIndexAction.java
package org.insightech.er.editor.view.action.outline.index;

import java.util.List;

import org.eclipse.gef.EditPart;
import org.eclipse.gef.ui.parts.TreeViewer;
import org.eclipse.jface.dialogs.IDialogConstants;
import org.eclipse.swt.widgets.Event;
import org.eclipse.ui.PlatformUI;
import org.insightech.er.ResourceString;
import org.insightech.er.editor.controller.command.diagram_contents.not_element.index.CreateIndexCommand;
import org.insightech.er.editor.model.ERDiagram;
import org.insightech.er.editor.model.diagram_contents.element.node.table.ERTable;
import org.insightech.er.editor.view.action.outline.AbstractOutlineBaseAction;
import org.insightech.er.editor.view.dialog.element.table.sub.IndexDialog;

/**
 * Outline-view action that opens the index dialog for the selected table and,
 * when confirmed, creates the new index through an executable command.
 */
public class CreateIndexAction extends AbstractOutlineBaseAction {

    public static final String ID = CreateIndexAction.class.getName();

    public CreateIndexAction(TreeViewer treeViewer) {
        super(ID, ResourceString.getResourceString("action.title.create.index"), treeViewer);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void execute(Event event) {
        ERDiagram diagram = this.getDiagram();

        // NOTE(review): assumes the action is only invoked while a table node
        // is selected in the outline tree (otherwise get(0) or the casts below
        // would fail) — confirm against the action's enablement rules.
        List selectedEditParts = this.getTreeViewer().getSelectedEditParts();
        EditPart editPart = (EditPart) selectedEditParts.get(0);
        ERTable table = (ERTable) editPart.getModel();

        IndexDialog dialog = new IndexDialog(PlatformUI.getWorkbench()
                .getActiveWorkbenchWindow().getShell(), null, table);

        if (dialog.open() == IDialogConstants.OK_ID) {
            // Run through a command so the change participates in undo/redo.
            CreateIndexCommand command = new CreateIndexCommand(diagram, dialog
                    .getResultIndex());
            this.execute(command);
        }
    }
}
1690296356/jdk
test/lib/jdk/test/lib/security/timestamp/TsaSigner.java
<filename>test/lib/jdk/test/lib/security/timestamp/TsaSigner.java
/*
 * Copyright (c) 2020, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package jdk.test.lib.security.timestamp;

import java.io.ByteArrayOutputStream;
import java.math.BigInteger;
import java.security.Signature;
import java.security.cert.X509Certificate;
import java.util.Date;
import java.util.Objects;

import jdk.test.lib.hexdump.HexPrinter;
import sun.security.pkcs.ContentInfo;
import sun.security.pkcs.PKCS7;
import sun.security.pkcs.SignerInfo;
import sun.security.util.*;
import sun.security.x509.AlgorithmId;
import sun.security.x509.X500Name;

/**
 * Process time-stamping request and generate signed data.
 */
public class TsaSigner {

    private static final boolean DEBUG = Boolean.getBoolean("test.debug");

    private static final HexPrinter HEX_PRINTER = HexPrinter.simple();

    protected final SignerEntry signerEntry;
    protected final byte[] requestData;

    private final RespInterceptor interceptor;

    /**
     * Initialization.
     *
     * @param signerEntry a {@link SignerEntry} instance
     * @param requestData the time-stamping request data
     * @param interceptor the interceptor for customizing signing
     */
    public TsaSigner(SignerEntry signerEntry, byte[] requestData,
            RespInterceptor interceptor) {
        Objects.requireNonNull(signerEntry);
        Objects.requireNonNull(interceptor);

        this.signerEntry = signerEntry;
        this.requestData = requestData;
        this.interceptor = interceptor;
    }

    /**
     * Initialization.
     *
     * @param signerEntry a {@link SignerEntry} instance
     * @param requestData the time-stamping request
     * @param param the application parameters
     */
    public TsaSigner(SignerEntry signerEntry, byte[] requestData,
            TsaParam param) {
        this(signerEntry, requestData,
                new DefaultRespInterceptor<TsaParam>(param));
    }

    /**
     * Sign data.
     *
     * @return the time-stamping response data
     */
    public byte[] sign() throws Exception {
        TsaParam requestParam = parseRequestParam();
        byte[] responseSeqData = createResponse(requestParam);
        return responseSeqData;
    }

    // Parse the parameters from the time-stamping request data.
    private TsaParam parseRequestParam() throws Exception {
        TsaParam param = TsaParam.newInstance();

        // A null request yields empty parameters; the interceptor supplies
        // the response values in that case.
        if (requestData == null) {
            return param;
        }

        System.out.println("===== Request ====");
        debug("Request", requestData);

        DerValue request = new DerValue(requestData);

        param.version(request.data.getInteger());
        print("reqVersion", param.version());

        // messageImprint ::= SEQUENCE { hashAlgorithm, hashedMessage }
        DerValue messageImprintValue = request.data.getDerValue();
        debug("messageImprintValue", messageImprintValue.toByteArray());

        DerValue digestAlgoValue = messageImprintValue.data.getDerValue();
        debug("digestAlgoValue", digestAlgoValue.toByteArray());

        param.digestAlgo(AlgorithmId.parse(digestAlgoValue).getName());
        print("reqDigestAlgo", param.digestAlgo());

        param.hashedMessage(messageImprintValue.data.getOctetString());
        debug("reqHashedMessage", param.hashedMessage());

        // Remaining optional fields are recognized by DER tag: nonce (INTEGER),
        // certReq (BOOLEAN) and reqPolicy (OBJECT IDENTIFIER); anything else
        // is silently ignored.
        while (request.data.available() > 0) {
            DerValue value = request.data.getDerValue();
            if (value.tag == DerValue.tag_Integer) {
                param.nonce(value.getBigInteger());
                print("reqNonce", param.nonce());
            } else if (value.tag == DerValue.tag_Boolean) {
                param.certReq(value.getBoolean());
                print("certReq", param.certReq());
            } else if (value.tag == DerValue.tag_ObjectId) {
                param.policyId(value.getOID().toString());
                print("reqPolicyId", param.policyId());
            }
        }

        return param;
    }

    // Create the time-stamping response data with the given time-stamping
    // request parameters.
    private byte[] createResponse(TsaParam requestParam) throws Exception {
        System.out.println("===== Response ====");

        // The interceptor decides the actual response values (possibly
        // altering what the request asked for).
        TsaParam respParam = interceptor.getRespParam(requestParam);

        DerOutputStream statusInfoOut = new DerOutputStream();
        int status = respParam.status();
        print("Status", status);
        statusInfoOut.putInteger(status);

        DerOutputStream responseOut = new DerOutputStream();
        responseOut.write(DerValue.tag_Sequence, statusInfoOut);
        debug("Status info", statusInfoOut.toByteArray());
        System.out.println("Generated status info");

        // Here, when the status is either 0 or 1, the response will contain
        // the signed data. Note that even when the signed data is not
        // generated, no failure info will be sent.
        if (status == 0 || status == 1) {
            System.out.println("Signer: "
                    + signerEntry.cert.getSubjectX500Principal().getName());

            String issuerName = signerEntry.cert.getIssuerX500Principal().getName();
            print("Issuer", issuerName);

            // Build the TSTInfo sequence field by field; the write order below
            // is significant for the resulting DER encoding.
            DerOutputStream tstInfoOut = new DerOutputStream();

            int version = respParam.version();
            print("version", version);
            tstInfoOut.putInteger(version);

            String policyId = respParam.policyId();
            print("policyId", policyId);
            tstInfoOut.putOID(ObjectIdentifier.of(policyId));

            String digestAlgo = respParam.digestAlgo();
            print("digestAlgo", digestAlgo);
            DerOutputStream digestAlgoOut = new DerOutputStream();
            AlgorithmId digestAlgoId = AlgorithmId.get(digestAlgo);
            digestAlgoId.encode(digestAlgoOut);

            byte[] hashedMessage = respParam.hashedMessage();
            debug("hashedMessage", hashedMessage);
            digestAlgoOut.putOctetString(hashedMessage);
            tstInfoOut.write(DerValue.tag_Sequence, digestAlgoOut);

            BigInteger serialNumber = respParam.serialNumber();
            print("serialNumber", serialNumber);
            tstInfoOut.putInteger(serialNumber);

            Date genTime = respParam.genTime();
            print("genTime", genTime);
            tstInfoOut.putGeneralizedTime(genTime);

            // Echo the nonce back only if the request/interceptor supplied one.
            BigInteger nonce = respParam.nonce();
            if (nonce != null) {
                tstInfoOut.putInteger(nonce);
            }

            DerOutputStream tstInfoSeqOut = new DerOutputStream();
            tstInfoSeqOut.write(DerValue.tag_Sequence, tstInfoOut);
            byte[] tstInfoSeqData = tstInfoSeqOut.toByteArray();
            debug("TST Info", tstInfoSeqData);

            // Wrap TSTInfo as the eContent of the signed data.
            DerOutputStream eContentOut = new DerOutputStream();
            eContentOut.putOctetString(tstInfoSeqData);

            ContentInfo eContentInfo = new ContentInfo(
                    ObjectIdentifier.of(KnownOIDs.TimeStampTokenInfo),
                    new DerValue(eContentOut.toByteArray()));

            // The interceptor may override the default signature algorithm.
            String defaultSigAlgo = SignatureUtil.getDefaultSigAlgForKey(
                    signerEntry.privateKey);
            String sigAlgo = interceptor.getSigAlgo(defaultSigAlgo);
            Signature signature = Signature.getInstance(sigAlgo);
            System.out.println(
                    "Signature algorithm: " + signature.getAlgorithm());
            signature.initSign(signerEntry.privateKey);
            signature.update(tstInfoSeqData);

            SignerInfo signerInfo = new SignerInfo(
                    new X500Name(issuerName),
                    signerEntry.cert.getSerialNumber(),
                    SignatureUtil.getDigestAlgInPkcs7SignerInfo(
                            signature, sigAlgo, signerEntry.privateKey, false),
                    AlgorithmId.get(sigAlgo),
                    signature.sign());

            // The interceptor decides whether (and which) certificates are
            // embedded, honoring the request's certReq flag.
            X509Certificate[] signerCertChain = interceptor.getSignerCertChain(
                    signerEntry.certChain, requestParam.certReq());
            PKCS7 p7 = new PKCS7(new AlgorithmId[] { digestAlgoId },
                    eContentInfo, signerCertChain,
                    new SignerInfo[] { signerInfo });
            ByteArrayOutputStream signedDataOut = new ByteArrayOutputStream();
            p7.encodeSignedData(signedDataOut);
            byte[] signedData = signedDataOut.toByteArray();
            debug("Signed data", signedData);

            responseOut.putDerValue(new DerValue(signedData));
            System.out.println("Generated signed data");
        }

        DerOutputStream responseSeqOut = new DerOutputStream();
        responseSeqOut.write(DerValue.tag_Sequence, responseOut);
        byte[] responseSeqData = responseSeqOut.toByteArray();
        debug("Response", responseSeqData);
        System.out.println("Generated response");

        return responseSeqData;
    }

    private static void print(String name, Object value) {
        System.out.println(name + ": " + value);
    }

    // Hex-dumps the given bytes only when -Dtest.debug=true.
    private static void debug(String name, byte[] bytes) {
        if (DEBUG) {
            System.out.println(name + ":");
            HEX_PRINTER.format(bytes);
        }
    }
}
ss77995ss/react-redux-homework
test/actions/index.test.js
<reponame>ss77995ss/react-redux-homework<filename>test/actions/index.test.js
// Action-creator tests: a mock redux store (with thunk) dispatches each async
// action while axios-mock-adapter stubs the HTTP layer, then the dispatched
// actions are compared against the expected payloads.
import configureMockStore from 'redux-mock-store';
import thunk from 'redux-thunk';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { mockCreateAction } from '../data/mockData';
import {
  ROW_CREATER,
  ROW_DELETER,
  CELL_UPDATER,
  FILTER_TEXT,
} from '../constants/actionTypes';
import * as actions from '../../src/client/actions/index';
import { FETCH_DATA } from '../../src/client/constants/actionTypes';

const middlewares = [thunk];
const mockStore = configureMockStore(middlewares);

describe('./actions', () => {
  const url = '/api/tabledatas';
  const store = mockStore({ data: {} });
  let mock;

  beforeEach(() => {
    // Fresh axios mock per test so replies don't leak between cases.
    mock = new MockAdapter(axios);
  });

  afterEach(() => {
    mock.restore();
    store.clearActions();
  });

  describe('When rowCreater action is called', () => {
    it('should create an action to create new row', () => {
      const expectedAction = [{
        type: ROW_CREATER,
        data: mockCreateAction,
      }];
      mock.onPost(url).reply(200, {
        mockCreateAction,
      });
      // rowCreater is a thunk; assert on the actions recorded by the store.
      return store.dispatch(actions.rowCreater(url, mockCreateAction))
        .then(() => {
          expect(store.getActions()).toEqual(expectedAction);
        });
    });
  });

  describe('When filterText action is called', () => {
    it('should create an action to read throw the table', () => {
      // filterText is synchronous — no HTTP stub needed.
      const mockData = 'afasfdssda';
      const expectedAction = {
        type: FILTER_TEXT,
        text: mockData,
      };
      expect(actions.filterText(mockData)).toEqual(expectedAction);
    });
  });

  describe('When rowDeleter action is called', () => {
    it('should create an action to delete a row', () => {
      const mockData = mockCreateAction;
      const expectedAction = [{
        type: ROW_DELETER,
        data: mockData,
      }];
      mock.onDelete(url).reply(200, {
        data: mockData,
      });
      return store.dispatch(actions.rowDeleter(url, mockData))
        .then(() => {
          expect(store.getActions()).toEqual(expectedAction);
        });
    });
  });

  describe('When cellUpdater action is called', () => {
    it('should create an action to change data of a cell', () => {
      const mockData = mockCreateAction;
      const expectedAction = [{
        type: CELL_UPDATER,
        data: mockData,
      }];
      mock.onPut(url).reply(200, {
        mockData,
      });
      return store.dispatch(actions.cellUpdater(url, mockData))
        .then(() => {
          expect(store.getActions()).toEqual(expectedAction);
        });
    });
  });

  describe('When fetchData action is called', () => {
    it('should create an action to get data from database', () => {
      const mockData = mockCreateAction;
      const expectedAction = [{
        type: FETCH_DATA,
        data: { mockData },
      }];
      mock.onGet(url).reply(200, {
        mockData,
      });
      return store.dispatch(actions.fetchData(url))
        .then(() => {
          expect(store.getActions()).toEqual(expectedAction);
        });
    });
  });
});
jia57196/code41
project/c++/mri/src/PI-slim-napa/recdopt_py/download_manager/retrieval_progress.py
import sys, time, logging,recdopt_context, ctypes, math
from recdopt_context import *
log = logging.getLogger('recd')
sys.path.append('..')
from common.time_utils import TimeUtils

#
# Base class for file retrieval queries, iterations, steps.
#
class RetrievalProgressBase:
    # Tracks a fractional progress value (0.0..1.0) plus its percentage
    # mirror in `progressPerc`.
    def __init__(self):
        self.startSecs = int(0)
        self.progress = float(0.0)
        self.progressPerc = float(0.0)

    def Start(self):
        """Reset progress and record the wall-clock start time (seconds)."""
        self.startSecs = int(time.time())
        self.progress = 0.0
        self.progressPerc = 0.0

    def SetProgress(self, progress):
        """Set absolute progress (0.0..1.0) and refresh the percentage."""
        self.progress = progress;
        self.progressPerc = self.progress * 100.0

    def IncrementProgress(self, incremental_progress):
        """Add a progress delta and refresh the percentage."""
        self.progress += incremental_progress;
        self.progressPerc = self.progress * 100.0

    def Finished(self):
        """Force progress to complete (1.0 / 100%)."""
        self.progress = 1.0
        self.progressPerc = 100.0

#
# One step of a file retrieval query.
# Maintains progress of a single step of a query.
#
class RetrievalProgressStep(RetrievalProgressBase):
    def __init__(self, step_index, step_weight):
        RetrievalProgressBase.__init__(self)
        self.index = int(step_index)
        # GUI displays 1-based step numbers.
        self.indexForGui = int(self.index + 1)
        self.weight = step_weight

    def IncrementProgress(self, incremental_progress):
        RetrievalProgressBase.IncrementProgress(self, incremental_progress)
        #log.info("""Incremented step progress by %s, new progress = %s""" % (incremental_progress, self.progress))

#
# One iteration of a file retrieval query, contains a list of steps.
# Maintains progress of a single iteration.
#
class RetrievalProgressIteration(RetrievalProgressBase):
    def __init__(self, iter_index, iter_weight):
        RetrievalProgressBase.__init__(self)
        self.index = iter_index
        self.indexForGui = int(self.index + 1)
        self.weight = iter_weight
        self.numSteps = 4
        self.startTime = 0

        # Figure out how much each step will contribute to the iteration.
        # NOTE: For now just giving each step equal weight, this can be tweaked.
        # (total_weighted_steps is computed but not used below — the actual
        # weights come from step_weight_list.)
        total_weighted_steps = 0
        for step_index in range(0, self.numSteps, 1):
            total_weighted_steps += 1

        # Create steps.
        # Fixed per-step weights; the last step dominates (90% of the iteration).
        step_weight_list = [0.02,0.02,0.06,0.90]
        self.stepList = []
        for step_index in range(0, self.numSteps, 1):
            # Step indices are globally unique across iterations.
            adjusted_step_index = (self.index * self.numSteps) + step_index
            self.stepList.append( RetrievalProgressStep(adjusted_step_index, step_weight_list[step_index]) )

    def IncrementProgress(self, incremental_progress):
        RetrievalProgressBase.IncrementProgress(self, incremental_progress)
        #log.info("""Incremented iteration progress by %s, new progress = %s""" % (incremental_progress, self.progress))

#
# An entire file retrieval query, contains a list of iterations.
# Maintains progress of the entire query.
#
class RetrievalProgress(RetrievalProgressBase):
    def __init__(self, time_ranges):
        # time_ranges: iterable of (start_time, end_time) pairs; each pair
        # becomes one iteration weighted by its share of the total duration.
        RetrievalProgressBase.__init__(self)
        #self.bytes = int(0)
        #self.pkts = int(0)
        self.numIterations = len(time_ranges)
        self.totalSteps = 0
        #self.fileName = file_name
        self.initial_wall_time = time.time()
        # Single-element lists used as mutable cells for the ETA estimator.
        self.last_guess = []
        self.last_guess.append(0)
        self.last_guess_time = []
        self.last_guess_time.append(self.initial_wall_time)

        # Figure out the total length of the query.
        total_secs_in_query = 0
        for (start_time, end_time) in time_ranges:
            total_secs_in_query += (end_time - start_time)
        #log.info("""RetrievalProgress: total_secs_in_query = %s, num_iterations = %s""" % (total_secs_in_query,self.numIterations))

        # Create iterations
        self.iterationList = []
        iter_index = 0
        total_weight = 0
        iter_weight = 0
        for (start_time, end_time) in time_ranges:
            num_secs_in_iter = end_time - start_time
            if (total_secs_in_query > 0):
                iter_weight = float(num_secs_in_iter) / total_secs_in_query
            #log.info("""RetrievalProgress: creating interation %s, secs_in_iter = %s, iter_weight = %s""" % (iter_index,num_secs_in_iter,iter_weight))
            self.iterationList.append( RetrievalProgressIteration(iter_index, iter_weight) )
            self.totalSteps += self.iterationList[iter_index].numSteps
            iter_index += 1
            total_weight += iter_weight
        #log.info("""RetrievalProgress: total_weight = %s""" % (total_weight))

    # Update the query progress.
    def IncrementProgress(self, incremental_progress):
        RetrievalProgressBase.IncrementProgress(self, incremental_progress)
        #log.info("""Incremented query progress by %s, new progress = %s""" % (incremental_progress, self.progress))
        #self.bytes = int(recdopt.get_written_bytes(ctypes.c_char_p(self.fileName)))
        #self.pkts = int(recdopt.get_written_packets(ctypes.c_char_p(self.fileName)))

    def time_estimate(self):
        """Return a human-readable remaining-time string for the query."""
        current_wall_time = time.time()
        seconds_so_far = current_wall_time - self.initial_wall_time
        estimated_remaining_seconds = self.last_guess[0]
        if seconds_so_far <= 5:
            # Wait a few secs to make the first guess.
            return "Please wait..."
        if self.progress <= 0:
            # If no progress yet, can't make a guess.
            return "Please wait..."
        # Only make a new guess every 5 secs so it doesn't jump around so much.
        seconds_since_last_guess = current_wall_time - self.last_guess_time[0]
        if seconds_since_last_guess > 5:
            # Make a new guess.
            # Linear extrapolation: remaining = elapsed * (1-p)/p.
            estimated_remaining_seconds = int( math.ceil( (seconds_so_far * (1.0 - self.progress)) / self.progress ) )
            self.last_guess_time[0] = current_wall_time
            self.last_guess[0] = estimated_remaining_seconds
        else:
            # Just decrement the current guess.
            # NOTE(review): last_guess_time is not advanced here, so repeated
            # calls within the 5 s window each subtract the full elapsed-since-
            # last-guess again — the guess can shrink faster than real time.
            # Confirm whether that is intended before changing.
            if (estimated_remaining_seconds > seconds_since_last_guess):
                estimated_remaining_seconds -= seconds_since_last_guess
            else:
                estimated_remaining_seconds = 0
            self.last_guess[0] = estimated_remaining_seconds
        return TimeUtils.time_remaining(estimated_remaining_seconds)

#
# Progress update functions.
# These functions control the RetrievalProgress classes.
#
# Progress is based on how many steps of the total steps in a query
# we have completed and the relative weight of each step.
#
# Progress never goes backwards, only forwards!

# Update the progress of the current step, the iteration it is part of and the entire query.
def IncrementProgressByStepProgress(query, iter, step, current_step_progress):
    """Advance `step` to the absolute fraction `current_step_progress` and
    propagate the weighted delta up to its iteration and the whole query."""
    delta = current_step_progress - step.progress
    weighted_delta = delta * step.weight
    step.IncrementProgress(delta)
    iter.IncrementProgress(weighted_delta)
    query.IncrementProgress(weighted_delta * iter.weight)

# Increment progress when a step is finished.
def StepFinished(query, iter, step):
    """Mark `step` complete and credit its remaining weighted share to the
    iteration and the query."""
    remaining = 1.0 - step.progress
    weighted_remaining = remaining * step.weight
    step.Finished()
    iter.IncrementProgress(weighted_remaining)
    query.IncrementProgress(weighted_remaining * iter.weight)
navikt/fp-formidling
domenetjenester/brevbestiller/src/main/java/no/nav/foreldrepenger/melding/brevbestiller/task/ProduserBrevTask.java
package no.nav.foreldrepenger.melding.brevbestiller.task;

import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;

import no.nav.foreldrepenger.melding.brevbestiller.impl.BrevBestillerTjeneste;
import no.nav.foreldrepenger.melding.hendelser.HendelseRepository;
import no.nav.vedtak.felles.prosesstask.api.ProsessTask;
import no.nav.vedtak.felles.prosesstask.api.ProsessTaskData;
import no.nav.vedtak.felles.prosesstask.api.ProsessTaskHandler;

/**
 * Process task ("formidling.bestillBrev") that loads the document event
 * referenced by the task's HENDELSE_ID property and orders the letter for it.
 */
@ApplicationScoped
@ProsessTask("formidling.bestillBrev")
public class ProduserBrevTask implements ProsessTaskHandler {

    private BrevBestillerTjeneste brevBestillerApplikasjonTjeneste;
    private HendelseRepository hendelseRepository;

    public ProduserBrevTask() {
        //CDI
    }

    @Inject
    public ProduserBrevTask(BrevBestillerTjeneste brevBestillerApplikasjonTjeneste, HendelseRepository hendelseRepository) {
        this.brevBestillerApplikasjonTjeneste = brevBestillerApplikasjonTjeneste;
        this.hendelseRepository = hendelseRepository;
    }

    @Override
    public void doTask(ProsessTaskData prosessTaskData) {
        // The task property holds the event id as a decimal string;
        // Long.parseLong throws (failing the task) if it is missing/invalid.
        long hendelseId = Long.parseLong(prosessTaskData.getPropertyValue(BrevTaskProperties.HENDELSE_ID));
        brevBestillerApplikasjonTjeneste.bestillBrev(hendelseRepository.hentDokumentHendelseMedId(hendelseId));
    }
}
evasyuk/project-zxsvm
src/api/pieces/baseApi.js
<gh_stars>0 export default class BaseApi { constructor(apiClient) { if (!apiClient) throw new Error('[apiClient] required') this.apiClient = apiClient } }
gunnarmorling/waltz
waltz-test/src/main/java/com/wepay/waltz/test/smoketest/TxnContext.java
package com.wepay.waltz.test.smoketest;

import com.wepay.riff.util.Logging;
import com.wepay.waltz.client.PartitionLocalLock;
import com.wepay.waltz.client.TransactionBuilder;
import com.wepay.waltz.client.TransactionContext;
import org.slf4j.Logger;

import java.util.Collections;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentMap;

/**
 * Transaction context used by the smoke test: writes a randomly generated
 * UUID as the transaction payload under a partition-local lock, and exposes
 * a future that completes with the transaction's outcome.
 */
class TxnContext extends TransactionContext {

    private static final Logger logger = Logging.getLogger(TxnContext.class);

    // Mask to clear the sign bit so hashCode-based partitioning is non-negative.
    private static final int REMOVE_SIGN_BIT = 0x7FFFFFFF;

    public final UUID uuid;
    public final CompletableFuture<Boolean> completionFuture = new CompletableFuture<>();

    private final int lock;
    // Map of already-applied transaction UUIDs; used to detect duplicates on retry.
    private final ConcurrentMap<UUID, Long> applied;
    private final int clientId;

    // Number of times execute() has been invoked; > 1 means the client retried.
    private int execCount = 0;

    TxnContext(int clientId, int lock, ConcurrentMap<UUID, Long> applied) {
        this.clientId = clientId;
        this.uuid = UUID.randomUUID();
        this.lock = lock;
        this.applied = applied;
    }

    /** Derives the partition from the UUID's hash, forced non-negative. */
    @Override
    public int partitionId(int numPartitions) {
        return (uuid.hashCode() & REMOVE_SIGN_BIT) % numPartitions;
    }

    /**
     * Builds the transaction. On a retry ('+' printed), logs an error if the
     * UUID was already applied ('!' printed) — that would mean the same
     * transaction executed twice. Always returns true (never aborts).
     */
    @Override
    public boolean execute(TransactionBuilder builder) {
        if (execCount++ > 0) {
            // retry
            System.out.print("+");
            if (applied.containsKey(uuid)) {
                System.out.print('!');
                logger.error("duplicate transaction executed: client=" + clientId);
            }
        }
        builder.setHeader(0);
        builder.setTransactionData(uuid, TxnSerializer.INSTANCE);
        builder.setWriteLocks(Collections.singletonList(new PartitionLocalLock("test", lock)));
        return true;
    }

    /** Resolves the completion future with the commit result. */
    @Override
    public void onCompletion(boolean result) {
        completionFuture.complete(result);
    }

    /** Logs the failure; note the future is NOT completed here. */
    @Override
    public void onException(Throwable ex) {
        logger.error("exception: ", ex);
    }

    @Override
    public String toString() {
        return "[" + clientId + ":" + uuid + "]";
    }
}
TheAgentK/MetaDrone
game/Docs/html/search/files_13.js
// Doxygen-generated search index fragment (files search page).
// Do not edit by hand; regenerated by Doxygen.
var searchData=
[
  ['vectortemplates_2ecs',['VectorTemplates.cs',['../_vector_templates_8cs.html',1,'']]]
];
matheusmr13/SaasAPI
backend/src/main/java/br/com/cyop/exception/NotFoundException.java
<reponame>matheusmr13/SaasAPI<filename>backend/src/main/java/br/com/cyop/exception/NotFoundException.java package br.com.cyop.exception; public class NotFoundException extends RuntimeException { }
kadaan/consulate
config/client.go
// Copyright © 2018 <NAME> <<EMAIL>>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package config

import "time"

const (
	// DefaultQueryIdleConnectionTimeout is the default maximum amount
	// of time an idle (keep-alive) connection will remain idle before
	// closing itself.
	DefaultQueryIdleConnectionTimeout = 90 * time.Second

	// DefaultQueryMaxIdleConnectionCount is the default maximum number
	// of idle (keep-alive) connections across all hosts.
	DefaultQueryMaxIdleConnectionCount = 100

	// DefaultQueryTimeout is the default time limit for requests.
	DefaultQueryTimeout = 5 * time.Second
)

// ClientConfig represents the configuration for the http.Client.
type ClientConfig struct {
	QueryTimeout                time.Duration // time limit per request
	QueryMaxIdleConnectionCount int           // max idle connections across all hosts
	QueryIdleConnectionTimeout  time.Duration // how long an idle connection may linger
}

// DefaultClientConfig gets a default ClientConfig.
// All fields are populated from the Default* constants above.
func DefaultClientConfig() *ClientConfig {
	return &ClientConfig{
		QueryIdleConnectionTimeout:  DefaultQueryIdleConnectionTimeout,
		QueryMaxIdleConnectionCount: DefaultQueryMaxIdleConnectionCount,
		QueryTimeout:                DefaultQueryTimeout,
	}
}
Bam4d/BrowserHack
src/save.c
/* File: src/save.c — NetHack save-game writer (emscripten/WEB_GRAPHICS port). */
/*	SCCS Id: @(#)save.c	3.4	2003/11/14	*/
/* Copyright (c) <NAME>, Amsterdam, 1985. */
/* NetHack may be freely redistributed.  See license for details. */

#include "hack.h"
#include "lev.h"
#include "quest.h"

#ifdef WEB_GRAPHICS
#include <emscripten.h>
#endif

#ifndef NO_SIGNAL
#include <signal.h>
#endif

#if !defined(LSC) && !defined(O_WRONLY) && !defined(AZTEC_C)
#include <fcntl.h>
#endif

#ifdef MFLOPPY
/* running total of bytes that would be written; used for disk-space checks */
long bytes_counted;
static int count_only;
#endif

#ifdef MICRO
int dotcnt, dotrow;	/* also used in restore */
#endif

#ifdef ZEROCOMP
STATIC_DCL void FDECL(bputc, (int));
#endif
STATIC_DCL void FDECL(savelevchn, (int,int));
STATIC_DCL void FDECL(savedamage, (int,int));
STATIC_DCL void FDECL(saveobjchn, (int,struct obj *,int));
STATIC_DCL void FDECL(savemonchn, (int,struct monst *,int));
STATIC_DCL void FDECL(savetrapchn, (int,struct trap *,int));
STATIC_DCL void FDECL(savegamestate, (int,int));
#ifdef MFLOPPY
STATIC_DCL void FDECL(savelev0, (int,XCHAR_P,int));
STATIC_DCL boolean NDECL(swapout_oldest);
STATIC_DCL void FDECL(copyfile, (char *,char *));
#endif /* MFLOPPY */

#ifdef GCC_WARN
static long nulls[10];
#else
#define nulls nul
#endif

/* HUP guards UI output: skip it once a hangup has been seen. */
#if defined(UNIX) || defined(VMS) || defined(__EMX__) || defined(WIN32)
#define HUP	if (!program_state.done_hup)
#else
#define HUP
#endif

/* need to preserve these during save to avoid accessing freed memory */
static unsigned ustuck_id = 0, usteed_id = 0;

/* Interactive "save" command: confirm with the player, run dosave0(), and
 * on success shut the interface down and terminate the process. Returns 0
 * (no game time consumed). */
int
dosave()
{
	clear_nhwindow(WIN_MESSAGE);
	if(yn("Really save?") == 'n') {
		clear_nhwindow(WIN_MESSAGE);
		if(multi > 0) nomul(0);
	} else {
		clear_nhwindow(WIN_MESSAGE);
		pline("Saving...");
#if defined(UNIX) || defined(VMS) || defined(__EMX__)
		program_state.done_hup = 0;
#endif
		if(dosave0()) {
			program_state.something_worth_saving = 0;
			u.uhp = -1;		/* universal game's over indicator */
			/* make sure they see the Saving message */
			display_nhwindow(WIN_MESSAGE, TRUE);
			exit_nhwindows("Be seeing you...");
			terminate(EXIT_SUCCESS);
		} else (void)doredraw();
	}
	return 0;
}
/* Signal handler for SIGHUP (and friends): attempt an emergency save once,
 * then release locks and exit.  Guarded by done_hup so a second hangup
 * during the save cannot recurse. */
#if defined(UNIX) || defined(VMS) || defined (__EMX__) || defined(WIN32)
/*ARGSUSED*/
void
hangup(sig_unused)	/* called as signal() handler, so sent at least one arg */
int sig_unused;
{
# ifdef NOSAVEONHANGUP
	(void) signal(SIGINT, SIG_IGN);
	clearlocks();
# ifndef VMS
	terminate(EXIT_FAILURE);
# endif
# else	/* SAVEONHANGUP */
	if (!program_state.done_hup++) {
	    if (program_state.something_worth_saving)
		(void) dosave0();
# ifdef VMS
	    /* don't call exit when already within an exit handler;
	       that would cancel any other pending user-mode handlers */
	    if (!program_state.exiting)
# endif
	    {
		clearlocks();
		terminate(EXIT_FAILURE);
	    }
	}
# endif
	return;
}
#endif

/* returns 1 if save successful */
/* Core save routine: writes version, current level and game state, then
 * copies every other existing level file into the save file and deletes
 * the per-level files.  Also used from hangup(), hence the HUP guards. */
int
dosave0()
{
	const char *fq_save;
	register int fd, ofd;
	xchar ltmp;
	d_level uz_save;
	char whynot[BUFSZ];

	if (!SAVEF[0])
		return 0;
	fq_save = fqname(SAVEF, SAVEPREFIX, 1);	/* level files take 0 */

#if defined(UNIX) || defined(VMS)
	(void) signal(SIGHUP, SIG_IGN);
#endif
#ifndef NO_SIGNAL
	(void) signal(SIGINT, SIG_IGN);
#endif

#if defined(MICRO) && defined(MFLOPPY)
	if (!saveDiskPrompt(0)) return 0;
#endif

	HUP if (iflags.window_inited) {
	    uncompress(fq_save);
	    fd = open_savefile();
	    if (fd > 0) {
		(void) close(fd);
		clear_nhwindow(WIN_MESSAGE);
		There("seems to be an old save file.");
		if (yn("Overwrite the old file?") == 'n') {
		    compress(fq_save);
		    return 0;
		}
	    }
	}

	HUP mark_synch();	/* flush any buffered screen output */

	fd = create_savefile();
	if(fd < 0) {
		HUP pline("Cannot open save file.");
		(void) delete_savefile();	/* ab@unido */
		return(0);
	}

	vision_recalc(2);	/* shut down vision to prevent problems
				   in the event of an impossible() call */

	/* undo date-dependent luck adjustments made at startup time */
	if(flags.moonphase == FULL_MOON)	/* ut-sally!fletcher */
		change_luck(-1);		/* and unido!ab */
	if(flags.friday13)
		change_luck(1);
	if(iflags.window_inited)
	    HUP clear_nhwindow(WIN_MESSAGE);
#ifdef MICRO
	dotcnt = 0;
	dotrow = 2;
	curs(WIN_MAP, 1, 1);
	if (strncmpi("X11", windowprocs.name, 3))
	  putstr(WIN_MAP, 0, "Saving:");
#endif
#ifdef MFLOPPY
	/* make sure there is enough disk space */
	if (iflags.checkspace) {
	    long fds, needed;

	    savelev(fd, ledger_no(&u.uz), COUNT_SAVE);
	    savegamestate(fd, COUNT_SAVE);
	    needed = bytes_counted;

	    for (ltmp = 1; ltmp <= maxledgerno(); ltmp++)
		if (ltmp != ledger_no(&u.uz) && level_info[ltmp].where)
		    needed += level_info[ltmp].size + (sizeof ltmp);
	    fds = freediskspace(fq_save);
	    if (needed > fds) {
		HUP {
		    There("is insufficient space on SAVE disk.");
		    pline("Require %ld bytes but only have %ld.", needed, fds);
		}
		flushout();
		(void) close(fd);
		(void) delete_savefile();
		return 0;
	    }
	    co_false();
	}
#endif /* MFLOPPY */

	store_version(fd);
#ifdef STORE_PLNAME_IN_FILE
	bwrite(fd, (genericptr_t) plname, PL_NSIZ);
#endif
	/* stash monster ids now: the monst structs are freed by FREE_SAVE */
	ustuck_id = (u.ustuck ? u.ustuck->m_id : 0);
#ifdef STEED
	usteed_id = (u.usteed ? u.usteed->m_id : 0);
#endif
	savelev(fd, ledger_no(&u.uz), WRITE_SAVE | FREE_SAVE);
	savegamestate(fd, WRITE_SAVE | FREE_SAVE);

	/* While copying level files around, zero out u.uz to keep
	 * parts of the restore code from completely initializing all
	 * in-core data structures, since all we're doing is copying.
	 * This also avoids at least one nasty core dump.
	 */
	uz_save = u.uz;
	u.uz.dnum = u.uz.dlevel = 0;
	/* these pointers are no longer valid, and at least u.usteed
	 * may mislead place_monster() on other levels
	 */
	u.ustuck = (struct monst *)0;
#ifdef STEED
	u.usteed = (struct monst *)0;
#endif

	for(ltmp = (xchar)1; ltmp <= maxledgerno(); ltmp++) {
		if (ltmp == ledger_no(&uz_save)) continue;
		if (!(level_info[ltmp].flags & LFILE_EXISTS)) continue;
#ifdef MICRO
		curs(WIN_MAP, 1 + dotcnt++, dotrow);
		if (dotcnt >= (COLNO - 1)) {
			dotrow++;
			dotcnt = 0;
		}
		if (strncmpi("X11", windowprocs.name, 3)){
		  putstr(WIN_MAP, 0, ".");
		}
		mark_synch();
#endif
		ofd = open_levelfile(ltmp, whynot);
		if (ofd < 0) {
		    HUP pline("%s", whynot);
		    (void) close(fd);
		    (void) delete_savefile();
		    HUP killer = whynot;
		    HUP done(TRICKED);
		    return(0);
		}
		minit();	/* ZEROCOMP */
		getlev(ofd, hackpid, ltmp, FALSE);
		(void) close(ofd);
		bwrite(fd, (genericptr_t) &ltmp, sizeof ltmp); /* level number*/
		savelev(fd, ltmp, WRITE_SAVE | FREE_SAVE);     /* actual level*/
		delete_levelfile(ltmp);
	}
	bclose(fd);

	u.uz = uz_save;

	/* get rid of current level --jgm */
	delete_levelfile(ledger_no(&u.uz));
	delete_levelfile(0);
	compress(fq_save);
#ifdef WEB_GRAPHICS
	/* need manual sync for emscripten */
	EM_ASM(
	    FS.syncfs(function (err) {
	      if(err)
	        console.log('Cannot sync FS, savegame may not work!');
	    }););
#endif
	return(1);
}

/* Write (and, per mode, free) all game-wide state: uid, flags, the player
 * struct, global timers/light sources, inventory and migrating chains,
 * dungeon overview, quest status, spellbook, artifacts, oracles, fruit
 * names, monster/object naming data and the water level. */
STATIC_OVL void
savegamestate(fd, mode)
register int fd, mode;
{
	int uid;

#ifdef MFLOPPY
	count_only = (mode & COUNT_SAVE);
#endif
	uid = getuid();
	bwrite(fd, (genericptr_t) &uid, sizeof uid);
	bwrite(fd, (genericptr_t) &flags, sizeof(struct flag));
	bwrite(fd, (genericptr_t) &u, sizeof(struct you));

	/* must come before migrating_objs and migrating_mons are freed */
	save_timers(fd, mode, RANGE_GLOBAL);
	save_light_sources(fd, mode, RANGE_GLOBAL);

	saveobjchn(fd, invent, mode);
	saveobjchn(fd, migrating_objs, mode);
	savemonchn(fd, migrating_mons, mode);
	if (release_data(mode)) {
	    invent = 0;
	    migrating_objs = 0;
	    migrating_mons = 0;
	}
	bwrite(fd, (genericptr_t) mvitals, sizeof(mvitals));
	save_dungeon(fd, (boolean)!!perform_bwrite(mode),
			 (boolean)!!release_data(mode));
	savelevchn(fd, mode);
	bwrite(fd, (genericptr_t) &moves, sizeof moves);
	bwrite(fd, (genericptr_t) &monstermoves, sizeof monstermoves);
	bwrite(fd, (genericptr_t) &quest_status, sizeof(struct q_score));
	bwrite(fd, (genericptr_t) spl_book,
				sizeof(struct spell) * (MAXSPELL + 1));
	save_artifacts(fd);
	save_oracles(fd, mode);
	/* ids captured in dosave0()/savestateinlock() before monsters were freed */
	if(ustuck_id)
	    bwrite(fd, (genericptr_t) &ustuck_id, sizeof ustuck_id);
#ifdef STEED
	if(usteed_id)
	    bwrite(fd, (genericptr_t) &usteed_id, sizeof usteed_id);
#endif
	bwrite(fd, (genericptr_t) pl_character, sizeof pl_character);
	bwrite(fd, (genericptr_t) pl_fruit, sizeof pl_fruit);
	bwrite(fd, (genericptr_t) &current_fruit, sizeof current_fruit);
	savefruitchn(fd, mode);
	savenames(fd, mode);
	save_waterlevel(fd, mode);
	bflush(fd);
}

#ifdef INSURANCE
/* Checkpointing: rewrite the level.0 lock file with our pid and, when
 * checkpointing is enabled, a full copy of the game state for crash
 * recovery. */
void
savestateinlock()
{
	int fd, hpid;
	static boolean havestate = TRUE;
	char whynot[BUFSZ];

	/* When checkpointing is on, the full state needs to be written
	 * on each checkpoint.  When checkpointing is off, only the pid
	 * needs to be in the level.0 file, so it does not need to be
	 * constantly rewritten.  When checkpointing is turned off during
	 * a game, however, the file has to be rewritten once to truncate
	 * it and avoid restoring from outdated information.
	 *
	 * Restricting havestate to this routine means that an additional
	 * noop pid rewriting will take place on the first "checkpoint" after
	 * the game is started or restored, if checkpointing is off.
	 */
	if (flags.ins_chkpt || havestate) {
		/* save the rest of the current game state in the lock file,
		 * following the original int pid, the current level number,
		 * and the current savefile name, which should not be subject
		 * to any internal compression schemes since they must be
		 * readable by an external utility
		 */
		fd = open_levelfile(0, whynot);
		if (fd < 0) {
		    pline("%s", whynot);
		    pline("Probably someone removed it.");
		    killer = whynot;
		    done(TRICKED);
		    return;
		}

		(void) read(fd, (genericptr_t) &hpid, sizeof(hpid));
		if (hackpid != hpid) {
		    Sprintf(whynot,
			    "Level #0 pid (%d) doesn't match ours (%d)!",
			    hpid, hackpid);
		    pline("%s", whynot);
		    killer = whynot;
		    done(TRICKED);
		}
		(void) close(fd);

		fd = create_levelfile(0, whynot);
		if (fd < 0) {
		    pline("%s", whynot);
		    killer = whynot;
		    done(TRICKED);
		    return;
		}
		(void) write(fd, (genericptr_t) &hackpid, sizeof(hackpid));
		if (flags.ins_chkpt) {
		    int currlev = ledger_no(&u.uz);

		    (void) write(fd, (genericptr_t) &currlev, sizeof(currlev));
		    save_savefile_name(fd);
		    store_version(fd);
#ifdef STORE_PLNAME_IN_FILE
		    bwrite(fd, (genericptr_t) plname, PL_NSIZ);
#endif
		    ustuck_id = (u.ustuck ? u.ustuck->m_id : 0);
#ifdef STEED
		    usteed_id = (u.usteed ? u.usteed->m_id : 0);
#endif
		    savegamestate(fd, WRITE_SAVE);
		}
		bclose(fd);
	}
	havestate = flags.ins_chkpt;
}
#endif

#ifdef MFLOPPY
/* MFLOPPY wrapper around savelev0(): handles byte counting for the disk
 * space check and bookkeeping in level_info[]. */
boolean
savelev(fd, lev, mode)
int fd;
xchar lev;
int mode;
{
	if (mode & COUNT_SAVE) {
		bytes_counted = 0;
		savelev0(fd, lev, COUNT_SAVE);
		/* probably bytes_counted will be filled in again by an
		 * immediately following WRITE_SAVE anyway, but we'll
		 * leave it out of checkspace just in case */
		if (iflags.checkspace) {
			while (bytes_counted > freediskspace(levels))
				if (!swapout_oldest()) return FALSE;
		}
	}
	if (mode & (WRITE_SAVE | FREE_SAVE)) {
		bytes_counted = 0;
		savelev0(fd, lev, mode);
	}
	if (mode != FREE_SAVE) {
		level_info[lev].where = ACTIVE;
		level_info[lev].time = moves;
		level_info[lev].size = bytes_counted;
	}
	return TRUE;
}

STATIC_OVL void
savelev0(fd,lev,mode)
#else
/* Write (and, per mode, free) one level: the map (optionally run-length
 * encoded), stairs, level flags, then the monster/trap/object chains and
 * level-local timers, engravings, damage and regions. */
void
savelev(fd,lev,mode)
#endif
int fd;
xchar lev;
int mode;
{
#ifdef TOS
	short tlev;
#endif

	/* if we're tearing down the current level without saving anything
	   (which happens upon entrance to the endgame or after an aborted
	   restore attempt) then we don't want to do any actual I/O */
	if (mode == FREE_SAVE) goto skip_lots;
	if (iflags.purge_monsters) {
		/* purge any dead monsters (necessary if we're starting
		 * a panic save rather than a normal one, or sometimes
		 * when changing levels without taking time -- e.g.
		 * create statue trap then immediately level teleport) */
		dmonsfree();
	}

	if(fd < 0) panic("Save on bad file!");	/* impossible */
#ifdef MFLOPPY
	count_only = (mode & COUNT_SAVE);
#endif
	if (lev >= 0 && lev <= maxledgerno())
	    level_info[lev].flags |= VISITED;
	bwrite(fd,(genericptr_t) &hackpid,sizeof(hackpid));
#ifdef TOS
	tlev=lev; tlev &= 0x00ff;
	bwrite(fd,(genericptr_t) &tlev,sizeof(tlev));
#else
	bwrite(fd,(genericptr_t) &lev,sizeof(lev));
#endif
#ifdef RLECOMP
	{
		/* perform run-length encoding of rm structs */
		struct rm *prm, *rgrm;
		int x, y;
		uchar match;

		rgrm = &levl[0][0];	/* start matching at first rm */
		match = 0;

		for (y = 0; y < ROWNO; y++) {
		    for (x = 0; x < COLNO; x++) {
			prm = &levl[x][y];
			if (prm->glyph == rgrm->glyph
			    && prm->typ == rgrm->typ
			    && prm->seenv == rgrm->seenv
			    && prm->horizontal == rgrm->horizontal
			    && prm->flags == rgrm->flags
			    && prm->lit == rgrm->lit
			    && prm->waslit == rgrm->waslit
			    && prm->roomno == rgrm->roomno
			    && prm->edge == rgrm->edge) {
				match++;
				if (match > 254) {
				    match = 254;	/* undo this match */
				    goto writeout;
				}
			} else {
			    /* the run has been broken,
			     * write out run-length encoding */
			writeout:
			    bwrite(fd, (genericptr_t)&match, sizeof(uchar));
			    bwrite(fd, (genericptr_t)rgrm, sizeof(struct rm));

			    /* start encoding again. we have at least 1 rm
			     * in the next run, viz. this one.
			     */
			    match = 1;
			    rgrm = prm;
			}
		    }
		}
		if (match > 0) {
		    bwrite(fd, (genericptr_t)&match, sizeof(uchar));
		    bwrite(fd, (genericptr_t)rgrm, sizeof(struct rm));
		}
	}
#else
	bwrite(fd,(genericptr_t) levl,sizeof(levl));
#endif /* RLECOMP */

	bwrite(fd,(genericptr_t) &monstermoves,sizeof(monstermoves));
	bwrite(fd,(genericptr_t) &upstair,sizeof(stairway));
	bwrite(fd,(genericptr_t) &dnstair,sizeof(stairway));
	bwrite(fd,(genericptr_t) &upladder,sizeof(stairway));
	bwrite(fd,(genericptr_t) &dnladder,sizeof(stairway));
	bwrite(fd,(genericptr_t) &sstairs,sizeof(stairway));
	bwrite(fd,(genericptr_t) &updest,sizeof(dest_area));
	bwrite(fd,(genericptr_t) &dndest,sizeof(dest_area));
	bwrite(fd,(genericptr_t) &level.flags,sizeof(level.flags));
	bwrite(fd, (genericptr_t) doors, sizeof(doors));
	save_rooms(fd);	/* no dynamic memory to reclaim */

	/* from here on out, saving also involves allocated memory cleanup */
 skip_lots:
	/* must be saved before mons, objs, and buried objs */
	save_timers(fd, mode, RANGE_LEVEL);
	save_light_sources(fd, mode, RANGE_LEVEL);

	savemonchn(fd, fmon, mode);
	save_worm(fd, mode);	/* save worm information */
	savetrapchn(fd, ftrap, mode);
	saveobjchn(fd, fobj, mode);
	saveobjchn(fd, level.buriedobjlist, mode);
	saveobjchn(fd, billobjs, mode);
	if (release_data(mode)) {
	    fmon = 0;
	    ftrap = 0;
	    fobj = 0;
	    level.buriedobjlist = 0;
	    billobjs = 0;
	}
	save_engravings(fd, mode);
	savedamage(fd, mode);
	save_regions(fd, mode);
	if (mode != FREE_SAVE) bflush(fd);
}

#ifdef ZEROCOMP
/* The runs of zero-run compression are flushed after the game state or a
 * level is written out.  This adds a couple bytes to a save file, where
 * the runs could be mashed together, but it allows gluing together game
 * state and level files to form a save file, and it means the flushing
 * does not need to be specifically called for every other time a level
 * file is written out.
 */

#define RLESC '\0'	/* Leading character for run of LRESC's */
#define flushoutrun(ln) (bputc(RLESC), bputc(ln), ln = -1)

#ifndef ZEROCOMP_BUFSIZ
# define ZEROCOMP_BUFSIZ BUFSZ
#endif
static NEARDATA unsigned char outbuf[ZEROCOMP_BUFSIZ];
static NEARDATA unsigned short outbufp = 0;
static NEARDATA short outrunlength = -1;
static NEARDATA int bwritefd;
static NEARDATA boolean compressing = FALSE;

/*dbg()
{
    HUP printf("outbufp %d outrunlength %d\n", outbufp,outrunlength);
}*/

/* Buffer one output byte, flushing the buffer to bwritefd when full. */
STATIC_OVL void
bputc(c)
int c;
{
#ifdef MFLOPPY
    bytes_counted++;
    if (count_only) return;
#endif
    if (outbufp >= sizeof outbuf) {
	(void) write(bwritefd, outbuf, sizeof outbuf);
	outbufp = 0;
    }
    outbuf[outbufp++] = (unsigned char)c;
}

/* Enable zero-run compression for subsequent bwrite() calls. */
/*ARGSUSED*/
void
bufon(fd)
int fd;
{
    compressing = TRUE;
    return;
}

/* Disable compression; buffered data must already have been flushed. */
/*ARGSUSED*/
void
bufoff(fd)
int fd;
{
    if (outbufp) {
	outbufp = 0;
	panic("closing file with buffered data still unwritten");
    }
    outrunlength = -1;
    compressing = FALSE;
    return;
}

void
bflush(fd)  /* flush run and buffer */
register int fd;
{
    bwritefd = fd;
    if (outrunlength >= 0) {	/* flush run */
	flushoutrun(outrunlength);
    }
#ifdef MFLOPPY
    if (count_only) outbufp = 0;
#endif

    if (outbufp) {
	if (write(fd, outbuf, outbufp) != outbufp) {
#if defined(UNIX) || defined(VMS) || defined(__EMX__)
	    if (program_state.done_hup)
		terminate(EXIT_FAILURE);
	    else
#endif
		bclose(fd);	/* panic (outbufp != 0) */
	}
	outbufp = 0;
    }
}

/* Write num bytes from loc, zero-run compressing when enabled. */
void
bwrite(fd, loc, num)
int fd;
genericptr_t loc;
register unsigned num;
{
    register unsigned char *bp = (unsigned char *)loc;

    if (!compressing) {
#ifdef MFLOPPY
	bytes_counted += num;
	if (count_only) return;
#endif
	if ((unsigned) write(fd, loc, num) != num) {
#if defined(UNIX) || defined(VMS) || defined(__EMX__)
	    if (program_state.done_hup)
		terminate(EXIT_FAILURE);
	    else
#endif
		panic("cannot write %u bytes to file #%d", num, fd);
	}
    } else {
	bwritefd = fd;
	for (; num; num--, bp++) {
	    if (*bp == RLESC) {	/* One more char in run */
		if (++outrunlength == 0xFF) {
		    flushoutrun(outrunlength);
		}
	    } else {		/* end of run */
		if (outrunlength >= 0) {	/* flush run */
		    flushoutrun(outrunlength);
		}
		bputc(*bp);
	    }
	}
    }
}

void
bclose(fd)
int fd;
{
    bufoff(fd);
    (void) close(fd);
    return;
}

#else /* ZEROCOMP */

/* Non-ZEROCOMP buffered output: a single stdio stream wrapped around one
 * fd at a time (UNIX only). */
static int bw_fd = -1;
static FILE *bw_FILE = 0;
static boolean buffering = FALSE;

void
bufon(fd)
    int fd;
{
#ifdef UNIX
    if(bw_fd >= 0)
	panic("double buffering unexpected");
    bw_fd = fd;
    if((bw_FILE = fdopen(fd, "w")) == 0)
	panic("buffering of file %d failed", fd);
#endif
    buffering = TRUE;
}

void
bufoff(fd)
int fd;
{
    bflush(fd);
    buffering = FALSE;
}

void
bflush(fd)
int fd;
{
#ifdef UNIX
    if(fd == bw_fd) {
	if(fflush(bw_FILE) == EOF)
	    panic("flush of savefile failed!");
    }
#endif
    return;
}

/* Write num bytes from loc to fd, buffered or raw; panics on short write. */
void
bwrite(fd,loc,num)
register int fd;
register genericptr_t loc;
register unsigned num;
{
    boolean failed;

#ifdef MFLOPPY
    bytes_counted += num;
    if (count_only) return;
#endif

#ifdef UNIX
    if (buffering) {
	if(fd != bw_fd)
	    panic("unbuffered write to fd %d (!= %d)", fd, bw_fd);

	failed = (fwrite(loc, (int)num, 1, bw_FILE) != 1);
    } else
#endif /* UNIX */
    {
/* lint wants the 3rd arg of write to be an int; lint -p an unsigned */
#if defined(BSD) || defined(ULTRIX)
	failed = (write(fd, loc, (int)num) != (int)num);
#else /* e.g. SYSV, __TURBOC__ */
	failed = (write(fd, loc, num) != num);
#endif
    }

    if (failed) {
#if defined(UNIX) || defined(VMS) || defined(__EMX__)
	if (program_state.done_hup)
	    terminate(EXIT_FAILURE);
	else
#endif
	    panic("cannot write %u bytes to file #%d", num, fd);
    }
}

void
bclose(fd)
int fd;
{
    bufoff(fd);
#ifdef UNIX
    if (fd == bw_fd) {
	(void) fclose(bw_FILE);
	bw_fd = -1;
	bw_FILE = 0;
    } else
#endif
	(void) close(fd);
    return;
}
#endif /* ZEROCOMP */

/* Write (and, per mode, free) the special-level chain, preceded by a count. */
STATIC_OVL void
savelevchn(fd, mode)
register int fd, mode;
{
	s_level	*tmplev, *tmplev2;
	int cnt = 0;

	for (tmplev = sp_levchn; tmplev; tmplev = tmplev->next) cnt++;
	if (perform_bwrite(mode))
	    bwrite(fd, (genericptr_t) &cnt, sizeof(int));

	for (tmplev = sp_levchn; tmplev; tmplev = tmplev2) {
	    tmplev2 = tmplev->next;
	    if (perform_bwrite(mode))
		bwrite(fd, (genericptr_t) tmplev, sizeof(s_level));
	    if (release_data(mode))
		free((genericptr_t) tmplev);
	}
	if (release_data(mode))
	    sp_levchn = 0;
}

/* Write (and, per mode, free) the level's shop-damage list, count first. */
STATIC_OVL void
savedamage(fd, mode)
register int fd, mode;
{
	register struct damage *damageptr, *tmp_dam;
	unsigned int xl = 0;

	damageptr = level.damagelist;
	for (tmp_dam = damageptr; tmp_dam; tmp_dam = tmp_dam->next)
	    xl++;
	if (perform_bwrite(mode))
	    bwrite(fd, (genericptr_t) &xl, sizeof(xl));

	while (xl--) {
	    if (perform_bwrite(mode))
		bwrite(fd, (genericptr_t) damageptr, sizeof(*damageptr));
	    tmp_dam = damageptr;
	    damageptr = damageptr->next;
	    if (release_data(mode))
		free((genericptr_t)tmp_dam);
	}
	if (release_data(mode))
	    level.damagelist = 0;
}

/* Write (and, per mode, free) an object chain, recursing into containers;
 * each object is prefixed with its extra-data length, terminated by -1. */
STATIC_OVL void
saveobjchn(fd, otmp, mode)
register int fd, mode;
register struct obj *otmp;
{
	register struct obj *otmp2;
	unsigned int xl;
	int minusone = -1;

	while(otmp) {
	    otmp2 = otmp->nobj;
	    if (perform_bwrite(mode)) {
		xl = otmp->oxlth + otmp->onamelth;
		bwrite(fd, (genericptr_t) &xl, sizeof(int));
		bwrite(fd, (genericptr_t) otmp, xl + sizeof(struct obj));
	    }
	    if (Has_contents(otmp))
		saveobjchn(fd,otmp->cobj,mode);
	    if (release_data(mode)) {
		if (otmp->oclass == FOOD_CLASS) food_disappears(otmp);
		if (otmp->oclass == SPBOOK_CLASS) book_disappears(otmp);
		otmp->where = OBJ_FREE;	/* set to free so dealloc will work */
		otmp->timed = 0;	/* not timed any more */
		otmp->lamplit = 0;	/* caller handled lights */
		dealloc_obj(otmp);
	    }
	    otmp = otmp2;
	}
	if (perform_bwrite(mode))
	    bwrite(fd, (genericptr_t) &minusone, sizeof(int));
}

/* Write (and, per mode, free) a monster chain plus inventories; leads with
 * the address of mons[0] so restore can relocate permonst pointers. */
STATIC_OVL void
savemonchn(fd, mtmp, mode)
register int fd, mode;
register struct monst *mtmp;
{
	register struct monst *mtmp2;
	unsigned int xl;
	int minusone = -1;
	struct permonst *monbegin = &mons[0];

	if (perform_bwrite(mode))
	    bwrite(fd, (genericptr_t) &monbegin, sizeof(monbegin));

	while (mtmp) {
	    mtmp2 = mtmp->nmon;
	    if (perform_bwrite(mode)) {
		xl = mtmp->mxlth + mtmp->mnamelth;
		bwrite(fd, (genericptr_t) &xl, sizeof(int));
		bwrite(fd, (genericptr_t) mtmp, xl + sizeof(struct monst));
	    }
	    if (mtmp->minvent)
		saveobjchn(fd,mtmp->minvent,mode);
	    if (release_data(mode))
		dealloc_monst(mtmp);
	    mtmp = mtmp2;
	}
	if (perform_bwrite(mode))
	    bwrite(fd, (genericptr_t) &minusone, sizeof(int));
}

/* Write (and, per mode, free) the trap chain, terminated by a null trap. */
STATIC_OVL void
savetrapchn(fd, trap, mode)
register int fd, mode;
register struct trap *trap;
{
	register struct trap *trap2;

	while (trap) {
	    trap2 = trap->ntrap;
	    if (perform_bwrite(mode))
		bwrite(fd, (genericptr_t) trap, sizeof(struct trap));
	    if (release_data(mode))
		dealloc_trap(trap);
	    trap = trap2;
	}
	if (perform_bwrite(mode))
	    bwrite(fd, (genericptr_t)nulls, sizeof(struct trap));
}

/* save all the fruit names and ID's; this is used only in saving whole games
 * (not levels) and in saving bones levels.  When saving a bones level,
 * we only want to save the fruits which exist on the bones level; the bones
 * level routine marks nonexistent fruits by making the fid negative.
 */
void
savefruitchn(fd, mode)
register int fd, mode;
{
	register struct fruit *f2, *f1;

	f1 = ffruit;
	while (f1) {
	    f2 = f1->nextf;
	    if (f1->fid >= 0 && perform_bwrite(mode))
		bwrite(fd, (genericptr_t) f1, sizeof(struct fruit));
	    if (release_data(mode))
		dealloc_fruit(f1);
	    f1 = f2;
	}
	if (perform_bwrite(mode))
	    bwrite(fd, (genericptr_t)nulls, sizeof(struct fruit));
	if (release_data(mode))
	    ffruit = 0;
}

/* also called by prscore(); this probably belongs in dungeon.c... */
void
free_dungeons()
{
#ifdef FREE_ALL_MEMORY
	savelevchn(0, FREE_SAVE);
	save_dungeon(0, FALSE, TRUE);
#endif
	return;
}

/* Release all dynamically allocated game data; the save routines double as
 * free routines when invoked with FREE_SAVE and fd 0. */
void
freedynamicdata()
{
	unload_qtlist();
	free_invbuf();	/* let_to_name (invent.c) */
	free_youbuf();	/* You_buf,&c (pline.c) */
	tmp_at(DISP_FREEMEM, 0);	/* temporary display effects */
#ifdef FREE_ALL_MEMORY
# define freeobjchn(X)	(saveobjchn(0, X, FREE_SAVE),  X = 0)
# define freemonchn(X)	(savemonchn(0, X, FREE_SAVE),  X = 0)
# define freetrapchn(X)	(savetrapchn(0, X, FREE_SAVE), X = 0)
# define freefruitchn()	 savefruitchn(0, FREE_SAVE)
# define freenames()	 savenames(0, FREE_SAVE)
# define free_oracles()	save_oracles(0, FREE_SAVE)
# define free_waterlevel()	save_waterlevel(0, FREE_SAVE)
# define free_worm()		save_worm(0, FREE_SAVE)
# define free_timers(R)		save_timers(0, FREE_SAVE, R)
# define free_light_sources(R)	save_light_sources(0, FREE_SAVE, R);
# define free_engravings()	save_engravings(0, FREE_SAVE)
# define freedamage()		savedamage(0, FREE_SAVE)
# define free_animals()		mon_animal_list(FALSE)

	/* move-specific data */
	dmonsfree();		/* release dead monsters */

	/* level-specific data */
	free_timers(RANGE_LEVEL);
	free_light_sources(RANGE_LEVEL);
	freemonchn(fmon);
	free_worm();		/* release worm segment information */
	freetrapchn(ftrap);
	freeobjchn(fobj);
	freeobjchn(level.buriedobjlist);
	freeobjchn(billobjs);
	free_engravings();
	freedamage();

	/* game-state data */
	free_timers(RANGE_GLOBAL);
	free_light_sources(RANGE_GLOBAL);
	freeobjchn(invent);
	freeobjchn(migrating_objs);
	freemonchn(migrating_mons);
	freemonchn(mydogs);		/* ascension or dungeon escape */
	/* freelevchn();	[folded into free_dungeons()] */
	free_animals();
	free_oracles();
	freefruitchn();
	freenames();
	free_waterlevel();
	free_dungeons();

	/* some pointers in iflags */
	if (iflags.wc_font_map) free(iflags.wc_font_map);
	if (iflags.wc_font_message) free(iflags.wc_font_message);
	if (iflags.wc_font_text) free(iflags.wc_font_text);
	if (iflags.wc_font_menu) free(iflags.wc_font_menu);
	if (iflags.wc_font_status) free(iflags.wc_font_status);
	if (iflags.wc_tile_file) free(iflags.wc_tile_file);
#ifdef AUTOPICKUP_EXCEPTIONS
	free_autopickup_exceptions();
#endif

#endif	/* FREE_ALL_MEMORY */
	return;
}

#ifdef MFLOPPY
/* Copy a level file from the permanent disk back to the play area,
 * swapping other levels out if space is short. */
boolean
swapin_file(lev)
int lev;
{
	char to[PATHLEN], from[PATHLEN];

	Sprintf(from, "%s%s", permbones, alllevels);
	Sprintf(to, "%s%s", levels, alllevels);
	set_levelfile_name(from, lev);
	set_levelfile_name(to, lev);
	if (iflags.checkspace) {
		while (level_info[lev].size > freediskspace(to))
			if (!swapout_oldest()) return FALSE;
	}
# ifdef WIZARD
	if (wizard) {
		pline("Swapping in `%s'.", from);
		wait_synch();
	}
# endif
	copyfile(from, to);
	(void) unlink(from);
	level_info[lev].where = ACTIVE;
	return TRUE;
}

/* Move the least recently used ACTIVE level out to permanent storage to
 * free up ramdisk space; returns FALSE if nothing could be swapped. */
STATIC_OVL boolean
swapout_oldest()
{
	char to[PATHLEN], from[PATHLEN];
	int i, oldest;
	long oldtime;

	if (!ramdisk) return FALSE;
	for (i = 1, oldtime = 0, oldest = 0; i <= maxledgerno(); i++)
		if (level_info[i].where == ACTIVE
		&& (!oldtime || level_info[i].time < oldtime)) {
			oldest = i;
			oldtime = level_info[i].time;
		}
	if (!oldest) return FALSE;
	Sprintf(from, "%s%s", levels, alllevels);
	Sprintf(to, "%s%s", permbones, alllevels);
	set_levelfile_name(from, oldest);
	set_levelfile_name(to, oldest);
# ifdef WIZARD
	if (wizard) {
		pline("Swapping out `%s'.", from);
		wait_synch();
	}
# endif
	copyfile(from, to);
	(void) unlink(from);
	level_info[oldest].where = SWAPPED;
	return TRUE;
}

/* Byte-for-byte file copy; panics on any failure. */
STATIC_OVL void
copyfile(from, to)
char *from, *to;
{
# ifdef TOS
	if (_copyfile(from, to))
		panic("Can't copy %s to %s", from, to);
# else
	char buf[BUFSIZ];	/* this is system interaction, therefore
				 * BUFSIZ instead of NetHack's BUFSZ */
	int nfrom, nto, fdfrom, fdto;

	if ((fdfrom = open(from, O_RDONLY | O_BINARY, FCMASK)) < 0)
		panic("Can't copy from %s !?", from);
	if ((fdto = open(to, O_WRONLY | O_BINARY | O_CREAT | O_TRUNC, FCMASK)) < 0)
		panic("Can't copy to %s", to);
	do {
		nfrom = read(fdfrom, buf, BUFSIZ);
		nto = write(fdto, buf, nfrom);
		if (nto != nfrom) panic("Copyfile failed!");
	} while (nfrom == BUFSIZ);
	(void) close(fdfrom);
	(void) close(fdto);
# endif /* TOS */
}

void
co_false()	    /* see comment in bones.c */
{
    count_only = FALSE;
    return;
}
#endif /* MFLOPPY */

/*save.c*/
cybriq/p9
pkg/btcjson/chainsvrwsntfns.go
package btcjson

// Method names for JSON-RPC notifications pushed by the chain server over
// websockets.  Several are legacy/deprecated; each comment names the
// replacement where one exists.
const (
	// BlockConnectedNtfnMethod is the legacy, deprecated method used for notifications from the chain server that a
	// block has been connected. NOTE: Deprecated. Use FilteredBlockConnectedNtfnMethod instead.
	BlockConnectedNtfnMethod = "blockconnected"
	// BlockDisconnectedNtfnMethod is the legacy, deprecated method used for notifications from the chain server that a
	// block has been disconnected. NOTE: Deprecated. Use FilteredBlockDisconnectedNtfnMethod instead.
	BlockDisconnectedNtfnMethod = "blockdisconnected"
	// FilteredBlockConnectedNtfnMethod is the new method used for notifications from the chain server that a block has
	// been connected.
	FilteredBlockConnectedNtfnMethod = "filteredblockconnected"
	// FilteredBlockDisconnectedNtfnMethod is the new method used for notifications from the chain server that a block
	// has been disconnected.
	FilteredBlockDisconnectedNtfnMethod = "filteredblockdisconnected"
	// RecvTxNtfnMethod is the legacy, deprecated method used for notifications from the chain server that a transaction
	// which pays to a registered address has been processed. NOTE: Deprecated. Use RelevantTxAcceptedNtfnMethod and
	// FilteredBlockConnectedNtfnMethod instead.
	RecvTxNtfnMethod = "recvtx"
	// RedeemingTxNtfnMethod is the legacy, deprecated method used for notifications from the chain server that a
	// transaction which spends a registered outpoint has been processed. NOTE: Deprecated. Use
	// RelevantTxAcceptedNtfnMethod and FilteredBlockConnectedNtfnMethod instead.
	RedeemingTxNtfnMethod = "redeemingtx"
	// RescanFinishedNtfnMethod is the legacy, deprecated method used for notifications from the chain server that a
	// legacy, deprecated rescan operation has finished. NOTE: Deprecated. Not used with rescanblocks command.
	RescanFinishedNtfnMethod = "rescanfinished"
	// RescanProgressNtfnMethod is the legacy, deprecated method used for notifications from the chain server that a
	// legacy, deprecated rescan operation this is underway has made progress. NOTE: Deprecated. Not used with
	// rescanblocks command.
	RescanProgressNtfnMethod = "rescanprogress"
	// TxAcceptedNtfnMethod is the method used for notifications from the chain server that a transaction has been
	// accepted into the mempool.
	TxAcceptedNtfnMethod = "txaccepted"
	// TxAcceptedVerboseNtfnMethod is the method used for notifications from the chain server that a transaction has
	// been accepted into the mempool. This differs from TxAcceptedNtfnMethod in that it provides more details in the
	// notification.
	TxAcceptedVerboseNtfnMethod = "txacceptedverbose"
	// RelevantTxAcceptedNtfnMethod is the new method used for notifications from the chain server that inform a client
	// that a transaction that matches the loaded filter was accepted by the mempool.
	RelevantTxAcceptedNtfnMethod = "relevanttxaccepted"
)

// BlockConnectedNtfn defines the blockconnected JSON-RPC notification. NOTE: Deprecated. Use FilteredBlockConnectedNtfn
// instead.
type BlockConnectedNtfn struct {
	Hash   string
	Height int32
	Time   int64
}

// NewBlockConnectedNtfn returns a new instance which can be used to issue a blockconnected JSON-RPC notification.
//
// NOTE: Deprecated. Use NewFilteredBlockConnectedNtfn instead.
func NewBlockConnectedNtfn(hash string, height int32, time int64) *BlockConnectedNtfn {
	return &BlockConnectedNtfn{
		Hash:   hash,
		Height: height,
		Time:   time,
	}
}

// BlockDisconnectedNtfn defines the blockdisconnected JSON-RPC notification.
//
// NOTE: Deprecated. Use FilteredBlockDisconnectedNtfn instead.
type BlockDisconnectedNtfn struct {
	Hash   string
	Height int32
	Time   int64
}

// NewBlockDisconnectedNtfn returns a new instance which can be used to issue a blockdisconnected JSON-RPC notification.
//
// NOTE: Deprecated. Use NewFilteredBlockDisconnectedNtfn instead.
func NewBlockDisconnectedNtfn(hash string, height int32, time int64) *BlockDisconnectedNtfn {
	return &BlockDisconnectedNtfn{
		Hash:   hash,
		Height: height,
		Time:   time,
	}
}

// FilteredBlockConnectedNtfn defines the filteredblockconnected JSON-RPC notification.
type FilteredBlockConnectedNtfn struct {
	Height        int32
	Header        string
	SubscribedTxs []string
}

// NewFilteredBlockConnectedNtfn returns a new instance which can be used to issue a filteredblockconnected JSON-RPC notification.
func NewFilteredBlockConnectedNtfn(height int32, header string, subscribedTxs []string) *FilteredBlockConnectedNtfn {
	return &FilteredBlockConnectedNtfn{
		Height:        height,
		Header:        header,
		SubscribedTxs: subscribedTxs,
	}
}

// FilteredBlockDisconnectedNtfn defines the filteredblockdisconnected JSON-RPC notification.
type FilteredBlockDisconnectedNtfn struct {
	Height int32
	Header string
}

// NewFilteredBlockDisconnectedNtfn returns a new instance which can be used to issue a filteredblockdisconnected
// JSON-RPC notification.
func NewFilteredBlockDisconnectedNtfn(height int32, header string) *FilteredBlockDisconnectedNtfn {
	return &FilteredBlockDisconnectedNtfn{
		Height: height,
		Header: header,
	}
}

// BlockDetails describes details of a tx in a block.
type BlockDetails struct {
	Height int32  `json:"height"`
	Hash   string `json:"hash"`
	Index  int    `json:"index"`
	Time   int64  `json:"time"`
}

// RecvTxNtfn defines the recvtx JSON-RPC notification.
//
// NOTE: Deprecated. Use RelevantTxAcceptedNtfn and FilteredBlockConnectedNtfn instead.
type RecvTxNtfn struct {
	HexTx string
	Block *BlockDetails
}

// NewRecvTxNtfn returns a new instance which can be used to issue a recvtx JSON-RPC notification.
//
// NOTE: Deprecated. Use NewRelevantTxAcceptedNtfn and NewFilteredBlockConnectedNtfn instead.
func NewRecvTxNtfn(hexTx string, block *BlockDetails) *RecvTxNtfn {
	return &RecvTxNtfn{
		HexTx: hexTx,
		Block: block,
	}
}

// RedeemingTxNtfn defines the redeemingtx JSON-RPC notification.
//
// NOTE: Deprecated. Use RelevantTxAcceptedNtfn and FilteredBlockConnectedNtfn instead.
type RedeemingTxNtfn struct {
	HexTx string
	Block *BlockDetails
}

// NewRedeemingTxNtfn returns a new instance which can be used to issue a redeemingtx JSON-RPC notification.
//
// NOTE: Deprecated. Use NewRelevantTxAcceptedNtfn and NewFilteredBlockConnectedNtfn instead.
func NewRedeemingTxNtfn(hexTx string, block *BlockDetails) *RedeemingTxNtfn {
	return &RedeemingTxNtfn{
		HexTx: hexTx,
		Block: block,
	}
}

// RescanFinishedNtfn defines the rescanfinished JSON-RPC notification.
//
// NOTE: Deprecated. Not used with rescanblocks command.
type RescanFinishedNtfn struct {
	Hash   string
	Height int32
	Time   int64
}

// NewRescanFinishedNtfn returns a new instance which can be used to issue a rescanfinished JSON-RPC notification.
//
// NOTE: Deprecated. Not used with rescanblocks command.
func NewRescanFinishedNtfn(hash string, height int32, time int64) *RescanFinishedNtfn {
	return &RescanFinishedNtfn{
		Hash:   hash,
		Height: height,
		Time:   time,
	}
}

// RescanProgressNtfn defines the rescanprogress JSON-RPC notification.
//
// NOTE: Deprecated. Not used with rescanblocks command.
type RescanProgressNtfn struct {
	Hash   string
	Height int32
	Time   int64
}

// NewRescanProgressNtfn returns a new instance which can be used to issue a rescanprogress JSON-RPC notification.
//
// NOTE: Deprecated. Not used with rescanblocks command.
func NewRescanProgressNtfn(hash string, height int32, time int64) *RescanProgressNtfn {
	return &RescanProgressNtfn{
		Hash:   hash,
		Height: height,
		Time:   time,
	}
}

// TxAcceptedNtfn defines the txaccepted JSON-RPC notification.
type TxAcceptedNtfn struct {
	TxID   string
	Amount float64
}

// NewTxAcceptedNtfn returns a new instance which can be used to issue a txaccepted JSON-RPC notification.
func NewTxAcceptedNtfn(txHash string, amount float64) *TxAcceptedNtfn {
	return &TxAcceptedNtfn{
		TxID:   txHash,
		Amount: amount,
	}
}

// TxAcceptedVerboseNtfn defines the txacceptedverbose JSON-RPC notification.
type TxAcceptedVerboseNtfn struct {
	RawTx TxRawResult
}

// NewTxAcceptedVerboseNtfn returns a new instance which can be used to issue a txacceptedverbose JSON-RPC notification.
func NewTxAcceptedVerboseNtfn(rawTx TxRawResult) *TxAcceptedVerboseNtfn {
	return &TxAcceptedVerboseNtfn{
		RawTx: rawTx,
	}
}

// RelevantTxAcceptedNtfn defines the parameters to the relevanttxaccepted JSON-RPC notification.
type RelevantTxAcceptedNtfn struct {
	Transaction string `json:"transaction"`
}

// NewRelevantTxAcceptedNtfn returns a new instance which can be used to issue a relevantxaccepted JSON-RPC notification.
func NewRelevantTxAcceptedNtfn(txHex string) *RelevantTxAcceptedNtfn {
	return &RelevantTxAcceptedNtfn{Transaction: txHex}
}

// init registers every notification defined above with the command parser so
// incoming websocket notifications can be unmarshalled by method name.
func init() {
	// The commands in this file are only usable by websockets and are notifications.
	flags := UFWebsocketOnly | UFNotification

	MustRegisterCmd(BlockConnectedNtfnMethod, (*BlockConnectedNtfn)(nil), flags)
	MustRegisterCmd(BlockDisconnectedNtfnMethod, (*BlockDisconnectedNtfn)(nil), flags)
	MustRegisterCmd(FilteredBlockConnectedNtfnMethod, (*FilteredBlockConnectedNtfn)(nil), flags)
	MustRegisterCmd(FilteredBlockDisconnectedNtfnMethod, (*FilteredBlockDisconnectedNtfn)(nil), flags)
	MustRegisterCmd(RecvTxNtfnMethod, (*RecvTxNtfn)(nil), flags)
	MustRegisterCmd(RedeemingTxNtfnMethod, (*RedeemingTxNtfn)(nil), flags)
	MustRegisterCmd(RescanFinishedNtfnMethod, (*RescanFinishedNtfn)(nil), flags)
	MustRegisterCmd(RescanProgressNtfnMethod, (*RescanProgressNtfn)(nil), flags)
	MustRegisterCmd(TxAcceptedNtfnMethod, (*TxAcceptedNtfn)(nil), flags)
	MustRegisterCmd(TxAcceptedVerboseNtfnMethod, (*TxAcceptedVerboseNtfn)(nil), flags)
	MustRegisterCmd(RelevantTxAcceptedNtfnMethod, (*RelevantTxAcceptedNtfn)(nil), flags)
}
scattering-central/FISH
java/FittingTable.java
package fish; import java.util.Formatter; import javax.swing.table.AbstractTableModel; import fish.plot.PlotFrame; /** * Custom table model for the fitting table * * @author dqr75132 * */ public class FittingTable extends AbstractTableModel { private FishFrame frame; private static final Common_two two_=Core.getTwo_(); private static final Common_jfit jfit_=Core.getJfit_(); FittingTable(FishFrame fishFrame) { frame=fishFrame; } /** * Always returns seven columns as the fitting table always has this number * (although some may be hidden) */ public int getColumnCount() { return 7; } /** * Returns the row count, which is the number of parameters in the current model */ public int getRowCount() { return two_.getNp(); } /** * Returns whether a cell is editable */ public boolean isCellEditable(int row, int col){ float[] ps=two_.getPs(); int[] lm=two_.getLm(); if ((col==3 || (col==5 && (ps[row]==0 || ps[row]==1))) && ps[row]>=0 && lm[row]!=88){ return true; } else { return false; } } /** * Find the data value to display in a particular table cell */ public Object getValueAt(int row, int col) { Formatter f = new Formatter(); int[] lm=two_.getLm(); if(lm[row]==88 && col>2) { f.format(""); } else if(col==0){ f.format("%d",lm[row]); } else if(col==1){ int[] ltyp=two_.getLtyp(); f.format("%d",ltyp[row]); } else if(col==2) { ModelList models=frame.getModelList(); Model model=models.getModel(); String s=model.getParameterLabel(row); if(s.equals(" ")){ int[] ltyp=two_.getLtyp(); if(ltyp[row]==0){ f.format("(All sets)",ltyp[row]); } else{ f.format("(Set %d)",ltyp[row]); } } else{ f.format("%s",s); } } else if(col==3) { float[] v=two_.getV(); f.format("%e", v[row]); } else if(col==4) { float[] esd=two_.getEsd(); f.format("%e", esd[row]); } else if(col==5) { float[] ps=two_.getPs(); if(ps[row]==-1.0){ f.format("Constrained"); } else if(ps[row]==-2.0){ f.format("Polydisperse"); } else if(ps[row]==0){ return false; } else if(ps[row]==1){ return true; } else { f.format("%f", ps[row]); 
} } else if(col==6) { float[] dv=two_.getDv(); f.format("%e", dv[row]); } return f; } /** * Changes the data value corresponding to a point in the table */ public void setValueAt(Object val, int row, int col){ if(col==5 && val.getClass().getName().equals("java.lang.Boolean")){ if((Boolean)val==true){ float[] ps=two_.getPs(); ps[row]=1; two_.setPs(ps); } if((Boolean)val==false){ float[] ps=two_.getPs(); ps[row]=0; two_.setPs(ps); } } else if(col==3){ float[] v=two_.getV(); try { v[row]=Float.valueOf((String)val); int inFit=jfit_.getInfit(); two_.setV(v); int cycle=two_.getNyc(); // if we're not in a fit, set up data sets etc. to enable calculation if(inFit==0){ frame.fitInit(); } // calculate the new CALC data Core.talk("OFF"); Core.talk("R"); Core.talk("ON"); // for the moment, we don't count each edit as a separate cycle // so decrement the cycle number two_.setNyc(cycle); // if we weren't already in a fit, set infit back to 0 as it // will have been set to 1 if(inFit==0){ jfit_.setInfit(0); } PlotFrame.refreshAll(false); } catch(NumberFormatException e) { System.out.println("Incorrectly formatted number input in fitting table"); } } } }
sunilbhara/lakeFS
catalog/mvcc/cataloger_rollback_commit.go
<filename>catalog/mvcc/cataloger_rollback_commit.go package mvcc import ( "context" "fmt" "github.com/treeverse/lakefs/catalog" "github.com/treeverse/lakefs/db" ) func (c *cataloger) RollbackCommit(ctx context.Context, repository, branch, reference string) error { if err := Validate(ValidateFields{ {Name: "repository", IsValid: ValidateRepositoryName(repository)}, {Name: "reference", IsValid: ValidateReference(reference)}, }); err != nil { return err } ref, err := ParseRef(reference) if err != nil { return err } if ref.CommitID <= UncommittedID { return catalog.ErrInvalidReference } if ref.Branch != branch { return catalog.ErrInvalidReference } _, err = c.db.Transact(func(tx db.Tx) (interface{}, error) { // extract branch id from reference branchID, err := getBranchID(tx, repository, ref.Branch, LockTypeUpdate) if err != nil { return nil, err } // validate no child branch point to parent commit var count int err = tx.GetPrimitive(&count, `SELECT COUNT(*) from catalog_commits WHERE merge_source_branch = $1 AND merge_source_commit > $2 AND merge_type = 'from_parent'`, branchID, ref.CommitID) if err != nil { return nil, fmt.Errorf("check merge with branch: %w", err) } if count > 0 { return nil, catalog.ErrRollbackWithActiveBranch } // delete all commits after this commit on this branch _, err = tx.Exec(`DELETE FROM catalog_commits WHERE branch_id = $1 AND commit_id > $2`, branchID, ref.CommitID) if err != nil { return nil, fmt.Errorf("delete commits on branch %d, after commit %d: %w", branchID, ref.CommitID, err) } // delete all entries created after this commit _, err = tx.Exec(`DELETE FROM catalog_entries WHERE branch_id = $1 AND min_commit > $2`, branchID, ref.CommitID) if err != nil { return nil, fmt.Errorf("delete entries %d, after min commit %d: %w", branchID, ref.CommitID, err) } // update max_commit to infinite _, err = tx.Exec(`UPDATE catalog_entries SET max_commit = $1 WHERE branch_id = $2 AND max_commit >= $3 AND NOT max_commit = $1`, MaxCommitID, branchID, 
ref.CommitID) if err != nil { return nil, fmt.Errorf("clear entries %d, max commit %d: %w", branchID, ref.CommitID, err) } return nil, nil }, c.txOpts(ctx, db.ReadCommitted())...) return err }
strassek/chromiumos-platform2
foomatic_shell/parser_fuzzer.cc
// Copyright 2020 The Chromium OS Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "foomatic_shell/grammar.h" #include "foomatic_shell/parser.h" #include "foomatic_shell/scanner.h" #include <cstdint> #include <string> #include <vector> extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) { const std::string input(reinterpret_cast<const char*>(data), size); foomatic_shell::Scanner scanner(input); std::vector<foomatic_shell::Token> tokens; if (!scanner.ParseWholeInput(&tokens)) { // The input is not correct. However, we still want to feed a parser with // produced tokens. The EOF token must be added at the end to mimic a // correct sequence of tokens. foomatic_shell::Token eof; eof.type = foomatic_shell::Token::Type::kEOF; eof.begin = eof.end = input.end(); tokens.push_back(eof); } foomatic_shell::Parser parser(tokens); foomatic_shell::Script script; parser.ParseWholeInput(&script); return 0; }
asrashley/dash-live
tests/mixins.py
import logging import os class TestCaseMixin(object): @property def classname(self): clz = type(self) if clz.__module__.startswith('__'): return clz.__name__ return clz.__module__ + '.' + clz.__name__ def _assert_true(self, result, a, b, msg, template): if not result: if msg is not None: raise AssertionError(msg) raise AssertionError(template.format(a,b)) def assertTrue(self, result, msg=None): self._assert_true(result, result, None, msg, r'{} not True') def assertFalse(self, result, msg=None): self._assert_true(not result, result, None, msg, r'{} not False') def assertEqual(self, a, b, msg=None): self._assert_true(a==b, a, b, msg, r'{} != {}') def assertNotEqual(self, a, b, msg=None): self._assert_true(a!=b, a, b, msg, r'{} == {}') def assertAlmostEqual(self, a, b, places=7, msg=None, delta=None): if delta is not None: d = abs(a - b) self._assert_true(d<=delta, a, b, msg, '{} !~= {} (delta %f)'%(delta)) else: ar = round(a, places) br = round(b, places) self._assert_true(ar==br, a, b, msg, '{} !~= {}') def assertGreaterThan(self, a, b, msg=None): self._assert_true(a>b, a, b, msg, r'{} <= {}') def assertGreaterThanOrEqual(self, a, b, msg=None): self._assert_true(a>=b, a, b, msg, r'{} < {}') def assertLessThan(self, a, b, msg=None): self._assert_true(a<b, a, b, msg, r'{} >= {}') def assertLessThanOrEqual(self, a, b, msg=None): self._assert_true(a<=b, a, b, msg, r'{} > {}') def assertIn(self, a, b, msg=None): self._assert_true(a in b, a, b, msg, r'{} not in {}') def assertNotIn(self, a, b, msg=None): self._assert_true(a not in b, a, b, msg, r'{} in {}') def assertIsNone(self, a, msg=None): self._assert_true(a is None, a, None, msg, r'{} is not None') def assertIsNotNone(self, a, msg=None): self._assert_true(a is not None, a, None, msg, r'{} is None') def assertEndsWith(self, a, b, msg=None): self._assert_true(a.endswith(b), a, b, msg, r'{} does not end with {}') def assertIsInstance(self, a, types, msg=None): self._assert_true(isinstance(a, types), a, types, msg, r'{} 
is not instance of {}') def _check_true(self, result, a, b, msg, template): if not result: if msg is None: msg = template.format(a,b) log = getattr(self, "log", None) if log is None: log = logging.getLogger(self.classname) log.warning('%s', msg) def checkTrue(self, result, msg=None): self._check_true(result, result, None, msg, r'{} not True') def checkFalse(self, result, msg=None): self._check_true(not result, result, None, msg, r'{} not False') def checkEqual(self, a, b, msg=None): self._check_true(a==b, a, b, msg, r'{} != {}') def checkNotEqual(self, a, b, msg=None): self._check_true(a!=b, a, b, msg, r'{} == {}') def checkAlmostEqual(self, a, b, places=7, msg=None, delta=None): if delta is not None: d = abs(a - b) self._check_true(d<=delta, a, b, msg, '{} !~= {} (delta %f)'%(delta)) else: ar = round(a, places) br = round(b, places) self._check_true(ar==br, a, b, msg, '{} !~= {}') def checkGreaterThan(self, a, b, msg=None): self._check_true(a>b, a, b, msg, r'{} <= {}') def checkGreaterThanOrEqual(self, a, b, msg=None): self._check_true(a>=b, a, b, msg, r'{} < {}') def checkLessThan(self, a, b, msg=None): self._check_true(a<b, a, b, msg, r'{} >= {}') def checkLessThanOrEqual(self, a, b, msg=None): self._check_true(a<=b, a, b, msg, r'{} > {}') def checkIn(self, a, b, msg=None): self._check_true(a in b, a, b, msg, r'{} not in {}') def checkNotIn(self, a, b, msg=None): self._check_true(a not in b, a, b, msg, r'{} in {}') def checkIsNone(self, a, msg=None): self._check_true(a is None, a, None, msg, r'{} is not None') def checkIsNotNone(self, a, msg=None): self._check_true(a is not None, a, None, msg, r'{} is None') def checkEndsWith(self, a, b, msg=None): self._check_true(a.endswith(b), a, b, msg, r'{} does not end with {}') def checkIsInstance(self, a, types, msg=None): self._check_true(isinstance(a, types), a, types, msg, r'{} is not instance of {}') class HideMixinsFilter(logging.Filter): """A logging.Filter that hides mixins.py in log messages. 
Using the HideMixinsFilter in a logging adapter will cause the filename and line numbers in log messages to be set to the caller of mixins functions, rather than just seeing the line number in mixings.py that calls logger.error """ def filter(self, record): if record.filename.endswith('mixins.py'): # Replace the log record with the function & line number that called # mixins.py record.filename, record.lineno, record.funcName = self.find_caller() return True def find_caller(self): n_frames_upper = 2 f = logging.currentframe() for _ in range(2 + n_frames_upper): if f is not None: f = f.f_back rv = "(unknown file)", 0, "(unknown function)" while hasattr(f, "f_code"): co = f.f_code filename = os.path.normcase(co.co_filename) basename = os.path.basename(co.co_filename) if filename == logging._srcfile or basename == 'mixins.py': f = f.f_back continue rv = (basename, f.f_lineno, co.co_name) break return rv
HernandezM22/DatlasHackMTY
real-data/node_modules/@deck.gl/layers/dist/es5/geojson-layer/geojson-layer.js
"use strict"; var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); Object.defineProperty(exports, "__esModule", { value: true }); exports["default"] = void 0; var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime/helpers/classCallCheck")); var _createClass2 = _interopRequireDefault(require("@babel/runtime/helpers/createClass")); var _possibleConstructorReturn2 = _interopRequireDefault(require("@babel/runtime/helpers/possibleConstructorReturn")); var _getPrototypeOf2 = _interopRequireDefault(require("@babel/runtime/helpers/getPrototypeOf")); var _inherits2 = _interopRequireDefault(require("@babel/runtime/helpers/inherits")); var _core = require("@deck.gl/core"); var _scatterplotLayer = _interopRequireDefault(require("../scatterplot-layer/scatterplot-layer")); var _pathLayer = _interopRequireDefault(require("../path-layer/path-layer")); var _solidPolygonLayer = _interopRequireDefault(require("../solid-polygon-layer/solid-polygon-layer")); var _utils = require("../utils"); var _geojson = require("./geojson"); var defaultLineColor = [0, 0, 0, 255]; var defaultFillColor = [0, 0, 0, 255]; var defaultProps = { stroked: true, filled: true, extruded: false, wireframe: false, lineWidthUnits: 'meters', lineWidthScale: 1, lineWidthMinPixels: 0, lineWidthMaxPixels: Number.MAX_SAFE_INTEGER, lineJointRounded: false, lineMiterLimit: 4, elevationScale: 1, pointRadiusUnits: 'meters', pointRadiusScale: 1, pointRadiusMinPixels: 0, pointRadiusMaxPixels: Number.MAX_SAFE_INTEGER, getLineColor: { type: 'accessor', value: defaultLineColor }, getFillColor: { type: 'accessor', value: defaultFillColor }, getRadius: { type: 'accessor', value: 1 }, getLineWidth: { type: 'accessor', value: 1 }, getElevation: { type: 'accessor', value: 1000 }, material: true }; function getCoordinates(f) { return f.geometry.coordinates; } var GeoJsonLayer = function (_CompositeLayer) { (0, _inherits2["default"])(GeoJsonLayer, _CompositeLayer); function 
GeoJsonLayer() { (0, _classCallCheck2["default"])(this, GeoJsonLayer); return (0, _possibleConstructorReturn2["default"])(this, (0, _getPrototypeOf2["default"])(GeoJsonLayer).apply(this, arguments)); } (0, _createClass2["default"])(GeoJsonLayer, [{ key: "initializeState", value: function initializeState() { this.state = { features: {} }; if (this.props.getLineDashArray) { _core.log.removed('getLineDashArray', 'PathStyleExtension')(); } } }, { key: "updateState", value: function updateState(_ref) { var props = _ref.props, changeFlags = _ref.changeFlags; if (!changeFlags.dataChanged) { return; } var features = (0, _geojson.getGeojsonFeatures)(props.data); var wrapFeature = this.getSubLayerRow.bind(this); if (Array.isArray(changeFlags.dataChanged)) { var oldFeatures = this.state.features; var newFeatures = {}; var featuresDiff = {}; for (var key in oldFeatures) { newFeatures[key] = oldFeatures[key].slice(); featuresDiff[key] = []; } var _iteratorNormalCompletion = true; var _didIteratorError = false; var _iteratorError = undefined; try { for (var _iterator = changeFlags.dataChanged[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { var dataRange = _step.value; var partialFeatures = (0, _geojson.separateGeojsonFeatures)(features, wrapFeature, dataRange); for (var _key in oldFeatures) { featuresDiff[_key].push((0, _utils.replaceInRange)({ data: newFeatures[_key], getIndex: function getIndex(f) { return f.__source.index; }, dataRange: dataRange, replace: partialFeatures[_key] })); } } } catch (err) { _didIteratorError = true; _iteratorError = err; } finally { try { if (!_iteratorNormalCompletion && _iterator["return"] != null) { _iterator["return"](); } } finally { if (_didIteratorError) { throw _iteratorError; } } } this.setState({ features: newFeatures, featuresDiff: featuresDiff }); } else { this.setState({ features: (0, _geojson.separateGeojsonFeatures)(features, wrapFeature), featuresDiff: 
{} }); } } }, { key: "renderLayers", value: function renderLayers() { var _this$state = this.state, features = _this$state.features, featuresDiff = _this$state.featuresDiff; var pointFeatures = features.pointFeatures, lineFeatures = features.lineFeatures, polygonFeatures = features.polygonFeatures, polygonOutlineFeatures = features.polygonOutlineFeatures; var _this$props = this.props, stroked = _this$props.stroked, filled = _this$props.filled, extruded = _this$props.extruded, wireframe = _this$props.wireframe, material = _this$props.material, transitions = _this$props.transitions; var _this$props2 = this.props, lineWidthUnits = _this$props2.lineWidthUnits, lineWidthScale = _this$props2.lineWidthScale, lineWidthMinPixels = _this$props2.lineWidthMinPixels, lineWidthMaxPixels = _this$props2.lineWidthMaxPixels, lineJointRounded = _this$props2.lineJointRounded, lineMiterLimit = _this$props2.lineMiterLimit, pointRadiusUnits = _this$props2.pointRadiusUnits, pointRadiusScale = _this$props2.pointRadiusScale, pointRadiusMinPixels = _this$props2.pointRadiusMinPixels, pointRadiusMaxPixels = _this$props2.pointRadiusMaxPixels, elevationScale = _this$props2.elevationScale, lineDashJustified = _this$props2.lineDashJustified; var _this$props3 = this.props, getLineColor = _this$props3.getLineColor, getFillColor = _this$props3.getFillColor, getRadius = _this$props3.getRadius, getLineWidth = _this$props3.getLineWidth, getLineDashArray = _this$props3.getLineDashArray, getElevation = _this$props3.getElevation, updateTriggers = _this$props3.updateTriggers; var PolygonFillLayer = this.getSubLayerClass('polygons-fill', _solidPolygonLayer["default"]); var PolygonStrokeLayer = this.getSubLayerClass('polygons-stroke', _pathLayer["default"]); var LineStringsLayer = this.getSubLayerClass('line-strings', _pathLayer["default"]); var PointsLayer = this.getSubLayerClass('points', _scatterplotLayer["default"]); var polygonFillLayer = this.shouldRenderSubLayer('polygons-fill', polygonFeatures) && new 
PolygonFillLayer({ _dataDiff: featuresDiff.polygonFeatures && function () { return featuresDiff.polygonFeatures; }, extruded: extruded, elevationScale: elevationScale, filled: filled, wireframe: wireframe, material: material, getElevation: this.getSubLayerAccessor(getElevation), getFillColor: this.getSubLayerAccessor(getFillColor), getLineColor: this.getSubLayerAccessor(getLineColor), transitions: transitions && { getPolygon: transitions.geometry, getElevation: transitions.getElevation, getFillColor: transitions.getFillColor, getLineColor: transitions.getLineColor } }, this.getSubLayerProps({ id: 'polygons-fill', updateTriggers: { getElevation: updateTriggers.getElevation, getFillColor: updateTriggers.getFillColor, getLineColor: updateTriggers.getLineColor } }), { data: polygonFeatures, getPolygon: getCoordinates }); var polygonLineLayer = !extruded && stroked && this.shouldRenderSubLayer('polygons-stroke', polygonOutlineFeatures) && new PolygonStrokeLayer({ _dataDiff: featuresDiff.polygonOutlineFeatures && function () { return featuresDiff.polygonOutlineFeatures; }, widthUnits: lineWidthUnits, widthScale: lineWidthScale, widthMinPixels: lineWidthMinPixels, widthMaxPixels: lineWidthMaxPixels, rounded: lineJointRounded, miterLimit: lineMiterLimit, dashJustified: lineDashJustified, getColor: this.getSubLayerAccessor(getLineColor), getWidth: this.getSubLayerAccessor(getLineWidth), getDashArray: this.getSubLayerAccessor(getLineDashArray), transitions: transitions && { getPath: transitions.geometry, getColor: transitions.getLineColor, getWidth: transitions.getLineWidth } }, this.getSubLayerProps({ id: 'polygons-stroke', updateTriggers: { getColor: updateTriggers.getLineColor, getWidth: updateTriggers.getLineWidth, getDashArray: updateTriggers.getLineDashArray } }), { data: polygonOutlineFeatures, getPath: getCoordinates }); var pathLayer = this.shouldRenderSubLayer('linestrings', lineFeatures) && new LineStringsLayer({ _dataDiff: featuresDiff.lineFeatures && function () 
{ return featuresDiff.lineFeatures; }, widthUnits: lineWidthUnits, widthScale: lineWidthScale, widthMinPixels: lineWidthMinPixels, widthMaxPixels: lineWidthMaxPixels, rounded: lineJointRounded, miterLimit: lineMiterLimit, dashJustified: lineDashJustified, getColor: this.getSubLayerAccessor(getLineColor), getWidth: this.getSubLayerAccessor(getLineWidth), getDashArray: this.getSubLayerAccessor(getLineDashArray), transitions: transitions && { getPath: transitions.geometry, getColor: transitions.getLineColor, getWidth: transitions.getLineWidth } }, this.getSubLayerProps({ id: 'line-strings', updateTriggers: { getColor: updateTriggers.getLineColor, getWidth: updateTriggers.getLineWidth, getDashArray: updateTriggers.getLineDashArray } }), { data: lineFeatures, getPath: getCoordinates }); var pointLayer = this.shouldRenderSubLayer('points', pointFeatures) && new PointsLayer({ _dataDiff: featuresDiff.pointFeatures && function () { return featuresDiff.pointFeatures; }, stroked: stroked, filled: filled, radiusUnits: pointRadiusUnits, radiusScale: pointRadiusScale, radiusMinPixels: pointRadiusMinPixels, radiusMaxPixels: pointRadiusMaxPixels, lineWidthUnits: lineWidthUnits, lineWidthScale: lineWidthScale, lineWidthMinPixels: lineWidthMinPixels, lineWidthMaxPixels: lineWidthMaxPixels, getFillColor: this.getSubLayerAccessor(getFillColor), getLineColor: this.getSubLayerAccessor(getLineColor), getRadius: this.getSubLayerAccessor(getRadius), getLineWidth: this.getSubLayerAccessor(getLineWidth), transitions: transitions && { getPosition: transitions.geometry, getFillColor: transitions.getFillColor, getLineColor: transitions.getLineColor, getRadius: transitions.getRadius, getLineWidth: transitions.getLineWidth } }, this.getSubLayerProps({ id: 'points', updateTriggers: { getFillColor: updateTriggers.getFillColor, getLineColor: updateTriggers.getLineColor, getRadius: updateTriggers.getRadius, getLineWidth: updateTriggers.getLineWidth } }), { data: pointFeatures, getPosition: 
getCoordinates, highlightedObjectIndex: this._getHighlightedIndex(pointFeatures) }); return [!extruded && polygonFillLayer, polygonLineLayer, pathLayer, pointLayer, extruded && polygonFillLayer]; } }, { key: "_getHighlightedIndex", value: function _getHighlightedIndex(data) { var highlightedObjectIndex = this.props.highlightedObjectIndex; return Number.isFinite(highlightedObjectIndex) ? data.findIndex(function (d) { return d.__source.index === highlightedObjectIndex; }) : null; } }]); return GeoJsonLayer; }(_core.CompositeLayer); exports["default"] = GeoJsonLayer; GeoJsonLayer.layerName = 'GeoJsonLayer'; GeoJsonLayer.defaultProps = defaultProps; //# sourceMappingURL=geojson-layer.js.map
Russell-IO/ews-java-api
src/main/java/microsoft/exchange/webservices/data/UserConfigurationDictionary.java
<reponame>Russell-IO/ews-java-api<filename>src/main/java/microsoft/exchange/webservices/data/UserConfigurationDictionary.java<gh_stars>0 /************************************************************************** Exchange Web Services Java API Copyright (c) Microsoft Corporation All rights reserved. MIT License Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the ""Software""), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. **************************************************************************/ package microsoft.exchange.webservices.data; import javax.xml.stream.XMLStreamException; import java.lang.reflect.Array; import java.util.*; import java.util.Map.Entry; /** * Represents a user configuration's Dictionary property. */ @EditorBrowsable(state = EditorBrowsableState.Never) public final class UserConfigurationDictionary extends ComplexProperty implements Iterable<Object> { // TODO: Consider implementing IsDirty mechanism in ComplexProperty. /** * The dictionary. */ private Map<Object, Object> dictionary; /** * The is dirty. 
*/ private boolean isDirty = false; /** * Initializes a new instance of "UserConfigurationDictionary" class. */ protected UserConfigurationDictionary() { super(); this.dictionary = new HashMap<Object, Object>(); } /** * Gets the element with the specified key. * * @param key The key of the element to get or set. * @return The element with the specified key. */ public Object getElements(Object key) { return this.dictionary.get(key); } /** * Sets the element with the specified key. * * @param key The key of the element to get or set * @param value the value * @throws Exception the exception */ public void setElements(Object key, Object value) throws Exception { this.validateEntry(key, value); this.dictionary.put(key, value); this.changed(); } /** * Adds an element with the provided key and value to the user configuration * dictionary. * * @param key The object to use as the key of the element to add. * @param value The object to use as the value of the element to add. * @throws Exception the exception */ public void addElement(Object key, Object value) throws Exception { this.validateEntry(key, value); this.dictionary.put(key, value); this.changed(); } /** * Determines whether the user configuration dictionary contains an element * with the specified key. * * @param key The key to locate in the user configuration dictionary. * @return true if the user configuration dictionary contains an element * with the key; otherwise false. */ public boolean containsKey(Object key) { return this.dictionary.containsKey(key); } /** * Removes the element with the specified key from the user configuration * dictionary. * * @param key The key of the element to remove. * @return true if the element is successfully removed; otherwise false. */ public boolean remove(Object key) { boolean isRemoved = false; if (key != null) { this.dictionary.remove(key); isRemoved = true; } if (isRemoved) { this.changed(); } return isRemoved; } /** * Gets the value associated with the specified key. 
* * @param key The key whose value to get. * @param value When this method returns, the value associated with the * specified key, if the key is found; otherwise, null. * @return true if the user configuration dictionary contains the key; * otherwise false. */ public boolean tryGetValue(Object key, OutParam<Object> value) { if (this.dictionary.containsKey(key)) { value.setParam(this.dictionary.get(key)); return true; } else { value.setParam(null); return false; } } /** * Gets the number of elements in the user configuration dictionary. * * @return the count */ public int getCount() { return this.dictionary.size(); } /** * Removes all items from the user configuration dictionary. */ public void clear() { if (this.dictionary.size() != 0) { this.dictionary.clear(); this.changed(); } } /** * Gets the enumerator. * * @return the enumerator */ @SuppressWarnings("unchecked") /** * Returns an enumerator that iterates through * the user configuration dictionary. * @return An IEnumerator that can be used * to iterate through the user configuration dictionary. */ public Iterator getEnumerator() { return (this.dictionary.values().iterator()); } /** * Gets the isDirty flag. * * @return the checks if is dirty */ protected boolean getIsDirty() { return this.isDirty; } /** * Sets the isDirty flag. * * @param value the new checks if is dirty */ protected void setIsDirty(boolean value) { this.isDirty = value; } /** * Instance was changed. */ @Override protected void changed() { super.changed(); this.isDirty = true; } /** * Writes elements to XML. 
* * @param writer accepts EwsServiceXmlWriter * @throws javax.xml.stream.XMLStreamException the xML stream exception * @throws ServiceXmlSerializationException the service xml serialization exception */ @Override protected void writeElementsToXml(EwsServiceXmlWriter writer) throws XMLStreamException, ServiceXmlSerializationException { EwsUtilities.EwsAssert(writer != null, "UserConfigurationDictionary.WriteElementsToXml", "writer is null"); Iterator<Entry<Object, Object>> it = this.dictionary.entrySet() .iterator(); while (it.hasNext()) { Entry<Object, Object> dictionaryEntry = it.next(); writer.writeStartElement(XmlNamespace.Types, XmlElementNames.DictionaryEntry); this.writeObjectToXml(writer, XmlElementNames.DictionaryKey, dictionaryEntry.getKey()); this.writeObjectToXml(writer, XmlElementNames.DictionaryValue, dictionaryEntry.getValue()); writer.writeEndElement(); } } /** * Writes a dictionary object (key or value) to Xml. * * @param writer The writer. * @param xmlElementName The Xml element name. * @param dictionaryObject The object to write. 
* @throws javax.xml.stream.XMLStreamException the xML stream exception * @throws ServiceXmlSerializationException the service xml serialization exception */ private void writeObjectToXml(EwsServiceXmlWriter writer, String xmlElementName, Object dictionaryObject) throws XMLStreamException, ServiceXmlSerializationException { EwsUtilities.EwsAssert(writer != null, "UserConfigurationDictionary.WriteObjectToXml", "writer is null"); EwsUtilities.EwsAssert(xmlElementName != null, "UserConfigurationDictionary.WriteObjectToXml", "xmlElementName is null"); writer.writeStartElement(XmlNamespace.Types, xmlElementName); if (dictionaryObject == null) { EwsUtilities.EwsAssert((!xmlElementName .equals(XmlElementNames.DictionaryKey)), "UserConfigurationDictionary.WriteObjectToXml", "Key is null"); writer.writeAttributeValue( EwsUtilities.EwsXmlSchemaInstanceNamespacePrefix, XmlAttributeNames.Nil, EwsUtilities.XSTrue); } else { this.writeObjectValueToXml(writer, dictionaryObject); } writer.writeEndElement(); } /** * Writes a dictionary Object's value to Xml. * * @param writer The writer. * @param dictionaryObject The dictionary object to write. <br /> * Object values are either: <br /> * an array of strings, an array of bytes (which will be encoded into base64) <br /> * or a single value. 
Single values can be: <br /> * - datetime, boolean, byte, int, long, string * @throws javax.xml.stream.XMLStreamException the xML stream exception * @throws ServiceXmlSerializationException the service xml serialization exception */ private void writeObjectValueToXml(final EwsServiceXmlWriter writer, final Object dictionaryObject) throws XMLStreamException, ServiceXmlSerializationException { // Preconditions if (dictionaryObject == null) { throw new NullPointerException("DictionaryObject must not be null"); } if (writer == null) { throw new NullPointerException( "EwsServiceXmlWriter must not be null"); } // Processing final UserConfigurationDictionaryObjectType dictionaryObjectType; if (dictionaryObject instanceof String[]) { dictionaryObjectType = UserConfigurationDictionaryObjectType.StringArray; this.writeEntryTypeToXml(writer, dictionaryObjectType); for (String arrayElement : (String[]) dictionaryObject) { this.writeEntryValueToXml(writer, arrayElement); } } else { final String valueAsString; if (dictionaryObject instanceof String) { dictionaryObjectType = UserConfigurationDictionaryObjectType.String; valueAsString = String.valueOf(dictionaryObject); } else if (dictionaryObject instanceof Boolean) { dictionaryObjectType = UserConfigurationDictionaryObjectType.Boolean; valueAsString = EwsUtilities .boolToXSBool((Boolean) dictionaryObject); } else if (dictionaryObject instanceof Byte) { dictionaryObjectType = UserConfigurationDictionaryObjectType.Byte; valueAsString = String.valueOf(dictionaryObject); } else if (dictionaryObject instanceof Date) { dictionaryObjectType = UserConfigurationDictionaryObjectType.DateTime; valueAsString = writer.getService() .convertDateTimeToUniversalDateTimeString( (Date) dictionaryObject); } else if (dictionaryObject instanceof Integer) { // removed unsigned integer because in Java, all types are // signed, there are no unsigned versions dictionaryObjectType = UserConfigurationDictionaryObjectType.Integer32; valueAsString = 
String.valueOf(dictionaryObject); } else if (dictionaryObject instanceof Long) { // removed unsigned integer because in Java, all types are // signed, there are no unsigned versions dictionaryObjectType = UserConfigurationDictionaryObjectType.Integer64; valueAsString = String.valueOf(dictionaryObject); } else if (dictionaryObject instanceof byte[]) { dictionaryObjectType = UserConfigurationDictionaryObjectType.ByteArray; valueAsString = Base64EncoderStream.encode((byte[]) dictionaryObject); } else if (dictionaryObject instanceof Byte[]) { dictionaryObjectType = UserConfigurationDictionaryObjectType.ByteArray; // cast Byte[] to byte[] Byte[] from = (Byte[]) dictionaryObject; byte[] to = new byte[from.length]; for (int currentIndex = 0; currentIndex < from.length; currentIndex++) { to[currentIndex] = (byte) from[currentIndex]; } valueAsString = Base64EncoderStream.encode(to); } else { throw new IllegalArgumentException(String.format( "Unsupported type: %s", dictionaryObject.getClass() .toString())); } this.writeEntryTypeToXml(writer, dictionaryObjectType); this.writeEntryValueToXml(writer, valueAsString); } } /** * Writes a dictionary entry type to Xml. * * @param writer The writer. * @param dictionaryObjectType Type to write. * @throws javax.xml.stream.XMLStreamException the xML stream exception * @throws ServiceXmlSerializationException the service xml serialization exception */ private void writeEntryTypeToXml(EwsServiceXmlWriter writer, UserConfigurationDictionaryObjectType dictionaryObjectType) throws XMLStreamException, ServiceXmlSerializationException { writer.writeStartElement(XmlNamespace.Types, XmlElementNames.Type); writer .writeValue(dictionaryObjectType.toString(), XmlElementNames.Type); writer.writeEndElement(); } /** * Writes a dictionary entry value to Xml. * * @param writer The writer. * @param value Value to write. 
* @throws javax.xml.stream.XMLStreamException the xML stream exception * @throws ServiceXmlSerializationException the service xml serialization exception */ private void writeEntryValueToXml(EwsServiceXmlWriter writer, String value) throws XMLStreamException, ServiceXmlSerializationException { writer.writeStartElement(XmlNamespace.Types, XmlElementNames.Value); // While an entry value can't be null, if the entry is an array, an // element of the array can be null. if (value != null) { writer.writeValue(value, XmlElementNames.Value); } writer.writeEndElement(); } /* * (non-Javadoc) * * @see * microsoft.exchange.webservices.ComplexProperty#loadFromXml(microsoft. * exchange.webservices.EwsServiceXmlReader, * microsoft.exchange.webservices.XmlNamespace, java.lang.String) */ @Override /** * Loads this dictionary from the specified reader. * @param reader The reader. * @param xmlNamespace The dictionary's XML namespace. * @param xmlElementName Name of the XML element * representing the dictionary. */ protected void loadFromXml(EwsServiceXmlReader reader, XmlNamespace xmlNamespace, String xmlElementName) throws Exception { super.loadFromXml(reader, xmlNamespace, xmlElementName); this.isDirty = false; } /* * (non-Javadoc) * * @see * microsoft.exchange.webservices.ComplexProperty#tryReadElementFromXml( * microsoft.exchange.webservices.EwsServiceXmlReader) */ @Override /** * Tries to read element from XML. * @param reader The reader. * @return True if element was read. */ protected boolean tryReadElementFromXml(EwsServiceXmlReader reader) throws Exception { reader.ensureCurrentNodeIsStartElement(this.getNamespace(), XmlElementNames.DictionaryEntry); this.loadEntry(reader); return true; } /** * Loads an entry, consisting of a key value pair, into this dictionary from * the specified reader. * * @param reader The reader. 
* @throws Exception the exception */ private void loadEntry(EwsServiceXmlReader reader) throws Exception { EwsUtilities.EwsAssert(reader != null, "UserConfigurationDictionary.LoadEntry", "reader is null"); Object key; Object value = null; // Position at DictionaryKey reader.readStartElement(this.getNamespace(), XmlElementNames.DictionaryKey); key = this.getDictionaryObject(reader); // Position at DictionaryValue reader.readStartElement(this.getNamespace(), XmlElementNames.DictionaryValue); String nil = reader.readAttributeValue(XmlNamespace.XmlSchemaInstance, XmlAttributeNames.Nil); boolean hasValue = (nil == null) || (!nil.getClass().equals(Boolean.TYPE)); if (hasValue) { value = this.getDictionaryObject(reader); } this.dictionary.put(key, value); } /** * Extracts a dictionary object (key or entry value) from the specified * reader. * * @param reader The reader. * @return Dictionary object. * @throws Exception the exception */ private Object getDictionaryObject(EwsServiceXmlReader reader) throws Exception { EwsUtilities.EwsAssert(reader != null, "UserConfigurationDictionary.loadFromXml", "reader is null"); UserConfigurationDictionaryObjectType type = this.getObjectType(reader); List<String> values = this.getObjectValue(reader, type); return this.constructObject(type, values, reader); } /** * Extracts a dictionary object (key or entry value) as a string list from * the specified reader. * * @param reader The reader. * @param type The object type. * @return String list representing a dictionary object. 
* @throws Exception the exception */ private List<String> getObjectValue(EwsServiceXmlReader reader, UserConfigurationDictionaryObjectType type) throws Exception { EwsUtilities.EwsAssert(reader != null, "UserConfigurationDictionary.LoadFromXml", "reader is null"); List<String> values = new ArrayList<String>(); reader.readStartElement(this.getNamespace(), XmlElementNames.Value); do { String value = null; if (reader.isEmptyElement()) { // Only string types can be represented with empty values. if (type.equals(UserConfigurationDictionaryObjectType.String) || type .equals(UserConfigurationDictionaryObjectType. StringArray)) { value = ""; } else { EwsUtilities .EwsAssert( false, "UserConfigurationDictionary." + "GetObjectValue", "Empty element passed for type: " + type.toString()); } } else { value = reader.readElementValue(); } values.add(value); reader.read(); // Position at next element or // DictionaryKey/DictionaryValue end element } while (reader.isStartElement(this.getNamespace(), XmlElementNames.Value)); return values; } /** * Extracts the dictionary object (key or entry value) type from the * specified reader. * * @param reader The reader. * @return Dictionary object type. * @throws Exception the exception */ private UserConfigurationDictionaryObjectType getObjectType( EwsServiceXmlReader reader) throws Exception { EwsUtilities.EwsAssert(reader != null, "UserConfigurationDictionary.LoadFromXml", "reader is null"); reader.readStartElement(this.getNamespace(), XmlElementNames.Type); String type = reader.readElementValue(); return UserConfigurationDictionaryObjectType.valueOf(type); } /** * Constructs a dictionary object (key or entry value) from the specified * type and string list. * * @param type Object type to construct. * @param value Value of the dictionary object as a string list * @param reader The reader. * @return Dictionary object. 
*/ private Object constructObject(UserConfigurationDictionaryObjectType type, List<String> value, EwsServiceXmlReader reader) { EwsUtilities.EwsAssert(value != null, "UserConfigurationDictionary.ConstructObject", "value is null"); EwsUtilities .EwsAssert( (value.size() == 1 || type == UserConfigurationDictionaryObjectType.StringArray), "UserConfigurationDictionary.ConstructObject", "value is array but type is not StringArray"); EwsUtilities .EwsAssert(reader != null, "UserConfigurationDictionary.ConstructObject", "reader is null"); Object dictionaryObject = null; if (type.equals(UserConfigurationDictionaryObjectType.Boolean)) { dictionaryObject = Boolean.parseBoolean(value.get(0)); } else if (type.equals(UserConfigurationDictionaryObjectType.Byte)) { dictionaryObject = Byte.parseByte(value.get(0)); } else if (type.equals(UserConfigurationDictionaryObjectType.ByteArray)) { dictionaryObject = Base64EncoderStream.decode(value.get(0)); } else if (type.equals(UserConfigurationDictionaryObjectType.DateTime)) { Date dateTime = reader.getService() .convertUniversalDateTimeStringToDate(value.get(0)); if (dateTime != null) { dictionaryObject = dateTime; } else { EwsUtilities.EwsAssert(false, "UserConfigurationDictionary.ConstructObject", "DateTime is null"); } } else if (type.equals(UserConfigurationDictionaryObjectType.Integer32)) { dictionaryObject = Integer.parseInt(value.get(0)); } else if (type.equals(UserConfigurationDictionaryObjectType.Integer64)) { dictionaryObject = Long.parseLong(value.get(0)); } else if (type.equals(UserConfigurationDictionaryObjectType.String)) { dictionaryObject = String.valueOf(value.get(0)); } else if (type .equals(UserConfigurationDictionaryObjectType.StringArray)) { dictionaryObject = value.toArray(); } else if (type .equals(UserConfigurationDictionaryObjectType. UnsignedInteger32)) { dictionaryObject = Integer.parseInt(value.get(0)); } else if (type .equals(UserConfigurationDictionaryObjectType. 
UnsignedInteger64)) { dictionaryObject = Long.parseLong(value.get(0)); } else { EwsUtilities.EwsAssert(false, "UserConfigurationDictionary.ConstructObject", "Type not recognized: " + type.toString()); } return dictionaryObject; } /** * Validates the specified key and value. * * @param key The key. * @param value The diction dictionary entry key.ary entry value. * @throws Exception the exception */ private void validateEntry(Object key, Object value) throws Exception { this.validateObject(key); this.validateObject(value); } /** * Validates the dictionary object (key or entry value). * * @param dictionaryObject Object to validate. * @throws Exception the exception */ @SuppressWarnings("unchecked") private void validateObject(Object dictionaryObject) throws Exception { // Keys may not be null but we rely on the internal dictionary to throw // if the key is null. if (dictionaryObject != null) { if (dictionaryObject.getClass().isArray()) { int length = Array.getLength(dictionaryObject); Class wrapperType = Array.get(dictionaryObject, 0).getClass(); Object[] newArray = (Object[]) Array. newInstance(wrapperType, length); for (int i = 0; i < length; i++) { newArray[i] = Array.get(dictionaryObject, i); } this.validateArrayObject(newArray); } else { this.validateObjectType(dictionaryObject); } } else { throw new NullPointerException(); } } /** * Validate the array object. * * @param dictionaryObjectAsArray Object to validate * @throws microsoft.exchange.webservices.data.ServiceLocalException the service local exception */ private void validateArrayObject(Object[] dictionaryObjectAsArray) throws ServiceLocalException { // This logic is based on // Microsoft.Exchange.Data.Storage.ConfigurationDictionary. // CheckElementSupportedType(). 
// if (dictionaryObjectAsArray is string[]) if (dictionaryObjectAsArray instanceof String[]) { if (dictionaryObjectAsArray.length > 0) { for (Object arrayElement : dictionaryObjectAsArray) { if (arrayElement == null) { throw new ServiceLocalException( Strings.NullStringArrayElementInvalid); } } } else { throw new ServiceLocalException(Strings.ZeroLengthArrayInvalid); } } else if (dictionaryObjectAsArray instanceof Byte[]) { if (dictionaryObjectAsArray.length <= 0) { throw new ServiceLocalException(Strings.ZeroLengthArrayInvalid); } } else { throw new ServiceLocalException(String.format( Strings.ObjectTypeNotSupported, dictionaryObjectAsArray .getClass())); } } /** * Validates the dictionary object type. * * @param theObject Object to validate. * @throws microsoft.exchange.webservices.data.ServiceLocalException the service local exception */ private void validateObjectType(Object theObject) throws ServiceLocalException { // This logic is based on // Microsoft.Exchange.Data.Storage.ConfigurationDictionary. // CheckElementSupportedType(). boolean isValidType = false; if (theObject != null) { if (theObject instanceof String || theObject instanceof Boolean || theObject instanceof Byte || theObject instanceof Long || theObject instanceof Date || theObject instanceof Integer) { isValidType = true; } } if (!isValidType) { throw new ServiceLocalException( String.format(Strings.ObjectTypeNotSupported, (theObject != null ? theObject.getClass().toString() : "null"))); } } /* * (non-Javadoc) * * @see java.lang.Iterable#iterator() */ @Override public Iterator<Object> iterator() { return this.dictionary.values().iterator(); } }
pinkman0u0/dtboot
src/main/java/com/pinkman/dtboot/service/SysLogService.java
<filename>src/main/java/com/pinkman/dtboot/service/SysLogService.java
package com.pinkman.dtboot.service;

import com.pinkman.dtboot.entity.SysLog;

/**
 * Service layer contract for persisting system operation log records.
 *
 * @description:
 * @author: PINKMAN
 * @create: 2019-09-25 10:38
 **/
public interface SysLogService {

    /**
     * Persists the given system log entry.
     *
     * @param sysLog the log record to save; presumably must be non-null —
     *               TODO confirm against implementations
     */
    void save(SysLog sysLog);
}
bigjunnn/addressbook-level3
src/test/java/dukecooks/model/workout/exercise/ExerciseSetAttemptTest.java
package dukecooks.model.workout.exercise;

import static dukecooks.testutil.exercise.TypicalExercises.SET_ATTEMPT_WITH_DISTANCE_AND_TIMING;
import static dukecooks.testutil.exercise.TypicalExercises.SET_ATTEMPT_WITH_WEIGHT_AND_REPETITIONS;
import static dukecooks.testutil.exercise.TypicalExercises.SET_ATTEMPT_WITH_WEIGHT_AND_REPETITIONS_2;
import static dukecooks.testutil.exercise.TypicalExercises.SET_ATTEMPT_WITH_WEIGHT_AND_TIMING;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;

import org.junit.jupiter.api.Test;

/**
 * Unit tests for {@code ExerciseSetAttempt#equals}, covering reflexivity,
 * value equality via clone, null/type mismatches and per-field differences.
 */
public class ExerciseSetAttemptTest {

    @Test
    public void equals() {
        ExerciseSetAttempt attempt1 = SET_ATTEMPT_WITH_WEIGHT_AND_REPETITIONS;
        ExerciseSetAttempt attempt2 = SET_ATTEMPT_WITH_DISTANCE_AND_TIMING;
        ExerciseSetAttempt attempt3 = SET_ATTEMPT_WITH_WEIGHT_AND_REPETITIONS_2;
        ExerciseSetAttempt attempt4 = SET_ATTEMPT_WITH_WEIGHT_AND_TIMING;

        // same object -> returns true
        assertTrue(attempt1.equals(attempt1));

        // same values (clone is a distinct object with equal fields) -> returns true
        assertTrue(attempt1.equals(attempt1.clone()));

        // null -> returns false
        assertFalse(attempt1.equals(null));

        // different type -> returns false
        assertFalse(attempt1.equals(5));

        // completely different attempt -> returns false
        assertFalse(attempt1.equals(attempt2));

        // different weight value -> returns false
        assertFalse(attempt1.equals(attempt3));

        // same weight value, different second value type -> returns false
        assertFalse(attempt1.equals(attempt4));
    }
}
sarrvesh/Obit
ObitSystem/Obit/python/TableList.py
# $Id$
#-----------------------------------------------------------------------
# Copyright (C) 2005,2019
# Associated Universities, Inc. Washington DC, USA.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with this program; if not, write to the Free
# Software Foundation, Inc., 675 Massachusetts Ave, Cambridge,
# MA 02139, USA.
#
# Correspondence concerning this software should be addressed as follows:
#    Internet email: <EMAIL>.
#    Postal address: <NAME>
#                    National Radio Astronomy Observatory
#                    520 Edgemont Road
#                    Charlottesville, VA 22903-2475 USA
#-----------------------------------------------------------------------

# Python shadow class to ObitTableList class
from __future__ import absolute_import
import Obit, _Obit, InfoList, OErr

class TableList(Obit.TableList):
    """ Python Obit TableList class

    This contains information about the Tables associated with an image
    or dataset.

    Members with python interfaces:
    List - (virtual) Python list of table names and numbers
    """
    def __init__(self, name) :
        # Create the underlying C TableList and attach it to self.this
        super(TableList, self).__init__()
        Obit.CreateTableList(self.this, name)
    def __del__(self, DeleteTableList=_Obit.DeleteTableList):
        # Default argument keeps the C deleter reachable during interpreter
        # shutdown, when module globals may already be cleared
        DeleteTableList(self.this)
    def __setattr__(self,name,value):
        # "me" is forwarded to the underlying C object; everything else is a
        # normal Python attribute
        if name == "me" :
            Obit.TableList_Set_me(self.this,value)
            return
        self.__dict__[name] = value
    def __getattr__(self,name):
        # Only "me" is resolved dynamically (from the C object)
        if name == "me" :
            return Obit.TableList_Get_me(self.this)
        raise AttributeError(str(name))
    def __repr__(self):
        return "<C TableList instance>"

def PGetList (inTL, err):
    """ Returns the contents of an TableList as a Python list

    returns list of [table name, table version] entries
    inTL = Python TableList to read
    err  = Python Obit Error/message stack
    """
    ################################################################
    # Checks
    if not PIsA(inTL):
        raise TypeError("inTL MUST be a Python Obit TableList")
    if err.isErr:
        return None # existing error
    #
    return Obit.TableListGetList(inTL.me, err.me)
    # end PGetList

def PPrint (inTL, err):
    """ Print the contents of an TableList on stderr

    inTL = Python TableList to read
    err  = Python Obit Error/message stack
    """
    ################################################################
    # Checks
    if not PIsA(inTL):
        raise TypeError("inTL MUST be a Python Obit TableList")
    if err.isErr:
        return None # existing error
    #
    return Obit.TableListPrint(inTL.me, err.me)
    # end PPrint

def PCheck (inTL, err):
    """ Check the contents of an TableList

    Any errors listed on err
    inTL = Python TableList to check
    err  = Python Obit Error/message stack
    """
    ################################################################
    # Checks
    if not PIsA(inTL):
        raise TypeError("inTL MUST be a Python Obit TableList")
    if err.isErr:
        return # existing error
    #
    Obit.TableListCheck(inTL.me, err.me)
    # end PCheck

def PGetHigh (inTL, tabType):
    """ Find highest version of a table of a given type

    returns the highest version number of tables of type tabType
    inTL    = Python TableList
    tabType = Table type, e.g. "AIPS CC"
    """
    ################################################################
    # Checks
    if not PIsA(inTL):
        raise TypeError("inTL MUST be a Python Obit TableList")
    #
    return Obit.TableListGetHigh(inTL.me, tabType)
    # end PGetHigh

def PPutHi (inTL, err):
    """ Adds History to Table List

    inTL = Python TableList
    err  = Python Obit Error/message stack
    """
    ################################################################
    # Checks
    if not PIsA(inTL):
        raise TypeError("inTL MUST be a Python Obit TableList")
    #
    Obit.TableListPutHi(inTL.me, err.me)
    # end PPutHi

def PIsA (inTL):
    """ Tells if the input really is a Python Obit TableList

    returns true or false (1,0)
    inTL = Python TableList to test
    """
    ################################################################
    # Checks
    if not isinstance(inTL, TableList):
        return False
    #
    return Obit.TableListIsA(inTL.me)
    # end PIsA
LarryNorth/18xx
lib/engine/part/node.rb
# frozen_string_literal: true module Engine module Part class Node < Base attr_accessor :lanes def clear! @paths = nil @exits = nil end def solo? @tile.nodes.one? end def paths @paths ||= @tile.paths.select { |p| p.nodes.any? { |n| n == self } } end def exits @exits ||= paths.flat_map(&:exits) end def rect? false end def select(paths, corporation: nil) on = paths.map { |p| [p, 0] }.to_h walk(on: on, corporation: corporation) do |path| on[path] = 1 if on[path] end on.keys.select { |p| on[p] == 1 } end # Explore the paths and nodes reachable from this node # # visited: a hashset of visited Nodes # visited_paths: a hashset of visited Paths # on: see Path::Walk # corporation: If set don't walk on adjacent nodes which are blocked for the passed corporation # skip_track: If passed, don't walk on track of that type (ie: :broad track for 1873) # max_nodes: If passed, stop walking after visiting the number of nodes # # This method recursively bubbles up yielded values from nested Node::Walk and Path::Walk calls def walk( visited: {}, on: nil, corporation: nil, visited_paths: {}, counter: Hash.new(0), skip_track: nil, tile_type: :normal, max_nodes: nil ) return if visited[self] visited[self] = true return if max_nodes && visited.size >= max_nodes paths.each do |node_path| next if node_path.track == skip_track node_path.walk(visited: visited_paths, counter: counter, on: on, tile_type: tile_type) do |path, vp, ct| yield path next if path.terminal? path.nodes.each do |next_node| next if next_node == self next if corporation && next_node.blocks?(corporation) next_node.walk( visited: visited, counter: ct, on: on, corporation: corporation, visited_paths: vp, skip_track: skip_track, tile_type: tile_type, max_nodes: max_nodes, ) { |p| yield p } end end end visited.delete(self) unless tile_type == :lawson end end end end
geoadmin/web-mapviewer
tests/unit/example.spec.js
<reponame>geoadmin/web-mapviewer
import { expect } from 'chai'
import { mount } from '@vue/test-utils'

import LangSwitchButton from '@/modules/i18n/components/LangSwitchButton'

// A small example of a unit test of a component in isolation
describe('LangSwitchButton.vue', () => {
    // The component is expected to render its `lang` prop in upper case.
    it('Show lang in upper case', () => {
        const lang = 'fr'
        const wrapper = mount(LangSwitchButton, {
            propsData: {
                lang,
                onClick: () => {}, // no-op handler; click behavior not under test
                isActive: true,
            },
        })
        expect(wrapper.text()).to.include(lang.toUpperCase())
    })
})
jmix-framework/jmix
jmix-flowui/flowui/src/main/java/io/jmix/flowui/xml/facet/FacetProvider.java
/*
 * Copyright 2019 Haulmont.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.jmix.flowui.xml.facet;

import io.jmix.flowui.facet.Facet;
import io.jmix.flowui.xml.layout.ComponentLoader.ComponentContext;
import org.dom4j.Element;

/**
 * Interface for Spring Beans providing non-visual components for screens.
 * <p>
 * An implementation is responsible for one facet type: it declares the XML
 * tag it handles, creates facet instances, and populates them from the
 * screen descriptor.
 *
 * @param <T> type of facet
 * @see Facet
 */
public interface FacetProvider<T extends Facet> {

    /**
     * @return facet interface handled by this provider
     */
    Class<T> getFacetClass();

    /**
     * @return new, unconfigured instance of the facet class
     */
    T create();

    /**
     * @return facet XML tag this provider is registered for
     */
    String getFacetTag();

    /**
     * Loads properties of the facet from XML.
     *
     * @param facet facet instance to configure
     * @param element XML element of the facet in the screen descriptor
     * @param context loading context
     */
    void loadFromXml(T facet, Element element, ComponentContext context);
}
yanyushr/fuchsia
src/graphics/lib/compute/hotsort/vk/hs_glsl_preamble.h
<reponame>yanyushr/fuchsia
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef HS_GLSL_PREAMBLE_ONCE
#define HS_GLSL_PREAMBLE_ONCE

//
// Macro machinery for emitting GLSL preprocessor directives from C macros.
//
// A '#' cannot appear directly in a macro expansion as a directive, so
// HS_HASH() produces the '#' character indirectly: HS_HASH_SYMBOL holds the
// token and HS_EVAL forces an extra rescan so the substitution completes
// before the directive text is assembled.
//

#define HS_EVAL(a)                a
#define HS_HASH_SYMBOL            #
#define HS_HASH()                 HS_EVAL(HS_HASH_SYMBOL)

#define HS_GLSL_EXT()             HS_HASH()extension // will be indented one space
#define HS_GLSL_EXT_REQUIRE(name) HS_GLSL_EXT() name : require
#define HS_GLSL_VERSION(ver)      HS_HASH()version ver // will be indented one space

//
// Emit the GLSL version and the subgroup extensions HotSort relies on.
//

HS_GLSL_VERSION(460)
HS_GLSL_EXT_REQUIRE(GL_KHR_shader_subgroup_basic)
HS_GLSL_EXT_REQUIRE(GL_KHR_shader_subgroup_shuffle)

//
// 64-bit keys additionally need 64-bit integer support.
//

#if HS_KEY_DWORDS == 2
HS_GLSL_EXT_REQUIRE(GL_ARB_gpu_shader_int64)
#endif

//
//
//

#endif

//
//
//
CaostGrace/HearWindMusic
app/src/main/java/cn/onegroup/mobile1603/ui/widget/UnderView.java
package cn.onegroup.mobile1603.ui.widget; import android.animation.Animator; import android.animation.AnimatorListenerAdapter; import android.animation.ObjectAnimator; import android.animation.ValueAnimator; import android.content.Context; import android.os.Handler; import android.os.Message; import android.support.annotation.Nullable; import android.util.AttributeSet; import android.view.MotionEvent; import android.view.View; import cn.onegroup.mobile1603.ui.activity.LockActivity; /** * Created by CaostGrace on 2017/12/22 0022. * mail:<EMAIL> * github:https://github.com/CaostGrace * 简书:http://www.jianshu.com/u/b252a19d88f3 * 内容: */ public class UnderView extends View { private float mStartX; private int mWidth; //真实view private View mMoveView; public View getmMoveView() { return mMoveView; } public void setmMoveView(View mMoveView) { this.mMoveView = mMoveView; } public UnderView(Context context) { this(context, null); } public UnderView(Context context, @Nullable AttributeSet attrs) { this(context, attrs, 0); } public UnderView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) { super(context, attrs, defStyleAttr); mWidth = getWidth(); } Handler mainHandler = new Handler() { @Override public void handleMessage(Message msg) { super.handleMessage(msg); } }; @Override public boolean onTouchEvent(MotionEvent event) { final int action = event.getAction(); final float nx = event.getX(); switch (action) { case MotionEvent.ACTION_DOWN: mStartX = nx; onAnimationEnd(); case MotionEvent.ACTION_MOVE: handleMoveView(nx); break; case MotionEvent.ACTION_UP: case MotionEvent.ACTION_CANCEL: doTriggerEvent(nx); break; } return true; } private void handleMoveView(float x) { float movex = x - mStartX; if (movex < 0) movex = 0; mMoveView.setTranslationX(movex); float mWidthFloat = (float) mWidth;//屏幕显示宽度 if (getBackground() != null) { getBackground().setAlpha((int) ((mWidthFloat - mMoveView.getTranslationX()) / mWidthFloat * 200));//初始透明度的值为200 } } private void 
doTriggerEvent(float x) { float movex = x - mStartX; if (movex > (mWidth * 0.4)) { moveMoveView(mWidth - mMoveView.getLeft(), true);//自动移动到屏幕右边界之外,并finish掉 } else { moveMoveView(-mMoveView.getLeft(), false);//自动移动回初始位置,重新覆盖 } } private void moveMoveView(float to, boolean exit) { ObjectAnimator animator = ObjectAnimator.ofFloat(mMoveView, "translationX", to); animator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() { @Override public void onAnimationUpdate(ValueAnimator animation) { if (getBackground() != null) { getBackground().setAlpha((int) (((float) mWidth - mMoveView.getTranslationX() ) / (float) mWidth * 200)); } } });//随移动动画更新背景透明度 animator.setDuration(250).start(); if (exit) { animator.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { mainHandler.obtainMessage(LockActivity.MSG_LAUNCH_HOME).sendToTarget(); super.onAnimationEnd(animation); } }); } //监听动画结束,利用Handler通知Activity退出 } }
bouvens/parallel1d
demo/index.js
<filename>demo/index.js
// Demo entry point: benchmarks synchronous vs. web-worker-parallel
// factorial computation and renders the results via ./display helpers.
const {
    clear,
    print,
    printArray,
    printCalculationTime,
    restartButton,
    showStart,
    showEnd,
} = require('./display')
const { generateInput, factorial } = require('./synchronous')
const parallel = require('../promisified')
const { DEFAULTS } = require('..')
const FactorialWorker = require('./factorial.worker').default

// Benchmark parameters: INPUT_LENGTH random numbers in the range 1..INPUT_MAX
const INPUT_MAX = 300
const INPUT_LENGTH = 50000

// Promise-based delay, used to let the UI repaint between stages
const sleep = (ms) => new Promise((resolve) => {
    setTimeout(resolve, ms)
})

// Runs the benchmark pipeline: generate input, compute factorials
// synchronously, then in parallel workers, and report the speed-up.
async function startQueue() {
    await sleep(0) // yield once so the cleared UI can paint first

    let start = new Date()
    const input = generateInput(INPUT_MAX, INPUT_LENGTH) // heavy function
    printArray('input', input)
    print(`Generation time: ${new Date() - start} ms\n`)

    await sleep(700)
    start = new Date()
    const syncRunResult = input.map(factorial) // heavy function
    const syncTime = new Date() - start
    printArray('synchronous factorials calculating', syncRunResult)
    printCalculationTime(syncTime)

    await sleep(700)
    start = new Date()
    print(`Start ${DEFAULTS.numberOfWorkers} workers`)
    const result = await parallel(FactorialWorker, { input }, input.length)
    const asyncTime = new Date() - start
    printArray('web workers factorials calculating', result)
    printCalculationTime(asyncTime)

    // Speed-up ratio rounded to one decimal place for display
    const timesFaster = Math.round((syncTime / asyncTime) * 10) / 10
    print(`Parallel calculations were ~${timesFaster} times faster.`)
    showEnd()
}

// Resets the display and kicks off a fresh benchmark run
function benchmark() {
    showStart()
    clear()
    print(`There'll be ${INPUT_LENGTH.toLocaleString('en-US')} numbers in range 1–${
        INPUT_MAX.toLocaleString('en-US')} in the original array. It will be calculated in BigNum and converted to Number for display.\n`)
    startQueue()
}

restartButton.addEventListener('click', benchmark)
benchmark()
kikeqt/Curso_CPP
ejemplo_4_02.cpp
#include <cstdlib>   // EXIT_SUCCESS — was missing; it only compiled via a transitive include
#include <iostream>

// Prints sizeof() for several fundamental types. The exact values are
// implementation-defined; the trailing comments show typical LP64 sizes
// (only sizeof(char) == 1 is guaranteed by the standard).
int main(void)
{
    std::cout << "char -> " << sizeof(char) << std::endl;                     // 1
    std::cout << "unsigned short -> " << sizeof(unsigned short) << std::endl; // 2
    std::cout << "unsigned -> " << sizeof(unsigned) << std::endl;             // 4
    std::cout << "int -> " << sizeof(int) << std::endl;                       // 4
    std::cout << "double -> " << sizeof(double) << std::endl;                 // 8
    std::cout << "long double -> " << sizeof(long double) << std::endl;       // ?
    return EXIT_SUCCESS;
}
gchq/stroom-expression
src/main/java/stroom/dashboard/expression/v1/Evaluator.java
package stroom.dashboard.expression.v1;

import java.io.Serializable;

/**
 * Base class for binary expression evaluators: implementations combine two
 * {@code Val} operands into a single result. Serializable so evaluator trees
 * can be shipped/persisted.
 */
abstract class Evaluator implements Serializable {
    private static final long serialVersionUID = 7429374303172048909L;

    /**
     * Combine the two operand values into a single result value.
     *
     * @param a the left operand
     * @param b the right operand
     * @return the evaluated result
     */
    protected abstract Val evaluate(final Val a, final Val b);
}
JaeYeopHan/customize_your_travel_prj
src/container/entryContainer/index.js
// Barrel file: re-export EntryContainer so consumers can import it from the
// directory path (`.../entryContainer`) instead of the concrete module file.
import EntryContainer from "./EntryContainer";

export default EntryContainer;
mindspore-ai/mindscience
MindSPONGE/mindsponge/md/control/bd_baro.py
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""BD_BARO"""


class BDBARO:
    """Berendsen (BD) barostat configuration.

    Parses pressure-coupling parameters from the controller's command set,
    echoes them to stdout, and records the initial box volume. The barostat
    is marked active only in mode 2 with ``barostat berendsen`` selected.
    """

    def __init__(self, controller, target_pressure, box_length, mode):
        """Initialize the barostat.

        Args:
            controller: object exposing a ``command_set`` mapping of option
                name -> string value (values are parsed with float()/int()).
            target_pressure (float): target pressure in internal units;
                converted to bar for display only.
            box_length: sequence of three box edge lengths.
            mode (int): ensemble mode; 2 enables the barostat
                (presumably NPT — TODO confirm against the caller).
        """
        # Conversion factor from internal pressure units to bar (display only).
        self.constant_pres_convertion = 6.946827162543585e4
        print("START INITIALIZING MC BAROSTAT:\n")
        self.module_name = "bd_baro"
        self.is_controller_printf_initialized = 0
        print(" The target pressure is %.2f bar\n" % (target_pressure * self.constant_pres_convertion))

        # Initial box volume; new_v tracks the current (trial) volume.
        self.v0 = box_length[0] * box_length[1] * box_length[2]
        self.new_v = self.v0

        commands = controller.command_set

        def _get(key, default, cast):
            # Optional command: fall back to the default when absent,
            # otherwise parse the string value with the given cast.
            return default if key not in commands else cast(commands[key])

        self.dt = _get("dt", 1e-3, float)
        print(" The dt is %f ps\n" % self.dt)
        self.taup = _get("tau", 1.0, float)
        print(" The time constant tau is %f ps\n" % self.taup)
        self.compressibility = _get("compressibility", 4.5e-5, float)
        print(" The compressibility constant is %f bar^-1\n" % self.compressibility)
        self.update_interval = _get("update_interval", 10, int)
        print(" The update_interval is %d\n" % self.update_interval)
        self.system_reinitializing_count = 0

        # Active only when mode == 2 and the Berendsen barostat is selected.
        # .get() avoids a KeyError when no "barostat" command was given
        # (original indexed the dict directly).
        if mode == 2 and commands.get("barostat") == "berendsen":
            self.is_initialized = 1
        else:
            self.is_initialized = 0
        if self.is_initialized and not self.is_controller_printf_initialized:
            self.is_controller_printf_initialized = 1
            # Bug fix: the original printed a literal trailing "n"
            # ("...BAROSTATn") instead of the "\n" escape.
            print("END INITIALIZING BERENDSEN BAROSTAT\n")
hazelcast/hazelcast-jet-soak-testing
event-journal-test/src/main/java/com/hazelcast/jet/tests/eventjournal/EventJournalTest.java
/*
 * Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.jet.tests.eventjournal;

import com.hazelcast.client.HazelcastClient;
import com.hazelcast.client.config.ClientConfig;
import com.hazelcast.config.Config;
import com.hazelcast.config.MapConfig;
import com.hazelcast.core.EntryEventType;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.jet.Job;
import com.hazelcast.jet.aggregate.AggregateOperations;
import com.hazelcast.jet.config.JobConfig;
import com.hazelcast.jet.config.ProcessingGuarantee;
import com.hazelcast.jet.pipeline.Pipeline;
import com.hazelcast.jet.pipeline.Sinks;
import com.hazelcast.jet.pipeline.Sources;
import com.hazelcast.jet.tests.common.AbstractSoakTest;
import com.hazelcast.jet.tests.common.QueueVerifier;
import com.hazelcast.logging.ILogger;
import com.hazelcast.logging.LoggingService;
import com.hazelcast.map.EventJournalMapEvent;
import com.hazelcast.map.IMap;

import java.io.IOException;

import static com.hazelcast.function.Functions.wholeItem;
import static com.hazelcast.jet.Util.mapPutEvents;
import static com.hazelcast.jet.core.JobStatus.FAILED;
import static com.hazelcast.jet.pipeline.JournalInitialPosition.START_FROM_OLDEST;
import static com.hazelcast.jet.pipeline.WindowDefinition.sliding;
import static com.hazelcast.jet.tests.common.Util.getJobStatusWithRetry;
import static java.util.concurrent.TimeUnit.SECONDS;

/**
 * Soak test for event-journal sources/sinks: a producer writes timestamped
 * trades into a remote map's event journal, a Jet pipeline aggregates them
 * in sliding windows with EXACTLY_ONCE guarantee, and a verifier consumes
 * the results journal checking every window count.
 */
public class EventJournalTest extends AbstractSoakTest {

    private static final int DEFAULT_LAG = 1500;
    private static final int DEFAULT_TIMESTAMP_PER_SECOND = 50;
    private static final int DEFAULT_SNAPSHOT_INTERVAL = 5000;
    private static final int DEFAULT_WINDOW_SIZE = 20;
    private static final int DEFAULT_SLIDE_BY = 10;
    private static final int DEFAULT_COUNTER_PER_TICKER = 100;
    private static final int EVENT_JOURNAL_CAPACITY = 1_500_000;
    private static final int RESULTS_EVENT_JOURNAL_CAPACITY = 40_000;

    private static final String MAP_NAME = EventJournalTest.class.getSimpleName();
    private static final String RESULTS_MAP_NAME = MAP_NAME + "-RESULTS";

    private int countPerTicker;
    private int snapshotIntervalMs;
    private int lagMs;
    private int timestampPerSecond;
    private int windowSize;
    private int slideBy;
    private int partitionCount;
    private int memberSize;
    private transient ClientConfig remoteClusterClientConfig;
    private transient EventJournalTradeProducer tradeProducer;
    private transient HazelcastInstance remoteClient;

    public static void main(String[] args) throws Exception {
        new EventJournalTest().run(args);
    }

    /**
     * Reads tunables from system properties (falling back to the defaults
     * above) and connects the trade producer to the remote cluster.
     */
    @Override
    public void init(HazelcastInstance client) throws Exception {
        lagMs = propertyInt("lagMs", DEFAULT_LAG);
        timestampPerSecond = propertyInt("timestampPerSecond", DEFAULT_TIMESTAMP_PER_SECOND);
        snapshotIntervalMs = propertyInt("snapshotIntervalMs", DEFAULT_SNAPSHOT_INTERVAL);
        windowSize = propertyInt("windowSize", DEFAULT_WINDOW_SIZE);
        slideBy = propertyInt("slideBy", DEFAULT_SLIDE_BY);
        countPerTicker = propertyInt("countPerTicker", DEFAULT_COUNTER_PER_TICKER);
        remoteClusterClientConfig = remoteClusterClientConfig();

        configureTradeProducer();
    }

    /**
     * Submits the windowed-aggregation job, starts the producer, then loops
     * for the configured duration draining the results journal and asserting
     * each window's count. A FAILED job status short-circuits via join()
     * (which rethrows the job's failure).
     */
    @Override
    public void test(HazelcastInstance client, String name) throws Exception {
        JobConfig jobConfig = new JobConfig();
        jobConfig.setName(name);
        jobConfig.setSnapshotIntervalMillis(snapshotIntervalMs);
        jobConfig.setProcessingGuarantee(ProcessingGuarantee.EXACTLY_ONCE);

        Job job = client.getJet().newJob(pipeline(), jobConfig);

        tradeProducer.start();

        // Each key should appear in windowSize/slideBy overlapping windows.
        int windowCount = windowSize / slideBy;
        LoggingService loggingService = client.getLoggingService();
        QueueVerifier queueVerifier = new QueueVerifier(loggingService,
                "Verifier[" + RESULTS_MAP_NAME + "]", windowCount).startVerification();

        IMap<Long, Long> resultMap = remoteClient.getMap(RESULTS_MAP_NAME);
        EventJournalConsumer<Long, Long> consumer = new EventJournalConsumer<>(resultMap, mapPutEvents(),
                partitionCount);

        long begin = System.currentTimeMillis();
        while (System.currentTimeMillis() - begin < durationInMillis) {
            // Drain whatever results arrived; sleep only when the journal was empty.
            boolean isEmpty = consumer.drain(e -> {
                assertEquals("EXACTLY_ONCE -> Unexpected count for " + e.getKey(),
                        countPerTicker, (long) e.getNewValue());
                queueVerifier.offer(e.getKey());
            });
            if (isEmpty) {
                SECONDS.sleep(1);
            }
            if (getJobStatusWithRetry(job) == FAILED) {
                job.join();
            }
        }
        if (getJobStatusWithRetry(job) == FAILED) {
            job.join();
        }
        System.out.println("Cancelling jobs..");

        queueVerifier.close();
        job.cancel();
    }

    // NOTE(review): no @Override here — presumably AbstractSoakTest declares
    // teardown(Throwable); confirm the signature matches the base class.
    protected void teardown(Throwable t) throws Exception {
        if (tradeProducer != null) {
            tradeProducer.close();
        }
        if (remoteClient != null) {
            remoteClient.shutdown();
        }
    }

    /**
     * Pipeline: remote map journal source (ADDED events only, event time =
     * the value itself) -> sliding window -> count per key -> remote results map.
     */
    private Pipeline pipeline() {
        Pipeline pipeline = Pipeline.create();

        pipeline.readFrom(Sources.<Long, Long, Long>remoteMapJournal(MAP_NAME, remoteClusterClientConfig,
                START_FROM_OLDEST, EventJournalMapEvent::getNewValue, e -> e.getType() == EntryEventType.ADDED))
                .withTimestamps(t -> t, lagMs).setName("Read from map(" + MAP_NAME + ")")
                .setLocalParallelism(partitionCount / memberSize)
                .window(sliding(windowSize, slideBy))
                .groupingKey(wholeItem())
                .aggregate(AggregateOperations.counting()).setName("Aggregate(count)")
                .writeTo(Sinks.remoteMap(RESULTS_MAP_NAME, remoteClusterClientConfig))
                .setName("Write to map(" + RESULTS_MAP_NAME + ")");

        return pipeline;
    }

    /**
     * Connects to the remote cluster, enables event journals on both maps
     * (source and results) and builds the trade producer over the source map.
     */
    private void configureTradeProducer() throws IOException {
        remoteClient = HazelcastClient.newHazelcastClient(remoteClusterClientConfig);

        memberSize = remoteClient.getCluster().getMembers().size();
        partitionCount = remoteClient.getPartitionService().getPartitions().size();

        Config config = remoteClient.getConfig();
        MapConfig mapConfig = new MapConfig(MAP_NAME);
        mapConfig.getEventJournalConfig()
                .setCapacity(EVENT_JOURNAL_CAPACITY)
                .setEnabled(true);
        config.addMapConfig(mapConfig);

        MapConfig mapConfig2 = new MapConfig(RESULTS_MAP_NAME);
        mapConfig2.getEventJournalConfig()
                .setCapacity(RESULTS_EVENT_JOURNAL_CAPACITY)
                .setEnabled(true);
        config.addMapConfig(mapConfig2);

        ILogger producerLogger = getLogger(EventJournalTradeProducer.class);
        IMap<Long, Long> map = remoteClient.getMap(MAP_NAME);
        tradeProducer = new EventJournalTradeProducer(countPerTicker, map, timestampPerSecond, producerLogger);
    }
}
singhaditya28/fs_image
fs_image/rpm/storage/__init__.py
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from .storage import Storage, StorageInput, StorageOutput
from .cli_object_storage import CLIObjectStorage

# Bug fix: `__all__` must contain the public attribute *names* as strings.
# The original listed the class objects themselves, which raises a TypeError
# on `from ... import *` (Python requires every __all__ item to be a str).
__all__ = ["Storage", "StorageInput", "StorageOutput", "CLIObjectStorage"]

# Register implementations with Storage
from . import filesystem_storage, s3_storage  # noqa: F401
try:
    # Import FB-specific implementations if available
    from . import facebook  # noqa: F401
except ImportError:  # pragma: no cover
    pass
promethe42/ifcplusplus
IfcPlusPlus/src/ifcpp/IFC4/lib/IfcPresentationLayerWithStyle.cpp
/* Code generated by IfcQuery EXPRESS generator, www.ifcquery.com */
#include <sstream>
#include <limits>

#include "ifcpp/model/AttributeObject.h"
#include "ifcpp/model/BuildingException.h"
#include "ifcpp/model/BuildingGuid.h"
#include "ifcpp/reader/ReaderUtil.h"
#include "ifcpp/writer/WriterUtil.h"
#include "ifcpp/IFC4/include/IfcIdentifier.h"
#include "ifcpp/IFC4/include/IfcLabel.h"
#include "ifcpp/IFC4/include/IfcLayeredItem.h"
#include "ifcpp/IFC4/include/IfcLogical.h"
#include "ifcpp/IFC4/include/IfcPresentationLayerWithStyle.h"
#include "ifcpp/IFC4/include/IfcPresentationStyle.h"
#include "ifcpp/IFC4/include/IfcText.h"

// ENTITY IfcPresentationLayerWithStyle
// Generated STEP (ISO 10303-21) serialization/deserialization code for the
// IFC4 entity; attribute order in getStepLine/readStepArguments is the
// wire format and must not change.
IfcPresentationLayerWithStyle::IfcPresentationLayerWithStyle() {}
IfcPresentationLayerWithStyle::IfcPresentationLayerWithStyle( int id ) { m_entity_id = id; }
IfcPresentationLayerWithStyle::~IfcPresentationLayerWithStyle() {}

// Deep copy: every attribute (and every element of the two vectors) is cloned;
// null attributes stay null.
shared_ptr<BuildingObject> IfcPresentationLayerWithStyle::getDeepCopy( BuildingCopyOptions& options )
{
	shared_ptr<IfcPresentationLayerWithStyle> copy_self( new IfcPresentationLayerWithStyle() );
	if( m_Name ) { copy_self->m_Name = dynamic_pointer_cast<IfcLabel>( m_Name->getDeepCopy(options) ); }
	if( m_Description ) { copy_self->m_Description = dynamic_pointer_cast<IfcText>( m_Description->getDeepCopy(options) ); }
	for( size_t ii=0; ii<m_AssignedItems.size(); ++ii )
	{
		auto item_ii = m_AssignedItems[ii];
		if( item_ii )
		{
			copy_self->m_AssignedItems.push_back( dynamic_pointer_cast<IfcLayeredItem>(item_ii->getDeepCopy(options) ) );
		}
	}
	if( m_Identifier ) { copy_self->m_Identifier = dynamic_pointer_cast<IfcIdentifier>( m_Identifier->getDeepCopy(options) ); }
	if( m_LayerOn ) { copy_self->m_LayerOn = dynamic_pointer_cast<IfcLogical>( m_LayerOn->getDeepCopy(options) ); }
	if( m_LayerFrozen ) { copy_self->m_LayerFrozen = dynamic_pointer_cast<IfcLogical>( m_LayerFrozen->getDeepCopy(options) ); }
	if( m_LayerBlocked ) { copy_self->m_LayerBlocked = dynamic_pointer_cast<IfcLogical>( m_LayerBlocked->getDeepCopy(options) ); }
	for( size_t ii=0; ii<m_LayerStyles.size(); ++ii )
	{
		auto item_ii = m_LayerStyles[ii];
		if( item_ii )
		{
			copy_self->m_LayerStyles.push_back( dynamic_pointer_cast<IfcPresentationStyle>(item_ii->getDeepCopy(options) ) );
		}
	}
	return copy_self;
}

// Writes the full STEP line "#<id>= IFCPRESENTATIONLAYERWITHSTYLE(...);".
// Unset optional attributes are emitted as "$".
void IfcPresentationLayerWithStyle::getStepLine( std::stringstream& stream ) const
{
	stream << "#" << m_entity_id << "= IFCPRESENTATIONLAYERWITHSTYLE" << "(";
	if( m_Name ) { m_Name->getStepParameter( stream ); } else { stream << "$"; }
	stream << ",";
	if( m_Description ) { m_Description->getStepParameter( stream ); } else { stream << "$"; }
	stream << ",";
	// AssignedItems is a SELECT list: written inline, element type tags included.
	stream << "(";
	for( size_t ii = 0; ii < m_AssignedItems.size(); ++ii )
	{
		if( ii > 0 )
		{
			stream << ",";
		}
		const shared_ptr<IfcLayeredItem>& type_object = m_AssignedItems[ii];
		if( type_object )
		{
			type_object->getStepParameter( stream, true );
		}
		else
		{
			stream << "$";
		}
	}
	stream << ")";
	stream << ",";
	if( m_Identifier ) { m_Identifier->getStepParameter( stream ); } else { stream << "$"; }
	stream << ",";
	if( m_LayerOn ) { m_LayerOn->getStepParameter( stream ); } else { stream << "$"; }
	stream << ",";
	if( m_LayerFrozen ) { m_LayerFrozen->getStepParameter( stream ); } else { stream << "$"; }
	stream << ",";
	if( m_LayerBlocked ) { m_LayerBlocked->getStepParameter( stream ); } else { stream << "$"; }
	stream << ",";
	writeEntityList( stream, m_LayerStyles );
	stream << ");";
}

// As a parameter of another entity this instance is written as a reference "#<id>".
void IfcPresentationLayerWithStyle::getStepParameter( std::stringstream& stream, bool ) const { stream << "#" << m_entity_id; }

const std::wstring IfcPresentationLayerWithStyle::toString() const { return L"IfcPresentationLayerWithStyle"; }

// Parses the 8 STEP arguments in declaration order; throws on a count mismatch.
void IfcPresentationLayerWithStyle::readStepArguments( const std::vector<std::wstring>& args, const std::map<int,shared_ptr<BuildingEntity> >& map )
{
	const size_t num_args = args.size();
	if( num_args != 8 ){ std::stringstream err; err << "Wrong parameter count for entity IfcPresentationLayerWithStyle, expecting 8, having " << num_args << ". Entity ID: " << m_entity_id << std::endl; throw BuildingException( err.str().c_str() ); }
	m_Name = IfcLabel::createObjectFromSTEP( args[0], map );
	m_Description = IfcText::createObjectFromSTEP( args[1], map );
	readSelectList( args[2], m_AssignedItems, map );
	m_Identifier = IfcIdentifier::createObjectFromSTEP( args[3], map );
	m_LayerOn = IfcLogical::createObjectFromSTEP( args[4], map );
	m_LayerFrozen = IfcLogical::createObjectFromSTEP( args[5], map );
	m_LayerBlocked = IfcLogical::createObjectFromSTEP( args[6], map );
	readEntityReferenceList( args[7], m_LayerStyles, map );
}

// Exposes the subclass attributes (base-class attributes come from the parent call).
void IfcPresentationLayerWithStyle::getAttributes( std::vector<std::pair<std::string, shared_ptr<BuildingObject> > >& vec_attributes ) const
{
	IfcPresentationLayerAssignment::getAttributes( vec_attributes );
	vec_attributes.push_back( std::make_pair( "LayerOn", m_LayerOn ) );
	vec_attributes.push_back( std::make_pair( "LayerFrozen", m_LayerFrozen ) );
	vec_attributes.push_back( std::make_pair( "LayerBlocked", m_LayerBlocked ) );
	if( m_LayerStyles.size() > 0 )
	{
		shared_ptr<AttributeObjectVector> LayerStyles_vec_object( new AttributeObjectVector() );
		std::copy( m_LayerStyles.begin(), m_LayerStyles.end(), std::back_inserter( LayerStyles_vec_object->m_vec ) );
		vec_attributes.push_back( std::make_pair( "LayerStyles", LayerStyles_vec_object ) );
	}
}

// No inverse attributes are added at this level; delegate to the base class.
void IfcPresentationLayerWithStyle::getAttributesInverse( std::vector<std::pair<std::string, shared_ptr<BuildingObject> > >& vec_attributes_inverse ) const
{
	IfcPresentationLayerAssignment::getAttributesInverse( vec_attributes_inverse );
}

void IfcPresentationLayerWithStyle::setInverseCounterparts( shared_ptr<BuildingEntity> ptr_self_entity )
{
	IfcPresentationLayerAssignment::setInverseCounterparts( ptr_self_entity );
}

void IfcPresentationLayerWithStyle::unlinkFromInverseCounterparts()
{
	IfcPresentationLayerAssignment::unlinkFromInverseCounterparts();
}
ManageIQ/topological_inventory-ui
src/test/components/sourcesTable/EmptyStateTable.test.js
import React from 'react'; import { render, screen } from '@testing-library/react'; import userEvent from '@testing-library/user-event'; import { componentWrapperIntl } from '../../../utilities/testsHelpers'; import EmptyStateTable from '../../../components/SourcesTable/EmptyStateTable'; import * as actions from '../../../redux/sources/actions'; describe('EmptyStateTable', () => { it('render correctly', () => { render(componentWrapperIntl(<EmptyStateTable />)); expect(screen.getByText('No sources found')).toBeInTheDocument(); expect( screen.getByText('No sources match the filter criteria. Remove all filters or clear all filters to show sources.') ).toBeInTheDocument(); }); it('calls clear filters when click on button', async () => { actions.clearFilters = jest.fn().mockImplementation(() => ({ type: 'cosi' })); render(componentWrapperIntl(<EmptyStateTable />)); await userEvent.click(screen.getByText('Clear all filters')); expect(actions.clearFilters).toHaveBeenCalled(); }); });
chaoxing-gsd/GSD
GSD-open/src/main/java/com/chaoxing/gsd/web/SSPController.java
package com.chaoxing.gsd.web;

import com.alibaba.fastjson.JSON;
import com.chaoxing.gsd.modules.service.RsService;
import com.chaoxing.gsd.service.DownloadIndexService;
import com.chaoxing.gsd.service.SearchESClusterService;
import com.chaoxing.gsd.service.SearchESIndexService;
import com.chaoxing.gsd.service.WebpageIndexService;
import com.chaoxing.gsd.utils.IDUtils;
import com.chaoxing.gsd.web.res.BaseRes;
import com.chaoxing.gsd.web.res.BaseResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.POST;
import java.io.*;
import java.util.*;

import com.spreada.utils.chinese.*;
import com.chaoxing.gsd.modules.entity.Gis;

/**
 * Elasticsearch index operations: search, cluster search, lookup/delete by
 * document id, export of saved web pages (RIS/BibTeX) and GIS filtering.
 *
 * @author heyang
 * @date 2018/08/27
 */
@Controller
@RequestMapping("/gsd")
public class SSPController {

	@Autowired
	private SearchESIndexService esService;
	@Autowired
	private RsService rsService;
	@Autowired
	private DownloadIndexService downloadIndexService;
	@Autowired
	private SearchESClusterService sec;
	@Autowired
	private WebpageIndexService webpageis;

	// Allowed literature message_types values: BK=book, JN=journal,
	// DT=dissertation, CP=conference paper, PAT=patent, ST=standard,
	// NP=newspaper, TR=scientific achievement, YB=yearbook,
	// LAR=laws & regulations, INF=information consulting, CAS=case.
	@SuppressWarnings("unused")
	private static String[] message_types = { "BK", "DT", "JN", "CP", "PAT", "ST", "NP", "TR", "YB", "LAR", "INF",
			"CAS" };

	// Allowed document_type values for user-saved pages:
	// "webpage" = mail-collected web page, "literature" = literature record.
	public static final List<String> DOCUMENT_TYPES = Arrays.asList(new String[] { "webpage", "literature" });

	public static Logger logger = LoggerFactory.getLogger(SSPController.class);

	/**
	 * Search the given content across multiple indexes.
	 *
	 * @author heyang
	 * @param indexNames array of index names to search
	 * @param content    search text (converted to Simplified Chinese first)
	 * @param field      field to search in
	 * @param pageSize   records per page
	 * @param pageNum    page number, starting at 0
	 */
	@POST
	@RequestMapping("/search2")
	@ResponseBody
	public BaseResponse search2(@RequestParam(value = "indexnames") String[] indexNames,
			@RequestParam(value = "content") String content, @RequestParam(value = "field") String field,
			@RequestParam(value = "pagesize") Integer pageSize, @RequestParam(value = "pagenum") Integer pageNum) {
		long begin1 = System.currentTimeMillis();
		// Convert the search content to Simplified Chinese.
		String trueSearch = content == null ? content : 简繁转换类.转换(content, 简繁转换类.目标.简体);
		BaseResponse resp = new BaseResponse();
		Map<String, Object> map = esService.search2(indexNames, trueSearch, field, pageSize, pageNum);
		resp.setStatu(true);
		resp.setData(map);
		logger.info("search2 from es index: {} toast time:{} ", JSON.toJSONString(indexNames),
				System.currentTimeMillis() - begin1);
		return resp;
	}

	/**
	 * Cluster search across the specified indexes (the seven Harvard
	 * libraries: textref_zhonghuajingdian, textref_kanripo, textref_ctext,
	 * textref_cbta, biogref_dnb, biogref_ddbc, biogref_cbdb).
	 * If {@code field} is empty all fields are searched. Returned cluster
	 * names include: "gender","born_year","died_year","dynasty","jiguan",
	 * "author","edition","collection","notes".
	 *
	 * @param indexNames index names to search
	 * @param content    search text
	 * @param field      field to search in (empty = all fields)
	 */
	@POST
	@RequestMapping("/searchclusters2")
	@ResponseBody
	public BaseResponse searchClusters2(@RequestParam(value = "indexnames", required = true) String[] indexNames,
			@RequestParam(value = "content", required = true) String content,
			@RequestParam(value = "field", required = true) String field) {
		long begin1 = System.currentTimeMillis();
		BaseResponse resp = new BaseResponse();
		Map<?, ?> map = sec.search2(indexNames, content, field);
		resp.setStatu(true);
		resp.setData(map);
		logger.info("searchclusters2 from es index: {} toast time:{} ", JSON.toJSONString(indexNames),
				System.currentTimeMillis() - begin1);
		return resp;
	}

	/**
	 * Look up a document by index name and document id.
	 *
	 * @author heyang
	 * @param indexName  index name
	 * @param documentId document id
	 */
	@PostMapping("/es/searchindexbyid")
	@ResponseBody
	public BaseResponse searchDocumentById(@RequestParam(name = "indexName") String indexName,
			@RequestParam(name = "indexId") String documentId) {
		long begin1 = System.currentTimeMillis();
		BaseResponse resp = new BaseResponse();
		try {
			List<Map<String, Object>> result = esService.searchDocumentById(indexName, documentId);
			resp.setStatu(true);
			resp.setData(result);
		} catch (Exception e) {
			logger.info("searchindexbyid from es index: {} error:{} ", indexName, e);
			resp = BaseRes.getErrorResponse();
		}
		logger.info("searchindexbyid from es index: {} toast time:{} ", indexName,
				System.currentTimeMillis() - begin1);
		return resp;
	}

	/**
	 * Delete documents by index name and an array of document ids.
	 *
	 * @author heyang
	 * @param indexName   index name, e.g. "webpage"
	 * @param documentIds document ids to delete
	 * @param userId      user id (currently unused by the deletion itself)
	 */
	@PostMapping("/es/deldocumentbyids")
	@ResponseBody
	public BaseResponse delDocumentById(@RequestParam(name = "indexName") String indexName,
			@RequestParam(name = "documentIds") String[] documentIds,
			@RequestParam(name = "userId", required = false) String userId) {
		long begin1 = System.currentTimeMillis();
		BaseResponse resp = new BaseResponse();
		try {
			for (String documentId : documentIds) {
				esService.delDocumentById(indexName, documentId);
			}
			resp.setStatu(true);
		} catch (Exception e) {
			logger.info("deldocumentbyids from es indexName: {} error:{} ", indexName, e);
			resp = BaseRes.getErrorResponse();
		}
		logger.info("deldocumentbyids from es indexName: {} toast time:{} ", indexName,
				System.currentTimeMillis() - begin1);
		return resp;
	}

	/**
	 * Export saved web pages from the "webpage" index as a downloadable file.
	 *
	 * @author heyang
	 * @param webpageIds   web-page document ids
	 * @param userId       user id (for the export record)
	 * @param downloadType export format: 1 = RIS, 2 = BibTeX (only 1 or 2)
	 */
	@PostMapping("/es/downloadwebpage")
	@ResponseBody
	public void downLoadWebPage(@RequestParam(name = "webpageIds") String[] webpageIds,
			@RequestParam(name = "userId") String userId, @RequestParam(name = "downloadType") int downloadType,
			HttpServletResponse response, HttpServletRequest request) {
		try {
			if (downloadType != 1 && downloadType != 2) {
				logger.warn("downloadType只能输入1或者2");
				return;
			}
			List<Map<String, Object>> list = new ArrayList<>();
			// Fetch every requested web page; missing ids are skipped silently.
			for (String indexId : webpageIds) {
				List<Map<String, Object>> result = esService.searchDocumentById("webpage", indexId);
				if (result.size() != 0) {
					Map<String, Object> map = result.get(0);
					try {
						list.add(map);
					} catch (Exception e) {
						logger.error("downLoadWebPage error:{}", e);
					}
				}
			}
			// Build a temp file named by a fresh token, with the extension
			// matching the requested format.
			String token = IDUtils.maketoken();
			String path = "";
			if (downloadType == 1) {
				path = "/" + token + ".ris";
			} else if (downloadType == 2) {
				path = "/" + token + ".bib";
			}
			String fileName = request.getServletContext().getRealPath(path);
			File file = new File(fileName);
			// Create the file if it does not exist yet.
			if (!file.exists()) {
				file.createNewFile();
			}
			// Write, download, then delete the temp file.
			if (downloadType == 1) {
				downloadIndexService.writeris(list, file);
			} else if (downloadType == 2) {
				downloadIndexService.writebib(list, file);
			}
			downLoad(response, fileName);
			file.delete();
			// Asynchronously record the export.
			toSaveExportLiteratureRecord(userId, webpageIds);
		} catch (Exception e) {
			logger.error("downLoadWebPage error io:{}", e);
		}
	}

	/**
	 * Save the literature-export record.
	 *
	 * NOTE(review): @Async on a private method invoked via a plain this-call
	 * is not intercepted by Spring's proxy, so this presumably runs
	 * synchronously — confirm and consider moving it to a separate bean.
	 *
	 * @param userId       user id
	 * @param literatureId literature ids
	 */
	@Async("asyncServiceExecutor")
	private void toSaveExportLiteratureRecord(String userId, String[] literatureId) {
		rsService.toSaveExportLiteratureRecord(userId, literatureId);
	}

	/**
	 * Stream a text file to the HTTP response.
	 *
	 * NOTE(review): FileReader uses the platform default charset, and
	 * reader/out are not closed in a finally block — an exception mid-stream
	 * leaks both; confirm whether try-with-resources is acceptable here.
	 *
	 * @author heyang
	 * @param path file path
	 */
	public void downLoad(HttpServletResponse response, String path) throws IOException {
		File file = new File(path);
		response.setContentType("text/html;charset=utf-8");
		FileReader reader = new FileReader(file);
		// Character-buffered copy (text only, not suitable for binary data).
		PrintWriter out = response.getWriter();
		char buffer[] = new char[1024];
		int len = 1024;
		while ((len = reader.read(buffer)) != -1) {
			out.write(buffer, 0, len);
		}
		// Release resources.
		reader.close();
		out.flush();
		out.close();
	}

	/**
	 * GIS filter.
	 *
	 * @author heyang
	 * @param indexName index name, e.g. sanfrancisco_picture (used for
	 *                  logging only — the service call takes no arguments)
	 */
	@PostMapping("/es/gis")
	@ResponseBody
	public BaseResponse Gis(@RequestParam(name = "indexName") String indexName) {
		long begin1 = System.currentTimeMillis();
		BaseResponse resp = new BaseResponse();
		try {
			List<Gis> result = webpageis.Gis();
			resp.setStatu(true);
			resp.setData(result);
		} catch (Exception e) {
			logger.info("gis from es index: {} error:{} ", indexName, e);
			resp = BaseRes.getErrorResponse();
		}
		logger.info("gis from es index: {} toast time:{} ", indexName, System.currentTimeMillis() - begin1);
		return resp;
	}
}
Allocamelus/Allocamelus
internal/router/handlers/api/v1/account/logout.go
<filename>internal/router/handlers/api/v1/account/logout.go<gh_stars>1-10 package account import ( "github.com/allocamelus/allocamelus/internal/user" "github.com/gofiber/fiber/v2" ) // Logout handler func Logout(c *fiber.Ctx) error { user.Logout(c) return c.SendStatus(204) }
JonathanDUFOUR/libft_io_tester
srcs/test/test_ft_convert_base.c
/* ************************************************************************** */ /* */ /* ::: :::::::: */ /* test_ft_convert_base.c :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: jodufour <<EMAIL>> +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2021/11/16 22:48:42 by jodufour #+# #+# */ /* Updated: 2022/04/27 09:32:26 by jodufour ### ########.fr */ /* */ /* ************************************************************************** */ #include <stdio.h> #include <stdlib.h> #include "ft_io.h" #include "tester.h" #include "e_ret.h" typedef struct s_test t_test; struct s_test { int const num; char const *str; char const *from; char const *to; char const *expect; }; static t_test const g_test[] = { {1, "", "", "", NULL}, {2, "lala", "l", "dejavu", NULL}, {3, "value", "dejavu", "X", NULL}, {4, "baguette", "vin rouge", "fromage", NULL}, {5, "crusher", "shcr", "tsar bomBA", NULL}, {6, "delta", "gamma", "phi", NULL}, {7, "beta", "epsilon", "alpha", NULL}, {8, "-x-x-x", "x-", "01", NULL}, {9, "101010", "01", "o+", NULL}, {10, " +Matthieu", "aMeutih", "9876543210", "097304"}, {11, "01111111111111111111111111111111", "10", "0123456789abcdef", "-80000000"}, {12, "\t \r-ppppopnnnAvif", "poneyvif", "01", "-1000010010010"}, {13, " \v\f+.!..!..!!!.!!..!", "!.", "0123456789ABCDEF", "B626"}, {14, "Jonathan", "koalA", "fruit", "f"}, {0} }; int test_ft_convert_base(int *const ret) { int i; char *str; char const *expect; printf("%20s:", __func__ + 5); i = 0; while (g_test[i].num) { str = ft_convert_base(g_test[i].str, g_test[i].from, g_test[i].to); expect = g_test[i].expect; result(g_test[i].num, (!str && !expect) || (str && expect && !strcmp(str, expect))); free(str); ++i; } printf("\n"); return (*ret = SUCCESS); }
lasdolphin/kf
pkg/kf/internal/tools/clientgen/tmplfunctional.go
// Copyright 2019 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package clientgen

import (
	"text/template"

	"github.com/google/kf/pkg/kf/internal/tools/generator"
)

// functionalUtilTemplate generates the per-resource functional utilities for
// a kf client: a ResourceInfo type (GVR/GVK metadata), Predicate/Mutator
// function types, diff helpers, a List type, and — when the resource
// supports them — ObservedGeneration and Condition helpers. The template is
// executed against the client-generator's options struct (fields under
// .Kubernetes, .CF, .Type, etc.).
var functionalUtilTemplate = template.Must(template.New("").Funcs(generator.TemplateFuncs()).Parse(`
////////////////////////////////////////////////////////////////////////////////
// Functional Utilities
////////////////////////////////////////////////////////////////////////////////

type ResourceInfo struct{}

// NewResourceInfo returns a new instance of ResourceInfo
func NewResourceInfo() *ResourceInfo {
	return &ResourceInfo{}
}

// Namespaced returns true if the type belongs in a namespace.
func (*ResourceInfo) Namespaced() bool {
	return {{.Kubernetes.Namespaced}}
}

// GroupVersionResource gets the GVR struct for the resource.
func (*ResourceInfo) GroupVersionResource() schema.GroupVersionResource {
	return schema.GroupVersionResource{
		Group:    "{{ .Kubernetes.Group }}",
		Version:  "{{ .Kubernetes.Version }}",
		Resource: "{{ .Kubernetes.Plural | lower }}",
	}
}

// GroupVersionKind gets the GVK struct for the resource.
func (*ResourceInfo) GroupVersionKind() schema.GroupVersionKind {
	return schema.GroupVersionKind{
		Group:   "{{ .Kubernetes.Group }}",
		Version: "{{ .Kubernetes.Version }}",
		Kind:    "{{ .Kubernetes.Kind }}",
	}
}

// FriendlyName gets the user-facing name of the resource.
func (*ResourceInfo) FriendlyName() string {
	return "{{.CF.Name}}"
}

{{ if and .SupportsConditions .Kubernetes.Conditions }}
var (
	{{ range .Kubernetes.Conditions }}
	{{.ConditionName}} = apis.ConditionType({{.Definition}}){{ end }}
)
{{ end }}

// Predicate is a boolean function for a {{.Type}}.
type Predicate func(*{{.Type}}) bool

// Mutator is a function that changes {{.Type}}.
type Mutator func(*{{.Type}}) error

// DiffWrapper wraps a mutator and prints out the diff between the original object
// and the one it returns if there's no error.
func DiffWrapper(w io.Writer, mutator Mutator) Mutator {
	return func(mutable *{{.Type}}) error {
		before := mutable.DeepCopy()

		if err := mutator(mutable); err != nil {
			return err
		}

		FormatDiff(w, "old", "new", before, mutable)

		return nil
	}
}

// FormatDiff creates a diff between two {{.Type}}s and writes it to the given
// writer.
func FormatDiff(w io.Writer, leftName, rightName string, left, right *{{.Type}}) {
	diff, err := kmp.SafeDiff(left, right)
	switch {
	case err != nil:
		fmt.Fprintf(w, "couldn't format diff: %s\n", err.Error())

	case diff == "":
		fmt.Fprintln(w, "No changes")

	default:
		fmt.Fprintf(w, "{{.CF.Name}} Diff (-%s +%s):\n", leftName, rightName)
		// go-cmp randomly chooses to prefix lines with non-breaking spaces or
		// regular spaces to prevent people from using it as a real diff/patch
		// tool. We normalize them so our outputs will be consistent.
		fmt.Fprintln(w, strings.ReplaceAll(diff, " ", " "))
	}
}

// List represents a collection of {{.Type}}.
type List []{{.Type}}

// Filter returns a new list items for which the predicates fails removed.
func (list List) Filter(filter Predicate) (out List) {
	for _, v := range list {
		if filter(&v) {
			out = append(out, v)
		}
	}

	return
}

{{ if .SupportsObservedGeneration }}
// ObservedGenerationMatchesGeneration is a predicate that returns true if the
// object's ObservedGeneration matches the genration of the object.
func ObservedGenerationMatchesGeneration(obj *{{.Type}}) bool {
	return obj.Generation == obj.{{.Kubernetes.ObservedGenerationFieldPath}}
}
{{ end }}

{{ if .SupportsConditions }}
// ExtractConditions converts the native condition types into an apis.Condition
// array with the Type, Status, Reason, and Message fields intact.
func ExtractConditions(obj *{{.Type}}) (extracted []apis.Condition) {
	for _, cond := range obj.{{.Kubernetes.ConditionsFieldPath}} {
		// Only copy the following four fields to be compatible with
		// recommended Kuberntes fields.
		extracted = append(extracted, apis.Condition{
			Type:    apis.ConditionType(cond.Type),
			Status:  corev1.ConditionStatus(cond.Status),
			Reason:  cond.Reason,
			Message: cond.Message,
		})
	}

	return
}
{{ end }}
`))
Bugxyb/prebid-server-java
src/main/java/org/prebid/server/bidder/kayzen/KayzenBidder.java
package org.prebid.server.bidder.kayzen;

import com.fasterxml.jackson.core.type.TypeReference;
import com.iab.openrtb.request.BidRequest;
import com.iab.openrtb.request.Imp;
import com.iab.openrtb.response.BidResponse;
import com.iab.openrtb.response.SeatBid;
import io.vertx.core.http.HttpMethod;
import org.apache.commons.collections4.CollectionUtils;
import org.prebid.server.bidder.Bidder;
import org.prebid.server.bidder.model.BidderBid;
import org.prebid.server.bidder.model.BidderError;
import org.prebid.server.bidder.model.HttpCall;
import org.prebid.server.bidder.model.HttpRequest;
import org.prebid.server.bidder.model.Result;
import org.prebid.server.exception.PreBidException;
import org.prebid.server.json.DecodeException;
import org.prebid.server.json.JacksonMapper;
import org.prebid.server.proto.openrtb.ext.ExtPrebid;
import org.prebid.server.proto.openrtb.ext.request.kayzen.ExtImpKayzen;
import org.prebid.server.proto.openrtb.ext.response.BidType;
import org.prebid.server.util.HttpUtil;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;

/**
 * Kayzen {@link Bidder} implementation.
 * <p>
 * Resolves the endpoint URL from the first impression's bidder ext
 * (zone/exchange macros) and forwards the whole request to Kayzen.
 */
public class KayzenBidder implements Bidder<BidRequest> {

    private static final TypeReference<ExtPrebid<?, ExtImpKayzen>> KAYZEN_EXT_TYPE_REFERENCE =
            new TypeReference<>() {
            };

    // Macros substituted into the configured endpoint URL template.
    private static final String URL_ZONE_ID_MACRO = "{{ZoneID}}";
    private static final String URL_ACCOUNT_ID_MACRO = "{{AccountID}}";
    // Only the first imp's ext is used to resolve the endpoint.
    private static final int FIRST_IMP_INDEX = 0;

    private final String endpointUrl;
    private final JacksonMapper mapper;

    public KayzenBidder(String endpointUrl, JacksonMapper mapper) {
        this.endpointUrl = HttpUtil.validateUrl(Objects.requireNonNull(endpointUrl));
        this.mapper = Objects.requireNonNull(mapper);
    }

    /**
     * Builds the single outgoing HTTP request. The first imp's ext is parsed
     * for zone/exchange and then stripped (set to null) before forwarding.
     */
    @Override
    public Result<List<HttpRequest<BidRequest>>> makeHttpRequests(BidRequest request) {
        final List<Imp> originalImps = request.getImp();
        final Imp firstImp = originalImps.get(FIRST_IMP_INDEX);
        final ExtImpKayzen extImpKayzen;
        try {
            extImpKayzen = parseImpExt(firstImp);
        } catch (PreBidException e) {
            return Result.withError(BidderError.badInput(e.getMessage()));
        }

        // Copy the imp list so the incoming request is not mutated.
        final List<Imp> modifiedImps = new ArrayList<>(originalImps);
        modifiedImps.set(FIRST_IMP_INDEX, firstImp.toBuilder().ext(null).build());

        return Result.withValue(createRequest(extImpKayzen, request, modifiedImps));
    }

    // Extracts ExtImpKayzen from imp.ext; throws PreBidException on malformed ext.
    private ExtImpKayzen parseImpExt(Imp imp) {
        try {
            return mapper.mapper().convertValue(imp.getExt(), KAYZEN_EXT_TYPE_REFERENCE).getBidder();
        } catch (IllegalArgumentException e) {
            throw new PreBidException(String.format("Missing bidder ext in impression with id: %s", imp.getId()));
        }
    }

    // Resolves URL macros and serializes the outgoing bid request.
    private HttpRequest<BidRequest> createRequest(ExtImpKayzen extImpKayzen, BidRequest request, List<Imp> imps) {
        final String url = endpointUrl.replace(URL_ZONE_ID_MACRO, extImpKayzen.getZone())
                .replace(URL_ACCOUNT_ID_MACRO, extImpKayzen.getExchange());

        final BidRequest outgoingRequest = request.toBuilder().imp(imps).build();

        return HttpRequest.<BidRequest>builder()
                .method(HttpMethod.POST)
                .uri(url)
                .headers(HttpUtil.headers())
                .payload(outgoingRequest)
                .body(mapper.encodeToBytes(outgoingRequest))
                .build();
    }

    /** Decodes the bid response; decode/parse failures become badServerResponse errors. */
    @Override
    public final Result<List<BidderBid>> makeBids(HttpCall<BidRequest> httpCall, BidRequest bidRequest) {
        try {
            final BidResponse bidResponse = mapper.decodeValue(httpCall.getResponse().getBody(), BidResponse.class);
            return Result.of(extractBids(httpCall.getRequest().getPayload(), bidResponse), Collections.emptyList());
        } catch (DecodeException | PreBidException e) {
            return Result.withError(BidderError.badServerResponse(e.getMessage()));
        }
    }

    private static List<BidderBid> extractBids(BidRequest bidRequest, BidResponse bidResponse) {
        if (bidResponse == null || CollectionUtils.isEmpty(bidResponse.getSeatbid())) {
            return Collections.emptyList();
        }
        return bidsFromResponse(bidRequest, bidResponse);
    }

    // Flattens seatbid[].bid[] into BidderBids, resolving media type per imp.
    private static List<BidderBid> bidsFromResponse(BidRequest bidRequest, BidResponse bidResponse) {
        return bidResponse.getSeatbid().stream()
                .filter(Objects::nonNull)
                .map(SeatBid::getBid)
                .filter(Objects::nonNull)
                .flatMap(Collection::stream)
                .map(bid -> BidderBid.of(bid, getBidMediaType(bid.getImpid(), bidRequest.getImp()), bidResponse.getCur()))
                .collect(Collectors.toList());
    }

    // Matches the bid's impid against the request imps; defaults to banner
    // when no imp matches or the matching imp has no recognized media type.
    private static BidType getBidMediaType(String impId, List<Imp> imps) {
        for (Imp imp : imps) {
            if (impId.equals(imp.getId())) {
                if (imp.getBanner() != null) {
                    return BidType.banner;
                } else if (imp.getVideo() != null) {
                    return BidType.video;
                } else if (imp.getXNative() != null) {
                    return BidType.xNative;
                }
            }
        }
        return BidType.banner;
    }
}
Max-Zhenzhera/my_vocab_backend
tests/test_api/base/post_route.py
<reponame>Max-Zhenzhera/my_vocab_backend
from abc import abstractmethod

from .route import BaseTestRoute

__all__ = ['BaseTestPostRoute']


class BaseTestPostRoute(BaseTestRoute):
    # Base class for POST-route API tests; subclasses supply the request body.

    @property
    @abstractmethod
    def request_json(self) -> dict:
        """
        The API route JSON (sent in request body) for successful response.

        Abstract *class* attribute:
            request_json: ClassVar[dict] = PydanticModel.dict()
        """
lucascheco/Uem
Organizacao_e_Recuperacao_de_Dados/Trabalho2/arvore.c
<gh_stars>0
/* Authors:
 *      <NAME> - RA:112686.
 *      <NAME> - RA:112684.
 *      <NAME> - RA:107678.

   Description:
 *      B-tree (Árvore B) command-line driver.
 */

#include <stdio.h>
#include <string.h>
#include <stdlib.h>

#include "MY_LIB1.h"

/*
 * Entry point. Expects either:
 *   prog -c <keys_file>   -- build the B-tree from a keys file
 *   prog -p               -- print the report
 * Contador/Argumentos and the PROGRAMA/MODO/ARQ indices are defined in
 * MY_LIB1.h (conventional argc/argv aliases).
 * User-facing messages are intentionally kept in Portuguese.
 */
int main(Contador c, Argumentos p) {
    /* Need at least a mode flag besides the program name. */
    if (c < 2) {
        fprintf(stderr, "Erro: numero incorreto de argumentos.\n");
        fprintf(stderr, "> Modo de uso:\n");
        fprintf(stderr, "$ %s -c arquivos_chaves.\n", p[PROGRAMA]);
        fprintf(stderr, "$ %s -p\n", p[PROGRAMA]);
        exit(1);
    }

    if (!strcmp(p[MODO], "-c")) {
        /* Build mode: create the B-tree from the given keys file. */
        printf("Criando Arvore-B... ");
        criar(p[ARQ]);
        printf("Arvore-B criada com sucesso.\n");
    } else if (!strcmp(p[MODO], "-p")) {
        /* Report mode. */
        imprimir_relatorio();
    } else {
        fprintf(stderr, "Erro: opcao \"%s\" invalida.\n", p[MODO]);
    }

    return 0;
}
shubs/api-store
deprecated/Facebook Group Members/Facebook Group Members.js
// Phantombuster configuration {
"phantombuster command: nodejs"
"phantombuster package: 4"

// Buster and Nick instantiation
const Buster = require("phantombuster")
const buster = new Buster()
const Nick = require("nickjs")
const nick = new Nick({
	printPageErrors: false,
	printResourceErrors: false,
	printNavigation: false,
	printAborts: false,
})

// Requires of npm packages
const Papa = require("papaparse")
// }

const noop = () => {}

// Getting the arguments: sessionCookies + groupUrl
const groupUrl = buster.arguments.groupUrl
const cookieXs = buster.arguments.cookieXs
const cookieUser = buster.arguments.cookieUser

// Check arguments
if ((typeof cookieXs !== "string") || cookieXs.length < 10) {
	console.log("Warning: Invalid cookieXs")
	nick.exit(1)
}
// Fix: this cookieUser validation was duplicated verbatim; the redundant
// second copy has been removed (identical check, identical effect).
if ((typeof cookieUser !== "string") || cookieUser.length < 10) {
	console.log("Warning: Invalid cookieUser")
	nick.exit(1)
}

// Returns the number of members loaded (runs in the page context)
const getMembersNb = (arg, callback) => {
	callback(null, document.querySelectorAll("div.lists tbody td").length)
}

// Returns the length of a list of members (runs in the page context)
const getListLength = (arg, callback) => {
	callback(null, document.querySelectorAll("div.lists div.profileBrowserGrid.fbProfileBrowserListContainer > *").length)
}

// Function to load automatically all members of a group: clicks the "more"
// pager until it stops appearing, logging progress after each page.
const loadAllMembers = async (tab) => {
	let listLength = await tab.evaluate(getListLength)
	let loop = true
	while (loop) {
		try {
			await tab.waitUntilVisible("a.uiMorePagerPrimary")
			await tab.click("a.uiMorePagerPrimary")
			try {
				await tab.waitUntilVisible(`div.lists div.profileBrowserGrid.fbProfileBrowserListContainer > *:nth-child(${listLength + 1})`)
			} catch (error) {
				noop()
			}
			listLength = await tab.evaluate(getListLength)
			console.log(`Loaded ${await tab.evaluate(getMembersNb)} members.`)
		} catch (error) {
			// Pager no longer visible: all members are loaded.
			loop = false
		}
	}
}

// Function to scrape all members of a page
// Scrapes name / info / profile URL for every loaded member row.
// Runs in the page context; rows with no usable data are skipped.
const scrapeMembers = (arg, callback) => {
	const members = document.querySelectorAll("div.lists tbody td")
	const result = []
	for (const member of members) {
		const memberInfo = {}
		if (member.querySelector("div.fsl.fwb.fcb > a")) {memberInfo.name = member.querySelector("div.fsl.fwb.fcb > a").textContent.trim()}
		else {memberInfo.name = null}
		if (member.querySelector("._17tq")) {memberInfo.info = member.querySelector("._17tq").textContent.trim()}
		else {memberInfo.info = null}
		// Strip the "?fref=..." tracking parameter from the profile link.
		if (member.querySelector("div.fsl.fwb.fcb > a")) {memberInfo.profile = member.querySelector("div.fsl.fwb.fcb > a").href.replace(/[&?]fref=.*/, "")}
		else {memberInfo.profile = null}
		if (memberInfo.name || memberInfo.info || memberInfo.profile) {
			result.push(memberInfo)
		}
	}
	callback(null, result)
}

// Reads the logged-in user's name from the profile picture's aria-label.
const scrapeFacebookName = (arg, callback) => {
	callback(null, document.querySelector("img._s0._4ooo._44ma.img").getAttribute("aria-label").trim())
}

// Function to connect to facebook with cookies (c_user + xs session cookies).
// Exits the agent if the news feed never becomes visible after login.
const facebookConnect = async (tab) => {
	await nick.setCookie({
		name: "c_user",
		value: cookieUser,
		domain: ".facebook.com"
	})
	await nick.setCookie({
		name: "xs",
		value: cookieXs,
		domain: ".facebook.com"
	})
	await tab.open("facebook.com")
	try {
		await tab.waitUntilVisible("div[role=\"feed\"]")
	} catch (error) {
		console.log("ERROR: Could not connect to facebook with this cookies.")
		nick.exit(1)
	}
	console.log(`Connected to facebook successfully as ${await tab.evaluate(scrapeFacebookName)}`)
}

// Main function: handle errors and launch everything — connect, load the
// group's member list, scrape it, save a CSV and set the result object.
;(async () => {
	const tab = await nick.newTab()
	await facebookConnect(tab)
	await tab.open(groupUrl.replace(/\/$/, "") + "/members") // Forge URL and remove the last "/"
	await tab.waitUntilVisible("table._5f0n")
	await loadAllMembers(tab)
	const members = await tab.evaluate(scrapeMembers)
	const url = await buster.saveText(Papa.unparse(members), "members.csv")
	console.log(`CSV successfully saved at ${url}`)
	try {
		await buster.setResultObject(members)
	} catch (error) {
		// Result object too large for the API: fall back to the CSV URL only.
		await buster.setResultObject({csvUrl: url})
	}
	nick.exit()
})()
.catch(err => {
	console.log(err)
	nick.exit(1)
})
harryfallows/atlasdb
atlasdb-conjure/src/main/java/com/palantir/atlasdb/http/v2/ClientOptionsConstants.java
/*
 * (c) Copyright 2020 Palantir Technologies Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.palantir.atlasdb.http.v2;

import com.google.common.annotations.VisibleForTesting;
import com.palantir.conjure.java.api.config.service.HumanReadableDuration;

/**
 * Timeout, retry and cooldown constants shared by AtlasDB HTTP v2 clients.
 * Values are package-private unless tests need them.
 */
public final class ClientOptionsConstants {
    static final HumanReadableDuration CONNECT_TIMEOUT = HumanReadableDuration.milliseconds(500);

    @VisibleForTesting
    // The read timeout controls how long the client waits to receive the first byte from the server before giving up,
    // so in general read timeouts should not be set to less than what is considered an acceptable time for the server
    // to give a suitable response.
    // In the context of TimeLock, this timeout must be longer than how long an AwaitingLeadershipProxy takes to
    // decide whether a node is the leader and still has a quorum.
    // Odd number for debugging
    public static final HumanReadableDuration SHORT_READ_TIMEOUT = HumanReadableDuration.milliseconds(12566);

    // Should not be reduced below 65 seconds to support workflows involving locking.
    static final HumanReadableDuration LONG_READ_TIMEOUT = HumanReadableDuration.seconds(65);

    // Under standard settings, throws after expected outages of 1/2 * 0.01 * (2^13 - 1) = 40.96 s
    static final HumanReadableDuration STANDARD_BACKOFF_SLOT_SIZE = HumanReadableDuration.milliseconds(10);
    static final int STANDARD_MAX_RETRIES = 13;

    static final int NO_RETRIES = 0;

    static final HumanReadableDuration STANDARD_FAILED_URL_COOLDOWN = HumanReadableDuration.milliseconds(100);
    static final HumanReadableDuration NON_RETRY_FAILED_URL_COOLDOWN = HumanReadableDuration.milliseconds(1);

    private ClientOptionsConstants() {
        // don't think about it
    }
}
karreiro/drools-wb
drools-wb-services/drools-wb-verifier/drools-wb-verifier-client/src/main/java/org/drools/workbench/services/verifier/plugin/client/api/FactTypes.java
<reponame>karreiro/drools-wb<filename>drools-wb-services/drools-wb-verifier/drools-wb-verifier-client/src/main/java/org/drools/workbench/services/verifier/plugin/client/api/FactTypes.java<gh_stars>0
/*
 * Copyright 2016 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.workbench.services.verifier.plugin.client.api;

import java.util.HashSet;
import java.util.Set;

import org.jboss.errai.common.client.api.annotations.MapsTo;
import org.jboss.errai.common.client.api.annotations.Portable;
import org.kie.soup.commons.validation.PortablePreconditions;

/**
 * Errai-portable collection of fact type metadata (fact name plus its
 * fields/types) used by the verifier. Lookups are linear scans over the
 * backing set.
 */
@Portable
public class FactTypes {

    private final Set<FactType> factTypes;

    public FactTypes() {
        this(new HashSet<>());
    }

    public FactTypes(@MapsTo("factTypes") final Set<FactType> factTypes) {
        this.factTypes = PortablePreconditions.checkNotNull("factTypes",
                                                            factTypes);
    }

    /** Returns the fact type with the given name, or {@code null} if unknown. */
    public FactType getFactType(final String factTypeName) {
        for (final FactType factType : factTypes) {
            if (factType.getName()
                    .equals(factTypeName)) {
                return factType;
            }
        }
        return null;
    }

    public Set<FactType> getFactTypes() {
        return factTypes;
    }

    /**
     * Returns the declared type of {@code fieldName} on {@code factTypeName},
     * or {@code null} when either the fact type or the field is unknown.
     */
    public String getFieldType(final String factTypeName,
                               final String fieldName) {
        PortablePreconditions.checkNotNull("factTypeName",
                                           factTypeName);
        PortablePreconditions.checkNotNull("fieldName",
                                           fieldName);

        final FactType factType = getFactType(factTypeName);

        if (factType == null) {
            return null;
        }

        for (final Field field : factType.getFields()) {
            if (field.getFieldName()
                    .equals(fieldName)) {
                return field.getType();
            }
        }
        return null;
    }

    @Override
    public String toString() {
        // Debug representation: FactName{Field{...}, Field{...}, }
        StringBuilder builder = new StringBuilder();

        for (final FactType key : factTypes) {
            builder.append(key.getName());
            builder.append("{");
            for (final Field field : key.getFields()) {
                builder.append(field.toString());
                builder.append(", ");
            }
            builder.append("}");
        }

        return builder.toString();
    }

    public void add(final FactType factType) {
        factTypes.add(factType);
    }

    /** A single fact type: its name and the set of its fields. */
    @Portable
    public static class FactType {

        private final String name;
        private final Set<Field> fields;

        public FactType(@MapsTo("name") final String name,
                        @MapsTo("fields") final Set<Field> fields) {
            this.name = name;
            this.fields = fields;
        }

        public String getName() {
            return name;
        }

        public Set<Field> getFields() {
            return fields;
        }
    }

    /** A single field of a fact type: its name and declared type. */
    @Portable
    public static class Field {

        private final String fieldName;
        private final String type;

        public Field(@MapsTo("fieldName") final String fieldName,
                     @MapsTo("type") final String type) {
            this.fieldName = fieldName;
            this.type = type;
        }

        public String getFieldName() {
            return fieldName;
        }

        public String getType() {
            return type;
        }

        @Override
        public String toString() {
            return "Field{" +
                    "fieldName='" + fieldName + '\'' +
                    ", type='" + type + '\'' +
                    '}';
        }
    }
}
dudizimber/connect-nodejs-sdk
src/model/V1UpdateOrderRequest.js
/**
 * Square Connect API
 * Client library for accessing the Square Connect APIs
 *
 * OpenAPI spec version: 2.0
 * Contact: <EMAIL>
 *
 * NOTE: This class is auto generated by the swagger code generator program.
 * https://github.com/swagger-api/swagger-codegen.git
 *
 * Swagger Codegen version: 2.3.0-SNAPSHOT
 *
 */

var ApiClient = require('../ApiClient');

/**
 * The V1UpdateOrderRequest model module.
 * @module model/V1UpdateOrderRequest
 */

/**
 * Constructs a new <code>V1UpdateOrderRequest</code>.
 * V1UpdateOrderRequest
 * @alias module:model/V1UpdateOrderRequest
 * @class
 * @param action {String} The action to perform on the order (COMPLETE, CANCEL, or REFUND). See [V1UpdateOrderRequestAction](#type-v1updateorderrequestaction) for possible values
 */
var exports = function(action) {
  var _this = this;

  _this['action'] = action;
};

/**
 * Constructs a <code>V1UpdateOrderRequest</code> from a plain JavaScript object, optionally creating a new instance.
 * Copies all relevant properties from <code>data</code> to <code>obj</code> if supplied or a new instance if not.
 * @param {Object} data The plain JavaScript object bearing properties of interest.
 * @param {module:model/V1UpdateOrderRequest} obj Optional instance to populate.
 * @return {module:model/V1UpdateOrderRequest} The populated <code>V1UpdateOrderRequest</code> instance.
 */
exports.constructFromObject = function(data, obj) {
  if (data) {
    // Generator intentionally populates a plain object rather than a class
    // instance here (the `new exports()` variant is disabled upstream).
    //obj = obj || new exports();
    obj = obj || {};

    if (data.hasOwnProperty('action')) {
      obj['action'] = ApiClient.convertToType(data['action'], 'String');
    }
    if (data.hasOwnProperty('shipped_tracking_number')) {
      obj['shipped_tracking_number'] = ApiClient.convertToType(data['shipped_tracking_number'], 'String');
    }
    if (data.hasOwnProperty('completed_note')) {
      obj['completed_note'] = ApiClient.convertToType(data['completed_note'], 'String');
    }
    if (data.hasOwnProperty('refunded_note')) {
      obj['refunded_note'] = ApiClient.convertToType(data['refunded_note'], 'String');
    }
    if (data.hasOwnProperty('canceled_note')) {
      obj['canceled_note'] = ApiClient.convertToType(data['canceled_note'], 'String');
    }
  }
  return obj;
}

/**
 * The action to perform on the order (COMPLETE, CANCEL, or REFUND). See [V1UpdateOrderRequestAction](#type-v1updateorderrequestaction) for possible values
 * @member {String} action
 */
exports.prototype['action'] = undefined;
/**
 * The tracking number of the shipment associated with the order. Only valid if action is COMPLETE.
 * @member {String} shipped_tracking_number
 */
exports.prototype['shipped_tracking_number'] = undefined;
/**
 * A merchant-specified note about the completion of the order. Only valid if action is COMPLETE.
 * @member {String} completed_note
 */
exports.prototype['completed_note'] = undefined;
/**
 * A merchant-specified note about the refunding of the order. Only valid if action is REFUND.
 * @member {String} refunded_note
 */
exports.prototype['refunded_note'] = undefined;
/**
 * A merchant-specified note about the canceling of the order. Only valid if action is CANCEL.
 * @member {String} canceled_note
 */
exports.prototype['canceled_note'] = undefined;

module.exports = exports;
Extollite/Protocol
bedrock/bedrock-v340/src/main/java/com/nukkitx/protocol/bedrock/v340/serializer/AddEntitySerializer_v340.java
<reponame>Extollite/Protocol
package com.nukkitx.protocol.bedrock.v340.serializer;

import com.nukkitx.network.VarInts;
import com.nukkitx.protocol.bedrock.packet.AddEntityPacket;
import com.nukkitx.protocol.bedrock.v340.BedrockUtils;
import com.nukkitx.protocol.serializer.PacketSerializer;
import io.netty.buffer.ByteBuf;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;

/**
 * Serializer for {@link AddEntityPacket} on Bedrock protocol v340.
 * The field order in serialize/deserialize mirrors the wire format and must
 * not be reordered. Stateless singleton ({@link #INSTANCE}).
 */
@NoArgsConstructor(access = AccessLevel.PRIVATE)
public class AddEntitySerializer_v340 implements PacketSerializer<AddEntityPacket> {
    public static final AddEntitySerializer_v340 INSTANCE = new AddEntitySerializer_v340();

    @Override
    public void serialize(ByteBuf buffer, AddEntityPacket packet) {
        // Wire order: uniqueId, runtimeId, identifier, position, motion,
        // rotation, attributes, metadata, entity links.
        VarInts.writeLong(buffer, packet.getUniqueEntityId());
        VarInts.writeUnsignedLong(buffer, packet.getRuntimeEntityId());
        BedrockUtils.writeString(buffer, packet.getIdentifier());
        BedrockUtils.writeVector3f(buffer, packet.getPosition());
        BedrockUtils.writeVector3f(buffer, packet.getMotion());
        BedrockUtils.writeVector3f(buffer, packet.getRotation());
        BedrockUtils.writeArray(buffer, packet.getAttributes(), BedrockUtils::writeEntityAttribute);
        BedrockUtils.writeEntityData(buffer, packet.getMetadata());
        BedrockUtils.writeArray(buffer, packet.getEntityLinks(), BedrockUtils::writeEntityLink);
    }

    @Override
    public void deserialize(ByteBuf buffer, AddEntityPacket packet) {
        // Must read fields in exactly the order serialize() writes them.
        packet.setUniqueEntityId(VarInts.readLong(buffer));
        packet.setRuntimeEntityId(VarInts.readUnsignedLong(buffer));
        packet.setIdentifier(BedrockUtils.readString(buffer));
        packet.setPosition(BedrockUtils.readVector3f(buffer));
        packet.setMotion(BedrockUtils.readVector3f(buffer));
        packet.setRotation(BedrockUtils.readVector3f(buffer));
        BedrockUtils.readArray(buffer, packet.getAttributes(), BedrockUtils::readEntityAttribute);
        BedrockUtils.readEntityData(buffer, packet.getMetadata());
        BedrockUtils.readArray(buffer, packet.getEntityLinks(), BedrockUtils::readEntityLink);
    }
}
sharang-d/kops
vendor/github.com/miekg/coredns/middleware/rewrite/condition_test.go
package rewrite /* func TestConditions(t *testing.T) { tests := []struct { condition string isTrue bool }{ {"a is b", false}, {"a is a", true}, {"a not b", true}, {"a not a", false}, {"a has a", true}, {"a has b", false}, {"ba has b", true}, {"bab has b", true}, {"bab has bb", false}, {"a not_has a", false}, {"a not_has b", true}, {"ba not_has b", false}, {"bab not_has b", false}, {"bab not_has bb", true}, {"bab starts_with bb", false}, {"bab starts_with ba", true}, {"bab starts_with bab", true}, {"bab ends_with bb", false}, {"bab ends_with bab", true}, {"bab ends_with ab", true}, {"a match *", false}, {"a match a", true}, {"a match .*", true}, {"a match a.*", true}, {"a match b.*", false}, {"ba match b.*", true}, {"ba match b[a-z]", true}, {"b0 match b[a-z]", false}, {"b0a match b[a-z]", false}, {"b0a match b[a-z]+", false}, {"b0a match b[a-z0-9]+", true}, {"a not_match *", true}, {"a not_match a", false}, {"a not_match .*", false}, {"a not_match a.*", false}, {"a not_match b.*", true}, {"ba not_match b.*", false}, {"ba not_match b[a-z]", false}, {"b0 not_match b[a-z]", true}, {"b0a not_match b[a-z]", true}, {"b0a not_match b[a-z]+", true}, {"b0a not_match b[a-z0-9]+", false}, } for i, test := range tests { str := strings.Fields(test.condition) ifCond, err := NewIf(str[0], str[1], str[2]) if err != nil { t.Error(err) } isTrue := ifCond.True(nil) if isTrue != test.isTrue { t.Errorf("Test %v: expected %v found %v", i, test.isTrue, isTrue) } } invalidOperators := []string{"ss", "and", "if"} for _, op := range invalidOperators { _, err := NewIf("a", op, "b") if err == nil { t.Errorf("Invalid operator %v used, expected error.", op) } } replaceTests := []struct { url string condition string isTrue bool }{ {"/home", "{uri} match /home", true}, {"/hom", "{uri} match /home", false}, {"/hom", "{uri} starts_with /home", false}, {"/hom", "{uri} starts_with /h", true}, {"/home/.hiddenfile", `{uri} match \/\.(.*)`, true}, {"/home/.hiddendir/afile", `{uri} match \/\.(.*)`, 
true}, } for i, test := range replaceTests { r, err := http.NewRequest("GET", test.url, nil) if err != nil { t.Error(err) } str := strings.Fields(test.condition) ifCond, err := NewIf(str[0], str[1], str[2]) if err != nil { t.Error(err) } isTrue := ifCond.True(r) if isTrue != test.isTrue { t.Errorf("Test %v: expected %v found %v", i, test.isTrue, isTrue) } } } */
Anton-Gusarov/angularjs-requirejs-typescript
typings/node-uuid/node-uuid-tests.js
<reponame>Anton-Gusarov/angularjs-requirejs-typescript
/// <reference path="node-uuid.d.ts" />
// Compiled-from-TypeScript smoke test exercising the node-uuid typings.
// NOTE(review): uuid.v2/v3 here only exercise the local .d.ts declarations —
// presumably declared there; confirm they exist in the installed node-uuid
// before relying on them at runtime.
var uuid = require('node-uuid');
var uid1 = uuid.v1();
var uid2 = uuid.v2();
var uid3 = uuid.v3();
var uid4 = uuid.v4();
// Options object accepted by the version-1/2 generators.
var options = {
    node: [],
    clockseq: 2,
    nsecs: 3,
    msecs: new Date()
};
var padding = [0, 1, 2];
var offset = 15;
var buf = [];
// parse/unparse round-trip into a caller-supplied buffer at an offset.
uuid.parse(uid4, buf, offset);
uuid.unparse(buf, offset);
// Generator overloads taking (options, buffer, offset).
uuid.v1(options, padding, offset);
uuid.v2(options, padding, offset);
uuid.v3(options, padding, offset);
uuid.v4(options, padding, offset);
//# sourceMappingURL=node-uuid-tests.js.map
tusharchoudhary0003/Custom-Football-Game
sources/com/facebook/C6782m.java
package com.facebook;

import android.os.Parcel;
import android.os.Parcelable.Creator;

/* renamed from: com.facebook.m */
/* compiled from: FacebookRequestError */
// Decompiled Parcelable.Creator for FacebookRequestError; the obfuscated
// name C6782m comes from the decompiler and is referenced elsewhere as-is.
class C6782m implements Creator<FacebookRequestError> {
    C6782m() {
    }

    /** Rebuilds a FacebookRequestError from a Parcel. */
    public FacebookRequestError createFromParcel(Parcel in) {
        // The (C6782m) null argument selects the package-private constructor.
        return new FacebookRequestError(in, (C6782m) null);
    }

    public FacebookRequestError[] newArray(int size) {
        return new FacebookRequestError[size];
    }
}
cndavy/docker-office
custom_apps/files_accesscontrol/l10n/sl.js
// Nextcloud l10n bundle: Slovenian translations for the files_accesscontrol
// app. Generated from the translation server — the strings and the plural
// rule below are data and must not be edited by hand.
OC.L10N.register(
    "files_accesscontrol",
    {
    "No rule given" : "Ni podanega še nobenega pravila.",
    "File access control" : "Nadzor dostopa",
    "Control access to files based on conditions" : "Nadzor dostopa do datotek na osnovi določenih pogojev",
    "Each rule group consists of one or more rules. A request matches a group if all rules evaluate to true. If a request matches at least one of the defined groups, the request is blocked and the file content can not be read or written." : "Vsako skupno pravil sestavlja eno ali več pravil. Zahteva je skladna s skupino, če se vsa pravila potrdijo kot skladna. Če je zahteva skladna z vsaj eno izmed določenih skupin, je ta blokirana in vsebine datoteke ni mogoče ne prebrati ne prepisati."
},
"nplurals=4; plural=(n%100==1 ? 0 : n%100==2 ? 1 : n%100==3 || n%100==4 ? 2 : 3);");
atlasapi/atlas
src/main/java/org/atlasapi/remotesite/wikipedia/television/ScrapedFlatHierarchy.java
<filename>src/main/java/org/atlasapi/remotesite/wikipedia/television/ScrapedFlatHierarchy.java
package org.atlasapi.remotesite.wikipedia.television;

import java.util.List;

import org.atlasapi.remotesite.wikipedia.wikiparsers.Article;

import com.google.common.base.Function;
import com.google.common.collect.ImmutableCollection;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;

/**
 * Struct to hold all scraped data needed for the {@link TvBrandHierarchyExtractor} to extract a TV brand's content hierarchy.
 * <p>
 * (Can be obtained from futures of all its distinct parts by constructing its special-purpose inner class ({@link Collector}) and calling {@link Collector#collect()}.)
 */
public class ScrapedFlatHierarchy {

    /**
     * Effectively a "defuturizer" / builder for {@link ScrapedFlatHierarchy} from Future versions of all its component pieces of information.
     * @see ScrapedFlatHierarchy
     * @see #collect()
     */
    public static class Collector {
        private final ListenableFuture<Article> brandArticle;
        private final ListenableFuture<ScrapedBrandInfobox> brandInfo;
        private final ListenableFuture<ImmutableCollection<ScrapedEpisode>> episodes;

        public Collector(ListenableFuture<Article> brandArticle,
                ListenableFuture<ScrapedBrandInfobox> brandInfo,
                ListenableFuture<ImmutableCollection<ScrapedEpisode>> episodes) {
            this.brandArticle = brandArticle;
            this.brandInfo = brandInfo;
            this.episodes = episodes;
        }

        /**
         * Returns a Future of {@link ScrapedFlatHierarchy} that's done only when all component pieces of information are ready.
         * <p>
         * NOTE(review): {@code successfulAsList} substitutes {@code null} for any
         * failed input future, so the casts below can yield a hierarchy with null
         * parts — presumably callers tolerate this; confirm before relying on
         * non-null fields. The element casts are unchecked by design.
         */
        public ListenableFuture<ScrapedFlatHierarchy> collect() {
            return Futures.transform(Futures.successfulAsList(brandArticle, brandInfo, episodes), new Function<List<Object>, ScrapedFlatHierarchy>() {
                public ScrapedFlatHierarchy apply(List<Object> input) {
                    return new ScrapedFlatHierarchy(
                            (Article) (input.get(0)),
                            (ScrapedBrandInfobox) (input.get(1)),
                            (ImmutableCollection<ScrapedEpisode>) (input.get(2)));
                }
            });
        }
    }

    private final Article brandArticle;
    private final ScrapedBrandInfobox brandInfo;
    private final ImmutableCollection<ScrapedEpisode> episodes;

    public ScrapedFlatHierarchy(Article baseArticle, ScrapedBrandInfobox info, ImmutableCollection<ScrapedEpisode> episodes) {
        this.brandArticle = baseArticle;
        this.brandInfo = info;
        this.episodes = episodes;
    }

    public Article getBrandArticle() {
        return brandArticle;
    }

    public ScrapedBrandInfobox getBrandInfo() {
        return brandInfo;
    }

    public ImmutableCollection<ScrapedEpisode> getEpisodes() {
        return episodes;
    }
}
zipated/src
chrome/browser/chromeos/policy/off_hours/time_utils.h
<reponame>zipated/src
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef CHROME_BROWSER_CHROMEOS_POLICY_OFF_HOURS_TIME_UTILS_H_
#define CHROME_BROWSER_CHROMEOS_POLICY_OFF_HOURS_TIME_UTILS_H_

#include <string>
#include <vector>

#include "chrome/browser/chromeos/policy/off_hours/off_hours_interval.h"
#include "chrome/browser/chromeos/policy/off_hours/weekly_time.h"

namespace policy {
namespace off_hours {

// NOTE(review): base::Clock and base::TimeDelta are used below without a
// direct include of "base/time/clock.h" / "base/time/time.h" — presumably
// pulled in transitively via weekly_time.h; confirm against IWYU policy.

// Put time in milliseconds which is added to local time to get GMT time to
// |offset| considering daylight from |clock|. Return true if there was no
// error.
bool GetOffsetFromTimezoneToGmt(const std::string& timezone,
                                base::Clock* clock,
                                int* offset);

// Convert time intervals from |timezone| to GMT timezone.
std::vector<OffHoursInterval> ConvertIntervalsToGmt(
    const std::vector<OffHoursInterval>& intervals,
    base::Clock* clock,
    const std::string& timezone);

// Return duration till next "OffHours" time interval.
base::TimeDelta GetDeltaTillNextOffHours(
    const WeeklyTime& current_time,
    const std::vector<OffHoursInterval>& off_hours_intervals);

}  // namespace off_hours
}  // namespace policy

#endif  // CHROME_BROWSER_CHROMEOS_POLICY_OFF_HOURS_TIME_UTILS_H_
Sunrisepeak/Linux2.6-Reading
drivers/staging/rtl8723bs/include/osdep_service.h
<filename>drivers/staging/rtl8723bs/include/osdep_service.h
/* SPDX-License-Identifier: GPL-2.0 */
/******************************************************************************
 *
 * Copyright(c) 2007 - 2013 Realtek Corporation. All rights reserved.
 *
 ******************************************************************************/
#ifndef __OSDEP_SERVICE_H_
#define __OSDEP_SERVICE_H_

/* Generic driver status codes. */
#define _FAIL		0
#define _SUCCESS	1
#define RTW_RX_HANDLED	2

#include <osdep_service_linux.h>

/* Single-bit masks; BIT32+ only make sense in 64-bit expressions. */
#define BIT0	0x00000001
#define BIT1	0x00000002
#define BIT2	0x00000004
#define BIT3	0x00000008
#define BIT4	0x00000010
#define BIT5	0x00000020
#define BIT6	0x00000040
#define BIT7	0x00000080
#define BIT8	0x00000100
#define BIT9	0x00000200
#define BIT10	0x00000400
#define BIT11	0x00000800
#define BIT12	0x00001000
#define BIT13	0x00002000
#define BIT14	0x00004000
#define BIT15	0x00008000
#define BIT16	0x00010000
#define BIT17	0x00020000
#define BIT18	0x00040000
#define BIT19	0x00080000
#define BIT20	0x00100000
#define BIT21	0x00200000
#define BIT22	0x00400000
#define BIT23	0x00800000
#define BIT24	0x01000000
#define BIT25	0x02000000
#define BIT26	0x04000000
#define BIT27	0x08000000
#define BIT28	0x10000000
#define BIT29	0x20000000
#define BIT30	0x40000000
#define BIT31	0x80000000
#define BIT32	0x0100000000
#define BIT33	0x0200000000
#define BIT34	0x0400000000
#define BIT35	0x0800000000
#define BIT36	0x1000000000

extern int RTW_STATUS_CODE(int error_code);

/* Allocation wrappers (zeroed / plain) and matching free. */
void *_rtw_zmalloc(u32 sz);
void *_rtw_malloc(u32 sz);
void _kfree(u8 *pbuf, u32 sz);

struct sk_buff *_rtw_skb_alloc(u32 sz);
struct sk_buff *_rtw_skb_copy(const struct sk_buff *skb);
int _rtw_netif_rx(struct net_device *ndev, struct sk_buff *skb);

#define rtw_malloc(sz)			_rtw_malloc((sz))
#define rtw_zmalloc(sz)			_rtw_zmalloc((sz))

#define rtw_skb_alloc(size) _rtw_skb_alloc((size))
#define rtw_skb_alloc_f(size, mstat_f)	_rtw_skb_alloc((size))
#define rtw_skb_copy(skb)	_rtw_skb_copy((skb))
#define rtw_skb_copy_f(skb, mstat_f)	_rtw_skb_copy((skb))
#define rtw_netif_rx(ndev, skb) _rtw_netif_rx(ndev, skb)

extern void _rtw_init_queue(struct __queue *pqueue);

/* Called at driver-thread start; allows SIGTERM so the thread can be
 * stopped. NOTE(review): the name parameter is unused here — presumably a
 * leftover from a thread-naming variant; confirm before removing. */
static inline void thread_enter(char *name)
{
	allow_signal(SIGTERM);
}

static inline void flush_signals_thread(void)
{
	if (signal_pending(current))
	{
		flush_signals(current);
	}
}

#define rtw_warn_on(condition) WARN_ON(condition)

/* Always reports success; the parameters are accepted but not inspected. */
static inline int rtw_bug_check(void *parg1, void *parg2, void *parg3, void *parg4)
{
	int ret = true;

	return ret;
}

/* Round sz up to the nearest multiple of r. */
#define _RND(sz, r) ((((sz)+((r)-1))/(r))*(r))

#ifndef MAC_ARG
#define MAC_ARG(x) (x)
#endif

extern void rtw_free_netdev(struct net_device * netdev);

/* Macros for handling unaligned memory accesses */

void rtw_buf_free(u8 **buf, u32 *buf_len);
void rtw_buf_update(u8 **buf, u32 *buf_len, u8 *src, u32 src_len);

/* Fixed-capacity circular buffer of void* entries. */
struct rtw_cbuf {
	u32 write;
	u32 read;
	u32 size;
	void *bufs[];
};

bool rtw_cbuf_full(struct rtw_cbuf *cbuf);
bool rtw_cbuf_empty(struct rtw_cbuf *cbuf);
bool rtw_cbuf_push(struct rtw_cbuf *cbuf, void *buf);
void *rtw_cbuf_pop(struct rtw_cbuf *cbuf);
struct rtw_cbuf *rtw_cbuf_alloc(u32 size);

/* String handler */

/*
 * Write formatted output to sized buffer
 */
#define rtw_sprintf(buf, size, format, arg...)	snprintf(buf, size, format, ##arg)

#endif
ks555/idlak
src/html/gmm-basis-fmllr-accs_8cc.js
// Doxygen-generated navigation data for gmm-basis-fmllr-accs.cc:
// [display name, anchor URL, children]. Regenerated by Doxygen — do not
// edit by hand.
var gmm_basis_fmllr_accs_8cc =
[
    [ "AccumulateForUtterance", "gmm-basis-fmllr-accs_8cc.html#ad950fbbf9109c04331891caa17864705", null ],
    [ "main", "gmm-basis-fmllr-accs_8cc.html#a0ddf1224851353fc92bfbff6f499fa97", null ]
];