language
stringclasses
15 values
src_encoding
stringclasses
34 values
length_bytes
int64
6
7.85M
score
float64
1.5
5.69
int_score
int64
2
5
detected_licenses
listlengths
0
160
license_type
stringclasses
2 values
text
stringlengths
9
7.85M
C#
UTF-8
1,093
3.015625
3
[]
no_license
using System.Collections.Generic;
using System.Linq;

namespace EverbridgeWCF.Data
{
    // Data-access object for Door entities backed by an Entity Framework
    // DoorContext injected at construction.
    class DoorDAO : IDoorDAO
    {
        private readonly DoorContext db;

        public DoorDAO(DoorContext db)
        {
            this.db = db;
        }

        // Returns every door currently stored in the context.
        public List<Door> getAll()
        {
            return db.doors.ToList();
        }

        // Adds a door and saves asynchronously.
        // NOTE(review): `async void` means callers cannot await completion and
        // any exception from SaveChangesAsync is unobservable; prefer
        // `async Task` if the IDoorDAO interface allows changing it.
        public async void insert(Door door)
        {
            db.doors.Add(door);
            await db.SaveChangesAsync();
        }

        // Removes every door matching the id (RemoveRange tolerates zero matches).
        public async void delete(long id)
        {
            db.doors.RemoveRange(db.doors.Where(x => x.id == id));
            await db.SaveChangesAsync();
        }

        // Copies the mutable fields onto the tracked entity when it exists,
        // then saves; silently does nothing for an unknown id.
        public async void update(Door door)
        {
            var result = db.doors.SingleOrDefault(x => x.id == door.id);
            if (result != null)
            {
                result.isLocked = door.isLocked;
                result.isOpen = door.isOpen;
                result.label = door.label;
            }
            await db.SaveChangesAsync();
        }

        // Returns the door with the given id, or null when absent.
        public Door getDoor(long id)
        {
            return db.doors.SingleOrDefault(x => x.id == id);
        }
    }
}
C++
UTF-8
3,709
2.953125
3
[ "FSFAP", "MIT" ]
permissive
#ifndef SRC_MAIN_CORE_IMPL_FREEZABLE_POSTING_LIST_H_ #define SRC_MAIN_CORE_IMPL_FREEZABLE_POSTING_LIST_H_ #include <memory> #include <utility> #include "core/index/posting_list.h" #include "core/reader/posting_list_reader.h" #include "core/reader/reader_utils.h" namespace redgiant { /* * - A wrapper of posting list implements thread safety for parallel reading. * - Once frozen, the wrapped posting list will not allow further changes, then * it is safe to create read from it in multiple threads. * - To change a frozen posting list, you need to fork a new one and apply it * after changes are made. * - Instances of this class should be protected by external shared_mutex. */ template <typename DocId, typename Weight> class FreezablePostingList: public PostingList<DocId, Weight> { public: typedef PostingList<DocId, Weight> Base; typedef typename Base::PList PList; typedef typename Base::Reader Reader; typedef PostingListFactory<DocId, Weight> Factory; typedef typename Factory::ReaderByVal ReaderByVal; typedef typename Factory::ReaderByRef ReaderByRef; // Create from the factory of internal posting list. FreezablePostingList(const Factory& factory, bool frozen = false) : instance_(factory.create_posting_list()), frozen_(frozen) { } // Create from the factory of internal posting list and an external reader. FreezablePostingList(const Factory& factory, std::unique_ptr<ReaderByVal> reader, bool frozen = false) : instance_(factory.create_posting_list(std::move(reader))), frozen_(frozen) { } // Create from the factory of internal posting list and an external reader. FreezablePostingList(const Factory& factory, std::unique_ptr<ReaderByRef> reader, bool frozen = false) : instance_(factory.create_posting_list(std::move(reader))), frozen_(frozen) { } // Create from a passed-in posting list. 
FreezablePostingList(std::shared_ptr<PList> plist, bool frozen = false) : instance_(std::move(plist)), frozen_(frozen) { } virtual ~FreezablePostingList() = default; // need external read lock virtual bool empty() const { return instance_->empty(); } // need external write lock virtual int update(DocId doc_id, const Weight& weight) { if (frozen_) { return 0; } return instance_->update(doc_id, weight); } // need external write lock virtual int remove(DocId doc_id) { if (frozen_) { return 0; } return instance_->remove(doc_id); } // need external read lock, and the input shared_list is ignored. // once the reader is created, it is safe to read from it any time later. virtual std::unique_ptr<Reader> create_reader(std::shared_ptr<PList> shared_list) const { // we should hold wrapped instance_ instead of this or shared_list, // since the wrapped instance may change. (void) shared_list; if (!frozen_) { // it is not safe to return reader to the internal instance if not frozen. return nullptr; } return create_reader_shared(instance_); } // need external write lock void freeze() { frozen_ = true; } // need external read lock std::shared_ptr<PList> get_instance() const { if (!frozen_) { // it is not safe to return reader to the internal instance if not frozen. return nullptr; } return instance_; } // safe to access without external lock std::shared_ptr<FreezablePostingList> fork(const Factory& factory) const { return std::make_shared(factory.create_posting_list(create_reader_shared(instance_))); } private: std::shared_ptr<PList> instance_; bool frozen_; }; } /* namespace redgiant */ #endif /* SRC_MAIN_CORE_IMPL_FREEZABLE_POSTING_LIST_H_ */
Markdown
UTF-8
1,230
2.859375
3
[]
no_license
--- description: 设置活动位置,到达设定好的位置时软件提示 --- # 22.系统功能——位置,自带场景标记 我们在“[5种常见的时间归类方式](../ch02/ch02.16.md)”这一节中提到了“场景分类”。 在aTimeLogger Pro中(aTimeLogger没有这个功能)用户可以进入活动项,选择位置,导出报告时,就不仅有时间数据,还能给所有时间数据加上位置标记,增加了一个场景分析的维度。 在更多-位置,右上角“+”新建,输入位置名称,点击进入,可选的地图有苹果地图和百度地图,我默认选择苹果地图,因为苹果地图有当前坐标和定位坐标显示,长按紫色定位标可以手动修正,百度地图没有当前位置,且手动设置坐标时保存无效。 ![](<../.gitbook/assets/图片 (44).png>) 填写位置名称,如“家里”,“办公室”。 ![](<../.gitbook/assets/图片 (45).png>) 在活动项中添加位置,如图示: ![](<../.gitbook/assets/图片 (46).png>) 目前的缺点是,活动项设置位置后无法移除,且带有位置标记的数据导出CSV不显示,只能在更多-位置里看内置的图表,比较鸡肋,不推荐。
Swift
UTF-8
2,468
3.21875
3
[]
no_license
//
// Author: Alex Cummaudo
// Student ID: 1744070
// Program: A2 - Inference Engine
// Unit: COS30019 - Intro to AI
// Date: 4/05/2016
//

///
/// Implements the backward chaining algorithm for entailing information. Refer to
/// p.257 of AIMA for more.
///
struct BackwardChaining: EntailmentMethod {
    // MARK: Implement EntailmentMethod
    func entail(query query: Sentence, fromKnowledgeBase kb: KnowledgeBase) -> EntailmentResponse {
        // Initialise our agenda and clauses
        let clauses: [ComplexSentence] = kb.clauses
        // Add facts in (i.e., atomic sentences)
        let facts: [AtomicSentence] = kb.sentences.filter({$0 is AtomicSentence}).map({$0 as! AtomicSentence})
        // Agenda starts with the query first; query must be atomic
        guard let query = query as? AtomicSentence else {
            return 0 // cannot be entailed
        }
        // Agenda - starts off with the query
        var agenda: [AtomicSentence] = [query]
        // Result of the entailment - the sentences that were entailed
        var entailed: [AtomicSentence] = []
        while !agenda.isEmpty {
            let conclusion = agenda.removeLast()
            entailed.insert(conclusion, atIndex: 0)
            // FIX: only expand the conclusion when it is not already a known
            // fact. The original test, facts.contains({$0 != query}), was true
            // whenever ANY fact differed from the query, so known facts were
            // expanded anyway and backward chaining's base case (a goal that
            // is itself a fact, AIMA p.257) was never taken.
            if !facts.contains({$0 == conclusion}) {
                let symbols: [AtomicSentence] = clauses
                    // Filter out clauses that don't contain the conclusion
                    .filter({ clause in clause.containsConclusion(conclusion) })
                    // mapped to their premise symbols as atomic sentences
                    .map({ clause in clause.sentences.left!.symbols.map { AtomicSentence($0) } })
                    // flattened
                    .flatMap({$0})
                // No more symbols? Break the loop
                if symbols.isEmpty {
                    break
                } else {
                    // Get all the symbols we are yet to entail and add them
                    // to our agenda
                    let symbolsNotInEntailed = symbols.filter({ symbol in
                        !entailed.contains(symbol)
                    })
                    agenda.appendContentsOf(symbolsNotInEntailed)
                }
            }
        }
        return entailed
    }
}
Java
GB18030
652
3.953125
4
[]
no_license
package demo02.Stream;

import java.util.stream.Stream;

/*
 * Demonstrates Stream.limit(n): a lazy intermediate operation that truncates
 * the stream to at most its first n elements and returns a new Stream.
 * (The original comments were mojibake from a GB18030 encoding mismatch and
 * have been rewritten in English; the array's element strings are corrupted
 * by the same mismatch and are kept byte-identical because they are runtime
 * data.)
 */
public class Demo06Stream_limit {
    public static void main(String[] args) {
        // Obtain a Stream from an array.
        String[] arr = {"","ϲ","","̫","̫"};
        Stream<String> stream = Stream.of(arr);
        // Keep only the first 3 elements.
        Stream<String> stream2 = stream.limit(3);
        stream2.forEach(name -> System.out.println(name));
    }
}
Java
UTF-8
514
2.796875
3
[]
no_license
package Modules;

import java.util.ArrayList;

/**
 * A building identified by a short name and a display label, holding the
 * list of rooms it contains.
 */
public class Building {

    public String name;
    public String libelle;
    public ArrayList<Room> rooms;

    /**
     * Creates a building with the given name and label and an initially
     * empty room list.
     */
    public Building(String name, String libelle) {
        this.name = name;
        this.libelle = libelle;
        this.rooms = new ArrayList<Room>();
    }

    /** @return the short name of this building */
    public String getName() { return name; }

    /** Replaces the short name of this building. */
    public void setName(String name) { this.name = name; }

    /** @return the display label of this building */
    public String getLibelle() { return libelle; }

    /** Replaces the display label of this building. */
    public void setLibelle(String libelle) { this.libelle = libelle; }
}
C#
UTF-8
609
2.84375
3
[]
no_license
using System;
using System.Runtime.CompilerServices;

namespace FabricTraining
{
    /// <summary>
    /// Console harness exercising BikeFactory: requests "RoadBike" twice,
    /// sets a frame size on the first, and prints both descriptions.
    /// NOTE(review): if the factory caches/shares instances (flyweight), the
    /// second bike will report the first one's FrameSize — confirm in
    /// BikeFactory whether that is the intended demonstration.
    /// </summary>
    public class BikeFactoryTester
    {
        public static void Main()
        {
            BikeFactory factory = new BikeFactory();
            IBike roadBike = factory.GetBike("RoadBike");
            roadBike.FrameSize = 54;
            Console.WriteLine(roadBike.Description + " with " + roadBike.FrameSize);
            IBike anotherBike = factory.GetBike("RoadBike");
            Console.WriteLine(anotherBike.Description + " with " + anotherBike.FrameSize);
            // Keep the console window open until the user presses Enter.
            Console.ReadLine();
        }
    }
}
C#
UTF-8
1,644
2.5625
3
[]
no_license
using CamadaDados;
using System.Data;

namespace CamadaNegocio
{
    /// <summary>
    /// Business-layer facade for "programa" records: each operation populates
    /// a data-layer DPrograma object and delegates to it.
    /// </summary>
    public class NPrograma
    {
        /// <summary>Inserts a record; returns the data layer's status message.</summary>
        public static string Inserir(string audio, string duracao, string dataCadastro, string sintese)
        {
            DPrograma dados = new CamadaDados.DPrograma
            {
                Audio = audio,
                Duracao = duracao,
                DataCadastro = dataCadastro,
                Sintese = sintese
            };
            return dados.Inserir(dados);
        }

        /// <summary>Updates the record identified by codigo; returns the status message.</summary>
        public static string Editar(int codigo, string audio, string duracao, string dataCadastro, string sintese)
        {
            DPrograma dados = new CamadaDados.DPrograma
            {
                Codigo = codigo,
                Audio = audio,
                Duracao = duracao,
                DataCadastro = dataCadastro,
                Sintese = sintese
            };
            return dados.Editar(dados);
        }

        /// <summary>Deletes the record identified by codigo; returns the status message.</summary>
        public static string Excluir(int codigo)
        {
            DPrograma dados = new CamadaDados.DPrograma { Codigo = codigo };
            return dados.Excluir(dados);
        }

        /// <summary>Returns every record as a DataTable.</summary>
        public static DataTable Mostrar()
        {
            return new DPrograma().Mostrar();
        }

        /// <summary>Searches records by name using the given text.</summary>
        public static DataTable BuscarNome(string testbusca)
        {
            DPrograma dados = new DPrograma { TextBusca = testbusca };
            return dados.BuscarNome(dados);
        }
    }
}
Markdown
UTF-8
2,504
2.78125
3
[]
no_license
# Sequences-of-Objects Sequences of Objects (SO) is a novel and effective method for ancient Chinese 'Shan Shui' painting composition classification. This repository contains a TensorFlow implementation of SO. The repository is separated in two main parts: (1) object detector; (2) sequence classifier. Here we apply [SSD](http://arxiv.org/abs/1512.02325) as our object detector, and LSTM as our sequence classifier. ## Requirements: * Tensorflow (>= 1.4) * numpy * six * opencv-python * matplotlib * pillow If you are using pip you can install these with ```bash pip install tensorflow numpy six opencv-python matplotlib pillow ``` ## Minimal example of object detector The [object_detection.py](object_detector/notebooks/object_detection.py) contains a minimal example of object detection pipeline. To run: ```bash cd object_detector/checkpoints # at root of the repository unzip checkpoints.zip cd object_detector/notebooks # at root of the repository python object_detection.py [painting_path] ``` Here are three examples of successful detection outputs: Lofty and remote: ```bash python object_detection.py '../demo/lofty_and_remote.jpg' ``` ![](object_detector/results/lofty_and_remote.jpg) Wide and remote: ```bash python object_detection.py '../demo/wide_and_remote.jpg' ``` ![](object_detector/results/wide_and_remote.jpg) Deep and remote: ```bash python object_detection.py '../demo/deep_and_remote.jpg' ``` ![](object_detector/results/deep_and_remote.jpg) ## Minimal example of Sequences of Objects The [object_detection_save_results.py](object_detector/notebooks/object_detection_save_results.py) and [composition_classification.py](sequence_classifier/composition_classification.py) contain a minimal example of our SO pipeline. 
To run: ```bash cd object_detector/checkpoints # at root of the repository unzip checkpoints.zip cd object_detector/notebooks # at root of the repository python object_detection_save_results.py [painting_path] # object detection cd sequence_classifier # at root of the repository python composition_classification.py [painting_path] # composition classification ``` or you can directly use the shell script [sequences_of_objects](sequences_of_objects): ```bash chmod +x ./sequences_of_objects ./sequences_of_objects [painting_path] ``` For example: ```bash chmod +x ./sequences_of_objects ./sequences_of_objects '../demo/lofty_and_remote.jpg' ``` first you can see the object detection results, then the composition classification results.
PHP
UTF-8
1,241
2.609375
3
[ "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
<?php
/**
 * Created by PhpStorm.
 * User: pablocamara
 * Date: 24/03/2018
 * Time: 23:25
 */

/**
 * CodeIgniter model for the `article` table and its tag relations
 * (`article_tags`, `article_associated_tags`).
 */
class Article_model extends CI_Model
{
    public $id;
    public $associated_category_id;
    public $url_tag;
    public $title;
    public $small_description;
    public $full_description;
    public $createdAt;

    public function __construct()
    {
        parent::__construct();
        // Load the default database connection for all queries below.
        $this->load->database();
    }

    /**
     * Returns all articles associated with any of the given tag names.
     *
     * @param string|string[] $tag_names a single tag name or a list of them
     * @return array joined rows from article_tags / article_associated_tags / article
     */
    public function getArticlesByTagNames($tag_names){
        // Accept a bare string for convenience and normalise it to an array.
        if(!is_array($tag_names)){
            $tag_names = Array($tag_names);
        }
        return $this->db->select('*')
            ->from('article_tags')
            ->join('article_associated_tags','article_associated_tags.tag_id = article_tags.id')
            ->join('article','article.id = article_associated_tags.article_id')
            ->where_in('article_tags.name',$tag_names)
            ->get()->result();
    }

    /**
     * Fetches one article row by primary key; null when not found.
     */
    public function getById($id){
        return $this->db->select('*')
            ->where('id',$id)
            ->get('article')->row();
    }

    /**
     * Fetches one article row by its URL slug; null when not found.
     */
    public function getByUrlTag($article_url_tag){
        return $this->db->select('*')
            ->where('url_tag',$article_url_tag)
            ->get('article')->row();
    }
}
C++
UTF-8
817
2.625
3
[]
no_license
#ifndef ZORKISH_BAG_H
#define ZORKISH_BAG_H

#include <vector>
#include "../InventoryItem/InventoryItem.h"
#include "../Inventory/Inventory.h"

// A lockable container entity (e.g. a bag found in the game world) that
// stores InventoryItems.
// NOTE(review): `string`/`vector` are used unqualified here, presumably via a
// using-directive in an included header — confirm.
class Bag {
private:
    Inventory bagEntities = Inventory("bag");  // items stored inside this bag
    bool bagOpen;              // presumably tracks open/unlocked state — confirm in Bag.cpp
    string unlockItemName;     // name of the item required to unlock this bag
    string itemKey;
    string itemName = "null";  // "null" sentinel when the bag has no name yet
    string itemDescription;

    // Populates bagEntities from the parsed entity strings.
    void setupBagEntities(vector<string> entities);

public:
    Bag(vector<string> details);
    Bag();
    ~Bag();

    // Attempts to open the bag; semantics of the two names are defined in Bag.cpp.
    void unlockContainer(string itemName, string unlockName);
    void printContainerContents();
    void addToContainer(InventoryItem item);
    void removeFromContainer(InventoryItem item, string key);
    // Looks up a stored item by name (miss behaviour defined in Bag.cpp).
    InventoryItem checkContainerEntities(string itemName);
    string getKey();
    string getName();
    string getDescription();
};

#endif //ZORKISH_BAG_H
C++
UTF-8
266
2.78125
3
[]
no_license
// https://zxi.mytechroad.com/blog/simulation/1528-shuffle-string/
class Solution {
 public:
  // Returns the string in which the character s[i] has been moved to
  // position indices[i]. Assumes indices is a permutation of [0, s.size()).
  string restoreString(string s, vector<int>& indices) {
    string restored(s.size(), ' ');
    int src = 0;
    for (int dst : indices)
      restored[dst] = s[src++];
    return restored;
  }
};
Markdown
UTF-8
1,819
2.578125
3
[]
no_license
<h2>분산시스템을 이용하여 대규모 그래프에 존재하는 모든 삼각형 찾아내기</h2> <h4>박하명 교수님</h4> <b>Graph are EVERYWHERE</b><br> 세상 어디에든 그래프는 존재한다. 데이터는 그래프로 표현될 수 있으며 그래프에 존재하는 모든 삼각형을 찾아내어 그것을 가지고 노드간의 관계를 찾을 수 있다.<br> *삼각형이란 3개의 노드가 연결되어있는 것을 의미한다. <b>spam/fake user detection</b><br> Facebook을 예로, 일반유저들 사이에서 친구관계를 그래프로 나타내면 군집화되어 나타나며 삼각형이 이루어져 있는 경우가 많다. 하지만 Spam, fake user들의 친구관계를 그래프로 나타내면 삼각형의 갯수가 현저히 적고, 거대한 삼각형이 나타나는 경우가 많다. 이를 통해 spam, fake user를 판별할 수 있다. <b>DDos attack detection</b><br> Attacker, Victim, Zombie PC가 삼각형 그래프로 나타낼 수 있으면 DDos attack이 발생한 것으로 판별할 수 있다. <b>Community detection</b><br> Clustering coefficient는 결집계수이 특정 노드와 이웃한 노드들이 서로 연결되어 있을 확률이다. 이를 통해 삼각형을 찾고 community를 판별할 수 있다. <br> <br> 하지만 그래프가 너무 크고 많다는 문제가 있다. Facebook에는 10억명 이상의 액티브 유저가 있으며 1조개가 넘는 유니크한 웹페이지가 있다. 이러한 문제는 분산 시스템을 활용하여 해결할 수 있다. 분산 시스템이란 인터넷에 연결된 여러 컴퓨터들의 처리 능력을 이용하여 거대한 계산 문제를 해결하는 분산처리 모델이다. 분산 시스템에 사용되는 프레임 워크로는 하둡과 스파크가 있다.
JavaScript
UTF-8
1,030
4.5
4
[]
no_license
/*
  Jogo de Adivinhação
  - O sistema sorteia um número aleatório de 0 a 50;
  - O sistema pede um palpite para o usuário;
  - Se o usuário acertar, ele venceu;
  - Se não:
    - O sistema avisa se o palpite do usuário é maior ou menor que o número sorteado;
    - A cada erro, o usuário perde 1 vida. Ele tem dez vidas para acertar o número;
*/

let numeroSorteado = 0;
let vidas = 10;

// Returns a uniformly random integer in [min, max].
// FIX: the original file called aleatorio() without ever defining it, which
// threw a ReferenceError as soon as the game started.
function aleatorio(min, max) {
  return Math.floor(Math.random() * (max - min + 1)) + min;
}

function rodarJogo() {
  let palpite = Number(prompt('Adivinhe o número sorteado entre 0 e 50.'));

  if (palpite === numeroSorteado) {
    alert('Parabéns, você ganhou o jogo');
    return;
  }

  // FIX: the spec gives the player ten lives, but the original code never
  // decremented anything and retried forever.
  vidas--;
  if (vidas === 0) {
    alert('Suas vidas acabaram. Você perdeu o jogo.');
    return;
  }

  if (palpite > numeroSorteado) {
    alert('Seu palpite foi maior que o número sorteado. Tente um número mais baixo.');
  } else {
    alert('Seu palpite foi menor que o número sorteado. Tente um número mais alto.');
  }
  rodarJogo();
}

function iniciarJogo() {
  numeroSorteado = aleatorio(0, 50);
  vidas = 10; // reset lives for a fresh game
  console.log(numeroSorteado);
  rodarJogo();
}

iniciarJogo();
Markdown
UTF-8
2,943
3.09375
3
[]
no_license
### CHESSY v1.0: A generic OOP framework for board games [aspirationally] Author: Ranga Muvavarirwa #### Motivation: Many situations arise whereby two sides need to simultaneously use a public good [e.g. a river, a game board etc] in pursuit of their own self-regarding goals (e.g. cross the river, get as many pieces as possible to the other end of the board). Traditional solutions to such situations have typically modelled participant behavior and the ensuing game-strategy as non-cooperative/winner-takes-all/zero-sum games: the outcomes of which (war, tragedy of the commons etc) are often inefficient and/or unacceptable #### Hypothesis Participants (societies, game players etc) would select a cooperative approach -- if they could identify a set of viable paths whose collective cost (number of participants for whom a path could not be found) was less than some predetermined cost of a zero-sum approach (cost of one side losing + cost of half-winning side) #### Goal Create a cooperative version of a board game (v1 supports chess), whose main objective is to determine whether both sides can achieve a shared objective (get as many pieces as possible to the other side of the board) -- without taking any pieces from the 'opposing' team. v2 will provide a mechanism for comparing the distribution of costs/value between cooperative vs competitive iterations #### Key Concepts Framework supports any game scenario whose geometry can be implemented programmatically (chess, checkers, go etc) Players can enter game at any point in time (need not be simultaneous). Available in v2 Players can have different skill levels [1 = Novice 10 = Expert] - Creates a harness for evaluating expert performance -- relative to a random agent - Subsequent iterations could use this harness to train the expert (by evaluating moves by a random agent that beats the expert; Bayesian approaches might be useful here). 
#### Architecture Chessy employs two patterns: - a form of dependency injection / factory-pattern whereby metadata specifying the rules of a game are presented in JSON/Python-dictionary format [refer games dictionary object below] - dynamic class creation using the "TYPE" method for specifying classes [King, Queen, Bishop etc] as well as for instantiating specific players [b_Pawn_01, w_King, b_K1 etc) #### Organization ##### Classes - Game: Controller. Creates board. Invokes Team() to create teams+players. Updates and maintains game state (time_step) - Team: Instance per team. Invoked by Game(). Invokes Player_Template - Player_Template: Generic player class. Invoked by Team() - Incorrect_Input_error: Generic exception handler for inputs - UserInput: [arguably duplicative] class by which an object is initialized with user_inputs as properties ##### Methods - Main: initiates program - run_trials: Iterates through num_trials. Candidate for multiprocessing refactor. - plot_results: Display results using MatPlotLib
C++
UTF-8
541
3.09375
3
[ "MIT" ]
permissive
#ifndef _NODO_HASH_H_
#define _NODO_HASH_H_

#include <string>
#include <iostream>

// NOTE(review): `using namespace std;` in a header leaks std into every
// including translation unit; removing it would require qualifying `string`
// here and possibly in dependent .cpp files, so it is only flagged.
using namespace std;

// A key/value node for a hash table: a long double key (`clave`) mapped to a
// string value (`valor`). Comparison operators order nodes by key only.
class NodoHash {
private:
    long double clave;  // hash key
    string valor;       // stored value

public:
    NodoHash();
    long double getClave();
    string getValor();
    void setClave(long double);
    void setValor(string);
    ~NodoHash();

    // Copies both key and value from another node.
    void operator=(const NodoHash &h){
        this->clave = h.clave;
        this->valor = h.valor;
    }

    // Key-only ordering; the value does not participate.
    bool operator>(const NodoHash &h){
        return(this->clave>h.clave);
    }

    bool operator<(const NodoHash &h){
        return(this->clave<h.clave);
    }
};

#endif
Markdown
UTF-8
1,133
3.421875
3
[]
no_license
--- layout: post id: 90 alias: try-mockito tags: Java date: 2011-09-13 01:40:44 title: mockito --- 写单元测试,早晚会需要mock,不然有的东西没法测。曾经在一年前,我试用过EasyMock,这个目前使用人数最多、好评也很多的mock库,可以让我们方便的mock一个接口或者一个类。 <p>但是,还不够方便,虽然比我们自己写一个mock类方便很多,但是,总觉得还不够方便。 <p>直到有一天,我看到了mockito,原来,mock还可以更简单。如果自己写mock的复杂度是10的话,使用easymock大约是4,而mockito应该是3。不要小看这一点点,这就是创造力,这就是方便。 <p>主页:[http://www.mockito.org/](http://www.mockito.org/) <p>两个缺点: <p>1. 只能使用jdk1.5以上,因为它的方便是用jdk1.5的泛型换来的 <p>2. 不能mock &#8220;static method","final method","new",这没办法,简单易用的mock库基本上都有这缺陷。 <p>另一个专讲mock的有名网站:[http://www.mockobjects.com](http://www.mockobjects.com/),上面有各个语言的常用的mock库的链接
C#
UTF-8
3,685
3.234375
3
[]
no_license
using ExchangeRates.Repositories;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;

namespace ExchangeRates.Services
{
    /// <summary>
    /// Service layer for exchange-rate operations; persistence is delegated
    /// to the injected IRateRepository.
    /// </summary>
    public class RateService : IRateService
    {
        // inject repository layer
        private readonly IRateRepository _rateRepository;

        // constructor for injected repository layer
        public RateService(IRateRepository rateRepository)
        {
            _rateRepository = rateRepository ?? throw new ArgumentNullException(nameof(rateRepository));
        }

        // calls CreateRate through repository rate
        public Rate CreateRate(Rate newRate)
        {
            return _rateRepository.CreateRate(newRate);
        }

        // calculates the conversion rate based on user input strings.
        // Returns 0 for any invalid amount or unknown country code; the
        // controller maps 0 to an error message (see the final return).
        public decimal GetConversionRate(string amount, string fromCountryRate, string toCountryRate)
        {
            decimal decimalNumber;
            if (decimal.TryParse(amount, out decimalNumber)) // parse decimal
            {
                if (decimalNumber > 0) // check if decimal number is over 0 - ACCEPTS 0,1 etc. IN SEARCH BAR!
                {
                    Rate validatedFcr = _rateRepository.ReadByCountry(fromCountryRate); // fetches the Rate object according to the country code
                    if (validatedFcr != null) // checks for nulls, if null, returns 0 to controller
                    {
                        Rate validatedTcr = _rateRepository.ReadByCountry(toCountryRate); // same here
                        if (validatedTcr != null) // same here
                        {
                            // NOTE(review): Rate1's declared type is not visible
                            // here; the ToString/Parse round-trip suggests it is
                            // not already a decimal — confirm, and prefer a
                            // direct conversion if it is.
                            decimal conversionRateFcr = decimal.Parse(validatedFcr.Rate1.ToString()); // makes Rate object's decimal calculable
                            decimal conversionRateTcr = decimal.Parse(validatedTcr.Rate1.ToString()); // same here
                            decimal exchangeRate = conversionRateFcr / conversionRateTcr; // divides conversion rates with each other and saved to parameter
                            decimal conversionValue = 1 / exchangeRate; // conversionValue saved from dividing one by the exchange rate
                            decimal result = decimalNumber * conversionValue; // result is amount of currency multiplied by its conversionValue
                            return decimal.Round(result, 2, MidpointRounding.AwayFromZero); // amount of rounded currency for desired amount in other currency
                        }
                    }
                }
            }
            return 0; // 0 is bound in controller to show up an error message
        }

        // repository is called with Read, returning a List of Rates
        public List<Rate> Read()
        {
            return _rateRepository.Read();
        }

        // the user's country code is given to repository function readbycountry and returns its value Rate
        public Rate ReadByCountry(string country)
        {
            return _rateRepository.ReadByCountry(country);
        }

        // updates desired Rate by country tag, jsonobject Rate including id.
        // When the country is unknown, logs to the console and echoes the
        // input back unchanged.
        public Rate UpdateRate(string country, Rate updateRate)
        {
            var validatedRate =_rateRepository.ReadByCountry(country); // save repository's function result to a var value
            if (validatedRate != null) // if does not return null, carry on with UpdateRate through repository
            {
                return _rateRepository.UpdateRate(updateRate);
            }
            else
            {
                Console.WriteLine("Could not find given rate.");
                return updateRate;
            }
        }
    }
}
Java
UTF-8
2,396
2.203125
2
[]
no_license
package me.isaaceichelberger.speeduhc;

import me.isaaceichelberger.speeduhc.commands.CommandKit;
import me.isaaceichelberger.speeduhc.handlers.Game;
import me.isaaceichelberger.speeduhc.listeners.InventoryClick;
import me.isaaceichelberger.speeduhc.listeners.MGListener;
import me.isaaceichelberger.speeduhc.listeners.PlayerJoin;
import me.isaaceichelberger.speeduhc.listeners.PlayerQuit;
import me.isaaceichelberger.speeduhc.utils.MoveWorldBorder;
import me.isaaceichelberger.speeduhc.threads.StartCountdown;
import org.bukkit.Bukkit;
import org.bukkit.WorldCreator;
import org.bukkit.plugin.PluginManager;
import org.bukkit.plugin.java.JavaPlugin;
import org.bukkit.Location;

/**
 * Bukkit plugin entry point for SpeedUHC: on enable it registers commands
 * and listeners, creates the lobby/game worlds, configures the world border,
 * and starts the lobby countdown.
 */
public final class SpeedUHC extends JavaPlugin {

    // Scheduler task id of the running countdown so it can be cancelled later.
    private static int startCountdownId;

    @Override
    public void onEnable() {
        getCommand("kit").setExecutor(new CommandKit());
        GameState.setState(GameState.IN_LOBBY);
        Game.setCanStart(false);
        startCountdown();
        registerListeners();
        // Create (or load, if already on disk) the lobby and game worlds.
        WorldCreator wc = new WorldCreator("lobby");
        getServer().createWorld(wc);
        WorldCreator wc1 = new WorldCreator("gameWorld");
        getServer().createWorld(wc1);
        // Configure the world border centred at the game world's origin.
        MoveWorldBorder.setWorldBorder(Bukkit.getWorld("gameWorld").getWorldBorder(), 1500, 10);
        MoveWorldBorder.getWorldBorder().setCenter(new Location(Bukkit.getWorld("gameWorld"), 0, 0, 0));
        MoveWorldBorder.getWorldBorder().setWarningDistance(10);
        MoveWorldBorder.getWorldBorder().setSize(1500);
        MoveWorldBorder.getWorldBorder().setWarningTime(5);
        MoveWorldBorder.getWorldBorder().setDamageAmount(5.0);
    }

    @Override
    public void onDisable() {
    }

    // Registers all event listeners with the Bukkit plugin manager.
    private void registerListeners(){
        PluginManager pm = getServer().getPluginManager();
        pm.registerEvents(new PlayerJoin(this), this);
        pm.registerEvents(new PlayerQuit(this), this);
        pm.registerEvents(new InventoryClick(this), this);
    }

    // Starts the repeating countdown task (every 20 ticks = 1 second).
    private void startCountdown(){
        StartCountdown.timeUntilStart = 10; // for testing
        startCountdownId = getServer().getScheduler().scheduleSyncRepeatingTask(this, new StartCountdown(this), 20L, 20L);
    }

    // Cancels the running countdown task.
    public void stopCountdown() {
        getServer().getScheduler().cancelTask(startCountdownId);
    }

    // Restarts the countdown from its initial value.
    public void restartCountdown(){
        stopCountdown();
        startCountdown();
    }
}
JavaScript
UTF-8
312
3.53125
4
[]
no_license
// https://www.hackerrank.com/challenges/diagonal-difference/problem
// Returns the absolute difference between the sums of the primary and
// secondary diagonals of a square matrix; 0 for an empty matrix.
function diagonalDifference(arr) {
  const size = arr.length;
  if (size === 0) return 0;

  let primary = 0;
  let secondary = 0;
  arr.forEach((row, i) => {
    primary += row[i];
    secondary += row[size - 1 - i];
  });
  return Math.abs(primary - secondary);
}
Java
UTF-8
355
2.0625
2
[]
no_license
package com.example.psrihariv.gentcmobile;

/**
 * Created by psrihariv on 3/1/2016.
 */

/** JNI bridge to the native GenTC rendering library. */
public class GenTCJNILib {

    // Loads the native library the first time this class is used.
    static {
        System.loadLibrary("GenTCjni");
    }

    /** Initialises the native side; `path` is presumably a data/asset location — confirm in the native code. */
    public static native void init(String path);

    /** Renders a frame using the given rotation angles (units defined natively). */
    public static native void draw(float AngleX, float AngleY);

    /** Notifies the native side that the viewport changed to width x height. */
    public static native void resize(int width, int height);
}
C++
UTF-8
1,073
2.921875
3
[]
no_license
#ifndef NUMBER_H
#define NUMBER_H

#include "token.h"
#include <cmath>
#include <string>

// A numeric literal token carrying a fixed double value; the token text is
// the decimal rendering of that value.
class number:public token{
public:
    number(double val):token(std::to_string(val)),value(val){;}
    virtual tokenType getType() const{return tokenType::_number; }
    inline const double getValue()const {return value;}
private:
    double value;
};

// A named mathematical constant (pi, e below); identical shape to `number`
// but reported with its own token type.
class constant:public token {
public:
    constant(double val):token(std::to_string(val)),value(val){;}
    virtual tokenType getType() const{return tokenType::_constant; }
    inline const double getValue()const {return value;}
private:
    double value;
};

// A variable token whose value can be (re)bound after parsing; `value` is
// mutable so setValue can be called on a const token.
class variable:public token{
public:
    variable(std::string name):token(name){;}
    virtual tokenType getType() const{return tokenType::_variable; }
    inline const double getValue()const {return value;}
    inline void setValue(double val) const{value = val;}
private:
    mutable double value;
};

// Parses a numeric literal from cstr starting at ich (implemented elsewhere);
// errors are presumably reported via logStream — confirm in the .cpp.
const token* readNumber(const char* cstr,size_t& ich ,size_t len, std::ostream& logStream);

// NOTE(review): `static` at header scope gives every including translation
// unit its own copy of pi and e — consider extern or C++17 inline variables.
static const constant pi(M_PI);
static const constant e(M_E);

#endif
C#
UTF-8
2,109
2.78125
3
[ "MIT" ]
permissive
using System;
using System.Collections.Concurrent;
using System.Text;

namespace Abc.Zerio.Core
{
    /// <summary>
    /// Value type identifying a message type by a 32-bit id. Ids are cached
    /// per CLR Type; unless a factory or explicit registration overrides it,
    /// an id is the CRC32 of the type's full name.
    /// </summary>
    public struct MessageTypeId : IEquatable<MessageTypeId>
    {
        // Cache of already-computed ids, keyed by message Type.
        private static readonly ConcurrentDictionary<Type, MessageTypeId> _values = new ConcurrentDictionary<Type, MessageTypeId>();
        // Pluggable id factory; defaults to the CRC32 scheme below.
        private static Func<Type, MessageTypeId> _messageTypeIdFactory = CreateDefaultMessageTypeId;

        private readonly uint _value;

        public MessageTypeId(uint value)
        {
            _value = value;
        }

        /// <summary>Returns the (cached) id for the given type, computing it on first use.</summary>
        public static MessageTypeId Get(Type type)
        {
            return _values.GetOrAdd(type, _messageTypeIdFactory);
        }

        /// <summary>Replaces the factory used for types not yet cached.</summary>
        public static void RegisterFactory(Func<Type, MessageTypeId> factory)
        {
            _messageTypeIdFactory = factory;
        }

        /// <summary>Pins an explicit id for a type, bypassing the factory.</summary>
        public static void Register(Type type, MessageTypeId messageTypeId)
        {
            _values[type] = messageTypeId;
        }

        public static explicit operator uint(MessageTypeId messageTypeId)
        {
            return messageTypeId._value;
        }

        public bool Equals(MessageTypeId other)
        {
            return _value == other._value;
        }

        public override bool Equals(object obj)
        {
            var other = obj as MessageTypeId?;
            return other != null && Equals(other.Value);
        }

        public override int GetHashCode()
        {
            return (int)_value;
        }

        // Default implementation computes a crc32 hash from the type full name
        private static unsafe MessageTypeId CreateDefaultMessageTypeId(Type messageType)
        {
            var messageTypeFullName = messageType.FullName;
            var fullNameLength = messageTypeFullName.Length;

            fixed (char* pChar = messageTypeFullName)
            {
                // ASCII-encode the full name into a stack buffer, then CRC32 it.
                var bytes = stackalloc byte[fullNameLength];
                var length = Encoding.ASCII.GetBytes(pChar, fullNameLength, bytes, fullNameLength);
                return new MessageTypeId(Crc32.Compute(bytes, length));
            }
        }
    }
}
C
UTF-8
629
2.515625
3
[ "BSD-3-Clause", "LicenseRef-scancode-public-domain", "BSD-2-Clause", "LicenseRef-scancode-other-permissive", "LicenseRef-scancode-musl-exception", "MIT", "LLVM-exception", "Apache-2.0", "NCSA" ]
permissive
#define _GNU_SOURCE
#include <math.h>
#include <stdint.h>

/* exp10(x) = 10^x (musl-style implementation).
 * Integral |x| < 16 hits an exact power-of-ten table; other small arguments
 * are split into integer and fractional parts; everything else falls back
 * to pow(10, x). */
double exp10(double x)
{
	static const double p10[] = {
		1e-15, 1e-14, 1e-13, 1e-12, 1e-11, 1e-10,
		1e-9, 1e-8, 1e-7, 1e-6, 1e-5, 1e-4, 1e-3, 1e-2, 1e-1,
		1, 1e1, 1e2, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9, 1e10,
		1e11, 1e12, 1e13, 1e14, 1e15
	};
	/* split x into integer part n and fraction y */
	double n, y = modf(x, &n);
	union {double f; uint64_t i;} u = {n};
	/* fabs(n) < 16 without raising invalid on nan */
	if ((u.i>>52 & 0x7ff) < 0x3ff+4) {
		if (!y) return p10[(int)n+15];	/* exact table hit for integral x */
		/* 10^y computed as 2^(y*log2(10)) */
		y = exp2(3.32192809488736234787031942948939 * y);
		return y * p10[(int)n+15];
	}
	return pow(10.0, x);
}

/* pow10 is a deprecated GNU alias for exp10 */
weak_alias(exp10, pow10);
C++
UTF-8
383
3.234375
3
[]
no_license
// Reads a line of input and prints its maximum and minimum characters
// (compared by character code).
#include<iostream>
#include<ctype.h>
using namespace std;

int main (void)
{
    char x[101];
    cout<<" enter ";
    cin.getline(x,100);

    // FIX: the original initialised max and min to 0 and scanned from index 1.
    // Since every printable character compares greater than '\0', min never
    // changed (it always printed the NUL character), and x[0] was excluded
    // from the scan entirely. Seed both from x[0] and guard empty input.
    if (x[0]=='\0')
    {
        cout<<"empty input";
        return 0;
    }

    char max=x[0], min=x[0];
    for(int i=1;x[i]!='\0';i++)
    {
        if(x[i]>max)
        {
            max=x[i];
        }
        if(x[i]<min)
        {
            min=x[i];
        }
    }
    cout<<"max is "<<max<<" and min is "<<min;
}
Python
UTF-8
3,158
2.734375
3
[]
no_license
import socket, sys, threading, hashlib,time, logging,os from sys import stderr from time import gmtime, strftime from logging import getLogger, StreamHandler, Formatter, DEBUG #ESTAS SIMPLEMENTE SON VARIABLES host, port = '', 9000 hasher = hashlib.md5() TAMANO = 60000 tamano = 0 class transfer : #ESTE ES EL SOCKET QUE VAMOS USAR serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) serversocket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) def __init__(self): numCli = int(sys.argv[1]) file_name = sys.argv[2] self.serversocket.bind((host, port)) print('Ready to receive connections') self.serversocket.listen(5) id_cliente = 1 threads = [] #VAMOS A ESPERAR A QUE SE CONECTEN TODOS LOS CLIENTES ANTES DE INICIAR LOS THREADS QUE TRANSFIEREN EL ARCHIVO A CADA UNO while numCli > 0 : tamano = os.path.getsize(file_name) conn, addr = self.serversocket.accept() send_thread = threading.Thread(target = self.send_file, args=(file_name, tamano, conn, id_cliente )) threads.append(send_thread) numCli = numCli - 1 id_cliente=id_cliente+1 for thread in threads: thread.start() #AQUI INICIA N LOS THREADS def send_file(self, file_name, tamano, conn, id_cliente): logger = getLogger() os.makedirs(os.path.dirname('./logs/TCP{}.log'.format(id_cliente)), exist_ok=True) logging.basicConfig(format='%(message)s', filename='./logs/TCP{}.log'.format(id_cliente), level=logging.DEBUG) sh = StreamHandler(stderr) sh.setLevel(DEBUG) f = Formatter(' %(message)s') sh.setFormatter(f) logger.addHandler(sh) logger.setLevel(DEBUG) showtime = strftime("%Y-%m-%d %H:%M:%S", gmtime()) logger.info('%s#%s', 'FECHA', showtime) logger.info('%s#%s', 'NOMBRE_ARCHIVO', file_name) logger.info('%s#%s', 'TAMANO_ARCHIVO', tamano) logger.info('%s#%s', 'ID_CLIENTE', id_cliente) i = 0 bytesSent = 0 with open(file_name, 'rb') as file: tamano = os.path.getsize(file_name) data = file.read(TAMANO) start_time = time.time() conn.send(data) while data != bytes(''.encode()): data = file.read(TAMANO) sent = 
conn.send(data) i = i+1 print(len(data)) bytesSent = bytesSent+sent if sent < TAMANO: sent = conn.send(b'Fin') print('Fin') break elapsed_time = time.time() - start_time logger.info('ENVIO_DEL_ARCHIVO#EXITOSO') logger.info('%s#%s', 'BYTES_ENVIADOS', bytesSent) logger.info('%s#%s', 'BYTES_RECIBIDOS', bytesSent) logger.info('%s#%s', 'PAQUETES_ENVIADOS', i) logger.info('%s#%s', 'PAQUETES_RECIBIDOS', i) logger.info('%s#%s', 'TIEMPO_TOTAL', elapsed_time) logger.info('----------------------------') Transfer = transfer()
PHP
UTF-8
414
2.515625
3
[]
no_license
<?php namespace Waka\Crsm\Classes\Imports; use Waka\Crsm\Models\ProjectState; use Maatwebsite\Excel\Concerns\ToModel; use Maatwebsite\Excel\Concerns\WithHeadingRow; class ProjectStateImport implements ToModel, WithHeadingRow { public function model(array $row) { return new ProjectState([ 'name' => $row['name'], 'is_running' => $row['is_running'], ]); } }
C#
UTF-8
894
2.515625
3
[]
no_license
using System; using System.Collections.Generic; using System.Text; namespace conti.maurizio._4H.MediaMobile { class Campione { public DateTime Data { get; set; } public double Temperatura { get; set; } public int Umidita { get; set; } public int Pressione { get; set; } public override string ToString() { return $"{Data}\t{Temperatura}\t{Umidita}\t{Pressione}"; } public string Titoli { get { return "Datetime\t\t\tTemp\tUmid\tPres"; } } public string RigaTemperatura(double offset, int zoom=1) { double val = (Temperatura - offset)*zoom; StringBuilder retVal = new StringBuilder(); retVal.Append( '*', (int)val+1 ); return retVal.ToString(); } } }
Java
UTF-8
2,196
2.296875
2
[]
no_license
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package com.ReminderApp.Servlets; import com.ReminderApp.Model.Employee; import java.io.IOException; import java.io.PrintWriter; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; /** * * @author rrojas */ public class EmpleadoEditServlet extends HttpServlet { protected void processRequest(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { response.setContentType("text/html;charset=UTF-8"); String paramValue = ""; Employee emp = new Employee(); String[] param = request.getParameterValues("employee"); for (String param1 : param) { paramValue = param1; } if(paramValue.isEmpty()){ } int id = Integer.parseInt(paramValue); emp = emp.MostrarEmpleado(id); //emp.MostrarEmpleado(id); request.setAttribute("empleado", emp); request.getRequestDispatcher("editEmployee.jsp").forward(request, response); } @Override protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { processRequest(request, response); } @Override protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { processRequest(request, response); } @Override public String getServletInfo() { return "Short description"; }// </editor-fold> }
Java
UTF-8
357
2.015625
2
[]
no_license
package org.dora.jdbc.grammar.model.operand; import org.dora.jdbc.grammar.model.operand.BinaryOperand; import org.dora.jdbc.grammar.model.operand.Operand; /** * Created by SDE on 2018/5/8. */ public class QuotientOperand extends BinaryOperand { public QuotientOperand(Operand left, Operand right) { super(left, right, "quotient"); } }
Rust
UTF-8
103,425
2.765625
3
[]
no_license
use crate::xb::{HasXB, MightHaveXB}; ///! This module defines data structures for FDS input data, and the functions ///! for converting them to and from Fortran Namelists. This is inherently less ///! flexible than the Namelist data, which can hold any data, but allows us to ///! operate more flexibly on FDS input data. ///! ///! TODO: How do we deal with defaults? Should we leave a Maybe value? or should ///! we insert the defaults when we convert from the Namelists. Let's go with the ///! defaults, it allows us to ignore whether something is specified or not, ///! although it does mean we need to be aware of the version of FDS we are ///! considering. This decision is based on the fact that the primary purpose is ///! analysis of the values, not how it is specified. If we care about the ///! structure of the input itself, we can always fall back to the Namelist data. // use namelist::Namelist; // use namelist::NamelistFile; use namelist::*; use std::convert::{TryFrom, TryInto}; /// The Haskell data type representation of an FDS input script. The first items /// (such as head and time) are single occurrence items. As any of these items /// may or may not occur they are given maybe types. The other items may occur /// zero, one or many times, and are therefore given a list type. There is /// provision for storing namelists that are not understood for the purposes of /// forward compatibility. 
#[derive(Clone, Debug)] pub struct FDSFile { pub head: Option<Head>, pub time: Option<Time>, pub dump: Option<Dump>, pub misc: Option<Misc>, pub meshes: Vec<Mesh>, pub reacs: Vec<Reac>, pub devcs: Vec<Devc>, pub matls: Vec<Matl>, pub surfs: Vec<Surf>, pub obsts: Vec<Obst>, pub holes: Vec<Hole>, pub hvacs: Vec<Hvac>, pub vents: Vec<Vent>, pub bndfs: Vec<Bndf>, pub isofs: Vec<Isof>, pub slcfs: Vec<Slcf>, pub ramps: Vec<Ramp>, pub props: Vec<Prop>, pub parts: Vec<Part>, pub trnxs: Vec<Trnx>, pub trnys: Vec<Trny>, pub trnzs: Vec<Trnz>, pub unknown_namelists: Vec<Namelist>, } impl FDSFile { pub fn new() -> Self { Default::default() } pub fn get_surf(&self, surf_id: &str) -> Option<&Surf> { for s in &self.surfs { match &s.id { Some(id) => { if id == surf_id { return Some(s); } } None => (), } } None } pub fn decode_namelist(&mut self, namelist: &Namelist) { decode_namelist(self, namelist) } } impl Default for FDSFile { fn default() -> Self { FDSFile { head: None, time: None, dump: None, misc: None, meshes: vec![], reacs: vec![], devcs: vec![], matls: vec![], surfs: vec![], obsts: vec![], holes: vec![], hvacs: vec![], vents: vec![], bndfs: vec![], isofs: vec![], slcfs: vec![], ramps: vec![], props: vec![], parts: vec![], trnxs: vec![], trnys: vec![], trnzs: vec![], unknown_namelists: vec![], } } } #[derive(Clone, Debug)] pub struct Head { pub chid: Option<String>, pub fyi: Option<String>, pub title: Option<String>, } #[derive(Clone, Debug)] pub struct Bndf { cell_centred: bool, fyi: Option<String>, part_id: Option<String>, prop_id: Option<String>, recount_drip: bool, quantity: Option<String>, spec_id: Option<String>, statistics: Option<String>, } #[derive(Clone, Debug)] pub struct Bnde { cell_centered: bool, fyi: Option<String>, part_id: Option<String>, prop_id: Option<String>, quantity: Option<String>, spec_id: Option<String>, } #[derive(Clone, Debug)] pub struct Clip { fyi: Option<String>, maximum_density: f64, maximum_mass_fraction: Vec<Vec<f64>>, maximum_temperature: 
f64, minimum_density: f64, minimum_mass_fraction: Vec<Vec<f64>>, minimum_temperature: f64, } #[derive(Clone, Debug)] pub struct Ctrl { constant: f64, // , CYCLES : String // , CYCLE_TIME : String delay: f64, differential_gain: f64, evacuation: bool, function_type: String, id: String, initial_state: bool, integral_gain: f64, input_id: Vec<String>, latch: bool, n: i64, on_bound: String, proportional_gain: f64, ramp_id: String, setpoint: f64, target_value: f64, trip_direction: i64, } #[derive(Clone, Debug)] pub struct Csvf { csvfile: String, uvwfile: String, } #[derive(Clone, Debug)] pub struct Devc { // bypass_flowrate: f64, // conversion_addend: f64, // conversion_factor: f64, // coord_factor: f64, // ctrl_id: Option<String>, // delay: f64, // depth: f64, // devc_id: Option<String>, // dry: bool, // duct_id: Option<String>, // evacuation: bool, // flowrate: f64, // fyi: Option<String>, // hide_coordinates: bool, pub id: Option<String>, // initial_state: bool, // init_id: Option<String>, // ior: Option<i64>, // latch: bool, // matl_id: Option<String>, // node_id: Vec<String>, // no_update_devc_id: Option<String>, // no_update_ctrl_id: Option<String>, // orientation: XYZ, // orientation_number: i64, // output: bool, // part_id: Option<String>, // pipe_index: i64, // points: i64, pub prop_id: Option<String>, // quantity: Option<String>, // quantity2: Option<String>, // quantity_range: (f64, f64), // r_id: Option<String>, // reac_id: Option<String>, // relative: bool, // rotation: f64, // setpoint: Option<f64>, // smoothing_factor: f64, // spec_id: Option<String>, // statistics: Option<String>, // statistics_start: f64, // surf_id: Option<String>, // time_averaged: bool, // time_history: bool, // trip_direction: i64, // units: Option<String>, // velo_index: i64, // xb: Option<XB>, pub xyz: Option<XYZ>, // x_id: Option<String>, // y_id: Option<String>, // z_id: Option<String>, // xyz_units: String, } impl Devc { pub fn is_sprinkler(&self, fds_file: &FDSFile) -> bool { 
let prop_id = match self.prop_id.as_ref() { Some(x) => x, None => return false, }; println!("devc has prop {:?}", self.prop_id); // Get the prop. for prop in &fds_file.props { if prop.id.as_ref() == Some(prop_id) { return prop.is_sprinkler_prop(); } } false } } #[derive(Clone, Debug)] pub struct Dump { clip_restart_files: bool, column_dump_limit: bool, ctrl_column_limit: i64, devc_column_limit: i64, dt_bnde: f64, dt_bndf: f64, dt_cpu: f64, dt_ctrl: f64, dt_devc: f64, dt_devc_line: f64, dt_flush: f64, dt_geom: f64, dt_hrr: f64, dt_isof: f64, dt_mass: f64, dt_part: f64, dt_pl3d: f64, dt_prof: f64, dt_restart: f64, dt_sl3d: f64, dt_slcf: f64, eb_part_file: bool, flush_file_buffers: bool, geom_diag: bool, mass_file: bool, maximum_particles: i64, mms_timer: f64, nframes: i64, plot3d_part_id: Vec<String>, plot3d_quantity: Vec<String>, plot3d_spec_id: Vec<String>, plot3d_velo_index: Vec<i64>, render_file: String, sig_figs: i64, sig_figs_exp: i64, smoke3d: bool, smoke3d_quantity: String, smoke3d_spec_id: String, status_files: bool, suppress_diagnostics: bool, uvw_timer: Vec<f64>, velocity_error_file: bool, write_xyz: bool, } #[derive(Clone, Debug)] pub struct Hole { color: String, ctrl_id: String, devc_id: String, evacuation: bool, fyi: Option<String>, id: String, mesh_id: String, mult_id: String, rgb: RGB, transparency: f64, xb: XB, } #[derive(Clone, Debug)] pub struct Hvac { aircoil_id: Option<String>, ambient: bool, area: f64, clean_loss: f64, coolant_specific_heat: f64, coolant_mass_flow: f64, coolant_temperature: f64, ctrl_id: String, damper: bool, devc_id: String, diameter: f64, duct_id: Vec<String>, duct_interp_type: String, efficiency: Vec<f64>, fan_id: String, filter_id: String, fixed_q: Vec<f64>, id: String, leak_enthalpy: bool, length: f64, loading: Vec<f64>, loading_multiplier: Vec<f64>, loss: Vec<f64>, mass_flow: f64, max_flow: f64, max_pressure: f64, n_cells: i64, node_id: Vec<String>, perimeter: f64, ramp_id: String, ramp_loss: String, reverse: bool, 
roughness: f64, spec_id: String, tau_ac: f64, tau_fan: f64, tau_vf: f64, type_id: String, vent_id: Option<String>, vent2_id: Option<String>, volume_flow: f64, xyz: XYZ, } #[derive(Clone, Debug)] pub struct Init { auto_ignition_temperature: f64, cell_centered: bool, ctrl_id: String, density: f64, devc_id: String, diameter: f64, dt_insert: f64, dx: f64, dy: f64, dz: f64, height: f64, hrrpuv: f64, id: String, mass_fraction: Vec<f64>, mass_per_time: f64, mass_per_volume: f64, mult_id: String, n_particles: i64, n_particles_per_cell: i64, part_id: String, radius: f64, shape: String, spec_id: Vec<String>, temperature: f64, uvw: Vec<f64>, xb: XB, xyz: XYZ, particle_weight_factor: f64, number_initial_particles: i64, } #[derive(Clone, Debug)] pub struct Isof { fyi: Option<String>, quantity: String, spec_id: String, value: f64, velo_index: i64, } #[derive(Clone, Debug)] pub struct Matl { a: Vec<f64>, absorption_coefficient: f64, boiling_temperature: f64, color: String, conductivity: f64, conductivity_ramp: String, density: f64, e: Vec<f64>, emissivity: f64, fyi: Option<String>, heating_rate: Vec<f64>, heat_of_combustion: Vec<f64>, heat_of_reaction: Vec<f64>, id: String, matl_id: Vec<String>, nu_matl: Vec<f64>, nu_spec: Vec<f64>, n_reactions: i64, n_s: Vec<f64>, n_t: Vec<f64>, n_o2: Vec<f64>, pcr: Vec<bool>, pyrolysis_range: Vec<f64>, reference_rate: Vec<f64>, reference_temperature: Vec<f64>, rgb: RGB, specific_heat: f64, specific_heat_ramp: String, spec_id: Vec<String>, threshold_sign: Vec<f64>, threshold_temperature: Vec<f64>, // , POROSITY : String allow_shrinking: bool, allow_swelling: bool, gas_diffusion_depth: Vec<f64>, } // data PyrolysisReac // = PyrolysisReacAE // { pyrolysisReacAE_A : f64 -- ^A // , pyrolysisReacAE_E : f64 -- ^E // , pyrolysisReacAE_HEAT_OF_REACTION : f64 -- ^HEAT_OF_REACTION // } // | PyrolysisReacTGM // { pyrolysisReacTGM_REFERENCE_TEMPERATURE : f64 -- ^REFERENCE_TEMPERATURE // , pyrolysisReacTGM_HEATING_RATE : f64 -- ^HEATING_RATE // , 
pyrolysisReacTGM_PYROLYSIS_RANGE : f64 -- ^PYROLYSIS_RANGE // , pyrolysisReacTGM_HEAT_OF_REACTION : f64 -- ^HEAT_OF_REACTION // } // | NoPyrolysis #[derive(Clone, Debug)] pub struct Resolution { pub x: f64, pub y: f64, pub z: f64, } impl Resolution { pub fn volume(&self) -> f64 { self.x * self.y * self.z } pub fn max_side(&self) -> f64 { let mut max = self.x; if self.y > max { max = self.y; } if self.z > max { max = self.z; } max } } // ---------------------------------------- #[derive(Clone, Debug)] pub struct Mesh { pub id: Option<String>, pub xb: XB, pub ijk: IJK, // pub color: String, // pub cylindrical: bool, // pub evacuation: bool, // pub evac_humans: bool, // pub evac_z_offset: f64, // pub fyi: Option<String>, // pub level: i64, // pub mpi_process: Option<i64>, // pub mult_id: Option<String>, // pub rgb: RGB, // pub n_threads: Option<i64>, // , PERIODIC_MESH_IDS : [Text] } impl Mesh { pub fn resolution(&self) -> Resolution { let ijk = self.ijk; let (dx, dy, dz) = self.dimensions(); Resolution { x: dx / (ijk.i as f64), y: dy / (ijk.j as f64), z: dz / (ijk.k as f64), } } pub fn dimensions(&self) -> (f64, f64, f64) { let xb = self.xb(); let dx = xb.x2 - xb.x1; let dy = xb.y2 - xb.y1; let dz = xb.z2 - xb.z1; (dx, dy, dz) } pub fn n_cells(&self) -> u64 { (self.ijk.i * self.ijk.j * self.ijk.k) as u64 } } impl HasXB for Mesh { fn xb(&self) -> XB { self.xb.clone() } } impl HasXB for &Mesh { fn xb(&self) -> XB { self.xb.clone() } } impl HasXB for &mut Mesh { fn xb(&self) -> XB { self.xb.clone() } } #[derive(Clone, Debug)] pub struct Misc { // agglomeration: bool, // aerosol_al2o3: bool, // allow_surface_particles: bool, // allow_underside_particles: bool, // assumed_gas_temperature: f64, // assumed_gas_temperature_ramp: String, // baroclinic: bool, // bndf_default: bool, // cc_ibm: bool, // cnf_cutoff: f64, // cfl_max: f64, // cfl_min: f64, // cfl_velocity_norm: i64, // check_ht: bool, // check_realizability: bool, // check_vn: bool, // clip_mass_fraction: bool, // 
compute_viscosity_twice: bool, // compute_zeta_source_term: bool, // constant_h_solid: bool, // constant_specific_heat_ratio: bool, // coriolis_vector: Vec<f64>, // correct_subgrid_temperature: bool, // coupled_1d3d_heat_transfer: bool, // c_deardorff: f64, // c_rng: f64, // c_rng_cutoff: f64, // c_smagorinsky: f64, // c_vreman: f64, // dns: bool, // drag_cfl_max: f64, // dt_mean_forcing: f64, // enthalpy_transport: bool, // evacuation_drill: bool, // evacuation_mc_mode: bool, // evac_pressure_iterations: i64, // evac_surf_default: String, // evac_time_iterations: i64, // evaporation: bool, // // , EXCHANGE_EDGES : String // external_boundary_correction: bool, // extinction_model: String, // hvac_pres_relax: f64, // ht3d_test: i64, // fds5_options: bool, // flux_limiter: i64, // force_vector: Vec<f64>, // freeze_velocity: bool, // fyi: Option<String>, // gamma: f64, // gravitational_deposition: bool, // gravitational_settling: bool, // ground_level: f64, // gvec: Vec<f64>, // dt_hvac: f64, // h_f_reference_temperature: f64, // hrrpuv_max_smv: f64, // humidity: f64, // hvac_mass_transport: bool, // iblank_smv: bool, // immersed_boundary_method: i64, // initial_unmixed_fraction: f64, // // , KINETIC_ENERGY_SOURCE : String // lapse_rate: f64, // les_filter_width: String, // max_chemistry_iterations: i64, // max_leak_paths: i64, pub maximum_visibility: f64, // mean_forcing: Vec<bool>, // mpi_timeout: f64, // n_fixed_chemistry_substeps: i64, // near_wall_turbulence_model: String, // // , NEW_MOMENTUM_NUDGING : String // // , NEW_OPEN_BOUNDARY : String // noise: bool, // noise_velocity: f64, // no_evacuation: bool, // no_ramps: bool, // // , NORTHANGLE : String // overwrite: bool, // particle_cfl_max: f64, // particle_cfl_min: f64, // particle_cfl: bool, // periodic_test: i64, // // , PROFILING : String // porous_floor: bool, // // , POTENTIAL_TEMPERATURE_CORRECTION : String // pr: f64, // process_all_meshes: bool, // projection: bool, // p_inf: f64, // // , RAMP_FVX_T : 
String // // , RAMP_FVY_T : String // // , RAMP_FVZ_T : String // ramp_gx: String, // ramp_gy: String, // ramp_gz: String, // ramp_u0: String, // ramp_u0_t: String, // ramp_v0: String, // ramp_v0_t: String, // ramp_w0: String, // ramp_w0_t: String, // ramp_u0_z: String, // ramp_v0_z: String, // ramp_w0_z: String, // // , RADIATION : String // research_mode: bool, // restart: bool, // restart_chid: String, // richardson_error_tolerance: f64, // run_avg_fac: f64, // sc: f64, // second_order_interpolated_boundary: bool, // second_order_particle_transport: bool, // shared_file_system: bool, // // , SLIP_CONDITION : String // smoke_albedo: f64, // solid_phase_only: bool, // // , SOOT_OXIDATION : String // // , SPONGE_LAYER_DISTANCE : String // stratification: bool, // suppression: bool, // // , SURF_DEFAULT : String // // , TEMPERATURE_DEPENDENT_REACTION : String // // , TENSOR_DIFFUSIVITY : String // terrain_case: bool, // terrain_image: String, // // , TEST_FILTER_QUADRATURE : String // texture_origin: Vec<f64>, // thermophoretic_deposition: bool, // thicken_obstructions: bool, // // , TRANSPORT_UNMIXED_FRACTION : String // // , TRANSPORT_ZETA_SCHEME : String // tmpa: f64, // turbulence_model: String, // turbulent_deposition: bool, // // , TURB_INIT_CLOCK : String // u0: f64, // uvw_file: String, // v0: f64, // veg_level_set_coupled: bool, // veg_level_set_uncoupled: bool, // verbose: f64, pub visibility_factor: f64, // vn_max: f64, // vn_min: f64, // y_co2_infty: f64, // y_o2_infty: f64, // w0: f64, // // , WD_PROPS : String // // , WIND_BOUNDARY : String // // , WIND_ONLY : String } #[derive(Clone, Debug)] pub struct Mult { dx: f64, dxb: Vec<f64>, dx0: f64, dy: f64, dyb: Vec<f64>, dy0: f64, dz: f64, dzb: Vec<f64>, dz0: f64, id: String, i_lower: i64, i_upper: i64, j_lower: i64, j_upper: i64, k_lower: i64, k_upper: i64, n_lower: i64, n_upper: i64, } #[derive(Clone, Debug)] pub struct Obst { // allow_vent: bool, // bndf_face: (bool, bool, bool, bool, bool, bool), // 
bndf_obst: bool, // bulk_density: Option<f64>, // color: Option<String>, // ctrl_id: Option<String>, // devc_id: Option<String>, // evacuation: bool, // fyi: Option<String>, // ht3d: bool, pub id: Option<String>, // matl_id: Option<String>, // mesh_id: Option<String>, // mult_id: Option<String>, // // , NOTERRAIN : bool // outline: bool, // overlay: bool, // permit_hole: bool, // prop_id: Option<String>, // removable: bool, // rgb: Option<RGB>, pub surf_id: Option<String>, pub surf_id6: Option<(String, String, String, String, String, String)>, pub surf_ids: Option<(String, String, String)>, // texture_origin: XYZ, // thicken: bool, // transparency: f64, pub xb: XB, } impl Obst { pub fn surf_ids(&self) -> Vec<String> { let mut ss = Vec::with_capacity(6); match &self.surf_id { Some(s) => { ss.push(s.clone()); } _ => (), } match &self.surf_ids { Some((s1, s2, s3)) => { ss.push(s1.clone()); ss.push(s2.clone()); ss.push(s3.clone()); } _ => (), } match &self.surf_id6 { Some((s1, s2, s3, s4, s5, s6)) => { ss.push(s1.clone()); ss.push(s2.clone()); ss.push(s3.clone()); ss.push(s4.clone()); ss.push(s5.clone()); ss.push(s6.clone()); } _ => (), } ss } pub fn has_surf(&self, surf_id: &str) -> bool { for id in self.surf_ids() { if &id == surf_id { return true; } } false } pub fn area_x(&self) -> Option<f64> { let xb = Some(self.xb)?; Some((xb.y2 - xb.y1) * (xb.z2 - xb.z1)) } pub fn area_y(&self) -> Option<f64> { let xb = Some(self.xb)?; Some((xb.x2 - xb.x1) * (xb.z2 - xb.z1)) } pub fn area_z(&self) -> Option<f64> { let xb = Some(self.xb)?; Some((xb.x2 - xb.x1) * (xb.y2 - xb.y1)) } pub fn is_burner(&self, fds_data: &FDSFile) -> bool { let surf_ids = self.surf_ids(); let surfaces: Vec<Option<&Surf>> = surf_ids .iter() .map(|surf_id| { fds_data .surfs .iter() .find(|surf| surf.id.as_ref() == Some(surf_id)) }) .collect(); for surf in surfaces { if let Some(surf) = surf { if surf.is_burner() { return true; } } } false } pub fn is_extract(&self, fds_data: &FDSFile) -> bool { let 
surf_ids = self.surf_ids(); let surfaces: Vec<Option<&Surf>> = surf_ids .iter() .map(|surf_id| { fds_data .surfs .iter() .find(|surf| surf.id.as_ref() == Some(surf_id)) }) .collect(); for surf in surfaces { if let Some(surf) = surf { if surf.is_extract() { return true; } } } false } pub fn is_supply(&self, fds_data: &FDSFile) -> bool { let surf_ids = self.surf_ids(); let surfaces: Vec<Option<&Surf>> = surf_ids .iter() .map(|surf_id| { fds_data .surfs .iter() .find(|surf| surf.id.as_ref() == Some(surf_id)) }) .collect(); for surf in surfaces { if let Some(surf) = surf { if surf.is_supply() { return true; } } } false } } impl HasXB for Obst { fn xb(&self) -> XB { self.xb } } #[derive(Clone, Debug)] pub struct Part { age: f64, breakup: bool, breakup_cnf_ramp_id: Option<String>, breakup_distribution: String, breakup_gamma_d: f64, breakup_ratio: f64, breakup_sigma_d: Option<f64>, check_distribution: bool, cnf_ramp_id: Option<String>, color: String, complex_refractive_index: f64, ctrl_id: Option<String>, dense_volume_fraction: f64, devc_id: Option<String>, diameter: Option<f64>, distribution: String, drag_coefficient: Vec<f64>, drag_law: String, free_area_fraction: Option<f64>, fyi: Option<String>, gamma_d: f64, heat_of_combustion: Option<f64>, horizontal_velocity: f64, id: Option<String>, initial_temperature: f64, massless: bool, maximum_diameter: f64, minimum_diameter: f64, monodisperse: bool, n_strata: i64, orientation: Vec<f64>, permeability: Vec<f64>, periodic_x: bool, periodic_y: bool, periodic_z: bool, porous_volume_fraction: Option<f64>, prop_id: Option<String>, quantities: Vec<String>, quantities_spec_id: Vec<String>, radiative_property_table: Option<f64>, real_refractive_index: f64, rgb: Option<RGB>, running_average_factor: f64, sampling_factor: i64, second_order_particle_transport: bool, sigma_d: Option<f64>, spec_id: Option<String>, static_: bool, surface_tension: f64, surf_id: Option<String>, target_only: bool, turbulent_dispersion: bool, vertical_velocity: 
f64, } #[derive(Clone, Debug)] pub struct Pres { check_poisson: bool, fishpak_bc: Vec<i64>, // , GLMAT_SOLVER : String iteration_suspend_factor: f64, // , LAPLACE_PRESSURE_CORRECTION : String max_pressure_iterations: i64, pressure_relax_time: f64, pressure_tolerance: f64, relaxation_factor: f64, scarc_method: String, scarc_krylov: String, scarc_multigrid: String, scarc_smooth: String, scarc_precon: String, scarc_coarse: String, scarc_initial: String, scarc_accuracy: f64, scarc_debug: String, scarc_multigrid_cycle: String, scarc_multigrid_level: String, scarc_multigrid_coarsening: String, scarc_multigrid_iterations: i64, scarc_multigrid_accuracy: f64, scarc_multigrid_interpol: String, scarc_krylov_iterations: i64, scarc_krylov_accuracy: f64, scarc_smooth_iterations: i64, scarc_smooth_accuracy: f64, scarc_smooth_omega: String, scarc_precon_iterations: i64, scarc_precon_accuracy: f64, scarc_precon_omega: String, scarc_coarse_iterations: i64, scarc_coarse_accuracy: f64, solver: String, suspend_pressure_iterations: i64, velocity_tolerance: f64, } #[derive(Clone, Debug)] pub struct Prof { format_index: i64, fyi: String, id: String, ior: i64, quantity: String, xyz: XYZ, } #[derive(Clone, Debug)] pub struct Prop { pub activation_obscuration: Option<f64>, pub activation_temperature: Option<f64>, // alpha_c: f64, // alpha_e: f64, // bead_density: f64, // bead_diameter: f64, // bead_emissivity: f64, // bead_heat_transfer_coefficient: f64, // bead_specific_heat: f64, // beta_c: f64, // beta_e: f64, // // , FED_ACTIVITY : String // characteristic_velocity: f64, // c_factor: f64, // density: f64, // diameter: f64, // droplet_velocity: f64, // emissivity: f64, // flow_ramp: String, pub flow_rate: Option<f64>, // flow_tau: f64, // fyi: Option<String>, // gauge_emissivity: f64, // gauge_temperature: f64, // heat_transfer_coefficient: f64, pub id: Option<String>, // initial_temperature: f64, // k_factor: f64, // length: f64, // mass_flow_rate: f64, // offset: f64, // 
operating_pressure: f64, // orifice_diameter: f64, // p0: String, // particles_per_second: i64, // particle_velocity: f64, pub part_id: Option<String>, // pdpa_end: f64, // pdpa_histogram: bool, // pdpa_histogram_limits: Vec<f64>, // pdpa_histogram_nbins: i64, // pdpa_histogram_cumulative: bool, // pdpa_integrate: bool, // pdpa_m: i64, // pdpa_n: i64, // pdpa_normalize: bool, // pdpa_radius: f64, // pdpa_start: f64, // pressure_ramp: String, // , PX : String // // , PXX : String pub quantity: Option<String>, pub rti: Option<f64>, // smokeview_id: Vec<String>, // smokeview_parameters: Vec<String>, // spec_id: String, // spray_angle: Vec<f64>, // spray_pattern_beta: f64, // spray_pattern_mu: f64, // spray_pattern_shape: String, // spray_pattern_table: String, // velocity_component: i64, // , DROPLET_VELOCITY : String } impl Prop { pub fn is_sprinkler_prop(&self) -> bool { self.quantity == Some("SPRINKLER LINK TEMPERATURE".to_string()) } } #[derive(Clone, Debug)] pub struct Radi { angle_increment: i64, band_limits: Vec<f64>, c_max: f64, c_min: f64, initial_radiation_iterations: i64, kappa0: f64, nmieang: i64, number_radiation_angles: i64, path_length: f64, radiation: bool, radiation_iterations: i64, // , RADIATIVE_FRACTION : String radtmp: f64, // , RTE_SOURCE_CORRECTION : String time_step_increment: i64, wide_band_model: bool, mie_minimum_diameter: f64, mie_maximum_diameter: f64, mie_ndg: i64, number_initial_iterations: i64, qr_clip: f64, } #[derive(Clone, Debug)] pub struct Ramp { id: String, entries: Vec<RampEntry>, } #[derive(Clone, Debug)] pub struct RampEntry { ctrl_id: String, devc_id: String, f: f64, fyi: Option<String>, number_interpolation_points: i64, t: f64, x: f64, z: f64, } #[derive(Clone, Debug)] pub struct Reac { // pub a: Option<f64>, // , ALT_REAC_ID : String // pub auto_ignition_temperature: f64, // pub c: f64, // pub check_atom_balance: bool, pub co_yield: Option<f64>, // pub critical_flame_temperature: f64, // pub e: f64, // pub epumo2: f64, // , 
K : String // pub equation: String, // pub fixed_mix_time: f64, // , FLAME_SPEED : String // , FLAME_SPEED_EXPONENT : String // , FLAME_SPEED_TEMPERATURE : String // pub formula: String, // pub fuel: String, // pub fuel_radcal_id: String, // , FWD_ID : String // pub fyi: Option<String>, // pub h: f64, // pub heat_of_combustion: f64, // pub id: Option<String>, // pub ideal: bool, // pub n: f64, // pub nu: Vec<f64>, // pub n_s: Vec<f64>, // pub n_t: f64, // pub o: f64, // , ODE_SOLVER : String // pub radiative_fraction: f64, // pub ramp_chi_r: String, // , RAMP_FS : String // pub reac_atom_error: f64, // pub reac_mass_error: f64, // , REVERSE : String // pub soot_h_fraction: f64, pub soot_yield: Option<f64>, // pub spec_id_n_s: Vec<String>, // pub spec_id_nu: Vec<String>, // , TABLE_FS : String // , TAU_CHEM : String // , TAU_FLAME : String // pub third_body: bool, // , TURBULENT_FLAME_SPEED_ALPHA : String // , TURBULENT_FLAME_SPEED_EXPONENT : String // , Y_P_MIN_EDC : String } #[derive(Clone, Debug)] pub struct Slcf { agl_slice: String, cell_centered: bool, evacuation: bool, // , FACE_CENTERED : String // , FIRE_LINE : String fyi: Option<String>, id: Option<String>, ior: i64, level_set_fire_line: String, maximum_value: f64, mesh_number: i64, minimum_value: f64, part_id: String, pbx: Option<f64>, pby: Option<f64>, pbz: Option<f64>, // , PROP_ID : String quantity: Option<String>, quantity2: Option<String>, reac_id: String, // , SLICETYPE : String spec_id: Option<String>, vector: bool, velo_index: i64, xb: XB, } #[derive(Clone, Debug)] pub struct Spec { aerosol: bool, alias: String, background: bool, // , COPY_LUMPED : String conductivity: f64, conductivity_solid: f64, density_liquid: f64, density_solid: f64, diffusivity: f64, enthalpy_of_formation: f64, epsilonklj: f64, fic_concentration: f64, fld_lethal_dose: f64, formula: String, fyi: Option<String>, heat_of_vaporization: f64, h_v_reference_temperature: f64, id: String, lumped_component_only: bool, 
mass_extinction_coefficient: f64, mass_fraction: Vec<f64>, mass_fraction_0: f64, // , MAX_DIAMETER : String mean_diameter: f64, melting_temperature: f64, // , MIN_DIAMETER : String mw: f64, // , N_BINS : String pr_gas: f64, primitive: bool, radcal_id: String, ramp_cp: String, ramp_cp_l: String, ramp_d: String, ramp_g_f: String, ramp_k: String, ramp_mu: String, reference_enthalpy: f64, reference_temperature: f64, sigmalj: f64, spec_id: Vec<String>, specific_heat: f64, specific_heat_liquid: f64, vaporization_temperature: f64, viscosity: f64, volume_fraction: Vec<f64>, } #[derive(Clone, Debug)] pub struct Surf { pub adiabatic: bool, pub auto_ignition_temperature: f64, // pub backing: String, // pub burn_away: bool, // pub cell_size_factor: f64, // pub c_forced_constant: f64, // pub c_forced_pr_exp: f64, // pub c_forced_re: f64, // pub c_forced_re_exp: f64, // pub c_horizontal: f64, // pub c_vertical: f64, pub color: Option<String>, // pub convection_length_scale: f64, // pub convective_heat_flux: Option<f64>, // pub convert_volume_to_mass: bool, // pub default: bool, // pub dt_insert: f64, // pub emissivity: f64, // pub emissivity_back: Option<f64>, // pub evac_default: bool, // pub external_flux: f64, // pub extinction_temperature: f64, // pub free_slip: bool, pub fyi: Option<String>, // pub geometry: String, // pub heat_of_vaporization: f64, // pub heat_transfer_coefficient: f64, // pub heat_transfer_coefficient_back: f64, // pub heat_transfer_model: String, pub hrrpua: Option<f64>, // pub ht3d: bool, pub id: Option<String>, // pub ignition_temperature: f64, // pub inner_radius: f64, // pub internal_heat_source: Vec<f64>, // pub layer_divide: f64, // pub leak_path: Vec<i64>, // pub length: f64, // pub mass_flux: Option<Vec<f64>>, // pub mass_flux_total: Option<f64>, // pub mass_flux_var: Option<f64>, // pub mass_fraction: Vec<f64>, // pub mass_transfer_coefficient: f64, // pub matl_id: Vec<String>, // pub matl_mass_fraction: Vec<f64>, // pub minimum_layer_thickness: 
f64, pub mlrpua: Option<f64>, // , N_CELLS_MAX : String // n_layer_cells_max: Vec<i64>, // net_heat_flux: f64, // no_slip: bool, // nppc: i64, // particle_mass_flux: f64, // part_id: String, // ple: f64, // profile: String, // radius: f64, // ramp_ef: String, // ramp_mf: Vec<String>, // ramp_part: String, // ramp_q: Option<String>, // ramp_t: Option<String>, // ramp_t_i: Option<String>, // ramp_v: Option<String>, // ramp_v_x: Option<String>, // ramp_v_y: Option<String>, // ramp_v_z: Option<String>, // rgb: RGB, // roughness: f64, // spec_id: String, // spread_rate: f64, // stretch_factor: f64, // tau_ef: f64, // tau_mf: f64, // tau_part: f64, pub tau_q: Option<f64>, // tau_t: f64, // tau_v: f64, // texture_height: f64, // texture_map: String, // texture_width: f64, // tga_analysis: bool, // tga_final_temperature: f64, // tga_heating_rate: f64, // thickness: Vec<f64>, // tmp_back: f64, // tmp_front: f64, // tmp_inner: Vec<f64>, // transparency: f64, // vegetation: bool, // vegetation_arrhenius_degrad: bool, // vegetation_cdrag: f64, // vegetation_char_fraction: f64, // vegetation_element_density: i64, // vegetation_ground_temp: f64, // vegetation_height: f64, // vegetation_initial_temp: f64, // vegetation_layers: i64, // vegetation_linear_degrad: bool, // vegetation_load: f64, // vegetation_lset_ignite_time: f64, // veg_lset_qcon: f64, // vegetation_moisture: f64, // vegetation_no_burn: bool, // vegetation_svratio: i64, // veg_level_set_spread: bool, // veg_lset_ros_back: f64, // veg_lset_ros_flank: f64, // veg_lset_ros_head: f64, // veg_lset_wind_exp: f64, // veg_lset_sigma: f64, // veg_lset_ht: f64, // veg_lset_beta: f64, // veg_lset_ellipse: f64, // veg_lset_tan2: bool, // veg_lset_ellipse_head: f64, pub vel: Option<f64>, // vel_bulk: f64, // vel_grad: f64, // vel_t: Option<(f64, f64)>, pub volume_flow: Option<f64>, // width: f64, // xyz: XYZ, // z0: f64, // , ZETA_FRONT : String // , EXTERNAL_FLUX_RAMP : String // , TAU_EXTERNAL_FLUX : String // , VOLUME_FLUX : 
String } impl Surf { /// A SURF is a burner surface if it has either MLRPUA or HRRPUA set. pub fn is_burner(&self) -> bool { self.mlrpua.is_some() || self.hrrpua.is_some() } /// A SURF is an extract surface if either VEL or VOLUME_FLOW is set and is positive /// AND it is not a burner. pub fn is_extract(&self) -> bool { (self.vel.map(|v| v > 0.0).unwrap_or(false) || self.volume_flow.map(|v| v > 0.0).unwrap_or(false)) && !self.is_burner() } /// A SURF is a supply surface if either VEL or VOLUME_FLOW is set and is negative /// AND it is not a burner. pub fn is_supply(&self) -> bool { (self.vel.map(|v| v < 0.0).unwrap_or(false) || self.volume_flow.map(|v| v < 0.0).unwrap_or(false)) && !self.is_burner() } } impl Default for Surf { fn default() -> Self { Surf { adiabatic: true, auto_ignition_temperature: -273_f64, // backing: "EXPOSED".to_string(), // burn_away: false, // cell_size_factor: 1_f64, // c_forced_constant: 0_f64, // c_forced_pr_exp: 0_f64, // c_forced_re: 0_f64, // c_forced_re_exp: 0_f64, // c_horizontal: 1.52_f64, // c_vertical: 1.31_f64, color: None, // convection_length_scale: 1_f64, // convective_heat_flux: None, // convert_volume_to_mass: false, // default: false, // dt_insert: 0.01_f64, // emissivity: 0.9_f64, // emissivity_back: None, // evac_default: false, // external_flux: None, // extinction_temperature: -273_f64, // fireline_mlr_max: f64, // free_slip: false, fyi: None, // geometry: "CARTESIAN".to_string(), // heat_of_vaporization: None, // heat_transfer_coefficient: None, // heat_transfer_coefficient_back: None, // heat_transfer_model: None, hrrpua: None, // ht3d: false, id: None, // ignition_temperature: 5000_f64, // inner_radius: None, // internal_heat_source: vec![], // layer_divide: None, // leak_path: vec![], // length: None, // mass_flux: None, // mass_flux_total: None, // mass_flux_var: None, // mass_fraction: vec![], // mass_transfer_coefficient: None, // matl_id: vec![], // matl_mass_fraction: vec![], // minimum_layer_thickness: 
1e-6_f64, mlrpua: None, // , N_CELLS_MAX : String // n_layer_cells_max: vec![1000], // net_heat_flux: None, // no_slip: false, // nppc: 1_i64, // particle_mass_flux: None, // part_id: None, // ple: 0.3_f64, // profile: None, // radius: None, // ramp_ef: None, // ramp_mf: vec![], // ramp_part: None, // ramp_q: None, // ramp_t: None, // ramp_t_i: None, // ramp_v: None, // ramp_v_x: None, // ramp_v_y: None, // ramp_v_z: None, // rgb: RGB {r:255_i64, g: 204_i64, b: 102_i64}, // roughness: 0_f64, // spec_id: None, // spread_rate: None, // stretch_factor: 2_f64, // tau_ef: 1_f64, // tau_mf: 1_f64, // tau_part: 1_f64, tau_q: None, // tau_t: 1_f64, // tau_v: 1_f64, // texture_height: 1_f64, // texture_map: None, // texture_width: 1_f64, // tga_analysis: false, // tga_final_temperature: 800_f64, // tga_heating_rate: 5_f64, // thickness: vec![], // tmp_back: 20_f64, // tmp_front: 20_f64, // tmp_inner: vec![], // transparency: 1_f64, // vegetation: None, // vegetation_arrhenius_degrad: None, // vegetation_cdrag: None, // vegetation_char_fraction: None, // vegetation_element_density: None, // vegetation_ground_temp: None, // vegetation_height: None, // vegetation_initial_temp: None, // vegetation_layers: None, // vegetation_linear_degrad: None, // vegetation_load: None, // vegetation_lset_ignite_time: None, // veg_lset_qcon: None, // vegetation_moisture: None, // vegetation_no_burn: None, // vegetation_svratio: None, // veg_level_set_spread: None, // veg_lset_ros_back: None, // veg_lset_ros_flank: None, // veg_lset_ros_head: None, // veg_lset_wind_exp: None, // veg_lset_sigma: None, // veg_lset_ht: None, // veg_lset_beta: None, // veg_lset_ellipse: None, // veg_lset_tan2: None, // veg_lset_ellipse_head: None, vel: None, // vel_bulk: None, // vel_grad: None, // vel_t: None, volume_flow: None, // width: None, // xyz: None, // z0: 10_f64, // , ZETA_FRONT : String // , EXTERNAL_FLUX_RAMP : String // , TAU_EXTERNAL_FLUX : String // , VOLUME_FLUX : String } } } // #[derive(Clone, 
Debug)] // pub struct SurfLayer // // // { surfLayer_THICKNESS : f64 // , surfLayer_Components : [SurfLayerComponent] // } // #[derive(Clone, Debug)] // pub struct SurfLayerComponent // -- { surfLayerComponent_Pos : i64 // -- , surfLayerComponent_ParentLayer : SurfLayer // { surfLayerComponent_MATL : Matl // -- { surfLayerComponent_MATL : String // , surfLayerComponent_MATL_MASS_FRACTION : f64 // } // --surfLayerComponent_Pos = surfLayerComponent_Pos // --surfLayerComponent_ParentLayer = surfLayerComponent_ParentLayer // -- surfLayerComponent_MATL = surfLayerComponent_MATL // -- surfLayerComponent_MATL_MASS_FRACTION = (\a -> a) . surfLayerComponent_MATL_MASS_FRACTION // #[derive(Clone, Debug)] // pub struct SurfBurner // -- { surfLayerComponent_Pos : i64 // -- , surfLayerComponent_ParentLayer : SurfLayer // -- { surfLayerComponent_MATL : Matl // { surfBurner_HRRPUA : f64 // , surfBURNER_TAU_Q : f64 // } // | NoBurner #[derive(Clone, Debug)] pub struct Tabl { fyi: Option<String>, id: String, table_data: Vec<f64>, } #[derive(Clone, Debug)] pub struct Time { // pub dt: Option<f64>, // pub evac_dt_flowfield: f64, // pub evac_dt_steady_state: f64, // pub fyi: Option<String>, // pub limiting_dt_ratio: f64, // pub lock_time_step: bool, // pub restrict_time_step: bool, pub t_begin: Option<f64>, pub t_end: Option<f64>, // pub t_end_geom: f64, // pub time_shrink_factor: f64, // pub wall_increment: i64, // pub wall_increment_ht3d: i64, // pub twfin: f64, } fn decode_time(fds_file: &mut FDSFile, namelist: &Namelist) { let time = Time { t_begin: namelist .parameters .get("T_BEGIN") .cloned() .and_then(|x| x.try_into().ok()), t_end: namelist .parameters .get("T_END") .cloned() .and_then(|x| x.try_into().ok()), }; if fds_file.time.is_some() { panic!("Multiple TIME namelists") } fds_file.time = Some(time); } #[derive(Clone, Debug)] pub struct Trnx { cc: f64, fyi: Option<String>, ideriv: i64, mesh_number: i64, pc: f64, } #[derive(Clone, Debug)] pub struct Trny { cc: f64, fyi: 
Option<String>,
    ideriv: i64,
    mesh_number: i64,
    pc: f64,
}

/// A `TRNZ` namelist: a mesh-coordinate transformation in the z direction.
#[derive(Clone, Debug)]
pub struct Trnz {
    cc: f64,
    fyi: Option<String>,
    ideriv: i64,
    mesh_number: i64,
    pc: f64,
}

/// A `VENT` namelist. Only the parameters used elsewhere in this module are
/// decoded; the rest are kept here, commented out, as a reference for the
/// full FDS parameter set.
#[derive(Clone, Debug)]
pub struct Vent {
    // color: Option<String>,
    // ctrl_id: String,
    // devc_id: String,
    // dynamic_pressure: f64,
    // evacuation: bool,
    // fyi: Option<String>,
    pub id: Option<String>,
    // ior: i64,
    // l_eddy: f64,
    // l_eddy_ij: Vec<i64>,
    // mb: String,
    // mesh_id: String,
    // mult_id: String,
    // n_eddy: i64,
    // outline: bool,
    // pbx: f64,
    // pby: f64,
    // pbz: f64,
    // pressure_ramp: String,
    // radius: f64,
    // reynolds_stress: Vec<f64>,
    // rgb: RGB,
    // spread_rate: f64,
    pub surf_id: Option<String>,
    // texture_origin: Vec<f64>,
    // tmp_exterior: f64,
    // tmp_exterior_ramp: String,
    // transparency: f64,
    // uvw: Vec<f64>,
    // vel_rms: f64,
    // // , WIND : String
    pub xb: Option<XB>,
    // xyz: XYZ,
}

impl Vent {
    /// All surface ids attached to this vent. A `VENT` carries at most one
    /// `SURF_ID`, so the result has zero or one element.
    pub fn surf_ids(&self) -> Vec<String> {
        let mut ss = Vec::with_capacity(1);
        if let Some(s) = &self.surf_id {
            ss.push(s.clone());
        }
        ss
    }

    /// True if `surf_id` names one of the surfaces applied to this vent.
    pub fn has_surf(&self, surf_id: &str) -> bool {
        self.surf_ids().into_iter().any(|id| id == surf_id)
    }

    /// The area of a vent. As a vent must be 2d it only has one area.
    /// Returns `None` when there is no `XB` or the `XB` is not planar
    /// (i.e. no pair of bounds is equal).
    pub fn area(&self) -> Option<f64> {
        let xb = self.xb?;
        if xb.x1 == xb.x2 {
            Some((xb.y2 - xb.y1) * (xb.z2 - xb.z1))
        } else if xb.y1 == xb.y2 {
            Some((xb.x2 - xb.x1) * (xb.z2 - xb.z1))
        } else if xb.z1 == xb.z2 {
            Some((xb.x2 - xb.x1) * (xb.y2 - xb.y1))
        } else {
            None
        }
    }

    /// True if any surface attached to this vent satisfies `pred`.
    /// Surface ids that do not resolve to a SURF in `fds_data` are ignored.
    fn any_surf(&self, fds_data: &FDSFile, pred: impl Fn(&Surf) -> bool) -> bool {
        self.surf_ids().iter().any(|surf_id| {
            fds_data
                .surfs
                .iter()
                .find(|surf| surf.id.as_ref() == Some(surf_id))
                .map(|surf| pred(surf))
                .unwrap_or(false)
        })
    }

    /// True if any surface attached to this vent is a burner.
    pub fn is_burner(&self, fds_data: &FDSFile) -> bool {
        self.any_surf(fds_data, |surf| surf.is_burner())
    }

    /// True if any surface attached to this vent is an extract.
    pub fn is_extract(&self, fds_data: &FDSFile) -> bool {
        self.any_surf(fds_data, |surf| surf.is_extract())
    }

    /// True if any surface attached to this vent is a supply.
    pub fn is_supply(&self, fds_data: &FDSFile) -> bool {
        self.any_surf(fds_data, |surf| surf.is_supply())
    }
}

impl MightHaveXB for Vent {
    fn try_xb(&self) -> Option<XB> {
        self.xb.clone()
    }
}

impl MightHaveXB for &Vent {
    fn try_xb(&self) -> Option<XB> {
        self.xb.clone()
    }
}

impl MightHaveXB for &mut Vent {
    fn try_xb(&self) -> Option<XB> {
        self.xb.clone()
    }
}

/// A `ZONE` namelist: a pressure zone with optional leakage parameters.
#[derive(Clone, Debug)]
pub struct Zone {
    pub id: String,
    pub leak_area: f64,
    pub leak_pressure_exponent: f64,
    pub leak_reference_pressure: f64,
    pub xb: XB,
    pub periodic: bool,
}

/// An axis-aligned plane.
#[derive(Copy, Clone, Debug)]
pub enum Plane {
    X,
    Y,
    Z,
}

/// An axis-aligned direction (orientation along one of the three axes).
#[derive(Copy, Clone, Debug)]
pub enum Direction {
    NegX,
    PosX,
    NegY,
    PosY,
    NegZ,
    PosZ,
}

/// Cell counts of a mesh in each of the three axes (the `IJK` parameter).
#[derive(Copy, Clone, Debug)]
pub struct IJK {
    pub i: i64,
    pub j: i64,
    pub k: i64,
}

/// Errors produced when converting a raw [`ParameterValue`] into a typed
/// value such as [`IJK`], [`XB`] or [`XYZ`].
#[derive(Copy, Clone, Debug)]
pub enum FromParameterValueError {
    ExpectedArray,
    IncorrectArrayLength { expected: usize, found: usize },
}

impl std::fmt::Display for FromParameterValueError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match *self {
            Self::ExpectedArray => write!(f, "Expected an array, but found an atom"),
            Self::IncorrectArrayLength { expected, found } => {
                // The arguments must be passed in (expected, found) order to
                // agree with the wording of the message.
                write!(f, "Expected array length of {} found {}", expected, found)
            }
        }
    }
}

impl std::error::Error for FromParameterValueError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        // Both variants are leaf errors with no underlying cause.
        None
    }
}

impl TryFrom<ParameterValue> for IJK {
    type Error = FromParameterValueError;

    /// An `IJK` is a 1-indexed array of exactly three integer atoms.
    /// Panics if an element is present but not convertible to an integer.
    fn try_from(pv: ParameterValue) -> Result<Self, Self::Error> {
        match pv {
            ParameterValue::Atom(_) => Err(FromParameterValueError::ExpectedArray),
            ParameterValue::Array(vmap) => match vmap.len() {
                3 => Ok(IJK {
                    i: ParameterValue::Atom(vmap.get(&vec![1]).unwrap().clone())
                        .try_into()
                        .unwrap(),
                    j: ParameterValue::Atom(vmap.get(&vec![2]).unwrap().clone())
                        .try_into()
                        .unwrap(),
                    k: ParameterValue::Atom(vmap.get(&vec![3]).unwrap().clone())
                        .try_into()
                        .unwrap(),
                }),
                l => Err(FromParameterValueError::IncorrectArrayLength {
                    expected: 3,
                    found: l,
                }),
            },
        }
    }
}

/// An RGB colour with integer components.
#[derive(Copy, Clone, Debug)]
pub struct RGB {
    pub r: i64,
    pub g: i64,
    pub b: i64,
}

/// An axis-aligned bounding box (the six-value `XB` parameter).
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct XB {
    pub x1: Coord,
    pub x2: Coord,
    pub y1: Coord,
    pub y2: Coord,
    pub z1: Coord,
    pub z2: Coord,
}

impl XB {
    /// Test if two XBs intersect (i.e. their bounding boxes). Two bounding
    /// boxes intersect if all 3 dimensions have overlap. Note that the
    /// comparisons are strict, so boxes that merely touch at a face or edge
    /// (a maximum equal to the other's minimum) do NOT count as
    /// intersecting.
    pub fn intersect(&self, b: &XB) -> bool {
        let intersect_x = (self.x2 > b.x1) && (b.x2 > self.x1);
        let intersect_y = (self.y2 > b.y1) && (b.y2 > self.y1);
        let intersect_z = (self.z2 > b.z1) && (b.z2 > self.z1);
        intersect_x && intersect_y && intersect_z
    }
}

impl TryFrom<ParameterValue> for XB {
    type Error = FromParameterValueError;

    /// An `XB` is a 1-indexed array of exactly six numeric atoms, in the
    /// order x1, x2, y1, y2, z1, z2. Panics if an element is present but
    /// not convertible to a float.
    fn try_from(pv: ParameterValue) -> Result<Self, Self::Error> {
        match pv {
            ParameterValue::Atom(_) => Err(FromParameterValueError::ExpectedArray),
            ParameterValue::Array(vmap) => match vmap.len() {
                6 => Ok(XB {
                    x1: ParameterValue::Atom(vmap.get(&vec![1]).unwrap().clone())
                        .try_into()
                        .expect("x1 failed"),
                    x2: ParameterValue::Atom(vmap.get(&vec![2]).unwrap().clone())
                        .try_into()
                        .expect("x2 failed"),
                    y1: ParameterValue::Atom(vmap.get(&vec![3]).unwrap().clone())
                        .try_into()
                        .expect("y1 failed"),
                    y2: ParameterValue::Atom(vmap.get(&vec![4]).unwrap().clone())
                        .try_into()
                        .expect("y2 failed"),
                    z1: ParameterValue::Atom(vmap.get(&vec![5]).unwrap().clone())
                        .try_into()
                        .expect("z1 failed"),
                    z2: ParameterValue::Atom(vmap.get(&vec![6]).unwrap().clone())
                        .try_into()
                        .expect("z2 failed"),
                }),
                l => Err(FromParameterValueError::IncorrectArrayLength {
                    expected: 6,
                    found: l,
                }),
            },
        }
    }
}

/// A point in 3d space (the three-value `XYZ` parameter).
#[derive(Copy, Clone, Debug)]
pub struct XYZ {
    pub x: Coord,
    pub y: Coord,
    pub z: Coord,
}

impl TryFrom<ParameterValue> for XYZ {
    type Error = FromParameterValueError;

    /// An `XYZ` is a 1-indexed array of exactly three numeric atoms.
    /// Panics if an element is present but not convertible to a float.
    fn try_from(pv: ParameterValue) -> Result<Self, Self::Error> {
        match pv {
            ParameterValue::Atom(_) => Err(FromParameterValueError::ExpectedArray),
            ParameterValue::Array(vmap) => match vmap.len() {
                3 => Ok(XYZ {
                    x: ParameterValue::Atom(vmap.get(&vec![1]).unwrap().clone())
                        .try_into()
                        .unwrap(),
                    y: ParameterValue::Atom(vmap.get(&vec![2]).unwrap().clone())
                        .try_into()
                        .unwrap(),
                    z: ParameterValue::Atom(vmap.get(&vec![3]).unwrap().clone())
                        .try_into()
                        .unwrap(),
                }),
                l => Err(FromParameterValueError::IncorrectArrayLength {
                    expected: 3,
                    found: l,
                }),
            },
        }
    }
}

type Coord = f64;
type GridCoord = i64;

fn
simple_surf(name: String) -> Surf { Surf { id: Some(name), ..Default::default() } } /// Convert a ['NamelistFile'] to an ['FDSFile']. pub fn decode_fds_file(namelist_file: &NamelistFile) -> FDSFile { let inert_surf = simple_surf("INERT".to_string()); let open_surf = simple_surf("OPEN".to_string()); let hvac_surf = simple_surf("HVAC".to_string()); let mut fds_file = FDSFile { surfs: vec![inert_surf, open_surf, hvac_surf], ..Default::default() }; for namelist in namelist_file.namelists.iter() { decode_namelist(&mut fds_file, &namelist); } fds_file } fn decode_namelist(fds_file: &mut FDSFile, namelist: &Namelist) { match namelist.name.as_ref() { "OBST" => decode_obst(fds_file, namelist), "VENT" => decode_vent(fds_file, namelist), "DEVC" => decode_devc(fds_file, namelist), // "PART" => decode_part(fds_file, namelist), "TIME" => decode_time(fds_file, namelist), "PROP" => decode_prop(fds_file, namelist), "SURF" => decode_surf(fds_file, namelist), "MESH" => decode_mesh(fds_file, namelist), // "SLCF" => decode_slcf(fds_file, namelist), "REAC" => decode_reac(fds_file, namelist), // "HVAC" => decode_hvac(fds_file, namelist), // "DUMP" => decode_dump(fds_file, namelist), // "MISC" => decode_misc(fds_file, namelist), "HEAD" => decode_head(fds_file, namelist), _ => decode_unknown(fds_file, namelist), } } fn decode_unknown(fds_file: &mut FDSFile, namelist: &Namelist) { fds_file.unknown_namelists.push(namelist.clone()); } fn decode_obst(fds_file: &mut FDSFile, namelist: &Namelist) { let obst = Obst { // allow_vent: bool, // bndf_face: (bool, bool, bool, bool, bool, bool), // bndf_obst: bool, // bulk_density: Option<f64>, // color: Option<String>, // ctrl_id: Option<String>, // devc_id: Option<String>, // evacuation: bool, // fyi: Option<String>, // ht3d: bool, id: namelist .parameters .get("ID") .cloned() .map(|x| x.try_into().expect("obst.id")), // matl_id: Option<String>, // mesh_id: Option<String>, // mult_id: Option<String>, // // , NOTERRAIN : bool // outline: bool, // 
overlay: bool, // permit_hole: bool, // prop_id: Option<String>, // removable: bool, // rgb: Option<RGB>, surf_id: namelist .parameters .get("SURF_ID") .cloned() .map(|x| x.try_into().expect("surf_id failed")), surf_id6: namelist.parameters.get("SURF_ID6").map(|pv| match pv { ParameterValue::Atom(s) => panic!("expected array"), ParameterValue::Array(vmap) => match vmap.len() { 6 => ( ParameterValue::Atom(vmap.get(&vec![1]).unwrap().clone()) .try_into() .unwrap(), ParameterValue::Atom(vmap.get(&vec![2]).unwrap().clone()) .try_into() .unwrap(), ParameterValue::Atom(vmap.get(&vec![3]).unwrap().clone()) .try_into() .unwrap(), ParameterValue::Atom(vmap.get(&vec![4]).unwrap().clone()) .try_into() .unwrap(), ParameterValue::Atom(vmap.get(&vec![5]).unwrap().clone()) .try_into() .unwrap(), ParameterValue::Atom(vmap.get(&vec![6]).unwrap().clone()) .try_into() .unwrap(), ), l => panic!("expected array of lengh 6, not {}", l), }, }), surf_ids: namelist.parameters.get("SURF_IDS").map(|pv| match pv { ParameterValue::Atom(s) => panic!("expected array"), ParameterValue::Array(vmap) => match vmap.len() { 3 => ( ParameterValue::Atom(vmap.get(&vec![1]).unwrap().clone()) .try_into() .unwrap(), ParameterValue::Atom(vmap.get(&vec![2]).unwrap().clone()) .try_into() .unwrap(), ParameterValue::Atom(vmap.get(&vec![3]).unwrap().clone()) .try_into() .unwrap(), ), l => panic!("expected array of lengh 3, not {}", l), }, }), // texture_origin: XYZ, // thicken: bool, // transparency: f64, xb: namelist .parameters .get("XB") .cloned() .map(|x| x.try_into().expect("obst.xb")) .unwrap(), }; fds_file.obsts.push(obst); } fn decode_vent(fds_file: &mut FDSFile, namelist: &Namelist) { let vent = Vent { // allow_vent: bool, // bndf_face: (bool, bool, bool, bool, bool, bool), // bndf_obst: bool, // bulk_density: Option<f64>, // color: Option<String>, // ctrl_id: Option<String>, // devc_id: Option<String>, // evacuation: bool, // fyi: Option<String>, // ht3d: bool, id: namelist .parameters .get("ID") 
.cloned() .map(|x| x.try_into().expect("vent.id")), // matl_id: Option<String>, // mesh_id: Option<String>, // mult_id: Option<String>, // // , NOTERRAIN : bool // outline: bool, // overlay: bool, // permit_hole: bool, // prop_id: Option<String>, // removable: bool, // rgb: Option<RGB>, surf_id: namelist .parameters .get("SURF_ID") .cloned() .map(|x| x.try_into().expect("devc.surf_id")), // texture_origin: XYZ, // thicken: bool, // transparency: f64, xb: namelist .parameters .get("XB") .cloned() .map(|x| x.try_into().expect("devc.xb")), }; fds_file.vents.push(vent); } fn decode_devc(fds_file: &mut FDSFile, namelist: &Namelist) { let devc = Devc { id: namelist .parameters .get("ID") .cloned() .map(|x| x.try_into().expect("devc.id")), prop_id: namelist .parameters .get("PROP_ID") .cloned() .map(|x| x.try_into().expect("devc.prop_id")), xyz: namelist .parameters .get("XYZ") .cloned() .map(|x| x.try_into().expect("devc.xyz")), }; fds_file.devcs.push(devc); } fn decode_prop(fds_file: &mut FDSFile, namelist: &Namelist) { let prop = Prop { id: namelist .parameters .get("ID") .cloned() .map(|x| x.try_into().expect("prop.id")), activation_obscuration: namelist .parameters .get("ACTIVATION_OBSCURATION") .cloned() .map(|x| x.try_into().expect("prop.activation_obscuration")), activation_temperature: namelist .parameters .get("ACTIVATION_TEMPERATURE") .cloned() .map(|x| x.try_into().expect("prop.activation_obscuration")), flow_rate: namelist .parameters .get("FLOW_RATE") .cloned() .map(|x| x.try_into().expect("prop.activation_obscuration")), part_id: namelist .parameters .get("PART_ID") .cloned() .map(|x| x.try_into().expect("prop.part_id")), quantity: namelist .parameters .get("QUANTITY") .cloned() .map(|x| x.try_into().expect("prop.quantity")), rti: namelist .parameters .get("RTI") .cloned() .map(|x| x.try_into().expect("prop.rti")), }; fds_file.props.push(prop); } fn decode_head(fds_file: &mut FDSFile, namelist: &Namelist) { let head = Head { chid: namelist .parameters 
.get("CHID") .cloned() .map(|x| x.try_into().expect("head.chid")), title: namelist .parameters .get("TITLE") .cloned() .and_then(|x| x.try_into().ok()), fyi: namelist .parameters .get("TITLE") .cloned() .and_then(|x| x.try_into().ok()), }; fds_file.head = Some(head); } fn decode_surf(fds_file: &mut FDSFile, namelist: &Namelist) { let surf = Surf { adiabatic: namelist .parameters .get("ADIABATIC") .cloned() .map(|x| x.try_into().expect("adiabatic")) .unwrap_or(false), auto_ignition_temperature: namelist .parameters .get("AUTO_IGNITION_TEMPERATURE") .cloned() .map(|x| x.try_into().expect("auto_ignition_tmperature")) .unwrap_or(-273_f64), color: namelist .parameters .get("COLOR") .cloned() .map(|x| x.try_into().expect("color:")), fyi: namelist .parameters .get("FYI") .cloned() .map(|x| x.try_into().expect("fyi")), hrrpua: namelist .parameters .get("HRRPUA") .cloned() .map(|x| x.try_into().expect("hrrpua")), id: namelist .parameters .get("ID") .cloned() .map(|x| x.try_into().expect("id")), mlrpua: namelist .parameters .get("MLRPUA") .cloned() .map(|x| x.try_into().expect("mlrpua")), vel: namelist .parameters .get("VEL") .cloned() .map(|x| x.try_into().expect("vel")), volume_flow: namelist .parameters .get("VOLUME_FLOW") .cloned() .map(|x| x.try_into().expect("volume_flow")), tau_q: namelist .parameters .get("TAU_Q") .cloned() .map(|x| x.try_into().expect("tau_q")), }; fds_file.surfs.push(surf); } // decodeVent : FDSFile -> Namelist -> FDSFile // decodeVent fdsData nml = // let vent = Vent // { COLOR = parToString <$> getParameterMaybe nml "COLOR" // // , CTRL_ID : String // // , DEVC_ID : String // // , DYNAMIC_PRESSURE : f64 // // , EVACUATION : bool // // , FYI : String // , ID = parToString <$> getParameterMaybe nml "ID" // // , IOR : i64 // // , L_EDDY : f64 // // , L_EDDY_IJ : Vec<i64> // // , MB : String // // , MESH_ID : String // // , MULT_ID : String // // , N_EDDY : i64 // // , OUTLINE : bool // // , PBX : f64 // // , PBY : f64 // // , PBZ : f64 // // , 
PRESSURE_RAMP : String // // , RADIUS : f64 // // , REYNOLDS_STRESS : Vec<f64> // // , RGB : RGB // // , SPREAD_RATE : f64 // , SURF_ID = parToString <$> getParameterMaybe nml "SURF_ID" // // , TEXTURE_ORIGIN : Vec<f64> // // , TMP_EXTERIOR : f64 // // , TMP_EXTERIOR_RAMP : String // // , TRANSPARENCY : f64 // // , UVW : Vec<f64> // // , VEL_RMS : f64 // // -- , WIND : String // , XB = parToXB <$> getParameterMaybe nml "XB" // // , XYZ : XYZ // } // in fdsData { Vents = vent:(Vents fdsData)} // parToList : (ParameterValue -> b) -> ParameterValue -> [b] // parToList tranform (ParArray arr) = fmap tranform $ M.elems arr // parToList _ xs = error $ show xs <> " is not an array" // parToRGB : ParameterValue -> RGB // parToRGB (ParArray arr) = case M.elems arr of // [a, b, c] -> RGB (parToInt a) (parToInt b) (parToInt c) // xs -> error $ show xs <> " is an invalid RGB array" // parToRGB xs = error $ show xs <> " is not an array" // parToXYZ : ParameterValue -> XYZ // parToXYZ par = // let (x,y,z) = parTo3 parToDouble par // in XYZ x y z // parToIJK : ParameterValue -> IJK // parToIJK par = // let (i,j,k) = parTo3 parToInt par // in IJK i j k // parToXB : ParameterValue -> XB // parToXB par = // let (x1,x2,y1,y2,z1,z2) = parTo6 parToDouble par // in XB x1 x2 y1 y2 z1 z2 // parTo3String : ParameterValue -> (String, String, String) // parTo3String (ParArray arr) = // let [a,b,c] = M.elems arr // in (parToString a, parToString b, parToString c) // parTo3String xs = error $ show xs <> " is not an array" // parTo2 : (ParameterValue -> b) -> ParameterValue -> (b, b) // parTo2 tranform (ParArray arr) = // let [a, b] = fmap tranform $ M.elems arr // in (a, b) // parTo2 _ xs = error $ show xs <> " is not an array" // parTo3 : (ParameterValue -> c) -> ParameterValue -> (c, c, c) // parTo3 tranform (ParArray arr) = // let [a, b, c] = fmap tranform $ M.elems arr // in (a, b, c) // parTo3 _ xs = error $ show xs <> " is not an array" // parTo6 : (ParameterValue -> f) // -> 
ParameterValue -> (f, f, f, f, f, f) // parTo6 tranform (ParArray arr) = case fmap tranform $ M.elems arr of // [a,b,c,d,e,f] -> (a, b, c, d, e, f) // xs -> error $ "incorrect number of elements in array" // parTo6 _ xs = error $ show xs <> " is not an array" // parTo6String : ParameterValue // -> (String, String, String, String, String, String) // parTo6String = parTo6 parToString // decodeDevc : FDSFile -> Namelist -> FDSFile // decodeDevc fdsData nml = // let // devcId = parToString <$> getParameterMaybe nml "ID" // devc = Devc // { BYPASS_FLOWRATE = fromMaybe 0 $ parToDouble <$> getParameterMaybe nml "BYPASS_FLOWRATE" // , CONVERSION_ADDEND = fromMaybe 0 $ parToDouble <$> getParameterMaybe nml "CONVERSION_ADDEND" // , CONVERSION_FACTOR = fromMaybe 1 $ parToDouble <$> getParameterMaybe nml "CONVERSION_FACTOR" // , COORD_FACTOR = fromMaybe 1 $ parToDouble <$> getParameterMaybe nml "COORD_FACTOR" // , CTRL_ID = Nothing // , DELAY = fromMaybe 0 $ parToDouble <$> getParameterMaybe nml "DELAY" // , DEPTH = fromMaybe 0 $ parToDouble <$> getParameterMaybe nml "DEPTH" // , DEVC_ID = parToString <$> getParameterMaybe nml "DEVC_ID" // , DRY = fromMaybe False $ parToBool <$> getParameterMaybe nml "DRY" // , DUCT_ID = parToString <$> getParameterMaybe nml "DUCT_ID" // , EVACUATION = fromMaybe False $ parToBool <$> getParameterMaybe nml "EVACUATION" // , FLOWRATE = fromMaybe 0 $ parToDouble <$> getParameterMaybe nml "FLOWRATE" // , FYI = parToString <$> getParameterMaybe nml "FYI" // , HIDE_COORDINATES = fromMaybe False $ parToBool <$> getParameterMaybe nml "HIDE_COORDINATES" // , ID = parToString <$> getParameterMaybe nml "ID" // , INITIAL_STATE = fromMaybe False $ parToBool <$> getParameterMaybe nml "INITIAL_STATE" // , INIT_ID = Nothing // , IOR = Nothing // , LATCH = True // , MATL_ID = Nothing // , NODE_ID = [] // , NO_UPDATE_DEVC_ID = Nothing // , NO_UPDATE_CTRL_ID = Nothing // , ORIENTATION = fromMaybe (XYZ 0 0 1) $ parToXYZ <$> getParameterMaybe nml "ORIENTATION" // 
, ORIENTATION_NUMBER = fromMaybe 1 $ parToInt <$> getParameterMaybe nml "ORIENTATION_NUMBER" // , OUTPUT = fromMaybe True $ parToBool <$> getParameterMaybe nml "OUTPUT" // , PART_ID = parToString <$> getParameterMaybe nml "PART_ID" // , PIPE_INDEX = fromMaybe 1 $ parToInt <$> getParameterMaybe nml "PIPE_INDEX" // , POINTS = fromMaybe 1 $ parToInt <$> getParameterMaybe nml "POINTS" // , PROP_ID = parToString <$> getParameterMaybe nml "PROP_ID" // , QUANTITY = parToString <$> getParameterMaybe nml "QUANTITY" // , QUANTITY2 = parToString <$> getParameterMaybe nml "QUANTITY2" // , QUANTITY_RANGE = fromMaybe (-1e50,1e50) $ parTo2 parToDouble <$> getParameterMaybe nml "QUANTITY_RANGE" // , R_ID = parToString <$> getParameterMaybe nml "R_ID" // , REAC_ID = parToString <$> getParameterMaybe nml "REAC_ID" // , RELATIVE = fromMaybe False $ parToBool <$> getParameterMaybe nml "RELATIVE" // , ROTATION = fromMaybe 0 $ parToDouble <$> getParameterMaybe nml "ROTATION" // , SETPOINT = parToDouble <$> getParameterMaybe nml "SETPOINT" // , SMOOTHING_FACTOR = fromMaybe 0 $ parToDouble <$> getParameterMaybe nml "SMOOTHING_FACTOR" // , SPEC_ID = parToString <$> getParameterMaybe nml "SPEC_ID" // , STATISTICS = parToString <$> getParameterMaybe nml "STATISTICS" // , STATISTICS_START = fromMaybe 0 $ parToDouble <$> getParameterMaybe nml "STATISTICS_START" -- todo: T_BEGIN is the default, how do we get that? 
// , SURF_ID = parToString <$> getParameterMaybe nml "SURF_ID" // , TIME_AVERAGED = fromMaybe True $ parToBool <$> getParameterMaybe nml "TIME_AVERAGED" // , TIME_HISTORY = fromMaybe False $ parToBool <$> getParameterMaybe nml "TIME_HISTORY" // , TRIP_DIRECTION = fromMaybe 1 $ parToInt <$> getParameterMaybe nml "TRIP_DIRECTION" // , UNITS = parToString <$> getParameterMaybe nml "UNITS" // , VELO_INDEX = fromMaybe 0 $ parToInt <$> getParameterMaybe nml "VELO_INDEX" // , XB = parToXB <$> getParameterMaybe nml "XB" // , XYZ = parToXYZ <$> getParameterMaybe nml "XYZ" // , X_ID = (\n->n<>"-x") <$> devcId // , Y_ID = (\n->n<>"-y") <$> devcId // , Z_ID = (\n->n<>"-z") <$> devcId // , XYZ_UNITS = "m" // } // in fdsData { Devcs = devc:(Devcs fdsData)} fn decode_mesh(fds_file: &mut FDSFile, namelist: &Namelist) { let mesh = Mesh { id: namelist .parameters .get("ID") .cloned() .map(|x| x.try_into().expect("mesh.id")), ijk: namelist .parameters .get("IJK") .cloned() .map(|x| x.try_into().expect("mesh.ijk")) .unwrap(), xb: namelist .parameters .get("XB") .cloned() .map(|x| x.try_into().expect("mesh.xb")) .unwrap(), }; fds_file.meshes.push(mesh); } // decodeMesh : FDSFile -> Namelist -> FDSFile // decodeMesh fdsData nml = // let // // devcId = parToString <$> getParameterMaybe nml "ID" // mesh = Mesh // { ID = parToString <$> getParameterMaybe nml "ID" // , XB = fromMaybe (XB 0 1 0 1 0 1) $ parToXB <$> getParameterMaybe nml "XB" // , IJK = fromMaybe (IJK 10 10 10) $ parToIJK <$> getParameterMaybe nml "IJK" // , COLOR = fromMaybe "BLACK" $ parToString <$> getParameterMaybe nml "COLOR" // , CYLINDRICAL = fromMaybe False $ parToBool <$> getParameterMaybe nml "CYLINDRICAL" // , EVACUATION = fromMaybe False $ parToBool <$> getParameterMaybe nml "EVACUATION" // , EVAC_HUMANS = fromMaybe False $ parToBool <$> getParameterMaybe nml "EVAC_HUMANS" // , EVAC_Z_OFFSET = fromMaybe 1 $ parToDouble <$> getParameterMaybe nml "EVAC_Z_OFFSET" // , FYI = parToString <$> getParameterMaybe nml "FYI" 
// , LEVEL = fromMaybe 1 $ parToInt <$> getParameterMaybe nml "LEVEL" // , MPI_PROCESS = parToInt <$> getParameterMaybe nml "LEVEL" // , MULT_ID = parToString <$> getParameterMaybe nml "FYI" // , RGB = fromMaybe (RGB 0 0 0) $ parToRGB <$> getParameterMaybe nml "RGB" // , N_THREADS = parToInt <$> getParameterMaybe nml "N_THREADS" // // , PERIODIC_MESH_IDS : [Text] // } // in fdsData { Meshes = mesh:(Meshes fdsData)} // decodePart : FDSFile -> Namelist -> FDSFile // decodePart fdsData nml = // let // part = Part // { AGE = fromMaybe 1e5 $ parToDouble <$> getParameterMaybe nml "AGE" // , BREAKUP = fromMaybe False $ parToBool <$> getParameterMaybe nml "BREAKUP" // , BREAKUP_CNF_RAMP_ID = parToString <$> getParameterMaybe nml "BREAKUP_CNF_RAMP_ID" // , BREAKUP_DISTRIBUTION = fromMaybe "ROSIN.." $ parToString <$> getParameterMaybe nml "BREAKUP_CNF_RAMP_ID" // , BREAKUP_GAMMA_D = fromMaybe 2.4 $ parToDouble <$> getParameterMaybe nml "BREAKUP_GAMMA_D" // , BREAKUP_RATIO = fromMaybe (3/7) $ parToDouble <$> getParameterMaybe nml "BREAKUP_RATIO" // , BREAKUP_SIGMA_D = parToDouble <$> getParameterMaybe nml "BREAKUP_SIGMA_D" // , CHECK_DISTRIBUTION = fromMaybe False $ parToBool <$> getParameterMaybe nml "CHECK_DISTRIBUTION" // , CNF_RAMP_ID = parToString <$> getParameterMaybe nml "CNF_RAMP_ID" // , COLOR = fromMaybe "BLACK" $ parToString <$> getParameterMaybe nml "COLOR" // , COMPLEX_REFRACTIVE_INDEX = fromMaybe 0.01 $ parToDouble <$> getParameterMaybe nml "COMPLEX_REFRACTIVE_INDEX" // , CTRL_ID = parToString <$> getParameterMaybe nml "CTRL_ID" // , DENSE_VOLUME_FRACTION = fromMaybe 1e-5 $ parToDouble <$> getParameterMaybe nml "DENSE_VOLUME_FRACCTION" // , DEVC_ID = parToString <$> getParameterMaybe nml "DEVC_ID" // , DIAMETER = parToDouble <$> getParameterMaybe nml "DIAMETER" // , DISTRIBUTION = fromMaybe "ROSIN..." 
$ parToString <$> getParameterMaybe nml "DISTRIBUTION" // , DRAG_COEFFICIENT = fromMaybe [] $ parToList parToDouble <$> getParameterMaybe nml "DRAG_COEFFICIENT" // , DRAG_LAW = fromMaybe "SPHERE" $ parToString <$> getParameterMaybe nml "DRAG_LAW" // , FREE_AREA_FRACTION = parToDouble <$> getParameterMaybe nml "FREE_AREA_FRACTION" // , FYI = parToString <$> getParameterMaybe nml "FYI" // , GAMMA_D = fromMaybe 2.4 $ parToDouble <$> getParameterMaybe nml "GAMMA_D" // , HEAT_OF_COMBUSTION = parToDouble <$> getParameterMaybe nml "HEAT_OF_COMBUSTION" // , HORIZONTAL_VELOCITY = fromMaybe 0.2 $ parToDouble <$> getParameterMaybe nml "HORIZONTAL_VELOCITY" // , ID = parToString <$> getParameterMaybe nml "ID" // // , INITIAL_TEMPERATURE : f64 -- TMPA // , MASSLESS = fromMaybe False $ parToBool <$> getParameterMaybe nml "MASSLESS" // , MAXIMUM_DIAMETER = fromMaybe (1/0 {- Infinity -}) $ parToDouble <$> getParameterMaybe nml "MAXIMUM_DIAMETER" // , MINIMUM_DIAMETER = fromMaybe 20 $ parToDouble <$> getParameterMaybe nml "MINIMUM_DIAMETER" // , MONODISPERSE = fromMaybe False $ parToBool <$> getParameterMaybe nml "MONODISPERSE" // , N_STRATA = fromMaybe 6 $ parToInt <$> getParameterMaybe nml "N_STRATA" // // , ORIENTATION : Vec<f64> // // , PERMEABILITY : Vec<f64> // , PERIODIC_X = fromMaybe False $ parToBool <$> getParameterMaybe nml "PERIODIC_X" // , PERIODIC_Y = fromMaybe False $ parToBool <$> getParameterMaybe nml "PERIODIC_Y" // , PERIODIC_Z = fromMaybe False $ parToBool <$> getParameterMaybe nml "PERIODIC_Z" // , POROUS_VOLUME_FRACTION = parToDouble <$> getParameterMaybe nml "POROUS_VOLUME_FRACTION" // , PROP_ID = parToString <$> getParameterMaybe nml "PROP_ID" // // , QUANTITIES : Vec<String> // // , QUANTITIES_SPEC_ID : Vec<String> // , RADIATIVE_PROPERTY_TABLE = parToDouble <$> getParameterMaybe nml "RADIATIVE_PROPERTY_TABLE" // , REAL_REFRACTIVE_INDEX = fromMaybe 1.33 $ parToDouble <$> getParameterMaybe nml "REAL_REFRACTIVE_INDEX" // , RGB = parToRGB <$> getParameterMaybe 
nml "RGB" // , RUNNING_AVERAGE_FACTOR = fromMaybe 0.5 $ parToDouble <$> getParameterMaybe nml "RUNNING_AVERAGE_FACTOR" // , SAMPLING_FACTOR = fromMaybe 1 $ parToInt <$> getParameterMaybe nml "SAMPLING_FACTOR" // , SECOND_ORDER_PARTICLE_TRANSPORT = fromMaybe False $ parToBool <$> getParameterMaybe nml "SECOND_ORDER_PARTICLE_TRANSPORT" // , SIGMA_D = parToDouble <$> getParameterMaybe nml "SIGMA_D" // , SPEC_ID = parToString <$> getParameterMaybe nml "SPEC_ID" // , STATIC = fromMaybe False $ parToBool <$> getParameterMaybe nml "STATIC" // , SURFACE_TENSION = fromMaybe 7.28e-2 $ parToDouble <$> getParameterMaybe nml "SURFACE_TENSION" // , SURF_ID = parToString <$> getParameterMaybe nml "SURF_ID" // // , TARGET_ONLY : bool // , TURBULENT_DISPERSION = fromMaybe False $ parToBool <$> getParameterMaybe nml "AGE" // , VERTICAL_VELOCITY = fromMaybe 0.5 $ parToDouble <$> getParameterMaybe nml "VERTICAL_VELOCITY" // } // in fdsData { Parts = part:(Parts fdsData)} // decodeSurfInto : FDSFile -> Namelist -> FDSFile // decodeSurfInto fdsData nml = // let surf = decodeSurf nml // in fdsData { Surfs = surf:(Surfs fdsData)} // decodeSurf : Namelist -> Surf // decodeSurf nml = Surf // { ADIABATIC = fromMaybe False $ parToBool <$> getParameterMaybe nml "ADIABATIC" // , AUTO_IGNITION_TEMPERATURE = fromMaybe (-273) $ parToDouble <$> getParameterMaybe nml "AUTO_IGNITION_TEMPERATURE" // , BACKING = fromMaybe "EXPOSED" $ parToString <$> getParameterMaybe nml "BACKING" // , BURN_AWAY = fromMaybe False $ parToBool <$> getParameterMaybe nml "BACKING" // , CELL_SIZE_FACTOR = fromMaybe 1.0 $ parToDouble <$> getParameterMaybe nml "CELL_SIZE_FACTOR" // , C_FORCED_CONSTANT = fromMaybe 0.0 $ parToDouble <$> getParameterMaybe nml "C_FORCED_CONSTANT" // , C_FORCED_PR_EXP = fromMaybe 0.0 $ parToDouble <$> getParameterMaybe nml "C_FORCED_PR_EXP" // , C_FORCED_RE = fromMaybe 0.0 $ parToDouble <$> getParameterMaybe nml "C_FORCED_RE" // , C_FORCED_RE_EXP = fromMaybe 0.0 $ parToDouble <$> getParameterMaybe 
nml "C_FORCED_RE_EXP" // , C_HORIZONTAL = fromMaybe 1.52 $ parToDouble <$> getParameterMaybe nml "C_HORIZONTAL" // , C_VERTICAL = fromMaybe 1.31 $ parToDouble <$> getParameterMaybe nml "C_VERTICAL" // , COLOR = parToString <$> getParameterMaybe nml "COLOR" // // , CONVECTION_LENGTH_SCALE : f64 // // , CONVECTIVE_HEAT_FLUX : f64 // // , CONVERT_VOLUME_TO_MASS : bool // // , DEFAULT : bool // // , DT_INSERT : f64 // // , EMISSIVITY : f64 // // , EMISSIVITY_BACK : f64 // // , EVAC_DEFAULT : bool // // , EXTERNAL_FLUX : f64 // // , E_COEFFICIENT : f64 // // , FIRELINE_MLR_MAX : f64 // // , FREE_SLIP : bool // // , FYI : String // // , GEOMETRY : String // // , HEAT_OF_VAPORIZATION : f64 // // , HEAT_TRANSFER_COEFFICIENT : f64 // // , HEAT_TRANSFER_COEFFICIENT_BACK : f64 // // , HEAT_TRANSFER_MODEL : String // , HRRPUA = parToDouble <$> getParameterMaybe nml "HRRPUA" // // , HT3D : bool // , ID = parToString <$> getParameterMaybe nml "ID" // // , IGNITION_TEMPERATURE : f64 // // , INNER_RADIUS : f64 // // , INTERNAL_HEAT_SOURCE : Vec<f64> // // , LAYER_DIVIDE : f64 // // , LEAK_PATH : Vec<i64> // // , LENGTH : f64 // , MASS_FLUX = Nothing -- parToDouble <$> getParameterMaybe nml "MASS_FLUX" // , MASS_FLUX_TOTAL = parToDouble <$> getParameterMaybe nml "MASS_FLUX_TOTAL" // , MASS_FLUX_VAR = parToDouble <$> getParameterMaybe nml "MASS_FLUX_VAR" // // , MASS_FRACTION : Vec<f64> // // , MASS_TRANSFER_COEFFICIENT : f64 // // , MATL_ID : Vec<String> // // , MATL_MASS_FRACTION : Vec<f64> // // , MINIMUM_LAYER_THICKNESS : f64 // , MLRPUA = parToDouble <$> getParameterMaybe nml "MLRPUA" // // , N_LAYER_CELLS_MAX : Vec<i64> // // , NET_HEAT_FLUX : f64 // // , NO_SLIP : bool // // , NPPC : i64 // // , PARTICLE_MASS_FLUX : f64 // // , PART_ID : String // // , PLE : f64 // // , PROFILE : String // // , RADIUS : f64 // // , RAMP_EF : String // // , RAMP_MF : Vec<String> // // , RAMP_PART : String // , RAMP_Q = parToString <$> getParameterMaybe nml "RAMP_Q" // , RAMP_T = parToString 
<$> getParameterMaybe nml "RAMP_T" // // , RAMP_T_I : Option<String> // // , RAMP_V : Option<String> // // , RAMP_V_X : Option<String> // // , RAMP_V_Y : Option<String> // // , RAMP_V_Z : Option<String> // // , RGB : RGB // // , ROUGHNESS : f64 // // , SPEC_ID : String // // , SPREAD_RATE : f64 // // , STRETCH_FACTOR : Vec<f64> // // , TAU_EF : f64 // // , TAU_MF : f64 // // , TAU_PART : f64 // , TAU_Q = fromMaybe 1 $ parToDouble <$> getParameterMaybe nml "TAU_Q" // // , TAU_T : f64 // // , TAU_V : f64 // // , TEXTURE_HEIGHT : f64 // // , TEXTURE_MAP : String // // , TEXTURE_WIDTH : f64 // // , TGA_ANALYSIS : bool // // , TGA_FINAL_TEMPERATURE : f64 // // , TGA_HEATING_RATE : f64 // // , THICKNESS : Vec<f64> // // , TMP_BACK : f64 // // , TMP_FRONT : f64 // // , TMP_INNER : Vec<f64> // // , TRANSPARENCY : f64 // // , VEGETATION : bool // // , VEGETATION_ARRHENIUS_DEGRAD : bool // // , VEGETATION_CDRAG : f64 // // , VEGETATION_CHAR_FRACTION : f64 // // , VEGETATION_ELEMENT_DENSITY : i64 // // , VEGETATION_GROUND_TEMP : f64 // // , VEGETATION_HEIGHT : f64 // // , VEGETATION_INITIAL_TEMP : f64 // // , VEGETATION_LAYERS : i64 // // , VEGETATION_LINEAR_DEGRAD : bool // // , VEGETATION_LOAD : f64 // // , VEGETATION_LSET_IGNITE_TIME : f64 // // , VEG_LSET_QCON : f64 // // , VEGETATION_MOISTURE : f64 // // , VEGETATION_NO_BURN : bool // // , VEGETATION_SVRATIO : i64 // // , VEG_LEVEL_SET_SPREAD : bool // // , VEG_LSET_ROS_BACK : f64 // // , VEG_LSET_ROS_FLANK : f64 // // , VEG_LSET_ROS_HEAD : f64 // // , VEG_LSET_WIND_EXP : f64 // // , VEG_LSET_SIGMA : f64 // // , VEG_LSET_HT : f64 // // , VEG_LSET_BETA : f64 // // , VEG_LSET_ELLIPSE : f64 // // , VEG_LSET_TAN2 : bool // // , VEG_LSET_ELLIPSE_HEAD : f64 // , VEL = parToDouble <$> getParameterMaybe nml "VEL" // // , VEL_BULK : f64 // // , VEL_GRAD : f64 // , VEL_T = parTo2 parToDouble <$> getParameterMaybe nml "VEL_T" // , VOLUME_FLOW = parToDouble <$> getParameterMaybe nml "VOLUME_FLOW" // // , WIDTH : f64 // // , XYZ : 
XYZ // // , Z0 : f64 // } // decodeProp : FDSFile -> Namelist -> FDSFile // decodeProp fdsData nml = // let // prop = Prop // { ACTIVATION_OBSCURATION = fromMaybe 3.24 $ parToDouble <$> getParameterMaybe nml "ACTIVATION_OBSCURATION" // , ACTIVATION_TEMPERATURE = fromMaybe 74 $ parToDouble <$> getParameterMaybe nml "ACTIVATION_TEMPERATURE" // // , ALPHA_C : f64 // // , ALPHA_E : f64 // // , BEAD_DENSITY : f64 // // , BEAD_DIAMETER : f64 // // , BEAD_EMISSIVITY : f64 // // , BEAD_HEAT_TRANSFER_COEFFICIENT : f64 // // , BEAD_SPECIFIC_HEAT : f64 // // , BETA_C : f64 // // , BETA_E : f64 // // -- , FED_ACTIVITY : String // // , CHARACTERISTIC_VELOCITY : f64 // // , C_FACTOR : f64 // // , DENSITY : f64 // // , DIAMETER : f64 // // , DROPLET_VELOCITY : f64 // // , EMISSIVITY : f64 // // , FLOW_RAMP : String // // , FLOW_RATE : f64 // // , FLOW_TAU : f64 // // , FYI : String // // , GAUGE_EMISSIVITY : f64 // // , GAUGE_TEMPERATURE : f64 // // , HEAT_TRANSFER_COEFFICIENT : f64 // , ID = parToString <$> getParameterMaybe nml "ID" // // , INITIAL_TEMPERATURE : f64 // // , K_FACTOR : f64 // // , LENGTH : f64 // // , MASS_FLOW_RATE : f64 // // , OFFSET : f64 // // , OPERATING_PRESSURE : f64 // // , ORIFICE_DIAMETER : f64 // // , P0 : String // // , PARTICLES_PER_SECOND : i64 // // , PARTICLE_VELOCITY : f64 // // , PART_ID : String // // , PDPA_END : f64 // // , PDPA_HISTOGRAM : bool // // , PDPA_HISTOGRAM_LIMITS : Vec<f64> // // , PDPA_HISTOGRAM_NBINS : i64 // // , PDPA_HISTOGRAM_CUMULATIVE : bool // // , PDPA_INTEGRATE : bool // // , PDPA_M : i64 // // , PDPA_N : i64 // // , PDPA_NORMALIZE : bool // // , PDPA_RADIUS : f64 // // , PDPA_START : f64 // // , PRESSURE_RAMP : String // // -- , PX : String // // -- , PXX : String // , QUANTITY = parToString <$> getParameterMaybe nml "QUANTITY" // // , RTI : f64 // // , SMOKEVIEW_ID : Vec<String> // // , SMOKEVIEW_PARAMETERS : Vec<String> // // , SPEC_ID : String // // , SPRAY_ANGLE : Vec<f64> // // , SPRAY_PATTERN_BETA : f64 // // , 
SPRAY_PATTERN_MU : f64 // // , SPRAY_PATTERN_SHAPE : String // // , SPRAY_PATTERN_TABLE : String // // , VELOCITY_COMPONENT : i64 // // -- , DROPLET_VELOCITY : String // } // in fdsData { Props = prop:(Props fdsData)} // decodeSlcf : FDSFile -> Namelist -> FDSFile // decodeSlcf fdsData nml = // let // slcf = Slcf // { CELL_CENTERED = fromMaybe False $ parToBool <$> getParameterMaybe nml "CELL_CENTERED" // , EVACUATION = fromMaybe False $ parToBool <$> getParameterMaybe nml "EVACUATION" // // -- , FACE_CENTERED : String // // -- , FIRE_LINE : String // // , FYI : String // , ID = parToString <$> getParameterMaybe nml "ID" // // , IOR : i64 // // , LEVEL_SET_FIRE_LINE : String // // , MAXIMUM_VALUE : f64 // // , MESH_NUMBER : i64 // // , MINIMUM_VALUE : f64 // // , PART_ID : String // , PBX = parToDouble <$> getParameterMaybe nml "PBX" // , PBY = parToDouble <$> getParameterMaybe nml "PBY" // , PBZ = parToDouble <$> getParameterMaybe nml "PBZ" // // -- , PROP_ID : String // , QUANTITY = parToString <$> getParameterMaybe nml "QUANTITY" // , QUANTITY2 = parToString <$> getParameterMaybe nml "QUANTITY2" // // , REAC_ID : String // // -- , SLICETYPE : String // , SPEC_ID = parToString <$> getParameterMaybe nml "SPEC_ID" // // , VECTOR : bool // // , VELO_INDEX : i64 // // , XB : XB // } // in fdsData { Slcfs = slcf:(Slcfs fdsData)} // decodeHvac : FDSFile -> Namelist -> FDSFile // decodeHvac fdsData nml = // let // hvac = Hvac // { AIRCOIL_ID = parToString <$> getParameterMaybe nml "AIRCOIL_ID" // // , AMBIENT : bool // // , AREA : f64 // // , CLEAN_LOSS : f64 // // , COOLANT_SPECIFIC_HEAT : f64 // // , COOLANT_MASS_FLOW : f64 // // , COOLANT_TEMPERATURE : f64 // // , CTRL_ID : String // // , DAMPER : bool // // , DEVC_ID : String // // , DIAMETER : f64 // // , DUCT_ID : Vec<String> // // , DUCT_INTERP_TYPE : String // // , EFFICIENCY : Vec<f64> // // , FAN_ID : String // // , FILTER_ID : String // // , FIXED_Q : Vec<f64> // // , ID : String // // , LEAK_ENTHALPY : bool 
// // , LENGTH : f64 // // , LOADING : Vec<f64> // // , LOADING_MULTIPLIER : Vec<f64> // // , LOSS : Vec<f64> // // , MASS_FLOW : f64 // // , MAX_FLOW : f64 // // , MAX_PRESSURE : f64 // // , N_CELLS : i64 // // , NODE_ID : Vec<String> // // , PERIMETER : f64 // // , RAMP_ID : String // // , RAMP_LOSS : String // // , REVERSE : bool // // , ROUGHNESS : f64 // // , SPEC_ID : String // // , TAU_AC : f64 // // , TAU_FAN : f64 // // , TAU_VF : f64 // // , TYPE_ID : String // , VENT_ID = parToString <$> getParameterMaybe nml "VENT_ID" // , VENT2_ID = parToString <$> getParameterMaybe nml "VENT2_ID" // // , VOLUME_FLOW : f64 // // , XYZ : XYZ // } // in fdsData { Hvacs = hvac:(Hvacs fdsData)} fn decode_reac(fds_file: &mut FDSFile, namelist: &Namelist) { let reac = Reac { // adiabatic: namelist // .parameters // .get("ADIABATIC") // .cloned() // .map(|x| x.try_into().expect("adiabatic")) // .unwrap_or(false), soot_yield: namelist .parameters .get("SOOT_YIELD") .cloned() .map(|x| x.try_into().expect("soot_yield")), co_yield: namelist .parameters .get("CO_YIELD") .cloned() .map(|x| x.try_into().expect("co_yield")), // color: namelist // .parameters // .get("COLOR") // .cloned() // .map(|x| x.try_into().expect("color:")), // fyi: namelist // .parameters // .get("FYI") // .cloned() // .map(|x| x.try_into().expect("fyi")), // hrrpua: namelist // .parameters // .get("HRRPUA") // .cloned() // .map(|x| x.try_into().expect("hrrpua")), // id: namelist // .parameters // .get("ID") // .cloned() // .map(|x| x.try_into().expect("id")), // mlrpua: namelist // .parameters // .get("MLRPUA") // .cloned() // .map(|x| x.try_into().expect("mlrpua")), }; fds_file.reacs.push(reac); } // decodeMisc : FDSFile -> Namelist -> FDSFile // decodeMisc fdsData nml = // let // misc = Misc // { ALLOW_SURFACE_PARTICLES = fromMaybe False $ parToBool <$> getParameterMaybe nml "ALLOW_SURFACE_PARTICLES" // // , ALLOW_UNDERSIDE_PARTICLES : bool // // , ASSUMED_GAS_TEMPERATURE : f64 // // , 
ASSUMED_GAS_TEMPERATURE_RAMP : String // // , BAROCLINIC : bool // // , BNDF_DEFAULT : bool // // , CC_IBM : bool // // , CNF_CUTOFF : f64 // // , CFL_MAX : f64 // // , CFL_MIN : f64 // // , CFL_VELOCITY_NORM : i64 // // , CHECK_HT : bool // // , CHECK_REALIZABILITY : bool // // , CHECK_VN : bool // // , CLIP_MASS_FRACTION : bool // // , COMPUTE_VISCOSITY_TWICE : bool // // , COMPUTE_ZETA_SOURCE_TERM : bool // // , CONSTANT_H_SOLID : bool // // , CONSTANT_SPECIFIC_HEAT_RATIO : bool // // , CORIOLIS_VECTOR : Vec<f64> // // , CORRECT_SUBGRID_TEMPERATURE : bool // // , COUPLED_1D3D_HEAT_TRANSFER : bool // // , C_DEARDORFF : f64 // // , C_RNG : f64 // // , C_RNG_CUTOFF : f64 // // , C_SMAGORINSKY : f64 // // , C_VREMAN : f64 // // , DNS : bool // // , DRAG_CFL_MAX : f64 // // , DT_MEAN_FORCING : f64 // // , ENTHALPY_TRANSPORT : bool // // , EVACUATION_DRILL : bool // // , EVACUATION_MC_MODE : bool // // , EVAC_PRESSURE_ITERATIONS : i64 // // , EVAC_SURF_DEFAULT : String // // , EVAC_TIME_ITERATIONS : i64 // // , EVAPORATION : bool // // -- , EXCHANGE_EDGES : String // // , EXTERNAL_BOUNDARY_CORRECTION : bool // // , EXTINCTION_MODEL : String // // , HVAC_PRES_RELAX : f64 // // , HT3D_TEST : i64 // // , FDS5_OPTIONS : bool // // , FLUX_LIMITER : i64 // // , FORCE_VECTOR : Vec<f64> // // , FREEZE_VELOCITY : bool // // , FYI : String // // , GAMMA : f64 // // , GRAVITATIONAL_DEPOSITION : bool // // , GRAVITATIONAL_SETTLING : bool // // , GROUND_LEVEL : f64 // // , GVEC : Vec<f64> // // , DT_HVAC : f64 // // , H_F_REFERENCE_TEMPERATURE : f64 // // , HRRPUV_MAX_SMV : f64 // // , HUMIDITY : f64 // // , HVAC_MASS_TRANSPORT : bool // // , IBLANK_SMV : bool // // , IMMERSED_BOUNDARY_METHOD : i64 // // , INITIAL_UNMIXED_FRACTION : f64 // // -- , KINETIC_ENERGY_SOURCE : String // // , LAPSE_RATE : f64 // // , LES_FILTER_WIDTH : String // // , MAX_CHEMISTRY_ITERATIONS : i64 // // , MAX_LEAK_PATHS : i64 // , MAXIMUM_VISIBILITY = fromMaybe 30 $ parToDouble <$> getParameterMaybe nml 
"MAXIMUM_VISIBILITY" // // , MEAN_FORCING : [Bool] // // , MPI_TIMEOUT : f64 // // , N_FIXED_CHEMISTRY_SUBSTEPS : i64 // // , NEAR_WALL_TURBULENCE_MODEL : String // // -- , NEW_MOMENTUM_NUDGING : String // // -- , NEW_OPEN_BOUNDARY : String // // , NOISE : bool // // , NOISE_VELOCITY : f64 // // , NO_EVACUATION : bool // // , NO_RAMPS : bool // // -- , NORTHANGLE : String // // , OVERWRITE : bool // // , PARTICLE_CFL_MAX : f64 // // , PARTICLE_CFL_MIN : f64 // // , PARTICLE_CFL : bool // // , PERIODIC_TEST : i64 // // -- , PROFILING : String // // , POROUS_FLOOR : bool // // -- , POTENTIAL_TEMPERATURE_CORRECTION : String // // , PR : f64 // // , PROCESS_ALL_MESHES : bool // // , PROJECTION : bool // // , P_INF : f64 // // -- , RAMP_FVX_T : String // // -- , RAMP_FVY_T : String // // -- , RAMP_FVZ_T : String // // , RAMP_GX : String // // , RAMP_GY : String // // , RAMP_GZ : String // // , RAMP_U0 : String // // , RAMP_U0_T : String // // , RAMP_V0 : String // // , RAMP_V0_T : String // // , RAMP_W0 : String // // , RAMP_W0_T : String // // , RAMP_U0_Z : String // // , RAMP_V0_Z : String // // , RAMP_W0_Z : String // // -- , RADIATION : String // // , RESEARCH_MODE : bool // // , RESTART : bool // // , RESTART_CHID : String // // , RICHARDSON_ERROR_TOLERANCE : f64 // // , RUN_AVG_FAC : f64 // // , SC : f64 // // , SECOND_ORDER_INTERPOLATED_BOUNDARY : bool // // , SECOND_ORDER_PARTICLE_TRANSPORT : bool // // , SHARED_FILE_SYSTEM : bool // // -- , SLIP_CONDITION : String // // , SMOKE_ALBEDO : f64 // // , SOLID_PHASE_ONLY : bool // // -- , SOOT_OXIDATION : String // // -- , SPONGE_LAYER_DISTANCE : String // // , STRATIFICATION : bool // // , SUPPRESSION : bool // // -- , SURF_DEFAULT : String // // -- , TEMPERATURE_DEPENDENT_REACTION : String // // -- , TENSOR_DIFFUSIVITY : String // // , TERRAIN_CASE : bool // // , TERRAIN_IMAGE : String // // -- , TEST_FILTER_QUADRATURE : String // // , TEXTURE_ORIGIN : Vec<f64> // // , THERMOPHORETIC_DEPOSITION : bool // // , 
THICKEN_OBSTRUCTIONS : bool // // -- , TRANSPORT_UNMIXED_FRACTION : String // // -- , TRANSPORT_ZETA_SCHEME : String // // , TMPA : f64 // // , TURBULENCE_MODEL : String // // , TURBULENT_DEPOSITION : bool // // -- , TURB_INIT_CLOCK : String // // , U0 : f64 // // , UVW_FILE : String // // , V0 : f64 // // , VEG_LEVEL_SET_COUPLED : bool // // , VEG_LEVEL_SET_UNCOUPLED : bool // // , VERBOSE : f64 // , VISIBILITY_FACTOR = fromMaybe 3 $ parToDouble <$> getParameterMaybe nml "VISIBILITY_FACTOR" // // , VN_MAX : f64 // // , VN_MIN : f64 // // , Y_CO2_INFTY : f64 // // , Y_O2_INFTY : f64 // // , W0 : f64 // // -- , WD_PROPS : String // // -- , WIND_BOUNDARY : String // // -- , WIND_ONLY : String // } // in fdsData { Misc = (Just misc)} // decodeTime : FDSFile -> Namelist -> FDSFile // decodeTime fdsData nml = // let // time = Time // { DT = parToDouble <$> getParameterMaybe nml "DT" // // , EVAC_DT_FLOWFIELD : f64 // // , EVAC_DT_STEADY_STATE : f64 // , FYI = parToString <$> getParameterMaybe nml "FYI" // // , LIMITING_DT_RATIO : f64 // // , LOCK_TIME_STEP : bool // // , RESTRICT_TIME_STEP : bool // , T_BEGIN = fromMaybe 0 $ parToDouble <$> getParameterMaybe nml "T_BEGIN" // , T_END = fromMaybe 1 $ parToDouble <$> getParameterMaybe nml "T_END" // // , T_END_GEOM : f64 // // , TIME_SHRINK_FACTOR : f64 // // , WALL_INCREMENT : i64 // // , WALL_INCREMENT_HT3D : i64 // // , TWFIN : f64 // } // in fdsData { Time = (Just time)} // decodeHead : FDSFile -> Namelist -> FDSFile // decodeHead fdsData nml = // let // head = Head // { head_CHID = parToString <$> getParameterMaybe nml "CHID" // , head_FYI = parToString <$> getParameterMaybe nml "FYI" // , head_TITLE = parToString <$> getParameterMaybe nml "TITLE" // } // in fdsData { Head = (Just head)} // decodeDump : FDSFile -> Namelist -> FDSFile // decodeDump fdsData nml = // let // dump = Dump // { CLIP_RESTART_FILES = fromMaybe True $ parToBool <$> getParameterMaybe nml "CLIP_RESTART_FILES" // // , COLUMN_DUMP_LIMIT : bool // 
// , CTRL_COLUMN_LIMIT : i64 // // , DEVC_COLUMN_LIMIT : i64 // // , DT_BNDE : f64 // // , DT_BNDF : f64 // // , DT_CPU : f64 // // , DT_CTRL : f64 // // , DT_DEVC : f64 // // , DT_DEVC_LINE : f64 // // , DT_FLUSH : f64 // // , DT_GEOM : f64 // // , DT_HRR : f64 // // , DT_ISOF : f64 // // , DT_MASS : f64 // // , DT_PART : f64 // // , DT_PL3D : f64 // // , DT_PROF : f64 // , DT_RESTART = fromMaybe 1000000 $ parToDouble <$> getParameterMaybe nml "DT_RESTART" // // , DT_SL3D : f64 // // , DT_SLCF : f64 // // , EB_PART_FILE : bool // // , FLUSH_FILE_BUFFERS : bool // // , GEOM_DIAG : bool // // , MASS_FILE : bool // // , MAXIMUM_PARTICLES : i64 // // , MMS_TIMER : f64 // , NFRAMES = fromMaybe 1000 $ parToInt <$> getParameterMaybe nml "NFRAMES" // // , PLOT3D_PART_ID : Vec<String> // // , PLOT3D_QUANTITY : Vec<String> // // , PLOT3D_SPEC_ID : Vec<String> // // , PLOT3D_VELO_INDEX : Vec<i64> // // , RENDER_FILE : String // // , SIG_FIGS : i64 // // , SIG_FIGS_EXP : i64 // // , SMOKE3D : bool // // , SMOKE3D_QUANTITY : String // // , SMOKE3D_SPEC_ID : String // // , STATUS_FILES : bool // // , SUPPRESS_DIAGNOSTICS : bool // // , UVW_TIMER : Vec<f64> // // , VELOCITY_ERROR_FILE : bool // // , WRITE_XYZ : bool // } // in fdsData { Dump = (Just dump)}
Java
UTF-8
7,079
1.992188
2
[]
no_license
package com.qiunan.flink.connector.clickhouse.table.internal.executor; import com.qiunan.flink.connector.clickhouse.table.internal.connection.ClickHouseConnectionProvider; import com.qiunan.flink.connector.clickhouse.table.internal.converter.ClickHouseRowConverter; import com.google.common.util.concurrent.AbstractExecutionThreadService; import org.apache.flink.api.common.functions.RuntimeContext; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeutils.TypeSerializer; import org.apache.flink.table.data.RowData; import org.apache.flink.types.RowKind; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ru.yandex.clickhouse.ClickHouseConnection; import ru.yandex.clickhouse.ClickHousePreparedStatement; import ru.yandex.clickhouse.except.ClickHouseException; import java.io.IOException; import java.sql.SQLException; import java.time.Duration; import java.util.ArrayList; import java.util.List; public class ClickHouseBatchExecutor implements ClickHouseExecutor{ private static final long serialVersionUID = 1L; private static final Logger LOG = LoggerFactory.getLogger(ClickHouseBatchExecutor.class); private transient ClickHousePreparedStatement stmt; private transient ClickHouseConnectionProvider connectionProvider; private RuntimeContext context; private TypeInformation<RowData> rowDataTypeInformation; private final String sql; private final ClickHouseRowConverter converter; private transient List<RowData> batch; private final Duration flushInterval; private final int batchSize; private final int maxRetries; private transient TypeSerializer<RowData> typeSerializer; private boolean objectReuseEnabled = false; private transient ExecuteBatchService service; public ClickHouseBatchExecutor(String sql, ClickHouseRowConverter converter, Duration flushInterval, int batchSize, int maxRetries, TypeInformation<RowData> rowDataTypeInformation) { this.sql = sql; this.converter = converter; this.flushInterval = flushInterval; 
this.batchSize = batchSize; this.maxRetries = maxRetries; this.rowDataTypeInformation = rowDataTypeInformation; } @Override public void prepareStatement(ClickHouseConnection connection) throws SQLException { this.batch = new ArrayList<>(); this.stmt = (ClickHousePreparedStatement)connection.prepareStatement(this.sql); this.service = new ExecuteBatchService(); this.service.startAsync(); } @Override public void prepareStatement(ClickHouseConnectionProvider connectionProvider) throws SQLException { this.batch = new ArrayList<>(); this.connectionProvider = connectionProvider; this.stmt = (ClickHousePreparedStatement) connectionProvider.getConnection().prepareStatement(this.sql); this.service = new ExecuteBatchService(); this.service.startAsync(); } @Override public void setRuntimeContext(RuntimeContext context) { this.context = context; this.typeSerializer = this.rowDataTypeInformation.createSerializer(context.getExecutionConfig()); this.objectReuseEnabled = context.getExecutionConfig().isObjectReuseEnabled(); } @Override public synchronized void addBatch(RowData rowData) { if(rowData.getRowKind() != RowKind.DELETE && rowData.getRowKind() != RowKind.UPDATE_BEFORE) { if(this.objectReuseEnabled) { this.batch.add(this.typeSerializer.copy(rowData)); } else { this.batch.add(rowData); } } } @Override public synchronized void executeBatch() throws IOException { if(this.service.isRunning()) { notifyAll(); } else { throw new IOException("executor unexpectedly terminated", this.service.failureCause()); } } @Override public void closeStatement() throws SQLException { if(this.service != null) { this.service.stopAsync().awaitTerminated(); } else { LOG.warn("executor closed before initialized"); } if(this.stmt != null) { this.stmt.close(); this.stmt = null; } } @Override public String getState() { return ClickHouseBatchExecutor.this.service.state().toString(); } private class ExecuteBatchService extends AbstractExecutionThreadService{ private ExecuteBatchService() {} @Override 
protected void run() throws Exception { while(isRunning()) { synchronized(ClickHouseBatchExecutor.this) { ClickHouseBatchExecutor.this.wait(ClickHouseBatchExecutor.this.flushInterval.toMillis()); if(!ClickHouseBatchExecutor.this.batch.isEmpty()) { for (RowData rowData : ClickHouseBatchExecutor.this.batch) { ClickHouseBatchExecutor.this.converter.toClickHouse(rowData, ClickHouseBatchExecutor.this.stmt); ClickHouseBatchExecutor.this.stmt.addBatch(); } attemptExecuteBatch(); } } } } private void attemptExecuteBatch() throws Exception{ for(int idx = 1; idx <= ClickHouseBatchExecutor.this.maxRetries; idx++) { try { ClickHouseBatchExecutor.this.stmt.executeBatch(); ClickHouseBatchExecutor.this.batch.clear(); break; }catch (ClickHouseException e1) { ClickHouseBatchExecutor.LOG.error("ClickHouse error", e1); //当出现ClickHouse exception, code: 27 ...DB::Exception: Cannot parse input 即这条数据是错误时,略过此次插入 int errorCode = e1.getErrorCode(); if(errorCode == 27) { ClickHouseBatchExecutor.this.stmt.clearBatch(); ClickHouseBatchExecutor.this.batch.clear(); break; } }catch (SQLException e2) { ClickHouseBatchExecutor.LOG.error("ClickHouse executeBatch error, retry times = {}", Integer.valueOf(idx), e2); if(idx >= ClickHouseBatchExecutor.this.maxRetries){ throw new IOException(e2); } try { Thread.sleep((1000L * idx)); } catch (InterruptedException e3) { Thread.currentThread().interrupt(); throw new IOException("unable to flush; interrupted while doing another attempt", e3); } } } } } }
Markdown
UTF-8
2,469
2.828125
3
[ "Unlicense" ]
permissive
--- layout: post title: How to Fix "Cannot find module" in TypeScript date: 2016-10-28 --- When using external library in TypeScript, one of the most common problem is that the `tsc` failed to find module. For example, 12 import Image from 'react-native-image-progress'; ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ src/windows/main/pages/components/ProfilePicturePicker.tsx(12,19): error TS2307: Cannot find module 'react-native-image-progress'. It is caused by missing typings definition. To search for any existing definition, just type this command in the project root: $ typings search react-native-image-progress No results found for search It is very easy to fix if the typings is already in the typings registry. However in the above case no results are found. We then have to create the typing definition ourselves. It is rather easy but still take some time to do that. # Step 1 - Create Typings Structure First of all we create `typings/custom/react-native-image-progress` directory and `index.d.ts` file as shown below. 
SomeApp ├── typings │   └── custom │       └── react-native-image-progress │           └── index.d.ts └── typings.json # Step 2 - Update the typing definition files Add the following line to the `globalDependencies` in `typings.json` "react-native-image-progress": "file:typings/custom/react-native-image-progress/index.d.ts" Add the following content to `typings/custom/react-native-image-progress/index.d.ts` declare module "react-native-image-progress" { import React, {ComponentClass} from 'react'; import {ViewStyle, TextStyle} from 'react-native'; interface IImageProgressProperties extends React.TouchableWithoutFeedbackProperties, React.Props<ImageProgressStatic> { indicator?: any; indicatorProps?: Object; renderIndicator?: any; threshold?: number; source: {uri: string} | string; } interface ImageProgressStatic extends ComponentClass<IImageProgressProperties> { } var ImageProgress: ImageProgressStatic; type ImageProgress = ImageProgressStatic; export = ImageProgress; } # Step 3 - Install the custom typing definition Run the following command typings install After the above steps, you should now use the `*.ts` or `*.tsx` without any errors or warnings.
Python
UTF-8
976
3.9375
4
[ "Apache-2.0" ]
permissive
''' Created on Aug, 11 2015 @author: mlaptev ''' # First: create list that contains tuple with following structure: # 1. the element # 2. square of the element # 3. cube of the element first_task_result = [(elem, elem**2, elem**3) for elem in range (1, 21)] # Second: create list that contains cube of even numbers second_task_result = [elem**3 for elem in range(2, 21, 2)] # Third: create dictionary with symbols and its key in string third_task_results = {key: ord(key) for key in "This is a test message"} # Fourth: dictionary with number as a key and cube of the number as a value. number should be even and should not have 10 as delimiter fourth_tast_results = {key: key**3 for key in range(10, 1001, 2) if key % 10 != 0} if __name__ == '__main__': print "First task" print first_task_result print "Second task" print second_task_result print "Third task" print third_task_results print "Fourth task" print fourth_tast_results
Java
UTF-8
1,093
3.703125
4
[]
no_license
public class EditDistance { public static int editDistance(String s, String t) { // initialize the table: int[][] myTable = new int[s.length() + 1][t.length() + 1]; for (int i = 0; i < s.length() + 1; i++) { myTable[i][0] = i; } for (int j = 0; j < t.length() + 1; j++) { myTable[0][j] = j; } // dynamic programming array: for (int i = 1; i < s.length() + 1; i++) { for (int j = 1; j < t.length() + 1; j++) { int a = myTable[i - 1][j] + 1; int b = myTable[i][j - 1] + 1; int c = myTable[i - 1][j - 1]; // System.out.println("s.charAt(i - 1): " + s.charAt(i - 1) + // "\n" + "t.charAt(j - 1): " + t.charAt(j - 1)); // System.out.println("i: " + i + "\n" + "j: " + j); if (s.charAt(i - 1) != t.charAt(j - 1)) { c = c + 1; } myTable[i][j] = Math.min(a, Math.min(b, c)); } } // System.out.println(Arrays.deepToString(myTable)); return myTable[s.length()][t.length()]; } // main method for testing purposes public static void main(String[] args) { System.out.println(editDistance("Saturday", "Sunday")); } }
C++
UTF-8
714
3.265625
3
[]
no_license
#include<iostream.h> #include<conio.h> void main() { clrscr(); int *a,n,prod=1,multi=1,sumo=0,sume=0; cout<<"\nEnter no of elements...."; cin>>n; a=new int[n]; for (int i=0;i<n;i++) { cin>>a[i]; } clrscr(); cout<<"\nArray A={"; for (int j=0;j<n;j++) { cout<<a[j]<<","; } cout<<"\b}"; for (int k=0;k<n;k++) { if (k==0 || k%2==0) //IF FOR PRODUCT AND SUM OF ELEMENTS AT EVEN PLACES { prod=prod*a[k]; sume=sume+a[k]; } else if (k%2!=0)// " " " " " " " " ODD " { multi=multi*a[k]; sumo=sumo+a[k]; } } cout<<endl<<endl<<"\nODD:-\t\tEVEN:-"; cout<<"\nPRODUCT="<<prod<<"\t\t\b\b\b\b\b\b\b\bPRODUCT="<<multi; cout<<"\nSUM="<<sume<<"\t\tSUM="<<sumo; getch(); }
Markdown
UTF-8
1,023
3.171875
3
[]
no_license
# Command Pattern ### Terminology * **Command object**: Knows about a Receiver and invokes a method on it. * **Receiver object**: Handles the state of the design pattern (Light). * **Invoker**: Knows about the Command object and invokes the concrete interface implementation. * **Client**: Uses the invoker. #### Usage #### What problems can the Command design pattern solve? Coupling the invoker of a request to a particular request should be avoided. That is, hard-wired requests should be avoided. It should be possible to configure an object (that invokes a request) with a request. Implementing (hard-wiring) a request directly into a class is inflexible because it couples the class to a particular request at compile-time, which makes it impossible to specify a request at run-time. ##### What solution does the Command design pattern describe? Define separate (command) objects that encapsulate a request. A class delegates a request to a command object instead of implementing a particular request directly.
Markdown
UTF-8
422
2.53125
3
[ "MIT" ]
permissive
# Responsive Timeline This application uses pure CSS to display a responsive timeline. Go ahead, resize your browser window. See if you can catch all the changes! ## Screenshot ![Responsive Timeline](/images/responsivetimeline.png) ## Installation ``` git clone https://github.com/mkhira2/responsive-timeline.git cd responsive-timeline open index.html ``` ## License This project is licensed under the MIT License.
Python
UTF-8
22,848
3.09375
3
[ "MIT" ]
permissive
# -*- coding: utf-8 -*- """ pip_services3_commons.data.StringValueMap ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ StringValueMap implementation :copyright: Conceptual Vision Consulting LLC 2018-2019, see AUTHORS for more details. :license: MIT, see LICENSE for more details. """ from datetime import datetime from typing import Any, List, Optional, Sequence from ..convert import DoubleConverter, TypeCode from ..convert.BooleanConverter import BooleanConverter from ..convert.DateTimeConverter import DateTimeConverter from ..convert.FloatConverter import FloatConverter from ..convert.IntegerConverter import IntegerConverter from ..convert.LongConverter import LongConverter from ..convert.MapConverter import MapConverter from ..convert.StringConverter import StringConverter from ..convert.TypeConverter import TypeConverter from ..data import AnyValueMap from ..data.AnyValue import AnyValue from ..data.AnyValueArray import AnyValueArray class StringValueMap(dict): """ Cross-language implementation of a map (dictionary) where all keys and values are strings. The stored values can be converted to different types using variety of accessor methods. The string map is highly versatile. It can be converted into many formats, stored and sent over the wire. This class is widely used in Pip.Services as a basis for variety of classes, such as ConfigParams, ConnectionParams, CredentialParams and others. Example: .. code-block:: python value1 = StringValueMap.fromString("key1=1;key2=123.456;key3=2018-01-01") value1.get_as_boolean("key1") // Result: true value1.get_as_integer("key2") // Result: 123 value1.get_as_float("key2") // Result: 123.456 value1.get_as_datetime("key3") // Result: Date 2018,0,1 """ def __init__(self, map: Any = None): """ Creates a new instance of the map and assigns its args. :param map: (optional) values to initialize this map. 
""" super(StringValueMap, self).__init__() if isinstance(map, dict): for (k, v) in map.items(): self.put(k, v) def get_keys(self) -> List[str]: """ Gets keys of all elements stored in this map. :return: a list with all map keys. """ names = [] for (k, _) in self.items(): names.append(k) return names def get(self, key: str) -> str: """ Gets a map element specified by its key. :param key: a key of the element to get. :return: the args of the map element. """ return self[key] if key in self else None def put(self, key: str, value: Any): """ Puts a new args into map element specified by its key. :param key: a key of the element to put. :param value: a new args for map element. """ self[key] = StringConverter.to_nullable_string(value) def remove(self, key: str): """ Removes a map element specified by its key :param key: a key of the element to remove. """ self.pop(key, None) def append(self, map: Any): """ Appends new elements to this map. :param map: a map with elements to be added. """ if isinstance(map, dict): for (k, v) in map.items(): self.put(k, v) def clear(self): """ Clears this map by removing all its elements. """ super().clear() def length(self) -> int: """ Gets a number of elements stored in this map. :return: the number of elements in this map. """ count = 0 for key in self.keys(): if key in self.keys(): count += 1 return count def get_as_object(self, key: str = None) -> Any: """ Gets the args stored in map element without any conversions. When element key is not defined it returns the entire map args. :param key: (optional) a key of the element to get :return: the element args or args of the map when index is not defined. """ if key is None: return self.get_as_map(None) else: return self.get(key) def set_as_object(self, key: Any, value: Any = None): """ Sets a new args to map element specified by its index. When the index is not defined, it resets the entire map args. This method has double purpose because method overrides are not supported in JavaScript. 
:param key: (optional) a key of the element to set :param value: a new element or map value. """ if value is None: value = key self.clear() values = MapConverter.to_map(value) self.append(values) else: self.put(key, value) def get_as_map(self, key: Any) -> AnyValueMap: """ Converts map element into an AnyValueMap or returns empty AnyValueMap if conversion is not possible. :param key: a key of element to get. :return: AnyValueMap args of the element or empty AnyValueMap if conversion is not supported. """ if key is None: map = {} for (k, v) in self.items(): map[k] = v return map else: value = self.get(key) return MapConverter.to_map(value) def set_as_map(self, values: dict): """ Sets values to map :param values: values to set """ self.clear() for (k, v) in values.items(): self.put(k, v) def get_as_nullable_string(self, key: str) -> Optional[str]: """ Converts map element into a string or returns None if conversion is not possible. :param key: an index of element to get. :return: string args of the element or None if conversion is not supported. """ value = self.get(key) return StringConverter.to_nullable_string(value) def get_as_string(self, key: str) -> str: """ Converts map element into a string or returns "" if conversion is not possible. :param key: an index of element to get. :return: string args ot the element or "" if conversion is not supported. """ value = self.get(key) return StringConverter.to_string(value) def get_as_string_with_default(self, key: str, default_value: str) -> str: """ Converts map element into a string or returns default args if conversion is not possible. :param key: an index of element to get. :param default_value: the default args :return: string args ot the element or default args if conversion is not supported. 
""" value = self.get(key) return StringConverter.to_string_with_default(value, default_value) def get_as_nullable_boolean(self, key: str) -> Optional[bool]: """ Converts map element into a boolean or returns None if conversion is not possible :param key: an index of element to get. :return: boolean args of the element or None if conversion is not supported. """ value = self.get(key) return BooleanConverter.to_nullable_boolean(value) def get_as_boolean(self, key: str) -> bool: """ Converts map element into a boolean or returns false if conversion is not possible. :param key: an index of element to get. :return: boolean args ot the element or false if conversion is not supported. """ value = self.get(key) return BooleanConverter.to_boolean(value) def get_as_boolean_with_default(self, key: str, default_value: bool) -> bool: """ Converts map element into a boolean or returns default args if conversion is not possible. :param key: an index of element to get. :param default_value: the default args :return: boolean args ot the element or default args if conversion is not supported. """ value = self.get(key) return BooleanConverter.to_boolean_with_default(value, default_value) def get_as_nullable_integer(self, key: str) -> Optional[int]: """ Converts map element into an integer or returns None if conversion is not possible. :param key: an index of element to get. :return: integer args of the element or None if conversion is not supported. """ value = self.get(key) return IntegerConverter.to_nullable_integer(value) def get_as_integer(self, key: str) -> int: """ Converts map element into an integer or returns 0 if conversion is not possible. :param key: an index of element to get. :return: integer args ot the element or 0 if conversion is not supported. 
""" value = self.get(key) return IntegerConverter.to_integer(value) def get_as_integer_with_default(self, key: str, default_value: int) -> int: """ Converts map element into an integer or returns default args if conversion is not possible. :param key: an index of element to get. :param default_value: the default args :return: integer args ot the element or default args if conversion is not supported. """ value = self.get(key) return IntegerConverter.to_integer_with_default(value, default_value) def get_as_nullable_long(self, key: str) -> Optional[float]: value = self.get(key) return LongConverter.to_nullable_long(value) def get_as_long(self, key: str) -> float: value = self.get(key) return LongConverter.to_long(value) def get_as_long_with_default(self, key: str, default_value: float) -> float: value = self.get(key) return LongConverter.to_long_with_default(value, default_value) def get_as_nullable_float(self, key: str) -> Optional[float]: """ Converts map element into a float or returns None if conversion is not possible. :param key: an index of element to get. :return: float args of the element or None if conversion is not supported. """ value = self.get(key) return FloatConverter.to_nullable_float(value) def get_as_float(self, key: str) -> float: """ Converts map element into a float or returns 0 if conversion is not possible. :param key: an index of element to get. :return: float args ot the element or 0 if conversion is not supported. """ value = self.get(key) return FloatConverter.to_float(value) def get_as_float_with_default(self, key: str, default_value: float) -> float: """ Converts map element into a float or returns default args if conversion is not possible. :param key: an index of element to get. :param default_value: the default args :return: float args ot the element or default args if conversion is not supported. 
""" value = self.get(key) return FloatConverter.to_float_with_default(value, default_value) def get_as_nullable_double(self, key: str) -> Optional[float]: """ Converts map element into a double or returns null if conversion is not possible. :param key: an index of element to get. :return: double value of the element or null if conversion is not supported. """ value = self.get(key) return DoubleConverter.to_nullable_double(value) def get_as_double(self, key: str) -> float: """ Converts map element into a double or returns 0 if conversion is not possible. :param key: an index of element to get. :return: double value of the element or 0 if conversion is not supported. """ value = self.get(key) return DoubleConverter.to_double(value) def get_as_double_with_default(self, key: str, default_value: float) -> float: """ Converts map element into a double or returns default value if conversion is not possible. :param key: a key of element to get. :param default_value: the default args :return: double value of the element or default value if conversion is not supported. """ value = self.get(key) return DoubleConverter.to_double_with_default(value, default_value) def get_as_nullable_datetime(self, key: str) -> Optional[datetime]: """ Converts map element into a Date or returns None if conversion is not possible. :param key: an index of element to get. :return: Date args of the element or None if conversion is not supported. """ value = self.get(key) return DateTimeConverter.to_nullable_datetime(value) def get_as_datetime(self, key: str) -> datetime: """ Converts map element into a Date or returns the current date if conversion is not possible. :param key: an index of element to get. :return: Date args ot the element or the current date if conversion is not supported. 
""" value = self.get(key) return DateTimeConverter.to_datetime(value) def get_as_datetime_with_default(self, key: str, default_value: datetime) -> datetime: """ Converts map element into a Date or returns default args if conversion is not possible. :param key: an index of element to get. :param default_value: the default args :return: Date args ot the element or default args if conversion is not supported. """ value = self.get(key) return DateTimeConverter.to_datetime_with_default(value, default_value) def get_as_nullable_type(self, value_type: TypeCode, key: str) -> Optional[Any]: """ Converts map element into a args defined by specied typecode. If conversion is not possible it returns None. :param value_type: the TypeCode that defined the type of the result :param key: an index of element to get. :return: element args defined by the typecode or None if conversion is not supported. """ value = self.get(key) return TypeConverter.to_nullable_type(value_type, value) def get_as_type(self, value_type: TypeCode, key: str) -> Any: """ Converts map element into a args defined by specied typecode. If conversion is not possible it returns default args for the specified type. :param value_type: the TypeCode that defined the type of the result :param key: an index of element to get. :return: element args defined by the typecode or default if conversion is not supported. """ value = self.get(key) return TypeConverter.to_type(value_type, value) def get_as_type_with_default(self, value_type: TypeCode, key: str, default_value: Any) -> Any: """ Converts map element into a args defined by specied typecode. If conversion is not possible it returns default args. :param value_type: the TypeCode that defined the type of the result :param key: an index of element to get. :param default_value: the default args :return: element args defined by the typecode or default args if conversion is not supported. 
""" value = self.get(key) return TypeConverter.to_type_with_default(value_type, value, default_value) def get_as_value(self, key: str) -> AnyValue: """ Converts map element into an AnyValue or returns an empty AnyValue if conversion is not possible. :param key: a key of element to get. :return: AnyValue value of the element or empty AnyValue if conversion is not supported. """ value = self.get(key) return AnyValue(value) def get_as_array(self, key: str) -> AnyValueArray: """ Converts map element into an AnyValue or returns an empty AnyValue if conversion is not possible. :param key: a key of element to get. :return: AnyValue value of the element or empty AnyValue if conversion is not supported. """ value = self.get(key) return AnyValueArray.from_value(value) def get_as_nullable_array(self, key: str) -> Optional[AnyValueArray]: """ Converts map element into an AnyValueArray or returns null if conversion is not possible. :param key: a key of element to get. :return: AnyValueArray value of the element or null if conversion is not supported. """ value = self.get(key) return None if value is None else AnyValueArray.from_value(value) def get_as_array_with_default(self, key: str, default_value: AnyValueArray) -> AnyValueArray: """ Converts map element into an AnyValueArray or returns default value if conversion is not possible. :param key: a key of element to get. :param default_value: the default value :return: AnyValueArray value of the element or default value if conversion is not supported. """ result = self.get_as_nullable_array(key) return result if result is not None else default_value def get_as_nullable_map(self, key: str) -> AnyValueMap: """ Converts map element into an AnyValueMap or returns None if conversion is not possible. :param key: a key of element to get. :return: AnyValueMap args of the element or None if conversion is not supported. 
""" value = self.get_as_object(key) return AnyValueMap.from_value(value) # def get_as_map(self, key): # args = self.get(key) # return AnyValueMap.from_value(args) def get_as_map_with_default(self, key: str, default_value: AnyValueMap) -> AnyValueMap: """ Converts map element into an AnyValueMap or returns default args if conversion is not possible. :param key: a key of element to get. :param default_value: the default args :return: AnyValueMap args of the element or default args if conversion is not supported. """ value = self.get_as_nullable_map(key) return MapConverter.to_map_with_default(value, default_value) def clone(self) -> Any: """ Creates a binary clone of this object. :return: a clone of this object. """ map = StringValueMap() map.set_as_map(self) return map def to_string(self): """ Gets a string representation of the object. The result is a semicolon-separated list of key-args pairs as **"key1=value1;key2=value2;key=value3"** :return: a string representation of the object. """ result = '' for (k, v) in self.items(): if len(result) > 0: result += ';' if v != None: result += k + '=' + StringConverter.to_string_with_default(v, '') else: result += k return result def __str__(self): """ Gets a string representation of the object. The result is a semicolon-separated list of key-args pairs as **"key1=value1;key2=value2;key=value3"** :return: a string representation of the object. """ result = '' for (k, v) in self.items(): if len(result) > 0: result += ';' if v != None: result += k + '=' + StringConverter.to_string_with_default(v, '') else: result += k return result @staticmethod def from_value(value: Any) -> 'StringValueMap': """ Converts specified args into StringValueMap. :param value: args to be converted :return: a newly created StringValueMap. 
""" # map = RecursiveObjectReader.get_properties(args) return StringValueMap(value) @staticmethod def from_tuples(*tuples: Any) -> 'StringValueMap': """ Creates a new StringValueMap from a list of key-args pairs called tuples. :param tuples: a list of values where odd elements are keys and the following even elements are values :return: a newly created StringValueMap. """ return StringValueMap.from_tuples_array(*tuples) @staticmethod def from_tuples_array(tuples: Sequence[Any]) -> 'StringValueMap': """ Creates a new StringValueMap from a list of key-args pairs called tuples. The method is similar to :func:`from_tuples` but tuples are passed as array instead of parameters. :param tuples: a list of values where odd elements are keys and the following even elements are values :return: a newly created StringValueMap. """ result = StringValueMap() if tuples is None or len(tuples) == 0: return result index = 0 while index < len(tuples): if index + 1 >= len(tuples): break key = StringConverter.to_string(tuples[index]) value = StringConverter.to_nullable_string(tuples[index + 1]) index += 2 result.put(key, value) return result @staticmethod def from_string(line: str) -> 'StringValueMap': """ Parses semicolon-separated key-args pairs and returns them as a StringValueMap. :param line: semicolon-separated key-args list to initialize StringValueMap. :return: a newly created StringValueMap. """ result = StringValueMap() if line is None or len(line) == 0: return result tokens = str(line).split(';') for token in tokens: if len(token) == 0: continue index = token.find('=') key = token[0:index] if index >= 0 else token value = token[index + 1:] if index >= 0 else None result.put(key, value) return result @staticmethod def from_maps(*maps: dict) -> 'StringValueMap': """ Creates a new AnyValueMap by merging two or more maps. Maps defined later in the list override values from previously defined maps. :param maps: an array of maps to be merged :return: a newly created AnyValueMap. 
""" result = StringValueMap() if maps is None or len(maps) == 0: return result for map in maps: for (k, v) in map.items(): key = StringConverter.to_string(k) result.put(key, v) return result
Java
UTF-8
6,005
2.0625
2
[]
no_license
package shu.cloud.tools; import java.io.File; import java.io.IOException; import java.sql.Timestamp; import java.util.Iterator; import java.util.List; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerConfigurationException; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import org.hibernate.Transaction; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.xml.sax.SAXException; import shu.cloud.model.LongLatData; import shu.cloud.model.LongLatDataDAO; import shu.cloud.model.TaskFileData; import shu.cloud.model.TaskFileDataDAO; public class AddSearchResultToDB { public static void init(String path,String addrName,String uuid){ /* List list=llddDao.findByAddrName(addrName); if(list.size()>=1){ Timestamp ts1=new Timestamp(System.currentTimeMillis()); System.out.println("list.size()="+list.size()); DuplicateData ddData=new DuplicateData(addrName,uuid,ts1); DuplicateDataDAO ddDao=new DuplicateDataDAO(); Transaction tx = ddDao.getSession().beginTransaction(); ddDao.save(ddData); tx.commit(); ddDao.getSession().close(); return; } */ DocumentBuilderFactory factory=DocumentBuilderFactory.newInstance(); Element theBook=null, theElem=null, root=null; try { factory.setIgnoringElementContentWhitespace(true); DocumentBuilder db=factory.newDocumentBuilder(); Document xmldoc=db.parse(new File(path)); root=xmldoc.getDocumentElement(); //output(root); Element resultDom=(Element)root.getElementsByTagName("result").item(0); Element locationDom=(Element)resultDom.getElementsByTagName("location").item(0); String status=root.getElementsByTagName("status").item(0).getTextContent(); String latitude="0"; String longitude="0"; String 
precise="0"; String confidence="0"; String level="0"; if(status.equals("OK")){ latitude=locationDom.getElementsByTagName("lat").item(0).getTextContent(); longitude=locationDom.getElementsByTagName("lng").item(0).getTextContent(); precise=resultDom.getElementsByTagName("precise").item(0).getTextContent(); confidence=resultDom.getElementsByTagName("confidence").item(0).getTextContent(); level=resultDom.getElementsByTagName("level").item(0).getTextContent(); }else if(status.equals("INVALID_PARAMETERS")){ status="INVALID_PARAMETERS"; }else if(status.equals("INVALID_PARAMETERS")){ status="INVILID_KEY"; } //System.out.println("status="+status); //System.out.println("latitude="+latitude); //System.out.println("longitude="+longitude); //System.out.println("precise="+precise); //System.out.println("confidence="+confidence); //System.out.println("level="+level); boolean bprecise=true; if(precise.equals("0")){ bprecise=false; } Timestamp ts=new Timestamp(System.currentTimeMillis()); //UUID uuid=UUID.randomUUID(); LongLatData lldData=new LongLatData(addrName,status,latitude,longitude, bprecise,confidence,level, "上海", ts, uuid); //LongLatDataDAO lldDao=new LongLatDataDAO(); LongLatDataDAO llddDao=new LongLatDataDAO(); Transaction tx = llddDao.getSession().beginTransaction(); llddDao.save(lldData); tx.commit(); llddDao.getSession().close(); TaskFileDataDAO tDao = new TaskFileDataDAO(); List list=tDao.findByUuid(uuid); Iterator iter=list.iterator(); int doneCount=8; while(iter.hasNext()){ TaskFileData tfdData1=(TaskFileData) iter.next(); doneCount=Integer.parseInt(tfdData1.getDoneCount()); System.out.println(); doneCount=doneCount+1; tfdData1.setDoneCount(doneCount+""); Transaction tx1 = tDao.getSession().beginTransaction(); tDao.merge(tfdData1); tx1.commit(); tDao.getSession().flush(); tDao.getSession().close(); } //List list1=lldDao.findAll(); //System.out.println("size==="+list1.size()); } catch (ParserConfigurationException e) { e.printStackTrace(); } catch (SAXException e) { 
e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } } public static void output(Node node){ TransformerFactory transFactory=TransformerFactory.newInstance(); try { Transformer transformer = transFactory.newTransformer(); transformer.setOutputProperty("encoding", "gb2312"); transformer.setOutputProperty("indent", "yes"); DOMSource source=new DOMSource(); source.setNode(node); StreamResult result=new StreamResult(); result.setOutputStream(System.out); transformer.transform(source, result); } catch (TransformerConfigurationException e) { e.printStackTrace(); } catch (TransformerException e) { e.printStackTrace(); } } }
Java
UTF-8
730
2.140625
2
[ "MIT" ]
permissive
package org.ofdrw.core.image;

import org.junit.jupiter.api.Test;
import org.ofdrw.TestTool;
import org.ofdrw.core.basicType.ST_Array;
import org.ofdrw.core.pageDescription.color.color.CT_Color;

import static org.junit.jupiter.api.Assertions.*;

/**
 * Verifies XML generation for the {@code Border} element.
 */
public class BorderTest {

    /**
     * Builds a fully populated sample border used by the generation test:
     * thin black line, rounded corners, and a dashed pattern with an offset.
     */
    public static Border borderCase() {
        Border border = new Border()
                .setLineWidth(0.353)
                .setHorizonalCornerRadius(1d)
                .setVerticalCornerRadius(1d)
                .setDashOffset(11d)
                .setDashPattern(new ST_Array(1, 2, 3))
                .setBorderColor(CT_Color.rgb(0, 0, 0));
        return border;
    }

    /** Serialises the sample border to XML through the shared test helper. */
    @Test
    public void gen() throws Exception {
        TestTool.genXml("border", borderCase());
    }
}
PHP
UTF-8
3,832
2.765625
3
[ "MIT" ]
permissive
<?php

/*
 * This file is part of Badcow DNS Library.
 *
 * (c) Samuel Williams <sam@badcow.co>
 *
 * For the full copyright and license information, please view the LICENSE
 * file that was distributed with this source code.
 */

namespace Badcow\DNS;

use Badcow\DNS\Rdata\AAAA;
use Badcow\DNS\Rdata\A;
use Badcow\DNS\Rdata\CNAME;
use Badcow\DNS\Rdata\DNAME;
use Badcow\DNS\Rdata\FormattableInterface;
use Badcow\DNS\Rdata\HINFO;
use Badcow\DNS\Rdata\LOC;
use Badcow\DNS\Rdata\MX;
use Badcow\DNS\Rdata\NS;
use Badcow\DNS\Rdata\PTR;
use Badcow\DNS\Rdata\SOA;
use Badcow\DNS\Rdata\SRV;
use Badcow\DNS\Rdata\TXT;

/**
 * Renders a zone as text with the name/TTL/class/type columns padded to a
 * common width, records grouped by type (in self::$order) and each group
 * preceded by a comment header.
 */
class AlignedBuilder implements ZoneBuilderInterface
{
    /**
     * The order in which Resource Records should appear in a zone.
     *
     * @var array
     */
    private static $order = [
        SOA::TYPE,
        NS::TYPE,
        A::TYPE,
        AAAA::TYPE,
        CNAME::TYPE,
        DNAME::TYPE,
        MX::TYPE,
        LOC::TYPE,
        HINFO::TYPE,
        TXT::TYPE,
        PTR::TYPE,
        SRV::TYPE,
    ];

    /**
     * {@inheritdoc}
     */
    public function build(ZoneInterface $zone)
    {
        // Zone preamble: origin and default TTL directives.
        $master = '$ORIGIN '.$zone->getName().PHP_EOL.
            '$TTL '.$zone->getDefaultTtl().PHP_EOL;

        $rrs = $zone->getResourceRecords();
        $current = SOA::TYPE;
        $namePadding = $ttlPadding = $typePadding = 0;

        // Sort records by type (per self::$order), then by name + rdata.
        usort($rrs, 'self::compareResourceRecords');

        // First pass: compute the column widths from the longest name,
        // TTL and type across all records.
        foreach ($rrs as $rr) {
            /* @var $rr ResourceRecord */
            $namePadding = (strlen($rr->getName()) > $namePadding) ? strlen($rr->getName()) : $namePadding;
            $ttlPadding = (strlen($rr->getTtl()) > $ttlPadding) ? strlen($rr->getTtl()) : $ttlPadding;
            $typePadding = (strlen($rr->getType()) > $typePadding) ? strlen($rr->getType()) : $typePadding;
        }

        // Second pass: emit the records, inserting a "; <TYPE> RECORDS"
        // section header whenever the record type changes.
        foreach ($rrs as $rr) {
            /* @var $rr ResourceRecord */
            if (null == $rr->getRdata()) {
                // Records without rdata cannot be rendered; skip them.
                continue;
            }

            if ($rr->getType() !== $current) {
                $master .= PHP_EOL.ResourceRecord::COMMENT_DELIMINATOR.$rr->getType().' RECORDS'.PHP_EOL;
                $current = $rr->getType();
            }

            $rdata = $rr->getRdata();
            if ($rdata instanceof FormattableInterface) {
                // Multi-line rdata continues under the rdata column; pad by
                // the three leading columns plus the separating spaces.
                $rdata->setPadding($namePadding + $ttlPadding + $typePadding + 6);
            }

            $master .= sprintf('%s %s %s %s %s',
                str_pad($rr->getName(), $namePadding, ' ', STR_PAD_RIGHT),
                str_pad($rr->getTtl(), $ttlPadding, ' ', STR_PAD_RIGHT),
                str_pad($rr->getClass(), 2, ' ', STR_PAD_RIGHT),
                str_pad($rr->getType(), $typePadding, ' ', STR_PAD_RIGHT),
                ($rdata instanceof FormattableInterface) ? $rdata->outputFormatted() : $rdata->output()
            );

            if (null != $rr->getComment()) {
                $master .= ResourceRecord::COMMENT_DELIMINATOR.$rr->getComment();
            }

            $master .= PHP_EOL;
        }

        return $master;
    }

    /**
     * Compares two ResourceRecords to determine which is the higher order. Used with the usort() function.
     *
     * @param ResourceRecord $a
     * @param ResourceRecord $b
     *
     * @return int
     */
    public static function compareResourceRecords(ResourceRecord $a, ResourceRecord $b)
    {
        // Same type: fall back to lexical order of name + rendered rdata.
        if ($a->getType() === $b->getType()) {
            return strcmp($a->getName().$a->getRdata()->output(), $b->getName().$b->getRdata()->output());
        }

        // Different types: rank by position in self::$order (SOA first, etc.).
        $_a = array_search($a->getType(), self::$order);
        $_b = array_search($b->getType(), self::$order);

        if (false !== $_a && false !== $_b) {
            return $_a - $_b;
        }

        // Types not listed in self::$order sort after every listed type.
        if (false === $_a) {
            return 1;
        }

        return -1;
    }
}
SQL
UTF-8
6,208
4.0625
4
[]
no_license
-- Add below your SQL statements.
-- You can create intermediate views (as needed). Remember to drop these views after you have populated the result tables.
-- You can use the "\i a2.sql" command in psql to execute the SQL commands in this file.

-- Query 1 statements
-- For each country (c1), find the tallest of its neighbours (c2).
-- q1full: pair every country (c2) with each of its neighbours (c1), carrying c2's height.
create view q1full as select cid as c2id, cname as c2name, neighbor as c1id, height from country join neighbour on country.cid=neighbour.country;
-- q1max: per country, the maximum height found among its neighbours.
create view q1max as select neighbor, max(height) as maxheight from country join neighbour on country.cid=neighbour.country group by neighbor;
-- q1tall: keep only the neighbour rows whose height equals that maximum.
create view q1tall as select * from q1full join q1max on q1full.c1id=q1max.neighbor and q1full.height=q1max.maxheight;
create view q1ans as select c1id, cname as c1name, c2id, c2name from q1tall join country on q1tall.c1id=country.cid order by c1id asc;
insert into Query1(select * from q1ans);

-- Query 2 statements
-- Countries without ocean access (landlocked countries), ordered by name.
create view q2coast as select cid as c2 from oceanAccess;
create view q2almost as (select cid as ccc from country) except all (select * from q2coast);
create view q2ans as select cid, cname from country join q2almost on country.cid=q2almost.ccc order by cname asc;
insert into Query2 (select * from q2ans);

-- Query 3 statements
-- Landlocked countries with exactly one neighbour, paired with that neighbour.
create view q3vati as select cid as c1id, cname as c1name from q2ans join neighbour on q2ans.cid=neighbour.country group by c1id, c1name having count(neighbor) = 1;
create view q3add as select c1id, c1name, neighbor as c2id from q3vati join neighbour on q3vati.c1id=neighbour.country;
create view q3ans as select c1id, c1name, c2id, country.cname as c2name from q3add join country on q3add.c2id=country.cid order by c1name asc;
insert into Query3 (select * from q3ans);

-- Query 4 statements
-- Countries with direct ocean access, or indirect access through a neighbour.
-- q4inde: countries reachable to an ocean via either side of a neighbour pair.
create view q4inde as select country, neighbor, oid from neighbour, oceanAccess where neighbour.country=oceanAccess.cid OR neighbour.neighbor=oceanAccess.cid;
create view q4island as (select country as cid, oid from q4inde) union (select * from oceanAccess);
create view q4ans as select cname, oname from q4island, ocean, country where ocean.oid=q4island.oid and country.cid=q4island.cid order by cname asc, oname desc;
insert into Query4(select * from q4ans);

-- Query 5 statements
-- Top 10 countries by average HDI score over 2009-2013.
create view q5avg as select cid as cc, avg(hdi_score) as avghdi from hdi where year>=2009 and year<=2013 group by cid;
create view q5ans as select cc as cid, cname, avghdi from q5avg, country where q5avg.cc=country.cid order by avghdi desc limit 10;
insert into Query5(select * from q5ans);

-- Query 6 statements
-- Countries whose HDI score increased strictly every year from 2009 to 2013.
-- q6hacky: self-join hdi five times, one alias per year, requiring a strict increase.
create view q6hacky as select h1.cid as h1c, h2.cid as h2c, h3.cid as h3c, h4.cid as h4c, h5.cid as h5c from hdi as h1, hdi as h2, hdi as h3, hdi as h4, hdi as h5 where h1.year=2009 and h2.year=2010 and h3.year=2011 and h4.year=2012 and h5.year=2013 and h1.hdi_score<h2.hdi_score and h2.hdi_score<h3.hdi_score and h3.hdi_score<h4.hdi_score and h4.hdi_score<h5.hdi_score and h1.cid=h2.cid and h2.cid=h3.cid and h3.cid=h4.cid and h4.cid=h5.cid;
create view q6ans as select country.cid as cid, cname from q6hacky, country where q6hacky.h1c=country.cid order by cname asc;
insert into Query6(select * from q6ans);

--Query 7 statements
-- Total number of followers per religion, summed over all countries
-- (percentage of the population times the population).
create view q7total as select rid, rname, rpercentage * population as followers from religion, country where religion.cid=country.cid;
create view q7sum as select rid, rname, sum(followers) as followers from q7total group by rid, rname;
insert into Query7(select * from q7sum);

-- Query 8 statements
-- Pairs of neighbouring countries that share the same most-popular language.
-- q8most/q8popu: the most spoken language of each country.
create view q8most as select cid as cc, max(lpercentage) as most from language group by cid;
create view q8popu as select cid, lid, lname, most from q8most, language where q8most.cc=language.cid and q8most.most=language.lpercentage;
-- q8nei..q8names: attach each country's neighbours and resolve both names.
create view q8nei as select cid as cc, lid, lname, neighbor from q8popu, neighbour where q8popu.cid=neighbour.country;
create view q8name as select cid as ccc, lid, lname, neighbor, cname as c1name from q8nei, country c1 where q8nei.cc=c1.cid;
create view q8names as select cid, c1name, neighbor as c2id, cname as c2name, lid, lname as ll from q8name, country c2 where q8name.neighbor=c2.cid;
-- q8ans: keep pairs where the neighbour's most-popular language matches.
create view q8ans as select c1name, c2name, lname from q8names, q8popu where q8popu.cid=q8names.c2id and q8names.ll=q8popu.lname order by c1name desc;
insert into Query8(select * from q8ans);

-- Query 9 statements
-- Country with the greatest total elevation span: peak height plus the depth
-- of an adjacent ocean (height only for countries without ocean access).
create view q9dir as select cid as cc, oid as oo from country natural left join oceanAccess;
create view q9deep as select * from q9dir join ocean on ocean.oid=q9dir.oo;
-- q9high/q9hhalf: countries without ocean access use their height alone.
create view q9high as (select cid from country) except (select cc as cid from q9deep);
create view q9hhalf as select cname, height as totalspan from q9high join country on q9high.cid=country.cid;
-- q9lhalf/q9lll: coastal countries add the adjacent ocean depth; keep each
-- country's maximum span over all its adjacent oceans.
create view q9lhalf as select cid, cname, height + depth as totalspan from q9deep join country on q9deep.cc=country.cid;
create view q9lll as select cid as cc, max(totalspan) as totalspan from q9lhalf group by cc;
create view q9lol as select cname, totalspan from q9lll join country on q9lll.cc=country.cid;
create view q9ans as (select cname, totalspan from q9lol) union (select cname, totalspan from q9hhalf);
create view q9max as select max(totalspan) as maxy from q9ans;
create view q9final as select cname, totalspan from q9max, q9ans where q9max.maxy=q9ans.totalspan;
insert into Query9(select * from q9final);

-- Query 10 statements
-- Total border length of each country (sum of the lengths of its borders).
create view q0all as select cid, cname, neighbor, length from country join neighbour on country.cid=neighbour.country;
create view q0sum as select cid as cc, sum(length) as borderslength from q0all group by cc;
create view q0ans as select cname, borderslength from q0sum join country on country.cid=q0sum.cc;
insert into Query10(select * from q0ans);

--All "drop view" statements
-- Each drop cascades through the dependent views created above.
drop view q2coast cascade;
drop view q1full cascade;
drop view q1max cascade;
drop view q4inde cascade;
drop view q5avg cascade;
drop view q6hacky cascade;
drop view q7total cascade;
drop view q8most cascade;
drop view q9dir cascade;
drop view q0all cascade;
Markdown
UTF-8
755
2.625
3
[]
no_license
---
layout: home
title: Home
list_title: Blog posts
exclude: true
main-image: "main.jpg"
image-alt: "Karlbergskanalen"
image-class: "wide-image"
---

Welcome!

This site was originally created for the course ["Client-based web programming"](https://coursepress.lnu.se/kurs/klientbaserad-webbprogrammering/) at Linnaeus university. It contains a few [blog posts](/blog/) about creating the site and a page with some of my [music](/music/). I also try to keep it up to date with a portfolio of my [projects](/projects).

The site was created using [Jekyll](https://jekyllrb.com), and then published to [GitHub Pages](https://pages.github.com). All the posts on this site were written in [Markdown](https://daringfireball.net/projects/markdown/).

Enjoy!
C++
UTF-8
1,073
2.78125
3
[]
no_license
/**
 * Simple Arduino RC Receiver for one Channel
 * Author: Heinz Bruederlin
 * Date: 01/2018
 *
 * Measures the pulse width of an RC PWM input by timing the interval
 * between the rising and the falling edge of the configured pin.
 */
#include "RcRx.h"

// Single instance shared with the static interrupt handler; only the first
// receiver that calls init() gets wired to the ISR.
RcRx* RcRx::singleton = NULL;

/**
 * only port 2 or 3 support external interrupts
 * (otherwise we have to use pin change interrupt code ...)
 */
RcRx::RcRx(int _portNo) {
  portNo = _portNo;
}

// Resets the timing state and attaches the edge-change interrupt handler.
// NOTE(review): startTime/pulseWidth/lastInterruptTime are written from the
// ISR and read from normal code -- confirm they are declared volatile in
// RcRx.h (declarations are not visible in this file).
void RcRx::init(void) {
  startTime = 0;
  pulseWidth = 0;
  lastInterruptTime = 0;
  if (!singleton) {
    singleton = this;
  }
  attachInterrupt(digitalPinToInterrupt(portNo), calcSignal, CHANGE);
}

// Returns the last measured pulse width in microseconds, or 0 when the
// value lies outside the plausible RC servo range of 800..2200 us.
unsigned int RcRx::value(void) {
  if (pulseWidth < 800 || pulseWidth > 2200) {
    return 0;
  }
  return pulseWidth;
}

// Interrupt handler (CHANGE edge): on a rising edge remember the timestamp;
// on the following falling edge store the elapsed time as the pulse width
// and clear startTime so a stray falling edge is ignored.
void RcRx::calcSignal(void) {
  if (!singleton) return;
  singleton->lastInterruptTime = micros();
  if (digitalRead(singleton->portNo) == HIGH) {
    singleton->startTime = singleton->lastInterruptTime;
  } else if (singleton->startTime != 0) {
    singleton->pulseWidth = (singleton->lastInterruptTime - singleton->startTime);
    singleton->startTime = 0;
  }
}
Markdown
UTF-8
2,442
3.953125
4
[]
no_license
---
id: kth-largest-element-in-an-array
title: Kth Largest Element in an Array
sidebar_label: Kth Largest Element in an Array
---

## Description

<div class="description">
<p>Find the <strong>k</strong>th largest element in an unsorted array. Note that it is the kth largest element in the sorted order, not the kth distinct element.</p>

<p><strong>Example 1:</strong></p>

<pre>
<strong>Input:</strong> <code>[3,2,1,5,6,4]
</code>and k = 2
<strong>Output:</strong> 5
</pre>

<p><strong>Example 2:</strong></p>

<pre>
<strong>Input:</strong> <code>[3,2,3,1,2,4,5,5,6]
</code>and k = 4
<strong>Output:</strong> 4</pre>

<p><strong>Note: </strong><br />
You may assume k is always valid, 1 &le; k &le; array&#39;s length.</p>
</div>

## Solution(javascript)

```javascript
/**
 * Heap-based solution: build a max-heap over nums, then extract the
 * maximum k times; the kth extraction is the kth largest element.
 *
 * @param {number[]} nums
 * @param {number} k
 * @return {number}
 */
const findKthLargest = (nums, k) => {
  // In-place swap of arr[a] and arr[b]; no-op when the indices match.
  const swap = (a, b, arr) => { // eslint-disable-line
    if (a !== b) {
      const temp = arr[a]
      arr[a] = arr[b] // eslint-disable-line
      arr[b] = temp // eslint-disable-line
    }
  }
  // Curried heap factory: Heap(compareFn)(array) heapifies `array` in place.
  // With compareFn = (a, b) => b - a (used below) the root is the maximum.
  const Heap = compareFn => (arr = []) => {
    const left = index => 2 * index + 1
    const right = index => 2 * index + 2
    const parent = index => Math.floor((index - 1) / 2)
    const size = () => arr.length

    // log(n)
    // Sift the node at `index` down until the heap property holds.
    // NOTE(review): arr[l]/arr[r] are read before the bounds check; for the
    // numeric comparator used here compareFn(x, undefined) is NaN and
    // NaN > 0 is false, so out-of-range children are never chosen -- this
    // relies on that quirk and is fragile for other comparators.
    const heapify = (index) => {
      const l = left(index)
      const r = right(index)
      let current = index
      if (compareFn(arr[current], arr[l]) > 0 && (l < size())) {
        current = l
      }
      if (compareFn(arr[current], arr[r]) > 0 && (r < size())) {
        current = r
      }
      if (current !== index) {
        swap(current, index, arr)
        heapify(current)
      }
    }

    // log(n)
    // Sift a node up towards the root; defined for completeness but unused
    // here, since this solution never inserts into the heap.
    const heapifyUp = (index) => {
      const p = parent(index)
      if (p >= 0 && compareFn(arr[p], arr[index]) > 0) {
        swap(p, index, arr)
        heapifyUp(p)
      }
    }

    // O(n)
    // Bottom-up heap construction over the whole array.
    const buildHeap = () => {
      for (let i = Math.floor(arr.length / 2); i >= 0; i--) {
        heapify(i)
      }
    }

    // Remove and return the root: swap it with the last element, pop it,
    // then restore the heap property from the root.
    const extract = () => {
      swap(0, arr.length - 1, arr)
      const top = arr.pop()
      heapify(0)
      return top
    }

    buildHeap()

    return {
      extract,
      size: () => arr.length,
    }
  }

  // Discard the k-1 largest elements; the next extract is the answer.
  const maxHeap = Heap((a, b) => b - a)(nums)
  while (k > 1 && maxHeap.size() > 1) {
    maxHeap.extract()
    k--
  }
  return maxHeap.extract()
}
```
Java
UTF-8
1,093
2.484375
2
[]
no_license
package models;

import elements.Button;
import elements.Dropdown;
import elements.Link;
import elements.Page;

/**
 * Page object for the men's clothing search-result page.
 * Lazily created singleton; obtain the shared instance through {@link #get()}.
 */
public class MensClothing extends PageObject {

    private static MensClothing instance;

    private Link linkItem;
    private Dropdown options;
    private Button addToCard;

    /** Returns the lazily initialised singleton instance of this page object. */
    public static MensClothing get() {
        if (instance != null) {
            return instance;
        }
        instance = new MensClothing().init();
        return instance;
    }

    /** Wires up the page locator and element locators; returns this for chaining. */
    private MensClothing init() {
        super.pageLocator = new Page("//*[@class='resultContainer']");
        linkItem = new Link("//*[@class='productItem'][1]//a//h3");
        options = new Dropdown("(//*[@name='dropDownGroupValues:0'])[1]");
        addToCard = new Button("//*[@class='noBorder btnAddToCart']");
        return this;
    }

    /** Clicks the first product in the result list and returns its title text. */
    public String clickFirstItem() {
        final String itemTitle = linkItem.getText();
        linkItem.click();
        return itemTitle;
    }

    /** Selects the "SMALL" value in the first options dropdown. */
    public void selectFirstValueOption() {
        options.setText("SMALL");
    }

    /** Clicks the add-to-cart button. */
    public void clickAddToCard() {
        addToCard.click();
    }
}
Java
UTF-8
1,352
1.882813
2
[]
no_license
package com.dream.city.base.model.req;

import lombok.Data;
import lombok.ToString;

import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Date;

/**
 * @author wvv
 * Player investment order (request model).
 */
@ToString
@Data
public class InvestOrderReq implements Serializable {

    private Integer id;

    /**
     * Order id
     */
    private Integer orderId;

    /** Invested amount */
    private BigDecimal orderAmount;

    /** Order state */
    private String orderState;

    private String orderName;

    private String orderNum;

    /** Whether the order is a repeat (re-invested) order */
    private Integer orderRepeat;

    /** Investment name */
    private String inName;

    /** Investment project id */
    private Integer investId;

    /** Investment limit */
    private BigDecimal inLimit;

    /** Personal income tax */
    private BigDecimal personalInTax;

    /** Enterprise income tax */
    private BigDecimal enterpriseIntax;

    /** Earnings multiple */
    private Integer inEarning;

    /** Start time */
    private Date inStart;

    /** Investment end time */
    private Date inEnd;

    private Integer inType;

    /** Player id */
    private String playerId;

    /** Player name */
    private String payerName;

    private String verifyStatus;

    /** Amount type: mt, usdt */
    private String amountType;

    private Date orderStartTime;

    private Date orderEndTime;
}
C++
UTF-8
712
2.65625
3
[]
no_license
#include <bits/stdc++.h>
using namespace std;

// Number of points shared by the boundaries of two circles
// (x1,y1,r1) and (x2,y2,r2): -1 for infinitely many (identical circles),
// otherwise 0, 1 (tangent) or 2 (intersecting). All arithmetic stays in
// integers by comparing squared distances against (r1 +/- r2)^2.
static int tangentPoints(int x1, int y1, int r1, int x2, int y2, int r2) {
	if (x1 == x2 && y1 == y2 && r1 == r2) {
		return -1; // same circle: every boundary point is shared
	}
	// Normalize so r1 >= r2; the answer is symmetric in the two circles.
	if (r1 < r2) {
		swap(x1, x2);
		swap(y1, y2);
		swap(r1, r2);
	}
	const int dist2 = (x1 - x2) * (x1 - x2) + (y1 - y2) * (y1 - y2);
	const int ar = (r1 + r2) * (r1 + r2); // squared sum of radii
	const int sr = (r1 - r2) * (r1 - r2); // squared difference of radii
	if (dist2 > ar) return 0;  // too far apart
	if (dist2 == ar) return 1; // externally tangent
	if (dist2 > sr) return 2;  // properly intersecting
	if (dist2 == sr) return 1; // internally tangent
	return 0;                  // one circle strictly inside the other
}

// Reads T test cases of "x1 y1 r1 x2 y2 r2" and prints the number of
// shared boundary points for each, one result per line.
int main(void) {
	ios::sync_with_stdio(false);
	cin.tie(NULL);

	int T;
	cin >> T;
	while (T--) {
		int x1, y1, r1, x2, y2, r2;
		cin >> x1 >> y1 >> r1 >> x2 >> y2 >> r2;
		cout << tangentPoints(x1, y1, r1, x2, y2, r2) << '\n';
	}
}
Java
UTF-8
4,644
2.078125
2
[]
no_license
package com.game.open.controller; import com.game.chat.util.JsonUtils; import com.game.common.vo.ResultVO; import com.game.login.domain.Account; import com.game.open.domain.GameDataRVO; import com.game.open.service.OpenService; import com.game.util.CommonFunUtil; import com.game.util.Constant; import com.game.util.SignUtil; import com.google.common.collect.Maps; import com.google.gson.Gson; import org.apache.http.util.TextUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; import java.util.Map; @RestController @RequestMapping("/oapi") public class OpenController { private static final Logger logger = LoggerFactory.getLogger(OpenController.class); @Autowired OpenService openService; @PostMapping("/login") public String login(@RequestParam("p") String p, @RequestParam("r") Long r, @RequestParam("appId") String appId, @RequestParam("t") String t, @RequestParam("sign") String sign ) throws Exception{ Map<String, String> params = Maps.newHashMap(); params.put("t", t); params.put("appId", appId); params.put("r", String.valueOf(r)); params.put("sign", sign); if (! SignUtil.validateSign(params)) { return CommonFunUtil.toRsp(ResultVO.error("sign check fail", Constant.RNT_CODE_SIGN_ERROR), r); } if (appId.equals("")) { return CommonFunUtil.toRsp(ResultVO.error("param error", Constant.RNT_CODE_PARAM_ERROR), r); } Account account = CommonFunUtil.toReq(p, r, Account.class); //Account account = new Gson().fromJson(p, Account.class); if (account == null) { return CommonFunUtil.toRsp(ResultVO.error("json exception", Constant.RNT_CODE_EXCEPTION), r); } //参数校验 if(! 
CommonFunUtil.checkCommonParams(account) ) { return CommonFunUtil.toRsp(ResultVO.error("param error", Constant.RNT_CODE_PARAM_ERROR), r); } return CommonFunUtil.toRsp(openService.login(account), r); } @PostMapping("/auth/token") public ResultVO authToken(@RequestParam("gameId") String gameId, @RequestParam("uId") int uId, @RequestParam("token") String token, @RequestParam("sign") String sign ) { return openService.authToken(gameId, uId, token, sign); } @PostMapping("/game/data") public String gameData(@RequestParam("p") String p, @RequestParam("r") Long r, @RequestParam("appId") String appId, @RequestParam("t") String t, @RequestParam("sign") String sign ) throws Exception { Map<String, String> params = Maps.newHashMap(); params.put("t", t); params.put("appId", appId); params.put("r", String.valueOf(r)); params.put("sign", sign); if (! SignUtil.validateSign(params)) { return CommonFunUtil.toRsp(ResultVO.error("sign check fail", Constant.RNT_CODE_SIGN_ERROR), r); } GameDataRVO gameDataRVO = CommonFunUtil.toReq(p, r, GameDataRVO.class); if (gameDataRVO == null) { return CommonFunUtil.toRsp(ResultVO.error("json exception", Constant.RNT_CODE_PARAM_ERROR), r); } logger.debug("game data:" + JsonUtils.objectToString(gameDataRVO)); //GameDataRVO gameDataRVO = JsonUtils.stringToObject(p, GameDataRVO.class); int type = gameDataRVO.getGd().getType(); if (type == 0) { return CommonFunUtil.toRsp(ResultVO.error("param error", Constant.RNT_CODE_PARAM_ERROR), r); } switch (type) { case Constant.GAME_DATA_TYPE_FACE: return CommonFunUtil.toRsp(openService.gameFace(), r); case Constant.GAME_DATA_TYPE_CONFIG: return CommonFunUtil.toRsp(openService.gameDataCfg(gameDataRVO), r); case Constant.GAME_DATA_TYPE_RANKING: return CommonFunUtil.toRsp(openService.gameRank(gameDataRVO), r); default: break; } return CommonFunUtil.toRsp(ResultVO.error("param error", Constant.RNT_CODE_PARAM_ERROR), r); } }
Java
UTF-8
855
3.28125
3
[]
no_license
package summixLinker;

import java.util.ArrayList;
import summixAssembler.TextFile;

/**
 * Drives the two linker passes over the given object files and produces the
 * final program file, relocated to the memory start chosen by the user.
 * @author Michael Pinnegar
 */
public class Linker {

    /**
     * Links the object files into a single program file.
     * @param objects     object files to process; the first one is main
     * @param memoryStart start of memory as decided by the user
     * @return the linked program file
     */
    public static TextFile processObjects(ArrayList<TextFile> objects, int memoryStart) {
        // Pass 1 resolves symbols/relocation; pass 2 emits the program file.
        LinkerPass1.processObjects(objects, memoryStart);
        return LinkerPass2.processObjects(objects);
    }
}
Markdown
UTF-8
1,843
3.484375
3
[]
no_license
# Digital-Concept-Tutorial: Number Systems & Conversions

### Author: Chlotte Crim

## Summary of Purpose

This tutorial covers the types of number systems as well as how to convert between them.

## Content Tutorial

![Number Systems](http://www.electronicsengineering.nbcafe.in/wp-content/uploads/2014/09/number-systems.png)

[Intro to Binary, Hex, & More](https://code.tutsplus.com/articles/number-systems-an-introduction-to-binary-hexadecimal-and-more--active-10848)

### The Number Systems

#### Base 10 (Decimal)
Represents any number using 10 digits (0–9).
Example: 2538₁₀ = 2×(10^3)+5×(10^2)+3×(10^1)+8×(10^0)

#### Base 2 (Binary)
Represents any number using 2 digits (0–1).
The B suffix denotes binary.
Example: 10101₂ = 10101B = 1×(2^4)+0×(2^3)+1×(2^2)+0×(2^1)+1×(2^0) = 16+4+1 = 21

#### Base 8 (Octal)
Represents any number using 8 digits (0–7).
Example: 27₈ = 2×(8^1)+7×(8^0) = 16+7 = 23

#### Base 16 (Hexadecimal)
Represents any number using 10 digits and 6 letters (0–9, A, B, C, D, E, F).
The H suffix denotes hexadecimal.
Example: 28₁₆ = 28H = 2×(16^1)+8×(16^0) = 40
Example: BC12₁₆ = BC12H = 11×(16^3)+12×(16^2)+1×(16^1)+2×(16^0) = 48146

### Number Conversion

![](https://www.electronicshub.org/wp-content/uploads/2015/05/Untitled1dd.jpg)

Click the link to view the [Tutorials Point](https://www.tutorialspoint.com/computer_logical_organization/number_system_conversion.htm) explanation of conversion between the 4 number systems.

Below is a video outlining how to convert between the four number systems.

[![Number Systems Conversion Intro](https://i.ytimg.com/vi/L2zsmYaI5ww/maxresdefault.jpg)](https://youtu.be/L2zsmYaI5ww)

## Description of Target Audience

This tutorial is intended for students beginning their journey into the world of computer systems.
Java
UTF-8
269
1.757813
2
[]
no_license
package mj.net.handler.game.douniu;

import mj.net.handler.MessageHandler;
import mj.net.message.game.douniu.DNGameStart;

/**
 * Handler contract for the Dou Niu "game start" message.
 *
 * @param <U> type of the user/session the message is dispatched to
 */
public interface DNGameStartHandler<U> extends MessageHandler<DNGameStart, U>{

	/**
	 * Handles an incoming {@link DNGameStart} message.
	 *
	 * @param msg  the game-start message
	 * @param user the user/session it targets
	 * @return presumably whether the message was handled successfully —
	 *         confirm against {@code MessageHandler}'s contract
	 */
	@Override
	public boolean handler(DNGameStart msg, U user);
}
Java
UTF-8
153
2.078125
2
[]
no_license
package template;

/**
 * Concrete {@link Account} variant for current (demand-deposit) accounts;
 * the package name suggests it belongs to a template-method example.
 */
public class CurrentAccount extends Account{

	// Prints a placeholder message ("current account interest...");
	// no real interest math is implemented here.
	public void calculateInterest() {
		System.out.println("活期利息。。。");
	}
}
Java
UTF-8
2,134
2.296875
2
[]
no_license
package com.eai.serviceimpl; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import com.eai.model.Role; import com.eai.model.User; import com.eai.model.UserDetail; import com.eai.repository.UserDetailRepository; import com.eai.repository.UserRepository; import com.eai.service.UserDetailService; @Service public class UserDetailServiceImpl implements UserDetailService { @Autowired private UserDetailRepository userDetailRepository; @Autowired private UserRepository userRepository; @Override @Transactional public UserDetail findByUserDetail(Integer idUserDetail) { return userDetailRepository.findById(idUserDetail).orElse(null); } @Override @Transactional public UserDetail saveUserDetail(UserDetail userDetail){ userDetail = userDetailRepository.save(userDetail); if(userDetail != null) { User user = new User(); BCryptPasswordEncoder passwordEncoder = new BCryptPasswordEncoder(10); String hashedPassword = passwordEncoder.encode(userDetail.getPassword()); user.setUserDetail(userDetail); user.setUserName(userDetail.getUserName()); user.setPassword(hashedPassword); user.setEnabled(true); user.setRole(new Role(new Integer("2"))); userRepository.save(user); } return userDetail; } @Override @Transactional public UserDetail updateUserDetail(UserDetail userDetail){ return userDetailRepository.save(userDetail); } @Override @Transactional public UserDetail updateUserPassword(UserDetail userDetail) { User user = userRepository.findByUserName(userDetail.getUserName()); BCryptPasswordEncoder passwordEncoder = new BCryptPasswordEncoder(10); String hashedPassword = passwordEncoder.encode(userDetail.getNewPassword()); user.setPassword(hashedPassword); userRepository.save(user); return userDetail; } }
Python
UTF-8
129
2.96875
3
[]
no_license
import datetime

# Capture the current local date/time once so both outputs refer to the
# same instant.
x = datetime.datetime.now()

# %a -> abbreviated weekday name (e.g. "Fri").
print(x.strftime("%a"))

# Other useful strftime codes:
#   %I - hour (12-hour clock)
#   %M - minute
# Example output: Friday 17:35

print(x.year)
JavaScript
UTF-8
1,831
2.5625
3
[]
no_license
/*
 * This file is part of the Sylius package.
 *
 * (c) Paweł Jędrzejewski
 *
 * For the full copyright and license information, please view the LICENSE
 * file that was distributed with this source code.
 */

/* eslint-env browser */

/**
 * Swaps the main product image whenever the selected variant changes.
 *
 * On construction it remembers the default image, subscribes to every
 * variant control (radio inputs or select options), and immediately syncs
 * the image with the current selection.
 */
class SyliusVariantImages {
  constructor() {
    // Anchor + <img> of the main product image; their initial href/src act
    // as the fallback when a variant has no dedicated image.
    this.mainImageLink = document.querySelector('[data-js-product-image]');
    this.mainImage = this.mainImageLink.querySelector('img');
    this.defaultImageLink = this.mainImageLink.getAttribute('href');
    this.defaultImageSrc = this.mainImage.getAttribute('src');

    // Re-render on every variant change.
    document.querySelectorAll('[name*="sylius_add_to_cart[cartItem][variant]"]').forEach((item) => {
      item.addEventListener('change', () => this.setImage());
    });
    this.setImage();
  }

  /**
   * Returns the active variant as the space-joined values of all checked
   * radio inputs / selected options.
   */
  getActiveVariant() {
    const items = document.querySelectorAll(`
      input[name="sylius_add_to_cart[cartItem][variant]"]:checked,
      select[name*="sylius_add_to_cart[cartItem][variant]"] option:checked
    `);

    return [...items].map(el => el.value).join(' ');
  }

  /**
   * Resolves the image pair (link + src) for the given variant, falling back
   * to the defaults captured in the constructor. The trailing space inside
   * the data-variant-options selector is intentional — it matches the
   * attribute value this markup is rendered with.
   */
  getActiveImageSrc(variant) {
    let imageLink = this.defaultImageLink;
    let imageSrc = this.defaultImageSrc;
    const item = document.querySelector(`
      .sylius-image-variants [data-variant-code="${variant}"],
      .sylius-image-variants [data-variant-options="${variant} "]
    `);

    if (item) {
      const parent = item.closest('[data-js-product-thumbnail]');
      imageLink = parent.querySelector('a').getAttribute('href');
      imageSrc = parent.querySelector('img').getAttribute('data-large-thumbnail');
    }

    return { imageLink, imageSrc };
  }

  /** Applies the active variant's resolved image to the DOM. */
  setImage() {
    const img = this.getActiveImageSrc(this.getActiveVariant());
    this.mainImageLink.setAttribute('href', img.imageLink);
    this.mainImage.setAttribute('src', img.imageSrc);
  }
}

export default SyliusVariantImages;
Java
UTF-8
290
2.0625
2
[ "Apache-2.0" ]
permissive
package org.tangence.java;

/**
 * Checked exception used by the Tangence Java bindings; carries only a
 * human-readable message.
 */
public class TangenceException extends Exception {

	/**
	 * Constructor for TangenceException.
	 * @param string String — human-readable error message
	 */
	public TangenceException(final String string) {
		super(string);
	}

	// Fixed serialization id; keep in sync if the class shape changes.
	private static final long serialVersionUID = -271328471238567238L;
}
JavaScript
UTF-8
590
3.140625
3
[ "MIT" ]
permissive
import { quotes } from './quotes.js';

// The trigger button is looked up once at module load.
const button = document.getElementById('button');

/**
 * Picks one entry of `pool` uniformly at random.
 * @param {Array<{text: string, author: string}>} pool
 * @returns {{text: string, author: string}}
 */
function getRandomQuote(pool) {
  return pool[Math.floor(Math.random() * pool.length)];
}

/**
 * Renders a randomly chosen quote (text + author) into #app.
 * @param {Array<{text: string, author: string}>} pool
 */
function renderQuote(pool) {
  const quote = getRandomQuote(pool);
  const markup = `
    <p class="quote-paragraph">${quote.text}</p>
    <p class="author-paragraph">—${quote.author}</p>
  `;
  document.getElementById('app').innerHTML = markup;
}

/** Wires the button so each click renders a fresh quote. */
function start() {
  button.addEventListener('click', () => renderQuote(quotes));
}

start();
C
UTF-8
398
3.5
4
[]
no_license
#include<stdio.h>

/*
 * Prints a fixed sample array followed by its largest element.
 * The scan keeps a running maximum, one O(n) pass.
 * Fix: the banner used to read "Arary is:".
 */
int main(void){
    int ar[] = {1, 2, 32, 42, 4, 456, 67, 34, 2};
    int len = (int)(sizeof(ar) / sizeof(ar[0]));

    printf("Array is: \n");
    for (int i = 0; i < len; i++){
        printf(" %d ", ar[i]);
    }

    /* Seed the maximum with the first element, then scan the rest. */
    int lar = ar[0];
    for (int i = 1; i < len; i++){
        if (ar[i] > lar){
            lar = ar[i];
        }
    }

    printf("\n %d \n", lar);
    return 0;
}
C
ISO-8859-1
7,590
3.21875
3
[]
no_license
#include "grid.h"
#include <time.h>
#include <stdlib.h>
#include <math.h>

/* 2048-style grid implementation.
 *
 * Tiles store the base-2 exponent of their face value (stored 1 means tile
 * "2", stored 2 means "4", ...), so merging two equal tiles just increments
 * the stored exponent, and the score adds pow(2, exponent).
 *
 * All four move directions are implemented by rotating the grid so the move
 * looks like one fixed direction, doing the work, then rotating back.
 *
 * NOTE(review): rand() is used below but srand() is never called in this
 * file — presumably seeding happens elsewhere; confirm. */

static void turn(grid g);
static void turningxtimes(grid g, int x);
static int firstRota(dir d);
static int secondRota(dir d);
static int rand_a_b(int a, int b);
static void fusion(grid g, dir d);
static void decalage(grid g, dir d);

/**
 * \brief number of anticlockwise quarter-turns needed so that direction d
 *        behaves like UP
 * \param d the direction given by the user
 */
static int firstRota(dir d) {
    switch (d) {
        case UP:
            return 0;
            break;
        case LEFT:
            return 1;
            break;
        case DOWN:
            return 2;
            break;
        default:
            return 3;
            break;
    }
}

struct grid_s {
    tile g[GRID_SIDE][GRID_SIDE]; // static array holding every tile of the grid (tiles store exponents)
    unsigned long int score; // running score
};

/**
 * \brief return an integer in the interval [a;b[
 */
static int rand_a_b(int a, int b) { // returns an integer in [a;b[
    return rand() % (b - a) + a;
}

grid new_grid() {
    grid gr = malloc(sizeof(struct grid_s)); // allocate the new grid instance
    gr->score = 0; // score starts at 0
    for (int i = 0; i < GRID_SIDE; i++) {
        for (int j = 0; j < GRID_SIDE; j++) { // every tile starts empty (0)
            gr->g[i][j] = 0;
        }
    }
    return gr;
}

bool can_move(grid g, dir d) {
    // Rotate so d behaves like the reference direction, scan each line for
    // either a mergeable pair or a gap before a tile, then rotate back.
    turningxtimes(g, firstRota(d));
    for (int i = 0; i < GRID_SIDE; i++) {
        int tmp = g->g[i][0];
        bool void_tile = (tmp == 0);
        for (int j = 1; j < GRID_SIDE; j++) {
            if (g->g[i][j] == 0)
                void_tile = true;
            else {
                if (tmp == g->g[i][j] || void_tile) {
                    turningxtimes(g, secondRota(d));
                    return true;
                }
                tmp = g->g[i][j];
            }
        }
    }
    turningxtimes(g, secondRota(d));
    return false;
}

void delete_grid(grid g) {
    free(g);
}

void copy_grid(grid src, grid dst) {
    // Deep copy: score plus every tile.
    dst->score = src->score;
    for (int i = 0; i < GRID_SIDE; i++) {
        for (int j = 0; j < GRID_SIDE; j++) {
            dst->g[i][j] = src->g[i][j];
        }
    }
}

tile get_tile(grid gr, int x, int y) {
    return gr->g[x][y];
}

void set_tile(grid gr, int x, int y, tile t) {
    gr->g[x][y] = t;
}

unsigned long int grid_score(grid g) {
    return g->score;
}

bool game_over(grid g) {
    // The game ends when no direction allows a move.
    return !(can_move(g, UP) || can_move(g, DOWN) || can_move(g, LEFT) || can_move(g, RIGHT));
}

void add_tile(grid g) {
    int x = rand_a_b(0, GRID_SIDE); // random x position in [0;GRID_SIDE[
    int y = rand_a_b(0, GRID_SIDE); // random y position in [0;GRID_SIDE[
    int alea = rand_a_b(0, 10); // draw in [0;10[ to decide tile value 2 or 4
    int val_tile = 1; // tile value, defaults to "2" (stored exponent 1)
    // Re-draw while the chosen cell is already occupied.
    // NOTE(review): this loops forever on a full grid — presumably callers
    // check game_over first; confirm.
    while (g->g[x][y] != 0) {
        x = rand_a_b(0, GRID_SIDE);
        y = rand_a_b(0, GRID_SIDE);
    }
    if (alea == 0) { // one chance in ten the new tile is a "4"
        val_tile = 2;
    }
    set_tile(g, x, y, val_tile); // place the tile in the grid
}

void play(grid gr, dir d) {
    do_move(gr, d); // perform the move in the requested direction
    add_tile(gr); // then spawn a tile at a random free cell
}

/**
 * \brief slide all tiles toward the move direction, closing the gaps
 * \param g the grid
 * \param d the direction given by the user
 */
static void decalage(grid g, dir d) {
    int y_libre;
    turningxtimes(g, firstRota(d));
    for (int i = 0; i < GRID_SIDE; i++) {
        // y_libre tracks the next free slot in the current line.
        y_libre = 0;
        for (int j = 0; j < GRID_SIDE; j++) {
            if (g->g[i][j] != 0) {
                g->g[i][y_libre] = g->g[i][j];
                y_libre++;
            }
        }
        // Clear everything behind the last compacted tile.
        for (; y_libre < GRID_SIDE; y_libre++) {
            g->g[i][y_libre] = 0;
        }
    }
    turningxtimes(g, secondRota(d));
}

/**
 * \brief merge each pair of consecutive equal tiles (once per pair) and
 *        credit the merged face value to the score
 * \param g the grid
 * \param d the direction given by the user
 */
static void fusion(grid g, dir d) {
    turningxtimes(g, firstRota(d));
    for (int x = 0; x <= GRID_SIDE - 1; x++) {
        for (int y = 0; y < GRID_SIDE - 1; y++) {
            if (g->g[x][y] != 0 && g->g[x][y] == g->g[x][y + 1]) {
                g->g[x][y] += 1;
                g->g[x][y + 1] = 0;
                g->score += pow(2, g->g[x][y]);
                y++; // skip the emptied cell so a tile merges at most once
            }
        }
    }
    turningxtimes(g, secondRota(d));
}

void do_move(grid g, dir d) {
    decalage(g, d); // compact every tile toward the requested direction
    fusion(g, d); // merge the pairs that must merge
    decalage(g, d); // re-compact to close the gaps left by the merges
}

/**
 * \brief turn the grid a quarter-turn in the anticlockwise direction
 * \param g the grid
 */
static void turn(grid g) {
    for (int x = 0; x < GRID_SIDE / 2; x++) {
        for (int y = 0; y < (GRID_SIDE + 1) / 2; y++) {
            int oldx = x;
            int oldy = y;
            int tmp1 = g->g[x][y];
            for (int i = 0; i < 4; i++) {
                /* Rotate the point a quarter circle using complex-number
                 * reasoning: if p = a+ib then p*i = -b+ia is p rotated by
                 * PI/2 anticlockwise around the origin.  The point is first
                 * translated so the grid centre is the origin
                 * (shift by -GRID_SIDE/2 on both axes), rotated, then
                 * translated back.  Net mapping, in integer indices:
                 *   new x = -old y + GRID_SIDE - 1
                 *   new y =  old x
                 * Repeating this four times walks the 4-cycle of positions,
                 * shuffling the four values in place. */
                int newx = -oldy + GRID_SIDE - 1;
                int newy = oldx;
                int tmp2 = g->g[newx][newy];
                g->g[newx][newy] = tmp1;
                tmp1 = tmp2;
                oldx = newx;
                oldy = newy;
            }
        }
    }
}

/**
 * \brief turn the grid x times
 * \param g the grid
 * \param x the number of quarter-turns to apply
 */
static void turningxtimes(grid g, int x) {
    for (int i = 0; i < x; i++) {
        turn(g);
    }
}

/**
 * \brief number of quarter-turns needed to undo a previous
 *        turningxtimes(g, firstRota(d))
 * \param d the direction given by the user
 */
static int secondRota(dir d) {
    return ((4 - firstRota(d)) % 4);
}
C++
UTF-8
1,744
2.53125
3
[]
no_license
void setup() { for(int i=5;i<9;i++) pinMode(i,OUTPUT); } void forward() { digitalWrite(5,HIGH); digitalWrite(6,LOW); digitalWrite(7,LOW); digitalWrite(8,HIGH); } void backward(){ digitalWrite(5,LOW); digitalWrite(6,HIGH); digitalWrite(7,HIGH); digitalWrite(8,LOW); } void left(){ digitalWrite(5,HIGH); digitalWrite(6,LOW); digitalWrite(7,LOW); digitalWrite(8,LOW); } void right(){ digitalWrite(5,LOW); digitalWrite(6,LOW); digitalWrite(7,LOW); digitalWrite(8,HIGH); } void clockwise(){ digitalWrite(5,HIGH); digitalWrite(6,LOW); digitalWrite(7,HIGH); digitalWrite(8,LOW); } void stopp() { digitalWrite(5,LOW); digitalWrite(6,LOW); digitalWrite(7,LOW); digitalWrite(8,LOW); } void loop(){ //Square forward(); delay(2000); right(); delay(1200); forward(); delay(2000); right(); delay(1200); forward(); delay(2000); right(); delay(1200); forward(); delay(2000); right(); delay(1200); stopp(); delay(1000); //Rectangle forward(); delay(2000); right(); delay(1200); forward(); delay(4000); right(); delay(1200); forward(); delay(2000); right(); delay(1200); forward(); delay(4000); right(); delay(1200); stopp(); delay(1000); //Hexagon forward(); delay(2000); right(); delay(680); forward(); delay(2000); right(); delay(1300); forward(); delay(2000); right(); delay(680); forward(); delay(2000); right(); delay(680); forward(); delay(2000); right(); delay(1300); forward(); delay(2000); right(); delay(680); stopp(); delay(1000); //Circle for(int i=0;i<1000;i++){ forward(); delay(100); clockwise(); delay(25); } int i=1; while(i=1) stopp(); }
Java
UTF-8
1,081
2.25
2
[]
no_license
package org.cocome.cloud.web.frontend.navigation;

/**
 * Navigation targets of the web frontend, pairing each JSF outcome with the
 * permission (role name) required to reach it; {@code null} means
 * unrestricted access.
 */
public enum NavigationElements {
	LOGIN("/login", null),
	ENTERPRISE_MAIN("/enterprise/main", null),
	STORE_MAIN("/store/main", null),
	START_SALE("/store/start_sale", "cashier"),
	ORDER_PRODUCTS("/store/order_products", "stock manager"),
	SHOW_STOCK("/store/show_stock", "stock manager"),
	STOCK_REPORT("/store/show_reports", "store manager"),
	RECEIVE_PRODUCTS("/store/receive_products", "stock manager"),
	SHOW_ENTERPRISES("/enterprise/show_enterprises", "enterprise manager"),
	CREATE_ENTERPRISE("/enterprise/create_enterprise", "enterprise manager"),
	CREATE_PRODUCT("/enterprise/create_product", "enterprise manager"),
	SHOW_PRODUCTS("/enterprise/show_products", "enterprise manager");

	// JSF navigation outcome for this target.
	private String navOutcome;
	// Role required to navigate here; null = everyone.
	private String permission;

	private NavigationElements(String navOutcome, String permission) {
		this.navOutcome = navOutcome;
		this.permission = permission;
	}

	/** Returns the JSF navigation outcome string. */
	public String getNavigationOutcome() {
		return this.navOutcome;
	}

	/** Returns the required role name, or {@code null} if unrestricted. */
	public String getNeededPermission() {
		return this.permission;
	}
}
Shell
UTF-8
2,021
4.21875
4
[]
no_license
#!/bin/bash
#
# Renames the RDS instances listed in ./<flag>/rds-<region>-<flag>.csv by
# appending "-renamed" to each DB instance identifier.
#   $1: region (ap-northeast-1 | ap-southeast-1)
#   $2: flag   (dr | az)
set -eo pipefail

# --- Variables ---------------------------------------------------------
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
REGION="$1"
FLAG="$2"

# --- Argument validation -----------------------------------------------
if [ "$#" -ne 2 ]; then
  echo "第1引数にリージョン、第2引数にフラグを指定してください"
  exit 1
fi

if [[ "$REGION" != "ap-northeast-1" && "$REGION" != "ap-southeast-1" ]]; then
  echo "第1引数にはap-northeast-1,ap-southeast-1のいずれかを指定してください。"
  exit 1
fi

if [[ "$FLAG" != "dr" && "$FLAG" != "az" ]]; then
  # Bug fix: this message previously said 第1引数 (1st argument) although
  # the flag is the 2nd argument.
  echo "第2引数にはdr,azのいずれかを指定してください。"
  exit 1
fi

# --- Load the server list ----------------------------------------------
# The original nested case statement ran the identical command in all four
# branches (the path already interpolates REGION and FLAG), so it collapses
# to a single call.  `sed 1d` drops the CSV header, `/^#/d` drops
# commented-out rows.
mapfile -t SERVER_ARRAY < <(sed 1d "${SCRIPT_DIR}/${FLAG}/rds-${REGION}-${FLAG}.csv" | sed '/^#/d')

# --- Confirmation prompt ------------------------------------------------
for array in "${SERVER_ARRAY[@]}"; do echo "$array"; done
read -r -p "上記サーバの名前を変更します。よろしいですか? (y/N): " yn
case "$yn" in
  [yY]*) ;;
  *) echo "処理を終了します." ; exit ;;
esac

function main() {
  # Helper libraries; rename_db_instance_name comes from lib/rds.
  . "${SCRIPT_DIR}/lib/common"
  . "${SCRIPT_DIR}/lib/rds"

  for i in "${SERVER_ARRAY[@]}"; do
    # First CSV column is the DB instance identifier.
    DB_INSTANCE_IDENTIFIER=$(echo "$i" | cut -d , -f 1)
    NEW_DB_INSTANCE_IDENTIFIER=${DB_INSTANCE_IDENTIFIER}-renamed

    echo "---${DB_INSTANCE_IDENTIFIER} --> ${NEW_DB_INSTANCE_IDENTIFIER} start---"
    # Rename the DB instance.
    rename_db_instance_name
    echo "---${DB_INSTANCE_IDENTIFIER} end---"
  done
}

if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
  main "$@"
fi
Python
UTF-8
2,580
2.8125
3
[]
no_license
from config.config import *
import json
import asyncio


def transfer_json(msg, method):
    """Convert between str and JSON: dumps when method is truthy, loads otherwise."""
    if method:
        return json.dumps(msg)
    else:
        return json.loads(msg)


async def handle_echo(reader, writer):
    """Per-connection handler: negotiate a JSON handshake, then echo messages.

    Protocol (as implemented below):
      1. Read one JSON request containing 'local_addr' and 'code'.
      2. Reply with 'Accept connection' or 'Refuse connection'.
      3. Echo every further message until 'Disconnect Request' or
         'Force Disconnect' is received.
    NOTE(review): even a refused peer falls through into the echo loop —
    confirm whether refusal should close the connection instead.
    """
    local_addr = writer.get_extra_info('sockname')
    # One read is assumed to hold the whole handshake JSON (<= 200 bytes).
    connect_request = await reader.read(200)
    connect_request = transfer_json(connect_request.decode(), False)
    request_addr = str(connect_request['local_addr'])
    request_code = str(connect_request['code'])
    print(request_addr+'正在请求连接:'+request_code)
    if connect_request['code'] == 'Request connection':
        # Handshake accepted: echo our address back with an accept code.
        accept_connection = {'local_addr': local_addr, 'dest_addr': connect_request['local_addr'], 'code': 'Accept '
                                                                                                           'connection'}
        accept_connection = transfer_json(accept_connection, True)
        writer.write(accept_connection.encode())
        await writer.drain()
        print('与'+request_addr+'成功建立连接')
    else:
        # Any other code is refused.
        refuse_connection = {'local_addr': local_addr, 'dest_addr': connect_request['local_addr'], 'code': 'Refuse '
                                                                                                           'connection'}
        refuse_connection = transfer_json(refuse_connection, True)
        writer.write(refuse_connection.encode())
        await writer.drain()
        print("已拒绝"+request_addr)
    while True:
        # Echo loop: one read per message (<= 1024 bytes).
        msg = await reader.read(1024)
        if msg.decode() == 'Disconnect Request':
            # Graceful disconnect: acknowledge, then close.
            writer.write('ByeBye'.encode())
            await writer.drain()
            writer.close()
            print('与'+request_addr+'的连接已断开')
            break
        elif msg.decode() == 'Force Disconnect':
            # Forced disconnect: close without acknowledging.
            writer.close()
            print('与'+request_addr+'的连接已断开')
            break
        print('接受到来自'+request_addr+'的消息:'+msg.decode())
        re_msg = 'response start!\n'+msg.decode()+'\nresponse end!'
        writer.write(re_msg.encode())
        await writer.drain()
        print('回复给'+request_addr+'的消息:\n'+re_msg)


async def main():
    # Client_Ip / Client_Port come from config.config (star import);
    # presumably lists of bind addresses/ports — confirm against the config.
    server = await asyncio.start_server(handle_echo, Client_Ip[0], Client_Port[0])
    address = server.sockets[0].getsockname()
    print(f'Serving on {address}')
    async with server:
        await server.serve_forever()


def open_server_center():
    # Blocking entry point: runs the server until cancelled.
    asyncio.run(main())


if __name__ == '__main__':
    open_server_center()
Python
UTF-8
1,322
3.578125
4
[]
no_license
# Demonstration of set.difference_update().
#
# difference_update() mutates the receiver in place, removing every element
# that appears in *any* of the argument sets.  Compare with difference(),
# which returns a new set and leaves the receiver untouched; the same
# method/​*_update pairing exists for union, intersection, etc.

set_a = {"a", "b", "c", 1, 2, 3}
set_b = {"c", "d", "e", 3, 4, 5}
set_c = {3, 4, 5}

# Remove from set_b everything found in set_a or set_c.
# set_a and set_c are only read here and keep their contents.
set_b.difference_update(set_a, set_c)

print(set_a)
print(set_b)
print(set_c)
C++
UTF-8
2,485
3.09375
3
[]
no_license
// // Tree.cpp // CM // // Created by Yuantong Ding on 12/14/14. // Copyright (c) 2014 Yuantong Ding. All rights reserved. // #include "Tree.h" void Tree::setRoot(int name, int type){ _root->_name = name; _root->_type = type; _root->_weight = 1; _root->_parent = NULL; } void Tree::addLineage(vector<CellIndexType> lineage){ int start = lineage.size()-4; if (lineage[start]==_root->_name) { Node *current = _root; bool skip = false; for (int i=start-2; i>=0; i-=2) { for (int j=0; j<current->_children.size(); j++) { if (current->_children[j]->_name == lineage[i]) { skip = true; current = current->_children[j]; break; } } if (!skip) { Node *add; add->_name = lineage[i]; add->_type = lineage[i+1]; add->_weight = 1; add->_parent = current; current->_children.push_back(add); } } } } void Tree::compress(Node * node){ Node *current = node; if (current->_children.size()==0) { return; } if (current->_children.size()==1 && current->_type == current->_children[0]->_type) { Node * p = current->_parent; Node * c = current->_children[0]; current->_children[0]->_weight += current->_weight; current->_children[0]->_parent = current->_parent; for (int i=0; i<p->_children.size(); i++) { if (p->_children[i]->_name == current->_name) { p->_children[i] = current->_children[0]; break; } } current->_parent = NULL; current->_children[0] = NULL; delete current; current = p; }else{ for (int i=0; i<current->_children.size(); i++) { compress(current->_children[i]); } } } void Tree::compression(){ compress(_root); } void Tree::print(Node * node){ if (node->_children.size()==0) { cout<<node->_name<<":"<<node->_type<<":"<<node->_weight; return; } for (int i=0; i<node->_children.size(); i++){ if (i==0){ cout<<"("; } print(node->_children[i]); cout <<", "; if (i==node->_children.size()-1){ cout<<")"; } } cout<<node->_name<<":"<<node->_type<<":"<<node->_weight; } void Tree::printTree(){ print(_root); }
Java
UTF-8
1,233
3.9375
4
[]
no_license
package com.koreait.downcasting; class Car { private String model; public Car(String model) { super(); this.model = model; } public void drive() { System.out.println(model + "타고 드라이브중 ."); } } class EV extends Car { private int battery; private final int FULL_BATTERY = 80; public EV(String model, int battery) { super(model); this.battery = battery; } public void charging(int battery) { this.battery += battery; if (this.battery > FULL_BATTERY) { this.battery = FULL_BATTERY; } } } class Hybrid extends EV { private int oil; private final int FULL_OIL = 50; public Hybrid(String model, int battery, int oil) { super(model, battery); this.oil = oil; } public void addOil(int oil) { this.oil += oil; if (this.oil > FULL_OIL) { this.oil = FULL_OIL; } } } public class Ex02_Car { public static void main(String[] args) { Car[] motors = new Car[2]; motors[0] = new EV("bmwi", 50); motors[1] = new Hybrid("sonata", 30, 50); for (Car car : motors) { car.drive(); // 모든 Car가능 if (car instanceof EV) { ((EV) car).charging(10); } else if (car instanceof Hybrid) { ((Hybrid) car).addOil(20); } }//for } }
Go
UTF-8
4,252
2.71875
3
[]
no_license
// @Title // @Description // @Author Niels 2020/5/27 package glog import ( "archive/zip" "io" "io/ioutil" "log" "os" "sort" "strconv" "strings" "time" ) var cfg *LogCfg var running bool func restartTask(newCfg *LogCfg) { cfg = newCfg if running { return } //date按天、week按周、month按月、none不生成新的文件 //自动加载配置文件 go func() { for { now := time.Now() // 计算下一个零点 next := now.Add(time.Hour * 24) next = time.Date(next.Year(), next.Month(), next.Day(), 0, 0, 0, 0, next.Location()) t := time.NewTimer(next.Sub(now.Add(time.Minute))) <-t.C fileRolling() time.Sleep(time.Minute * 1) } }() ticker := time.NewTicker(time.Second * 5) go func() { for { listenFileSize() <-ticker.C } }() running = true } func fileRolling() { if cfg == nil { return } for key, c := range cfg.LoggerCfgs { name := cfg.Root + c.File backupLog(name, c) GetLogger(key).refreshWriter(cfg.Root, c) } } func listenFileSize() { if cfg == nil { return } for key, c := range cfg.LoggerCfgs { name := cfg.Root + c.File fileInfo, err := os.Stat(name) if err != nil { continue } size := fileInfo.Size() if size >= getRealSize(c.MaxFileSize) { backupLog(name, c) GetLogger(key).refreshWriter(cfg.Root, c) } } } func backupLog(name string, c *LoggerCfg) { nextInfo := getNextInfo(c.File, c.MaxBackupIndex, c.Compress) bkName := cfg.Root + c.File + "." + nextInfo.Datestr + "." 
+ strconv.Itoa(nextInfo.Index) err := os.Rename(name, bkName) os.Create(name) if err != nil { log.Print(err.Error()) } if c.Compress { zipFile(bkName) } for _, file := range nextInfo.Deleting { os.Remove(cfg.Root + file) } } func zipFile(name string) { err := CompressFile(name+".zip", name) if err != nil { log.Println(err.Error()) return } os.Remove(name) } //压缩文件Src到Dst func CompressFile(Dst string, Src string) error { newfile, err := os.Create(Dst) if err != nil { return err } defer newfile.Close() file, err := os.Open(Src) if err != nil { return err } // 打开:zip文件 archive := zip.NewWriter(newfile) defer archive.Close() info, _ := file.Stat() header, _ := zip.FileInfoHeader(info) // 设置:zip的文件压缩算法 header.Method = zip.Deflate // 创建:压缩包头部信息 writer, _ := archive.CreateHeader(header) io.Copy(writer, file) return nil } type NextInfo struct { Deleting []string Datestr string Index int } type LogFileInfo struct { Datestr string Index int Name string } func getNextInfo(fileName string, maxIndex int, compress bool) *NextInfo { now := time.Now() datestr := now.Format("2006-01-02") logsList := []*LogFileInfo{} files, _ := ioutil.ReadDir(cfg.Root) for _, file := range files { name := file.Name() if strings.HasPrefix(name, fileName) { val := strings.Split(name, ".") if len(val) < 4 { continue } index, _ := strconv.Atoi(val[3]) info := &LogFileInfo{ Datestr: val[2], Index: index, Name: name, } logsList = append(logsList, info) } } sort.Slice(logsList, func(i, j int) bool { ei := logsList[i] ej := logsList[j] if ei.Datestr == ej.Datestr { return ei.Index > ej.Index } return ei.Datestr > ej.Datestr }) length := len(logsList) next := &NextInfo{ Deleting: []string{}, Datestr: datestr, Index: 0, } if length == 0 { return next } newest := logsList[0] if newest.Datestr == datestr { next.Index = newest.Index + 1 } for i := maxIndex - 1; i < length; i++ { next.Deleting = append(next.Deleting, logsList[i].Name) } return next } func isFileExists(name string) bool { _, err := os.Stat(name) 
if err != nil { if os.IsNotExist(err) { return false } } return true } func getRealSize(s string) int64 { s = strings.ToUpper(s) str := "" multiple := 1 if strings.HasSuffix(s, "KB") { str = "KB" multiple = 1024 } else if strings.HasSuffix(s, "MB") { str = "MB" multiple = 1024 * 1024 } else if strings.HasSuffix(s, "GB") { str = "KB" multiple = 1024 * 1024 * 1024 } s = strings.Replace(s, str, "", 1) val, err := strconv.Atoi(s) if nil != err { return 0 } return int64(val * multiple) }
C#
UTF-8
1,763
2.75
3
[]
no_license
using OpenCvSharp; using System; using System.Collections.Concurrent; using System.Collections.Generic; using System.Diagnostics; using System.Text; using System.Threading.Tasks; namespace genshinbot.diag { public class CvThread { static bool running; static ConcurrentQueue<Action> run = new ConcurrentQueue<Action>(); private static Task tsk; static Dictionary<string, Mat> updates = new Dictionary<string, Mat>(); public static double MaxFps = 24; public static void Run() { Stopwatch sw = new Stopwatch(); sw.Start(); while (running) { lock (updates) { foreach (var entry in updates) { Cv2.ImShow(entry.Key, entry.Value); } updates.Clear(); } while (!run.IsEmpty) { Debug.Assert(run.TryDequeue(out var action)); action(); } var msPerFrame = 1000 / MaxFps; Cv2.WaitKey((int)Math.Max(1, msPerFrame - sw.ElapsedMilliseconds)); sw.Restart(); } } public static void ImShow(string name, Mat m) { lock (updates) updates[name] = m; } public static void Invoke(Action a) { run.Enqueue(a); } public static void Stop() { if (!running) return; running = false; tsk.Wait(); } static CvThread() { running = true; tsk = Task.Run(Run); } } }
PHP
UTF-8
7,012
2.84375
3
[]
no_license
<?php // TODO FINISH UP: // -------------------------------------------------- // time and phone number validation plus errors // get date and time limiter code from Rob // get UI code from rob or randall. Use it if needed // fix "between DATE and DATE" // Silence warnings error_reporting(E_ALL ^ E_WARNING); ?> <!DOCTYPE html> <html lang = "en"> <head> <title> User Registration Form </title> <?php include('includes/header.html'); ?> <link rel="stylesheet" href="css/finalstyles.css"> <!-- css/styles.css --> </head> <body> <div> <!--<h1> Study Participant Application </h1> --> <?php // THIS PAGE IS THE MEAT OF IT ALL // Pulls together all included files $noerror = " "; $errorarray = array("noerror" => $noerror); $error = 0; $errorcodes = array("noerror" => 1, "emailchar" => 0, "noemail" => 0, "namechar" => 0, "noname" => 0, "pastdate" => 0, "nodate" => 0, "dbemail" => 0, "dbname" => 0, "dbdate" => 0, "nophone" => 0, "badphone" => 0); if ($_SERVER['REQUEST_METHOD'] == 'POST'){ // save input into new variables // strip away HTML tags and trim whitespace for security purposes $email = trim(htmlspecialchars($_POST['email'])); $name = trim(htmlspecialchars($_POST['name'])); $date = trim(htmlspecialchars($_POST['date'])); $time = trim(htmlspecialchars($_POST['time'])); $phone = trim(htmlspecialchars($_POST['phone'])); // set error indicator to FALSE $problem = FALSE; // create Error message variables $emailchar = '<div class = "error"> ERROR: Email is invalid or contains invalid characters. Please try again. </div>'; $noemail = '<div class = "error"> This field is required </div>'; $namechar = '<div class = "error"> ERROR: Name contains invalid characters. Please try again </div>'; $noname = '<div class = "error"> This field is required </div>'; $nodate = '<div class = "error"> This field is required </div>'; $pastdate = '<div class = "error"> ERROR: The date is in the past. Please choose a day in the future. 
</div>'; $dbemail = '<div class = "error"> EMAIL ALREADY REGISTERED. Please use a different email. </div>'; $dbname = '<div class = "error">NAME ALREADY REGISTERED. Please pick a different name. </div>'; $dbdate = '<div class = "error"> ERROR: Time slot already chosen for ' . $date . " @ " . $time . "</div>"; $nophone = '<div class = "error"> This field is required </div>'; $badphone = '<div class = "error"> Invalid Phone Number </div>'; $errorarray += ["emailchar" => $emailchar]; $errorarray += ["noemail" => $noemail]; $errorarray += ["namechar" => $namechar]; $errorarray += ["noname" => $noname]; $errorarray += ["pastdate" => $pastdate]; $errorarray += ["nodate" => $nodate]; $errorarray += ["dbemail" => $dbemail]; $errorarray += ["dbdate" =>$dbdate]; $errorarray += ["dbname" => $dbname]; $errorarray += ["badphone" => $badphone]; $errorarray += ["nophone" => $nophone]; // set to access 'no error' designation by default //****************EMAIL***************** // validate email // if email submitted run further validation if (!empty($email)){ // make sure email has only one '@' sign if (substr_count($email, '@') != 1) { // set error indicators for TRUE $problem = TRUE; $error = 1; $errorcodes['emailchar'] = 1; $errorcodes['noerror'] = 0; } } else { // set error indicator to TRUE $problem = TRUE; $errorcodes['noemail'] = 1; $errorcodes['noerror'] = 0; } //****************NAME***************** // validate email // if email submitted run further validation if (!empty($name)){ //convert string to array, $charArray = str_split($name); // if ord(letter) is not one of the specified, throw error // Allow dash, space, period, apostrophes, all upper case, all lower case. 
ASCII reference conditional // iterate over string foreach ($charArray as $char){ if (!( ((ord($char) >= 65) && (ord($char) <=90)) || ((ord($char) >= 97) && (ord($char) <=122)) || (ord($char) == 39) || (ord($char) == 32) || ( (ord($char) >= 44) && (ord($char) <=46) ) )){ $problem = TRUE; $errorcodes['namechar'] = 1; $errorcodes['noerror'] = 0; break; } } } else { // Name not submitted $problem = TRUE; $errorcodes['noname'] = 1; $errorcodes['noerror'] = 0; } // *********DATE CHECK********** if (!empty($date)){ $now = new DateTime(); $datetime = new DateTime($date); // check to see if date is in the past // TODO: add functionality to limit the future if ($datetime < $now){ $problem = TRUE; $errorcodes['pastdate'] = 1; $errorcodes['noerror'] = 0; } } else { // include "email not submitted error" // for now, just print to screen //print '<div class = "error"> ERROR: Date not submitted. Please enter Date </div>'; // set error indicator to TRUE //$nodate = TRUE; //$error = 6; $problem = TRUE; $errorcodes['nodate'] = 1; $errorcodes['noerror'] = 0; } if (!empty($phone)){ // check to see if date is in the past if (!is_numeric($phone)){ $problem = TRUE; $errorcodes['badphone'] = 1; $errorcodes['noerror'] = 0; } } else { // set error indicator to TRUE $problem = TRUE; $errorcodes['nophone'] = 1; $errorcodes['noerror'] = 0; } //***************DATABASE STUFFZ******************** // INCLUDE SQL connect files for security purposes //include('includes/sql/db_connect.php'); DELETE THIS include ('includes/sql/db_validation.php'); // if no issues, add to database, send email, send message if (!$problem) { // Write user info to database include('includes/sql/db_write.php'); // inform user they have been added to study // print "<div class = 'admin-master'><div class = 'admin-inside'> <h1> SUCCESS! </h1> <br/> Congratulations " . $name . ", you have successfully registered for this study! Your appointment is scheduled for " . $date . " @ " . $time . ". 
If you need to change or cancel your appointment for any reason, please do not hesitate to contact our office. <br/> <br/> Kindly keep this page for your records. Thank you so much for your participation! </div></div>"; // create message and send email $message = "Congratulations " . $name . ", you have successfully registered for this study! Your appointment is scheduled for " . $date . " @ " . $time . ". If you need to change or cancel your appointment for any reason, please do not hesitate to contact our office. <br/> <br/> Kindly keep this page for your records. Thank you so much for your participation!"; mail($email, "Thank you for your participation!",$message, "maxegillman@gmail.com"); } //*******SHOW FORM ($PROBLEM = YES)******************* // also include form if there is an error (problem) // error message will appear below input box if ($problem) { include('includes/user_form.php'); } // ELSE form not processed yet at all, show form } else { include('includes/user_form.php'); } ?> </div> </body> </html>
JavaScript
UTF-8
2,503
2.578125
3
[]
no_license
//app.js App({ onLaunch: function () { // 展示本地存储能力 var logs = wx.getStorageSync('logs') || [] logs.unshift(Date.now()) wx.setStorageSync('logs', logs) // 登录 wx.login({ success: res => { // 发送 res.code 到后台换取 openId, sessionKey, unionId } }) // 获取用户信息 wx.getSetting({ success: res => { if (res.authSetting['scope.userInfo']) { // 已经授权,可以直接调用 getUserInfo 获取头像昵称,不会弹框 wx.getUserInfo({ success: res => { // 可以将 res 发送给后台解码出 unionId this.globalData.userInfo = res.userInfo // 由于 getUserInfo 是网络请求,可能会在 Page.onLoad 之后才返回 // 所以此处加入 callback 以防止这种情况 if (this.userInfoReadyCallback) { this.userInfoReadyCallback(res) } } }) } } }) }, globalData: { userInfo: null, requestURL: "http://localhost:8080", userSession: 0, //用户登录sessionKey seconds: 0, hours: 0, minutes: 0, timer: '', bno: 0, lno: 0, }, checkLogin: function(e){ if (this.globalData.userSession == 0) { return false; }else{ return true; } }, startCount: function(bikeno, leaseno){ // this.setGlobalData({ // bno: bikeno, // lno: leaseno // }) this.globalData.bno = bikeno; this.globalData.lno = leaseno; // 初始化计时器 let s = 0; let m = 0; let h = 0; // 计时开始 this.globalData.timer = setInterval(() => { this.globalData.seconds = s++; if (s == 60) { s = 0; m++; //等待一秒 setTimeout(() => { this.globalData.minutes = m; }, 1000) if (m == 60) { m = 0; h++ setTimeout(() => { this.globalData.h = h; }, 1000) } }; }, 1000) }, cancelCount: function(){ clearInterval(this.globalData.timer); // this.setData({ // seconds: 0, // minutes: 0, // hours: 0, // bikeno: 0, // leaseno: 0, // timer: '' // }) this.globalData.seconds = 0; this.globalData.minutes = 0; this.globalData.hours = 0; this.globalData.bno = 0; this.globalData.lno = 0; this.globalData.timer = '' } })
C++
UTF-8
1,518
2.890625
3
[]
no_license
class Solution { public: string getPermutation(int n, int k) { // Start typing your C/C++ solution below // DO NOT write int main() function assert (k >= 1); int total = 1; for (int i=2; i<=n; ++i) total *= i; k = k % total; if (k == 0) k = total; string dig = "0123456789"; string str; for (int i=1; i<=n; ++i) str.push_back(dig[i]); if (k < total - k + 1) { for (int i=2; i<=k; ++i) { next_permutation(str.begin(), str.end()); } } else { k = total + 1 - k; for (int i=1; i<=k; ++i) { prev_permutation(str.begin(), str.end()); } } return str; } }; class Solution { public: string getPermutation(int n, int k) { // Start typing your C/C++ solution below // DO NOT write int main() function string res; int factorial = 1; for (int i=1; i<=n; ++i) { res.push_back('0' + i); factorial *= i; } k--; k %= factorial; for (int i=0; i<n; ++i) { factorial /= (n-i); int index = k / factorial; k %= factorial; char temp = res[index]; for (int j=index+1; j<n-i; ++j) { res[j-1] = res[j]; } res[n-i-1] = temp; } reverse(res.begin(), res.end()); return res; } };
Go
UTF-8
837
2.625
3
[]
no_license
package sql import ( "testing" ) func TestSQL_IndexNormal(t *testing.T) { man, err := New(NewConfig()) if err != nil { t.Fatal(err) } defer func() { if err = man.Shutdown(); err != nil { t.Fatal(err) } }() err = man.IndexOffer( map[string]string{ "abc.com/topic": "abc.com/topic_hub", }) if err != nil { t.Fatal(err) } } func TestSQL_IndexNullElements(t *testing.T) { man, err := New(NewConfig()) if err != nil { t.Fatal(err) } defer func() { if err = man.Shutdown(); err != nil { t.Fatal(err) } }() err = man.IndexOffer( map[string]string{ "abc.com/topic": "", }) if err == nil { t.Fatal("Was able to index a null hub url") } err = man.IndexOffer( map[string]string{ "": "abc.com/topic_hub_url", }) if err == nil { t.Fatal("Was able to index a null topic url") } }
TypeScript
UTF-8
2,288
2.859375
3
[]
no_license
import { ThunkAction } from "redux-thunk"; import { AppState } from "./rootReducer"; import { Action } from "@reduxjs/toolkit"; import { getImageFolderSuccessful } from "./imagesSlice"; import { IImageFolder, IPhoto } from "../types"; const shufflePhotos = (photoArray: IPhoto[]) => { var currentIndex = photoArray.length, temporaryValue, randomIndex; while (0 !== currentIndex) { randomIndex = Math.floor(Math.random() * currentIndex); currentIndex -= 1; temporaryValue = photoArray[currentIndex]; photoArray[currentIndex] = photoArray[randomIndex]; photoArray[randomIndex] = temporaryValue; } return photoArray; } export const getImageFolders = (): ThunkAction<void, AppState, unknown, Action<string>> => async dispatch => { try { let imageFolderList: IImageFolder[] = []; const response = await fetch(`/images/imageManifest.txt`); const responseText = await response.text(); const imageDetails = responseText.split(/\r?\n/); let folders = imageDetails.map(imageDetail => { const imagePathSections = imageDetail.split("/"); return imagePathSections[0]; }); folders = folders.filter((value, index, array) => { return array.indexOf(value) === index && value !== ""; }); // Dedupe imageFolderList = folders.map(folder => { const imagesInFolder = imageDetails.filter(imageDetail => { const imagePathSections = imageDetail.split("/"); return imagePathSections[0] === folder; }) let imageList = imagesInFolder.map(imageInFolder => { const imagePathSections = imageInFolder.split("/"); const imageDetail = imagePathSections[1].split(" "); //if (imageDetail[0] && imageDetail[0].endsWith(".jpg")) { const image: IPhoto = { src: `/images/${folder}/${imageDetail[0]}`, width: parseInt(imageDetail[1]), height: parseInt(imageDetail[2]) }; return image; //} }); const imageFolder: IImageFolder = { folderName: folder, imageList: shufflePhotos(imageList as IPhoto[]) }; return imageFolder; }); dispatch(getImageFolderSuccessful(imageFolderList)); } catch { console.log("Image folder retrieval failure"); } };
Java
UTF-8
15,456
2.34375
2
[]
no_license
package com.zipingfang.aihuan.utils; import android.app.Activity; import android.content.ContentValues; import android.content.Context; import android.content.Intent; import android.database.Cursor; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.graphics.Matrix; import android.media.ExifInterface; import android.net.Uri; import android.os.Bundle; import android.os.Handler; import android.provider.MediaStore; import android.provider.MediaStore.MediaColumns; import android.support.v4.app.Fragment; import android.text.TextUtils; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.text.SimpleDateFormat; import java.util.Date; import java.util.Locale; /** * 拍照或选取 * * @ClassName: PhotoUtil * @Description: * @author heiyue heiyue623@126.com * @date 2014-6-18 下午3:47:32 * */ public class PhotoUtil { /** * 外置存储拍照请求 */ // public static final int REQUEST_IMAGE_CAPTURE_OUTPUT = 0x101; /** * 图片压缩成功 */ private static final int IMAGE_COMPRESS_SUCCESS = 200; /** * 开始压缩图片 */ private static final int IMAGE_COMPRESS_START = 100; /** * 图片压缩失败 */ private static final int IMAGE_COMPRESS_FAIL = 404; /** * @desc <pre> * 旋转图片 * </pre> * @author Weiliang Hu * @date 2013-9-18 * @param angle * @param bitmap * @return */ public static Bitmap rotaingImageView(int angle, Bitmap bitmap) { // 旋转图片 动作 Matrix matrix = new Matrix(); matrix.postRotate(angle); System.out.println("angle2=" + angle); // 创建新的图片 Bitmap resizedBitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true); bitmap.recycle(); return resizedBitmap; } /** * 拍照并保存到指定路径 * * @param activity * @param cacheDir 拍照存放的路径文件夹 * @param imageName * 要保存的文件名,如果为空,则默认生成以日期个时代文件名 * @return 文件保存的路径uri */ public static Uri takePhotoCustomerPath(Activity activity, String cacheDir, String imageName, int requestCode) { Intent intent = new 
Intent(MediaStore.ACTION_IMAGE_CAPTURE); if (TextUtils.isEmpty(imageName)) { imageName = createDefaultName(); } File file = new File(cacheDir); // 路径不存在则创建路径 if (!file.exists() && file.isDirectory()) { file.mkdirs(); } ContentValues contentValues = new ContentValues(1); contentValues.put(MediaStore.Images.Media.DATA, new File(cacheDir + imageName).getAbsolutePath()); Uri uri = activity.getContentResolver().insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI,contentValues); // Uri uri = Uri.fromFile(new File(cacheDir + imageName)); intent.putExtra(MediaStore.EXTRA_OUTPUT, uri); activity.startActivityForResult(intent, requestCode); return uri; } /** * 默认创建以时间格式的图片名称 * * @return */ public static String createDefaultName() { SimpleDateFormat simpleDateFormat = new SimpleDateFormat( "yyyyMMddHHmmss", Locale.getDefault()); return simpleDateFormat.format(new Date(System.currentTimeMillis())) + ".jpg"; } /** * 打开相册 * * @param activity * @param requestCode */ public static void pickPhoto(Activity activity, int requestCode) { // Intent intent = new Intent(Intent.ACTION_GET_CONTENT); // intent.setType("image/*"); // 此方法比较兼容 Intent intent = new Intent(Intent.ACTION_PICK, MediaStore.Images.Media.EXTERNAL_CONTENT_URI); activity.startActivityForResult(intent, requestCode); } /** * 打开相册 * * @param activity * @param requestCode */ public static void pickPhoto(Fragment activity, int requestCode) { // Intent intent = new Intent(Intent.ACTION_GET_CONTENT); // intent.setType("image/*"); // 此方法比较兼容 Intent intent = new Intent(Intent.ACTION_PICK, MediaStore.Images.Media.EXTERNAL_CONTENT_URI); activity.startActivityForResult(intent, requestCode); } /** * 打开相机 */ public static void openCamera(Activity activity, int requestCode) { Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE); activity.startActivityForResult(intent, requestCode); } /** * 打开相机 */ public static void openCamera(Fragment activity, int requestCode) { Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE); 
activity.startActivityForResult(intent, requestCode); } /** * 拍照获取的结果 * * @param data * @return */ public static Bitmap getBitmapFromResult(Intent data) { if (data != null) { Bundle bundle = data.getExtras(); if (bundle != null) { Bitmap bitmap = (Bitmap) bundle.get("data"); // 获取相机返回的数据,并转换为Bitmap图片格式 return bitmap; } } return null; } /** * 打开图片裁剪 * * @param activity * @param uri * @param outputX * 输出的宽 * @param outputY * 输出的高 * @param requestCode */ public static void openCropImage(Activity activity, Uri uri, int outputX, int outputY, int requestCode) { // 裁剪图片意图 Intent intent = new Intent("com.android.camera.action.CROP"); intent.setDataAndType(uri, "image/*"); intent.putExtra("crop", "true"); // 裁剪框的比例,1:1 intent.putExtra("aspectX", 1); intent.putExtra("aspectY", 1); // 裁剪后输出图片的尺寸大小 intent.putExtra("outputX", outputX); intent.putExtra("outputY", outputY); // 图片格式 intent.putExtra("outputFormat", "JPEG"); intent.putExtra("noFaceDetection", true); intent.putExtra("return-data", true); activity.startActivityForResult(intent, requestCode); } /** * 打开图片裁剪 * * @param activity * @param uri * @param outputX * 输出的宽 * @param outputY * 输出的高 * @param requestCode */ public static void openCropImage(Activity activity, Uri uri, Uri outUri, int outputX, int outputY, int requestCode) { // 裁剪图片意图 Intent intent = new Intent("com.android.camera.action.CROP"); intent.setDataAndType(uri, "image/*"); intent.putExtra("crop", "true"); // 裁剪框的比例,1:1 intent.putExtra("aspectX", 1); intent.putExtra("aspectY", 1); intent.putExtra("scale", true); intent.putExtra("scaleUpIfNeeded", true); // 裁剪后输出图片的尺寸大小 intent.putExtra("outputX", outputX); intent.putExtra("outputY", outputY); // 图片格式 intent.putExtra("outputFormat", "JPEG"); intent.putExtra("noFaceDetection", true); intent.putExtra("return-data", false); intent.putExtra(MediaStore.EXTRA_OUTPUT, outUri); activity.startActivityForResult(intent, requestCode); } /** * 打开图片裁剪 * * @param activity * @param uri * @param outputX * 输出的宽 * @param outputY 
* 输出的高 * @param requestCode */ public static void openCropImage(Fragment activity, Uri uri, int outputX, int outputY, int requestCode) { // 裁剪图片意图 Intent intent = new Intent("com.android.camera.action.CROP"); intent.setDataAndType(uri, "image/*"); intent.putExtra("crop", "true"); // 裁剪框的比例,1:1 intent.putExtra("aspectX", 1); intent.putExtra("aspectY", 1); // 裁剪后输出图片的尺寸大小 intent.putExtra("outputX", outputX); intent.putExtra("outputY", outputY); // 图片格式 intent.putExtra("outputFormat", "JPEG"); intent.putExtra("noFaceDetection", true); intent.putExtra("return-data", true); activity.startActivityForResult(intent, requestCode); } /** * 压缩图片进度回调 * * @ClassName: ImageZoomCallBack * @Description: * @author heiyue heiyue623@126.com * @date 2014-6-18 下午5:35:36 * */ public interface ImageZoomCallBack { void onImgZoomStart(); void onImgZoomSuccess(String newPath); void onImgZoomFail(); } /** * 压缩图片 * * @param context * @param imageOldPath 原文件路径 * @param newPath 新的文件路径 * @param width 图片最大的宽度 * @param height 图片最大的高度 * @param quality 图书压缩的质量30 * -100 数值越大压缩质量越高 * @param callBack * 压缩的回调 */ public static void zoomImage(final Context context, final Uri imageOldPath, final String newPath, final int width, final int height, final int quality, final ImageZoomCallBack callBack, final boolean needRemote) { final Handler handler = new Handler() { @Override public void handleMessage(android.os.Message msg) { switch (msg.what) { case IMAGE_COMPRESS_START: if (callBack != null) { callBack.onImgZoomStart(); } break; case IMAGE_COMPRESS_SUCCESS: if (callBack != null) { callBack.onImgZoomSuccess(newPath); } break; case IMAGE_COMPRESS_FAIL: if (callBack != null) { callBack.onImgZoomFail(); } break; default: break; } }; }; new Thread(new Runnable() { @Override public void run() { handler.sendEmptyMessage(IMAGE_COMPRESS_START); try { Bitmap pathBitmap = getPathBitmap(context, imageOldPath, width, height); if (pathBitmap != null) { boolean saveSuccess = saveImageFileByBitmap(newPath, quality, 
pathBitmap, needRemote); if (saveSuccess) { handler.sendEmptyMessage(IMAGE_COMPRESS_SUCCESS); } else { handler.sendEmptyMessage(IMAGE_COMPRESS_FAIL); } } } catch (Exception e) { handler.sendEmptyMessage(IMAGE_COMPRESS_FAIL); e.printStackTrace(); } } }).start(); } /** * 相册获取图片路径 * * @param context * @param data * @return */ public static Uri getPhotoPath(Context context, Intent data) { if (data == null) { return null; } Uri path = null; Uri uri = data.getData(); if (uri != null) { Cursor c = null; try { String[] filePathColumns = { MediaColumns.DATA }; c = context.getContentResolver().query(uri, filePathColumns, null, null, null); c.moveToFirst(); int columnIndex = c.getColumnIndex(filePathColumns[0]); String imagePath = c.getString(columnIndex); if (imagePath != null) { path = Uri.fromFile(new File(imagePath)); } } catch (Exception e) { } finally { if (c != null) { c.close(); } } } return path; } /** * 显示需要压缩大图片大小 * * @param context * @param imageFilePath * @param dw * 需要压缩的宽度 * @param dh * 需要压缩高度 * @return * @throws FileNotFoundException */ private static Bitmap getPathBitmap(Context context, Uri imageFilePath, int dw, int dh) throws FileNotFoundException { // 获取屏幕的宽和高 /** * 为了计算缩放的比例,我们需要获取整个图片的尺寸,而不是图片 * BitmapFactory.Options类中有一个布尔型变量inJustDecodeBounds,将其设置为true * 这样,我们获取到的就是图片的尺寸,而不用加载图片了。 * 当我们设置这个值的时候,我们接着就可以从BitmapFactory.Options的outWidth和outHeight中获取到值 */ BitmapFactory.Options op = new BitmapFactory.Options(); op.inJustDecodeBounds = true; // 由于使用了MediaStore存储,这里根据URI获取输入流的形式 Bitmap pic = BitmapFactory.decodeStream(context.getContentResolver() .openInputStream(imageFilePath), null, op); int wRatio = (int) Math.ceil(op.outWidth / (float) dw); // 计算宽度比例 int hRatio = (int) Math.ceil(op.outHeight / (float) dh); // 计算高度比例 /** * 接下来,我们就需要判断是否需要缩放以及到底对宽还是高进行缩放。 如果高和宽不是全都超出了屏幕,那么无需缩放。 * 如果高和宽都超出了屏幕大小,则如何选择缩放呢》 这需要判断wRatio和hRatio的大小 * 大的一个将被缩放,因为缩放大的时,小的应该自动进行同比率缩放。 缩放使用的还是inSampleSize变量 */ if (wRatio > 1 && hRatio > 1) { if (wRatio >= hRatio) { op.inSampleSize = 
wRatio; } else { op.inSampleSize = hRatio; } } op.inJustDecodeBounds = false; // 注意这里,一定要设置为false,因为上面我们将其设置为true来获取图片尺寸了 pic = BitmapFactory.decodeStream(context.getContentResolver() .openInputStream(imageFilePath), null, op); return pic; } /** * JPG格式保存压缩图片 * * @param savaFilePath * 图片保存路径 * @param quality * 压缩比例 最小30 最大100 * @param bitmap * @return */ public static boolean saveImageFileByBitmap(String savaFilePath, int quality, Bitmap bitmap, boolean needRemote) { BufferedOutputStream bos = null; try { // 判断图片是否旋转了,旋转图片 if (needRemote) { bitmap = rotateBitmap(bitmap, readPictureDegree(savaFilePath)); // bitmap // =PhotoUtil.rotaingImageView(readPictureDegree(savaFilePath), // bitmap); } File image = new File(savaFilePath); if (!image.exists()) { image.createNewFile(); } bos = new BufferedOutputStream(new FileOutputStream(image)); if (quality < 30) { quality = 30; } if (quality > 100) { quality = 100; } bitmap.compress(Bitmap.CompressFormat.JPEG, quality, bos); bos.flush(); return true; } catch (Exception e) { e.printStackTrace(); } finally { try { bos.close(); // bitmap.recycle(); } catch (IOException e) { e.printStackTrace(); } } return false; } /** * 判断图片的旋转角度 * * @param path * @return */ private static int readPictureDegree(String path) { int degree = 0; try { ExifInterface exifInterface = new ExifInterface(path); int orientation = exifInterface.getAttributeInt( ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL); switch (orientation) { case ExifInterface.ORIENTATION_ROTATE_90: degree = 90; break; case ExifInterface.ORIENTATION_ROTATE_180: degree = 180; break; case ExifInterface.ORIENTATION_ROTATE_270: degree = 270; break; } } catch (IOException e) { e.printStackTrace(); } return degree; } /** * 旋转图片的角度 * * @param bitmap * @param degress * @return */ private static Bitmap rotateBitmap(Bitmap bitmap, int degress) { if (bitmap != null) { Matrix m = new Matrix(); m.postRotate(degress); System.out.println("degress:" + degress); bitmap = 
Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), m, true); return bitmap; } return bitmap; } }
Java
UTF-8
239
1.8125
2
[]
no_license
package br.andretto.sample02.services; import br.andretto.sample02.model.Produto; import java.util.List; public interface IProdutoService { public Produto buscarProdutoPorId(int cod); public List<Produto> buscarTodos(); }
Java
UTF-8
1,729
3.03125
3
[]
no_license
package pinckneyjames.AcquiaAdventure; public class ConferenceRoom implements Rooms{ String[] objectList, interactableList, enemiesList, exitList; public ConferenceRoom() { objectList = null; interactableList = null; enemiesList = null; exitList = null; } public ConferenceRoom(String[] objects) { objectList = objects; } public ConferenceRoom(String[] objects, String[] Interactables) { objectList = objects; interactableList = Interactables; } public ConferenceRoom(String[] objects, String[] Interactables, String[] Enemies) { objectList = objects; interactableList = Interactables; enemiesList = Enemies; } public ConferenceRoom(String[] objects, String[] Interactables, String[] Enemies, String[] Exits) { objectList = objects; interactableList = Interactables; enemiesList = Enemies; exitList = Exits; } @Override public boolean getItem(String[] obj, String name) { boolean output = false; for(String object : obj) { if(object.equalsIgnoreCase(name)) { output = true; } } return output; } @Override public boolean getInteractable(String[] obj, String name) { boolean output = false; for(String object : obj) { if(object.equalsIgnoreCase(name)) { output = true; } } return output; } @Override public boolean getEnemy(String[] obj, String name) { boolean output = false; for(String object : obj) { if(object.equalsIgnoreCase(name)) { output = true; } } return output; } @Override public boolean getExit(String[] obj, String name) { boolean output = false; for(String object : obj) { if(object.equalsIgnoreCase(name)) { output = true; } } return output; } }
JavaScript
UTF-8
1,171
3.15625
3
[]
no_license
function Particle(ctx) { this.random = function(a, b) { return Math.floor(Math.random() * b) + a; }; this.randPick = function(arr) { return arr[Math.floor(Math.random() * arr.length)]; }; this.width = ctx.canvas.width; this.height = ctx.canvas.height; this.colors = ['#3c40c6', '#0be881', '#ff3f34', '#ffd32a']; this.x = this.random(0, this.width); this.y = this.random(0, this.height); this.radius = this.random(2, 7); this.color = this.randPick(this.colors); this.show = function() { let self = this; setInterval(function(){ ctx.clearRect(0, 0, self.width, self.height); self.update(); }, 100); } this.update = function() { this.x++; this.y++; if (this.x >= this.width) { this.x = 5; } else if (this.x <= 0) { this.x = this.width - 5; } if (this.y >= this.height) { this.y = 5; } else if (this.y <= 0) { this.y = this.height - 5; } ctx.fillStyle = this.color; ctx.beginPath(); ctx.arc( this.x, this.y, this.radius, 0, 2 * Math.PI, false ); ctx.fill(); } } module.exports = Particle;
TypeScript
UTF-8
3,465
2.765625
3
[]
no_license
import { Request, Response } from 'express'; import pool from '../database'; // ================================================== // Lista los tipos de documento de la BD // ================================================== class TiposDocumentosController { public async list(req: Request, res: Response): Promise<void> { const tiposDocumentos = await pool.query('call bsp_listar_tipodocumento()'); res.json(tiposDocumentos); } // ================================================== // Obtiene un tipo de documento de la BD // ================================================== public async getOne(req: Request, res: Response): Promise<any> { const { id } = req.params; const tipodoc = await pool.query('call bsp_dame_tipodocumento(?)', [id]); if (tipodoc.length > 0) { return res.json(tipodoc[0]); } res.status(404).json({ text: "El tipoDoc no existe" }); } // ================================================== // Inserta un tipo de documento // ================================================== public async create(req: Request, res: Response): Promise<any> { // var sql = "SET @Documento = '" + req.body.Documento + "';SET @Descripcion = '" + req.body.Documento + "'; \ // CALL bsp_alta_tipodocumento(@Documento,@Descripcion);" // console.log('sql es : ',sql); //console.log('req.body.Documento es : ',req.body.Documento); // const result = pool.query; // console.log('result es : ',result); /* con.query(sql, function(err, res) { if (err) { console.log(err); return; } }); */ /*console.log('req.body es : ',req.body) console.log('documento es : ',req.body.Documento) console.log('Descripcion es : ',req.body.Descripcion)*/ var documento = req.body.Documento; var descripcion = req.body.Descripcion; const result = await pool.query('CALL bsp_alta_tipodocumento(?,?)', [documento,descripcion] ); // const result = pool.query('SET @Documento = "Nuevodoc2";SET @Descripcion = "Esto es una descripcion nueva";CALL bsp_alta_tipodocumento(@Documento,@Descripcion);'); console.log('result es : 
',result); res.json({ message: 'TipoDoc guardada' }); } /* public async getOne(req: Request, res: Response): Promise<any> { const { id } = req.params; const games = await pool.query('SELECT * FROM games WHERE id = ?', [id]); console.log(games.length); if (games.length > 0) { return res.json(games[0]); } res.status(404).json({ text: "The game doesn't exits" }); } public async create(req: Request, res: Response): Promise<void> { const result = await pool.query('INSERT INTO games set ?', [req.body]); res.json({ message: 'Game Saved' }); } public async update(req: Request, res: Response): Promise<void> { const { id } = req.params; const oldGame = req.body; await pool.query('UPDATE games set ? WHERE id = ?', [req.body, id]); res.json({ message: "The game was Updated" }); } public async delete(req: Request, res: Response): Promise<void> { const { id } = req.params; await pool.query('DELETE FROM games WHERE id = ?', [id]); res.json({ message: "The game was deleted" }); } */ } const tiposdocumentosController = new TiposDocumentosController; export default tiposdocumentosController;
Go
UTF-8
4,834
2.765625
3
[]
no_license
package usecase import ( "database/sql" "github.com/NikitaLobaev/BMSTU-DB/internal/models" "github.com/NikitaLobaev/BMSTU-DB/internal/thread/repository" . "github.com/NikitaLobaev/BMSTU-DB/internal/tools/response" VoteUsecase "github.com/NikitaLobaev/BMSTU-DB/internal/vote/usecase" "github.com/labstack/gommon/log" "github.com/lib/pq" "net/http" "strconv" "time" ) type ThreadUsecase struct { threadRepository repository.ThreadRepository voteUsecase *VoteUsecase.VoteUsecase } func NewThreadUsecase(threadRepository repository.ThreadRepository, voteUsecase *VoteUsecase.VoteUsecase) *ThreadUsecase { return &ThreadUsecase{ threadRepository: threadRepository, voteUsecase: voteUsecase, } } func (threadUsecase *ThreadUsecase) Create(thread *models.Thread) *Response { if existingThread, err := threadUsecase.threadRepository.SelectBySlug(thread.Slug); err == nil { return NewResponse(http.StatusConflict, existingThread) } thread2, err := threadUsecase.threadRepository.Insert(thread) if err != nil { if err == sql.ErrNoRows { return NewResponse(http.StatusNotFound, models.Error{ Message: "Can't find user with nickname " + thread.UserNickname + " or forum with slug " + thread.ForumSlug, }) } log.Error(err) return NewResponse(http.StatusServiceUnavailable, nil) } return NewResponse(http.StatusCreated, thread2) } func (threadUsecase *ThreadUsecase) GetBySlugOrId(slugOrId string) *Response { id, err := strconv.ParseUint(slugOrId, 10, 32) var thread *models.Thread if err == nil { thread, err = threadUsecase.threadRepository.SelectById(uint32(id)) } else { thread, err = threadUsecase.threadRepository.SelectBySlug(slugOrId) } if err != nil { if err == sql.ErrNoRows { return NewResponse(http.StatusNotFound, models.Error{ Message: "Can't find thread with slug or id " + slugOrId, }) } log.Error(err) return NewResponse(http.StatusServiceUnavailable, nil) } return NewResponse(http.StatusOK, thread) } func (threadUsecase *ThreadUsecase) UpdateDetails(slugOrId string, threadUpdate 
*models.ThreadUpdate) *Response { id, err := strconv.ParseUint(slugOrId, 10, 32) var thread *models.Thread if err == nil { thread, err = threadUsecase.threadRepository.UpdateById(uint32(id), threadUpdate) } else { thread, err = threadUsecase.threadRepository.UpdateBySlug(slugOrId, threadUpdate) } if err != nil { if err == sql.ErrNoRows { return NewResponse(http.StatusNotFound, models.Error{ Message: "Can't find thread with slug or id " + slugOrId, }) } log.Error(err) return NewResponse(http.StatusServiceUnavailable, nil) } return NewResponse(http.StatusOK, thread) } func (threadUsecase *ThreadUsecase) Vote(slugOrId string, vote *models.Vote) *Response { responseVote := threadUsecase.voteUsecase.Vote(slugOrId, vote) if responseVote.Code != http.StatusCreated { return responseVote } result := threadUsecase.GetBySlugOrId(slugOrId) return result } func (threadUsecase *ThreadUsecase) GetThreadsByForumSlug(forumSlug string, forumParams *models.ForumParams) *Response { threads, err := threadUsecase.threadRepository.SelectThreadsBySlug(forumSlug, forumParams) if err != nil { log.Error(err) return NewResponse(http.StatusServiceUnavailable, nil) } return NewResponse(http.StatusOK, threads) } func (threadUsecase *ThreadUsecase) CreatePosts(slugOrId string, posts *models.Posts) *Response { responseThread := threadUsecase.GetBySlugOrId(slugOrId) if responseThread.Code != http.StatusOK { return responseThread } location, _ := time.LoadLocation("UTC") now := time.Now().In(location).Round(time.Microsecond) for _, post := range *posts { if post.Created.IsZero() { post.Created = now } } posts, err := threadUsecase.threadRepository.InsertPosts(responseThread.JSONObject.(*models.Thread), posts) if err != nil { if err == sql.ErrNoRows { return NewResponse(http.StatusNotFound, models.Error{ Message: "Can't find one of users", }) } else if pqErr, ok := err.(*pq.Error); ok && pqErr.Code == "P0001" { return NewResponse(http.StatusConflict, models.Error{ Message: "Can't find one of parent 
posts or it was created in another thread", }) } log.Error(err) return NewResponse(http.StatusServiceUnavailable, nil) } return NewResponse(http.StatusCreated, posts) } func (threadUsecase *ThreadUsecase) GetPosts(slugOrId string, postParams *models.PostParams) *Response { responseThread := threadUsecase.GetBySlugOrId(slugOrId) if responseThread.Code != http.StatusOK { return responseThread } posts, err := threadUsecase.threadRepository.SelectPosts(responseThread.JSONObject.(*models.Thread), postParams) if err != nil { log.Error(err) return NewResponse(http.StatusServiceUnavailable, nil) } return NewResponse(http.StatusOK, posts) }
C++
UTF-8
1,083
2.703125
3
[]
no_license
#pragma once #include <string> #include <map> #include <memory> # include <boost/serialization/base_object.hpp> # include <boost/serialization/shared_ptr.hpp> # include <boost/serialization/map.hpp> /** * @brief Contains a trie's node */ template<typename key_type> class trie_node { public: template<typename T> using trie_node_ptr = std::shared_ptr<trie_node<T>>; unsigned int get_id(); trie_node(); trie_node(unsigned int id); std::map<key_type, trie_node_ptr<key_type>>& get_childs(); void set_final_node(bool val); bool get_final_node(); bool has_child_; private: unsigned int id_; std::map<key_type, trie_node_ptr<key_type>> childs_; bool final_node_; friend class boost::serialization::access; template<class Archive> void serialize(Archive & ar, const unsigned int version) { (void)version; ar & id_; ar & childs_; ar & final_node_; ar & has_child_; } }; template<typename T> using trie_node_ptr = std::shared_ptr<trie_node<T>>; #include "trie_node.hxx"
C++
UTF-8
498
4.1875
4
[]
no_license
/* Using char[] */ // CPP program to illustrate char // concatenation using standard functions #include <iostream> #include <cstring> using namespace std; int main() { // take large size of array char str[] = "Hello"; cout << "Before Concatenation : " << str << endl; // Hello strcat(str, " World"); cout << "After Concatenation : " << str; // Hello World return 0; } /* Output *//* Before Concatenation : Hello After Concatenation : Hello World */
C#
UTF-8
3,096
3.1875
3
[]
no_license
using System; using System.Collections.Generic; using SQLite; using HangmanWordGame.Resources; using System.IO; namespace HangmanWordGame { class SqlConnection { private string dbpath { get; set; } private SQLiteConnection db { get; set; } public SqlConnection() { //Path String for Database File string dbPath = Path.Combine(System.Environment.GetFolderPath(System.Environment.SpecialFolder.Personal), "HangmanWordGameDB.sqlite"); //Set up the Db connection db = new SQLiteConnection(dbPath); db.CreateTable<ScoreCard>(); // Table of Records } public List<ScoreCard> ViewAll() // Gets Stored Score from Scorecard.cs file { try { return db.Query<ScoreCard>("select * from ScoreCard ORDER BY Score DESC"); } catch (Exception e) { Console.WriteLine("Error:" + e.Message); return null; } } public string UpdateScore(int id, string name, int score) // Updates the Score in db { try { string dbPath = Path.Combine(System.Environment.GetFolderPath(System.Environment.SpecialFolder.Personal), "HangmanWordGameDB.sqlite"); var db = new SQLiteConnection(dbPath); var item = new ScoreCard(); item.Id = id; item.Name = name; item.Score = score; db.Update(item); return "Record Updated..!"; } catch (Exception ex) { return "Error : " + ex.Message; } } public string InsertNewPlayer(string name, int score) // Adds New Palyer to db { try { string dbPath = Path.Combine(System.Environment.GetFolderPath(System.Environment.SpecialFolder.Personal), "HangmanWordGameDB.sqlite"); var db = new SQLiteConnection(dbPath); var item = new ScoreCard(); item.Name = name; item.Score = score; db.Insert(item); return " Sucessfully added to the database !"; } catch (Exception ex) { return "Error : " + ex.Message; } } public string DeletePlayer(int id) // Deletes Player info from the db. 
{ try { string dbPath = Path.Combine(System.Environment.GetFolderPath(System.Environment.SpecialFolder.Personal), "HangmanWordGameDB.sqlite"); var db = new SQLiteConnection(dbPath); var item = new ScoreCard(); item.Id = id; db.Delete(item); return "Sucessfully deleted from the database !"; } catch (Exception ex) { return "Error : " + ex.Message; } } } }
PHP
UTF-8
865
2.953125
3
[]
no_license
<?php namespace Ecommerce\Shipping; use Ecommerce\Address\Address; use Ecommerce\Cart\Cart; use Ecommerce\Customer\Customer; class GetData { private Cart $cart; private Customer $customer; private Address $shippingAddress; public static function create(): self { return new self(); } public function getCart(): Cart { return $this->cart; } public function setCart(Cart $cart): GetData { $this->cart = $cart; return $this; } public function getCustomer(): Customer { return $this->customer; } public function setCustomer(Customer $customer): GetData { $this->customer = $customer; return $this; } public function getShippingAddress(): Address { return $this->shippingAddress; } public function setShippingAddress(Address $shippingAddress): GetData { $this->shippingAddress = $shippingAddress; return $this; } }
Markdown
UTF-8
630
3.96875
4
[]
no_license
### Insertion sort [참고자료](https://gmlwjd9405.github.io/2018/05/06/algorithm-insertion-sort.html) **코드** ```java import java.util.Arrays; public class Insertion_sort { public static void main(String[] args) { int[] input = {6, 15, 5, 7, 25, 12}; insertion_sort(input); System.out.println(Arrays.toString(input)); } // i 앞의 배열들은 모두 정렬된 상태 private static void insertion_sort(int[] arr) { for (int i = 1; i < arr.length; i++) { int temp = arr[i]; int j = i-1; while (j >= 0 && temp < arr[j]) { arr[j+1] = arr[j]; j--; } arr[j+1] = temp; } } } ```
Java
UTF-8
807
2.609375
3
[ "Apache-2.0" ]
permissive
package pro.taskana.user.api.exceptions; import java.util.Map; import pro.taskana.common.api.exceptions.ErrorCode; import pro.taskana.common.api.exceptions.TaskanaException; import pro.taskana.user.api.models.User; /** * This exception is thrown when a specific {@linkplain User} referenced by its {@linkplain * User#getId() id} is not in the database. */ public class UserNotFoundException extends TaskanaException { public static final String ERROR_KEY = "USER_NOT_FOUND"; private final String userId; public UserNotFoundException(String userId) { super( String.format("User with id '%s' was not found.", userId), ErrorCode.of(ERROR_KEY, Map.of("userId", ensureNullIsHandled(userId)))); this.userId = userId; } public String getUserId() { return userId; } }
C#
UTF-8
4,387
2.59375
3
[ "BSD-3-Clause" ]
permissive
/* * Copyright (c) 2014, Furore (info@furore.com) and contributors * See the file CONTRIBUTORS for details. * * This file is licensed under the BSD 3-Clause license * available at https://raw.github.com/furore-fhir/spark/master/LICENSE */ using System; using System.Collections.Generic; using System.IO; using Hl7.Fhir.Model; using Hl7.Fhir.Support; using System.Linq; namespace Hl7.Fhir.Specification.Source { /// <summary> /// Reads FHIR artifacts (Profiles, ValueSets, ...) using a list of other IArtifactSources /// </summary> public class CachedArtifactSource : IArtifactSource { public const int DEFAULT_CACHE_DURATION = 4 * 3600; // 4 hours /// <summary> /// /// </summary> /// <param name="source">ArtifactSource that will be used to get data from on a cache miss</param> /// <param name="cacheDuration">Duration before trying to refresh the cache, in seconds</param> public CachedArtifactSource(IArtifactSource source, int cacheDuration) { Source = source; CacheDuration = cacheDuration; _artifactNames = new Cache<IEnumerable<string>>(id=>Source.ListArtifactNames(), CacheDuration); _conformanceResources = new Cache<Resource>(id => Source.LoadConformanceResourceByUrl(id), CacheDuration); _resourceInformation = new Cache<IEnumerable<ConformanceInformation>>(id=>Source.ListConformanceResources(), CacheDuration); } public CachedArtifactSource(IArtifactSource source) : this(source,DEFAULT_CACHE_DURATION) { } public IArtifactSource Source { get; private set; } public int CacheDuration { get; set; } public Stream LoadArtifactByName(string name) { // We don't cache a stream (yet?) 
return Source.LoadArtifactByName(name); } private Cache<IEnumerable<string>> _artifactNames; public IEnumerable<string> ListArtifactNames() { return _artifactNames.Get("__ARTIFACTNAMES__"); } private Cache<Resource> _conformanceResources; public Resource LoadConformanceResourceByUrl(string url) { return _conformanceResources.Get(url); } private Cache<IEnumerable<ConformanceInformation>> _resourceInformation; public IEnumerable<ConformanceInformation> ListConformanceResources() { return _resourceInformation.Get("__RESOURCEINFORMATION__"); } private class Cache<T> { private Func<string,T> _onCacheMiss; private int _duration; public Cache(Func<string,T> onCacheMiss, int duration) { _onCacheMiss = onCacheMiss; _duration = duration; } private List<CacheEntry<T>> _cache = new List<CacheEntry<T>>(); public T Get(string identifier) { // Check the cache var entry = _cache.Where(ce => ce.Identifier == identifier).SingleOrDefault(); // Remove entry if it's too old if(entry != null && entry.Expired) { _cache.Remove(entry); entry = null; } // If we still have a fresh entry, return it if (entry != null) return entry.Data; else { // Otherwise, fetch it and cache it. T newData = default(T); try { newData = _onCacheMiss(identifier); } catch (Exception) { } _cache.Add(new CacheEntry<T> { Data = newData, Identifier = identifier, Expires = DateTime.Now.AddSeconds(_duration) }); return newData; } } } private class CacheEntry<T> { public T Data; public DateTime Expires; public string Identifier; public bool Expired { get { return DateTime.Now > Expires; } } } } }
Java
UTF-8
992
1.976563
2
[ "Apache-2.0" ]
permissive
package com.services.pojo.csedemo; import com.services.pojo.csedemo.model.Person; import java.util.List; import javax.validation.constraints.*; import io.servicecomb.provider.pojo.RpcSchema; import com.services.pojo.csedemo.Csedemo; @javax.annotation.Generated(value = "io.swagger.codegen.languages.CsePojoCodegen", date = "2017-11-07T02:26:49.587Z") @RpcSchema(schemaId = "csedemo") public class CsedemoImpl implements Csedemo { private CsedemoAgent csedemoAgent = new CsedemoAgent(); public Integer add(Integer a, Integer b) { return csedemoAgent.add(a, b); } public String sayHei(String name) { return csedemoAgent.sayHei(name); } public String sayHello(String name) { return csedemoAgent.sayHello(name); } public String sayHi(String name) { return csedemoAgent.sayHi(name); } public String saySomething(String prefix, Person user) { return csedemoAgent.saySomething(prefix, user); } }
Java
UTF-8
1,811
2.640625
3
[]
no_license
import com.google.common.base.Objects; /** * @author patrick */ public class DocumentSimilarity implements Comparable<DocumentSimilarity> { private double distance; private int rank; private String targetDocument; private String index; private String className; public DocumentSimilarity(double distance, String targetDocument, String className, String index) { this.distance = distance; this.targetDocument = targetDocument; this.index = index; this.className = className; } public int getRank() { return rank; } public void setRank(int rank) { this.rank = rank; } public double getDistance() { return distance; } public void setDistance(double distance) { this.distance = distance; } public String getTargetDocument() { return targetDocument; } public String getIndex() { return index; } @Override public int compareTo(DocumentSimilarity o) { return Double.compare(distance, o.distance); } @Override public int hashCode() { return Objects.hashCode(distance, targetDocument, index); } @Override public boolean equals(Object obj) { if (obj == null) return false; if (!obj.getClass().equals(getClass())) return false; DocumentSimilarity other = (DocumentSimilarity) obj; return Objects.equal(distance, other.distance) && Objects.equal(rank, other.rank) && Objects.equal(targetDocument, other.targetDocument) && Objects.equal(index, other.index); } public String getClassName() { return className; } public void setClassName(String className) { this.className = className; } }
Python
UTF-8
1,046
2.9375
3
[]
no_license
import sqlite3 import sys import json def get_print(id): conn = sqlite3.connect('scorelib.dat') cur = conn.cursor() cur.execute('SELECT person.name, person.born, person.died ' 'FROM print ' 'INNER JOIN edition ON print.edition = edition.id ' 'INNER JOIN score ON edition.score = score.id ' 'INNER JOIN score_author ON score.id = score_author.score ' 'INNER JOIN person ON person.id = score_author.composer ' 'WHERE print.id=(?) ', [id]) rows = cur.fetchall() output = [] for row in rows: person = { "name" : row[0], "born" : int(row[1]) if row[1] is not None else None , "died" : int(row[2]) if row[2] is not None else None } output.append(person) print(json.dumps(output, indent=4, ensure_ascii=False)) conn.close() if __name__ == '__main__': if len(sys.argv) !=2: print("Wrong number of arguments!") else: get_print(sys.argv[1])
C#
UTF-8
45,184
2.65625
3
[ "Apache-2.0" ]
permissive
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ using System; using System.Collections.Generic; using System.Threading; namespace P2PStateServer { public delegate void SessionReadHandler(ISessionObject Session,object StateObject); /// <summary> /// Represents a thread-safe session state dictionary. /// </summary> class SessionDictionary { //ReaderWriterLock used while accessing dictionary #if NET20 ReaderWriterLock rwl = new ReaderWriterLock(); #else ReaderWriterLockSlim rwl = new ReaderWriterLockSlim(); #endif //This stores a list of all sessions sorted by expiration date -- very helpful to the session expiry scavenger. DateSortedDictionary<string,string> expiryList = new DateSortedDictionary<string, string>(); const int DeadLockIterationCount = 2000; //Number of iterations to count before declaring a deadlock private Dictionary<string, ISessionObject> dict = new Dictionary<string, ISessionObject>(); /// <summary> /// Adds a session into the dictionary /// </summary> /// <param name="Key">Session Key</param> /// <param name="Session">Session object</param> /// <param name="UpdateIfNotFound">Indicates whether session should be updated if the session was not found. 
If set to flase, this gives the caller a chance to query the network before trying again</param> /// <param name="LockedSessionInfo">Locked session information if session is locked</param> /// <returns>Result of Action</returns> public SessionActionResult Add(string Key, ISessionObject Session,bool UpdateIfNotFound, out SessionResponseInfo LockedSessionInfo) { //If an item with key already exists, return SessionActionResult.AlreadyExists //else add the item and return SessionActionResult.OK //Calls UpSert internally return UpSert(Key, Session, true, UpdateIfNotFound, out LockedSessionInfo); //Insert Item if it doesn't exist } /// <summary> /// Updates a session (adds a new one if it was not found) /// </summary> /// <param name="Key">Session Key</param> /// <param name="Session">Session object</param> /// <param name="UpdateIfNotFound">Indicates whether session should be updated if the session was not found. If set to flase, this gives the caller a chance to query the network before trying again</param> /// <param name="LockedSessionInfo">Locked session information if session is locked</param> /// <returns>Result of Action</returns> public SessionActionResult Update(string Key, ISessionObject Session, bool UpdateIfNotFound, out SessionResponseInfo LockedSessionInfo) { //If the item is locked and the new items lockCookie does not match, return SessionActionResult.Locked //else add the item and return SessionActionResult.OK //Calls UpSert internally return UpSert(Key, Session, false, UpdateIfNotFound, out LockedSessionInfo); //Insert or Update item } /// <summary> /// Updates or inserts a session in the dictionary /// </summary> /// <param name="Key">Session Key</param> /// <param name="Session">Session object</param> /// <param name="InsertOnly">Indicates that session should only be inserted if it does not already exist</param> /// <param name="UpdateIfNotFound">Indicates whether session should be updated if the session was not found. 
If set to flase, this gives the caller a chance to query the network before trying again </param> /// <param name="LockedSessionInfo">Locked session information if session is locked</param> /// <returns>Result of Action</returns> private SessionActionResult UpSert(string Key, ISessionObject Session, bool InsertOnly, bool UpdateIfNotFound, out SessionResponseInfo LockedSessionInfo) { // Look for the session using a reader lock. // If session is not found, switch to a writer lock and insert item. // If session is found: // Perform an atomic compare exchange on the variable 'InUse' // If session is in Use, try Upsert again from the start. // If session is not in Use, Perform UpSert and reset InUse // Also update Sorted session list if(Key == null) throw new ArgumentNullException("Key"); if(Session == null) throw new ArgumentNullException("Session"); LockedSessionInfo = null; bool tryAgain; Diags.ResetDeadLockCounter(); do { tryAgain = false; AcquireReadLock(); ISessionObject entry; try { dict.TryGetValue(Key, out entry); } finally { ReleaseReadLock(); } if (entry == null) { if (!UpdateIfNotFound) { return SessionActionResult.NotFound; } //Session not found -- insert brand new session object AcquireWriteLock(); try { //Check again to be sure now that the write-lock has been obtained dict.TryGetValue(Key, out entry); if (entry != null) { //ooops -- another thread inserted a seesion with this key while this thread was trying to obtain the write-lock //so try again tryAgain = true; continue; } Session.LockCookie = 1; //For some reason Lockcookie starts counting from 2 -- so set it to 1 now so that it increments to 2 when sought dict[Key] = Session; expiryList.Add(DateTime.UtcNow.Add(new TimeSpan(0, Session.TimeOut,0)) , Key, Key); Diags.LogNewSession(Key, Session); } finally { ReleaseWriteLock(); } } else { //Session Found if (InsertOnly) { Diags.LogSessionAlreadyExists(Key); return SessionActionResult.AlreadyExists; //Do not perform an update if InsertOnly is requested } 
//There is no need to acquire a write lock here since the dictionary is not been modified. //Only the dictionary entry itself is been updated and such updates are guaranteed to be atomic //if the atomic InUse property is set. if (entry.CompareExchangeIsInUse(true, false) == false) { //the InUse flag is set, so this code section has exclusive access to this session object try { if (entry.IsLocked) { if (!entry.UnLock(Session.LockCookie )) { //Lockcookie did not match LockedSessionInfo = (SessionResponseInfo) entry.CreateResponseInfo(); Diags.LogSessionIsLocked(Key); return SessionActionResult.Locked; } } Session.LockCookie = entry.LockCookie; //Overwrite the incoming session's lock-cookie with the internal one's so as not to let external input change the lockcookie Session.ExtraFlags = -1; //disable extra flags since an update is being performed entry.CopyFrom(Session); //Copy all information from Session to entry expiryList.Add(DateTime.UtcNow.Add(new TimeSpan(0, Session.TimeOut, 0)), Key, Key); //reset expiry timeout Diags.LogUpdatedSession(Key, Session); } finally { entry.CompareExchangeIsInUse(false, true); } } else { //Is this entry being exported? 
if (entry.IsExporting) { //This session is already been exported so leave Diags.ResetDeadLockCounter(); return SessionActionResult.Exporting; } //Another thread is using this session and will be done soon so try again Thread.Sleep(1); //pause for 1 ms tryAgain = true; } } Diags.DetectDeadLock(Key, DeadLockIterationCount); //Signal a deadlock after 2000 iterations } while (tryAgain); Diags.ResetDeadLockCounter(); //Signal deadlock was freed return SessionActionResult.OK; } /// <summary> /// Removes a session from the dictionary /// </summary> /// <param name="Key">Session Key</param> /// <param name="LockCookie">Lock Cookie (used to unlock item if it's locked)</param> /// <param name="LockedSessionInfo">Locked session information if session is locked</param> /// <returns>Result of Action</returns> public SessionActionResult Remove(string Key, uint LockCookie, out SessionResponseInfo LockedSessionInfo) { // Look for the session using a reader lock. // If session is not found, return false; // If session is found: // Perform an atomic compare exchange on the variable 'InUse' // if session is in Use, try Delete again from the start. 
// if session is not In Use, Perform Delete return Remove(Key, LockCookie, false, DateTime.MinValue, out LockedSessionInfo); } /// <summary> /// Remove a session item because it has expired /// </summary> /// <param name="Key">Session Key</param> /// <param name="ExpiryDate">session Expiry date</param> /// <returns>Result of Action</returns> private SessionActionResult Expire(string Key, DateTime ExpiryDate) { SessionResponseInfo sessInfo; return Remove(Key, 0, true, ExpiryDate, out sessInfo) ; } /// <summary> /// Removes a session from the dictionary /// </summary> /// <param name="Key">Session Key</param> /// <param name="LockCookie">Lock Cookie</param> /// <param name="IsExpiring">Indicates that the item is being removed because it's expiring</param> /// <param name="ExpiryDate">The Item expiry date (for comparison)</param> /// <param name="LockedSessionInfo">Locked session information if session is locked</param> /// <returns>Result of Action</returns> private SessionActionResult Remove(string Key, uint LockCookie, bool IsExpiring, DateTime ExpiryDate ,out SessionResponseInfo LockedSessionInfo) { if (Key == null) throw new ArgumentNullException("Key"); LockedSessionInfo = null; bool tryAgain; Diags.ResetDeadLockCounter(); do { tryAgain = false; AcquireReadLock(); ISessionObject entry; try { dict.TryGetValue(Key, out entry); } finally { ReleaseReadLock(); } if (entry == null) { //Session not found Diags.LogSessionNotFound(Key); return SessionActionResult.NotFound; } else { //Session Found if (entry.CompareExchangeIsInUse(true, false) == false) { try { //The InUse flag is set and so this code section has exclusive access to this session object AcquireWriteLock(); try { //Check again to be sure, now that the write-lock has been obtained ISessionObject oldEntry = entry; if (!dict.TryGetValue(Key, out entry)) { //ooops -- another thread deleted the session from the dictionary while this thread //was either trying to do the compareExchange (or if buggy, while 
obtaining the write-lock) //so try again oldEntry.CompareExchangeIsInUse(false, true); //unlock the previously locked item tryAgain = true; continue; } if (IsExpiring) { DateTime timeStamp; if (expiryList.TryGetTimeStamp(Key, out timeStamp)) { if (timeStamp != ExpiryDate) { //The expiration date on this session was updated, so leave return SessionActionResult.OK; } } } if (!IsExpiring && entry.IsLocked) //Locked items DO expire. if not expiring, LockCookie has to match session's { if (!entry.UnLock(LockCookie)) { //Lockcookie did not match LockedSessionInfo = (SessionResponseInfo)entry.CreateResponseInfo(); Diags.LogSessionIsLocked(Key); return SessionActionResult.Locked; } } if (dict.Remove(Key)) { expiryList.Remove(Key); if (IsExpiring) { Diags.LogSessionExpired(Key); } else { Diags.LogSessionDeleted(Key); } } else { //This should never happen Diags.Fail("ASSERTION Failed -- Session dictionary was unable to remove key\r\n"); } } finally { ReleaseWriteLock(); } } finally { if (entry != null) entry.CompareExchangeIsInUse(false, true); } } else { //Is this entry being exported? if (entry.IsExporting) { //This session is already been exported so leave Diags.ResetDeadLockCounter(); return SessionActionResult.Exporting; } //Another thread is using this session and will be done soon so try again Thread.Sleep(1); //pause for 1 ms tryAgain = true; } Diags.DetectDeadLock(Key, DeadLockIterationCount); //Signal a deadlock after 2000 iterations } } while (tryAgain); Diags.ResetDeadLockCounter(); //Signal deadlock was freed return SessionActionResult.OK; } /// <summary> /// Reads a stored session /// </summary> /// <param name="Key">Session Key</param> /// <param name="Reader">Method to call to complete read</param> /// <param name="StateObject">State object</param> /// <returns>Result of read action</returns> public SessionActionResult Read(string Key, SessionReadHandler Reader, object StateObject) { // Look for session using a reader lock. 
// If session is not found, return false; // If session is found: // Perform an atomic compare exchange on the variable 'InUse' // if session is in Use, try read again from the start. // if session is not in use, call delegate and return OK return Read(Key, Reader, StateObject, false); } /// <summary> /// Reads a stored session /// </summary> /// <param name="Key">Session Key</param> /// <param name="Reader">Method to call to complete read</param> /// <param name="StateObject">State object</param> /// <param name="isExporting">Indicates if the session is to be exported</param> /// <returns>Result of read action</returns> private SessionActionResult Read(string Key, SessionReadHandler Reader, object StateObject, bool isExporting) { if (Key == null) throw new ArgumentNullException("Key"); bool tryAgain; bool sessionIslocked = false; Diags.ResetDeadLockCounter(); //Reset Dead lock counter do { tryAgain = false; AcquireReadLock(); ISessionObject entry; try { dict.TryGetValue(Key, out entry); } finally { ReleaseReadLock(); } if (entry == null) { //Session not found Diags.LogSessionNotFound(Key); return SessionActionResult.NotFound; } else { //Session Found if (entry.CompareExchangeIsInUse(true, false) == false) { //The InUse flag has been set and now this thread has exclusive access to this session object try { //Set IsExporting flag for this entry if item is to be exported if (isExporting) { entry.IsExporting = true; } //Call Reader Delegate if(Reader != null) Reader(entry, StateObject); if (isExporting) { Diags.LogSessionExporting(Key, entry); } else { Diags.LogSessionRead(Key, entry); } sessionIslocked = entry.IsLocked; } finally { if (!isExporting) //Remove inUse property if not exporting { entry.CompareExchangeIsInUse(false, true); } } } else { //Nope, it's still there so check if it's been exported and try again if (entry.IsExporting) { //This session is already been exported so leave Diags.ResetDeadLockCounter(); return SessionActionResult.Exporting; } 
Thread.Sleep(1); //pause for 1 ms tryAgain = true; } Diags.DetectDeadLock(Key, DeadLockIterationCount); //Signal a deadlock after 2000 iterations } } while (tryAgain); Diags.ResetDeadLockCounter(); //Signal deadlock was freed if (sessionIslocked && !isExporting ) { Diags.LogSessionIsLocked(Key); return SessionActionResult.Locked; } else { return SessionActionResult.OK; } } /// <summary> /// Begins an external session export /// </summary> /// <param name="Key">Session key</param> /// <param name="Reader">Method to call to kickstart export</param> /// <param name="StateObject">State object</param> /// <returns>Result of the operation</returns> public SessionActionResult BeginExport(string Key, SessionReadHandler Reader, object StateObject) { // Look for session using a reader lock. // If session is not found, return not found; // If session is found: // Perform an atomic compare exchange on the variable 'InUse' // if session is in Use, try read again from the start. // if session is not in use, call delegate and return OK response -- do not reset inuse property return Read(Key, Reader, StateObject, true); } /// <summary> /// Ends an external session export /// </summary> /// <param name="Key">Session key</param> /// <param name="RemoveSession">True to remove session from dictionary</param> public void EndExport(string Key, bool RemoveSession) { //This method resets the inuse property if the isExporting property is true if (Key == null) throw new ArgumentNullException("Key"); AcquireReadLock(); ISessionObject entry; try { dict.TryGetValue(Key, out entry); } finally { ReleaseReadLock(); } if (entry == null) { //Session not found -- it's okay, don't freak out, session may have expired. 
return; } else { //Session Found if (entry.IsInUse) { //The InUse flag, now check the isExporting flag if (!entry.IsExporting) { Exception ex = new InvalidOperationException("EndExport must be called after a call to BeginExport"); Diags.LogApplicationError("EndExport must be called after a call to BeginExport -- Entry is InUse but IsExporting is false", ex); throw ex; } try { //Delete session if (RemoveSession) { AcquireWriteLock(); try { if (dict.Remove(Key)) { expiryList.Remove(Key); Diags.LogSessionDeleted(Key); } } finally { ReleaseWriteLock(); } } } finally { entry.IsExporting = false; Diags.LogSessionExported(Key); entry.CompareExchangeIsInUse(false, true); } } else { Exception ex = new InvalidOperationException("EndExport must be called after a call to BeginExport"); Diags.LogApplicationError("EndExport must be called after a call to BeginExport -- Entry is not in use", ex); throw ex; } } } /// <summary> /// Gets the list of all keys in the session dictionary /// </summary> /// <remarks> /// This is useful to perform an operation on all keys in the session. 
/// However, this is a static list and the caller should be aware a key may no longer exist when the operation is performed /// </remarks> public List<string> Keys { get { AcquireReadLock(); try { Dictionary<string, ISessionObject>.KeyCollection keys = dict.Keys; List<string> keyList = new List<string>(keys); return keyList; } finally { ReleaseReadLock(); } } } /// <summary> /// Removes all expired sessions from the dictionary /// </summary> public void Sweep() { List<string> keys = expiryList.GetOldKeys(DateTime.UtcNow); foreach (string key in keys) { //Get Timestamp DateTime timeStamp; if (expiryList.TryGetTimeStamp(key, out timeStamp)) { //Make sure new timestamp is expired if (DateTime.UtcNow > timeStamp) { //Expired Expire(key, timeStamp); } } } } #region Reader Writer Lock Acquisition/Release /// <summary> /// Acquires the session dictionary Read Lock /// </summary> private void AcquireReadLock() { #if NET20 rwl.AcquireReaderLock(Timeout.Infinite); #else rwl.EnterReadLock(); #endif } /// <summary> /// Releases the session dictionary Read Lock /// </summary> private void ReleaseReadLock() { #if NET20 rwl.ReleaseReaderLock(); #else rwl.ExitReadLock(); #endif } /// <summary> /// Acquires the Session dictionary Write Lock /// </summary> private void AcquireWriteLock() { #if NET20 rwl.AcquireWriterLock(Timeout.Infinite); #else rwl.EnterWriteLock(); #endif } /// <summary> /// Releases the Session Dictionary Write Lock /// </summary> private void ReleaseWriteLock() { #if NET20 rwl.ReleaseWriterLock(); #else rwl.ExitWriteLock(); #endif } #endregion } public enum SessionActionResult { OK, //Operation was successful Locked, //Resource is locked NotFound, //resource was not found AlreadyExists, //Resource already exists Exporting //Resource is being exported } /// <summary> /// Represents a thread safe dictionary of key-value pairs sorted in place by their time stamps. 
/// </summary> /// <typeparam name="TKey">Type of item key</typeparam> /// <typeparam name="TValue">Type of item value</typeparam> class DateSortedDictionary<TKey, TValue> { List<TimeTaggedItem<TKey>> list = new List<TimeTaggedItem<TKey>>(); Dictionary<TKey, TimeTaggedItem<TValue>> dict = new Dictionary<TKey, TimeTaggedItem<TValue>>(); object sync = new object(); /// <summary> /// Adds or updates an item in the dictionary /// </summary> /// <param name="TimeStamp">The timestamp to set for the item</param> /// <param name="Key">The Item Key</param> /// <param name="Value">The Item Value</param> public void Add(DateTime TimeStamp, TKey Key, TValue Value) { lock(sync) { TimeTaggedItem<TValue> entryValueItem; if (dict.TryGetValue(Key, out entryValueItem)) { //This item exists so update both dictionary and list //First update list //Look for existing item using the found entry int index = BinaryLocate(Key, entryValueItem); //Remove it list.RemoveAt(index); //Reinsert it in the right place in the ordered timestamp list //1. Look for next largest item in list for the new timestamp TimeTaggedItem<TKey> listItem = new TimeTaggedItem<TKey>(TimeStamp, Key); index = list.BinarySearch(listItem); //2. Insert it there list.Insert(index < 0 ? ~index : index, listItem); //Secondly update dictionary with the value dict[Key] = new TimeTaggedItem<TValue>(TimeStamp, Value); } else { //This item does not exist so insert into both dictionary and list //First insert into list //Look for next largest item in list TimeTaggedItem<TKey> listItem = new TimeTaggedItem<TKey>(TimeStamp,Key); int index = list.BinarySearch(listItem); //Insert it there list.Insert(index < 0 ? ~index : index, listItem); //Secondly insert into dictionary dict.Add(Key,new TimeTaggedItem<TValue>(TimeStamp,Value)); } } } /// <summary> /// Scans for the location of an item within the internal list. /// </summary> /// <remarks> /// Performs a binary search within the list with forward and backward scanning. 
/// Will throw an exception if item is not found. USE ONLY to find an item by key when you KNOW it's there. /// </remarks> /// <param name="Key">Item Key</param> /// <param name="entryValueItem">The Item Value</param> /// <returns>The zer-based index of the items location</returns> private int BinaryLocate(TKey Key, TimeTaggedItem<TValue> entryValueItem) { int index = list.BinarySearch(new TimeTaggedItem<TKey>(entryValueItem.TimeStamp, Key)); if (index < 0) { throw new InvalidOperationException("Use BinaryLocate only when the item exists in the List"); } //This might not be the one corresponding to the right key int b = 0, f = 0; while (!list[index].Value.Equals(Key)) { //So scan backwards and forwards silmultaneously b++; f++; if (index - b >= 0) { if (list[index - b].Value.Equals(Key)) { index = index - b; break; } } if (index + f <= list.Count - 1) { if (list[index + f].Value.Equals(Key)) { index = index + f; break; } } if ((index + f > list.Count - 1) && (index - b < 0)) { throw new InvalidOperationException("Use BinaryLocate only when the item exists in the List"); } } return index; } /// <summary> /// Checks if the dictionary contains an item /// </summary> /// <param name="key">Item Key</param> /// <returns>True, if the item was found. Otherwise, false</returns> public bool ContainsKey(TKey key) { lock (sync) { return dict.ContainsKey(key); } } /// <summary> /// Removes an item from the dictionary /// </summary> /// <param name="Key">Item Key</param> /// <returns>true if item was removed. 
Otherwise, false.</returns> public bool Remove(TKey Key) { lock (sync) { TimeTaggedItem<TValue> entryValueItem; if (dict.TryGetValue(Key, out entryValueItem)) { //This item exists so remove both dictionary and list //First remove from list //Look for existing item using the found entry int index = BinaryLocate(Key, entryValueItem); //Remove item from the list list.RemoveAt(index); //Secondly remove from dictionary dict.Remove(Key); return true; } return false; } } /// <summary> /// Gets the value of an item /// </summary> /// <param name="key">Item Key</param> /// <param name="value">Item Value</param> /// <returns>True, if item value was obtained. Otherwise, false</returns> public bool TryGetValue(TKey key, out TValue value) { value = default(TValue); TimeTaggedItem<TValue> entry; lock(sync) { if (dict.TryGetValue(key, out entry)) { value = entry.Value; return true; } else { return false; } } } /// <summary> /// Gets the time stamp value of an item /// </summary> /// <param name="Key">Key</param> /// <param name="TimeStamp">The Timestamp</param> /// <returns>True if timestamp was obtained. 
Otherwise, false.</returns> public bool TryGetTimeStamp(TKey Key, out DateTime TimeStamp) { TimeTaggedItem<TValue> entry; TimeStamp = DateTime.MinValue; lock (sync) { if (dict.TryGetValue(Key, out entry)) { //This item exists so return the value; TimeStamp = entry.TimeStamp; return true; } else { return false; } } } /// <summary> /// Gets the key of the item with the newest time stamp /// </summary> public TKey NewestKey { get { lock (sync) { if (list.Count > 0) { return list[list.Count - 1].Value; } else { return default(TKey); } } } } /// <summary> /// Gets the key of the item with the oldest time stamp /// </summary> public TKey OldestKey { get { lock (sync) { if (list.Count > 0) { return list[0].Value; } else { return default(TKey); } } } } /// <summary> /// Gets a list of all keys newer than a specified date /// </summary> /// <param name="DatedAfter">The specified date</param> /// <returns>A list of keys</returns> public List<TKey> GetNewKeys(DateTime DatedAfter) { List<TKey> keys = new List<TKey>(); lock (sync) { if (list.Count > 0) { for (int i = list.Count - 1; i >= 0; i--) { if (list[i].TimeStamp > DatedAfter) { keys.Add(list[i].Value); } else { break; } } } } return keys; } /// <summary> /// Gets a list of all keys older than a specified date /// </summary> /// <param name="DatedBefore">The specified date</param> /// <returns>List of keys</returns> public List<TKey> GetOldKeys(DateTime DatedBefore) { List<TKey> keys = new List<TKey>(); lock (sync) { if (list.Count > 0) { for (int i = 0; i < list.Count; i++) { if (list[i].TimeStamp < DatedBefore) { keys.Add(list[i].Value); } else { break; } } } } return keys; } /// <summary> /// Gets the item with the newest time stamp /// </summary> public TimeTaggedItem<TValue> Newest { get { lock (sync) { if (list.Count > 0) { return dict[list[list.Count - 1].Value]; } else { return null; } } } } /// <summary> /// Gets the item with the oldest timestamp /// </summary> public TimeTaggedItem<TValue> Oldest { get { lock 
(sync) { if (list.Count > 0) { return dict[list[0].Value]; } else { return null; } } } } /// <summary> /// Gets the number of items in the dictionary /// </summary> public int Count { get { lock (sync) { return list.Count; } } } /// <summary> /// Gets or sets the value for an item in the dictionary /// </summary> /// <param name="Key">Session key</param> /// <returns>The item value</returns> public TValue this[TKey Key] { get { TimeTaggedItem<TValue> entry; lock (sync) { if (dict.TryGetValue(Key, out entry)) { //This item exists so return the value; return entry.Value; } else { throw new KeyNotFoundException("The key was not found in the dictionary"); } } } set { TimeTaggedItem<TValue> entry; bool found = false; lock (sync) { if (dict.TryGetValue(Key, out entry)) { //This item exists so update the value; entry.Value = value; found = true; } } if (!found) { this.Add(DateTime.MinValue, Key, value); } } } } /// <summary> /// Represents a time tagged item. /// </summary> /// <remarks> /// Note that this class can be used encapsulate a dictionary key or value /// </remarks> /// <typeparam name="T">The Type of the time tagged item's value</typeparam> class TimeTaggedItem<T> : IComparable<TimeTaggedItem<T>> { public DateTime TimeStamp; public T Value; /// <summary> /// Initializes a new instance of a TimeTaggedItem class /// </summary> /// <param name="TimeStamp">The timestamp to assign</param> /// <param name="Value">The value of the item</param> public TimeTaggedItem(DateTime TimeStamp, T Value) { this.TimeStamp = TimeStamp; this.Value = Value; } #region IComparable<DatedItem<TItem>> Members public int CompareTo(TimeTaggedItem<T> other) { return TimeStamp.CompareTo(other.TimeStamp); } #endregion } }
Markdown
UTF-8
1,971
2.625
3
[]
no_license
# Socketry A portmanteu of Socket and Wizardry. We'll see if that pans out. Something something "socket activation." ## Running ``` make bin/server ./hack/run.sh ``` or ``` ./hack/runc.sh ./hack/run.sh runc run server ``` and ``` curl --unix-socket ./service.sock ``` ## Rough Sketch Zero downtime deployments by way of socket passing between processes. The trick is that I want to do this with containers, also: to start with, the listening socket exists at the host namespace but the receiving process should run within its own context. At this point, I expect the architecture to be roughly: listening daemon outside the container, spawns a container that has a socket, passes the fd(s) over the socket to a process inside the container that then execs the listening process. ### Questions - How does passing fds between namespaces work? - Well, before I get into the whole domain socket thing, it seems runc has some provision for this: https://github.com/opencontainers/runc/blob/master/docs/terminals.md#other-file-descriptors - Can we effectively use "run once" mode here? A program that takes a single fd and writes Stuff to it is very easy to build. - Current sticking point: http package isn't built to handle a single connection, but instead to Accept - What about "run for a while - <something forgotten here> - Boldly: can we do something cross-machine? Much larger project, that. ## References - https://kevincox.ca/2021/04/15/my-ideal-service/ - https://github.com/coreos/go-systemd/tree/v14/examples/activation/httpserver - https://tailordev.fr/blog/2017/06/09/deploying-a-go-app-with-systemd-socket-activation/ - https://vincent.bernat.ch/en/blog/2018-systemd-golang-socket-activation - https://copyconstruct.medium.com/file-descriptor-transfer-over-unix-domain-sockets-dcbbf5b3b6ec - and the referenced paper https://research.fb.com/publications/zero-downtime-release-disruption-free-load-balancing-of-a-multi-billion-user-website/
C#
UTF-8
803
3.15625
3
[]
no_license
private void setInformation(DataTable dt, List<string> text) { var cache = new Dictionary<string, int>(); //cache column numbers foreach(var entry in text) cache[entry] = getColumnNumberByValue(dt, entry); for(int i=1; i < dt.Rows.Count; i++) { foreach(var entry in text) { var columnIndex = cache[entry]; if(columnIndex != -1) { var value = dt.Rows[i][columnIndex].ToString(); } } } } private int getColumnNumberByValue(DataTable dataTable, string text) { for(int i=0; i < dataTable.Columns.Count; i++) { if(dataTable.Rows[0][i].ToString() == text) return i; } return -1; }
Java
UTF-8
464
2.046875
2
[]
no_license
package com.algaworks; import java.math.BigDecimal; import com.algaworks.bean.PedidoVendaService; import com.algaworks.model.PedidoVenda; import com.algaworks.util.cdi.WeldContext; public class Principal { public static void main(String[] args) { PedidoVendaService service = WeldContext.INSTANCE.getBean(PedidoVendaService.class); PedidoVenda pedidoVenda = new PedidoVenda("sabonete", new BigDecimal("3.00")); service.salvar(pedidoVenda); } }
Ruby
UTF-8
684
3.09375
3
[]
no_license
require_relative "base_repository" require_relative "../models/meal" class MealRepository < BaseRepository # def initialize(csv_file) # @csv_file = csv_file # @meals = [] # @next_id = 1 # load_meals if File.exists?(@csv_file) # end # def all # @meals # end # def find(meal_id) # @meals[meal_id - 1] # end # def add(meal) # meal.id = @next_id # @meals << meal # @next_id += 1 # write_csv # add_meal_to_csv(meal) # end private def build_element(row) row[:id] = row[:id].to_i # Convert column to Fixnum row[:price] = row[:price].to_i # Convert column to Fixnum Meal.new(row) end end
C#
UTF-8
15,278
3.765625
4
[]
no_license
namespace Set { using System; using System.Collections; using System.Collections.Generic; /// <summary> /// Class that implements a set as a binary search tree /// </summary> /// <typeparam name="T">type of the elements of a set</typeparam> public class Set<T> : ISet<T> where T : IComparable { /// <summary> /// Root of a tree /// </summary> private Node root; /// <summary> /// Initializes a new instance of the <see cref="Set{T}"/> class. /// </summary> public Set() { } /// <summary> /// Initializes a new instance of the <see cref="Set{T}"/> class. /// </summary> /// <param name="collection">collection of items that will be added to set</param> public Set(IEnumerable<T> collection) { if (collection == null) { throw new ArgumentNullException(nameof(collection)); } foreach (var item in collection) { this.Add(item); } } /// <summary> /// Gets number of elements in a set /// </summary> public int Count { get; private set; } /// <summary> /// Gets a value indicating whether set is readonly /// </summary> public bool IsReadOnly => false; /// <summary> /// Adds item to set /// </summary> /// <param name="item">item to be added to set</param> /// <returns>true if item was added to set, false otherwise</returns> public bool Add(T item) { if (this.Contains(item)) { return false; } this.Count++; if (this.root == null) { this.root = new Node(item); return true; } this.GetParent(item).SetChild(item, new Node(item)); return true; } /// <summary> /// Clears a set /// </summary> public void Clear() { this.root = null; this.Count = 0; } /// <summary> /// Checks whether set contains given item /// </summary> /// <param name="item">item to be checked</param> /// <returns>true if set contains the item, false otherwise</returns> public bool Contains(T item) { var node = this.root; while (node != null) { if (node.Value.Equals(item)) { return true; } node = node.GetChild(item); } return false; } /// <summary> /// Copies a set into an array /// </summary> /// <param name="array">array in 
which set will be copied</param> /// <param name="arrayIndex">starting index of copying</param> public void CopyTo(T[] array, int arrayIndex) { if (array == null) { throw new ArgumentNullException($"Array {nameof(array)} is null"); } if (arrayIndex < 0 || arrayIndex > array.Length - this.Count) { throw new ArgumentOutOfRangeException(nameof(arrayIndex), "Аргумент должен быть не меньше нуля и оставшееся количество ячеек массива должно быть не меньше размера множества"); } var itemIndex = 0; foreach (var item in this) { array[arrayIndex + itemIndex] = item; ++itemIndex; } } /// <summary> /// Excepts a set with another collection /// </summary> /// <param name="other">collection to be excepted with a set</param> public void ExceptWith(IEnumerable<T> other) { if (other == null) { throw new ArgumentNullException(nameof(other)); } foreach (var item in other) { this.Remove(item); } } public IEnumerator<T> GetEnumerator() { var queue = new Stack<Node>(); queue.Push(this.root); while (queue.Count != 0) { var currentNode = queue.Pop(); if (currentNode != null) { yield return currentNode.Value; queue.Push(currentNode.RightChild); queue.Push(currentNode.LeftChild); } } } /// <summary> /// Intersects a set with another collection /// </summary> /// <param name="other">collection to be intersected with a set</param> public void IntersectWith(IEnumerable<T> other) { if (other == null) { throw new ArgumentNullException(nameof(other)); } var intersection = new List<T>(); foreach (var item in other) { if (this.Contains(item)) { intersection.Add(item); } } this.Clear(); foreach (var item in intersection) { this.Add(item); } } /// <summary> /// Checks whether a set is a proper subset of given collection /// </summary> /// <param name="other">given collection</param> /// <returns>true if a set is a proper subset of given collection, false otherwise</returns> public bool IsProperSubsetOf(IEnumerable<T> other) { if (other == null) { throw new ArgumentNullException(nameof(other)); } var 
set = new Set<T>(other); return this.IsSubsetOf(other) && this.Count < set.Count; } /// <summary> /// Checks whether a set is a proper superset of given collection /// </summary> /// <param name="other">given collection</param> /// <returns>true if a set is a proper superset of given collection, false otherwise</returns> public bool IsProperSupersetOf(IEnumerable<T> other) { if (other == null) { throw new ArgumentNullException(nameof(other)); } var set = new Set<T>(other); return set.IsProperSubsetOf(this); } /// <summary> /// Checks whether a set is a subset of given collection /// </summary> /// <param name="other">given collection</param> /// <returns>true if a set is a subset of given collection, false otherwise</returns> public bool IsSubsetOf(IEnumerable<T> other) { if (other == null) { throw new ArgumentNullException(nameof(other)); } var set = new Set<T>(other); foreach (var item in this) { if (!set.Contains(item)) { return false; } } return true; } /// <summary> /// Check whether a set is a superset of given collection /// </summary> /// <param name="other">given collection</param> /// <returns>true if a set is a supersetof given collection, false otherwise</returns> public bool IsSupersetOf(IEnumerable<T> other) { if (other == null) { throw new ArgumentNullException(nameof(other)); } var set = new Set<T>(other); return set.IsSubsetOf(this); } /// <summary> /// Checks whether a set overlaps given collection /// </summary> /// <param name="other">given collection</param> /// <returns>true if a set overlaps given collection, false otherwise</returns> public bool Overlaps(IEnumerable<T> other) { if (other == null) { throw new ArgumentNullException(nameof(other)); } foreach (var item in other) { if (this.Contains(item)) { return true; } } return false; } /// <summary> /// Removes item from a set /// </summary> /// <param name="item">item to be removed from a set</param> /// <returns>true if item was removed from a set, false otherwise</returns> public bool 
Remove(T item) { if (!this.Contains(item)) { return false; } this.Count--; if (this.root.Value.Equals(item)) { return this.RemoveRoot(); } var parent = this.GetParent(item); var node = parent.GetChild(item); if (node.RightChild != null) { node.RightChild.GetLeftmostDescendant().LeftChild = node.LeftChild; parent.SetChild(node.Value, node.RightChild); } else { parent.SetChild(node.Value, node.LeftChild); } return true; } /// <summary> /// Checks whether a set equals to given collection /// </summary> /// <param name="other">given collection</param> /// <returns>true if a set equals to given collection, false otherwise</returns> public bool SetEquals(IEnumerable<T> other) { if (other == null) { throw new ArgumentNullException(nameof(other)); } var set = new Set<T>(other); return this.IsSubsetOf(set) && set.IsSubsetOf(this); } /// <summary> /// Excepts a set with given collection symmetrically /// </summary> /// <param name="other">given collections</param> public void SymmetricExceptWith(IEnumerable<T> other) { if (other == null) { throw new ArgumentNullException(nameof(other)); } foreach (var item in other) { if (this.Contains(item)) { this.Remove(item); } else { this.Add(item); } } } /// <summary> /// Unions a set with given collection /// </summary> /// <param name="other">given collection</param> public void UnionWith(IEnumerable<T> other) { if (other == null) { throw new ArgumentNullException(nameof(other)); } foreach (var item in other) { this.Add(item); } } void ICollection<T>.Add(T item) { this.Add(item); } IEnumerator IEnumerable.GetEnumerator() { return this.GetEnumerator(); } /// <summary> /// Removes root of a tree /// </summary> /// <returns>true if root was deleted</returns> private bool RemoveRoot() { if (this.root.RightChild != null) { this.root.RightChild.GetLeftmostDescendant().LeftChild = this.root.LeftChild; this.root = this.root.RightChild; return true; } this.root = this.root.LeftChild; return true; } /// <summary> /// Gets parent for an item, 
/// if a set does not contain an item, gets node that will be item parent after addition item to a set /// </summary> /// <param name="item">item which parent will be returned</param> /// <returns>item parent</returns> private Node GetParent(T item) { Node parent = null; var node = this.root; if (!this.Contains(item)) { while (node != null) { parent = node; node = node.GetChild(item); } return parent; } while (node != null) { if (node.Value.Equals(item)) { return parent; } parent = node; node = node.GetChild(item); } return parent; } /// <summary> /// Class that represents a node of a tree /// </summary> private class Node { /// <summary> /// Initializes a new instance of the <see cref="Node"/> class. /// </summary> /// <param name="value">node value</param> public Node(T value) { this.Value = value; } /// <summary> /// Gets or sets node value /// </summary> public T Value { get; set; } /// <summary> /// Gets or sets right child of a node /// </summary> public Node RightChild { get; set; } /// <summary> /// Gets or sets left child of a node /// </summary> public Node LeftChild { get; set; } /// <summary> /// Gets child of a node by a key /// </summary> /// <param name="key">given key</param> /// <returns>node left child if key is less than node value, node right child otherwise</returns> public Node GetChild(T key) { if (this.Value.CompareTo(key) > 0) { return this.LeftChild; } else { return this.RightChild; } } /// <summary> /// Set node child by a key /// </summary> /// <param name="key">given key</param> /// <param name="item">item by which a node chil will be replaced</param> public void SetChild(T key, Node item) { if (this.Value.CompareTo(key) > 0) { this.LeftChild = item; } else { this.RightChild = item; } } /// <summary> /// Gets node leftmost descendant /// </summary> /// <returns>node leftmost descendant</returns> public Node GetLeftmostDescendant() { var node = this; while (node.LeftChild != null) { node = node.LeftChild; } return node; } } } }
C#
UTF-8
3,309
3.265625
3
[]
no_license
using System; using System.Collections.Generic; using System.Linq; namespace Generic.Extend { /// <summary> /// .NET 4.0 /// 只能放在接口或者委托的泛型参数里面 /// out 协变covariant 修饰返回值 /// in 逆变contravariant 修饰传入的参数 /// 逆变和协变只能放在泛型接口和泛型委托的泛型参数里 /// <see cref="https://www.cnblogs.com/wangbaicheng1477865665/p/OutIn.html"/> /// </summary> public class CCTest { public static void Show() { { Bird b1 = new Bird(); Bird b2 = new Sparrow(); Sparrow b3 = new Sparrow(); // Sparrow b4 = new Bird(); } { List<Bird> birds = new List<Bird>(); // List<Bird>birds1 = new List<Sparrow>(); List<Bird> birds2 = new List<Sparrow>() .Select(c=>((Bird)c)).ToList(); } { // 协变 IEnumerable<Bird>birds = new List<Bird>(); IEnumerable<Bird>birds1=new List<Sparrow>(); Func<Bird> func = new Func<Sparrow>(() => null); //这是能编译的 ICustomerListOut<Bird> customerList1 = new CustomerListOut<Bird>(); //这也是能编译的,在泛型中,子类指向父类,我们称为协变 ICustomerListOut<Bird> customerList2 = new CustomerListOut<Sparrow>(); } { // 逆变 ICustomerListIn<Sparrow> customerList2 = new CustomerListIn<Sparrow>(); ICustomerListIn<Sparrow> customerList1 = new CustomerListIn<Bird>();//父类指向子类,我们称为逆变 ICustomerListIn<Bird> birdList1 = new CustomerListIn<Bird>(); birdList1.Show(new Sparrow()); birdList1.Show(new Bird()); Action<Sparrow> act = new Action<Bird>((Bird i) => { }); } } } /// <summary> /// 逆变 参数不能作为返回值 /// </summary> /// <typeparam name="T"></typeparam> public interface ICustomerListIn<in T> { //T Get();//不能作为返回值 void Show(T t); } public class CustomerListIn<T> : ICustomerListIn<T> { public void Show(T t) { } } /// <summary> /// out 协变 只能是返回结果 /// 协变就是让泛型有子父级关系 /// </summary> /// <typeparam name="T"></typeparam> public interface ICustomerListOut<out T> { T Get(); // void Show(T t);//T不能作为传入参数 } /// <summary> /// 类没有协变逆变 /// </summary> /// <typeparam name="T"></typeparam> public class CustomerListOut<T> : ICustomerListOut<T> { public T Get() { return default(T); } public void Show(T t) { } } /// <summary> /// 鸟类 /// </summary> public class Bird { 
public int Id { get; set; } } /// <summary> /// 麻雀 /// </summary> public class Sparrow : Bird { public string Name { get; set; } } }
Python
UTF-8
5,055
3.484375
3
[]
no_license
""" Class for a classification algorithm. """ import pdb import numpy as np import math class Naive_Bayes: def __init__(self, classifier_type, **kwargs): """ Initializer. Classifier_type should be a string which refers to the specific algorithm the current classifier is using. Use keyword arguments to store parameters specific to the algorithm being used. E.g. if you were making a neural net with 30 input nodes, hidden layer with 10 units, and 3 output nodes your initalization might look something like this: neural_net = Classifier(weights = [], num_input=30, num_hidden=10, num_output=3) Here I have the weight matrices being stored in a list called weights (initially empty). """ self.classifier_type = classifier_type self.params = kwargs self.mean = {} self.stdv = {} self.classify = {} """ The kwargs you inputted just becomes a dictionary, so we can save that dictionary to be used in other methods. """ def train(self, training_data): """ Data should be nx(m+1) numpy matrix where n is the number of examples and m is the number of features (recall that the first element of the vector is the label). I recommend implementing the specific algorithms in a seperate module and then determining which method to call based on classifier_type. E.g. if you had a module called neural_nets: if self.classifier_type == 'neural_net': import neural_nets neural_nets.train_neural_net(self.params, training_data) Note that your training algorithms should be modifying the parameters so make sure that your methods are actually modifying self.params You should print the accuracy, precision, and recall on the training data. 
""" #seperate by class #pdb.set_trace() for row in training_data: #print row if row[0] not in self.classify: self.classify[row[0]] = [row[1:]] else: #pdb.set_trace() self.classify[row[0]].append(row[1:]) #return classify # get mean for each attribute per class #pdb.set_trace() for clas in self.classify.keys(): for i in range(len(self.classify[clas][0])): self.mean[(clas, i)] = np.mean(zip(*self.classify[clas])[i]) self.stdv[(clas, i)] = np.std(zip(*self.classify[clas])[i]) #pdb.set_trace() def predict(self, data): """ Predict class of a single data vector Data should be 1x(m+1) numpy matrix where m is the number of features (recall that the first element of the vector is the label). I recommend implementing the specific algorithms in a seperate module and then determining which method to call based on classifier_type. This method should return the predicted label. """ #realClass = data[0] attributes = data[1:] #get probablity for each attribute prob = {} classProb = {} for clas in self.classify.keys(): classProb[clas] = 1 for i in range(len(attributes)): for clas in self.classify.keys(): mean, stdv = self.mean[(clas, i)], self.stdv[(clas,i)] exponent = math.exp(-(math.pow(attributes[i]-mean,2)/(2*math.pow(stdv,2)))) attributeProb = (1 / math.sqrt(2*math.pi) * stdv) * exponent #if i in prob: #prob[i].append(attributeProb) #prob[i][clas] = attributeProb #else: #prob[i][clas] = attributeProb #prob[i] = [attributeProb] if i not in prob: prob[i] = {} prob[i][clas] = attributeProb #TODO: change this so that it can allow with cases that have more than 2 classifications #pdb.set_trace() for attributeProb in prob.values(): for clas in self.classify.keys(): classProb[clas] *= attributeProb[clas] #classProb[1.0] *= attributeProb[1] #classProb[clas] *= attributeProb # classProb[clas] * attributeProb #pdb.set_trace() #db.set_trace() maxClass = self.classify.keys()[0] maxProb = classProb[maxClass] for clas in self.classify.keys(): if classProb[clas] > maxProb: maxProb = 
classProb[clas] maxClass = clas return maxClass def test(self, test_data): """ Data should be nx(m+1) numpy matrix where n is the number of examples and m is the number of features (recall that the first element of the vector is the label). You should print the accuracy, precision, and recall on the test data. """
Shell
UTF-8
1,984
3.46875
3
[]
no_license
#!/bin/bash # #DESCRIPTION # Monitor usage of cohesity view using listViews.py python script # #NOTES # File Name : usage_cohesity_view.sh # Author : Miquel Mariano - @miquelMariano | https://miquelmariano.github.io # Version : 1 # #USAGE # Put this script on /var/prtg/scriptsxml and use "SSH script advanced sensor XML" # ./usage_cohesity_view.sh view_name gib cohesity.corp.local miquel.mariano corp.local # #REQUISITES # This script need listViews.py see https://github.com/bseltz-cohesity/scripts/tree/b445a612dd472f1db5126c763b0f66a072d4a2f1/python/listViews #CHANGELOG # v1 21/12/2021 Script creation # cohesity_view_name=$1 units=$2 #mib or gib vip=$3 user=$4 domain=$5 logical_usage=$(/etc/scripts-cohesity/listViews/listViews.py -v $3 -u $4 -d $5 -s -n $1 -x $2 | grep 'logical usage' |awk '{print $3}' |cut -d . -f 1) logical_quota=$(/etc/scripts-cohesity/listViews/listViews.py -v $3 -u $4 -d $5 -s -n $1 -x $2 | grep 'logical quota' |awk '{print $3}') quota_alert=$(/etc/scripts-cohesity/listViews/listViews.py -v $3 -u $4 -d $5 -s -n $1 -x $2 | grep 'quota alert' |awk '{print $3}') usage=$(($logical_usage * 100/$logical_quota)) xmlresult=`cat <<EOF <?xml version="1.0" encoding='UTF-8'?> <prtg> <text> Cohesity statistics for view: $1</text> <result> <channel>Usage</channel> <unit>Percent</unit> <value>$usage</value> <LimitMaxError>90</LimitMaxError> <LimitMaxWarning>80</LimitMaxWarning> <LimitMode>1</LimitMode> </result> <result> <channel>Logical Usage</channel> <unit>Custom</unit> <customUnit>$2</customUnit> <value>$logical_usage</value> </result> <result> <channel>Logical Quota</channel> <unit>Custom</unit> <customUnit>$2</customUnit> <value>$logical_quota</value> </result> <result> <channel>Quota alert</channel> <unit>Custom</unit> <customUnit>$2</customUnit> <value>$quota_alert</value> </result> </prtg> EOF ` echo "$xmlresult" exit 0