text
stringlengths 27
775k
|
|---|
using Autodesk.DesignScript.Runtime;
namespace Camber.Civil.Styles.Labels.ProfileView
{
// Enumerates the label-style collections of a Civil 3D profile view.
// Hidden from the Dynamo node library: used internally only.
[IsVisibleInDynamoLibrary(false)]
public enum ProfileViewLabelStyles
{
// NOTE(review): DefaultLabelStyle is deliberately commented out — confirm
// with upstream before re-enabling.
//DefaultLabelStyle,
// Styles for depth labels.
DepthLabelStyles,
// Styles for station/elevation labels.
StationElevationLabelStyles
}
}
|
package cn.hfbin.auth.provider.token;
import cn.hfbin.common.core.constant.GrantTypeConstant;
import cn.hfbin.common.core.jwt.model.AuthUserInfo;
import cn.hfbin.ucpm.params.LoginParams;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
/**
 * @Author: huangfubin
 * @Description: OpenID login grant strategy. (The original header read
 *               "account / captcha / password login" — NOTE(review): that looks
 *               copy-pasted from a sibling strategy; this bean is registered
 *               under GrantTypeConstant.OPEN_ID.)
 * @Date: 2021/7/29
 */
@Slf4j
@Component(GrantTypeConstant.OPEN_ID)
public class OpenIdStrategy extends AbstractTokenGranter {
// Stub implementation: only logs the invocation and returns null
// (no authenticated user is produced yet).
@Override
public AuthUserInfo grant(LoginParams loginParam) {
log.info("OpenIdTokenGranter");
return null;
}
}
|
import { Color } from "@tangle-frost/iota-core/dist/data/color";
/**
 * Options for Png renderer. All properties are optional.
 */
export declare class PngRendererOptions {
/**
 * The foreground colour.
 */
foreground?: Color;
/**
 * The background colour.
 */
background?: Color;
/**
 * The css class to apply for the html element.
 */
cssClass?: string;
}
|
package com.alamkanak.weekview
import android.content.Context
import android.os.Handler
import android.os.Looper
import androidx.annotation.WorkerThread
import java.util.concurrent.Executor
import java.util.concurrent.Executors
/**
 * [Executor] that runs every submitted [Runnable] on the Android main thread
 * by posting it to a [Handler] bound to the main looper.
 */
class MainExecutor : Executor {

    private val mainThreadHandler = Handler(Looper.getMainLooper())

    override fun execute(runnable: Runnable) {
        mainThreadHandler.post(runnable)
    }
}
/**
 * A helper class that processes the submitted [WeekViewEntity] objects and creates [EventChip]s
 * on a background thread.
 */
internal class EventsProcessor(
private val context: Context,
private val eventsCache: EventsCache,
private val eventChipsFactory: EventChipsFactory,
private val eventChipsCache: EventChipsCache
) {
// All processing is serialised onto a single worker thread; results are
// handed back to the caller via the main-thread executor below.
private val backgroundExecutor = Executors.newSingleThreadExecutor()
private val mainThreadExecutor = MainExecutor()
/**
 * Updates the [EventsCache] with the provided [WeekViewEntity] elements and creates
 * [EventChip]s.
 *
 * @param entities The list of new [WeekViewEntity] elements
 * @param viewState The current [ViewState] of [WeekView]
 * @param onFinished Callback to inform the caller whether [WeekView] should invalidate.
 *                   Invoked on the main thread after processing completes.
 */
fun submit(
entities: List<WeekViewEntity>,
viewState: ViewState,
onFinished: () -> Unit
) {
backgroundExecutor.execute {
submitItems(entities, viewState)
mainThreadExecutor.execute {
onFinished()
}
}
}
// Resolves the entities against the current context, refreshes the events
// cache, then rebuilds or extends the chip cache depending on the cache type.
@WorkerThread
private fun submitItems(
items: List<WeekViewEntity>,
viewState: ViewState
) {
val resolvedItems = items.map { it.resolve(context) }
eventsCache.update(resolvedItems)
if (eventsCache is SimpleEventsCache) {
// A simple cache holds the complete data set, so all chips are rebuilt.
val eventChips = eventChipsFactory.create(resolvedItems, viewState)
eventChipsCache.replaceAll(eventChips)
} else {
// Other caches accumulate: only create chips for IDs not seen before.
val existingIds = eventChipsCache.eventIds
val newResolvedItems = resolvedItems.filterNot { it.id in existingIds }
val eventChips = eventChipsFactory.create(newResolvedItems, viewState)
eventChipsCache.addAll(eventChips)
}
}
}
|
using NFugue.Parsing;
namespace NFugue.Temporal
{
/// <summary>
/// An event that can be executed (re-fired) against a <see cref="Parser"/>.
/// </summary>
public interface ITemporalEvent
{
/// <summary>Executes this event on the given parser.</summary>
void Execute(Parser parser);
}
}
|
# Platform identifiers consumed by download/install scripts.
export OS="linux"
export ARCH="amd64"
export ARCH_ALT="x86_64"
export WORKFOLDER="$HOME/workfolder"
export DOTFILES="$HOME/.dotfiles"
# Local tool directories.
export PATH="$PATH:/usr/local/bin"
export PATH="$PATH:/usr/local/go/bin"
#export GOPATH="$(go env GOPATH)"
# BUG FIX: when LD_LIBRARY_PATH is unset, "/usr/local/lib:$LD_LIBRARY_PATH"
# expands to "/usr/local/lib:" — the trailing empty entry makes the dynamic
# loader search the current directory (a security hazard). Only append the
# separator when the variable already has a value.
export LD_LIBRARY_PATH="/usr/local/lib${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}"
export DISTRO_VERSION="$(lsb_release -sc)"
# for googler
export BROWSER=w3m
|
const { prepare, hydrateStrings } = require('./clean')
const createElement = require('./element')
// Substitutes each "_CDATA$<n>" placeholder with the n-th stored CDATA payload.
function hydrateCDATA(CDATA, text) {
  return text.replace(/_CDATA\$(?<index>\d+)/g, (_, index) => String(CDATA[index]))
}
// Parses the pre-processed markup in data.text into an element tree.
// `data` carries the extracted string literals (strings) and CDATA payloads
// (CDATA) that get re-injected while parsing. Returns { rootElement }.
function parseXML(data) {
  // One match per tag plus the text that immediately follows it: "<a>text".
  const tag = /(?<balise><[^>]+>)(?<text>[^<]*)/g
  const stack = []
  const rootElement = createElement('root')
  // Re-inject the string literals previously pulled out by `prepare`.
  const hydrate = (value = "") => hydrateStrings({ strings: data.strings, text: value }, false)
  // BUG FIX: `exec` was assigned without a declaration, creating an implicit
  // global (and a ReferenceError under strict mode / ES modules).
  let exec
  while ((exec = tag.exec(data.text)) !== null) {
    let { balise, text } = exec.groups
    // Skip processing instructions (<?...?>) and declarations (<!...>).
    if (balise[1] !== '?' && balise[1] !== '!') {
      text = hydrateCDATA(data.CDATA, hydrate(text))
      // Normalise "key = value" to "key=value" so splitting on spaces works.
      balise = balise.replace(/ *= */g, '=')
      const [tagName, ...attributes] = balise.replace(/((^<\??)|(\??>$))/g, '').split(' ')
      const lastElement = stack[stack.length - 1]
      const element = createElement(tagName.replace(/((^\/)|(\/$))/g, ''))
      if (text) element.$addChildren(createElement(text, true))
      attributes?.forEach(attr => {
        const [key, value] = attr.split('=')
        element.$addAttribute(key, hydrate(value))
      })
      if (lastElement) {
        if ('/' + lastElement.$tagName === tagName) {
          // Closing tag for the currently open element.
          stack.pop()
          if (text) {
            // Text following a closing tag belongs to the parent element.
            stack[stack.length - 1]?.$addChildren(createElement(text, true))
          }
        } else if (!tagName.startsWith('/')) {
          lastElement.$addChildren(element)
          if (!balise.endsWith('/>')) {
            stack.push(element)
          }
        }
      } else {
        rootElement.$addChildren(element)
        stack.push(element)
      }
    }
  }
  return { rootElement }
}
// Pulls every <![CDATA[...]]> section out of the document, replacing each one
// with a "_CDATA$<n>" placeholder so the tag parser never sees raw CDATA.
// Returns the payload list and the rewritten text.
function extractCDATA(data) {
  const CDATA = []
  const text = data.replace(/<!\[CDATA\[((?!\]\]>).)*\]\]>/gs, (section) => {
    // slice(9, -3) drops the "<![CDATA[" prefix and "]]>" suffix.
    const index = CDATA.push(section.slice(9, -3)) - 1
    return '_CDATA$' + index
  })
  return { CDATA, text }
}
// XML parser facade: extracts CDATA sections and string literals first, then
// parses the remaining markup into an element tree rooted at `root`.
module.exports = class XMEN_XML {
// Raw CDATA payloads, indexed by their "_CDATA$<n>" placeholders.
CDATA = []
// String literals extracted by `prepare`, re-injected during parsing.
strings = []
// Root element of the parsed tree.
root = null
// The input exactly as supplied (after Buffer decoding).
originalData = null
// The pre-processed markup that was actually parsed.
text = null
// Accepts the document as a string or a Buffer; anything else throws.
constructor(dataInStringOrBuffer) {
let data;
if (typeof dataInStringOrBuffer === 'string') {
data = dataInStringOrBuffer
} else if (dataInStringOrBuffer instanceof Buffer) {
data = dataInStringOrBuffer.toString()
} else {
throw new Error("Invalid data in constructor.")
}
this.originalData = data
const { CDATA, text: tmp } = extractCDATA(data)
this.CDATA = [...CDATA]
let { strings, text } = prepare(tmp)
this.strings = [...strings]
this.text = '' + text
const parsed = parseXML({ text, strings, CDATA })
this.root = parsed.rootElement
}
}
|
// Copyright 2015-2019 Hans Dembinski
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt
// or copy at http://www.boost.org/LICENSE_1_0.txt)
#ifndef BOOST_HISTOGRAM_DETAIL_RELAXED_EQUAL_HPP
#define BOOST_HISTOGRAM_DETAIL_RELAXED_EQUAL_HPP
#include <boost/histogram/detail/priority.hpp>
#include <type_traits>
namespace boost {
namespace histogram {
namespace detail {
// Functor implementing "relaxed" equality:
//  - if T and U have an operator==, it is used (preferred overload);
//  - two values of the same stateless (empty) type compare equal;
//  - otherwise the values are considered not equal.
// Overload selection is steered by the priority<N> tag arguments: the
// decltype overload wins when operator== exists; SFINAE removes it otherwise.
struct relaxed_equal {
template <class T, class U>
constexpr auto impl(const T& t, const U& u, priority<1>) const noexcept
-> decltype(t == u) const {
return t == u;
}
// consider T and U not equal, if there is no operator== defined for them
template <class T, class U>
constexpr bool impl(const T&, const U&, priority<0>) const noexcept {
return false;
}
// consider two T equal if they are stateless
template <class T>
constexpr bool impl(const T&, const T&, priority<0>) const noexcept {
return std::is_empty<T>::value;
}
template <class T, class U>
constexpr bool operator()(const T& t, const U& u) const noexcept {
return impl(t, u, priority<1>{});
}
};
} // namespace detail
} // namespace histogram
} // namespace boost
#endif
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace Sociatis.Extensions
{
public static class DropdownListExtensions
{
    /// <summary>
    /// Returns a copy of the list in which the item whose Value equals
    /// <paramref name="item"/> is marked as selected. The input list and its
    /// items are left untouched.
    /// </summary>
    public static List<SelectListItem> Choose(this List<SelectListItem> list, int item)
    {
        string value = item.ToString();
        // BUG FIX: the original copied only the list, not the items — the copy
        // shared the same SelectListItem instances, so setting Selected also
        // mutated the caller's items. Clone each element instead.
        var copyList = list
            .Select(x => new SelectListItem
            {
                Text = x.Text,
                Value = x.Value,
                Selected = x.Selected
            })
            .ToList();
        // Tolerate a value that is not present instead of throwing.
        var match = copyList.FirstOrDefault(x => x.Value == value);
        if (match != null)
        {
            match.Selected = true;
        }
        return copyList;
    }
}
}
|
/* Code Pulse: a real-time code coverage tool, for more information, see <http://code-pulse.com/>
*
* Copyright (C) 2014-2017 Code Dx, Inc. <https://codedx.com/>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.codedx.codepulse.hq.trace
import java.util.concurrent.ScheduledThreadPoolExecutor
import java.util.concurrent.TimeUnit
import scala.collection.mutable.HashMap
import com.codedx.codepulse.hq.monitor.HealthMonitor
import com.codedx.codepulse.hq.monitor.TraceComponent
import com.codedx.codepulse.hq.monitor.TraceComponentHealth
import com.codedx.codepulse.hq.monitor.TraceComponentMonitorData
import com.codedx.codepulse.hq.util.DaemonThreadFactory
import reactive.EventSource
import reactive.EventStream
import reactive.Observing
/** An aggregator and scheduler for a collection of HealthMonitors.
 * Individual monitors may be added and removed from this collection;
 * each monitor will be run at its decided rate in a thread pool, and
 * the status which it reports will be added to an internal status map
 * that records the most up-to-date status for each TraceComponent.
 *
 * @param threadPoolSize The number of threads to use in the internal
 * executor. Defaults to 1.
 */
class TraceStatus(threadPoolSize: Integer = 1) extends Cleanup with Observing {
// Latest known health per component; all access synchronizes on the map.
private val statusMap = new HashMap[TraceComponent, TraceComponentHealth]
// Daemon threads so pending monitor runs never block JVM shutdown.
private val executor = new ScheduledThreadPoolExecutor(threadPoolSize, DaemonThreadFactory)
/** An Event that gets triggered when one of the connected HealthMonitors
 * reports a new status for its associated component.
 */
def healthChanges: EventStream[TraceComponentHealth] = healthChangeSource
private val healthChangeSource = new EventSource[TraceComponentHealth]
/** An event that gets triggered when new monitor data is reported */
def monitorData: EventStream[TraceComponentMonitorData] = healthChangeSource.map { _.data }.filter { _.isDefined }.map { _.get }
// Stores the new health and fires an event on first report or on change.
// HashMap.put returns the previous value for the key, which is what the
// match below uses to distinguish "first report" from "update".
private def reportHealth(health: TraceComponentHealth) {
statusMap.synchronized { statusMap.put(health.component, health) } match {
case Some(lastHealth) =>
if (lastHealth != health) {
// report change in health of component
healthChangeSource.fire(health)
}
case None =>
healthChangeSource.fire(health)
}
}
/** @return A map containing the most up-to-date status for each trace component. */
def currentHealth = statusMap.synchronized { statusMap.toMap }
/** Looks up the current monitor data for a specific component */
// NOTE(review): this casts the stored TraceComponentHealth itself to T, a
// TraceComponentMonitorData subtype. Given that `monitorData` above obtains
// monitor data from health.data, this may have been intended as
// health.data.map(_.asInstanceOf[T]) — type erasure hides the bad cast until
// the value is used. Confirm with callers.
def currentMonitorData[T <: TraceComponentMonitorData](component: TraceComponent): Option[T] = {
statusMap.synchronized { statusMap.get(component) } match {
case Some(data) => Some(data.asInstanceOf[T])
case None => None
}
}
/** Add a HealthMonitor, scheduling it to run repeatedly at its own decided interval,
 * after an immediate run.
 * When it runs, any reported `healthUpdateEvent` that it triggers will be propagated
 * into this TraceStatus's internal map.
 */
def addHealthMonitor(monitor: HealthMonitor): Unit = {
monitor.healthUpdates += reportHealth
executor.scheduleAtFixedRate(monitor, 0, monitor.runInterval, TimeUnit.MILLISECONDS)
}
// /** Removes a HealthMonitor from this TraceStatus, so that its health updates will no longer
// * be propagated into the internal health map.
// */
// def -=(monitor: HealthMonitor) {
// // monitor.healthUpdateEvent -= reportHealth
// executor.remove(monitor)
// }
/** Stop updating the health map and trigger the shutdown. */
def cleanup = executor.shutdown
}
|
using System;
using System.Collections.Generic;
using System.Linq;
namespace E01.DistanceBetweenVerticles
{
// Reads a graph and a list of vertex pairs from stdin, then prints the
// shortest-path distance (in edges) between each pair using BFS.
internal class Program
{
// Adjacency list: node -> list of child nodes, read from stdin.
private static Dictionary<int, List<int>> Graph;
// Visited flags for the current query; rebuilt before each BFS.
private static Dictionary<int, bool> Visited;
// BFS-tree parent of each node (-1 = unreached/root), used to rebuild paths.
private static Dictionary<int, int> Parent;
// Input format: vertex count, pair count, the graph lines, then
// "start-end" query lines.
static void Main(string[] args)
{
int verticlesQty = int.Parse(Console.ReadLine());
int pairsQty = int.Parse(Console.ReadLine());
Graph = RegisterGraph(verticlesQty);
for (int i = 0; i < pairsQty; i++)
{
Visited = RegisterVisitedList();
Parent = RegisterParentsList();
string[] data = Console.ReadLine().Split('-');
int start = int.Parse(data[0]);
int end = int.Parse(data[1]);
BFS(start, end);
}
}
// Parses n lines of the form "node:c1 c2 ..." (children part optional).
private static Dictionary<int, List<int>> RegisterGraph(int n)
{
Dictionary<int, List<int>> graph = new Dictionary<int, List<int>>();
for (int i = 0; i < n; i++)
{
string[] data = Console.ReadLine().Split(':', StringSplitOptions.RemoveEmptyEntries);
if (data.Length == 1)
{
graph.Add(int.Parse(data[0]), new List<int>());
continue;
}
List<int> children = data[1].Split().Select(int.Parse).ToList();
graph.Add(int.Parse(data[0]), children);
}
return graph;
}
// All-false visited map over the graph's vertices.
private static Dictionary<int, bool> RegisterVisitedList()
{
Dictionary<int, bool> list = new Dictionary<int, bool>();
foreach (int n in Graph.Keys) list.Add(n, false);
return list;
}
// Parent map initialised to -1 (no parent) for every vertex.
private static Dictionary<int, int> RegisterParentsList()
{
Dictionary<int, int> list = new Dictionary<int, int>();
foreach (int n in Graph.Keys) list.Add(n, -1);
return list;
}
// Breadth-first search from start; prints "{start, target} -> distance"
// where distance is the path length in edges, or -1 when unreachable.
private static void BFS(int start, int target)
{
Queue<int> queue = new Queue<int>();
queue.Enqueue(start);
Visited[start] = true;
while (queue.Count > 0)
{
int node = queue.Dequeue();
if (node == target)
{
Stack<int> path = GetPath(target);
Console.WriteLine($"{{{start}, {target}}} -> {path.Count - 1}");
return;
}
foreach (int child in Graph[node])
{
if (!Visited[child])
{
Visited[child] = true;
queue.Enqueue(child);
Parent[child] = node;
}
}
}
Console.WriteLine($"{{{start}, {target}}} -> -1");
}
// Walks Parent links back from target to the BFS root; the returned stack
// has the root on top, and Count - 1 is the path length in edges.
private static Stack<int> GetPath(int target)
{
Stack<int> path = new Stack<int>();
int idx = target;
while (idx != -1)
{
path.Push(idx);
idx = Parent[idx];
}
return path;
}
}
}
|
package com.lasta.api.sample.model.form
import com.lasta.api.sample.constant.GreetingPhase
import java.io.Serializable
import javax.validation.constraints.NotNull
// Request form (bean) for the greeting endpoint.
class GreetingForm : Serializable {
companion object {
private const val serialVersionUID: Long = 1L
}
// Required field, enforced by Bean Validation's @NotNull; lateinit defers
// initialisation to the framework's form binding.
@NotNull
lateinit var phase: GreetingPhase
// Optional display name; null when the caller omits it.
var name: String? = null
}
|
using MongoDB.Bson;
using MongoDB.Bson.Serialization.Attributes;
namespace Store.Infra.MongoDB.DataModels;
/// <summary>MongoDB persistence model for a store evaluation (rating).</summary>
public class EvaluationData
{
/// <summary>Document id.</summary>
public ObjectId Id { get; set; }
/// <summary>Id of the evaluated store; persisted as an ObjectId in Mongo.</summary>
[BsonRepresentation(BsonType.ObjectId)]
public string? StoreId { get; set; }
/// <summary>Star rating value.</summary>
public int Stars { get; set; }
/// <summary>Optional free-text comment.</summary>
public string? Commentary { get; set; }
}
|
import os
from pathlib import Path
import pytest
README = Path(__file__).parent.parent / "README.md"
SAMPLE = Path(__file__).parent / "data" / "example5dOMX.dv"


@pytest.mark.skipif(os.name == "nt", reason="paths annoying on windows")
def test_readme():
    """Execute the first python-fenced snippet from the README against SAMPLE."""
    fenced = README.read_text().split("```python")[1]
    snippet = fenced.split("```")[0]
    # Point the snippet's placeholder file at the bundled sample data.
    exec(snippet.replace("some_file.dv", str(SAMPLE.absolute())))
|
#include "event_Event.h"
namespace event {
// Downcast accessors: each returns *this as the concrete event subtype.
// dynamic_cast on a reference throws std::bad_cast when the event is not of
// the requested kind, so callers should check the event type first (or be
// prepared for the exception).
const EventAction& Event::getAction() const {
return dynamic_cast<const EventAction&>(*this);
}
const EventState& Event::getState() const {
return dynamic_cast<const EventState&>(*this);
}
const EventRange& Event::getRange() const {
return dynamic_cast<const EventRange&>(*this);
}
// Action event: a named action with a list of string arguments.
EventAction::EventAction(cb::string name, const cb::strvector& args)
: Event(EventType::Action, name), StrArgList(args)
{
}
// State event: a named boolean state change.
EventState::EventState(cb::string name, bool state)
: Event(EventType::State, name), state(state)
{
}
// Range event: a named continuous value together with its previous value.
EventRange::EventRange(cb::string name, float value, float oldValue)
: Event(EventType::Range, name), value(value), oldValue(oldValue)
{
}
}
|
# Matches an RFC 2047 MIME "encoded-word", e.g. "=?utf-8?Q?...?=" (Q or B form).
MIME_ENCODED = /=\?([a-z\-0-9]*)\?[QB]\?([a-zA-Z0-9+\/=\_\-]+)\?=/i
# Bare address as it appears in an IMAP envelope, e.g. "user@host".
IMAP_EMAIL_ENVELOPE_FORMAT = /([a-zA-Z\-\.\_]*@[a-zA-Z\-\.\_]*)/
# "Display Name <user@host>" form.
IMAP_EMAIL_ENVELOPE_FORMAT2 = /(.*)<([a-zA-Z\-\.\_]*@[a-zA-Z\-\.\_]*)>/
# NOTE(review): Iconv was removed from the stdlib in Ruby 1.9+; this file
# predates String#encode.
require 'iconv'
# Cheap sanity check, not full RFC 5322 validation: bounded length, a dot
# somewhere after the "@", and exactly one "@".
def valid_email?(email)
  has_single_at = email.count('@') == 1
  looks_like_address = email =~ /.@.+\../
  email.size < 100 && looks_like_address && has_single_at
end
# True when the string contains at least one MIME encoded-word.
# nil is treated as "not encoded".
def mime_encoded?( str )
  return false if str.nil?
  !(MIME_ENCODED =~ str).nil?
end
# Decodes RFC 2047 "Q" (quoted-printable style) encoded text.
def from_qp(str, remove_underscore = true)
return '' if str.nil?
result = str.gsub(/=\r\n/, "") # drop soft line breaks
result = result.gsub(/_/, " ") if remove_underscore # "_" encodes a space in Q encoding
result.gsub!(/\r\n/m, $/) # normalise CRLF to the input record separator ($/, usually "\n")
result.gsub!(/=([\da-fA-F]{2})/) { $1.hex.chr } # "=XX" hex escapes back to bytes
result
end
# Decodes every MIME encoded-word in `str` to UTF-8.
# Q-encoded words go through from_qp; B-encoded words are Base64-unpacked.
# Charset conversion failures fall back to the undecoded payload.
def mime_decode(str, remove_underscore = true)
return '' if str.nil?
str.gsub(MIME_ENCODED) {|s|
# enc[0] = charset, enc[1] = encoded payload.
enc = s.scan(MIME_ENCODED).flatten
if /\?Q\?/i =~ s
begin
Iconv.conv("UTF-8", enc[0], from_qp(enc[1], remove_underscore))
rescue
from_qp(enc[1], remove_underscore)
end
else
begin
# NOTE(review): unpack("m*") returns an Array; on Ruby >= 1.9 Array#to_s is
# inspect-style, so this likely wants unpack("m*")[0] — confirm the target
# Ruby version before changing.
Iconv.conv("UTF-8", enc[0], enc[1].unpack("m*").to_s)
rescue
enc[1].unpack("m*").to_s
end
end
}
end
# Formats an IMAP envelope address string as "Name<email>", MIME-decoding the
# name part when necessary. Returns a diagnostic string on parse failure.
def imap2friendlly_email(str)
  begin
    # BUG FIX: the original tested `str === IMAP_EMAIL_ENVELOPE_FORMAT`.
    # String#=== is plain equality, so that branch could never match; Regexp
    # must be the receiver (Regexp#===) for a pattern match.
    if IMAP_EMAIL_ENVELOPE_FORMAT === str
      email = str.scan(IMAP_EMAIL_ENVELOPE_FORMAT)[0][0]
    else
      # NOTE(review): FORMAT2's first capture is the display name, not the
      # email — confirm whether [0][1] was intended here.
      email = str.scan(IMAP_EMAIL_ENVELOPE_FORMAT2)[0][0]
    end
    name = str.slice(0, str.rindex(email)-1)
    # BUG FIX: `decode` is not defined anywhere in this file; the MIME decoder
    # defined above is `mime_decode`.
    name = mime_decode(name).to_s if mime_encoded?(name)
    return "#{name.nil? ? '' : name.strip}<#{email}>"
  rescue
    "Error parsing str - #{str.scan(IMAP_EMAIL_ENVELOPE_FORMAT)} - #{str.scan(IMAP_EMAIL_ENVELOPE_FORMAT2)}"
  end
end
# Returns the display-name part of an IMAP envelope address, or the bare email
# when there is no name part. Falls back to the raw input on any error.
def imap2friendlly_name(str)
begin
email = str.scan(IMAP_EMAIL_ENVELOPE_FORMAT)[0][0]
name = str.slice(0, str.rindex(email))
if name.nil? or name.strip == ""
return email
else
return name
end
rescue
str
end
end
# Returns "Name<email>" from an IMAP envelope address, or just the email when
# there is no name part. Falls back to the raw input on any error.
def imap2friendlly_full_name(str)
begin
email = str.scan(IMAP_EMAIL_ENVELOPE_FORMAT)[0][0]
name = str.slice(0, str.rindex(email))
if name.nil? or name.strip == ""
return email
else
return "#{name}<#{email}>"
end
rescue
str
end
end
# Returns only the stripped name part preceding the email; "" when absent.
# NOTE(review): unlike its siblings this has no rescue — it raises when the
# input contains no email address.
def imap2name_only(str)
email = str.scan(IMAP_EMAIL_ENVELOPE_FORMAT)[0][0]
name = str.slice(0, str.rindex(email))
return "#{name.nil? ? '' : name.strip}"
end
# Parses an RFC 822-style date ("Wed, 02 Oct 2002 13:00:00 +0200") from an
# IMAP envelope into local time; returns Time.now when parsing fails.
# Captures: [0]=weekday, [1]=day, [2]=month name, [3]=year, [4..6]=h/m/s,
# [7]=zone. Time.local takes (year, month, day, hour, min, sec).
# NOTE(review): the timezone offset (vals[7]) is captured but ignored.
def imap2time(str)
begin
vals = str.scan(/(...), (.?.) (...) (....) (..):(..):(..) (.*)/)[0]
Time.local(vals[3],vals[2],vals[1],vals[4],vals[5],vals[6])
rescue
Time.now
end
end
# Builds an RFC 2047 Q-encoded "=?charset?Q?name?= <email>" header value.
# Every byte of the name is hex-escaped ("=XX") — valid, though verbose.
# The charset comes from the application config (CDF::CONFIG[:mail_charset]).
def encode_email(names, email)
nameen = ""
names.each_byte { | ch | nameen = nameen +"=" + sprintf("%X",ch) }
return "=?#{CDF::CONFIG[:mail_charset]}?Q?#{nameen}?= <#{email}>"
end
# #############################
# HTML utils
# #############################
# Maps an HTML tag name (plus its raw attribute string) to its sanitised
# replacement: structural/document tags are dropped, title/style/script
# content is wrapped in a hidden "notviscode" div, links become inert <xlink>
# tags, and any other tag is re-emitted with attributes filtered through
# replace_attr. Lookup is case-insensitive via downcase.
def replace_tag(tag, attrs)
replacements = {"body" => "",
"/body" => "",
"meta" => "",
"/meta" => "",
"head" => "",
"/head" => "",
"html" => "",
"/html" => "",
"title" => "<div class='notviscode'>",
"/title" => "</div>",
"div" => "",
"/div" => "",
"span" => "",
"/span" => "",
"layer" => "",
"/layer" => "",
"br" => "<br/>",
"/br" => "<br/>",
"iframe" => "",
"/iframe" => "",
"link" => "<xlink" << replace_attr(attrs) << ">",
"/link" => "</xlink" << replace_attr(attrs) << ">",
"style" => "<div class='notviscode'>",
"/style" => "</div>",
"script" => "<div class='notviscode'>",
"/script" => "</div>" }
replacements.fetch(tag.downcase, ("<" << tag.downcase << replace_attr(attrs) << ">"))
end
# Neutralises dangerous HTML attribute content: event handlers are renamed to
# the inert "onfilter" and "javascript" is prefixed so it cannot execute.
# Returns "" for nil input.
def replace_attr(attrs)
  return "" unless attrs
  # NOTE: the original also gsub'd "JavaScript", but the string is already
  # downcased at that point, so that substitution could never match (dead code).
  attrs.downcase.gsub("onload", "onfilter").
    gsub("onclick", "onfilter").
    gsub("onkeypress", "onfilter").
    gsub("javascript", "_javascript")
end
# Strips all HTML/SGML tags from `text`, collapses runs of whitespace to a
# single space, and returns the result URL-escaped via CGI::escape.
def clear_html(text)
  attribute_key = /[\w:_-]+/
  attribute_value = /(?:[A-Za-z0-9\-_#\%\.,\/\:]+|(?:'[^']*?'|"[^"]*?"))/
  attribute = /(?:#{attribute_key}(?:\s*=\s*#{attribute_value})?)/
  attributes = /(?:#{attribute}(?:\s+#{attribute})*)/
  tag_key = attribute_key
  tag = %r{<([!/?\[]?(?:#{tag_key}|--))((?:\s+#{attributes})?\s*(?:[!/?\]]+|--)?)>}
  # BUG FIX: the stripped/collapsed string was computed and then discarded —
  # CGI::escape was applied to the untouched input. Capture the result first.
  cleaned = text.gsub(tag, '').gsub(/\s+/, ' ').strip
  CGI::escape(cleaned)
end
# Rewrites every HTML tag in `text` through replace_tag/replace_attr (see
# above), then removes any remaining <!DOCTYPE ...>-style declarations.
def strip_html(text)
attribute_key = /[\w:_-]+/
attribute_value = /(?:[A-Za-z0-9\-_#\%\.,\/\:]+|(?:'[^']*?'|"[^"]*?"))/
attribute = /(?:#{attribute_key}(?:\s*=\s*#{attribute_value})?)/
attributes = /(?:#{attribute}(?:\s+#{attribute})*)/
tag_key = attribute_key
tag = %r{<([!/?\[]?(?:#{tag_key}|--))((?:\s+#{attributes})?\s*(?:[!/?\]]+|--)?)>}
res = text.gsub(tag) { |match|
ret = ""
match.scan(tag) { |token|
ret << replace_tag(token[0], token[1])
}
ret
}
# remove doctype tags
xattributes = /(?:#{attribute_value}(?:\s+#{attribute_value})*)/
xtag = %r{<!#{tag_key}((?:\s+#{xattributes})?\s*(?:[!/?\]]+|--)?)>}
res.gsub(xtag, '')
end
|
import 'package:flutter/material.dart';
import 'package:flutter/cupertino.dart';
import 'package:flutter_demos/widget/my_home_page.dart';
import 'package:flutter_demos/widget/category_page.dart';
/// Root tab container for the demo: hosts the bottom navigation bar and pages.
class TestApp extends StatefulWidget {
@override
_TestAppState createState() => _TestAppState();
}
class _TestAppState extends State<TestApp> {
PageController _pageController;
// Bottom navigation items (home / categories).
final List<BottomNavigationBarItem> bottomTabs = [
BottomNavigationBarItem(
icon: Icon(CupertinoIcons.home),
title: Text('首页'),
),
BottomNavigationBarItem(
icon: Icon(CupertinoIcons.home),
title: Text('分类'),
)
];
// Page bodies shown by the IndexedStack, parallel to bottomTabs.
final List<Widget> tabBodys = [
MyHomePage(),
CategoryPage(),
];
// Index of the tab currently shown.
int currentIndex = 0;
// NOTE(review): currentPage is initialised with a Widget (tabBodys[...]) but
// the listener below compares it with and assigns _pageController.page.round()
// (an int), while onTap assigns a Widget again — the page listener likely
// never behaves as intended. Confirm before relying on it.
var currentPage;
@override
void initState() {
currentPage = tabBodys[currentIndex];
_pageController = new PageController()
..addListener(() {
if (currentPage != _pageController.page.round()) {
setState(() {
currentPage = _pageController.page.round();
});
}
});
super.initState();
}
@override
Widget build(BuildContext context) {
return Scaffold(
appBar: AppBar(
title: Text('demo'),
leading: Text(''),
),
bottomNavigationBar: BottomNavigationBar(
type:BottomNavigationBarType.fixed,
currentIndex: currentIndex,
items:bottomTabs,
onTap: (index){
setState(() {
currentIndex=index;
currentPage =tabBodys[currentIndex];
});
},
),
// IndexedStack keeps every tab's state alive while showing only one.
body: IndexedStack(
index: currentIndex,
children: tabBodys,
)
// body: MaterialApp(
// theme: ThemeData.dark(),
// home: IndexedStack(
// index: currentIndex,
// children: tabBodys,
// ),
// routes: routers,
// ),
);
}
}
|
-- 89 - Status do Contrato
-- Idempotent seed: insert attribute 89 only if it does not exist yet.
INSERT
INTO tab_atributos (cod_atributos, dsc_descricao, sgl_chave)
SELECT 89,'Status do Contrato','status-do-contrato'
WHERE 89 NOT IN (SELECT cod_atributos FROM tab_atributos);
-- 89 - Status do Contrato - (1-Em elaboração)
-- Idempotent seed of value "1"; the id comes from the sequence.
INSERT
INTO tab_atributos_valores (cod_atributos_valores, fk_atributos_valores_atributos_id, sgl_valor, dsc_descricao)
SELECT nextval ('tab_atributos_valores_cod_atributos_valores_seq'), 89,'1','Em elaboração'
WHERE NOT exists(SELECT cod_atributos_valores FROM tab_atributos_valores
WHERE fk_atributos_valores_atributos_id = 89 AND sgl_valor= '1');
-- 89 - Status do Contrato - (2-Em análise financeiro)
-- Idempotent seed of value "2".
INSERT
INTO tab_atributos_valores (cod_atributos_valores, fk_atributos_valores_atributos_id, sgl_valor, dsc_descricao)
SELECT nextval ('tab_atributos_valores_cod_atributos_valores_seq'), 89,'2','Em análise financeiro'
WHERE NOT exists(SELECT cod_atributos_valores FROM tab_atributos_valores
WHERE fk_atributos_valores_atributos_id = 89 AND sgl_valor= '2');
|
# typed: true
# Public (unauthenticated) listing of a user's shared links.
class Users::PublicLinksController < ApplicationController
skip_before_action :require_login
# GET index: loads the user by :user_id and their public links;
# renders not_found when the user cannot be resolved.
def index
# User.includes(:public_links).find(params[:id]) does not seem to hit the custom
# find method from Sluggable for some reason
@user = User.find(params[:user_id])
return not_found if @user.blank?
@links = @user.public_links
end
end
|
from setuptools import setup

# Packaging metadata for the "nem" distribution.
setup(
    name='nem',
    packages=['nem'],  # this must be the same as the name above
    version='0.0.5',
    # Typo fix: "goverement" -> "government" in the user-facing description.
    description='Access library for AEMO / NEM government data',
    author='Michael Wheeler',
    author_email='michael@michael-wheeler.org',
    url='https://github.com/theskorm/nemweb2',  # use the URL to the github repo
    download_url='https://github.com/TheSkorm/nemweb2/archive/0.0.5.tar.gz',
    keywords=['aemo', 'nem', 'gov', 'au'],  # arbitrary keywords
    classifiers=[],
    install_requires=[
        "beautifulsoup4",
        "requests",
        "pyexcel_xls",
    ],
)
|
require 'homeslice_core'
require 'geometry'
# Edge#hash must be value-based: geometrically identical edges hash equal,
# while edges with any differing endpoint hash differently.
describe Homeslice do
it "should generate identical hashes" do
edge_a = Geometry::Edge.new(Geometry::Point[0, 0, 0], Geometry::Point[1, 1, 1])
edge_b = Geometry::Edge.new(Geometry::Point[0, 0, 0], Geometry::Point[1, 1, 1])
expect(edge_a.hash == edge_b.hash).to be true
end
it "should generate different hashes" do
edge_a = Geometry::Edge.new(Geometry::Point[0, 0, 0], Geometry::Point[1, 1, 1])
edge_b = Geometry::Edge.new(Geometry::Point[0, 0, 2], Geometry::Point[1, 1, 1])
expect(edge_a.hash == edge_b.hash).to be false
end
end
|
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\TinTuc;
use App\TheLoai;
use App\LoaiTin;
class TinTucController extends Controller
{
    /** Paginated list of news articles for the admin grid. */
    public function getdanhsach()
    {
        $tintuc = TinTuc::paginate(20);
        return view('admin.TinTuc.hienthi', ['tintuc' => $tintuc]);
    }

    /** Shows the "create article" form with category/type choices. */
    public function getthem()
    {
        $theloai = TheLoai::all();
        $loaitin = LoaiTin::all();
        return view('admin.TinTuc.them', ['loaitin' => $loaitin, 'theloai' => $theloai]);
    }

    /** Shows the "edit article" form for article $id. */
    public function getsua($id)
    {
        $tintuc = TinTuc::find($id);
        $theloai = TheLoai::all();
        $loaitin = LoaiTin::all();
        return view('admin.TinTuc.sua', ['tintuc' => $tintuc, 'loaitin' => $loaitin, 'theloai' => $theloai]);
    }

    /** Deletes article $id and redirects back to the list. */
    public function getxoa($id)
    {
        $tintuc = TinTuc::find($id);
        $tintuc->delete();
        // NOTE(review): other actions redirect to 'admin/tintuc/danhsach'
        // (lowercase) — confirm which casing the route table actually uses.
        return redirect('admin/TinTuc/danhsach')->with('thongbao', 'xóa thành công');
    }

    /** Validation rules shared by create and update. */
    private function validateArticle(Request $req)
    {
        $this->validate(
            $req,
            [
                'tieude' => 'required|min:3|max:1000',
                'tomtat' => 'required',
                'idtheloai' => 'required',
                'idloaitin' => 'required',
                'noidung' => 'required'
            ],
            [
                'tieude.required' => 'Tên loại tin là bắt buộc',
                'tieude.min' => 'Tên loại tin cần lớn hơn 3 ký tự',
                'tieude.max' => 'Tên loại tin cần nhỏ hơn 1000 ký tự',
                'idtheloai' => 'Chưa chọn thể loại',
                'tomtat' => 'Thiếu tóm tắt',
                'idloaitin' => 'Thiếu loại tin',
                'noidung' => 'Chưa nhập nội dung'
            ]
        );
    }

    /**
     * Validates the uploaded image's extension and moves it into
     * upload/tintuc under a collision-free random-prefixed name.
     * Returns the stored file name, or null when the extension is invalid.
     */
    private function storeImage(Request $req)
    {
        $file = $req->file('hinh');
        $duoi = $file->getClientOriginalExtension();
        if ($duoi != 'jpg' && $duoi != 'png' && $duoi != 'jpeg') {
            return null;
        }
        $name = $file->getClientOriginalName();
        $hinh = str_random(4) . "_" . $name;
        while (file_exists("upload/tintuc/" . $hinh)) {
            $hinh = str_random(4) . "_" . $name;
        }
        $file->move("upload/tintuc", $hinh);
        return $hinh;
    }

    /** Validates and stores a new article, including its optional image. */
    public function postthem(Request $req)
    {
        $this->validateArticle($req);
        $tintuc = new TinTuc;
        $tintuc->TieuDe = $req->tieude;
        $tintuc->idtheloai = $req->idtheloai;
        $tintuc->idLoaiTin = $req->idloaitin;
        $tintuc->tieudekhongdau = changeTitle($req->tieude);
        // Default "not featured"; flip only when the checkbox was ticked.
        $tintuc->NoiBat = 0;
        if ($req->noibat == 'on')
            $tintuc->NoiBat = 1;
        $tintuc->TomTat = $req->tomtat;
        $tintuc->NoiDung = $req->noidung;
        $tintuc->LuotXem = 0;
        if ($req->hasFile('hinh')) {
            $hinh = $this->storeImage($req);
            if ($hinh === null) {
                return redirect('admin/tintuc/them')->with('errors', 'lỗi định dạng ảnh');
            }
            $tintuc->Hinh = $hinh;
        }
        $tintuc->save();
        return redirect('admin/tintuc/danhsach')->with('thongbao', 'Thêm loại tin thành công');
    }

    /** Validates and updates article $id, replacing the image when a new one is uploaded. */
    public function postsua(Request $req, $id)
    {
        $this->validateArticle($req);
        $tintuc = TinTuc::find($id);
        $tintuc->TieuDe = $req->tieude;
        $tintuc->idtheloai = $req->idtheloai;
        $tintuc->idLoaiTin = $req->idloaitin;
        $tintuc->tieudekhongdau = changeTitle($req->tieude);
        // BUG FIX: the original set NoiBat=1 when the checkbox was on and then
        // unconditionally overwrote it with 0, so an edit could never mark an
        // article as featured. Assign the default first, then the override.
        $tintuc->NoiBat = 0;
        if ($req->noibat == 'on')
            $tintuc->NoiBat = 1;
        $tintuc->NoiDung = $req->noidung;
        $tintuc->TomTat = $req->tomtat;
        if ($req->hasFile('hinh')) {
            $hinh = $this->storeImage($req);
            if ($hinh === null) {
                return redirect('admin/tintuc/them')->with('errors', 'lỗi định dạng ảnh');
            }
            // Remove the old image only if it actually exists on disk
            // (the original unlink warned on a missing/empty file name).
            if ($tintuc->Hinh && file_exists("upload/tintuc/" . $tintuc->Hinh)) {
                unlink("upload/tintuc/" . $tintuc->Hinh);
            }
            $tintuc->Hinh = $hinh;
        }
        $tintuc->save();
        return redirect('admin/tintuc/danhsach')->with('thongbao', 'Sửa loại tin thành công');
    }

    /** Prints <option> tags for the news types of category $id (AJAX helper). */
    public function getloaitin($id)
    {
        $loaitin = LoaiTin::where('idtheloai', '=', $id)->get();
        foreach ($loaitin as $lt) {
            echo "<option value='" . $lt->id . "'>" . $lt->Ten . "</option>";
        }
    }
}
|
#include "../include/work_queue.h"
#include "../include/types.h"
#include <string.h>
#include <chrono>
#include <cassert>
// Builds a fixed-capacity queue: all q_len nodes are allocated up front and
// threaded onto the producer free-list, so push/peek never allocate on the
// hot path.
WorkQueue::WorkQueue(size_t q_len, size_t max_elm_size) : len(q_len), max_elm_size(max_elm_size) {
non_block = false;
// place all nodes of linked list in the producer queue and reserve
// memory for the vectors
for (size_t i = 0; i < len; i++) {
DataNode *node = new DataNode(max_elm_size); // create and reserve space for updates
node->next = producer_list; // next of node is head
producer_list = node; // set head to new node
}
consumer_list_size = 0;
printf("WQ: created work queue with %lu elements each of size %lu\n", len, max_elm_size);
}
// Frees every node from both lists. The queue must not be in concurrent use
// during destruction; the locks only flush stale cached views of the heads.
WorkQueue::~WorkQueue() {
// free data from the queues
// grab locks to ensure that list variables aren't old due to cpu caching
producer_list_lock.lock();
consumer_list_lock.lock();
while (producer_list != nullptr) {
DataNode *temp = producer_list;
producer_list = producer_list->next;
delete temp;
}
while (consumer_list != nullptr) {
DataNode *temp = consumer_list;
consumer_list = consumer_list->next;
delete temp;
}
producer_list_lock.unlock();
consumer_list_lock.unlock();
}
// Takes one free node from the producer list, fills it with the update
// vector, and appends it to the consumer list. Blocks while no free node is
// available (queue full). Throws WriteTooBig for oversized updates.
void WorkQueue::push(node_id_t node_idx, std::vector<node_id_t> &upd_vec) {
if(upd_vec.size() > max_elm_size) {
throw WriteTooBig(upd_vec.size(), max_elm_size);
}
std::unique_lock<std::mutex> lk(producer_list_lock);
producer_condition.wait(lk, [this]{return !full();});
// printf("WQ: Push:\n");
// print();
// remove head from produce_list
DataNode *node = producer_list;
producer_list = producer_list->next;
lk.unlock();
// set node id and set node's data vector equal to data_vec
node->node_idx = node_idx;
std::swap(node->data_vec, upd_vec); // O(1) handoff: caller's vector now holds the node's old buffer
// add this block to the consumer queue for processing
consumer_list_lock.lock();
node->next = consumer_list;
consumer_list = node;
++consumer_list_size;
consumer_list_lock.unlock();
consumer_condition.notify_one();
}
// Pops one node from the consumer list for processing; blocks until data is
// available unless non_block is set, in which case it may return false.
// The node should be handed back via peek_callback once processed.
bool WorkQueue::peek(DataNode *&data) {
// wait while queue is empty
// printf("waiting to peek\n");
std::unique_lock<std::mutex> lk(consumer_list_lock);
consumer_condition.wait(lk, [this]{return !empty() || non_block;});
// printf("WQ: Peek\n");
// print();
// if non_block and queue is empty then there is no data to get
// so inform the caller of this
if (empty()) {
lk.unlock();
return false;
}
// remove head from consumer_list and release lock
DataNode *node = consumer_list;
consumer_list = consumer_list->next;
--consumer_list_size;
lk.unlock();
data = node;
return true;
}
// Pops up to batch_size nodes at once into node_vec. Blocks until a full
// batch is queued (or non_block is set); in non-blocking mode it may return
// fewer than batch_size nodes, or false when the queue is empty.
bool WorkQueue::peek_batch(std::vector<DataNode *> &node_vec, size_t batch_size) {
assert(batch_size <= len); // cannot request a batch bigger than the work queue
node_vec.clear(); // clear out any old data
node_vec.reserve(batch_size);
// wait until consumer queue is large enough
std::unique_lock<std::mutex> lk(consumer_list_lock);
//FIXME: This version of batch waiting causes a lot of contention on the mutex.
consumer_condition.wait(lk,
[this, batch_size]{return consumer_list_size >= batch_size || non_block;});
// printf("WQ: Peek-batch\n");
// print();
if (empty()) {
lk.unlock();
return false;
}
// pull data from head of consumer_list
for(size_t i = 0; i < batch_size; i++) {
if (consumer_list == nullptr) break; // if non_block is true may not be able to get full batch
node_vec.push_back(consumer_list);
consumer_list = consumer_list->next;
--consumer_list_size;
}
lk.unlock();
return true;
}
// Returns one processed node to the producer free-list and wakes a producer
// that may be waiting for a free slot.
void WorkQueue::peek_callback(DataNode *node) {
producer_list_lock.lock();
// printf("WQ: Callback\n");
// print();
node->next = producer_list;
producer_list = node;
producer_list_lock.unlock();
producer_condition.notify_one();
// printf("WQ: Callback done\n");
}
// Returns a whole batch of processed nodes to the producer free-list with a
// single list splice, then wakes all producers (several slots were freed).
void WorkQueue::peek_batch_callback(const std::vector<DataNode *> &node_vec) {
  // BUG FIX: node_vec[0] / node_vec[size-1] below were undefined behavior
  // when the batch was empty (peek_batch in non-blocking mode can yield one).
  if (node_vec.empty()) return;
  for (size_t i = 1; i < node_vec.size(); i++) {
    node_vec[i-1]->next = node_vec[i]; // fix next pointers just to be sure
  }
  DataNode *head = node_vec.front();
  DataNode *tail = node_vec.back();
  producer_list_lock.lock();
  tail->next = producer_list;
  producer_list = head;
  producer_list_lock.unlock();
  producer_condition.notify_all(); // we've probably added a bunch of stuff
}
// Enables or disables non-blocking mode for consumers and wakes all waiting
// consumers so they re-check their wait predicate.
// NOTE(review): the parameter is named `_block` but is stored directly into
// `non_block` (true = "do not block") — consider renaming for clarity.
void WorkQueue::set_non_block(bool _block) {
consumer_list_lock.lock();
non_block = _block;
consumer_list_lock.unlock();
consumer_condition.notify_all();
}
// Debug helper: dumps both lists' sizes and node addresses to stdout.
// Takes no locks — intended only for debugging from within locked sections.
void WorkQueue::print() {
std::string to_print = "";
int p_size = 0;
DataNode *temp = producer_list;
while (temp != nullptr) {
to_print += std::to_string(p_size) + ": " + std::to_string((uint64_t)temp) + "\n";
temp = temp->next;
++p_size;
}
int c_size = 0;
temp = consumer_list;
while (temp != nullptr) {
to_print += std::to_string(c_size) + ": " + std::to_string((uint64_t)temp) + "\n";
temp = temp->next;
++c_size;
}
printf("WQ: producer_queue size = %i consumer_queue size = %i\n%s", p_size, c_size, to_print.c_str());
}
|
package com.alg.pointers;
/**
* Given an array of integers, find two numbers such that they add up to a specific target number.
*
* The function twoSum should return indices of the two numbers such that they add up to the target, where index1 must
* be less than index2. Please note that your returned answers (both index1 and index2) are zero-based.
*
* You may assume that each input would have exactly one solution
*
* Example1:
* numbers=[2, 7, 11, 15], target=9
* return [0, 1]
* Example2:
* numbers=[15, 2, 7, 11], target=9
* return [1, 2]
*
* Challenge
* Either of the following solutions are acceptable:
*
* O(n) Space, O(nlogn) Time
* O(n) Space, O(n) Time
*
*/
public class TwoSum {
    /**
     * Finds two indices i &lt; j such that numbers[i] + numbers[j] == target.
     *
     * Single pass with a value-to-index map: O(n) time / O(n) space, meeting
     * the stated challenge and replacing the original O(n^2) double loop,
     * which also kept scanning after a match instead of returning early.
     *
     * @param numbers An array of Integer
     * @param target  target = numbers[index1] + numbers[index2]
     * @return [index1, index2] (index1 &lt; index2); {0, 0} when no pair exists,
     *         matching the original fallback behavior
     */
    public int[] twoSum(int[] numbers, int target) {
        java.util.Map<Integer, Integer> indexByValue = new java.util.HashMap<>();
        for (int i = 0; i < numbers.length; i++) {
            // If the complement was seen earlier, its index is the smaller one.
            Integer earlier = indexByValue.get(target - numbers[i]);
            if (earlier != null) {
                return new int[] { earlier, i };
            }
            indexByValue.put(numbers[i], i);
        }
        // Problem statement guarantees a solution; defensive fallback only.
        return new int[2];
    }

    public static void main(String[] args) {
        int[] numbers = {15, 2, 7, 11};
        int target = 9;
        // The original main computed nothing; demonstrate the solver.
        int[] result = new TwoSum().twoSum(numbers, target);
        System.out.println(result[0] + ", " + result[1]); // expected: 1, 2
    }
}
|
import styled from '@xstyled/styled-components'
// Themed text input. Every value on the right-hand side (control-background,
// control-border, ...) is a design token resolved by @xstyled/styled-components
// from the app theme, not a literal CSS value.
export const Input = styled.input`
  appearance: none;
  background-color: control-background;
  border-radius: control;
  border-style: solid;
  border-width: control;
  border-color: control-border;
  line-height: control;
  padding: 1 2;
  color: control-on;
  transition: control;

  &::placeholder {
    color: control-placeholder;
  }

  &:hover {
    border-color: control-border-hover;
  }

  &:focus {
    outline: none;
    box-shadow: control-focus;
    border-color: control-border-active;
  }
`
// Wrapper that pairs an Input with an absolutely-positioned icon
// (InputGroupIcon). Direct Input children get extra left padding (32) to make
// room for the icon; the `.algolia-autocomplete` branch applies the same
// treatment when the input is wrapped by Algolia's DocSearch widget.
export const InputGroup = styled.div`
  display: inline-flex;
  color: control-placeholder;
  transition: control;
  position: relative;

  &:focus-within {
    color: control-on;
  }

  > ${Input} {
    flex: 1 1 auto;
    width: 1%;
    min-width: 0;
    padding-left: 32;
  }

  > .algolia-autocomplete {
    flex: 1 1 auto;
    width: 1%;
    min-width: 0;

    > ${Input} {
      padding-left: 32;
    }
  }
`
// Decorative icon slot overlaid on the InputGroup: vertically centered,
// ignores pointer events so clicks fall through to the input underneath.
export const InputGroupIcon = styled.div`
  display: flex;
  position: absolute;
  padding: 1 2;
  height: 100%;
  align-items: center;
  user-select: none;
  pointer-events: none;
  z-index: 1;
`
|
# 민주주의 서울 오픈 소스
## 프로젝트 빌드
자바 및 maven 환경이 설치되어 있어야 합니다.
```
mvn package
```
빌드 시 target 폴더에 war 파일이 생성됩니다.
## docker 실행 방법
1. 프로젝트 빌드를 한다.
2. 생성된 war 파일을 ROOT.war 파일로 이름을 변경한다.
3. docker/webapps 경로에 복사한다.
4. docker 경로로 이동 후 `docker-compose up`
|
package com.tommasoberlose.darkmode.receivers
import android.content.BroadcastReceiver
import android.content.Context
import android.content.Intent
import android.util.Log
import com.tommasoberlose.darkmode.global.Actions
import com.tommasoberlose.darkmode.global.Preferences
import com.tommasoberlose.darkmode.helpers.DarkThemeHelper
import com.tommasoberlose.darkmode.helpers.NotificationHelper
import com.tommasoberlose.darkmode.services.SchedulerJob
/**
 * Handles the dark-mode ON/OFF broadcasts: applies the theme immediately, or
 * defers to [SchedulerJob] when the user requires the device to be idle first.
 */
class UpdatesReceiver : BroadcastReceiver() {
    override fun onReceive(context: Context, intent: Intent) {
        // Map the action to the desired dark-mode state; ignore other actions.
        val enableDarkMode = when (intent.action) {
            Actions.ACTION_UPDATE_DARK_MODE_ON -> true
            Actions.ACTION_UPDATE_DARK_MODE_OFF -> false
            else -> return
        }
        if (Preferences.toggleThemeRequiresDeviceIdle) {
            SchedulerJob.scheduleService(context, enableDarkMode)
        } else {
            DarkThemeHelper.toggleDarkTheme(context, enableDarkMode)
        }
    }
}
|
# Renders one achievement as JSON: core columns, ownership rate, icon URL,
# category/type, and (optionally) its reward.
json.(achievement, :id, :name, :description, :points, :order, :patch)
# Ownership rate string; defaults to '0%' when this id has no entry.
json.owned owned.fetch(achievement.id.to_s, '0%')
json.icon image_url("achievements/#{achievement.icon_id}.png", skip_pipeline: true)

category = achievement.category
json.category do
  json.(category, :id, :name)
end

json.type do
  json.(category.type, :id, :name)
end

# Callers can pass skip_reward to omit this section -- presumably to break the
# mutual rendering between achievements and titles (note the symmetric
# skip_achievement: true passed to the title partial below).
unless local_assigns[:skip_reward]
  json.reward do
    if achievement.title.present?
      json.type 'Title'
      json.title do
        json.partial! 'api/titles/title', title: achievement.title, owned: owned, skip_achievement: true
      end
    elsif achievement.item_id.present?
      json.type 'Item'
      json.name achievement.item.name
    end
  end
end
|
// Redux-style action creators for the product list; `args` carries the raw
// call arguments through to whatever middleware consumes these actions.
export const fetchProductList = (...args) => ({ type: 'product/list', args });
export const fetchProductListError = (...args) => ({ type: 'product/list_error', args });
|
#pragma once
#include "CoreMinimal.h"
#include "XPSettings.generated.h"
// Experience-point tuning values, editable per-asset in the Unreal editor.
// Field semantics are name-inferred (defaults live in the constructor defined
// elsewhere) -- confirm against the implementation.
USTRUCT()
struct FXPSettings {
    GENERATED_BODY()
public:
    // Presumably XP granted per unit of gold.
    UPROPERTY(EditAnywhere)
    float XP_PerGold;
    // Presumably XP granted per kill.
    UPROPERTY(EditAnywhere)
    float XP_PerKill;
    // Presumably a flat XP bonus awarded for surviving a level.
    UPROPERTY(EditAnywhere)
    float XP_OnSurvivedLevel;
    FSD_API FXPSettings();
};
|
package io.horizontalsystems.bitcoincore.network.messages
import io.horizontalsystems.bitcoincore.io.BitcoinInputMarkable
import io.horizontalsystems.bitcoincore.io.BitcoinOutput
/** Network "pong" message payload: a single 8-byte [nonce]. */
class PongMessage(val nonce: Long) : IMessage {
    // Same rendering as before, as an expression body.
    override fun toString(): String = "PongMessage(nonce=$nonce)"
}
/** Parses an incoming "pong" message: the payload is one long (the nonce). */
class PongMessageParser : IMessageParser {
    override val command: String = "pong"

    override fun parseMessage(input: BitcoinInputMarkable): IMessage =
        PongMessage(input.readLong())
}
/** Serializes a [PongMessage] to its wire form (just the nonce as a long). */
class PongMessageSerializer : IMessageSerializer {
    override val command: String = "pong"

    override fun serialize(message: IMessage): ByteArray? {
        // Only PongMessage instances are handled; anything else yields null.
        val pong = message as? PongMessage ?: return null
        return BitcoinOutput()
            .writeLong(pong.nonce)
            .toByteArray()
    }
}
|
import 'package:unleash/src/features.dart';
import 'package:unleash/src/toggle_backup/toggle_backup.dart';
/// Factory entry point for this no-op implementation; [backupFilePath] is
/// accepted for interface parity but ignored.
ToggleBackup create(String backupFilePath) => NoOpToggleBackup();

/// A [ToggleBackup] that persists nothing: [load] always resolves to null and
/// [save] completes immediately without writing anywhere.
class NoOpToggleBackup implements ToggleBackup {
  @override
  Future<Features?> load() async => null;

  @override
  Future<void> save(Features toggleJson) => Future.value();
}
|
---
layout: article
title: "「设计模式」 装饰者模式"
date: 2019-01-07 8:06:40 +0800
key: decorator-20190107
aside:
toc: true
category: [software, software, design_pattern]
---
背景:在不改变原类文件、也不使用继承的情况下,动态地扩展一个对象的功能。
方案:利用子对象,委派
定义:
角色:
油漆工(decorator)是用来刷油漆的
被修饰者decoratee是被刷油漆的对象
实现:“实现被装饰者类---定义被装饰者对象----使用被装饰者对象产生装饰者对象”(有多种方式)
案例:咖啡
特定场景下才用;
说明:
装饰者与被装饰者具有相同的类型
可以用多个装饰者装饰一个对象
由于装饰者与被装饰者具有相同的类型,我们可以用装饰后的对象代替原来的对象。
装饰者在委派它装饰的对象作某种处理时,可以添加上自己的行为(功能扩展)(在委派之前或/和之后)。
对象可以在任何时候被装饰,因此我们能在运行时动态的装饰对象。
|
import 'package:get/get.dart';
import 'package:santaclothes/data/repository/notification_repository.dart';
import 'package:santaclothes/presentation/notification/notification_controller.dart';
/// GetX binding that registers the [NotificationController] (with its
/// [NotificationRepository]) when the notification route is bound.
class NotificationBinding extends Bindings {
  @override
  void dependencies() {
    // Get.put registers the controller instance in GetX's DI container.
    Get.put(NotificationController(NotificationRepository()));
  }
}
|
import { code, Example, md, Props } from '@uidu/docs';
import React from 'react';
// Documentation page for @uidu/checkbox, assembled from the @uidu/docs
// markdown helper plus live <Example> and <Props> blocks; the examples are
// pulled in with raw-loader so their source can be displayed alongside.
export default md`
  ### Checkbox

  <p class="lead">A checkbox element primarily for use in forms.</p>

  The Checkbox export provides for controlled & uncontrolled usage and includes the label, input & icon.

  ${code`import Checkbox, { CheckboxGroup } from '@uidu/checkbox';`}

  ${(
    <Example
      packageName="@uidu/checkbox"
      Component={require('../examples/00-basic-usage').default}
      title="Basic"
      source={require('!!raw-loader!../examples/00-basic-usage').default}
    />
  )}

  ${(
    <Example
      packageName="@uidu/checkbox"
      Component={require('../examples/01-indeterminate').default}
      title="Indeterminate"
      source={require('!!raw-loader!../examples/01-indeterminate').default}
    />
  )}

  ${(
    <Example
      packageName="@uidu/checkbox"
      Component={require('../examples/04-checkbox-form').default}
      title="With a Form"
      source={require('!!raw-loader!../examples/04-checkbox-form').default}
    />
  )}

  #### Checkbox Props

  ${(
    <Props
      props={require('!!extract-react-types-loader!../src/components/Checkbox')}
    />
  )}
`;

// The CheckboxGroup props section below is intentionally disabled; re-enable
// once its props can be extracted.
// #### CheckboxGroup Props

// ${(
//   <Props
//     props={require('!!extract-react-types-loader!../src/components/CheckboxGroup')}
//   />
// )}
|
using System;
namespace CgiResourceUpload.Models
{
    /// <summary>
    /// Pairs a validation predicate with the message to surface when it fails.
    /// </summary>
    internal class ValidationCheck
    {
        /// <summary>Predicate returning true when the check passes.</summary>
        public Func<bool> Validate { get; }

        /// <summary>Message to report when <see cref="Validate"/> returns false.</summary>
        public string FailMessage { get; }

        public ValidationCheck(Func<bool> validate, string failMessage)
        {
            Validate = validate;
            FailMessage = failMessage;
        }
    }
}
|
#!/bin/bash
# Build and install the Qpid Proton C library (0.30.0) from source, then
# enable the qpid_proton bundler group.
set -v
# Fail fast: without this, a failed configure/compile step still ran
# `sudo make install` and silently installed a broken library.
set -e

pushd "$PWD"

# Install the dev dependencies for building Qpid proton system library.
sudo apt-get install -y gcc cmake cmake-curses-gui uuid-dev
sudo apt-get install -y libssl-dev
sudo apt-get install -y libsasl2-2 libsasl2-dev

# Get the latest Qpid Proton source (create the build root if missing).
mkdir -p "$HOME/build"
cd "$HOME/build"
git clone --branch 0.30.0 https://github.com/apache/qpid-proton.git
cd qpid-proton

# Configure the source of Qpid Proton (BUILD_BINDINGS left empty on purpose:
# no language bindings are built here).
mkdir -p build
cd build
cmake .. -DCMAKE_INSTALL_PREFIX=/usr -DBUILD_BINDINGS=

# Compile system libraries.
make all

# Install system libraries
sudo make install

# Enable the qpid_proton bundler group. Use a full `if` so a set BUNDLE_WITH
# does not abort the script under `set -e` (a bare `[ ... ] && cmd` would).
if [ -z "${BUNDLE_WITH:-}" ]; then
  bundle config with qpid_proton
fi

popd
set +v
|
package dev.helight.hopper.data
import dev.helight.hopper.*
import dev.helight.hopper.ecs.data.ArchetypeSnapshot
import dev.helight.hopper.ecs.data.EntitySnapshot
import java.util.*
import java.util.concurrent.locks.ReentrantReadWriteLock
/**
 * Column-oriented storage for all entities that share the same [ComponentGroup].
 *
 * [ids] and [data] are parallel lists: `data[i]` holds the component values of
 * entity `ids[i]`, ordered the same way as [group]. All access goes through a
 * single read/write lock.
 */
data class Archetype(
    val group: ComponentGroup
) {
    var ids: MutableList<EntityId> = mutableListOf()
    var data: MutableList<MutableList<ComponentData>> = mutableListOf()

    private val rwLock = ReentrantReadWriteLock()
    private val r: ReentrantReadWriteLock.ReadLock = rwLock.readLock()
    private val w: ReentrantReadWriteLock.WriteLock = rwLock.writeLock()

    // Position of a component id inside [group]; mirrors the per-entity layout.
    private fun componentIndex(id: ComponentID): Int = group.indexOf(id)

    val size
        get() = ids.size

    /**
     * Serializes all stored entities. Transient archetypes produce an empty
     * snapshot. Fix: the original resolved the serializers between `r.lock()`
     * and the try block, leaking the read lock if that lookup threw; all work
     * is now inside try/finally.
     */
    @ExperimentalUnsignedTypes
    fun snapshot(): ArchetypeSnapshot {
        r.lock()
        try {
            if (group.contains(TransientEntity::class.java.toKey())) {
                return ArchetypeSnapshot(group.joinToString("-"), listOf())
            }
            val entities = mutableListOf<EntitySnapshot>()
            val serializers = group.map { ecs.serializers[it] }
            ids.forEachIndexed { index, entityId ->
                val serializedData = data[index].mapIndexed { componentIndex, componentData ->
                    serializers[componentIndex]!!.serialize(componentData)
                }
                entities.add(EntitySnapshot(entityId.toULong(), serializedData))
            }
            return ArchetypeSnapshot(group.joinToString("-"), entities)
        } finally {
            r.unlock()
        }
    }

    /** Appends an entity and its component values (ordered like [group]). */
    fun push(entityId: EntityId, components: MutableList<ComponentData>) {
        w.lock()
        try {
            ids.add(entityId)
            data.add(components)
        } finally {
            w.unlock()
        }
    }

    /** Removes an entity and returns its exported data, or null if absent. */
    fun pop(entityId: EntityId): ExportedEntity? {
        var currentData: ExportedEntity? = null
        w.lock()
        try {
            val index = ids.indexOf(entityId)
            if (index != -1) {
                currentData = Triple(entityId, group, data[index])
                ids.removeAt(index)
                data.removeAt(index)
            }
        } finally {
            w.unlock()
        }
        return currentData
    }

    /**
     * Overwrites a single component value. Precondition: the entity and
     * component must be present; an absent entity surfaces as an
     * IndexOutOfBoundsException (unchanged from the original).
     */
    fun update(entityId: EntityId, componentID: ComponentID, value: ComponentData) {
        w.lock()
        try {
            val index = ids.indexOf(entityId)
            val componentIndex = componentIndex(componentID)
            data[index][componentIndex] = value
        } finally {
            w.unlock()
        }
    }

    /** Reads a single component value; same precondition as [update]. */
    fun get(entityId: EntityId, componentID: ComponentID): ComponentData {
        val currentData: ComponentData
        r.lock()
        try {
            val index = ids.indexOf(entityId)
            val componentIndex = componentIndex(componentID)
            currentData = data[index][componentIndex]
        } finally {
            r.unlock()
        }
        return currentData
    }

    fun contains(entityId: EntityId): Boolean = ids.contains(entityId)

    /** Like [pop] but leaves the entity in place; null if absent. */
    fun export(entityId: EntityId): ExportedEntity? {
        r.lock()
        var currentData: ExportedEntity? = null
        try {
            val index = ids.indexOf(entityId)
            if (index != -1) {
                currentData = Triple(entityId, group, data[index])
            }
        } finally {
            r.unlock()
        }
        return currentData
    }

    /** Stable copy of all (id, components) pairs taken under the read lock. */
    fun all(): List<Pair<EntityId, MutableList<ComponentData>>> {
        val currentData: MutableList<Pair<EntityId, MutableList<ComponentData>>> = mutableListOf()
        r.lock()
        try {
            ids.forEachIndexed { index, id ->
                currentData.add(id to data[index])
            }
        } finally {
            r.unlock()
        }
        return currentData
    }

    /**
     * Debug dump of the archetype contents. Fix: the read lock is now released
     * in a finally block; the original leaked it if println threw.
     */
    fun print() {
        r.lock()
        try {
            ids.forEachIndexed { i, _ ->
                println("[$i] ${ids[i]} = ${data[i]}")
            }
        } finally {
            r.unlock()
        }
    }

    companion object {
        /** Rebuilds an archetype (group, ids, component data) from a snapshot. */
        @ExperimentalUnsignedTypes
        fun loadFromSnapshot(snapshot: ArchetypeSnapshot): Archetype {
            // The group is persisted as "-"-joined component ids.
            val group = when (snapshot.group.isEmpty()) {
                false -> TreeSet(snapshot.group.split("-").map { it.toULong() })
                true -> sortedSetOf()
            }
            // NOTE(review): the println calls below look like leftover debug
            // output; kept to avoid changing observable behavior.
            println(ecs.serializers.toString())
            val serializers = group.map { ecs.serializers[it] }.toList()
            val eList = mutableListOf<EntityId>()
            val dList = mutableListOf<MutableList<ComponentData>>()
            snapshot.entities.forEach {
                println("==> ${it.id}")
                val deserializedData = it.data.mapIndexed { index, data ->
                    println("#$index $data")
                    serializers[index]!!.deserialize(data)
                }.toMutableList()
                eList.add(it.id.toULong())
                dList.add(deserializedData)
            }
            val archetype = Archetype(group)
            archetype.ids = eList
            archetype.data = dList
            return archetype
        }
    }
}
|
using System;
using UnityEngine;
namespace UGF.DebugTools.Runtime.UI.Scopes
{
    /// <summary>
    /// Temporarily overrides <c>GUI.enabled</c> for the lifetime of the scope;
    /// disposing restores the previous value. Intended for a <c>using</c>
    /// statement around IMGUI drawing code.
    /// </summary>
    public readonly struct DebugUIEnabledScope : IDisposable
    {
        // GUI.enabled value captured at construction, restored on Dispose.
        private readonly bool m_enabled;

        public DebugUIEnabledScope(bool enabled)
        {
            m_enabled = GUI.enabled;
            GUI.enabled = enabled;
        }

        public void Dispose()
        {
            GUI.enabled = m_enabled;
        }
    }
}
|
# backup files from a docker volume into /tmp/backup.tar.gz
# $1   = volume name (mounted at /mount inside the helper container)
# $2.. = container-side paths to include in the archive
function docker-volume-backup-compressed() {
  docker run --rm -v /tmp:/backup -v $1:/mount debian:jessie tar -czvf /backup/backup.tar.gz "${@:2}"
}
# restore files from /tmp/backup.tar.gz into a docker volume
# $1   = volume name (mounted at /mount inside the helper container)
# $2.. = container-side paths to extract and then list for verification
function docker-volume-restore-compressed() {
  docker run --rm -v /tmp:/backup -v $1:/mount debian:jessie tar -xzvf /backup/backup.tar.gz "${@:2}"
  echo "Double checking files..."
  docker run --rm -v /tmp:/backup -v $1:/mount debian:jessie ls -lh "${@:2}"
}
# backup files from a docker volume into /tmp/backup.tar
function docker-volume-backup() {
docker run --rm -v /tmp:/backup -v $1:/mount busybox tar -cvf /backup/backup.tar C /mount
}
# restore files from /tmp/backup.tar into a docker volume
# $1 = volume name; extraction happens inside /mount (-w sets the workdir)
function docker-volume-restore() {
  docker run --rm -v /tmp:/backup -v $1:/mount -w /mount busybox tar -xvf /backup/backup.tar
  echo "Double checking files..."
  docker run --rm -v /tmp:/backup -v $1:/mount busybox ls -lh /mount
}
|
%%%-------------------------------------------------------------------
%%% @author Aaron Lelevier
%%% @doc
%%% @end
%%% Created : 08. May 2021 8:14 AM
%%%-------------------------------------------------------------------
-module(bytes_util).
-author("Aaron Lelevier").
-vsn(1.0).
-export([
reverse_bytes/1,
term_to_packet/1,
packet_to_term/1
]).
%% @doc Reverse the order of bytes in a binary.
-spec reverse_bytes(binary()) -> binary().
reverse_bytes(Bin) ->
  Bytes = binary_to_list(Bin),
  list_to_binary(lists:reverse(Bytes)).
%% @doc Encode a term as a length-prefixed packet.
%% Fix: the original used `Size:4` -- a 4-bit size field -- which silently
%% truncated the length of any payload larger than 15 bytes, producing
%% undecodable packets. A 32-bit big-endian prefix (the conventional
%% {packet, 4} framing) is used instead; packet_to_term/1 below is updated
%% symmetrically so round-trips keep working.
-spec term_to_packet(any()) -> bitstring().
term_to_packet(Term) ->
  Bin = term_to_binary(Term),
  Size = size(Bin),
  <<Size:32, Bin/binary>>.

%% @doc Decode a packet produced by term_to_packet/1, returning the payload
%% size and the decoded term. (The original `byte()` in the spec was wrong
%% even for the 4-bit field; a 32-bit length is a non_neg_integer().)
-spec packet_to_term(bitstring()) -> {non_neg_integer(), any()}.
packet_to_term(Packet) ->
  <<Size:32, Bin/binary>> = Packet,
  Term = binary_to_term(Bin),
  {Size, Term}.
|
package io.algostack.risk.engine;
import java.util.concurrent.Callable;
import java.util.concurrent.Future;
/**
 * Abstraction over a task-execution strategy: submits a {@link Callable} and
 * returns a {@link Future} for its eventual result.
 */
public interface TaskExecutor {
    /**
     * Submits {@code task} for execution.
     *
     * @param task the unit of work to run
     * @param <T>  the task's result type
     * @return a future completing with the task's result
     */
    <T> Future<T> execute(Callable<T> task);
}
|
/* eslint-disable jest/expect-expect */
import OnlineParser from '../onlineParser';
import {
getUtils,
performForEachType,
expectVarsDef,
expectArgs,
expectDirective,
} from './OnlineParserUtils';
describe('onlineParser', () => {
describe('.startState', () => {
it('initializes state correctly', () => {
const parser = OnlineParser();
expect(parser.startState()).toEqual({
level: 0,
step: 0,
name: null,
kind: 'Document',
type: null,
rule: [
{
isList: true,
ofRule: 'Definition',
separator: undefined,
},
],
needsSeperator: false,
prevState: {
level: 0,
step: 0,
name: null,
kind: null,
type: null,
rule: null,
needsSeperator: false,
prevState: null,
},
});
});
});
describe('.token', () => {
it('detects invalid char', () => {
const { token } = getUtils(`^`);
expect(token()).toEqual('invalidchar');
});
it('parses schema def', () => {
const { t } = getUtils(`
schema {
query: SomeType
}
`);
t.keyword('schema', { kind: 'SchemaDef' });
t.punctuation('{');
t.keyword('query', { kind: 'OperationTypeDef' });
t.punctuation(':');
t.name('SomeType');
t.punctuation('}', { kind: 'Document' });
t.eol();
});
it('parses short query', () => {
const { t } = getUtils(`
{
someField
}
`);
t.punctuation('{', { kind: 'SelectionSet' });
t.property('someField', { kind: 'Field' });
t.punctuation('}', { kind: 'Document' });
t.eol();
});
it('parses query', () => {
const { t } = getUtils(`
query SomeQuery {
someField
}
`);
t.keyword('query', { kind: 'Query' });
t.def('SomeQuery');
t.punctuation('{', { kind: 'SelectionSet' });
t.property('someField', { kind: 'Field' });
t.punctuation('}', { kind: 'Document' });
t.eol();
});
it('parses query with variables', () => {
const { t, stream } = getUtils(`
query SomeQuery ($someVariable: SomeInputType) {
someField(someArg: $someVariable)
}
`);
t.keyword('query', { kind: 'Query' });
t.def('SomeQuery');
expectVarsDef(
{ t, stream },
{
onKind: 'Query',
vars: [{ name: 'someVariable', type: 'SomeInputType' }],
},
);
t.punctuation('{', { kind: 'SelectionSet' });
t.property('someField', { kind: 'Field' });
expectArgs(
{ t, stream },
{
onKind: 'Field',
args: [{ name: 'someArg', isVariable: true, value: 'someVariable' }],
},
);
t.punctuation('}', { kind: 'Document' });
t.eol();
});
performForEachType(
`
query SomeQuery {
someField(someArg: __VALUE__)
}
`,
({ t, stream }, fill) => {
it(`parses query field having argument of type ${fill.type}`, () => {
t.keyword('query', { kind: 'Query' });
t.def('SomeQuery');
t.punctuation('{', { kind: 'SelectionSet' });
t.property('someField', { kind: 'Field' });
expectArgs(
{ t, stream },
{ onKind: 'Field', args: [{ name: 'someArg', ...fill }] },
);
t.punctuation('}', { kind: 'Document' });
t.eol();
});
},
);
performForEachType(
`
query SomeQuery {
someField(someArg: [__VALUE__])
}
`,
({ t, stream }, fill) => {
it(`parses query field having argument as list of type ${fill.type}`, () => {
t.keyword('query', { kind: 'Query' });
t.def('SomeQuery');
t.punctuation('{', { kind: 'SelectionSet' });
t.property('someField', { kind: 'Field' });
expectArgs(
{ t, stream },
{
onKind: 'Field',
args: [{ name: 'someArg', isList: true, ...fill }],
},
);
t.punctuation('}', { kind: 'Document' });
t.eol();
});
},
);
it('parses query field having argument of type object', () => {
const { t } = getUtils(`
query SomeQuery {
someField(someArg: { anotherField: $someVariable })
}
`);
t.keyword('query', { kind: 'Query' });
t.def('SomeQuery');
t.punctuation('{', { kind: 'SelectionSet' });
t.property('someField', { kind: 'Field' });
t.punctuation(/\(/, { kind: 'Arguments' });
t.attribute('someArg', { kind: 'Argument' });
t.punctuation(':');
t.punctuation('{', { kind: 'ObjectValue' });
t.attribute('anotherField', { kind: 'ObjectField' });
t.punctuation(':');
t.variable('$', { kind: 'Variable' });
t.variable('someVariable');
t.punctuation('}', { kind: 'Arguments' });
t.punctuation(/\)/, { kind: 'Field' });
t.punctuation('}', { kind: 'Document' });
t.eol();
});
performForEachType(
`
query SomeQuery {
someField @someDirective(anotherArg: __VALUE__)
}
`,
({ t, stream }, fill) => {
it(`parses query field with directive having argument of type ${fill.type}`, () => {
t.keyword('query', { kind: 'Query' });
t.def('SomeQuery');
t.punctuation('{', { kind: 'SelectionSet' });
t.property('someField', { kind: 'Field' });
expectDirective(
{ t, stream },
{
name: 'someDirective',
onKind: 'Field',
args: [{ name: 'anotherArg', ...fill }],
},
);
t.punctuation('}', { kind: 'Document' });
t.eol();
});
},
);
it(`parses query field with a directive and selection set`, () => {
const { t } = getUtils(`
query SomeQuery {
someField @someDirective {
anotherField
}
}
`);
t.keyword('query', { kind: 'Query' });
t.def('SomeQuery');
t.punctuation('{', { kind: 'SelectionSet' });
t.property('someField', { kind: 'Field' });
expectDirective({ t }, { name: 'someDirective' });
t.punctuation('{', { kind: 'SelectionSet' });
t.property('anotherField', { kind: 'Field' });
t.punctuation('}', { kind: 'SelectionSet' });
t.punctuation('}', { kind: 'Document' });
t.eol();
});
it(`parses query field with an alias`, () => {
const { t } = getUtils(`
query SomeQuery {
someAlias : someField @someDirective {
anotherField
}
}
`);
t.keyword('query', { kind: 'Query' });
t.def('SomeQuery');
t.punctuation('{', { kind: 'SelectionSet' });
t.property('someAlias', { kind: 'AliasedField' });
t.punctuation(':');
t.qualifier('someField');
expectDirective({ t }, { name: 'someDirective' });
t.punctuation('{', { kind: 'SelectionSet' });
t.property('anotherField', { kind: 'Field' });
t.punctuation('}', { kind: 'SelectionSet' });
t.punctuation('}', { kind: 'Document' });
t.eol();
});
it(`parses invalid query`, () => {
const { t, token } = getUtils(`
{}garbage
`);
t.punctuation('{', { kind: 'SelectionSet' });
t.punctuation('}', { kind: 'Document' });
expect(token()).toEqual('invalidchar');
t.eol();
});
it(`parses a fragment defination`, () => {
const { t } = getUtils(`
fragment SomeFragment on SomeType {
someField
}
`);
t.keyword('fragment', { kind: 'FragmentDefinition' });
t.def('SomeFragment');
t.keyword('on', { kind: 'TypeCondition' });
t.name('SomeType', { kind: 'NamedType' });
t.punctuation('{', { kind: 'SelectionSet' });
t.property('someField', { kind: 'Field' });
t.punctuation('}', { kind: 'Document' });
t.eol();
});
it(`parses a fragment defination with a directive`, () => {
const { t } = getUtils(`
fragment SomeFragment on SomeType @someDirective {
someField
}
`);
t.keyword('fragment', { kind: 'FragmentDefinition' });
t.def('SomeFragment');
t.keyword('on', { kind: 'TypeCondition' });
t.name('SomeType', { kind: 'NamedType' });
expectDirective({ t }, { name: 'someDirective' });
t.punctuation('{', { kind: 'SelectionSet' });
t.property('someField', { kind: 'Field' });
t.punctuation('}', { kind: 'Document' });
t.eol();
});
it('parses query with inline fragment', () => {
const { t } = getUtils(`
query SomeQuery {
someField {
... on SomeType {
anotherField
}
}
}
`);
t.keyword('query', { kind: 'Query' });
t.def('SomeQuery');
t.punctuation('{', { kind: 'SelectionSet' });
t.property('someField', { kind: 'Field' });
t.punctuation('{', { kind: 'SelectionSet' });
t.punctuation('...', { kind: 'InlineFragment' });
t.keyword('on', { kind: 'TypeCondition' });
t.name('SomeType', { kind: 'NamedType' });
t.punctuation('{', { kind: 'SelectionSet' });
t.property('anotherField', { kind: 'Field' });
t.punctuation('}', { kind: 'SelectionSet' });
t.punctuation('}', { kind: 'SelectionSet' });
t.punctuation('}', { kind: 'Document' });
t.eol();
});
it('parses query with fragment spread', () => {
const { t } = getUtils(`
query SomeQuery {
someField {
...SomeFragment @someDirective
}
}
`);
t.keyword('query', { kind: 'Query' });
t.def('SomeQuery');
t.punctuation('{', { kind: 'SelectionSet' });
t.property('someField', { kind: 'Field' });
t.punctuation('{', { kind: 'SelectionSet' });
t.punctuation('...', { kind: 'FragmentSpread' });
t.def('SomeFragment');
expectDirective({ t }, { name: 'someDirective' });
t.punctuation('}', { kind: 'SelectionSet' });
t.punctuation('}', { kind: 'Document' });
t.eol();
});
it('parses mutation', () => {
const { t } = getUtils(`
mutation SomeMutation {
someMutation
}
`);
t.keyword('mutation', { kind: 'Mutation' });
t.def('SomeMutation');
t.punctuation('{', { kind: 'SelectionSet' });
t.property('someMutation', { kind: 'Field' });
t.punctuation('}', { kind: 'Document' });
t.eol();
});
it('parses mutation with variables', () => {
const { t, stream } = getUtils(`
mutation SomeMutation ($someVariable: SomeInputType) {
someMutation(someArg: $someVariable)
}
`);
t.keyword('mutation', { kind: 'Mutation' });
t.def('SomeMutation');
expectVarsDef(
{ t, stream },
{
onKind: 'Mutation',
vars: [{ name: 'someVariable', type: 'SomeInputType' }],
},
);
t.punctuation('{', { kind: 'SelectionSet' });
t.property('someMutation', { kind: 'Field' });
expectArgs(
{ t, stream },
{
onKind: 'Field',
args: [{ name: 'someArg', isVariable: true, value: 'someVariable' }],
},
);
t.punctuation('}', { kind: 'Document' });
t.eol();
});
performForEachType(
`
mutation SomeMutation {
someMutation(someArg: __VALUE__)
}
`,
({ t, stream }, fill) => {
it(`parses mutation field having argument of type ${fill.type}`, () => {
t.keyword('mutation', { kind: 'Mutation' });
t.def('SomeMutation');
t.punctuation('{', { kind: 'SelectionSet' });
t.property('someMutation', { kind: 'Field' });
expectArgs(
{ t, stream },
{ onKind: 'Field', args: [{ name: 'someArg', ...fill }] },
);
t.punctuation('}', { kind: 'Document' });
t.eol();
});
},
);
it('parses mutation field having argument of type object', () => {
const { t } = getUtils(`
mutation SomeMutation {
someMutation(someArg: { anotherField: $someVariable })
}
`);
t.keyword('mutation', { kind: 'Mutation' });
t.def('SomeMutation');
t.punctuation('{', { kind: 'SelectionSet' });
t.property('someMutation', { kind: 'Field' });
t.punctuation(/\(/, { kind: 'Arguments' });
t.attribute('someArg', { kind: 'Argument' });
t.punctuation(':');
t.punctuation('{', { kind: 'ObjectValue' });
t.attribute('anotherField', { kind: 'ObjectField' });
t.punctuation(':');
t.variable('$', { kind: 'Variable' });
t.variable('someVariable');
t.punctuation('}', { kind: 'Arguments' });
t.punctuation(/\)/, { kind: 'Field' });
t.punctuation('}', { kind: 'Document' });
t.eol();
});
performForEachType(
`
mutation SomeMutation {
someMutation @someDirective(anotherArg: __VALUE__)
}
`,
({ t, stream }, fill) => {
it(`parses mutation field with directive having argument of type ${fill.type}`, () => {
t.keyword('mutation', { kind: 'Mutation' });
t.def('SomeMutation');
t.punctuation('{', { kind: 'SelectionSet' });
t.property('someMutation', { kind: 'Field' });
expectDirective(
{ t, stream },
{
name: 'someDirective',
onKind: 'Field',
args: [{ name: 'anotherArg', ...fill }],
},
);
t.punctuation('}', { kind: 'Document' });
t.eol();
});
},
);
it(`parses mutation field with a directive and selection set`, () => {
const { t } = getUtils(`
mutation SomeMutation {
someMutation @someDirective {
anotherField
}
}
`);
t.keyword('mutation', { kind: 'Mutation' });
t.def('SomeMutation');
t.punctuation('{', { kind: 'SelectionSet' });
t.property('someMutation', { kind: 'Field' });
expectDirective({ t }, { name: 'someDirective' });
t.punctuation('{', { kind: 'SelectionSet' });
t.property('anotherField', { kind: 'Field' });
t.punctuation('}', { kind: 'SelectionSet' });
t.punctuation('}', { kind: 'Document' });
t.eol();
});
it(`parses mutation field with an alias`, () => {
const { t } = getUtils(`
mutation SomeMutation {
someAlias : someMutation @someDirective {
anotherField
}
}
`);
t.keyword('mutation', { kind: 'Mutation' });
t.def('SomeMutation');
t.punctuation('{', { kind: 'SelectionSet' });
t.property('someAlias', { kind: 'AliasedField' });
t.punctuation(':');
t.qualifier('someMutation');
expectDirective({ t }, { name: 'someDirective' });
t.punctuation('{', { kind: 'SelectionSet' });
t.property('anotherField', { kind: 'Field' });
t.punctuation('}', { kind: 'SelectionSet' });
t.punctuation('}', { kind: 'Document' });
t.eol();
});
it('parses mutation with inline fragment', () => {
const { t } = getUtils(`
mutation SomeMutation {
someMutation {
... on SomeType {
anotherField
}
}
}
`);
t.keyword('mutation', { kind: 'Mutation' });
t.def('SomeMutation');
t.punctuation('{', { kind: 'SelectionSet' });
t.property('someMutation', { kind: 'Field' });
t.punctuation('{', { kind: 'SelectionSet' });
t.punctuation('...', { kind: 'InlineFragment' });
t.keyword('on', { kind: 'TypeCondition' });
t.name('SomeType', { kind: 'NamedType' });
t.punctuation('{', { kind: 'SelectionSet' });
t.property('anotherField', { kind: 'Field' });
t.punctuation('}', { kind: 'SelectionSet' });
t.punctuation('}', { kind: 'SelectionSet' });
t.punctuation('}', { kind: 'Document' });
t.eol();
});
it('parses mutation with fragment spread', () => {
const { t } = getUtils(`
mutation SomeMutation {
someMutation {
...SomeFragment @someDirective
}
}
`);
t.keyword('mutation', { kind: 'Mutation' });
t.def('SomeMutation');
t.punctuation('{', { kind: 'SelectionSet' });
t.property('someMutation', { kind: 'Field' });
t.punctuation('{', { kind: 'SelectionSet' });
t.punctuation('...', { kind: 'FragmentSpread' });
t.def('SomeFragment');
expectDirective({ t }, { name: 'someDirective' });
t.punctuation('}', { kind: 'SelectionSet' });
t.punctuation('}', { kind: 'Document' });
t.eol();
});
it('parses subscription', () => {
const { t } = getUtils(`
subscription SomeSubscription {
someSubscription
}
`);
t.keyword('subscription', { kind: 'Subscription' });
t.def('SomeSubscription');
t.punctuation('{', { kind: 'SelectionSet' });
t.property('someSubscription', { kind: 'Field' });
t.punctuation('}', { kind: 'Document' });
t.eol();
});
it('parses subscription with variables', () => {
const { t, stream } = getUtils(`
subscription SomeSubscription ($someVariable: SomeInputType) {
someSubscription(someArg: $someVariable)
}
`);
t.keyword('subscription', { kind: 'Subscription' });
t.def('SomeSubscription');
expectVarsDef(
{ t, stream },
{
onKind: 'Subscription',
vars: [{ name: 'someVariable', type: 'SomeInputType' }],
},
);
t.punctuation('{', { kind: 'SelectionSet' });
t.property('someSubscription', { kind: 'Field' });
expectArgs(
{ t, stream },
{
onKind: 'Field',
args: [{ name: 'someArg', isVariable: true, value: 'someVariable' }],
},
);
t.punctuation('}', { kind: 'Document' });
t.eol();
});
performForEachType(
`
subscription SomeSubscription {
someSubscription(someArg: __VALUE__)
}
`,
({ t, stream }, fill) => {
it(`parses subscription field having argument of type ${fill.type}`, () => {
t.keyword('subscription', { kind: 'Subscription' });
t.def('SomeSubscription');
t.punctuation('{', { kind: 'SelectionSet' });
t.property('someSubscription', { kind: 'Field' });
expectArgs(
{ t, stream },
{ onKind: 'Field', args: [{ name: 'someArg', ...fill }] },
);
t.punctuation('}', { kind: 'Document' });
t.eol();
});
},
);
it('parses subscription field having argument of type object', () => {
const { t } = getUtils(`
subscription SomeSubscription {
someSubscription(someArg: { anotherField: $someVariable })
}
`);
t.keyword('subscription', { kind: 'Subscription' });
t.def('SomeSubscription');
t.punctuation('{', { kind: 'SelectionSet' });
t.property('someSubscription', { kind: 'Field' });
t.punctuation(/\(/, { kind: 'Arguments' });
t.attribute('someArg', { kind: 'Argument' });
t.punctuation(':');
t.punctuation('{', { kind: 'ObjectValue' });
t.attribute('anotherField', { kind: 'ObjectField' });
t.punctuation(':');
t.variable('$', { kind: 'Variable' });
t.variable('someVariable');
t.punctuation('}', { kind: 'Arguments' });
t.punctuation(/\)/, { kind: 'Field' });
t.punctuation('}', { kind: 'Document' });
t.eol();
});
performForEachType(
`
subscription SomeSubscription {
someSubscription @someDirective(anotherArg: __VALUE__)
}
`,
({ t, stream }, fill) => {
it(`parses subscription field with directive having argument of type ${fill.type}`, () => {
t.keyword('subscription', { kind: 'Subscription' });
t.def('SomeSubscription');
t.punctuation('{', { kind: 'SelectionSet' });
t.property('someSubscription', { kind: 'Field' });
expectDirective(
{ t, stream },
{
name: 'someDirective',
onKind: 'Field',
args: [{ name: 'anotherArg', ...fill }],
},
);
t.punctuation('}', { kind: 'Document' });
t.eol();
});
},
);
it(`parses subscription field with a directive and selection set`, () => {
const { t } = getUtils(`
subscription SomeSubscription {
someSubscription @someDirective {
anotherField
}
}
`);
t.keyword('subscription', { kind: 'Subscription' });
t.def('SomeSubscription');
t.punctuation('{', { kind: 'SelectionSet' });
t.property('someSubscription', { kind: 'Field' });
expectDirective({ t }, { name: 'someDirective' });
t.punctuation('{', { kind: 'SelectionSet' });
t.property('anotherField', { kind: 'Field' });
t.punctuation('}', { kind: 'SelectionSet' });
t.punctuation('}', { kind: 'Document' });
t.eol();
});
// Aliased field: the alias is the property (AliasedField) and the real field
// name is tokenized as a qualifier after the colon.
it(`parses subscription field with an alias`, () => {
  const { t } = getUtils(`
    subscription SomeSubscription {
      someAlias : someSubscription @someDirective {
        anotherField
      }
    }
  `);
  t.keyword('subscription', { kind: 'Subscription' });
  t.def('SomeSubscription');
  t.punctuation('{', { kind: 'SelectionSet' });
  t.property('someAlias', { kind: 'AliasedField' });
  t.punctuation(':');
  t.qualifier('someSubscription');
  expectDirective({ t }, { name: 'someDirective' });
  t.punctuation('{', { kind: 'SelectionSet' });
  t.property('anotherField', { kind: 'Field' });
  t.punctuation('}', { kind: 'SelectionSet' });
  t.punctuation('}', { kind: 'Document' });
  t.eol();
});
// Inline fragment: `... on Type { ... }` should yield InlineFragment,
// TypeCondition and NamedType states before the nested selection set.
it('parses subscription with inline fragment', () => {
  // Fix: `stream` was destructured but never used in this test; bind only `t`.
  const { t } = getUtils(`
    subscription SomeSubscription {
      someSubscription {
        ... on SomeType {
          anotherField
        }
      }
    }
  `);
  t.keyword('subscription', { kind: 'Subscription' });
  t.def('SomeSubscription');
  t.punctuation('{', { kind: 'SelectionSet' });
  t.property('someSubscription', { kind: 'Field' });
  t.punctuation('{', { kind: 'SelectionSet' });
  t.punctuation('...', { kind: 'InlineFragment' });
  t.keyword('on', { kind: 'TypeCondition' });
  t.name('SomeType', { kind: 'NamedType' });
  t.punctuation('{', { kind: 'SelectionSet' });
  t.property('anotherField', { kind: 'Field' });
  t.punctuation('}', { kind: 'SelectionSet' });
  t.punctuation('}', { kind: 'SelectionSet' });
  t.punctuation('}', { kind: 'Document' });
  t.eol();
});
// Fragment spread: `...SomeFragment` tokenizes as FragmentSpread + def name,
// optionally followed by directives.
it('parses subscription with fragment spread', () => {
  // Fix: `stream` was destructured but never used in this test; bind only `t`.
  const { t } = getUtils(`
    subscription SomeSubscription {
      someSubscription {
        ...SomeFragment @someDirective
      }
    }
  `);
  t.keyword('subscription', { kind: 'Subscription' });
  t.def('SomeSubscription');
  t.punctuation('{', { kind: 'SelectionSet' });
  t.property('someSubscription', { kind: 'Field' });
  t.punctuation('{', { kind: 'SelectionSet' });
  t.punctuation('...', { kind: 'FragmentSpread' });
  t.def('SomeFragment');
  expectDirective({ t }, { name: 'someDirective' });
  t.punctuation('}', { kind: 'SelectionSet' });
  t.punctuation('}', { kind: 'Document' });
  t.eol();
});
// SDL object type definitions: plain defs, `implements` lists (single and
// '&'-separated), and directives with every argument value type.
describe('parses object type def', () => {
  it(`correctly`, () => {
    const { t } = getUtils(`
      type SomeType {
        someField: AnotherType!
      }
    `);
    t.keyword('type', { kind: 'ObjectTypeDef' });
    t.name('SomeType');
    t.punctuation('{');
    t.property('someField', { kind: 'FieldDef' });
    t.punctuation(':');
    t.name('AnotherType', { kind: 'NamedType' });
    // The non-null '!' is attributed back to the enclosing FieldDef.
    t.punctuation('!', { kind: 'FieldDef' });
    t.punctuation('}', { kind: 'Document' });
    t.eol();
  });
  it('with an object implementing an interface', () => {
    const { t } = getUtils(`type SomeType implements SomeInterface`);
    t.keyword('type', { kind: 'ObjectTypeDef' });
    t.name('SomeType');
    t.keyword('implements', { kind: 'Implements' });
    t.name('SomeInterface', { kind: 'NamedType' });
    t.eol();
  });
  it('with an object type implementing multiple interfaces', () => {
    const { t } = getUtils(
      `type SomeType implements SomeInterface & AnotherInterface & YetAnotherInterface`,
    );
    t.keyword('type', { kind: 'ObjectTypeDef' });
    t.name('SomeType');
    t.keyword('implements', { kind: 'Implements' });
    t.name('SomeInterface', { kind: 'NamedType' });
    t.punctuation('&', { kind: 'Implements' });
    t.name('AnotherInterface', { kind: 'NamedType' });
    t.punctuation('&', { kind: 'Implements' });
    t.name('YetAnotherInterface', { kind: 'NamedType' });
    t.eol();
  });
  it('with a directive', () => {
    const { t } = getUtils(`type SomeType @someDirective`);
    t.keyword('type', { kind: 'ObjectTypeDef' });
    t.name('SomeType');
    expectDirective({ t }, { name: 'someDirective' });
    t.eol();
  });
  performForEachType(
    `type SomeType @someDirective(someArg: __VALUE__)`,
    ({ t, stream }, fill) => {
      it(`with a directive having argument of type ${fill.type}`, () => {
        t.keyword('type', { kind: 'ObjectTypeDef' });
        t.name('SomeType');
        expectDirective(
          { t, stream },
          {
            name: 'someDirective',
            onKind: 'ObjectTypeDef',
            args: [{ name: 'someArg', ...fill }],
          },
        );
        t.eol();
      });
    },
  );
});
// SDL interface definitions: fields, directives, and interfaces implementing
// other interfaces (GraphQL spec, June 2018+).
describe('parses interface def', () => {
  it('correctly', () => {
    const { t } = getUtils(`
      interface SomeInterface {
        someField: SomeType!
      }
    `);
    t.keyword('interface', { kind: 'InterfaceDef' });
    t.name('SomeInterface');
    t.punctuation('{');
    t.property('someField', { kind: 'FieldDef' });
    t.punctuation(':');
    t.name('SomeType', { kind: 'NamedType' });
    t.punctuation('!', { kind: 'FieldDef' });
    t.punctuation('}', { kind: 'Document' });
    t.eol();
  });
  it('with a directive', () => {
    const { t } = getUtils(`interface SomeInterface @someDirective`);
    t.keyword('interface', { kind: 'InterfaceDef' });
    t.name('SomeInterface');
    expectDirective({ t }, { name: 'someDirective' });
    t.eol();
  });
  it('implementing multiple interfaces', () => {
    const { t } = getUtils(
      `interface AnInterface implements SomeInterface & AnotherInterface & YetAnotherInterface`,
    );
    t.keyword('interface', { kind: 'InterfaceDef' });
    t.name('AnInterface');
    t.keyword('implements', { kind: 'Implements' });
    t.name('SomeInterface', { kind: 'NamedType' });
    t.punctuation('&', { kind: 'Implements' });
    t.name('AnotherInterface', { kind: 'NamedType' });
    t.punctuation('&', { kind: 'Implements' });
    t.name('YetAnotherInterface', { kind: 'NamedType' });
    t.eol();
  });
  performForEachType(
    `interface SomeInterface @someDirective(someArg: __VALUE__)`,
    ({ t, stream }, fill) => {
      it(`with a directive having argument of type ${fill.type}`, () => {
        t.keyword('interface', { kind: 'InterfaceDef' });
        t.name('SomeInterface');
        expectDirective(
          { t, stream },
          {
            name: 'someDirective',
            onKind: 'InterfaceDef',
            args: [{ name: 'someArg', ...fill }],
          },
        );
        t.eol();
      });
    },
  );
});
// SDL field definitions inside an object type: plain fields, argument lists
// (ArgumentsDef / InputValueDef), list and non-null type wrappers, and one or
// more directives (with every argument value type).
describe('parses field defs', () => {
  it('correctly', () => {
    const { t } = getUtils(`
      type SomeType {
        someField: AnotherType!
      }
    `);
    t.keyword('type', { kind: 'ObjectTypeDef' });
    t.name('SomeType');
    t.punctuation('{');
    t.property('someField', { kind: 'FieldDef' });
    t.punctuation(':');
    t.name('AnotherType', { kind: 'NamedType' });
    t.punctuation('!', { kind: 'FieldDef' });
    t.punctuation('}', { kind: 'Document' });
    t.eol();
  });
  it('with an argument', () => {
    const { t } = getUtils(`
      type SomeType {
        someField(someArg: AnotherType): [SomeAnotherType!]!
      }
    `);
    t.keyword('type', { kind: 'ObjectTypeDef' });
    t.name('SomeType');
    t.punctuation('{');
    t.property('someField', { kind: 'FieldDef' });
    t.punctuation(/\(/, { kind: 'ArgumentsDef' });
    t.attribute('someArg', { kind: 'InputValueDef' });
    t.punctuation(':');
    t.name('AnotherType', { kind: 'NamedType' });
    t.punctuation(/\)/, { kind: 'FieldDef' });
    t.punctuation(':');
    // [SomeAnotherType!]! — inner '!' belongs to ListType, outer to FieldDef.
    t.punctuation(/\[/, { kind: 'ListType' });
    t.name('SomeAnotherType', { kind: 'NamedType' });
    t.punctuation('!', { kind: 'ListType' });
    t.punctuation(/\]/);
    t.punctuation('!', { kind: 'FieldDef' });
    t.punctuation('}', { kind: 'Document' });
    t.eol();
  });
  it('with a directive', () => {
    const { t } = getUtils(`
      type SomeType {
        someField: AnotherType @someDirective
      }
    `);
    t.keyword('type', { kind: 'ObjectTypeDef' });
    t.name('SomeType');
    t.punctuation('{');
    t.property('someField', { kind: 'FieldDef' });
    t.punctuation(':');
    t.name('AnotherType', { kind: 'NamedType' });
    expectDirective({ t }, { name: 'someDirective' });
    t.punctuation('}', { kind: 'Document' });
    t.eol();
  });
  it('with multiple directives', () => {
    const { t } = getUtils(`
      type SomeType {
        someField: AnotherType @someDirective @anotherDirective
      }
    `);
    t.keyword('type', { kind: 'ObjectTypeDef' });
    t.name('SomeType');
    t.punctuation('{');
    t.property('someField', { kind: 'FieldDef' });
    t.punctuation(':');
    t.name('AnotherType', { kind: 'NamedType' });
    expectDirective({ t }, { name: 'someDirective' });
    expectDirective({ t }, { name: 'anotherDirective' });
    t.punctuation('}', { kind: 'Document' });
    t.eol();
  });
  performForEachType(
    `
      type SomeType {
        someField: AnotherType @someDirective(someArg: __VALUE__)
      }
    `,
    ({ t, stream }, fill) => {
      it(`with a directive having arguments of type ${fill.type}`, () => {
        t.keyword('type', { kind: 'ObjectTypeDef' });
        t.name('SomeType');
        t.punctuation('{');
        t.property('someField', { kind: 'FieldDef' });
        t.punctuation(':');
        t.name('AnotherType', { kind: 'NamedType' });
        expectDirective(
          { t, stream },
          {
            name: 'someDirective',
            onKind: 'FieldDef',
            args: [{ name: 'someArg', ...fill }],
          },
        );
        t.punctuation('}', { kind: 'Document' });
        t.eol();
      });
    },
  );
});
// `extend type`: the extend keyword opens an ExtendDef state, then parsing
// proceeds exactly as for a normal object type definition.
describe('parses extend type def', () => {
  it('correctly', () => {
    const { t } = getUtils(`
      extend type SomeType {
        someField: AnotherType
      }
    `);
    t.keyword('extend', { kind: 'ExtendDef' });
    t.keyword('type', { kind: 'ObjectTypeDef' });
    t.name('SomeType');
    t.punctuation('{');
    t.property('someField', { kind: 'FieldDef' });
    t.punctuation(':');
    t.name('AnotherType', { kind: 'NamedType' });
    t.punctuation('}', { kind: 'Document' });
    t.eol();
  });
  it('with multiple directives', () => {
    const { t } = getUtils(`
      extend type SomeType {
        someField: AnotherType @someDirective @anotherDirective
      }
    `);
    t.keyword('extend', { kind: 'ExtendDef' });
    t.keyword('type', { kind: 'ObjectTypeDef' });
    t.name('SomeType');
    t.punctuation('{');
    t.property('someField', { kind: 'FieldDef' });
    t.punctuation(':');
    t.name('AnotherType', { kind: 'NamedType' });
    expectDirective({ t }, { name: 'someDirective' });
    expectDirective({ t }, { name: 'anotherDirective' });
    t.punctuation('}', { kind: 'Document' });
    t.eol();
  });
});
// `input` definitions: fields are InputValueDef (attribute tokens), unlike
// object types where they are FieldDef (property tokens).
describe('parses input type def', () => {
  it('correctly', () => {
    const { t } = getUtils(`
      input SomeInputType {
        someField: AnotherType
      }
    `);
    t.keyword('input', { kind: 'InputDef' });
    t.name('SomeInputType');
    t.punctuation('{');
    t.attribute('someField', { kind: 'InputValueDef' });
    t.punctuation(':');
    t.name('AnotherType', { kind: 'NamedType' });
    t.punctuation('}', { kind: 'Document' });
    t.eol();
  });
  it('with multiple directives', () => {
    const { t } = getUtils(`
      input SomeInputType {
        someField: AnotherType @someDirective @anotherDirective
      }
    `);
    t.keyword('input', { kind: 'InputDef' });
    t.name('SomeInputType');
    t.punctuation('{');
    t.attribute('someField', { kind: 'InputValueDef' });
    t.punctuation(':');
    t.name('AnotherType', { kind: 'NamedType' });
    expectDirective({ t }, { name: 'someDirective' });
    expectDirective({ t }, { name: 'anotherDirective' });
    t.punctuation('}', { kind: 'Document' });
    t.eol();
  });
});
// `enum` definitions: each value tokenizes as an Enum value in EnumValueDef;
// a directive may appear between the name and the value block.
describe('parses enum type def', () => {
  it('correctly', () => {
    const { t } = getUtils(`
      enum SomeEnum {
        SOME_ENUM_VALUE
        ANOTHER_ENUM_VALUE
      }
    `);
    t.keyword('enum', { kind: 'EnumDef' });
    t.name('SomeEnum');
    t.punctuation('{');
    t.value('Enum', 'SOME_ENUM_VALUE', { kind: 'EnumValueDef' });
    t.value('Enum', 'ANOTHER_ENUM_VALUE', { kind: 'EnumValueDef' });
    t.punctuation('}', { kind: 'Document' });
    t.eol();
  });
  it('with a directive', () => {
    const { t } = getUtils(`
      enum SomeEnum @someDirective {
        SOME_ENUM_VALUE
        ANOTHER_ENUM_VALUE
      }
    `);
    t.keyword('enum', { kind: 'EnumDef' });
    t.name('SomeEnum');
    expectDirective({ t }, { name: 'someDirective' });
    // After the directive, the opening brace is attributed to EnumDef.
    t.punctuation('{', { kind: 'EnumDef' });
    t.value('Enum', 'SOME_ENUM_VALUE', { kind: 'EnumValueDef' });
    t.value('Enum', 'ANOTHER_ENUM_VALUE', { kind: 'EnumValueDef' });
    t.punctuation('}', { kind: 'Document' });
    t.eol();
  });
});
// `scalar` definitions: just keyword + name, optionally with a directive.
describe('parses scalar type def', () => {
  it('correctly', () => {
    const { t } = getUtils(`scalar SomeScalar`);
    t.keyword('scalar', { kind: 'ScalarDef' });
    t.name('SomeScalar');
    t.eol();
  });
  it('with a directive', () => {
    const { t } = getUtils(`scalar SomeScalar @someDirective`);
    t.keyword('scalar', { kind: 'ScalarDef' });
    t.name('SomeScalar');
    expectDirective({ t }, { name: 'someDirective' });
    t.eol();
  });
});
// `union` definitions: members are NamedTypes separated by '|' tokens that
// belong to the UnionDef state; directives may precede the '='.
describe('parses union type def', () => {
  it('correctly', () => {
    const { t } = getUtils(`union SomeUnionType = SomeType | AnotherType`);
    t.keyword('union', { kind: 'UnionDef' });
    t.name('SomeUnionType');
    t.punctuation('=');
    t.name('SomeType', { kind: 'NamedType' });
    t.punctuation('|', { kind: 'UnionDef' });
    t.name('AnotherType', { kind: 'NamedType' });
    t.eol();
  });
  it('with a directive', () => {
    const { t } = getUtils(
      `union SomeUnionType @someDirective = SomeType | AnotherType`,
    );
    t.keyword('union', { kind: 'UnionDef' });
    t.name('SomeUnionType');
    expectDirective({ t }, { name: 'someDirective' });
    t.punctuation('=', { kind: 'UnionDef' });
    t.name('SomeType', { kind: 'NamedType' });
    t.punctuation('|', { kind: 'UnionDef' });
    t.name('AnotherType', { kind: 'NamedType' });
    t.eol();
  });
});
// `directive @name on LOC | LOC`: the '@' and the name are meta tokens, and
// each location tokenizes as an Enum value in DirectiveLocation.
describe('parses directive type def', () => {
  it('with multiple locations', () => {
    const { t } = getUtils(
      `directive @someDirective on FIELD_DEFINITION | ENUM_VALUE `,
    );
    t.keyword('directive', { kind: 'DirectiveDef' });
    t.meta('@');
    t.meta('someDirective');
    t.keyword('on');
    t.value('Enum', 'FIELD_DEFINITION', { kind: 'DirectiveLocation' });
    t.punctuation('|', { kind: 'DirectiveDef' });
    t.value('Enum', 'ENUM_VALUE', { kind: 'DirectiveLocation' });
    t.eol();
  });
});
});
});
|
using System.Security.Cryptography;
using System.Text;
namespace Midgard.Utilities
{
/// <summary>
/// Password hashing helpers.
/// NOTE(review): SHA-512 over MD5(password)+salt is a fast, single-iteration
/// scheme and is weak for password storage; a slow KDF (PBKDF2 via
/// Rfc2898DeriveBytes, bcrypt, Argon2) would be preferable. Kept byte-for-byte
/// compatible here so existing stored hashes remain valid.
/// </summary>
public class Passwords
{
    /// <summary>
    /// MD5 digest of <paramref name="str"/> (UTF-8 encoded) as a
    /// 32-character lowercase hex string.
    /// </summary>
    private static string Md5(string str)
    {
        using var md5 = MD5.Create();
        var buffer = md5.ComputeHash(Encoding.UTF8.GetBytes(str));
        // "x2" emits lowercase hex directly, producing exactly the same
        // output as the previous "X2" + ToLower() loop, but a StringBuilder
        // avoids the O(n^2) repeated string concatenation.
        var hex = new StringBuilder(buffer.Length * 2);
        foreach (var b in buffer)
        {
            hex.Append(b.ToString("x2"));
        }
        return hex.ToString();
    }

    /// <summary>
    /// SHA-512 digest of <paramref name="str"/> (ASCII encoded) as a
    /// 128-character lowercase hex string.
    /// NOTE(review): this uses ASCII while Md5 uses UTF-8. The MD5 hex part
    /// of the input is always ASCII-safe, but a non-ASCII salt would be
    /// lossily encoded — confirm salts are ASCII-only. Not changed here to
    /// preserve existing hashes.
    /// </summary>
    private static string Sha512(string str)
    {
        using var sha512 = SHA512.Create();
        var buffer = sha512.ComputeHash(Encoding.ASCII.GetBytes(str));
        var hex = new StringBuilder(buffer.Length * 2);
        foreach (var b in buffer)
        {
            hex.Append(b.ToString("x2"));
        }
        return hex.ToString();
    }

    /// <summary>
    /// Hashes a password for storage: SHA-512 over (MD5(password) + salt),
    /// returned as 128 lowercase hex characters.
    /// </summary>
    public static string Hash(string password, string salt)
    {
        return Sha512(Md5(password) + salt);
    }
}
}
|
using Microsoft.Extensions.DependencyInjection;
using System;
using System.Collections.Generic;
namespace Orleans.MultiClient.DependencyInjection
{
/// <summary>
/// Builder for registering and configuring one or more Orleans clients.
/// </summary>
public interface IMultiClientBuilder
{
    /// <summary>The service collection the clients are registered into.</summary>
    IServiceCollection Services { get; }

    /// <summary>
    /// Extra configuration applied to the underlying <see cref="IClientBuilder"/>.
    /// NOTE(review): semantics inferred from the delegate type — confirm
    /// against implementations (whether it runs once or per client).
    /// </summary>
    Action<IClientBuilder> OrleansConfigure { get; set; }

    /// <summary>Options for each Orleans client to be built.</summary>
    IList<OrleansClientOptions> ClientOptions { get; set; }

    /// <summary>
    /// Builds the configured clients. Behavior is defined by implementations,
    /// not visible here.
    /// </summary>
    void Build();
}
}
|
'use strict';

// Bootstrap: bind the Express app to its configured port and log where it is
// listening. Port and environment both come from the app's own settings.
const http = require('http');
const app = require('./app');

http.createServer(app).listen(app.get('port'), () => {
  console.log(`Express server listening on port ${app.get('port')} in ${app.get('env')} environment`);
});
|
'use strict';
import { Connection } from 'mysql';
/**
 * Establishes a connection to the DB.
 *
 * Resolves once the driver's callback reports no error; rejects with the
 * driver error otherwise.
 */
export function connect(connection: Connection): Promise<void> {
  return new Promise((resolve, reject) => {
    connection.connect((error) => {
      // `error != null` covers both null and undefined, exactly like the
      // previous explicit double comparison.
      if (error != null) {
        reject(error);
        return;
      }
      resolve();
    });
  });
}
/**
 * Gracefully terminates the current connection to the DB.
 *
 * Resolves once the driver's callback reports no error; rejects with the
 * driver error otherwise.
 */
export function end(connection: Connection): Promise<void> {
  return new Promise((resolve, reject) => {
    connection.end((error) => {
      if (error != null) {
        reject(error);
        return;
      }
      resolve();
    });
  });
}
/**
 * Returns a function that will run a query on the provided connection.
 *
 * The returned promise resolves when the query completes and rejects on a
 * driver error; any result rows are intentionally discarded (the return type
 * is Promise<void>).
 */
export function queryFn(
  connection: Connection
): (options: string, values: any[]) => Promise<void> {
  return (options: string, values: any[]): Promise<void> =>
    new Promise((resolve, reject) => {
      connection.query(options, values, (error) => {
        if (error != null) {
          reject(error);
          return;
        }
        resolve();
      });
    });
}
|
import Towers from "./towers.js";
const LOADED_TOWERS: Map<number, Towers[]> = new Map();
const utf8Decoder = new TextDecoder('utf-8');
/**
 * Returns a copy of a randomly chosen puzzle of the requested size, loading
 * (and caching) the puzzle file for that size on first use.
 */
export async function randomOfSize(size: number): Promise<Towers> {
  await loadTowers(size);
  const pool = LOADED_TOWERS.get(size)!;
  const chosen = pool[Math.floor(Math.random() * pool.length)];
  return chosen.copy();
}
/**
 * Fetches and caches the puzzle file for the given size into LOADED_TOWERS.
 * No-op when the size has already been loaded.
 */
async function loadTowers(size: number): Promise<void> {
  // Check if already loaded
  if (LOADED_TOWERS.has(size)) {
    return;
  }
  const response = await fetch(`./src/puzzles/towers/towers_${size}.txt`);
  const reader = response.body!.getReader();
  // Fix: decode in streaming mode. Decoding each chunk independently (the
  // previous behavior) corrupts any multi-byte UTF-8 sequence that happens to
  // be split across two chunks. A function-local decoder is used so streaming
  // state cannot leak between concurrent loads; the final call with
  // { stream: false } flushes any buffered bytes.
  const decoder = new TextDecoder('utf-8');
  let done = false;
  let data = "";
  while (!done) {
    let result = await reader.read();
    done = result.done;
    data += decoder.decode(result.value, { stream: !done });
  }
  let puzzles = data.split("-----\n");
  // The text file ends with the delimiter, so remove the empty string at the
  // end of the array
  puzzles.pop();
  // Remove difficulty rating for now
  puzzles = puzzles.map(p => {
    const index = p.indexOf("\n");
    // NOTE(review): `p.length - 1` also drops the record's final character —
    // presumably a trailing newline before the delimiter; confirm against the
    // file format before touching this.
    return p.substring(index + 1, p.length - 1);
  })
  LOADED_TOWERS.set(size, puzzles.map(p => {
    let t = Towers.fromString(p);
    return t;
  }));
}
|
// Library
import { construct } from './codes.ts'
import { ANSIStyle, style } from './styles.ts'
import { ANSIColor, color, bgOffset } from './colors.ts'
/**
 * Mixes several ANSI styles and colors together
 * @param s The style to apply to the text
 * @param c The color to apply to the text
 * @param bg The color to apply to the background
 */
export const mix = (s: ANSIStyle, c: ANSIColor = 'default', bg: ANSIColor = 'default') => {
    // The style code always contributes; foreground and background codes are
    // only included when explicitly requested ('default' means "omit").
    const codes: number[] = [
        style[s][0],
        ...(c !== 'default' ? [color[c][0]] : []),
        ...(bg !== 'default' ? [color[bg][0] + bgOffset] : []),
    ]
    return construct(codes, 0)
}
|
package de.htwg.se.empire.controller.impl
import de.htwg.se.empire.controller.ReinforcementController
import de.htwg.se.empire.model.Grid
import de.htwg.se.empire.model.player.Player
import org.apache.logging.log4j.{ LogManager, Logger }
class DefaultReinforcementController extends ReinforcementController {
  val LOG: Logger = LogManager.getLogger(this.getClass)

  /*
   * Player gets one soldier for every 3 countries he have, but minimum 3
   */
  /**
   * Soldiers the player may distribute this turn: max(countries/3, 3) plus
   * continent bonuses. Equivalent to the previous if/else formulation.
   */
  def calcSoldiersToDistribute(playingField: Grid, player: Player): Int =
    math.max(player.countries.length / 3, 3) + getBonus(playingField, player)

  /**
   * Sum of the bonuses of every continent fully controlled by the player.
   *
   * Bug fix: the subset test was inverted — `playerCountries.subsetOf(continentCountries)`
   * awarded a continent's bonus to any player whose entire holdings lay inside
   * that continent (even a single country), and never to a player spread over
   * several continents. A continent is controlled when ALL of its countries
   * belong to the player. The nonEmpty guard prevents a vacuous bonus from a
   * continent with no countries.
   */
  def getBonus(playingField: Grid, player: Player): Int = {
    val playerCountries = player.countries.toSet
    var bonus = 0
    for (c <- playingField.continents) {
      val continentCountries = c.countries.toSet
      if (continentCountries.nonEmpty && continentCountries.subsetOf(playerCountries)) bonus += c.bonus
    }
    bonus
  }

  /** Adds the given number of soldiers to the named country, if it exists. */
  def distributeSoldiers(playingField: Grid, countryName: String, soldiers: Int): Unit =
    playingField.getCountry(countryName).foreach(_.addSoldiers(soldiers)) // else inform view
}
|
// Message-handling submodules, re-exported below so callers can use a flat
// namespace instead of reaching into each submodule.
mod envelope;
mod mtype;
mod confirm;

pub use {
    envelope::*,
    mtype::*,
    confirm::*,
};

// NOTE(review): `player` and `map` are imported here but no use is visible in
// this chunk of the module — confirm they are referenced later in this file.
use crate::player;
use crate::map;
|
//===- Error.cpp - system_error extensions for llvm-readobj -----*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This defines a new error_category for the llvm-readobj tool.
//
//===----------------------------------------------------------------------===//
#include "Error.h"
#include "llvm/Support/ErrorHandling.h"
using namespace llvm;
namespace {
// Error category for llvm-readobj error codes. Derives from `_do_message`
// (declared in Error.h — presumably a helper base supplying the remaining
// error_category machinery; confirm there) and overrides only the
// name/message/condition hooks defined below.
class _readobj_error_category : public _do_message {
public:
  virtual const char* name() const;
  virtual std::string message(int ev) const;
  virtual error_condition default_error_condition(int ev) const;
};
} // namespace
// Identifier for this category, shown alongside error messages.
const char *_readobj_error_category::name() const {
  return "llvm.readobj";
}
// Maps a readobj_error enumerator value to its human-readable message.
// Any enumerator without a case here is a programming error and aborts via
// llvm_unreachable rather than returning a bogus string.
std::string _readobj_error_category::message(int ev) const {
  switch (ev) {
  case readobj_error::success: return "Success";
  case readobj_error::file_not_found:
    return "No such file.";
  case readobj_error::unsupported_file_format:
    return "The file was not recognized as a valid object file.";
  case readobj_error::unrecognized_file_format:
    return "Unrecognized file type.";
  case readobj_error::unsupported_obj_file_format:
    return "Unsupported object file format.";
  case readobj_error::unknown_symbol:
    return "Unknown symbol.";
  default:
    llvm_unreachable("An enumerator of readobj_error does not have a message "
                     "defined.");
  }
}
// Collapses every non-success code to errc::invalid_argument so generic
// error-condition comparisons only distinguish success vs failure.
error_condition _readobj_error_category::default_error_condition(int ev) const {
  if (ev == readobj_error::success)
    return errc::success;
  return errc::invalid_argument;
}
namespace llvm {
// Accessor for the singleton readobj error category.
// NOTE(review): relies on function-local static initialization; thread-safe
// ("magic statics") only under C++11 or later — confirm the build mode if
// concurrent first use is possible.
const error_category &readobj_category() {
  static _readobj_error_category o;
  return o;
}
} // namespace llvm
|
package de.faweizz.poc.util
import kotlinx.serialization.Serializable
/**
 * A transformation step awaiting execution, paired with the identifier under
 * which it is tracked.
 *
 * Serialized via kotlinx.serialization (see the [Serializable] annotation).
 *
 * @property id identifier of this pending step
 *   (NOTE(review): uniqueness is assumed from the name — confirm at call sites)
 * @property step payload describing the transformation to perform
 */
@Serializable
data class PendingTransformationStep(
    val id: String,
    val step: TransformationStepData
)
|
/*
* Copyright 2014-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.store.flow.impl;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Maps;
import com.google.common.collect.Streams;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.onlab.util.KryoNamespace;
import org.onlab.util.Tools;
import org.onosproject.cfg.ComponentConfigService;
import org.onosproject.cluster.ClusterService;
import org.onosproject.cluster.NodeId;
import org.onosproject.core.CoreService;
import org.onosproject.core.IdGenerator;
import org.onosproject.event.AbstractListenerManager;
import org.onosproject.mastership.MastershipService;
import org.onosproject.net.DeviceId;
import org.onosproject.net.device.DeviceEvent;
import org.onosproject.net.device.DeviceListener;
import org.onosproject.net.device.DeviceService;
import org.onosproject.net.flow.CompletedBatchOperation;
import org.onosproject.net.flow.DefaultFlowEntry;
import org.onosproject.net.flow.FlowEntry;
import org.onosproject.net.flow.FlowEntry.FlowEntryState;
import org.onosproject.net.flow.FlowRule;
import org.onosproject.net.flow.FlowRuleEvent;
import org.onosproject.net.flow.FlowRuleEvent.Type;
import org.onosproject.net.flow.FlowRuleService;
import org.onosproject.net.flow.FlowRuleStore;
import org.onosproject.net.flow.FlowRuleStoreDelegate;
import org.onosproject.net.flow.StoredFlowEntry;
import org.onosproject.net.flow.TableStatisticsEntry;
import org.onosproject.net.flow.oldbatch.FlowRuleBatchEntry;
import org.onosproject.net.flow.oldbatch.FlowRuleBatchEntry.FlowRuleOperation;
import org.onosproject.net.flow.oldbatch.FlowRuleBatchEvent;
import org.onosproject.net.flow.oldbatch.FlowRuleBatchOperation;
import org.onosproject.net.flow.oldbatch.FlowRuleBatchRequest;
import org.onosproject.persistence.PersistenceService;
import org.onosproject.store.AbstractStore;
import org.onosproject.store.cluster.messaging.ClusterCommunicationService;
import org.onosproject.store.cluster.messaging.ClusterMessage;
import org.onosproject.store.cluster.messaging.ClusterMessageHandler;
import org.onosproject.store.flow.ReplicaInfo;
import org.onosproject.store.flow.ReplicaInfoEvent;
import org.onosproject.store.flow.ReplicaInfoEventListener;
import org.onosproject.store.flow.ReplicaInfoService;
import org.onosproject.store.impl.MastershipBasedTimestamp;
import org.onosproject.store.serializers.KryoNamespaces;
import org.onosproject.store.service.AsyncConsistentMap;
import org.onosproject.store.service.EventuallyConsistentMap;
import org.onosproject.store.service.EventuallyConsistentMapEvent;
import org.onosproject.store.service.EventuallyConsistentMapListener;
import org.onosproject.store.service.MapEvent;
import org.onosproject.store.service.MapEventListener;
import org.onosproject.store.service.Serializer;
import org.onosproject.store.service.StorageService;
import org.onosproject.store.service.WallClockTimestamp;
import org.osgi.service.component.ComponentContext;
import org.osgi.service.component.annotations.Activate;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Deactivate;
import org.osgi.service.component.annotations.Modified;
import org.osgi.service.component.annotations.Reference;
import org.osgi.service.component.annotations.ReferenceCardinality;
import org.slf4j.Logger;
import java.util.Collections;
import java.util.Dictionary;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import java.util.stream.Collectors;
import static com.google.common.base.Strings.isNullOrEmpty;
import static org.onlab.util.Tools.get;
import static org.onlab.util.Tools.groupedThreads;
import static org.onosproject.net.flow.FlowRuleEvent.Type.RULE_REMOVED;
import static org.onosproject.store.flow.impl.ECFlowRuleStoreMessageSubjects.APPLY_BATCH_FLOWS;
import static org.onosproject.store.flow.impl.ECFlowRuleStoreMessageSubjects.FLOW_TABLE_BACKUP;
import static org.onosproject.store.flow.impl.ECFlowRuleStoreMessageSubjects.GET_DEVICE_FLOW_COUNT;
import static org.onosproject.store.flow.impl.ECFlowRuleStoreMessageSubjects.GET_DEVICE_FLOW_ENTRIES;
import static org.onosproject.store.flow.impl.ECFlowRuleStoreMessageSubjects.GET_FLOW_ENTRY;
import static org.onosproject.store.flow.impl.ECFlowRuleStoreMessageSubjects.REMOTE_APPLY_COMPLETED;
import static org.onosproject.store.flow.impl.ECFlowRuleStoreMessageSubjects.REMOVE_FLOW_ENTRY;
import static org.slf4j.LoggerFactory.getLogger;
import static org.onosproject.store.OsgiPropertyConstants.*;
/**
* Manages inventory of flow rules using a distributed state management protocol.
*/
@Component(
immediate = true,
service = FlowRuleStore.class,
property = {
MESSAGE_HANDLER_THREAD_POOL_SIZE + ":Integer=" + MESSAGE_HANDLER_THREAD_POOL_SIZE_DEFAULT,
BACKUP_PERIOD_MILLIS + ":Integer=" + BACKUP_PERIOD_MILLIS_DEFAULT,
ANTI_ENTROPY_PERIOD_MILLIS + ":Integer=" + ANTI_ENTROPY_PERIOD_MILLIS_DEFAULT,
EC_FLOW_RULE_STORE_PERSISTENCE_ENABLED + ":Boolean=" + EC_FLOW_RULE_STORE_PERSISTENCE_ENABLED_DEFAULT,
MAX_BACKUP_COUNT + ":Integer=" + MAX_BACKUP_COUNT_DEFAULT
}
)
public class ECFlowRuleStore
extends AbstractStore<FlowRuleBatchEvent, FlowRuleStoreDelegate>
implements FlowRuleStore {
private final Logger log = getLogger(getClass());

// Timeout for flow-store operations, in milliseconds.
private static final long FLOW_RULE_STORE_TIMEOUT_MILLIS = 5000;

/** Number of threads in the message handler pool. */
private int msgHandlerPoolSize = MESSAGE_HANDLER_THREAD_POOL_SIZE_DEFAULT;

/** Delay in ms between successive backup runs. */
private int backupPeriod = BACKUP_PERIOD_MILLIS_DEFAULT;

/** Delay in ms between anti-entropy runs. */
private int antiEntropyPeriod = ANTI_ENTROPY_PERIOD_MILLIS_DEFAULT;

/** Indicates whether or not changes in the flow table should be persisted to disk. */
private boolean persistenceEnabled = EC_FLOW_RULE_STORE_PERSISTENCE_ENABLED_DEFAULT;

/** Max number of backup copies for each device. */
private volatile int backupCount = MAX_BACKUP_COUNT_DEFAULT;

// Local flow table; also registered as a DeviceListener in activate().
private InternalFlowTable flowTable = new InternalFlowTable();

// OSGi service references injected by the runtime.
@Reference(cardinality = ReferenceCardinality.MANDATORY)
protected ReplicaInfoService replicaInfoManager;

@Reference(cardinality = ReferenceCardinality.MANDATORY)
protected ClusterCommunicationService clusterCommunicator;

@Reference(cardinality = ReferenceCardinality.MANDATORY)
protected ClusterService clusterService;

@Reference(cardinality = ReferenceCardinality.MANDATORY)
protected DeviceService deviceService;

@Reference(cardinality = ReferenceCardinality.MANDATORY)
protected CoreService coreService;

@Reference(cardinality = ReferenceCardinality.MANDATORY)
protected ComponentConfigService configService;

@Reference(cardinality = ReferenceCardinality.MANDATORY)
protected MastershipService mastershipService;

@Reference(cardinality = ReferenceCardinality.MANDATORY)
protected PersistenceService persistenceService;

// Tracks outstanding batch requests by batch id -> originating node.
private Map<Long, NodeId> pendingResponses = Maps.newConcurrentMap();
private ExecutorService messageHandlingExecutor;
private ExecutorService eventHandler;

private final ScheduledExecutorService backupSenderExecutor =
        Executors.newSingleThreadScheduledExecutor(groupedThreads("onos/flow", "backup-sender", log));

// Per-device table statistics, replicated eventually-consistently.
private EventuallyConsistentMap<DeviceId, List<TableStatisticsEntry>> deviceTableStats;
private final EventuallyConsistentMapListener<DeviceId, List<TableStatisticsEntry>> tableStatsListener =
        new InternalTableStatsListener();

@Reference(cardinality = ReferenceCardinality.MANDATORY)
protected StorageService storageService;

// Serializer for inter-node flow messages (API types plus bucket types).
protected final Serializer serializer = Serializer.using(KryoNamespace.newBuilder()
        .register(KryoNamespaces.API)
        .register(BucketId.class)
        .register(FlowBucket.class)
        .register(ImmutablePair.class)
        .build());

// Namespace builder used by the eventually-consistent table-stats map.
protected final KryoNamespace.Builder serializerBuilder = KryoNamespace.newBuilder()
        .register(KryoNamespaces.API)
        .register(BucketId.class)
        .register(MastershipBasedTimestamp.class);

// Mastership terms per device, kept in a consistent map (built in activate()).
protected AsyncConsistentMap<DeviceId, Long> mastershipTermLifecycles;

private IdGenerator idGenerator;
private NodeId local;
/**
 * Component activation: registers configuration properties, creates the
 * executors and distributed maps, wires up message handlers and device
 * listeners, and seeds the flow table with currently-known devices.
 * Ordering matters: message handlers must be registered before listeners
 * start delivering events.
 */
@Activate
public void activate(ComponentContext context) {
    configService.registerProperties(getClass());
    idGenerator = coreService.getIdGenerator(FlowRuleService.FLOW_OP_TOPIC);
    local = clusterService.getLocalNode().id();
    eventHandler = Executors.newSingleThreadExecutor(
            groupedThreads("onos/flow", "event-handler", log));
    messageHandlingExecutor = Executors.newFixedThreadPool(
            msgHandlerPoolSize, groupedThreads("onos/store/flow", "message-handlers", log));
    registerMessageHandlers(messageHandlingExecutor);
    mastershipTermLifecycles = storageService.<DeviceId, Long>consistentMapBuilder()
            .withName("onos-flow-store-terms")
            .withSerializer(serializer)
            .buildAsyncMap();
    // Table statistics are replicated eventually-consistently; wall-clock
    // timestamps resolve concurrent updates, tombstones are not needed.
    deviceTableStats = storageService.<DeviceId, List<TableStatisticsEntry>>eventuallyConsistentMapBuilder()
            .withName("onos-flow-table-stats")
            .withSerializer(serializerBuilder)
            .withAntiEntropyPeriod(5, TimeUnit.SECONDS)
            .withTimestampProvider((k, v) -> new WallClockTimestamp())
            .withTombstonesDisabled()
            .build();
    deviceTableStats.addListener(tableStatsListener);
    deviceService.addListener(flowTable);
    deviceService.getDevices().forEach(device -> flowTable.addDevice(device.id()));
    logConfig("Started");
}
/**
 * Component deactivation: tears down in reverse of activation — unregister
 * handlers and listeners first so no new work arrives, then destroy the
 * stats map and shut the executors down immediately.
 */
@Deactivate
public void deactivate(ComponentContext context) {
    configService.unregisterProperties(getClass(), false);
    unregisterMessageHandlers();
    deviceService.removeListener(flowTable);
    deviceTableStats.removeListener(tableStatsListener);
    deviceTableStats.destroy();
    eventHandler.shutdownNow();
    messageHandlingExecutor.shutdownNow();
    backupSenderExecutor.shutdownNow();
    log.info("Stopped");
}
/**
 * Applies updated component configuration. Unparsable values reset ALL four
 * settings to their defaults (pre-existing behavior, preserved). The message
 * handler pool is rebuilt and its subscriptions re-registered when the pool
 * size changes.
 */
@SuppressWarnings("rawtypes")
@Modified
public void modified(ComponentContext context) {
    if (context == null) {
        logConfig("Default config");
        return;
    }
    Dictionary properties = context.getProperties();
    int newPoolSize;
    int newBackupPeriod;
    int newBackupCount;
    int newAntiEntropyPeriod;
    try {
        // Fix: use the shared OsgiPropertyConstants key, consistent with the
        // @Component property declaration and the other lookups below
        // (previously this passed the raw literal "msgHandlerPoolSize").
        String s = get(properties, MESSAGE_HANDLER_THREAD_POOL_SIZE);
        newPoolSize = isNullOrEmpty(s) ? msgHandlerPoolSize : Integer.parseInt(s.trim());
        s = get(properties, BACKUP_PERIOD_MILLIS);
        newBackupPeriod = isNullOrEmpty(s) ? backupPeriod : Integer.parseInt(s.trim());
        s = get(properties, MAX_BACKUP_COUNT);
        newBackupCount = isNullOrEmpty(s) ? backupCount : Integer.parseInt(s.trim());
        s = get(properties, ANTI_ENTROPY_PERIOD_MILLIS);
        newAntiEntropyPeriod = isNullOrEmpty(s) ? antiEntropyPeriod : Integer.parseInt(s.trim());
    } catch (NumberFormatException | ClassCastException e) {
        // Any malformed value falls back to defaults for every setting.
        newPoolSize = MESSAGE_HANDLER_THREAD_POOL_SIZE_DEFAULT;
        newBackupPeriod = BACKUP_PERIOD_MILLIS_DEFAULT;
        newBackupCount = MAX_BACKUP_COUNT_DEFAULT;
        newAntiEntropyPeriod = ANTI_ENTROPY_PERIOD_MILLIS_DEFAULT;
    }
    if (newBackupPeriod != backupPeriod) {
        backupPeriod = newBackupPeriod;
        flowTable.setBackupPeriod(newBackupPeriod);
    }
    if (newAntiEntropyPeriod != antiEntropyPeriod) {
        antiEntropyPeriod = newAntiEntropyPeriod;
        flowTable.setAntiEntropyPeriod(newAntiEntropyPeriod);
    }
    if (newPoolSize != msgHandlerPoolSize) {
        msgHandlerPoolSize = newPoolSize;
        ExecutorService oldMsgHandler = messageHandlingExecutor;
        messageHandlingExecutor = Executors.newFixedThreadPool(
                msgHandlerPoolSize, groupedThreads("onos/store/flow", "message-handlers", log));
        // replace previously registered handlers.
        registerMessageHandlers(messageHandlingExecutor);
        oldMsgHandler.shutdown();
    }
    if (backupCount != newBackupCount) {
        backupCount = newBackupCount;
    }
    logConfig("Reconfigured");
}
/**
 * Subscribes this node to the flow-store cluster message subjects, dispatching
 * incoming requests on the given executor. For request/response subjects the
 * serializer decodes the payload and encodes the handler's reply.
 *
 * @param executor executor on which handlers are invoked
 */
private void registerMessageHandlers(ExecutorService executor) {
    clusterCommunicator.addSubscriber(APPLY_BATCH_FLOWS, new OnStoreBatch(), executor);
    clusterCommunicator.<FlowRuleBatchEvent>addSubscriber(
            REMOTE_APPLY_COMPLETED, serializer::decode, this::notifyDelegate, executor);
    clusterCommunicator.addSubscriber(
            GET_FLOW_ENTRY, serializer::decode, flowTable::getFlowEntry, serializer::encode, executor);
    clusterCommunicator.addSubscriber(
            GET_DEVICE_FLOW_ENTRIES, serializer::decode, flowTable::getFlowEntries, serializer::encode, executor);
    // Count requests carry a (device, state) pair; a null state means "all rules".
    clusterCommunicator.<Pair<DeviceId, FlowEntryState>, Integer>addSubscriber(
            GET_DEVICE_FLOW_COUNT,
            serializer::decode,
            p -> flowTable.getFlowRuleCount(p.getLeft(), p.getRight()),
            serializer::encode, executor);
    clusterCommunicator.addSubscriber(
            REMOVE_FLOW_ENTRY, serializer::decode, this::removeFlowRuleInternal, serializer::encode, executor);
}
/**
 * Removes all cluster message subscriptions registered by this store.
 * Also drops FLOW_TABLE_BACKUP, which is registered elsewhere
 * (presumably by the per-device flow tables -- confirm).
 */
private void unregisterMessageHandlers() {
    clusterCommunicator.removeSubscriber(REMOVE_FLOW_ENTRY);
    clusterCommunicator.removeSubscriber(GET_DEVICE_FLOW_ENTRIES);
    clusterCommunicator.removeSubscriber(GET_DEVICE_FLOW_COUNT);
    clusterCommunicator.removeSubscriber(GET_FLOW_ENTRY);
    clusterCommunicator.removeSubscriber(APPLY_BATCH_FLOWS);
    clusterCommunicator.removeSubscriber(REMOTE_APPLY_COMPLETED);
    clusterCommunicator.removeSubscriber(FLOW_TABLE_BACKUP);
}
/**
 * Logs the current configuration values with the given prefix
 * (e.g. "Started", "Reconfigured").
 *
 * @param prefix context string prepended to the log line
 */
private void logConfig(String prefix) {
    log.info("{} with msgHandlerPoolSize = {}; backupPeriod = {}, backupCount = {}",
             prefix, msgHandlerPoolSize, backupPeriod, backupCount);
}
@Override
public int getFlowRuleCount() {
    // Cluster-wide total: sum the per-device counts across all known devices.
    return Streams.stream(deviceService.getDevices())
            .parallel()
            .mapToInt(d -> getFlowRuleCount(d.id()))
            .sum();
}
@Override
public int getFlowRuleCount(DeviceId deviceId) {
    // Count all rules regardless of state (null state means "no filter").
    return getFlowRuleCount(deviceId, null);
}
@Override
public int getFlowRuleCount(DeviceId deviceId, FlowEntryState state) {
    final NodeId master = mastershipService.getMasterFor(deviceId);
    if (master == null) {
        // No master known for the device; nothing authoritative to report.
        log.debug("Failed to getFlowRuleCount: No master for {}", deviceId);
        return 0;
    }
    if (Objects.equals(local, master)) {
        // We are the master: answer straight from the local table.
        return flowTable.getFlowRuleCount(deviceId, state);
    }
    // Another node is the master: ask it, falling back to 0 on timeout.
    log.trace("Forwarding getFlowRuleCount to master {} for device {}", master, deviceId);
    return Tools.futureGetOrElse(
            clusterCommunicator.sendAndReceive(
                    Pair.of(deviceId, state),
                    GET_DEVICE_FLOW_COUNT,
                    serializer::encode,
                    serializer::decode,
                    master),
            FLOW_RULE_STORE_TIMEOUT_MILLIS,
            TimeUnit.MILLISECONDS,
            0);
}
@Override
public FlowEntry getFlowEntry(FlowRule rule) {
    final NodeId master = mastershipService.getMasterFor(rule.deviceId());
    if (master == null) {
        log.debug("Failed to getFlowEntry: No master for {}", rule.deviceId());
        return null;
    }
    if (Objects.equals(local, master)) {
        // Local node is master: serve from the local flow table.
        return flowTable.getFlowEntry(rule);
    }
    // Remote master: forward the lookup, null on timeout.
    log.trace("Forwarding getFlowEntry to {}, which is the primary (master) for device {}",
              master, rule.deviceId());
    return Tools.futureGetOrElse(
            clusterCommunicator.sendAndReceive(rule,
                    ECFlowRuleStoreMessageSubjects.GET_FLOW_ENTRY,
                    serializer::encode,
                    serializer::decode,
                    master),
            FLOW_RULE_STORE_TIMEOUT_MILLIS,
            TimeUnit.MILLISECONDS,
            null);
}
@Override
public Iterable<FlowEntry> getFlowEntries(DeviceId deviceId) {
    final NodeId master = mastershipService.getMasterFor(deviceId);
    if (master == null) {
        log.debug("Failed to getFlowEntries: No master for {}", deviceId);
        return Collections.emptyList();
    }
    if (Objects.equals(local, master)) {
        // Local node is master: serve directly from the local table.
        return flowTable.getFlowEntries(deviceId);
    }
    // Remote master: forward the request, empty list on timeout.
    log.trace("Forwarding getFlowEntries to {}, which is the primary (master) for device {}",
              master, deviceId);
    return Tools.futureGetOrElse(
            clusterCommunicator.sendAndReceive(deviceId,
                    ECFlowRuleStoreMessageSubjects.GET_DEVICE_FLOW_ENTRIES,
                    serializer::encode,
                    serializer::decode,
                    master),
            FLOW_RULE_STORE_TIMEOUT_MILLIS,
            TimeUnit.MILLISECONDS,
            Collections.emptyList());
}
@Override
public void storeFlowRule(FlowRule rule) {
    // A single-rule store is just a one-entry ADD batch.
    FlowRuleBatchEntry entry = new FlowRuleBatchEntry(FlowRuleOperation.ADD, rule);
    storeBatch(new FlowRuleBatchOperation(
            Collections.singletonList(entry), rule.deviceId(), idGenerator.getNewId()));
}
@Override
public void storeBatch(FlowRuleBatchOperation operation) {
    // An empty batch trivially "completes" immediately.
    if (operation.getOperations().isEmpty()) {
        notifyDelegate(FlowRuleBatchEvent.completed(
                new FlowRuleBatchRequest(operation.id(), Collections.emptySet()),
                new CompletedBatchOperation(true, Collections.emptySet(), operation.deviceId())));
        return;
    }

    DeviceId deviceId = operation.deviceId();
    NodeId master = mastershipService.getMasterFor(deviceId);

    if (master == null) {
        // No master: fail the whole batch, reporting every target rule as failed.
        log.warn("No master for {} ", deviceId);

        Set<FlowRule> allFailures = operation.getOperations()
                .stream()
                .map(op -> op.target())
                .collect(Collectors.toSet());

        notifyDelegate(FlowRuleBatchEvent.completed(
                new FlowRuleBatchRequest(operation.id(), Collections.emptySet()),
                new CompletedBatchOperation(false, allFailures, deviceId)));
        return;
    }

    if (Objects.equals(local, master)) {
        // We are the master: apply the batch locally.
        storeBatchInternal(operation);
        return;
    }

    // Otherwise hand the batch to the master node; on send failure, fail the
    // batch the same way as the no-master case above.
    log.trace("Forwarding storeBatch to {}, which is the primary (master) for device {}",
              master, deviceId);

    clusterCommunicator.unicast(operation,
                                APPLY_BATCH_FLOWS,
                                serializer::encode,
                                master)
            .whenComplete((result, error) -> {
                if (error != null) {
                    log.warn("Failed to storeBatch: {} to {}", operation, master, error);

                    Set<FlowRule> allFailures = operation.getOperations()
                            .stream()
                            .map(op -> op.target())
                            .collect(Collectors.toSet());

                    notifyDelegate(FlowRuleBatchEvent.completed(
                            new FlowRuleBatchRequest(operation.id(), Collections.emptySet()),
                            new CompletedBatchOperation(false, allFailures, deviceId)));
                }
            });
}
/**
 * Applies a batch operation to the local flow table (master-only path) and
 * notifies the delegate of the operations that still require provider action.
 *
 * @param operation the batch to apply locally
 */
private void storeBatchInternal(FlowRuleBatchOperation operation) {
    final DeviceId did = operation.deviceId();
    //final Collection<FlowEntry> ft = flowTable.getFlowEntries(did);
    Set<FlowRuleBatchEntry> currentOps = updateStoreInternal(operation);
    if (currentOps.isEmpty()) {
        // Nothing left to push to the device; report the batch as complete.
        batchOperationComplete(FlowRuleBatchEvent.completed(
                new FlowRuleBatchRequest(operation.id(), Collections.emptySet()),
                new CompletedBatchOperation(true, Collections.emptySet(), did)));
        return;
    }

    // Remaining ops must be programmed on the device; hand them to the delegate.
    notifyDelegate(FlowRuleBatchEvent.requested(new
            FlowRuleBatchRequest(operation.id(),
                                 currentOps), operation.deviceId()));
}
/**
 * Applies each batch entry to the local flow table and returns the entries
 * that still need to be programmed on the device.
 * <p>
 * ADD/MODIFY entries are stored and always returned. REMOVE entries are
 * returned only if a stored rule existed to be marked PENDING_REMOVE;
 * otherwise the table update yields null and the entry is filtered out.
 *
 * @param operation the batch whose entries to apply
 * @return the subset of entries requiring device programming
 */
private Set<FlowRuleBatchEntry> updateStoreInternal(FlowRuleBatchOperation operation) {
    return operation.getOperations().stream().map(
            op -> {
                StoredFlowEntry entry;
                switch (op.operator()) {
                    case ADD:
                        entry = new DefaultFlowEntry(op.target());
                        log.debug("Adding flow rule: {}", entry);
                        flowTable.add(entry);
                        return op;
                    case MODIFY:
                        entry = new DefaultFlowEntry(op.target());
                        log.debug("Updating flow rule: {}", entry);
                        flowTable.update(entry);
                        return op;
                    case REMOVE:
                        // Mark the stored rule pending-remove; returns null if absent.
                        return flowTable.update(op.target(), stored -> {
                            stored.setState(FlowEntryState.PENDING_REMOVE);
                            log.debug("Setting state of rule to pending remove: {}", stored);
                            return op;
                        });
                    default:
                        log.warn("Unknown flow operation operator: {}", op.operator());
                }
                return null;
            }
    ).filter(Objects::nonNull).collect(Collectors.toSet());
}
@Override
public void deleteFlowRule(FlowRule rule) {
    // A single-rule delete is just a one-entry REMOVE batch.
    FlowRuleBatchEntry entry = new FlowRuleBatchEntry(FlowRuleOperation.REMOVE, rule);
    storeBatch(new FlowRuleBatchOperation(
            Collections.singletonList(entry), rule.deviceId(), idGenerator.getNewId()));
}
@Override
public FlowRuleEvent pendingFlowRule(FlowEntry rule) {
    // Only the master for the rule's device may touch stored state.
    if (mastershipService.isLocalMaster(rule.deviceId())) {
        return flowTable.update(rule, stored -> {
            if (stored.state() == FlowEntryState.PENDING_ADD) {
                // NOTE(review): re-asserts the state the entry already has;
                // looks like a no-op write -- confirm whether a different
                // target state was intended.
                stored.setState(FlowEntryState.PENDING_ADD);
                return new FlowRuleEvent(Type.RULE_UPDATED, rule);
            }
            return null;
        });
    }
    // Not the master (or no stored entry): nothing to report.
    return null;
}
@Override
public FlowRuleEvent addOrUpdateFlowRule(FlowEntry rule) {
    final NodeId master = mastershipService.getMasterFor(rule.deviceId());
    if (!Objects.equals(local, master)) {
        // Only the device master may mutate stored rule state.
        log.warn("Tried to update FlowRule {} state,"
                + " while the Node was not the master.", rule);
        return null;
    }
    return addOrUpdateFlowRuleInternal(rule);
}
/**
 * Refreshes the stored entry's statistics from the given rule, promoting a
 * PENDING_ADD entry to ADDED. If no stored entry exists, the rule is added
 * to the table and null is returned.
 *
 * @param rule the entry carrying fresh stats from the device
 * @return RULE_ADDED or RULE_UPDATED event, or null if the rule was newly added
 */
private FlowRuleEvent addOrUpdateFlowRuleInternal(FlowEntry rule) {
    FlowRuleEvent event = flowTable.update(rule, stored -> {
        // Copy the live counters onto the stored entry.
        stored.setBytes(rule.bytes());
        stored.setLife(rule.life(TimeUnit.NANOSECONDS), TimeUnit.NANOSECONDS);
        stored.setLiveType(rule.liveType());
        stored.setPackets(rule.packets());
        stored.setLastSeen();
        if (stored.state() == FlowEntryState.PENDING_ADD) {
            // First sighting on the device confirms the add.
            stored.setState(FlowEntryState.ADDED);
            return new FlowRuleEvent(Type.RULE_ADDED, rule);
        }
        return new FlowRuleEvent(Type.RULE_UPDATED, rule);
    });
    if (event != null) {
        return event;
    }

    // No stored entry existed: insert it now.
    // TODO: Confirm if this behavior is correct. See SimpleFlowRuleStore
    // TODO: also update backup if the behavior is correct.
    flowTable.add(rule);
    return null;
}
@Override
public FlowRuleEvent removeFlowRule(FlowEntry rule) {
    final DeviceId deviceId = rule.deviceId();
    NodeId master = mastershipService.getMasterFor(deviceId);

    // Fast path checked first: if we are the master, remove locally.
    // (When master is null, Objects.equals(local, null) is false for a
    // non-null local id, so we fall through to the null check below.)
    if (Objects.equals(local, master)) {
        // bypass and handle it locally
        return removeFlowRuleInternal(rule);
    }

    if (master == null) {
        log.warn("Failed to removeFlowRule: No master for {}", deviceId);
        // TODO: revisit if this should be null (="no-op") or Exception
        return null;
    }

    // Remote master: forward the removal and wait for the resulting event.
    log.trace("Forwarding removeFlowRule to {}, which is the master for device {}",
              master, deviceId);

    return Tools.futureGetOrElse(clusterCommunicator.sendAndReceive(
            rule,
            REMOVE_FLOW_ENTRY,
            serializer::encode,
            serializer::decode,
            master),
                                 FLOW_RULE_STORE_TIMEOUT_MILLIS,
                                 TimeUnit.MILLISECONDS,
                                 null);
}
private FlowRuleEvent removeFlowRuleInternal(FlowEntry rule) {
    // This is where one could mark a rule as removed and still keep it in the store.
    final FlowEntry removed = flowTable.remove(rule);
    log.debug("Removed flow rule: {}", removed);
    // rule may be partial rule that is missing treatment, we should use rule from store instead
    if (removed == null) {
        return null;
    }
    return new FlowRuleEvent(RULE_REMOVED, removed);
}
@Override
public void purgeFlowRule(DeviceId deviceId) {
    // Delegates to the internal table, which purges or closes the
    // per-device table depending on whether the device still exists.
    flowTable.purgeFlowRule(deviceId);
}
@Override
public void purgeFlowRules() {
    // Drops every per-device flow table held by this store.
    flowTable.purgeFlowRules();
}
@Override
public void batchOperationComplete(FlowRuleBatchEvent event) {
    //FIXME: need a per device pending response
    NodeId requestor = pendingResponses.remove(event.subject().batchId());
    if (requestor != null) {
        // The batch was submitted by a peer node: relay completion back to it.
        // TODO check unicast return value
        clusterCommunicator.unicast(event, REMOTE_APPLY_COMPLETED, serializer::encode, requestor);
        //error log: log.warn("Failed to respond to peer for batch operation result");
    } else {
        // Locally initiated batch: notify our own delegate.
        notifyDelegate(event);
    }
}
/**
 * Handles APPLY_BATCH_FLOWS messages forwarded by peer nodes: applies the
 * batch locally if this node is still the device master, otherwise responds
 * with an all-failed result so the sender can react to the mastership change.
 */
private final class OnStoreBatch implements ClusterMessageHandler {

    @Override
    public void handle(final ClusterMessage message) {
        FlowRuleBatchOperation operation = serializer.decode(message.payload());
        log.debug("received batch request {}", operation);

        final DeviceId deviceId = operation.deviceId();
        NodeId master = mastershipService.getMasterFor(deviceId);
        if (!Objects.equals(local, master)) {
            // Mastership moved since the sender forwarded the batch.
            Set<FlowRule> failures = new HashSet<>(operation.size());
            for (FlowRuleBatchEntry op : operation.getOperations()) {
                failures.add(op.target());
            }
            CompletedBatchOperation allFailed = new CompletedBatchOperation(false, failures, deviceId);
            // This node is no longer the master, respond as all failed.
            // TODO: we might want to wrap response in envelope
            // to distinguish sw programming failure and hand over
            // it make sense in the latter case to retry immediately.
            message.respond(serializer.encode(allFailed));
            return;
        }

        // Remember who asked so batchOperationComplete() can reply to them.
        pendingResponses.put(operation.id(), message.sender());
        storeBatchInternal(operation);
    }
}
/**
 * Facade over the per-device {@link DeviceFlowTable}s. Creates tables lazily
 * on first access (or eagerly on DEVICE_ADDED events) and bridges the store's
 * synchronous API onto the tables' asynchronous one, bounding each call with
 * FLOW_RULE_STORE_TIMEOUT_MILLIS.
 */
private class InternalFlowTable implements DeviceListener {
    private final Map<DeviceId, DeviceFlowTable> flowTables = Maps.newConcurrentMap();

    @Override
    public void event(DeviceEvent event) {
        // Eagerly create a flow table when a device first appears.
        if (event.type() == DeviceEvent.Type.DEVICE_ADDED) {
            addDevice(event.subject().id());
        }
    }

    /**
     * Adds the given device to the flow table.
     *
     * @param deviceId the device to add to the table
     */
    public void addDevice(DeviceId deviceId) {
        flowTables.computeIfAbsent(deviceId, id -> new DeviceFlowTable(
                id,
                clusterService,
                clusterCommunicator,
                new InternalLifecycleManager(id),
                backupSenderExecutor,
                backupPeriod,
                antiEntropyPeriod));
    }

    /**
     * Sets the flow table backup period.
     *
     * @param backupPeriod the flow table backup period
     */
    void setBackupPeriod(int backupPeriod) {
        flowTables.values().forEach(flowTable -> flowTable.setBackupPeriod(backupPeriod));
    }

    /**
     * Sets the flow table anti-entropy period.
     *
     * @param antiEntropyPeriod the flow table anti-entropy period
     */
    void setAntiEntropyPeriod(int antiEntropyPeriod) {
        flowTables.values().forEach(flowTable -> flowTable.setAntiEntropyPeriod(antiEntropyPeriod));
    }

    /**
     * Returns the flow table for a specific device, creating it on demand.
     *
     * @param deviceId the device identifier
     * @return the flow table for the given device
     */
    private DeviceFlowTable getFlowTable(DeviceId deviceId) {
        // Plain get() first to avoid computeIfAbsent locking on the hot path.
        DeviceFlowTable flowTable = flowTables.get(deviceId);
        return flowTable != null ? flowTable : flowTables.computeIfAbsent(deviceId, id -> new DeviceFlowTable(
                deviceId,
                clusterService,
                clusterCommunicator,
                new InternalLifecycleManager(deviceId),
                backupSenderExecutor,
                backupPeriod,
                antiEntropyPeriod));
    }

    /**
     * Returns the flow rule count for the given device.
     *
     * @param deviceId the device for which to return the flow rule count
     * @return the flow rule count for the given device
     */
    public int getFlowRuleCount(DeviceId deviceId) {
        return getFlowTable(deviceId).count();
    }

    /**
     * Returns the count of flow rules in the given state for the given device.
     *
     * @param deviceId the device for which to return the flow rule count
     * @param state the state to filter by; null counts all rules
     * @return the flow rule count for the given device
     */
    public int getFlowRuleCount(DeviceId deviceId, FlowEntryState state) {
        if (state == null) {
            return getFlowRuleCount(deviceId);
        }
        return (int) getFlowTable(deviceId)
                .getFlowEntries()
                .stream()
                .filter(rule -> rule.state() == state)
                .count();
    }

    /**
     * Returns the flow entry for the given rule.
     *
     * @param rule the rule for which to return the flow entry
     * @return the flow entry for the given rule
     */
    public StoredFlowEntry getFlowEntry(FlowRule rule) {
        return getFlowTable(rule.deviceId()).getFlowEntry(rule);
    }

    /**
     * Returns the set of flow entries for the given device.
     *
     * @param deviceId the device for which to lookup flow entries
     * @return the set of flow entries for the given device
     */
    public Set<FlowEntry> getFlowEntries(DeviceId deviceId) {
        return getFlowTable(deviceId).getFlowEntries();
    }

    /**
     * Adds the given flow rule, blocking up to the store timeout.
     *
     * @param rule the rule to add
     */
    public void add(FlowEntry rule) {
        Tools.futureGetOrElse(
                getFlowTable(rule.deviceId()).add(rule),
                FLOW_RULE_STORE_TIMEOUT_MILLIS,
                TimeUnit.MILLISECONDS,
                null);
    }

    /**
     * Updates the given flow rule, blocking up to the store timeout.
     *
     * @param rule the rule to update
     */
    public void update(FlowEntry rule) {
        Tools.futureGetOrElse(
                getFlowTable(rule.deviceId()).update(rule),
                FLOW_RULE_STORE_TIMEOUT_MILLIS,
                TimeUnit.MILLISECONDS,
                null);
    }

    /**
     * Applies the given update function to the stored entry for the rule.
     *
     * @param rule the rule whose stored entry to update
     * @param function the update function to apply
     * @param <T> the type produced by the update function
     * @return the function's result, or {@code null} if the rule was not updated
     *         (including on timeout)
     */
    public <T> T update(FlowRule rule, Function<StoredFlowEntry, T> function) {
        return Tools.futureGetOrElse(
                getFlowTable(rule.deviceId()).update(rule, function),
                FLOW_RULE_STORE_TIMEOUT_MILLIS,
                TimeUnit.MILLISECONDS,
                null);
    }

    /**
     * Removes the given flow rule, blocking up to the store timeout.
     *
     * @param rule the rule to remove
     * @return the removed entry, or {@code null} if none (or on timeout)
     */
    public FlowEntry remove(FlowEntry rule) {
        return Tools.futureGetOrElse(
                getFlowTable(rule.deviceId()).remove(rule),
                FLOW_RULE_STORE_TIMEOUT_MILLIS,
                TimeUnit.MILLISECONDS,
                null);
    }

    /**
     * Purges flow rules for the given device.
     *
     * @param deviceId the device for which to purge flow rules
     */
    public void purgeFlowRule(DeviceId deviceId) {
        // If the device is still present in the store, purge the underlying DeviceFlowTable.
        // Otherwise, remove the DeviceFlowTable and unregister message handlers.
        if (deviceService.getDevice(deviceId) != null) {
            DeviceFlowTable flowTable = flowTables.get(deviceId);
            if (flowTable != null) {
                flowTable.purge();
            }
        } else {
            DeviceFlowTable flowTable = flowTables.remove(deviceId);
            if (flowTable != null) {
                flowTable.close();
            }
        }
    }

    /**
     * Purges all flow rules from the table, closing each device table.
     */
    public void purgeFlowRules() {
        Iterator<DeviceFlowTable> iterator = flowTables.values().iterator();
        while (iterator.hasNext()) {
            iterator.next().close();
            iterator.remove();
        }
    }
}
@Override
public FlowRuleEvent updateTableStatistics(DeviceId deviceId, List<TableStatisticsEntry> tableStats) {
    // Replace the cached per-device table statistics; no event is generated.
    deviceTableStats.put(deviceId, tableStats);
    return null;
}
@Override
public Iterable<TableStatisticsEntry> getTableStatistics(DeviceId deviceId) {
    final NodeId master = mastershipService.getMasterFor(deviceId);
    if (master == null) {
        // No master means no authoritative statistics.
        log.debug("Failed to getTableStats: No master for {}", deviceId);
        return Collections.emptyList();
    }
    // Return a defensive immutable snapshot of the cached stats, if any.
    List<TableStatisticsEntry> stats = deviceTableStats.get(deviceId);
    return stats == null ? Collections.emptyList() : ImmutableList.copyOf(stats);
}
@Override
public long getActiveFlowRuleCount(DeviceId deviceId) {
    // Sum the active-entry counters across all table statistics for the device.
    long total = 0;
    for (TableStatisticsEntry entry : getTableStatistics(deviceId)) {
        total += entry.activeFlowEntries();
    }
    return total;
}
/**
 * Listener on the eventually-consistent table-statistics map.
 * Currently a no-op placeholder.
 */
private class InternalTableStatsListener
    implements EventuallyConsistentMapListener<DeviceId, List<TableStatisticsEntry>> {
    @Override
    public void event(EventuallyConsistentMapEvent<DeviceId,
                      List<TableStatisticsEntry>> event) {
        //TODO: Generate an event to listeners (do we need?)
    }
}
/**
 * Device lifecycle manager implementation.
 * <p>
 * Tracks replica (master/backup) changes and mastership-term activations for
 * a single device, translating them into {@link LifecycleEvent}s for the
 * device's flow table.
 */
private final class InternalLifecycleManager
    extends AbstractListenerManager<LifecycleEvent, LifecycleEventListener>
    implements LifecycleManager, ReplicaInfoEventListener, MapEventListener<DeviceId, Long> {

    private final DeviceId deviceId;
    // Latest known replica info; volatile because it is read and written
    // from listener callbacks on different threads.
    private volatile DeviceReplicaInfo replicaInfo;

    InternalLifecycleManager(DeviceId deviceId) {
        this.deviceId = deviceId;
        // Register listeners before taking the initial snapshot so no
        // intervening change is missed.
        replicaInfoManager.addListener(this);
        mastershipTermLifecycles.addListener(this);
        replicaInfo = toDeviceReplicaInfo(replicaInfoManager.getReplicaInfoFor(deviceId));
    }

    @Override
    public DeviceReplicaInfo getReplicaInfo() {
        return replicaInfo;
    }

    @Override
    public void activate(long term) {
        // Record the term as active only if it is still the current term.
        final ReplicaInfo replicaInfo = replicaInfoManager.getReplicaInfoFor(deviceId);
        if (replicaInfo != null && replicaInfo.term() == term) {
            mastershipTermLifecycles.put(deviceId, term);
        }
    }

    @Override
    public void event(ReplicaInfoEvent event) {
        // Only react to replica changes for our own device.
        if (event.subject().equals(deviceId)) {
            onReplicaInfoChange(event.replicaInfo());
        }
    }

    @Override
    public void event(MapEvent<DeviceId, Long> event) {
        // A new term value in the lifecycle map signals a term activation.
        if (event.key().equals(deviceId) && event.newValue() != null) {
            onActivate(event.newValue().value());
        }
    }

    /**
     * Handles a term activation event, emitting TERM_ACTIVE if the term
     * is still current.
     *
     * @param term the term that was activated
     */
    private void onActivate(long term) {
        final ReplicaInfo replicaInfo = replicaInfoManager.getReplicaInfoFor(deviceId);
        if (replicaInfo != null && replicaInfo.term() == term) {
            NodeId master = replicaInfo.master().orElse(null);
            // Only the first backupCount backups participate.
            List<NodeId> backups = replicaInfo.backups()
                .subList(0, Math.min(replicaInfo.backups().size(), backupCount));
            listenerRegistry.process(new LifecycleEvent(
                LifecycleEvent.Type.TERM_ACTIVE,
                new DeviceReplicaInfo(term, master, backups)));
        }
    }

    /**
     * Handles a replica info change event: a higher term ends the old term
     * and starts a new one; the same term is reported as an update.
     * Synchronized so term transitions are observed in order.
     *
     * @param replicaInfo the updated replica info
     */
    private synchronized void onReplicaInfoChange(ReplicaInfo replicaInfo) {
        DeviceReplicaInfo oldReplicaInfo = this.replicaInfo;
        this.replicaInfo = toDeviceReplicaInfo(replicaInfo);
        if (oldReplicaInfo == null || oldReplicaInfo.term() < replicaInfo.term()) {
            if (oldReplicaInfo != null) {
                listenerRegistry.process(new LifecycleEvent(LifecycleEvent.Type.TERM_END, oldReplicaInfo));
            }
            listenerRegistry.process(new LifecycleEvent(LifecycleEvent.Type.TERM_START, this.replicaInfo));
        } else if (oldReplicaInfo.term() == replicaInfo.term()) {
            listenerRegistry.process(new LifecycleEvent(LifecycleEvent.Type.TERM_UPDATE, this.replicaInfo));
        }
    }

    /**
     * Converts the given replica info into a {@link DeviceReplicaInfo} instance,
     * truncating the backup list to at most backupCount nodes.
     *
     * @param replicaInfo the replica info to convert
     * @return the converted replica info
     */
    private DeviceReplicaInfo toDeviceReplicaInfo(ReplicaInfo replicaInfo) {
        NodeId master = replicaInfo.master().orElse(null);
        List<NodeId> backups = replicaInfo.backups()
            .subList(0, Math.min(replicaInfo.backups().size(), backupCount));
        return new DeviceReplicaInfo(replicaInfo.term(), master, backups);
    }

    @Override
    public void close() {
        // Detach from both listener sources; mirror of the constructor.
        replicaInfoManager.removeListener(this);
        mastershipTermLifecycles.removeListener(this);
    }
}
/**
 * Immutable message pairing a device with an optional flow-entry state,
 * used for flow-count requests. A null state means "count all rules".
 */
private static class CountMessage {
    private final DeviceId deviceId;
    private final FlowEntryState state;

    CountMessage(DeviceId deviceId, FlowEntryState state) {
        this.deviceId = deviceId;
        this.state = state;
    }
}
}
|
# frozen_string_literal: true

# Renders the @attachments collection as a JSON array, delegating each
# element to the attachments/attachment partial (exposed as `attachment`).
json.array! @attachments, :partial => 'attachments/attachment', :as => :attachment
|
#!/usr/bin/env python
# coding=utf-8
"""Minimal argparse demo: declare a few options, print help, then parse."""
import argparse

parser = argparse.ArgumentParser('demo')
# NOTE(review): argparse ignores `default` for required arguments -- the user
# must always pass --model_path, so default='/home' is dead; confirm intent.
parser.add_argument('--model_path', type=str, default='/home', required=True,
                    help='the path of checkpoint file')
# dest='model' means the parsed value is read as args.model, not args.model_name.
parser.add_argument('--model_name', type=str, default='yolov3',
                    choices=['yolov3', 'yolov5', 'fcos'], dest='model')
parser.add_argument('--version', action='version', version='{} v1.0'.format(parser.prog))
parser.print_help()  # printed unconditionally on every run, before parsing
# parse_known_args() tolerates unrecognized flags and returns them separately.
args, unparsed = parser.parse_known_args()
print(args)
print(unparsed)
|
# Omnibus software definition for the Mozilla CA certificate bundle.
name "cacerts"
default_version "2016-04-20"

# Default source URL; interpolates the active version string.
source url: "https://curl.haxx.se/ca/cacert-#{version}.pem"

version "2016-04-20" do
  source md5: "782dcde8f5d53b1b9e888fdf113c42b9"
end

# Older release uses dotted version in the filename, so it overrides the URL.
version "2016.01.20" do
  source md5: "06629db7f712ff3a75630eccaecc1fe4"
  source url: "https://curl.haxx.se/ca/cacert-2016-01-20.pem"
end

relative_path "cacerts-#{version}"

build do
  mkdir "#{install_dir}/embedded/ssl/certs"
  # Glob matches whichever cacert-<version>.pem was fetched; installed under a
  # stable name so consumers don't need to know the bundle version.
  copy "#{project_dir}/cacert*.pem", "#{install_dir}/embedded/ssl/certs/cacert.pem"

  # Windows does not support symlinks
  unless windows?
    link "#{install_dir}/embedded/ssl/certs/cacert.pem", "#{install_dir}/embedded/ssl/cert.pem"
    block { File.chmod(0644, "#{install_dir}/embedded/ssl/certs/cacert.pem") }
  end
end
|
import 'dart:io';
import 'package:git/git.dart';
import 'package:path/path.dart' as p;
import 'package:process_run/shell.dart';
/// Push a git branch to a remote (pushes `gh-pages` by default).
Future<void> publish({
String directory = 'build',
String branch = 'gh-pages',
String message = 'Updates',
String remote = 'origin',
bool push = true,
bool force = false,
}) async {
try {
final gitDir = await GitDir.fromExisting(p.current);
if (directory == 'build') {
directory = p.join(p.current, directory, 'web');
var exists = await Directory(directory).exists();
if (!exists) await run('flutter build web');
} else {
directory = p.join(p.current, directory);
}
await gitDir.updateBranchWithDirectoryContents(branch, directory, message);
if (push) {
var args = ['push', remote, branch];
if (force) args.add('--force');
await gitDir.runCommand(args);
}
print('Published');
} catch (e) {
print(e);
}
}
|
import {
BTC_POOL_NAME,
ChainId,
GW_V1_1_GAS,
POOLS_MAP,
PoolName,
TRANSACTION_TYPES,
Token,
isLegacySwapABIPool,
} from "../constants"
import { enqueuePromiseToast, enqueueToast } from "../components/Toastify"
import { formatDeadlineToNumber, getContract } from "../utils"
import {
useAllContracts,
useLPTokenContract,
useSwapContract,
} from "./useContract"
import { useDispatch, useSelector } from "react-redux"
import { AppState } from "../state"
import { BigNumber } from "@ethersproject/bignumber"
import { Erc20 } from "../../types/ethers-contracts/Erc20"
import { GasPrices } from "../state/user"
import { IS_PRODUCTION } from "../utils/environment"
import META_SWAP_ABI from "../constants/abis/metaSwap.json"
import { MetaSwap } from "../../types/ethers-contracts/MetaSwap"
import { NumberInputState } from "../utils/numberInputState"
import { SwapFlashLoan } from "../../types/ethers-contracts/SwapFlashLoan"
import { SwapFlashLoanNoWithdrawFee } from "../../types/ethers-contracts/SwapFlashLoanNoWithdrawFee"
import { SwapGuarded } from "../../types/ethers-contracts/SwapGuarded"
import checkAndApproveTokenForTrade from "../utils/checkAndApproveTokenForTrade"
import { parseUnits } from "@ethersproject/units"
import { subtractSlippage } from "../utils/slippage"
import { updateLastTransactionTimes } from "../state/application"
import { useActiveWeb3React } from "."
import { useMemo } from "react"
/** Maps a pool token's symbol to the user-entered deposit amount for it. */
interface ApproveAndDepositStateArgument {
  [tokenSymbol: string]: NumberInputState
}
/**
 * Hook returning an async callback that approves each pool token (as needed)
 * and deposits the user-entered amounts into the given pool's swap contract.
 *
 * The callback takes the per-token amounts and an optional flag to deposit
 * wrapped (meta-pool underlying) tokens instead. Errors are logged and shown
 * as a toast; the promise itself resolves either way.
 */
export function useApproveAndDeposit(
  poolName: PoolName,
): (
  state: ApproveAndDepositStateArgument,
  shouldDepositWrapped?: boolean,
) => Promise<void> {
  const dispatch = useDispatch()
  const swapContract = useSwapContract(poolName)
  const lpTokenContract = useLPTokenContract(poolName)
  const tokenContracts = useAllContracts()
  const { account, chainId, library } = useActiveWeb3React()
  const { gasStandard, gasFast, gasInstant } = useSelector(
    (state: AppState) => state.application,
  )
  // Explicit gas limit used on Godwoken v1.1, where estimation is unreliable. TODO: revisit.
  const options = {
    gasLimit: GW_V1_1_GAS,
  }
  const {
    slippageCustom,
    slippageSelected,
    gasPriceSelected,
    gasCustom,
    transactionDeadlineCustom,
    transactionDeadlineSelected,
    infiniteApproval,
  } = useSelector((state: AppState) => state.user)
  const POOL = POOLS_MAP[poolName]
  // Meta-pools expose a separate MetaSwap contract for wrapped deposits.
  const metaSwapContract = useMemo(() => {
    if (POOL.metaSwapAddresses && chainId && library) {
      return getContract(
        POOL.metaSwapAddresses?.[chainId],
        META_SWAP_ABI,
        library,
        account ?? undefined,
      ) as MetaSwap
    }
    return null
  }, [chainId, library, POOL.metaSwapAddresses, account])

  return async function approveAndDeposit(
    state: ApproveAndDepositStateArgument,
    shouldDepositWrapped = false,
  ): Promise<void> {
    try {
      if (!account || !chainId) throw new Error("Wallet must be connected")
      if (
        !swapContract ||
        !lpTokenContract ||
        (shouldDepositWrapped && !metaSwapContract)
      )
        throw new Error("Swap contract is not loaded")
      // Wrapped deposits target the meta-pool's underlying tokens and contract.
      const poolTokens = shouldDepositWrapped
        ? (POOL.underlyingPoolTokens as Token[])
        : POOL.poolTokens
      const effectiveSwapContract = shouldDepositWrapped
        ? (metaSwapContract as MetaSwap)
        : swapContract

      // Resolve the user's gas-price preference to a concrete value (gwei).
      let gasPriceUnsafe: string | number | undefined
      if (gasPriceSelected === GasPrices.Custom) {
        gasPriceUnsafe = gasCustom?.valueSafe
      } else if (gasPriceSelected === GasPrices.Fast) {
        gasPriceUnsafe = gasFast
      } else if (gasPriceSelected === GasPrices.Instant) {
        gasPriceUnsafe = gasInstant
      } else {
        gasPriceUnsafe = gasStandard
      }
      // "45" gwei fallback when no price is available.
      const gasPrice = parseUnits(
        gasPriceUnsafe ? String(gasPriceUnsafe) : "45",
        9,
      )
      const approveSingleToken = async (token: Token): Promise<void> => {
        const spendingValue = BigNumber.from(state[token.symbol].valueSafe)
        // Zero amounts need no allowance; missing contracts are skipped.
        if (spendingValue.isZero()) return
        const tokenContract = tokenContracts?.[token.symbol] as Erc20
        if (tokenContract == null) return
        await checkAndApproveTokenForTrade(
          tokenContract,
          effectiveSwapContract.address,
          account,
          spendingValue,
          infiniteApproval,
          gasPrice,
          {
            onTransactionError: () => {
              throw new Error("Your transaction could not be completed")
            },
          },
          chainId,
        )
        return
      }
      // For each token being deposited, check the allowance and approve it if necessary
      // (sequentially off production, concurrently in production).
      if (!IS_PRODUCTION) {
        for (const token of poolTokens) {
          await approveSingleToken(token)
        }
      } else {
        await Promise.all(poolTokens.map((token) => approveSingleToken(token)))
      }

      // The very first deposit into a pool has no meaningful quote: accept any mint.
      const isFirstTransaction = (await lpTokenContract.totalSupply()).isZero()
      let minToMint: BigNumber
      if (isFirstTransaction) {
        minToMint = BigNumber.from("0")
      } else {
        // Legacy swap ABIs take the account as the first argument.
        if (isLegacySwapABIPool(poolName)) {
          minToMint = await (
            effectiveSwapContract as SwapFlashLoan
          ).calculateTokenAmount(
            account,
            poolTokens.map(({ symbol }) => state[symbol].valueSafe),
            true, // deposit boolean
          )
        } else {
          minToMint = await (
            effectiveSwapContract as SwapFlashLoanNoWithdrawFee
          ).calculateTokenAmount(
            poolTokens.map(({ symbol }) => state[symbol].valueSafe),
            true, // deposit boolean
          )
        }
      }

      // Apply the user's slippage tolerance to the quoted mint amount.
      minToMint = subtractSlippage(minToMint, slippageSelected, slippageCustom)

      const deadline = formatDeadlineToNumber(
        transactionDeadlineSelected,
        transactionDeadlineCustom,
      )

      let spendTransaction
      const txnAmounts = poolTokens.map(({ symbol }) => state[symbol].valueSafe)
      // Deadline is an absolute unix timestamp (seconds), minutes from now.
      const txnDeadline = Math.round(
        new Date().getTime() / 1000 + 60 * deadline,
      )
      // Four call shapes: the BTC pool uses the guarded ABI (extra merkle-proof
      // argument), and Godwoken v1.1 additionally needs an explicit gas limit.
      if (poolName === BTC_POOL_NAME && chainId !== ChainId.GW_V1_1) {
        const swapGuardedContract = effectiveSwapContract as SwapGuarded
        spendTransaction = await swapGuardedContract?.addLiquidity(
          txnAmounts,
          minToMint,
          txnDeadline,
          [],
        )
      } else if (poolName === BTC_POOL_NAME && chainId === ChainId.GW_V1_1) {
        const swapGuardedContract = effectiveSwapContract as SwapGuarded
        spendTransaction = await swapGuardedContract?.addLiquidity(
          txnAmounts,
          minToMint,
          txnDeadline,
          [],
          options,
        )
      } else if (chainId === ChainId.GW_V1_1) {
        const swapFlashLoanContract = effectiveSwapContract as SwapFlashLoan
        spendTransaction = await swapFlashLoanContract?.addLiquidity(
          txnAmounts,
          minToMint,
          txnDeadline,
          options,
        )
      } else {
        const swapFlashLoanContract = effectiveSwapContract as SwapFlashLoan
        spendTransaction = await swapFlashLoanContract?.addLiquidity(
          txnAmounts,
          minToMint,
          txnDeadline,
        )
      }

      // Surface progress via toast while waiting for the tx to confirm.
      await enqueuePromiseToast(chainId, spendTransaction.wait(), "deposit", {
        poolName,
      })
      dispatch(
        updateLastTransactionTimes({
          [TRANSACTION_TYPES.DEPOSIT]: Date.now(),
        }),
      )
      return Promise.resolve()
    } catch (e) {
      // Swallow the error after logging + toast; callers get a resolved promise.
      console.error(e)
      enqueueToast(
        "error",
        e instanceof Error ? e.message : "Transaction Failed",
      )
    }
  }
}
|
#!/bin/bash
# install.sh
# This script installs my basic setup for a debian laptop

# Abort on any command failure, including failures inside pipelines.
set -e
set -o pipefail

# Never prompt during apt operations.
export DEBIAN_FRONTEND=noninteractive
# Choose a user account to use for this installation.
# Respects a pre-set TARGET_USER; with exactly one /home entry uses it
# automatically; otherwise prompts interactively. Sets TARGET_USER readonly.
get_user() {
  if [ -z "${TARGET_USER-}" ]; then
    # Candidate users = top-level directories under /home.
    mapfile -t options < <(find /home/* -maxdepth 0 -printf "%f\\n" -type d)
    # if there is only one option just use that user
    if [ "${#options[@]}" -eq "1" ]; then
      readonly TARGET_USER="${options[0]}"
      echo "Using user account: ${TARGET_USER}"
      return
    fi

    # iterate through the user options and print them
    PS3='Which user account should be used? '

    select opt in "${options[@]}"; do
      readonly TARGET_USER=$opt
      break
    done
  fi
}
# Abort unless the script is running as root (EUID 0).
check_is_sudo() {
  if [ "$EUID" -ne 0 ]; then
    echo "Please run as root."
    # Exit non-zero so callers and `set -e` pipelines see the failure;
    # a bare `exit` here would return the status of the last command.
    exit 1
  fi
}
# Install the minimal packages needed to manage apt sources (TLS transport,
# CA certs, key handling) and disable apt translation downloads.
setup_sources_min() {
  apt update
  apt install -y \
    apt-transport-https \
    ca-certificates \
    curl \
    dirmngr \
    lsb-release \
    --no-install-recommends

  # turn off translations, speed up apt update
  mkdir -p /etc/apt/apt.conf.d
  echo 'Acquire::Languages "none";' > /etc/apt/apt.conf.d/99translations
}
# Upgrade the system and install the bare-minimum package set
# (shell, VCS, networking and archive tools), then clean apt caches
# and pull in the custom scripts.
base_min() {
  apt update
  apt -y upgrade

  apt install -y \
    adduser \
    automake \
    bash-completion \
    bc \
    bzip2 \
    ca-certificates \
    coreutils \
    curl \
    dnsutils \
    file \
    findutils \
    gcc \
    git \
    gnupg \
    gnupg2 \
    gnupg-agent \
    grep \
    gzip \
    hostname \
    indent \
    iptables \
    jq \
    less \
    libc6-dev \
    libimobiledevice6 \
    locales \
    lsof \
    make \
    mount \
    net-tools \
    pinentry-curses \
    rxvt-unicode-256color \
    scdaemon \
    silversearcher-ag \
    ssh \
    strace \
    sudo \
    tar \
    tree \
    tzdata \
    usbmuxd \
    unzip \
    xz-utils \
    zip \
    zsh \
    --no-install-recommends

  # Reclaim disk: drop orphaned packages and cached archives.
  apt autoremove
  apt autoclean
  apt clean

  install_scripts
}
# Full source setup; currently identical to the minimal variant.
setup_sources() {
  setup_sources_min;
}
# installs base packages
# the utter bare minimal shit
base() {
  base_min;

  apt update
  apt -y upgrade

  # WSL-specific toolchain installers (run from the repo checkout).
  ./install-nodejs-npm-on-wsl.sh
  ./install-docker-on-wsl.sh
  ./install-kubectl-on-wsl.sh
  # ./install-neovim-on-wsl.sh
  ./install-dotnet-core-on-wsl.sh

  # install tlp with recommends
  # apt install -y tlp tlp-rdw

  setup_sudo

  apt autoremove
  apt autoclean
  apt clean
}
# Grant TARGET_USER sudo rights and systemd group memberships.
setup_sudo() {
  # add user to sudoers
  adduser "$TARGET_USER" sudo

  # add user to systemd groups
  # then you wont need sudo to view logs and shit
  gpasswd -a "$TARGET_USER" systemd-journal
  gpasswd -a "$TARGET_USER" systemd-network
}
# install custom scripts/binaries
# Fetches git-completion matching the installed git, installs oh-my-zsh with
# theme/plugins, switches the login shell to zsh, and sets up vim plugins.
install_scripts() {
  # git completion: fetch the script matching the installed git version.
  # $(...) instead of deprecated backticks.
  GIT_VERSION=$(git --version | awk '{print $3}')
  URL="https://raw.github.com/git/git/v$GIT_VERSION/contrib/completion/git-completion.bash"
  echo "Downloading git-completion for git version: $GIT_VERSION..."
  if ! curl "$URL" --silent --output "$HOME/.git-completion.bash"; then
    echo "ERROR: Couldn't download completion script. Make sure you have a working internet connection." && exit 1
  fi

  # oh-my-zsh install
  sh -c "$(curl -fsSL https://raw.githubusercontent.com/robbyrussell/oh-my-zsh/master/tools/install.sh)"
  # Quote the substitution so chsh always receives a single argument.
  chsh -s "$(which zsh)"

  # theme
  git clone https://github.com/bhilburn/powerlevel9k.git ~/.oh-my-zsh/custom/themes/powerlevel9k

  # oh-my-zsh plugin install
  git clone https://github.com/zsh-users/zsh-completions ~/.oh-my-zsh/custom/plugins/zsh-completions
  # Use https:// -- GitHub disabled the unauthenticated git:// protocol,
  # so git://github.com/... clones fail.
  git clone https://github.com/zsh-users/zsh-autosuggestions ~/.zsh/zsh-autosuggestions
  git clone https://github.com/zsh-users/zsh-syntax-highlighting.git ~/.zsh/zsh-syntax-highlighting

  # Fix ownership/permissions on completion dirs flagged by compaudit.
  zsh compaudit | xargs chown -R "$(whoami)"
  zsh compaudit | xargs chmod g-w

  # vimrc vundle install
  git clone https://github.com/VundleVim/Vundle.vim.git ~/.vim/bundle/Vundle.vim

  # Pathogen install
  mkdir -p ~/.vim/autoload ~/.vim/bundle && \
    curl -LSso ~/.vim/autoload/pathogen.vim https://tpo.pe/pathogen.vim

  # Nerdtree for vim install
  git clone https://github.com/scrooloose/nerdtree.git ~/.vim/bundle/nerdtree
}
# Installs the user's dotfiles by running `make` inside ${HOME}/dotfiles.
# Runs in a subshell so the caller's working directory is untouched.
# NOTE(review): the clone step is commented out, so the repo must already
# exist at ${HOME}/dotfiles or the `cd` fails.
get_dotfiles() {
# create subshell
(
cd "$HOME"
# install dotfiles from repo
#git clone git@github.com:domokost/dotfiles.git "${HOME}/dotfiles"
cd "${HOME}/dotfiles"
# installs all the things
make
# enable dbus for the user session
# systemctl --user enable dbus.socket
)
#install_vim;
}
# Installs the vim configuration, mirrors it for root, aliases the vim
# dotfiles into the XDG neovim config dirs, makes neovim the system-wide
# vi/vim/editor alternative, and installs the Python pieces deoplete needs.
# Runs in a subshell to avoid changing the caller's working directory;
# uses sudo for the root-owned symlinks and alternatives.
install_vim() {
# create subshell
(
cd "$HOME"
# install .vim files
git clone --recursive git@github.com:jessfraz/.vim.git "${HOME}/.vim"
ln -snf "${HOME}/.vim/vimrc" "${HOME}/.vimrc"
sudo ln -snf "${HOME}/.vim" /root/.vim
sudo ln -snf "${HOME}/.vimrc" /root/.vimrc
# alias vim dotfiles to neovim
mkdir -p "${XDG_CONFIG_HOME:=$HOME/.config}"
ln -snf "${HOME}/.vim" "${XDG_CONFIG_HOME}/nvim"
ln -snf "${HOME}/.vimrc" "${XDG_CONFIG_HOME}/nvim/init.vim"
# do the same for root
sudo mkdir -p /root/.config
sudo ln -snf "${HOME}/.vim" /root/.config/nvim
sudo ln -snf "${HOME}/.vimrc" /root/.config/nvim/init.vim
# update alternatives to neovim
sudo update-alternatives --install /usr/bin/vi vi "$(which nvim)" 60
sudo update-alternatives --config vi
sudo update-alternatives --install /usr/bin/vim vim "$(which nvim)" 60
sudo update-alternatives --config vim
sudo update-alternatives --install /usr/bin/editor editor "$(which nvim)" 60
sudo update-alternatives --config editor
# install things needed for deoplete for vim
sudo apt update
sudo apt install -y \
python3-pip \
python3-setuptools \
--no-install-recommends
pip3 install -U \
setuptools \
wheel \
neovim
)
}
# Prints the command-line help text for this installer.
usage() {
	printf 'install.sh\n\tThis script installs my basic setup for a debian laptop\n\n'
	printf 'Usage:\n'
	printf '  base - setup sources & install base pkgs\n'
	printf '  basemin - setup sources & install base min pkgs\n'
	printf '  dotfiles - get dotfiles\n'
	printf '  vim - install vim specific dotfiles\n'
	printf '  scripts - install scripts\n'
}
# Entry point: dispatches on the first CLI argument.
# Prints usage and exits non-zero when no command is given.
main() {
	local cmd=$1

	if [[ -z "$cmd" ]]; then
		usage
		exit 1
	fi

	case "$cmd" in
		base)
			check_is_sudo
			get_user
			# setup /etc/apt/sources.list
			setup_sources
			base
			;;
		basemin)
			check_is_sudo
			get_user
			# setup /etc/apt/sources.list
			setup_sources_min
			base_min
			;;
		dotfiles)
			get_user
			get_dotfiles
			;;
		vim)
			install_vim
			;;
		scripts)
			install_scripts
			;;
		*)
			usage
			;;
	esac
}

main "$@"
|
require 'spec_helper'

# Specs for Webhook::EventReceiver: it validates an incoming webhook payload
# (built here via EventRegister + EventSender round-trip) and dispatches the
# record to Webhook::EventProcessor.
describe Webhook::EventReceiver do
let(:user) { FactoryGirl.create(:user) }
let(:record_source) { user }
# Register an event for the user so a realistic payload can be built.
let(:event) { Webhook::EventRegister.new(record_source).event }
subject { described_class.new(request_params) }
let(:params) { subject.params }
# The sender produces the exact params a real webhook request would carry.
let!(:request_params) do
Webhook::EventSender.new(event.id).request_params
end
describe '#process_request' do
it 'raises error when request is invalid' do
# Tampering with the record invalidates the request (e.g. its signature).
params[:record].merge!({ a: 1 })
expect{ subject.process_request }.to raise_error
end
it 'call method to deal with the changes' do
expect(Webhook::EventProcessor).to receive(:new).with(params[:record]).and_call_original
expect_any_instance_of(Webhook::EventProcessor).to receive(:user_updated)
subject.process_request
end
it 'does not call method to deal with the change when type is invalid' do
# 'project.created' is not a type this receiver handles.
params.merge!({ type: 'project.created' })
expect(Webhook::EventProcessor).not_to receive(:new)
subject.process_request
end
end
describe '#valid_request?' do
# NOTE(review): be_true/be_false are RSpec 2 matchers; under RSpec 3 these
# would need be_truthy/be_falsey — confirm the bundled rspec version.
it 'returns true when the request is valid' do
expect(subject.valid_request?).to be_true
end
it 'returns false when request is invalid' do
params[:record].merge!({ a: 1 })
expect(subject.valid_request?).to be_false
end
end
end
|
module TeachersPet
# Reopens the Thor-based CLI to add the `create_repos` command.
class Cli
# Thor option declarations apply to the next `desc`/method pair below.
option :organization, required: true
option :repository, required: true
option :public, type: :boolean, default: false, desc: "Make the repositories public"
# Shared option groups defined elsewhere on Cli.
students_option
common_options
desc 'create_repos', "Create assignment repositories for students."
# Delegates to the CreateRepos action with the parsed Thor options.
def create_repos
TeachersPet::Actions::CreateRepos.new(options).run
end
end
end
|
<?php
//error_reporting( E_ALL & ~E_DEPRECATED & ~E_NOTICE );

// Connection parameters for the local "test" database.
// NOTE(review): root with an empty password is only acceptable for a local
// dev setup — do not deploy this as-is.
$host = "localhost";
$user = "root";
$password = "";
$database = "test";

$link = mysqli_connect($host, $user, $password, $database);
if (!$link) {
    die('oops connection problem ! --> ' . mysqli_connect_error());
}
?>
|
#pragma once
#include <d2d1.h>
#include <d2d1_1.h>
#include <dwrite.h>
#include <wrl.h>
#include <vector>
#include <string>
#include "gap.h"
// NOTE(review): using-directives in a header leak into every translation
// unit that includes it — consider removing or scoping them in the .cpp.
using namespace std;
using namespace D2D1;
using namespace Microsoft::WRL;
// Mouse-wheel delta values as delivered in the wheel message's word.
#define WHEEL_SCROLL_UP 120
#define WHEEL_SCROLL_DOWN 65416
// Extra scroll range allowed beyond the document's top/bottom edge.
#define ADDITIONAL_TOP_SCROLLING_OFFSET 0
#define ADDITIONAL_BOTTOM_SCROLLING_OFFSET this->FontSize * ((int)this->Paragraphs.size() - 1)
// How many pixels one wheel notch scrolls (scales with font size).
#define TEXT_SCROLLING_SCALAR this->FontSize*3
// Zoom limits and step for ctrl+wheel font resizing.
#define FONT_SIZE_MIN 7.0f
#define FONT_SIZE_MAX 105.0f
#define FONT_SIZING_SCALAR 7.0f
#define FONT_SIZE_DIFFERENCE (this->FontSize-prevFontSize)
// Convenience wrappers around a drawer's begin/clear/end cycle.
#define START_CONTEXT_DRAWING(drawer) drawer.StartDraw(this->Direct2DContext,this->Direct2DFactory)
#define CLEAR_CONTEXT_SURFACE(drawer) drawer.ClearSurface()
#define END_CONTEXT_DRAWING(drawer) drawer.EndDraw()
// Current render-target dimensions (these macros evaluate per use).
#define TARGET_HEIGHT this->Direct2DTarget->GetSize().height
#define TARGET_WIDTH this->Direct2DTarget->GetSize().width
// Left gutter width reserved for the line counter, sized by digit count.
#define ADDITIONAL_LEFT_LINECOUNTER_OFFSET this->FontSize * to_wstring(this->Paragraphs.size()).size()
// Direct2D/DirectWrite text renderer bound to a single window.
// Owns the D2D factory/target/context and DirectWrite objects, tracks the
// current font size and scroll offset, and draws paragraphs plus a cursor.
class Renderer
{
public:
Renderer(HWND window);
~Renderer();
// Input handlers: wheel scroll, ctrl+wheel zoom, resize, and caret placement.
void OnScroll(float delta);
void OnCtrlScroll(float delta);
void OnResize(UINT width, UINT height);
void OnMouseClick(UINT posX,UINT posY,Gap* gap);
// Draw the document text / the caret from the gap buffer state.
void RenderText(Gap* gap);
void RenderCursor(Gap* gap);
// Replaces the paragraph list that subsequent renders draw.
void SetParagraphs(vector<Paragraph> prs);
private:
HWND Window;
RECT SurfaceRect;
wchar_t* Text;
float FontSize;
float ScrollOffset;
vector<Paragraph> Paragraphs;
// Raw COM pointers below are presumably released in ~Renderer — verify.
ID2D1Factory* Direct2DFactory;
IDWriteFactory* DWriteFactory;
ID2D1DeviceContext* Direct2DContext;
ID2D1HwndRenderTarget* Direct2DTarget;
ComPtr<IDWriteTextFormat> DWriteTextFormat;
ComPtr<ID2D1SolidColorBrush> Direct2DTextBrush;
ComPtr<ID2D1SolidColorBrush> Direct2DCursorBrush;
ComPtr<ID2D1SolidColorBrush> Direct2DLineCounterBrush;
// Factory helpers for the D2D/DWrite objects above.
void CreateDWriteFactory();
void CreateDirect2DTarget();
void CreateDirect2DFactory();
void CreateDirect2DContext();
ComPtr<ID2D1SolidColorBrush> CreateDirect2DBrush(
ColorF color = ColorF::WhiteSmoke
);
ComPtr<IDWriteTextLayout> CreateDWriteTextLayout(
wstring text,
UINT textLen,
float maxWidth = 0,
float maxHeight = 0
);
ComPtr<IDWriteTextFormat> CreateDWriteTextFormat(
float fontSize,
wstring fontFamily = L"Consolas",
DWRITE_FONT_WEIGHT fontWeight = DWRITE_FONT_WEIGHT_NORMAL,
DWRITE_FONT_STYLE fontStyle = DWRITE_FONT_STYLE_NORMAL,
DWRITE_FONT_STRETCH fontStretch = DWRITE_FONT_STRETCH_NORMAL
);
void RenderTextWithDirect2DContext();
void CreateResources();
};
|
use field_bit::FieldBit;
use rensa_tracker::RensaTracker;
use score;
/// Per-chain bookkeeping for rensa (chain) scoring.
/// Index is the 1-based chain number; index 0 is unused by callers.
pub struct RensaCoefTracker {
    /// Puyos erased in the nth chain.
    pub num_erased: [usize; 20],
    /// Long-group bonus coefficient recorded for the nth chain.
    pub long_bonus_coef: [usize; 20],
    /// Color-count bonus coefficient recorded for the nth chain.
    pub color_bonus_coef: [usize; 20],
}
impl RensaCoefTracker {
    /// Creates a tracker with every per-chain slot zeroed.
    pub fn new() -> RensaCoefTracker {
        RensaCoefTracker {
            num_erased: [0; 20],
            long_bonus_coef: [0; 20],
            color_bonus_coef: [0; 20],
        }
    }

    /// Total bonus coefficient for the `nth_chain`-th chain, combining the
    /// chain bonus with the recorded long and color bonuses.
    pub fn coef(&self, nth_chain: usize) -> usize {
        let chain_bonus = score::chain_bonus(nth_chain);
        let long_bonus = self.long_bonus_coef[nth_chain];
        let color_bonus = self.color_bonus_coef[nth_chain];
        score::calculate_rensa_bonus_coef(chain_bonus, long_bonus, color_bonus)
    }
}
impl RensaTracker for RensaCoefTracker {
    /// Records the erase count and bonus coefficients for one chain step.
    fn track_coef(&mut self, nth_chain: usize, num_erased: usize, long_bonus_coef: usize, color_bonus_coef: usize) {
        self.num_erased[nth_chain] = num_erased;
        self.long_bonus_coef[nth_chain] = long_bonus_coef;
        self.color_bonus_coef[nth_chain] = color_bonus_coef;
    }
    // This tracker only cares about coefficients; vanish/drop events are no-ops.
    fn track_vanish(&mut self, _nth_chain: usize, _vanished: &FieldBit, _ojama_vanished: &FieldBit) {}
    fn track_drop(&mut self, _old_low_bits: u64, _old_high_bits: u64, _new_low_bits: u64, _new_high_bits: u64) {}
}
#[cfg(test)]
mod tests {
    use super::RensaCoefTracker;
    use rensa_tracker::RensaTracker;
    /// track_coef stores values at the given chain index and leaves other
    /// slots at zero.
    #[test]
    fn test_score() {
        let mut tracker = RensaCoefTracker::new();
        tracker.track_coef(1, 4, 0, 0);
        tracker.track_coef(2, 4, 0, 0);
        tracker.track_coef(3, 4, 0, 0);
        assert_eq!(4, tracker.num_erased[3]);
        // index 4 was never tracked, so it stays at the zeroed default
        assert_eq!(0, tracker.num_erased[4]);
    }
}
// SIMD-backed simulation test; only compiled when the target supports
// AVX2 + BMI2 (BitField::simulate_with_tracker relies on them).
#[cfg(all(test, target_feature = "avx2", target_feature="bmi2"))]
mod tests_for_avx2 {
    use super::RensaCoefTracker;
    use field::BitField;
    /// Runs a full 5-chain simulation and checks the erase counts and
    /// coefficients recorded per chain (chain bonuses 0,8,16,32, plus a
    /// long bonus of 2 on the final 5-puyo group).
    #[test]
    fn test_simulate() {
        let mut bf = BitField::from_str(concat!(
            "R...RR",
            "RGBRYR",
            "RRGBBY",
            "GGBYYR"));
        let mut tracker = RensaCoefTracker::new();
        let rensa_result = bf.simulate_with_tracker(&mut tracker);
        assert_eq!(5, rensa_result.chain);
        assert_eq!(4, tracker.num_erased[1]);
        assert_eq!(4, tracker.num_erased[2]);
        assert_eq!(4, tracker.num_erased[3]);
        assert_eq!(4, tracker.num_erased[4]);
        assert_eq!(5, tracker.num_erased[5]);
        // coef(1) is 1 because a zero total coefficient is clamped to 1
        // — presumably inside calculate_rensa_bonus_coef; confirm there.
        assert_eq!(1, tracker.coef(1));
        assert_eq!(8, tracker.coef(2));
        assert_eq!(16, tracker.coef(3));
        assert_eq!(32, tracker.coef(4));
        assert_eq!(64 + 2, tracker.coef(5));
    }
}
|
;; Demo of `or` short-circuit evaluation: expr1 returns a truthy string, so
;; (or (expr1) (expr2)) never evaluates expr2 and its println side effect
;; does not run.

;; The string below is the function BODY (its return value), not a
;; docstring — a docstring would precede the argument vector.
(defn expr1 []
"return value of expr1.")
;; Prints a marker (side effect) and then returns a string; the print only
;; happens if this function is actually called.
(defn expr2 []
(println "Not Short-circuit evaluation.")
"return value of expr2.")
;; Prints "return value of expr1." — expr2's message never appears.
(println (or (expr1) (expr2)))
|
package de.htwg.se.model.handcomponent

import de.htwg.se.model.cardcomponent.CardInterface
import de.htwg.se.model.cardcomponent.cardbaseimlp.Card

/** Contract for a player's hand of cards. */
trait HandInterface {
  /** Human-readable rendering of the hand. */
  def toString: String
  /** Total point value of the cards in the hand. */
  def handValue(): Int
  /** Card at position `idx`. */
  def getCard(idx: Int): CardInterface
  /** All cards currently in the hand. */
  def getAllCards: Vector[CardInterface]
  // NOTE(review): the type parameter T is unused — the method works only on
  // Vector[CardInterface]. Removing it would break implementers, so it is
  // only flagged here.
  /** Returns `vectorToRemoveFrom` without the element at `idx`. */
  def removeAtIdx[T](idx: Int, vectorToRemoveFrom: Vector[CardInterface]): Vector[CardInterface]
  /** Resets whatever per-card flag implementations track to false. */
  def setAllCardsFalse: Unit
}
|
---
title: Sheepdog
is_name: false
---
Sheepdog (Trials & Training)
|
<?php
/*
* PhpFlatFileMarkdownWebsite - a simple preprocessor for markdown-based websites
*
* (c) 2011 Dmitriy Kubyshkin <dmitriy@kubyshkin.ru>
* https://github.com/grassator/php-flatfile-markdown-website
*/
/**
* This class handles all request processing.
* @package PhpFlatFileMarkdownWebsite
* @author Dmitriy Kubyshkin <dmitriy@kubyshkin.ru>
*/
class PhpFlatFileMarkdownWebsite
{
    /**
     * Holds path to page templates (relative to the engine root).
     * @var string
     */
    protected $templatesPath = '/templates';

    /**
     * Holds path to pages (relative to the engine root).
     * @var string
     */
    protected $pagesPath = '/pages';

    /**
     * Holds absolute path to files on server.
     * @var string
     */
    protected $rootPath;

    /**
     * URI prefix under which the engine is installed ('' at document root).
     * BUG FIX: declared explicitly — the original created it as a dynamic
     * property in the constructor (deprecated as of PHP 8.2).
     * @var string
     */
    protected $baseUri = '';

    /**
     * Holds request uri without query string (relative to baseUri).
     * @var string
     */
    protected $requestUri;

    /**
     * Holds default page title.
     * @var string
     */
    protected $defaultTitle = '';

    /**
     * Holds current page title.
     * @var string
     */
    protected $currentTitle = '';

    /**
     * Holds meta keywords for current page.
     * @var string
     */
    protected $metaKeywords = '';

    /**
     * Holds meta description for current page.
     * @var string
     */
    protected $metaDescription = '';

    /**
     * Constructs core object: derives the root path, the base URI prefix and
     * the query-string-free request URI from $_SERVER.
     */
    public function __construct()
    {
        // Preparing necessary paths
        $this->rootPath = dirname($_SERVER['SCRIPT_FILENAME']);
        $this->baseUri = str_replace($_SERVER['DOCUMENT_ROOT'], '', $this->rootPath);
        if($this->baseUri) {
            $this->baseUri = '/'.trim($this->baseUri, '/');
        }
        // Strip the query string, then the installation prefix.
        $this->requestUri = str_replace(
            '?'.$_SERVER['QUERY_STRING'], '', $_SERVER['REQUEST_URI']
        );
        $this->requestUri = substr($this->requestUri, strlen($this->baseUri));
    }

    /**
     * Starts request processing: maps the URI to a markdown file and renders
     * it, or emits a 404.
     * @return void
     */
    public function start()
    {
        $requestUri = $this->requestUri;

        // BUG FIX: guard the empty URI (e.g. REQUEST_URI equals the base URI
        // exactly). The original indexed $requestUri[strlen(...) - 1], which
        // raises a notice on an empty string and then fell through to the
        // extension-rewrite branch.
        if($requestUri === '' || $requestUri === false)
        {
            $requestUri = '/index.md';
        }
        // If user requested directory index we show index.md file in that folder
        elseif($requestUri[strlen($requestUri) - 1] == '/')
        {
            $requestUri .= 'index.md';
        }
        else // Else replacing extension sent in request to .md
        {
            $requestUri = preg_replace(
                '/^(.+\.)(php|html?)$/', '$1md', $requestUri
            );
        }

        if(file_exists($page = $this->absolutePagesPath() . $requestUri))
        {
            $this->renderPage($page);
        }
        else
        {
            $this->error404();
        }
    }

    /**
     * Shows error 404 page. If 404.html is present either in templates path
     * or pages path it will be rendered. Otherwise a simple page will be shown.
     * @return void
     */
    protected function error404()
    {
        // If there was an error and headers were already sent we don't want
        // another php notice shown about already sent headers
        if(!headers_sent())
        {
            header("HTTP/1.0 404 Not Found");
        }
        // Searching for user-provided 404 page
        if(file_exists($page = $this->absoluteTemplatesPath() . '/404.html'))
        {
            include $page;
        }
        elseif(file_exists($page = $this->absolutePagesPath() . '/404.html'))
        {
            include $page;
        }
        else // If not found rendering simple one.
        {
            echo "<h1>Error 404</h1><p>Requested page not found.</p>";
        }
    }

    /**
     * Renders page. Accepts path to existing markdown file: extracts @meta
     * lines, converts markdown to HTML and wraps it in the optional
     * header/footer templates.
     * @param string $page
     * @return void
     */
    protected function renderPage($page)
    {
        // Transforming page from markdown to html
        require_once dirname(__FILE__) . '/markdown.php';
        $markdown = new Markdown_Parser();
        $content = file_get_contents($page);
        $content = $this->parseMeta($content);
        $content = $markdown->transform($content);

        // If there wasn't a custom @title inside the document, fall back to
        // the first heading the markdown parser found.
        if(empty($this->currentTitle) && !empty($markdown->document_title))
        {
            $this->currentTitle = $markdown->document_title;
        }

        // Outputting header template if present
        if(file_exists($header = $this->absoluteTemplatesPath() . '/header.php'))
        {
            include $header;
        }

        echo $content;

        // Outputting footer template if present
        if(file_exists($footer = $this->absoluteTemplatesPath() . '/footer.php'))
        {
            include $footer;
        }
    }

    /**
     * Parses meta data lines of the form "@name value" inside the document.
     * @param string $content
     * @return string content with the meta lines removed
     */
    protected function parseMeta($content)
    {
        $pattern = '/^\@(\w+) (.+)$/m';
        return preg_replace_callback($pattern, array($this, 'parseMetaCallback'), $content);
    }

    /**
     * Stores one recognized meta directive and removes it from the document
     * so it doesn't get parsed as markdown.
     * @param array $matches
     * @return string
     */
    protected function parseMetaCallback($matches)
    {
        switch($matches[1])
        {
            case 'title':
                $this->setCurrentTitle($matches[2]);
                break;
            case 'keywords':
                $this->setMetaKeywords($matches[2]);
                // BUG FIX: missing break — @keywords used to fall through
                // and overwrite the meta description as well.
                break;
            case 'description':
                $this->setMetaDescription($matches[2]);
                break;
        }
        return '';
    }

    /**
     * Sets meta description for current page.
     * @param string $metaDescription
     */
    public function setMetaDescription($metaDescription)
    {
        $this->metaDescription = $metaDescription;
    }

    /**
     * Returns meta description for current page.
     * @return string
     */
    public function metaDescription()
    {
        return $this->metaDescription;
    }

    /**
     * Sets meta keywords for current page.
     * @param string $metaKeywords
     */
    public function setMetaKeywords($metaKeywords)
    {
        $this->metaKeywords = $metaKeywords;
    }

    /**
     * Returns meta keywords for current page.
     * @return string
     */
    public function metaKeywords()
    {
        return $this->metaKeywords;
    }

    /**
     * Sets default page title.
     * @param string $metaTitle
     */
    public function setDefaultTitle($metaTitle)
    {
        $this->defaultTitle = $metaTitle;
    }

    /**
     * Returns default page title.
     * @return string
     */
    public function defaultTitle()
    {
        return $this->defaultTitle;
    }

    /**
     * Sets current page title.
     * @param string $metaTitle
     */
    public function setCurrentTitle($metaTitle)
    {
        $this->currentTitle = $metaTitle;
    }

    /**
     * Returns current page title.
     * @return string
     */
    public function currentTitle()
    {
        return $this->currentTitle;
    }

    /**
     * Returns full page title: "current | default" when both are set,
     * otherwise whichever is available.
     * BUG FIX: the original returned " | default" when no current title was
     * set but a default existed.
     * @return string
     */
    public function metaTitle()
    {
        if($this->currentTitle === '')
        {
            return $this->defaultTitle();
        }
        if(!empty($this->defaultTitle))
        {
            return $this->currentTitle.' | '.$this->defaultTitle();
        }
        return $this->currentTitle();
    }

    /**
     * Sets templates path relative to the engine root.
     * @param string $templatesPath
     */
    public function setTemplatesPath($templatesPath)
    {
        $this->templatesPath = $templatesPath;
    }

    /**
     * Returns templates path relative to the engine root.
     * @return string
     */
    public function templatesPath()
    {
        return $this->templatesPath;
    }

    /**
     * Returns absolute path to templates.
     * @return string
     */
    public function absoluteTemplatesPath()
    {
        return $this->rootPath . $this->templatesPath;
    }

    /**
     * Sets pages path relative to the engine root.
     * @param string $pagesPath
     */
    public function setPagesPath($pagesPath)
    {
        $this->pagesPath = $pagesPath;
    }

    /**
     * Returns pages path relative to the engine root.
     * @return string
     */
    public function pagesPath()
    {
        return $this->pagesPath;
    }

    /**
     * Returns absolute path to pages.
     * @return string
     */
    public function absolutePagesPath()
    {
        return $this->rootPath . $this->pagesPath;
    }

    /**
     * Returns request uri without query string.
     * @return string
     */
    public function requestUri()
    {
        return $this->requestUri;
    }

    /**
     * Returns the engine's absolute root path.
     * @return string
     */
    public function rootPath()
    {
        return $this->rootPath;
    }
}
|
package study.daydayup.wolf.dts.sink;
import lombok.NonNull;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import study.daydayup.wolf.dts.transformation.Statistics;
import study.daydayup.wolf.dts.source.offset.Offset;
import javax.annotation.Resource;
import java.util.Map;
/**
* study.daydayup.wolf.dts.sink
*
* @author Wingle
* @since 2020/2/8 6:07 下午
**/
@Component
public class MysqlEditor {
    @Resource
    private JdbcTemplate jdbc;

    /**
     * Updates rows of {@code table} matching {@code key} with {@code value}.
     * NOTE(review): currently an unimplemented stub — always returns 0 and
     * never touches {@link #jdbc}.
     *
     * @return number of rows affected (always 0 for now)
     */
    public int update(@NonNull String table, @NonNull Map<String, Object> key, @NonNull Map<String, Object> value) {
        return 0;
    }

    /**
     * Persists the statistics and the consumed source offset atomically.
     * NOTE(review): unimplemented stub — the @Transactional wrapper is in
     * place but the body does nothing yet.
     */
    @Transactional
    public void save(Offset offset, Statistics statistics) {
    }
}
|
use crate::opencl::*;
use ndarray::prelude::*;
#[cfg(test)]
use ndarray_rand::rand_distr::Uniform;
#[cfg(test)]
use ndarray_rand::RandomExt;
use std::time::{Instant,Duration};
use ocl::Error;
// Squares a flat vector on the GPU and checks every element.
// #[serial]: OpenCL tests share the device, so they must not interleave.
#[test]
#[serial]
fn vec_squared() -> Result<(), Error> {
    let backend = CLBackEnd::new("GeForce")?;
    let (n, m) = (1, 20);
    let mut a = OpenCLArray::from_vec(backend, n, m, vec![0.5; m * n])?;
    // NOTE(review): `&a.square();` takes a reference to the call's result and
    // drops it — presumably square() mutates in place; confirm and drop the `&`.
    &a.square();
    let a_result = a.to_vec()?;
    println!("a_result: {:?}", a_result);
    assert_eq!(a_result, vec![0.25; n * m]);
    Ok(())
}
// Squares a 2-D ndarray on the GPU and compares against mapv(powf(2.0)).
#[test]
#[serial]
fn array_squared() -> Result<(), Error> {
    let backend = CLBackEnd::new("GeForce")?;
    let (n, m) = (20, 20);
    let mut array = Array2::<f32>::from_elem((n, m), 2.);
    let mut a = OpenCLArray::from_array(backend, &array)?;
    // NOTE(review): result reference is dropped — see vec_squared.
    &a.square();
    let array_result = a.to_array()?;
    println!("a_result:\n{:?}", array_result);
    assert_eq!(array_result, array.mapv(|x| x.powf(2.0)));
    Ok(())
}
// GPU transpose of a 2x3 matrix must match ndarray's view transpose.
#[test]
#[serial]
fn array_transpose() -> Result<(), Error> {
    let backend = CLBackEnd::new("GeForce")?;
    let mut array = array![[1., 2., 3.], [4., 5., 6.]];
    let mut a = OpenCLArray::from_array(backend, &array)?;
    let b = a.t()?;
    let result = b.to_array()?;
    println!("result: {:#?}", result);
    assert_eq!(result, array.t());
    Ok(())
}
// Large (10000x784)·(784x10) matrix product on the GPU, compared element-wise
// against ndarray's CPU dot within a loose epsilon (1e-3) because the two
// backends accumulate f32 sums in different orders.
// Also prints rough CPU vs GPU timings.
#[test]
#[serial]
fn array_dot() -> Result<(), Error> {
    let backend = CLBackEnd::new("GeForce")?;
    // let a = array![[1., 2., 3.], [4., 5., 6.]];
    // let b = array![[1.,1.],[1.,1.],[1.,1.]];
    let (n,m,k) = (10000,784,10);
    let a = Array::random((n, m), Uniform::new(0., 1.));
    let b = Array::random((m, k), Uniform::new(0., 1.));
    let mut c_gpu = OpenCLArray::new(backend.clone(),n,k)?;
    let mut start = Instant::now();
    let c = a.dot(&b);
    println!("c_cpu = a.b : {} ms",start.elapsed().as_millis());
    start = Instant::now();
    let a_gpu = OpenCLArray::from_array(backend.clone(), &a)?;
    println!("a -> gpu: {} ns",start.elapsed().as_nanos());
    start = Instant::now();
    let b_gpu = OpenCLArray::from_array(backend, &b)?;
    println!("b -> gpu: {} ns",start.elapsed().as_nanos());
    start = Instant::now();
    let mut gpu_start = Instant::now();
    a_gpu.dot(&b_gpu,&mut c_gpu)?;
    println!("c_gpu = a.b : {} ms",gpu_start.elapsed().as_millis());
    let c_gpu = c_gpu.to_array()?;
    // println!("c:\n{:#?}", c);
    // println!("c_gpu:\n{:#?}", c_gpu);
    let epsilon = 1e-3;
    for y in 0..n {
        for x in 0..k {
            println!(
                "{} - {} = {} ?< {}",
                c_gpu[[y, x]],
                c[[y, x]],
                c_gpu[[y, x]] - c[[y, x]],
                epsilon
            );
            assert!((c_gpu[[y, x]] - c[[y, x]]).abs() < epsilon);
        }
    }
    Ok(())
}
// Element-wise (Hadamard) product on the GPU vs ndarray's `a * b`.
#[test]
#[serial]
fn array_hadamard() -> Result<(), Error> {
    let backend = CLBackEnd::new("GeForce")?;
    let a = Array::random((10, 3), Uniform::new(0., 1.));
    let b = Array::random((10, 3), Uniform::new(0., 1.));
    let a_gpu = OpenCLArray::from_array(backend.clone(), &a)?;
    let b_gpu = OpenCLArray::from_array(backend.clone(), &b)?;
    let mut c_gpu = OpenCLArray::new(backend, a_gpu.rows,a_gpu.cols)?;
    a_gpu.hadamard(&b_gpu,&mut c_gpu)?;
    let c_gpu = c_gpu.to_array()?;
    let c = a * b;
    println!("c:\n{:#?}", c);
    println!("c_gpu:\n{:#?}", c_gpu);
    // exact equality is expected: a single multiply per element has no
    // accumulation-order ambiguity
    assert_eq!(c, c_gpu);
    Ok(())
}
// Element-wise addition on the GPU vs ndarray's `a + b` (exact equality).
#[test]
#[serial]
fn array_add() -> Result<(), Error> {
    let backend = CLBackEnd::new("GeForce")?;
    let a = Array::random((10, 3), Uniform::new(0., 1.));
    let b = Array::random((10, 3), Uniform::new(0., 1.));
    let a_gpu = OpenCLArray::from_array(backend.clone(), &a)?;
    let b_gpu = OpenCLArray::from_array(backend.clone(), &b)?;
    let mut c_gpu = OpenCLArray::new(backend, a_gpu.rows,a_gpu.cols)?;
    a_gpu.add(&b_gpu,&mut c_gpu)?;
    let c_gpu = c_gpu.to_array()?;
    let c = a + b;
    println!("c:\n{:#?}", c);
    println!("c_gpu:\n{:#?}", c_gpu);
    assert_eq!(c, c_gpu);
    Ok(())
}
// Element-wise subtraction on the GPU vs ndarray's `a - b` (exact equality).
#[test]
#[serial]
fn array_subtract() -> Result<(), Error> {
    let backend = CLBackEnd::new("GeForce")?;
    let a = Array::random((10, 3), Uniform::new(0., 1.));
    let b = Array::random((10, 3), Uniform::new(0., 1.));
    let a_gpu = OpenCLArray::from_array(backend.clone(), &a)?;
    let b_gpu = OpenCLArray::from_array(backend.clone(), &b)?;
    let mut c_gpu = OpenCLArray::new(backend, a_gpu.rows,a_gpu.cols)?;
    a_gpu.subtract(&b_gpu,&mut c_gpu)?;
    let c_gpu = c_gpu.to_array()?;
    let c = a - b;
    println!("c:\n{:#?}", c);
    println!("c_gpu:\n{:#?}", c_gpu);
    assert_eq!(c, c_gpu);
    Ok(())
}
/// Logistic sigmoid: maps any real input into (0, 1).
/// CPU reference used to validate the GPU kernels.
fn sigmoid_op(x: f32) -> f32 {
    let e = (-x).exp();
    (1.0 + e).recip()
}
// GPU sigmoid kernel vs the CPU reference, element-wise within 1e-5.
// Inputs are drawn near 0.5 where the sigmoid's slope is largest.
#[test]
#[serial]
fn array_sigmoid() -> Result<(), Error> {
    let backend = CLBackEnd::new("GeForce")?;
    let a: Array2<f32> = Array::random((8, 10), Uniform::new(0.49, 0.51));
    let (n, m): (usize, usize) = (a.nrows(), a.ncols());
    let b = a.mapv(|x| sigmoid_op(x));
    let a_gpu = OpenCLArray::from_array(backend.clone(), &a)?;
    let mut b_gpu = OpenCLArray::new(backend,a_gpu.rows,a_gpu.cols)?;
    a_gpu.sigmoid(&mut b_gpu)?;
    let b_gpu = b_gpu.to_array()?;
    let epsilon = 1e-5;
    for y in 0..n {
        for x in 0..m {
            println!(
                "{} - {} = {} < {}",
                b_gpu[[y, x]],
                b[[y, x]],
                b_gpu[[y, x]] - b[[y, x]],
                epsilon
            );
            assert!((b_gpu[[y, x]] - b[[y, x]]).abs() < epsilon);
        }
    }
    Ok(())
}
fn sigmoid_prime_op(x: f32) -> f32 {
sigmoid_op(x) * (1.0 - sigmoid_op(x))
}
// GPU sigmoid-derivative kernel vs the CPU reference, within 1e-5.
#[test]
#[serial]
fn array_sigmoid_prime() -> Result<(), Error> {
    let backend = CLBackEnd::new("GeForce")?;
    let a: Array2<f32> = Array::random((8, 10), Uniform::new(0.49, 0.51));
    let (n, m): (usize, usize) = (a.nrows(), a.ncols());
    let b = a.mapv(|x| sigmoid_prime_op(x));
    let a_gpu = OpenCLArray::from_array(backend.clone(), &a)?;
    let mut b_gpu = OpenCLArray::new(backend,a_gpu.rows,a_gpu.cols)?;
    a_gpu.sigmoid_prime(&mut b_gpu)?;
    let b_gpu = b_gpu.to_array()?;
    let epsilon = 1e-5;
    for y in 0..n {
        for x in 0..m {
            println!(
                "{} - {} = {} < {}",
                b_gpu[[y, x]],
                b[[y, x]],
                b_gpu[[y, x]] - b[[y, x]],
                epsilon
            );
            assert!((b_gpu[[y, x]] - b[[y, x]]).abs() < epsilon);
        }
    }
    Ok(())
}
// Compares the two transpose implementations: t() (returns a new array) and
// t_v2() (presumably transposes in place — confirm in OpenCLArray). Both
// must match ndarray's transpose, and applying t_v2 twice must round-trip.
// The timing loops are rough micro-benchmarks printed for inspection only.
#[test]
#[serial]
fn array_transpose_versions() -> Result<(), Error> {
    let backend = CLBackEnd::new("GeForce")?;
    let mut array = array![[1., 2., 3.], [4., 5., 6.]];
    let mut a = OpenCLArray::from_array(backend, &array)?;
    let mut start = Instant::now();
    for _ in 0..10 {
        let b = a.t()?;
    }
    println!("t_version 1 time: {}",start.elapsed().as_nanos());
    let b = a.t()?;
    start = Instant::now();
    for _ in 0..10 {
        a.t_v2()?;
    }
    println!("t_version 2 time: {}",start.elapsed().as_nanos());
    // after the loop above `a` has been transposed an even number of times;
    // this extra call leaves it transposed once overall
    a.t_v2()?;
    let result = b.to_array()?;
    let result_2 = a.clone().to_array()?;
    println!("result:\n{:#?}", result);
    println!("result:\n{:#?}", result_2);
    assert_eq!(result, array.t());
    assert_eq!(result_2, array.t());
    a.t_v2()?;
    let transpose_back = a.to_array()?;
    assert_eq!(transpose_back,array);
    Ok(())
}
|
package com.vegesoft.efficientspending.account.domain
import org.junit.jupiter.api.Assertions.assertEquals
import org.junit.jupiter.api.DisplayName
import org.junit.jupiter.api.Test
import java.util.*
internal class AccountTest {

    @Test
    @DisplayName("Should create account with CREATED status and 0 balance")
    fun shouldCreateAccountWithCreatedStatusAnd0Balance() {
        // given: identity data for a fresh account
        val id = UUID.randomUUID()
        val firstName = "Alan"
        val lastName = "Turing"
        val email = "howtohackenigma@turing.com"

        // when: the provider builds a brand-new account
        val createdAccount = AccountProvider.createdAccount(id, firstName, lastName, email)

        // then: identity fields carry over, status is CREATED, balance is empty
        assertEquals(id, createdAccount.id)
        assertEquals(AccountStatus.CREATED, createdAccount.status)
        assertEquals(firstName, createdAccount.firstName)
        assertEquals(lastName, createdAccount.lastName)
        assertEquals(email, createdAccount.email)
        assertEquals(Balance(), createdAccount.balance)
    }
}
|
package com.liang.phonecontactlist;
import java.io.IOException;
import java.io.InputStream;
import java.text.Collator;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.json.JSONException;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.ContentUris;
import android.content.Context;
import android.content.Intent;
import android.graphics.Color;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.os.Bundle;
import android.provider.ContactsContract;
import android.provider.ContactsContract.RawContacts;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.view.Window;
import android.widget.Adapter;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ArrayAdapter;
import android.widget.CheckBox;
import android.widget.ImageView;
import android.widget.ListAdapter;
import android.widget.ListView;
import android.widget.TextView;
import com.liang.Model.ContactMen;
import com.liang.controlHelper.ContactHelper;
import com.liang.phonenum.utils.AssetsDatabaseManager;
import com.liang.phonenum.utils.DatabaseDAO;
public class MultiDelActivity extends Activity {
	private final static String TAG = "MultiDelActivity";
	// Contacts shown in the list, sorted by Chinese collation in refresMen().
	List<ContactMen> menList = new ArrayList<ContactMen>();
	// position -> checked state, kept in sync with the list's checkboxes.
	@SuppressLint("UseSparseArrays")
	Map<Integer, Boolean> isCheckMap = new HashMap<Integer, Boolean>();
	// PullMenParser menParser;
	ListView listview;
	// Number of currently checked rows; drives the delete button state.
	private int checkCount = 0;
	MenDelLogAdapter adapter;
	TextView tv_multidelete;
	TextView tv_checkAllMen, tv_deleteInfo;
	// NOTE(review): `this` is passed to a helper in a field initializer,
	// i.e. before onCreate has run — confirm ContactHelper tolerates that.
	ContactHelper contactHelper = ContactHelper.getContactHelper(this);
	private boolean isAllCheck = false;
	/**
	 * (Re)populates the list: sorts contacts by Chinese-locale name order,
	 * installs a fresh adapter and resets every row's checked state to false.
	 */
	public void refresMen() {
		listview.setVisibility(View.VISIBLE);
		Collections.sort(menList, new CompratorByFileName());
		adapter = new MenDelLogAdapter(MultiDelActivity.this, R.layout.menlist,
				menList);
		listview.setAdapter(adapter);
		for (int i = 0; i < menList.size(); i++) {
			isCheckMap.put(i, false);
		}
	}
	/**
	 * Sets up the multi-delete screen: receives the contact list via the
	 * launching Intent (extra "contact"), fills the ListView, and wires up
	 * the delete / select-all / cancel controls plus per-row check toggling.
	 */
	@SuppressWarnings("unchecked")
	@Override
	protected void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		requestWindowFeature(Window.FEATURE_NO_TITLE);
		setContentView(R.layout.activity_multi_del);
		// Contact list is handed over serialized by the launching activity.
		menList = (List<ContactMen>) getIntent()
				.getSerializableExtra("contact");
		Log.e(TAG, "长度:" + menList.size());
		listview = (ListView) findViewById(R.id.lv_delMenList);
		TextView tv_cancelDel = (TextView) findViewById(R.id.tv_canelDelete);
		refresMen();
		// Delete button: disabled until at least one row is checked.
		tv_multidelete = (TextView) findViewById(R.id.tv_deletebtn);
		tv_multidelete.setEnabled(false);
		tv_multidelete.setOnClickListener(new OnClickListener() {
			@Override
			public void onClick(View v) {
				// Delete every checked contact, then return to MainActivity
				// flagging that contacts were removed.
				for (int i = 0; i < isCheckMap.size(); i++) {
					if (isCheckMap.get(i)) {
						ContactMen men = menList.get(i);
						contactHelper.DeleteMen(men);
						// contactHelper.deleteContact(men);
						Drawable drawable = getResources().getDrawable(
								R.drawable.garbage_enable);
						drawable.setBounds(0, 0, drawable.getMinimumWidth(),
								drawable.getMinimumHeight());
						tv_multidelete.setCompoundDrawables(drawable, null,
								null, null);//
					}
				}
				Intent intent = new Intent(MultiDelActivity.this,
						MainActivity.class);
				intent.putExtra("deleteMen", true);
				startActivity(intent);
			}
		});
		// Select-all toggle.
		tv_checkAllMen = (TextView) findViewById(R.id.tv_deleteChooseAll);
		tv_checkAllMen.setOnClickListener(new OnClickListener() {
			@Override
			public void onClick(View v) {
				if (!isAllCheck) {
					setAllCheck(true);
				} else {
					setAllCheck(false);
				}
			}
		});
		tv_deleteInfo = (TextView) findViewById(R.id.tv_deleteInfo);
		// Row tap: toggle that row's checkbox, maintain checkCount, and keep
		// the select-all / delete-button visuals consistent with the count.
		listview.setOnItemClickListener(new OnItemClickListener() {
			@Override
			public void onItemClick(AdapterView<?> parent, View view,
					int position, long id) {
				adapter.notifyDataSetChanged();
				Log.e(TAG, "position:" + position + " id" + id);
				CheckBox checkBox = (CheckBox) view
						.findViewById(R.id.checked_men);
				if (!checkBox.isChecked()) {
					checkBox.setChecked(true);
					isCheckMap.put(position, true);
					checkCount++;
					Log.d(TAG, "选中个数:" + checkCount);
				} else {
					checkBox.setChecked(false);
					isCheckMap.put(position, false);
					checkCount--;
					Log.d(TAG, "取消选中后的个数:" + checkCount);
				}
				if (checkCount > 0) {
					if (checkCount == menList.size()) {
						// every row checked -> flip the select-all control on
						setAllCheck(true);
						Log.d(TAG, "已经被全部选中:" + menList.size());
					} else {
						// partially checked -> show the enabled select-all icon
						Drawable drawable1 = getResources().getDrawable(
								R.drawable.allcheck_enable);
						drawable1.setBounds(0, 0, drawable1.getMinimumWidth(),
								drawable1.getMinimumHeight());
						tv_checkAllMen.setCompoundDrawables(null, drawable1,
								null, null);//
						tv_checkAllMen.setText("全选");
						tv_checkAllMen.setTextColor(Color.BLACK);
						isAllCheck = false;
						Log.d(TAG, "已经被取消全部选中:" + menList.size() + " "
								+ checkCount);
					}
					setGarbageCheck(true);
					tv_deleteInfo.setText("删除联系人" + checkCount);
				} else {
					tv_deleteInfo.setText("删除联系人");
					setGarbageCheck(false);
				}
			}
		});
		// Cancel simply closes this screen.
		tv_cancelDel.setOnClickListener(new OnClickListener() {
			@Override
			public void onClick(View v) {
				finish();
			}
		});
	}
	/**
	 * Deletes a contact row directly via the Contacts provider using the
	 * contact's _id.
	 * NOTE(review): currently unused — deletion goes through
	 * contactHelper.DeleteMen() in onCreate; the commented-out variants show
	 * earlier attempts against other contact tables.
	 */
	private void deleteMen(ContactMen imen) {
		// String where = ContactsContract.Data._ID + "=?";
		String[] whereparams = new String[] { imen.getContactId() };
		Log.e(TAG, "删除联系人:" + imen.toString());
		//
		Uri uri = ContactsContract.Contacts.CONTENT_URI;
		getContentResolver().delete(uri, "_id=?", whereparams);
		// getContentResolver().delete(uri, "display_name=?", new
		// String[]{imen.getName()});
		// uri = Uri.parse("content://com.android.contacts/data");
		// getContentResolver().delete(uri, "raw_contact_id=?", new
		// String[]{imen.getNumber()});
		// int s=
		// getContentResolver().delete(ContactsContract.RawContacts.CONTENT_URI,
		// where, whereparams);
		// Log.d(TAG,
		// "姓名:"+imen.getName()+" id"+imen.getContactId()+" 删除结果:"+s);
	}
	/**
	 * Deletes one raw contact by id from the RawContacts table.
	 */
	public void deleteContact(long rawContactId) {
		getContentResolver().delete(
				ContentUris.withAppendedId(RawContacts.CONTENT_URI,
						rawContactId), null, null);
	}
	/**
	 * Switches the delete button between its active and inactive look.
	 * checked=true enables the button, white text; checked=false keeps black
	 * text (enabling is done by callers when rows get checked).
	 * NOTE(review): the "_disable" drawable is used for the active state and
	 * "_enable" for the inactive one — presumably the resource names are
	 * inverted relative to their appearance; confirm against the assets.
	 */
	private void setGarbageCheck(boolean checked) {
		if (checked) {
			tv_multidelete.setEnabled(true);
			Drawable drawable = getResources().getDrawable(
					R.drawable.garbage_disable);
			drawable.setBounds(0, 0, drawable.getMinimumWidth(),
					drawable.getMinimumHeight());
			tv_multidelete.setCompoundDrawables(null, drawable, null, null);
			tv_multidelete.setTextColor(Color.WHITE);
		} else {
			Drawable drawable = getResources().getDrawable(
					R.drawable.garbage_enable);
			drawable.setBounds(0, 0, drawable.getMinimumWidth(),
					drawable.getMinimumHeight());
			tv_multidelete.setCompoundDrawables(null, drawable, null, null);
			tv_multidelete.setTextColor(Color.BLACK);
		}
	}
	/**
	 * Selects or deselects every contact in the list and refreshes the
	 * "check all" / delete-info widgets accordingly.
	 *
	 * The authoritative selection state lives in isCheckMap; the final
	 * notifyDataSetChanged() is what actually updates the visible rows.
	 */
	private void setAllCheck(boolean checked) {
		ListAdapter listAdapter = listview.getAdapter();
		if (checked) {
			checkCount = menList.size();
			// NOTE(review): adapter.getView(i, null, null) inflates brand-new,
			// never-displayed views; checking them has no visible effect. The
			// isCheckMap update plus notifyDataSetChanged() below is what
			// matters — this loop looks like dead work that could be removed.
			for (int i = 0; i < menList.size(); i++) {
				View view1 = adapter.getView(i, null, null);
				CheckBox chek = (CheckBox) view1.findViewById(R.id.checked_men);
				chek.setChecked(true);
				isCheckMap.put(i, true);
			}
			// "Check all" button switches to its active (cancel) appearance.
			Drawable drawable1 = getResources().getDrawable(
					R.drawable.allcheck_disable);
			drawable1.setBounds(0, 0, drawable1.getMinimumWidth(),
					drawable1.getMinimumHeight());
			tv_checkAllMen.setCompoundDrawables(null, drawable1, null, null);//
			tv_deleteInfo.setText("删除联系人" + checkCount);
			tv_checkAllMen.setText("取消全选");
			tv_checkAllMen.setTextColor(Color.WHITE);
			setGarbageCheck(true);
			isAllCheck = true;
		} else {
			checkCount = 0;
			tv_multidelete.setEnabled(false);
			// Same throwaway-view caveat as above; isCheckMap is the real state.
			for (int i = 0; i < menList.size(); i++) {
				View view1 = adapter.getView(i, null, null);
				CheckBox chek = (CheckBox) view1.findViewById(R.id.checked_men);
				chek.setChecked(false);
				isCheckMap.put(i, false);
			}
			setGarbageCheck(false);
			// Restore the idle (select-all) appearance.
			Drawable drawable1 = getResources().getDrawable(
					R.drawable.allcheck_enable);
			drawable1.setBounds(0, 0, drawable1.getMinimumWidth(),
					drawable1.getMinimumHeight());
			tv_checkAllMen.setCompoundDrawables(null, drawable1, null, null);//
			tv_deleteInfo.setText("删除联系人");
			tv_checkAllMen.setText("全选");
			tv_checkAllMen.setTextColor(Color.BLACK);
			isAllCheck = false;
		}
		// listview.refreshDrawableState();
		adapter.notifyDataSetChanged();
	}
private static class CompratorByFileName implements Comparator<ContactMen> {
@Override
public int compare(ContactMen lhs, ContactMen rhs) {
Comparator<Object> cmp = Collator
.getInstance(java.util.Locale.CHINA);
return cmp.compare(lhs.getName(), rhs.getName());
}
@Override
public boolean equals(Object o) {
return true;
}
}
	/**
	 * Inflates the multi-delete menu into the action bar (when present).
	 */
	@Override
	public boolean onCreateOptionsMenu(Menu menu) {
		// Inflate the menu; this adds items to the action bar if it is present.
		getMenuInflater().inflate(R.menu.multi_del, menu);
		return true;
	}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
	/**
	 * List adapter for the multi-delete screen: one row per contact with a
	 * photo, a name and a visible checkbox whose state mirrors the outer
	 * activity's isCheckMap.
	 */
	class MenDelLogAdapter extends ArrayAdapter<ContactMen> {
		private int resourceId;     // row layout resource
		private List<ContactMen> listData; // backing contact list

		public MenDelLogAdapter(Context context, int resource,
				List<ContactMen> objects) {
			super(context, resource, objects);
			// TODO Auto-generated constructor stub
			resourceId = resource;
			listData = objects;
		}

		@Override
		public int getCount() {
			return listData.size();
		}

		@Override
		public ContactMen getItem(int position) {
			return listData.get(position);
		}

		@Override
		public long getItemId(int position) {
			return position;
		}

		/**
		 * Standard ViewHolder recycling: inflate + cache child views on first
		 * use, reuse them when convertView is recycled.
		 */
		@Override
		public View getView(int position, View convertView, ViewGroup parent) {
			ContactMen contactMen = getItem(position);
			View view;
			ViewHolder viewHolder = null;
			if (convertView == null) {
				view = LayoutInflater.from(getContext()).inflate(resourceId,
						null);
				viewHolder = new ViewHolder();
				viewHolder.headImage = (ImageView) view
						.findViewById(R.id.menlist_itePhoto);
				viewHolder.menName = (TextView) view
						.findViewById(R.id.menlist_itemName);
				viewHolder.checkBox = (CheckBox) view
						.findViewById(R.id.checked_men);
				// Checkboxes are hidden in other screens; always shown here.
				viewHolder.checkBox.setVisibility(View.VISIBLE);
				view.setTag(viewHolder);
			} else {
				view = convertView;
				viewHolder = (ViewHolder) view.getTag();
			}
			try {
				ContactHelper contactHelper = ContactHelper
						.getContactHelper(getContext());
				// img_id == 0 means no photo stored; fall back to placeholder.
				if (contactMen.getImg_id() != 0) {
					viewHolder.headImage.setImageBitmap(contactHelper.getPhoto(
							contactMen.getContactId(), contactMen.getImg_id()));
				} else {
					viewHolder.headImage.setImageResource(R.drawable.headnew);
				}
				// NOTE(review): isCheckMap.get(position) auto-unboxes and will
				// NPE if the map has no entry for this position — confirm the
				// map is pre-populated for every index.
				if (isCheckMap.get(position)) {
					viewHolder.checkBox.setChecked(true);
				} else {
					viewHolder.checkBox.setChecked(false);
				}
			} catch (JSONException e) {
				// TODO Auto-generated catch block
				e.printStackTrace();
			}
			viewHolder.menName.setText(contactMen.getName());
			return view;
		}

		// Cached child views for one list row.
		class ViewHolder {
			ImageView headImage;
			TextView menName;
			CheckBox checkBox;
		}
	}
}
|
Dungeon Treasure Hunt
=====================
Play current version: http://ripexz.github.io/dungeon-treasure-hunt/
An HTML5/JavaScript bomberman-like game, initially created during a 7-hour game jam.
Original version from the gamejam can be played here: http://www.ripexz.com/gamejam/1/pj/
Known Bugs:
---------------------
- Bombs don't trigger other bombs to explode.
|
### opengl_3D_gasket
Opengl을 사용한 3차원 Gasket구현 glew와 glfw shader를 사용하였습니다.
면마다의 다른 색상도 구현하였습니다.
3D gasket을 통한 키보드,마우스interface 또한 구현하였습니다.
## 사용법
- Q/q 키를 누르면 프로그램 종료
- 숫자 1, 2, 3, 4 키를 눌렀을 때, 각 4면을 칠하는 색상이 바뀜
- 마우스 왼쪽 클릭 시 숫자 1, 2, 3, 4에 부여되었던 색상 조합이 순차적으로 변경됨
: 1 -> 2 -> 3 -> 4 -> 1 -> ...
- U/u 키를 누르면 사면체 분할 횟수 증가(10이상 증가 시 메모리 부족으로 인하여 10이상은 막아둠)
- D/d 키를 누르면 사면체 분할 횟수 감소(값은 음수가 되지 않음)
- '+' 키를 누르면 사면체 크기 증가
- '-' 키를 누르면 사면체 크기 감소(스케일 값은 음수가 되지 않음)
- 마우스 오른쪽 클릭 시 회전축 변경(회전축 변경시 회전각 0도로 리셋)
: 정지(default) -> z축 -> x축 -> y축 -> 정지 ->...
## [2D gasket 코드](https://github.com/Junuu/Opengl_Gasket_2D)
|
# Instructions
[How to use this repository](../../README.md)
# Description
Installs the PSWindowsUpdate module, then runs the following cmdlets:
* Get-WULastResults
* Get-WURebootStatus
# Author
Colby Bouma
Jeremy Gruttner
|
#!/bin/bash
# Rebuild and run rotina_principal whenever .config has changed since the
# timestamp recorded in .last_modified_config.
#
# BUGFIX: the previous version read the file with grep and piped the values
# through bc; on the very first run .last_modified_config does not exist, the
# variable was empty, and the numeric test errored out so nothing was built.
LAST_MODIFIED_CONFIG=$(cat .last_modified_config 2>/dev/null)
CURRENT_MODIFIED_CONFIG=$(date -r .config +%s)
# Rebuild when no timestamp is recorded yet, or .config is newer than it.
if [ -z "$LAST_MODIFIED_CONFIG" ] || [ "$LAST_MODIFIED_CONFIG" -lt "$CURRENT_MODIFIED_CONFIG" ]; then
	echo "$CURRENT_MODIFIED_CONFIG" > .last_modified_config
	gcc rotina_principal.c -o rotina_principal && ./rotina_principal
fi
|
using Newtonsoft.Json.Linq;
namespace NeuralNetworks.Units {
	/// <summary>
	/// Base node of the network's computation graph: stores its forward-pass
	/// activation and the loss derivative used during backpropagation, and
	/// supports JSON (de)serialization via JObject.
	/// </summary>
	public abstract class Unit {
		public string id { get; set; }
		// Forward-pass activation of this unit.
		public double value { get; set; }
		// d(loss)/d(value), filled in during backpropagation.
		public double derivative { get; set; }

		// Computes this unit's value from its inputs (forward pass).
		public abstract void count();
		// Propagates this unit's derivative back to its input units.
		public abstract void countDerivativesOfInputUnits();

		// Gradient of the squared-error loss (value - expectedOutput)^2
		// with respect to value.
		public virtual void countDerivative(double expectedOutput) => derivative = 2 * (value - expectedOutput);

		/// <summary>Serializes the unit; subclasses extend the JObject.
		/// "type" records the concrete class name for deserialization.</summary>
		public virtual JObject toJObject() {
			JObject unit = new JObject {
				["id"] = id, ["type"] = GetType().Name
			};
			return unit;
		}

		/// <summary>Restores state from JSON; subclasses extend and return this.</summary>
		public virtual Unit fillFromJObject(JObject json) {
			id = json["id"]!.Value<string>();
			return this;
		}
	}
}
|
/*
* This is free and unencumbered software released into the public domain, following <https://unlicense.org>
*/
package com.pullvert.kotysa.android
import android.content.ContentValues
import android.database.sqlite.SQLiteDatabase
import android.database.sqlite.SQLiteOpenHelper
import com.pullvert.kotysa.*
import java.time.LocalDate
import java.time.LocalDateTime
import java.time.LocalTime
import java.time.OffsetDateTime
import java.time.format.DateTimeFormatter
import kotlin.reflect.KClass
/**
 * Blocking SQL client backed by Android's SQLiteOpenHelper.
 *
 * Reads go through [SQLiteOpenHelper.getReadableDatabase], writes through
 * [SQLiteOpenHelper.getWritableDatabase].
 *
 * @sample com.pullvert.kotysa.android.sample.UserRepositorySqLite
 */
internal class SqlClientSqLite(
        private val client: SQLiteOpenHelper,
        override val tables: Tables
) : BlockingSqlClient(), DefaultSqlClient {

    override fun <T : Any> select(resultClass: KClass<T>,
                                  dsl: (SelectDslApi.(ValueProvider) -> T)?): BlockingSqlClientSelect.Select<T> =
            SqlClientSelectSqLite.Select(client.readableDatabase, tables, resultClass, dsl)

    override fun <T : Any> createTable(tableClass: KClass<T>) {
        val createTableSql = createTableSql(tableClass)
        return client.writableDatabase.execSQL(createTableSql)
    }

    override fun <T : Any> insert(row: T) {
        val table = tables.getTable(row::class)
        val contentValues = ContentValues(table.columns.size)
        // Columns whose value is null but carry a default are omitted so the
        // database applies the default instead of storing NULL.
        table.columns.values
                .filterNot { column -> column.entityGetter(row) == null && column.defaultValue != null }
                .forEach { column -> contentValues.put(column.name, column.entityGetter(row)) }
        // debug query
        insertSqlDebug(row)
        client.writableDatabase.insert(table.name, null, contentValues)
    }

    override fun insert(vararg rows: Any) {
        // Rows are inserted one by one; each must be a mapped table type.
        checkRowsAreMapped(*rows)
        rows.forEach { row -> insert(row) }
    }

    override fun <T : Any> deleteFromTable(tableClass: KClass<T>): BlockingSqlClientDeleteOrUpdate.DeleteOrUpdate<T> =
            SqlClientDeleteSqLite.Delete(client.writableDatabase, tables, tableClass)

    override fun <T : Any> updateTable(tableClass: KClass<T>): BlockingSqlClientDeleteOrUpdate.Update<T> =
            SqlClientUpdateSqLite.Update(client.writableDatabase, tables, tableClass)
}
/**
 * Stores [value] into this [ContentValues] under [name], dispatching to the
 * matching typed overload. `null` is stored with [ContentValues.putNull];
 * java.time values are serialized as ISO-formatted strings (SQLite has no
 * native date type).
 *
 * @throws UnsupportedOperationException for types SQLite cannot store
 */
internal fun ContentValues.put(name: String, value: Any?) {
    if (value != null) {
        when (value) {
            is Int -> put(name, value)
            is Byte -> put(name, value)
            is Long -> put(name, value)
            is Float -> put(name, value)
            is Short -> put(name, value)
            is Double -> put(name, value)
            is String -> put(name, value)
            is Boolean -> put(name, value)
            is ByteArray -> put(name, value)
            // Date are stored as String
            is LocalDate -> put(name, value.format(DateTimeFormatter.ISO_LOCAL_DATE))
            is LocalDateTime -> put(name, value.format(DateTimeFormatter.ISO_LOCAL_DATE_TIME))
            is OffsetDateTime -> put(name, value.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME))
            is LocalTime -> put(name, value.format(DateTimeFormatter.ISO_LOCAL_TIME))
            else -> throw UnsupportedOperationException(
                    "${value.javaClass.canonicalName} is not supported by Android SqLite")
        }
    } else {
        putNull(name)
    }
}
/**
 * Create a [BlockingSqlClient] from an Android [SQLiteOpenHelper] with [Tables] mapping
 *
 * @sample com.pullvert.kotysa.android.sample.UserRepositorySqLite
 */
public fun SQLiteOpenHelper.sqlClient(tables: Tables): BlockingSqlClient = SqlClientSqLite(this, tables)
|
/**
 * Constructor params for {@link MediaCatalog} class.
 */
export interface MediaCatalogCtor {
  /**
   * The path to the downloaded Media Catalog.
   */
  path: string
  /**
   * The Meps Language Id of this publication.
   */
  languageId: number
}

/** NDJSON row carrying the catalog's schema version number. */
interface CatalogSchemaVersionRow {
  type: 'catalogSchemaVersion'
  /**
   * The Media Catalog version.
   */
  o: number
}

/** URLs for one artwork rendered at several sizes. */
interface ImageSizes {
  /**
   * The complete URL to this image on the publication servers.
   * Extra small size.
   */
  xs: string
  /**
   * The complete URL to this image on the publication servers.
   * Small size.
   */
  sm: string
  /**
   * The complete URL to this image on the publication servers.
   * Medium size.
   */
  md: string
  /**
   * The complete URL to this image on the publication servers.
   * Large size.
   */
  lg: string
  /**
   * The complete URL to this image on the publication servers.
   * Extra large size.
   *
   * Isn't always available. If not use `lg` instead.
   */
  xl?: string
}

/** NDJSON row describing the catalog's language. */
interface LanguageRow {
  type: 'language'
  /**
   * Details about the language being used in this Media Catalog.
   */
  o: {
    /**
     * Corresponds to `Symbol` of {@link LanguageDTO}.
     */
    code: string
    locale: string
    vernacular: string
    name: string
    isLangPair: boolean
    isSignLanguage: boolean
    isRTL: boolean
  }
}
/**
 * BUGFIX: these two interface names were swapped relative to their `type`
 * discriminants (the one named "OnDemand" carried `type: 'container'` and
 * vice versa). Both are file-local, and the exported union below is
 * unchanged, so the rename is safe for consumers.
 */
interface CategoryRowObjContainer {
  /**
   * Signifies this "page" of categories is a list of more categories.
   *
   * The nested categories are under `subcategories`.
   */
  type: 'container'
  key: string
  name: string
  /**
   * Nested "pages" of categories.
   */
  subcategories: CategoryRowObj[]
}

interface CategoryRowObjOnDemand {
  /**
   * Signifies this page is a list of media.
   *
   * The media available is under `media`.
   */
  type: 'ondemand'
  images: {
    pnr: {
      xs: string
      sm: string
      md: string
      lg: string
    }
  }
  /**
   * A list of `naturalKey` values that can be used to link to a {@link MediaItemRow}.
   */
  media: string[]
}

export type CategoryRowObj = CategoryRowObjOnDemand | CategoryRowObjContainer
/** NDJSON row wrapping one category page. */
interface CategoryRow {
  type: 'category'
  o: CategoryRowObj
}

/** NDJSON row describing one piece of media (video or audio). */
interface MediaItemRow {
  type: 'media-item'
  o: {
    /**
     * The same as `naturalKey` but without the language.
     *
     * Possibly used to find the same media in a different language's Media Catalog.
     */
    languageAgnosticNaturalKey: string
    /**
     * An Id of sorts, used within category rows as a reference.
     */
    naturalKey: string
    keyParts: {
      languageCode: string
      /**
       * Refers to `track` in {@link VideoDTO}.
       */
      track: number
      formatCode: 'VIDEO' | 'AUDIO'
    } & ({
      /**
       * Refers to `doc` in {@link VideoDTO} when `type` is `'doc'`.
       */
      docID: number
    } | {
      /**
       * Refers to `doc` in {@link VideoDTO} when `type` is `'pub'`.
       */
      pubSymbol: string
      /**
       * Almost refers to `issue` in {@link VideoDTO}, however it is only the leading part.
       *
       * @example
       * video.issue = 20210500
       * detail.issueDate = '202105'
       */
      issueDate?: string
    })
    /**
     * Refers to the `key` of a {@link CategoryRowObj}.
     */
    primaryCategory: string
    /**
     * The displayed title.
     */
    title: string
    /**
     * ISO Date string.
     */
    firstPublished: string
    /**
     * Duration in seconds. Also includes millisecond precision.
     */
    duration: number
    checksums: string[]
    images: {
      /**
       * Square images. You probably want `lsr` instead.
       */
      sqr?: ImageSizes
      /**
       * Widescreen (16:9) images.
       */
      lsr?: ImageSizes
    }
  }
}
/** Final NDJSON row carrying an integrity signature for the catalog. */
interface SignatureRow {
  type: 'signature'
  /**
   * A hash of some sort.
   */
  o: string
}

/**
 * A row of the Media Catalog NDJSON file.
 *
 * Rows can have different structures and are identified using `type`.
 */
export type MediaCatalogRow =
  | CatalogSchemaVersionRow
  | LanguageRow
  | CategoryRow
  | MediaItemRow
  | SignatureRow
|
// Facade crate: re-export the individual fugue crates under short names.
pub use fugue_bv as bv;

// Optional components, gated behind cargo features.
#[cfg(feature = "db")]
pub use fugue_db as db;
#[cfg(feature = "fp")]
pub use fugue_fp as fp;

pub use fugue_ir as ir;
pub use fugue_arch as arch;
pub use fugue_bytes as bytes;
|
/*
* Copyright 2020 47 Degrees, LLC. <http://www.47deg.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package pureconfiglib
import java.net.{URI, URL}
import java.time._
import pureconfig.ConfigSource
import scala.concurrent.duration.{Duration, FiniteDuration}
/**
 * Sample case classes and config sources used by the pureconfig exercises:
 * each `*Conf`/`*Config` class demonstrates a family of types pureconfig
 * can load (primitives, options, collections, time, durations, paths).
 */
object Domain {

  // Value class: pureconfig unwraps it to its single field.
  final case class TenantInfo(value: Int) extends AnyVal

  case class Example(name: String, number: Int)

  final case class OtherStuff(pool: Boolean, gym: Boolean)

  case class Flat(
      isCurrentlyRented: Boolean,
      number: Int,
      street: String,
      pets: List[Double],
      tenants: Map[String, TenantInfo],
      mayBe: Option[String])

  // Two named Example blocks in one source, loadable independently by key.
  val multiExampleSource = ConfigSource.string("""
      example-a: {
        name: a
        number: 6
      }
      example-b: {
        name: b
        number: 7
      }
      """)

  case class PrimitivesConf(
      string: String,
      bool: Boolean,
      double: Double,
      float: Float,
      int: Int,
      long: Long,
      short: Short,
      char: Char)

  case class OptionConfig(optionA: Option[String], optionB: Option[String], optionC: Option[Int])

  case class CollectionsConfig(list: List[Char], set: Set[Int], map: Map[Int, String])

  case class TimeConfig(localDate: LocalDate, localDateTime: LocalDateTime)

  case class DurationConfig(duration: Duration, finiteDuration: FiniteDuration)

  case class PathConfig(path: java.nio.file.Path, file: java.io.File, url: URL, uri: URI)

  // Aggregate config: every section is optional.
  case class ApplicationConfig(
      primitivesConf: Option[PrimitivesConf],
      optionConfig: Option[OptionConfig],
      collectionsConfig: Option[CollectionsConfig],
      timeConfig: Option[TimeConfig],
      durationConfig: Option[DurationConfig],
      pathConfig: Option[PathConfig])
}
|
# (C) Datadog, Inc. 2018
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
import os
from datadog_checks.utils.common import get_docker_hostname
# Shared constants for the gunicorn integration tests.
HERE = os.path.dirname(os.path.abspath(__file__))  # this test package's dir
FIXTURES = os.path.join(HERE, 'fixtures')
ROOT = os.path.dirname(os.path.dirname(HERE))  # repository root (two levels up)

CHECK_NAME = 'gunicorn'

HOST = get_docker_hostname()
# NOTE(review): 26379 is the default Redis Sentinel port and the instance
# carries a redis-style password — looks copy-pasted from a redis test;
# confirm this is intentional for the gunicorn check.
PORT = 26379
PROC_NAME = 'dd-test-gunicorn'

INSTANCE = {'host': HOST, 'port': PORT, 'password': 'datadog-is-devops-best-friend', 'proc_name': PROC_NAME}
|
"""
Pathfinding.position_delta(pathfinder::AStar{D}, from::NTuple{Int,D}, to::NTuple{Int,D})
Returns the absolute difference in coordinates between `from` and `to` taking into account
periodicity of `pathfinder`.
"""
position_delta(pathfinder::GridPathfinder{D,true}, from::Dims{D}, to::Dims{D}) where {D} =
min.(abs.(to .- from), size(pathfinder.walkmap) .- abs.(to .- from))
position_delta(pathfinder::GridPathfinder{D,false}, from::Dims{D}, to::Dims{D}) where {D} =
abs.(to .- from)
"""
Pathfinding.delta_cost(pathfinder::GridPathfinder{D}, metric::M, from, to) where {M<:CostMetric}
Calculate an approximation for the cost of travelling from `from` to `to` (both of
type `NTuple{N,Int}`. Expects a return value of `Float64`.
"""
function delta_cost(
pathfinder::GridPathfinder{D,periodic,true},
metric::DirectDistance{D},
from::Dims{D},
to::Dims{D},
) where {D,periodic}
delta = collect(position_delta(pathfinder, from, to))
sort!(delta)
carry = 0
hdist = 0
for i in D:-1:1
hdist += metric.direction_costs[i] * (delta[D+1-i] - carry)
carry = delta[D+1-i]
end
return hdist
end
# Non-diagonal variant: Manhattan distance scaled by the single-axis cost.
function delta_cost(
    pathfinder::GridPathfinder{D,periodic,false},
    metric::DirectDistance{D},
    from::Dims{D},
    to::Dims{D},
) where {D,periodic}
    delta = position_delta(pathfinder, from, to)
    return sum(delta) * metric.direction_costs[1]
end
# Chebyshev distance: cost is the largest per-axis delta.
delta_cost(
    pathfinder::GridPathfinder{D},
    metric::MaxDistance{D},
    from::Dims{D},
    to::Dims{D},
) where {D} = max(position_delta(pathfinder, from, to)...)

# Penalty-map metric: base metric cost plus the absolute penalty difference
# between the two cells.
delta_cost(
    pathfinder::GridPathfinder{D},
    metric::PenaltyMap{D},
    from::Dims{D},
    to::Dims{D},
) where {D} =
    delta_cost(pathfinder, metric.base_metric, from, to) +
    abs(metric.pmap[from...] - metric.pmap[to...])

# Convenience dispatch on the pathfinder's configured metric.
delta_cost(pathfinder::GridPathfinder{D}, from::Dims{D}, to::Dims{D}) where {D} =
    delta_cost(pathfinder, pathfinder.cost_metric, from, to)
|
/*
Theming JS for CoWhere
© 2020 Johannes Kreutz. Alle Rechte vorbehalten.
*/
import my from './my.js';
import statusbar from './statusbar.js';
// Dark-mode preference handling. Preferences are persisted via the cordova
// NativeStorage plugin under "darkModePreference" with the values
// "dark" | "light" | "system"; "system" falls back to the OS media query.
let theming = {
  // Resolves true when the dark theme should be active.
  isDarkMode: function() {
    return new Promise(function(resolve) {
      NativeStorage.getItem("darkModePreference", function(result) {
        if (result == "dark") {
          resolve(true);
        } else if (result == "light") {
          resolve(false);
        } else {
          // "system" (or any unknown value): follow the OS preference.
          resolve(window.matchMedia("(prefers-color-scheme: dark)").matches);
        }
      }, function(error) {
        // No stored value yet: persist the default and use the OS preference.
        console.log("[UserPreferences] Failed to load dark mode preference.");
        this.setPreference("system");
        resolve(window.matchMedia("(prefers-color-scheme: dark)").matches);
      }.bind(this));
    }.bind(this));
  },
  // Persists the preference and immediately re-applies the theme.
  setPreference: function(preference) {
    NativeStorage.setItem("darkModePreference", preference, function(result) {
      this.apply();
      console.log("[UserPreferences] Successfully stored dark mode preference.");
    }.bind(this), function(error) {
      console.log("[UserPreferences] Failed to store dark mode preference.");
    })
  },
  // Resolves the raw stored preference, restoring "system" when unset.
  getPreference: function() {
    return new Promise(function(resolve) {
      NativeStorage.getItem("darkModePreference", function(result) {
        resolve(result);
      }, function(error) {
        console.log("[UserPreferences] Failed to load dark mode preference. Will restore default.");
        this.setPreference("system");
        resolve("system");
      }.bind(this));
    }.bind(this));
  },
  // Toggles the root "theme-dark" class (and resets the native status bar)
  // according to the resolved preference.
  apply: function() {
    this.isDarkMode().then(function(result) {
      statusbar.reset();
      if (result) {
        document.documentElement.classList.add("theme-dark");
      } else {
        document.documentElement.classList.remove("theme-dark");
      }
    });
  }
}
export default theming;
|
#include <stdlib.h>
#include <string.h>
#include "map.h"
/* Bucket index for a hash value. */
#define map_index(map, hash) (unsigned int)(hash % map->_len)
/* Non-zero once the map entered an error state. */
#define map_error(map) (map->_c)
/* Free an entry's key/value with the user-supplied destructors (if any). */
#define map_free_entry(k,v,entry) {if (k) k((entry)->key); if (v) v((entry)->value);}

typedef struct _map_entry
{
    unsigned int hash;       /* cached hash of key */
    void * key;              /* key (string by default) */
    void * value;            /* value */
    struct _map_entry * next; /* collision chain (separate chaining) */
} _Entry;

struct map_
{
    size_t size;        /* number of key/value pairs stored */
    _Entry ** table;    /* bucket array of length _len (> size); bucket = map_index(hash) */
    size_t _len;        /* bucket array length */
    float load_factor;  /* growth increment applied on each resize */
    size_t _thres_hold; /* resize is triggered once size >= _thres_hold */
    map_hash f_hash;    /* key hash function */
    map_equals f_equals; /* key equality function */
    map_free_kv f_free_k; /* key destructor */
    map_free_kv f_free_v; /* value destructor */
    int _c;             /* error code */
    map_alloc f_alloc;  /* allocator (alloc/realloc/free in one) */
    void *ud;           /* user data */
#if defined(J_API_INFO)
    sizeof_kv f_sizeof_k; /* key memory accounting */
    sizeof_kv f_sizeof_v; /* value memory accounting */
    size_t _mem;          /* total tracked memory usage */
#endif
};
#define EntrySize sizeof(_Entry)
#define EntryPSize sizeof(_Entry*)

static void resize(Map *);
static _Entry * find(Map * map, _Entry * head, const void * key, unsigned int hash);
static _Entry * find_with_pre(Map * map, _Entry * head, _Entry ** pre, const void * key, unsigned int hash);
static unsigned int str_hash(const void * str);
static int str_equals(const void * a, const void * b);
static void s_free(void * p);
/* Default allocator: ns == 0 means free, otherwise (re)allocate to ns bytes. */
static void * default_alloc(void* p, size_t os, size_t ns) {
    if (ns != 0)
        return realloc(p, ns);
    free(p);
    return NULL;
}
/**
 * Creates a map sized so that `init` entries fit before the first resize
 * (bucket array is init * 1.75). Returns NULL only if the Map struct itself
 * cannot be allocated; allocation failure of the table is reported via the
 * map's error code instead.
 *
 * NOTE(review): init == 0 yields _len == 0, making map_index divide by zero
 * on first use — confirm callers always pass init > 0.
 */
Map * map_new(map_alloc f, int init) {
    f = !f ? default_alloc : f;
    Map * map = (Map *) f(NULL, 0, sizeof(Map));
    if (!map) return NULL;
    map->size = 0;
    map->load_factor = 0.75f;
    map->_thres_hold = init;
    /* Keys default to C strings. */
    map->f_hash = str_hash;
    map->f_equals = str_equals;
    map->f_free_k = s_free;
    map->f_free_v = s_free;
    map->_len = 0;
    map->_c = 0;
    map->f_alloc = f;
#if defined(J_API_INFO)
    map->f_sizeof_k = NULL;
    map->f_sizeof_v = NULL;
    map->_mem = sizeof(Map);
#endif
    int len = (int) (init * 1.75f);
    size_t ms = EntryPSize * len;
    map->table = (_Entry **) f(NULL, 0, ms);
    if (!map->table) {
        map->_c = ER_MEM;
        return map;
    }
    memset(map->table, 0, ms);
    map->_len = len;
#if defined(J_API_INFO)
    map->_mem += ms;
#endif
    return map;
}
/* Returns the map's error code (0 = healthy). */
int map_ero(Map * map) {
    return map_error(map);
}

/* Sets the growth factor and recomputes the resize threshold accordingly. */
void map_set_load_factor(Map * map, float t) {
    if (map_error(map)) return;
    map->load_factor = t;
    map->_thres_hold = (size_t)(map->_len / (1 + t));
}

/* Replaces the key hash function (no rehash of existing entries). */
void map_set_hash(Map * map, map_hash f) {
    if (map_error(map)) return;
    map->f_hash = f;
}

/* Replaces the key equality function. */
void map_set_equals(Map * map, map_equals f) {
    if (map_error(map)) return;
    map->f_equals = f;
}

/* Replaces the key/value destructors (either may be NULL to disable). */
void map_set_free(Map * map, map_free_kv fk, map_free_kv fv) {
    if (map_error(map)) return;
    map->f_free_k = fk;
    map->f_free_v = fv;
}

/* Attaches opaque user data to the map. */
void map_set_ud(Map *map, void *ud) {
    if (map_error(map)) return;
    map->ud = ud;
}

/* Retrieves the opaque user data. */
void *map_get_ud(Map *map) {
    return map->ud;
}
/**
 * Destroys the map: frees every key/value via the configured destructors,
 * every entry node, the bucket table, and finally the Map struct itself.
 */
void map_free(Map * map) {
    map_free_kv fk = map->f_free_k;
    map_free_kv fv = map->f_free_v;
    size_t i;
    for (i = 0; i < map->_len; i++) {
        _Entry* entry = map->table[i];
        if (!entry) continue;
        /* Walk and free the whole collision chain. */
        do {
            map_free_entry(fk, fv, entry);
            _Entry * temp = entry->next;
            map->f_alloc(entry, EntrySize, 0);
            entry = temp;
        } while (entry);
    }
    map->f_alloc(map->table, EntryPSize * map->_len, 0);
    map->f_hash = NULL;
    map->f_equals = NULL;
    map->f_free_k = NULL;
    map->f_free_v = NULL;
#if defined(J_API_INFO)
    map->f_sizeof_k = NULL;
    map->f_sizeof_v = NULL;
    map->_mem = 0;
#endif
    map->f_alloc(map, sizeof(Map), 0);
}

/**
 * Removes (and frees) every entry but keeps the bucket table allocated,
 * leaving the map ready for reuse.
 */
void map_remove_all(Map *map) {
    map_free_kv fk = map->f_free_k;
    map_free_kv fv = map->f_free_v;
    size_t i;
    for (i = 0; i < map->_len; i++) {
        _Entry* entry = map->table[i];
        if (!entry) continue;
        do {
            map_free_entry(fk, fv, entry);
            _Entry * temp = entry->next;
            map->f_alloc(entry, EntrySize, 0);
            entry = temp;
        } while (entry);
        map->table[i] = NULL;
    }
    map->size = 0;
}
/**
 * Inserts or replaces a key/value pair.
 *
 * Returns the previous value when the key already existed (caller owns it),
 * NULL otherwise. Triggers a resize once size reaches the threshold.
 *
 * NOTE(review): on replacement the caller-supplied `key` is neither stored
 * nor freed — heap-allocated duplicate keys leak at the call site; confirm
 * callers account for this.
 */
void * map_put(Map * map, void * key, void * value) {
    if (map_error(map)) return NULL;
    unsigned int hash = map->f_hash(key);
    unsigned int i = map_index(map, hash);
    _Entry * entry = map->table[i];
    void * ret = NULL;
    int add = 0;
    /// bucket empty: create the first entry
    if (!entry) {
        entry = (_Entry *) map->f_alloc(NULL, 0, EntrySize);
        map->table[i] = entry;
        if (!entry) {
            map->_c = ER_MEM;
            add = 0;
        } else {
            entry->hash = hash;
            entry->key = key;
            entry->value = value;
            entry->next = NULL;
#if defined(J_API_INFO)
            map->_mem += EntrySize
                    + (map->f_sizeof_k ? map->f_sizeof_k(key) : 0)
                    + (map->f_sizeof_v ? map->f_sizeof_v(value) : 0);
#endif
            add = 1;
        }
    } else {
        _Entry *e = find(map, entry, key, hash);
        /// key already present: swap the value, return the old one
        if (e) {
            ret = e->value;
            e->value = value;
#if defined(J_API_INFO)
            map->_mem += (map->f_sizeof_v ? map->f_sizeof_v(value) - map->f_sizeof_v(ret): 0);
#endif
            add = 0;
        }
        /// hash collision: prepend a new entry to the chain
        else {
            e = (_Entry *) map->f_alloc(NULL, 0, EntrySize);
            if (!e) {
                map->_c = ER_MEM;
                add = 0;
            } else {
                e->hash = hash;
                e->key = key;
                e->value = value;
                map->table[i] = e;
                e->next = entry;
#if defined(J_API_INFO)
                map->_mem += EntrySize
                        + (map->f_sizeof_k ? map->f_sizeof_k(key) : 0)
                        + (map->f_sizeof_v ? map->f_sizeof_v(value) : 0);
#endif
                add = 1;
            }
        }
    }
    map->size += add;
    if (map->size >= map->_thres_hold) resize(map);
    return ret;
}
/* Looks up a value by key; NULL when absent or the map is in error state. */
void * map_get(Map *map, const void *key) {
    if (map_error(map)) return NULL;
    unsigned int hash = map->f_hash(key);
    unsigned int i = map_index(map, hash);
    _Entry * entry = map->table[i];
    if (!entry) return NULL;
    _Entry *e = find(map, entry, key, hash);
    if (e) return e->value;
    return NULL;
}

/**
 * Removes a key and returns its value (ownership passes to the caller; the
 * value destructor is NOT invoked). The stored key is freed — unless it is
 * pointer-identical to the lookup `key`, which protects callers that pass
 * the stored key itself.
 */
void * map_remove(Map * map, const void * key) {
    if (map_error(map)) return NULL;
    unsigned int hash = map->f_hash(key);
    unsigned int i = map_index(map, hash);
    _Entry *entry = map->table[i];
    if (!entry) return NULL;
    _Entry *pre = entry;
    _Entry *e = find_with_pre(map, entry, &pre, key, hash);
    if (e) {
        map->size --;
        void * ret = e->value;
        e->hash = 0;
        if (key != e->key && map->f_free_k) {
#if defined(J_API_INFO)
            map->_mem -= (map->f_sizeof_k ? map->f_sizeof_k(e->key) : 0);
#endif
            map->f_free_k(e->key);
        }
        /// entry is the chain head: unlink by advancing the bucket pointer
        if (e == entry) {
            map->table[i] = e->next;
        }
        /// mid-chain: bridge the predecessor over it
        else {
            pre->next = e->next;
        }
        e->next = NULL;
        e->key = NULL;
#if defined(J_API_INFO)
        /* NOTE(review): subtracting `EntrySize - sizeof_v(ret)` looks like a
         * sign slip — the value also leaves the map, so `EntrySize +
         * sizeof_v(ret)` seems intended; confirm the accounting convention. */
        map->_mem -= EntrySize - (map->f_sizeof_v ? map->f_sizeof_v(ret) : 0);
#endif
        map->f_alloc(e, EntrySize, 0);
        return ret;
    }
    return NULL;
}

/* Number of stored key/value pairs (0 while in error state). */
size_t map_size(Map * map) {
    if (map_error(map)) return 0;
    return map->size;
}

/* Current bucket-array length. */
size_t map_table_size(Map * map) {
    return map->_len;
}
/**
 * Copies up to `size` key/value pairs into `out` (unspecified order).
 * Returns the number of pairs written.
 */
size_t map_entrys(Map * map, Map_Entry * out, size_t size) {
    if (map_error(map) || !out || !size) return 0;
    size_t ret = 0;
    size_t i;
    for (i = 0; i < map->_len && ret < size; i++) {
        _Entry * temp = map->table[i];
        if (!temp) continue;
        do {
            out[ret].key = temp->key;
            out[ret].value = temp->value;
            ret ++;
        } while (ret < size && (temp = temp->next));
    }
    return ret;
}

/**
 * Visits every pair with `traverse_function(key, value, ud)`; a non-zero
 * return from the callback stops the traversal early.
 */
void map_traverse(Map *map, map_look_fun traverse_function, void *ud) {
    if (map_error(map) || !traverse_function) return;
    size_t i;
    int result = 0;
    for (i = 0; i < map->_len && !result; i++) {
        _Entry * temp = map->table[i];
        if (!temp) continue;
        do {
            result = traverse_function(temp->key, temp->value, ud);
        } while ((temp = temp->next) && !result);
    }
}
/* Java-style string hash: h = h * 31 + c for each byte of the string. */
static unsigned int str_hash(const void * str) {
    const char *cursor = (const char *) str;
    int h = 0;
    while (*cursor) {
        h = h * 31 + *cursor;
        cursor++;
    }
    return h;
}
/* Default key comparison: NUL-terminated strings are equal iff contents match. */
static int str_equals(const void * a, const void * b) {
    return strcmp((const char *) a, (const char *) b) == 0;
}
/* Default key/value destructor: plain free(). */
static void s_free(void * p) {
    free(p);
}

/**
 * Finds the entry for `key` in a collision chain: pointer identity first,
 * then the user equality function.
 * NOTE(review): `hash` is accepted but never compared — a cheap hash check
 * before f_equals would skip expensive comparisons; confirm intent.
 */
static _Entry * find(Map * map, _Entry * head, const void * key, unsigned int hash) {
    _Entry * e = head;
    while (e) {
        if (e->key == key) break;
        if (map->f_equals && map->f_equals(e->key, key)) break;
        e = e->next;
    }
    return e;
}

/**
 * Same as find(), but also reports the predecessor through `*pre` so the
 * caller can unlink the entry. `*pre` is only meaningful when the match is
 * not the chain head.
 */
static _Entry * find_with_pre(Map * map, _Entry * head, _Entry ** pre, const void * key, unsigned int hash) {
    _Entry * e = head;
    while (e) {
        if (e->key == key) break;
        if (map->f_equals && map->f_equals(e->key, key)) break;
        *pre = e;
        e = e->next;
    }
    return e;
}
/**
 * Grows the bucket array by load_factor (new_len = len * (1 + load_factor))
 * and rehashes every entry into its new bucket. The new threshold equals the
 * old length, i.e. new_len / (1 + load_factor) — consistent with map_new.
 * On allocation failure the map enters the error state.
 */
static void resize(Map *map) {
    size_t old_len = map->_len;
    size_t new_len = (size_t)(map->_len * map->load_factor) + map->_len;
    if (new_len <= old_len) new_len = old_len + 1;
    _Entry ** old_table = map->table;
    map->table = (_Entry **)map->f_alloc(NULL, 0, EntryPSize * new_len);
    if (!map->table) {
        map->_c = ER_MEM;
        return;
    }
    memset(map->table, 0, EntryPSize * new_len);
    map->_len = new_len;
    map->_thres_hold = old_len;
    /// rehash every entry into the new table
    size_t i;
    for (i = 0; i < old_len; i++) {
        _Entry* entry = old_table[i];
        if (!entry) continue;
        old_table[i] = NULL;
        _Entry* temp;
        _Entry* next;
        do {
            /// recompute the bucket from the cached hash; prepend to its chain
            unsigned int ni = map_index(map, entry->hash);
            temp = map->table[ni];
            map->table[ni] = entry;
            next = entry->next;
            entry->next = temp;
            entry = next;
        } while(entry);
    }
    map->f_alloc(old_table, EntryPSize * old_len, 0);
#if defined(J_API_INFO)
    map->_mem += EntryPSize * (new_len - old_len);
#endif
}
/* Flag bits for _data.flag (see struct comment below). */
#define _f_error (char) 1
#define _f_malloc (char) 2
#define _f_finf (char) 4
#define _is_error(d) (((d)->flag) & _f_error)
#define _set_error(d) (d)->flag = (((d)->flag) | _f_error)
#define _is_malloc(d) (((d)->flag) & _f_malloc)
#define _set_malloc(d) (d)->flag = (((d)->flag) | _f_malloc)
#define _is_in_first(d) (((d)->flag) & _f_finf)
#define _set_not_in_first(d) (d)->flag = (((d)->flag) & ~(_f_finf))

/* State carried through map_traverse while serializing the map to JSON. */
typedef struct _data {
    char* str;   /* output buffer (stack at first, heap after growth) */
    size_t len;  /* buffer capacity */
    size_t i;    /* bytes written so far */
    /// three flag bits are used:
    /// * * * *
    /// *  1: an error occurred
    /// *  1: str points to heap memory (must be freed)
    /// *  1: the next entry is the first one (no leading comma)
    char flag;
    map_value_to_string k2s; /* key -> string converter */
    map_value_to_string v2s; /* value -> string converter */
    map_alloc allocFun;      /* allocator shared with the map */
} _data;
/**
 * Ensures at least `min` more bytes fit in d->str after offset d->i,
 * growing the buffer to max(len * 1.75, i + min) when needed.
 *
 * Returns 0 on success, 1 on allocation failure; on failure d->str is left
 * pointing at its previous, still-valid buffer.
 *
 * BUGFIX: the old heap path assigned the realloc result straight to d->str
 * and memset() it BEFORE any NULL check — a failed realloc therefore both
 * leaked the old buffer and dereferenced NULL.
 */
int _increase_str(_data *d, size_t min) {
    map_alloc m_malloc = d->allocFun;
    if (d->len - d->i < min) {
        size_t newlen = (size_t) (d->len * 1.75f);
        size_t needlen = min + d->i;
        newlen = newlen < needlen ? needlen : newlen;
        if (_is_malloc(d)) {
            /* Already on the heap: grow in place when possible. */
            char *grown = m_malloc(d->str, d->len * sizeof(char), newlen);
            if (!grown)
                return 1;
            memset(grown + d->len, 0, newlen - d->len);
            d->str = grown;
        } else {
            /* First growth: d->str is the caller's stack buffer, so allocate
             * fresh heap memory and copy the existing content over. */
            char *new_str = m_malloc(NULL, 0, newlen);
            if (!new_str)
                return 1;
            memcpy(new_str, d->str, d->len);
            memset(new_str + d->len, 0, newlen - d->len);
            d->str = new_str;
        }
        d->len = newlen;
        _set_malloc(d);
    }
    return 0;
}
/**
 * map_traverse callback: appends one `"key":value` pair (comma-separated
 * after the first) to the JSON buffer in `ud`. Returns non-zero to abort the
 * traversal when the buffer cannot grow.
 */
int _map_look_to_json(const void *key, const void *value, void *ud) {
    _data *d = (_data *)ud;
    int needFreeKeyStr;
    int needFreeValueStr;
    char *keystr = d->k2s(key, &needFreeKeyStr);
    char *valuestr = d->v2s(value, &needFreeValueStr);
    size_t sl = strlen(keystr);
    size_t vl = strlen(valuestr);
    int isFirstData = _is_in_first(d);
    /* Space needed: key + value + two quotes + colon (+ leading comma). */
    if (_increase_str(d, vl + sl + (isFirstData ? 3 : 4))) {
        if (needFreeKeyStr)
            d->allocFun(keystr, (sl + 1) * sizeof(char), 0);
        if (needFreeValueStr)
            d->allocFun(valuestr, (vl + 1) * sizeof(char), 0);
        _set_error(d);
        return 1;
    }
    if (!isFirstData)
        d->str[d->i++] = ',';
    else
        _set_not_in_first(d);
    d->str[d->i++] = '"';
    memcpy(&d->str[d->i], keystr, sl * sizeof(char));
    d->i += sl;
    d->str[d->i++] = '"';
    d->str[d->i++] = ':';
    /// "key": written; now append the raw value text
    memcpy(&d->str[d->i], valuestr, vl * sizeof(char));
    d->i += vl;
    if (needFreeKeyStr)
        d->allocFun(keystr, (sl + 1) * sizeof(char), 0);
    if (needFreeValueStr)
        d->allocFun(valuestr, (vl + 1) * sizeof(char), 0);
    return 0;
}
static char *__default_to_string(const void *v, int *needFree) {
if (needFree) *needFree = 0;
return (char *)v;
}
/**
 * Serializes the map to a heap-allocated JSON object string
 * ("{"key":value,...}"). k2s/v2s convert keys/values to text; NULL selects
 * the identity converter (entries treated as C strings). Returns NULL on
 * error; caller frees the result with the map's allocator.
 *
 * NOTE(review): on the error paths below a grown (heap) d.str is not freed
 * before returning NULL — confirm whether that leak is acceptable here.
 */
char *map_to_string(Map *map, map_value_to_string k2s, map_value_to_string v2s) {
    if (map_error(map)) return NULL;
    /* Small outputs are assembled on the stack; _increase_str moves to the
     * heap only when 100 bytes are not enough. */
    static const size_t len = 100;
    char str[len] = {'{', '\0'};
    if (!k2s)
        k2s = __default_to_string;
    if (!v2s)
        v2s = __default_to_string;
    _data d = {str, len, 1, _f_finf, k2s, v2s, map->f_alloc};
    map_traverse(map, _map_look_to_json, &d);
    if (_is_error(&d) || _increase_str(&d, d.len + 2))
        return NULL;
    d.str[d.i++] = '}';
    d.str[d.i] = '\0';
    /* Return an exact-sized copy owned by the caller. */
    char *ret = map->f_alloc(NULL, 0, (strlen(d.str) + 1) * sizeof(char));
    if (ret) {
        strcpy(ret, d.str);
    }
    if (_is_malloc(&d)) {
        map->f_alloc(d.str, d.len * sizeof(char), 0);
    }
    return ret;
}
///<editor-fold>
#if defined(J_API_INFO)
/* Total memory currently attributed to the map (struct + table + entries). */
size_t map_mem(Map * map) {
    return map->_mem;
}

/* Installs the key/value size callbacks used for memory accounting. */
void map_set_sizeof(Map * map, sizeof_kv k, sizeof_kv v) {
    map->f_sizeof_k = k;
    map->f_sizeof_v = v;
}
#endif
|
package com.cektrend.cekinhome.di
import android.content.Context
import androidx.room.Room
import com.cektrend.cekinhome.data.db.AppDatabase
import dagger.Module
import dagger.Provides
import dagger.hilt.InstallIn
import dagger.hilt.android.qualifiers.ApplicationContext
import dagger.hilt.components.SingletonComponent
import io.reactivex.disposables.CompositeDisposable
/**
* Created by Saipul Muiz on 7/30/2021.
* Cekinhome | Made with love
* Check our website -> Cektrend Studio | https://cektrend.com for more information
* For question and project collaboration contact me to saipulmuiz87@gmail.com
*/
@InstallIn(SingletonComponent::class)
@Module
class AppModule {

    /** Name of the on-device Room database file. */
    @Provides
    @DbName
    fun provideDbName() = "cekinhome.db"

    /**
     * Singleton Room database for the app.
     *
     * Renamed from provideStudentDatabase: the module has nothing to do with
     * students. Hilt binds providers by return type, so the rename does not
     * affect any injection site.
     */
    @Provides
    fun provideAppDatabase(@ApplicationContext context: Context, @DbName dbName: String): AppDatabase {
        return Room.databaseBuilder(context,
            AppDatabase::class.java, dbName)
            .build()
    }

    /** DAO for history-log entries (renamed from provideStudentDao to match what it provides). */
    @Provides
    fun provideHistoryLogDao(appDatabase: AppDatabase) = appDatabase.historyLogDao()

    /** Fresh RxJava disposable container per injection site. */
    @Provides
    fun provideCompositeDisposable() = CompositeDisposable()
}
|
#!/bin/bash
# Download and unpack the STAViS pretrained models into the current directory.
# Abort immediately if the download or extraction fails, instead of running
# tar/rm/mv on a missing or truncated archive.
set -euo pipefail

wget -O pretrained_models.tar.gz http://cvsp.cs.ntua.gr/research/stavis/data/pretrained_models.tar.gz
tar -xzf pretrained_models.tar.gz
rm pretrained_models.tar.gz
mv pretrained_models/* .
rm -rf pretrained_models/
|
package singularity.measure
import java.text.SimpleDateFormat
/**
 * Time measurement utilities: formatting of nanosecond durations, wall-clock
 * measurement of by-name tasks, timeout-bounded execution, and timestamping.
 */
object TimeTools {
  /** A duration in nanoseconds (as returned by System.nanoTime). */
  type Nanosecond = Long

  /**
   * Formats a nanosecond duration as milliseconds, e.g. "1,234.56ms".
   * NOTE(review): the local `millis` actually holds whole SECONDS
   * (nano / 1e9); it supplies the thousands group of the millisecond
   * rendering, so the comma in "%d,%06.2fms" acts as a thousands separator.
   */
  def nanoToMillisString(nanosecond: Nanosecond): String = {
    val millis = (nanosecond /1e9).toInt
    if(millis>0){
      // Milliseconds remaining after removing the whole-second part.
      val ms = nanosecond/1e6 - millis * 1000
      "%d,%06.2fms".format(millis.toInt, ms)
    } else
      "%.2fms".format(nanosecond /1e6)
  }

  /** Formats a nanosecond duration as seconds with 3 decimals, e.g. "0.123s". */
  def nanoToSecondString(nanosecond: Nanosecond): String = {
    "%.3fs".format(nanosecond/1e9)
  }

  /**
   * Runs `task`, optionally printing the elapsed wall-clock time tagged with
   * `taskName`, and returns the task's result.
   */
  def printTimeUsed[A](taskName: String, shouldPrint: Boolean = true)(task: => A): A = {
    val (nano, result) = measureTime(task)
    if(shouldPrint)
      println(s"*** [$taskName] time used: ${nanoToMillisString(nano)} ***")
    result
  }

  /** Evaluates `task` once and returns (elapsed nanoseconds, result). */
  def measureTime[A](task: => A): (Nanosecond, A) = {
    val t1 = System.nanoTime()
    val result = task
    val time = System.nanoTime() - t1
    (time, result)
  }

  /** Since this method uses Thread.sleep, it may not be accurate for methods with very short running time */
  def scaleUpRunningTime[A](factor: Int)(task: => A): A = {
    require(factor >= 1)
    if(factor == 1) return task
    val (nano, result) = measureTime(task)
    // Sleep for (factor - 1) times the measured duration so total elapsed
    // time is roughly `factor` times the task's own running time.
    val extraNano = (factor - 1) * nano
    val millis = extraNano / 1000000
    val nanos = extraNano - millis * 1000000
    Thread.sleep(millis, nanos.toInt)
    result
  }

  /** Evaluates the by-name argument exactly once. */
  @inline
  def runOnce[A](f: => A): A = {
    f
  }

  /** Evaluates the by-name argument five times, returning the last result. */
  @inline
  def run5Times[A](f: => A): A = {
    f;f;f;f;f
  }

  /**
   * Runs `f` on a Future (global execution context) and waits at most
   * `timeoutMs` milliseconds; None on timeout. NOTE(review): the underlying
   * computation is NOT cancelled on timeout — it keeps running.
   */
  def runWithTimeout[T](timeoutMs: Millisecond)(f: => T) : Option[T] = {
    import scala.concurrent.ExecutionContext.Implicits.global
    import scala.concurrent._
    import scala.concurrent.duration._
    try {
      Some(Await.result(Future(f), timeoutMs.milliseconds))
    } catch {
      case e: TimeoutException => None
    }
  }

  /** A duration in milliseconds. */
  type Millisecond = Long
  /** A timer returning the milliseconds elapsed since it was created. */
  type Timer = () => Millisecond

  /** Passes `f` a timer that measures milliseconds since this call started. */
  def runWithATimer[T](f: Timer => T): T = {
    val startTime = System.currentTimeMillis()
    val timer = () => {
      System.currentTimeMillis() - startTime
    }
    f(timer)
  }

  // NOTE(review): SimpleDateFormat is not thread-safe; sharing this val
  // across threads can corrupt output — confirm single-threaded usage.
  val numericalDateTimeFormat = new SimpleDateFormat("yy-MM-dd-HH:mm:ss")

  /** Current local date-time formatted as "yy-MM-dd-HH:mm:ss". */
  def numericalDateTime(): String = {
    import java.util.Calendar
    val date = Calendar.getInstance().getTime
    numericalDateTimeFormat.format(date)
  }
}
|
// Demonstrates boolean conjunction, disjunction and negation.
// Fix: two printed labels contradicted the propositions they reported —
// proposicao2 is `b < c` (label said `b > c`) and proposicao3 is `a < b`
// (label said `a > b`).
void main() {
  int a = 20;
  int b = 5;
  int c = 7;
  // True propositions.
  bool proposicao1 = a > b;
  bool proposicao2 = b < c;
  // Output.
  print('$a > $b && $b < $c - Resposta: ${proposicao1 && proposicao2}');
  print('$a > $b || $b < $c - Resposta: ${proposicao1 || proposicao2}');
  // Checking the false case.
  bool proposicao3 = a < b;
  bool proposicao4 = b > c;
  // Output.
  print('$a < $b && $b > $c - Resposta: ${proposicao3 && proposicao4}');
  print('$a < $b || $b > $c - Resposta: ${proposicao3 || proposicao4}');
  // Negation.
  bool v = true;
  bool f = false;
  // Output.
  print('Negando o v: ${!v}');
  print('Negando o f: ${!f}');
}
|
// Task Class: Represents a single to-do entry.
export default class Task {
  constructor(description, completed, index) {
    this.description = description;
    this.completed = completed;
    this.index = index;
  }

  // Human-readable one-line summary of this task.
  task2string() {
    return `Index: ${this.index} - Description: ${this.description} - Completed: ${this.completed} --`;
  }
}
|
<?php
namespace app\components;
use Yii;
use yii\base\Component;
use yii\base\InvalidCallException;
class Github extends Component
{
    /**
     * Builds a GitHub API client authenticated with the current user's
     * OAuth access token (obtained from the "github" auth client).
     *
     * @return \Github\Client
     * @throws InvalidCallException when no user is logged in
     */
    public function client()
    {
        if (Yii::$app->user->isGuest) {
            // TODO auto-relogin
            throw new InvalidCallException('Can not create github client for not logged in user.');
        }
        // Fetch the stored OAuth token for the logged-in user.
        /** @var \yii\authclient\clients\GitHub $oauthClient */
        $oauthClient = Yii::$app->authClientCollection->getClient('github');
        $token = $oauthClient->getAccessToken()->getToken();
        // Create and authenticate the API client.
        $githubClient = new \Github\Client();
        $githubClient->authenticate($token, '', \Github\Client::AUTH_HTTP_TOKEN);
        return $githubClient;
    }
}
|
use std::collections::BTreeMap;
/// Simulates lanternfish population growth (AoC 2021 day 6).
///
/// `initial_population` maps "days until spawn" to a fish count. Each day
/// every timer decreases by one; fish that reach -1 reset to 6 and each
/// spawns a new fish with timer 8. Returns the total population after
/// `days` days. Counts are u128 to survive exponential growth.
fn calculate_population(initial_population: &BTreeMap<i32, u128>, days: u32) -> u128 {
    (0..days)
        .fold(initial_population.clone(), |generation, _| {
            // Age every cohort by one day (dropping any stale negative keys).
            let mut aged: BTreeMap<i32, u128> = generation
                .into_iter()
                .filter(|&(timer, _)| timer >= 0)
                .map(|(timer, count)| (timer - 1, count))
                .collect();
            // Fish that just hit -1 reset to 6 and each spawns a fish at 8.
            // (remove() replaces the original's get/index/insert(-1, 0) dance.)
            if let Some(spawners) = aged.remove(&-1) {
                *aged.entry(6).or_insert(0) += spawners;
                *aged.entry(8).or_insert(0) += spawners;
            }
            aged
        })
        .values()
        .sum()
}
fn main() {
    // Parse the comma-separated spawn timers into a histogram: timer -> count.
    let mut population: BTreeMap<i32, u128> = BTreeMap::new();
    for token in include_str!("../input").split(",") {
        let timer: i32 = token.trim().parse().unwrap();
        *population.entry(timer).or_insert(0) += 1;
    }
    // Part one: 80 days of growth; part two: 256 days.
    println!("Part one: {}", calculate_population(&population, 80));
    println!("Part two: {}", calculate_population(&population, 256));
}
|
#!/usr/bin/env bash
# Installs the tesla CLI: makes the entry script executable, stages the config
# in /tmp, copies all py scripts to /usr/local/bin (which should already be on
# the user PATH), and renames the entry point. Abort on the first failure so a
# partial install is not left behind.
set -euo pipefail

# +x (execute bit) only; the previous 777 also made the script world-writable.
chmod +x tesla.py
cp Configurations.properties /tmp/
cp *.py /usr/local/bin
cd /usr/local/bin
mv tesla.py tesla
|
using System;
using System.Threading;
using System.Threading.Tasks;
using ComposableAsync.Resilient.ExceptionFilter;
namespace ComposableAsync.Resilient.Retry
{
    /// <summary>
    /// Basic dispatcher that re-runs a failing action until it succeeds, the
    /// retry budget is exhausted, or the exception matches the filter.
    /// Stateless between calls, so <see cref="Clone"/> returns this instance.
    /// </summary>
    internal sealed class RetryDispatcher : IBasicDispatcher
    {
        // Number of retries allowed after the initial attempt.
        private readonly int _MaxRetry;
        // NOTE(review): IsFiltered == true is treated here as "rethrow, do not
        // retry" — confirm against the IExceptionFilter contract.
        private readonly IExceptionFilter _ExceptionFilter;

        internal RetryDispatcher(IExceptionFilter exceptionFilter, int maxRetry)
        {
            _ExceptionFilter = exceptionFilter;
            _MaxRetry = maxRetry;
        }

        // Safe to share: the dispatcher holds no per-call state.
        public IBasicDispatcher Clone() => this;

        /// <summary>Runs an async action, retrying on failure. Cancellation is
        /// checked before each attempt, not during the action itself.</summary>
        public async Task Enqueue(Func<Task> action, CancellationToken cancellationToken)
        {
            var count = 0;
            while (true)
            {
                cancellationToken.ThrowIfCancellationRequested();
                try
                {
                    await action();
                    return;
                }
                catch (Exception exception)
                {
                    if (ShouldRethrow(ref count, exception))
                        throw;
                }
            }
        }

        /// <summary>Runs an async function, retrying on failure; returns its result.</summary>
        public async Task<T> Enqueue<T>(Func<Task<T>> action, CancellationToken cancellationToken)
        {
            var count = 0;
            while (true)
            {
                cancellationToken.ThrowIfCancellationRequested();
                try
                {
                    return await action();
                }
                catch (Exception exception)
                {
                    if (ShouldRethrow(ref count, exception))
                        throw;
                }
            }
        }

        /// <summary>Runs a synchronous function (on the caller's thread),
        /// retrying on failure; wraps the result in a completed task.</summary>
        public Task<T> Enqueue<T>(Func<T> action, CancellationToken cancellationToken)
        {
            var count = 0;
            while (true)
            {
                cancellationToken.ThrowIfCancellationRequested();
                try
                {
                    return Task.FromResult(action());
                }
                catch (Exception exception)
                {
                    if (ShouldRethrow(ref count, exception))
                        throw;
                }
            }
        }

        /// <summary>Runs a synchronous action (on the caller's thread),
        /// retrying on failure.</summary>
        public Task Enqueue(Action action, CancellationToken cancellationToken)
        {
            var count = 0;
            while (true)
            {
                cancellationToken.ThrowIfCancellationRequested();
                try
                {
                    action();
                    return Task.CompletedTask;
                }
                catch (Exception exception)
                {
                    if (ShouldRethrow(ref count, exception))
                        throw;
                }
            }
        }

        // Rethrow when the retry budget is spent or the filter matches.
        // Post-increment: the comparison uses the current attempt count
        // BEFORE incrementing, so exactly _MaxRetry retries are performed.
        private bool ShouldRethrow(ref int count, Exception exception)
        {
            return (count++ == _MaxRetry) || (_ExceptionFilter.IsFiltered(exception));
        }
    }
}
|
(ns agentcity.schemas.gender
  (:require [schema.core :as sc]
            [agentcity.utilities.string_util :as str]))

;; A name is valid when it is non-blank and at most 10 characters long.
(defn valid-name? [name]
  (str/non-blank-with-max-length? 10 name))

;; Fix: sc/Int is a schema VALUE, not a function — (sc/Int) attempts to
;; invoke it and throws when the namespace is loaded.
(sc/defschema Gender
  {:Id sc/Int
   :Name (sc/constrained sc/Str valid-name?)})
|
package com.javatests;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
class VowelsTest
{
    // Each case pins the expected number of vowels (a, e, i, o, u)
    // returned by Vowels.getCount for a sample input.

    @Test
    public void testCase1() {
        final String input = "abracadabra";
        assertEquals(5, Vowels.getCount(input));
    }

    @Test
    public void testCase2() {
        final String input = "jfdkslajkajki jk i jfdkofjdkofjdsk u fjsdku";
        assertEquals(8, Vowels.getCount(input));
    }
}
|
// Auto-generated Doxygen navigation data for the MyHomieDevice class page:
// each entry is [member name, target HTML anchor, children (none here)].
// Do not edit by hand — regenerate with Doxygen.
var class_my_homie_device =
[
    [ "addNode", "class_my_homie_device.html#addfbcc04b7bb91a6a19ed4f8756e621a", null ],
    [ "getDef", "class_my_homie_device.html#ad49c3d32b890ce2d9443afe1ceb87a85", null ],
    [ "getNode", "class_my_homie_device.html#a8ada4f85c0007406cd7d73125d35f8ca", null ],
    [ "getNode", "class_my_homie_device.html#a95e41f8a3cba539cf37ad73d9529b1ce", null ],
    [ "init", "class_my_homie_device.html#ab1c79c5645d99bb94a74ff9fc17fa528", null ],
    [ "length", "class_my_homie_device.html#a88955bc85f377f97ae4e0ada2bc5d488", null ],
    [ "loop", "class_my_homie_device.html#a39e0e310832a779327cf7bed66f650da", null ],
    [ "setFactor", "class_my_homie_device.html#a3c07a55033e861473aac66ac2a821fa2", null ],
    [ "setFilter", "class_my_homie_device.html#af69eaeb2dea8c0a6307f85565a810a47", null ],
    [ "setFilter", "class_my_homie_device.html#aedb5c484f24fe3e52fe605259b765c2a", null ],
    [ "setOption", "class_my_homie_device.html#a1585b4072311c785c2f1f2c9c3b4db19", null ],
    [ "setValue", "class_my_homie_device.html#a809ffde9e36782389fd6dccd44a30d1d", null ]
];
|
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
module EncodeJsonLaws
( encodeJsonLaws
) where
import qualified Data.Aeson as Aeson
import Linnet.ContentTypes (ApplicationJson)
import Linnet.Encode
import Test.QuickCheck (Arbitrary, property)
import Test.QuickCheck.Classes (Laws (..))
-- | QuickCheck 'Laws' asserting that JSON encoding of @a@ round-trips:
-- decoding the 'ApplicationJson' encoding of any value yields @Just@ the
-- original value back.
encodeJsonLaws ::
     forall a. (Encode ApplicationJson a, Arbitrary a, Show a, Aeson.FromJSON a, Eq a)
  => Laws
encodeJsonLaws = Laws "EncodeJSON" [("roundTrip", roundTripProperty)]
  where
    roundTripProperty =
      property $ \(value :: a) -> (Aeson.decode . encode @ApplicationJson) value == Just value
|
+++
title = "All Desktop Resources"
draft = false
desktop_resource_list = true
[menu]
[menu.desktop]
title = "All Resources (Single Page)"
identifier = "desktop/resources/index.md All Resources"
parent = "desktop/resources"
weight = 10
+++
{{< desktop_resource_yaml_all >}}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.