text
stringlengths 27
775k
|
|---|
module.exports = [
// top candidates
{
name: "Mem[e]mory card game",
description:
"this is probably the most fun side project I had so far. Functionality: Select doges from doge cards!! ",
keywords: ["React"],
category: "web dev",
url: "https://violetguos.github.io/memory_card/",
icon: "🐶",
},
{
name: "Speaker Accent Classification",
description: "Classify the accents of audiobook with CNN and RNN.",
keywords: ["Praat", "PyTorch", "Python"],
category: "deep learning",
url: "https://github.com/violetguos/project-ift6390",
icon: "🗣",
},
{
name: "Fake news detections",
description:
"Linear, ensemble, and neural network models to perform multi-class classification on news articles.",
keywords: ["NLP", "LSTM"],
category: "NLP",
url: "https://github.com/violetguos/COMP550-Project",
icon: "🗞",
},
{
name: "multilingual learning game",
description:
"Kids' language learning game built from scratch. Submitted to code jam.",
keywords: ["code jam", "game"],
category: "web dev",
url: "https://itch.io/jam/top-jam-1/rate/1149862",
icon: "🔡",
},
// the rest of the projects
// react
{
name: "Online CV",
description: "Fill out a simple form and present your online CV",
keywords: ["React"],
category: "web dev",
url: "https://violetguos.github.io/cv-react",
icon: "💼",
},
// rails
{
name: "Eventpriv",
description: "Invite friends to your private events!! No more pesty DMs! ",
keywords: ["Ruby", "Rails", "Bulma"],
category: "web dev",
url: "https://github.com/violetguos/eventpriv",
icon: "👥",
},
{
name: "Flight booker",
description:
"book tickets for multiple passengers. Integrated Stripe API for Ruby",
keywords: ["Ruby", "Rails", "Stripe"],
category: "web dev",
// NOTE(review): this URL points at an unrelated repository
// (berat/wordBox-react-native) — presumably a copy-paste slip.
// Confirm and restore the real flight-booker repo link.
url: "https://github.com/berat/wordBox-react-native",
icon: "✈️",
},
// Vanilla JS
{
name: "Music box",
description: "Dynamic Gif display based on your music.",
keywords: ["JavaScript"],
category: "web dev",
url: "https://yayinternet.github.io/hw4-music-box-violetguos/",
icon: "🎶",
},
{
name: "Flash cards",
dscription: "A set of flash cards, swipe right to learn CSS & Korean.",
keywords: ["JavaScript", "CSS", "HTML"],
category: "web dev",
url: "https://yayinternet.github.io/hw3-flashcards-violetguos/",
icon: "📇",
},
{
name: "Adopt your pup",
description: "Find a puppy based on your personality!",
keywords: ["HTML", "CSS", "JavaScript"],
category: "web dev",
url: "https://yayinternet.github.io/hw2-quiz-ext-violetguos/",
icon: "🐶",
},
{
name: "Tic Tac Toe",
description:
"Classic tic tac toe, aka naughts and crosses. Also a classic UI exercise.",
keywords: ["HTML", "CSS", "JavaScript"],
category: "web dev",
url: "https://violetguos.github.io/js-odin/tic-tac-toe/",
icon: "⭕️",
},
{
name: "Rock paper scissor",
description: "Classic rock paper scissor. Also a classic UI project.",
keywords: ["HTML", "CSS", "JavaScript"],
category: "web dev",
url: "https://violetguos.github.io/chifoumi/",
icon: "🔨",
},
{
name: "etch a sketch",
description: "the ipad of our times.",
keywords: ["HTML", "CSS", "JavaScript"],
category: "web dev",
url: "https://violetguos.github.io/etch-a-sketch/",
icon: "👩🎨",
},
// Pure CSS
{
name: "mock Youtube",
description: "Mocks the UI of Youtube. Built without any UI framework.",
keywords: ["HTML", "CSS"],
category: "web dev",
url: "https://violetguos.github.io/html-css/youtube/index.html",
icon: "📺",
},
{
name: "IOS calculator on the web",
description:
"Replica of the iOS 12 calculator. Built without any UI framework.",
keywords: ["HTML", "CSS", "JavaScript"],
category: "web dev",
url: "https://violetguos.github.io/web-calc/",
icon: "🧮",
},
// deep learning
{
name:
"Multi-digit street number bounding box segmentation and classification",
description:
"Use a pretrained Faster RCNN to segment street numbers and a ResNet to classify the digits.",
keywords: ["PyTorch", "ResNet", "Faster RCNN"],
category: "deep learning",
url: "https://github.com/violetguos/humanware",
icon: "🚪",
},
{
name: "Building deep learning NLP models from scratch",
description:
"Built RNN, GRU, transformer from scratch (without using the predefined cells in Pytorch)!",
keywords: ["Pytorch"],
category: "deep learning",
url: "https://github.com/violetguos/ift6135-rnn",
icon: "🗞",
},
{
name: "Predict User Identity from ECG Signals",
description:
"Used semi-supervised learning and VAE to predict identity of users based on ECG signals",
keywords: ["Pytorch", "VAE"],
category: "deep learning",
url: "https://github.com/violetguos/OMsignal",
icon: "🩺",
},
// data science
{
name: "Basic NLP algorithms",
description: "Sentiment analysis and decoding ciphers",
keywords: ["NLP", "NLTK"],
category: "NLP",
url: "https://github.com/violetguos/nlp",
icon: "📑",
},
{
name: "Fake news detections",
description:
"Linear, ensemble, and neural network models to perform multi-class classification on news articles.",
keywords: ["NLP", "LSTM"],
category: "NLP",
url: "https://github.com/violetguos/COMP550-Project",
icon: "🗞",
},
// ruby
{
name: "Chess",
description: "command line chess game",
keywords: ["Ruby", "rspec"],
category: "Ruby",
url: "https://github.com/violetguos/cmdline-chess",
icon: "♔",
},
]
|
## logger
模仿beego/logs模块
## 配置
#### 设置异步
> SetAsync()
#### 设置log等级
> SetLevel(l int)
- LevelEmergency
- LevelAlert
- LevelCritical
- LevelError
- LevelWarning
- LevelNotice
- LevelInformational
- LevelDebug
#### 设置logger
> SetLogger(adaptername string, config string)
目前支持的 adapter
- Console
- File
- ElasticSearch
- Ali Log Service
## 计划
新添配置，使出现不同等级的 log 时，可自定义处理方式。
|
from turtle import Turtle
class ScoreCard(Turtle):
    """Invisible turtle used only to write the end-of-game message on screen."""

    def __init__(self):
        # Modernized: Python 3 zero-argument super()
        # (was the legacy super(ScoreCard, self).__init__()).
        super().__init__()
        self.color("white")
        self.penup()       # don't draw a line while moving to center
        self.goto(0, 0)    # message is written at the screen center
        self.hideturtle()  # only the text should be visible, not the turtle

    def finish(self, success):
        """Clear any previous text and write the final result.

        Args:
            success: truthy when the player won, falsy otherwise.
        """
        self.clear()
        if success:
            self.write("SUCCESS!")
        else:
            self.write("Sorry! Game Over")
|
# Entry point for the vagrant reverse-samba plugin: loading this file pulls in
# Vagrant itself plus the plugin's version constant and registration code.
require 'vagrant'
require 'vagrant/reverse_samba/version'
require 'vagrant/reverse_samba/plugin'
# Namespace declaration only — behavior lives in the files required above.
module Vagrant
module ReverseSamba
end
end
|
module ParseSpecificationLanguage where
import Data.Char
import Data.IP
import Data.List
import Data.List.Split
import qualified Data.Map as Map
import Data.Maybe
import ParserHelp
import Types
-- | Tokenize a specification string. Operator tokens (NOT, =>, =, parens,
-- ':', ',') become their own tokens; '#' starts a comment that runs to the
-- end of the line; runs of [A-Za-z0-9./_-] form term tokens.
-- NOTE(review): the 'N':'O':'T' pattern fires on any input *starting* with
-- "NOT" (e.g. "NOTE" lexes as "NOT","E") — confirm that is intended.
lexer :: String -> [String]
lexer s
| all isSpace s = []
| 'N':'O':'T':xs <- afterSpaces = "NOT":lexer xs
| '=':'>':xs <- afterSpaces = "=>":lexer xs
| '=':xs <- afterSpaces = "=":lexer xs
| '(':xs <- afterSpaces = "(":lexer xs
| ')':xs <- afterSpaces = ")":lexer xs
| ':':xs <- afterSpaces = ":":lexer xs
| ',':xs <- afterSpaces = ",":lexer xs
| '#':xs <- afterSpaces = lexer . dropWhile (/= '\n') $ xs -- skip comment line
| length nextTerm >= 1 = nextTerm:lexer afterTerm
| otherwise = error $ "Unrecognized pattern " ++ s ++ afterTerm
where
afterSpaces = dropWhile isSpace s
(nextTerm, afterTerm) = span ((flip elem) ('.':'-':'_':'/':['a'..'z'] ++ ['A'..'Z'] ++ ['0'..'9'])) afterSpaces
-- | Split the token stream on "," into one rule per segment, tag each with
-- its position (used as the rule Label), and parse each piece.
parse :: [String] -> [ExampleRuleInstruction]
parse s =
let
instrCon = map parseInstruction (splitOn [","] s)
-- pair each (tokens, instruction constructor) with its index, the label
instrCon' = map (\((s, f), i)-> (s, f, i)) (zip instrCon [0..])
in
map (\(x, sCon, l) -> sCon (parseRule x l)) instrCon'--map parseRule (splitOn [","] s)
-- | Recognize an optional "acl <chain> :" prefix; otherwise no instruction.
parseInstruction :: [String] -> ([String], ExampleRule -> ExampleRuleInstruction)
parseInstruction ("acl":c:":":xs) = (xs, ToChainNamed Repair c)
parseInstruction xs = (xs, NoInstruction)
-- | Split a rule at "=>" into criteria tokens and an optional target token.
parseRule :: [String] -> Label -> ExampleRule
parseRule s l =
let
(c, t) = break ("=>" == ) s
in
Rule (parseSpecificationCriteria c) (if not . null $ t then (parseSpecificationTarget $ t !! 1) else NoTarget) l
-- | True exactly for the two conjunction keywords "AND" and "OR".
isConjunction :: String -> Bool
isConjunction = flip elem ["AND", "OR"]
-- | Map a conjunction keyword to its criteria constructor (And / Or);
-- Nothing for anything else.
toConjunction :: String -> Maybe ([ExampleCriteria] -> ExampleCriteria)
toConjunction "AND" = Just And
toConjunction "OR" = Just Or
toConjunction _ = Nothing
--Returns Nothing if passed an empty list, or the conjunction corresponding
--to "AND" or "OR" at the head of the list, if that exists
--if the list has elements, and the first is not valid, errors
conjunctionAtFront :: [String] -> Maybe ([ExampleCriteria] -> ExampleCriteria)
conjunctionAtFront [] = Nothing
-- non-empty: the head must be a conjunction keyword, anything else is fatal
conjunctionAtFront s = if isConjunction (head s) then
toConjunction (head s)
else
error ("Invalid!: " ++ (head s))
-- | Parse a full criteria expression. A leading "(" group is parsed
-- recursively; if a conjunction follows (the group or a single criterion),
-- the rest of the expression is folded into that conjunction's list.
parseSpecificationCriteria :: [String] -> [ExampleCriteria]
parseSpecificationCriteria s
| [] <- s = []
| ("(":xs) <-s =
let
inParen = parseSpecificationCriteria (findInLeadingParenthesis $ "(":xs)
after = findAfterLeadingParenthesis $ "(":xs
conj = conjunctionAtFront after
in
if isJust conj then [fromJust conj (inParen ++ (parseSpecificationCriteria $ tail after))] else inParen
| otherwise =
let
(c, xs) = parseSpecificationCriteria' s
conj = conjunctionAtFront xs
in
if isJust conj then [fromJust conj $ c:(parseSpecificationCriteria $ tail xs)] else [c]
-- | Parse a single criterion from the front of the token list, returning it
-- together with the unconsumed tokens. Recognizes NOT, protocol/port/ip/time
-- "key = value" forms, known flag names, and falls back to a string criterion
-- consuming the whole remainder.
parseSpecificationCriteria' :: [String] -> (ExampleCriteria, [String])
parseSpecificationCriteria' s
| ("NOT":xs) <- s =
let
(next, xs') = parseSpecificationCriteria' xs
in
(InCNot $ next, xs')
| ("protocol":"=":p:xs) <- s =
let
p' = if isInteger p then (read p :: Int) else error "Invalid protocol"
in
(InC $ Protocol p', xs)
| ("destination_port":"=":dp:xs) <- s =
let
p = if isInteger dp then (read dp :: Int) else error "Invalid port"
in
(InC $ Port Destination (Left p), xs)
| ("source_port":"=":dp:xs) <- s = --This is terrible, don't duplicate like this...
let
p = if isInteger dp then (read dp :: Int) else error "Invalid port"
in
(InC $ Port Source (Left p), xs)
-- bare addresses get a /32 suffix so everything parses as a CIDR range
| ("destination_ip":"=":di:xs) <- s =
let di' = if '/' `elem` di then di else di ++ "/32" in (InC . (IPAddress Destination) . toIPRange $ di', xs)
| ("source_ip":"=":di:xs) <- s =
let di' = if '/' `elem` di then di else di ++ "/32" in (InC . (IPAddress Source) . toIPRange $ di', xs)
| ("time":"=":t:xs) <- s =
let
t' = if isInteger t then (read t :: Int) else error "Invalid time"
in
(Ext . Time $ t', xs)
-- a known flag keyword maps straight to its criterion
| (head s) `elem` (Map.keys stringsToFlags) = (InC . fromJust $ Map.lookup (head s) stringsToFlags, tail s)
-- fallback: treat the entire remainder as one opaque string criterion
| otherwise = (InC . SC $ concat s, [])
-- | Convert a target token into its Target value; any other token is fatal.
parseSpecificationTarget :: String -> Target
parseSpecificationTarget t
| t == "DROP" = DROP
| t == "ACCEPT" = ACCEPT
| otherwise = error ("Unrecognized target = " ++ show t)
--Given a list of strings beginning with "(", finds all strings up to the matching ")"
findInLeadingParenthesis :: [String] -> [String]
findInLeadingParenthesis ("(":xs) = findInLeadingParenthesis' xs 1
-- anything not starting with "(" is returned unchanged
findInLeadingParenthesis s = s
-- | Collect tokens until the ")" that closes the already-consumed "(";
-- i counts the currently open parentheses.
-- Fix(review): added the [] base case — previously an unbalanced input with
-- no matching ")" crashed with a non-exhaustive-pattern runtime error; it now
-- returns everything seen so far.
findInLeadingParenthesis' :: [String] -> Int -> [String]
findInLeadingParenthesis' [] _ = []
findInLeadingParenthesis' (")":xs) 1 = []
findInLeadingParenthesis' ("(":xs) i = "(":findInLeadingParenthesis' xs (i + 1)
findInLeadingParenthesis' (")":xs) i = ")":findInLeadingParenthesis' xs (i - 1)
findInLeadingParenthesis' (x:xs) i = x:findInLeadingParenthesis' xs i
--Given a list of strings beginning with "(", finds all strings after the matching ")"
findAfterLeadingParenthesis :: [String] -> [String]
findAfterLeadingParenthesis s =
let
leading = findInLeadingParenthesis s
in
--The 2 accounts for the opening and closing parenthesis
drop (2 + length leading) s
|
import 'package:flutter/cupertino.dart' show CupertinoIcons;
import 'package:flutter/material.dart';
import '../../constants/enums.dart';
/// Dialog-style card that asks the player which card color (suit) they wish
/// for; the chosen [CardColor] is popped back to the opening route.
class CardColorDecisionCard extends StatefulWidget {
const CardColorDecisionCard({
Key? key,
}) : super(key: key);
@override
_CardColorDecisionCardState createState() => _CardColorDecisionCardState();
}
class _CardColorDecisionCardState extends State<CardColorDecisionCard> {
  // Currently selected suit; defaults to clover until the user chooses.
  late CardColor color;

  @override
  void initState() {
    super.initState();
    color = CardColor.clover;
  }

  /// One selectable suit row. Extracted because the original four
  /// RadioListTiles were copy-pasted and differed only in value/label/icon.
  Widget _suitTile(CardColor value, String label, IconData icon) {
    return RadioListTile<CardColor>(
      controlAffinity: ListTileControlAffinity.trailing,
      value: value,
      groupValue: color,
      title: Text(label),
      secondary: Icon(icon),
      onChanged: (v) => setState(() => color = v!),
      activeColor: Colors.deepOrangeAccent,
    );
  }

  @override
  Widget build(BuildContext context) {
    return SafeArea(
      child: Align(
        alignment: Alignment.topCenter,
        child: Card(
          child: Padding(
            padding: const EdgeInsets.all(8.0),
            child: Column(
              mainAxisSize: MainAxisSize.min,
              children: <Widget>[
                const ListTile(
                  title: Text('Welche Farbe wünschst du dir ?'),
                ),
                _suitTile(CardColor.clover, 'Kreuz', CupertinoIcons.suit_club_fill),
                _suitTile(CardColor.spade, 'Pik', CupertinoIcons.suit_spade_fill),
                _suitTile(CardColor.heart, 'Herz', CupertinoIcons.suit_heart_fill),
                _suitTile(CardColor.diamond, 'Karo', CupertinoIcons.suit_diamond_fill),
                const SizedBox(height: 24),
                // Pops the selected color back to whoever pushed this route.
                ElevatedButton(
                  onPressed: () => Navigator.of(context).pop(color),
                  child: const Text('Wünschen'),
                ),
              ],
            ),
          ),
        ),
      ),
    );
  }
}
|
module Abilities
  # Aggregates every Manager* model ability into one CanCan ability object.
  class ManagerCoreAbility
    include CanCan::Ability

    # Builds the composite ability for +user+ by resolving
    # Abilities::Manager<Model>Ability for each managed model and merging it in.
    def initialize(user)
      %w[Asset AssetEvent Organization Policy Role Upload User].each do |model|
        model_ability = "Abilities::Manager#{model}Ability".constantize.new(user)
        merge(model_ability) if model_ability.present?
      end
    end
  end
end
|
package main
// Deliberate nil-pointer dereference demo: p is declared nil, so writing
// through it panics at runtime — see the recorded output below. Do not "fix";
// the crash is the point of the example.
func main() {
var p *int = nil
*p = 0
}
// run error
// panic: runtime error: invalid memory address or nil pointer dereference
// [signal SIGSEGV: segmentation violation code=0x1 addr=0x0 pc=0x105e262]
|
import componentFactoryFixture from '../../../../../test/helpers/component-factory-fixture';
import brushAreaDir from '../brush-area';
// Build a minimal DOM-pointer-like event carrying client coordinates.
function nativeEvent(x, y) {
  const event = { clientX: x, clientY: y };
  return event;
}
// Build a minimal hammer.js-style gesture event with a center point.
function hammerEvent(x, y) {
  return { center: { x: x, y: y } };
}
// Behavioral tests for the brush-area component: drag to draw a selection
// rectangle, brush the shapes underneath, and clean up on end/cancel.
describe('Brush Area', () => {
let componentFixture;
let instance;
let config;
let out;
let theme;
let sandbox;
let container;
let rect;
let rendererSpy;
// Fresh fixture per test: a 100x200 container at (1,2), chart mocks that
// report no shapes, and a themed green area style.
beforeEach(() => {
rect = {
x: 1,
y: 2,
width: 100,
height: 200,
computed: {
x: 1,
y: 2,
width: 100,
height: 200,
},
};
container = {
inner: rect,
outer: rect,
};
config = {
settings: {},
};
componentFixture = componentFactoryFixture();
sandbox = componentFixture.sandbox();
const chartMock = componentFixture.mocks().chart;
chartMock.shapesAt = sandbox.stub().returns([]);
chartMock.brushFromShapes = sandbox.stub();
rendererSpy = sandbox.spy(componentFixture.mocks().renderer, 'render');
theme = componentFixture.mocks().theme;
theme.style.returns({
area: {
fill: 'green',
strokeWidth: 0,
opacity: 0.2,
},
});
});
// Dragging from (10,20) to (40,30) should render a 30x10 rect in local space.
it('should render area indicator in component space', () => {
instance = componentFixture.simulateCreate(brushAreaDir, config);
componentFixture.simulateRender(container);
instance.def.start(nativeEvent(10, 20));
instance.def.move(nativeEvent(40, 30));
out = componentFixture.getRenderOutput();
expect(out).to.deep.equal([
{
type: 'rect',
x: 10,
y: 20,
width: 30,
height: 10,
fill: 'green',
strokeWidth: 0,
opacity: 0.2,
},
]);
});
it('`end` event should clear rendered node', () => {
instance = componentFixture.simulateCreate(brushAreaDir, config);
componentFixture.simulateRender(container);
instance.def.start(nativeEvent(10, 20));
instance.def.move(nativeEvent(40, 30));
instance.def.end();
out = componentFixture.getRenderOutput();
expect(out).to.deep.equal([]);
});
// cancel() must call end() on every configured brush context.
it('`cancel` event should end brushes', () => {
config.settings.brush = {
components: [
{
key: 'test',
contexts: ['test'],
},
{
key: 'test2',
contexts: ['test2'],
},
],
};
const spy = sandbox.spy();
const stub = sandbox.stub().returns({ end: spy });
componentFixture.mocks().chart.brush = stub;
instance = componentFixture.simulateCreate(brushAreaDir, config);
componentFixture.simulateRender(container);
instance.def.start(nativeEvent(10, 20));
instance.def.cancel();
componentFixture.getRenderOutput();
expect(spy).to.have.been.calledTwice;
expect(stub.firstCall).to.have.been.calledWith('test');
expect(stub.secondCall).to.have.been.calledWith('test2');
});
// shapesAt receives chart coordinates: local rect offset by the renderer position (1,2).
it('should brush in chart coordinate system', () => {
instance = componentFixture.simulateCreate(brushAreaDir, config);
componentFixture.simulateRender(container);
instance.def.start(hammerEvent(10, 20));
instance.def.move(hammerEvent(40, 30));
expect(componentFixture.mocks().chart.shapesAt).to.have.been.calledWith({
x: 11, // To include renderer position
y: 22,
width: 30,
height: 10,
});
});
// The following guards verify move/end/cancel are no-ops without a prior start.
it('should require `start` event before `move` event', () => {
instance = componentFixture.simulateCreate(brushAreaDir, config);
rendererSpy.resetHistory();
instance.def.move(nativeEvent(10, 20));
expect(rendererSpy).to.not.have.been.called;
});
it('should require `start` event before `end` event', () => {
instance = componentFixture.simulateCreate(brushAreaDir, config);
rendererSpy.resetHistory();
instance.def.end(nativeEvent(10, 20)); // If started, would render empty nodes
expect(rendererSpy).to.not.have.been.called;
});
it('should require `start` event before `cancel` event', () => {
instance = componentFixture.simulateCreate(brushAreaDir, config);
rendererSpy.resetHistory();
instance.def.cancel(nativeEvent(10, 20)); // If started, would render empty nodes
expect(rendererSpy).to.not.have.been.called;
});
// A start outside the container bounds must not begin a brush gesture.
it('should require to be inside the component container on `start` event', () => {
instance = componentFixture.simulateCreate(brushAreaDir, config);
componentFixture.simulateRender(container);
rendererSpy.resetHistory();
instance.def.start(nativeEvent(1000, 2000));
instance.def.move(nativeEvent(10, 20)); // If started, would render here
expect(rendererSpy).to.not.have.been.called;
});
});
|
package com.aptopayments.mobile.repository.card.remote.entities
import com.aptopayments.mobile.data.card.FeatureStatus
import com.aptopayments.mobile.data.card.InAppProvisioningFeature
import com.google.gson.annotations.SerializedName
// API DTO for the "in-app provisioning" card feature payload.
internal data class InAppProvisioningFeatureEntity(
@SerializedName("status")
val status: String? = "",
) {
// Maps the raw status string ("" when absent) onto the domain feature flag.
fun toFeature() = InAppProvisioningFeature(
FeatureStatus.fromString(status ?: "").toBoolean(),
)
companion object {
// Inverse mapping: domain object -> entity; null in, null out.
fun from(value: InAppProvisioningFeature?): InAppProvisioningFeatureEntity? {
return value?.let {
InAppProvisioningFeatureEntity(
status = FeatureStatus.fromBoolean(value.isEnabled).toString()
)
}
}
}
}
|
# Endpoint map for the OpenStack Object Storage (Swift) v1 API, consumed by
# Misty to generate service methods.
module Misty::Openstack::API::SwiftV1
# Upstream API reference version this map was derived from.
def tag
'Object Storage API Reference 2.17.1'
end
# Maps each URL template to the HTTP verbs it supports and the method
# name(s) each verb is exposed as.
def api
{"/info"=>{:GET=>[:list_activated_capabilities]},
"/v1/{account}"=>
{:GET=>[:show_account_details_and_list_containers],
:POST=>[:create_update_or_delete_account_metadata],
:HEAD=>[:show_account_metadata]},
"/v1/{account}/{container}"=>
{:GET=>[:show_container_details_and_list_objects],
:PUT=>[:create_container],
:POST=>[:create_update_or_delete_container_metadata],
:HEAD=>[:show_container_metadata],
:DELETE=>[:delete_container]},
"/v1/{account}/{container}/{object}"=>
{:GET=>[:get_object_content_and_metadata],
:PUT=>[:create_or_replace_object],
:COPY=>[:copy_object],
:DELETE=>[:delete_object],
:HEAD=>[:show_object_metadata],
:POST=>[:create_or_update_object_metadata]},
"/v1/endpoints"=>{:GET=>[:list_endpoints]}}
end
end
|
require File.expand_path(File.dirname(__FILE__) + '/spec_helper')

# Specs for Knj::ArrayExt helpers (powerset, divide, force_no_cols).
describe "ArrayExt" do
  it "should be able to do powersets" do
    ps = Knj::ArrayExt.powerset(:arr => [1, 2, 3, 4]).to_a
    raise "Expected length of 16 but it wasnt: #{ps.length}" if ps.length != 16

    # The block form should enumerate the same 2^4 subsets.
    ite = 0
    Knj::ArrayExt.powerset(:arr => [1, 2, 3, 4]) do |arr|
      ite += 1
    end
    raise "Expected block to be executed 16 times but it wasnt: #{ite}" if ite != 16
  end

  it "should be able to divide arrays" do
    arr = [1, 2, 3, 4, 6, 7, 8, 9, 15, 16, 17, 18]
    # Split whenever consecutive elements differ by more than 1.
    res = Knj::ArrayExt.divide(:arr => arr) do |a, b|
      if (b - a) > 1
        false
      else
        true
      end
    end
    raise "Expected length of 3 but it wasnt: #{res.length}" if res.length != 3
    raise "Expected length of 4 but it wasnt: #{res[0].length}" if res[0].length != 4
    raise "Expected length of 3 but it wasnt: #{res[1].length}" if res[1].length != 3
    raise "Expected length of 3 but it wasnt: #{res[2].length}" if res[2].length != 3
  end

  # Fix(review): this example was named "should be able to make ago-strings"
  # (copy-paste from another spec) but it exercises force_no_cols.
  it "should be able to force a number of columns" do
    arr = [1, 2]
    Knj::ArrayExt.force_no_cols(:arr => arr, :no => 1)
    raise "Expected length of 1 but got: #{arr.length}" if arr.length != 1
    raise "Expected element to be 1 but it wasnt: #{arr[0]}" if arr[0] != 1

    Knj::ArrayExt.force_no_cols(:arr => arr, :no => 3, :empty => "test")
    # Fix(review): message interpolated arr.lengtj (a NoMethodError whenever
    # this failure message was built); corrected to arr.length.
    raise "Expected length of 3 but got: #{arr.length}" if arr.length != 3
    raise "Expected element 2 to be 'test' but it wasnt: #{arr[2]}" if arr[2] != "test"
  end
end
|
import { Component, ViewChild } from '@angular/core';
import { NgOnchangesExampleComponent } from '../ng-onchanges-example/ng-onchanges-example.component';
/**
 * Demo parent for the ngOnChanges example: holds cost/margin/price values
 * bound to the child, and can reset both itself and the child view.
 */
@Component({
selector: 'app-ng-onchangesparent',
templateUrl: './ng-onchangesparent.component.html',
styleUrls: ['./ng-onchangesparent.component.css']
})
export class NgOnchangesparentComponent {
cost: number;
margin: number;
price: number;
// Reference to the child component so clear() can also reset it.
@ViewChild(NgOnchangesExampleComponent) childView: NgOnchangesExampleComponent;
constructor() {
this.clear();
}
// Restore defaults; childView is still undefined on the constructor call,
// hence the guard before delegating.
clear() {
this.cost = 100;
this.margin = 10;
this.price = 110;
if (this.childView) { this.childView.clear(); }
}
}
|
import 'package:sembast/sembast_memory.dart' as sembast;
import 'package:tekartik_firebase_firestore_sembast/firestore_sembast.dart';
import 'package:tekartik_firebase_firestore_test/firestore_test.dart';
import 'package:tekartik_firebase_local/firebase_local.dart';
/// Runs the shared Firestore test-suite against the in-memory sembast backend
/// with a local (fake) Firebase app.
void main() {
// needed for memory
skipConcurrentTransactionTests = true;
var firebase = FirebaseLocal();
var firestoreService = newFirestoreServiceSembast(
databaseFactory: sembast.newDatabaseFactoryMemory());
run(firebase: firebase, firestoreService: firestoreService);
}
|
/**
* CloudMapping - Sistema de Extração de Dados de Mapeamento dos Experimentos em Computação em Nuvem
*
* Copyright (c) AssertLab.
*
* Este software é confidencial e propriedade da AssertLab. Não é permitida sua distribuição ou divulgação
* do seu conteúdo sem expressa autorização do AssertLab. Este arquivo contém informações proprietárias.
*/
package br.ufpe.cin.cloud.mapeamento.negocio.base.camadas;
/**
 * Filter-operator options available when building a filtered query.
 *
 * @author helaine.lins
 * @created 15/04/2014 - 18:30:25
 */
public enum TipoFiltroEnum {
/**
 * SQL LIKE operator.
 */
LIKE("like"),
/**
 * Equality operator.
 */
EQUALS("="),
/**
 * Inequality operator.
 * NOTE(review): the original comment said "equals" here — copy-paste slip.
 */
DIFFERENT("<>"),
/**
 * IS NULL operator.
 */
NULL("IS NULL");
/**
 * SQL fragment for this operator.
 */
private String operador;
/**
 * Creates the enum constant with its corresponding SQL operator fragment.
 *
 * @param operador
 *            the SQL operator this constant represents.
 */
private TipoFiltroEnum(String operador) {
this.operador = operador;
}
/**
 * Returns the SQL operator fragment.
 *
 * @return a {@link String} containing this constant's operator.
 */
public String getOperador() {
return this.operador;
}
}
|
import Konva from 'konva';
Konva.showWarnings = false;
/**
 * Create a Konva node by class name and attach it to a parent container.
 *
 * @param parent Konva container (Stage/Layer/Group) the new node is added to
 * @param name Konva class name looked up on the Konva namespace, e.g. 'Rect'
 * @param options config object passed to the Konva constructor
 * @returns the newly created Konva element
 */
export function appendKonvaElement(parent,name,options){
const element = new Konva[name](options);
parent.add(element);
return element;
}
|
import 'package:build/build.dart';
import 'package:source_gen/source_gen.dart';
import 'src/dllimport_generator.dart';
/// Builder factory (wired up via build.yaml): generates `.ffi.g.dart` files
/// with DllImport FFI bindings, prefixed by the standard source_gen header
/// plus a dart:ffi import.
Builder dllImportBuilder(BuilderOptions options) =>
LibraryBuilder(DllImportGenerator(),
generatedExtension: '.ffi.g.dart',
header: '$defaultFileHeader\nimport \'dart:ffi\';');
|
require 'sqlite3'

# Create a SQLite database at +output_file+, apply the crawler schema, and
# yield the open handle to the caller.
#
# The schema file contains multiple statements, so it is split on ';' and each
# statement run separately (SQLite3#execute only executes one statement).
#
# @param output_file [String] path of the database file to create
# @yield [SQLite3::Database] the freshly initialized database, if a block is given
# @return [SQLite3::Database] the (still open) database handle
def open_database(output_file)
  schema = File.read("../schema/crawlerdb.sql")
  db = SQLite3::Database.new(output_file)
  schema.split(';').each do |statement|
    # Skip the empty trailing fragment after the final ';' and blank runs.
    db.execute(statement) unless statement.strip.empty?
  end
  # Fix(review): calling without a block previously raised LocalJumpError;
  # the yield is now optional and the handle is still returned either way.
  yield(db) if block_given?
  db
end
|
module Dnsimple
module Struct
# Value object for a DNS zone as returned by the DNSimple API.
class Zone < Base
# @return [Integer] The zone ID in DNSimple.
attr_accessor :id
# @return [Integer] The associated account ID.
attr_accessor :account_id
# @return [String] The zone name.
attr_accessor :name
# @return [Boolean] True if the zone is a reverse zone.
attr_accessor :reverse
# @return [String] When the zone was created in DNSimple.
attr_accessor :created_at
# @return [String] When the zone was last updated in DNSimple.
attr_accessor :updated_at
end
end
end
|
import React from 'react';
import {List} from 'react-native-paper';
import {StyleSheet, View, Animated, Alert} from 'react-native';
import {RectButton} from 'react-native-gesture-handler';
import Icon from 'react-native-vector-icons/MaterialCommunityIcons';
import Swipeable from 'react-native-gesture-handler/Swipeable';
// Swipeable admin list row: tap to open, swipe to reveal optional
// Edit/Delete actions. The row/prevOpenedRow refs are shared with the parent
// list so only one row stays open at a time.
const AdminField = ({
onEdit,
onDelete,
onPress,
title,
description,
icon,
iconColor = '#4A6572',
index,
row,
prevOpenedRow,
onUpdateRow,
onUpdatePrevOpenedRow,
backgroundColor = '#fff7e0',
}) => {
// Close the previously opened row (if any other) and remember this one.
const closeRow = index => {
if (prevOpenedRow && prevOpenedRow !== row[index]) {
prevOpenedRow.close();
}
onUpdatePrevOpenedRow(row[index]);
};
// Right-side swipe actions; Delete asks for confirmation before firing.
const renderActions = index => {
return (
<>
{onDelete ? (
<RectButton
style={[styles.swipeableButton, styles.swipeableDelete]}
onPress={() =>
Alert.alert(
'Delete',
`Are you sure you want to remove ${title}?`,
[
{
text: 'Yes',
onPress: () => {
row[index].close();
onDelete();
},
},
{text: 'No'},
],
)
}>
<Animated.Text
style={[styles.actionText, styles.swipeableDeleteText]}>
Delete
</Animated.Text>
</RectButton>
) : null}
{onEdit ? (
<RectButton
onPress={() => {
onEdit();
row[index].close();
}}
style={[styles.swipeableButton, styles.swipeableEdit]}>
<Animated.Text
style={[styles.actionText, styles.swipeableEditText]}>
Edit
</Animated.Text>
</RectButton>
) : null}
</>
);
};
// Register this row's ref with the parent on every render so closeRow works.
return (
<Swipeable
renderRightActions={() => renderActions(index)}
friction={1.5}
ref={ref => {
row[index] = ref;
onUpdateRow(row);
}}
onSwipeableOpen={() => {
closeRow(index);
}}>
<List.Item
style={styles.itemContainer(backgroundColor)}
title={title}
titleStyle={styles.title}
description={description}
descriptionStyle={styles.description}
onPress={() => (onPress ? onPress() : null)}
left={() =>
icon ? (
<View style={styles.icon}>
<Icon size={40} name={icon} color={iconColor} />
</View>
) : null
}
/>
</Swipeable>
);
};
// Static styles for AdminField; itemContainer is a function so each row can
// be rendered with a caller-supplied background color.
const styles = StyleSheet.create({
title: {
fontSize: 18,
textTransform: 'capitalize',
},
itemContainer: backgroundColor => ({
backgroundColor: backgroundColor,
}),
swipeableButton: {
justifyContent: 'center',
},
swipeableEdit: {
backgroundColor: '#FCBB00',
},
swipeableDelete: {
backgroundColor: '#A52630',
},
swipeableEditText: {
color: 'black',
},
swipeableDeleteText: {
color: 'white',
},
description: {
textTransform: 'capitalize',
},
icon: {
justifyContent: 'center',
alignItems: 'center',
},
actionText: {
fontSize: 18,
width: 80,
textAlign: 'center',
textAlignVertical: 'center',
},
});
export default AdminField;
|
package handler
import (
"demo/app/cli/internal/logic"
"demo/app/cli/internal/svc"
"github.com/urfave/cli/v2"
)
// ListHandler adapts the list logic to an urfave/cli action function,
// closing over the service context.
func ListHandler(ctx *svc.ServiceContext) cli.ActionFunc {
	return func(cliCtx *cli.Context) error {
		return logic.NewListLogic(cliCtx, ctx).List()
	}
}
|
from django.apps import AppConfig
class ConjugateConfig(AppConfig):
    """Django application configuration for the 'conjugate' app."""

    # Label Django uses to register this application in INSTALLED_APPS.
    name = 'conjugate'
|
var naughtyWords = [
//URL: https://www.freewebheaders.com/full-list-of-bad-words-banned-by-google/
'2girls1cup',
'2g1c',
'a2m',
'acrotomophilia',
'ahole',
'alabamahotpocket',
'alaskanpipeline',
'anal',
'analimpaler',
'analleakage',
'analprobe',
'anilingus',
'anus',
'apeshit',
'areola',
'areole',
'arian',
'arrse',
'arse',
'arsehole',
'ass',
'assfuck',
'asshole',
'assbag',
'assbandit',
'assbang',
'assbanged',
'assbanger',
'assbangs',
'assbite',
'assclown',
'asscock',
'asscracker',
'asses',
'assface',
'assfaces',
'assfuck',
'assfucker',
'assfukka',
'assgoblin',
'asshat',
'asshead',
'asshole',
'assholes',
'asshopper',
'assjacker',
'asslick',
'asslicker',
'assmaster',
'assmonkey',
'assmucus',
'assmunch',
'assmuncher',
'asspirate',
'assshit',
'assshole',
'asssucker',
'asswad',
'asswhole',
'asswipe',
'asswipes',
'autoerotic',
'axwound',
'azazel',
'azz',
'babeland',
'babybatter',
'babyjuice',
'ballgag',
'ballgravy',
'ballkicking',
'balllicking',
'ballsack',
'ballsucking',
'ballbag',
'balls',
'ballsack',
'bampot',
'bangbros',
'bareback',
'barelylegal',
'barenaked',
'barf',
'bastard',
'bastardo',
'bastards',
'bastinado',
'battyboy',
'bawdy',
'bbw',
'bdsm',
'beaner',
'beaners',
'beardedclam',
'beastial',
'beastiality',
'beatch',
'beaver',
'beavercleaver',
'beaverlips',
'beefcurtain',
'beefcurtains',
'beeyotch',
'bellend',
'bender',
'beotch',
'bescumber',
'bestial',
'bestiality',
'biatch',
'bigblack',
'bigbreasts',
'bigknockers',
'bigtits',
'bimbo',
'bimbos',
'bint',
'birdlock',
'bitch',
'bitchtit',
'bitchass',
'bitched',
'bitcher',
'bitchers',
'bitches',
'bitchin',
'bitching',
'bitchtits',
'bitchy',
'blackcock',
'blondeaction',
'blondeonblondeaction',
'bloodclaat',
'bloody',
'bloodyhell',
'blowjob',
'blowme',
'blowmud',
'blowyourload',
'blowjob',
'blowjobs',
'bluewaffle',
'blumpkin',
'bod',
'bodily',
'boink',
'boiolas',
'bollock',
'bollocks',
'bollok',
'bollox',
'bondage',
'boned',
'boner',
'boners',
'bong',
'boob',
'boobies',
'boobs',
'booby',
'booger',
'bookie',
'boong',
'booobs',
'boooobs',
'booooobs',
'booooooobs',
'bootee',
'bootie',
'booty',
'bootycall',
'booze',
'boozer',
'boozy',
'bosom',
'bosomy',
'breasts',
'Breeder',
'brotherfucker',
'brownshowers',
'brunetteaction',
'buceta',
'bugger',
'bukkake',
'bullshit',
'bulldyke',
'bulletvibe',
'bullshit',
'bullshits',
'bullshitted',
'bullturds',
'bum',
'bumboy',
'bumblefuck',
'bumclat',
'bummer',
'buncombe',
'bung',
'bunghole',
'bunnyfucker',
'bustaload',
'busty',
'butt',
'buttfuck',
'buttplug',
'buttcheeks',
'buttfuck',
'buttfucka',
'buttfucker',
'butthole',
'buttmuch',
'buttmunch',
'buttplug',
'caca',
'cacafuego',
'cahone',
'cameltoe',
'camgirl',
'camslut',
'camwhore',
'carpetmuncher',
'cawk',
'cervix',
'chesticle',
'chickwithadick',
'chinc',
'chincs',
'choad',
'choade',
'chocice',
'chocolaterosebuds',
'chode',
'chodes',
'chotabags',
'cipa',
'circlejerk',
'clevelandsteamer',
'climax',
'clit',
'clitlicker',
'clitface',
'clitfuck',
'clitoris',
'clitorus',
'clits',
'clitty',
'clittylitter',
'cloverclamps',
'clunge',
'clusterfuck',
'cnut',
'cocain',
'cocaine',
'coccydynia',
'cock',
'cockpocket',
'cocksnot',
'cocksucker',
'cockass',
'cockbite',
'cockblock',
'cockburger',
'cockeye',
'cockface',
'cockfucker',
'cockhead',
'cockholster',
'cockjockey',
'cockknocker',
'cockknoker',
'Cocklump',
'cockmaster',
'cockmongler',
'cockmongruel',
'cockmonkey',
'cockmunch',
'cockmuncher',
'cocknose',
'cocknugget',
'cocks',
'cockshit',
'cocksmith',
'cocksmoke',
'cocksmoker',
'cocksniffer',
'cocksuck',
'cocksucked',
'cocksucker',
'cocksuckers',
'cocksucking',
'cocksucks',
'cocksuka',
'cocksukka',
'cockwaffle',
'coffindodger',
'coital',
'cok',
'cokmuncher',
'coksucka',
'commie',
'condom',
'coochie',
'coochy',
'coon',
'coonnass',
'coons',
'cooter',
'copsomewood',
'coprolagnia',
'coprophilia',
'corksucker',
'cornhole',
'corpwhore',
'corpulent',
'cox',
'crabs',
'crack',
'crackwhore',
'crap',
'crappy',
'creampie',
'cretin',
'crikey',
'cripple',
'crotte',
'cum',
'cumchugger',
'cumdumpster',
'cumfreak',
'cumguzzler',
'cumbubble',
'cumdump',
'cumdumpster',
'cumguzzler',
'cumjockey',
'cummer',
'cummin',
'cumming',
'cums',
'cumshot',
'cumshots',
'cumslut',
'cumstain',
'cumtart',
'cunilingus',
'cunillingus',
'cunnie',
'cunnilingus',
'cunny',
'cunt',
'cunthair',
'cuntass',
'cuntbag',
'cuntface',
'cunthole',
'cunthunter',
'cuntlick',
'cuntlicker',
'cuntlicking',
'cuntrag',
'cunts',
'cuntsicle',
'cuntslut',
'cus',
'cutrope',
'cyalis',
'cyberfuc',
'cyberfuck',
'cyberfucked',
'cyberfucker',
'cyberfuckers',
'cyberfucking',
'dago',
'dagos',
'dammit',
'damn',
'damned',
'damnit',
'darkie',
'darn',
'daterape',
'deepthroat',
'deggo',
'dendrophilia',
'dick',
'dickhead',
'dickhole',
'dickshy',
'dickbag',
'dickbeaters',
'dickdipper',
'dickface',
'dickflipper',
'dickfuck',
'dickfucker',
'dickhead',
'dickheads',
'dickhole',
'dickish',
'dickjuice',
'dickmilk',
'dickmonger',
'dickripper',
'dicks',
'dicksipper',
'dickslap',
'dicksucker',
'dicksucking',
'dicktickler',
'dickwad',
'dickweasel',
'dickweed',
'dickwhipper',
'dickwod',
'dickzipper',
'diddle',
'dildo',
'dildos',
'diligaf',
'dillweed',
'dimwit',
'dingle',
'dingleberries',
'dingleberry',
'dink',
'dinks',
'dipship',
'dipshit',
'dirsa',
'dirty',
'dirtypillows',
'dirtysanchez',
'dirtySanchez',
'div',
'dlck',
'dogstyle',
'doggiestyle',
'doggin',
'dogging',
'doggystyle',
'dolcett',
'domination',
'dominatrix',
'dommes',
'dong',
'donkeypunch',
'donkeyribber',
'doochbag',
'doofus',
'dookie',
'doosh',
'dopey',
'doubledong',
'doublepenetration',
'Doublelift',
'douche',
'douchebag',
'douchebags',
'douchewaffle',
'douchey',
'dpaction',
'drunk',
'dryhump',
'duche',
'dumass',
'dumbass',
'dumbasses',
'Dumbcunt',
'dumbfuck',
'dumbshit',
'dummy',
'dumshit',
'dvda',
'dyke',
'dykes',
'eatadick',
'eathairpie',
'eatmyass',
'ecchi',
'ejaculate',
'ejaculated',
'ejaculates',
'ejaculating',
'ejaculatings',
'ejaculation',
'ejakulate',
'erect',
'erection',
'erotic',
'erotism',
'escort',
'essohbee',
'eunuch',
'extacy',
'extasy',
'fuck',
'fucker',
'facial',
'fack',
'fanny',
'fannybandit',
'fannyflaps',
'fannyfucker',
'fanyy',
'fart',
'fartknocker',
'fatass',
'fcuk',
'fcuker',
'fcuking',
'fecal',
'feck',
'fecker',
'feist',
'felch',
'felcher',
'felching',
'fellate',
'fellatio',
'feltch',
'feltcher',
'femalesquirting',
'femdom',
'fenian',
'fice',
'figging',
'fingerbang',
'fingerfuck',
'fingerfucked',
'fingerfucker',
'fingerfuckers',
'fingerfucking',
'fingerfucks',
'fingering',
'fistfuck',
'fisted',
'fistfuck',
'fistfucked',
'fistfucker',
'fistfuckers',
'fistfucking',
'fistfuckings',
'fistfucks',
'fisting',
'fisty',
'flamer',
'flange',
'flaps',
'fleshflute',
'flogthelog',
'floozy',
'foad',
'foah',
'fondle',
'foobar',
'fook',
'fooker',
'footfetish',
'footjob',
'foreskin',
'freex',
'frenchify',
'frigg',
'frigga',
'frotting',
'fubar',
'fuc',
'fuck',
'fuckbuttons',
'fuckhole',
'Fuckoff',
'fuckpuppet',
'fucktrophy',
'fuckyomama',
'fuckyou',
'fucka',
'fuckass',
'fuckbag',
'fuckboy',
'fuckbrain',
'fuckbutt',
'fuckbutter',
'fucked',
'fuckedup',
'fucker',
'fuckers',
'fuckersucker',
'fuckface',
'fuckhead',
'fuckheads',
'fuckhole',
'fuckin',
'fucking',
'fuckings',
'fuckingshitmotherfucker',
'fuckme',
'fuckmeat',
'fucknugget',
'fucknut',
'fucknutt',
'fuckoff',
'fucks',
'fuckstick',
'fucktard',
'fucktards',
'fucktart',
'fucktoy',
'fucktwat',
'fuckup',
'fuckwad',
'fuckwhit',
'fuckwit',
'fuckwitt',
'fudgepacker',
'fuk',
'fuker',
'fukker',
'fukkers',
'fukkin',
'fuks',
'fukwhit',
'fukwit',
'fuq',
'futanari',
'fux',
'fux0r',
'fvck',
'fxck',
'gae',
'gai',
'gangbang',
'gangbanged',
'gangbangs',
'ganja',
'gash',
'gassyass',
'gay',
'gaysex',
'gayass',
'gaybob',
'gaydo',
'gayfuck',
'gayfuckist',
'gaylord',
'gays',
'gaysex',
'gaywad',
'genderbender',
'genitals',
'gey',
'gfy',
'ghay',
'ghey',
'giantcock',
'gigolo',
'ginger',
'gippo',
'girlon',
'girlontop',
'girlsgonewild',
'git',
'glans',
'goatcx',
'goatse',
'god',
'goddamn',
'godamn',
'godamnit',
'goddam',
'goddammit',
'goddamn',
'goddamned',
'goddamnit',
'godsdamn',
'gokkun',
'goldenshower',
'gonad',
'gonads',
'googirl',
'gooch',
'goodpoop',
'gook',
'gooks',
'goregasm',
'gringo',
'grope',
'groupsex',
'gspot',
'gtfo',
'guido',
'guro',
'hamflap',
'handjob',
'hardcore',
'hardon',
'hardcore',
'hardcoresex',
'hebe',
'heeb',
'hell',
'hemp',
'hentai',
'heroin',
'herp',
'herpes',
'herpy',
'hircismus',
'hiv',
'ho',
'hoar',
'hoare',
'hobag',
'hoe',
'hoer',
'holyshit',
'homo',
'homodumbshit',
'homoerotic',
'homoey',
'honkey',
'honky',
'hooch',
'hookah',
'hooker',
'hoor',
'hootch',
'hooter',
'hooters',
'hore',
'horniest',
'horny',
'hotcarl',
'hotchick',
'hotsex',
'howtokill',
'howtomurdep',
'howtomurder',
'hugefat',
'hump',
'humped',
'humping',
'hun',
'hussy',
'hymen',
'iap',
'iberianslap',
'inbred',
'incest',
'injun',
'intercourse',
'jackoff',
'jackass',
'jackasses',
'jackhole',
'jackoff',
'jaggi',
'jagoff',
'jailbait',
'jellydonut',
'jerk',
'jerkoff',
'jerkass',
'jerked',
'jerkoff',
'jism',
'jiz',
'jizm',
'jizz',
'jizzed',
'jock',
'juggs',
'junkie',
'junky',
'kafir',
'kawk',
'kike',
'kikes',
'kill',
'kinbaku',
'kinkster',
'kinky',
'klan',
'knob',
'knobend',
'knobbing',
'knobead',
'knobed',
'knobend',
'knobhead',
'knobjocky',
'knobjokey',
'kock',
'kondum',
'kondums',
'kooch',
'kooches',
'kootch',
'kraut',
'kum',
'kummer',
'kumming',
'kums',
'kunilingus',
'kunja',
'kunt',
'kwif',
'kyke',
'labia',
'lameass',
'lardass',
'leatherrestraint',
'leatherstraightjacket',
'lech',
'lemonparty',
'LEN',
'leper',
'lesbian',
'lesbians',
'lesbo',
'lesbos',
'lez',
'lezzie',
'lmao',
'lmfao',
'loin',
'loins',
'lolita',
'looney',
'lovemaking',
'lube',
'lust',
'lusting',
'lusty',
'mafugly',
'makemecome',
'malesquirting',
'mams',
'masochist',
'massa',
'masterb8',
'masterbate',
'masterbating',
'masterbation',
'masterbations',
'masturbate',
'masturbating',
'masturbation',
'maxi',
'menageatrois',
'menses',
'menstruate',
'menstruation',
'meth',
'mick',
'microphallus',
'middlefinger',
'midget',
'milf',
'minge',
'minger',
'missionaryposition',
'mofo',
'molest',
'mong',
'moomoofoofoo',
'moron',
'mothafuck',
'mothafucka',
'mothafuckas',
'mothafuckaz',
'mothafucked',
'mothafucker',
'mothafuckers',
'mothafuckin',
'mothafucking',
'mothafuckings',
'mothafucks',
'motherfucker',
'motherfuck',
'motherfucka',
'motherfucked',
'motherfucker',
'motherfuckers',
'motherfuckin',
'motherfucking',
'motherfuckings',
'motherfuckka',
'motherfucks',
'moundofvenus',
'mrhands',
'muff',
'muffdiver',
'muffpuff',
'muffdiver',
'muffdiving',
'munging',
'munter',
'murder',
'mutha',
'muthafecker',
'muthafuckker',
'muther',
'mutherfucker',
'naked',
'nambla',
'napalm',
'nappy',
'nawashi',
'needthedick',
'negro',
'nimphomania',
'nimrod',
'ninny',
'ninnyhammer',
'nipple',
'nipples',
'nob',
'nobjokey',
'nobhead',
'nobjocky',
'nobjokey',
'nonce',
'nsfwimages',
'nude',
'nudity',
'numbnuts',
'nutbutter',
'nutsack',
'nutter',
'nympho',
'nymphomania',
'octopussy',
'oldbag',
'omg',
'omorashi',
'onecuptwogirls',
'oneguyonejar',
'opiate',
'opium',
'orally',
'organ',
'orgasim',
'orgasims',
'orgasm',
'orgasmic',
'orgasms',
'orgies',
'orgy',
'ovary',
'ovum',
'paedophile',
'paki',
'panooch',
'pansy',
'pantie',
'panties',
'panty',
'pawn',
'pcp',
'pecker',
'peckerhead',
'pedo',
'pedobear',
'pedophile',
'pedophilia',
'pedophiliac',
'pee',
'peepee',
'pegging',
'penetrate',
'penetration',
'penial',
'penile',
'penis',
'penisbanger',
'penisfucker',
'penispuffer',
'perversion',
'phallic',
'phonesex',
'phuck',
'phuk',
'phuked',
'phuking',
'phukked',
'phukking',
'phuks',
'phuq',
'pieceofshit',
'pigfucker',
'pikey',
'pillowbiter',
'pimp',
'pimpis',
'pinko',
'piss',
'pissoff',
'pisspig',
'pissed',
'pissedoff',
'pisser',
'pissers',
'pisses',
'pissflaps',
'pissin',
'pissing',
'pissoff',
'pisspig',
'playboy',
'pleasurechest',
'pms',
'polack',
'polesmoker',
'pollock',
'ponyplay',
'poof',
'poon',
'poonani',
'poonany',
'poontang',
'poop',
'poopchute',
'Poopuncher',
'porchmonkey',
'porn',
'porno',
'pornography',
'pornos',
'pot',
'potty',
'prick',
'pricks',
'prickteaser',
'prig',
'princealbertpiercing',
'prod',
'pron',
'prostitute',
'prude',
'psycho',
'pthc',
'pube',
'pubes',
'pubic',
'pubis',
'punani',
'punanny',
'punany',
'punkass',
'punky',
'punta',
'puss',
'pusse',
'pussi',
'pussies',
'pussy',
'pussyfart',
'pussypalace',
'pussylicking',
'pussypounder',
'pussys',
'pust',
'puto',
'queaf',
'queef',
'queer',
'queerbait',
'queerhole',
'queero',
'queers',
'quicky',
'quim',
'racy',
'raghead',
'ragingboner',
'rape',
'raped',
'raper',
'rapey',
'raping',
'rapist',
'raunch',
'rectal',
'rectum',
'rectus',
'reefer',
'reetard',
'reich',
'renob',
'retard',
'retarded',
'reversecowgirl',
'revue',
'rimjaw',
'rimjob',
'rimming',
'ritard',
'rosypalm',
'rosypalmandher5sisters',
'rtard',
'rubbish',
'rum',
'rump',
'rumprammer',
'ruski',
'rustytrombone',
'shit',
'sadism',
'sadist',
'sambo',
'sandbar',
'Sandler',
'sanger',
'santorum',
'sausagequeen',
'scag',
'scantily',
'scat',
'schizo',
'schlong',
'scissoring',
'screw',
'screwed',
'screwing',
'scroat',
'scrog',
'scrot',
'scrote',
'scrotum',
'scrud',
'scum',
'seaman',
'seamen',
'seduce',
'seks',
'semen',
'sex',
'sexo',
'sexual',
'sexy',
'shag',
'shagger',
'shaggin',
'shagging',
'shamedame',
'shavedbeaver',
'shavedpussy',
'shibari',
'shirtlifter',
'shit',
'shitass',
'shitfucker',
'shitass',
'shitbag',
'shitbagger',
'shitblimp',
'shitbrains',
'shitbreath',
'shitcanned',
'shitcunt',
'shitdick',
'shite',
'shiteater',
'shited',
'shitey',
'shitface',
'shitfaced',
'shitfuck',
'shitfull',
'shithead',
'shitheads',
'shithole',
'shithouse',
'shiting',
'shitings',
'shits',
'shitspitter',
'shitstain',
'shitt',
'shitted',
'shitter',
'shitters',
'shittier',
'shittiest',
'shitting',
'shittings',
'shitty',
'shiz',
'shiznit',
'shota',
'shrimping',
'sissy',
'skag',
'skank',
'skeet',
'skullfuck',
'slag',
'sleaze',
'sleazy',
'slope',
'slut',
'slutbucket',
'slutbag',
'slutdumper',
'slutkiss',
'sluts',
'smartass',
'smartasses',
'smeg',
'smegma',
'smut',
'smutty',
'snatch',
'sniper',
'snowballing',
'snuff',
'sodoff',
'sodom',
'sodomize',
'sodomy',
'sonofabitch',
'sonofamotherlessgoat',
'sonofawhore',
'souse',
'soused',
'spac',
'spade',
'sperm',
'splooge',
'sploogemoose',
'spooge',
'spook',
'spreadlegs',
'spunk',
'stfu',
'stiffy',
'stoned',
'strapon',
'strappado',
'strip',
'stripclub',
'stroke',
'stupid',
'styledoggy',
'suck',
'suckass',
'sucked',
'sucking',
'sucks',
'suicidegirls',
'sultrywomen',
'sumofabiatch',
'swastika',
'swinger',
'taff',
'taig',
'taintedlove',
'takingthepiss',
'tampon',
'tard',
'tart',
'tastemy',
'tawdry',
'teabagging',
'teat',
'teets',
'teez',
'teste',
'testee',
'testes',
'testical',
'testicle',
'testis',
'threesome',
'throating',
'thrust',
'thug',
'thundercunt',
'tiedup',
'tightwhite',
'tinkle',
'tit',
'titwank',
'titfuck',
'titi',
'tities',
'tits',
'titt',
'tittiefucker',
'titties',
'titty',
'tittyfuck',
'tittyfucker',
'tittywank',
'titwank',
'toke',
'tongueina',
'toots',
'topless',
'tosser',
'tramp',
'trashy',
'tribadism',
'trumped',
'tubgirl',
'turd',
'tush',
'tushy',
'twat',
'twathead',
'twatlips',
'twats',
'twatty',
'twatwaffle',
'twink',
'twinkie',
'twofingers',
'twofingerswithtongue',
'twogirlsonecup',
'twunt',
'twunter',
'ugly',
'unclefucker',
'undies',
'undressing',
'unwed',
'upskirt',
'urethraplay',
'urinal',
'urine',
'urophilia',
'uterus',
'uzi',
'vag',
'vagina',
'vajayjay',
'valium',
'venusmound',
'veqtable',
'viagra',
'vibrator',
'violetwand',
'virgin',
'vixen',
'vjayjay',
'vodka',
'vomit',
'vorarephilia',
'voyeur',
'vulgar',
'vulva',
'wad',
'wang',
'wank',
'wanker',
'wankjob',
'wanky',
'wazoo',
'wedgie',
'weed',
'weenie',
'weewee',
'weiner',
'weirdo',
'wench',
'wetdream',
'wetback',
'whiz',
'whoar',
'whoralicious',
'whore',
'whorealicious',
'whorebag',
'whored',
'whoreface',
'whorehopper',
'whorehouse',
'whores',
'whoring',
'wigger',
'willies',
'willy',
'windowlicker',
'wiseass',
'wiseasses',
'womb',
'wop',
'wrappingmen',
'wrinkledstarfish',
'wtf',
'xrated',
'yaoi',
'yeasty',
'yellowshowers',
'yid',
'yiffy',
'yobbo',
'zibbi',
'zoophilia',
'zubb',
// https://github.com/LDNOOBW/List-of-Dirty-Naughty-Obscene-and-Otherwise-Bad-Words/
'2g1c',
'2girls1cup',
'acrotomophilia',
'alabamahotpocket',
'alaskanpipeline',
'anal',
'anilingus',
'anus',
'apeshit',
'arsehole',
'ass',
'asshole',
'assmunch',
'autoerotic',
'babeland',
'babybatter',
'babyjuice',
'ballgag',
'ballgravy',
'ballkicking',
'balllicking',
'ballsack',
'ballsucking',
'bangbros',
'bareback',
'barelylegal',
'barenaked',
'bastard',
'bastardo',
'bastinado',
'bbw',
'bdsm',
'beaner',
'beaners',
'beavercleaver',
'beaverlips',
'bestiality',
'bigblack',
'bigbreasts',
'bigknockers',
'bigtits',
'bimbos',
'birdlock',
'bitch',
'bitches',
'blackcock',
'blondeaction',
'blondeonblondeaction',
'blowjob',
'blowyourload',
'bluewaffle',
'blumpkin',
'bollocks',
'bondage',
'boner',
'boob',
'boobs',
'bootycall',
'brownshowers',
'brunetteaction',
'bukkake',
'bulletvibe',
'bullshit',
'bunghole',
'busty',
'butt',
'buttcheeks',
'butthole',
'cameltoe',
'camgirl',
'camslut',
'camwhore',
'carpetmuncher',
'chocolaterosebuds',
'circlejerk',
'clevelandsteamer',
'clit',
'clitoris',
'cloverclamps',
'clusterfuck',
'cock',
'cocks',
'coprolagnia',
'coprophilia',
'cornhole',
'coon',
'coons',
'creampie',
'cum',
'cumming',
'cunnilingus',
'cunt',
'darkie',
'daterape',
'deepthroat',
'dendrophilia',
'dick',
'dildo',
'dingleberry',
'dingleberries',
'dirtypillows',
'dirtysanchez',
'doggiestyle',
'doggystyle',
'dogstyle',
'dolcett',
'domination',
'dominatrix',
'dommes',
'donkeypunch',
'doubledong',
'doublepenetration',
'dpaction',
'dryhump',
'dvda',
'eatmyass',
'ecchi',
'ejaculation',
'erotic',
'erotism',
'escort',
'eunuch',
'fecal',
'felch',
'fellatio',
'feltch',
'femalesquirting',
'femdom',
'figging',
'fingerbang',
'fingering',
'fisting',
'footfetish',
'footjob',
'frotting',
'fuck',
'fuckbuttons',
'fuckin',
'fucking',
'fucktards',
'fudgepacker',
'futanari',
'gangbang',
'gaysex',
'genitals',
'giantcock',
'girlon',
'girlontop',
'girlsgonewild',
'goatcx',
'goatse',
'goddamn',
'gokkun',
'goldenshower',
'goodpoop',
'googirl',
'goregasm',
'grope',
'groupsex',
'gspot',
'guro',
'handjob',
'hardcore',
'hentai',
'homoerotic',
'honkey',
'hooker',
'hotcarl',
'hotchick',
'howtokill',
'howtomurder',
'hugefat',
'humping',
'incest',
'intercourse',
'jackoff',
'jailbait',
'jellydonut',
'jerkoff',
'jizz',
'juggs',
'kinbaku',
'kinkster',
'kinky',
'knobbing',
'leatherrestraint',
'leatherstraightjacket',
'lemonparty',
'lolita',
'lovemaking',
'makemecome',
'malesquirting',
'masturbate',
'menageatrois',
'milf',
'missionaryposition',
'motherfucker',
'moundofvenus',
'mrhands',
'muffdiver',
'muffdiving',
'nambla',
'nawashi',
'nimphomania',
'nipple',
'nipples',
'nsfwimages',
'nude',
'nudity',
'nympho',
'nymphomania',
'octopussy',
'omorashi',
'onecuptwogirls',
'oneguyonejar',
'orgasm',
'orgy',
'paedophile',
'paki',
'panties',
'panty',
'pedobear',
'pedophile',
'pegging',
'penis',
'phonesex',
'pieceofshit',
'pissing',
'pisspig',
'playboy',
'pleasurechest',
'polesmoker',
'ponyplay',
'poof',
'poon',
'poontang',
'punany',
'poopchute',
'porn',
'porno',
'pornography',
'princealbertpiercing',
'pthc',
'pubes',
'pussy',
'queaf',
'queef',
'quim',
'raghead',
'ragingboner',
'rape',
'raping',
'rapist',
'rectum',
'reversecowgirl',
'rimjob',
'rimming',
'rosypalm',
'rosypalmandher5sisters',
'rustytrombone',
'sadism',
'santorum',
'scat',
'schlong',
'scissoring',
'semen',
'sex',
'sexo',
'sexy',
'shavedbeaver',
'shavedpussy',
'shibari',
'shit',
'shitblimp',
'shitty',
'shota',
'shrimping',
'skeet',
'slut',
'smut',
'snatch',
'snowballing',
'sodomize',
'sodomy',
'splooge',
'sploogemoose',
'spooge',
'spreadlegs',
'spunk',
'strapon',
'strappado',
'stripclub',
'styledoggy',
'suck',
'sucks',
'suicidegirls',
'sultrywomen',
'swinger',
'taintedlove',
'tastemy',
'teabagging',
'threesome',
'throating',
'tiedup',
'tightwhite',
'tit',
'tits',
'titties',
'titty',
'tongueina',
'topless',
'tosser',
'tribadism',
'tubgirl',
'tushy',
'twat',
'twink',
'twinkie',
'twogirlsonecup',
'undressing',
'upskirt',
'urethraplay',
'urophilia',
'vagina',
'venusmound',
'vibrator',
'violetwand',
'vorarephilia',
'voyeur',
'vulva',
'wank',
'wetdream',
'wrappingmen',
'wrinkledstarfish',
'yaoi',
'yellowshowers',
'yiffy',
'zoophilia',
];
|
/*
* Copyright (c) John Gough 2016-2017
*/
package j2cpsfiles;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.FileOutputStream;
import java.io.DataOutputStream;
import java.io.File;
/**
*
* @author john
*/
/**
 * Static file-handling utilities for the j2cps tool: locating package
 * directories and class files on the class path, finding existing symbol
 * files on the symbol path, and creating new symbol files.
 * <p>
 * All state is static; {@link #GetPaths(boolean)} must be called once,
 * after argument parsing, before any of the lookup methods are used.
 *
 * @author john
 */
public class j2cpsfiles /*implements FilenameFilter*/ {

    private static final String CLASSEXT = ".class";
    private static final String SYMFILEXT = ".cps";
    // private static final String CPEXT = ".cp";
    // private static final String dbName = "index.dbi";

    // NOTE(review): the following character constants are unused in this
    // class; retained so as not to disturb other historical revisions.
    private static final char EOF = '\0';
    private static final char CR = '\r';
    private static final char LF = '\n';
    private static final char SP = ' ';
    private static final char TAB = '\t';

    /** The JVM's working directory at start-up. */
    private static final String CURRDIR =
            System.getProperty("user.dir");
    /** Platform-specific file separator ("/" or "\\"). */
    private static final String FILESEP =
            System.getProperty("file.separator");
    /** Platform-specific path-list separator (":" or ";"). */
    private static final String PATHSEPSTRING =
            System.getProperty("path.separator");
    private static final char PATHSEP =
            PATHSEPSTRING.charAt(0);

    /**
     * Destination directory for symbol files.
     */
    private static String dstDir = ".";

    /**
     * The installation root directory.
     */
    private static String rootName = ".";

    /**
     * The root of the package class-file tree.
     */
    private static File pkgRoot = new File(CURRDIR);

    /** Locations to search for class files (from java.class.path). */
    private static String[] classPath;

    /**
     * Locations to search for symbol files.
     */
    private static String[] symPath;

    private static boolean verbose = false;
    private static boolean summary = false;

    public static void SetVerbose( boolean v ) { verbose = v; }
    public static void SetSummary( boolean v ) { summary = v; }

    /**
     * Sets the destination directory for generated symbol files.
     *
     * @param sDir directory name, interpreted relative to the current directory
     */
    public static void SetDstDir(String sDir) {
        if (!sDir.equals("."))
            dstDir = "." + FILESEP + sDir;
    }

    /**
     * Sets the root directory of the package class-file tree.
     *
     * @param rDir directory name, interpreted relative to the current directory
     */
    public static void SetPackageRootDir(String rDir) {
        rootName = "." + FILESEP + rDir;
        pkgRoot = new File(CURRDIR, rDir);
    }

    /**
     * Computes the class-file and symbol-file search paths.
     * This method is called after all arguments have been parsed.
     *
     * @param ignoreCpsym if true only the destination directory is searched
     *                    for symbol files; otherwise the CPSYM path entries
     *                    are appended after it
     */
    public static void GetPaths(boolean ignoreCpsym) {
        if (summary) {
            System.out.printf("Current directory \".\" is <%s>\n", CURRDIR);
            if (!rootName.equals("."))
                System.out.printf(
                        "Using <%s> as package-root directory\n", rootName);
            if (!dstDir.equals("."))
                System.out.printf("Using <%s> as symbol destination directory\n", dstDir);
        }
        classPath = GetPathArray("java.class.path");
        if (ignoreCpsym) {
            symPath = new String[] { dstDir };
        } else {
            // The destination directory is always searched first,
            // followed by the CPSYM entries in order.
            String[] tmp = GetPathArray("CPSYM");
            symPath = new String[tmp.length + 1];
            symPath[0] = dstDir;
            System.arraycopy(tmp, 0, symPath, 1, tmp.length);
        }
    }

    /** Returns the named system property, or null if it is not set. */
    private static String GetPathFromProperty(String str) {
        String path = System.getProperty(str);
        return path;
    }

    /** Returns the named environment variable, or null if it is not set. */
    private static String GetPathFromEnvVar(String str) {
        String path = System.getenv(str);
        return path;
    }

    /**
     * Resolves a path list from a system property (preferred) or an
     * environment variable of the same name, falling back to ".".
     *
     * @param prop the property / variable name
     * @return the individual path elements
     */
    private static String[] GetPathArray(String prop) {
        // First look for the system property (preferred source)
        String cPath = GetPathFromProperty(prop);
        if (cPath == null)
            cPath = GetPathFromEnvVar(prop);
        if (cPath == null) {
            System.err.println("No variable for \"" + prop + "\", using \".\"");
            cPath = ".";
        } else if (summary)
            System.out.println("Using \"" + prop + "\" path \"" + cPath + "\"");
        String[] splits = cPath.split(PATHSEPSTRING);
        return splits;
    }

    /**
     * Locates a package directory, first under the package root, then
     * along the class path. Exits the JVM if the directory cannot be found.
     *
     * @param name the package directory name
     * @return the located directory
     */
    public static File getPackageFile(String name) {
        File inFile = new File(pkgRoot,name);
        if (!inFile.exists()) {
            boolean found = false;
            for (int i=0; (i < classPath.length) && (!found); i++) {
                if (verbose) {
                    System.out.println("<" + classPath[i] + FILESEP + name + ">");
                }
                inFile = new File(classPath[i],name);
                found = inFile.exists();
            }
            if (!found) {
                System.err.println(
                        "Cannot open package directory <" + name + ">, quitting");
                //
                // Is this too severe?
                //
                // FIX: exit with a failure status (was 0), consistent
                // with FindClassFile and with shell expectations.
                System.exit(1);
            }
        } else {
            System.out.print("INFO: opened package directory <" + name + ">");
            if (summary)
                System.out.print(" from package-root <" + rootName + ">");
            System.out.println();
        }
        return inFile;
    }

    /**
     * Opens a class file by name, looking in the current directory first
     * and then along the class path. Exits the JVM on failure.
     *
     * @param name the class file name, with or without the ".class" suffix
     * @return the located file
     */
    public static File OpenClassFile(String name) {
        if (!name.endsWith(CLASSEXT)) { name = name.concat(CLASSEXT); }
        File inFile = new File(CURRDIR,name);
        if (!inFile.exists()) {
            inFile = FindClassFile(name);
        }
        if (!inFile.exists()) {
            System.err.println("Cannot open class file <" + name + ">");
            // FIX: failure status (was 0).
            System.exit(1);
        }
        return inFile;
    }

    /**
     * Opens a class file inside a known directory. Exits the JVM on failure.
     *
     * @param dir   the containing directory
     * @param fName the class file name
     * @return the located file
     */
    public static File OpenClassFile(File dir, String fName) {
        File inFile = new File(dir,fName);
        if (!inFile.exists()) {
            System.err.println("Cannot open class file <" + dir.getName() +
                    FILESEP + fName + ">");
            // FIX: failure status (was 0).
            System.exit(1);
        }
        return inFile;
    }

    /**
     * Searches the class path for a class file. Exits the JVM on failure.
     *
     * @param name the class file name, with or without the ".class" suffix
     * @return the located file
     */
    public static File FindClassFile(String name) {
        File inFile = null;
        boolean found = false;
        if (!name.endsWith(CLASSEXT)) { name = name.concat(CLASSEXT); }
        for (int i=0; (i < classPath.length) && (!found); i++) {
            if (verbose) {
                System.out.println("<" + classPath[i] + FILESEP + name + ">");
            }
            inFile = new File(classPath[i],name);
            found = inFile.exists();
        }
        if (!found) {
            System.err.println("Cannot open class file <" + name + ">");
            System.exit(1);
        }
        return inFile;
    }

    /**
     * Searches the symbol path for an existing symbol file.
     *
     * @param name the symbol file name, with or without the ".cps" suffix
     * @return the located file, or null if it was not found
     * @throws FileNotFoundException never thrown here; kept for
     *                               interface compatibility with callers
     * @throws IOException           never thrown here; kept for
     *                               interface compatibility with callers
     */
    public static File FindSymbolFile(String name)
            throws FileNotFoundException, IOException {
        File inFile = null;
        boolean found = false;
        if (!name.endsWith(SYMFILEXT)) {
            name = name.concat(SYMFILEXT);
        }
        for (int i=0; (i < symPath.length) && (!found); i++) {
            if (verbose) {
                System.out.println("Seeking <" + symPath[i] + FILESEP + name + ">");
            }
            inFile = new File(symPath[i],name);
            found = inFile.exists();
        }
        if (!found) {
            if (verbose) {
                System.out.println("Cannot find symbol file <" + name + ">");
            }
            return null;
        } else {
            //char[] arr = inFile.getPath().toCharArray();
            return inFile;
        }
    }

    /**
     * Creates a new symbol file in the destination directory.
     *
     * @param fileName the base file name (without the ".cps" suffix)
     * @return a stream positioned at the start of the new file
     * @throws IOException if the file cannot be created
     */
    public static DataOutputStream CreateSymFile(String fileName)
            throws IOException {
        String dirName = (dstDir == null ? CURRDIR : dstDir);
        System.out.print("INFO: Creating symbolfile <" + fileName + SYMFILEXT + ">");
        if (summary)
            System.out.print(" in directory <" + dirName + ">");
        System.out.println();
        return new DataOutputStream(new FileOutputStream(
                new File(dirName,fileName + SYMFILEXT)));
    }
}
|
package leetcode.flattenbtree
import leetcode.binarytreeboundary.TreeNode
/**
* https://leetcode.com/problems/flatten-binary-tree-to-linked-list/description/
*/
class Solution {
    /** Flattens [root] in place into a right-skewed preorder "linked list". */
    fun flatten(root: TreeNode?) {
        root?.flatten()
    }

    /**
     * Recursively flattens this subtree and returns the last node of the
     * flattened list (its tail), so the caller can splice lists together.
     */
    private fun TreeNode.flatten(): TreeNode? {
        // A leaf is already flat and is its own tail.
        if (left == null && right == null) return this

        val leftTail = left?.flatten()
        val rightTail = right?.flatten()
        val originalRight = right

        // Move the flattened left subtree into the right slot and append
        // the original right subtree after its tail. When there is no left
        // subtree the node is already in the correct shape.
        if (left != null) {
            right = left
            left = null
            leftTail?.right = originalRight
        }

        // The overall tail is the right subtree's tail when present,
        // otherwise the left subtree's tail.
        return rightTail ?: leftTail ?: this
    }
}
|
module Lust.Typing where
import Lust.Typing.Clocks as C
import Lust.Typing.Types as T
import Control.Monad ( (>=>) )
-- | Run the full typing pipeline: ordinary type checking ('T.runTyping')
-- followed by clock checking ('C.runClocking') on its result.
runTyping decls = T.runTyping decls >>= C.runClocking
|
using System.ComponentModel.Composition;
using System.Windows;
using Smellyriver.TankInspector.Pro.ConfiguratorShared;
using Smellyriver.TankInspector.Pro.Data.Tank;
namespace Smellyriver.TankInspector.Pro.StatChangesView
{
[Export(typeof(IStatChangesViewProvider))]
public class StatChangesViewProvider : IStatChangesViewProvider
{
    /// <summary>
    /// Builds a view presenting the stat differences between the
    /// <paramref name="before"/> and <paramref name="after"/> tank instances.
    /// </summary>
    public FrameworkElement CreateStatChangesView(TankInstance before, TankInstance after)
    {
        // The view-model is constructed first, exactly as before, then
        // attached to a fresh view via an object initializer.
        var viewModel = new StatChangesVM(before, after);
        return new StatChangesView { ViewModel = viewModel };
    }
}
}
|
use ruma_identifiers::UserId;
/// Serializable response payload carrying the authenticated user's Matrix ID
/// (presumably for the client-server `whoami` endpoint — confirm at the handler).
#[derive(Clone, Debug, serde::Serialize)]
pub struct WhoamiResponse {
    /// The fully-qualified Matrix user ID of the requesting user.
    pub user_id: UserId,
}
|
<?php
namespace App\Http\Controllers\Kategori;
use Illuminate\Http\Request;
use App\Http\Controllers\Controller;
use App\Http\Models\Kategori\KategoriModel as Master;
use Carbon\Carbon;
use PDF;
class KategoriPmksController extends Controller
{
    /**
     * Validation rules shared by store() and update() so the two
     * code paths cannot drift apart.
     *
     * @var array
     */
    private static $validationRules = [
        'bidang' => 'required',
        'kategori' => 'required',
        'deskripsi' => 'required',
        'jumlah_orang' => 'required',
        'sudah_ditangani' => 'required',
        'belum_ditangani' => 'required',
    ];

    /**
     * Require an authenticated user for every action on this controller.
     */
    public function __construct()
    {
        $this->middleware('auth');
    }

    /**
     * Display a paginated listing of the resource.
     *
     * @return \Illuminate\Http\Response
     */
    public function index()
    {
        $masters = Master::latest()->paginate(10);
        \Debugbar::info($masters);

        // 'i' is the zero-based offset of the first row on the current
        // page, used by the view to number table rows.
        return view('kategori.index', compact('masters'))
            ->with('i', (request()->input('page', 1) - 1) * 10);
    }

    /**
     * Search kategori records by kategori, jumlah_orang or bidang.
     *
     * @param  \Illuminate\Http\Request  $request
     * @return \Illuminate\Http\Response
     */
    public function search(Request $request)
    {
        // Build the LIKE pattern once instead of three times.
        $needle = '%' . $request->input('search') . '%';

        $masters = Master::where('kategori', 'like', $needle)
            ->orWhere('jumlah_orang', 'like', $needle)
            ->orWhere('bidang', 'like', $needle)
            ->get();

        return view('kategori.search', compact('masters'))
            ->with('i', (request()->input('page', 1) - 1) * 5);
    }

    /**
     * Show the form for creating a new resource.
     *
     * @return \Illuminate\Http\Response
     */
    public function create()
    {
        return view('kategori.create');
    }

    /**
     * Store a newly created resource in storage.
     *
     * @param  \Illuminate\Http\Request  $request
     * @return \Illuminate\Http\Response
     */
    public function store(Request $request)
    {
        request()->validate(self::$validationRules);

        $requestData = array(
            'bidang' => $request->input('bidang'),
            'kategori' => $request->input('kategori'),
            'deskripsi' => $request->input('deskripsi'),
            'jumlah_orang' => $request->input('jumlah_orang'),
            'sudah_ditangani' => $request->input('sudah_ditangani'),
            'belum_ditangani' => $request->input('belum_ditangani'),
            // Creation timestamp is stored as a Unix epoch value.
            'dibuat' => Carbon::now()->timestamp,
            'diubah' => '',
            'gambar' => '',
        );

        Master::create($requestData);

        return redirect()->route('kategori-pmks.index')
            ->with('success', 'Data Kategori PMKS Berhasil Ditambahkan');
    }

    /**
     * Display the specified resource.
     *
     * @param  int  $id
     * @return \Illuminate\Http\Response
     */
    public function show($id)
    {
        // findOrFail() returns a 404 for unknown ids instead of passing
        // null to the view (previously find() could do exactly that).
        $masters = Master::findOrFail($id);
        \Debugbar::info($masters);

        return view('kategori.show', compact('masters'));
    }

    /**
     * Show the form for editing the specified resource.
     *
     * @param  int  $id
     * @return \Illuminate\Http\Response
     */
    public function edit($id)
    {
        $masters = Master::findOrFail($id);
        \Debugbar::info($masters);

        return view('kategori.edit', compact('masters'));
    }

    /**
     * Update the specified resource in storage.
     *
     * @param  \Illuminate\Http\Request  $request
     * @param  int  $id
     * @return \Illuminate\Http\Response
     */
    public function update(Request $request, $id)
    {
        request()->validate(self::$validationRules);

        $requestData = array(
            'bidang' => $request->input('bidang'),
            'kategori' => $request->input('kategori'),
            'deskripsi' => $request->input('deskripsi'),
            'jumlah_orang' => $request->input('jumlah_orang'),
            'sudah_ditangani' => $request->input('sudah_ditangani'),
            'belum_ditangani' => $request->input('belum_ditangani'),
            'dibuat' => $request->input('dibuat'),
            // Modification timestamp is stored as a Unix epoch value.
            'diubah' => Carbon::now()->timestamp,
            'gambar' => '',
        );

        // update() persists the changes itself; the former trailing
        // save() call was redundant and has been removed.
        Master::findOrFail($id)->update($requestData);

        return redirect()->route('kategori-pmks.index')
            ->with('success', 'Data Kategori PMKS Berhasil Di Perbarui');
    }

    /**
     * Remove the specified resource from storage.
     *
     * @param  int  $id
     * @return \Illuminate\Http\Response
     */
    public function destroy($id)
    {
        Master::destroy($id);

        return redirect()->route('kategori-pmks.index')
            ->with('success', 'Data Kategori PMKS Berhasil Di Hapus');
    }

    /**
     * Render every record to a downloadable PDF recap.
     *
     * @return \Illuminate\Http\Response
     */
    public function printToPdf()
    {
        $masters = Master::all();
        \Debugbar::info($masters);

        // return view('kategori.printPdf', compact('masters'));
        // FIX: 'portrait' was previously misspelled 'portait', which
        // dompdf silently ignored.
        $pdf = PDF::loadView('kategori.printPdf', compact('masters'))
            ->setPaper('f4', 'portrait');

        return $pdf->download('REKAPITULASI DATA PMKS KABUPATEN BANDUNG BARAT.pdf');
    }
}
|
/*
* Copyright (c) 2002-2021, City of Paris
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright notice
* and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice
* and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* 3. Neither the name of 'Mairie de Paris' nor 'Lutece' nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* License 1.0
*/
package fr.paris.lutece.util.bean;
import fr.paris.lutece.portal.service.i18n.I18nService;
import fr.paris.lutece.portal.service.util.AppException;
import fr.paris.lutece.portal.service.util.AppLogService;
import org.apache.commons.beanutils.BeanUtilsBean;
import org.apache.commons.beanutils.PropertyUtils;
import org.apache.commons.beanutils.SuppressPropertiesBeanIntrospector;
import org.apache.commons.beanutils.converters.DateConverter;
import org.apache.commons.beanutils.converters.SqlTimeConverter;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.sql.Date;
import java.sql.Timestamp;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import javax.servlet.http.HttpServletRequest;
/**
* Bean Utils
*/
/**
 * Bean Utils: populates Java beans from HTTP request parameters, with
 * per-locale date format conversion for the administration locales.
 */
public final class BeanUtil
{
    private static final char UNDERSCORE = '_';

    static
    {
        // Globally suppress the "class" bean property so request
        // parameters cannot reach the class loader (BeanUtils
        // class-property injection hardening).
        PropertyUtils.addBeanIntrospector( SuppressPropertiesBeanIntrospector.SUPPRESS_CLASS );
    }

    // One BeanUtilsBean per admin language code, each registered with
    // that locale's short date pattern; built by init().
    private static Map<String, BeanUtilsBean> _mapBeanUtilsBeans;

    /**
     * BeanUtil initialization, considering Lutèce available locales and date format properties.
     * Builds one converter-equipped BeanUtilsBean per admin locale.
     */
    public static void init( )
    {
        _mapBeanUtilsBeans = new HashMap<>( );

        for ( Locale locale : I18nService.getAdminAvailableLocales( ) )
        {
            BeanUtilsBean beanUtilsBean = new BeanUtilsBean( );
            // Same "class" property suppression as the static block,
            // applied to this per-locale instance.
            beanUtilsBean.getPropertyUtils( ).addBeanIntrospector( SuppressPropertiesBeanIntrospector.SUPPRESS_CLASS );

            // null default: conversion failures yield null rather than
            // a fallback value.
            DateConverter dateConverter = new DateConverter( null );
            dateConverter.setPattern( I18nService.getDateFormatShortPattern( locale ) );
            beanUtilsBean.getConvertUtils( ).register( dateConverter, Date.class );

            SqlTimeConverter sqlTimeConverter = new SqlTimeConverter( null );
            beanUtilsBean.getConvertUtils( ).register( sqlTimeConverter, Timestamp.class );

            _mapBeanUtilsBeans.put( locale.getLanguage( ), beanUtilsBean );
        }
    }

    /** Private constructor */
    private BeanUtil( )
    {
    }

    /**
     * Populate a bean using parameters in http request
     *
     * @param bean
     *            bean to populate
     * @param request
     *            http request
     */
    public static void populate( Object bean, HttpServletRequest request )
    {
        populate( bean, request, null );
    }

    /**
     * Populate a bean using parameters in http request, with locale date format controls
     *
     * @param bean
     *            bean to populate
     * @param request
     *            http request
     * @param locale
     *            locale used to select the date converter; may be null
     */
    public static void populate( Object bean, HttpServletRequest request, Locale locale )
    {
        for ( Field field : bean.getClass( ).getDeclaredFields( ) )
        {
            try
            {
                // for all boolean field, init to false
                // (HTML checkboxes are simply absent from the request
                // when unchecked, so booleans must be reset first)
                if ( Boolean.class.isAssignableFrom( field.getType( ) ) || boolean.class.isAssignableFrom( field.getType( ) ) )
                {
                    field.setAccessible( true );
                    field.set( bean, false );
                }
            }
            catch( Exception e )
            {
                String error = "La valeur du champ " + field.getName( ) + " de la classe " + bean.getClass( ).getName( ) + " n'a pas pu être récupéré ";
                AppLogService.error( error );
                throw new AppException( error, e );
            }
        }

        try
        {
            // Pick the locale-specific instance when available; fall back
            // to the shared default instance otherwise.
            BeanUtilsBean beanUtilsBean;

            if ( locale != null && _mapBeanUtilsBeans != null )
            {
                beanUtilsBean = _mapBeanUtilsBeans.get( locale.getLanguage( ) );
            }
            else
            {
                beanUtilsBean = BeanUtilsBean.getInstance( );
            }

            beanUtilsBean.populate( bean, convertMap( request.getParameterMap( ) ) );
        }
        catch( InvocationTargetException | IllegalAccessException e )
        {
            AppLogService.error( "Unable to fetch data from request", e );
        }
    }

    /**
     * Convert map by camel-casing parameter names (underscore_style keys
     * become camelCase bean property names).
     *
     * @param mapInput
     *            The input map
     * @return The output map
     */
    public static Map<String, Object> convertMap( Map<String, String [ ]> mapInput )
    {
        Map<String, Object> mapOutput = new HashMap<>( );

        for ( Entry<String, String [ ]> entry : mapInput.entrySet( ) )
        {
            mapOutput.put( convertUnderscores( entry.getKey( ) ), entry.getValue( ) );
        }

        return mapOutput;
    }

    /**
     * Remove underscore and set the next letter in caps
     *
     * @param strSource
     *            The source
     * @return The converted string
     */
    public static String convertUnderscores( String strSource )
    {
        StringBuilder sb = new StringBuilder( );
        boolean bCapitalizeNext = false;

        for ( char c : strSource.toCharArray( ) )
        {
            if ( c == UNDERSCORE )
            {
                // Do not copy the underscore itself; capitalize what follows.
                bCapitalizeNext = true;
            }
            else
            {
                if ( bCapitalizeNext )
                {
                    sb.append( Character.toUpperCase( c ) );
                    bCapitalizeNext = false;
                }
                else
                {
                    sb.append( c );
                }
            }
        }

        return sb.toString( );
    }
}
|
// Doxygen-generated navigation-tree index (shard 3). Maps documentation page
// URLs/anchors to tree coordinates. Keys intentionally appear twice (once per
// top-level tab, [0,...] and [1,...]); as a plain object literal only the last
// assignment survives -- this mirrors how Doxygen's navtree consumes the data.
// Generated data: do not edit by hand.
var NAVTREEINDEX3 =
{
"structchm_1_1_float_array.html":[0,0,0,7],
"structchm_1_1_float_array.html":[1,0,0,7],
"structchm_1_1_float_array.html#ab2ba32f86e835417d735f4da9ce800b9":[0,0,0,7,3],
"structchm_1_1_float_array.html#ab2ba32f86e835417d735f4da9ce800b9":[1,0,0,7,3],
"structchm_1_1_float_array.html#adaea16c23c019458ae10d1a4a9c766e0":[0,0,0,7,0],
"structchm_1_1_float_array.html#adaea16c23c019458ae10d1a4a9c766e0":[1,0,0,7,0],
"structchm_1_1_float_array.html#af656ffeefe3cfecc781277ea8d4a267b":[0,0,0,7,2],
"structchm_1_1_float_array.html#af656ffeefe3cfecc781277ea8d4a267b":[1,0,0,7,2],
"structchm_1_1_float_array.html#afdb41cad8bd4d576e74953a8602edc67":[0,0,0,7,1],
"structchm_1_1_float_array.html#afdb41cad8bd4d576e74953a8602edc67":[1,0,0,7,1],
"structchm_1_1_function_info.html":[0,0,0,8],
"structchm_1_1_function_info.html":[1,0,0,8],
"structchm_1_1_function_info.html#a156af7e73810b5d57c98cd8b6283a3c6":[0,0,0,8,0],
"structchm_1_1_function_info.html#a156af7e73810b5d57c98cd8b6283a3c6":[1,0,0,8,0],
"structchm_1_1_function_info.html#a20a765dacabf0ce8e68b6f131e0bd23d":[0,0,0,8,2],
"structchm_1_1_function_info.html#a20a765dacabf0ce8e68b6f131e0bd23d":[1,0,0,8,2],
"structchm_1_1_function_info.html#ad2ef3fd4e8c5f1c680d72801a37bf3a2":[0,0,0,8,1],
"structchm_1_1_function_info.html#ad2ef3fd4e8c5f1c680d72801a37bf3a2":[1,0,0,8,1],
"structchm_1_1_heap_pair.html":[0,0,0,10],
"structchm_1_1_heap_pair.html":[1,0,0,10],
"structchm_1_1_heap_pair.html#a190eb2547537c4f9334e4192f8c2436e":[0,0,0,10,6],
"structchm_1_1_heap_pair.html#a190eb2547537c4f9334e4192f8c2436e":[1,0,0,10,6],
"structchm_1_1_heap_pair.html#a29fd4e86e93f78db1511137a85474ec1":[0,0,0,10,4],
"structchm_1_1_heap_pair.html#a29fd4e86e93f78db1511137a85474ec1":[1,0,0,10,4],
"structchm_1_1_heap_pair.html#a5e9da841834fb0d36f9e89d73538e679":[0,0,0,10,1],
"structchm_1_1_heap_pair.html#a5e9da841834fb0d36f9e89d73538e679":[1,0,0,10,1],
"structchm_1_1_heap_pair.html#a6b7d85996b187ceb6fd0f01da80169dc":[0,0,0,10,3],
"structchm_1_1_heap_pair.html#a6b7d85996b187ceb6fd0f01da80169dc":[1,0,0,10,3],
"structchm_1_1_heap_pair.html#a7658ec8722dae868f6a9f28077fe3315":[0,0,0,10,5],
"structchm_1_1_heap_pair.html#a7658ec8722dae868f6a9f28077fe3315":[1,0,0,10,5],
"structchm_1_1_heap_pair.html#a9e66a1a83110f7f660b5c729d370a9d9":[0,0,0,10,0],
"structchm_1_1_heap_pair.html#a9e66a1a83110f7f660b5c729d370a9d9":[1,0,0,10,0],
"structchm_1_1_heap_pair.html#aeb44c9ed982084c1fdbfe99669c98d55":[0,0,0,10,2],
"structchm_1_1_heap_pair.html#aeb44c9ed982084c1fdbfe99669c98d55":[1,0,0,10,2],
"structchm_1_1_heuristic_template.html":[0,0,0,11],
"structchm_1_1_heuristic_template.html":[1,0,0,11],
"structchm_1_1_heuristic_template.html#a09f39a379c79ef964ea1e94424f10f4f":[0,0,0,11,1],
"structchm_1_1_heuristic_template.html#a09f39a379c79ef964ea1e94424f10f4f":[1,0,0,11,1],
"structchm_1_1_heuristic_template.html#ac4b45e527d9423298bccb2c40d9faffa":[0,0,0,11,0],
"structchm_1_1_heuristic_template.html#ac4b45e527d9423298bccb2c40d9faffa":[1,0,0,11,0],
"structchm_1_1_heuristic_template.html#ae9edffcb03bd793e800ad40d813b0bdd":[0,0,0,11,2],
"structchm_1_1_heuristic_template.html#ae9edffcb03bd793e800ad40d813b0bdd":[1,0,0,11,2],
"structchm_1_1_naive_template.html":[1,0,0,16],
"structchm_1_1_naive_template.html":[0,0,0,16],
"structchm_1_1_naive_template.html#a3c03329bca23a9b001e38df678614635":[0,0,0,16,2],
"structchm_1_1_naive_template.html#a3c03329bca23a9b001e38df678614635":[1,0,0,16,2],
"structchm_1_1_naive_template.html#a495f7029187872fcb47f2fd3626010d1":[1,0,0,16,0],
"structchm_1_1_naive_template.html#a495f7029187872fcb47f2fd3626010d1":[0,0,0,16,0],
"structchm_1_1_naive_template.html#a9da27f67f148bdcdbf23f2167996e279":[1,0,0,16,1],
"structchm_1_1_naive_template.html#a9da27f67f148bdcdbf23f2167996e279":[0,0,0,16,1],
"structchm_1_1_near_comparator.html":[1,0,0,17],
"structchm_1_1_near_comparator.html":[0,0,0,17],
"structchm_1_1_near_comparator.html#ab964992385b4531512bf7d7ef3ad83b1":[0,0,0,17,0],
"structchm_1_1_near_comparator.html#ab964992385b4531512bf7d7ef3ad83b1":[1,0,0,17,0],
"structchm_1_1_no_bit_array_template.html":[0,0,0,19],
"structchm_1_1_no_bit_array_template.html":[1,0,0,19],
"structchm_1_1_no_bit_array_template.html#a6e2e48b5c7dba3cd625a1eac65799027":[1,0,0,19,1],
"structchm_1_1_no_bit_array_template.html#a6e2e48b5c7dba3cd625a1eac65799027":[0,0,0,19,1],
"structchm_1_1_no_bit_array_template.html#a99b92996703b875ce4941171c1d1b752":[1,0,0,19,0],
"structchm_1_1_no_bit_array_template.html#a99b92996703b875ce4941171c1d1b752":[0,0,0,19,0],
"structchm_1_1_no_bit_array_template.html#af3b4c9b60f3302e9b5c656237f569685":[0,0,0,19,2],
"structchm_1_1_no_bit_array_template.html#af3b4c9b60f3302e9b5c656237f569685":[1,0,0,19,2],
"structchm_1_1_node.html":[0,0,0,20],
"structchm_1_1_node.html":[1,0,0,20],
"structchm_1_1_node.html#a0b469cbf093d41a850d5c62278d3cfa9":[0,0,0,20,2],
"structchm_1_1_node.html#a0b469cbf093d41a850d5c62278d3cfa9":[1,0,0,20,2],
"structchm_1_1_node.html#a364708057eddb8063c5c153e3102a8a4":[0,0,0,20,1],
"structchm_1_1_node.html#a364708057eddb8063c5c153e3102a8a4":[1,0,0,20,1],
"structchm_1_1_node.html#a644c17d256c4a7948b281a6df19ecf56":[0,0,0,20,0],
"structchm_1_1_node.html#a644c17d256c4a7948b281a6df19ecf56":[1,0,0,20,0],
"structchm_1_1_node.html#a8a99c30483e56e6c755702a4ad93107a":[0,0,0,20,3],
"structchm_1_1_node.html#a8a99c30483e56e6c755702a4ad93107a":[1,0,0,20,3],
"structchm_1_1_node_cmp.html":[0,0,0,21],
"structchm_1_1_node_cmp.html":[1,0,0,21],
"structchm_1_1_node_cmp.html#ab89379ba7d6db8c985aec795b5e81e5e":[0,0,0,21,0],
"structchm_1_1_node_cmp.html#ab89379ba7d6db8c985aec795b5e81e5e":[1,0,0,21,0],
"structchm_1_1_prefetching_template.html":[0,0,0,22],
"structchm_1_1_prefetching_template.html":[1,0,0,22],
"structchm_1_1_prefetching_template.html#a2e5e21e1a037db86180a815cafaa251b":[0,0,0,22,2],
"structchm_1_1_prefetching_template.html#a2e5e21e1a037db86180a815cafaa251b":[1,0,0,22,2],
"structchm_1_1_prefetching_template.html#a74793732a2ed58f46211d653ad9f7118":[0,0,0,22,0],
"structchm_1_1_prefetching_template.html#a74793732a2ed58f46211d653ad9f7118":[1,0,0,22,0],
"structchm_1_1_prefetching_template.html#afade2ad615878df99dc397a098f1f4a0":[0,0,0,22,1],
"structchm_1_1_prefetching_template.html#afade2ad615878df99dc397a098f1f4a0":[1,0,0,22,1],
"structchm_1_1_recall_table_config.html":[0,0,0,25],
"structchm_1_1_recall_table_config.html":[1,0,0,25],
"structchm_1_1_recall_table_config.html#a1203f7354f88b788d8fa4346541131ec":[1,0,0,25,3],
"structchm_1_1_recall_table_config.html#a1203f7354f88b788d8fa4346541131ec":[0,0,0,25,3],
"structchm_1_1_recall_table_config.html#a1ccbb2246fb9d7d380ba10a4265314c9":[1,0,0,25,2],
"structchm_1_1_recall_table_config.html#a1ccbb2246fb9d7d380ba10a4265314c9":[0,0,0,25,2],
"structchm_1_1_recall_table_config.html#a48fdc250d3b4069bf10090c785b65c67":[1,0,0,25,0],
"structchm_1_1_recall_table_config.html#a48fdc250d3b4069bf10090c785b65c67":[0,0,0,25,0],
"structchm_1_1_recall_table_config.html#a833c7e38af1b6e3ae1138609b810d7bf":[0,0,0,25,5],
"structchm_1_1_recall_table_config.html#a833c7e38af1b6e3ae1138609b810d7bf":[1,0,0,25,5],
"structchm_1_1_recall_table_config.html#a8c3196f05f113ed867bd9a5a52de29a6":[1,0,0,25,1],
"structchm_1_1_recall_table_config.html#a8c3196f05f113ed867bd9a5a52de29a6":[0,0,0,25,1],
"structchm_1_1_recall_table_config.html#a9ee7cba9bf82617d61a70304fec3546f":[0,0,0,25,8],
"structchm_1_1_recall_table_config.html#a9ee7cba9bf82617d61a70304fec3546f":[1,0,0,25,8],
"structchm_1_1_recall_table_config.html#aa33a9a067a799be28f3ca2d2a8ca7cbf":[1,0,0,25,6],
"structchm_1_1_recall_table_config.html#aa33a9a067a799be28f3ca2d2a8ca7cbf":[0,0,0,25,6],
"structchm_1_1_recall_table_config.html#ac23bc6bb87730c540382dfc3631bbcc6":[0,0,0,25,4],
"structchm_1_1_recall_table_config.html#ac23bc6bb87730c540382dfc3631bbcc6":[1,0,0,25,4],
"structchm_1_1_recall_table_config.html#af22ef66e670b702ca8a5565d2a9f30bd":[1,0,0,25,7],
"structchm_1_1_recall_table_config.html#af22ef66e670b702ca8a5565d2a9f30bd":[0,0,0,25,7],
"structchm_1_1_visit_result.html":[0,0,0,29],
"structchm_1_1_visit_result.html":[1,0,0,29],
"structchm_1_1_visit_result.html#a7754ce5f13ac9c656d36590196b59f46":[0,0,0,29,3],
"structchm_1_1_visit_result.html#a7754ce5f13ac9c656d36590196b59f46":[1,0,0,29,3],
"structchm_1_1_visit_result.html#a7b5ccc4fccab6955416e1e67e4cbc8a6":[0,0,0,29,4],
"structchm_1_1_visit_result.html#a7b5ccc4fccab6955416e1e67e4cbc8a6":[1,0,0,29,4],
"structchm_1_1_visit_result.html#ab242ad03c641792e98919b9127152c0e":[0,0,0,29,0],
"structchm_1_1_visit_result.html#ab242ad03c641792e98919b9127152c0e":[1,0,0,29,0],
"structchm_1_1_visit_result.html#ae5ce1850aae3143b04641ff59bcf22e0":[0,0,0,29,1],
"structchm_1_1_visit_result.html#ae5ce1850aae3143b04641ff59bcf22e0":[1,0,0,29,1],
"structchm_1_1_visit_result.html#afad4804fce0740f0c123593157fdb0ad":[0,0,0,29,2],
"structchm_1_1_visit_result.html#afad4804fce0740f0c123593157fdb0ad":[1,0,0,29,2],
"types_8hpp.html":[2,0,0,0,0,26],
"types_8hpp.html#a513a40c15e5fad57de99dabd89d0c494":[2,0,0,0,0,26,0],
"types_8hpp_source.html":[2,0,0,0,0,26]
};
|
# Exposes the application's health-check status as JSON.
#
# NOTE(review): GlimrApiClient::Unavailable is handled by re-invoking +index+,
# which calls Status.check again; presumably Status.check reports (rather than
# re-raises) the outage on that second call -- TODO confirm, otherwise the
# handler itself errors out.
class StatusController < ApplicationController
  rescue_from GlimrApiClient::Unavailable, with: :index
  respond_to :json

  # GET /status -- responds with the serialized result of Status.check.
  def index
    respond_with(Status.check.to_json)
  end
end
|
package com.vairavans.analytics
import io.mockk.Called
import io.mockk.mockk
import io.mockk.verify
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
@RunWith(JUnit4::class)
class AbsDaggerAnalyticsAppCompatActivityTest {

    // Concrete subclass exposing a helper that funnels an event through the
    // base class' logAnalyticsEvent (analytics enabled by default).
    open class TestEnabledActivity : AbsDaggerAnalyticsAppCompatActivity() {
        fun performSomeActionGeneratingAnalyticsEvent( event : String ) =
            logAnalyticsEvent( event )
    }

    // Variant with analytics switched off via the enableAnalytics flag.
    class TestDisabledActivity : TestEnabledActivity() {
        override var enableAnalytics: Boolean = false
    }

    // NOTE(review): activities are constructed directly (no Robolectric);
    // presumably the base class touches no Android framework state at
    // construction time -- TODO confirm.
    private val analyticsEnabledActivity = TestEnabledActivity().apply {
        firebaseAnalytics = mockk(relaxed = true)
    }
    private val analyticsDisabledActivity = TestDisabledActivity().apply {
        firebaseAnalytics = mockk(relaxed = true)
    }

    // When analytics is enabled the event must reach FirebaseAnalytics.
    @Test
    fun `AbsDaggerAnalyticsAppCompatActivity logs requested event when enabled`() {
        analyticsEnabledActivity.performSomeActionGeneratingAnalyticsEvent("Event")
        verify { analyticsEnabledActivity.firebaseAnalytics.logEvent( "Event" ) }
    }

    // When analytics is disabled FirebaseAnalytics must see no interaction at all.
    @Test
    fun `AbsDaggerAnalyticsAppCompatActivity ignores event when disabled`() {
        analyticsDisabledActivity.performSomeActionGeneratingAnalyticsEvent("Event")
        verify { analyticsDisabledActivity.firebaseAnalytics wasNot Called }
    }
}
|
import numpy as np
from .geometry import GRBLocation
class GRB(object):
    """A gamma-ray burst at a fixed sky location with a pulse-shaped flux."""

    def __init__(self, ra, dec, distance, K, t_rise, t_decay):
        """
        A GRB that emits a spectrum at a given location.

        :param ra: RA of the GRB
        :param dec: DEC of the GRB
        :param distance: distance to the GRB
        :param K: normalization of the flux
        :param t_rise: rise time of the flux
        :param t_decay: decay time of the flux
        :returns: None
        :rtype: None
        """
        # create a GRB location
        self._location = GRBLocation(ra, dec, distance)

        # Temporal pulse parameters; units are whatever the caller supplies
        # (not enforced here -- TODO confirm expected units).
        self._K = K
        self._t_rise = t_rise
        self._t_decay = t_decay

    @property
    def pulse_parameters(self):
        """
        The temporal flux parameters.

        :returns: (K, t_rise, t_decay)
        :rtype: tuple
        """
        return self._K, self._t_rise, self._t_decay

    @property
    def location(self):
        # The GRBLocation built from (ra, dec, distance) at construction time.
        return self._location
|
module PDoc
  module Models
    # A single documented entity. Entities sort case-insensitively by id and
    # carry the signatures and aliases collected during parsing.
    class Entity < Base
      attr_accessor :alias

      # Lazily-initialized list of parsed signatures.
      def signatures
        @signatures ||= []
      end

      # True when at least one signature has been recorded.
      def signatures?
        @signatures && !@signatures.empty?
      end

      # The primary (first) signature, memoized.
      def signature
        @signature ||= signatures.first
      end

      # Case-insensitive ordering by id.
      def <=>(other)
        id.downcase <=> other.id.downcase
      end

      # Href for this entity's source, built by the globally-configured
      # Models.src_code_href proc when one is set (memoized).
      def src_code_href
        builder = Models.src_code_href
        @src_code_href ||= builder && builder.call(self)
      end

      # Link text for the source-code link, taken from the global setting.
      def src_code_text
        @src_code_text ||= Models.src_code_text
      end

      # True when this entity was methodized.
      def methodized?
        !!@methodized
      end

      # True when this entity is an alias of another entity.
      def alias?
        !!@alias
      end

      # All entities that alias this one.
      def aliases
        @aliases ||= []
      end

      # True when at least one alias has been recorded.
      def aliases?
        @aliases && !@aliases.empty?
      end

      # Hash representation extending Base#to_hash with alias/signature data.
      def to_hash
        super.merge(
          :aliases => aliases.map(&:id),
          :alias => self.alias ? self.alias.id : nil,
          :signatures => signatures,
          :src_code_href => src_code_href
        )
      end
    end
  end
end
|
import Store from "./Store";
import { actionT, reducerT, reduceTreeT } from "./type";
/**
 * Builds a Store from a reducer, dispatching an optional initial action.
 * When no action is supplied, a default `{ type: "init" }` action is used.
 */
const createStore = (reducer: reducerT, action?: actionT) => {
  const initialAction: actionT = action || { type: "init" };
  return new Store(reducer, initialAction);
};
export default createStore;
|
<?php
/**
 * Echoes an <option> element for a selectable user.
 *
 * @param array $user Associative array with "id", "name" and "surname" keys.
 * @return void Output is echoed directly.
 */
function createAvailableUserOption($user){
    // Escape all user-supplied values before embedding them in HTML to
    // prevent markup/attribute injection (XSS); ENT_QUOTES also covers the
    // single-quoted value attribute.
    $user_id = htmlspecialchars($user["id"], ENT_QUOTES);
    $user_name_surname = htmlspecialchars($user["name"]." ".$user["surname"], ENT_QUOTES);
    echo "<option value='$user_id'>$user_name_surname</option>";
}
?>
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Glass.Mapper;
using Glass.Mapper.Caching;
using Glass.Mapper.Diagnostics;
using Glass.Mapper.IoC;
using Glass.Mapper.Maps;
using Glass.Mapper.Pipelines.ConfigurationResolver;
using Glass.Mapper.Pipelines.DataMapperResolver;
using Glass.Mapper.Pipelines.ObjectConstruction;
using Glass.Mapper.Pipelines.ObjectSaving;
namespace StackExchange.Profiling.Glassmapper
{
    /// <summary>
    /// Decorator around another Glass.Mapper <see cref="IDependencyResolver"/>.
    /// NOTE(review): despite the "Profiled" name, every member below delegates
    /// straight to the inner resolver with no profiling instrumentation --
    /// confirm whether MiniProfiler timing hooks were intended here.
    /// </summary>
    public class GlassProfiledDependencyResolver : IDependencyResolver
    {
        // The wrapped resolver that performs all real work.
        private readonly IDependencyResolver _inner;

        /// <summary>Wraps the given resolver.</summary>
        public GlassProfiledDependencyResolver(IDependencyResolver inner) => _inner = inner;

        // --- Straight pass-through members -------------------------------
        public Config GetConfig() =>
            _inner.GetConfig();
        public ILog GetLog() =>
            _inner.GetLog();
        public ICacheManager GetCacheManager() =>
            _inner.GetCacheManager();
        public ModelCounter GetModelCounter() =>
            _inner.GetModelCounter();
        public void Finalise() =>
            _inner.Finalise();
        public IConfigFactory<AbstractDataMapperResolverTask> DataMapperResolverFactory =>
            _inner.DataMapperResolverFactory;
        public IConfigFactory<AbstractDataMapper> DataMapperFactory =>
            _inner.DataMapperFactory;
        public IConfigFactory<AbstractConfigurationResolverTask> ConfigurationResolverFactory =>
            _inner.ConfigurationResolverFactory;
        public IConfigFactory<AbstractObjectConstructionTask> ObjectConstructionFactory =>
            _inner.ObjectConstructionFactory;
        public IConfigFactory<AbstractObjectSavingTask> ObjectSavingFactory =>
            _inner.ObjectSavingFactory;
        public IConfigFactory<IGlassMap> ConfigurationMapFactory =>
            _inner.ConfigurationMapFactory;
    }
}
|
import { reject, resolve } from "Bluebird"
import chai, { assert } from "chai"
import chaiAsPromised from "chai-as-promised"
import { spy, stub } from "sinon"
import EventEmitter from "events"
import KoaServer from "server/server/KoaServer"
import { ConnectionError } from "server/lib/errors"
// Minimal stand-in for the service interface KoaServer expects; the no-op
// methods are re-stubbed per-test with sinon to simulate connect success,
// failure and disconnect notifications.
class MockService {
  get name() { return "MockService" }
  connect() { }
  connectWithRetry() { }
  isConnected() { }
  close() { }
  onDisconnect() { }
}
// Integration-style tests for KoaServer's lifecycle: start, middleware
// installation, service connection/retry, and disconnect handling.
describe("KoaServer", () => {
  before(() => {
    chai.use(chaiAsPromised)
    chai.should()
  })

  // Bare server with no middleware or services starts cleanly.
  it("starts server", (done) => {
    const server = new KoaServer()
    server.start().should.be.fulfilled.and.notify(() => {
      server.close()
      done()
    })
  })

  // A supplied middleware array must not prevent startup.
  it("starts server with middleware", (done) => {
    const middleware = (s) => s
    const server = new KoaServer([middleware])
    server.start().should.be.fulfilled.and.notify(() => {
      server.close()
      done()
    })
  })

  // All healthy services resolve their connect() and the server starts.
  it("connects to all valid services", (done) => {
    const service1 = new MockService()
    stub(service1, "connect", () => resolve(service1))
    stub(service1, "isConnected", () => true)
    const service2 = new MockService()
    stub(service2, "connect", () => resolve(service2))
    stub(service2, "isConnected", () => true)
    const server = new KoaServer([], [service1, service2])
    server.start().should.be.fulfilled.and.notify(() => {
      server.close()
      done()
    })
  })

  // A failing service triggers exactly RECONNECT_RETRIES attempts, then a
  // ConnectionError rejection.
  it("retries connection n times when a service is down", (done) => {
    const service1 = new MockService()
    stub(service1, "connect", () => resolve(service1))
    stub(service1, "isConnected", () => true)
    const service2 = new MockService()
    stub(service2, "connect", () => reject(service2))
    stub(service2, "isConnected", () => false)
    const RECONNECT_RETRIES = 5
    const RECONNECT_DELAY = 0
    const server = new KoaServer([], [service1, service2], RECONNECT_RETRIES, RECONNECT_DELAY)
    spy(server, "connectAll")
    server.start().should.be.rejectedWith(ConnectionError).and.notify(() => {
      assert.equal(server.connectAll.callCount, RECONNECT_RETRIES)
      server.connectAll.restore()
      server.close()
      done()
    })
  })

  // onDisconnect callbacks registered by the server are invoked when the
  // underlying service emits a close event.
  it("calls disconnect handler on service disconnect", (done) => {
    const service1 = new MockService()
    const emitter = new EventEmitter()
    stub(service1, "connect", () => resolve(service1))
    stub(service1, "connectWithRetry", () => resolve(service1))
    stub(service1, "isConnected", () => true)
    stub(service1, "onDisconnect", (callback) => emitter.on("close", () => callback()))
    const server = new KoaServer([], [service1])
    spy(server, "handleDisconnect")
    server.start().should.be.fulfilled.and.notify(() => {
      // Simulate service disconnect
      emitter.emit("close")
      // Check disconnect handler was called
      assert(server.handleDisconnect.calledOnce)
      server.handleDisconnect.restore()
      server.close()
      done()
    })
  })
})
|
package network.o3.o3wallet.API.Ontology
import com.google.gson.JsonObject
// JSON-RPC response envelope returned by an Ontology node.
data class OntologyDataResponse(val desc: String, val error: Int,
                                val id: Int, val jsonrpc: String,
                                val result: JsonObject)

// Gas price together with the block height it was sampled at.
data class GasPrice(val gasprice: Long, val height: Long)

// Error envelope. NOTE(review): property `Id` breaks Kotlin naming convention
// but presumably mirrors the JSON field name for Gson -- confirm before renaming.
data class OntologyError(val code: Int, val Id: Int, val result: OntologyErrorResult)

// Detailed error payload carried inside OntologyError.
data class OntologyErrorResult(val code: Int, val data: String, val message: String)
|
import { storiesOf } from '@storybook/react';
import withFusionStory from '../../../.storybook/withFusionStory';
import useKeyboardNavigation from '../useKeyboardNavigation';
import {useState, Fragment} from "react";
// Storybook demo: while the input element is focused, arrow keys move a
// highlight through a static list. The hook is bound to the input via a
// ref captured in state so re-renders keep the listener attached.
const KeyboardNavigationStory = () => {
  const listItems = ['Item 1', 'Item 2', 'Item 3'];
  // Index of the highlighted item; null means "no selection".
  const [currentItem, setCurrentItem] = useState<number | null>(0);
  // The DOM element the keyboard hook listens on (the input below).
  const [ref, setRef] = useState<HTMLElement | null>(null);
  useKeyboardNavigation(
    {
      // Move down, clamped at the last item.
      onDown: () =>
        setCurrentItem(
          currentItem !== null && currentItem < listItems.length - 1
            ? currentItem + 1
            : currentItem
        ),
      // Move up, clamped at the first item.
      onUp: () =>
        setCurrentItem(
          currentItem !== null && currentItem > 0 ? currentItem - 1 : currentItem
        ),
    },
    ref
  );
  return (
    <Fragment>
      <input
        ref={setRef}
        placeholder={
          currentItem !== null ? listItems[currentItem] : 'Click me to use navigation'
        }
      />
      <ul>
        {listItems.map((item, index) => (
          <li
            style={{
              listStyleType: 'none',
              // Bold the currently-highlighted item.
              fontWeight: index === currentItem ? 600 : 200,
            }}
            key={index.toString()}
          >
            {item}
          </li>
        ))}
      </ul>
    </Fragment>
  );
};

// Register the story under Hooks/KeyboardNavigation.
storiesOf('Hooks/KeyboardNavigation', module)
  .addDecorator(withFusionStory('KeyboardNavigation'))
  .add('Default', () => {
    return <KeyboardNavigationStory />;
  });
|
/*
Level15: Map of tiles for the Level 15 / 20
Part of Manic Miner Remake
@see Game Level Map
Nacho, 2011 & 2017
Versions:
Num. Date Changes
---- ----------- --------------------------------
0.20 20-Ago-2017 Almost identical to 0.15, but translated to English
*/
// Level 15 ("The Bank"): defines the tile map and the three patrolling
// enemies for this stage. Tile legend (per the other levels' convention --
// TODO confirm): L/M walls, S/D platforms, P player start area, V/F hazards.
public class Level15 : Level
{
    public Level15()
    {
        name = "The Bank";
        // 16 rows x 32 columns of tile characters (strings must keep exact spacing).
        startingLevelData[ 0] = "L LLLLLLLLLLLLLLLLLLLLLLLLLLL";
        startingLevelData[ 1] = "L MML";
        startingLevelData[ 2] = "L V MML";
        startingLevelData[ 3] = "LPP DDDDDDDDDDDDDDDDSSSSSMML";
        startingLevelData[ 4] = "LPP MML";
        startingLevelData[ 5] = "LSSSSS MML";
        startingLevelData[ 6] = "L V SS MML";
        startingLevelData[ 7] = "L F SS MML";
        startingLevelData[ 8] = "L SS SS MML";
        startingLevelData[ 9] = "L MML";
        startingLevelData[10] = "LSS SS SS MML";
        startingLevelData[11] = "L SS MML";
        startingLevelData[12] = "L SSS SS MML";
        startingLevelData[13] = "L SS MML";
        startingLevelData[14] = "L V MML";
        startingLevelData[15] = "LSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSL";

        numEnemies = 3;
        enemyList = new Enemy[numEnemies];
        // Enemy 0: horizontal patrol along the top corridor.
        enemyList[0] = new Enemy("data/level15_enemy1.png", myGame);
        enemyList[0].MoveTo(200, 65);
        enemyList[0].SetSpeed(2, 0);
        enemyList[0].SetMinMaxX(100, 670);
        enemyList[0].SetWidthHeight(36, 48);
        //enemyList[0].ChangeDirection(Sprite.RIGHT);
        // Enemies 1 and 2: vertical patrols sharing the same Y range.
        enemyList[1] = new Enemy("data/level15_enemy1.png", myGame);
        enemyList[1].MoveTo(250, 300);
        enemyList[1].SetSpeed(0, 2);
        enemyList[1].SetMinMaxY(140, 350);
        enemyList[1].SetWidthHeight(36, 48);
        enemyList[2] = new Enemy("data/level15_enemy1.png", myGame);
        enemyList[2].MoveTo(520, 160);
        enemyList[2].SetSpeed(0, 2);
        enemyList[2].SetMinMaxY(140, 350);
        enemyList[2].SetWidthHeight(36, 48);
        Restart();
    }
}
|
package io.bazel.rulesscala.test_discovery
import java.io.{File, FileInputStream}
import java.util.jar.{JarEntry, JarInputStream}
object ArchiveEntries {
  // Lists the .class entries contained in `file`, which may be either a
  // directory tree (entries are returned relative to it) or a jar file.
  def listClassFiles(file: File): Stream[String] = {
    val allEntries = if (file.isDirectory)
      directoryEntries(file).map(_.stripPrefix(file.toString).stripPrefix("/"))
    else
      jarEntries(new JarInputStream(new FileInputStream(file)))
    allEntries.filter(_.endsWith(".class"))
  }

  // Pulls the next jar entry, closing the stream once it is exhausted.
  // NOTE(review): the stream is only closed when fully drained; a caller
  // that stops iterating early leaks the file handle -- confirm callers
  // always consume the whole stream.
  private def getJarEntryOrCloseStream(jarInputStream: JarInputStream): Option[JarEntry] = {
    val entry = Option(jarInputStream.getNextJarEntry)
    if (entry.isEmpty)
      jarInputStream.close()
    entry
  }

  // Lazily enumerates all entry names of a jar stream.
  private def jarEntries(jarInputStream: JarInputStream): Stream[String] =
    Stream.continually(getJarEntryOrCloseStream(jarInputStream))
      .takeWhile(_.nonEmpty)
      .flatten
      .map(_.getName)

  // Recursively enumerates a directory tree, including directory paths
  // themselves (the .class filter above discards them).
  private def directoryEntries(file: File): Stream[String] =
    file.toString #:: (file.listFiles match {
      case null => Stream.empty
      case files => files.toStream.flatMap(directoryEntries)
    })
}
|
---
layout: post
title: "越南"
date: 2017-05-26
categories:
- 环游世界那些事
description:
image: /img/UNADJUSTEDNONRAW_thumb_3001.jpg
image-sm:
---
2017年4月开始,因工作的机会两次进入越南,开始了一段对边境邻国风土人情的探索。
<h3>签证</h3>
越南签证为另纸签证,淘宝上办理238一次,只需要提供护照首页的扫描件即可办理。一天出签,速度还是相当快的。入关和出关只会在另纸签证上盖章,因此无论你去过多少次越南,护照上基本上不留下任何痕迹。
<h3>货币和消费水平</h3>
越南的当地货币为越南盾,计算方法很简单:去掉三个0,除以3,基本就是人民币的价钱了。<br/>
物价算是比较低廉的。我们入住的3-4星酒店,高级双床房一晚约为人民币200左右。<br/>
网络不错,基本能支持4G。每次停留时间15天,第一天购买的电话卡充值30元基本够用,公共场合基本都有WI-FI<br/>
<h4>河内</h4>
作为越南的首都,河内给人的第一印象,便是粉嫩的建筑。最有名的地标性建筑,是屹立在还剑区附近的约瑟夫教堂。还剑区是河内的老城区,围绕着还剑湖周围的一带,外国游客众多,也是来河内一定要去的地方。
<figure>
<img src="/img/UNADJUSTEDNONRAW_thumb_2ff1.jpg" alt=""/>
<figcaption>约瑟夫大教堂</figcaption>
</figure>
<figure>
<img src="/img/IMG_1450.jpg" alt=""/>
<figcaption>还剑湖中心商贸区</figcaption>
</figure>
<figure>
<img src="/img/IMG_1468.jpg" alt=""/>
<figcaption>教堂附近咖啡馆的小资情调</figcaption>
</figure>
河内是一个交通极度拥堵的城市,各式各样的摩托车随处穿行,城区弥漫着浓浓的尾气。当地人的上班时间是从早上8点到下午5点,因此从4点半左右开始,路上的摩托车就如蜂群一般开始涌流。无意中拍到一张海报,也是有趣。
<figure>
<img src="/img/1t76M0jZRxqFI6mk9qMkMA_thumb_2ae5.jpg" alt=""/>
<figcaption>街边的社会主义宣传海报</figcaption>
</figure>
越南人喜欢吃米粉,米粉摊位几乎到处都是。后来去了中部,才知道粉也是有分类的。像河内这种细细的,叫做pho, 而中部那种扁状的,叫做Bun。
<figure>
<img src="/img/UNADJUSTEDNONRAW_thumb_2ae3.jpg" alt=""/>
<figcaption>配上香茅的Pho</figcaption>
</figure>
<figure>
<img src="/img/IMG_1765.jpg" alt=""/>
<figcaption>春卷</figcaption>
</figure>
<h4>顺化</h4>
越南的古都,越南差旅的第二站。<br/>
古城顺化就没有首都那么繁华,也略显干净而接地气。欧美的游客还是挺多的,就是没有什么中国人,大概因为,这座城市的人,都不太喜欢中国人吧。
食物明显没有河内那么合胃口,每天吃粉便是日常。
|
{-# LANGUAGE TupleSections #-}
--
-- Evaluation
--
module FreeCat.Evaluate where
import Data.Map as Map
import FreeCat.Core
-- | Reduces an expression to (weak head) normal form in the given context.
evaluate :: Context -> Expr -> FreeCat Expr
-- A bare symbol: unfold it when its first equation is an unconditional
-- constant definition (no pattern variables, symbol-only LHS).
evaluate c e@(SymbolExpr s pos) = do
  case lookupSymbol c (name s) of
    Nothing -> return e
    Just s' ->
      case equations s' of
        (Equation c' [] (SymbolExpr _ _) e _ : _) -> evaluate c' e
        _ -> return (SymbolExpr s pos)
-- An application: evaluate both sides, then dispatch on the head.
evaluate c e@(AppExpr e0 e1 pos) =
  do e0e <- evaluate c e0
     e1e <- evaluate c e1
     case e0e of
       -- Head is a symbol: try its pattern-match equations.
       SymbolExpr s pos ->
         case lookupSymbol c (name s) of
           Nothing -> return (AppExpr e0e e1e pos)
           Just s -> evaluatePatternMatch (equations s) (AppExpr e0e e1e pos)
       -- Head is itself an application: dispatch on its lead symbol.
       AppExpr _ _ pos ->
         do s <- leadSymbol e0e
            case lookupSymbol c (name s) of
              Nothing -> return (AppExpr e0e e1e pos)
              Just s -> evaluatePatternMatch (equations s) (AppExpr e0e e1e pos)
       -- Beta reduction: bind the argument as a constant definition of the
       -- lambda's parameter, then evaluate the body in that extended context.
       LambdaExpr c' s d pos ->
         do ec' <- augmentContext c' (name s) Nothing
              (definedType s) Nothing [constantDefinition s (definedType s) e1e]
            evaluate ec' d
       -- Function types cannot be applied.
       FunctionTypeExpr _ _ _ -> barf ErrFunctionTypeOnAppLHS
       DependentFunctionTypeExpr _ _ _ -> barf ErrFunctionTypeOnAppLHS
-- Lambdas are values: returned unchanged.
evaluate c0 e@(LambdaExpr c1 s d pos) = return e
-- Function types: evaluate both sides structurally.
evaluate c e@(FunctionTypeExpr a b pos) =
  do ae <- evaluate c a
     be <- evaluate c b
     return (FunctionTypeExpr ae be pos)
-- Dependent function types: evaluate the domain, extend the context with
-- the bound symbol, then evaluate the codomain under that binding.
evaluate c e@(DependentFunctionTypeExpr s b pos) = do
  ae <- evaluate c (definedType s)
  c' <- augmentContext c (name s) Nothing ae Nothing []
  be <- evaluate c' b
  s' <- certainly (lookupSymbol c' (name s))
  return (DependentFunctionTypeExpr s' be pos)
-- Checks if the given expr matches any of the given pattern match equations.
-- Returns the result of evaluating the expr against the first matching definition
-- if one matches, and returns the input unchanged if no patterns match. Assumes the
-- subexpressions of the given expr are normalized.
evaluatePatternMatch :: [Equation] -> Expr -> FreeCat Expr
-- No equations left: the expression is stuck; return it as-is.
evaluatePatternMatch [] e = return e
evaluatePatternMatch ((Equation c0 _ p d pos):defs) e =
  do unifyResult <- unifyExprWithPattern c0 e p
     case unifyResult of
       -- Matched: evaluate the RHS in the context extended with the bindings.
       Just c1 -> evaluate c1 d
       -- No match: fall through to the remaining equations.
       Nothing -> evaluatePatternMatch defs e
-- Takes an expr and a pattern and returns an augmented context in which the
-- pattern variables are defined according to the unification of expr and pattern.
-- That assumes expr can be unified with pattern. If not returns nothing.
-- Assumes expr is evaluated (i.e. in normal form).
unifyExprWithPattern :: Context -> Expr -> Pattern -> FreeCat (Maybe Context)
unifyExprWithPattern c0 e pat = do
  result <- _unifyExprWithPattern (c0, Map.empty) e pat
  -- Drop the accumulated variable-binding map; callers only need the context.
  return (fst <$> result)
-- Worker for unifyExprWithPattern: threads the context together with a map
-- of pattern-variable names already bound to subexpressions.
_unifyExprWithPattern :: (Context, Map String Expr) -> Expr -> Pattern -> FreeCat (Maybe (Context, Map String Expr))
-- Pattern is a symbol: either an already-bound variable (must match the
-- earlier binding exactly), a constant declared in the context (expr must be
-- the same symbol), or a fresh pattern variable (bind it to expr).
_unifyExprWithPattern (c, matches) e (SymbolExpr t _) =
  case Map.lookup (name t) matches of
    Just v ->
      -- Non-linear pattern: the variable was bound before; require equality.
      if e == v
        then return (Just (c, matches))
        else return Nothing
    Nothing ->
      case lookupSymbol c (name t) of
        Just s ->
          -- The pattern symbol is a known constant: only an identical
          -- symbol expression unifies with it.
          case e of
            SymbolExpr u _ ->
              if u == t
                then return (Just (c, matches))
                else return Nothing
            _ -> return Nothing
        Nothing -> do
          -- Fresh pattern variable: record e as its constant definition.
          c' <- augmentContext c (name t) Nothing (definedType t) Nothing
            [constantDefinition t (definedType t) e]
          return (Just (c', Map.insert (name t) e matches))
-- Applications unify componentwise, threading bindings left to right.
_unifyExprWithPattern (c0, matches0) (AppExpr e f _) (AppExpr p q _) =
  do unifyResult1 <- _unifyExprWithPattern (c0, matches0) e p
     case unifyResult1 of
       Nothing -> return Nothing
       Just (c1, matches1) ->
         do unifyResult2 <- _unifyExprWithPattern (c1, matches1) f q
            case unifyResult2 of
              Nothing -> return Nothing
              Just (c2, matches2) -> return unifyResult2
-- Any other shape combination fails to unify.
_unifyExprWithPattern (c, matches) e p = return Nothing
|
#!/bin/bash -ev
# Written by: Tommy Lincoln <pajamapants3000@gmail.com>
# Github: https://github.com/pajamapants3000
# Legal: See LICENSE in parent directory
#
# Installs the Insync portable client into ~/insync-portable and creates
# start/stop/status wrapper scripts in ~/bin.
#
# Check for previous installation; prompt before reinstalling.
# (Quoting fixes: unquoted $PROCEED broke the [ ] tests when the user
# answered with an empty line; the redundant ((!$?)) guard after && is gone.)
PROCEED="yes"
grep insync-portable "/list-$CHRISTENED-$SURNAME" > /dev/null &&
    echo "Previous installation detected, proceed?" && read -r PROCEED
[ "$PROCEED" = "yes" ] || [ "$PROCEED" = "y" ] || exit 0
# Download:
wget http://s.insynchq.com/builds/insync-portable_1.1.2.32011_amd64.tar.bz2
#
tar -xvf insync-portable_1.1.2.32011_amd64.tar.bz2
mv -v insync-portable ~/insync-portable
#
# Create convenient executables:
#
tee > ~/bin/insync-start << "EOF"
#!/bin/bash
cd ~/insync-portable
./insync-portable start
EOF
tee > ~/bin/insync-stop << "EOF"
#!/bin/bash
cd ~/insync-portable
./insync-portable quit
EOF
tee > ~/bin/insync-status << "EOF"
#!/bin/bash
cd ~/insync-portable
./insync-portable get_status
EOF
chmod -v +x ~/bin/insync-start
chmod -v +x ~/bin/insync-stop
chmod -v +x ~/bin/insync-status
#
# Add to installed list for this computer:
echo "insync-portable-1.1.2.32011" >> "/list-$CHRISTENED-$SURNAME"
#
# Follow-up
# go to http://goo.gl/kTvy0y (needs javascript-enabled browser :( firefox ok
# but not links or even links -g. Others? Other way?))
# Log in to desired google account
# Obtain (long!) AUTH code
# Copy in to vim, create script - easiest way without copy/paste to terminal
# Script:
##!/bin/bash -ev
#./insync-portable add_account AUTH_CODE PATH
#
# ...where PATH is the folder where you'd like to sync to. Folder need not (must
# not?) exist, but I'm pretty sure the subfolder must.
# That's it!
#
###################################################
|
namespace Kubernetes.Probes.Core
{
    /// <summary>
    /// Settings controlling Kubernetes liveness/startup probe signalling.
    /// NOTE(review): the file paths are presumably touched/written by the
    /// probe host to signal health -- confirm against the consumer.
    /// </summary>
    public class ProbeConfig
    {
        /// <summary>Interval, in seconds, between liveness signals.</summary>
        public int LivenessSignalIntervalSeconds { get; set; }

        /// <summary>Filesystem path used for the liveness signal.</summary>
        public string LivenessFilePath { get; set; }

        /// <summary>Filesystem path used for the startup signal.</summary>
        public string StartupFilePath { get; set; }
    }
}
|
import {PatientData} from "./patient-data";
import {PatientRADAIResult} from "./patient-RADAI-result";
import {PatientMoriskyResult} from "./patient-morisky-result";
import {PatientFFbHResult} from "./patient-ffbh-result";
import {PatientObservationGroup} from "./patient-observation-group";
import {PatientMedication} from "./patient-medication";
import {PatientClinician} from "./patient-clinician";
import {PatientCheck} from "./patient-check";
import {PatientDisease} from "./patient-disease";
import {PatientImage} from "./patient-image";
// Aggregated per-patient record combining demographics, questionnaire
// results (RADAI, Morisky, FFbH), observations, medications and imaging.
export class PatientODS {
  // Core demographic / identifying data.
  info: PatientData;
  // RADAI questionnaire results over time.
  radai: PatientRADAIResult[];
  // Morisky medication-adherence results.
  morisky: PatientMoriskyResult[];
  // FFbH functional-capacity results.
  ffbh: PatientFFbHResult[];
  // Grouped clinical observations.
  observations: PatientObservationGroup[];
  // Prescribed medications.
  medications: PatientMedication[];
  // Treating clinicians.
  clinicians: PatientClinician[];
  // Scheduled/performed checks.
  checks: PatientCheck[];
  // Diagnosed diseases.
  diseases: PatientDisease[];
  // Imaging records.
  imaging: PatientImage[];
}
|
// Copyright 2020 The Kubernetes Authors.
// SPDX-License-Identifier: Apache-2.0
// Code generated by ./scripts/makeOpenApiInfoDotGo.sh; DO NOT EDIT.
package kubernetesapi
import (
"sigs.k8s.io/kustomize/kyaml/openapi/kubernetesapi/v1212"
)
// Info describes the bundled Kubernetes OpenAPI schema (pseudo-JSON string,
// produced by the generator script).
const Info = "{title:Kubernetes,version:v1.21.2}"

// OpenAPIMustAsset maps a schema version to its embedded-asset loader.
var OpenAPIMustAsset = map[string]func(string) []byte{
	"v1212": v1212.MustAsset,
}

// DefaultOpenAPI is the schema version used when none is specified.
const DefaultOpenAPI = "v1212"
|
{-# LANGUAGE LambdaCase #-}
module Haskellorls.Color.Option
( colorParser,
extraColorParser,
module Haskellorls.Color.Type,
)
where
import Haskellorls.Color.Type
import Options.Applicative
-- | Parser for the GNU-ls-style @--color WHEN@ option. Accepts the usual
-- synonyms (never\/no\/none, always\/yes\/force, auto\/tty\/if-tty) and
-- defaults to 'NEVER' when the option is absent.
colorParser :: Parser Colorize
colorParser =
  option reader $
    long "color"
      <> metavar "WHEN"
      <> value NEVER
      <> help "When use output with color (default is 'never')"
  where
    -- Map each accepted WHEN word to its Colorize value; anything else
    -- is rejected with a parse error.
    reader =
      str >>= \case
        "never" -> return NEVER
        "no" -> return NEVER
        "none" -> return NEVER
        "always" -> return ALWAYS
        "yes" -> return ALWAYS
        "force" -> return ALWAYS
        "auto" -> return AUTO
        "tty" -> return AUTO
        "if-tty" -> return AUTO
        _ -> readerError "Only never, always or auto"
-- | Flag parser enabling the extra (GNU-ls-incompatible) coloring mode.
extraColorParser :: Parser Bool
extraColorParser =
  switch
    ( long "extra-color"
        <> help "Enable extra coloring which is incompatible for GNU ls."
    )
|
/* ====================================================================
*/
using System;
using System.Collections.Generic;
using System.Text;
using System.Drawing;
using System.ComponentModel; // need this for the properties metadata
using System.Drawing.Design;
using System.Xml;
using System.Globalization;
using System.Windows.Forms;
using System.Windows.Forms.Design;
namespace Oranikle.ReportDesigner
{
    /// <summary>
    /// PropertyAction - read-only property wrapper that renders a report
    /// item's Sorting definition ("expr direction, ...") in the property grid.
    /// </summary>
    [TypeConverter(typeof(PropertySortingConverter)),
       Editor(typeof(PropertySortingUIEditor), typeof(System.Drawing.Design.UITypeEditor))]
    internal class PropertySorting : IReportItem
    {
        // The report item whose XML <Sorting> node this property reflects.
        PropertyReportItem pri;
        internal PropertySorting(PropertyReportItem ri)
        {
            pri = ri;
        }
        /// <summary>
        /// Renders each SortBy child as "SortExpression Direction",
        /// comma-separated; empty string when no Sorting node exists.
        /// </summary>
        public override string ToString()
        {
            StringBuilder sb = new StringBuilder();
            XmlNode sorting = pri.Draw.GetNamedChildNode(pri.Node, "Sorting");
            if (sorting == null)
                return "";
            foreach (XmlNode sNode in sorting.ChildNodes)
            {
                // Only <SortBy> element children participate.
                if (sNode.NodeType != XmlNodeType.Element ||
                    sNode.Name != "SortBy")
                    continue;
                if (sb.Length > 0)
                    sb.Append(", ");
                // Get the values
                XmlNode vNodes = pri.Draw.GetNamedChildNode(sNode, "SortExpression");
                if (vNodes != null)
                {
                    sb.Append(vNodes.InnerText);
                    // Direction defaults to Ascending when absent.
                    string dir = pri.Draw.GetElementValue(sNode, "Direction", "Ascending");
                    sb.Append(' ');
                    sb.Append(dir);
                }
            }
            return sb.ToString();
        }
        #region IReportItem Members
        /// <summary>Exposes the wrapped report item to the designer.</summary>
        public PropertyReportItem GetPRI()
        {
            return this.pri;
        }
        #endregion
    }
    /// <summary>
    /// Type converter that lets the property grid display a PropertySorting
    /// value as its ToString() text.
    /// </summary>
    internal class PropertySortingConverter : StringConverter
    {
        // The value is free-form, not restricted to a fixed list.
        public override bool GetStandardValuesExclusive(ITypeDescriptorContext context)
        {
            return false;
        }
        public override bool CanConvertTo(ITypeDescriptorContext context,
            System.Type destinationType)
        {
            if (destinationType == typeof(PropertySorting))
                return true;
            return base.CanConvertTo(context, destinationType);
        }
        // PropertySorting -> string uses the summary produced by ToString().
        public override object ConvertTo(ITypeDescriptorContext context,
            CultureInfo culture, object value, Type destinationType)
        {
            if (destinationType == typeof(string) && value is PropertySorting)
            {
                PropertySorting pb = value as PropertySorting;
                return pb.ToString();
            }
            return base.ConvertTo(context, culture, value, destinationType);
        }
    }
/// <summary>
/// Modal UI editor that opens the sorting dialog (SingleCtlDialog with
/// SortingCtl) when the user clicks the "..." button in the property grid.
/// </summary>
internal class PropertySortingUIEditor : UITypeEditor
{
    internal PropertySortingUIEditor()
    {
    }

    // A modal dialog (rather than a drop-down) is used to edit the value.
    public override UITypeEditorEditStyle GetEditStyle(ITypeDescriptorContext context)
    {
        return UITypeEditorEditStyle.Modal;
    }

    /// <summary>
    /// Shows the sorting dialog; returns a fresh PropertySorting on OK,
    /// otherwise falls back to the base (unchanged) value. Each guard below
    /// bails out to the default behaviour when required services or typed
    /// values are unavailable.
    /// </summary>
    public override object EditValue(ITypeDescriptorContext context,
        IServiceProvider provider,
        object value)
    {
        if ((context == null) || (provider == null))
            return base.EditValue(context, provider, value);

        // Access the Property Browser's UI display service
        IWindowsFormsEditorService editorService =
            (IWindowsFormsEditorService)provider.GetService(typeof(IWindowsFormsEditorService));
        if (editorService == null)
            return base.EditValue(context, provider, value);

        // Create an instance of the UI editor form
        IReportItem iri = context.Instance as IReportItem;
        if (iri == null)
            return base.EditValue(context, provider, value);

        PropertyReportItem pri = iri.GetPRI();
        PropertySorting pb = value as PropertySorting;
        if (pb == null)
            return base.EditValue(context, provider, value);

        using (SingleCtlDialog scd = new SingleCtlDialog(pri.DesignCtl, pri.Draw, pri.Nodes,
            SingleCtlTypeEnum.SortingCtl, null))
        {
            // Display the UI editor dialog
            if (editorService.ShowDialog(scd) == DialogResult.OK)
            {
                // Return the new property value from the UI editor form
                return new PropertySorting(pri);
            }
            return base.EditValue(context, provider, value);
        }
    }
}
}
|
package com.carkzis.android.plutus.inflation
import androidx.arch.core.executor.testing.InstantTaskExecutorRule
import com.carkzis.android.plutus.inflation.RpiPctViewModel
import com.carkzis.android.plutus.data.FakeRepository
import com.carkzis.android.plutus.getOrAwaitValue
import com.carkzis.android.plutus.observeForTesting
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.ExperimentalCoroutinesApi
import kotlinx.coroutines.test.TestCoroutineDispatcher
import kotlinx.coroutines.test.resetMain
import kotlinx.coroutines.test.setMain
import org.hamcrest.CoreMatchers
import org.hamcrest.MatcherAssert
import org.junit.After
import org.junit.Before
import org.junit.Rule
import org.junit.Test
@ExperimentalCoroutinesApi
class RpiPctViewModelTest() {

    // System under test and its fake repository (no real data source is hit).
    private lateinit var rpiPctViewModel: RpiPctViewModel
    private lateinit var inflationRepository: FakeRepository

    // This is from the kotlin docs, to allow access to Dispatcher.Main in testing.
    private val dispatcher = TestCoroutineDispatcher()

    // Executes each task synchronously using Architecture Components.
    @get:Rule
    var instantExecutorRule = InstantTaskExecutorRule()

    @Before
    fun setUp() {
        Dispatchers.setMain(dispatcher)
        inflationRepository = FakeRepository()
        rpiPctViewModel = RpiPctViewModel(inflationRepository)
    }

    @After
    fun tearDown() {
        Dispatchers.resetMain()
    }

    @Test
    fun refreshRpiInflationRates_noErrorNonEmpty_getSuccess() {
        rpiPctViewModel.testRefresh()
        // Make sure sixteen items are then emitted.
        MatcherAssert.assertThat(
            rpiPctViewModel.inflationRates.getOrAwaitValue().size,
            CoreMatchers.`is`(16)
        )
        // Check that the status is now done.
        MatcherAssert.assertThat(
            rpiPctViewModel.loadingStatus.getOrAwaitValue().name,
            CoreMatchers.`is`("DONE")
        )
    }

    @Test
    fun refreshRpiInflationRates_errorAndNullResults_getLoadingStatusIsError() {
        // Simulate a network error with an empty cache.
        inflationRepository.setNull(true)
        inflationRepository.setToEmpty()
        // This will set this to the value of the repository.
        rpiPctViewModel.inflationRates = inflationRepository.getRpiPercentages()
        rpiPctViewModel.inflationRates.observeForTesting {
            rpiPctViewModel.testRefresh()
            dispatcher.advanceTimeBy(500)
            MatcherAssert.assertThat(
                rpiPctViewModel.loadingStatus.getOrAwaitValue().name,
                CoreMatchers.`is`("ERROR")
            )
            // This checks that a Toast was displayed
            MatcherAssert.assertThat(
                rpiPctViewModel.toastText.getOrAwaitValue(),
                CoreMatchers.`is`(CoreMatchers.not("null"))
            )
        }
    }

    @Test
    fun refreshRpiInflationRates_errorButCacheNotEmpty_getLoadingStatusIsHas() {
        // Simulate a network error while cached data is still available,
        // so refresh should finish as DONE rather than ERROR.
        inflationRepository.setNull(true)
        // This will set this to the value of the repository.
        rpiPctViewModel.inflationRates = inflationRepository.getRpiPercentages()
        rpiPctViewModel.inflationRates.observeForTesting {
            rpiPctViewModel.testRefresh()
            dispatcher.advanceTimeBy(500)
            MatcherAssert.assertThat(
                rpiPctViewModel.loadingStatus.getOrAwaitValue().name,
                CoreMatchers.`is`("DONE")
            )
            // This checks that a Toast was displayed
            MatcherAssert.assertThat(
                rpiPctViewModel.toastText.getOrAwaitValue(),
                CoreMatchers.`is`(CoreMatchers.not("null"))
            )
        }
    }
}
|
# Sample equations reused across the test sets below.
water_formation = ce"2 H2 + O2 → 2 H2O"
# Ionic equation using explicit {+}/{-} charge notation and a '>' arrow.
ionic_reaction = ce"Na{+} + Cl{-} > NaCl"
redox = ce"Cr2O7{-2} + H{+1} + {-} = Cr{3+} + H2O"

# Construction: coefficients are signed — reactants positive, products negative.
@testset "ChemEquation" begin
    @test water_formation.tuples ==
        [(cc"H2", 2), (cc"O2", 1), (cc"H2O", -2)]
    @test ionic_reaction.tuples ==
        [(cc"Na{+1}", 1), (cc"Cl{-1}", 1), (cc"ClNa", -1)]
    @test ChemEquation("N2+O2⇌2NO").tuples ==
        [(cc"N2", 1), (cc"O2", 1), (cc"NO", -2)]
    # Non-integer coefficients select the matching parametric type.
    testtuple = [(cc"H2", 0.5), (cc"Cl2", 0.5), (cc"HCl", -1.0)]
    @test ChemEquation(testtuple) == ChemEquation{Float64}(testtuple)
    @test ChemEquation{Rational}("1//2 H2 + 1//2 Cl2 → HCl").tuples == testtuple
    @test ChemEquation{Float64}("0.5 H2 + 0.5 Cl2 → HCl").tuples == testtuple
end

@testset "@ce_str" begin
    @test ce"2 H2 + O2 → 2 H2O" == water_formation
end

# Equality ignores whitespace but not coefficients.
@testset "==" begin
    @test ce"H2 = O2" == ce"H2 = O2"
    @test ce"H2+O2=H2O" == ce"H2 +O2 =H2O"
    @test ce"H2+O2=H2O" ≠ ce"2H2+O2=2H2O"
end

# Display canonicalises the arrow to '=' and charges to {+}/{-n} form.
@testset "string" begin
    @test string(water_formation) == "2 H2 + O2 = 2 H2O"
    @test string(ionic_reaction) == "Na{+} + Cl{-} = NaCl"
    @test string(redox) == "Cr2O7{-2} + H{+} + e = Cr{+3} + H2O"
end

@testset "show" begin
    @test isa(repr(water_formation), String)
end

@testset "compounds" begin
    @test compounds(water_formation) == [cc"H2", cc"O2", cc"H2O"]
    @test compounds(ionic_reaction) == [cc"Na{+}", cc"Cl{-}", cc"NaCl"]
    @test compounds(redox) == [cc"Cr2O7{-2}", cc"H{+}", cc"{-}", cc"Cr{+3}", cc"H2O"]
end

# A bare electron {-} is reported as element "e".
@testset "elements" begin
    @test elements(water_formation) == ["H", "O"]
    @test elements(ionic_reaction) == ["Na", "Cl"]
    @test elements(redox) == ["Cr", "O", "H", "e"]
end

@testset "hascharge" begin
    @test hascharge(water_formation) == false
    @test hascharge(ionic_reaction) == true
    @test hascharge(redox) == true
end
|
package ecsgen
import (
"errors"
)
// Walkable represents types that can be walked within ecsgen. This allows walking
// from arbitrary points within the graph, as well as from the root.
type Walkable interface {
	ListChildren() <-chan *Node
}

// ErrSkipChildren is a used as a return value from WalkFuncs to indicate that the
// callback should not be called for any children of the examined Node.
// It is a sentinel error: Walk checks for it and prunes that subtree.
var ErrSkipChildren = errors.New("node walker: skip remaining children")

// WalkFunc is the type of the function called for each child of a node.
// Returning ErrSkipChildren prunes the subtree; any other non-nil error
// aborts the walk.
type WalkFunc func(n *Node) error
// Walk is a simple depth-first walker for traversing the Schema from a
// starting Node. fn is invoked for every child; returning ErrSkipChildren
// (possibly wrapped) prunes that child's subtree, and any other non-nil
// error aborts the walk immediately.
func Walk(root Walkable, fn WalkFunc) error {
	// enumerate all children of the root
	for elm := range root.ListChildren() {
		// call the walk func on the element
		err := fn(elm)
		// errors.Is (rather than ==) so that callbacks may wrap the
		// sentinel with fmt.Errorf("...: %w", ErrSkipChildren) and still
		// have the branch pruned instead of aborting the whole walk.
		if errors.Is(err, ErrSkipChildren) {
			continue
		}
		// something else happened, stop immediately and return the error
		if err != nil {
			return err
		}
		// recursively call Walk on the element, bubbling up any errors
		// that arise in the recursive call
		if err := Walk(elm, fn); err != nil {
			return err
		}
	}
	return nil
}
|
// ets_tracing: off
import type * as CL from "../../../../Clock"
import * as SC from "../../../../Schedule"
import type * as C from "../core"
import * as Schedule from "./schedule"
/**
 * Emits elements of this stream with a fixed delay in between, regardless of how long it
 * takes to produce a value.
 */
export function fixed_<R, E, A>(
  self: C.Stream<R, E, A>,
  duration: number
): C.Stream<R & CL.HasClock, E, A> {
  // Drive the emission cadence with a fixed-rate schedule.
  const cadence = SC.fixed(duration)
  return Schedule.schedule_(self, cadence)
}
/**
 * Emits elements of this stream with a fixed delay in between, regardless of how long it
 * takes to produce a value.
 *
 * @ets_data_first fixed_
 */
export function fixed(duration: number) {
  // Curried (data-last) variant delegating to fixed_.
  return <R, E, A>(stream: C.Stream<R, E, A>) => fixed_(stream, duration)
}
|
*
* subroutine tstepsic.f for program goldstein introduced 18/9/02
* updates sea-ice height and area
*
subroutine tstepsic
#include "seaice.cmn"
integer i, j, l
real fe(2), fw(2), fn(2), fs(2,maxi)
+ ,fwsave(2)
c fe/fw/fn/fs hold east/west/north/south cell-face fluxes; l runs over
c the two transported fields of varice1 (presumably 1=height, 2=area
c given par_sich_/par_sica_thresh below -- TODO confirm vs seaice.cmn)
c 2nd order explicit transport code using upper level ocean velocities
c southern boundary fluxes
j = 1
do 230 i=1,imax
do 230 l=1,2
fs(l,i) = 0
230 continue
do 100 j=1,jmax
c western boundary fluxes
c (periodic wrap in i: face between i=imax and i=1, kept in fwsave for
c reuse as the eastern-edge flux at i=imax)
i = 1
do 210 l=1,2
if (kmax.ge.max(k1(imax,j),k1(1,j)))then
c western doorway
fw(l) = u(1,imax,j)*rc(j)*(varice1(l,1,j) +
1 varice1(l,imax,j))*0.5
c advective flux is suppressed when the cell selected by the sign of u
c exceeds the area/thickness thresholds -- NOTE(review): confirm the
c intended up/downwind convention
if (u(1,imax,j).ge.0.0) then
if (varice1(2,1,j).gt.par_sica_thresh) then
fw(l) = 0
endif
if (varice1(1,1,j).gt.par_sich_thresh) then
fw(l) = 0
endif
else
if (varice1(2,imax,j).gt.par_sica_thresh) then
fw(l) = 0
endif
if (varice1(1,imax,j).gt.par_sich_thresh) then
fw(l) = 0
endif
endif
c diffusive contribution, always applied through open faces
fw(l) = fw(l) - (varice1(l,1,j) - varice1(l,imax,j))
1 *rc(j)*rc(j)*rdphi*diffsic
else
fw(l) = 0
endif
fwsave(l) = fw(l)
210 continue
do 100 i=1,imax
do 120 l=1,2
c flux to east
if(i.eq.imax)then
c eastern edge(doorway or wall)
fe(l) = fwsave(l)
elseif(kmax.lt.max(k1(i,j),k1(i+1,j)))then
fe(l) = 0
else
fe(l) = u(1,i,j)*rc(j)*(varice1(l,i+1,j) +
1 varice1(l,i,j))*0.5
if (u(1,i,j).ge.0.0) then
if (varice1(2,i+1,j).gt.par_sica_thresh) then
fe(l) = 0
endif
if (varice1(1,i+1,j).gt.par_sich_thresh) then
fe(l) = 0
endif
else
if (varice1(2,i,j).gt.par_sica_thresh) then
fe(l) = 0
endif
if (varice1(1,i,j).gt.par_sich_thresh) then
fe(l) = 0
endif
endif
fe(l) = fe(l) - (varice1(l,i+1,j) - varice1(l,i,j))
1 *rc(j)*rc(j)*rdphi*diffsic
endif
c flux to north
if(kmax.lt.max(k1(i,j),k1(i,j+1)))then
fn(l) = 0
else
fn(l) = cv(j)*u(2,i,j)*(varice1(l,i,j+1) +
1 varice1(l,i,j))*0.5
if (u(2,i,j).ge.0.0) then
if (varice1(2,i,j+1).gt.par_sica_thresh) then
fn(l) = 0
endif
if (varice1(1,i,j+1).gt.par_sich_thresh) then
fn(l) = 0
endif
else
if (varice1(2,i,j).gt.par_sica_thresh) then
fn(l) = 0
endif
if (varice1(1,i,j).gt.par_sich_thresh) then
fn(l) = 0
endif
endif
fn(l) = fn(l) - cv(j)*cv(j)*(varice1(l,i,j+1) -
1 varice1(l,i,j))*rdsv(j)*diffsic
endif
c
c update wet cells only: explicit step with flux divergence plus the
c dtha source term (presumably thermodynamic growth/melt -- confirm)
if(kmax.ge.k1(i,j))then
varice(l,i,j) = varice1(l,i,j) - dtsic*(
1 (fe(l) - fw(l))*rdphi
2 + (fn(l) - fs(l,i))*rds(j))
1 + tsc*dtsic*dtha(l,i,j)
endif
c recycle fluxes: this cell's east/north face is the next cell's
c west/south face
fw(l) = fe(l)
fs(l,i) = fn(l)
120 continue
100 continue
end
|
## AppDomain assemblies
Assemblies loaded into the AppDomain are scanned by default. AppDomain assembly scanning can be disabled using:
snippet: ScanningApDomainAssemblies
|
import React from 'react'
import styled from 'styled-components'
import {GlobalContext} from '../context/GlobalContext'
import TestCard from './test_card'
const Container = styled.div`
width: 90%;
margin: auto;
display: flex;
flex-flow: row wrap;
align-content: flex-start;
`
function TestCards(){
let {getMatchApiManifest, getApiByName, genApiTagsByName, search_term} = React.useContext(GlobalContext);
var test = getMatchApiManifest(search_term)
console.log([...test])
return(
<>
<Container>
{
[...test].map( x => {
let api_full_info = getApiByName(x)
let api_tags = genApiTagsByName(x)
return(
<TestCard
key={x}
api_name={x}
name={api_full_info.result.title}
description={api_full_info.result.notes}
tags={api_tags}
last_update={api_full_info.result.metadata_modified}
/>
)
})
}
</Container>
</>
)
}
export default TestCards
// function TestCards(){
// let {getMatchApiManifest, getApiByName, genApiTagsByName} = React.useContext(GlobalContext);
// return (
// <Container>
// {
// [...getMatchApiManifest('CCI')].map( x => {
// let api_full_info = getApiByName(x)
// let api_tags = genApiTagsByName(x)
// return (
// <TestCard
// key={x}
// api_name={x}
// name={api_full_info.result.title}
// description={api_full_info.result.notes}
// tags={api_tags}
// last_update={api_full_info.result.metadata_modified}
// />
// )
// })
// }
// </Container>
// )
// }
// export default TestCards
|
use ash::vk;
use crate::context::instance::VkInstance;
use crate::{vklint, vksint, vkchar, vkptr, vkbool};
use crate::error::{VkResult, VkError};
use std::ffi::CStr;
use std::ptr;
/// Aggregated configuration for the Vulkan validation/debug tooling.
#[derive(Debug, Default)]
pub struct ValidationConfig {
    /// `debug_type` selects which debug extension (if any) is enabled.
    pub debug_type: DebugType,
    /// `report_config` specifies the configuration parameters used in Debug Report.
    pub report_config: DebugReportConfig,
    /// `utils_config` specifies the configuration parameters used in Debug Utils.
    pub utils_config: DebugUtilsConfig,
}

/// Which Vulkan debug extension to use, if any.
#[derive(Eq, PartialEq, Debug, Clone, Copy)]
pub enum DebugType {
    DebugReport,
    DebugUtils,
    None, // set None to disable Debug tools.
}

impl Default for DebugType {
    // Debugging is off unless explicitly requested.
    fn default() -> DebugType {
        DebugType::None
    }
}
/// `DebugInstance` is used as a trait object.
trait DebugInstance {
    /// Destroy this validation tool.
    unsafe fn discard(&self);
}

/// Marker trait over the two extension-specific create-info structs, so a
/// caller can receive either one behind a `Box<dyn DebugCreateInfo>`.
pub trait DebugCreateInfo {}

impl DebugCreateInfo for vk::DebugUtilsMessengerCreateInfoEXT {}
impl DebugCreateInfo for vk::DebugReportCallbackCreateInfoEXT {}

/// Wrapper class for the validation tools used in Vulkan.
pub struct VkDebugger {
    // None when DebugType::None was configured; Some(tool) otherwise.
    target: Option<Box<dyn DebugInstance>>,
}
impl VkDebugger {
    /// Create the debug tool selected by `config.debug_type`.
    ///
    /// `target` stays `None` for `DebugType::None`, so dropping the debugger
    /// is a no-op in that case.
    pub fn new(instance: &VkInstance, config: ValidationConfig) -> VkResult<VkDebugger> {
        let debugger = match config.debug_type {
            | DebugType::DebugReport => {
                let report = VkDebugReport::new(instance, &config.report_config)?;
                Some(Box::new(report) as Box<dyn DebugInstance>)
            },
            | DebugType::DebugUtils => {
                let utils = VkDebugUtils::new(instance, &config.utils_config)?;
                Some(Box::new(utils) as Box<dyn DebugInstance>)
            },
            | DebugType::None => {
                None
            },
        };

        let result = VkDebugger { target: debugger };
        Ok(result)
    }

    /// Build just the create-info struct for the selected debug extension
    /// without creating the tool (e.g. for chaining into instance creation).
    pub fn instance_debug_info(debug: DebugType, config: &ValidationConfig) -> Option<Box<dyn DebugCreateInfo>> {
        match debug {
            | DebugType::DebugReport => {
                let report = VkDebugReport::create_info(&config.report_config);
                Some(Box::new(report) as Box<dyn DebugCreateInfo>)
            },
            | DebugType::DebugUtils => {
                let utils = VkDebugUtils::create_info(&config.utils_config);
                Some(Box::new(utils) as Box<dyn DebugCreateInfo>)
            },
            | DebugType::None => {
                None
            },
        }
    }
}
impl Drop for VkDebugger {
fn drop(&mut self) {
if let Some(ref debugger) = self.target {
unsafe {
debugger.discard();
}
}
}
}
// Debug Report -----------------------------------------------------------------------------------
/// the callback function used in Debug Report.
// Prints every message to stdout and returns vk::FALSE so the triggering
// Vulkan call is not aborted.
// NOTE(review): declared to return `u32` while the Debug Utils callback
// below returns `vkbool` — confirm both alias vk::Bool32.
unsafe extern "system" fn vulkan_debug_report_callback(
    _flags       : vk::DebugReportFlagsEXT,
    _obj_type    : vk::DebugReportObjectTypeEXT,
    _obj         : vklint,
    _location    : usize,
    _code        : vksint,
    _layer_prefix: *const vkchar,
    p_message    : *const vkchar,
    _user_data   : vkptr
) -> u32 {
    println!("[Debug] {:?}", CStr::from_ptr(p_message));
    vk::FALSE
}

/// The configuration parameters used in the initialization of `vk::DebugReport`.
#[derive(Debug)]
pub struct DebugReportConfig {
    /// the message type that Validation Layer would report for.
    pub flags: vk::DebugReportFlagsEXT,
}

impl Default for DebugReportConfig {
    // Default: report DEBUG, ERROR and WARNING; INFORMATION and
    // PERFORMANCE_WARNING are deliberately left disabled (see commented
    // flags) to reduce noise.
    fn default() -> DebugReportConfig {
        DebugReportConfig {
            flags:
                vk::DebugReportFlagsEXT::DEBUG |
                vk::DebugReportFlagsEXT::ERROR |
                // vk::DebugReportFlagsEXT::INFORMATION |
                // vk::DebugReportFlagsEXT::PERFORMANCE_WARNING |
                vk::DebugReportFlagsEXT::WARNING,
        }
    }
}
/// Wrapper over the EXT_debug_report extension: owns the extension loader
/// and the registered callback handle.
struct VkDebugReport {
    /// the handle of `vk::DebugReport` object.
    loader: ash::extensions::ext::DebugReport,
    /// the handle of callback function used in Validation Layer.
    callback: vk::DebugReportCallbackEXT,
}

impl VkDebugReport {
    /// Initialize debug extension loader and `vk::DebugReport` object.
    pub fn new(instance: &VkInstance, config: &DebugReportConfig) -> VkResult<VkDebugReport> {
        // load the debug extension.
        let loader = ash::extensions::ext::DebugReport::new(&instance.entry, &instance.handle);

        // configure debug callback.
        let debug_callback_ci = VkDebugReport::create_info(config);
        let callback = unsafe {
            loader.create_debug_report_callback(&debug_callback_ci, None)
                // Map the raw Vulkan error into the crate's error type.
                .or(Err(VkError::create("Debug Report Callback")))?
        };

        let report = VkDebugReport { loader, callback };
        Ok(report)
    }

    /// Build the create-info struct registering `vulkan_debug_report_callback`
    /// with the configured message flags.
    fn create_info(config: &DebugReportConfig) -> vk::DebugReportCallbackCreateInfoEXT {
        vk::DebugReportCallbackCreateInfoEXT {
            s_type      : vk::StructureType::DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT,
            p_next      : ptr::null(),
            // Enum DebugReportFlags enumerate all available flags.
            flags       : config.flags,
            pfn_callback: Some(vulkan_debug_report_callback),
            p_user_data : ptr::null_mut(),
        }
    }
}

impl DebugInstance for VkDebugReport {
    /// Destroy the `vk::DebugReport` object.
    unsafe fn discard(&self) {
        self.loader.destroy_debug_report_callback(self.callback, None);
    }
}
// ------------------------------------------------------------------------------------------------
// Debug Utils ------------------------------------------------------------------------------------
/// the callback function used in Debug Utils.
// Formats "[Debug][severity][type] message" to stdout; returns vk::FALSE so
// the triggering Vulkan call is not aborted.
unsafe extern "system" fn vulkan_debug_utils_callback(
    message_severity : vk::DebugUtilsMessageSeverityFlagsEXT,
    message_type     : vk::DebugUtilsMessageTypeFlagsEXT,
    p_callback_data  : *const vk::DebugUtilsMessengerCallbackDataEXT,
    _p_user_data     : vkptr
) -> vkbool {
    let severity = match message_severity {
        | vk::DebugUtilsMessageSeverityFlagsEXT::VERBOSE => "[Verbose]",
        | vk::DebugUtilsMessageSeverityFlagsEXT::WARNING => "[Warning]",
        | vk::DebugUtilsMessageSeverityFlagsEXT::ERROR   => "[Error]",
        | vk::DebugUtilsMessageSeverityFlagsEXT::INFO    => "[Info]",
        | _ => "[Unknown]",
    };
    // NOTE(review): exact-equality match — a value with multiple type bits
    // set falls through to "[Unknown]".
    let types = match message_type {
        | vk::DebugUtilsMessageTypeFlagsEXT::GENERAL     => "[General]",
        | vk::DebugUtilsMessageTypeFlagsEXT::PERFORMANCE => "[Performance]",
        | vk::DebugUtilsMessageTypeFlagsEXT::VALIDATION  => "[Validation]",
        | _ => "[Unknown]",
    };
    let message = CStr::from_ptr((*p_callback_data).p_message);
    println!("[Debug]{}{}{:?}", severity, types, message);
    vk::FALSE
}

/// The configuration parameters used in the initialization of `vk::DebugUtils`.
#[derive(Debug)]
pub struct DebugUtilsConfig {
    pub flags    : vk::DebugUtilsMessengerCreateFlagsEXT,
    pub severity : vk::DebugUtilsMessageSeverityFlagsEXT,
    pub types    : vk::DebugUtilsMessageTypeFlagsEXT,
}

impl Default for DebugUtilsConfig {
    // Default: WARNING + ERROR severities over all message types; VERBOSE
    // and INFO are deliberately disabled (see commented flags).
    fn default() -> DebugUtilsConfig {
        DebugUtilsConfig {
            flags: vk::DebugUtilsMessengerCreateFlagsEXT::empty(),
            severity:
                vk::DebugUtilsMessageSeverityFlagsEXT::WARNING |
                // vk::DebugUtilsMessageSeverityFlagsEXT::VERBOSE |
                // vk::DebugUtilsMessageSeverityFlagsEXT::INFO |
                vk::DebugUtilsMessageSeverityFlagsEXT::ERROR,
            types:
                vk::DebugUtilsMessageTypeFlagsEXT::GENERAL |
                vk::DebugUtilsMessageTypeFlagsEXT::PERFORMANCE |
                vk::DebugUtilsMessageTypeFlagsEXT::VALIDATION,
        }
    }
}
/// Wrapper class for `vk::DebugUtils` object.
struct VkDebugUtils {
    /// the handle of `vk::DebugUtils` object.
    loader: ash::extensions::ext::DebugUtils,
    /// the handle of callback function used in Validation Layer.
    utils_messenger: vk::DebugUtilsMessengerEXT,
}

impl VkDebugUtils {
    /// Initialize debug report extension loader and `vk::DebugUtilsMessengerExt` object.
    pub fn new(instance: &VkInstance, config: &DebugUtilsConfig) -> VkResult<VkDebugUtils> {
        let loader = ash::extensions::ext::DebugUtils::new(&instance.entry, &instance.handle);

        let messenger_ci = VkDebugUtils::create_info(config);
        let utils_messenger = unsafe {
            loader.create_debug_utils_messenger(&messenger_ci, None)
                // Map the raw Vulkan error into the crate's error type.
                .or(Err(VkError::create("Debug Utils Callback")))?
        };

        let utils = VkDebugUtils { loader, utils_messenger };
        Ok(utils)
    }

    /// Build the create-info struct registering `vulkan_debug_utils_callback`
    /// with the configured severity/type filters.
    fn create_info(config: &DebugUtilsConfig) -> vk::DebugUtilsMessengerCreateInfoEXT {
        vk::DebugUtilsMessengerCreateInfoEXT {
            s_type: vk::StructureType::DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT,
            p_next: ptr::null(),
            flags : config.flags,
            message_severity : config.severity,
            message_type     : config.types,
            pfn_user_callback: Some(vulkan_debug_utils_callback),
            p_user_data      : ptr::null_mut(),
        }
    }
}

impl DebugInstance for VkDebugUtils {
    /// Destroy the `vk::DebugUtils` object.
    unsafe fn discard(&self) {
        self.loader.destroy_debug_utils_messenger(self.utils_messenger, None);
    }
}
// ------------------------------------------------------------------------------------------------
|
import { createStackNavigator } from '@react-navigation/stack';
import React from 'react';
import ArrowBox from '../../common/components/views/ArrowBox';
import ProfileErase from '../../common/screens/profile/ProfileErase';
import Terms from '../../common/screens/unlogged/Terms';
import { t } from '../../strings';
import Profile from './Profile';
import ProfileEdit from './ProfileEdit';
import ProfileAddCard from './payment/ProfileAddCard';
import ProfilePaymentMethods from './payment/ProfilePaymentMethods';
import { ProfileParamList } from './types';
const Stack = createStackNavigator<ProfileParamList>();
// Stack navigator for the profile area: personal data, payment methods,
// terms of use and account deletion, all reached from the Profile screen.
export default function () {
  return (
    <Stack.Navigator
      initialRouteName="Profile"
      screenOptions={() => ({
        // Shared header style: custom back arrow, no back title.
        headerBackImage: () => <ArrowBox flipped />,
        headerBackTitleVisible: false,
      })}
    >
      {/* Root screen renders its own header. */}
      <Stack.Screen name="Profile" component={Profile} options={{ headerShown: false }} />
      <Stack.Screen
        name="ProfileEdit"
        component={ProfileEdit}
        options={{ title: t('Dados pessoais') }}
      />
      <Stack.Screen
        name="ProfilePaymentMethods"
        component={ProfilePaymentMethods}
        options={{ title: t('Formas de pagamento') }}
      />
      <Stack.Screen
        name="ProfileAddCard"
        component={ProfileAddCard}
        options={{ title: t('Adicionar cartão') }}
      />
      <Stack.Screen name="Terms" component={Terms} options={{ title: t('Termos de uso') }} />
      <Stack.Screen
        name="ProfileErase"
        component={ProfileErase}
        options={{ title: t('Excluir minha conta') }}
      />
    </Stack.Navigator>
  );
}
|
import { Router } from 'express';
import * as CommentAPI from '../controllers/comment.controller';
import { jwtAuth } from '../modules/jwt.local.strategy';
// Base path this router is mounted under.
export const path = '/comment';
export const router = Router();

// Aggregate comment counts per tutor — the only route without jwtAuth().
router.get('/startutor', CommentAPI.getAllCommentsCountPerTutor);
// Comments written by a given user.
router.get('/user/:user_id', jwtAuth(), CommentAPI.getCommentsByUserId);
// List / create comments on a lesson.
router.get('/lesson/:lesson_id', jwtAuth(), CommentAPI.getCommentsByLessonId);
router.post('/lesson/:lesson_id', jwtAuth(), CommentAPI.putComment);
// Update / delete an individual comment by id.
router.put('/:comment_id', jwtAuth(), CommentAPI.updateComment);
router.delete('/:comment_id', jwtAuth(), CommentAPI.delComment);
|
//! ASN.1 `INTEGER` support.
// TODO(tarcieri): add support for `i32`/`u32`
use crate::{Any, Encodable, Encoder, Error, ErrorKind, Header, Length, Result, Tag, Tagged};
use core::convert::TryFrom;
//
// i8
//
impl TryFrom<Any<'_>> for i8 {
    type Error = Error;

    fn try_from(any: Any<'_>) -> Result<i8> {
        let tag = any.tag().assert_eq(Tag::Integer)?;

        // An i8 is always exactly one content octet; anything else is a
        // length error.
        match *any.as_bytes() {
            [x] => Ok(x as i8),
            _ => Err(ErrorKind::Length { tag }.into()),
        }
    }
}

impl Encodable for i8 {
    // Header length plus the single content octet.
    fn encoded_len(&self) -> Result<Length> {
        Header {
            tag: Tag::Integer,
            length: 1u8.into(),
        }
        .encoded_len()?
            + 1u8
    }

    fn encode(&self, encoder: &mut Encoder<'_>) -> Result<()> {
        Header {
            tag: Tag::Integer,
            length: 1u8.into(),
        }
        .encode(encoder)?;

        // Two's complement representation is the raw byte.
        encoder.byte(*self as u8)
    }
}

impl Tagged for i8 {
    const TAG: Tag = Tag::Integer;
}
//
// i16
//
impl TryFrom<Any<'_>> for i16 {
    type Error = Error;

    fn try_from(any: Any<'_>) -> Result<i16> {
        let tag = any.tag().assert_eq(Tag::Integer)?;

        match *any.as_bytes() {
            // Single octet: delegate to the i8 decoder, then widen.
            [_] => i8::try_from(any).map(|x| x as i16),
            // X.690 §8.3.2: a leading 0x00 octet is only allowed when needed
            // to force a positive interpretation (next octet's high bit set).
            [0, lo] if lo < 0x80 => Err(ErrorKind::Noncanonical.into()),
            // Likewise a leading 0xFF octet is redundant when the next octet
            // already has its high bit set: the value fits in one octet, so
            // the two-octet form is non-minimal. (Previously accepted.)
            [0xff, lo] if lo >= 0x80 => Err(ErrorKind::Noncanonical.into()),
            [hi, lo] => Ok(i16::from_be_bytes([hi, lo])),
            _ => Err(ErrorKind::Length { tag }.into()),
        }
    }
}
impl Encodable for i16 {
    fn encoded_len(&self) -> Result<Length> {
        // Values that fit in an i8 use the shorter single-octet form.
        if let Ok(x) = i8::try_from(*self) {
            return x.encoded_len();
        }

        Header {
            tag: Tag::Integer,
            length: 2u8.into(),
        }
        .encoded_len()?
            + 2u8
    }

    fn encode(&self, encoder: &mut Encoder<'_>) -> Result<()> {
        // Delegate to the i8 encoder when the value fits, keeping the
        // encoding minimal as DER requires.
        if let Ok(x) = i8::try_from(*self) {
            return x.encode(encoder);
        }

        Header {
            tag: Tag::Integer,
            length: 2u8.into(),
        }
        .encode(encoder)?;

        // Big-endian two's complement content octets.
        encoder.bytes(&self.to_be_bytes())
    }
}

impl Tagged for i16 {
    const TAG: Tag = Tag::Integer;
}
//
// u8
//
impl TryFrom<Any<'_>> for u8 {
    type Error = Error;

    fn try_from(any: Any<'_>) -> Result<u8> {
        let tag = any.tag().assert_eq(Tag::Integer)?;

        // Unsigned values >= 0x80 require a leading zero octet so the DER
        // integer stays positive; without it the single octet would decode
        // as a negative number.
        match *any.as_bytes() {
            [x] if x < 0x80 => Ok(x),
            [x] if x >= 0x80 => Err(ErrorKind::Noncanonical.into()),
            [0, x] if x < 0x80 => Err(ErrorKind::Noncanonical.into()),
            [0, x] if x >= 0x80 => Ok(x),
            _ => Err(ErrorKind::Length { tag }.into()),
        }
    }
}

impl Encodable for u8 {
    fn encoded_len(&self) -> Result<Length> {
        // One content octet, plus a leading zero octet for values >= 0x80.
        let inner_len = if *self < 0x80 { 1u8 } else { 2u8 };

        Header {
            tag: Tag::Integer,
            length: inner_len.into(),
        }
        .encoded_len()?
            + inner_len
    }

    fn encode(&self, encoder: &mut Encoder<'_>) -> Result<()> {
        Header {
            tag: Tag::Integer,
            length: if *self < 0x80 { 1u8 } else { 2u8 }.into(),
        }
        .encode(encoder)?;

        // Leading zero keeps the sign bit clear for values >= 0x80.
        if *self >= 0x80 {
            encoder.byte(0)?;
        }

        encoder.byte(*self as u8)
    }
}

impl Tagged for u8 {
    const TAG: Tag = Tag::Integer;
}
//
// u16
//
impl TryFrom<Any<'_>> for u16 {
    type Error = Error;

    fn try_from(any: Any<'_>) -> Result<u16> {
        let tag = any.tag().assert_eq(Tag::Integer)?;

        match *any.as_bytes() {
            [x] if x < 0x80 => Ok(x as u16),
            // A lone octet >= 0x80 would decode as negative.
            [x] if x >= 0x80 => Err(ErrorKind::Noncanonical.into()),
            // Padding zero is only allowed when the next octet needs it.
            [0, x] if x < 0x80 => Err(ErrorKind::Noncanonical.into()),
            [hi, lo] if hi < 0x80 => Ok(u16::from_be_bytes([hi, lo])),
            // Three octets only for values whose top bit would be set.
            [0, hi, lo] if hi >= 0x80 => Ok(u16::from_be_bytes([hi, lo])),
            // NOTE(review): a padded 3-octet form with hi < 0x80 lands here
            // as a Length error rather than Noncanonical — confirm intended.
            _ => Err(ErrorKind::Length { tag }.into()),
        }
    }
}

impl Encodable for u16 {
    fn encoded_len(&self) -> Result<Length> {
        // Values that fit in a u8 use the shorter form.
        if let Ok(x) = u8::try_from(*self) {
            return x.encoded_len();
        }

        // Two content octets, plus a leading zero for values >= 0x8000.
        let inner_len = if *self < 0x8000 { 2u16 } else { 3u16 };

        Header {
            tag: Tag::Integer,
            length: inner_len.into(),
        }
        .encoded_len()?
            + inner_len
    }

    fn encode(&self, encoder: &mut Encoder<'_>) -> Result<()> {
        if let Ok(x) = u8::try_from(*self) {
            return x.encode(encoder);
        }

        Header {
            tag: Tag::Integer,
            length: if *self < 0x8000 { 2u16 } else { 3u16 }.into(),
        }
        .encode(encoder)?;

        // Leading zero keeps the sign bit clear for values >= 0x8000.
        if *self >= 0x8000 {
            encoder.byte(0)?;
        }

        encoder.bytes(&self.to_be_bytes())
    }
}

impl Tagged for u16 {
    const TAG: Tag = Tag::Integer;
}
#[cfg(test)]
pub(crate) mod tests {
    use crate::{Decodable, Encodable};

    // Vectors from Section 5.7 of:
    // https://luca.ntop.org/Teaching/Appunti/asn1.html
    pub(crate) const I0_BYTES: &[u8] = &[0x02, 0x01, 0x00];
    pub(crate) const I127_BYTES: &[u8] = &[0x02, 0x01, 0x7F];
    pub(crate) const I128_BYTES: &[u8] = &[0x02, 0x02, 0x00, 0x80];
    pub(crate) const I256_BYTES: &[u8] = &[0x02, 0x02, 0x01, 0x00];
    pub(crate) const INEG128_BYTES: &[u8] = &[0x02, 0x01, 0x80];
    pub(crate) const INEG129_BYTES: &[u8] = &[0x02, 0x02, 0xFF, 0x7F];

    // Additional vectors
    pub(crate) const I255_BYTES: &[u8] = &[0x02, 0x02, 0x00, 0xFF];
    pub(crate) const I32767_BYTES: &[u8] = &[0x02, 0x02, 0x7F, 0xFF];
    pub(crate) const I65535_BYTES: &[u8] = &[0x02, 0x03, 0x00, 0xFF, 0xFF];
    pub(crate) const INEG32768_BYTES: &[u8] = &[0x02, 0x02, 0x80, 0x00];

    #[test]
    fn decode_i8() {
        assert_eq!(0, i8::from_bytes(I0_BYTES).unwrap());
        assert_eq!(127, i8::from_bytes(I127_BYTES).unwrap());
        assert_eq!(-128, i8::from_bytes(INEG128_BYTES).unwrap());
    }

    #[test]
    fn decode_i16() {
        assert_eq!(0, i16::from_bytes(I0_BYTES).unwrap());
        assert_eq!(127, i16::from_bytes(I127_BYTES).unwrap());
        assert_eq!(128, i16::from_bytes(I128_BYTES).unwrap());
        assert_eq!(255, i16::from_bytes(I255_BYTES).unwrap());
        assert_eq!(256, i16::from_bytes(I256_BYTES).unwrap());
        assert_eq!(32767, i16::from_bytes(I32767_BYTES).unwrap());
        assert_eq!(-128, i16::from_bytes(INEG128_BYTES).unwrap());
        assert_eq!(-129, i16::from_bytes(INEG129_BYTES).unwrap());
        assert_eq!(-32768, i16::from_bytes(INEG32768_BYTES).unwrap());
    }

    #[test]
    fn decode_u8() {
        assert_eq!(0, u8::from_bytes(I0_BYTES).unwrap());
        assert_eq!(127, u8::from_bytes(I127_BYTES).unwrap());
        assert_eq!(255, u8::from_bytes(I255_BYTES).unwrap());
    }

    #[test]
    fn decode_u16() {
        assert_eq!(0, u16::from_bytes(I0_BYTES).unwrap());
        assert_eq!(127, u16::from_bytes(I127_BYTES).unwrap());
        assert_eq!(255, u16::from_bytes(I255_BYTES).unwrap());
        assert_eq!(256, u16::from_bytes(I256_BYTES).unwrap());
        assert_eq!(32767, u16::from_bytes(I32767_BYTES).unwrap());
        assert_eq!(65535, u16::from_bytes(I65535_BYTES).unwrap());
    }

    // Encoding tests round-trip the same vectors through encode_to_slice.
    #[test]
    fn encode_i8() {
        let mut buffer = [0u8; 3];

        assert_eq!(I0_BYTES, 0i8.encode_to_slice(&mut buffer).unwrap());
        assert_eq!(I127_BYTES, 127i8.encode_to_slice(&mut buffer).unwrap());
        assert_eq!(
            INEG128_BYTES,
            (-128i8).encode_to_slice(&mut buffer).unwrap()
        );
    }

    #[test]
    fn encode_i16() {
        let mut buffer = [0u8; 4];

        assert_eq!(I0_BYTES, 0i16.encode_to_slice(&mut buffer).unwrap());
        assert_eq!(I127_BYTES, 127i16.encode_to_slice(&mut buffer).unwrap());
        assert_eq!(I128_BYTES, 128i16.encode_to_slice(&mut buffer).unwrap());
        assert_eq!(I255_BYTES, 255i16.encode_to_slice(&mut buffer).unwrap());
        assert_eq!(I256_BYTES, 256i16.encode_to_slice(&mut buffer).unwrap());
        assert_eq!(I32767_BYTES, 32767i16.encode_to_slice(&mut buffer).unwrap());
        assert_eq!(
            INEG128_BYTES,
            (-128i16).encode_to_slice(&mut buffer).unwrap()
        );
        assert_eq!(
            INEG129_BYTES,
            (-129i16).encode_to_slice(&mut buffer).unwrap()
        );
        assert_eq!(
            INEG32768_BYTES,
            (-32768i16).encode_to_slice(&mut buffer).unwrap()
        );
    }

    #[test]
    fn encode_u8() {
        let mut buffer = [0u8; 4];

        assert_eq!(I0_BYTES, 0u8.encode_to_slice(&mut buffer).unwrap());
        assert_eq!(I127_BYTES, 127u8.encode_to_slice(&mut buffer).unwrap());
        assert_eq!(I255_BYTES, 255u8.encode_to_slice(&mut buffer).unwrap());
    }

    #[test]
    fn encode_u16() {
        let mut buffer = [0u8; 5];

        assert_eq!(I0_BYTES, 0u16.encode_to_slice(&mut buffer).unwrap());
        assert_eq!(I127_BYTES, 127u16.encode_to_slice(&mut buffer).unwrap());
        assert_eq!(I128_BYTES, 128u16.encode_to_slice(&mut buffer).unwrap());
        assert_eq!(I255_BYTES, 255u16.encode_to_slice(&mut buffer).unwrap());
        assert_eq!(I256_BYTES, 256u16.encode_to_slice(&mut buffer).unwrap());
        assert_eq!(I32767_BYTES, 32767u16.encode_to_slice(&mut buffer).unwrap());
        assert_eq!(I65535_BYTES, 65535u16.encode_to_slice(&mut buffer).unwrap());
    }

    /// Integers must be encoded with a minimum number of octets
    // (a padded all-zero two-octet body must be rejected for every type).
    #[test]
    fn reject_non_canonical() {
        assert!(i8::from_bytes(&[0x02, 0x02, 0x00, 0x00]).is_err());
        assert!(i16::from_bytes(&[0x02, 0x02, 0x00, 0x00]).is_err());
        assert!(u8::from_bytes(&[0x02, 0x02, 0x00, 0x00]).is_err());
        assert!(u16::from_bytes(&[0x02, 0x02, 0x00, 0x00]).is_err());
    }
}
|
using System.Diagnostics;
using System.Net.Mqtt.Sdk.Packets;
using System.Net.Mqtt.Sdk.Storage;
using System.Threading.Tasks;
namespace System.Net.Mqtt.Sdk.Flows
{
internal class ServerConnectFlow : IProtocolFlow
{
static readonly ITracer tracer = Tracer.Get<ServerConnectFlow> ();

// Collaborators injected by the server: authentication, session/will
// persistence, and the flow used to (re)send publish packets.
readonly IMqttAuthenticationProvider authenticationProvider;
readonly IRepository<ClientSession> sessionRepository;
readonly IRepository<ConnectionWill> willRepository;
readonly IPublishSenderFlow senderFlow;

public ServerConnectFlow (IMqttAuthenticationProvider authenticationProvider,
    IRepository<ClientSession> sessionRepository,
    IRepository<ConnectionWill> willRepository,
    IPublishSenderFlow senderFlow)
{
    this.authenticationProvider = authenticationProvider;
    this.sessionRepository = sessionRepository;
    this.willRepository = willRepository;
    this.senderFlow = senderFlow;
}
/// <summary>
/// Handles a CONNECT packet: authenticates the client, creates or resumes
/// its session (honouring CleanSession), registers any will message,
/// replies with CONNACK, and replays pending messages/acks when an
/// existing session was resumed.
/// </summary>
public async Task ExecuteAsync (string clientId, IPacket input, IMqttChannel<IPacket> channel)
{
    // This flow only reacts to CONNECT packets.
    if (input.Type != MqttPacketType.Connect)
        return;

    var connect = input as Connect;

    if (!authenticationProvider.Authenticate (clientId, connect.UserName, connect.Password)) {
        throw new MqttConnectionException (MqttConnectionStatus.BadUserNameOrPassword);
    }

    var session = sessionRepository.Read (clientId);
    // Session-present flag is computed before any deletion below, and is
    // always false when the client asked for a clean session.
    var sessionPresent = connect.CleanSession ? false : session != null;

    if (connect.CleanSession && session != null) {
        sessionRepository.Delete (session.Id);
        session = null;

        tracer.Info (Server.Properties.Resources.Server_CleanedOldSession, clientId);
    }

    var sendPendingMessages = false;

    if (session == null) {
        session = new ClientSession (clientId, connect.CleanSession);

        sessionRepository.Create (session);

        tracer.Info (Server.Properties.Resources.Server_CreatedSession, clientId);
    } else {
        // Resumed session: replay queued traffic after the CONNACK.
        sendPendingMessages = true;
    }

    if (connect.Will != null) {
        var connectionWill = new ConnectionWill (clientId, connect.Will);

        willRepository.Create (connectionWill);
    }

    await channel.SendAsync (new ConnectAck (MqttConnectionStatus.Accepted, sessionPresent))
        .ConfigureAwait (continueOnCapturedContext: false);

    if (sendPendingMessages) {
        await SendPendingMessagesAsync (session, channel)
            .ConfigureAwait (continueOnCapturedContext: false);
        await SendPendingAcknowledgementsAsync (session, channel)
            .ConfigureAwait (continueOnCapturedContext: false);
    }
}
async Task SendPendingMessagesAsync (ClientSession session, IMqttChannel<IPacket> channel)
{
foreach (var pendingMessage in session.GetPendingMessages ()) {
var publish = new Publish (pendingMessage.Topic, pendingMessage.QualityOfService,
pendingMessage.Retain, pendingMessage.Duplicated, pendingMessage.PacketId)
{
Payload = pendingMessage.Payload
};
if (pendingMessage.Status == PendingMessageStatus.PendingToSend) {
session.RemovePendingMessage (pendingMessage);
sessionRepository.Update (session);
await senderFlow.SendPublishAsync (session.Id, publish, channel)
.ConfigureAwait (continueOnCapturedContext: false);
} else {
await senderFlow.SendPublishAsync (session.Id, publish, channel, PendingMessageStatus.PendingToAcknowledge)
.ConfigureAwait (continueOnCapturedContext: false);
}
}
}
async Task SendPendingAcknowledgementsAsync (ClientSession session, IMqttChannel<IPacket> channel)
{
foreach (var pendingAcknowledgement in session.GetPendingAcknowledgements ()) {
var ack = default(IFlowPacket);
if (pendingAcknowledgement.Type == MqttPacketType.PublishReceived)
ack = new PublishReceived (pendingAcknowledgement.PacketId);
else if (pendingAcknowledgement.Type == MqttPacketType.PublishRelease)
ack = new PublishRelease (pendingAcknowledgement.PacketId);
await senderFlow.SendAckAsync (session.Id, ack, channel, PendingMessageStatus.PendingToAcknowledge)
.ConfigureAwait (continueOnCapturedContext: false);
}
}
}
}
|
const baseUrl = window.baseUrl;

// Central map of admin API endpoints, keyed by logical name.
export default {
  /**
   * Back-office (admin) endpoints
   */
  "adminLogin" : baseUrl + "Admin/login", // log in
  "adminLogout" : baseUrl + "Admin/logout", // log out
  "adminCheckLogin" : baseUrl + "Admin/check_login", // check whether the user is logged in

  // Menu handling
  'adminMenus' : baseUrl + 'Admin/menus', // back-office menu tree

  /**
   * Modular endpoints
   */
  "goodcatchModule": baseUrl + "Admin/goodcatch/m/base/modules", // module list
};
|
# mongodb-express-example
Sample project using MongoDB and Express (Node.js), to be used as a template.
## Main Libraries
- **express** (Web Framework)
- **mongoose** (MongoDB object modeling)
- **mocha** (Testing)
## Install & Run
Install all dependencies:
```sh
yarn install
```
Run test suites (requires **nodemon**)
```sh
yarn tests
```
## MongoDB setup
MongoDB should be running at `localhost:27017`
### Easy MongoDB setup using Docker (Mac OS or Linux)
Pull image:
```sh
docker pull mongo
```
Run container:
```sh
docker run --name mongodb -p 27017:27017 -v $PWD/data/db:/data/db -d mongo
```
## Node.js setup
Project created using **Node.js 10.15**. Older versions should be avoided.
**Yarn** package manager was used instead of **NPM**. Both should work well, though.
|
#!/usr/bin/env ruby
# code by fre3vi
#
# Demonstrates several ways of building arrays of numbers in Ruby.
# Each result is printed with `p` so the exact structure is visible.

# method one: a plain array literal
numbers_one = [1,2,3,4,5]
p numbers_one

# method two: Kernel#Array coerces the range into an array
numbers_two = Array(1..10)
p numbers_two

# method three: Range#to_a
numbers_three = (1..10).to_a
p numbers_three

# method four: every second element of the range -> [1, 3, 5, 7, 9]
numbers_four = (1..10).step(2).to_a
p numbers_four

# method five: Numeric#step — from 2 up to 10 in steps of 3 -> [2, 5, 8]
numbers_five = 2.step(10, 3).to_a
p numbers_five

# method six: map the range onto the squares of its elements
numbers_six = (1..10).map { |number| number * number }
p numbers_six

# method seven: a lazy enumerator — elements are produced only on demand
# (the first `p` prints the enumerator object itself, not an array)
numbers_seven = (1..10).lazy
p numbers_seven
p numbers_seven.first(5)
p numbers_seven.first(10)
|
# Register the Lato Media stylesheet and javascript bundles for precompilation.
Rails.application.config.assets.precompile += %w(
  lato_media/application.css
  lato_media/application.js
)
|
# Manages a user's transactions and their links to groups.
class TransactionsController < ApplicationController
  before_action :logged_in_user

  # Lists the current user's transactions that belong to at least one group,
  # together with their total amount.
  def index
    @transaction = current_user.transactions.includes(:groups).desc
    @transaction = @transaction.filter { |trans| !trans.groups.empty? }
    @total = @transaction.sum(&:amount)
  end

  def new
    @transaction = current_user.transactions.build
  end

  # Creates a transaction and links it to the selected groups.
  #
  # Fixed: the original redirected inside the group loop
  # (`redirect_to transactions_path and break`), so only the FIRST selected
  # group ever got a GroupTransaction. Now every selected group is linked
  # before the single redirect.
  def create
    @transaction = current_user.transactions.build(transaction_params.except(:group_ids))
    @transaction.author_id = current_user.id
    if @transaction.save
      group_ids = params[:transaction][:group_ids]
      if Group.none? || group_ids.nil?
        # No groups available/selected: record the transaction as ungrouped.
        GroupTransaction.create(transaction_id: @transaction.id)
        redirect_to external_path
      else
        # Drop the blank "0" placeholder Rails adds to multi-selects.
        group_ids.reject { |n| n.to_i.zero? }.each do |id|
          GroupTransaction.create(transaction_id: @transaction.id, group_id: id.to_i)
        end
        redirect_to transactions_path
      end
    else
      render :new
    end
  end

  # Lists transactions that are not linked to any group ("external").
  def external_transactions
    all_transactions = current_user.transactions.pluck(:id)
    grouped = GroupTransaction.where(transaction_id: all_transactions).pluck(:transaction_id)
    ungrouped = all_transactions - grouped
    @external = current_user.transactions.where(id: ungrouped).desc
    @total = @external.sum(&:amount)
  end

  def show
    @transaction = current_user.transactions.find(params[:id])
  end

  private

  # Only :name and :amount are mass-assignable; :group_ids is read directly
  # from params in #create.
  def transaction_params
    params.require(:transaction).permit(:name, :amount)
  end
end
|
use crate::core::{Workspace, WorkspaceData};
use crate::{rh_homepage, rh_name, rh_version};
use reqwest::header::{HeaderMap, HeaderValue};
use super::header;
/// Fills in sensible default headers based on the workspace flags, without
/// overriding anything the user supplied explicitly.
pub fn upgrade(args: &Workspace, headers: &mut HeaderMap) {
    if args.is_json() {
        // `from_static` is infallible for these known-valid literals, so the
        // `from_str(...).unwrap()` pattern is avoided.
        if !headers.contains_key(header::CONTENT_TYPE) {
            headers.append(header::CONTENT_TYPE, HeaderValue::from_static("application/json"));
        }
        if !headers.contains_key(header::ACCEPT) {
            headers.append(header::ACCEPT, HeaderValue::from_static("application/json"));
        }
    }

    if args.is_form() && !headers.contains_key(header::CONTENT_TYPE) {
        headers.append(
            header::CONTENT_TYPE,
            HeaderValue::from_static("application/x-www-form-urlencoded"),
        );
    }

    if !headers.contains_key(header::USER_AGENT) {
        // The user agent is built at run time from crate metadata, so
        // `from_str` is still needed here; the macros expand to valid
        // header characters, hence the `unwrap`.
        headers.append(
            header::USER_AGENT,
            HeaderValue::from_str(&format!("{}/{} {}", rh_name!(), rh_version!(), rh_homepage!())).unwrap(),
        );
    }
}
|
package au.id.tmm.intime.cats.instances
import java.time.Month
import cats.{Hash, Order, Show}
/** Cats instances for `java.time.Month`. */
trait MonthInstances {
  implicit val intimeOrderForMonth: Order[Month] with Hash[Month] = new MonthOrder

  // Fixed: this was declared as `Show[MonthInstances]`, i.e. a Show for the
  // instances trait itself, so no `Show[Month]` was ever available
  // implicitly. It now provides a toString-based Show for Month.
  implicit val intimeShowForMonth: Show[Month] = Show.fromToString
}
/** `Order` and `Hash` instance for `java.time.Month`, delegating to the JDK. */
class MonthOrder extends Order[Month] with Hash[Month] {
  override def compare(x: Month, y: Month): Int = x.compareTo(y)

  override def hash(x: Month): Int = x.hashCode()
}
|
import ApplicationAdapter from './application';
import { pluralize } from 'ember-inflector';
// Ember Data adapter for secrets under a v1 API. Builds key/action URLs per
// backend mount and funnels reads/writes through `this.ajax`.
export default ApplicationAdapter.extend({
  namespace: 'v1',

  // Serializes the snapshot and POSTs it to the secret's URL; updates go to
  // the `/config` sub-path instead of the key itself.
  createOrUpdate(store, type, snapshot, requestType) {
    const serializer = store.serializerFor(type.modelName);
    const data = serializer.serialize(snapshot, requestType);
    const { id } = snapshot;
    let url = this.urlForSecret(snapshot.record.get('backend'), id);

    if (requestType === 'update') {
      url = url + '/config';
    }

    return this.ajax(url, 'POST', { data });
  },

  createRecord() {
    return this.createOrUpdate(...arguments);
  },

  updateRecord() {
    // Same flow as create, but flagged so createOrUpdate targets `/config`.
    return this.createOrUpdate(...arguments, 'update');
  },

  deleteRecord(store, type, snapshot) {
    const { id } = snapshot;
    return this.ajax(this.urlForSecret(snapshot.record.get('backend'), id), 'DELETE');
  },

  // Maps model names onto API path segments.
  pathForType(type) {
    let path;
    switch (type) {
      case 'cluster':
        path = 'clusters';
        break;
      case 'secret-engine':
        path = 'secrets';
        break;
      default:
        path = pluralize(type);
        break;
    }
    return path;
  },

  // URL of one key under the given backend mount, or of the key collection
  // when `id` is omitted.
  urlForSecret(backend, id) {
    let url = `${this.buildURL()}/${backend}/keys/`;
    if (id) {
      url += id;
    }
    return url;
  },

  // URL for a key action (hash, random, datakey, export, ...). `param`
  // carries the action-specific URL pieces.
  urlForAction(action, backend, id, param) {
    let urlBase = `${this.buildURL()}/${backend}/${action}`;
    // these aren't key-specific
    if (action === 'hash' || action === 'random') {
      return urlBase;
    }
    if (action === 'datakey' && param) {
      // datakey action has `wrapped` or `plaintext` as part of the url
      return `${urlBase}/${param}/${id}`;
    }
    if (action === 'export' && param) {
      // param is a [type, version] pair; version is optional
      let [type, version] = param;
      const exportBase = `${urlBase}/${type}-key/${id}`;
      return version ? `${exportBase}/${version}` : exportBase;
    }
    return `${urlBase}/${id}`;
  },

  // Without an id, ask the API for a listing instead of a single record.
  optionsForQuery(id) {
    let data = {};
    if (!id) {
      data['list'] = true;
    }
    return { data };
  },

  fetchByQuery(query) {
    const { id, backend } = query;
    return this.ajax(this.urlForSecret(backend, id), 'GET', this.optionsForQuery(id)).then(resp => {
      // The response carries no id; echo the requested one back for the store.
      resp.id = id;
      return resp;
    });
  },

  query(store, type, query) {
    return this.fetchByQuery(query);
  },

  queryRecord(store, type, query) {
    return this.fetchByQuery(query);
  },

  // rotate, encrypt, decrypt, sign, verify, hmac, rewrap, datakey
  keyAction(action, { backend, id, payload }, options = {}) {
    // export reads key material; every other action mutates, hence POST.
    const verb = action === 'export' ? 'GET' : 'POST';
    const { wrapTTL } = options;
    if (action === 'rotate') {
      return this.ajax(this.urlForSecret(backend, id) + '/rotate', verb);
    }

    // `param` rides inside the payload but belongs in the URL, not the body.
    const { param } = payload;
    delete payload.param;
    return this.ajax(this.urlForAction(action, backend, id, param), verb, {
      data: payload,
      wrapTTL,
    });
  },
});
|
#!/bin/sh
# Show CPU (k10temp) and GPU (amdgpu) temperatures in a rofi menu and copy
# the selected reading's value to the clipboard.
OUTPUT=$(sensors -A k10temp-pci-00c3 amdgpu-pci-0a00 | rofi -dmenu -p "Temperature")
# Fixed: the sensor line was previously passed as a printf FORMAT string
# (`printf "$OUTPUT"`), so any '%' in it would be misinterpreted. Use an
# explicit '%s' format; `xargs` trims surrounding whitespace, `tr` drops the
# trailing newline before the value reaches the clipboard.
printf '%s' "$OUTPUT" | cut -d ':' -f2 | xargs | tr -d '\n' | xsel -i -b
|
# ThreeJS cube demo
A simple spinning cube with a wireframe. Click or press Spacebar to stop the animation.
https://user-images.githubusercontent.com/28185591/160942037-accb652d-8f87-4c55-a763-a58a7b77d2ec.mp4
### Local dev using pnpm
```bash
pnpm i
pnpm dev
```
|
using System;
using System.Collections.Generic;
using System.Text;
// ReSharper disable InconsistentNaming
namespace RiotGamesApi.Libraries.Lol.v3.StaticEndPoints.SummonerSpell
{
    /// <summary>
    /// Optional data fields ("tags") for the static summoner-spell endpoint.
    /// Member names are deliberately lowercase — presumably serialized
    /// verbatim into request parameters (hence the ReSharper
    /// InconsistentNaming suppression above); confirm before renaming.
    /// </summary>
    public enum SummonerSpellTag
    {
        all,
        cooldown,
        cooldownBurn,
        cost,
        costBurn,
        costType,
        effect,
        effectBurn,
        image,
        key,
        leveltip,
        maxrank,
        modes,
        range,
        rangeBurn,
        resource,
        sanitizedDescription,
        sanitizedTooltip,
        tooltip,
        vars,
    }
}
|
# example
This example is a storybook; you can run it with `flutter run`.
|
import "mocha";
import * as expect from "expect";
import { WatSharpCompiler } from "../../src/compiler/WatSharpCompiler";
describe("WatSharpCompiler - emit structure copy", () => {
  // Compiles the given WAT# source with tracing enabled and returns the compiler.
  function compileSource(source: string): WatSharpCompiler {
    const wComp = new WatSharpCompiler(source);
    wComp.trace();
    wComp.compile();
    return wComp;
  }

  // Asserts a successful compilation whose injected instructions start with `expected`.
  function expectInjected(wComp: WatSharpCompiler, expected: string[]): void {
    expect(wComp.hasErrors).toBe(false);
    const instrs = wComp.traceMessages.filter((t) => t.source === "inject");
    expected.forEach((message, index) => expect(instrs[index].message).toBe(message));
  }

  // Asserts a failed compilation with exactly one error of the given code.
  function expectSingleError(wComp: WatSharpCompiler, code: string): void {
    expect(wComp.hasErrors).toBe(true);
    expect(wComp.errors.length).toBe(1);
    expect(wComp.errors[0].code).toBe(code);
  }

  // The 31-byte structure copy is unrolled into i64/i32/16-bit/8-bit
  // load/store pairs; `addresses` is the instruction pair emitted before each
  // pair to produce the destination and source addresses.
  function copySequence(addresses: string[]): string[] {
    const pairs: Array<[string, string]> = [
      ["i64.load", "i64.store"],
      ["i64.load offset=8", "i64.store offset=8"],
      ["i64.load offset=16", "i64.store offset=16"],
      ["i32.load offset=24", "i32.store offset=24"],
      ["i32.load16_u offset=28", "i32.store16 offset=28"],
      ["i32.load8_u offset=30", "i32.store8 offset=30"],
    ];
    const sequence: string[] = [];
    for (const [load, store] of pairs) {
      sequence.push(...addresses, load, store);
    }
    return sequence;
  }

  it("copy assignment #1", () => {
    const wComp = compileSource(`
      type regs = struct {
        u8 l,
        u8 h,
        u8 q,
        u64 m,
        *u16[5] ptr
      };
      regs a;
      regs b;
      void test() {
        b := &a;
      }
    `);
    expectInjected(wComp, copySequence(["i32.const 31", "i32.const 0"]));
  });

  it("copy assignment #2", () => {
    const wComp = compileSource(`
      type regs = struct {
        u8 l,
        u8 h,
        u8 q,
        u64 m,
        *u16[5] ptr
      };
      regs a;
      regs b;
      void test() {
        b := &a + 10;
      }
    `);
    // The computed source address is cached in a temp local first.
    expectInjected(wComp, [
      "i32.const 10",
      "set_local $tloc$rcpyi32",
      ...copySequence(["i32.const 31", "get_local $tloc$rcpyi32"]),
    ]);
  });

  it("copy assignment #3", () => {
    const wComp = compileSource(`
      type regs = struct {
        u8 l,
        u8 h,
        u8 q,
        u64 m,
        *u16[5] ptr
      };
      regs a;
      regs b;
      void test() {
        b := a;
      }
    `);
    // A bare struct (without '&') is not a valid copy source.
    expectSingleError(wComp, "W143");
  });

  it("copy assignment #4", () => {
    const wComp = compileSource(`
      type regs = struct {
        u8 l,
        u8 h,
        u8 q,
        u64 m,
        *u16[5] ptr
      };
      regs a;
      regs b;
      void test() {
        b := 12.34;
      }
    `);
    // A scalar literal cannot be copy-assigned to a struct.
    expectSingleError(wComp, "W167");
  });

  it("copy assignment #5", () => {
    const wComp = compileSource(`
      type regs = u8[31];
      regs a;
      regs b;
      void test() {
        b := &a;
      }
    `);
    // Arrays copy with the same unrolled sequence as structs of equal size.
    expectInjected(wComp, copySequence(["i32.const 31", "i32.const 0"]));
  });

  it("copy assignment #6", () => {
    const wComp = compileSource(`
      type regs = u8[31];
      regs a;
      regs b;
      void test() {
        b := &a + 10;
      }
    `);
    expectInjected(wComp, [
      "i32.const 10",
      "set_local $tloc$rcpyi32",
      ...copySequence(["i32.const 31", "get_local $tloc$rcpyi32"]),
    ]);
  });

  it("copy assignment #7", () => {
    const wComp = compileSource(`
      type regs = struct {
        u8 l,
        u8 h,
        u8 q,
        u64 m,
        *u16[5] ptr
      };
      regs a;
      regs b;
      u16 c;
      void test() {
        c := &a;
      }
    `);
    // The copy target must itself be a compound (struct/array) value.
    expectSingleError(wComp, "W166");
  });

  it("copy assignment #8", () => {
    const wComp = compileSource(`
      type regs = struct {
        u8 l,
        u8 h,
        u8 q,
        u64 m,
        *u16[500] ptr
      };
      regs a;
      regs b;
      u16 c;
      void test() {
        b := &a;
      }
    `);
    // Oversized structures cannot be copied with the unrolled sequence.
    expectSingleError(wComp, "W168");
  });
});
|
# Join model linking a Batch to a HopperLot, carrying the amount drawn.
class BatchHopperLot < ActiveRecord::Base
  belongs_to :hopper_lot
  belongs_to :batch

  validates_uniqueness_of :batch_id, :scope => [:hopper_lot_id]
  validates_associated :batch, :hopper_lot
  validates_numericality_of :amount, :greater_than_or_equal_to => 0

  after_save :calculate_incr
  after_destroy :calculate_decr

  # Re-saves the parent batch after this record changes so any batch-side
  # callbacks/recalculations run. Uses the `batch` association instead of
  # `Batch.find(batch_id)` (avoids an extra query) and drops the leftover
  # debug `puts`.
  # NOTE(review): the total adjustment is still commented out — confirm
  # whether `b.total += amount` should be restored.
  def calculate_incr
    b = batch
    #b.total += self.amount
    b.save
  end

  def calculate_decr
    b = batch
    #b.total -= self.amount
    b.save
  end
end
|
#!/usr/bin/env bash
# Generate an Apache VirtualHost configuration that routes PHP through
# php5-fpm, enable the site, and restart the services.
#
# Usage: vhost.sh <server-name> <document-root>

# Fail fast on missing arguments instead of writing a broken vhost file.
if [ "$#" -ne 2 ]; then
    echo "Usage: $0 <server-name> <document-root>" >&2
    exit 1
fi

servername="$1"
docroot="$2"

vhost="<VirtualHost *:80>
    ServerName $servername
    Alias /fcgi-bin /usr/sbin/php5-fpm
    <FilesMatch \"\.ph(p3?|tml)\$\">
        SetHandler php5-fcgi
        Action php5-fcgi /fcgi-bin virtual
    </FilesMatch>
    <Directory /fcgi-bin>
        Options -Indexes +FollowSymLinks +ExecCGI +Includes
        Order deny,allow
        Deny from all
        Require env REDIRECT_STATUS
    </Directory>
    DocumentRoot $docroot
    <Directory $docroot>
        Options -Indexes +FollowSymLinks
        AllowOverride All
    </Directory>
</VirtualHost>"

echo "$vhost" > "/etc/apache2/sites-available/$servername.conf"
# Quote the site name so names with unusual characters don't word-split.
a2ensite "$servername"

# Restart Services
service apache2 restart
service php5-fpm restart
|
//===== Copyright (c) Valve Corporation, All rights reserved. ======//
//
// Purpose:
//
// $NoKeywords: $
// Utility class for building command buffers into memory
//==================================================================//
#ifndef COMMANDBUILDER_H
#define COMMANDBUILDER_H
#ifdef _WIN32
#pragma once
#endif
#include "shaderapi/commandbuffer.h"
#include "shaderapi/ishaderapi.h"
#include "shaderlib/BaseShader.h"
#include "tier1/convar.h"
#ifdef _PS3
#include "ps3gcm\gcmdrawstate.h"
#include "ps3gcm\gcmtexture.h"
#endif
#ifdef DBGFLAG_ASSERT
#define TRACK_STORAGE 1
#else
#define TRACK_STORAGE 0
#endif
//-----------------------------------------------------------------------------
// Buffer for storing commands into
//-----------------------------------------------------------------------------
// Fixed-size inline storage for command bytes. Writes advance a cursor into
// m_Data; overflow checking exists only in TRACK_STORAGE (debug) builds.
template<int N> class CFixedCommandStorageBuffer
{
public:
	uint8 m_Data[N];				// inline storage
	uint8 *m_pDataOut;				// current write cursor into m_Data
#if TRACK_STORAGE
	size_t m_nNumBytesRemaining;	// debug-only free-space counter
#endif

	FORCEINLINE CFixedCommandStorageBuffer( void )
	{
		m_pDataOut = m_Data;
#if TRACK_STORAGE
		m_nNumBytesRemaining = N;
#endif
	}

	// Debug-only overflow guard; compiles to nothing when TRACK_STORAGE is 0.
	FORCEINLINE void EnsureCapacity( size_t sz )
	{
#if TRACK_STORAGE
		if ( m_nNumBytesRemaining < sz + 32 )
			Error( "getting scary\n" );
		// Moved inside the guard: m_nNumBytesRemaining only exists when
		// TRACK_STORAGE is 1. The old placement outside the #if relied on
		// Assert() expanding to nothing whenever TRACK_STORAGE was 0 (both
		// follow DBGFLAG_ASSERT) — now it is robust regardless, and matches
		// the PS3 dynamic buffer below.
		Assert( m_nNumBytesRemaining >= sz );
#endif
	}

	// Appends a POD value of type T at the cursor.
	template<class T> FORCEINLINE void Put( T const &nValue )
	{
		EnsureCapacity( sizeof( T ) );
		*( reinterpret_cast<T *>( m_pDataOut ) ) = nValue;
		m_pDataOut += sizeof( nValue );
#if TRACK_STORAGE
		m_nNumBytesRemaining -= sizeof( nValue );
#endif
	}

	FORCEINLINE void PutInt( int nValue )
	{
		Put( nValue );
	}

	FORCEINLINE void PutFloat( float nValue )
	{
		Put( nValue );
	}

	FORCEINLINE void PutPtr( void * pPtr )
	{
		Put( pPtr );
	}

	// Appends a raw byte range.
	FORCEINLINE void PutMemory( const void *pMemory, size_t nBytes )
	{
		EnsureCapacity( nBytes );
		memcpy( m_pDataOut, pMemory, nBytes );
		m_pDataOut += nBytes;
#if TRACK_STORAGE
		m_nNumBytesRemaining -= nBytes;
#endif
	}

	FORCEINLINE uint8 *Base( void )
	{
		return m_Data;
	}

	// Rewinds the cursor; previously written bytes are simply overwritten.
	FORCEINLINE void Reset( void )
	{
		m_pDataOut = m_Data;
#if TRACK_STORAGE
		m_nNumBytesRemaining = N;
#endif
	}

	FORCEINLINE size_t Size( void ) const
	{
		return m_pDataOut - m_Data;
	}
};
#ifdef _PS3
// PS3-only variant backed by a dynamic ECB block obtained from the GCM draw
// state rather than inline storage. Same Put* interface as the fixed buffer.
class CDynamicCommandStorageBuffer
{
public:
	uint8 *m_Data;					// start of the dynamic ECB block
	uint8 *m_pDataOut;				// current write cursor
#if TRACK_STORAGE_PS3
	size_t m_nNumBytesRemaining;	// debug-only free-space counter
#endif

	FORCEINLINE CDynamicCommandStorageBuffer()
	{
		m_Data = gpGcmDrawState->OpenDynECB();
		m_pDataOut = m_Data;
#if TRACK_STORAGE_PS3
		// The dynamic ECB block is 0x1000 bytes — TODO confirm against
		// OpenDynECB's actual allocation size.
		m_nNumBytesRemaining = 0x1000;
#endif
	}

	// Debug-only overflow guard; compiles to nothing when TRACK_STORAGE_PS3 is off.
	FORCEINLINE void EnsureCapacity( size_t sz )
	{
#if TRACK_STORAGE_PS3
		if ( m_nNumBytesRemaining < sz + 32 )
			Error( "getting scary\n" );
		Assert( m_nNumBytesRemaining >= sz );
#endif
	}

	// Appends a POD value of type T at the cursor.
	template<class T> FORCEINLINE void Put( T const &nValue )
	{
		EnsureCapacity( sizeof( T ) );
		*( reinterpret_cast<T *>( m_pDataOut ) ) = nValue;
		m_pDataOut += sizeof( nValue );
#if TRACK_STORAGE_PS3
		m_nNumBytesRemaining -= sizeof( nValue );
#endif
	}

	FORCEINLINE void PutInt( int nValue )
	{
		Put( nValue );
	}

	FORCEINLINE void PutFloat( float nValue )
	{
		Put( nValue );
	}

	FORCEINLINE void PutPtr( void * pPtr )
	{
		Put( pPtr );
	}

	// Appends a raw byte range.
	FORCEINLINE void PutMemory( const void *pMemory, size_t nBytes )
	{
		EnsureCapacity( nBytes );
		memcpy( m_pDataOut, pMemory, nBytes );
		m_pDataOut += nBytes;
#if TRACK_STORAGE_PS3
		m_nNumBytesRemaining -= nBytes;
#endif
	}

	FORCEINLINE uint8 *Base( void )
	{
		return m_Data;
	}

	FORCEINLINE void Reset( void )
	{
		m_pDataOut = m_Data;
#if TRACK_STORAGE_PS3
		// Fixed: this was `m_nNumBytesRemaining = N;`, but N is a template
		// parameter of CFixedCommandStorageBuffer and does not exist in this
		// class — the line failed to compile with TRACK_STORAGE_PS3 enabled.
		// Reset to the same block size the constructor starts from.
		m_nNumBytesRemaining = 0x1000;
#endif
	}

	FORCEINLINE size_t Size( void ) const
	{
		return m_pDataOut - m_Data;
	}
};
#endif
//-----------------------------------------------------------------------------
// Base class used to build up command buffers
//-----------------------------------------------------------------------------
// Common functionality shared by command-buffer builders; S is the storage
// policy (fixed or dynamic buffer) providing the Put*/Reset/Size interface.
template<class S> class CBaseCommandBufferBuilder
{
public:
#ifdef _PS3
	// Storage is kept 16-byte aligned on PS3 (ALIGN16/ALIGN16_POST macros).
	ALIGN16 S m_Storage ALIGN16_POST;
#else
	S m_Storage;
#endif

	// Terminates the command stream; consumers stop at CBCMD_END.
	FORCEINLINE void End( void )
	{
		m_Storage.PutInt( CBCMD_END );
	}

	// Convenience accessor for the shader's material parameter table.
	FORCEINLINE IMaterialVar *Param( int nVar ) const
	{
		return CBaseShader::s_ppParams[nVar];
	}

	FORCEINLINE void Reset( void )
	{
		m_Storage.Reset();
	}

	FORCEINLINE size_t Size( void ) const
	{
		return m_Storage.Size();
	}

	FORCEINLINE uint8 *Base( void )
	{
		return m_Storage.Base();
	}

	// Writes one 4-float constant register taken from an array.
	FORCEINLINE void OutputConstantData( float const *pSrcData )
	{
		m_Storage.PutFloat( pSrcData[0] );
		m_Storage.PutFloat( pSrcData[1] );
		m_Storage.PutFloat( pSrcData[2] );
		m_Storage.PutFloat( pSrcData[3] );
	}

	// Writes one 4-float constant register from individual components.
	FORCEINLINE void OutputConstantData4( float flVal0, float flVal1, float flVal2, float flVal3 )
	{
		m_Storage.PutFloat( flVal0 );
		m_Storage.PutFloat( flVal1 );
		m_Storage.PutFloat( flVal2 );
		m_Storage.PutFloat( flVal3 );
	}
};
//-----------------------------------------------------------------------------
// Used by SetPixelShaderFlashlightState
//-----------------------------------------------------------------------------
struct CBCmdSetPixelShaderFlashlightState_t
{
	// Texture sampler stages used by the flashlight.
	Sampler_t m_LightSampler;
	Sampler_t m_DepthSampler;
	Sampler_t m_ShadowNoiseSampler;
	// Pixel-shader constant register indices the flashlight state is written to.
	int m_nColorConstant;
	int m_nAttenConstant;
	int m_nOriginConstant;
	int m_nDepthTweakConstant;
	int m_nScreenScaleConstant;
	int m_nWorldToTextureConstant;
	// Behavior flags; exact shader semantics defined by the consumer of this
	// command (see SetPixelShaderFlashlightState).
	bool m_bFlashlightNoLambert;
	bool m_bSinglePassFlashlight;
};
//-----------------------------------------------------------------------------
// Used to build a per-pass command buffer
//-----------------------------------------------------------------------------
template<class S> class CCommandBufferBuilder : public CBaseCommandBufferBuilder<S>
{
typedef CBaseCommandBufferBuilder<S> PARENT;
#ifdef _PS3
uint32 m_numPs3Tex;
#endif
public:
#ifdef _PS3
FORCEINLINE CCommandBufferBuilder()
{
// For PS3, command buffers begin with up to four Std textures
m_numPs3Tex = 0;
this->m_Storage.PutInt(CBCMD_LENGTH);
this->m_Storage.PutInt(0);
this->m_Storage.PutInt(CBCMD_PS3TEX);
for(int i = 0; i < CBCMD_MAX_PS3TEX; i++) this->m_Storage.PutInt(0);
}
FORCEINLINE void Reset()
{
this->m_Storage.Reset();
m_numPs3Tex = 0;
this->m_Storage.PutInt(CBCMD_LENGTH);
this->m_Storage.PutInt(0);
this->m_Storage.PutInt(CBCMD_PS3TEX);
for(int i = 0; i < CBCMD_MAX_PS3TEX; i++) this->m_Storage.PutInt(0);
}
FORCEINLINE int* GetPs3Textures()
{
return (int*) (this->m_Storage.Base() + sizeof(int) + 2*sizeof(int));
}
#endif
FORCEINLINE void End( void )
{
this->m_Storage.PutInt( CBCMD_END );
#ifdef _PS3
uint32 len = this->m_Storage.Size();
if ( (this->m_Storage.m_Data >= g_aDynECB) && (this->m_Storage.m_Data < &g_aDynECB[sizeof(g_aDynECB)]) )
{
gpGcmDrawState->CloseDynECB(len);
}
uint32* pLength = (uint32*)(this->m_Storage.m_Data + 4);
if (pLength[-1] != CBCMD_LENGTH) Error("Length missing\n");
*pLength = len;
#endif
}
FORCEINLINE void SetPixelShaderConstants( int nFirstConstant, int nConstants )
{
this->m_Storage.PutInt( CBCMD_SET_PIXEL_SHADER_FLOAT_CONST );
this->m_Storage.PutInt( nFirstConstant );
this->m_Storage.PutInt( nConstants );
}
FORCEINLINE void OutputConstantData( float const *pSrcData )
{
this->m_Storage.PutFloat( pSrcData[0] );
this->m_Storage.PutFloat( pSrcData[1] );
this->m_Storage.PutFloat( pSrcData[2] );
this->m_Storage.PutFloat( pSrcData[3] );
}
FORCEINLINE void OutputConstantData4( float flVal0, float flVal1, float flVal2, float flVal3 )
{
this->m_Storage.PutFloat( flVal0 );
this->m_Storage.PutFloat( flVal1 );
this->m_Storage.PutFloat( flVal2 );
this->m_Storage.PutFloat( flVal3 );
}
FORCEINLINE void SetPixelShaderConstant( int nFirstConstant, float const *pSrcData, int nNumConstantsToSet )
{
SetPixelShaderConstants( nFirstConstant, nNumConstantsToSet );
this->m_Storage.PutMemory( pSrcData, 4 * sizeof( float ) * nNumConstantsToSet );
}
FORCEINLINE void SetPixelShaderConstant( int nFirstConstant, int nVar )
{
SetPixelShaderConstant( nFirstConstant, this->Param( nVar )->GetVecValue() );
}
void SetPixelShaderConstantGammaToLinear( int pixelReg, int constantVar )
{
float val[4];
this->Param(constantVar)->GetVecValue( val, 3 );
val[0] = val[0] > 1.0f ? val[0] : GammaToLinear( val[0] );
val[1] = val[1] > 1.0f ? val[1] : GammaToLinear( val[1] );
val[2] = val[2] > 1.0f ? val[2] : GammaToLinear( val[2] );
val[3] = 1.0;
SetPixelShaderConstant( pixelReg, val );
}
FORCEINLINE void SetPixelShaderConstant( int nFirstConstant, float const *pSrcData )
{
SetPixelShaderConstants( nFirstConstant, 1 );
OutputConstantData( pSrcData );
}
FORCEINLINE void SetPixelShaderConstant4( int nFirstConstant, float flVal0, float flVal1, float flVal2, float flVal3 )
{
SetPixelShaderConstants( nFirstConstant, 1 );
OutputConstantData4( flVal0, flVal1, flVal2, flVal3 );
}
FORCEINLINE void SetPixelShaderConstant_W( int pixelReg, int constantVar, float fWValue )
{
if ( constantVar != -1 )
{
float val[3];
this->Param(constantVar)->GetVecValue( val, 3);
SetPixelShaderConstant4( pixelReg, val[0], val[1], val[2], fWValue );
}
}
void SetPixelShaderTextureTransform( int vertexReg, int transformVar )
{
Vector4D transformation[2];
IMaterialVar* pTransformationVar = ( transformVar >= 0 ) ? this->Param( transformVar ) : NULL;
if (pTransformationVar && (pTransformationVar->GetType() == MATERIAL_VAR_TYPE_MATRIX))
{
const VMatrix &mat = pTransformationVar->GetMatrixValue();
transformation[0].Init( mat[0][0], mat[0][1], mat[0][2], mat[0][3] );
transformation[1].Init( mat[1][0], mat[1][1], mat[1][2], mat[1][3] );
}
else
{
transformation[0].Init( 1.0f, 0.0f, 0.0f, 0.0f );
transformation[1].Init( 0.0f, 1.0f, 0.0f, 0.0f );
}
SetPixelShaderConstant( vertexReg, transformation[0].Base(), 2 );
}
FORCEINLINE void SetVertexShaderConstant( int nFirstConstant, float const *pSrcData )
{
this->m_Storage.PutInt( CBCMD_SET_VERTEX_SHADER_FLOAT_CONST );
this->m_Storage.PutInt( nFirstConstant );
this->m_Storage.PutInt( 1 );
OutputConstantData( pSrcData );
}
FORCEINLINE void SetVertexShaderConstant( int nFirstConstant, float const *pSrcData, int nConsts )
{
this->m_Storage.PutInt( CBCMD_SET_VERTEX_SHADER_FLOAT_CONST );
this->m_Storage.PutInt( nFirstConstant );
this->m_Storage.PutInt( nConsts );
this->m_Storage.PutMemory( pSrcData, 4 * nConsts * sizeof( float ) );
}
FORCEINLINE void SetVertexShaderConstant4( int nFirstConstant, float flVal0, float flVal1, float flVal2, float flVal3 )
{
this->m_Storage.PutInt( CBCMD_SET_VERTEX_SHADER_FLOAT_CONST );
this->m_Storage.PutInt( nFirstConstant );
this->m_Storage.PutInt( 1 );
this->m_Storage.PutFloat( flVal0 );
this->m_Storage.PutFloat( flVal1 );
this->m_Storage.PutFloat( flVal2 );
this->m_Storage.PutFloat( flVal3 );
}
void SetVertexShaderTextureTransform( int vertexReg, int transformVar )
{
Vector4D transformation[2];
IMaterialVar* pTransformationVar = ( transformVar >= 0 ) ? this->Param( transformVar ) : NULL;
if (pTransformationVar && (pTransformationVar->GetType() == MATERIAL_VAR_TYPE_MATRIX))
{
const VMatrix &mat = pTransformationVar->GetMatrixValue();
transformation[0].Init( mat[0][0], mat[0][1], mat[0][2], mat[0][3] );
transformation[1].Init( mat[1][0], mat[1][1], mat[1][2], mat[1][3] );
}
else
{
transformation[0].Init( 1.0f, 0.0f, 0.0f, 0.0f );
transformation[1].Init( 0.0f, 1.0f, 0.0f, 0.0f );
}
SetVertexShaderConstant( vertexReg, transformation[0].Base(), 2 );
}
void SetVertexShaderTextureScaledTransformRotate( int vertexReg, int transformVar, int scaleVar, int rotateVar )
{
Vector2D scale( 1, 1 );
IMaterialVar* pScaleVar = this->Param( scaleVar );
if (pScaleVar)
{
if (pScaleVar->GetType() == MATERIAL_VAR_TYPE_VECTOR)
pScaleVar->GetVecValue( scale.Base(), 2 );
else if (pScaleVar->IsDefined())
scale[0] = scale[1] = pScaleVar->GetFloatValue();
}
float flRotateVar = 0.0f;
IMaterialVar* pRotateVar = this->Param( rotateVar );
if ( pRotateVar && pRotateVar->IsDefined() )
{
flRotateVar = pRotateVar->GetFloatValue();
}
Vector4D transformation[2];
IMaterialVar* pTransformationVar = this->Param( transformVar );
if (pTransformationVar && (pTransformationVar->GetType() == MATERIAL_VAR_TYPE_MATRIX))
{
VMatrix matRot = pTransformationVar->GetMatrixValue();
MatrixTranslate( matRot, Vector( 0.5, 0.5, 0 ) );
MatrixRotate( matRot, Vector( 0, 0, 1), flRotateVar );
MatrixTranslate( matRot, Vector( -0.5 * scale[0], -0.5 * scale[1], 0 ) );
matRot = matRot.Scale( Vector(scale[0], scale[1], 1) );
transformation[0].Init( matRot[0][0], matRot[0][1], matRot[0][2], matRot[0][3] );
transformation[1].Init( matRot[1][0], matRot[1][1], matRot[1][2], matRot[1][3] );
SetVertexShaderConstant( vertexReg, transformation[0].Base(), 2 );
}
}
// Uploads a texture transform with a per-axis scale multiplied in, into two
// consecutive vertex shader constant registers. Identity fallback when the
// transform var is absent or not a matrix.
void SetVertexShaderTextureScaledTransform( int vertexReg, int transformVar, int scaleVar )
{
	Vector4D transformation[2];
	IMaterialVar* pTransformationVar = this->Param( transformVar );
	if (pTransformationVar && (pTransformationVar->GetType() == MATERIAL_VAR_TYPE_MATRIX))
	{
		const VMatrix &mat = pTransformationVar->GetMatrixValue();
		transformation[0].Init( mat[0][0], mat[0][1], mat[0][2], mat[0][3] );
		transformation[1].Init( mat[1][0], mat[1][1], mat[1][2], mat[1][3] );
	}
	else
	{
		transformation[0].Init( 1.0f, 0.0f, 0.0f, 0.0f );
		transformation[1].Init( 0.0f, 1.0f, 0.0f, 0.0f );
	}
	// Scale defaults to (1,1); a scalar var applies the same factor to both axes.
	Vector2D scale( 1, 1 );
	IMaterialVar* pScaleVar = this->Param( scaleVar );
	if (pScaleVar)
	{
		if (pScaleVar->GetType() == MATERIAL_VAR_TYPE_VECTOR)
			pScaleVar->GetVecValue( scale.Base(), 2 );
		else if (pScaleVar->IsDefined())
			scale[0] = scale[1] = pScaleVar->GetFloatValue();
	}
	// Apply the scaling: rotation/skew terms ([r][0], [r][1]) and the
	// translation terms ([r][3]) are scaled; column 2 (z) is left untouched.
	transformation[0][0] *= scale[0];
	transformation[0][1] *= scale[1];
	transformation[1][0] *= scale[0];
	transformation[1][1] *= scale[1];
	transformation[0][3] *= scale[0];
	transformation[1][3] *= scale[1];
	SetVertexShaderConstant( vertexReg, transformation[0].Base(), 2 );
}
// Writes the env-map tint color to a pixel shader constant, or black when
// specular display is disabled or fullbright mode 2 is active.
FORCEINLINE void SetEnvMapTintPixelShaderDynamicState( int pixelReg, int tintVar )
{
	if( g_pConfig->bShowSpecular && g_pConfig->nFullbright != 2 )
	{
		SetPixelShaderConstant( pixelReg, this->Param( tintVar)->GetVecValue() );
	}
	else
	{
		SetPixelShaderConstant4( pixelReg, 0.0, 0.0, 0.0, 0.0 );
	}
}
// Like SetEnvMapTintPixelShaderDynamicState, but converts the tint from gamma
// to linear space and carries an explicit alpha in the w component.
FORCEINLINE void SetEnvMapTintPixelShaderDynamicStateGammaToLinear( int pixelReg, int tintVar, float fAlphaVal = 1.0f )
{
	if( g_pConfig->bShowSpecular && g_pConfig->nFullbright != 2 )
	{
		float color[4];
		color[3] = fAlphaVal;
		//this->Param( tintVar)->GetLinearVecValue( color, 3 );
		// (wills) converted this line to the following so that envmaptint can be over-driven beyond 0-1 range
		this->Param( tintVar)->GetVecValue( color, 3 );
		// Full-range conversion preserves values > 1 for over-driven tints.
		color[0] = GammaToLinearFullRange( color[0] );
		color[1] = GammaToLinearFullRange( color[1] );
		color[2] = GammaToLinearFullRange( color[2] );
		SetPixelShaderConstant( pixelReg, color );
	}
	else
	{
		// Specular off: black tint, but keep the requested alpha.
		SetPixelShaderConstant4( pixelReg, 0.0, 0.0, 0.0, fAlphaVal );
	}
}
// Emits a command that stores the eye position (xyz) plus wValue (w) into the
// given pixel shader constant at execution time. Operand order matters: it
// must match the command interpreter's read order.
FORCEINLINE void StoreEyePosInPixelShaderConstant( int nConst, float wValue = 1.0f )
{
	this->m_Storage.PutInt( CBCMD_STORE_EYE_POS_IN_PSCONST );
	this->m_Storage.PutInt( nConst );
	this->m_Storage.PutFloat( wValue );
}
// Emits a command that writes the current fog parameters to pixel shader
// constant register nReg at execution time.
FORCEINLINE void SetPixelShaderFogParams( int nReg )
{
	this->m_Storage.PutInt( CBCMD_SETPIXELSHADERFOGPARAMS );
	this->m_Storage.PutInt( nReg );
}
#ifndef _PS3
// Non-PS3: texture binds are encoded as ordinary commands in the byte stream.
// Binds one of the material system's built-in standard textures.
FORCEINLINE void BindStandardTexture( Sampler_t nSampler, TextureBindFlags_t nBindFlags, StandardTextureId_t nTextureId )
{
	this->m_Storage.PutInt( CBCMD_BIND_STANDARD_TEXTURE );
	this->m_Storage.PutInt( nSampler | nBindFlags );
	this->m_Storage.PutInt( nTextureId );
}
// Resolves an ITexture + animation frame to a shader API handle and binds it.
FORCEINLINE void BindTexture( CBaseShader *pShader, Sampler_t nSampler, TextureBindFlags_t nBindFlags, ITexture *pTexture, int nFrame )
{
	ShaderAPITextureHandle_t hTexture = pShader->GetShaderAPITextureBindHandle( pTexture, nFrame );
	Assert( hTexture != INVALID_SHADERAPI_TEXTURE_HANDLE );
	this->m_Storage.PutInt( CBCMD_BIND_SHADERAPI_TEXTURE_HANDLE );
	this->m_Storage.PutInt( nSampler | nBindFlags );
	this->m_Storage.Put( hTexture );
}
// Binds an already-resolved shader API texture handle.
FORCEINLINE void BindTexture( Sampler_t nSampler, TextureBindFlags_t nBindFlags, ShaderAPITextureHandle_t hTexture )
{
	Assert( hTexture != INVALID_SHADERAPI_TEXTURE_HANDLE );
	this->m_Storage.PutInt( CBCMD_BIND_SHADERAPI_TEXTURE_HANDLE );
	this->m_Storage.PutInt( nSampler | nBindFlags );
	this->m_Storage.Put( hTexture );
}
#else
// PS3: binds are stored as fixed-size records whose offsets are kept in a
// side table (GetPs3Textures), capped at CBCMD_MAX_PS3TEX per draw.
FORCEINLINE void BindTexture( Sampler_t nSampler, TextureBindFlags_t nBindFlags, ShaderAPITextureHandle_t hTexture )
{
	Assert( hTexture != INVALID_SHADERAPI_TEXTURE_HANDLE );
	if (m_numPs3Tex >= CBCMD_MAX_PS3TEX)
	{
		Error("Too many textures in single draw ECB\n");
	}
	// Remember where this record lands in the stream so it can be found later.
	int* pOffset = GetPs3Textures() + m_numPs3Tex;
	CPs3BindParams_t tex;
	tex.m_sampler = nSampler;
	tex.m_nBindFlags = nBindFlags >> 24; // Top byte only
	tex.m_hTexture = hTexture;
	tex.m_boundStd = -1;	// -1 marks "not a standard texture"
	tex.m_nBindTexIndex = m_numPs3Tex;
	this->m_Storage.PutInt( CBCMD_BIND_PS3_TEXTURE );
	*pOffset = (this->m_Storage.m_pDataOut - this->m_Storage.m_Data);
	this->m_Storage.PutMemory(&tex, sizeof(tex)) ;
	m_numPs3Tex++;
}
// PS3 variant of BindStandardTexture; records the standard texture id instead
// of a concrete handle.
FORCEINLINE void BindStandardTexture( Sampler_t nSampler, TextureBindFlags_t nBindFlags, StandardTextureId_t nTextureId )
{
	if (m_numPs3Tex >= CBCMD_MAX_PS3TEX)
	{
		Error("Too many textures in single draw ECB\n");
	}
	int* pOffset = GetPs3Textures() + m_numPs3Tex;
	CPs3BindParams_t tex;
	tex.m_sampler = nSampler;
	tex.m_nBindFlags = nBindFlags >> 24;
	tex.m_boundStd = nTextureId;
	tex.m_hTexture = -1;	// -1 marks "no concrete handle"
	tex.m_nBindTexIndex = m_numPs3Tex;
	this->m_Storage.PutInt( CBCMD_BIND_PS3_STANDARD_TEXTURE );
	*pOffset = (this->m_Storage.m_pDataOut - this->m_Storage.m_Data);
	this->m_Storage.PutMemory(&tex, sizeof(tex)) ;
	m_numPs3Tex++;
}
// Convenience overload: resolve ITexture + frame, then delegate.
FORCEINLINE void BindTexture( CBaseShader *pShader, Sampler_t nSampler, TextureBindFlags_t nBindFlags, ITexture *pTexture, int nFrame )
{
	ShaderAPITextureHandle_t hTexture = pShader->GetShaderAPITextureBindHandle( pTexture, nFrame );
	BindTexture(nSampler, nBindFlags, hTexture);
}
#endif
// Binds the texture referenced by a material var (with optional frame var),
// shared by PS3 and non-PS3 builds.
FORCEINLINE void BindTexture( CBaseShader *pShader, Sampler_t nSampler, TextureBindFlags_t nBindFlags, int nTextureVar, int nFrameVar = -1 )
{
	ShaderAPITextureHandle_t hTexture = pShader->GetShaderAPITextureBindHandle( nTextureVar, nFrameVar );
	BindTexture( nSampler, nBindFlags, hTexture );
}
// Same as BindTexture, except it checks to see if the texture handle is actually the "internal" env_cubemap. If so, it binds it as a standard texture so the proper texture bind flags are
// recorded during instance rendering in CShaderAPIDX8.
FORCEINLINE void BindEnvCubemapTexture( CBaseShader *pShader, Sampler_t nSampler, TextureBindFlags_t nBindFlags, int nTextureVar, int nFrameVar = -1 )
{
	Assert( nTextureVar != -1 );
	Assert( CBaseShader::GetPPParams() );
	if ( CBaseShader::GetPPParams()[nTextureVar]->IsTextureValueInternalEnvCubemap() )
	{
		// The per-instance local env cubemap must go through the standard-
		// texture path so bind flags are captured correctly.
		BindStandardTexture( nSampler, nBindFlags, TEXTURE_LOCAL_ENV_CUBEMAP );
	}
	else
	{
		ShaderAPITextureHandle_t hTexture = pShader->GetShaderAPITextureBindHandle( nTextureVar, nFrameVar );
		BindTexture( nSampler, nBindFlags, hTexture );
	}
}
// Binds the two sub-textures of a multi-texture material var (texture indices
// 0 and 1) to two samplers with the same bind flags.
FORCEINLINE void BindMultiTexture( CBaseShader *pShader, Sampler_t nSampler1, Sampler_t nSampler2, TextureBindFlags_t nBindFlags, int nTextureVar, int nFrameVar )
{
	ShaderAPITextureHandle_t hTexture = pShader->GetShaderAPITextureBindHandle( nTextureVar, nFrameVar, 0 );
	BindTexture( nSampler1, nBindFlags, hTexture );
	hTexture = pShader->GetShaderAPITextureBindHandle( nTextureVar, nFrameVar, 1 );
	BindTexture( nSampler2, nBindFlags, hTexture );
}
// Emits a command that selects the dynamic pixel shader combo index.
FORCEINLINE void SetPixelShaderIndex( int nIndex )
{
	this->m_Storage.PutInt( CBCMD_SET_PSHINDEX );
	this->m_Storage.PutInt( nIndex );
}
// Emits a command that selects the dynamic vertex shader combo index.
FORCEINLINE void SetVertexShaderIndex( int nIndex )
{
	this->m_Storage.PutInt( CBCMD_SET_VSHINDEX );
	this->m_Storage.PutInt( nIndex );
}
// Emits a command that writes the depth-feathering blend scale into the given
// constant register at execution time.
FORCEINLINE void SetDepthFeatheringShaderConstants( int iConstant, float fDepthBlendScale )
{
	this->m_Storage.PutInt( CBCMD_SET_DEPTH_FEATHERING_CONST );
	this->m_Storage.PutInt( iConstant );
	this->m_Storage.PutFloat( fDepthBlendScale );
}
// Emits a command that uploads the flashlight state for the vertex shader,
// starting at the given constant register.
FORCEINLINE void SetVertexShaderFlashlightState( int iConstant )
{
	this->m_Storage.PutInt( CBCMD_SET_VERTEX_SHADER_FLASHLIGHT_STATE );
	this->m_Storage.PutInt( iConstant );
}
// Serializes the full pixel shader flashlight state. Field order here must
// match the command interpreter's read order exactly.
FORCEINLINE void SetPixelShaderFlashlightState( const CBCmdSetPixelShaderFlashlightState_t &state )
{
	this->m_Storage.PutInt( CBCMD_SET_PIXEL_SHADER_FLASHLIGHT_STATE );
	this->m_Storage.PutInt( state.m_LightSampler );
	this->m_Storage.PutInt( state.m_DepthSampler );
	this->m_Storage.PutInt( state.m_ShadowNoiseSampler );
	this->m_Storage.PutInt( state.m_nColorConstant );
	this->m_Storage.PutInt( state.m_nAttenConstant );
	this->m_Storage.PutInt( state.m_nOriginConstant );
	this->m_Storage.PutInt( state.m_nDepthTweakConstant );
	this->m_Storage.PutInt( state.m_nScreenScaleConstant );
	this->m_Storage.PutInt( state.m_nWorldToTextureConstant );
	// The two bools are widened to ints in the stream.
	this->m_Storage.PutInt( state.m_bFlashlightNoLambert );
	this->m_Storage.PutInt( state.m_bSinglePassFlashlight );
}
// Serializes the uberlight constant register assignments; order must match
// the command interpreter's read order.
FORCEINLINE void SetPixelShaderUberLightState( int iEdge0Const, int iEdge1Const, int iEdgeOOWConst, int iShearRoundConst, int iAABBConst, int iWorldToLightConst )
{
	this->m_Storage.PutInt( CBCMD_SET_PIXEL_SHADER_UBERLIGHT_STATE );
	this->m_Storage.PutInt( iEdge0Const );
	this->m_Storage.PutInt( iEdge1Const );
	this->m_Storage.PutInt( iEdgeOOWConst );
	this->m_Storage.PutInt( iShearRoundConst );
	this->m_Storage.PutInt( iAABBConst );
	this->m_Storage.PutInt( iWorldToLightConst );
}
// Emits an unconditional jump to another command buffer (no return).
FORCEINLINE void Goto( uint8 *pCmdBuf )
{
	this->m_Storage.PutInt( CBCMD_JUMP );
	this->m_Storage.PutPtr( pCmdBuf );
}
// Emits a subroutine call to another command buffer (returns to the caller).
FORCEINLINE void Call( uint8 *pCmdBuf )
{
	this->m_Storage.PutInt( CBCMD_JSR );
	this->m_Storage.PutPtr( pCmdBuf );
}
#ifndef _PS3
// Rewinds the underlying storage so the buffer can be rebuilt from scratch.
// Not available on PS3 builds.
FORCEINLINE void Reset( void )
{
	this->m_Storage.Reset();
}
#endif
// Returns the number of bytes written so far.
FORCEINLINE size_t Size( void ) const
{
	return this->m_Storage.Size();
}
// Returns a pointer to the start of the serialized command stream.
FORCEINLINE uint8 *Base( void )
{
	return this->m_Storage.Base();
}
// Emits a command that writes the near/far Z planes into the given vertex
// shader constant register at execution time.
FORCEINLINE void SetVertexShaderNearAndFarZ( int iRegNum )
{
	this->m_Storage.PutInt( CBCMD_SET_VERTEX_SHADER_NEARZFARZ_STATE );
	this->m_Storage.PutInt( iRegNum );
}
};
//-----------------------------------------------------------------------------
// Builds a command buffer specifically for per-instance state setting
//-----------------------------------------------------------------------------
// Serializes per-instance state commands (CBICMD_* opcodes). Each method
// appends an opcode plus its operands to the storage S; the stream is later
// interpreted when instances are rendered. End() must terminate the stream.
template<class S> class CInstanceCommandBufferBuilder : public CBaseCommandBufferBuilder< S >
{
	typedef CBaseCommandBufferBuilder< S > PARENT;
public:
	// Terminates the command stream.
	FORCEINLINE void End( void )
	{
		this->m_Storage.PutInt( CBICMD_END );
	}
	// Write per-instance local lighting into pixel shader constant nConst.
	FORCEINLINE void SetPixelShaderLocalLighting( int nConst )
	{
		this->m_Storage.PutInt( CBICMD_SETPIXELSHADERLOCALLIGHTING );
		this->m_Storage.PutInt( nConst );
	}
	// Write the ambient light cube into pixel shader constants at nConst.
	FORCEINLINE void SetPixelShaderAmbientLightCube( int nConst )
	{
		this->m_Storage.PutInt( CBICMD_SETPIXELSHADERAMBIENTLIGHTCUBE );
		this->m_Storage.PutInt( nConst );
	}
	// Upload per-instance local lighting for the vertex shader (fixed regs).
	FORCEINLINE void SetVertexShaderLocalLighting( )
	{
		this->m_Storage.PutInt( CBICMD_SETVERTEXSHADERLOCALLIGHTING );
	}
	// Upload the ambient light cube for the vertex shader (fixed regs).
	FORCEINLINE void SetVertexShaderAmbientLightCube( void )
	{
		this->m_Storage.PutInt( CBICMD_SETVERTEXSHADERAMBIENTLIGHTCUBE );
	}
	// Upload the instance's skinning (bone) matrices.
	FORCEINLINE void SetSkinningMatrices( void )
	{
		this->m_Storage.PutInt( CBICMD_SETSKINNINGMATRICES );
	}
	// Write the ambient cube's luminance into pixel shader constant nConst.
	FORCEINLINE void SetPixelShaderAmbientLightCubeLuminance( int nConst )
	{
		this->m_Storage.PutInt( CBICMD_SETPIXELSHADERAMBIENTLIGHTCUBELUMINANCE );
		this->m_Storage.PutInt( nConst );
	}
	// Write the glint damping factor into pixel shader constant nConst.
	FORCEINLINE void SetPixelShaderGlintDamping( int nConst )
	{
		this->m_Storage.PutInt( CBICMD_SETPIXELSHADERGLINTDAMPING );
		this->m_Storage.PutInt( nConst );
	}
	// The SetModulation* family serializes a modulation color (and optional
	// scale) in several color-space variants; the opcode selects how the
	// interpreter combines it with per-instance data. The color is padded to
	// a 4-float SIMD vector where noted.
	FORCEINLINE void SetModulationPixelShaderDynamicState_LinearColorSpace_LinearScale( int nConst, const Vector &vecGammaSpaceColor2Factor, float scale )
	{
		this->m_Storage.PutInt( CBICMD_SETMODULATIONPIXELSHADERDYNAMICSTATE_LINEARCOLORSPACE_LINEARSCALE );
		this->m_Storage.PutInt( nConst );
		this->m_Storage.Put( vecGammaSpaceColor2Factor );
		this->m_Storage.PutFloat( 1.0 ); // pad for vector4
		this->m_Storage.PutFloat( scale );
	}
	FORCEINLINE void SetModulationPixelShaderDynamicState_LinearScale( int nConst, const Vector &vecGammaSpaceColor2Factor, float scale )
	{
		this->m_Storage.PutInt( CBICMD_SETMODULATIONPIXELSHADERDYNAMICSTATE_LINEARSCALE );
		this->m_Storage.PutInt( nConst );
		this->m_Storage.Put( vecGammaSpaceColor2Factor );
		this->m_Storage.PutFloat( 1.0 ); // alpha modulation wants 1 1.0 here for simd
		this->m_Storage.PutFloat( scale );
	}
	FORCEINLINE void SetModulationPixelShaderDynamicState_LinearScale_ScaleInW( int nConst, const Vector &vecGammaSpaceColor2Factor, float scale )
	{
		this->m_Storage.PutInt( CBICMD_SETMODULATIONPIXELSHADERDYNAMICSTATE_LINEARSCALE_SCALEINW );
		this->m_Storage.PutInt( nConst );
		this->m_Storage.Put( vecGammaSpaceColor2Factor );
		this->m_Storage.PutFloat( scale );
	}
	FORCEINLINE void SetModulationPixelShaderDynamicState_LinearColorSpace( int nConst, const Vector &vecGammaSpaceColor2Factor )
	{
		this->m_Storage.PutInt( CBICMD_SETMODULATIONPIXELSHADERDYNAMICSTATE_LINEARCOLORSPACE );
		this->m_Storage.PutInt( nConst );
		this->m_Storage.Put( vecGammaSpaceColor2Factor );
		this->m_Storage.PutFloat( 1.0 ); // pad with a 1 for vector4d simd access. Important that this be a 1 because alpha is multipled by it.
	}
	FORCEINLINE void SetModulationPixelShaderDynamicState( int nConst, const Vector &vecGammaSpaceColor2Factor )
	{
		this->m_Storage.PutInt( CBICMD_SETMODULATIONPIXELSHADERDYNAMICSTATE );
		this->m_Storage.PutInt( nConst );
		this->m_Storage.Put( vecGammaSpaceColor2Factor );
	}
	// Identity modulation: only the constant register index is serialized.
	FORCEINLINE void SetModulationPixelShaderDynamicState_Identity( int nConst )
	{
		this->m_Storage.PutInt( CBICMD_SETMODULATIONPIXELSHADERDYNAMICSTATE_IDENTITY );
		this->m_Storage.PutInt( nConst );
	}
	FORCEINLINE void SetModulationVertexShaderDynamicState( int nConst, const Vector &vecGammaSpaceColor2Factor )
	{
		this->m_Storage.PutInt( CBICMD_SETMODULATIONVERTEXSHADERDYNAMICSTATE );
		this->m_Storage.PutInt( nConst );
		this->m_Storage.Put( vecGammaSpaceColor2Factor );
	}
	FORCEINLINE void SetModulationVertexShaderDynamicState_LinearScale( int nConst, const Vector &vecGammaSpaceColor2Factor, float flScale )
	{
		this->m_Storage.PutInt( CBICMD_SETMODULATIONVERTEXSHADERDYNAMICSTATE_LINEARSCALE );
		this->m_Storage.PutInt( nConst );
		this->m_Storage.Put( vecGammaSpaceColor2Factor );
		this->m_Storage.PutFloat( flScale );
	}
};
#endif // commandbuilder_h
|
package scientifik.kmath.prob
import scientifik.kmath.chains.Chain
import scientifik.kmath.chains.ConstantChain
import scientifik.kmath.chains.map
import scientifik.kmath.chains.zip
import scientifik.kmath.operations.Space
/**
 * A [Sampler] that delegates chain construction to [chainBuilder], which maps
 * a [RandomGenerator] to a [Chain] of samples.
 */
class BasicSampler<T : Any>(val chainBuilder: (RandomGenerator) -> Chain<T>) : Sampler<T> {
    override fun sample(generator: RandomGenerator): Chain<T> = chainBuilder(generator)
}
/**
 * A [Sampler] that always yields the same [value], ignoring the generator.
 */
class ConstantSampler<T : Any>(val value: T) : Sampler<T> {
    override fun sample(generator: RandomGenerator): Chain<T> = ConstantChain(value)
}
/**
 * A space for samplers. Allows to perform simple operations on distributions.
 *
 * Operations are applied element-wise to the sample chains: addition zips the
 * two chains through [space.add], and scalar multiplication maps each sample.
 */
class SamplerSpace<T : Any>(val space: Space<T>) : Space<Sampler<T>> {
    // Zero sampler: constantly emits the underlying space's zero element.
    override val zero: Sampler<T> = ConstantSampler(space.zero)
    override fun add(a: Sampler<T>, b: Sampler<T>): Sampler<T> = BasicSampler { generator ->
        a.sample(generator).zip(b.sample(generator)) { aValue, bValue -> space.run { aValue + bValue } }
    }
    override fun multiply(a: Sampler<T>, k: Number): Sampler<T> = BasicSampler { generator ->
        // NOTE(review): k is narrowed to Double before the scalar product.
        a.sample(generator).map { space.run { it * k.toDouble() } }
    }
}
|
/**
* @fileOverview Processing Jobs with kickq
*/
var sinon = require('sinon');
var grunt = require('grunt');
var assert = require('chai').assert;
var kickq = require('../../');
var tester = require('../lib/tester');
var jobItem = require('./jobItem.test');
var when = require('when');
var noop = function(){};
suite('2.0 Job Processing', function() {
// Fresh kickq configuration and a clean redis test namespace before each test.
setup(function(done) {
  kickq.reset();
  kickq.config({
    redisNamespace: tester.NS
  });
  tester.clear(done);
});
// Reset and clear again so no jobs leak into the next test.
teardown(function(done) {
  kickq.reset();
  tester.clear(done);
});
// Verifies the shape of the job instance handed to a worker callback.
// FIX: the test callback previously was `function()` — `done` was never
// declared, so the `done()` call inside the worker threw a ReferenceError
// and the test ran as (incorrectly) synchronous.
test('2.0.1 The job instance argument', function(done) {
  var jobid;
  kickq.create('process-test-one', 'data', {}, function(err, key) {
    jobid = key;
  });
  kickq.process('process-test-one', function(job, data, cb) {
    jobItem.testNewItemPropsType(job);
    assert.equal(jobid, job.id, 'The job id should be the same');
    assert.equal(job.name, 'process-test-one', 'The job name should be the same');
    assert.equal(job.state, 'processing', 'State should be "processing"');
    assert.equal(job.runs.length, 1, 'there should be one process item');
    var processItem = job.runs[0];
    jobItem.testPtemItem(processItem);
    assert.equal(processItem.count, 1, 'The process count should be 1 (the first)');
    assert.equal(processItem.state, 'processing', 'The process item should be "processing"');
    cb();
    done();
  });
});
// Creates 20 jobs and asserts that with concurrentJobs: 10 only 10 workers
// are dispatched within the settling window. (Removed the unused
// `jobProcessQueue` local.)
test('2.0.2 Concurrent jobs', function(done) {
  // allow some time to execute
  this.timeout(5000);
  // create 20 jobs
  var jobPromises = [];
  for (var i = 0; i < 20; i++) {
    jobPromises.push(kickq.create('process-test-Concurrent'));
  }
  var jobProcessCount = 0;
  function startProcess() {
    var opts = {concurrentJobs: 10};
    // NOTE(review): cb is intentionally never invoked so jobs stay
    // "processing" and occupy their concurrency slots.
    kickq.process('process-test-Concurrent', opts, function(jobObj, data, cb) {
      jobProcessCount++;
    });
    // allow for all to-process jobs to be collected
    setTimeout(function(){
      assert.equal(jobProcessCount, 10, '10 jobs should be queued for processing');
      done();
    }, 3000);
  }
  when.all(jobPromises).then(startProcess);
});
//
// TODO when we expose a .get() method so we can fetch the job and examine it
//
// test('2.0.3 Process creates ghost by never reporting outcome', function(done){
// var clock = sinon.useFakeTimers();
// var firstPop = true;
// kickq.create('process-ghost', {processTimeout:2000}).then(function(){
// kickq.process('process-ghost', function(jobObj, data, cb) {
// if (firstPop) {
// firstPop = false;
// clock.tick(2100);
// return;
// }
// });
// });
// });
});
|
<?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
class CreateDepositingreceipt extends Migration
{
    /**
     * Run the migrations.
     *
     * Creates the `depositingreceipt` table: one row per flour deposit
     * receipt, referencing a baker, a city, a flour product and a flour
     * factory. (Builder methods normalized to Laravel's lower-camel casing;
     * bigIncrements() is already unsigned, so the redundant ->unsigned()
     * call was dropped.)
     *
     * @return void
     */
    public function up()
    {
        Schema::create('depositingreceipt', function (Blueprint $table) {
            $table->bigIncrements('id');
            $table->bigInteger('baker_id')->unsigned();
            $table->foreign('baker_id')->references('id')->on('bakers');
            $table->bigInteger('city_id')->unsigned();
            $table->foreign('city_id')->references('id')->on('city');
            $table->date('deposit_date');
            $table->integer('deposit_amount');
            $table->string('flour_type', 50);
            $table->bigInteger('flour_id')->unsigned();
            $table->foreign('flour_id')->references('id')->on('products');
            $table->integer('number_bags');
            $table->bigInteger('flourfactory_id')->unsigned();
            $table->foreign('flourfactory_id')->references('id')->on('flourfactory');
            $table->integer('branch_code');
            $table->timestamps();
        });
    }

    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::dropIfExists('depositingreceipt');
    }
}
|
{-# LANGUAGE LambdaCase, FlexibleContexts #-}
module UI where
import System.Console.Haskeline
import qualified System.Console.Haskeline.Brick as HB
import Brick
import Brick.BChan
import qualified Brick.Widgets.Center as C
import qualified Brick.Widgets.Border as B
import qualified Graphics.Vty as V
import Parser
import Meaning
import qualified Runner as R
import Data.Foldable (traverse_)
import Data.List (isPrefixOf)
import qualified Data.Map.Lazy as M
import Control.Monad (void)
import Control.Monad.State.Lazy
import Control.Concurrent (forkFinally)
import Control.Monad.IO.Class (liftIO, MonadIO)
import System.Directory (getAppUserDataDirectory)
import Data.Maybe (fromMaybe)
-- | Custom Brick events delivered over the app's event channel.
data Event = StateUpdated R.State             -- ^ interpreter produced a new state
           | FromHBWidget HB.ToBrick          -- ^ message from the haskeline widget
           | HaskelineDied (Either SomeException ())  -- ^ REPL thread finished
-- | Names for the app's addressable widgets/viewports.
data AppName = TheApp | HaskelineWidget | StackViewport
    deriving (Ord, Eq, Show)
-- | UI state: the embedded haskeline widget plus the latest interpreter
-- state (Nothing until the first evaluation).
data AppState = AppState { haskelineWidget :: HB.Widget AppName
                         , joyState :: Maybe R.State
                         }
-- | Starting state: empty REPL widget, no interpreter state yet.
initialAppState :: AppState
initialAppState =
    AppState { haskelineWidget = HB.initialWidget HaskelineWidget
             , joyState = Nothing
             }
-- | Brick application definition; the cursor always follows the REPL widget.
app :: HB.Config Event -> App AppState Event AppName
app c = App { appDraw = drawUI
            , appChooseCursor = \_ -> showCursorNamed HaskelineWidget
            , appHandleEvent = handleEvent c
            , appStartEvent = return
            , appAttrMap = const theMap
            }
-- | Forward every event to the haskeline widget first, then to the app's
-- own handler with the updated widget state.
handleEvent :: HB.Config Event
            -> AppState -> BrickEvent AppName Event
            -> EventM AppName (Next AppState)
handleEvent c s@AppState{haskelineWidget = hw} e = do
    hw' <- HB.handleEvent c hw e
    handleAppEvent (s { haskelineWidget = hw' }) e
-- | App-level event handling: quit when the REPL thread dies, record new
-- interpreter states, ignore everything else. (The unused exception binding
-- is now a wildcard to silence -Wunused-matches.)
handleAppEvent :: AppState -> BrickEvent AppName Event
               -> EventM AppName (Next AppState)
handleAppEvent s (AppEvent (HaskelineDied _)) = halt s
handleAppEvent s (AppEvent (StateUpdated st)) = continue $
    s { joyState = Just st }
handleAppEvent s _ = continue s
-- | Layout: REPL widget on the left, bordered 20-column stack view on the
-- right (empty until the first interpreter state arrives).
drawUI :: AppState -> [Widget AppName]
drawUI s = [HB.render (haskelineWidget s) <+> stackWidget]
  where
    stackWidget =
        B.border $ hLimit 20 $ viewport StackViewport Vertical $
        vBox $ (map $ str . show) $
        fromMaybe [] (R.stack <$> joyState s)
-- | No custom attributes; everything renders with the terminal default.
theMap :: AttrMap
theMap = attrMap V.defAttr []
-- | Start the TUI: spawn the haskeline REPL on its own thread and run the
-- Brick event loop on this one; the REPL's death is reported over the
-- channel so the UI halts too.
runTui :: IO ()
runTui = do
    chan <- newBChan 10
    config <- HB.configure
                chan
                FromHBWidget
                (\case { FromHBWidget x -> Just x; _ -> Nothing })
    _ <- forkFinally
           (runTuiInputT config chan)
           (writeBChan chan . HaskelineDied)
    void $ customMain
        (V.mkVty V.defaultConfig)
        (Just chan)
        (app config)
        initialAppState
-- | The REPL loop that runs inside the Brick-embedded haskeline widget.
-- Every successful evaluation publishes the new interpreter state on the
-- channel so the stack view refreshes. EOF (Nothing) ends the loop.
runTuiInputT :: HB.Config Event -> BChan Event -> IO ()
runTuiInputT c chan = do
    hs <- haskelineSettings
    (flip evalStateT) R.initialState . runInputTBehavior (HB.useBrick c) hs $ loop
  where
    loop :: InputT (StateT R.State IO) ()
    loop = do
        minput <- getInputLine "> "
        case minput of
            Just unparsed -> doParsing unparsed
            Nothing -> return ()
    doParsing :: String -> InputT (StateT R.State IO) ()
    doParsing unparsed =
        case parse unparsed of
            Right parsed -> do
                s <- lift get
                x <- R.runInputT s . meaning $ parsed
                case x of
                    Right (s', ()) -> do
                        lift (put s')
                        -- Tell the UI about the new state so it can redraw.
                        liftIO $ writeBChan chan $ StateUpdated s'
                        loop
                    Left e -> (outputStrLn $ show e) >> loop
            Left e -> outputStrLn e >> loop
-- | Plain terminal REPL (no Brick UI). Supports a ":s" command that dumps
-- the current stack. EOF (Nothing) ends the loop.
runRepl :: IO ()
runRepl = do
    hs <- haskelineSettings
    (flip evalStateT) R.initialState . runInputT hs $ loop
  where
    loop :: InputT (StateT R.State IO) ()
    loop = do
        minput <- getInputLine "> "
        case minput of
            -- Any input starting with ":s" prints the stack.
            Just (':' : 's' : _) -> dumpStack
            Just unparsed -> doParsing unparsed
            Nothing -> return ()
    dumpStack = do
        s <- lift get
        traverse_ (outputStrLn . show) (R.stack s)
        loop
    doParsing :: String -> InputT (StateT R.State IO) ()
    doParsing unparsed =
        case parse unparsed of
            Right parsed -> do
                s <- lift get
                x <- R.runInputT s . meaning $ parsed
                case x of
                    Right (s', ()) -> lift (put s') >> loop
                    Left e -> (outputStrLn $ show e) >> loop
            Left e -> (outputStrLn e) >> loop
-- Leverage the MonadException from haskeline and the StateT
-- from mtl.
-- This piece of code is taken verbatim from here:
-- https://hackage.haskell.org/package/haskeline-0.7.4.0/docs/src/System.Console.Haskeline.MonadException.html#line-152
instance MonadException m => MonadException (StateT s m) where
    controlIO f = StateT $ \s -> controlIO $ \run ->
        fmap (flip runStateT s) $ f $ stateRunIO s run
      where
        stateRunIO :: s -> RunIO m -> RunIO (StateT s m)
        stateRunIO s (RunIO run) = RunIO (\m -> fmap (StateT . const)
                                                $ run (runStateT m s))
-- | Haskeline settings shared by both REPLs: persistent history in the
-- user's app-data directory and word completion from the interpreter's
-- dictionary keys.
haskelineSettings :: (MonadState R.State m, MonadIO m)
                  => IO (Settings m)
haskelineSettings = do
    hf <- getAppUserDataDirectory "silly-joy.history"
    return $ Settings { historyFile = Just hf
                      , complete = completer
                      , autoAddHistory = True
                      }
  where completer = completeWord Nothing [' ', '\t'] $ \w -> do
            s <- get
            return $ map simpleCompletion
                   $ filter (isPrefixOf w) . M.keys . R.dict
                   $ s
|
require 'spec_helper'
require 'industry'
describe Industry do
  # Smoke test: the simulation should complete without raising.
  # NOTE(review): the spec describes Industry but drives Universe.simulate! —
  # confirm Universe is the intended entry point for the industry simulation.
  it 'should simulate industry' do
    expect { Universe.simulate! }.not_to raise_error
  end
end
|
# Copyright (c) 2006 Dave Vasilevsky
package Nova::Util;
use strict;
use warnings;
use base qw(Exporter);
use List::Util qw(max min sum);
our @EXPORT_OK = qw(deaccent commaNum termWidth wrap prettyPrint printIter
makeFilter regexFilter printable indent);
=head1 NAME
Nova::Util - Miscellaneous utilities
=head1 SYNOPSIS
my $str = deaccent($str);
my $str = commaNum($num);
my $width = termWidth;
=cut
# $str = deaccent($str);
#
# Remove accents from a resource type, and canonicalizes is to lower-case.
# Eg: mïsn => misn
# deaccent($str) -> canonical lower-case resource-type name.
# Folds the lowercase accented vowels (a/e/i/o/u/y umlaut forms) to plain
# ASCII, then lower-cases the result. Eg: "mïsn" becomes "misn".
sub deaccent {
	my ($type) = @_;
	(my $plain = $type) =~ tr/\x{e4}\x{eb}\x{ef}\x{f6}\x{fc}\x{ff}/aeiouy/;
	return lc($plain);
}
# Get the comma-delimited form of the given number. Eg: 1234567 => 1,234,567
# Get the comma-delimited form of the given number. Eg: 1234567 => 1,234,567
# FIX: negative numbers previously fell through the "< 1000" early return and
# were never comma-grouped; now the sign is peeled off and re-attached.
sub commaNum {
	my ($n) = @_;
	return '-' . commaNum(-$n) if $n < 0;
	return $n if $n < 1000;
	return commaNum(int($n/1000)) . sprintf ",%03d", $n % 1000;
}
# Get the width of the terminal
# Get the width of the terminal. Tries Fink::CLI if available, then the
# COLUMNS environment variable, and finally defaults to 80.
sub termWidth {
	if (eval { require Fink::CLI }) {
		my $w = Fink::CLI::get_term_width();
		return $w if $w;
	}
	if (exists $ENV{COLUMNS} && $ENV{COLUMNS}) {
		return $ENV{COLUMNS};
	}
	return 80;
}
# wrap($text, $first, $rest);
#
# Wrap a line of text.
# wrap($text, $first, $rest);
#
# Wrap a line of text to the terminal width. $first and $rest are the
# prefixes for the first and subsequent lines (default: empty). The text is
# run through printable() first to strip wide characters.
sub wrap {
	my ($text, $first, $rest) = @_;
	$first = '' unless defined $first;
	$rest = '' unless defined $rest;
	require Text::Wrap;
	local $Text::Wrap::columns = termWidth;
	return Text::Wrap::wrap($first, $rest, printable($text));
}
# prettyPrint($text);
#
# Print some text nicely
# prettyPrint($text);
#
# Print some text nicely: wrapped to the terminal width, wide chars stripped.
sub prettyPrint {
	my ($text) = @_;
	print wrap($text);
}
# printIter { $code }, $iter, $verb;
#
# Print the results of applying a code-block to an iterator's contents
# printIter { $code } $iter, $verb;
#
# Print the results of applying a code-block to an iterator's contents.
# The block sees each item in $_ and returns the string to print (falsy
# return skips the item). $verb controls the blank-line spacing between
# entries (0 => single newline, >=1 => blank line).
sub printIter (&$$) {
	my ($code, $iter, $verb) = @_;
	my $found = 0;
	my $delim = "\n" x min(2, $verb + 1);
	while (defined(local $_ = $iter->next)) {
		my $s = $code->();
		next unless $s;
		# Separator goes before every entry except the first.
		print $delim if $found++;
		prettyPrint $s;
	}
}
# Compile a filter sub from a snippet of Perl code; dies (with the original
# spec in the message) if the snippet does not compile.
# SECURITY NOTE(review): this string-evals the user-supplied spec, so filter
# specs must only come from a trusted operator (the interactive CLI user).
sub _filterFromCode {
	my ($code, $spec) = @_;
	my $filt = eval "sub { $code }";
	die "Bad filter '$spec': $@\n" if $@;
	return $filt;
}
# Make a filter from a specification
# Make a filter from a specification. The spec may be, in priority order:
# a regex (see regexFilter), raw Perl code using $_, a relational operator
# applied to $_, a bare number (numeric equality), or any other string
# (string equality, metacharacters quoted).
sub makeFilter {
	my ($spec) = @_;
	my $code;
	if (defined (my $filt = regexFilter($spec))) {
		return $filt; # Regex
	} elsif ($spec =~ /\$_/) {
		$code = $spec; # Code
	} elsif ($spec =~ /^\s*([><=!]+|eq|ne|ge|le|gt|lt)/) {
		$code = "\$_ $spec"; # Relation
	} elsif ($spec =~ /^\s*-?\d[_\d]*([eE]-?\d+)?(\.\d*)?\s*$/) {
		$code = "\$_ == $spec"; # Numeric equality
	} else {
		$code = "\$_ eq \"\Q$spec\E\""; # String equality
	}
	return _filterFromCode($code, $spec);
}
# Make a filter from a regex spec. If it doesn't look like a regex, return
# undef.
# Make a filter from a regex spec. Accepts /.../, m?...? with an arbitrary
# non-word delimiter, or m with bracketing delimiters, each with optional
# imsx flags. If it doesn't look like a regex, return undef.
sub regexFilter {
	my ($spec) = @_;
	if ($spec =~ m,^\s*/.*/[imsx]*\s*$,
			|| $spec =~ /^\s*m(\W).*\1[imsx]*\s*$/
			|| $spec =~ /^\s*m[[<({].*[]>)}][imsx]*\s*$/) {
		return _filterFromCode($spec, $spec);
	} else {
		return undef;
	}
}
# Get a printable version of a string, with no wide chars
# Get a printable version of a string, with no wide chars: smart quotes,
# dashes and various symbols are replaced with ASCII approximations.
sub printable {
	local $_ = shift;
	s/[\x{2018}\x{2019}]/'/g; # smart single quote
	s/[\x{201c}\x{201d}]/"/g; # smart double quote
	s/\x{2026}/.../g; # ellipsis
	s/\x{b0}/ deg/g; # degree symbol
	s/\x{2122}/(TM)/g; # trademark symbol
	s/\x{ae}/(R)/g; # registered symbol
	s/\x{2211}/Sigma/g; # sigma symbol
	s/\x{3c0}/Pi/g; # pi symbol
	s/\x{2202}/d/g; # partial differential, assume it means 'd'
	# NOTE(review): U+00A3 is the POUND sign and U+00A7 is the SECTION sign;
	# the original comments had them swapped, and the replacements ('S' for
	# the pound sign, '$' for the section sign) may also be swapped. Code
	# left untouched pending confirmation.
	s/\x{a3}/S/g; # pound sign (used as noise)
	s/\x{a7}/\$/g; # section sign (used as noise)
	s,\x{b1},+/-,g; # +/- symbol (used as noise)
	s/\x{2014}/--/g; # em dash
	s/\x{2013}/-/g; # en dash
	return $_;
}
# Re-wrap each line of a multi-line string, indenting continuation lines
# two spaces past the line's original leading whitespace.
sub indent {
	my ($str) = @_;
	my $ret = '';
	for my $line (split /\n/, $str) {
		my ($ind) = ($line =~ /^(\s*)/);
		$ret .= wrap($line, '', "$ind  ") . "\n";
	}
	return $ret;
}
1;
|
// Application-wide configuration for the movie API server.
const appConfig = {
  port: 3000,                    // HTTP listen port
  allowedCorsOrigin: '*',        // CORS: any origin allowed
  // NOTE(review): hard-coded auth token and DB URI — consider sourcing
  // these from environment variables for non-dev deployments.
  authToken: 'securemovie',
  db: {
    uri: 'mongodb://127.0.0.1:27017/movieApp'
  },
  apiVersion: '1.0.0'
};
module.exports = { appConfig };
|
//! Affine transformation matrix
// |x'| |a b c| |x|
// |y'| = |d e f| |y|
// |1 | |0 0 1| |1 |
// For optimization, each element of the matrix is a fixed-point number.
// sin(), cos() for no_std environment
use micromath::F32Ext;
// scaling factor: number of fractional bits in the fixed-point representation
const FIXED_POINT_FRAC_BITS: i32 = 10;
// fixed-point representation of 1.0
const FIXED_POINT_ONE: i32 = 1 << FIXED_POINT_FRAC_BITS;

/// 2x3 affine transformation matrix:
///   |x'|   |a b c| |x|
///   |y'| = |d e f| |y|
///   |1 |   |0 0 1| |1|
/// Each element is a fixed-point number with FIXED_POINT_FRAC_BITS
/// fractional bits.
pub struct AffineMatrix {
    a: i32,
    b: i32,
    c: i32,
    d: i32,
    e: i32,
    f: i32,
}

impl AffineMatrix {
    /// Generates an identity matrix.
    pub fn new() -> Self {
        AffineMatrix {
            a: FIXED_POINT_ONE,
            b: 0,
            c: 0,
            d: 0,
            e: FIXED_POINT_ONE,
            f: 0,
        }
    }

    /// Applies the transform to an integer point. The result is shifted back
    /// to integers and masked with 0x7f, i.e. coordinates wrap modulo 128.
    pub fn transform(&self, x: u32, y: u32) -> (u32, u32) {
        let x_new = ((self.a * x as i32 + self.b * y as i32 + self.c) as u32
            >> FIXED_POINT_FRAC_BITS)
            & 0x7f;
        let y_new = ((self.d * x as i32 + self.e * y as i32 + self.f) as u32
            >> FIXED_POINT_FRAC_BITS)
            & 0x7f;
        (x_new, y_new)
    }

    /// Product of two affine matrices: self <- self * m.
    ///
    /// Overflow may occur in this calculation. We can ignore overflows
    /// because the MSBs of the final result are masked in transform(), but
    /// Rust panics on overflow in debug builds, so wrapping_add()/
    /// wrapping_mul() are used. What we want to do is:
    ///   t.a = (self.a * m.a + self.b * m.d) >> FIXED_POINT_FRAC_BITS;
    ///   t.b = (self.a * m.b + self.b * m.e) >> FIXED_POINT_FRAC_BITS;
    ///   t.c = ((self.a * m.c + self.b * m.f) >> FIXED_POINT_FRAC_BITS) + self.c;
    ///   t.d = (self.d * m.a + self.e * m.d) >> FIXED_POINT_FRAC_BITS;
    ///   t.e = (self.d * m.b + self.e * m.e) >> FIXED_POINT_FRAC_BITS;
    ///   t.f = ((self.d * m.c + self.e * m.f) >> FIXED_POINT_FRAC_BITS) + self.f;
    fn apply(&mut self, m: Self) {
        let mut t = Self::new();
        t.a = (self.a.wrapping_mul(m.a)).wrapping_add(self.b.wrapping_mul(m.d))
            >> FIXED_POINT_FRAC_BITS;
        t.b = (self.a.wrapping_mul(m.b)).wrapping_add(self.b.wrapping_mul(m.e))
            >> FIXED_POINT_FRAC_BITS;
        t.c = ((self.a.wrapping_mul(m.c)).wrapping_add(self.b.wrapping_mul(m.f))
            >> FIXED_POINT_FRAC_BITS)
            .wrapping_add(self.c);
        t.d = (self.d.wrapping_mul(m.a)).wrapping_add(self.e.wrapping_mul(m.d))
            >> FIXED_POINT_FRAC_BITS;
        t.e = (self.d.wrapping_mul(m.b)).wrapping_add(self.e.wrapping_mul(m.e))
            >> FIXED_POINT_FRAC_BITS;
        t.f = ((self.d.wrapping_mul(m.c)).wrapping_add(self.e.wrapping_mul(m.f))
            >> FIXED_POINT_FRAC_BITS)
            .wrapping_add(self.f);
        *self = t;
    }

    /// Composes a translation by (tx, ty) onto this matrix.
    pub fn translate(&mut self, tx: f32, ty: f32) {
        let t = AffineMatrix {
            a: FIXED_POINT_ONE,
            b: 0,
            c: (tx * FIXED_POINT_ONE as f32) as i32,
            d: 0,
            e: FIXED_POINT_ONE,
            f: (ty * FIXED_POINT_ONE as f32) as i32,
        };
        self.apply(t);
    }

    /// Composes a rotation by `theta` radians onto this matrix.
    pub fn rotate(&mut self, theta: f32) {
        // Hoist sin/cos: the original evaluated each trig function twice.
        let sin_t = (theta.sin() * FIXED_POINT_ONE as f32) as i32;
        let cos_t = (theta.cos() * FIXED_POINT_ONE as f32) as i32;
        let t = AffineMatrix {
            a: cos_t,
            b: -sin_t,
            c: 0,
            d: sin_t,
            e: cos_t,
            f: 0,
        };
        self.apply(t);
    }

    /// Composes a uniform scale onto this matrix.
    pub fn scale(&mut self, scale: f32) {
        let t = AffineMatrix {
            a: (scale * FIXED_POINT_ONE as f32) as i32,
            b: 0,
            c: 0,
            d: 0,
            e: (scale * FIXED_POINT_ONE as f32) as i32,
            f: 0,
        };
        self.apply(t);
    }
}
|
package com.tinymooc.handler.user.controller;
import com.tinymooc.common.domain.Level;
import com.tinymooc.common.domain.Rule;
import com.tinymooc.common.domain.User;
import com.tinymooc.handler.user.service.UserService;
import org.hibernate.criterion.Restrictions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.servlet.ModelAndView;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Controller for user-account pages: profile display and editing, password
 * change, and the gold-based nickname change.
 * <p>
 * Created by 哓哓 on 2015/11/23 0026.
 */
@Controller
public class UserController {
    private static final Logger logger = LoggerFactory.getLogger(UserController.class);

    @Autowired
    private UserService userService;

    /** True when the value is null or the empty string. */
    private static boolean isBlank(String value) {
        // Fixes the original `value == ""` reference comparison, which only
        // matches interned literals and never a user-supplied empty string.
        return value == null || value.isEmpty();
    }

    /** Shows the profile-edit page. */
    @RequestMapping("goAccountRevise.htm")
    public ModelAndView goAccountRevise() {
        return new ModelAndView("userPage/accountRevise");
    }

    /** Shows the account overview page. */
    @RequestMapping("goAccount.htm")
    public ModelAndView goAccount() {
        return new ModelAndView("userPage/account");
    }

    /** Shows the nickname-change page. */
    @RequestMapping("goAccountUserName.htm")
    public ModelAndView goAccountNickname() {
        return new ModelAndView("userPage/accountUserName");
    }

    /** Shows the avatar page. */
    @RequestMapping("goAccountAvatar.htm")
    public ModelAndView goAccountAvatar() {
        return new ModelAndView("userPage/accountAvatar");
    }

    /** Shows the password-change page. */
    @RequestMapping("goAccountPassword.htm")
    public ModelAndView goAccountPassword() {
        return new ModelAndView("userPage/accountPassword");
    }

    /**
     * Validates and applies a password change for the session user.
     * <p>
     * Rejects blank fields, mismatched confirmation, lengths outside 6-25,
     * and non-alphanumeric passwords; on success persists the new password.
     * The outcome message is forwarded back to the password page.
     */
    @RequestMapping("revisePassword.htm")
    public ModelAndView revisePassword(HttpServletRequest request, HttpServletResponse response) {
        User user = (User) request.getSession().getAttribute("user");
        String oldPassword = request.getParameter("oldPassword");
        String newPassword = request.getParameter("newPassword");
        String newPasswordConfirm = request.getParameter("newPasswordConfirm");
        String message = "失败";
        if (isBlank(oldPassword) || isBlank(newPassword) || isBlank(newPasswordConfirm)) {
            message = "密码不能为空";
            request.setAttribute("message", message);
            return new ModelAndView("forward:goAccountPassword.htm");
        }
        if (!newPassword.equals(newPasswordConfirm)) {
            message = "两次密码不一致";
            request.setAttribute("message", message);
            return new ModelAndView("forward:goAccountPassword.htm");
        }
        if (oldPassword.length() < 6 || newPassword.length() < 6 || newPasswordConfirm.length() < 6) {
            message = "密码太短";
            request.setAttribute("message", message);
            return new ModelAndView("forward:goAccountPassword.htm");
        }
        if (oldPassword.length() > 25 || newPassword.length() > 25 || newPasswordConfirm.length() > 25) {
            message = "密码太长";
            request.setAttribute("message", message);
            return new ModelAndView("forward:goAccountPassword.htm");
        }
        // 非法字符检测 — illegal-character check.
        // NOTE(review): matches() only fires when the whole password is a single
        // listed character; the alphanumeric check below rejects the rest, so
        // behavior is preserved as-is.
        String regex = "[`~!@#$%^&*()+=|{}':;',\\[\\].<>/?~!@#¥%……&*()——+|{}【】‘;:”“’。,、?]";
        Pattern myPattern = Pattern.compile(regex);
        Matcher myMatcher = myPattern.matcher(newPassword);
        boolean flag = myMatcher.matches();
        if (flag) {
            message = "非法字符";
            request.setAttribute("message", message);
            return new ModelAndView("forward:goAccountPassword.htm");
        }
        // 字母数字检测 — require digits and ASCII letters only.
        regex = "^[A-Za-z0-9]+$";
        myPattern = Pattern.compile(regex);
        myMatcher = myPattern.matcher(newPassword);
        flag = myMatcher.matches();
        if (!flag) {
            message = "请输入由数字和26个英文字母组成的密码";
            request.setAttribute("message", message);
            return new ModelAndView("forward:goAccountPassword.htm");
        }
        if (user.getPassword().equals(oldPassword)) {
            user.setPassword(newPassword);
            userService.update(user);
            message = "修改成功";
            request.setAttribute("message", message);
            return new ModelAndView("forward:goAccountPassword.htm");
        }
        // Old password did not match: fall through with the default failure message.
        request.setAttribute("message", message);
        return new ModelAndView("forward:goAccountPassword.htm");
    }

    // 获取用户信息和等级 — refresh the session user and the level derived
    // from their credit score.
    public void getUserInfo(HttpServletRequest request, HttpServletResponse response) {
        User user = (User) request.getSession().getAttribute("user");
        int credit = user.getCredit();
        Level level = userService.getUserLevel(credit);
        HttpSession hs = request.getSession();
        // setAttribute overwrites any existing value; the previous
        // removeAttribute calls were redundant.
        hs.setAttribute("user", user);
        hs.setAttribute("level", level);
    }

    /** Refreshes session user/level, then forwards to the account page. */
    @RequestMapping("account.htm")
    public ModelAndView goAccount(HttpServletRequest request, HttpServletResponse response) throws Exception {
        //获取用户信息和等级
        getUserInfo(request, response);
        return new ModelAndView("forward:goAccount.htm");
    }

    /**
     * 用户修改个人信息 — updates gender, intro, city, and birthday from the
     * submitted form. Birthday parts are mandatory; intro and city are only
     * updated when provided.
     */
    @RequestMapping("accountUpdate.htm")
    public ModelAndView account(HttpServletRequest request, HttpServletResponse response) throws Exception {
        User user = (User) request.getSession().getAttribute("user");
        String message = "";
        //获取修改的信息
        String usersex = request.getParameter("userprofile_sex");
        String useryear = request.getParameter("year");
        String usermonth = request.getParameter("month");
        String userday = request.getParameter("day");
        String usercity = request.getParameter("city");
        String userabout = request.getParameter("about");
        String userbirth = useryear + "-" + usermonth + "-" + userday;
        if (!isBlank(userabout)) {
            user.setIntro(userabout);
        }
        if (!isBlank(usercity)) {
            user.setCity(usercity);
        }
        user.setGender(usersex);
        if (!isBlank(useryear) && !isBlank(usermonth) && !isBlank(userday)) {
            SimpleDateFormat sdf1 = new SimpleDateFormat("yyyy-MM-dd");
            Date date = sdf1.parse(userbirth);
            user.setBirthday(date);
            //保存信息
            userService.update(user);
            message = "信息修改成功啦~";
            return new ModelAndView("forward:goAccount.htm", "message", message);
        } else {
            message = "修改的信息不能为空,请重试!";
            return new ModelAndView("forward:goAccountRevise.htm", "message", message);
        }
    }

    /**
     * 与金币关联的个性用户名 — changes the user's nickname, charging gold
     * according to the "修改昵称" rule. Requires at least 5 gold; rejects
     * blank or unchanged nicknames.
     */
    @RequestMapping("accountUserName.htm")
    public ModelAndView accountUserName(HttpServletRequest request, HttpServletResponse response) throws Exception {
        User user = (User) request.getSession().getAttribute("user");
        //声明消息
        String messageNickname = "";
        //读取金币规则---修改昵称
        int gold = user.getGold();
        if (gold >= 5) {
            String userName = request.getParameter("userName");
            if (isBlank(userName)) {
                messageNickname = "昵称不能为空!";
                return new ModelAndView("forward:goAccountUserName.htm", "messageNickname", messageNickname);
            }
            if (user.getUserName().equals(userName)) {
                messageNickname = "昵称没有变化,请重试!";
                return new ModelAndView("forward:goAccountUserName.htm", "messageNickname", messageNickname);
            }
            Rule rule = (Rule) userService.getCurrentSession().createCriteria(Rule.class).add(Restrictions.eq("action", "修改昵称")).uniqueResult();
            gold = gold - rule.getGold();
            //保存信息
            user.setGold(gold);
            user.setUserName(userName);
            request.getSession().setAttribute("user", user);
            userService.update(user);
            messageNickname = "修改成功";
            logger.info("============messageNickname={}=======", messageNickname);
            return new ModelAndView("forward:goAccountUserName.htm", "messageNickname", messageNickname);
        } else {
            logger.info("====进入金币不足========");
            messageNickname = "金币不足";
            getUserInfo(request, response);
            return new ModelAndView("forward:goAccount.htm", "messageNickname", messageNickname);
        }
    }
}
|
import komand
from .schema import MonitorSourcesInput, MonitorSourcesOutput
# Custom imports below
class MonitorSources(komand.Action):
    """Komand action returning merged source-monitoring results.

    Merges: freshness, last_query, sched_report, size_by_source (per the
    action description; the merge itself happens in the connection API).
    """

    def __init__(self):
        # Name the class explicitly: super(self.__class__, self) recurses
        # infinitely if this class is ever subclassed.
        super(MonitorSources, self).__init__(
            name='monitor_sources',
            description='Return merged results from: freshness, last_query, sched_report, size_by_source',
            input=MonitorSourcesInput(),
            output=MonitorSourcesOutput())

    def run(self, params={}):
        """Fetch monitoring data via the connection API.

        :param params: unused action parameters (komand API convention)
        :return: dict with the raw API payload under 'monitor_results'
        """
        monitor_results = self.connection.api.monitor_sources()
        return {'monitor_results': monitor_results}
|
# 大阪 新型コロナウィルス
{% include plotly.html %}
## 感染日と気温/絶対湿度の関係性
詳細は北海道の説明を参照。
{% include osaka-tvh-cont.html %}
### 相対湿度 [%RH] のグラフ
{% include osaka-trh-cont.html %}
|
import { getAdjust, registerAdjust } from './factory';
import Adjust from './adjusts/adjust';
import Dodge from './adjusts/dodge';
import Jitter from './adjusts/jitter';
import Stack from './adjusts/stack';
import Symmetric from './adjusts/symmetric';
// Register the built-in adjust types with the factory so they can be looked
// up by name via getAdjust().
registerAdjust('Dodge', Dodge);
registerAdjust('Jitter', Jitter);
registerAdjust('Stack', Stack);
registerAdjust('Symmetric', Symmetric);
// Public API surface: the factory functions, the Adjust base class, and all
// type declarations from the interface module.
export { getAdjust, registerAdjust, Adjust };
export * from './interface';
//# sourceMappingURL=index.js.map
|
<?php
namespace rest\versions\v1\models;
use common\models\User as CommonUser;
use yii\filters\RateLimitInterface;
/**
 * REST (v1) user model with per-user API rate limiting.
 *
 * Extends the common User model and implements RateLimitInterface so Yii's
 * rate limiter throttles authenticated requests. Write requests
 * (POST/PUT/DELETE) and read requests use separate limits from the
 * application params; allowance state lives in the application cache, keyed
 * by request path + HTTP method.
 */
class User extends CommonUser implements RateLimitInterface
{
    /**
     * Returns [max requests, per seconds] for the current request.
     *
     * Write methods use maxRateLimit/perRateLimit; everything else uses the
     * (typically looser) maxGetRateLimit/perGetRateLimit params.
     * @inheritdoc
     */
    public function getRateLimit($request, $action)
    {
        if (($request->isPut || $request->isDelete || $request->isPost)) {
            return [\Yii::$app->params['maxRateLimit'], \Yii::$app->params['perRateLimit']];
        }
        return [\Yii::$app->params['maxGetRateLimit'], \Yii::$app->params['perGetRateLimit']];
    }
    /**
     * Loads [remaining allowance, last timestamp] from the cache.
     *
     * NOTE(review): cache keys are path+method only, not per-user — confirm
     * whether limits are intended to be shared across all users of a route.
     * @inheritdoc
     */
    public function loadAllowance($request, $action)
    {
        return [
            \Yii::$app->cache->get($request->getPathInfo() . $request->getMethod() . '_remaining'),
            \Yii::$app->cache->get($request->getPathInfo() . $request->getMethod() . '_ts')
        ];
    }
    /**
     * Persists the remaining allowance and timestamp back to the cache.
     * @inheritdoc
     */
    public function saveAllowance($request, $action, $allowance, $timestamp)
    {
        \Yii::$app->cache->set($request->getPathInfo() . $request->getMethod() . '_remaining', $allowance);
        \Yii::$app->cache->set($request->getPathInfo() . $request->getMethod() . '_ts', $timestamp);
    }
}
|
package com.controller;
import com.service.NewsService;
import com.service.impl.NewsServiceImpl;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.logging.Logger;
@WebServlet("/delete")
public class DeleteNewsServlet extends HttpServlet {
    private final NewsService newsService = new NewsServiceImpl();
    // Register the logger under this servlet's own name (the original used
    // NewsServiceImpl's class name, mislabeling every log line).
    private static final Logger logger = Logger.getLogger(DeleteNewsServlet.class.getName());

    /**
     * Deletes the news item identified by the "id" request parameter and
     * redirects back to the index page.
     * <p>
     * NOTE(review): Integer.parseInt throws NumberFormatException on a
     * missing/non-numeric id, surfacing as a 500 — confirm upstream always
     * supplies a valid id.
     */
    @Override
    protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        int id = Integer.parseInt(req.getParameter("id"));
        newsService.deleteNews(id);
        resp.sendRedirect(req.getContextPath() + "/index");
        // Log the actual id; the original concatenated the literal "id" and
        // described a delete as "修改成功" (update succeeded).
        logger.info("删除成功, id=" + id);
    }
}
|
"use strict"
module.exports = testCode
var esprima = require("esprima")
var controlFlow = require("../../cfg")
var toJS = require("control-flow-to-js")
var vm = require("vm")
var util = require("util")
var stripNodes = require("./strip")
// Round-trip check: run `code` directly, then run it again after converting
// through the control-flow graph and back to JS, asserting that both the
// produced value and any thrown exception match.
function testCode(t, code, remark) {
    var cfg = controlFlow(esprima.parse(code))
    var regenerated = toJS(cfg)
    //TODO: Inspect control flow graph and verify that code is consistent
    //console.log(util.inspect(stripNodes(cfg), {depth:10}))
    //console.log(regenerated)
    var expected, expectedError
    try {
        expected = vm.runInNewContext(code)
    } catch (err) {
        expectedError = err
    }
    var actual, actualError
    try {
        actual = vm.runInNewContext(regenerated)
    } catch (err) {
        actualError = err
    }
    t.same(actual, expected, remark)
    t.same(actualError, expectedError, remark)
}
|
goog.provide('goog.ds.JsonDataSource');
goog.require('goog.Uri');
goog.require('goog.dom');
goog.require('goog.ds.DataManager');
goog.require('goog.ds.JsDataSource');
goog.require('goog.ds.LoadState');
goog.require('goog.ds.logger');
/**
 * Data source whose JSON payload is fetched JSONP-style: a <script> tag is
 * injected and the server wraps the data in a callback call.
 * @param {?string} uri URI to fetch from; null creates an unloaded source.
 * @param {string} name Data source name.
 * @param {string=} opt_callbackParamName Query parameter the server reads the
 *     callback name from; defaults to 'callback'.
 */
goog.ds.JsonDataSource = function(uri, name, opt_callbackParamName) {
goog.ds.JsDataSource.call(this, null, name, null);
if(uri) {
this.uri_ = new goog.Uri(uri);
} else {
this.uri_ = null;
}
this.callbackParamName_ = opt_callbackParamName || 'callback';
};
goog.inherits(goog.ds.JsonDataSource, goog.ds.JsDataSource);
/** Current load state; starts NOT_LOADED until load() is called. */
goog.ds.JsonDataSource.prototype.loadState_ = goog.ds.LoadState.NOT_LOADED;
/** Registry of live sources by data name, used to route JSONP callbacks. */
goog.ds.JsonDataSource['dataSources']= { };
/**
 * Starts loading: registers this source, points the server's callback at
 * JsonReceive.<dataName>, and injects a <script> tag into <head> to perform
 * the request. With no URI, resets to an empty root and NOT_LOADED.
 */
goog.ds.JsonDataSource.prototype.load = function() {
if(this.uri_) {
goog.ds.JsonDataSource['dataSources'][this.dataName_]= this;
goog.ds.logger.info('Sending JS request for DataSource ' + this.getDataName() + ' to ' + this.uri_);
this.loadState_ = goog.ds.LoadState.LOADING;
var uriToCall = new goog.Uri(this.uri_);
uriToCall.setParameterValue(this.callbackParamName_, 'JsonReceive.' + this.dataName_);
goog.global['JsonReceive'][this.dataName_]= goog.bind(this.receiveData, this);
var scriptEl = goog.dom.createDom('script', { 'src': uriToCall });
goog.dom.getElementsByTagNameAndClass('head')[0].appendChild(scriptEl);
} else {
this.root_ = { };
this.loadState_ = goog.ds.LoadState.NOT_LOADED;
}
};
/** @return {goog.ds.LoadState} The current load state. */
goog.ds.JsonDataSource.prototype.getLoadState = function() {
return this.loadState_;
};
/**
 * JSONP callback target: installs the received object as the data root,
 * marks the source LOADED, and notifies the DataManager.
 * @param {Object} obj Parsed JSON payload from the server.
 */
goog.ds.JsonDataSource.prototype.receiveData = function(obj) {
this.setRoot(obj);
this.loadState_ = goog.ds.LoadState.LOADED;
goog.ds.DataManager.getInstance().fireDataChange(this.getDataName());
};
// Global namespace the injected <script> calls back into.
goog.global['JsonReceive']= { };
|
// <copyright file="PrivateMessage.cs" company="Drastic Actions">
// Copyright (c) Drastic Actions. All rights reserved.
// </copyright>
using System;
using System.Collections.Generic;
using System.Text;
using Awful.Core.Entities.PostIcons;
using Awful.Core.Entities.Posts;
namespace Awful.Core.Entities.Messages
{
/// <summary>
/// SA Private Message.
/// </summary>
    /// <summary>
    /// Something Awful private message: header metadata (sender, title, date,
    /// icons) plus the message body as a <see cref="Post"/>.
    /// </summary>
    public class PrivateMessage : SAItem
    {
        /// <summary>
        /// Gets or sets the unique ID of this private message.
        /// </summary>
        public int PrivateMessageId { get; set; }
        /// <summary>
        /// Gets or sets the post icon attached to the message.
        /// </summary>
        public PostIcon Icon { get; set; }
        /// <summary>
        /// Gets or sets the location of the icon image.
        /// </summary>
        public string ImageIconLocation { get; set; }
        /// <summary>
        /// Gets or sets the message title (subject line).
        /// </summary>
        public string Title { get; set; }
        /// <summary>
        /// Gets or sets the username of the sender of the PM.
        /// </summary>
        public string Sender { get; set; }
        /// <summary>
        /// Gets or sets the date the message was sent.
        /// </summary>
        public DateTime Date { get; set; }
        /// <summary>
        /// Gets or sets the message body, represented as a post.
        /// </summary>
        public Post Post { get; set; }
        /// <summary>
        /// Gets or sets the endpoint used to fetch this message.
        /// </summary>
        public string MessageEndpoint { get; set; }
        /// <summary>
        /// Gets the image icon endpoint (set internally during parsing).
        /// </summary>
        public string ImageIconEndpoint { get; internal set; }
        /// <summary>
        /// Gets the status image icon endpoint (set internally during parsing).
        /// </summary>
        public string StatusImageIconEndpoint { get; internal set; }
        /// <summary>
        /// Gets the status image icon location (set internally during parsing).
        /// </summary>
        public string StatusImageIconLocation { get; internal set; }
    }
}
|
import java.net.InetSocketAddress
import java.nio.ByteBuffer
import java.nio.channels.SelectionKey
import java.nio.channels.Selector
import java.nio.channels.ServerSocketChannel
import java.nio.channels.SocketChannel
import java.util.concurrent.ConcurrentLinkedDeque
import java.util.concurrent.Executors
import java.util.concurrent.SynchronousQueue
typealias handler = (request: Request) -> Response
/**
 * Minimal non-blocking HTTP server on java.nio selectors.
 *
 * One selector thread accepts connections and dispatches readable keys to a
 * [ReadWorker] and writable keys to a [WriteWorker]; fully-parsed requests
 * are queued in [wpool] and re-registered for OP_WRITE on the next wakeup.
 */
class SimpleHttpServer(private val port: Int = 3000) {
    // Keys whose request is fully parsed and whose channel must be
    // re-registered for OP_WRITE by the selector thread.
    private val wpool = ConcurrentLinkedDeque<SelectionKey>()
    private val selector = Selector.open()
    private val channel = ServerSocketChannel.open()
    init {
        // Non-blocking server socket bound to the port, interested in accepts.
        channel.configureBlocking(false)
        val socket = channel.socket()
        val address = InetSocketAddress(port)
        socket.bind(address)
        channel.register(selector, SelectionKey.OP_ACCEPT)
    }
    /** Queues a key for OP_WRITE registration and wakes the selector loop. */
    fun addWriter(selectionKey: SelectionKey) {
        wpool.add(selectionKey)
        selector.wakeup()
    }
    /**
     * Runs the selector loop forever: processes selected keys, or — on a
     * wakeup with no ready keys — drains [wpool] into OP_WRITE registrations.
     * NOTE(review): `requestHandler` is resolved from outside this view —
     * confirm it is defined elsewhere in the file/project.
     */
    fun run() {
        val readWorker = ReadWorker(this)
        val writeWorker = WriteWorker(::requestHandler)
        runWorkers(readWorker, writeWorker)
        while (true) {
            val selectorCount = selector.select()
            if (selectorCount > 0) {
                val keys = selector.selectedKeys()
                println("keys $keys")
                // Map keeps the processed keys so they can be removed from the
                // selected-key set afterwards.
                val l = keys.map { selectedKey ->
                    try {
                        processSelector(selectedKey, readWorker, writeWorker)
                    } catch (e: Exception) {
                        e.printStackTrace()
                    }
                    selectedKey
                }
                keys.removeAll(l)
            } else {
                // Woken by addWriter(): register queued channels for writing.
                while (!wpool.isEmpty()) {
                    val key = wpool.poll()
                    val channel = key.channel() as SocketChannel
                    try {
                        channel.register(selector, SelectionKey.OP_WRITE, key.attachment())
                    } catch (e: Exception) {
                        try {
                            channel.closeSafely()
                        } catch (e: Exception) {}
                    }
                }
            }
        }
    }
    /**
     * Routes one ready key: accept -> register for read with a fresh
     * RequestParser attachment; readable -> hand to the read worker;
     * writable -> hand to the write worker and cancel the key (the write
     * worker closes the channel when done).
     */
    private fun processSelector(selectedKey: SelectionKey, readWorker: ReadWorker, writeWorker: WriteWorker) {
        if (!selectedKey.isValid) {
            return
        }
        when {
            selectedKey.isAcceptable -> {
                println("accept $selectedKey")
                val ssChannel = selectedKey.channel() as ServerSocketChannel
                val sc = ssChannel.accept()
                sc?.let {
                    it.configureBlocking(false)
                    val parser = RequestParser(this, it)
                    selectedKey.attach(parser)
                    it.register(selector, SelectionKey.OP_READ, parser)
                }
            }
            selectedKey.isReadable -> {
                readWorker.add(selectedKey)
            }
            selectedKey.isWritable -> {
                writeWorker.add(selectedKey)
                selectedKey.cancel()
            }
        }
    }
    /** Starts the read and write workers on a two-thread pool. */
    private fun runWorkers(readWorker: ReadWorker, writeWorker: WriteWorker) {
        val executor = Executors.newFixedThreadPool(2)
        executor.execute {
            readWorker.readInfinite()
        }
        executor.execute {
            writeWorker.writeInfinite()
        }
    }
}
/**
 * Reads request bytes from ready channels, feeding them into each key's
 * RequestParser attachment; hands fully-parsed requests back to the server
 * for the write phase. Keys are received one at a time over a hand-off queue.
 */
class ReadWorker(private val server: SimpleHttpServer) {
    // Per-read buffer size; requests larger than this arrive over multiple
    // read events.
    private val maxBufferSize: Int = 1024
    // SynchronousQueue: the selector thread blocks in add() until this
    // worker takes the key — a rendezvous, not a buffer.
    private val pool = SynchronousQueue<SelectionKey>()
    /** Hands a readable key to the worker (blocks until taken). */
    fun add(selectionKey: SelectionKey) {
        pool.put(selectionKey)
    }
    /** Worker loop: take a key, read it, repeat forever. */
    fun readInfinite() {
        while (true) {
            val selectionKey = pool.take()
            read(selectionKey)
        }
    }
    // Reads available bytes into the parser; cancels the key when the request
    // is complete (write side takes over) or when the read yields no bytes.
    private fun read(key: SelectionKey) {
        println("read ")
        println(key)
        val channel = key.channel() as SocketChannel
        val parser = key.attachment() as RequestParser
        val byteArray = read(channel)
        if (!byteArray.isEmpty()) {
            parser.add(byteArray)
            if (parser.isFinished) {
                server.addWriter(key)
                key.cancel()
            } else {
                println("-------")
            }
        } else {
            key.cancel()
        }
    }
    // Single non-blocking read of up to maxBufferSize bytes; returns exactly
    // the bytes read (possibly empty).
    private fun read(socketChannel: SocketChannel): ByteArray {
        val buffer = ByteBuffer.allocate(maxBufferSize)
        socketChannel.read(buffer)
        val array = buffer.array()
        return array.copyOf(buffer.position())
    }
}
/**
 * Turns parsed requests into responses and writes them to the client.
 *
 * Keys arrive via a rendezvous queue from the selector thread; each write is
 * dispatched onto a 32-thread pool so slow handlers don't block the queue
 * drain. The channel is closed after the response is written (no keep-alive).
 */
class WriteWorker(private val handler: handler) {
    // Rendezvous hand-off from the selector thread (see ReadWorker).
    private val pool = SynchronousQueue<SelectionKey>()
    private val executor = Executors.newFixedThreadPool(32)
    /** Hands a writable key to the worker (blocks until taken). */
    fun add(selectionKey: SelectionKey) {
        pool.put(selectionKey)
    }
    /** Worker loop: take each key and process it on the thread pool. */
    fun writeInfinite() {
        while (true) {
            val selectionKey = pool.take()
            executor.execute { write(selectionKey) }
        }
    }
    // Parses the buffered request, invokes the handler, writes the response
    // bytes, and closes the connection. Write/close failures are swallowed —
    // the client has gone away and there is nothing left to do.
    private fun write(key: SelectionKey) {
        println("write ")
        println(key)
        val channel = key.channel() as SocketChannel
        val parser = key.attachment() as RequestParser
        val request = parser.parse()
        val response = handler(request)
        val buf = ByteBuffer.wrap(response.getOutput())
        try {
            channel.write(buf)
            channel.closeSafely()
        } catch (e: Exception) {
        }
    }
}
/**
 * Best-effort close of this channel and its socket; every exception is
 * swallowed. Note: if finishConnect() throws, the later close calls are
 * skipped — this mirrors the original behavior.
 */
fun SocketChannel.closeSafely() {
    try {
        finishConnect()
        socket().close()
        close()
    } catch (ignored: Exception) {
        // deliberately ignored: closing is best-effort
    }
}
/** Entry point: starts the HTTP server on its default port and blocks forever. */
fun main(args: Array<String>) {
    val server = SimpleHttpServer()
    server.run()
}
|
using UnityEngine;
using UnityEngine.EventSystems;
/// <summary>
/// UI drag handler: hides the seed panel and spawns a seed object at the
/// cursor's world position when a drag begins.
/// </summary>
public class OnDrag : MonoBehaviour, IBeginDragHandler, IDragHandler, IEndDragHandler
{
    // The seed instance created on drag start.
    // NOTE(review): never moved or destroyed in OnDrag/OnEndDrag here —
    // confirm follow-up handling lives elsewhere.
    private GameObject lua;
    [SerializeField] private GameObject PanelSeed;
    public void OnBeginDrag(PointerEventData eventData)
    {
        Debug.Log("On Begin Drag");
        PanelSeed.SetActive(false);
        // Convert the mouse position to world space and flatten onto z = 0.
        Vector3 pos = Camera.main.ScreenToWorldPoint(Input.mousePosition);
        pos.z = 0;
        lua = Instantiate(DataGlobal.instance.SeedObject, pos, Quaternion.identity);
    }
    // Required by IEndDragHandler; intentionally empty.
    public void OnEndDrag(PointerEventData eventData)
    {
    }
    // Required by IDragHandler (must be implemented for begin/end drag events
    // to fire); intentionally empty.
    void IDragHandler.OnDrag(PointerEventData eventData)
    {
    }
}
|
class FlightDealCLI::CLI
  # Top-level flow: greet, show deals, loop for more, then exit.
  def call
    start
    deals
    again
    goodbye
  end

  # Greets the user and kicks off the initial scrape.
  def start
    puts "** Welcome to AirfareWatchDog's Flight Deals **"
    FlightDealCLI::Scraper.new.make_deal
    puts ""
    puts "Let me find today's best deals! One moment please.."
    puts "---------------------------------------------------"
  end

  # Lists the deals, asks for a selection, and prints its details.
  def deals
    print_deals_list
    puts ""
    puts "Which deal would you like more information on? Please type a number:"
    selection = gets.strip.to_i
    deal = FlightDealCLI::Deal.find(selection)
    FlightDealCLI::Scraper.scrape_additional_details(deal)
    print_one_deal(deal)
  end

  # Prompt loop: 'yes' shows another deal, 'no' exits via goodbye,
  # anything else re-prompts; typing 'exit' ends the loop.
  def again
    answer = nil
    until answer == 'exit'
      puts ""
      puts "Would you like to check out another flight deal? Please type 'yes' or 'no':"
      puts "You may type exit at any time to exit the program."
      answer = gets.strip.downcase
      if answer == 'yes'
        deals
      elsif answer == 'no'
        goodbye
      else
        puts "Invalid entry. Please enter 'yes' or 'no':"
      end
    end
  end

  # Farewell message, then terminate the program.
  def goodbye
    puts ""
    puts "***Thanks for coming! Come back tomorrow for more great flight deals!***"
    exit
  end

  # Numbered summary line for every known deal.
  def print_deals_list
    puts ""
    FlightDealCLI::Deal.all.each.with_index(1) do |deal, position|
      puts "#{position}. #{deal.departure} - #{deal.arrival} for #{deal.price} #{deal.rt}"
    end
  end

  # Detail view: labels 1-3 always print; 4-6 only when both label and
  # value are present (matching the original conditional lines).
  def print_one_deal(choice)
    puts ""
    puts "********#{choice.departure} ---> #{choice.arrival}********"
    puts ""
    puts "Price: #{choice.price} #{choice.rt}"
    (1..6).each do |i|
      label = choice.send(:"label_#{i}")
      value = choice.send(:"value_#{i}")
      puts "#{label}: #{value}" if i <= 3 || (label && !value.nil?)
    end
    puts ""
    puts "For more info, visit: #{choice.link}"
  end
end
|
#!/bin/bash
# Recovers SKIFF_CONFIG and SKIFF_EXTRA_CONFIGS_PATH from files persisted in
# the final config directory, when the caller has not already set them.
pat="$SKIFF_FINAL_CONFIG_DIR/skiff_config"
ext="$SKIFF_FINAL_CONFIG_DIR/skiff_extra_configs_path"
if [ -d "$SKIFF_FINAL_CONFIG_DIR" ]; then
  if [ -f "$pat" ] && [ -z "$SKIFF_CONFIG" ]; then
    # Signal downstream tooling that the config was recovered, not supplied.
    export SKIFF_WARN_ABOUT_RECOVERED_CONFIG=true
    # Quote the path and the substitution: the original unquoted $pat broke
    # on directories containing spaces or glob characters.
    export SKIFF_CONFIG="$(cat "$pat")"
  fi
  if [ -f "$ext" ] && [ -z "$SKIFF_EXTRA_CONFIGS_PATH" ]; then
    export SKIFF_EXTRA_CONFIGS_PATH="$(cat "$ext")"
  fi
fi
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.