content stringlengths 4 1.04M | lang stringclasses 358
values | score int64 0 5 | repo_name stringlengths 5 114 | repo_path stringlengths 4 229 | repo_licenses listlengths 1 8 |
|---|---|---|---|---|---|
server.port=8081
#logging.level.root=DEBUG | INI | 1 | zeesh49/tutorials | spring-ejb/spring-ejb-client/src/main/resources/application.properties | [
"MIT"
] |
# Unless otherwise noted, replace all settings w/in curly braces with values
# suited to your own environment.
<VirtualHost *:80>
ServerName helloworld.example.sonno.dev
ServerAdmin {youraddress@you.com}
DocumentRoot {your/path/to}/examples/helloworld/www
<Directory {your/path/to}/examples/helloworld/www>
Options Indexes FollowSymLinks MultiViews
AllowOverride None
Order allow,deny
allow from all
</Directory>
ErrorLog {your/path/to/error.log}
# Possible values include: debug, info, notice, warn, error, crit,
# alert, emerg.
LogLevel warn
# Main handler rewrite
# Do not replace values inside curly braces {} w/in the Location tag!!!
<Location />
RewriteEngine On
RewriteCond %{REQUEST_FILENAME} !-f
RewriteCond %{REQUEST_FILENAME} !-d
RewriteRule !\.(js|ico|gif|jpg|png|css)$ /index.php
</Location>
</VirtualHost> | ApacheConf | 5 | davehauenstein/sonno | examples/helloworld/tpl/sample.vhost | [
"BSD-Source-Code"
] |
// Copyright (c) 2020 Bluespec, Inc. All Rights Reserved
package AXI4_Addr_Translator;
// ================================================================
// This package defines an AXI4-slave-to-AXI4-slave 'address-translator' function.
// That just adds/subtracts a fixed constant from addresses.
// ================================================================
// Bluespec library imports
// none
// ----------------
// BSV additional libs
// none
// ================================================================
// Project imports
import AXI4_Types :: *;
// ================================================================
// Master-to-Master interface transformer with address translation
function AXI4_Master_IFC #(wd_id, wd_addr, wd_data, wd_user)
fv_AXI4_Master_Address_Translator (Bool add_not_sub,
Bit #(wd_addr) addr_delta,
AXI4_Master_IFC #(wd_id, wd_addr, wd_data, wd_user) ifc);
function Bit #(wd_addr) fv_addr_translate (Bit #(wd_addr) addr);
return (add_not_sub ? addr + addr_delta : addr - addr_delta);
endfunction
return interface AXI4_Master_IFC
// Wr Addr channel
method Bool m_awvalid = ifc.m_awvalid; // out
method Bit #(wd_id) m_awid = ifc.m_awid; // out
method Bit #(wd_addr) m_awaddr = fv_addr_translate (ifc.m_awaddr); // out
method Bit #(8) m_awlen = ifc.m_awlen; // out
method AXI4_Size m_awsize = ifc.m_awsize; // out
method Bit #(2) m_awburst = ifc.m_awburst; // out
method Bit #(1) m_awlock = ifc.m_awlock; // out
method Bit #(4) m_awcache = ifc.m_awcache; // out
method Bit #(3) m_awprot = ifc.m_awprot; // out
method Bit #(4) m_awqos = ifc.m_awqos; // out
method Bit #(4) m_awregion = ifc.m_awregion; // out
method Bit #(wd_user) m_awuser = ifc.m_awuser; // out
method Action m_awready (Bool awready) = ifc.m_awready (awready); // in
// Wr Data channel
method Bool m_wvalid = ifc.m_wvalid; // out
method Bit #(wd_data) m_wdata = ifc.m_wdata; // out
method Bit #(TDiv #(wd_data, 8)) m_wstrb = ifc.m_wstrb; // out
method Bool m_wlast = ifc.m_wlast; // out
method Bit #(wd_user) m_wuser = ifc.m_wuser; // out
method Action m_wready (Bool wready) = ifc.m_wready (wready); // in
// Wr Response channel
method Action m_bvalid (Bool bvalid, // in
Bit #(wd_id) bid, // in
Bit #(2) bresp, // in
Bit #(wd_user) buser); // in
ifc.m_bvalid (bvalid, bid, bresp, buser);
endmethod
method Bool m_bready = ifc.m_bready; // out
// Rd Addr channel
method Bool m_arvalid = ifc.m_arvalid; // out
method Bit #(wd_id) m_arid = ifc.m_arid; // out
method Bit #(wd_addr) m_araddr = fv_addr_translate (ifc.m_araddr); // out
method Bit #(8) m_arlen = ifc.m_arlen; // out
method AXI4_Size m_arsize = ifc.m_arsize; // out
method Bit #(2) m_arburst = ifc.m_arburst; // out
method Bit #(1) m_arlock = ifc.m_arlock; // out
method Bit #(4) m_arcache = ifc.m_arcache; // out
method Bit #(3) m_arprot = ifc.m_arprot; // out
method Bit #(4) m_arqos = ifc.m_arqos; // out
method Bit #(4) m_arregion = ifc.m_arregion; // out
method Bit #(wd_user) m_aruser = ifc.m_aruser; // out
method Action m_arready (Bool arready) = ifc.m_arready (arready); // in
// Rd Data channel
method Action m_rvalid (Bool rvalid, // in
Bit #(wd_id) rid, // in
Bit #(wd_data) rdata, // in
Bit #(2) rresp, // in
Bool rlast, // in
Bit #(wd_user) ruser); // in
ifc.m_rvalid (rvalid, rid, rdata, rresp, rlast, ruser);
endmethod
method Bool m_rready = ifc.m_rready; // out
endinterface;
endfunction
// ================================================================
// Slave-to-Slave interface transformer with address translation
function AXI4_Slave_IFC #(wd_id, wd_addr, wd_data, wd_user)
fv_AXI4_Slave_Address_Translator (Bool add_not_sub,
Bit #(wd_addr) addr_delta,
AXI4_Slave_IFC #(wd_id, wd_addr, wd_data, wd_user) ifc);
function Bit #(wd_addr) fv_addr_translate (Bit #(wd_addr) addr);
return (add_not_sub ? addr + addr_delta : addr - addr_delta);
endfunction
return interface AXI4_Slave_IFC
// Wr Addr channel
method Action m_awvalid (Bool awvalid,
Bit #(wd_id) awid,
Bit #(wd_addr) awaddr,
Bit #(8) awlen,
AXI4_Size awsize,
Bit #(2) awburst,
Bit #(1) awlock,
Bit #(4) awcache,
Bit #(3) awprot,
Bit #(4) awqos,
Bit #(4) awregion,
Bit #(wd_user) awuser);
ifc.m_awvalid (awvalid, awid,
fv_addr_translate (awaddr),
awlen, awsize, awburst, awlock, awcache, awprot, awqos, awregion, awuser);
endmethod
method Bool m_awready = ifc.m_awready;
// Wr Data channel
method Action m_wvalid (Bool wvalid,
Bit #(wd_data) wdata,
Bit #(TDiv #(wd_data, 8)) wstrb,
Bool wlast,
Bit #(wd_user) wuser);
ifc.m_wvalid (wvalid, wdata, wstrb, wlast, wuser);
endmethod
method Bool m_wready = ifc.m_wready;
// Wr Response channel
method Bool m_bvalid = ifc.m_bvalid;
method Bit #(wd_id) m_bid = ifc.m_bid;
method Bit #(2) m_bresp = ifc.m_bresp;
method Bit #(wd_user) m_buser = ifc.m_buser;
method Action m_bready (Bool bready) = ifc.m_bready (bready);
// Rd Addr channel
method Action m_arvalid (Bool arvalid,
Bit #(wd_id) arid,
Bit #(wd_addr) araddr,
Bit #(8) arlen,
AXI4_Size arsize,
Bit #(2) arburst,
Bit #(1) arlock,
Bit #(4) arcache,
Bit #(3) arprot,
Bit #(4) arqos,
Bit #(4) arregion,
Bit #(wd_user) aruser);
ifc.m_arvalid (arvalid, arid,
fv_addr_translate (araddr),
arlen, arsize, arburst, arlock, arcache, arprot, arqos, arregion, aruser);
endmethod
method Bool m_arready = ifc.m_arready;
// Rd Data channel
method Bool m_rvalid = ifc.m_rvalid;
method Bit #(wd_id) m_rid = ifc.m_rid;
method Bit #(wd_data) m_rdata = ifc.m_rdata;
method Bit #(2) m_rresp = ifc.m_rresp;
method Bool m_rlast = ifc.m_rlast;
method Bit #(wd_user) m_ruser = ifc.m_ruser;
method Action m_rready (Bool rready);
ifc.m_rready (rready);
endmethod
endinterface;
endfunction
// ================================================================
endpackage: AXI4_Addr_Translator
| Bluespec | 5 | darius-bluespec/Flute | src_Testbench/Fabrics/AXI4/AXI4_Addr_Translator.bsv | [
"Apache-2.0"
] |
{-# LANGUAGE UndecidableInstances #-}
module Hasura.GraphQL.Parser.Column
( UnpreparedValue (..),
ValueWithOrigin (..),
openValueOrigin,
peelWithOrigin,
mkParameter,
)
where
import Hasura.GraphQL.Parser.Class
import Hasura.GraphQL.Parser.Internal.TypeChecking
import Hasura.GraphQL.Parser.Internal.Types
import Hasura.GraphQL.Parser.Schema
import Hasura.Prelude
import Hasura.RQL.Types.Backend
import Hasura.RQL.Types.Column hiding
( EnumValue (..),
EnumValueInfo (..),
)
import Hasura.SQL.Backend
import Hasura.Session (SessionVariable)
import Language.GraphQL.Draft.Syntax qualified as G
-- -------------------------------------------------------------------------------------------------
data UnpreparedValue (b :: BackendType)
= -- | A SQL value that can be parameterized over.
UVParameter
(Maybe VariableInfo)
-- ^ The GraphQL variable this value came from, if any.
(ColumnValue b)
| -- | A literal SQL expression that /cannot/ be parameterized over.
UVLiteral (SQLExpression b)
| -- | The entire session variables JSON object.
UVSession
| -- | A single session variable.
UVSessionVar (SessionVarType b) SessionVariable
deriving instance
( Backend b,
Eq (ColumnValue b),
Eq (ScalarValue b)
) =>
Eq (UnpreparedValue b)
deriving instance
( Backend b,
Show (ColumnValue b),
Show (ScalarValue b)
) =>
Show (UnpreparedValue b)
-- | This indicates whether a variable value came from a certain GraphQL variable
data ValueWithOrigin a
= ValueWithOrigin VariableInfo a
| ValueNoOrigin a
deriving (Functor)
openValueOrigin :: ValueWithOrigin a -> a
openValueOrigin (ValueWithOrigin _ a) = a
openValueOrigin (ValueNoOrigin a) = a
mkParameter :: ValueWithOrigin (ColumnValue b) -> UnpreparedValue b
mkParameter (ValueWithOrigin valInfo columnValue) = UVParameter (Just valInfo) columnValue
mkParameter (ValueNoOrigin columnValue) = UVParameter Nothing columnValue
-- TODO: figure out what the purpose of this method is.
peelWithOrigin :: MonadParse m => Parser 'Both m a -> Parser 'Both m (ValueWithOrigin a)
peelWithOrigin parser =
parser
{ pParser = \case
GraphQLValue (G.VVariable var@Variable {vInfo, vValue}) -> do
-- Check types c.f. 5.8.5 of the June 2018 GraphQL spec
typeCheck False (toGraphQLType $ pType parser) var
ValueWithOrigin vInfo <$> pParser parser (absurd <$> vValue)
value -> ValueNoOrigin <$> pParser parser value
}
| Haskell | 4 | devrsi0n/graphql-engine | server/src-lib/Hasura/GraphQL/Parser/Column.hs | [
"Apache-2.0",
"MIT"
] |
#include "fir12_par.h"
#include <stdio.h>
#include "fir12.h"
void fir12_1(const int coefficients[], int offset, int N, streaming chanend cin, streaming chanend cout, int data[]) {
int dat;
int w = 0;
long long result, res;
for(int i = 0; i < N+12; i++) {
data[i] = 0; // no unsafe arrays here - checks bound
}
#pragma unsafe arrays
while(1) {
cout <: data[w];
cin :> dat;
data[w] = dat;
if (w < 12) {
data[N+w] = dat;
}
result = fir12coffset(coefficients, data, w, N, offset);
cout :> res;
cin <: result+res;
w--;
if (w < 0) {
w = N-1;
}
}
}
void fir12_e(const int coefficients[], int offset, int N, streaming chanend cin, int data[]) {
int dat;
int w = 0;
for(int i = 0; i < N+12; i++) {
data[i] = 0; // no unsafe arrays here - checks bound
}
#pragma unsafe arrays
while(1) {
cin :> dat;
data[w] = dat;
if (w < 12) {
data[N+w] = dat;
}
cin <: fir12coffset(coefficients, data, w, N, offset);
w--;
if (w < 0) {
w = N-1;
}
}
}
void fir_par4_48(int coefficients[], int N, streaming chanend cin,
int data0[], int data1[], int data2[], int data3[]) {
streaming chan a, b, c;
if (N%48 != 0) {
return;
}
coefficients[N-1]; // Check bound on coefficients
par {
fir12_1(coefficients, 0*N/4, N/4, cin, a, data0);
fir12_1(coefficients, 1*N/4, N/4, a, b, data1);
fir12_1(coefficients, 2*N/4, N/4, b, c, data2);
fir12_e(coefficients, 3*N/4, N/4, c, data3);
}
}
void fir_par3_36(int coefficients[], int N, streaming chanend cin,
int data0[], int data1[], int data2[]) {
streaming chan a, b;
if (N%36 != 0) {
return;
}
coefficients[N-1]; // Check bound on coefficients
par {
fir12_1(coefficients, 0*N/3, N/3, cin, a, data0);
fir12_1(coefficients, 1*N/3, N/3, a, b, data1);
fir12_e(coefficients, 2*N/3, N/3, b, data2);
}
}
void fir_par2_24(int coefficients[], int N, streaming chanend cin,
int data0[], int data1[]) {
streaming chan a;
if (N%24 != 0) {
return;
}
coefficients[N-1]; // Check bound on coefficients
par {
fir12_1(coefficients, 0*N/2, N/2, cin, a, data0);
fir12_e(coefficients, 1*N/2, N/2, a, data1);
}
}
void fir_par1_12(int coefficients[], int N, streaming chanend cin,
int data0[]) {
if (N%12 != 0) {
return;
}
coefficients[N-1]; // Check bound on coefficients
fir12_e(coefficients, 0, N, cin, data0);
}
| XC | 4 | smola/language-dataset | data/github.com/xcore/sc_dsp_filters/2b55737d94e1c4c9001d075b7e7becd4f4b900b6/module_fir/src/fir12_par.xc | [
"MIT"
] |
/*
Copyright © 2011 MLstate
This file is part of Opa.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
/*
* Author : Nicolas Glondu <nicolas.glondu@mlstate.com>
**/
/**
* GitHub generic API module
*
* @category api
* @author Nicolas Glondu, 2011
* @destination public
*/
package stdlib.apis.github
/* Types returned by API */
type GitHub.encoding = {utf8} / {base64}
type GitHub.state = {open} / {closed}
type GitHub.direction = {asc} / {desc}
type GitHub.plan = {
name : string
space : int
private_repos : int
}
/**
* Type of a GitHub user provided to various functions
*/
type GitHub.user_id =
{ self : string } /** Token of user - provides more information */
/ { login : string } /** Login of user */
type GitHub.user_more = {
total_private_repos : int
collaborators : int
disk_usage : int
owned_private_repos : int
private_gists : int
plan : GitHub.plan
}
type GitHub.short_user = {
id : int
login : string
url : string
gravatar_id : string
avatar_url : string
`type` : string // Not always present
contributions : int // Only if as contributor
}
type GitHub.user = {
id : int
login : string
name : string
company : string
gravatar_id : string
created_at : Date.date
location : string
blog : string
public_repos : int
public_gists : int
followers : int
following : int
user_type : string
avatar_url : string
url : string
html_url : string
more : option(GitHub.user_more)
}
type GitHub.repository = {
url : string
html_url : string
clone_url : string
git_url : string
ssh_url : string
svn_url : string
mirror_url : string
id : int
owner : GitHub.short_user
name : string
full_name : string
description : string
homepage : string /* Not always returned */
language : string
`private` : bool
fork : bool
forks : int
watchers : int
size : int
master_branch : string
open_issues : int
pushed_at : Date.date
created_at : Date.date
updated_at : Date.date
organization : option(GitHub.short_user)
parent : option(GitHub.repository)
source : option(GitHub.repository)
has_issues : option(bool)
has_wiki : option(bool)
has_downloads : option(bool)
}
type GitHub.repo_comment = {
html_url : string
url : string
id : int
body : string
path : string
position : int
line : int
commit_id : string
user : GitHub.short_user
created_at : Date.date
updated_at : Date.date
}
type GitHub.download = {
url : string
html_url : string
id : int
name : string
description : string
size : int
download_count : int
content_type : string
}
type GitHub.gist_file =
(string,{
size : int
filename : string
raw_url : string
content : string
})
type GitHub.gist_forks = {
user : GitHub.short_user
url : string
created_at : Date.date
}
type GitHub.gist_history = {
user : GitHub.short_user
version : string
url : string
change_status : option({ deletions : int additions : int total : int })
committed_at : Date.date
}
type GitHub.gist = {
id : int
public : bool
description : string
user : GitHub.short_user
url : string
html_url : string
git_push_url : string
git_pull_url : string
comments : int
created_at : Date.date
updated_at : Date.date
files : list(GitHub.gist_file)
forks : list(GitHub.gist_forks)
history : list(GitHub.gist_history)
}
type GitHub.label = {
url : string
name : string
color : string
}
type GitHub.milestone = {
url : string
number : int
state : {open}/{closed}
title : string
description : string
creator : GitHub.short_user
open_issues : int
closed_issues : int
created_at : Date.date
due_on : Date.date
}
type GitHub.pull_request = {
html_url : string
diff_url : string
patch_url : string
}
type GitHub.issue = {
url : string
html_url : string
number : int
state : {open}/{closed}
title : string
body : string
user : GitHub.short_user
labels : list(GitHub.label)
assignee : GitHub.short_user
milestone : option(GitHub.milestone)
comments : int
pull_request : option(GitHub.pull_request)
closed_at : Date.date
created_at : Date.date
updated_at : Date.date
}
type GitHub.issue_comment = {
id : int
url : string
body : string
user : GitHub.short_user
created_at : Date.date
updated_at : Date.date
}
type GitHub.ref = {
label : string
ref : string
sha : string
user : GitHub.short_user
repository : GitHub.repository
}
type GitHub.pull_req = {
url : string
html_url : string
diff_url : string
patch_url : string
issue_url : string
number : int
state : {open}/{closed:Date.date}/{other:string}
title : string
body : string
created_at : Date.date
updated_at : Date.date
closed_at : Date.date
merged_at : Date.date
head : GitHub.ref
base : GitHub.ref
_links : GitHub.links
user : GitHub.short_user
merged : bool
mergeable : bool
merged_by : GitHub.short_user
comments : int
commits : int
additions : int
deletions : int
changed_files : int
}
type GitHub.public_key = {
id : int
key : string
title : string
url : string
verified : bool
}
type GitHub.commit_user = {
name : string
email : string
date : Date.date
}
type GitHub.url_sha = {
url : string
sha : string
}
type GitHub.commit = {
author : GitHub.commit_user
url : string
sha : string
message : string
tree : GitHub.url_sha
committer : GitHub.commit_user
}
type GitHub.stats = {
additions : int
deletions : int
total : int
}
type GitHub.file = {
sha : string
filename : string
status : string
additions : int
deletions : int
changes : int
blob_url : string
raw_url : string
patch : string
}
type GitHub.full_commit = {
sha : string
commit : GitHub.commit
author : GitHub.short_user
parents : list(GitHub.url_sha)
url : string
committer : GitHub.short_user
stats : option(GitHub.stats)
files : list(GitHub.file)
}
type GitHub.links = {
self : { href : string }
html : { href : string }
comments : { href : string }
review_comments : { href : string }
pull_request : { href : string }
git : { href : string }
}
type GitHub.links_no_href = {
self : string
html : string
comments : string
review_comments : string
pull_request : string
git : string
}
type GitHub.id_name_url = {
url : string
name : string
id : int
}
| Opa | 5 | Machiaweliczny/oppailang | lib/stdlib/apis/github/github.opa | [
"MIT"
] |
module Main where
import ADTProto
import Parse (parseProgram)
import Compile (protoProgram, checkProgram)
import System.Environment (getArgs)
import System.Exit (exitFailure)
import System.IO (hPrint, stderr)
import Text.Parsec (runP)
main :: IO ()
main = do [filename] <- getArgs
input <- readFile filename
prog <- case runP parseProgram () filename input of
Right result -> return result
Left err -> do hPrint stderr err
exitFailure
writeFile (filename ++ ".proto") $ show $ protoProgram prog
writeFile (filename ++ ".cc") $ show $ checkProgram prog
| Haskell | 4 | zadcha/rethinkdb | scripts/adtproto/Main.hs | [
"Apache-2.0"
] |
extends ConfirmationDialog
onready var frame_num = $VBoxContainer/GridContainer/FrameNum
onready var frame_dur = $VBoxContainer/GridContainer/FrameTime
func set_frame_label(frame : int) -> void:
frame_num.set_text(str(frame + 1))
func set_frame_dur(duration : float) -> void:
frame_dur.set_value(duration)
func _on_FrameProperties_popup_hide() -> void:
Global.dialog_open(false)
func _on_FrameProperties_confirmed():
var frame : int = int(frame_num.get_text()) - 1
var duration : float = frame_dur.get_value()
var new_duration = Global.current_project.frames[frame].duration
new_duration = duration
Global.current_project.undos += 1
Global.current_project.undo_redo.create_action("Change frame duration")
Global.current_project.undo_redo.add_do_property(Global.current_project.frames[frame], "duration", new_duration)
Global.current_project.undo_redo.add_undo_property(Global.current_project.frames[frame], "duration", Global.current_project.frames[frame].duration)
Global.current_project.undo_redo.add_do_method(Global, "redo")
Global.current_project.undo_redo.add_undo_method(Global, "undo")
Global.current_project.undo_redo.commit_action()
| GDScript | 4 | triptych/Pixelorama | src/UI/Timeline/FrameProperties.gd | [
"MIT"
] |
#include "caffe2/operators/quantized/int8_slice_op.h"
namespace caffe2 {
REGISTER_CPU_OPERATOR(Int8Slice, int8::Int8SliceOp);
OPERATOR_SCHEMA(Int8Slice)
.NumInputs(1, 3)
.NumOutputs(1)
.SetDoc(R"DOC(
Produces a slice of the input Int8 tensor. Currently, only slicing in a single
dimension is supported.
Slices are passed as 2 1D vectors or as two keyword argument lists with starting
and end indices for each dimension of the input `data` tensor. If a negative
value is passed for any of the start or end indices, it represents the number of
elements before the end of that dimension. End indices are non-inclusive unless
negative (end index -1 means up to and including the last element).
Example:
data = [
[1, 2, 3, 4],
[5, 6, 7, 8],
]
starts = [0, 1]
ends = [-1, 3]
result = [
[2, 3],
[6, 7],
]
)DOC")
.Input(0, "data", "Int8 Tensor of data to extract slices from.")
.Input(1, "starts", "1D tensor: start-indices for each dimension of data.")
.Input(2, "ends", "1D tensor: end-indices for each dimension of data.")
.Arg("Y_scale", "Output tensor quantization scale")
.Arg("Y_zero_point", "Output tensor quantization offset")
.Arg("starts", "List of starting indices")
.Arg("ends", "List of ending indices")
.Arg(
"dim",
"(Optional) The dimension to slice over. If specified start_idx and end_idx should also be given and it takes precedence over starts and ends")
.Arg("start_idx", "(Optional) The dimension to start slice from. Default is 0")
.Arg("end_idx", "(Optional) The dimension to end the slice. Default is -1")
.Output(0, "output", "Sliced Int8 data tensor.")
.InheritOnnxSchema("Slice");
} // namespace caffe2
| C++ | 4 | Hacky-DH/pytorch | caffe2/operators/quantized/int8_slice_op.cc | [
"Intel"
] |
<style type="text/css">
body { color: #FFF; }
</style> | Latte | 1 | Antholoj/netbeans | php/php.latte/test/unit/data/testfiles/lexer/top/testIssue231352.latte | [
"Apache-2.0"
] |
== Roadmap
This document lists short-term, medium-term, and long-term goals for the project.
[NOTE]
====
These are goals, not necessarily commitments. The sections are not intended to represent exclusive focus during these terms. For example, when you start at a college or university you may have a long-term goal to graduate and a short-term goal to find a job for supplemental income. We will similarly work toward many of our medium-term and long-term goals in the near future as we move toward meeting our short-term goals.
====
=== Short-term
> Usability, stability, and marketing.
Short term are focused on improving contributor and user productivity (part of this is getting the word out).
* CLI improvements
** Search functionality (e.g. what generators support retrofit, what generators are available for kotlin)
* Build automation improvements
** Discuss consolidating current third-party build systems
** Investigate custom docker containerization for prepared build environments
** Automated release stability
* General
** OAS3.0 features support: anyOf, oneOf, callbacks, etc
** Consider opt-in telemetry about generators being used, limited to a counter of invocations by generator name). This would allow us to make prioritization decisions based on statistics.
** Code clean up
*** centralize build scripts
*** organize samples/bin scripts according to new generator names
*** consolidate typescript generators
*** jaxrs => use Swagger core v3 (see https://github.com/OpenAPITools/openapi-generator/issues/27[#27])
* Documentation
** Static pages, preferably on gh-pages, devoted to each generator
** Explain generator options
** Centralized docs on generated code usage/examples/configuration
=== Medium-term
> Feature set, well-defined API (code and templates), and extensibility improvements.
* API
** Typed representation of the model bound to our templates. As it is, everything is treated an an Object, and this can lead to changes in the interface which might be unexpected from the template perspective.
* Feature set (potential generators to add; not an exhaustive list)
** Azure functions (node.js, server)
** Finagle HTTP Client (Scala, client)
** Finagle Http Server (Scala, server)
** Finatra (Scala, server)
** Kotlin Spring MVC/Springboot (server)
** C++ Server, any framework (server)
* General
** Migrate from Maven to Gradle
** Java 9+ support
* Feature set (other options to investigate)
** SPI plugins
*** Templating engine
*** Language extensions
*** Custom extensions (e.g. allowing users to load support for https://github.com/Azure/azure-rest-api-specs[azure-rest-api-specs])
** Customizable templating engines (handlebars support)
** Unit-testing templates (to previously mentioned explicit type as an interface to the template)
* Reduce coupling
** Make types extending `CodegenConfig` become the generation entrypoint
** Allow current `CodegenConfig` types to define templating engine
** Allow current `CodegenConfig` types to modify workflow (currently encapsulated in `DefaultGenerator` and tightly coupled to the template engine
** Clearer reuse of "language" features, outside of "generator" types. That is, rather than enforcing polymorphic sharing of "language" which currently allows the super type to redefine framework-specific mapping functionality, generators could compose one or more language support types.
* Define template deprecation/removal process
=== Long-term
> Expanding tooling offered, integrations, potentially SaaS offering to partially fund efforts.
* Generator UI wrappers
** Move jimschubert/intellij-swagger-codegen plugin under the org, and rename
** Look into an Eclipse UI wrapper around the generator
** Look at Visual Studio Code (and/or Atom, sublime text) integration
* Provide a native GUI for viewing/editing specs. Most tools are currently geared toward developers, but often times it may be non-technical business users who are interested in an API.
* A paid service (SaaS) for generation may be enticing for some users. Such a service would allow for statistics (mentioned earlier in telemetry)
* Additional tools
** node.js build system(s) integration (grunt/gulp/webpack/etc)
** ruby gem
** others (which may require previously mentioned SaaS API)
| AsciiDoc | 2 | MalcolmScoffable/openapi-generator | docs/roadmap.adoc | [
"Apache-2.0"
] |
class SizedNumTest {
Int32 size := 1
Int8 flag := 2
Void main() {
Int16 x := flag
echo("$size $flag $x")
}
} | Fantom | 2 | fanx-dev/fanx | library/baseTest/fan/SizedNumTest.fan | [
"AFL-3.0"
] |
upstream nginx_backends {
zone nginx_backends 64K;
state /tmp/nginx_backends.state;
sticky cookie test;
}
upstream unit_backends {
zone unit_backends 64K;
state /tmp/unit_backends.state;
}
match server_ok {
status 200;
body ~ "Status: OK";
}
server {
listen 80;
status_zone nginx_ws;
location / {
proxy_pass http://nginx_backends;
sub_filter '<!--IP-->' "$upstream_addr";
sub_filter_once on;
health_check uri=/healthcheck.html match=server_ok;
}
}
server {
listen 9080;
status_zone unit;
location / {
proxy_pass http://unit_backends;
}
}
server {
listen 8080;
root /usr/share/nginx/html;
location / {
index dashboard.html;
}
location /api {
api write=on;
}
}
| SourcePawn | 4 | rowi1de/NGINX-Demos | autoscaling-demo/nginx_config/demo.conf.sp | [
"Apache-2.0"
] |
package {
import GzOpenGL.OpenGL;
import GzOpenGL.OpGpuInfo;
import GZ.Gfx.Face;
import GZ.Gpu.GpuObj.GpuBatch;
import GZ.Gpu.ShaderBase.Evbo;
import GZ.Gpu.ShaderModel.GzModel.GzShModel;
import GZ.Sys.Interface.Context;
import GZ.Gpu.GpuObj.GpuFace;
public class OpGpuBatch overplace GpuBatch {
public var aIndice : CArray<UInt8, 1, 6>;
public var oEvbo : Evbo;
<cpp_namespace>
static char aIndice2TriA[] = {3,2,0, 2,1,0 };
</cpp_namespace>
public function OpGpuBatch( _oShModel : GzShModel ):Void{
oEvbo = new Evbo(_oShModel.oProgram );
oEvbo.fSetQuadElement();
oEvbo.fSendData();
}
override function fDraw():Void {
oEvbo.fBind();
//var _nNbElement : Int = Context.oItf.nTotalFaces;
//var _nNbElement : Int = Context.oItf.nTotalFaces-1; //TODO bakcround (-1)
// var _nNbElement : Int = Context.oItf.nTotalFaces-1; //TODO bakcround (-1)
// var _nNbElement : Int = Context.oItf.nTotalFaces-1; //TODO bakcround (-1)
var _nNbElement : Int = GpuFace.nCurrent;
//Debug.fTrace("---Draw _nNbElement: " + _nNbElement);
if(oFBO_Destination == null){
OpenGL.fBindFramebuffer(FRAMEBUFFER, null); //Default
}else{
oFBO_Destination.fBind();
}
//OpenGL.fClearColor(0.0, 5.0, 0.5, 1.0);
if(bAutoClear){
OpenGL.fClear(COLOR_BUFFER_BIT );
}
//TODO Switch case
if(hBlendMode == eBlendMode.None){
OpenGL.fDisable( BLEND );
}
if(hBlendMode == eBlendMode.Alpha){
//Blending kill FPS on Android!!
OpenGL.fEnable( BLEND );
//OpenGL.fDisable( BLEND );
//OpenGL.fBlendFunc(ONE_MINUS_DST_ALPHA, ONE); //Front to back
//OpenGL.fBlendFunc(ONE_MINUS_SRC_ALPHA, ONE); //Back to front
//With FBO, this is reversed?
//OpenGL.fBlendFunc(ONE, ONE_MINUS_DST_ALPHA); //Front to front
OpenGL.fBlendFunc(ONE, ONE_MINUS_SRC_ALPHA); //Back to front
//////////////////////////////////
}
//OpenGL.fDisable( BLEND );
//OpenGL.fClearColor(1.0, 1.0, 0.0, 1.0);
//OpenGL.fClear(COLOR_BUFFER_BIT );
//OpenGL.fDisable( BLEND );
//OpenGL.fDisable( DEPTH_TEST );
// OpenGL.fBindTexture(TEXTURE_2D, 1);
// OpenGL.fBindTexture(TEXTURE_2D, 2);
// OpenGL.fBindTexture(TEXTURE_2D, 3);
// //Blending kill FPS on Android!!
// OpenGL.fEnable( BLEND );
//OpenGL.fDisable( BLEND );
//OpenGL.fBlendFunc(ONE_MINUS_DST_ALPHA, ONE); //Front to back
//OpenGL.fBlendFunc(ONE_MINUS_SRC_ALPHA, ONE); //Back to front
//oShModel.oUnType.nVal = 4;
//oShModel.oUnType.nVal = 6;//Tiles
//oShModel.oUnType.fSend();
/*
if(oRc != 0){
GL_fUniform1i(oGzSh->nLocType, 4);
GL_fActiveTexture( GL_TEXTURE1);
GL_fBindTexture( GL_TEXTURE_2D, oDest->oGpuObj->nTexId );
if(oRc->nGpuTexId){
GL_fActiveTexture( GL_TEXTURE0 );
GL_fBindTexture(GL_TEXTURE_2D, oRc->nGpuTexId);
GL_fUniform2f(oGzSh->nLocTexDim, oRc->nWidth + 2.0, oRc->nHeight + 2.0); //2px of border
}
}else{ //No texture, Unicolor?
//GL_fUniform2f(oGzSh->nLocTexDim,10, 10);
// GL_fUniform1i(oGzSh->nLocType, 6);
}*/
//TODO crash with no info if VBO as not sended DATA
OpenGL.fDrawElementsInstanced(TRIANGLES, 6, UNSIGNED_BYTE, 0, _nNbElement);
//GL_fDrawElementsInstancedfDrawElementsInstanced(TRIANGLES, 6, UNSIGNED_BYTE, 0, _nNbElement);
//GL_fDrawElementsInstanced(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, _aIndice2Tri, nNbElement );
// GL_fDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, _aIndice2Tri );
//GL_fBindBuffer(GL_ARRAY_BUFFER,0);
// OpenGL.fDisable( BLEND );
}
}
} | Redcode | 3 | VLiance/GZE | src/SubLib_GPU/OpenGL/Lib_GzOpenGL/GpuObj/OpGpuBatch.cw | [
"Apache-2.0"
] |
{-
Epigram: a dependently typed programming language
This file is a demonstration of Cochon, the Epigram interactive interface that
looks a bit like a programming language if you squint. Once you have built
the system, start it by running "./Pig", then copy lines one at a time from
this file to the prompt.
If you use Emacs, run send-line.el, then open this file in one window and
a shell running Pig in the other window. You can then type C-c C-r to run
the current line and C-c C-u to undo it.
-}
{-
Bool is one of the simplest possible data types, with only two constructors.
-}
data Bool := ('false : Bool) ; ('true : Bool) ;
elab 'true : Bool ;
elab 'false : Bool ;
{-
Natural numbers are a data type with two constructors: zero is a number, and
every number has a successor.
-}
data Nat := ('zero : Nat) ; ('suc : (n : Nat) -> Nat) ;
elab 'zero : Nat ;
make one := 'suc 'zero : Nat ; elab one ;
make two := 'suc ('suc 'zero) : Nat ; elab two ;
{-
We can write functions to manipulate this data.
The addition function, written in a pattern-matching style, looks like this:
plus : Nat -> Nat -> Nat
plus zero n = n
plus (suc k) n = suc (plus k n)
This definition is recursive (it refers to itself), so why does it make sense?
In other words, why does evaluation of |plus k n| terminate? In fact, the
definition is structurally recursive on the first argument, but the
pattern-matching style hides this.
Epigram requires programs to be total: general recursion is not allowed.
The structural recursion is made explicit by appeal to the induction principle
for natural numbers, here called Nat.Ind.
-}
let plus (m : Nat)(n : Nat) : Nat ;
refine plus m n <= Nat.Ind m ;
next ;
refine plus ('suc k) n = 'suc (plus k n) ;
root ;
{-
Notice that we did not define plus for the case when the first argument is
zero. We are allowed to leave "holes" in programs and fill them in later.
Watch what happens when we try to execute plus:
-}
elab plus two two ;
{-
Some computation is possible, but not all of it! Perhaps we should go back and
fill in the missing line of the program:
-}
next ;
refine plus 'zero n = n ;
root ;
{-
Now we get the result we expect:
-}
elab plus two two ;
{-
A major benefit of explicitly appealing to an induction principle is that we
can invent our own, rather than being restricted to structural recursion.
If we have two numbers x and y, we can eliminate them by comparison, provided
we say what to do in three cases:
l - x is less than y
e - x and y are equal
g - x is greater than y
-}
let compare (x : Nat)(y : Nat)(P : Nat -> Nat -> Set)(l : (x : Nat)(y : Nat) -> P x (plus x ('suc y)))(e : (x : Nat) -> P x x)(g : (x : Nat)(y : Nat) -> P (plus y ('suc x)) y) : P x y ;
refine compare x y P l e g <= Nat.Ind x ;
refine compare 'zero y P l e g <= Nat.Ind y ;
refine compare 'zero 'zero P l e g = e 'zero ;
refine compare 'zero ('suc k) P l e g = l 'zero k ;
refine compare ('suc j) y P l e g <= Nat.Ind y ;
refine compare ('suc j) 'zero P l e g = g j 'zero ;
refine compare ('suc j) ('suc k) P l e g <= compare j k ;
refine compare ('suc j) ('suc (plus j ('suc k))) P l e g = l ('suc j) k ;
refine compare ('suc j) ('suc j) P l e g = e ('suc j) ;
refine compare ('suc (plus k ('suc j))) ('suc k) P l e g = g j ('suc k) ;
root ;
{-
Now that we have our new induction principle, we can use it just like Nat.Ind.
We already did this when explaining how to compare two successors. Here's a
simpler example:
-}
let max (a : Nat)(b : Nat) : Nat ;
refine max a b <= compare a b ;
refine max a (plus a ('suc b)) = plus a ('suc b) ;
refine max a a = a ;
refine max (plus b ('suc a)) b = plus b ('suc a) ;
root ;
elab max two one ;
elab max 'zero one ;
elab max 'zero 'zero ;
{-
The Curry-Howard correspondence is the observation that types correspond to
mathematical theorems, and a program is a proof of a theorem. We can use this
to state and prove theorems about our programs.
For example, we can show that plus is commutative (independent of the order
of its arguments):
-}
make plus-commutative := ? : :- ((k : Nat)(n : Nat) => plus k n == plus n k) ;
{-
In fact, thanks to our model of equality we can prove a stronger result,
which is not true in most other comparable systems: that the function
|plus| is equal to |flip plus|.
-}
make flip := (\ f k n -> f n k) : (Nat -> Nat -> Nat) -> Nat -> Nat -> Nat ;
make plus-function-commutative := ? : :- (flip plus == plus) ;
{-
See Plus.pig in the Epigram tests directory for proofs of these theorems.
-}
{-
Parameterised types such as lists are fundamental to functional programming.
A list is either empty (nil) or a value followed by a list (cons).
-}
data List (A : Set) := ('nil : List A) ; ('cons : A -> List A -> List A) ;
elab 'nil : List Bool ;
elab 'cons 'true 'nil : List Bool ;
elab 'cons one ('cons two 'nil) : List Nat ;
{-
What happens if we try to write a function to extract the first element of a
list? There is nothing we can do in the nil case, so we just skip it.
-}
let head (A : Set)(as : List A) : A ;
refine head A as <= List.Ind A as ;
next ;
refine head A ('cons a _) = a ;
root ;
elab head Bool ('cons 'true 'nil) ;
elab head Bool ('cons 'false ('cons 'true 'nil)) ;
elab head Bool 'nil ;
{-
There are many similar examples that arise in functional programming.
How can we resolve this problem? Perhaps instead of using lists, we should
work with vectors: lists indexed by their length.
In the following line, note that the natural number is an index, not a
parameter: it varies depending on which constructor you choose.
-}
idata Vec (A : Set) : Nat -> Set := ('vnil : Vec A 'zero) ; ('vcons : (n : Nat) -> A -> Vec A n -> Vec A ('suc n)) ;
{-
Now we can safely define the vector version of head. Since we ask for a vector
of length at least one, we know we can always return a result.
-}
let vhead (A : Set)(n : Nat)(as : Vec A ('suc n)) : A ;
refine vhead A n as <= Vec.Ind A ('suc n) as ;
refine vhead A n ('vcons n a as) = a ;
root ;
elab vhead Bool 'zero ('vcons 'zero 'true 'vnil) ;
elab vhead Bool one ('vcons one 'false ('vcons 'zero 'true 'vnil)) ;
{-
The vectorised application function takes a vector of functions and a vector
of arguments, and applies the functions pointwise.
-}
let vapp (A : Set)(B : Set)(n : Nat)(fs : Vec (A -> B) n)(as : Vec A n) : Vec B n ;
refine vapp A B n fs as <= Vec.Ind (A -> B) n fs ;
refine vapp A B 'zero 'vnil as = 'vnil ;
refine vapp A B ('suc j) ('vcons j f fs) as <= Vec.Ind A ('suc j) as ;
refine vapp A B ('suc j) ('vcons j f fs) ('vcons j a as) = 'vcons j (f a) (vapp A B j fs as) ;
root ;
make fs := 'vcons one (plus one) ('vcons 'zero (\ m -> m) 'vnil) : Vec (Nat -> Nat) two ;
make as := 'vcons one two ('vcons 'zero one 'vnil) : Vec Nat two ;
elab vapp Nat Nat two fs as ;
{-
Another dependent type is the type of finite numbers: |Fin n| is the type of
natural numbers less than |n|.
-}
idata Fin : Nat -> Set := ('fzero : (n : Nat) -> Fin ('suc n)) ; ('fsuc : (n : Nat) -> Fin n -> Fin ('suc n)) ;
elab 'fzero 'zero : Fin one ;
elab 'fzero one : Fin two ;
elab 'fsuc one ('fzero 'zero) : Fin two ;
{-
We can prove that, if you have an element of Fin 'zero, you must be lying:
-}
let nuffin (x : Fin 'zero) : :- FF ;
refine nuffin x <= Fin.Ind 'zero x ;
root ;
{-
Now that we can represent numbers less than a certain value, we can explain
how to safely lookup an index in a vector. At runtime, it would not be
necessary to check array bounds, because out-of-bounds accesses are prevented
by the type system.
-}
let lookup (A : Set)(n : Nat)(as : Vec A n)(fn : Fin n) : A ;
refine lookup A n as fn <= Vec.Ind A n as ;
refine lookup A 'zero 'vnil fn = naughtE (nuffin fn) A ;
refine lookup A ('suc k) ('vcons k a as) fn <= Fin.Ind ('suc k) fn ;
refine lookup A ('suc k) ('vcons k a _) ('fzero k) = a ;
refine lookup A ('suc k) ('vcons k a as) ('fsuc k fn) = lookup A k as fn ;
root ;
elab lookup Bool one ('vcons 'zero 'true 'vnil) ('fzero 'zero) ;
elab lookup Bool two ('vcons one 'true ('vcons 'zero 'false 'vnil)) ('fzero one) ;
elab lookup Bool two ('vcons one 'true ('vcons 'zero 'false 'vnil)) ('fsuc one ('fzero 'zero)) ;
{-
Here ends the demo. If you are feeling brave, go ahead and look at Cat.pig ;-)
-} | PigLatin | 5 | mietek/epigram | test/Demo.pig | [
"MIT"
] |
thing onePressButton import "onePressButton.thingml"
thing lightSensor import "lightSensor.thingml"
thing smartLight import "smartLight.thingml"
thing temperatureSensor import "temperatureSensor.thingml"
thing airConditioner import "airConditioner.thingml"
thing heater import "heater.thingml"
thing remoteDisplay import "remoteDisplay.thingml"
thing gateway import "gateway.thingml"
role sensitive
user manufacturer
user bob
// Policies
policy smartPolicy {
rule roomLightSensor->state:isMedium trigger:goToState roomSmartLight->state:isOn
rule roomLightSensor->state:isMedium
trigger:executeFunction roomSmartLight->function:setIntensity("50")
}
policy commControlPolicy {
rule myAirConditioner deny:send roomLightSensor
rule sensitive deny:send manufacturer
}
policy bridgeRule {
rule temperatureChannel->path:sensedTemperature bridge:to remoteDisplayChannel->path:sensedTemperature
rule lightChannel->path:sensedLight bridge:to remoteDisplayChannel->path:sensedLightRoom
}
// Channels
channel temperatureChannel {
path bobSmartHome
path sensedTemperature (temperatureSensorMessage:JSON) subpathOf bobSmartHome
}
channel lightChannel {
path sensedLight (lightSensorMessage:JSON)
}
channel remoteDisplayChannel {
path rooms
path sensedTemperature (temperatureSensorMessage:JSON) subpathOf rooms
path sensedLightRoom (lightSensorMessage:JSON) subpathOf rooms
path logs = "logs"
}
channel manufacturerChannel {
path maintenanceLogs (logsMessage:JSON)
}
network smartHomeNetwork {
domain org.atlanmod.smarthome
// Medium Things
instance myGateway:gateway platform ARDUINO owner bob
instance myRemoteDisplay:remoteDisplay platform JAVASCRIPT owner bob
// Sensors and actuators
instance roomLightSensor:lightSensor platform POSIX owner bob
instance roomSmartLight:smartLight platform POSIX owner bob
instance myAirConditioner:airConditioner platform POSIX owner bob
instance myTemperature:temperatureSensor platform POSIX owner bob
instance myHeater:heater platform POSIX owner bob
// Channels
instance temperatureZigbeeChannel:temperatureChannel protocol ZIGBEE
instance heaterZigbeeChannel:temperatureChannel protocol ZWAVE
instance sensedLightZigbeeChannel:lightChannel protocol ZIGBEE
instance myZigbeeChanel:manufacturerChannel protocol ZIGBEE
instance myMQTTChannel:remoteDisplayChannel protocol MQTT
instance myAMQPChanel:manufacturerChannel protocol AMQP
// Receiving sensed information in Gateway
bind sensedTemperature : myGateway.temperaturePort <= temperatureZigbeeChannel{sensedTemperature}
bind logsForManufacturer : myGateway.manufacturerLogsPort <= myZigbeeChanel{maintenanceLogs}
bind sensedLight : myGateway.sensedLightPort <= sensedLightZigbeeChannel{sensedLight}
// Sending Sensed Light to Smart Light
bind roomLightSensor.sendingSensedLightPort => sensedLightZigbeeChannel{sensedLight}
bind roomSmartLight.receivingSensedLightPort <= sensedLightZigbeeChannel{sensedLight}
// Sending Sensed Temperature to Air Conditioner
bind myTemperature.sendingTemperaturePort => temperatureZigbeeChannel{sensedTemperature}
// bind myTemperature.manufacturerLogsPort => myZigbeeChanel{maintenanceLogs}
bind myAirConditioner.receivingTemperaturePort <= temperatureZigbeeChannel{sensedTemperature}
// Receive temperature in Zwave channel
forward sensedTemperature to heaterZigbeeChannel{sensedTemperature}
// Bridge received sensed temperature in Gateway to an MQTT channel
forward sensedLight to myMQTTChannel{sensedLightRoom,logs}
// Bridge received sensed light in Gateway to an MQTT channel
forward sensedTemperature to myMQTTChannel{sensedTemperature,logs}
// Bridge received logs in Gateway to an AMQP channel (Manufacturer)
forward logsForManufacturer to myAMQPChanel{maintenanceLogs}
// Receiving Sensed Light in Monitor
bind myRemoteDisplay.receivingSensedLightPort <= myMQTTChannel{sensedLightRoom}
// Receiving Sensed Temperature in Monitor
bind myRemoteDisplay.receivingTemperaturePort <= myMQTTChannel{sensedTemperature}
} | Cycript | 5 | atlanmod/wsan | examples/smarthome/newmain.cy | [
"Apache-2.0"
] |
' Licensed to the .NET Foundation under one or more agreements.
' The .NET Foundation licenses this file to you under the MIT license.
' See the LICENSE file in the project root for more information.
Namespace Microsoft.CodeAnalysis.Editor.VisualBasic.UnitTests.Recommendations.Declarations.ModifierKeywordRecommenderTests
Public Class InsideStructureDeclaration
Inherits RecommenderTests
#Region "Scope Keywords"
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub PublicExistsTest()
VerifyRecommendationsContain(<StructureDeclaration>|</StructureDeclaration>, "Public")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub ProtectedMissingTest()
VerifyRecommendationsMissing(<StructureDeclaration>|</StructureDeclaration>, "Protected")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub PrivateExistsTest()
VerifyRecommendationsContain(<StructureDeclaration>|</StructureDeclaration>, "Private")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub FriendExistsTest()
VerifyRecommendationsContain(<StructureDeclaration>|</StructureDeclaration>, "Friend")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub ProtectedFriendMissingTest()
VerifyRecommendationsMissing(<StructureDeclaration>|</StructureDeclaration>, "Protected Friend")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub PublicNotAfterPublicTest()
VerifyRecommendationsMissing(<StructureDeclaration>Public |</StructureDeclaration>, "Public")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub ProtectedNotAfterPublicTest()
VerifyRecommendationsMissing(<StructureDeclaration>Public |</StructureDeclaration>, "Protected")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub PrivateNotAfterPublicTest()
VerifyRecommendationsMissing(<StructureDeclaration>Public |</StructureDeclaration>, "Private")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub FriendNotAfterPublicTest()
VerifyRecommendationsMissing(<StructureDeclaration>Public |</StructureDeclaration>, "Friend")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub ProtectedFriendNotAfterPublicTest()
VerifyRecommendationsMissing(<StructureDeclaration>Public |</StructureDeclaration>, "Protected Friend")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub FriendNotAfterProtectedTest()
VerifyRecommendationsMissing(<StructureDeclaration>Protected |</StructureDeclaration>, "Friend")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub FriendNotAfterProtectedFriendTest()
VerifyRecommendationsMissing(<StructureDeclaration>Protected Friend |</StructureDeclaration>, "Friend")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub ProtectedNotAfterFriendTest()
VerifyRecommendationsMissing(<StructureDeclaration>Friend |</StructureDeclaration>, "Protected")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub ProtectedNotAfterProtectedFriendTest()
VerifyRecommendationsMissing(<StructureDeclaration>Protected Friend |</StructureDeclaration>, "Protected")
End Sub
#End Region
#Region "Narrowing and Widening Keywords"
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub NarrowingExistsTest()
VerifyRecommendationsContain(<StructureDeclaration>|</StructureDeclaration>, "Narrowing")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub WideningExistsTest()
VerifyRecommendationsContain(<StructureDeclaration>|</StructureDeclaration>, "Widening")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub NarrowingNotAfterWideningTest()
VerifyRecommendationsMissing(<StructureDeclaration>Widening |</StructureDeclaration>, "Narrowing")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub WideningNotAfterNarrowingTest()
VerifyRecommendationsMissing(<StructureDeclaration>Narrowing |</StructureDeclaration>, "Widening")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub NarrowingNotAfterProtectedTest()
VerifyRecommendationsMissing(<StructureDeclaration>Protected |</StructureDeclaration>, "Narrowing")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub WideningNotAfterProtectedTest()
VerifyRecommendationsMissing(<StructureDeclaration>Protected |</StructureDeclaration>, "Widening")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub NarrowingNotAfterPrivateTest()
VerifyRecommendationsMissing(<StructureDeclaration>Private |</StructureDeclaration>, "Narrowing")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub WideningNotAfterPrivateTest()
VerifyRecommendationsMissing(<StructureDeclaration>Private |</StructureDeclaration>, "Widening")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub NarrowingNotAfterProtectedFriendTest()
VerifyRecommendationsMissing(<StructureDeclaration>Protected Friend |</StructureDeclaration>, "Narrowing")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub WideningNotAfterProtectedFriendTest()
VerifyRecommendationsMissing(<StructureDeclaration>Protected Friend |</StructureDeclaration>, "Widening")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub NarrowingNotAfterMustOverrideTest()
VerifyRecommendationsMissing(<StructureDeclaration>MustOverride |</StructureDeclaration>, "Narrowing")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub WideningNotAfterMustOverrideTest()
VerifyRecommendationsMissing(<StructureDeclaration>MustOverride |</StructureDeclaration>, "Widening")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub NarrowingNotAfterMustInheritTest()
VerifyRecommendationsMissing(<StructureDeclaration>MustInherit |</StructureDeclaration>, "Narrowing")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub WideningNotAfterNotInheritableTest()
VerifyRecommendationsMissing(<StructureDeclaration>NotInheritable |</StructureDeclaration>, "Widening")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub NarrowingNotAfterNotInheritableTest()
VerifyRecommendationsMissing(<StructureDeclaration>NotInheritable |</StructureDeclaration>, "Narrowing")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub WideningNotAfterMustInheritTest()
VerifyRecommendationsMissing(<StructureDeclaration>MustInherit |</StructureDeclaration>, "Widening")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub NarrowingNotAfterNotOverridableTest()
VerifyRecommendationsMissing(<StructureDeclaration>NotOverridable |</StructureDeclaration>, "Narrowing")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub WideningNotAfterNotOverridableTest()
VerifyRecommendationsMissing(<StructureDeclaration>NotOverridable |</StructureDeclaration>, "Widening")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub NarrowingAfterOverloadsTest()
VerifyRecommendationsContain(<StructureDeclaration>Overloads |</StructureDeclaration>, "Narrowing")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub WideningAfterOverloadsTest()
VerifyRecommendationsContain(<StructureDeclaration>Overloads |</StructureDeclaration>, "Widening")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub NarrowingNotAfterOverridableTest()
VerifyRecommendationsMissing(<StructureDeclaration>Overridable |</StructureDeclaration>, "Narrowing")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub WideningNotAfterOverridableTest()
VerifyRecommendationsMissing(<StructureDeclaration>Overridable |</StructureDeclaration>, "Widening")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub NarrowingNotAfterPartialTest()
VerifyRecommendationsMissing(<StructureDeclaration>Partial |</StructureDeclaration>, "Narrowing")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub WideningNotAfterPartialTest()
VerifyRecommendationsMissing(<StructureDeclaration>Partial |</StructureDeclaration>, "Widening")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub NarrowingAfterSharedTest()
VerifyRecommendationsContain(<StructureDeclaration>Shared |</StructureDeclaration>, "Narrowing")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub WideningAfterSharedTest()
VerifyRecommendationsContain(<StructureDeclaration>Shared |</StructureDeclaration>, "Widening")
End Sub
#End Region
#Region "MustInherit and NotInheritable Keywords"
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub MustInheritExistsTest()
VerifyRecommendationsContain(<StructureDeclaration>|</StructureDeclaration>, "MustInherit")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub NotInheritableExistsTest()
VerifyRecommendationsContain(<StructureDeclaration>|</StructureDeclaration>, "NotInheritable")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub MustInheritNotAfterNotInheritableTest()
VerifyRecommendationsMissing(<StructureDeclaration>NotInheritable |</StructureDeclaration>, "MustInherit")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub NotInheritableNotAfterMustInheritTest()
VerifyRecommendationsMissing(<StructureDeclaration>MustInherit |</StructureDeclaration>, "NotInheritable")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub MustInheritNotAfterNarrowingTest()
VerifyRecommendationsMissing(<StructureDeclaration>Narrowing |</StructureDeclaration>, "MustInherit")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub NotInheritableNotAfterNarrowingTest()
VerifyRecommendationsMissing(<StructureDeclaration>Narrowing |</StructureDeclaration>, "NotInheritable")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub MustInheritNotAfterWideningTest()
VerifyRecommendationsMissing(<StructureDeclaration>Widening |</StructureDeclaration>, "MustInherit")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub NotInheritableNotAfterWideningTest()
VerifyRecommendationsMissing(<StructureDeclaration>Widening |</StructureDeclaration>, "NotInheritable")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub MustInheritNotAfterMustOverrideTest()
VerifyRecommendationsMissing(<StructureDeclaration>MustOverride |</StructureDeclaration>, "MustInherit")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub NotInheritableNotAfterMustOverrideTest()
VerifyRecommendationsMissing(<StructureDeclaration>MustOverride |</StructureDeclaration>, "NotInheritable")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub MustInheritNotAfterNotOverridableTest()
VerifyRecommendationsMissing(<StructureDeclaration>NotOverridable |</StructureDeclaration>, "MustInherit")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub NotInheritableNotAfterNotOverridableTest()
VerifyRecommendationsMissing(<StructureDeclaration>NotOverridable |</StructureDeclaration>, "NotInheritable")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub MustInheritNotAfterOverridableTest()
VerifyRecommendationsMissing(<StructureDeclaration>Overridable |</StructureDeclaration>, "MustInherit")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub NotInheritableNotAfterOverridableTest()
VerifyRecommendationsMissing(<StructureDeclaration>Overridable |</StructureDeclaration>, "NotInheritable")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub MustInheritAfterPartialTest()
VerifyRecommendationsContain(<StructureDeclaration>Partial |</StructureDeclaration>, "MustInherit")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub NotInheritableAfterPartialTest()
VerifyRecommendationsContain(<StructureDeclaration>Partial |</StructureDeclaration>, "NotInheritable")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub MustInheritNotAfterReadOnlyTest()
VerifyRecommendationsMissing(<StructureDeclaration>ReadOnly |</StructureDeclaration>, "MustInherit")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub NotInheritableNotAfterReadOnlyTest()
VerifyRecommendationsMissing(<StructureDeclaration>ReadOnly |</StructureDeclaration>, "NotInheritable")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub MustInheritNotAfterSharedTest()
VerifyRecommendationsMissing(<StructureDeclaration>Shared |</StructureDeclaration>, "MustInherit")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub NotInheritableNotAfterSharedTest()
VerifyRecommendationsMissing(<StructureDeclaration>Shared |</StructureDeclaration>, "NotInheritable")
End Sub
#End Region
#Region "Overrides and Overridable Set of Keywords"
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub OverridesExistsTest()
VerifyRecommendationsContain(<StructureDeclaration>|</StructureDeclaration>, "Overrides")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub OverridesNotAfterOverridesTest()
VerifyRecommendationsMissing(<StructureDeclaration>Overrides |</StructureDeclaration>, "Overrides")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub OverridesNotAfterMustOverrideTest()
VerifyRecommendationsMissing(<StructureDeclaration>MustOverride |</StructureDeclaration>, "Overrides")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub OverridesNotAfterOverridableTest()
VerifyRecommendationsMissing(<StructureDeclaration>Overridable |</StructureDeclaration>, "Overrides")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub OverridesNotAfterNotOverridableTest()
VerifyRecommendationsMissing(<StructureDeclaration>NotOverridable |</StructureDeclaration>, "Overrides")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub OverridesNotAfterShadowsTest()
VerifyRecommendationsMissing(<StructureDeclaration>Shadows |</StructureDeclaration>, "Overrides")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub OverridesAfterOverloadsTest()
VerifyRecommendationsContain(<StructureDeclaration>Overloads |</StructureDeclaration>, "Overrides")
End Sub
' ---------
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub MustOverrideMissingTest()
VerifyRecommendationsMissing(<StructureDeclaration>|</StructureDeclaration>, "MustOverride")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub MustOverrideNotAfterOverridesTest()
VerifyRecommendationsMissing(<StructureDeclaration>Overrides |</StructureDeclaration>, "MustOverride")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub MustOverrideNotAfterMustOverrideTest()
VerifyRecommendationsMissing(<StructureDeclaration>MustOverride |</StructureDeclaration>, "MustOverride")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub MustOverrideNotAfterOverridableTest()
VerifyRecommendationsMissing(<StructureDeclaration>Overridable |</StructureDeclaration>, "MustOverride")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub MustOverrideNotAfterNotOverridableTest()
VerifyRecommendationsMissing(<StructureDeclaration>NotOverridable |</StructureDeclaration>, "MustOverride")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub MustOverrideNotAfterShadowsTest()
VerifyRecommendationsMissing(<StructureDeclaration>Shadows |</StructureDeclaration>, "MustOverride")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub MustOverrideNotAfterOverloadsTest()
VerifyRecommendationsMissing(<StructureDeclaration>Overloads |</StructureDeclaration>, "MustOverride")
End Sub
' ---------
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub OverridableMissingTest()
VerifyRecommendationsMissing(<StructureDeclaration>|</StructureDeclaration>, "Overridable")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub OverridableNotAfterOverridesTest()
VerifyRecommendationsMissing(<StructureDeclaration>Overrides |</StructureDeclaration>, "Overridable")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub OverridableNotAfterMustOverrideTest()
VerifyRecommendationsMissing(<StructureDeclaration>MustOverride |</StructureDeclaration>, "Overridable")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub OverridableNotAfterOverridableTest()
VerifyRecommendationsMissing(<StructureDeclaration>Overridable |</StructureDeclaration>, "Overridable")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub OverridableNotAfterNotOverridableTest()
VerifyRecommendationsMissing(<StructureDeclaration>NotOverridable |</StructureDeclaration>, "Overridable")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub OverridableNotAfterShadowsTest()
VerifyRecommendationsMissing(<StructureDeclaration>Shadows |</StructureDeclaration>, "Overridable")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub OverridableNotAfterOverloadsTest()
VerifyRecommendationsMissing(<StructureDeclaration>Overloads |</StructureDeclaration>, "Overridable")
End Sub
' ---------
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub NotOverridableMissingTest()
VerifyRecommendationsMissing(<StructureDeclaration>|</StructureDeclaration>, "NotOverridable")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub NotOverridableAfterOverridesTest()
VerifyRecommendationsMissing(<StructureDeclaration>Overrides |</StructureDeclaration>, "NotOverridable")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub NotOverridableNotAfterMustOverrideTest()
VerifyRecommendationsMissing(<StructureDeclaration>MustOverride |</StructureDeclaration>, "NotOverridable")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub NotOverridableNotAfterOverridableTest()
VerifyRecommendationsMissing(<StructureDeclaration>Overridable |</StructureDeclaration>, "NotOverridable")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub NotOverridableNotAfterNotOverridableTest()
VerifyRecommendationsMissing(<StructureDeclaration>NotOverridable |</StructureDeclaration>, "NotOverridable")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub NotOverridableNotAfterShadowsTest()
VerifyRecommendationsMissing(<StructureDeclaration>Shadows |</StructureDeclaration>, "NotOverridable")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub NotOverridableNotAfterOverloadsTest()
VerifyRecommendationsMissing(<StructureDeclaration>Overloads |</StructureDeclaration>, "NotOverridable")
End Sub
' ---------
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub OverloadsExistsTest()
VerifyRecommendationsContain(<StructureDeclaration>|</StructureDeclaration>, "Overloads")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub OverloadsAfterOverridesTest()
VerifyRecommendationsContain(<StructureDeclaration>Overrides |</StructureDeclaration>, "Overloads")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub OverloadsNotAfterMustOverrideTest()
VerifyRecommendationsMissing(<StructureDeclaration>MustOverride |</StructureDeclaration>, "Overloads")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub OverloadsNotAfterOverridableTest()
VerifyRecommendationsMissing(<StructureDeclaration>Overridable |</StructureDeclaration>, "Overloads")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub OverloadsNotAfterNotOverridableTest()
VerifyRecommendationsMissing(<StructureDeclaration>NotOverridable |</StructureDeclaration>, "Overloads")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub OverloadsNotAfterShadowsTest()
VerifyRecommendationsMissing(<StructureDeclaration>Shadows |</StructureDeclaration>, "Overloads")
End Sub
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub OverloadsNotAfterOverloadsTest()
VerifyRecommendationsMissing(<StructureDeclaration>Overloads |</StructureDeclaration>, "Overloads")
End Sub
#End Region
#Region "ReadOnly and WriteOnly Keywords"
' ReadOnly/WriteOnly: offered alone and after Shared/Default, but not after
' MustInherit (a type modifier that cannot combine with property accessors here).
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub ReadOnlyExistsTest()
    VerifyRecommendationsContain(<StructureDeclaration>|</StructureDeclaration>, "ReadOnly")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub WriteOnlyExistsTest()
    VerifyRecommendationsContain(<StructureDeclaration>|</StructureDeclaration>, "WriteOnly")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub ReadOnlyAfterSharedTest()
    VerifyRecommendationsContain(<StructureDeclaration>Shared |</StructureDeclaration>, "ReadOnly")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub WriteOnlyAfterSharedTest()
    VerifyRecommendationsContain(<StructureDeclaration>Shared |</StructureDeclaration>, "WriteOnly")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub ReadOnlyAfterDefaultTest()
    VerifyRecommendationsContain(<StructureDeclaration>Default |</StructureDeclaration>, "ReadOnly")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub WriteOnlyAfterDefaultTest()
    VerifyRecommendationsContain(<StructureDeclaration>Default |</StructureDeclaration>, "WriteOnly")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub ReadOnlyNotAfterMustInheritTest()
    VerifyRecommendationsMissing(<StructureDeclaration>MustInherit |</StructureDeclaration>, "ReadOnly")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub WriteOnlyNotAfterMustInheritTest()
    VerifyRecommendationsMissing(<StructureDeclaration>MustInherit |</StructureDeclaration>, "WriteOnly")
End Sub
' Renamed from ReadOnlyAfterOverridableTest: the body asserts the keyword is
' MISSING, so the name now follows this file's "...NotAfter..." convention for
' VerifyRecommendationsMissing tests. [Fact] methods are discovered by
' attribute, not by name, so no callers are affected.
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub ReadOnlyNotAfterOverridableTest()
    VerifyRecommendationsMissing(<StructureDeclaration>Overridable |</StructureDeclaration>, "ReadOnly")
End Sub
' Renamed from WriteOnlyAfterOverridableTest: the body asserts the keyword is
' MISSING, so the name now follows the file's "...NotAfter..." convention.
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub WriteOnlyNotAfterOverridableTest()
    VerifyRecommendationsMissing(<StructureDeclaration>Overridable |</StructureDeclaration>, "WriteOnly")
End Sub
' "ReadOnly" must not follow "NotOverridable" inside a structure declaration.
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub ReadOnlyNotAfterNotOverridableTest()
    VerifyRecommendationsMissing(<StructureDeclaration>NotOverridable |</StructureDeclaration>, "ReadOnly")
End Sub
' Renamed from WriteOnlyAfterNotOverridableTest: the body asserts the keyword
' is MISSING, so the name now follows the file's "...NotAfter..." convention
' (mirrors ReadOnlyNotAfterNotOverridableTest directly above).
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub WriteOnlyNotAfterNotOverridableTest()
    VerifyRecommendationsMissing(<StructureDeclaration>NotOverridable |</StructureDeclaration>, "WriteOnly")
End Sub
' ReadOnly/WriteOnly may follow Overloads, but not MustOverride, conversion
' modifiers (Narrowing), or Partial.
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub ReadOnlyAfterOverloadsTest()
    VerifyRecommendationsContain(<StructureDeclaration>Overloads |</StructureDeclaration>, "ReadOnly")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub WriteOnlyAfterOverloadsTest()
    VerifyRecommendationsContain(<StructureDeclaration>Overloads |</StructureDeclaration>, "WriteOnly")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub ReadOnlyNotAfterMustOverrideTest()
    VerifyRecommendationsMissing(<StructureDeclaration>MustOverride |</StructureDeclaration>, "ReadOnly")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub WriteOnlyNotAfterMustOverrideTest()
    VerifyRecommendationsMissing(<StructureDeclaration>MustOverride |</StructureDeclaration>, "WriteOnly")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub ReadOnlyNotAfterNarrowingTest()
    VerifyRecommendationsMissing(<StructureDeclaration>Narrowing |</StructureDeclaration>, "ReadOnly")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub WriteOnlyNotAfterNarrowingTest()
    VerifyRecommendationsMissing(<StructureDeclaration>Narrowing |</StructureDeclaration>, "WriteOnly")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub ReadOnlyNotAfterPartialTest()
    VerifyRecommendationsMissing(<StructureDeclaration>Partial |</StructureDeclaration>, "ReadOnly")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub WriteOnlyNotAfterPartialTest()
    VerifyRecommendationsMissing(<StructureDeclaration>Partial |</StructureDeclaration>, "WriteOnly")
End Sub
#End Region
#Region "Partial Keyword"
' "Partial": offered alone and after type modifiers (MustInherit,
' NotInheritable), but never after member/override/conversion modifiers or
' after itself.
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub PartialExistsTest()
    VerifyRecommendationsContain(<StructureDeclaration>|</StructureDeclaration>, "Partial")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub PartialNotAfterMustOverrideTest()
    VerifyRecommendationsMissing(<StructureDeclaration>MustOverride |</StructureDeclaration>, "Partial")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub PartialNotAfterPartialTest()
    VerifyRecommendationsMissing(<StructureDeclaration>Partial |</StructureDeclaration>, "Partial")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub PartialAfterMustInheritTest()
    VerifyRecommendationsContain(<StructureDeclaration>MustInherit |</StructureDeclaration>, "Partial")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub PartialAfterNotInheritableTest()
    VerifyRecommendationsContain(<StructureDeclaration>NotInheritable |</StructureDeclaration>, "Partial")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub PartialNotAfterNotOverridableTest()
    VerifyRecommendationsMissing(<StructureDeclaration>NotOverridable |</StructureDeclaration>, "Partial")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub PartialNotAfterOverloadsTest()
    VerifyRecommendationsMissing(<StructureDeclaration>Overloads |</StructureDeclaration>, "Partial")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub PartialNotAfterOverridesTest()
    VerifyRecommendationsMissing(<StructureDeclaration>Overrides |</StructureDeclaration>, "Partial")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub PartialNotAfterOverridableTest()
    VerifyRecommendationsMissing(<StructureDeclaration>Overridable |</StructureDeclaration>, "Partial")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub PartialNotAfterReadOnlyTest()
    VerifyRecommendationsMissing(<StructureDeclaration>ReadOnly |</StructureDeclaration>, "Partial")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub PartialNotAfterWriteOnlyTest()
    VerifyRecommendationsMissing(<StructureDeclaration>WriteOnly |</StructureDeclaration>, "Partial")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub PartialNotAfterNarrowingTest()
    VerifyRecommendationsMissing(<StructureDeclaration>Narrowing |</StructureDeclaration>, "Partial")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub PartialNotAfterWideningTest()
    VerifyRecommendationsMissing(<StructureDeclaration>Widening |</StructureDeclaration>, "Partial")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub PartialNotAfterShadowsTest()
    VerifyRecommendationsMissing(<StructureDeclaration>Shadows |</StructureDeclaration>, "Partial")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub PartialNotAfterDefaultTest()
    VerifyRecommendationsMissing(<StructureDeclaration>Default |</StructureDeclaration>, "Partial")
End Sub
#End Region
#Region "Shadows Keyword"
' "Shadows": offered alone, but not after override-related or type modifiers.
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub ShadowsExistsTest()
    VerifyRecommendationsContain(<StructureDeclaration>|</StructureDeclaration>, "Shadows")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub ShadowsNotAfterMustOverrideTest()
    VerifyRecommendationsMissing(<StructureDeclaration>MustOverride |</StructureDeclaration>, "Shadows")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub ShadowsNotAfterMustInheritTest()
    VerifyRecommendationsMissing(<StructureDeclaration>MustInherit |</StructureDeclaration>, "Shadows")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub ShadowsNotAfterNotInheritableTest()
    VerifyRecommendationsMissing(<StructureDeclaration>NotInheritable |</StructureDeclaration>, "Shadows")
End Sub
' Renamed from ShadowsAfterNotOverridableTest: the body asserts the keyword is
' MISSING, so the name now follows the file's "...NotAfter..." convention.
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub ShadowsNotAfterNotOverridableTest()
    VerifyRecommendationsMissing(<StructureDeclaration>NotOverridable |</StructureDeclaration>, "Shadows")
End Sub
' "Shadows" may follow accessor and conversion modifiers (ReadOnly, WriteOnly,
' Narrowing, Widening, Default) but not Overloads/Overrides/Overridable or itself.
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub ShadowsNotAfterOverloadsTest()
    VerifyRecommendationsMissing(<StructureDeclaration>Overloads |</StructureDeclaration>, "Shadows")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub ShadowsNotAfterOverridesTest()
    VerifyRecommendationsMissing(<StructureDeclaration>Overrides |</StructureDeclaration>, "Shadows")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub ShadowsNotAfterOverridableTest()
    VerifyRecommendationsMissing(<StructureDeclaration>Overridable |</StructureDeclaration>, "Shadows")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub ShadowsAfterReadOnlyTest()
    VerifyRecommendationsContain(<StructureDeclaration>ReadOnly |</StructureDeclaration>, "Shadows")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub ShadowsAfterWriteOnlyTest()
    VerifyRecommendationsContain(<StructureDeclaration>WriteOnly |</StructureDeclaration>, "Shadows")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub ShadowsAfterNarrowingTest()
    VerifyRecommendationsContain(<StructureDeclaration>Narrowing |</StructureDeclaration>, "Shadows")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub ShadowsAfterWideningTest()
    VerifyRecommendationsContain(<StructureDeclaration>Widening |</StructureDeclaration>, "Shadows")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub ShadowsNotAfterShadowsTest()
    VerifyRecommendationsMissing(<StructureDeclaration>Shadows |</StructureDeclaration>, "Shadows")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub ShadowsAfterDefaultTest()
    VerifyRecommendationsContain(<StructureDeclaration>Default |</StructureDeclaration>, "Shadows")
End Sub
#End Region
#Region "Shared Keyword"
' "Shared": offered alone, but not repeated after itself.
<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub SharedDoesExistTest()
    VerifyRecommendationsContain(<StructureDeclaration>|</StructureDeclaration>, "Shared")
End Sub

<Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)>
Public Sub SharedDoesNotExistAfterSharedTest()
    VerifyRecommendationsMissing(<StructureDeclaration>Shared |</StructureDeclaration>, "Shared")
End Sub
#End Region
End Class
End Namespace
| Visual Basic | 4 | frandesc/roslyn | src/EditorFeatures/VisualBasicTest/Recommendations/Declarations/ModifierKeywordRecommenderTests.InsideStructureDeclaration.vb | [
"MIT"
] |
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% This file is part of Logtalk <https://logtalk.org/>
% Copyright 2017 Ebrahim Azarisooreh <ebrahim.azarisooreh@gmail.com> and
% Paulo Moura <pmoura@logtalk.org>
% SPDX-License-Identifier: Apache-2.0
%
% Licensed under the Apache License, Version 2.0 (the "License");
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% lgtunit test object for the "number of rules" (nor) code metric: each test
% asserts the metric score for a fixture entity (categories, objects,
% protocols, prototypes) against the expected number_of_rules(Local, Total).
:- object(nor_metric_tests,
	extends(lgtunit)).

	:- info([
		version is 0:3:0,
		author is 'Ebrahim Azarisooreh and Paulo Moura',
		date is 2021-05-08,
		comment is 'Unit tests for the entity number of rules metric.'
	]).

	% entities whose clause coverage lgtunit should report on
	cover(code_metric).
	cover(code_metrics_utilities).
	cover(nor_metric).

	:- uses(nor_metric, [entity_score/2]).

	% categories: cat_b and cat_c each define one rule; cat_a and cat_d none
	test(nor_cat_a, true(Score == number_of_rules(0, 0))) :-
		entity_score(cat_a, Score).

	test(nor_cat_b, true(Score == number_of_rules(1, 1))) :-
		entity_score(cat_b, Score).

	test(nor_cat_c, true(Score == number_of_rules(1, 1))) :-
		entity_score(cat_c, Score).

	test(nor_cat_d, true(Score == number_of_rules(0, 0))) :-
		entity_score(cat_d, Score).

	% NOTE(review): negative assertion — presumably guards against a clause in
	% obj_e being miscounted as a rule; confirm against the test fixtures
	test(nor_obj_e_wrong_clause, true(Score \== number_of_rules(2, 2))) :-
		entity_score(obj_e, Score).

	test(nor_obj_e, true(Score == number_of_rules(1, 1))) :-
		entity_score(obj_e, Score).

	test(nor_obj_d, true(Score == number_of_rules(2, 2))) :-
		entity_score(obj_d, Score).

	test(nor_obj_a, true(Score == number_of_rules(3, 3))) :-
		entity_score(obj_a, Score).

	test(nor_obj_b, true(Score == number_of_rules(1, 1))) :-
		entity_score(obj_b, Score).

	test(nor_obj_c, true(Score == number_of_rules(1, 1))) :-
		entity_score(obj_c, Score).

	% protocols carry no clauses, so their rule counts are always zero
	test(nor_prot_a, true(Score == number_of_rules(0, 0))) :-
		entity_score(prot_a, Score).

	test(nor_prot_b, true(Score == number_of_rules(0, 0))) :-
		entity_score(prot_b, Score).

	% prototype/class hierarchy fixtures with only facts or no predicates
	test(nor_car, true(Score == number_of_rules(0, 0))) :-
		entity_score(car, Score).

	test(nor_vehicle, true(Score == number_of_rules(0, 0))) :-
		entity_score(vehicle, Score).

	test(nor_meta_vehicle, true(Score == number_of_rules(0, 0))) :-
		entity_score(meta_vehicle, Score).

	test(nor_herring, true(Score == number_of_rules(0, 0))) :-
		entity_score(herring, Score).

	test(nor_bird, true(Score == number_of_rules(0, 0))) :-
		entity_score(bird, Score).

:- end_object.
| Logtalk | 4 | PaulBrownMagic/logtalk3 | tools/code_metrics/nor_metric_tests.lgt | [
"Apache-2.0"
] |
# Disassembled test class (jadx regression fixture). The method below builds a
# 32-bit value from bytes of the input array, using the four bytes of p1 as
# indices, with control flow deliberately tangled through v1/v2 state flags and
# two try/catch regions — presumably to exercise the decompiler's handling of
# exception edges. NOTE(review): behavior described from the visible bytecode
# only; do not rely on it as a specification.
.class public Lothers/TestN21;
.super Ljava/lang/Object;

# static int test(byte[] p0, int p1)
.method private static test([BI)I
    .locals 5

    # v1 = 0 (state flag), v0 seeded from p0[14] << 16
    const/4 v1, 0x0

    const/16 v0, 0xe

    aget-byte v0, p0, v0

    shl-int/lit8 v0, v0, 0x10

    move v2, v1

    :goto_0
    # first pass: v2 == 0, so this branch falls through to the main decode
    if-nez v2, :cond_1

    const/4 v2, 0x3

    and-int/lit16 v3, p1, 0xff

    :try_start_0
    # assemble little-endian-style: p0[p1&0xff] | p0[(p1>>8)&0xff]<<8
    #                             | p0[(p1>>16)&0xff]<<16 | p0[(p1>>24)&0xff]<<24
    aget-byte v3, p0, v3

    and-int/lit16 v3, v3, 0xff

    shr-int/lit8 v4, p1, 0x8

    and-int/lit16 v4, v4, 0xff

    aget-byte v4, p0, v4

    and-int/lit16 v4, v4, 0xff

    shl-int/lit8 v4, v4, 0x8

    or-int/2addr v3, v4

    shr-int/lit8 v4, p1, 0x10

    and-int/lit16 v4, v4, 0xff

    aget-byte v4, p0, v4

    and-int/lit16 v4, v4, 0xff

    shl-int/lit8 v4, v4, 0x10

    or-int/2addr v3, v4

    shr-int/lit8 v4, p1, 0x18

    and-int/lit16 v4, v4, 0xff

    aget-byte v0, p0, v4
    :try_end_0
    .catch Ljava/lang/Exception; {:try_start_0 .. :try_end_0} :catch_1

    shl-int/lit8 v0, v0, 0x18

    or-int/2addr v0, v3

    :cond_0
    :goto_1
    return v0

    :catch_0
    move-exception v2

    :cond_1
    # fallback path: read p0[p1 & 0x7f] >> 8 (reached only via exception edges)
    if-nez v1, :cond_0

    const/4 v1, 0x2

    and-int/lit8 v2, p1, 0x7f

    :try_start_1
    aget-byte v0, p0, v2
    :try_end_1
    .catch Ljava/lang/Exception; {:try_start_1 .. :try_end_1} :catch_0

    shr-int/lit8 v0, v0, 0x8

    goto :goto_1

    :catch_1
    move-exception v3

    goto :goto_0
.end method
| Smali | 2 | mazhidong/jadx | jadx-core/src/test/smali/others/TestN21.smali | [
"Apache-2.0"
] |
#
# dbevents_calc_mapparams
#
# Script to calculate parameters for dbevents edp map projections
# Taimi Mulder
# 2004 Nov
#
# Enter: latc lonc lat_ll lon_ll lat_ur lon_ur

use Getopt::Std ;   # NOTE(review): not used below; kept so invocation behavior is unchanged
use Datascope;

# To be used with dbevents.pf to find the parameters
# xdelmin, xdelmax, ydelmin, ydelmax when the user
# has specified the lowerleft and upperright corners of the map
# projection in gmt (eg. -JE and -R with r option)

# Degree -> radian factor from machine-precision pi (atan2(1,1) == pi/4)
# instead of the previous truncated constant 2*3.1416/360.
$deg2rad = atan2( 1, 1 ) / 45;

# Prompt the operator for one value and return the chomped input line.
sub prompt_value {
	my ( $label ) = @_;
	print "Enter $label: ";
	chomp( my $value = <STDIN> );
	return $value;
}

$latc   = prompt_value( "centre latitude" );
$lonc   = prompt_value( "centre longitude" );
$lat_ll = prompt_value( "latitude lower left" );
$lon_ll = prompt_value( "longitude lower left" );
$lat_ur = prompt_value( "latitude upper right" );
$lon_ur = prompt_value( "longitude upper right" );

# create temp database for use with dbex_eval
@db = dbtmp("rt1.0");

# Distance (deg) and azimuth (deg) from the projection centre to (lat,lon),
# converted to x/y offsets for dbevents.pf. Factored out of the previously
# duplicated lower-left/upper-right corner code.
sub xy_offsets {
	my ( $lat, $lon ) = @_;
	my $distance = dbex_eval( @db, "distance ($latc,$lonc,$lat,$lon)" );
	my $azm      = dbex_eval( @db, "azimuth($latc,$lonc,$lat,$lon)" );
	my $angle    = $azm * $deg2rad;
	return ( $distance * sin( $angle ), $distance * cos( $angle ) );
}

# Lower Left corner -> xdelmin / ydelmin
( $xdelmin, $ydelmin ) = xy_offsets( $lat_ll, $lon_ll );
print "\n\nxdelmin\t\t",$xdelmin,"\nydelmin\t\t",$ydelmin,"\n";

# Upper Right corner -> xdelmax / ydelmax
( $xdelmax, $ydelmax ) = xy_offsets( $lat_ur, $lon_ur );
print "xdelmax\t\t",$xdelmax,"\nydelmax\t\t",$ydelmax,"\n\n";
| XProc | 4 | jreyes1108/antelope_contrib | bin/utility/dbevents_calc_mapparams/dbevents_calc_mapparams.xpl | [
"BSD-2-Clause",
"MIT"
] |
//
// This file is part of the Simutrans project under the Artistic License.
// (see LICENSE.txt)
//
//
// Tests for scenario rules/conditions
//
// Forbid a single tool for player 0 and check the engine's reaction.
function test_scenario_rules_allow_forbid_tool()
{
	local raise = command_x(tool_raise_land)
	local lower = command_x(tool_lower_land)
	local pl = player_x(0)

	{
		rules.forbid_tool(0, tool_raise_land)
		ASSERT_EQUAL(raise.work(pl, coord3d(4, 2, 0)), null) // FIXME this should fail
		ASSERT_EQUAL(lower.work(pl, coord3d(4, 2, 1)), null)
	}

	// clean up
	rules.allow_tool(player_all, tool_raise_land)
	RESET_ALL_PLAYER_FUNDS()
}
// Road building forbidden inside the 2D rectangle (2,2)-(5,5): routes that
// start, end, or lie fully inside the zone must fail; other waytypes (rail)
// remain buildable.
function test_scenario_rules_allow_forbid_way_tool_rect()
{
	local waybuilder = command_x(tool_build_way) // note: unused below, build_way static is called instead
	local road = way_desc_x.get_available_ways(wt_road, st_flat)[0]
	local rail = way_desc_x.get_available_ways(wt_rail, st_flat)[0]
	local pl = player_x(0)

	rules.forbid_way_tool_rect(0, tool_build_way, wt_road, coord(2, 2), coord(5, 5), "Foo Bar")

	// Fully in forbidden zone
	{
		ASSERT_EQUAL(command_x.build_way(pl, coord3d(2, 2, 0), coord3d(5, 5, 0), road, true), "")
		ASSERT_WAY_PATTERN(wt_road, coord3d(0, 0, 0),
			[
				"........",
				"........",
				"........",
				"........",
				"........",
				"........",
				"........",
				"........"
			])
	}

	// Ending in forbidden zone
	{
		ASSERT_EQUAL(command_x.build_way(pl, coord3d(0, 2, 0), coord3d(2, 2, 0), road, true), "")
		ASSERT_WAY_PATTERN(wt_road, coord3d(0, 0, 0),
			[
				"........",
				"........",
				"........",
				"........",
				"........",
				"........",
				"........",
				"........"
			])
	}

	// Starting in forbidden zone
	{
		ASSERT_EQUAL(command_x.build_way(pl, coord3d(2, 2, 0), coord3d(0, 2, 0), road, true), "")
		ASSERT_WAY_PATTERN(wt_road, coord3d(0, 0, 0),
			[
				"........",
				"........",
				"........",
				"........",
				"........",
				"........",
				"........",
				"........"
			])
	}

	// make sure we can build other ways
	{
		ASSERT_EQUAL(command_x.build_way(pl, coord3d(2, 2, 0), coord3d(0, 2, 0), rail, true), null)
		ASSERT_WAY_PATTERN(wt_rail, coord3d(0, 0, 0),
			[
				"........",
				"........",
				"2A8.....",
				"........",
				"........",
				"........",
				"........",
				"........"
			])
		ASSERT_EQUAL(command_x(tool_remove_way).work(pl, coord3d(2, 2, 0), coord3d(0, 2, 0), "" + wt_rail), null)
	}

	// clean up
	rules.clear()
	RESET_ALL_PLAYER_FUNDS()
}
// Same as the rect test, but the forbidden region is a 3D cube: building below
// the cube is allowed, building up a slope into it fails, and a way crossing a
// one-tile forbidden cube only at ground level (under a double-height slope)
// is still allowed.
function test_scenario_rules_allow_forbid_way_tool_cube()
{
	local waybuilder = command_x(tool_build_way) // note: unused below
	local setslope = command_x(tool_setslope)
	local road = way_desc_x.get_available_ways(wt_road, st_flat)[0]
	local pl = player_x(0)

	rules.forbid_way_tool_cube(0, tool_build_way, wt_road, coord3d(2, 2, 1), coord3d(5, 5, 2), "Foo Bar")

	// build below
	{
		ASSERT_EQUAL(command_x.build_way(pl, coord3d(2, 2, 0), coord3d(0, 2, 0), road, true), null)
		ASSERT_WAY_PATTERN(wt_road, coord3d(0, 0, 0),
			[
				"........",
				"........",
				"2A8.....",
				"........",
				"........",
				"........",
				"........",
				"........"
			])
		ASSERT_EQUAL(command_x(tool_remove_way).work(pl, coord3d(2, 2, 0), coord3d(0, 2, 0), "" + wt_road), null)
	}

	// build into forbidden zone
	{
		ASSERT_EQUAL(setslope.work(pl, coord3d(3, 4, 0), "" + slope.all_up_slope), null)
		ASSERT_EQUAL(setslope.work(pl, coord3d(3, 3, 0), "" + slope.north), null)
		ASSERT_EQUAL(command_x.build_way(pl, coord3d(3, 0, 0), coord3d(3, 4, 1), road, true), "")
		ASSERT_WAY_PATTERN(wt_road, coord3d(0, 0, 0),
			[
				"........",
				"........",
				"........",
				"........",
				"........",
				"........",
				"........",
				"........"
			])
		// restore terrain
		ASSERT_EQUAL(setslope.work(pl, coord3d(3, 4, 1), "" + slope.all_down_slope), null)
		ASSERT_EQUAL(setslope.work(pl, coord3d(3, 3, 0), "" + slope.flat), null)
	}

	rules.clear()
	rules.forbid_way_tool_cube(0, tool_build_way, wt_road, coord3d(0, 0, 1), coord3d(0, 0, 1), "Foo Bar")

	// build double height slope through forbidden cube
	{
		ASSERT_EQUAL(setslope.work(pl, coord3d(1, 1, 0), "" + (2*slope.east)), null)
		ASSERT_EQUAL(command_x.build_way(pl, coord3d(2, 1, 0), coord3d(1, 1, 0), road, true), null)
		ASSERT_WAY_PATTERN(wt_road, coord3d(0, 0, 0),
			[
				"........",
				".28.....",
				"........",
				"........",
				"........",
				"........",
				"........",
				"........"
			])
		ASSERT_EQUAL(command_x(tool_remove_way).work(pl, coord3d(2, 1, 0), coord3d(1, 1, 0), "" + wt_road), null)
		ASSERT_EQUAL(setslope.work(pl, coord3d(1, 1, 0), "" + slope.flat), null)
	}

	// clean up
	rules.clear()
	RESET_ALL_PLAYER_FUNDS()
}
// Documents that rect rules do NOT stack: an allow rule inside a larger
// forbid rule does not re-enable building there.
function test_scenario_rules_allow_forbid_tool_stacked_rect()
{
	local pl = player_x(0)
	local waybuilder = command_x(tool_build_way) // note: unused below
	local setslope = command_x(tool_setslope)    // note: unused below
	local road_desc = way_desc_x.get_available_ways(wt_road, st_flat)[0]

	rules.forbid_way_tool_rect(0, tool_build_way, wt_road, coord(1, 1), coord(14, 14), "Foo Bar 1")

	// build in outer allowed ring, near map border
	{
		ASSERT_EQUAL(command_x.build_way(pl, coord3d(5, 0, 0), coord3d(0, 5, 0), road_desc, false), null)
		ASSERT_WAY_PATTERN(wt_road, coord3d(0, 0, 0),
			[
				"6AAAA8..",
				"5.......",
				"5.......",
				"5.......",
				"5.......",
				"1.......",
				"........",
				"........"
			])
	}

	// build in outer forbidden ring
	{
		ASSERT_EQUAL(command_x.build_way(pl, coord3d(5, 1, 0), coord3d(1, 5, 0), road_desc, false), "")
		ASSERT_WAY_PATTERN(wt_road, coord3d(0, 0, 0),
			[
				"6AAAA8..",
				"5.......",
				"5.......",
				"5.......",
				"5.......",
				"1.......",
				"........",
				"........"
			])
	}

	rules.allow_way_tool_rect(0, tool_build_way, wt_road, coord(2, 2), coord(13, 13))

	// try building in allowed ring, does not work because rules cannot be stacked
	{
		ASSERT_EQUAL(command_x.build_way(pl, coord3d(5, 2, 0), coord3d(2, 5, 0), road_desc, false), "")
		ASSERT_WAY_PATTERN(wt_road, coord3d(0, 0, 0),
			[
				"6AAAA8..",
				"5.......",
				"5.......",
				"5.......",
				"5.......",
				"1.......",
				"........",
				"........"
			])
	}

	// clean up
	rules.clear()
	ASSERT_EQUAL(command_x(tool_remove_way).work(pl, coord3d(5, 0, 0), coord3d(0, 5, 0), "" + wt_road), null)
	RESET_ALL_PLAYER_FUNDS()
}
// Cube variant of the stacked-rules test: allow cubes inside forbid cubes do
// not stack either.
function test_scenario_rules_allow_forbid_tool_stacked_cube()
{
	local pl = player_x(0)
	local waybuilder = command_x(tool_build_way) // note: unused below
	local setslope = command_x(tool_setslope)    // note: unused below
	local road_desc = way_desc_x.get_available_ways(wt_road, st_flat)[0]

	rules.forbid_way_tool_cube(0, tool_build_way, wt_road, coord3d(1, 1, 0), coord3d(14, 14, 0), "Foo Bar 1")

	// build in outer allowed ring, near map border
	{
		ASSERT_EQUAL(command_x.build_way(pl, coord3d(5, 0, 0), coord3d(0, 5, 0), road_desc, false), null)
		ASSERT_WAY_PATTERN(wt_road, coord3d(0, 0, 0),
			[
				"6AAAA8..",
				"5.......",
				"5.......",
				"5.......",
				"5.......",
				"1.......",
				"........",
				"........"
			])
	}

	// build in outer forbidden ring
	{
		ASSERT_EQUAL(command_x.build_way(pl, coord3d(5, 1, 0), coord3d(1, 5, 0), road_desc, false), "")
		ASSERT_WAY_PATTERN(wt_road, coord3d(0, 0, 0),
			[
				"6AAAA8..",
				"5.......",
				"5.......",
				"5.......",
				"5.......",
				"1.......",
				"........",
				"........"
			])
	}

	rules.allow_way_tool_cube(0, tool_build_way, wt_road, coord3d(2, 2, 0), coord3d(13, 13, 0))

	// try building in allowed ring, does not work because rules cannot be stacked
	{
		ASSERT_EQUAL(command_x.build_way(pl, coord3d(5, 2, 0), coord3d(2, 5, 0), road_desc, false), "")
		ASSERT_WAY_PATTERN(wt_road, coord3d(0, 0, 0),
			[
				"6AAAA8..",
				"5.......",
				"5.......",
				"5.......",
				"5.......",
				"1.......",
				"........",
				"........"
			])
	}

	// clean up
	rules.clear()
	ASSERT_EQUAL(command_x(tool_remove_way).work(pl, coord3d(5, 0, 0), coord3d(0, 5, 0), "" + wt_road), null)
	RESET_ALL_PLAYER_FUNDS()
}
| Squirrel | 5 | Andarix/simutrans_nightly | tests/tests/test_scenario.nut | [
"Artistic-1.0"
] |
module audiostreamerscrobbler.factories.GroupFactory
import audiostreamerscrobbler.factories.Config
import audiostreamerscrobbler.groups.{FixedPlayersGroupStrategy, Group}
import audiostreamerscrobbler.maintypes.Player
import audiostreamerscrobbler.maintypes.Player.types.PlayerTypes
import java.util.TreeMap
# Lookup table: lower-cased player type id (as written in the config file)
# -> canonical player type id, built once from all known player types.
let CONFIG_PLAYER_TYPES = map[[t: playerTypeId(): toLowerCase(), t: playerTypeId()] foreach t in getAllPlayerTypes()]
# Returns a factory object exposing createGroup(cbProcessEvents), which builds
# the player group described by the application configuration.
# NOTE(review): the DynamicObject label "PlayerControlThreadFactory" looks like
# a copy/paste from another factory — confirm before relying on it.
function createGroupFactory = {
  let factory = DynamicObject("PlayerControlThreadFactory"):
    define("createGroup", |this, cbProcessEvents| -> createConfiguredGroup(cbProcessEvents))
  return factory
}
# Tries each supported configuration style in order (multi-player "players"
# section first, then the legacy single "player" section) and returns the
# first group successfully created; raises if none applies.
local function createConfiguredGroup = |cbProcessEvents| {
  let config = getConfig()

  let createGroupTypes = [
    ^_createConfiguredFixedPlayerGroup,
    ^_createConfiguredLegacySinglePlayerGroup]

  foreach createGroupFunction in createGroupTypes {
    let group = createGroupFunction(cbProcessEvents, config)
    if (group != null) {
      return group
    }
  }

  raise("Error in configuration: could not create a player group")
}
# Builds a fixed-players group from the "players" config section, or returns
# null when that section is absent. Only player types with enabled == true
# contribute; their player names are turned into player ids, keyed by type id
# in a sorted TreeMap.
local function _createConfiguredFixedPlayerGroup = |cbProcessEvents, config| {
  let playersConfig = config: get("players")
  if playersConfig is null {
    return null
  }

  let expectedPlayers = TreeMap()
  foreach playerTypeConfig in playersConfig: entrySet() {
    let playerTypeId = _getConfiguredPlayerTypeId(playerTypeConfig: getKey())
    let playerTypeValues = playerTypeConfig: getValue()
    if (playerTypeValues: get("enabled") isnt null and playerTypeValues: get("enabled")) {
      let playerNames = [p foreach p in playerTypeValues: get("players")]
      let playerIds = list[createPlayerId(playerTypeId, p) foreach p in playerNames]
      expectedPlayers: put(playerTypeId, playerIds)
    }
  }

  return _createFixedPlayerGroup(expectedPlayers, cbProcessEvents)
}
# Fallback for the legacy "player" config section (one type, one name), or
# null when that section is absent. Wraps the single player in the same
# fixed-players group structure used by the multi-player path.
local function _createConfiguredLegacySinglePlayerGroup = |cbProcessEvents, config| {
  let playerConfig = config: get("player")
  if playerConfig is null {
    return null
  }

  let playerTypeInConfig = playerConfig: get("type")
  let playerTypeId = _getConfiguredPlayerTypeId(playerTypeInConfig)
  let playerName = playerConfig: get("name")
  let playerId = createPlayerId(playerTypeId, playerName)
  let expectedPlayers = map[[playerTypeId, list[playerId]]]

  return _createFixedPlayerGroup(expectedPlayers, cbProcessEvents)
}
# Maps a (lower-cased) player type string from the config file to its
# canonical type id; raises on unknown types so config errors fail fast.
local function _getConfiguredPlayerTypeId = |playerTypeInConfig| {
  let playerTypeId = CONFIG_PLAYER_TYPES: get(playerTypeInConfig)
  if (playerTypeId is null) {
    raise("Unknown player type specified in configuration: '" + playerTypeInConfig + "'")
  }
  return playerTypeId
}
# Wires the fixed-players strategy to a Group instance named "Player Group".
local function _createFixedPlayerGroup = |expectedPlayers, cbProcessEvents| {
  let strategy = createFixedPlayersGroupStrategy(expectedPlayers, cbProcessEvents)
  let group = createGroup("Player Group", strategy)
  return group
}
| Golo | 4 | vvdleun/audiostreamerscrobbler | src/main/golo/include/factories/GroupFactory.golo | [
"MIT"
] |
// Io is the bottle neck for this test - this is more of a performance sanity check
// Issue 10000 sequential HTTP write requests against the local vertexdb
// server, printing a progress marker every 1000 requests.
for(v, 1, 10000,
	URL with("http://localhost:8080?action=write&value=" .. v) fetch
	if(v % 1000 == 0, writeln(v))
)

// Trigger a server-side garbage collection once the writes are done.
URL with("http://localhost:8080?action=collectGarbage") fetch
| Io | 2 | BooBSD/vertexdb | tests/stress.io | [
"BSD-3-Clause"
] |
"""Config flow for DLNA DMS."""
from __future__ import annotations
import logging
from pprint import pformat
from typing import Any, cast
from urllib.parse import urlparse
from async_upnp_client.profiles.dlna import DmsDevice
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components import ssdp
from homeassistant.const import CONF_DEVICE_ID, CONF_HOST, CONF_URL
from homeassistant.data_entry_flow import AbortFlow, FlowResult
from .const import CONF_SOURCE_ID, CONFIG_VERSION, DEFAULT_NAME, DOMAIN
from .util import generate_source_id
LOGGER = logging.getLogger(__name__)
class DlnaDmsFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a DLNA DMS config flow.
The Unique Service Name (USN) of the DMS device is used as the unique_id for
config entries and for entities. This USN may differ from the root USN if
the DMS is an embedded device.
"""
VERSION = CONFIG_VERSION
def __init__(self) -> None:
"""Initialize flow."""
self._discoveries: dict[str, ssdp.SsdpServiceInfo] = {}
self._location: str | None = None
self._usn: str | None = None
self._name: str | None = None
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Handle a flow initialized by the user by listing unconfigured devices."""
LOGGER.debug("async_step_user: user_input: %s", user_input)
if user_input is not None and (host := user_input.get(CONF_HOST)):
# User has chosen a device
discovery = self._discoveries[host]
await self._async_parse_discovery(discovery, raise_on_progress=False)
return self._create_entry()
if not (discoveries := await self._async_get_discoveries()):
# Nothing found, abort configuration
return self.async_abort(reason="no_devices_found")
self._discoveries = {
cast(str, urlparse(discovery.ssdp_location).hostname): discovery
for discovery in discoveries
}
discovery_choices = {
host: f"{discovery.upnp.get(ssdp.ATTR_UPNP_FRIENDLY_NAME)} ({host})"
for host, discovery in self._discoveries.items()
}
data_schema = vol.Schema({vol.Optional(CONF_HOST): vol.In(discovery_choices)})
return self.async_show_form(step_id="user", data_schema=data_schema)
async def async_step_ssdp(self, discovery_info: ssdp.SsdpServiceInfo) -> FlowResult:
"""Handle a flow initialized by SSDP discovery."""
LOGGER.debug("async_step_ssdp: discovery_info %s", pformat(discovery_info))
await self._async_parse_discovery(discovery_info)
# Abort if the device doesn't support all services required for a DmsDevice.
# Use the discovery_info instead of DmsDevice.is_profile_device to avoid
# contacting the device again.
discovery_service_list = discovery_info.upnp.get(ssdp.ATTR_UPNP_SERVICE_LIST)
if not discovery_service_list:
return self.async_abort(reason="not_dms")
services = discovery_service_list.get("service")
if not services:
discovery_service_ids: set[str] = set()
elif isinstance(services, list):
discovery_service_ids = {service.get("serviceId") for service in services}
else:
# Only one service defined (etree_to_dict failed to make a list)
discovery_service_ids = {services.get("serviceId")}
if not DmsDevice.SERVICE_IDS.issubset(discovery_service_ids):
return self.async_abort(reason="not_dms")
# Abort if another config entry has the same location, in case the
# device doesn't have a static and unique UDN (breaking the UPnP spec).
self._async_abort_entries_match({CONF_URL: self._location})
self.context["title_placeholders"] = {"name": self._name}
return await self.async_step_confirm()
async def async_step_confirm(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Allow the user to confirm adding the device."""
if user_input is not None:
return self._create_entry()
self._set_confirm_only()
return self.async_show_form(step_id="confirm")
def _create_entry(self) -> FlowResult:
"""Create a config entry, assuming all required information is now known."""
LOGGER.debug(
"_create_entry: name: %s, location: %s, USN: %s",
self._name,
self._location,
self._usn,
)
assert self._name
assert self._location
assert self._usn
data = {
CONF_URL: self._location,
CONF_DEVICE_ID: self._usn,
CONF_SOURCE_ID: generate_source_id(self.hass, self._name),
}
return self.async_create_entry(title=self._name, data=data)
async def _async_parse_discovery(
    self, discovery_info: ssdp.SsdpServiceInfo, raise_on_progress: bool = True
) -> None:
    """Get required details from an SSDP discovery.

    Aborts if a device matching the SSDP USN has already been configured.
    """
    LOGGER.debug(
        "_async_parse_discovery: location: %s, USN: %s",
        discovery_info.ssdp_location,
        discovery_info.ssdp_usn,
    )
    if not discovery_info.ssdp_location or not discovery_info.ssdp_usn:
        # Without both a device location and a USN the discovery is unusable.
        raise AbortFlow("bad_ssdp")
    if not self._location:
        # Keep the first location seen; a later discovery of the same device
        # (e.g. under a different ST) does not overwrite it.
        self._location = discovery_info.ssdp_location
    self._usn = discovery_info.ssdp_usn
    # Must happen before the configured-check below so the abort can attach
    # the updated location to the existing entry.
    await self.async_set_unique_id(self._usn, raise_on_progress=raise_on_progress)

    # Abort if already configured, but update the last-known location
    self._abort_if_unique_id_configured(
        updates={CONF_URL: self._location}, reload_on_update=False
    )

    # Best available display name: UPnP friendly name, then hostname, then
    # a static default.
    self._name = (
        discovery_info.upnp.get(ssdp.ATTR_UPNP_FRIENDLY_NAME)
        or urlparse(self._location).hostname
        or DEFAULT_NAME
    )
async def _async_get_discoveries(self) -> list[ssdp.SsdpServiceInfo]:
    """Get list of unconfigured DLNA devices discovered by SSDP."""
    # Collect every cached SSDP discovery that matches a compatible
    # device type.
    all_discoveries: list[ssdp.SsdpServiceInfo] = []
    for device_type in DmsDevice.DEVICE_TYPES:
        all_discoveries.extend(
            await ssdp.async_get_discovery_info_by_st(self.hass, device_type)
        )

    # Drop devices that already have a (non-ignored) config entry.
    configured_udns = {
        entry.unique_id
        for entry in self._async_current_entries(include_ignore=False)
    }
    return [
        discovery
        for discovery in all_discoveries
        if discovery.ssdp_udn not in configured_udns
    ]
| Python | 5 | mtarjoianu/core | homeassistant/components/dlna_dms/config_flow.py | [
"Apache-2.0"
] |
# Remove directories/files created by the deletion tests.
rm -rf destruct x
# Remove the test output plus any editor backup files.
rm -f deltest2 *~ .*~
| Stata | 1 | BlameJohnny/redo | t/202-del/clean.do | [
"Apache-2.0"
] |
// TEST TOOL U8U16Test
// Performance tests for UTF-8 <--> UTF-16 conversions, related to PR #4093
// NOTE The functions u8u16 and u16u8 contain own algorithms. Tests have shown that they perform
// worse than the platform API functions.
// Thus, these functions are *unrelated* to the til::u8u16 and til::u16u8 implementation.
#pragma once
#undef WIN32_LEAN_AND_MEAN
#undef NOMINMAX
#define NOMINMAX
#include <stdexcept>
#include <string>
#include <string_view>
#include <array>
#include <algorithm>
#include <windows.h>
#include <intsafe.h>
// Streaming UTF-8 chunker: operator() hands back the portion of `in` that
// ends on a complete UTF-8 code point, caching the code units of a trailing
// partial sequence so they can be prepended to the next chunk.
// NOTE(review): summary inferred from the member declarations below --
// confirm against the implementation file.
class u8state final
{
public:
    u8state() noexcept;
    [[nodiscard]] HRESULT operator()(const std::string_view in, std::string_view& out) noexcept;
    // Drop any cached partial sequence and start fresh.
    void reset() noexcept;

private:
    // Bit patterns and the masks used to classify a UTF-8 code unit.
    enum _Utf8BitMasks : BYTE
    {
        IsAsciiByte = 0b0'0000000, // ASCII byte: MSB is 0
        MaskAsciiByte = 0b1'0000000, // mask selecting the bit tested for IsAsciiByte
        IsContinuationByte = 0b10'000000, // continuation byte: 10xxxxxx
        MaskContinuationByte = 0b11'000000, // mask selecting the bits tested for IsContinuationByte
        IsLeadByteTwoByteSequence = 0b110'00000, // lead byte of a 2-byte sequence: 110xxxxx
        MaskLeadByteTwoByteSequence = 0b111'00000, // mask selecting the bits tested for a 2-byte lead
        IsLeadByteThreeByteSequence = 0b1110'0000, // lead byte of a 3-byte sequence: 1110xxxx
        MaskLeadByteThreeByteSequence = 0b1111'0000, // mask selecting the bits tested for a 3-byte lead
        IsLeadByteFourByteSequence = 0b11110'000, // lead byte of a 4-byte sequence: 11110xxx
        MaskLeadByteFourByteSequence = 0b11111'000 // mask selecting the bits tested for a 4-byte lead
    };

    // array of bitmasks, indexed by the number of cached partial bytes
    constexpr static std::array<BYTE, 4> _cmpMasks{
        0, // unused
        _Utf8BitMasks::MaskContinuationByte,
        _Utf8BitMasks::MaskLeadByteTwoByteSequence,
        _Utf8BitMasks::MaskLeadByteThreeByteSequence,
    };

    // array of values for the comparisons, indexed the same way
    constexpr static std::array<BYTE, 4> _cmpOperands{
        0, // unused
        _Utf8BitMasks::IsAsciiByte, // intentionally conflicts with MaskContinuationByte
        _Utf8BitMasks::IsLeadByteTwoByteSequence,
        _Utf8BitMasks::IsLeadByteThreeByteSequence,
    };

    std::string _buffer8; // working buffer for assembled output
    std::array<char, 4> _utf8Partials; // buffer for code units of a partial UTF-8 code point that have to be cached
    size_t _partialsLen{}; // number of cached UTF-8 code units
};
// Streaming UTF-16 chunker: caches a trailing high surrogate between calls
// so a surrogate pair is never split across chunk boundaries.
// NOTE(review): summary inferred from the member declarations below --
// confirm against the implementation file.
class u16state final
{
public:
    u16state() noexcept;
    [[nodiscard]] HRESULT operator()(const std::wstring_view in, std::wstring_view& out) noexcept;
    // Drop any cached high surrogate and start fresh.
    void reset() noexcept;

private:
    std::wstring _buffer16; // working buffer for assembled output
    wchar_t _highSurrogate{}; // UTF-16 high surrogate that has to be cached
    size_t _cached{}; // 1 if a high surrogate has been cached, 0 otherwise
};
};
// One-shot conversions over a whole view. The *_ptr variants are the
// alternate (pointer-based) implementations benchmarked against each other
// (see the header comment about PR #4093).
// NOTE(review): exact `discardInvalids` semantics inferred from the name --
// confirm in the implementation file.
[[nodiscard]] HRESULT u8u16(const std::string_view in, std::wstring& out, bool discardInvalids = false) noexcept;
[[nodiscard]] HRESULT u8u16_ptr(const std::string_view in, std::wstring& out, bool discardInvalids = false) noexcept;
// Stateful variant for chunked input; `state` carries partial sequences
// across calls.
[[nodiscard]] HRESULT u8u16(const std::string_view in, std::wstring& out, u8state& state, bool discardInvalids = false) noexcept;

[[nodiscard]] HRESULT u16u8(const std::wstring_view in, std::string& out, bool discardInvalids = false) noexcept;
[[nodiscard]] HRESULT u16u8_ptr(const std::wstring_view in, std::string& out, bool discardInvalids = false) noexcept;
[[nodiscard]] HRESULT u16u8(const std::wstring_view in, std::string& out, u16state& state, bool discardInvalids = false) noexcept;

// Convenience overloads that return the converted string directly.
std::wstring u8u16(const std::string_view in, bool discardInvalids = false);
std::wstring u8u16(const std::string_view in, u8state& state, bool discardInvalids = false);
std::string u16u8(const std::wstring_view in, bool discardInvalids = false);
std::string u16u8(const std::wstring_view in, u16state& state, bool discardInvalids = false);
| C++ | 5 | memcpy-rand-rand-rand/terminal | src/tools/U8U16Test/U8U16Test.hpp | [
"MIT"
] |
/* Protocol badge: render a small bordered "http"/"https" label after each
   link inside an element carrying the matching class. */
.http a:after,
.https a:after {
  margin-left: 5px;
  border: 1px solid;
  border-radius: 2px;
  padding: 0px 5px;
  font-size: 11px;
  color: #adb9bd;
}

/* Badge text is supplied via generated content, not markup. */
.http a:after {
  content: "http";
}

.https a:after {
  content: "https";
}
"MIT"
] |
#include "script_component.hpp"
/*
    Name: TFAR_fnc_getSwChannel
    Author: NKey

    Gets the channel for the passed radio

    Arguments:
        Radio classname <STRING>

    Return Value:
        Channel <NUMBER>

    Example:
        _channel = (call TFAR_fnc_ActiveSwRadio) call TFAR_fnc_getSwChannel;

    Public: Yes
*/
// Default to an empty classname if nothing (or a non-string) was passed.
params[["_radio", "", [""]]];

// Read the radio's settings array and return its active-channel entry,
// falling back to channel 0 when the entry is missing.
(_radio call TFAR_fnc_getSwSettings) param [ACTIVE_CHANNEL_OFFSET, 0]
| SQF | 4 | MrDj200/task-force-arma-3-radio | addons/core/functions/fnc_getSwChannel.sqf | [
"RSA-MD"
] |
use Datascope;
use Getopt::Std;
# Interactively prompt the user to add a new acronym record.  $force is
# set when the -f option requests an add regardless of lookup results.
# An empty answer, or an explicit n/q, exits the program without adding.
sub addacro {
	local( $findacro, $force, @dbacro ) = @_;
	my( $expansion );

	if( $force ) {
		$msg = "Force addition of $findacro: ";
	} else {
		$msg = "$findacro not found. Add: ";
	}

	$expansion = ask( $msg );

	# Bail out on empty input or a no/quit answer.
	$expansion =~ /^\s*$/ && exit 0;
	$expansion =~ /^\s*[nN]\s*$/ && exit 0;
	$expansion =~ /^\s*[qQ]\s*$/ && exit 0;

	# A bare yes confirms the add; re-prompt for the actual expansion text.
	if( $expansion =~ /^\s*[yY]([eE][sS])?\s*$/ ) {
		$expansion = ask( "Expansion for $findacro: " );
	}

	$context = ask( "Context for $findacro: " );
	if( $context =~ /^\s*$/ ) {
		# "-" marks an empty field (printacros tests for it below).
		$context = "-";
	}

	$comment = ask( "Comment for $findacro: " );
	if( $comment =~ /^\s*$/ ) {
		$comment = "-";
	}

	print "\n";

	# Append a null row, then fill in the new record's fields.
	$dbacro[3] = dbaddnull( @dbacro );

	dbputv( @dbacro,
		"acronym", $findacro,
		"expansion", $expansion,
		"context", $context,
		"comment", $comment );
}
# Print every record in the passed database view, one acronym per line,
# appending "(context; comment)" only for fields that are not the "-"
# null marker.
sub printacros {
	local( @db ) = @_;
	my( $nrecs, $acronym, $expansion, $op );

	$nrecs = dbquery( @db, "dbRECORD_COUNT" );

	print "\n";

	for( $db[3] = 0; $db[3] < $nrecs; $db[3]++ ) {

		( $acronym, $expansion, $context, $comment ) =
			dbgetv( @db, "acronym", "expansion", "context", "comment" );

		print "\t$acronym\t$expansion";

		if( $context eq "-" && $comment eq "-" ) {
			print "\n";
		} elsif( $context eq "-" ) {
			print " ($comment)\n";
		} elsif( $comment eq "-" ) {
			print " ($context)\n";
		} else {
			print " ($context; $comment)\n";
		}
	}

	print "\n";
}
$Usage = "Usage: acro [-d] [-f] [-n] [-g] [-c] [-e] [-o] [acronym | expression]\n";

# Validate the command line: the acronym/expression argument is required
# unless only counting (-n) or launching the editor (-d).
if( ! getopts('dfngceo') || $#ARGV > 0 ) {
	die( $Usage );
} elsif( $#ARGV != 0 && ! $opt_n && ! $opt_d ) {
	die( $Usage );
} else {
	unless( $opt_n || $opt_d ) {
		$findacro = $ARGV[0];
		# For the regex-search modes, wrap a bare word so it acts as a
		# substring match.
		if( ( $opt_g || $opt_c || $opt_e || $opt_o ) &&
			$findacro !~ m@^\s*/@ ) {
			$findacro = "/.*$findacro.*/";
		}
	}
}

# Database name comes from the "acronyms" parameter file.
$dbname = pfget( "acronyms", "dbname" );

# -d: hand the table to dbe for interactive editing, then quit.
if( $opt_d ) {
	system( "dbe -e $dbname.acronyms &" );
	exit 0;
}

@db = dbopen( $dbname, "r+" );
@dbacro = dblookup( @db, "", "acronyms", "", "" );

$nacros = dbquery( @dbacro, "dbRECORD_COUNT" );

# -n: report the number of stored acronyms only.
if( $opt_n ) {
	print "\t$nacros acronyms\n";
	exit 0;
}

# Subset on whichever field the options select; default is an exact
# acronym match.
if( $opt_g ) {
	@db = dbsubset( @dbacro, "acronym =~ $findacro" );
} elsif( $opt_c ) {
	@db = dbsubset( @dbacro, "context =~ $findacro" );
} elsif( $opt_e ) {
	@db = dbsubset( @dbacro, "expansion =~ $findacro" );
} elsif( $opt_o ) {
	@db = dbsubset( @dbacro, "comment =~ $findacro" );
} else {
	@db = dbsubset( @dbacro, "acronym == \"$findacro\"" );
}

$nrecs = dbquery( @db, "dbRECORD_COUNT" );

# For exact-match mode: offer to add the acronym when nothing was found
# (or -f forces an add), then re-subset so the new record is shown.
if( ( $nrecs <= 0 || $opt_f ) && !
	( $opt_g || $opt_e || $opt_c || $opt_o ) ) {

	addacro( $findacro, $opt_f, @dbacro );

	@db = dbsubset( @dbacro, "acronym == \"$findacro\"" );
}

printacros( @db );
| XProc | 4 | jreyes1108/antelope_contrib | nobuild/bin/utility/acro/acro.xpl | [
"BSD-2-Clause",
"MIT"
] |
{layout '@layout.latte'}
{block title}{$annotation|firstUpper}{/block}
{block content}
<h1>{include title}</h1>
{if $hasElements}
{if $classes}
<table class="summary table table-responsive table-bordered table-striped" id="classes">
<tr><th colspan="2">Classes Summary</th></tr>
{include items, items => $classes}
</table>
{/if}
{if $interfaces}
<table class="summary table table-responsive table-bordered table-striped" id="interfaces">
<tr><th colspan="2">Interfaces Summary</th></tr>
{include items, items => $allInterfaces}
</table>
{/if}
{if $traits}
<table class="summary table table-responsive table-bordered table-striped" id="traits">
<tr><th colspan="2">Traits Summary</th></tr>
{include items, items => $traits}
</table>
{/if}
{if $methods}
<table class="summary table table-responsive table-bordered table-striped" id="methods">
<tr><th colspan="3">Methods Summary</th></tr>
<tr n:foreach="$methods as $method">
<td class="name">
<a href="{$method->getDeclaringClass()|linkReflection}">{$method->getDeclaringClassName()}</a>
</td>
<td class="name">
<code><a href="{$method|linkReflection}">{$method->getName()}()</a></code>
</td>
<td>
{include reflectionAnnotationsList, "reflection" => $method, "annotation" => $annotation}
</td>
</tr>
</table>
{/if}
{if $properties}
<table class="summary table table-responsive table-bordered table-striped" id="properties">
<tr><th colspan="3">Properties Summary</th></tr>
<tr n:foreach="$properties as $property">
<td class="name">
<a href="{$property->getDeclaringClass()|linkReflection}">{$property->getDeclaringClassName()}</a>
</td>
<td class="name">
<a href="{$property|linkReflection}" class="property-name">${$property->getName()}</a>
</td>
<td>
{include reflectionAnnotationsList, "reflection" => $property, "annotation" => $annotation}
</td>
</tr>
</table>
{/if}
{if $functions}
<table class="summary table table-responsive table-bordered table-striped" id="functions">
<tr><th colspan="2">Functions Summary</th></tr>
<tr n:foreach="$allFunctions as $function">
<td class="name">
<code><a href="{$function|linkReflection}">{$function->getName()}</a></code>
</td>
<td>
{include reflectionAnnotationsList, "reflection" => $function, "annotation" => $annotation}
</td>
</tr>
</table>
{/if}
{else}
<p>No elements with <code>@{$annotation}</code> annotation found.</p>
{/if}
{/block}
{define items}
<tr n:foreach="$items as $class">
<td class="name"><a href="{$class|linkReflection}">{$class->getName()}</a></td>
<td>
{foreach $class->getAnnotations($annotation) as $classAnnotation}
{$classAnnotation|annotation:$class|noescape}<br>
{/foreach}
</td>
</tr>
{/define}
{define reflectionAnnotationsList}
{foreach $reflection->getAnnotation($annotation) as $reflectionAnnotation}
{$reflectionAnnotation|annotation:$reflection|noescape}<br>
{/foreach}
{/define}
| Latte | 4 | pujak17/tets | packages/ThemeDefault/src/annotation-group.latte | [
"MIT"
] |
export default function Docs(props) {
return <div>Hello again 👋</div>
}
| JavaScript | 3 | blomqma/next.js | test/integration/export-default-map/pages/v1.12/docs.js | [
"MIT"
] |
# Start fbterm automatically when the shell runs on a Linux virtual
# console (/dev/tty*). Only attempt this when the fbterm binary exists,
# and keep the session alive if fbterm fails to launch.
if (( ${+commands[fbterm]} )) && [[ "$TTY" = /dev/tty* ]]; then
  fbterm && exit
fi
| Shell | 3 | chensanle/ohmyzsh | plugins/fbterm/fbterm.plugin.zsh | [
"MIT"
] |
object NewDiskForm: TNewDiskForm
Left = 226
Top = 162
BorderIcons = [biSystemMenu]
BorderStyle = bsDialog
Caption = '*'
ClientHeight = 169
ClientWidth = 377
Color = clBtnFace
Font.Charset = DEFAULT_CHARSET
Font.Color = clWindowText
Font.Height = -11
Font.Name = 'MS Sans Serif'
Font.Style = []
OldCreateOrder = True
Scaled = False
OnCloseQuery = FormCloseQuery
DesignSize = (
377
169)
PixelsPerInch = 96
TextHeight = 13
object DiskBitmapImage: TBitmapImage
Left = 8
Top = 10
Width = 48
Height = 48
end
object CancelButton: TNewButton
Left = 296
Top = 137
Width = 73
Height = 23
Anchors = [akRight, akBottom]
Cancel = True
Caption = '*'
ModalResult = 2
TabOrder = 5
end
object OKButton: TNewButton
Left = 216
Top = 137
Width = 73
Height = 23
Anchors = [akRight, akBottom]
Caption = '*'
Default = True
ModalResult = 1
TabOrder = 4
end
object BrowseButton: TNewButton
Left = 296
Top = 95
Width = 73
Height = 23
Anchors = [akTop, akRight]
Caption = '*'
TabOrder = 3
OnClick = BrowseButtonClick
end
object PathEdit: TEdit
Left = 8
Top = 96
Width = 281
Height = 21
Anchors = [akLeft, akTop, akRight]
TabOrder = 2
end
object PathLabel: TNewStaticText
Left = 8
Top = 80
Width = 5
Height = 14
Caption = '*'
FocusControl = PathEdit
TabOrder = 1
end
object SelectDiskLabel: TNewStaticText
Left = 72
Top = 8
Width = 297
Height = 72
Anchors = [akLeft, akTop, akRight]
AutoSize = False
Caption = '*'
ShowAccelChar = False
TabOrder = 0
WordWrap = True
end
end
| Pascal | 3 | mqt635/issrc | Projects/NewDisk.dfm | [
"FSFAP"
] |
component{
function foo(mailBody){
var mailService=new mail(
to="...",
from="...",
subject="...",
type="HTML",
body=mailBody
);
}
} | ColdFusion CFC | 3 | tonym128/CFLint | src/test/resources/com/cflint/tests/VarScoper/fpositive/mail.cfc | [
"BSD-3-Clause"
] |
/*
* Copyright 2012-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.buildpack.platform.build;
import java.util.Arrays;
import org.junit.jupiter.api.Test;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException;
import static org.assertj.core.api.Assertions.assertThatIllegalStateException;
/**
* Tests for {@link ApiVersion}.
*
* @author Phillip Webb
* @author Scott Frederick
*/
class ApiVersionTests {

	// --- parse() input validation and parsing ---

	@Test
	void parseWhenVersionIsNullThrowsException() {
		assertThatIllegalArgumentException().isThrownBy(() -> ApiVersion.parse(null))
				.withMessage("Value must not be empty");
	}

	@Test
	void parseWhenVersionIsEmptyThrowsException() {
		assertThatIllegalArgumentException().isThrownBy(() -> ApiVersion.parse(""))
				.withMessage("Value must not be empty");
	}

	@Test
	void parseWhenVersionDoesNotMatchPatternThrowsException() {
		assertThatIllegalArgumentException().isThrownBy(() -> ApiVersion.parse("bad"))
				.withMessage("Malformed version number 'bad'");
	}

	@Test
	void parseReturnsVersion() {
		ApiVersion version = ApiVersion.parse("1.2");
		assertThat(version.getMajor()).isEqualTo(1);
		assertThat(version.getMinor()).isEqualTo(2);
	}

	// --- assertSupports() ---

	@Test
	void assertSupportsWhenSupports() {
		ApiVersion.parse("1.2").assertSupports(ApiVersion.parse("1.0"));
	}

	@Test
	void assertSupportsWhenDoesNotSupportThrowsException() {
		assertThatIllegalStateException()
				.isThrownBy(() -> ApiVersion.parse("1.2").assertSupports(ApiVersion.parse("1.3")))
				.withMessage("Detected platform API version '1.3' does not match supported version '1.2'");
	}

	// --- supports(): semver-style rules, with major 0 treated as exact ---

	@Test
	void supportsWhenSame() {
		assertThat(supports("0.0", "0.0")).isTrue();
		assertThat(supports("0.1", "0.1")).isTrue();
		assertThat(supports("1.0", "1.0")).isTrue();
		assertThat(supports("1.1", "1.1")).isTrue();
	}

	@Test
	void supportsWhenDifferentMajor() {
		assertThat(supports("0.0", "1.0")).isFalse();
		assertThat(supports("1.0", "0.0")).isFalse();
		assertThat(supports("1.0", "2.0")).isFalse();
		assertThat(supports("2.0", "1.0")).isFalse();
		assertThat(supports("1.1", "2.1")).isFalse();
		assertThat(supports("2.1", "1.1")).isFalse();
	}

	@Test
	void supportsWhenDifferentMinor() {
		// A newer minor supports an older one, but not the reverse.
		assertThat(supports("1.2", "1.1")).isTrue();
		assertThat(supports("1.2", "1.3")).isFalse();
	}

	@Test
	void supportsWhenMajorZeroAndDifferentMinor() {
		// Major version 0 requires an exact minor match.
		assertThat(supports("0.2", "0.1")).isFalse();
		assertThat(supports("0.2", "0.3")).isFalse();
	}

	@Test
	void supportsAnyWhenOneMatches() {
		assertThat(supportsAny("0.2", "0.1", "0.2")).isTrue();
	}

	@Test
	void supportsAnyWhenNoneMatch() {
		assertThat(supportsAny("0.2", "0.3", "0.4")).isFalse();
	}

	@Test
	void toStringReturnsString() {
		assertThat(ApiVersion.parse("1.2").toString()).isEqualTo("1.2");
	}

	@Test
	void equalsAndHashCode() {
		ApiVersion v12a = ApiVersion.parse("1.2");
		ApiVersion v12b = ApiVersion.parse("1.2");
		ApiVersion v13 = ApiVersion.parse("1.3");
		assertThat(v12a.hashCode()).isEqualTo(v12b.hashCode());
		assertThat(v12a).isEqualTo(v12a).isEqualTo(v12b).isNotEqualTo(v13);
	}

	// Helper: does version v1 support version v2?
	private boolean supports(String v1, String v2) {
		return ApiVersion.parse(v1).supports(ApiVersion.parse(v2));
	}

	// Helper: does v1 support at least one of the other versions?
	private boolean supportsAny(String v1, String... others) {
		return ApiVersion.parse(v1)
				.supportsAny(Arrays.stream(others).map(ApiVersion::parse).toArray(ApiVersion[]::new));
	}

}
| Java | 5 | techAi007/spring-boot | spring-boot-project/spring-boot-tools/spring-boot-buildpack-platform/src/test/java/org/springframework/boot/buildpack/platform/build/ApiVersionTests.java | [
"Apache-2.0"
] |
<?php use PHPCI\Helper\Lang; ?>
<?php if (empty($error)): ?>
<div class="box-header">
<?php Lang::out('reset_enter_password'); ?>
</div>
<div class="box-body">
<form class="form" action="<?php print PHPCI_URL; ?>session/reset-password/<?php print $id; ?>/<?php print $key; ?>" method="POST">
<div class="form-group">
<label for="password"><?php Lang::out('reset_new_password'); ?></label>
<input type="password" id="password" name="password" class="form-control" required>
</div>
<div class="form-group">
<input class="btn btn-success" type="submit" value="<?php Lang::out('reset_change_password'); ?>">
</div>
</form>
</div>
<?php else: ?>
<div class="alert alert-danger" style="margin-bottom: 0">
<?php print $error; ?>
</div>
<?php endif; ?>
| HTML+PHP | 3 | studio201/PHPCI | PHPCI/View/Session/resetPassword.phtml | [
"BSD-2-Clause"
] |
# Deliberately infeasible test model: x must be an integer >= 1 ...
var x >= 1 integer;
# ... yet this constraint forces x <= 0, so no feasible point exists.
s.t. c: x <= 0;
# Disable AMPL presolve so the solver itself must detect the infeasibility.
option presolve 0;
| AMPL | 0 | ampl/plugins | test/data/infeasible.ampl | [
"BSD-3-Clause"
] |
import {Component, NgModule} from '@angular/core';
@Component({
selector: 'my-app',
template: `
<div attr.title="a{{one}}b{{two}}c{{three}}d{{four}}e{{five}}f{{six}}g{{seven}}h{{eight}}i{{nine}}j"></div>
<div attr.title="a{{one}}b{{two}}c{{three}}d{{four}}e{{five}}f{{six}}g{{seven}}h{{eight}}i"></div>
<div attr.title="a{{one}}b{{two}}c{{three}}d{{four}}e{{five}}f{{six}}g{{seven}}h"></div>
<div attr.title="a{{one}}b{{two}}c{{three}}d{{four}}e{{five}}f{{six}}g"></div>
<div attr.title="a{{one}}b{{two}}c{{three}}d{{four}}e{{five}}f"></div>
<div attr.title="a{{one}}b{{two}}c{{three}}d{{four}}e"></div>
<div attr.title="a{{one}}b{{two}}c{{three}}d"></div>
<div attr.title="a{{one}}b{{two}}c"></div>
<div attr.title="a{{one}}b"></div>
<div attr.title="{{one}}"></div>
`
})
export class MyComponent {
name = 'John Doe';
one!: any;
two!: any;
three!: any;
four!: any;
five!: any;
six!: any;
seven!: any;
eight!: any;
nine!: any;
}
@NgModule({declarations: [MyComponent]})
export class MyModule {
}
| TypeScript | 3 | John-Cassidy/angular | packages/compiler-cli/test/compliance/test_cases/r3_view_compiler_bindings/attribute_bindings/interpolated_attributes.ts | [
"MIT"
] |
--TEST--
Trying to access undeclared static property
--FILE--
<?php
class bar {
public function __set($a, $b) {
print "hello\n";
}
}
class foo extends bar {
public function __construct() {
static::$f = 1;
}
public function __set($a, $b) {
print "foo\n";
}
}
new foo;
?>
--EXPECTF--
Fatal error: Uncaught Error: Access to undeclared static property foo::$f in %s:%d
Stack trace:
#0 %s(%d): foo->__construct()
#1 {main}
thrown in %s on line %d
| PHP | 3 | NathanFreeman/php-src | Zend/tests/objects_029.phpt | [
"PHP-3.01"
] |
# DO NOT EDIT THIS FILE. This file will be overwritten when re-running go-raml.
@0xa7923fa19ab459cd;
struct User {
name @0 :Text;
username @1 :Text;
}
| Cap'n Proto | 4 | mrpotes/go-raml | docs/tutorial/tarantool/server/handlers/schemas/User.capnp | [
"BSD-2-Clause"
] |
Feature: manipulate databases:
create, drop, connect, disconnect
Scenario: create and drop temporary database
When we create database
then we see database created
when we drop database
then we confirm the destructive warning
then we see database dropped
when we connect to dbserver
then we see database connected
Scenario: connect and disconnect from test database
When we connect to test database
then we see database connected
when we connect to dbserver
then we see database connected
| Cucumber | 4 | yolabingo/pgcli | tests/features/crud_database.feature | [
"BSD-3-Clause"
] |
# Match a Microsoft cmd.exe banner sent by the connection ORIGINATOR --
# characteristic of a reverse shell pushed out from a compromised host.
signature windows_reverse_shell {
	ip-proto == tcp
	tcp-state established,originator
	event "ATTACK-RESPONSES Microsoft cmd.exe banner (reverse-shell originator)"
	payload /.*Microsoft Windows.*\x28C\x29 Copyright 1985-.*Microsoft Corp/
}

# Same banner sent by the RESPONDER -- a bind/normal shell being served.
signature windows_shell {
	ip-proto == tcp
	tcp-state established,responder
	event "ATTACK-RESPONSES Microsoft cmd.exe banner (normal-shell responder)"
	payload /.*Microsoft Windows.*\x28C\x29 Copyright 1985-.*Microsoft Corp/
}
| Standard ML | 3 | yaplej/bro | scripts/policy/frameworks/signatures/detect-windows-shells.sig | [
"Apache-2.0"
] |
/**
*
*/
import Util;
import OpenApi;
import EndpointUtil;
extends OpenApi;
init(config: OpenApi.Config){
super(config);
@endpointRule = 'regional';
checkConfig(config);
@endpoint = getEndpoint('eds-user', @regionId, @endpointRule, @network, @suffix, @endpointMap, @endpoint);
}
function getEndpoint(productId: string, regionId: string, endpointRule: string, network: string, suffix: string, endpointMap: map[string]string, endpoint: string) throws: string{
if (!Util.empty(endpoint)) {
return endpoint;
}
if (!Util.isUnset(endpointMap) && !Util.empty(endpointMap[regionId])) {
return endpointMap[regionId];
}
return EndpointUtil.getEndpointRules(productId, regionId, endpointRule, network, suffix);
}
model GetUserByTokenRequest {
token?: string(name='Token'),
}
model GetUserByTokenResponseBody = {
requestId?: string(name='RequestId', description='Id of the request'),
user?: {
id?: long(name='Id'),
name?: string(name='Name'),
email?: string(name='Email'),
tenantId?: string(name='TenantId'),
}(name='User'),
}
model GetUserByTokenResponse = {
headers: map[string]string(name='headers'),
body: GetUserByTokenResponseBody(name='body'),
}
async function getUserByTokenWithOptions(request: GetUserByTokenRequest, runtime: Util.RuntimeOptions): GetUserByTokenResponse {
Util.validateModel(request);
var req = new OpenApi.OpenApiRequest{
body = Util.toMap(request),
};
return doRPCRequest('GetUserByToken', '2021-06-22', 'HTTPS', 'POST', 'AK', 'json', req, runtime);
}
async function getUserByToken(request: GetUserByTokenRequest): GetUserByTokenResponse {
var runtime = new Util.RuntimeOptions{};
return getUserByTokenWithOptions(request, runtime);
}
| Tea | 5 | aliyun/alibabacloud-sdk | eds-user-20210622/main.tea | [
"Apache-2.0"
] |
\require "b@>=0.2" | LilyPond | 0 | HolgerPeters/lyp | spec/package_setups/big/a@0.2.1/package.ly | [
"MIT"
] |
<div
style="
display: flex;
margin-top: 48px;
padding-top: 20px;
border-top: 1px solid #eaecef;
"
>
<img
style="height: 72px; width: 72px; border-radius: 50%"
src="{{ include.profile_photo }}"
/>
<div
style="
display: flex;
flex-direction: column;
justify-content: space-between;
margin-left: 14px;
"
>
<div
style="
margin-top: 6px;
font-size: 21px;
font-weight: 600;
line-height: 24px;
color: rgba(0, 0, 0, 0.84);
"
>
{{ include.display_name }}
</div>
<div style="display: flex">
{% if include.twitter_username %}
<a
style="display: flex"
href="https://twitter.com/{{ include.twitter_username }}"
>
<svg
class="social-media-icon-svg"
style="margin: 6px"
width="24"
height="24"
viewBox="0 0 24 24"
>
<path opacity="0" d="M0 0h24v24H0z" />
<path
d="M23.643 4.937c-.835.37-1.732.62-2.675.733.962-.576 1.7-1.49 2.048-2.578-.9.534-1.897.922-2.958 1.13-.85-.904-2.06-1.47-3.4-1.47-2.572 0-4.658 2.086-4.658 4.66 0 .364.042.718.12 1.06-3.873-.195-7.304-2.05-9.602-4.868-.4.69-.63 1.49-.63 2.342 0 1.616.823 3.043 2.072 3.878-.764-.025-1.482-.234-2.11-.583v.06c0 2.257 1.605 4.14 3.737 4.568-.392.106-.803.162-1.227.162-.3 0-.593-.028-.877-.082.593 1.85 2.313 3.198 4.352 3.234-1.595 1.25-3.604 1.995-5.786 1.995-.376 0-.747-.022-1.112-.065 2.062 1.323 4.51 2.093 7.14 2.093 8.57 0 13.255-7.098 13.255-13.254 0-.2-.005-.402-.014-.602.91-.658 1.7-1.477 2.323-2.41z"
/>
</svg>
</a>
{% endif %} {% if include.medium_username %}
<a
style="display: flex"
href="https://medium.com/@{{ include.medium_username }}"
>
<svg
class="social-media-icon-svg"
style="margin: 6px"
width="24"
height="24"
viewBox="0 0 45 45"
>
<path
d="M5 40V5h35v35H5zm8.56-12.627c0 .555-.027.687-.318 1.03l-2.457 2.985v.396h6.974v-.396l-2.456-2.985c-.291-.343-.344-.502-.344-1.03V18.42l6.127 13.364h.714l5.256-13.364v10.644c0 .29 0 .342-.185.528l-1.848 1.796v.396h9.19v-.396l-1.822-1.796c-.184-.186-.21-.238-.21-.528V15.937c0-.291.026-.344.21-.528l1.823-1.797v-.396h-6.471l-4.622 11.542-5.203-11.542h-6.79v.396l2.14 2.64c.239.292.291.37.291.768v10.353z"
/>
</svg>
</a>
{% endif %} {% if include.github_username %}
<a
style="display: flex"
href="https://github.com/{{ include.github_username }}/"
>
<svg
class="social-media-icon-svg"
style="margin: 6px"
height="24"
viewBox="-2 -2 20 20"
width="24"
>
<path
fill-rule="evenodd"
d="M8 0C3.58 0 0 3.58 0 8c0 3.54 2.29 6.53 5.47 7.59.4.07.55-.17.55-.38 0-.19-.01-.82-.01-1.49-2.01.37-2.53-.49-2.69-.94-.09-.23-.48-.94-.82-1.13-.28-.15-.68-.52-.01-.53.63-.01 1.08.58 1.23.82.72 1.21 1.87.87 2.33.66.07-.52.28-.87.51-1.07-1.78-.2-3.64-.89-3.64-3.95 0-.87.31-1.59.82-2.15-.08-.2-.36-1.02.08-2.12 0 0 .67-.21 2.2.82.64-.18 1.32-.27 2-.27.68 0 1.36.09 2 .27 1.53-1.04 2.2-.82 2.2-.82.44 1.1.16 1.92.08 2.12.51.56.82 1.27.82 2.15 0 3.07-1.87 3.75-3.65 3.95.29.25.54.73.54 1.48 0 1.07-.01 1.93-.01 2.2 0 .21.15.46.55.38A8.013 8.013 0 0 0 16 8c0-4.42-3.58-8-8-8z"
></path>
</svg>
</a>
{% endif %} {% if include.linkedin_username %}
<a
style="display: flex"
href="https://www.linkedin.com/in/{{ include.linkedin_username }}/"
>
<svg
class="social-media-icon-svg"
style="margin: 6px"
width="24"
height="24"
viewBox="0 0 24 24"
>
<rect height="11" width="4" x="3" y="9" />
<circle cx="5" cy="5" r="2" />
<path
d="M16.5,8.25A4.47251,4.47251,0,0,0,13,9.95343V9H9V20h4V13a2,2,0,0,1,4,0v7h4V12.75A4.5,4.5,0,0,0,16.5,8.25Z"
/>
</svg>
</a>
{% endif %}
</div>
</div>
</div>
| Liquid | 3 | TommyTeaVee/training | docs/_includes/profile.liquid | [
"MIT"
] |
%/* This file is copied from RFC1813
% * Copyright 1995 Sun Micrososystems (I assume)
% */
const MNTPATHLEN = 1024; /* Maximum bytes in a path name */
const MNTNAMLEN = 255; /* Maximum bytes in a name */
const FHSIZE3 = 64; /* Maximum bytes in a V3 file handle */
typedef opaque fhandle3<FHSIZE3>;
typedef string dirpath<MNTPATHLEN>;
typedef string name<MNTNAMLEN>;
typedef struct exportnode *exports;
typedef struct groupnode *groups;
typedef struct mountbody *mountlist;
enum mountstat3 {
MNT3_OK = 0, /* no error */
MNT3ERR_PERM = 1, /* Not owner */
MNT3ERR_NOENT = 2, /* No such file or directory */
MNT3ERR_IO = 5, /* I/O error */
MNT3ERR_ACCES = 13, /* Permission denied */
MNT3ERR_NOTDIR = 20, /* Not a directory */
MNT3ERR_INVAL = 22, /* Invalid argument */
MNT3ERR_NAMETOOLONG = 63, /* Filename too long */
MNT3ERR_NOTSUPP = 10004, /* Operation not supported */
MNT3ERR_SERVERFAULT = 10006 /* A failure on the server */
};
struct mountres3_ok {
fhandle3 fhandle;
int auth_flavors<>;
};
union mountres3 switch (mountstat3 fhs_status) {
case MNT3_OK:
mountres3_ok mountinfo;
default:
void;
};
struct mountbody {
name ml_hostname;
dirpath ml_directory;
mountlist ml_next;
};
struct groupnode {
name gr_name;
groups gr_next;
};
struct exportnode {
dirpath ex_dir;
groups ex_groups;
exports ex_next;
};
program MOUNT_PROGRAM {
version MOUNT_V3 {
void MOUNTPROC3_NULL(void) = 0;
mountres3 MOUNTPROC3_MNT(dirpath) = 1;
mountlist MOUNTPROC3_DUMP(void) = 2;
void MOUNTPROC3_UMNT(dirpath) = 3;
void MOUNTPROC3_UMNTALL(void) = 4;
exports MOUNTPROC3_EXPORT(void) = 5;
} = 3;
} = 100005;
| Logos | 3 | Davidfind/rt-thread | components/dfs/filesystems/nfs/mount.x | [
"Apache-2.0"
] |
# Tuning the performance of Integer#times:
require: "profiler"
class Integer {
dynamic_method('times_impl:) |g| {
while = g new_label()
end = g new_label()
exc = g new_label()
exc_iter = g new_label()
g total_args=(1)
g required_args=(1)
# Locals:
# 0: block argument
# 1: counter (starts at 0)
# Set up locals
g meta_push_0()
g set_local(1)
# Set up exception handler
g setup_unwind(exc, Rubinius AST RescueType)
while set!()
g push_local(1) # S: counter
g push_self() # S: counter, self
g meta_send_op_lt(g find_literal('<))
g goto_if_false(end)
# Invoke the actual block with counter as arg.
g push_local(0) # S: block
g push_local(1) # S: block, counter
g meta_send_call(g find_literal('call), 1) # Faster calling for blocks (g send('call, 1))
g pop() # Don't use return of block call.
# Increment counter
g push_local(1)
g meta_push_1()
g meta_send_op_plus(g find_literal('+))
g set_local(1); g pop()
# Go back to start of loop.
g check_interrupts()
g goto(while)
# Exception handler. Checks for Fancy::BreakIteration and
# Fancy::StopIteration.
exc set!()
# Check if it's a BreakIteration
g push_const('Fancy) # S: Fancy
g find_const('BreakIteration) # S: Fancy BreakIteration
g push_current_exception() # S: Fancy BreakIteration, Exception
g kind_of()
g goto_if_true(exc_iter)
# Check if it's a StopIteration
g push_const('Fancy) # S: Fancy
g find_const('StopIteration) # S: Fancy StopIteration
g push_current_exception() # S: Fancy StopIteration, Exception
g kind_of()
g goto_if_true(exc_iter)
# Not a break or stop, so just raise it up.
g reraise()
# If it is a break or stop iteration, then call :result on the exception
# and return that.
exc_iter set!()
g push_current_exception()
g clear_exception()
g send(':result, 0)
g ret()
# Clean end; return last counter value.
end set!()
g pop_unwind() # Clean up unwind handler
g push_local(1) # Return counter value
g ret()
}
}
n = 100_000_000
b = |x| { x println }
# "Bytecode:" println
# (n times_impl: b) inspect println
# "Plain:" println
# (n times: b) inspect println
# System exit
"[profile] Starting times: with n = #{n to_s}... " print
s = Time now
#start_profile!
n times: |x| {
x
}
#stop_profile!
"Done in #{(Time now - s) to_s}" println
#Profiler show()
"[profile] Starting times_impl: with n = #{n to_s}... " print
s = Time now
#start_profile!
n times_impl: |x| {
x
}
#stop_profile!
"Done in #{(Time now - s) to_s}" println
#Profiler show()
| Fancy | 4 | bakkdoor/fancy | tools/benchmarks/profile-int-times.fy | [
"BSD-3-Clause"
] |
module Main where
import Debug.Trace
main = trace "Hello World"
| PureScript | 2 | Gabrielarodrigues10/ga | p/PureScript.purs | [
"MIT"
] |
import { autoUpdater } from 'electron/main';
import { expect } from 'chai';
import { ifit, ifdescribe } from './spec-helpers';
import { emittedOnce } from './events-helpers';
ifdescribe(!process.mas)('autoUpdater module', function () {
describe('checkForUpdates', function () {
ifit(process.platform === 'win32')('emits an error on Windows if the feed URL is not set', async function () {
const errorEvent = emittedOnce(autoUpdater, 'error');
autoUpdater.setFeedURL({ url: '' });
autoUpdater.checkForUpdates();
const [error] = await errorEvent;
expect(error.message).to.equal('Update URL is not set');
});
});
describe('getFeedURL', () => {
it('returns an empty string by default', () => {
expect(autoUpdater.getFeedURL()).to.equal('');
});
ifit(process.platform === 'win32')('correctly fetches the previously set FeedURL', function () {
const updateURL = 'https://fake-update.electron.io';
autoUpdater.setFeedURL({ url: updateURL });
expect(autoUpdater.getFeedURL()).to.equal(updateURL);
});
});
describe('setFeedURL', function () {
ifdescribe(process.platform === 'win32' || process.platform === 'darwin')('on Mac or Windows', () => {
it('sets url successfully using old (url, headers) syntax', () => {
const url = 'http://electronjs.org';
try {
(autoUpdater.setFeedURL as any)(url, { header: 'val' });
} catch (err) { /* ignore */ }
expect(autoUpdater.getFeedURL()).to.equal(url);
});
it('throws if no url is provided when using the old style', () => {
expect(() => (autoUpdater.setFeedURL as any)()).to.throw('Expected an options object with a \'url\' property to be provided');
});
it('sets url successfully using new ({ url }) syntax', () => {
const url = 'http://mymagicurl.local';
try {
autoUpdater.setFeedURL({ url });
} catch (err) { /* ignore */ }
expect(autoUpdater.getFeedURL()).to.equal(url);
});
it('throws if no url is provided when using the new style', () => {
expect(() => autoUpdater.setFeedURL({ noUrl: 'lol' } as any)
).to.throw('Expected options object to contain a \'url\' string property in setFeedUrl call');
});
});
ifdescribe(process.platform === 'darwin' && process.arch !== 'arm64')('on Mac', function () {
it('emits an error when the application is unsigned', async () => {
const errorEvent = emittedOnce(autoUpdater, 'error');
autoUpdater.setFeedURL({ url: '' });
const [error] = await errorEvent;
expect(error.message).equal('Could not get code signature for running application');
});
it('does not throw if default is the serverType', () => {
// "Could not get code signature..." means the function got far enough to validate that serverType was OK.
expect(() => autoUpdater.setFeedURL({ url: '', serverType: 'default' })).to.throw('Could not get code signature for running application');
});
it('does not throw if json is the serverType', () => {
// "Could not get code signature..." means the function got far enough to validate that serverType was OK.
expect(() => autoUpdater.setFeedURL({ url: '', serverType: 'json' })).to.throw('Could not get code signature for running application');
});
it('does throw if an unknown string is the serverType', () => {
expect(() => autoUpdater.setFeedURL({ url: '', serverType: 'weow' as any })).to.throw('Expected serverType to be \'default\' or \'json\'');
});
});
});
describe('quitAndInstall', () => {
ifit(process.platform === 'win32')('emits an error on Windows when no update is available', async function () {
const errorEvent = emittedOnce(autoUpdater, 'error');
autoUpdater.quitAndInstall();
const [error] = await errorEvent;
expect(error.message).to.equal('No update available, can\'t quit and install');
});
});
});
| TypeScript | 5 | lingxiao-Zhu/electron | spec-main/api-auto-updater-spec.ts | [
"MIT"
] |
{:duct.core/include ["hello/server-jetty"]} ;default | edn | 1 | xsoheilalizadeh/FrameworkBenchmarks | frameworks/Clojure/duct/resources/hello/server.edn | [
"BSD-3-Clause"
] |
[{:type :error,
:file "/win32nativeext/src/main.cpp",
:line 10,
:message
"In file included from upload/win32nativeext/src/main.cpp:10:\nIn file included from /opt/MacOSX10.13.sdk//usr/include/c++/4.2.1/algorithm:68:\ninvalid operands to binary expression ('std::vector<int, std::allocator<int> >' and 'int')\n if (*__first == __val)\n ~~~~~~~~ ^ ~~~~~"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 19,
:message
"in instantiation of function template specialization 'std::__find<__gnu_cxx::__normal_iterator<std::vector<int, std::allocator<int> > *, std::vector<std::vector<int, std::allocator<int> >, std::allocator<std::vector<int, std::allocator<int> > > > >, int>' requested here\n return std::__find(__first, __last, __val,\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 35,
:column 64,
:message
"in instantiation of function template specialization 'std::find<__gnu_cxx::__normal_iterator<std::vector<int, std::allocator<int> > *, std::vector<std::vector<int, std::allocator<int> >, std::allocator<std::vector<int, std::allocator<int> > > > >, int>' requested here\n std::vector< std::vector <int> >::const_iterator it = std::find( v.begin(), v.end(), a );\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 35,
:column 64,
:message
"candidate template ignored: could not match 'fpos' against 'vector'\n operator==(const fpos<_StateT>& __lhs, const fpos<_StateT>& __rhs)\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 35,
:column 64,
:message
"candidate template ignored: could not match 'pair' against 'vector'\n operator==(const pair<_T1, _T2>& __x, const pair<_T1, _T2>& __y)\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 35,
:column 64,
:message
"candidate template ignored: could not match 'reverse_iterator' against 'vector'\n operator==(const reverse_iterator<_Iterator>& __x,\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 35,
:column 64,
:message
"candidate template ignored: could not match 'reverse_iterator' against 'vector'\n operator==(const reverse_iterator<_IteratorL>& __x,\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 35,
:column 64,
:message
"candidate template ignored: could not match '__normal_iterator' against 'vector'\n operator==(const __normal_iterator<_IteratorL, _Container>& __lhs,\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 35,
:column 64,
:message
"candidate template ignored: could not match '__normal_iterator' against 'vector'\n operator==(const __normal_iterator<_Iterator, _Container>& __lhs,\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 35,
:column 64,
:message
"candidate template ignored: could not match 'new_allocator' against 'vector'\n operator==(const new_allocator<_Tp>&, const new_allocator<_Tp>&)\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 35,
:column 64,
:message
"candidate template ignored: could not match 'allocator' against 'vector'\n operator==(const allocator<_T1>&, const allocator<_T2>&)\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 35,
:column 64,
:message
"candidate template ignored: could not match 'vector<type-parameter-0-0, type-parameter-0-1>' against 'const int'\n operator==(const vector<_Tp, _Alloc>& __x, const vector<_Tp, _Alloc>& __y)\n ^"}
{:type :error,
:file "/win32nativeext/src/main.cpp",
:line 10,
:message
"In file included from upload/win32nativeext/src/main.cpp:10:\nIn file included from /opt/MacOSX10.13.sdk//usr/include/c++/4.2.1/algorithm:68:\ninvalid operands to binary expression ('std::vector<int, std::allocator<int> >' and 'int')\n if (*__first == __val)\n ~~~~~~~~ ^ ~~~~~"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'fpos' against 'vector'\n operator==(const fpos<_StateT>& __lhs, const fpos<_StateT>& __rhs)\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'pair' against 'vector'\n operator==(const pair<_T1, _T2>& __x, const pair<_T1, _T2>& __y)\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'reverse_iterator' against 'vector'\n operator==(const reverse_iterator<_Iterator>& __x,\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'reverse_iterator' against 'vector'\n operator==(const reverse_iterator<_IteratorL>& __x,\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match '__normal_iterator' against 'vector'\n operator==(const __normal_iterator<_IteratorL, _Container>& __lhs,\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match '__normal_iterator' against 'vector'\n operator==(const __normal_iterator<_Iterator, _Container>& __lhs,\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'new_allocator' against 'vector'\n operator==(const new_allocator<_Tp>&, const new_allocator<_Tp>&)\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'allocator' against 'vector'\n operator==(const allocator<_T1>&, const allocator<_T2>&)\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'vector<type-parameter-0-0, type-parameter-0-1>' against 'const int'\n operator==(const vector<_Tp, _Alloc>& __x, const vector<_Tp, _Alloc>& __y)\n ^"}
{:type :error,
:file "/win32nativeext/src/main.cpp",
:line 10,
:message
"In file included from upload/win32nativeext/src/main.cpp:10:\nIn file included from /opt/MacOSX10.13.sdk//usr/include/c++/4.2.1/algorithm:68:\ninvalid operands to binary expression ('std::vector<int, std::allocator<int> >' and 'int')\n if (*__first == __val)\n ~~~~~~~~ ^ ~~~~~"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'fpos' against 'vector'\n operator==(const fpos<_StateT>& __lhs, const fpos<_StateT>& __rhs)\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'pair' against 'vector'\n operator==(const pair<_T1, _T2>& __x, const pair<_T1, _T2>& __y)\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'reverse_iterator' against 'vector'\n operator==(const reverse_iterator<_Iterator>& __x,\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'reverse_iterator' against 'vector'\n operator==(const reverse_iterator<_IteratorL>& __x,\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match '__normal_iterator' against 'vector'\n operator==(const __normal_iterator<_IteratorL, _Container>& __lhs,\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match '__normal_iterator' against 'vector'\n operator==(const __normal_iterator<_Iterator, _Container>& __lhs,\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'new_allocator' against 'vector'\n operator==(const new_allocator<_Tp>&, const new_allocator<_Tp>&)\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'allocator' against 'vector'\n operator==(const allocator<_T1>&, const allocator<_T2>&)\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'vector<type-parameter-0-0, type-parameter-0-1>' against 'const int'\n operator==(const vector<_Tp, _Alloc>& __x, const vector<_Tp, _Alloc>& __y)\n ^"}
{:type :error,
:file "/win32nativeext/src/main.cpp",
:line 10,
:message
"In file included from upload/win32nativeext/src/main.cpp:10:\nIn file included from /opt/MacOSX10.13.sdk//usr/include/c++/4.2.1/algorithm:68:\ninvalid operands to binary expression ('std::vector<int, std::allocator<int> >' and 'int')\n if (*__first == __val)\n ~~~~~~~~ ^ ~~~~~"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'fpos' against 'vector'\n operator==(const fpos<_StateT>& __lhs, const fpos<_StateT>& __rhs)\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'pair' against 'vector'\n operator==(const pair<_T1, _T2>& __x, const pair<_T1, _T2>& __y)\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'reverse_iterator' against 'vector'\n operator==(const reverse_iterator<_Iterator>& __x,\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'reverse_iterator' against 'vector'\n operator==(const reverse_iterator<_IteratorL>& __x,\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match '__normal_iterator' against 'vector'\n operator==(const __normal_iterator<_IteratorL, _Container>& __lhs,\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match '__normal_iterator' against 'vector'\n operator==(const __normal_iterator<_Iterator, _Container>& __lhs,\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'new_allocator' against 'vector'\n operator==(const new_allocator<_Tp>&, const new_allocator<_Tp>&)\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'allocator' against 'vector'\n operator==(const allocator<_T1>&, const allocator<_T2>&)\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'vector<type-parameter-0-0, type-parameter-0-1>' against 'const int'\n operator==(const vector<_Tp, _Alloc>& __x, const vector<_Tp, _Alloc>& __y)\n ^"}
{:type :error,
:file "/win32nativeext/src/main.cpp",
:line 10,
:message
"In file included from upload/win32nativeext/src/main.cpp:10:\nIn file included from /opt/MacOSX10.13.sdk//usr/include/c++/4.2.1/algorithm:68:\ninvalid operands to binary expression ('std::vector<int, std::allocator<int> >' and 'int')\n if (*__first == __val)\n ~~~~~~~~ ^ ~~~~~"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'fpos' against 'vector'\n operator==(const fpos<_StateT>& __lhs, const fpos<_StateT>& __rhs)\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'pair' against 'vector'\n operator==(const pair<_T1, _T2>& __x, const pair<_T1, _T2>& __y)\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'reverse_iterator' against 'vector'\n operator==(const reverse_iterator<_Iterator>& __x,\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'reverse_iterator' against 'vector'\n operator==(const reverse_iterator<_IteratorL>& __x,\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match '__normal_iterator' against 'vector'\n operator==(const __normal_iterator<_IteratorL, _Container>& __lhs,\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match '__normal_iterator' against 'vector'\n operator==(const __normal_iterator<_Iterator, _Container>& __lhs,\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'new_allocator' against 'vector'\n operator==(const new_allocator<_Tp>&, const new_allocator<_Tp>&)\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'allocator' against 'vector'\n operator==(const allocator<_T1>&, const allocator<_T2>&)\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'vector<type-parameter-0-0, type-parameter-0-1>' against 'const int'\n operator==(const vector<_Tp, _Alloc>& __x, const vector<_Tp, _Alloc>& __y)\n ^"}
{:type :error,
:file "/win32nativeext/src/main.cpp",
:line 10,
:message
"In file included from upload/win32nativeext/src/main.cpp:10:\nIn file included from /opt/MacOSX10.13.sdk//usr/include/c++/4.2.1/algorithm:68:\ninvalid operands to binary expression ('std::vector<int, std::allocator<int> >' and 'int')\n if (*__first == __val)\n ~~~~~~~~ ^ ~~~~~"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'fpos' against 'vector'\n operator==(const fpos<_StateT>& __lhs, const fpos<_StateT>& __rhs)\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'pair' against 'vector'\n operator==(const pair<_T1, _T2>& __x, const pair<_T1, _T2>& __y)\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'reverse_iterator' against 'vector'\n operator==(const reverse_iterator<_Iterator>& __x,\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'reverse_iterator' against 'vector'\n operator==(const reverse_iterator<_IteratorL>& __x,\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match '__normal_iterator' against 'vector'\n operator==(const __normal_iterator<_IteratorL, _Container>& __lhs,\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match '__normal_iterator' against 'vector'\n operator==(const __normal_iterator<_Iterator, _Container>& __lhs,\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'new_allocator' against 'vector'\n operator==(const new_allocator<_Tp>&, const new_allocator<_Tp>&)\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'allocator' against 'vector'\n operator==(const allocator<_T1>&, const allocator<_T2>&)\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'vector<type-parameter-0-0, type-parameter-0-1>' against 'const int'\n operator==(const vector<_Tp, _Alloc>& __x, const vector<_Tp, _Alloc>& __y)\n ^"}
{:type :error,
:file "/win32nativeext/src/main.cpp",
:line 10,
:message
"In file included from upload/win32nativeext/src/main.cpp:10:\nIn file included from /opt/MacOSX10.13.sdk//usr/include/c++/4.2.1/algorithm:68:\ninvalid operands to binary expression ('std::vector<int, std::allocator<int> >' and 'int')\n if (*__first == __val)\n ~~~~~~~~ ^ ~~~~~"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'fpos' against 'vector'\n operator==(const fpos<_StateT>& __lhs, const fpos<_StateT>& __rhs)\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'pair' against 'vector'\n operator==(const pair<_T1, _T2>& __x, const pair<_T1, _T2>& __y)\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'reverse_iterator' against 'vector'\n operator==(const reverse_iterator<_Iterator>& __x,\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'reverse_iterator' against 'vector'\n operator==(const reverse_iterator<_IteratorL>& __x,\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match '__normal_iterator' against 'vector'\n operator==(const __normal_iterator<_IteratorL, _Container>& __lhs,\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match '__normal_iterator' against 'vector'\n operator==(const __normal_iterator<_Iterator, _Container>& __lhs,\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'new_allocator' against 'vector'\n operator==(const new_allocator<_Tp>&, const new_allocator<_Tp>&)\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'allocator' against 'vector'\n operator==(const allocator<_T1>&, const allocator<_T2>&)\n ^"}
{:type :note,
:file "/win32nativeext/src/main.cpp",
:line 10,
:column 5,
:message
"candidate template ignored: could not match 'vector<type-parameter-0-0, type-parameter-0-1>' against 'const int'\n operator==(const vector<_Tp, _Alloc>& __x, const vector<_Tp, _Alloc>& __y)\n ^"}]
| edn | 1 | cmarincia/defold | editor/test/resources/native_extension_error_parsing/templateBarf_parsed.edn | [
"ECL-2.0",
"Apache-2.0"
] |
#include "opencv2/ts.hpp"
#include <opencv2/core/utils/logger.hpp>
#include "opencv2/core/utility.hpp"
#if !defined(__EMSCRIPTEN__)
#include "opencv2/core/private.hpp"
#endif
#ifdef GTEST_LINKED_AS_SHARED_LIBRARY
#error ts module should not have GTEST_LINKED_AS_SHARED_LIBRARY defined
#endif
| C++ | 2 | xipingyan/opencv | modules/ts/src/precomp.hpp | [
"Apache-2.0"
] |
import data.matrix.notation
import data.vector2
/-!
Helpers that don't currently fit elsewhere...
-/
lemma split_eq {m n : Type*} (x : m × n) (p p' : m × n) :
p = x ∨ p' = x ∨ (x ≠ p ∧ x ≠ p') := by tauto
-- For `playfield`s, the piece type and/or piece index type.
variables (X : Type*)
variables [has_repr X]
namespace chess.utils
section repr
/--
An auxiliary wrapper for `option X` that allows for overriding the `has_repr` instance
for `option`, and rather, output just the value in the `some` and a custom provided
`string` for `none`.
-/
structure option_wrapper :=
(val : option X)
(none_s : string)
instance wrapped_option_repr : has_repr (option_wrapper X) :=
⟨λ ⟨val, s⟩, (option.map has_repr.repr val).get_or_else s⟩
variables {X}
/--
Construct an `option_wrapper` term from a provided `option X` and the `string`
that will override the `has_repr.repr` for `none`.
-/
def option_wrap (val : option X) (none_s : string) : option_wrapper X := ⟨val, none_s⟩
-- The size of the "vectors" for a `fin n' → X`, for `has_repr` definitions
variables {m' n' : ℕ}
/--
For a "vector" `X^n'` represented by the type `Π n' : ℕ, fin n' → X`, where
the `X` has a `has_repr` instance itself, we can provide a `has_repr` for the "vector".
This definition is used for displaying rows of the playfield, when it is defined
via a `matrix`, likely through notation.
-/
def vec_repr : Π {n' : ℕ}, (fin n' → X) → string :=
λ _ v, string.intercalate ", " ((vector.of_fn v).to_list.map repr)
instance vec_repr_instance : has_repr (fin n' → X) := ⟨vec_repr⟩
/--
For a `matrix` `X^(m' × n')` where the `X` has a `has_repr` instance itself,
we can provide a `has_repr` for the matrix, using `vec_repr` for each of the rows of the matrix.
This definition is used for displaying the playfield, when it is defined
via a `matrix`, likely through notation.
-/
def matrix_repr : Π {m' n'}, matrix (fin m') (fin n') X → string :=
λ _ _ M, string.intercalate ";\n" ((vector.of_fn M).to_list.map repr)
instance matrix_repr_instance :
has_repr (matrix (fin n') (fin m') X) := ⟨matrix_repr⟩
end repr
end chess.utils
| Lean | 5 | ka7/bat | tests/syntax-tests/source/Lean/test.lean | [
"Apache-2.0",
"MIT"
] |
<?xml version="1.0" encoding="UTF-8"?>
<faces-config>
<faces-config-extension>
<namespace-uri>http://www.ibm.com/xsp/custom</namespace-uri>
<default-prefix>xc</default-prefix>
</faces-config-extension>
<composite-component>
<component-type>topicThreadForum</component-type>
<composite-name>topicThreadForum</composite-name>
<composite-file>/topicThreadForum.xsp</composite-file>
<property>
<property-name>refreshId</property-name>
<property-class>string</property-class>
</property>
<composite-extension>
<designer-extension>
<render-markup/>
</designer-extension>
</composite-extension>
<property>
<property-name>isForceShowRow</property-name>
<property-class>javax.faces.el.MethodBinding</property-class>
<property-extension>
<method-binding-property>true</method-binding-property>
</property-extension>
</property>
<property>
<property-name>isLoadBodyArea</property-name>
<property-class>boolean</property-class>
</property>
<property>
<property-name>isShowBodyArea</property-name>
<property-class>javax.faces.el.MethodBinding</property-class>
<property-extension>
<method-binding-property>true</method-binding-property>
</property-extension>
</property>
<property>
<property-name>isLoadEditArea</property-name>
<property-class>boolean</property-class>
</property>
<property>
<property-name>isShowEditArea</property-name>
<property-class>javax.faces.el.MethodBinding</property-class>
<property-extension>
<method-binding-property>true</method-binding-property>
</property-extension>
</property>
<property>
<property-name>isLoadMainEditArea</property-name>
<property-class>boolean</property-class>
</property>
</composite-component>
</faces-config>
| XPages | 3 | jesse-gallagher/XPagesExtensionLibrary | extlib/lwp/product/nsf/Teamroom/CustomControls/topicThreadForum.xsp-config | [
"Apache-2.0"
] |
server {
listen 80;
server_name www.example.com example.com;
root /var/www/www.example.com/web;
if ($http_host != "www.example.com") {
rewrite ^ http://www.example.com$request_uri permanent;
}
index index.php index.html;
location = /favicon.ico {
log_not_found off;
access_log off;
}
location = /robots.txt {
allow all;
log_not_found off;
access_log off;
}
# Make sure files with the following extensions do not get loaded by nginx because nginx would display the source code, and these files can contain PASSWORDS!
location ~* \.(tpl|html5|xhtml)$ {
deny all;
}
# Deny all attempts to access hidden files such as .htaccess, .htpasswd, .DS_Store (Mac).
location ~ /\. {
deny all;
access_log off;
log_not_found off;
}
location / {
try_files $uri $uri/ /index.php?$args;
}
location ~* \.(jpg|jpeg|png|gif|css|js|ico)$ {
expires max;
log_not_found off;
}
location ~ \.php$ {
try_files $uri =404;
include /etc/nginx/fastcgi_params;
fastcgi_pass 127.0.0.1:9000;
fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;
}
}
| ApacheConf | 4 | tsrivishnu/certbot | certbot-compatibility-test/nginx/nginx-roundtrip-testdata/contao/sites-available/example.com.vhost | [
"Apache-2.0"
] |
%span.position-relative.gl-pr-6.gl-display-inline-flex
= yield
| Haml | 1 | Testiduk/gitlabhq | app/views/shared/namespaces/cascading_settings/_setting_label_container.html.haml | [
"MIT"
] |
package unit.issues;
class Issue9899 extends unit.Test {
function test() {
t(switch (macro !a is T) {
case {expr: EIs({expr: EUnop(_)}, _)}: true;
case _: false;
});
}
} | Haxe | 3 | wiltonlazary/haxe | tests/unit/src/unit/issues/Issue9899.hx | [
"MIT"
] |
// run-pass
// Tests that we can call a function bounded over a supertrait from
// a default method
fn require_y<T: Y>(x: T) -> isize { x.y() }
trait Y {
fn y(self) -> isize;
}
trait Z: Y + Sized {
fn x(self) -> isize {
require_y(self)
}
}
impl Y for isize {
fn y(self) -> isize { self }
}
impl Z for isize {}
pub fn main() {
assert_eq!(12.x(), 12);
}
| Rust | 4 | Eric-Arellano/rust | src/test/ui/traits/default-method-supertrait-vtable.rs | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] |
#market_list_wrapper.has-right-dropdown
- if @market_groups.size > 1
.dropdown-wrapper
ul.dropdown-menu
li: a.active data-name='all' href="javascript:;" = t('.all_html')
- @market_groups.each do |name|
li: a data-name=name href="javascript:;" = t(".#{name}_html")
#market_list
.panel.panel-default
.panel-body.panel-body-head
table.table
thead: tr
th.col-xs-7
span.name = t('.all')
th.col-xs-4.text-right.price = t('.price')
th.col-xs-5.text-right.change = t('.change')
.panel-body.panel-body-content
table.table.table-hover.markets.all
tbody
- @markets.each do |market|
tr.market id="market-list-#{market.id}" class="quote-#{market.quote_unit}" data-market=market.id
td.col-xs-4.name
= link_to market.name, market_path(market)
td.col-xs-15.price
= Global[market.id].ticker[:last]
td.col-xs-5.change
| +0.00%
| Slim | 4 | gsmlg/peatio | app/views/private/markets/_market_list.html.slim | [
"MIT"
] |
# coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import remove_start
class Ir90TvIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?90tv\.ir/video/(?P<id>[0-9]+)/.*'
_TESTS = [{
'url': 'http://90tv.ir/video/95719/%D8%B4%D8%A7%DB%8C%D8%B9%D8%A7%D8%AA-%D9%86%D9%82%D9%84-%D9%88-%D8%A7%D9%86%D8%AA%D9%82%D8%A7%D9%84%D8%A7%D8%AA-%D9%85%D9%87%D9%85-%D9%81%D9%88%D8%AA%D8%A8%D8%A7%D9%84-%D8%A7%D8%B1%D9%88%D9%BE%D8%A7-940218',
'md5': '411dbd94891381960cb9e13daa47a869',
'info_dict': {
'id': '95719',
'ext': 'mp4',
'title': 'شایعات نقل و انتقالات مهم فوتبال اروپا 94/02/18',
'thumbnail': r're:^https?://.*\.jpg$',
}
}, {
'url': 'http://www.90tv.ir/video/95719/%D8%B4%D8%A7%DB%8C%D8%B9%D8%A7%D8%AA-%D9%86%D9%82%D9%84-%D9%88-%D8%A7%D9%86%D8%AA%D9%82%D8%A7%D9%84%D8%A7%D8%AA-%D9%85%D9%87%D9%85-%D9%81%D9%88%D8%AA%D8%A8%D8%A7%D9%84-%D8%A7%D8%B1%D9%88%D9%BE%D8%A7-940218',
'only_matching': True,
}]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
title = remove_start(self._html_search_regex(
r'<title>([^<]+)</title>', webpage, 'title'), '90tv.ir :: ')
video_url = self._search_regex(
r'<source[^>]+src="([^"]+)"', webpage, 'video url')
thumbnail = self._search_regex(r'poster="([^"]+)"', webpage, 'thumbnail url', fatal=False)
return {
'url': video_url,
'id': video_id,
'title': title,
'video_url': video_url,
'thumbnail': thumbnail,
}
| Python | 4 | hackarada/youtube-dl | youtube_dl/extractor/ir90tv.py | [
"Unlicense"
] |
CREATE TABLE `tb_baxubvrijb` (
`col_kcwxoovscx` set('enum_or_set_0','enum_or_set_1','enum_or_set_2') CHARACTER SET utf8 DEFAULT 'enum_or_set_0',
`col_fpfpelfmso` float(57,8) NULL,
`col_dbtkzzfxoi` varbinary(22) NOT NULL,
CONSTRAINT symb_hrwhbxnbts PRIMARY KEY (`col_dbtkzzfxoi`(10)),
UNIQUE `col_dbtkzzfxoi` (`col_dbtkzzfxoi`(2)),
UNIQUE KEY `col_fpfpelfmso` (`col_fpfpelfmso`,`col_dbtkzzfxoi`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
CREATE TABLE `tb_ddzfshecwo` (
`col_rryluiawnq` varchar(24) CHARACTER SET latin1 DEFAULT '',
`col_bdlbhtissa` year(4) NULL,
`col_kfawipagkn` tinyblob
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
RENAME TABLE `tb_ddzfshecwo` TO `tb_bcsetyvuul`, `tb_baxubvrijb` TO `tb_nxbyklxowd`;
RENAME TABLE `tb_bcsetyvuul` TO `tb_dlnhztodao`;
ALTER TABLE `tb_nxbyklxowd` ADD `col_zkfycwevvo` time;
ALTER TABLE `tb_nxbyklxowd` ADD COLUMN (`col_ememhbgtcx` bit NULL DEFAULT b'0', `col_zrrfkkosvo` char(203) CHARACTER SET utf8mb4);
ALTER TABLE `tb_nxbyklxowd` ADD (`col_fpafdojbdn` decimal(9,9) NULL, `col_juolcidyaj` datetime DEFAULT '2019-07-04 00:00:00');
ALTER TABLE `tb_nxbyklxowd` ADD `col_qhgfympzxa` longtext;
ALTER TABLE `tb_nxbyklxowd` ADD COLUMN (`col_tymlykugck` integer(83) zerofill, `col_awkvpagorc` set('enum_or_set_0','enum_or_set_1','enum_or_set_2') CHARACTER SET utf8 COLLATE utf8_unicode_ci DEFAULT 'enum_or_set_0');
ALTER TABLE `tb_nxbyklxowd` ADD COLUMN `col_crrxfxfdzs` datetime(0);
ALTER TABLE `tb_nxbyklxowd` ADD COLUMN (`col_cgdxdxesyd` decimal NOT NULL, `col_pjkofcyiht` set('enum_or_set_0','enum_or_set_1','enum_or_set_2') CHARACTER SET utf8 DEFAULT 'enum_or_set_0');
ALTER TABLE `tb_nxbyklxowd` ADD COLUMN `col_iiqainhxkc` smallint(81) zerofill NULL;
ALTER TABLE `tb_nxbyklxowd` ADD (`col_kbnueyzmpm` tinyblob, `col_zixjkfdybk` binary NOT NULL);
ALTER TABLE `tb_nxbyklxowd` CHARACTER SET = utf8mb4;
ALTER TABLE `tb_nxbyklxowd` ADD UNIQUE `col_cgdxdxesyd`(`col_cgdxdxesyd`,`col_iiqainhxkc`);
ALTER TABLE `tb_nxbyklxowd` ADD UNIQUE KEY `uk_regnlkocpv` (`col_zrrfkkosvo`(16),`col_fpafdojbdn`);
ALTER TABLE `tb_nxbyklxowd` ALTER `col_dbtkzzfxoi` DROP DEFAULT;
ALTER TABLE `tb_nxbyklxowd` ALTER `col_iiqainhxkc` DROP DEFAULT;
ALTER TABLE `tb_nxbyklxowd` ALTER `col_kcwxoovscx` DROP DEFAULT;
ALTER TABLE `tb_nxbyklxowd` CHANGE COLUMN `col_fpfpelfmso` `col_xkzfmyvfvv` smallint DEFAULT '1' FIRST;
ALTER TABLE `tb_nxbyklxowd` DROP `col_juolcidyaj`, DROP `col_pjkofcyiht`;
ALTER TABLE `tb_nxbyklxowd` DROP COLUMN `col_xkzfmyvfvv`;
ALTER TABLE `tb_nxbyklxowd` DROP INDEX `col_cgdxdxesyd`;
ALTER TABLE `tb_nxbyklxowd` DROP INDEX `uk_regnlkocpv`;
| SQL | 3 | yuanweikang2020/canal | parse/src/test/resources/ddl/alter/test_16.sql | [
"Apache-2.0"
] |
server {
listen 80;
listen [::]:80;
root <path/to/o2system/public/folder>;
autoindex on;
index index.html index.php;
server_name <domain>;
server_alias *.<domain>;
access_log /var/log/nginx/<domain>-access.log;
error_log /var/log/nginx/<domain>-error.log;
# enforce www (exclude certain subdomains)
#if ($host !~* ^(www|subdomain))
#{
# rewrite ^/(.*)$ $scheme://www.$host/$1 permanent;
#}
# enforce NO www
if ($host ~* ^www\.(.*))
{
set $host_without_www $1;
rewrite ^/(.*)$ $scheme://$host_without_www/$1 permanent;
}
# removes trailing slashes (prevents SEO duplicate content issues)
if (!-d $request_filename)
{
rewrite ^/(.+)/$ /$1 permanent;
}
# set expiration of assets to MAX for caching
location ~* \.(ico|css|js|gif|jpe?g|png)(\?[0-9]+)?$ {
expires max;
access_log off;
log_not_found off;
add_header Pragma public;
add_header Cache-Control "public, must-revalidate, proxy-revalidate";
}
# unless the request is for a valid file (image, js, css, etc.), send to o2system
if (!-e $request_filename)
{
rewrite ^/(.*)$ /index.php?/$1 last;
break;
}
# canonicalize o2system url end points
# if your default controller is something other than "hello" you should change the following
if ($request_uri ~* ^(/hello(/index)?|/index(.php)?)/?$)
{
rewrite ^(.*)$ / permanent;
}
# catch all errors page
error_page 404 /index.php;
error_page 500 /index.php;
error_page 502 /index.php;
error_page 503 /index.php;
location / {
# Check if a file or directory index file exists, else route it to index.php.
try_files $uri $uri/ /index.php;
location = /index.php {
fastcgi_pass 127.0.0.1:9000;
fastcgi_param SCRIPT_FILENAME <path/to/o2system/public/folder>$fastcgi_script_name;
include fastcgi_params;
}
}
# deny hidden files
location ~ /\. {
deny all;
access_log off;
log_not_found off;
}
# deny .config file
location ~ /\.config$ {
deny all;
access_log off;
log_not_found off;
}
# deny .vhost file
location ~ /\.config$ {
deny all;
access_log off;
log_not_found off;
}
# for production server
#location ~ \.php$ {
# return 444;
#}
} | ApacheConf | 5 | vulnwalker/apiUjiWawasan | public/nginx.vhost | [
"MIT"
] |
---
title: "v1.23.0 - 2021-09-03"
linkTitle: "v1.23.0 - 2021-09-03"
weight: -99
---
<html>
<head>
<title>kubernetes/minikube - Leaderboard</title>
<link rel="preconnect" href="https://fonts.gstatic.com">
<link href="https://fonts.googleapis.com/css2?family=Open+Sans:wght@300;400;600;700&display=swap" rel="stylesheet">
<script type="text/javascript" src="https://www.gstatic.com/charts/loader.js"></script>
<script type="text/javascript">
google.charts.load("current", {packages:["corechart"]});
</script>
<style>
body {
font-family: 'Open Sans', sans-serif;
background-color: #f7f7fa;
padding: 1em;
}
h1 {
color: rgba(66,133,244);
margin-bottom: 0em;
}
.subtitle {
color: rgba(23,90,201);
font-size: small;
}
pre {
white-space: pre-wrap;
word-wrap: break-word;
color: #666;
font-size: small;
}
h2.cli {
color: #666;
}
h2 {
color: #333;
}
.board p {
font-size: small;
color: #999;
text-align: center;
}
.board {
clear: right;
display: inline-block;
padding: 0.5em;
margin: 0.5em;
background-color: #fff;
}
.board:nth-child(4n+3) {
border: 2px solid rgba(66,133,244,0.25);
color: rgba(66,133,244);
}
.board:nth-child(4n+2) {
border: 2px solid rgba(219,68,55,0.25);
color: rgba rgba(219,68,55);
}
.board:nth-child(4n+1) {
border: 2px solid rgba(244,160,0,0.25);
color: rgba(244,160,0);
}
.board:nth-child(4n) {
border: 2px solid rgba(15,157,88,0.25);
color: rgba(15,157,88);
}
h3 {
text-align: center;
}
</style>
</head>
<body>
<h1>kubernetes/minikube</h1>
<div class="subtitle">2021-07-07 — 2021-09-03</div>
<h2>Reviewers</h2>
<div class="board">
<h3>Most Influential</h3>
<p># of Merged PRs reviewed</p>
<div id="chart_reviewCounts" style="width: 450px; height: 350px;"></div>
<script type="text/javascript">
google.charts.setOnLoadCallback(drawreviewCounts);
function drawreviewCounts() {
var data = new google.visualization.arrayToDataTable([
[{label:'',type:'string'},{label: '# of Merged PRs reviewed', type: 'number'}, { role: 'annotation' }],
["medyagh", 38, "38"],
["sharifelgamal", 15, "15"],
["afbjorklund", 8, "8"],
["spowelljr", 8, "8"],
["andriyDev", 3, "3"],
["ilya-zuyev", 2, "2"],
["azhao155", 1, "1"],
["mikebrow", 1, "1"],
["briandealwis", 1, "1"],
["ncresswell", 1, "1"],
["iliadmitriev", 1, "1"],
["shahiddev", 1, "1"],
]);
var options = {
axisTitlesPosition: 'none',
bars: 'horizontal', // Required for Material Bar Charts.
axes: {
x: {
y: { side: 'top'} // Top x-axis.
}
},
legend: { position: "none" },
bar: { groupWidth: "85%" }
};
var chart = new google.visualization.BarChart(document.getElementById('chart_reviewCounts'));
chart.draw(data, options);
};
</script>
</div>
<div class="board">
<h3>Most Helpful</h3>
<p># of words written in merged PRs</p>
<div id="chart_reviewWords" style="width: 450px; height: 350px;"></div>
<script type="text/javascript">
google.charts.setOnLoadCallback(drawreviewWords);
function drawreviewWords() {
var data = new google.visualization.arrayToDataTable([
[{label:'',type:'string'},{label: '# of words written in merged PRs', type: 'number'}, { role: 'annotation' }],
["medyagh", 1407, "1407"],
["sharifelgamal", 526, "526"],
["afbjorklund", 465, "465"],
["spowelljr", 261, "261"],
["mikebrow", 145, "145"],
["iliadmitriev", 101, "101"],
["andriyDev", 79, "79"],
["shahiddev", 36, "36"],
["ilya-zuyev", 27, "27"],
["ncresswell", 18, "18"],
["briandealwis", 12, "12"],
["azhao155", 2, "2"],
]);
var options = {
axisTitlesPosition: 'none',
bars: 'horizontal', // Required for Material Bar Charts.
axes: {
x: {
y: { side: 'top'} // Top x-axis.
}
},
legend: { position: "none" },
bar: { groupWidth: "85%" }
};
var chart = new google.visualization.BarChart(document.getElementById('chart_reviewWords'));
chart.draw(data, options);
};
</script>
</div>
<div class="board">
<h3>Most Demanding</h3>
<p># of Review Comments in merged PRs</p>
<div id="chart_reviewComments" style="width: 450px; height: 350px;"></div>
<script type="text/javascript">
google.charts.setOnLoadCallback(drawreviewComments);
function drawreviewComments() {
var data = new google.visualization.arrayToDataTable([
[{label:'',type:'string'},{label: '# of Review Comments in merged PRs', type: 'number'}, { role: 'annotation' }],
["medyagh", 25, "25"],
["spowelljr", 13, "13"],
["sharifelgamal", 10, "10"],
["mikebrow", 7, "7"],
["afbjorklund", 7, "7"],
["andriyDev", 5, "5"],
["ilya-zuyev", 2, "2"],
["iliadmitriev", 2, "2"],
["briandealwis", 0, "0"],
["ncresswell", 0, "0"],
["azhao155", 0, "0"],
["shahiddev", 0, "0"],
]);
var options = {
axisTitlesPosition: 'none',
bars: 'horizontal', // Required for Material Bar Charts.
axes: {
x: {
y: { side: 'top'} // Top x-axis.
}
},
legend: { position: "none" },
bar: { groupWidth: "85%" }
};
var chart = new google.visualization.BarChart(document.getElementById('chart_reviewComments'));
chart.draw(data, options);
};
</script>
</div>
<h2>Pull Requests</h2>
<div class="board">
<h3>Most Active</h3>
<p># of Pull Requests Merged</p>
<div id="chart_prCounts" style="width: 450px; height: 350px;"></div>
<script type="text/javascript">
google.charts.setOnLoadCallback(drawprCounts);
function drawprCounts() {
var data = new google.visualization.arrayToDataTable([
[{label:'',type:'string'},{label: '# of Pull Requests Merged', type: 'number'}, { role: 'annotation' }],
["andriyDev", 41, "41"],
["spowelljr", 24, "24"],
["sharifelgamal", 17, "17"],
["afbjorklund", 14, "14"],
["medyagh", 12, "12"],
["jeffmaury", 4, "4"],
["ilya-zuyev", 3, "3"],
["prezha", 3, "3"],
["kadern0", 2, "2"],
["mahalrs", 2, "2"],
["browncrane", 2, "2"],
["rajdevworks", 1, "1"],
["zigarn", 1, "1"],
["valaparthvi", 1, "1"],
["zhan9san", 1, "1"],
]);
var options = {
axisTitlesPosition: 'none',
bars: 'horizontal', // Required for Material Bar Charts.
axes: {
x: {
y: { side: 'top'} // Top x-axis.
}
},
legend: { position: "none" },
bar: { groupWidth: "85%" }
};
var chart = new google.visualization.BarChart(document.getElementById('chart_prCounts'));
chart.draw(data, options);
};
</script>
</div>
<div class="board">
<h3>Big Movers</h3>
<p>Lines of code (delta)</p>
<div id="chart_prDeltas" style="width: 450px; height: 350px;"></div>
<script type="text/javascript">
google.charts.setOnLoadCallback(drawprDeltas);
function drawprDeltas() {
var data = new google.visualization.arrayToDataTable([
[{label:'',type:'string'},{label: 'Lines of code (delta)', type: 'number'}, { role: 'annotation' }],
["prezha", 20587, "20587"],
["medyagh", 6068, "6068"],
["andriyDev", 2567, "2567"],
["afbjorklund", 1205, "1205"],
["spowelljr", 771, "771"],
["sharifelgamal", 711, "711"],
["kadern0", 334, "334"],
["ilya-zuyev", 284, "284"],
["coolamiy", 238, "238"],
["AkihiroSuda", 231, "231"],
["dinever", 171, "171"],
["balasu", 159, "159"],
["raghavendra-talur", 120, "120"],
["zhan9san", 97, "97"],
["mahalrs", 65, "65"],
]);
var options = {
axisTitlesPosition: 'none',
bars: 'horizontal', // Required for Material Bar Charts.
axes: {
x: {
y: { side: 'top'} // Top x-axis.
}
},
legend: { position: "none" },
bar: { groupWidth: "85%" }
};
var chart = new google.visualization.BarChart(document.getElementById('chart_prDeltas'));
chart.draw(data, options);
};
</script>
</div>
<div class="board">
<h3>Most difficult to review</h3>
<p>Average PR size (added+changed)</p>
<div id="chart_prSize" style="width: 450px; height: 350px;"></div>
<script type="text/javascript">
google.charts.setOnLoadCallback(drawprSize);
function drawprSize() {
var data = new google.visualization.arrayToDataTable([
[{label:'',type:'string'},{label: 'Average PR size (added+changed)', type: 'number'}, { role: 'annotation' }],
["prezha", 2803, "2803"],
["medyagh", 376, "376"],
["AkihiroSuda", 204, "204"],
["kadern0", 166, "166"],
["dinever", 162, "162"],
["balasu", 159, "159"],
["raghavendra-talur", 117, "117"],
["coolamiy", 103, "103"],
["afbjorklund", 81, "81"],
["zhan9san", 71, "71"],
["ilya-zuyev", 68, "68"],
["BlaineEXE", 57, "57"],
["andriyDev", 41, "41"],
["de-sh", 38, "38"],
["vishjain", 35, "35"],
]);
var options = {
axisTitlesPosition: 'none',
bars: 'horizontal', // Required for Material Bar Charts.
axes: {
x: {
y: { side: 'top'} // Top x-axis.
}
},
legend: { position: "none" },
bar: { groupWidth: "85%" }
};
var chart = new google.visualization.BarChart(document.getElementById('chart_prSize'));
chart.draw(data, options);
};
</script>
</div>
<h2>Issues</h2>
<div class="board">
<h3>Most Active</h3>
<p># of comments</p>
<div id="chart_comments" style="width: 450px; height: 350px;"></div>
<script type="text/javascript">
google.charts.setOnLoadCallback(drawcomments);
function drawcomments() {
var data = new google.visualization.arrayToDataTable([
[{label:'',type:'string'},{label: '# of comments', type: 'number'}, { role: 'annotation' }],
["afbjorklund", 45, "45"],
["RA489", 36, "36"],
["sharifelgamal", 31, "31"],
["spowelljr", 28, "28"],
["medyagh", 20, "20"],
["andriyDev", 13, "13"],
["mprimeaux", 7, "7"],
["jayesh-srivastava", 6, "6"],
["Bytesu", 5, "5"],
["Osprey2021", 4, "4"],
["Luttik", 4, "4"],
["Rits333", 3, "3"],
["pfeigl", 3, "3"],
["Himkwok", 3, "3"],
["yangyaofei", 2, "2"],
]);
var options = {
axisTitlesPosition: 'none',
bars: 'horizontal', // Required for Material Bar Charts.
axes: {
x: {
y: { side: 'top'} // Top x-axis.
}
},
legend: { position: "none" },
bar: { groupWidth: "85%" }
};
var chart = new google.visualization.BarChart(document.getElementById('chart_comments'));
chart.draw(data, options);
};
</script>
</div>
<div class="board">
<h3>Most Helpful</h3>
<p># of words (excludes authored)</p>
<div id="chart_commentWords" style="width: 450px; height: 350px;"></div>
<script type="text/javascript">
google.charts.setOnLoadCallback(drawcommentWords);
function drawcommentWords() {
var data = new google.visualization.arrayToDataTable([
[{label:'',type:'string'},{label: '# of words (excludes authored)', type: 'number'}, { role: 'annotation' }],
["D2C-Cai", 4008, "4008"],
["afbjorklund", 2677, "2677"],
["andriyDev", 1510, "1510"],
["spowelljr", 1364, "1364"],
["deepakponnada", 1190, "1190"],
["mcg1969", 932, "932"],
["sharifelgamal", 828, "828"],
["yangyaofei", 757, "757"],
["medyagh", 713, "713"],
["Luttik", 452, "452"],
["pfeigl", 410, "410"],
["apupier", 388, "388"],
["rlanting", 384, "384"],
["mprimeaux", 371, "371"],
["edemen", 356, "356"],
]);
var options = {
axisTitlesPosition: 'none',
bars: 'horizontal', // Required for Material Bar Charts.
axes: {
x: {
y: { side: 'top'} // Top x-axis.
}
},
legend: { position: "none" },
bar: { groupWidth: "85%" }
};
var chart = new google.visualization.BarChart(document.getElementById('chart_commentWords'));
chart.draw(data, options);
};
</script>
</div>
<div class="board">
<h3>Top Closers</h3>
<p># of issues closed (excludes authored)</p>
<div id="chart_issueCloser" style="width: 450px; height: 350px;"></div>
<script type="text/javascript">
google.charts.setOnLoadCallback(drawissueCloser);
function drawissueCloser() {
var data = new google.visualization.arrayToDataTable([
[{label:'',type:'string'},{label: '# of issues closed (excludes authored)', type: 'number'}, { role: 'annotation' }],
["spowelljr", 47, "47"],
["medyagh", 41, "41"],
["sharifelgamal", 22, "22"],
["andriyDev", 6, "6"],
["ilya-zuyev", 3, "3"],
["afbjorklund", 1, "1"],
]);
var options = {
axisTitlesPosition: 'none',
bars: 'horizontal', // Required for Material Bar Charts.
axes: {
x: {
y: { side: 'top'} // Top x-axis.
}
},
legend: { position: "none" },
bar: { groupWidth: "85%" }
};
var chart = new google.visualization.BarChart(document.getElementById('chart_issueCloser'));
chart.draw(data, options);
};
</script>
</div>
</body>
</html>
| HTML | 4 | skyplaying/minikube | site/content/en/docs/contrib/leaderboard/v1.23.0.html | [
"Apache-2.0"
] |
Literate CoffeeScript
====================
This is a spec using written in Literate CoffeeScript
describe 'Coffee.litcoffee', ->
it 'should pass', ->
expect(1+2).toEqual(3)
| Literate CoffeeScript | 4 | sidartastan/SoftwareVerificationValidationTesting_JasmineTest | node_modules/jasmine-node/spec/litcoffee/Litcoffee.spec.litcoffee | [
"MIT"
] |
#! /bin/jconsole
require 'graphics/png'
read_dsv =: (1 :'<;._2@,&u;._2')(@:(CR-.~[:1!:1@:<'data/',,&'.txt'))
dat =. ".@> 1{"1]2}._8}. TAB read_dsv 'ZKE0 R'
interval =. 16*60%~20-~_30 (<./,>./)\ dat
img =. |: (|.i.16) (1=I.)~"1]_0.5 0+"1<.0.2 _0.2+"1 interval
'favicon.png' writepng~ 256#.0 10 200,"0 1~255<.256 <.@:* img
exit ''
| J | 3 | Banbeucmas/CrinGraph | favicon.ijs | [
"0BSD"
] |
*** Settings ***
Library OperatingSystem
Library ParameterLibrary after1with after2with WITH NAME Params
Library ParameterLibrary after1 after2
Library ParameterLibrary xxx yyy with name Won't work
*** Test Cases ***
Import Library Normally After Importing With Name In Another Suite
OperatingSystem.Should Exist .
ParameterLibrary.Parameters Should Be after1 after2
Import Library With Name After Importing With Name In Another Suite
Params.Parameters Should Be after1with after2with
Correct Error When Using Keyword From Same Library With Different Names Without Prefix 3
[Documentation] FAIL Multiple keywords with name 'Parameters' found. \
... Give the full name of the keyword you want to use:
... ${SPACE*4}ParameterLibrary.Parameters
... ${SPACE*4}Params.Parameters
Parameters
| RobotFramework | 4 | rdagum/robotframework | atest/testdata/test_libraries/with_name_3.robot | [
"ECL-2.0",
"Apache-2.0"
] |
DeployingCloud:Deploying to a cloud service
ProductionHeroku:Deploying to Heroku
Deploying-CloudFoundry:Deploying to Cloud Foundry
Deploying-CleverCloud:Deploying to Clever Cloud
Deploying-Boxfuse:Deploying to Boxfuse and AWS | TeX | 2 | eed3si9n/playframework | documentation/manual/working/commonGuide/production/cloud/index.toc | [
"Apache-2.0"
] |
#ifndef NUMPY_CORE_SRC_MULTIARRAY_NPY_BUFFER_H_
#define NUMPY_CORE_SRC_MULTIARRAY_NPY_BUFFER_H_
extern NPY_NO_EXPORT PyBufferProcs array_as_buffer;
NPY_NO_EXPORT int
_buffer_info_free(void *buffer_info, PyObject *obj);
NPY_NO_EXPORT PyArray_Descr*
_descriptor_from_pep3118_format(char const *s);
NPY_NO_EXPORT int
void_getbuffer(PyObject *obj, Py_buffer *view, int flags);
#endif /* NUMPY_CORE_SRC_MULTIARRAY_NPY_BUFFER_H_ */
| C | 3 | iam-abbas/numpy | numpy/core/src/multiarray/npy_buffer.h | [
"BSD-3-Clause"
] |
upstream @backends {
server 127.0.0.1:8000 max_fails=250 fail_timeout=60s;
server 127.0.0.1:8001 max_fails=250 fail_timeout=60s;
server 127.0.0.1:8002 max_fails=250 fail_timeout=60s;
server 127.0.0.1:8003 max_fails=250 fail_timeout=60s;
server 127.0.0.1:8004 max_fails=250 fail_timeout=60s;
server 127.0.0.1:8005 max_fails=250 fail_timeout=60s;
server 127.0.0.1:8006 max_fails=250 fail_timeout=60s;
server 127.0.0.1:8007 max_fails=250 fail_timeout=60s;
keepalive 64;
}
server {
listen 127.0.0.1:80;
server_name backend.local;
add_header X-RateLimit-Limit 100;
add_header X-RateLimit-Remaining 50;
add_header X-RateLimit-Reset 1372700873;
add_header Content-Type "application/json; charset=utf-8";
rewrite ^(.*) http://$server_name$request_uri? permanent;
limit_conn conn_limit_per_ip 10;
limit_req zone=req_limit_per_ip burst=5 nodelay;
if ($request_method !~ ^(GET|HEAD|PUT|POST|DELETE|OPTIONS)$ ){
return 405;
}
include /data/etc/nginx/errors.conf;
include /data/etc/nginx/common.conf;
location / {
proxy_redirect off;
proxy_pass_header Server;
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Scheme $scheme;
proxy_set_header X-NginX-Proxy true;
proxy_set_header X-Frame-Options "DENY";
proxy_set_header Connection "";
proxy_http_version 1.1;
proxy_connect_timeout 10;
proxy_read_timeout 10;
proxy_pass http://@backends;
allow 127.0.0.1;
deny all;
}
try_files $uri /maintenance.html @backends;
access_log /dev/null;
# error_log /dev/null crit;
}
server {
listen 443 ssl;
server_name backend.domain.tld;
add_header X-RateLimit-Limit 100;
add_header X-RateLimit-Remaining 50;
add_header X-RateLimit-Reset 1372700873;
add_header Content-Type "application/json; charset=utf-8";
ssl on;
ssl_certificate /data/ssl/server.crt;
ssl_certificate_key /data/ssl/server.key;
rewrite ^(.*) https://$server_name$request_uri? permanent;
limit_conn conn_limit_per_ip 10;
limit_req zone=req_limit_per_ip burst=5 nodelay;
if ($request_method !~ ^(GET|HEAD|PUT|POST|DELETE|OPTIONS)$ ){
return 405;
}
include /data/etc/nginx/errors.conf;
include /data/etc/nginx/common.conf;
location / {
proxy_redirect off;
proxy_pass_header Server;
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Scheme $scheme;
proxy_set_header X-NginX-Proxy true;
proxy_set_header X-Frame-Options "DENY";
proxy_set_header Connection "";
proxy_http_version 1.1;
proxy_connect_timeout 10;
proxy_read_timeout 10;
proxy_pass http://@backends;
}
try_files $uri /maintenance.html @backends;
access_log /dev/null;
# error_log /dev/null crit;
}
| ApacheConf | 3 | alejandrobernardis/python-server-stack | installer/config/nginx/data/etc/nginx/sites-available/backend.vhost | [
"MIT"
] |
moment = require 'moment'
React = require "react"
ReactDOM = require "react-dom"
ReactTestUtils = require 'react-addons-test-utils'
MessageTimestamp = require('../lib/message-timestamp').default
msgTime = ->
moment([2010, 1, 14, 15, 25, 50, 125]) # Feb 14, 2010 at 3:25 PM
describe "MessageTimestamp", ->
beforeEach ->
@item = ReactTestUtils.renderIntoDocument(
<MessageTimestamp date={msgTime()} />
)
it "still processes one day, even if it crosses a month divider", ->
# this should be tested in moment.js, but we add a test here for our own sanity too
feb28 = moment([2015, 1, 28])
mar01 = moment([2015, 2, 1])
expect(mar01.diff(feb28, 'days')).toBe 1
| CoffeeScript | 4 | cnheider/nylas-mail | packages/client-app/internal_packages/message-list/spec/message-timestamp-spec.cjsx | [
"MIT"
] |
fun main () : nothing
var i : int;
var x : int[10];
{
if("Sfdsdfsdf" # "qwerrwer") then{
puti(9);
}
}
| Grace | 1 | yorgosk/grace-compiler | examples/semantics/cond_tests.grace | [
"MIT"
] |
{**
* @param string $basePath web base path
* @param string $robots tell robots how to index the content of a page (optional)
* @param array $flashes flash messages
*}
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="description" content="">
<meta name="author" content="">
<meta name="robots" content="{$robots}" n:ifset="$robots">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>{ifset $title}{$title} › {/ifset}Translation report</title>
<link rel="stylesheet" media="screen,projection,tv" href="{$cdnUrl}/css/style.css?v={$cssHash}">
<link rel="shortcut icon" href="{$cdnUrl}/favicon.png">
<!-- HTML5 shim and Respond.js IE8 support of HTML5 elements and media queries -->
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/libs/html5shiv/3.7.0/html5shiv.js"></script>
<script src="https://oss.maxcdn.com/libs/respond.js/1.3.0/respond.min.js"></script>
<![endif]-->
<script n:syntax="off">
(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
})(window,document,'script','//www.google-analytics.com/analytics.js','ga');
ga('create', 'UA-33892654-4', 'khanovaskola.cz');
ga('send', 'pageview');
</script>
{block #head}{/block}
</head>
<body class="amara-guest history-empty">
<script> document.documentElement.className+=' js' </script>
{block #navbar}
{include _navbar.latte}
{/block}
<div class="container">
<div class="row">
<div class="col-md-8 col-md-offset-2" n:inner-foreach="$flashes as $flash">
{include _flash.latte, flash => $flash}
</div>
</div>
{include #content}
</div>
<footer>
</footer>
<script src="{$cdnUrl}/js/compiled.js?v={$jsHash}"></script>
{block #scripts}{/block}
</body>
</html>
| Latte | 4 | JavascriptID/sourcerer-app | src/test/resources/samples/langs/Latte/layout.latte | [
"MIT"
] |
@env
testing
PREFERENCES_DOT_ARC_USERLAND_ENV_VAR "Why hello there from preferences.arc!"
preferences_dot_arc_lowcase_env_var "Why hello there from preferences.arc!"
| Arc | 1 | hicksy/sandbox | test/mock/env/preferences/preferences.arc | [
"Apache-2.0"
] |
script = require './scriptAssertions'
require 'chai'.should()
shouldOutput = script.shouldOutput
describe 'lists'
it 'can construct an empty list'
'print []' shouldOutput '[]'
it 'can construct a list'
'print [1, 2, 3]' shouldOutput '[ 1, 2, 3 ]'
describe 'splats'
it 'can splat at the end of a list'
'list = [2, 3]
print [1, list, ...]' shouldOutput '[ 1, 2, 3 ]'
it 'can splat in the middle of a list'
'list = [2, 3]
print [1, list, ..., 4]' shouldOutput '[ 1, 2, 3, 4 ]'
it 'can splat at the start of a list'
'list = [1, 2, 3]
print [list, ..., 4]' shouldOutput '[ 1, 2, 3, 4 ]'
it 'can just be a splat'
'list = [1, 2, 3]
print [list, ...]' shouldOutput '[ 1, 2, 3 ]'
describe 'ranges'
it 'can accept a range'
'print [1..3]' shouldOutput '[ 1, 2, 3 ]'
it 'can put a range before other items'
'print [1..3, 4]' shouldOutput '[ 1, 2, 3, 4 ]'
it 'can put a range after other items'
'print [0, 1..3]' shouldOutput '[ 0, 1, 2, 3 ]'
it 'must be in a list'
@{ '1..3' shouldOutput '' }.should.throw r/range operator can only be used in a list, as in \[1\.\.3\]/
| PogoScript | 5 | featurist/pogoscript | test/listSpec.pogo | [
"BSD-2-Clause"
] |
/*
MySQL (Positive Technologies) grammar
The MIT License (MIT).
Copyright (c) 2015-2017, Ivan Kochurkin (kvanttt@gmail.com), Positive Technologies.
Copyright (c) 2017, Ivan Khudyashev (IHudyashov@ptsecurity.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
parser grammar MySqlParser;
options { tokenVocab=MySqlLexer; }
// Top Level Description
// details
intervalType
: intervalTypeBase
| YEAR | YEAR_MONTH | DAY_HOUR | DAY_MINUTE
| DAY_SECOND | HOUR_MINUTE | HOUR_SECOND | MINUTE_SECOND
| SECOND_MICROSECOND | MINUTE_MICROSECOND
| HOUR_MICROSECOND | DAY_MICROSECOND
;
// DB Objects
fullId
: uid (DOT_ID | '.' uid)?
;
fullColumnName
: uid (dottedId dottedId? )?
;
charsetName
: BINARY
| charsetNameBase
| STRING_LITERAL
| CHARSET_REVERSE_QOUTE_STRING
;
collationName
: uid | STRING_LITERAL;
uid
: simpleId
| REVERSE_QUOTE_ID
| CHARSET_REVERSE_QOUTE_STRING
;
simpleId
: ID
| charsetNameBase
| intervalTypeBase
| dataTypeBase
| keywordsCanBeId
| functionNameBase
;
dottedId
: DOT_ID
| '.' uid
;
// Literals
decimalLiteral
: DECIMAL_LITERAL | ZERO_DECIMAL | ONE_DECIMAL | TWO_DECIMAL
;
stringLiteral
: STRING_LITERAL+
;
booleanLiteral
: TRUE | FALSE;
hexadecimalLiteral
: STRING_CHARSET_NAME? HEXADECIMAL_LITERAL;
nullNotnull
: NOT? (NULL_LITERAL | NULL_SPEC_LITERAL)
;
constant
: stringLiteral | decimalLiteral
| '-' decimalLiteral
| hexadecimalLiteral | booleanLiteral
| REAL_LITERAL | BIT_STRING
| NOT? nullLiteral=(NULL_LITERAL | NULL_SPEC_LITERAL)
;
// Data Types
convertedDataType
: typeName=(BINARY| NCHAR) lengthOneDimension?
| typeName=CHAR lengthOneDimension? (CHARACTER SET charsetName)?
| typeName=(DATE | DATETIME | TIME)
| typeName=DECIMAL lengthTwoDimension?
| (SIGNED | UNSIGNED) INTEGER?
;
lengthOneDimension
: '(' decimalLiteral ')'
;
lengthTwoDimension
: '(' decimalLiteral ',' decimalLiteral ')'
;
// Common Lists
expressions
: expression (',' expression)*
;
// Common Expressons
currentTimestamp
:
(
(CURRENT_TIMESTAMP | LOCALTIME | LOCALTIMESTAMP) ('(' decimalLiteral? ')')?
| NOW '(' decimalLiteral? ')'
)
;
// Functions
functionCall
: specificFunction #specificFunctionCall
| scalarFunctionName '(' functionArgs? ')' #scalarFunctionCall
| fullId '(' functionArgs? ')' #udfFunctionCall
;
specificFunction
: (
CURRENT_DATE | CURRENT_TIME | CURRENT_TIMESTAMP
| CURRENT_USER | LOCALTIME
) #simpleFunctionCall
| CONVERT '(' expression separator=',' convertedDataType ')' #dataTypeFunctionCall
| CONVERT '(' expression USING charsetName ')' #dataTypeFunctionCall
| CAST '(' expression AS convertedDataType ')' #dataTypeFunctionCall
| VALUES '(' fullColumnName ')' #valuesFunctionCall
| CASE expression caseFuncAlternative+
(ELSE elseArg=functionArg)? END #caseFunctionCall
| CASE caseFuncAlternative+
(ELSE elseArg=functionArg)? END #caseFunctionCall
| CHAR '(' functionArgs (USING charsetName)? ')' #charFunctionCall
| POSITION
'('
(
positionString=stringLiteral
| positionExpression=expression
)
IN
(
inString=stringLiteral
| inExpression=expression
)
')' #positionFunctionCall
| (SUBSTR | SUBSTRING)
'('
(
sourceString=stringLiteral
| sourceExpression=expression
) FROM
(
fromDecimal=decimalLiteral
| fromExpression=expression
)
(
FOR
(
forDecimal=decimalLiteral
| forExpression=expression
)
)?
')' #substrFunctionCall
| TRIM
'('
positioinForm=(BOTH | LEADING | TRAILING)
(
sourceString=stringLiteral
| sourceExpression=expression
)?
FROM
(
fromString=stringLiteral
| fromExpression=expression
)
')' #trimFunctionCall
| TRIM
'('
(
sourceString=stringLiteral
| sourceExpression=expression
)
FROM
(
fromString=stringLiteral
| fromExpression=expression
)
')' #trimFunctionCall
| WEIGHT_STRING
'('
(stringLiteral | expression)
(AS stringFormat=(CHAR | BINARY)
'(' decimalLiteral ')' )? levelsInWeightString?
')' #weightFunctionCall
| EXTRACT
'('
intervalType
FROM
(
sourceString=stringLiteral
| sourceExpression=expression
)
')' #extractFunctionCall
| GET_FORMAT
'('
datetimeFormat=(DATE | TIME | DATETIME)
',' stringLiteral
')' #getFormatFunctionCall
;
caseFuncAlternative
: WHEN condition=functionArg
THEN consequent=functionArg
;
levelsInWeightString
: LEVEL levelInWeightListElement
(',' levelInWeightListElement)* #levelWeightList
| LEVEL
firstLevel=decimalLiteral '-' lastLevel=decimalLiteral #levelWeightRange
;
levelInWeightListElement
: decimalLiteral orderType=(ASC | DESC | REVERSE)?
;
scalarFunctionName
: functionNameBase
| ASCII | CURDATE | CURRENT_DATE | CURRENT_TIME
| CURRENT_TIMESTAMP | CURTIME | DATE_ADD | DATE_SUB
| IF | INSERT | LOCALTIME | LOCALTIMESTAMP | MID | NOW
| REPLACE | SUBSTR | SUBSTRING | SYSDATE | TRIM
| UTC_DATE | UTC_TIME | UTC_TIMESTAMP
;
functionArgs
: (constant | fullColumnName | functionCall | expression)
(
','
(constant | fullColumnName | functionCall | expression)
)*
;
functionArg
: constant | fullColumnName | functionCall | expression
;
// Expressions, predicates
// Simplified approach for expression
expression
: notOperator=(NOT | '!') expression #notExpression
| expression logicalOperator expression #logicalExpression
| predicate IS NOT? testValue=(TRUE | FALSE | UNKNOWN) #isExpression
| predicate #predicateExpression
;
predicate
: predicate NOT? IN '(' expressions ')' #inPredicate
| predicate IS nullNotnull #isNullPredicate
| left=predicate comparisonOperator right=predicate #binaryComparisonPredicate
| predicate NOT? BETWEEN predicate AND predicate #betweenPredicate
| predicate SOUNDS LIKE predicate #soundsLikePredicate
| predicate NOT? LIKE predicate (ESCAPE STRING_LITERAL)? #likePredicate
| predicate NOT? regex=(REGEXP | RLIKE) predicate #regexpPredicate
| (LOCAL_ID VAR_ASSIGN)? expressionAtom #expressionAtomPredicate
;
// Add in ASTVisitor nullNotnull in constant
expressionAtom
: constant #constantExpressionAtom
| fullColumnName #fullColumnNameExpressionAtom
| functionCall #functionCallExpressionAtom
| expressionAtom COLLATE collationName #collateExpressionAtom
| unaryOperator expressionAtom #unaryExpressionAtom
| BINARY expressionAtom #binaryExpressionAtom
| '(' expression (',' expression)* ')' #nestedExpressionAtom
| ROW '(' expression (',' expression)+ ')' #nestedRowExpressionAtom
| INTERVAL expression intervalType #intervalExpressionAtom
| left=expressionAtom bitOperator right=expressionAtom #bitExpressionAtom
| left=expressionAtom mathOperator right=expressionAtom #mathExpressionAtom
;
unaryOperator
: '!' | '~' | '+' | '-' | NOT
;
comparisonOperator
: '=' | '>' | '<' | '<' '=' | '>' '='
| '<' '>' | '!' '=' | '<' '=' '>'
;
logicalOperator
: AND | '&' '&' | XOR | OR | '|' '|'
;
bitOperator
: '<' '<' | '>' '>' | '&' | '^' | '|'
;
mathOperator
: '*' | '/' | '%' | DIV | MOD | '+' | '-' | '--'
;
// Simple id sets
// (that keyword, which can be id)
charsetNameBase
: ARMSCII8 | ASCII | BIG5 | CP1250 | CP1251 | CP1256 | CP1257
| CP850 | CP852 | CP866 | CP932 | DEC8 | EUCJPMS | EUCKR
| GB2312 | GBK | GEOSTD8 | GREEK | HEBREW | HP8 | KEYBCS2
| KOI8R | KOI8U | LATIN1 | LATIN2 | LATIN5 | LATIN7 | MACCE
| MACROMAN | SJIS | SWE7 | TIS620 | UCS2 | UJIS | UTF16
| UTF16LE | UTF32 | UTF8 | UTF8MB3 | UTF8MB4
;
intervalTypeBase
: QUARTER | MONTH | DAY | HOUR
| MINUTE | WEEK | SECOND | MICROSECOND
;
dataTypeBase
: DATE | TIME | TIMESTAMP | DATETIME | YEAR | ENUM | TEXT
;
keywordsCanBeId
: ACCOUNT | ACTION | AFTER | AGGREGATE | ALGORITHM | ANY
| AT | AUTHORS | AUTOCOMMIT | AUTOEXTEND_SIZE
| AUTO_INCREMENT | AVG_ROW_LENGTH | BEGIN | BINLOG | BIT
| BLOCK | BOOL | BOOLEAN | BTREE | CASCADED | CHAIN | CHANGED
| CHANNEL | CHECKSUM | CIPHER | CLIENT | COALESCE | CODE
| COLUMNS | COLUMN_FORMAT | COMMENT | COMMIT | COMPACT
| COMPLETION | COMPRESSED | COMPRESSION | CONCURRENT
| CONNECTION | CONSISTENT | CONTAINS | CONTEXT
| CONTRIBUTORS | COPY | CPU | DATA | DATAFILE | DEALLOCATE
| DEFAULT_AUTH | DEFINER | DELAY_KEY_WRITE | DIRECTORY
| DISABLE | DISCARD | DISK | DO | DUMPFILE | DUPLICATE
| DYNAMIC | ENABLE | ENCRYPTION | ENDS | ENGINE | ENGINES
| ERROR | ERRORS | ESCAPE | EVEN | EVENT | EVENTS | EVERY
| EXCHANGE | EXCLUSIVE | EXPIRE | EXTENDED | EXTENT_SIZE | FAST | FAULTS
| FIELDS | FILE_BLOCK_SIZE | FILTER | FIRST | FIXED
| FOLLOWS | FULL | FUNCTION | GLOBAL | GRANTS
| GROUP_REPLICATION | HASH | HOST | IDENTIFIED
| IGNORE_SERVER_IDS | IMPORT | INDEXES | INITIAL_SIZE
| INPLACE | INSERT_METHOD | INSTANCE | INVOKER | IO
| IO_THREAD | IPC | ISOLATION | ISSUER | KEY_BLOCK_SIZE
| LANGUAGE | LAST | LEAVES | LESS | LEVEL | LIST | LOCAL
| LOGFILE | LOGS | MASTER | MASTER_AUTO_POSITION
| MASTER_CONNECT_RETRY | MASTER_DELAY
| MASTER_HEARTBEAT_PERIOD | MASTER_HOST | MASTER_LOG_FILE
| MASTER_LOG_POS | MASTER_PASSWORD | MASTER_PORT
| MASTER_RETRY_COUNT | MASTER_SSL | MASTER_SSL_CA
| MASTER_SSL_CAPATH | MASTER_SSL_CERT | MASTER_SSL_CIPHER
| MASTER_SSL_CRL | MASTER_SSL_CRLPATH | MASTER_SSL_KEY
| MASTER_TLS_VERSION | MASTER_USER
| MAX_CONNECTIONS_PER_HOUR | MAX_QUERIES_PER_HOUR
| MAX_ROWS | MAX_SIZE | MAX_UPDATES_PER_HOUR
| MAX_USER_CONNECTIONS | MEDIUM | MEMORY | MERGE | MID | MIGRATE
| MIN_ROWS | MODIFY | MUTEX | MYSQL | NAME | NAMES
| NCHAR | NEVER | NO | NODEGROUP | NONE | OFFLINE | OFFSET
| OJ | OLD_PASSWORD | ONE | ONLINE | ONLY | OPTIMIZER_COSTS
| OPTIONS | OWNER | PACK_KEYS | PAGE | PARSER | PARTIAL
| PARTITIONING | PARTITIONS | PASSWORD | PHASE | PLUGINS
| PLUGIN_DIR | PORT | PRECEDES | PREPARE | PRESERVE | PREV
| PROCESSLIST | PROFILE | PROFILES | PROXY | QUERY | QUICK
| REBUILD | RECOVER | REDO_BUFFER_SIZE | REDUNDANT
| RELAYLOG | RELAY_LOG_FILE | RELAY_LOG_POS | REMOVE
| REORGANIZE | REPAIR | REPLICATE_DO_DB | REPLICATE_DO_TABLE
| REPLICATE_IGNORE_DB | REPLICATE_IGNORE_TABLE
| REPLICATE_REWRITE_DB | REPLICATE_WILD_DO_TABLE
| REPLICATE_WILD_IGNORE_TABLE | REPLICATION | RESUME
| RETURNS | ROLLBACK | ROLLUP | ROTATE | ROW | ROWS
| ROW_FORMAT | SAVEPOINT | SCHEDULE | SECURITY | SERVER
| SESSION | SHARE | SHARED | SIGNED | SIMPLE | SLAVE
| SNAPSHOT | SOCKET | SOME | SOUNDS | SOURCE
| SQL_AFTER_GTIDS | SQL_AFTER_MTS_GAPS | SQL_BEFORE_GTIDS
| SQL_BUFFER_RESULT | SQL_CACHE | SQL_NO_CACHE | SQL_THREAD
| START | STARTS | STATS_AUTO_RECALC | STATS_PERSISTENT
| STATS_SAMPLE_PAGES | STATUS | STOP | STORAGE | STRING
| SUBJECT | SUBPARTITION | SUBPARTITIONS | SUSPEND | SWAPS
| SWITCHES | TABLESPACE | TEMPORARY | TEMPTABLE | THAN
| TRANSACTION | TRUNCATE | UNDEFINED | UNDOFILE
| UNDO_BUFFER_SIZE | UNKNOWN | UPGRADE | USER | VALIDATION
| VALUE | VARIABLES | VIEW | WAIT | WARNINGS | WITHOUT
| WORK | WRAPPER | X509 | XA | XML
;
functionNameBase
: ABS | ACOS | ADDDATE | ADDTIME | AES_DECRYPT | AES_ENCRYPT
| AREA | ASBINARY | ASIN | ASTEXT | ASWKB | ASWKT
| ASYMMETRIC_DECRYPT | ASYMMETRIC_DERIVE
| ASYMMETRIC_ENCRYPT | ASYMMETRIC_SIGN | ASYMMETRIC_VERIFY
| ATAN | ATAN2 | BENCHMARK | BIN | BIT_COUNT | BIT_LENGTH
| BUFFER | CEIL | CEILING | CENTROID | CHARACTER_LENGTH
| CHARSET | CHAR_LENGTH | COERCIBILITY | COLLATION
| COMPRESS | CONCAT | CONCAT_WS | CONNECTION_ID | CONV
| CONVERT_TZ | COS | COT | COUNT | CRC32
| CREATE_ASYMMETRIC_PRIV_KEY | CREATE_ASYMMETRIC_PUB_KEY
| CREATE_DH_PARAMETERS | CREATE_DIGEST | CROSSES | DATABASE | DATE
| DATEDIFF | DATE_FORMAT | DAY | DAYNAME | DAYOFMONTH
| DAYOFWEEK | DAYOFYEAR | DECODE | DEGREES | DES_DECRYPT
| DES_ENCRYPT | DIMENSION | DISJOINT | ELT | ENCODE
| ENCRYPT | ENDPOINT | ENVELOPE | EQUALS | EXP | EXPORT_SET
| EXTERIORRING | EXTRACTVALUE | FIELD | FIND_IN_SET | FLOOR
| FORMAT | FOUND_ROWS | FROM_BASE64 | FROM_DAYS
| FROM_UNIXTIME | GEOMCOLLFROMTEXT | GEOMCOLLFROMWKB
| GEOMETRYCOLLECTION | GEOMETRYCOLLECTIONFROMTEXT
| GEOMETRYCOLLECTIONFROMWKB | GEOMETRYFROMTEXT
| GEOMETRYFROMWKB | GEOMETRYN | GEOMETRYTYPE | GEOMFROMTEXT
| GEOMFROMWKB | GET_FORMAT | GET_LOCK | GLENGTH | GREATEST
| GTID_SUBSET | GTID_SUBTRACT | HEX | HOUR | IFNULL
| INET6_ATON | INET6_NTOA | INET_ATON | INET_NTOA | INSTR
| INTERIORRINGN | INTERSECTS | ISCLOSED | ISEMPTY | ISNULL
| ISSIMPLE | IS_FREE_LOCK | IS_IPV4 | IS_IPV4_COMPAT
| IS_IPV4_MAPPED | IS_IPV6 | IS_USED_LOCK | LAST_INSERT_ID
| LCASE | LEAST | LEFT | LENGTH | LINEFROMTEXT | LINEFROMWKB
| LINESTRING | LINESTRINGFROMTEXT | LINESTRINGFROMWKB | LN
| LOAD_FILE | LOCATE | LOG | LOG10 | LOG2 | LOWER | LPAD
| LTRIM | MAKEDATE | MAKETIME | MAKE_SET | MASTER_POS_WAIT
| MBRCONTAINS | MBRDISJOINT | MBREQUAL | MBRINTERSECTS
| MBROVERLAPS | MBRTOUCHES | MBRWITHIN | MD5 | MICROSECOND
| MINUTE | MLINEFROMTEXT | MLINEFROMWKB | MONTH | MONTHNAME
| MPOINTFROMTEXT | MPOINTFROMWKB | MPOLYFROMTEXT
| MPOLYFROMWKB | MULTILINESTRING | MULTILINESTRINGFROMTEXT
| MULTILINESTRINGFROMWKB | MULTIPOINT | MULTIPOINTFROMTEXT
| MULTIPOINTFROMWKB | MULTIPOLYGON | MULTIPOLYGONFROMTEXT
| MULTIPOLYGONFROMWKB | NAME_CONST | NULLIF | NUMGEOMETRIES
| NUMINTERIORRINGS | NUMPOINTS | OCT | OCTET_LENGTH | ORD
| OVERLAPS | PERIOD_ADD | PERIOD_DIFF | PI | POINT
| POINTFROMTEXT | POINTFROMWKB | POINTN | POLYFROMTEXT
| POLYFROMWKB | POLYGON | POLYGONFROMTEXT | POLYGONFROMWKB
| POSITION| POW | POWER | QUARTER | QUOTE | RADIANS | RAND
| RANDOM_BYTES | RELEASE_LOCK | REVERSE | RIGHT | ROUND
| ROW_COUNT | RPAD | RTRIM | SECOND | SEC_TO_TIME
| SESSION_USER | SHA | SHA1 | SHA2 | SIGN | SIN | SLEEP
| SOUNDEX | SQL_THREAD_WAIT_AFTER_GTIDS | SQRT | SRID
| STARTPOINT | STRCMP | STR_TO_DATE | ST_AREA | ST_ASBINARY
| ST_ASTEXT | ST_ASWKB | ST_ASWKT | ST_BUFFER | ST_CENTROID
| ST_CONTAINS | ST_CROSSES | ST_DIFFERENCE | ST_DIMENSION
| ST_DISJOINT | ST_DISTANCE | ST_ENDPOINT | ST_ENVELOPE
| ST_EQUALS | ST_EXTERIORRING | ST_GEOMCOLLFROMTEXT
| ST_GEOMCOLLFROMTXT | ST_GEOMCOLLFROMWKB
| ST_GEOMETRYCOLLECTIONFROMTEXT
| ST_GEOMETRYCOLLECTIONFROMWKB | ST_GEOMETRYFROMTEXT
| ST_GEOMETRYFROMWKB | ST_GEOMETRYN | ST_GEOMETRYTYPE
| ST_GEOMFROMTEXT | ST_GEOMFROMWKB | ST_INTERIORRINGN
| ST_INTERSECTION | ST_INTERSECTS | ST_ISCLOSED | ST_ISEMPTY
| ST_ISSIMPLE | ST_LINEFROMTEXT | ST_LINEFROMWKB
| ST_LINESTRINGFROMTEXT | ST_LINESTRINGFROMWKB
| ST_NUMGEOMETRIES | ST_NUMINTERIORRING
| ST_NUMINTERIORRINGS | ST_NUMPOINTS | ST_OVERLAPS
| ST_POINTFROMTEXT | ST_POINTFROMWKB | ST_POINTN
| ST_POLYFROMTEXT | ST_POLYFROMWKB | ST_POLYGONFROMTEXT
| ST_POLYGONFROMWKB | ST_SRID | ST_STARTPOINT
| ST_SYMDIFFERENCE | ST_TOUCHES | ST_UNION | ST_WITHIN
| ST_X | ST_Y | SUBDATE | SUBSTRING_INDEX | SUBTIME
| SYSTEM_USER | TAN | TIME | TIMEDIFF | TIMESTAMP
| TIMESTAMPADD | TIMESTAMPDIFF | TIME_FORMAT | TIME_TO_SEC
| TOUCHES | TO_BASE64 | TO_DAYS | TO_SECONDS | UCASE
| UNCOMPRESS | UNCOMPRESSED_LENGTH | UNHEX | UNIX_TIMESTAMP
| UPDATEXML | UPPER | UUID | UUID_SHORT
| VALIDATE_PASSWORD_STRENGTH | VERSION
| WAIT_UNTIL_SQL_THREAD_AFTER_GTIDS | WEEK | WEEKDAY
| WEEKOFYEAR | WEIGHT_STRING | WITHIN | YEAR | YEARWEEK
| Y_FUNCTION | X_FUNCTION
;
| ANTLR | 5 | shiyuhang0/TiBigData | tidb/src/main/java/io/tidb/bigdata/tidb/parser/MySqlParser.g4 | [
"Apache-2.0"
] |
<div class="mb-3">
<label class="form-label">Icon input</label>
{% include ui/form/input-icon.html class="mb-3" %}
{% include ui/form/input-icon.html class="mb-3" icon="user" prepend=true placeholder="Username" %}
</div>
<div class="mb-3">
<label class="form-label">Loader input</label>
{% include ui/form/input-icon.html loader=true class="mb-3" placeholder="Loading…" %}
{% include ui/form/input-icon.html loader=true class="mb-3" icon="user" prepend=true placeholder="Loading…" %}
</div> | HTML | 3 | muhginanjar/tabler | src/pages/_includes/parts/form/input-icon.html | [
"MIT"
] |
if exists('b:did_ftplugin')
finish
endif
let b:did_ftplugin = 1
function! ShaDaIndent(lnum)
if a:lnum == 1 || getline(a:lnum) =~# '\mwith timestamp.*:$'
return 0
else
return shiftwidth()
endif
endfunction
setlocal expandtab tabstop=2 softtabstop=2 shiftwidth=2
setlocal indentexpr=ShaDaIndent(v:lnum) indentkeys=<:>,o,O
let b:undo_ftplugin = 'setlocal et< ts< sts< sw< indentexpr< indentkeys<'
| VimL | 4 | uga-rosa/neovim | runtime/ftplugin/shada.vim | [
"Vim"
] |
$! MAKE_COMMAND.COM
$! Record MM[SK]/Make parameters in configuration report
$!
$! Author: Peter Prymmer <pvhp@lns62.lns.cornell.edu>
$! Version: 1.0 18-Jan-1996
$!
$! DCL usage (choose one):
$! @MAKE_COMMAND !or
$! @MAKE_COMMAND/OUTPUT=MYCONFIG.OUT
$!------------------------------------------------
$ $mms = "'"+p1
$ $makeline = p2+" "+p3+" "+p4+" "+p5+" "+p6+" "+p7+" "+p8
$quotable:
$ if f$locate("""",$makeline).lt.f$length($makeline)
$ then
$ $makeline = $makeline - """"
$ goto quotable
$ endif
$ $makeline = f$edit($makeline,"COMPRESS,TRIM")
$ write sys$output " make_cmd=''$mms'"+" ''$makeline''"
$!------------------------------------------------
| DIGITAL Command Language | 3 | vlinhd11/vlinhd11-android-scripting | perl/src/vms/make_command.com | [
"Apache-2.0"
] |
;; Scopes
(document) @scope
(directive) @scope
;; Definitions
(title) @definition
(substitution_definition
name: (substitution) @definition)
(footnote
name: (label) @definition)
(citation
name: (label) @definition)
(target
name: (name) @definition)
; Inline targets
(inline_target) @definition
; The role directive can define a new role
((directive
name: (type) @_type
body: (body (arguments) @definition))
(#eq? @_type "role"))
;; References
[
(substitution_reference)
(footnote_reference)
(citation_reference)
(reference)
(role)
] @reference
| Scheme | 2 | yzia2000/nvim-treesitter | queries/rst/locals.scm | [
"Apache-2.0"
] |
MEAN(X)
;X is assumed to be a list of numbers separated by "^"
QUIT:'$DATA(X) "No data"
QUIT:X="" "Empty Set"
NEW S,I
SET S=0,I=1
FOR QUIT:I>$L(X,"^") SET S=S+$P(X,"^",I),I=I+1
QUIT (S/$L(X,"^"))
| M | 3 | LaudateCorpus1/RosettaCodeData | Task/Averages-Arithmetic-mean/MUMPS/averages-arithmetic-mean.mumps | [
"Info-ZIP"
] |
--TEST--
Bug #69279 (Compressed ZIP Phar extractTo() creates garbage files)
--EXTENSIONS--
phar
zlib
--INI--
phar.readonly=0
--FILE--
<?php
$w = new Phar(__DIR__ . "/bug69279.phar.zip");
$w["bug69279.txt"] = "Sample content.";
$w->compressFiles(Phar::GZ);
unset($w);
$r = new Phar(__DIR__ . "/bug69279.phar.zip");
var_dump($r["bug69279.txt"]->isCompressed());
$r->extractTo(__DIR__, NULL, TRUE);
var_dump(file_get_contents(__DIR__ . "/bug69279.txt"));
?>
--EXPECT--
bool(true)
string(15) "Sample content."
--CLEAN--
<?php
@unlink(__DIR__ . "/bug69279.txt");
@unlink(__DIR__ . "/bug69279.phar.zip");
?>
| PHP | 3 | NathanFreeman/php-src | ext/phar/tests/bug69279.phpt | [
"PHP-3.01"
] |
" Vim syntax file
" Language: Murphi model checking language
" Maintainer: Matthew Fernandez <matthew.fernandez@gmail.com>
" Last Change: 2019 Aug 27
" Version: 2
" Remark: Originally authored by Diego Ongaro <ongaro@cs.stanford.edu>
if version < 600
syntax clear
elseif exists("b:current_syntax")
finish
endif
" Keywords are case insensitive.
" Keep these in alphabetical order.
syntax case ignore
syn keyword murphiKeyword alias
syn keyword murphiStructure array
syn keyword murphiKeyword assert
syn keyword murphiKeyword begin
syn keyword murphiType boolean
syn keyword murphiKeyword by
syn keyword murphiLabel case
syn keyword murphiKeyword clear
syn keyword murphiLabel const
syn keyword murphiRepeat do
syn keyword murphiConditional else
syn keyword murphiConditional elsif
syn keyword murphiKeyword end
syn keyword murphiKeyword endalias
syn keyword murphiRepeat endexists
syn keyword murphiRepeat endfor
syn keyword murphiRepeat endforall
syn keyword murphiKeyword endfunction
syn keyword murphiConditional endif
syn keyword murphiKeyword endprocedure
syn keyword murphiStructure endrecord
syn keyword murphiKeyword endrule
syn keyword murphiKeyword endruleset
syn keyword murphiKeyword endstartstate
syn keyword murphiConditional endswitch
syn keyword murphiRepeat endwhile
syn keyword murphiStructure enum
syn keyword murphiKeyword error
syn keyword murphiRepeat exists
syn keyword murphiBoolean false
syn keyword murphiRepeat for
syn keyword murphiRepeat forall
syn keyword murphiKeyword function
syn keyword murphiConditional if
syn keyword murphiKeyword in
syn keyword murphiKeyword interleaved
syn keyword murphiLabel invariant
syn keyword murphiFunction ismember
syn keyword murphiFunction isundefined
syn keyword murphiKeyword log
syn keyword murphiStructure of
syn keyword murphiType multiset
syn keyword murphiFunction multisetadd
syn keyword murphiFunction multisetcount
syn keyword murphiFunction multisetremove
syn keyword murphiFunction multisetremovepred
syn keyword murphiKeyword procedure
syn keyword murphiKeyword program
syn keyword murphiKeyword put
syn keyword murphiStructure record
syn keyword murphiKeyword return
syn keyword murphiLabel rule
syn keyword murphiLabel ruleset
syn keyword murphiType scalarset
syn keyword murphiLabel startstate
syn keyword murphiConditional switch
syn keyword murphiConditional then
syn keyword murphiRepeat to
syn keyword murphiKeyword traceuntil
syn keyword murphiBoolean true
syn keyword murphiLabel type
syn keyword murphiKeyword undefine
syn keyword murphiStructure union
syn keyword murphiLabel var
syn keyword murphiRepeat while
syn keyword murphiTodo contained todo xxx fixme
syntax case match
" Integers.
syn match murphiNumber "\<\d\+\>"
" Operators and special characters.
syn match murphiOperator "[\+\-\*\/%&|=!<>:\?]\|\."
syn match murphiDelimiter "\(:=\@!\|[;,]\)"
syn match murphiSpecial "[()\[\]]"
" Double equal sign is a common error: use one equal sign for equality testing.
syn match murphiError "==[^>]"he=e-1
" Double && and || are errors.
syn match murphiError "&&\|||"
" Strings. This is defined so late so that it overrides previous matches.
syn region murphiString start=+"+ end=+"+
" Comments. This is defined so late so that it overrides previous matches.
syn region murphiComment start="--" end="$" contains=murphiTodo
syn region murphiComment start="/\*" end="\*/" contains=murphiTodo
" Link the rules to some groups.
hi def link murphiComment Comment
hi def link murphiString String
hi def link murphiNumber Number
hi def link murphiBoolean Boolean
hi def link murphiIdentifier Identifier
hi def link murphiFunction Function
hi def link murphiStatement Statement
hi def link murphiConditional Conditional
hi def link murphiRepeat Repeat
hi def link murphiLabel Label
hi def link murphiOperator Operator
hi def link murphiKeyword Keyword
hi def link murphiType Type
hi def link murphiStructure Structure
hi def link murphiSpecial Special
hi def link murphiDelimiter Delimiter
hi def link murphiError Error
hi def link murphiTodo Todo
let b:current_syntax = "murphi"
| VimL | 5 | uga-rosa/neovim | runtime/syntax/murphi.vim | [
"Vim"
] |
%% This tests that no warnings appear when there is no specific
%% information about the types and the variables are not bound.
-module(whereis_vars1).
-export([start/3]).
start(AnAtom, OtherAtom, Fun) ->
case whereis(AnAtom) of
undefined ->
Pid = spawn(Fun),
case Pid =:= self() of
true -> ok;
false -> register(OtherAtom, Pid)
end;
P when is_pid(P) ->
ok
end.
| Erlang | 4 | jjhoo/otp | lib/dialyzer/test/race_SUITE_data/src/whereis_vars1.erl | [
"Apache-2.0"
] |
<section class="content-header">
<div class="container-fluid">
{%- if page_title is defined and page_title is not empty -%}
<h1>
{{- page_title }}
{%- if page_subtitle is defined and page_subtitle is not empty -%}
<small>{{ page_subtitle -}}</small>
{%- endif -%}
</h1>
{%- endif -%}
</div>
</section>
| Volt | 3 | PSD-Company/phalcon-devtools-docker | src/Web/Tools/Views/partials/content_header.volt | [
"BSD-3-Clause"
] |
BnotJ< |