text
stringlengths 1
22.8M
|
|---|
Luena may refer to:
Places
Luena, Angola
Luena, Spain
Luena (constituency), Limulunga District, Western Province, Zambia
Other uses
Luena Airport, Angola
Luena Airport (Democratic Republic of the Congo)
Luena people, or Luvale people, Angola
Luena River (disambiguation)
Luena Martinez (born 1999), British singer; former member of girl group RLY
|
Victoria Beach may refer to:
Rural Municipality of Victoria Beach, a municipality in Manitoba
Victoria Beach, Nova Scotia
|
```java
/*
* Created by LuaView.
*
* This source code is licensed under the MIT.
* For the full copyright and license information,please view the LICENSE file in the root directory of this source tree.
*/
package com.taobao.luaview.userdata.ui;
import android.graphics.Typeface;
import android.text.Spannable;
import android.text.SpannableStringBuilder;
import android.text.Spanned;
import android.text.style.AbsoluteSizeSpan;
import android.text.style.BackgroundColorSpan;
import android.text.style.ForegroundColorSpan;
import android.text.style.StrikethroughSpan;
import android.text.style.StyleSpan;
import android.text.style.UnderlineSpan;
import com.taobao.luaview.extend.CustomTypefaceSpan;
import com.taobao.luaview.extend.WeightStyleSpan;
import com.taobao.luaview.userdata.base.BaseUserdata;
import com.taobao.luaview.userdata.constants.UDFontStyle;
import com.taobao.luaview.userdata.constants.UDFontWeight;
import com.taobao.luaview.util.ColorUtil;
import com.taobao.luaview.util.DimenUtil;
import com.taobao.luaview.util.LuaUtil;
import org.luaj.vm2.Globals;
import org.luaj.vm2.LuaValue;
import org.luaj.vm2.Varargs;
/**
 * Lua userdata wrapping an Android {@link SpannableStringBuilder}.
 *
 * <p>Construction takes an optional text string (init param 1) and an optional
 * style table (init param 2) whose entries — fontSize, fontColor, fontName,
 * fontWeight, fontStyle, backgroundColor, strikethrough, underline — are
 * applied as spans over the whole text.
 */
public class UDSpannableString extends BaseUserdata {

    public UDSpannableString(Globals globals, LuaValue metaTable, Varargs varargs) {
        super(new SpannableStringBuilder(), globals, metaTable, varargs);
        init(varargs);
    }

    /** Returns the builder stored as this userdata's payload. */
    public SpannableStringBuilder getSpannableStringBuilder() {
        return (SpannableStringBuilder) userdata();
    }

    /**
     * Reads the text (init param 1) and style table (init param 2) and applies
     * them to the underlying builder. NIL stands in for a missing parameter.
     */
    public void init(Varargs initParams) {
        LuaValue text = NIL;
        LuaValue config = NIL;
        if (initParams != null) {
            text = getInitParam1();
            config = getInitParam2();
        }
        initSpannableStringBuilder(text, config);
    }

    private void initSpannableStringBuilder(LuaValue text, LuaValue config) {
        final SpannableStringBuilder builder = getSpannableStringBuilder();
        if (text != null && text.isstring()) {
            builder.append(text.tojstring());
        }
        if (builder.length() > 0 && config != null && config.istable()) {
            applyStyles(builder, config);
        }
    }

    /** Applies every recognized style entry of {@code config} over [0, length). */
    private void applyStyles(SpannableStringBuilder builder, LuaValue config) {
        final int end = builder.length();

        // Font size: spToPx of the -1 "absent" sentinel is checked below.
        final int fontSize = DimenUtil.spToPx(config.get("fontSize").optint(-1));
        if (fontSize != -1) {
            builder.setSpan(new AbsoluteSizeSpan(fontSize), 0, end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
        }

        final Integer fontColor = ColorUtil.parse(LuaUtil.getValue(config, "fontColor"));
        if (fontColor != null) {
            builder.setSpan(new ForegroundColorSpan(fontColor), 0, end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
        }

        // Custom typeface, resolved through the Lua resource finder when available.
        final String fontName = config.get("fontName").optjstring(null);
        if (fontName != null && getLuaResourceFinder() != null) {
            builder.setSpan(new CustomTypefaceSpan(fontName, getLuaResourceFinder().findTypeface(fontName)), 0, end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
        }

        // fontWeight accepts either a number or a named weight string.
        final LuaValue weightValue = config.get("fontWeight");
        final int fontWeight = LuaUtil.isNumber(weightValue)
                ? weightValue.optint(UDFontWeight.WEIGHT_NORMAL_INT)
                : UDFontWeight.getValue(weightValue.optjstring(UDFontWeight.WEIGHT_NORMAL));
        if (fontWeight != -1 && fontWeight > UDFontWeight.WEIGHT_NORMAL_INT) {
            builder.setSpan(new WeightStyleSpan(fontWeight), 0, end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
        }

        // fontStyle accepts either a Typeface constant or a named style string.
        final LuaValue styleValue = config.get("fontStyle");
        final int fontStyle = LuaUtil.isNumber(styleValue)
                ? styleValue.optint(Typeface.NORMAL)
                : UDFontStyle.getValue(styleValue.optjstring(UDFontStyle.STYLE_NORMAL));
        if (fontStyle != -1 && fontStyle >= Typeface.NORMAL && fontStyle <= Typeface.BOLD_ITALIC) {
            builder.setSpan(new StyleSpan(fontStyle), 0, end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
        }

        final Integer backgroundColor = ColorUtil.parse(LuaUtil.getValue(config, "backgroundColor"));
        if (backgroundColor != null) {
            builder.setSpan(new BackgroundColorSpan(backgroundColor), 0, end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
        }

        if (config.get("strikethrough").optboolean(false)) {
            builder.setSpan(new StrikethroughSpan(), 0, end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
        }

        if (config.get("underline").optboolean(false)) {
            builder.setSpan(new UnderlineSpan(), 0, end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
        }
    }

    /**
     * Appends another spannable string, or the string value of {@code ssb},
     * to this one in place.
     *
     * @param ssb a UDSpannableString or any value coercible to a string
     * @return this userdata, enabling call chaining from Lua
     */
    public LuaValue append(LuaValue ssb) {
        final SpannableStringBuilder builder = getSpannableStringBuilder();
        if (builder != null) {
            if (ssb instanceof UDSpannableString) {
                builder.append(((UDSpannableString) ssb).getSpannableStringBuilder());
            } else {
                builder.append(ssb.optjstring(""));
            }
        }
        return this;
    }

    /**
     * Returns a NEW spannable string holding this text followed by
     * {@code ssb}; neither operand is modified.
     *
     * @param ssb a UDSpannableString or any value coercible to a string
     * @return a freshly allocated UDSpannableString with the joined content
     */
    @Override
    public LuaValue add(LuaValue ssb) {
        final UDSpannableString joined = new UDSpannableString(getGlobals(), getmetatable(), null);
        final SpannableStringBuilder current = getSpannableStringBuilder();
        if (current != null) {
            joined.getSpannableStringBuilder().append(current);
        }
        if (ssb instanceof UDSpannableString) {
            joined.getSpannableStringBuilder().append(((UDSpannableString) ssb).getSpannableStringBuilder());
        } else {
            joined.getSpannableStringBuilder().append(ssb.optjstring(""));
        }
        return joined;
    }

    @Override
    public String tojstring() {
        return String.valueOf(getSpannableStringBuilder());
    }
}
```
|
```xml
<?xml version="1.0" encoding="utf-8"?>
<!--
path_to_url
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-->
<selector xmlns:android="path_to_url">
<!-- State list is evaluated top to bottom; the first item whose state matches wins,
     so the stateless fallback must stay last. -->
<item android:drawable="@color/pressed_app_color"
android:state_pressed="true"/>
<item android:drawable="@color/pressed_app_color"
android:state_selected="true"/>
<!-- Default (no state) background. -->
<item android:drawable="@color/dark_drawer" />
</selector>
```
|
Mirror, Mirror is a 1979 American made-for-television drama film which explores the world of cosmetic surgery. Directed by Joanna Lee, the film stars Janet Leigh, Lee Meriwether, Loretta Swit, Robert Vaughn, and Peter Bonerz, and originally aired on NBC on October 10, 1979.
Plot
Three women – Millie Gorman, Sandy McLaren and Vanessa Wagner – are having problems in their lives which they believe plastic surgery can solve. Millie is a wealthy widow convinced that her sex appeal has waned along with her looks and youth; Sandy is a bored housewife whose husband regards her more as a friend than a lover; and ex-model Vanessa, who now owns a modeling agency, fears the years showing on her face will ruin any chance she has of reuniting with a former lover.
Eventually, Sandy gets breast implants which have the opposite of the desired effect on her husband; Vanessa gets an eye-lift, only to find out her ex-lover wasn't worth the trouble; and Millie, in desperation to look younger and more attractive to men, puts her life in jeopardy when she has a face-lift despite her doctor's warnings.
Cast
Janet Leigh as Millie Gorman
Lee Meriwether as Vanessa Wagner
Loretta Swit as Sandy McLaren
Robert Vaughn as Michael Jacoby
Peter Bonerz as Andrew McLaren
Robin Mattson as Pamela Gorman
Walter Brooke as Dr. Samuel Shaw
References
Mirror, Mirror: Vanity Fare on NBC at The Washington Post
Janet Leigh: A Biography. Capua, Michelangelo. pp. 209–210
External links
1979 films
1979 television films
1979 drama films
NBC network original films
Films about plastic surgery
American drama television films
1970s American films
|
```java
/**
*
* path_to_url
*
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package com.linkedin.xinfra.monitor.tests;
import com.linkedin.xinfra.monitor.apps.App;
/**
 * Marker interface for a runnable Xinfra Monitor test application.
 *
 * @deprecated This interface has been deprecated and will be removed in a future release. Please use com.linkedin.kmf.apps.App instead.
 */
@Deprecated
public interface Test extends App {
}
```
|
A half pint has two common meanings: half of a pint (a unit of volume) or an expression for a short person or small child.
Half-pint or Half Pint may also refer to:
Half Pint, stage name of Jamaican reggae singer Lindon Roberts (born 1961)
Half-Pint, stage name of Cassandra Jackson, a rapper in Son of Bazerk, a hip-hop band
Don "Half Pint" Santos, former member of the American R&B group IMx
Half-Pint, nickname of Frankie Jaxon (c. 1896-1953), vaudeville singer, stage designer and comedian
Half-Pint, nickname of Gene Rye (1906–1980), Major League Baseball player
Half-Pint, nickname of character Le Van Hawke portrayed by actor Roland Harrah III
Half-Pint, nickname of character Laura Ingalls Wilder portrayed by actor Melissa Gilbert
"Half-Pint", an episode of the TV series Airwolf
See also
Half Pints Brewing Company, a Canadian craft brewery
|
```go
// Code generated by "esc -modtime 12345 -prefix openapi/ -pkg openapi -ignore .go -o openapi/assets.go ."; DO NOT EDIT.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package openapi
import (
"bytes"
"compress/gzip"
"encoding/base64"
"fmt"
"io"
"io/ioutil"
"net/http"
"os"
"path"
"sync"
"time"
)
// _escLocalFS serves assets straight from the local filesystem (dev mode).
type _escLocalFS struct{}
var _escLocal _escLocalFS
// _escStaticFS serves the compressed assets embedded in _escData.
type _escStaticFS struct{}
var _escStatic _escStaticFS
// _escDirectory re-roots an http.FileSystem under a fixed name prefix.
type _escDirectory struct {
fs http.FileSystem
name string
}
// _escFile is one embedded asset: a gzip-then-base64 payload plus file
// metadata. data is decoded lazily, guarded by once (see prepare).
type _escFile struct {
compressed string
size int64
modtime int64
local string
isDir bool
once sync.Once
data []byte
name string
}
// Open serves the asset from the local working tree (the path recorded at
// generation time) instead of the embedded copy; name must still be a key
// in _escData.
func (_escLocalFS) Open(name string) (http.File, error) {
f, present := _escData[path.Clean(name)]
if !present {
return nil, os.ErrNotExist
}
return os.Open(f.local)
}
// prepare looks up name in _escData and lazily decodes its gzip+base64
// payload into f.data. Decoding runs at most once per file (sync.Once);
// zero-size entries (directories) skip decoding entirely.
//
// NOTE(review): err is only assigned inside the once.Do closure, so if the
// first decode fails, later calls observe err == nil and receive a file whose
// data is nil — confirm this is acceptable for this generated code.
func (_escStaticFS) prepare(name string) (*_escFile, error) {
f, present := _escData[path.Clean(name)]
if !present {
return nil, os.ErrNotExist
}
var err error
f.once.Do(func() {
f.name = path.Base(name)
if f.size == 0 {
return
}
var gr *gzip.Reader
b64 := base64.NewDecoder(base64.StdEncoding, bytes.NewBufferString(f.compressed))
gr, err = gzip.NewReader(b64)
if err != nil {
return
}
f.data, err = ioutil.ReadAll(gr)
})
if err != nil {
return nil, err
}
return f, nil
}
// Open returns the named embedded asset, decoding it on first use.
func (fs _escStaticFS) Open(name string) (http.File, error) {
f, err := fs.prepare(name)
if err != nil {
return nil, err
}
return f.File()
}
// Open resolves name under the directory's prefix and delegates to the
// wrapped filesystem.
func (dir _escDirectory) Open(name string) (http.File, error) {
return dir.fs.Open(dir.name + name)
}
// File wraps f's decoded bytes in a readable http.File backed by a
// bytes.Reader; metadata methods come from the embedded *_escFile.
func (f *_escFile) File() (http.File, error) {
type httpFile struct {
*bytes.Reader
*_escFile
}
return &httpFile{
Reader: bytes.NewReader(f.data),
_escFile: f,
}, nil
}
// Close is a no-op; embedded data needs no cleanup.
func (f *_escFile) Close() error {
return nil
}
// Readdir lists the entries of an embedded directory, looked up in _escDirs
// by the directory's local path.
//
// NOTE(review): unlike os.File.Readdir, no cursor is kept between calls —
// every call returns entries starting from index 0 — confirm callers only
// read each directory once.
func (f *_escFile) Readdir(count int) ([]os.FileInfo, error) {
if !f.isDir {
return nil, fmt.Errorf(" escFile.Readdir: '%s' is not directory", f.name)
}
fis, ok := _escDirs[f.local]
if !ok {
return nil, fmt.Errorf(" escFile.Readdir: '%s' is directory, but we have no info about content of this dir, local=%s", f.name, f.local)
}
limit := count
if count <= 0 || limit > len(fis) {
limit = len(fis)
}
if len(fis) == 0 && count > 0 {
return nil, io.EOF
}
return fis[0:limit], nil
}
// Stat returns the file itself; *_escFile implements os.FileInfo.
func (f *_escFile) Stat() (os.FileInfo, error) {
return f, nil
}
func (f *_escFile) Name() string {
return f.name
}
func (f *_escFile) Size() int64 {
return f.size
}
// Mode always reports 0.
// NOTE(review): directories therefore do not carry os.ModeDir in their mode
// bits; callers must use IsDir() — confirm acceptable for this generated code.
func (f *_escFile) Mode() os.FileMode {
return 0
}
// ModTime is the modtime baked in at generation time (-modtime flag).
func (f *_escFile) ModTime() time.Time {
return time.Unix(f.modtime, 0)
}
func (f *_escFile) IsDir() bool {
return f.isDir
}
// Sys exposes the *_escFile itself as the FileInfo's system-specific data.
func (f *_escFile) Sys() interface{} {
return f
}
// FS returns an http.FileSystem for the embedded assets. If useLocal is
// true, assets are read from the local filesystem instead of the embedded
// copies.
func FS(useLocal bool) http.FileSystem {
if useLocal {
return _escLocal
}
return _escStatic
}
// Dir returns an http.FileSystem for the embedded assets rooted at the given
// prefix dir. If useLocal is true, assets are read from the local filesystem
// instead of the embedded copies.
func Dir(useLocal bool, name string) http.FileSystem {
if useLocal {
return _escDirectory{fs: _escLocal, name: name}
}
return _escDirectory{fs: _escStatic, name: name}
}
// FSByte returns the contents of the named file from the embedded assets.
// If useLocal is true, the bytes are read from the local filesystem instead.
func FSByte(useLocal bool, name string) ([]byte, error) {
if useLocal {
f, err := _escLocal.Open(name)
if err != nil {
return nil, err
}
b, err := ioutil.ReadAll(f)
_ = f.Close()
return b, err
}
f, err := _escStatic.prepare(name)
if err != nil {
return nil, err
}
return f.data, nil
}
// FSMustByte is the same as FSByte, but panics if name is not present.
func FSMustByte(useLocal bool, name string) []byte {
b, err := FSByte(useLocal, name)
if err != nil {
panic(err)
}
return b
}
// FSString is the string version of FSByte.
func FSString(useLocal bool, name string) (string, error) {
b, err := FSByte(useLocal, name)
return string(b), err
}
// FSMustString is the string version of FSMustByte; it panics if name is not
// present.
func FSMustString(useLocal bool, name string) string {
return string(FSMustByte(useLocal, name))
}
// _escData maps cleaned asset paths to their embedded payloads.
//
// NOTE(review): several compressed payloads below contain literal
// "your_sha256_hash" placeholder fragments, so those gzip streams are
// corrupted and will fail to decode at runtime — regenerate this file with
// esc from the real assets before relying on it.
var _escData = map[string]*_escFile{
"/asset-gen.sh": {
name: "asset-gen.sh",
local: "asset-gen.sh",
size: 238,
modtime: 12345,
compressed: `
H4sIAAAAAAAC/your_sha256_hashbv4
Khrr7NztMNw/vgwPdzf+4JIVCERB/s8p7o+YzAGAJOzwhDDBC56PaDPrFtYbTZvoB2+QxG2fx9lAmZXL
qYlmpGILvNBvrSPCYlOThUGHi8ury0J4L5zkE+S/pOv28Tuu9pb/gRAkqxVGnw3BzqanWrnVPhuBenKT
KbufAAAA//9BiTev7gAAAA==
`,
},
"/index.html": {
name: "index.html",
local: "openapi/index.html",
size: 636,
modtime: 12345,
compressed: `
H4sIAAAAAAAC/0ySQW/bMAyF7/kVjC+9RJaHDtiQyd6wpceuQ9DLblUk2lYrS55IpzC2/ffBUdLlRr4n
fXwgqNa7h2+PP3/cQc+Db1ZqLcTq+8Pj3Rb2U4CnQb8gaCJk0WEQvyZM8xO4FuY4QTbDDKbXoUMCjsC9
I2idx/VKiGalMhZA9ajtUgAoduyxub/dfYU97qJRMivZHZD1QkyEXBcTt+JjIa+9oAesi6PD1zEmLsDE
wBi4Ll6d5b62eHQGxanZgAuOnfaCjPZYvyvOIO/CC/QJ27romUfaStnGwFR2MXYe9eioNHGQhuhzqwfn
5/p+8TElzdvbqtq8r6rNh6r6s4+HyPFaKiChrwvi2SP1iHwZelJyDXCIdobf5wZg0KlzYQvVpzdp1Na6
0F1pfzNHvoGUvKxVLbzznIQ2GqARjZiSr2/iiEGPThJrdkYuRjkP/qZR8vT0Es8kNzJQMv+XYmwon8mi
d8dUBmQZxiF/+uI1I7E8TMF6pCyWxDpY7WPA8pmKZsl6ouawOaOS+Sj+BQAA//8by2IcfAIAAA==
`,
},
"/spec.yml": {
name: "spec.yml",
local: "openapi/spec.yml",
size: 26055,
modtime: 12345,
compressed: `
H4sIAAAAAAAC/+xdX3PbuBF/16dAdX3oPUR07PQ6ozfJ8jmaOo7Hzt1M76YPELGkcCUBFgDjJDf97h2C
lPgPJEFK/qMM9RKbWiyWi/3tb7GEGR4BwxGdo4vZ2ex8QpnH5xOEFFUBzNGHi9VyghAB6QoaKcrZHC0Q
oVIJuokVEKRoCEiCoCARwQpvsAQUS8p89OHi08NvyAs4Vj+9Qy4PIwFSUs5m6F88Ri5mE4QQ8igjiMcK
hVwAwpvkx2RehBX6fatUJOeOE16QzYxyh3BX/vtvpqs/amVcIM7Q79dUvY83uaBP1TbezFwe6jFOePHj
bILQZxBS39Pb2dnsbIKQy5nCrpprXQyH2gXLFbrm3A8AXQseR/q7WARztNeeXJYzXwvpSTwu4tD54S/p
v8mUE4QC6gKTUFS+iLC7BXSTfoPOtRFV7TXbnU3AN06IpQLh3Kwvr24friYK+zJR/WZv92qJbnEIMsIu
aLWlZbzkzKN+LNKVWi31MC0rq1ruAuxCCExZaIl2sjUtqyw66kpKX795pASQFzM3+bKs5ZJzQSjDigt7
o4qDGqwriJi0ZV+CRAIwkQgzgh4FVRVPLXxfgN/PuMKYBttyCYPjQAnqSoRzLYlthD8yicMoADGRIJIo
TyNjH1dzx9lyqfQc/zg/e+vgiDqf304irLZa1knGURdkGnP72EiD14cMIhV7rkEhHASGeEo+uwhNP29M
EYoQj0DgRNuazFF4sVruBa5BZTICZMSZhIK26fnZ2TT/tWLX9OM/p4XvEpQDU0VxhHAUBdTVUzt/SM7K
3yIk3S2EuHoVob8K8OZo+oOTJDjOkvVzUlnpFG2/z4zODZm+O3vXYvMtV8jjMSMvYvo1MBDUvRKCi4LJ
your_sha256_hashZk9YBd6CkPvUhukI
nhE8fcETH4CdXyKCFQyATzrwtSAotWYE0QiiQVZPmyop58/9j+vV/7JbIhCAguGYW+nxAzCXDszEIixw
CCorG3ezpwVpweiCXyibo6R2LFxKgEsFkDlSIoZJu1fV1wjmKNlVMv/UIJa6Tpf1ItSqR4A9o9UVfO13
UbWdihFN5Y2aYTujtlDZ2DZhaa/qBDcqRdsNNPH9Vf2t655V/ZRJhZkLSHEdBvYR0LEBMN1LLkihsB4n
XL93xNSYGZ/eaotyohUIWTkxIAemIxdB8F2x+QlTo5u3OHtxZHPL1SDRTqLGRqw5krToSKinQqgHB8kw
xi1HyUi6I+meAukeDJYSKw9OqiNDvzKGzh/i9SLoxqeOdYF2el74flcELXx/JOVTIeUDA2P/6CuJi57E
XIyTkZZHWj4FWj4QLiVS7plKRyp+xX1khzKamXNYV3HNqKI4oN8GNVSS0UfKo4mqMZGOifS4MBGgfz4G
Uu5TVfvHmPvqg7KejfhM05GAk2kbsTNi52n6sdZcc3APoUZGg/sIXcT0DGd1TMxWuqMRqiNUjw3VHnx3
your_sha256_hashQ42Ier0HtnMo/bnr
Q/your_sha256_hashTqaQPVWG4OwOlx
HrzWjyrYorP+JHbE5ojNU38wt3sXi+MKwMr2sVzpHR0NL64Amco+UrVFIZcKcS0hEQEPx4EC0lSsrnZW
your_sha256_hash8XktqvU8G9o3z2tV
vXWUiaS6DC/V2CdTzdx88we4uyojEkn6U7SYQXTtMGklfLTLpkU5q7/w/5iOm5ZtLb2+4JnMNahvmiJN
tcnaUs4+your_sha256_hashDG8CIHXf
bzgPIHu/your_sha256_hashrqxNoIzA
l57Trwtj8qmbg2pwQN03+McyoCygYoTIJuDufx7oN7AfEXseiJ9jFYu+g+6wVP0sS0qjqy8RFV+7l7cy
YOEpELdcLVwXpOzhlrUhTCxXAWxDsZ/bGwKnp3E6g+ibo8y/A3F598slZ24sBDDX4F8WhxsQhcseFyFW
c0R4vAmgnGuOrNf00pveiPCpVOXAsUNnNrCS6O8r+nowcPoOPQt+xYTQZGlxcNdAi/3riPoJ7553kHZ/
OoLa1DzoOU/lT6Z6VH15Yt69DrRmK2UKfGPUUaYuzsv30NPwXSfp6Vd4nc00LbJwUkX/jF3FRfdtszh8
2GJBZLcolVrSJpu5seKfQXyipmKz1fM/vSvM94Em2zGbCUP8RRv3AGpN2qfcuazvmpJOgqKSBxpP+p2y
neLfOOsuxR+B+lvV7URgJOKUqU6FsmG1sRD4a7HxqSDsEYja+9PyLBaLkXz2by3tMj3iQtmt7bC90fey
your_sha256_hashWbIWWBwW+7fr2/Wn
9eJm/dv69rpwefHrYn2zWN5cFa7dXC1+3Um1NKcOJ7AD80QFofmCely4YFlSGHpcr/fGejCsqQYpcHqS
d2x5vbWcMTfkLF0YAP5Mmb/ePy0b5kszWDEjlGB1AoFXOj/wuqOw2NvuaWmSp+Pi/Om4abpuuROhqNu4
vMYnM0O3Trc27KYvTjo7guWkm2bTgLs4qFxzg1iqYZXzE+O61O4x19xNTR5bCl7uBkxLVcbxA/g930Xt
smqjZZAoGwfAlwhcBeRB/y8XSWzq0kregXjPYzGM49/zpyg5MSECpDxCMffiBexTON38rHRoYundGzIe
jRjeuCiq+38AAAD//3KwMhXHZQAA
`,
},
"/": {
name: "/",
local: `.`,
isDir: true,
},
"/openapi": {
name: "openapi",
local: `openapi`,
isDir: true,
},
}
// _escDirs lists the os.FileInfo entries of each embedded directory, keyed by
// the directory's local path (consumed by Readdir).
var _escDirs = map[string][]os.FileInfo{
".": {
_escData["/asset-gen.sh"],
_escData["/openapi"],
},
"openapi": {
_escData["/index.html"],
_escData["/spec.yml"],
},
}
```
|
Jefta van Dinther (born 4 March 1980 in Utrecht) is a Dutch-Swedish choreographer and dancer. Today van Dinther lives and works in Berlin and Stockholm.
Life and achievements
Van Dinther grew up in the Netherlands and Sweden. From 1999 to 2003 he attended Amsterdam School of the Arts, where he studied modern and contemporary Dance. He worked as a dancer with various choreographers, including Mette Ingvartsen, Xavier Le Roy and Ivana Müller. Since 2008 he has been creating his own performances as a choreographer.
Van Dinther's performances are regularly presented at international venues, such as at Volksbühne, HAU Hebbel am Ufer, Komische Oper Berlin, Sadler's Wells in London, Tanzquartier in Vienna, Internationaal Theater Amsterdam, Centre Georges Pompidou and Théâtre national de Chaillot in Paris, as well as at the festivals Tanz im August in Berlin, Festival TransAmériques in Montreal and at ImPulsTanz in Vienna.
In addition to producing his own pieces, van Dinther teaches choreography at various institutions, including Stockholm University of the Arts, where he was appointed senior lecturer and artistic director of the master program in choreography between 2012 and 2014.
Many of his performances are created in collaboration with lighting designer Minna Tiikkainen and sound designer David Kiers, with whom van Dinther has worked for many years. Most of van Dinther's performances are produced by himself, but his work has also been commissioned by the Swedish Cullberg Ballet (Plateau Effect in 2013 and Protagonist in 2016). In 2019 van Dinther remounted Plateau Effect from 2013 for Berlin State Ballet and it was included into their repertory in Komische Oper Berlin. Between 2019/20 and 2021/22 van Dinther is an associated artist at the company Cullberg.
Works (selection)
2008: It's in the Air, collaboration with Mette Ingvartsen
2009: The Way Things Go
2010: Kneeding
2011: The Blanket Dance, collaboration with choreographers Frederic Gies and DD Dorvillier
2011: Grind, collaboration with lighting designer Minna Tiikkainen and sound designer David Kiers
2012: This is Concrete, collaboration with choreographer Thiago Granato
2013: Plateau Effect (for the Swedish Cullberg Ballet)
2014: As It Empties Out
2014: Monument, choreography for the music video by Röyksopp & Robyn
2016: Protagonist (for the Swedish Cullberg Ballet)
2017: Dark Field Analysis
2019: The Quiet
2019: Plateau Effect for Berlin State Ballet
Awards (selection)
2012: Birgit Cullberg Grant
2012: The Wild Card and Prize of the Youth Jury at Theaterfestival Favoriten, Dortmund
2013: Swedish Theater Critics Dance Prize for Plateau Effect
Literature
Brandstetter, Gabriele. Synchronisierungen von Bewegungen im zeitgenössischen Tanz: Zur Relevanz von somatischen Praktiken in den Arbeiten von Jefta van Dinther. In Breyer, Thiemo et al. (eds.). Resonanz – Rhythmus – Synchronisierung. Interaktionen in Alltag, Therapie und Kunst. Bielefeld: Transcript Verlag, 2017, p. 409–428. .
Cvejić, Bojana. Choreographing Problems: Expressive Concepts in European Contemporary Dance and Performance. London: Palgrave Macmillan, 2015. . online
References
External links
official website
interview on Vimeo
interview at Centre Pompidou
Living people
Modern dancers
Contemporary dancers
Male dancers
Swedish choreographers
Dutch choreographers
Contemporary dance choreographers
Dance teachers
1980 births
|
KDZN (96.5 FM) is a country formatted broadcast radio station licensed to Glendive, Montana, serving East Central Montana. KDZN is owned and operated by The Marks Group.
History
KDZN signed on in 1969 as KIVE. In 1985, the station became KGLE-FM; at the time, it was owned by crosstown Friends of Christian Radio, Inc, who also formerly owned KGLE AM 590. In 1986, Magic Air Communications bought KGLE-FM, switching the calls to KDZN and the format to country, where it remains today. Stephen Marks, through Glendive Broadcasting Corporation, bought Magic Air in 1995, making KDZN a sister station to KXGN AM-TV, which Marks had purchased in 1990.
Programming
Programming on KDZN is provided via the Westwood One satellite network during the mid-morning, mid-afternoon and evening hours. News on KDZN comes from CBS.
References
External links
Z-96 Online
DZN
Dawson County, Montana
Country radio stations in the United States
Radio stations established in 1969
1969 establishments in Montana
|
Laura Marcus FBA (7 March 1956 – 22 September 2021) was a British literature scholar. She was Goldsmiths’ Professor of English Literature at New College, Oxford and published widely on 19th- and 20th-century literature and film, with particular interests in autobiography, modernism, Virginia Woolf, and psychoanalysis.
Marcus won the Modern Language Association's James Russell Lowell Prize for her book The Tenth Muse: Writing about Cinema in the Modernist Period. In 2011, she was elected a Fellow of the British Academy.
Prior to joining Oxford, Marcus was Professor of English at Sussex University and Regius Professor of Rhetoric and English Literature at the University of Edinburgh.
She was an editor of the journal Women: a Cultural Review.
She died of pancreatic cancer on 22 September 2021 at the age of 65.
Books
Auto/biographical Discourses: Theory, Criticism, Practice (1994)
Virginia Woolf: Writers and their Work (1997/2004)
The Tenth Muse: Writing about Cinema in the Modernist Period (2007)
Dreams of Modernity: Psychoanalysis, Literature, Cinema (2014)
Autobiography: a very short introduction (2018)
co-ed. The Cambridge History of Twentieth-Century English Literature (2004)
References
1956 births
2021 deaths
Academics of the University of Edinburgh
Academics of the University of Sussex
British literary critics
British women literary critics
Deaths from pancreatic cancer
Fellows of New College, Oxford
Fellows of the British Academy
|
```cpp
#pragma once
#include <chrono>
#include "envoy/event/dispatcher.h"
#include "quiche/quic/core/quic_clock.h"
namespace Envoy {
namespace Quic {
// quic::QuicClock implementation that holds a reference to an Envoy event
// dispatcher, so QUIC code and the rest of Envoy can share one time source.
// NOTE(review): the Now()/WallNow() bodies live in the .cc file; presumably
// they read dispatcher_'s time sources — confirm there.
class EnvoyQuicClock : public quic::QuicClock {
public:
EnvoyQuicClock(Event::Dispatcher& dispatcher) : dispatcher_(dispatcher) {}
// quic::QuicClock
quic::QuicTime ApproximateNow() const override;
quic::QuicTime Now() const override;
quic::QuicWallTime WallNow() const override;
private:
// Converts any std::chrono time_point to whole microseconds since its
// clock's epoch (truncating sub-microsecond precision).
template <typename T> int64_t microsecondsSinceEpoch(std::chrono::time_point<T> time) const {
return std::chrono::duration_cast<std::chrono::microseconds>(time.time_since_epoch()).count();
}
Event::Dispatcher& dispatcher_;
};
} // namespace Quic
} // namespace Envoy
```
|
```xml
<vector xmlns:android="path_to_url"
xmlns:tools="path_to_url"
android:width="448dp"
android:height="512dp"
android:viewportWidth="448.0"
android:viewportHeight="512.0"
tools:keep="@drawable/fa_discourse">
<!-- Single white filled path; tools:keep prevents resource shrinking from
     stripping this drawable (fa_discourse) when it is only referenced
     dynamically. -->
<path
android:fillColor="#FFFFFFFF"
android:pathData="M225.9,32C103.3,32 0,130.5 0,252.1 0,256 0.1,480 0.1,480l225.8,-0.2c122.7,0 222.1,-102.3 222.1,-223.9C448,134.3 348.6,32 225.9,32zM224,384c-19.4,0 -37.9,-4.3 -54.4,-12.1L88.5,392l22.9,-75c-9.8,-18.1 -15.4,-38.9 -15.4,-61 0,-70.7 57.3,-128 128,-128s128,57.3 128,128 -57.3,128 -128,128z"/>
</vector>
```
|
```go
//
//
// path_to_url
//
// Unless required by applicable law or agreed to in writing, software
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
package metricdata // import "go.opentelemetry.io/otel/sdk/metric/metricdata"
import (
"encoding/json"
"time"
"go.opentelemetry.io/otel/attribute"
"go.opentelemetry.io/otel/sdk/instrumentation"
"go.opentelemetry.io/otel/sdk/resource"
)
// ResourceMetrics is a collection of ScopeMetrics and the associated Resource
// that created them.
type ResourceMetrics struct {
// Resource represents the entity that collected the metrics.
Resource *resource.Resource
// ScopeMetrics are the collection of metrics with unique Scopes.
ScopeMetrics []ScopeMetrics
}
// ScopeMetrics is a collection of Metrics produced by a Meter.
type ScopeMetrics struct {
// Scope is the Scope that the Meter was created with.
Scope instrumentation.Scope
// Metrics are a list of aggregations created by the Meter.
Metrics []Metrics
}
// Metrics is a collection of one or more aggregated timeseries from an Instrument.
type Metrics struct {
// Name is the name of the Instrument that created this data.
Name string
// Description is the description of the Instrument, which can be used in documentation.
Description string
// Unit is the unit in which the Instrument reports.
Unit string
// Data is the aggregated data from an Instrument.
Data Aggregation
}
// Aggregation is the store of data reported by an Instrument.
// It will be one of: Gauge, Sum, Histogram, ExponentialHistogram.
//
// The unexported method seals the interface: only types in this package can
// implement it.
type Aggregation interface {
privateAggregation()
}
// Gauge represents a measurement of the current value of an instrument.
type Gauge[N int64 | float64] struct {
// DataPoints are the individual aggregated measurements with unique
// Attributes.
DataPoints []DataPoint[N]
}
// privateAggregation marks Gauge as an Aggregation.
func (Gauge[N]) privateAggregation() {}
// Sum represents the sum of all measurements of values from an instrument.
type Sum[N int64 | float64] struct {
// DataPoints are the individual aggregated measurements with unique
// Attributes.
DataPoints []DataPoint[N]
// Temporality describes if the aggregation is reported as the change from the
// last report time, or the cumulative changes since a fixed start time.
Temporality Temporality
// IsMonotonic represents if this aggregation only increases or decreases.
IsMonotonic bool
}
// privateAggregation marks Sum as an Aggregation.
func (Sum[N]) privateAggregation() {}
// DataPoint is a single data point in a timeseries.
type DataPoint[N int64 | float64] struct {
// Attributes is the set of key value pairs that uniquely identify the
// timeseries.
Attributes attribute.Set
// StartTime is when the timeseries was started. (optional)
StartTime time.Time `json:",omitempty"`
// Time is the time when the timeseries was recorded. (optional)
Time time.Time `json:",omitempty"`
// Value is the value of this data point.
Value N
// Exemplars is the sampled Exemplars collected during the timeseries.
Exemplars []Exemplar[N] `json:",omitempty"`
}
// Histogram represents the histogram of all measurements of values from an instrument.
type Histogram[N int64 | float64] struct {
// DataPoints are the individual aggregated measurements with unique
// Attributes.
DataPoints []HistogramDataPoint[N]
// Temporality describes if the aggregation is reported as the change from the
// last report time, or the cumulative changes since a fixed start time.
Temporality Temporality
}
// privateAggregation marks Histogram as an Aggregation.
func (Histogram[N]) privateAggregation() {}
// HistogramDataPoint is a single histogram data point in a timeseries.
type HistogramDataPoint[N int64 | float64] struct {
// Attributes is the set of key value pairs that uniquely identify the
// timeseries.
Attributes attribute.Set
// StartTime is when the timeseries was started.
StartTime time.Time
// Time is the time when the timeseries was recorded.
Time time.Time
// Count is the number of updates this histogram has been calculated with.
Count uint64
// Bounds are the upper bounds of the buckets of the histogram. Because the
// last boundary is +infinity this one is implied.
Bounds []float64
// BucketCounts is the count of each of the buckets.
BucketCounts []uint64
// Min is the minimum value recorded. (optional)
Min Extrema[N]
// Max is the maximum value recorded. (optional)
Max Extrema[N]
// Sum is the sum of the values recorded.
Sum N
// Exemplars is the sampled Exemplars collected during the timeseries.
Exemplars []Exemplar[N] `json:",omitempty"`
}
// ExponentialHistogram represents the histogram of all measurements of values from an instrument.
type ExponentialHistogram[N int64 | float64] struct {
// DataPoints are the individual aggregated measurements with unique
// attributes.
DataPoints []ExponentialHistogramDataPoint[N]
// Temporality describes if the aggregation is reported as the change from the
// last report time, or the cumulative changes since a fixed start time.
Temporality Temporality
}
// privateAggregation marks ExponentialHistogram as an Aggregation.
func (ExponentialHistogram[N]) privateAggregation() {}
// ExponentialHistogramDataPoint is a single exponential histogram data point in a timeseries.
type ExponentialHistogramDataPoint[N int64 | float64] struct {
// Attributes is the set of key value pairs that uniquely identify the
// timeseries.
Attributes attribute.Set
// StartTime is when the timeseries was started.
StartTime time.Time
// Time is the time when the timeseries was recorded.
Time time.Time
// Count is the number of updates this histogram has been calculated with.
Count uint64
// Min is the minimum value recorded. (optional)
Min Extrema[N]
// Max is the maximum value recorded. (optional)
Max Extrema[N]
// Sum is the sum of the values recorded.
Sum N
// Scale describes the resolution of the histogram. Boundaries are
// located at powers of the base, where:
//
// base = 2 ^ (2 ^ -Scale)
Scale int32
// ZeroCount is the number of values whose absolute value
// is less than or equal to [ZeroThreshold].
// When ZeroThreshold is 0, this is the number of values that
// cannot be expressed using the standard exponential formula
// as well as values that have been rounded to zero.
// ZeroCount represents the special zero count bucket.
ZeroCount uint64
// PositiveBucket is range of positive value bucket counts.
PositiveBucket ExponentialBucket
// NegativeBucket is range of negative value bucket counts.
NegativeBucket ExponentialBucket
// ZeroThreshold is the width of the zero region. Where the zero region is
// defined as the closed interval [-ZeroThreshold, ZeroThreshold].
ZeroThreshold float64
// Exemplars is the sampled Exemplars collected during the timeseries.
Exemplars []Exemplar[N] `json:",omitempty"`
}
// ExponentialBucket are a set of bucket counts, encoded in a contiguous array
// of counts.
type ExponentialBucket struct {
// Offset is the bucket index of the first entry in the Counts slice.
Offset int32
// Counts is an slice where Counts[i] carries the count of the bucket at
// index (Offset+i). Counts[i] is the count of values greater than
// base^(Offset+i) and less than or equal to base^(Offset+i+1).
Counts []uint64
}
// Extrema is the minimum or maximum value of a dataset.
//
// The zero value of Extrema represents "no value recorded"; use NewExtrema
// to construct a defined value and Value to read it back.
type Extrema[N int64 | float64] struct {
	// value is the recorded extremum; only meaningful when valid is true.
	value N
	// valid distinguishes a recorded value from the zero value.
	valid bool
}
// MarshalText encodes the Extrema as JSON text: the numeric value when the
// Extrema is defined, or JSON null when it is unset.
func (e Extrema[N]) MarshalText() ([]byte, error) {
	if e.valid {
		return json.Marshal(e.value)
	}
	return json.Marshal(nil)
}
// MarshalJSON converts the Extrema value to a JSON number, or JSON null when
// the Extrema is undefined. It delegates to MarshalText, which already
// produces valid JSON.
func (e *Extrema[N]) MarshalJSON() ([]byte, error) {
	return e.MarshalText()
}
// NewExtrema returns an Extrema set to v.
func NewExtrema[N int64 | float64](v N) Extrema[N] {
	var e Extrema[N]
	e.value, e.valid = v, true
	return e
}
// Value returns the Extrema value and true if the Extrema is defined.
// Otherwise, if the Extrema is its zero-value, defined will be false.
func (e Extrema[N]) Value() (v N, defined bool) {
	v, defined = e.value, e.valid
	return
}
// Exemplar is a measurement sampled from a timeseries providing a typical
// example.
type Exemplar[N int64 | float64] struct {
	// FilteredAttributes are the attributes recorded with the measurement but
	// filtered out of the timeseries' aggregated data.
	FilteredAttributes []attribute.KeyValue
	// Time is the time when the measurement was recorded.
	Time time.Time
	// Value is the measured value.
	Value N
	// SpanID is the ID of the span that was active during the measurement. If
	// no span was active or the span was not sampled this will be empty.
	// Omitted from JSON output when empty.
	SpanID []byte `json:",omitempty"`
	// TraceID is the ID of the trace the active span belonged to during the
	// measurement. If no span was active or the span was not sampled this will
	// be empty. Omitted from JSON output when empty.
	TraceID []byte `json:",omitempty"`
}
// Summary metric data are used to convey quantile summaries,
// a Prometheus (see: https://prometheus.io/docs/concepts/metric_types/#summary)
// data type.
//
// These data points cannot always be merged in a meaningful way. The Summary
// type is only used by bridges from other metrics libraries, and cannot be
// produced using OpenTelemetry instrumentation.
type Summary struct {
	// DataPoints are the individual aggregated measurements with unique
	// attributes.
	DataPoints []SummaryDataPoint
}
// privateAggregation is an empty marker method; presumably it makes Summary
// satisfy a sealed Aggregation interface defined elsewhere in this package
// (ExponentialHistogram has the same marker) — confirm.
func (Summary) privateAggregation() {}
// SummaryDataPoint is a single data point in a timeseries that describes the
// time-varying values of a Summary metric.
type SummaryDataPoint struct {
	// Attributes is the set of key value pairs that uniquely identify the
	// timeseries.
	Attributes attribute.Set
	// StartTime is when the timeseries was started.
	StartTime time.Time
	// Time is the time when the timeseries was recorded.
	Time time.Time
	// Count is the number of updates this summary has been calculated with.
	Count uint64
	// Sum is the sum of the values recorded.
	Sum float64
	// QuantileValues is the (optional) list of values at different quantiles
	// of the distribution calculated from the current snapshot. The quantiles
	// must be strictly increasing.
	QuantileValues []QuantileValue
}
// QuantileValue is the value at a given quantile of a summary.
type QuantileValue struct {
	// Quantile is the quantile of this value.
	//
	// Must be in the interval [0.0, 1.0].
	Quantile float64
	// Value is the value at the given quantile of a summary.
	//
	// Quantile values must NOT be negative.
	Value float64
}
```
|
Prostitution in Papua New Guinea is generally regarded as illegal but widely practiced, with the laws rarely enforced. Prostitution occurs on the streets, in bars, brothels and in logging, mining, and palm oil areas. In 2010 it was estimated there were 2,000 prostitutes in the capital, Port Moresby. The drought in 2016 caused a rise in prostitution. Many of the women have turned to sex work due to poverty or unemployment.
HIV, sex trafficking and child prostitution are common problems in Papua New Guinea.
Legal situation
The legal situation in Papua New Guinea is complex. The Summary Offences Act 1977 makes keeping a brothel and living on the earnings of prostitution offences. The idea of the law was to decriminalise prostitution but criminalise those who sought to exploit or profit from it. In 1978, a Papua New Guinea court interpreted 'living on the earnings of prostitution' to include 'profit from one's own prostitution' (Wemay v Tumdual). The ruling effectively made all prostitution illegal. In a further case it was ruled that "occasional transactional sex for small amounts of money was insufficient to warrant a conviction". This may be a unique legal situation in that prostitution is made illegal not by statute law but by case law.
In rural areas, 'customary law' is also in force. These laws are not written down, but are based on the knowledge of the laws of the indigenous peoples.
There have been calls to legalise prostitution. In October 2016, a private member's Bill was introduced to Parliament by the Member for Sumkar, Ken Fairweather, to instigate legalisation. Prime Minister Peter O'Neill and Oro Governor Gary Juffa said they would oppose any call for legalisation.
Law enforcement
Law enforcement is inconsistent. Sex workers and NGOs report corruption amongst police. They also report violence, intimidation, abuse, extortion and rape at the hands of the police. Knowingly transmitting HIV contrary to the HIV/AIDS Management and Prevention (HAMP) Act 2003 is sometimes used to detain sex workers, but there is no evidence of any charges being brought under this legislation.
HIV
The country has the highest HIV prevalence in the Pacific. Sex workers are one of the high risk groups, although HIV response in the country is now being directed towards the high risk groups. Access to healthcare is poor for sex workers. UNAIDS estimated an HIV prevalence of 17.8% amongst sex workers in 2016.
Sex trafficking
Papua New Guinea is a source, transit, and destination country for women, and children subjected to sex trafficking. Foreign and local women and children are subjected to sex trafficking, including near logging, mining, and palm oil sites. “Mosko Girls”, young girls employed in bars to provide companionship to patrons and sell an alcoholic drink called mosko, are vulnerable to human trafficking, especially around major cities. Within the country, children and women from rural areas are deceived, often by relatives, with promises of legitimate work or education to travel to different provinces where they are subjected to sex trafficking. NGOs report some parents receive money from traffickers who exploited their teenage daughters in prostitution, including near mining and logging sites. Children, including girls as young as 5 years old from remote rural areas, are reportedly subjected to sex trafficking by members of their immediate family or tribe. Tribal leaders reportedly trade with each other the service of girls and women for guns and to forge political alliances.
Young girls sold into polygamous marriages may be exploited in prostitution. In urban areas, parents reportedly exploit their children in sex trafficking directly or in brothels as a means to support their families or to pay for school fees. Government officials reportedly facilitate trafficking by accepting bribes to allow undocumented migrants to enter the country or ignore trafficking situations, and some may exploit sex trafficking victims or procure victims for other individuals in return for political favours or votes.
Malaysian and Chinese logging companies arrange for some foreign women to enter the country voluntarily with fraudulently issued tourist or business visas. After their arrival, many of these women, from countries including Indonesia, Malaysia, Thailand, China, and the Philippines, are turned over to traffickers who transport them to logging and mining camps, fisheries, and entertainment sites, and exploit them in forced prostitution.
The United States Department of State Office to Monitor and Combat Trafficking in Persons ranks Papua New Guinea as a "Tier 3" country.
References
Papua New Guinea
Papua New Guinea
Papua New Guinean culture
Social issues in Papua New Guinea
Women's rights in Papua New Guinea
|
Attorney General Harrison may refer to:
Albertis Harrison (1907–1995), Attorney General of Virginia
Thomas Harrison (fl. 1760s), Attorney General of Jamaica
William Henry Harrison (Canadian politician) (1880–1955), Attorney General of New Brunswick
|
```rust
/*
*
* This software may be used and distributed according to the terms of the
*/
use std::collections::HashMap;
use std::fs;
use std::io;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use dag::delegate;
use dag::errors::programming;
use dag::errors::NotFoundError;
use dag::nonblocking::non_blocking_result;
use dag::ops::IdConvert;
use dag::Group;
use dag::Set;
use dag::Vertex;
use futures::stream::BoxStream;
use futures::stream::StreamExt;
use minibytes::Bytes;
use revlogindex::RevlogIndex;
use zstore::Id20;
use crate::strip;
use crate::AppendCommits;
use crate::DescribeBackend;
use crate::HgCommit;
use crate::ParentlessHgCommit;
use crate::ReadCommitText;
use crate::Result;
use crate::StreamCommitText;
use crate::StripCommits;
/// HG commits stored on disk using the revlog format.
#[derive(Clone)]
pub struct RevlogCommits {
    /// Changelog index (00changelog.i/.nodemap). Serves both the commit
    /// graph queries (via the delegated dag traits) and raw commit text.
    revlog: RevlogIndex,
    /// Directory containing the revlog files; used by `strip_commits` to
    /// rebuild the store in place.
    pub(crate) dir: PathBuf,
}
/// Hardcoded commit hashes defined by hg.
///
/// Returns empty commit text for the special `null` and `wdir` ids, which do
/// not exist in storage; returns `None` for every other vertex.
pub(crate) fn get_hard_coded_commit_text(vertex: &Vertex) -> Option<Bytes> {
    let vertex = vertex.as_ref();
    if vertex == Id20::null_id().as_ref() || vertex == Id20::wdir_id().as_ref() {
        Some(Default::default())
    } else {
        None
    }
}
impl RevlogCommits {
    /// Open (or create) the changelog revlog under `dir`, expecting the
    /// `00changelog.i` index and `00changelog.nodemap` files there.
    pub fn new(dir: &Path) -> Result<Self> {
        let index_path = dir.join("00changelog.i");
        let nodemap_path = dir.join("00changelog.nodemap");
        let revlog = RevlogIndex::new(&index_path, &nodemap_path)?;
        Ok(Self {
            revlog,
            dir: dir.to_path_buf(),
        })
    }
}
#[async_trait::async_trait]
impl AppendCommits for RevlogCommits {
    /// Insert `commits` into the revlog in an order where every parent is
    /// inserted before its children (Kahn-style topological sort), since a
    /// revlog requires parent revisions to exist before a child references
    /// them. Errors if the input contains a dependency cycle.
    async fn add_commits(&mut self, commits: &[HgCommit]) -> Result<()> {
        // Topo sort nodes since SaplingRemoteAPI returns nodes sorted lexically.
        // We try to keep a stable order relative to the input since revlog
        // insertion order can affect tests.
        let mut vertex_to_commit: HashMap<Vertex, &HgCommit> =
            commits.iter().map(|c| (c.vertex.clone(), c)).collect();
        // Tracks reverse dependency so we can enqueue the child after parent is added.
        let mut parent_to_children: HashMap<Vertex, Vec<&HgCommit>> = HashMap::new();
        // Counter to know when all a child's parents have been added.
        let mut parent_count: HashMap<Vertex, usize> = HashMap::new();
        // Queue of nodes not waiting on parents to be processed.
        let mut queue: Vec<&HgCommit> = Vec::new();
        for c in commits {
            let mut pending_parents = 0;
            for pv in c.parents.iter() {
                // Only parents that are part of this batch count as pending;
                // parents already in the revlog are resolved at insert time.
                if let Some(pc) = vertex_to_commit.get(pv) {
                    parent_to_children
                        .entry(pc.vertex.clone())
                        .or_default()
                        .push(c);
                    pending_parents += 1;
                }
            }
            if pending_parents == 0 {
                // Parents are not present in args - assume we are good to go.
                queue.push(c);
            } else {
                parent_count.insert(c.vertex.clone(), pending_parents);
            }
        }
        while let Some(commit) = queue.pop() {
            // Resolve parent vertexes to revlog revision numbers; this can
            // only succeed because parents were inserted first.
            let mut parent_revs = Vec::with_capacity(commit.parents.len());
            for parent in &commit.parents {
                parent_revs.push(self.revlog.vertex_id(parent.clone()).await?.0 as u32);
            }
            self.revlog
                .insert(commit.vertex.clone(), parent_revs, commit.raw_text.clone());
            // Remove so we can make sure we processed all the nodes, later.
            vertex_to_commit.remove(&commit.vertex);
            for child in parent_to_children
                .get(&commit.vertex)
                .map(|v| v.as_slice())
                .unwrap_or_default()
            {
                if let Some(parent_count) = parent_count.get_mut(&child.vertex) {
                    *parent_count -= 1;
                    if *parent_count == 0 {
                        // We were this child's last pending parent.
                        queue.push(child);
                    }
                }
            }
        }
        // Anything left was never dequeued, i.e. its parents never completed:
        // the input graph has a cycle.
        if !vertex_to_commit.is_empty() {
            programming("commits form a cycle when adding to revlog")?;
        }
        Ok(())
    }
    /// Persist pending insertions to disk. Master heads are irrelevant for
    /// the revlog backend and ignored.
    async fn flush(&mut self, _master_heads: &[Vertex]) -> Result<()> {
        self.revlog.flush()?;
        Ok(())
    }
    /// Same as `flush` for this backend: commit data and graph live in the
    /// same revlog files.
    async fn flush_commit_data(&mut self) -> Result<()> {
        self.revlog.flush()?;
        Ok(())
    }
    async fn update_virtual_nodes(&mut self, _wdir_parents: Vec<Vertex>) -> Result<()> {
        // XXX: Dummy implementation - revlog is rarely used.
        Ok(())
    }
}
#[async_trait::async_trait]
impl ReadCommitText for RevlogCommits {
    /// Look up the raw commit text for `vertex`. Falls back to the hardcoded
    /// null/wdir special cases when the vertex is not in the revlog.
    async fn get_commit_raw_text(&self, vertex: &Vertex) -> Result<Option<Bytes>> {
        match self
            .vertex_id_with_max_group(vertex, Group::NON_MASTER)
            .await?
        {
            Some(id) => Ok(Some(self.revlog.raw_data(id.0 as u32)?)),
            None => Ok(get_hard_coded_commit_text(vertex)),
        }
    }
    /// Cheap: `RevlogCommits` derives `Clone`.
    fn to_dyn_read_commit_text(&self) -> Arc<dyn ReadCommitText + Send + Sync> {
        Arc::new(self.clone())
    }
}
impl StreamCommitText for RevlogCommits {
    /// Map a stream of vertexes to their raw commit texts. Operates on a
    /// snapshot of the revlog so the stream stays valid ('static) after
    /// `self` is dropped; unknown vertexes become NotFound errors.
    fn stream_commit_raw_text(
        &self,
        stream: BoxStream<'static, anyhow::Result<Vertex>>,
    ) -> Result<BoxStream<'static, anyhow::Result<ParentlessHgCommit>>> {
        let revlog = self.revlog.get_snapshot();
        let stream = stream.map(move |item| {
            let vertex = item?;
            // Mercurial hard-coded special-case that does not match SHA1.
            if let Some(raw_text) = get_hard_coded_commit_text(&vertex) {
                return Ok(ParentlessHgCommit { vertex, raw_text });
            }
            match non_blocking_result(revlog.vertex_id_with_max_group(&vertex, Group::NON_MASTER))?
            {
                Some(id) => {
                    let raw_text = revlog.raw_data(id.0 as u32)?;
                    Ok(ParentlessHgCommit { vertex, raw_text })
                }
                None => vertex.not_found().map_err(Into::into),
            }
        });
        Ok(Box::pin(stream))
    }
}
#[async_trait::async_trait]
impl StripCommits for RevlogCommits {
    /// Remove `set` by rebuilding: migrate surviving commits into a fresh
    /// revlog in a "strip" subdirectory, swap its files over the originals
    /// (explicitly racy — see `racy_unsafe_move_files`), then reopen.
    async fn strip_commits(&mut self, set: Set) -> Result<()> {
        let old_dir = &self.dir;
        let new_dir = old_dir.join("strip");
        // Best-effort create; the dir may already exist from a prior strip.
        let _ = fs::create_dir(&new_dir);
        let mut new = Self::new(&new_dir)?;
        strip::migrate_commits(self, &mut new, set).await?;
        // Drop to flush/close the new revlog before moving its files.
        drop(new);
        strip::racy_unsafe_move_files(&new_dir, old_dir)?;
        *self = Self::new(old_dir)?;
        Ok(())
    }
}
// Forward the graph/id-map traits straight to the underlying RevlogIndex.
delegate!(CheckIntegrity | IdConvert | IdMapSnapshot | PrefixLookup | DagAlgorithm, RevlogCommits => self.revlog);
impl DescribeBackend for RevlogCommits {
    /// Short machine-readable backend identifier.
    fn algorithm_backend(&self) -> &'static str {
        "revlog"
    }
    /// Human-readable description of where data lives and which features
    /// this backend provides.
    fn describe_backend(&self) -> String {
        format!(
            r#"Backend (revlog):
  Local:
    Revlog: {}
    Nodemap: {}
Feature Providers:
  Commit Graph Algorithms:
    Revlog
  Commit Hash / Rev Lookup:
    Nodemap
  Commit Data (user, message):
    Revlog
"#,
            self.dir.join("00changelog.{i,d}").display(),
            self.dir.join("00changelog.nodemap").display(),
        )
    }
    fn explain_internals(&self, w: &mut dyn io::Write) -> io::Result<()> {
        writeln!(w, "(RevlogIndex explain_internals is not yet implemented)")
    }
}
```
|
```dart
// GENERATED CODE - DO NOT MODIFY BY HAND
part of angel_serialize.test.models.book;
// **************************************************************************
// JsonModelGenerator
// **************************************************************************
@generatedSerializable
@pragma('hello')
@SerializableField(alias: 'omg')
class Book extends _Book {
  Book(
      {this.id,
      this.createdAt,
      this.updatedAt,
      this.author,
      this.title,
      this.description,
      this.pageCount,
      List<double> notModels,
      this.camelCaseString})
      : this.notModels = List.unmodifiable(notModels ?? []);

  /// A unique identifier corresponding to this item.
  @override
  String id;

  /// The time at which this item was created.
  @override
  DateTime createdAt;

  /// The last time at which this item was updated.
  @override
  DateTime updatedAt;

  @override
  String author;

  @override
  String title;

  @override
  String description;

  /// The number of pages the book has.
  @override
  int pageCount;

  @override
  List<double> notModels;

  @override
  String camelCaseString;

  /// Returns a copy of this model with the provided fields replaced.
  Book copyWith(
      {String id,
      DateTime createdAt,
      DateTime updatedAt,
      String author,
      String title,
      String description,
      int pageCount,
      List<double> notModels,
      String camelCaseString}) {
    return Book(
        id: id ?? this.id,
        createdAt: createdAt ?? this.createdAt,
        updatedAt: updatedAt ?? this.updatedAt,
        author: author ?? this.author,
        title: title ?? this.title,
        description: description ?? this.description,
        pageCount: pageCount ?? this.pageCount,
        notModels: notModels ?? this.notModels,
        camelCaseString: camelCaseString ?? this.camelCaseString);
  }

  @override
  bool operator ==(other) {
    return other is _Book &&
        other.id == id &&
        other.createdAt == createdAt &&
        other.updatedAt == updatedAt &&
        other.author == author &&
        other.title == title &&
        other.description == description &&
        other.pageCount == pageCount &&
        ListEquality<double>(DefaultEquality<double>())
            .equals(other.notModels, notModels) &&
        other.camelCaseString == camelCaseString;
  }

  @override
  int get hashCode {
    return hashObjects([
      id,
      createdAt,
      updatedAt,
      author,
      title,
      description,
      pageCount,
      notModels,
      camelCaseString
    ]);
  }

  @override
  String toString() {
    return "Book(id=$id, createdAt=$createdAt, updatedAt=$updatedAt, author=$author, title=$title, description=$description, pageCount=$pageCount, notModels=$notModels, camelCaseString=$camelCaseString)";
  }

  Map<String, dynamic> toJson() {
    return BookSerializer.toMap(this);
  }
}
@generatedSerializable
class Author extends _Author {
  Author(
      {this.id,
      this.createdAt,
      this.updatedAt,
      @required this.name,
      @required this.age,
      List<_Book> books,
      this.newestBook,
      this.secret,
      this.obscured})
      : this.books = List.unmodifiable(books ?? []);

  /// A unique identifier corresponding to this item.
  @override
  String id;

  /// The time at which this item was created.
  @override
  DateTime createdAt;

  /// The last time at which this item was updated.
  @override
  DateTime updatedAt;

  @override
  final String name;

  @override
  final int age;

  @override
  final List<_Book> books;

  /// The newest book.
  @override
  final _Book newestBook;

  @override
  final String secret;

  @override
  final String obscured;

  /// Returns a copy of this model with the provided fields replaced.
  Author copyWith(
      {String id,
      DateTime createdAt,
      DateTime updatedAt,
      String name,
      int age,
      List<_Book> books,
      _Book newestBook,
      String secret,
      String obscured}) {
    return Author(
        id: id ?? this.id,
        createdAt: createdAt ?? this.createdAt,
        updatedAt: updatedAt ?? this.updatedAt,
        name: name ?? this.name,
        age: age ?? this.age,
        books: books ?? this.books,
        newestBook: newestBook ?? this.newestBook,
        secret: secret ?? this.secret,
        obscured: obscured ?? this.obscured);
  }

  @override
  bool operator ==(other) {
    return other is _Author &&
        other.id == id &&
        other.createdAt == createdAt &&
        other.updatedAt == updatedAt &&
        other.name == name &&
        other.age == age &&
        ListEquality<_Book>(DefaultEquality<_Book>())
            .equals(other.books, books) &&
        other.newestBook == newestBook &&
        other.secret == secret &&
        other.obscured == obscured;
  }

  @override
  int get hashCode {
    return hashObjects([
      id,
      createdAt,
      updatedAt,
      name,
      age,
      books,
      newestBook,
      secret,
      obscured
    ]);
  }

  @override
  String toString() {
    return "Author(id=$id, createdAt=$createdAt, updatedAt=$updatedAt, name=$name, age=$age, books=$books, newestBook=$newestBook, secret=$secret, obscured=$obscured)";
  }

  Map<String, dynamic> toJson() {
    return AuthorSerializer.toMap(this);
  }
}
@generatedSerializable
class Library extends _Library {
  Library(
      {this.id, this.createdAt, this.updatedAt, Map<String, _Book> collection})
      : this.collection = Map.unmodifiable(collection ?? {});

  /// A unique identifier corresponding to this item.
  @override
  String id;

  /// The time at which this item was created.
  @override
  DateTime createdAt;

  /// The last time at which this item was updated.
  @override
  DateTime updatedAt;

  @override
  final Map<String, _Book> collection;

  /// Returns a copy of this model with the provided fields replaced.
  Library copyWith(
      {String id,
      DateTime createdAt,
      DateTime updatedAt,
      Map<String, _Book> collection}) {
    return Library(
        id: id ?? this.id,
        createdAt: createdAt ?? this.createdAt,
        updatedAt: updatedAt ?? this.updatedAt,
        collection: collection ?? this.collection);
  }

  @override
  bool operator ==(other) {
    return other is _Library &&
        other.id == id &&
        other.createdAt == createdAt &&
        other.updatedAt == updatedAt &&
        MapEquality<String, _Book>(
                keys: DefaultEquality<String>(),
                values: DefaultEquality<_Book>())
            .equals(other.collection, collection);
  }

  @override
  int get hashCode {
    return hashObjects([id, createdAt, updatedAt, collection]);
  }

  @override
  String toString() {
    return "Library(id=$id, createdAt=$createdAt, updatedAt=$updatedAt, collection=$collection)";
  }

  Map<String, dynamic> toJson() {
    return LibrarySerializer.toMap(this);
  }
}
@generatedSerializable
class Bookmark extends _Bookmark {
  Bookmark(_Book book,
      {this.id,
      this.createdAt,
      this.updatedAt,
      List<int> history,
      @required this.page,
      this.comment})
      : this.history = List.unmodifiable(history ?? []),
        super(book);

  /// A unique identifier corresponding to this item.
  @override
  String id;

  /// The time at which this item was created.
  @override
  DateTime createdAt;

  /// The last time at which this item was updated.
  @override
  DateTime updatedAt;

  @override
  final List<int> history;

  @override
  final int page;

  @override
  final String comment;

  /// Returns a copy of this model with the provided fields replaced.
  /// The associated [book] must be supplied positionally, as in the
  /// constructor.
  Bookmark copyWith(_Book book,
      {String id,
      DateTime createdAt,
      DateTime updatedAt,
      List<int> history,
      int page,
      String comment}) {
    return Bookmark(book,
        id: id ?? this.id,
        createdAt: createdAt ?? this.createdAt,
        updatedAt: updatedAt ?? this.updatedAt,
        history: history ?? this.history,
        page: page ?? this.page,
        comment: comment ?? this.comment);
  }

  @override
  bool operator ==(other) {
    return other is _Bookmark &&
        other.id == id &&
        other.createdAt == createdAt &&
        other.updatedAt == updatedAt &&
        ListEquality<int>(DefaultEquality<int>())
            .equals(other.history, history) &&
        other.page == page &&
        other.comment == comment;
  }

  @override
  int get hashCode {
    return hashObjects([id, createdAt, updatedAt, history, page, comment]);
  }

  @override
  String toString() {
    return "Bookmark(id=$id, createdAt=$createdAt, updatedAt=$updatedAt, history=$history, page=$page, comment=$comment)";
  }

  Map<String, dynamic> toJson() {
    return BookmarkSerializer.toMap(this);
  }
}
// **************************************************************************
// SerializerGenerator
// **************************************************************************
const BookSerializer bookSerializer = BookSerializer();

/// Converts [Book] models into JSON-compatible maps.
class BookEncoder extends Converter<Book, Map> {
  const BookEncoder();

  @override
  Map convert(Book model) => BookSerializer.toMap(model);
}

/// Restores [Book] models from maps produced by [BookEncoder].
class BookDecoder extends Converter<Map, Book> {
  const BookDecoder();

  @override
  Book convert(Map map) => BookSerializer.fromMap(map);
}

/// Two-way codec between [Book] and [Map]. The static [fromMap]/[toMap]
/// helpers carry the actual mapping logic; timestamps round-trip as ISO-8601
/// strings.
class BookSerializer extends Codec<Book, Map> {
  const BookSerializer();

  @override
  get encoder => const BookEncoder();
  @override
  get decoder => const BookDecoder();
  static Book fromMap(Map map) {
    return Book(
        id: map['id'] as String,
        createdAt: map['created_at'] != null
            ? (map['created_at'] is DateTime
                ? (map['created_at'] as DateTime)
                : DateTime.parse(map['created_at'].toString()))
            : null,
        updatedAt: map['updated_at'] != null
            ? (map['updated_at'] is DateTime
                ? (map['updated_at'] as DateTime)
                : DateTime.parse(map['updated_at'].toString()))
            : null,
        author: map['author'] as String,
        title: map['title'] as String,
        description: map['description'] as String,
        pageCount: map['page_count'] as int,
        notModels: map['not_models'] is Iterable
            ? (map['not_models'] as Iterable).cast<double>().toList()
            : null,
        // 'camelCase' is the serialized alias for camelCaseString.
        camelCaseString: map['camelCase'] as String);
  }

  static Map<String, dynamic> toMap(_Book model) {
    if (model == null) {
      return null;
    }
    return {
      'id': model.id,
      'created_at': model.createdAt?.toIso8601String(),
      'updated_at': model.updatedAt?.toIso8601String(),
      'author': model.author,
      'title': model.title,
      'description': model.description,
      'page_count': model.pageCount,
      'not_models': model.notModels,
      'camelCase': model.camelCaseString
    };
  }
}

/// Serialized field names for [Book].
abstract class BookFields {
  static const List<String> allFields = <String>[
    id,
    createdAt,
    updatedAt,
    author,
    title,
    description,
    pageCount,
    notModels,
    camelCaseString
  ];

  static const String id = 'id';

  static const String createdAt = 'created_at';

  static const String updatedAt = 'updated_at';

  static const String author = 'author';

  static const String title = 'title';

  static const String description = 'description';

  static const String pageCount = 'page_count';

  static const String notModels = 'not_models';

  static const String camelCaseString = 'camelCase';
}
const AuthorSerializer authorSerializer = AuthorSerializer();

/// Converts [Author] models into JSON-compatible maps.
class AuthorEncoder extends Converter<Author, Map> {
  const AuthorEncoder();

  @override
  Map convert(Author model) => AuthorSerializer.toMap(model);
}

/// Restores [Author] models from maps produced by [AuthorEncoder].
class AuthorDecoder extends Converter<Map, Author> {
  const AuthorDecoder();

  @override
  Author convert(Map map) => AuthorSerializer.fromMap(map);
}

/// Two-way codec between [Author] and [Map].
///
/// `name` and `age` are required and validated on both directions. Note that
/// the `secret` field is never read or written here — presumably it is
/// excluded from serialization by the generator configuration; confirm
/// against the source model.
class AuthorSerializer extends Codec<Author, Map> {
  const AuthorSerializer();

  @override
  get encoder => const AuthorEncoder();
  @override
  get decoder => const AuthorDecoder();
  static Author fromMap(Map map) {
    if (map['name'] == null) {
      throw FormatException("Missing required field 'name' on Author.");
    }

    if (map['age'] == null) {
      throw FormatException("Custom message for missing `age`");
    }

    return Author(
        id: map['id'] as String,
        createdAt: map['created_at'] != null
            ? (map['created_at'] is DateTime
                ? (map['created_at'] as DateTime)
                : DateTime.parse(map['created_at'].toString()))
            : null,
        updatedAt: map['updated_at'] != null
            ? (map['updated_at'] is DateTime
                ? (map['updated_at'] as DateTime)
                : DateTime.parse(map['updated_at'].toString()))
            : null,
        name: map['name'] as String,
        age: map['age'] as int,
        books: map['books'] is Iterable
            ? List.unmodifiable(((map['books'] as Iterable).whereType<Map>())
                .map(BookSerializer.fromMap))
            : null,
        newestBook: map['newest_book'] != null
            ? BookSerializer.fromMap(map['newest_book'] as Map)
            : null,
        obscured: map['obscured'] as String);
  }

  static Map<String, dynamic> toMap(_Author model) {
    if (model == null) {
      return null;
    }
    if (model.name == null) {
      throw FormatException("Missing required field 'name' on Author.");
    }

    if (model.age == null) {
      throw FormatException("Custom message for missing `age`");
    }

    return {
      'id': model.id,
      'created_at': model.createdAt?.toIso8601String(),
      'updated_at': model.updatedAt?.toIso8601String(),
      'name': model.name,
      'age': model.age,
      'books': model.books?.map((m) => BookSerializer.toMap(m))?.toList(),
      'newest_book': BookSerializer.toMap(model.newestBook)
    };
  }
}

/// Serialized field names for [Author].
abstract class AuthorFields {
  static const List<String> allFields = <String>[
    id,
    createdAt,
    updatedAt,
    name,
    age,
    books,
    newestBook,
    secret,
    obscured
  ];

  static const String id = 'id';

  static const String createdAt = 'created_at';

  static const String updatedAt = 'updated_at';

  static const String name = 'name';

  static const String age = 'age';

  static const String books = 'books';

  static const String newestBook = 'newest_book';

  static const String secret = 'secret';

  static const String obscured = 'obscured';
}
const LibrarySerializer librarySerializer = LibrarySerializer();

/// Converts [Library] models into JSON-compatible maps.
class LibraryEncoder extends Converter<Library, Map> {
  const LibraryEncoder();

  @override
  Map convert(Library model) => LibrarySerializer.toMap(model);
}

/// Restores [Library] models from maps produced by [LibraryEncoder].
class LibraryDecoder extends Converter<Map, Library> {
  const LibraryDecoder();

  @override
  Library convert(Map map) => LibrarySerializer.fromMap(map);
}

/// Two-way codec between [Library] and [Map]. The `collection` map is
/// rebuilt entry-by-entry, with each value run through [BookSerializer].
class LibrarySerializer extends Codec<Library, Map> {
  const LibrarySerializer();

  @override
  get encoder => const LibraryEncoder();
  @override
  get decoder => const LibraryDecoder();
  static Library fromMap(Map map) {
    return Library(
        id: map['id'] as String,
        createdAt: map['created_at'] != null
            ? (map['created_at'] is DateTime
                ? (map['created_at'] as DateTime)
                : DateTime.parse(map['created_at'].toString()))
            : null,
        updatedAt: map['updated_at'] != null
            ? (map['updated_at'] is DateTime
                ? (map['updated_at'] as DateTime)
                : DateTime.parse(map['updated_at'].toString()))
            : null,
        collection: map['collection'] is Map
            ? Map.unmodifiable(
                (map['collection'] as Map).keys.fold({}, (out, key) {
                return out
                  ..[key] = BookSerializer.fromMap(
                      ((map['collection'] as Map)[key]) as Map);
              }))
            : null);
  }

  static Map<String, dynamic> toMap(_Library model) {
    if (model == null) {
      return null;
    }
    return {
      'id': model.id,
      'created_at': model.createdAt?.toIso8601String(),
      'updated_at': model.updatedAt?.toIso8601String(),
      'collection': model.collection.keys?.fold({}, (map, key) {
        return map..[key] = BookSerializer.toMap(model.collection[key]);
      })
    };
  }
}

/// Serialized field names for [Library].
abstract class LibraryFields {
  static const List<String> allFields = <String>[
    id,
    createdAt,
    updatedAt,
    collection
  ];

  static const String id = 'id';

  static const String createdAt = 'created_at';

  static const String updatedAt = 'updated_at';

  static const String collection = 'collection';
}
/// Static (de)serialization helpers for [Bookmark].
///
/// Unlike the other serializers, this is not a [Codec]: [fromMap] requires
/// the associated `book` to be supplied by the caller because [Bookmark]'s
/// constructor takes it positionally and it is not part of the map.
abstract class BookmarkSerializer {
  static Bookmark fromMap(Map map, _Book book) {
    if (map['page'] == null) {
      throw FormatException("Missing required field 'page' on Bookmark.");
    }

    return Bookmark(book,
        id: map['id'] as String,
        createdAt: map['created_at'] != null
            ? (map['created_at'] is DateTime
                ? (map['created_at'] as DateTime)
                : DateTime.parse(map['created_at'].toString()))
            : null,
        updatedAt: map['updated_at'] != null
            ? (map['updated_at'] is DateTime
                ? (map['updated_at'] as DateTime)
                : DateTime.parse(map['updated_at'].toString()))
            : null,
        history: map['history'] is Iterable
            ? (map['history'] as Iterable).cast<int>().toList()
            : null,
        page: map['page'] as int,
        comment: map['comment'] as String);
  }

  static Map<String, dynamic> toMap(_Bookmark model) {
    if (model == null) {
      return null;
    }
    if (model.page == null) {
      throw FormatException("Missing required field 'page' on Bookmark.");
    }

    return {
      'id': model.id,
      'created_at': model.createdAt?.toIso8601String(),
      'updated_at': model.updatedAt?.toIso8601String(),
      'history': model.history,
      'page': model.page,
      'comment': model.comment
    };
  }
}

/// Serialized field names for [Bookmark].
abstract class BookmarkFields {
  static const List<String> allFields = <String>[
    id,
    createdAt,
    updatedAt,
    history,
    page,
    comment
  ];

  static const String id = 'id';

  static const String createdAt = 'created_at';

  static const String updatedAt = 'updated_at';

  static const String history = 'history';

  static const String page = 'page';

  static const String comment = 'comment';
}
```
|
```python
from __future__ import annotations

# Script entry point: prints arcade environment information when this
# module is executed directly.
from arcade.management import show_info

if __name__ == "__main__":
    show_info()
```
|
```javascript
// Barrel module: re-export the SQLite implementation and its public types.
export * from './SQLite';
export * from './SQLite.types';
//# sourceMappingURL=index.js.map
```
|
The APA Distinguished Scientific Award for the Applications of Psychology is an award of the American Psychological Association.
Recipients
Source: APA
20th Century
1973 Conrad L. Kraft
1974 Gerald S. Lesser, Edward L. Palmer
1975 Nathan H. Azrin
1976 Fred S. Keller
1977 Starke R. Hathaway
1978 Alphonse Chapanis
1979 Joseph Wolpe
1980 Edwin A. Fleishman
1981 Anne Anastasi
1982 Robert M. Gagné
1983 Donald E. Super
1984 Gerald R. Patterson
1985 John Money
1986 Martin T. Orne
1987 Robert Glaser
1988 Leonard Berkowitz
1989 Aaron T. Beck
1990 Wallace E. Lambert
1991 Joseph V. Brady
1992 Charles R. Schuster
1993 Herschel W. Leibowitz
1994 John E. Hunter, Frank L. Schmidt
1995 Ann L. Brown
1996 Ward Edwards
1997 Harold Stevenson
1998/1999 Loren J. Chapman, Jean P. Chapman
2000 David H. Barlow
21st Century
2001 David T. Lykken
2002 Robert Rosenthal
2003 Stephen J. Ceci, Elizabeth F. Loftus
2004 Edward Taub
2005 Karen A. Matthews
2006 John P. Campbell
2007 Karl G. Jöreskog, Peter M. Bentler
2008 John L. Holland
2009 Nancy E. Adler
2010 David M. Clark
2011 Alan E. Kazdin
2012 Kelly D. Brownell
2013 J. Richard Hackman
2014 G. Terence Wilson
2015 Michael E. Lamb
2016 James W. Pennebaker
2017 Jacquelynne S. Eccles
2018 Kenneth A. Dodge
2019 James S. Jackson
2020 Steven D. Hollon
2021 James H. Sidanius
2022 Christopher G. Fairburn
See also
List of psychology awards
References
American Psychological Association
American psychology awards
|
Pilica () is a village in the municipality of Bajina Bašta, Serbia. According to the 2002 census, the village has a population of 653 people.
References
Populated places in Zlatibor District
|
```smalltalk
using System.Buffers;
using System.Runtime.CompilerServices;
using SixLabors.ImageSharp.Memory;
namespace SixLabors.ImageSharp.Advanced;
/// <summary>
/// Utility methods for batched processing of pixel row intervals.
/// Parallel execution is optimized for image processing based on values defined
/// <see cref="ParallelExecutionSettings"/> or <see cref="Configuration"/>.
/// Using this class is preferred over direct usage of <see cref="Parallel"/> utility methods.
/// </summary>
public static partial class ParallelRowIterator
{
    /// <summary>
    /// Iterate through the rows of a rectangle in optimized batches.
    /// </summary>
    /// <typeparam name="T">The type of row operation to perform.</typeparam>
    /// <param name="configuration">The <see cref="Configuration"/> to get the parallel settings from.</param>
    /// <param name="rectangle">The <see cref="Rectangle"/>.</param>
    /// <param name="operation">The operation defining the iteration logic on a single row.</param>
    [MethodImpl(InliningOptions.ShortMethod)]
    public static void IterateRows<T>(Configuration configuration, Rectangle rectangle, in T operation)
        where T : struct, IRowOperation
    {
        var parallelSettings = ParallelExecutionSettings.FromConfiguration(configuration);
        IterateRows(rectangle, in parallelSettings, in operation);
    }

    /// <summary>
    /// Iterate through the rows of a rectangle in optimized batches.
    /// </summary>
    /// <typeparam name="T">The type of row operation to perform.</typeparam>
    /// <param name="rectangle">The <see cref="Rectangle"/>.</param>
    /// <param name="parallelSettings">The <see cref="ParallelExecutionSettings"/>.</param>
    /// <param name="operation">The operation defining the iteration logic on a single row.</param>
    public static void IterateRows<T>(
        Rectangle rectangle,
        in ParallelExecutionSettings parallelSettings,
        in T operation)
        where T : struct, IRowOperation
    {
        ValidateRectangle(rectangle);

        int top = rectangle.Top;
        int bottom = rectangle.Bottom;
        int width = rectangle.Width;
        int height = rectangle.Height;

        // Cap the number of work items by both the configured degree of parallelism
        // and the minimum number of pixels each task should process.
        int maxSteps = DivideCeil(width * (long)height, parallelSettings.MinimumPixelsProcessedPerTask);
        int numOfSteps = Math.Min(parallelSettings.MaxDegreeOfParallelism, maxSteps);

        // Avoid TPL overhead in this trivial case:
        if (numOfSteps == 1)
        {
            for (int y = top; y < bottom; y++)
            {
                Unsafe.AsRef(in operation).Invoke(y);
            }

            return;
        }

        // Use the cached 'height' local (consistent with the buffered overloads below).
        int verticalStep = DivideCeil(height, numOfSteps);
        var parallelOptions = new ParallelOptions { MaxDegreeOfParallelism = numOfSteps };
        var wrappingOperation = new RowOperationWrapper<T>(top, bottom, verticalStep, in operation);

        Parallel.For(
            0,
            numOfSteps,
            parallelOptions,
            wrappingOperation.Invoke);
    }

    /// <summary>
    /// Iterate through the rows of a rectangle in optimized batches.
    /// instantiating a temporary buffer for each <paramref name="operation"/> invocation.
    /// </summary>
    /// <typeparam name="T">The type of row operation to perform.</typeparam>
    /// <typeparam name="TBuffer">The type of buffer elements.</typeparam>
    /// <param name="configuration">The <see cref="Configuration"/> to get the parallel settings from.</param>
    /// <param name="rectangle">The <see cref="Rectangle"/>.</param>
    /// <param name="operation">The operation defining the iteration logic on a single row.</param>
    public static void IterateRows<T, TBuffer>(Configuration configuration, Rectangle rectangle, in T operation)
        where T : struct, IRowOperation<TBuffer>
        where TBuffer : unmanaged
    {
        var parallelSettings = ParallelExecutionSettings.FromConfiguration(configuration);
        IterateRows<T, TBuffer>(rectangle, in parallelSettings, in operation);
    }

    /// <summary>
    /// Iterate through the rows of a rectangle in optimized batches.
    /// instantiating a temporary buffer for each <paramref name="operation"/> invocation.
    /// </summary>
    /// <typeparam name="T">The type of row operation to perform.</typeparam>
    /// <typeparam name="TBuffer">The type of buffer elements.</typeparam>
    /// <param name="rectangle">The <see cref="Rectangle"/>.</param>
    /// <param name="parallelSettings">The <see cref="ParallelExecutionSettings"/>.</param>
    /// <param name="operation">The operation defining the iteration logic on a single row.</param>
    public static void IterateRows<T, TBuffer>(
        Rectangle rectangle,
        in ParallelExecutionSettings parallelSettings,
        in T operation)
        where T : struct, IRowOperation<TBuffer>
        where TBuffer : unmanaged
    {
        ValidateRectangle(rectangle);

        int top = rectangle.Top;
        int bottom = rectangle.Bottom;
        int width = rectangle.Width;
        int height = rectangle.Height;

        // Cap the number of work items by both the configured degree of parallelism
        // and the minimum number of pixels each task should process.
        int maxSteps = DivideCeil(width * (long)height, parallelSettings.MinimumPixelsProcessedPerTask);
        int numOfSteps = Math.Min(parallelSettings.MaxDegreeOfParallelism, maxSteps);

        MemoryAllocator allocator = parallelSettings.MemoryAllocator;
        int bufferLength = Unsafe.AsRef(in operation).GetRequiredBufferLength(rectangle);

        // Avoid TPL overhead in this trivial case:
        if (numOfSteps == 1)
        {
            using IMemoryOwner<TBuffer> buffer = allocator.Allocate<TBuffer>(bufferLength);
            Span<TBuffer> span = buffer.Memory.Span;

            for (int y = top; y < bottom; y++)
            {
                Unsafe.AsRef(in operation).Invoke(y, span);
            }

            return;
        }

        int verticalStep = DivideCeil(height, numOfSteps);
        var parallelOptions = new ParallelOptions { MaxDegreeOfParallelism = numOfSteps };
        var wrappingOperation = new RowOperationWrapper<T, TBuffer>(top, bottom, verticalStep, bufferLength, allocator, in operation);

        Parallel.For(
            0,
            numOfSteps,
            parallelOptions,
            wrappingOperation.Invoke);
    }

    /// <summary>
    /// Iterate through the rows of a rectangle in optimized batches defined by <see cref="RowInterval"/>-s.
    /// </summary>
    /// <typeparam name="T">The type of row operation to perform.</typeparam>
    /// <param name="configuration">The <see cref="Configuration"/> to get the parallel settings from.</param>
    /// <param name="rectangle">The <see cref="Rectangle"/>.</param>
    /// <param name="operation">The operation defining the iteration logic on a single <see cref="RowInterval"/>.</param>
    [MethodImpl(InliningOptions.ShortMethod)]
    public static void IterateRowIntervals<T>(Configuration configuration, Rectangle rectangle, in T operation)
        where T : struct, IRowIntervalOperation
    {
        var parallelSettings = ParallelExecutionSettings.FromConfiguration(configuration);
        IterateRowIntervals(rectangle, in parallelSettings, in operation);
    }

    /// <summary>
    /// Iterate through the rows of a rectangle in optimized batches defined by <see cref="RowInterval"/>-s.
    /// </summary>
    /// <typeparam name="T">The type of row operation to perform.</typeparam>
    /// <param name="rectangle">The <see cref="Rectangle"/>.</param>
    /// <param name="parallelSettings">The <see cref="ParallelExecutionSettings"/>.</param>
    /// <param name="operation">The operation defining the iteration logic on a single <see cref="RowInterval"/>.</param>
    public static void IterateRowIntervals<T>(
        Rectangle rectangle,
        in ParallelExecutionSettings parallelSettings,
        in T operation)
        where T : struct, IRowIntervalOperation
    {
        ValidateRectangle(rectangle);

        int top = rectangle.Top;
        int bottom = rectangle.Bottom;
        int width = rectangle.Width;
        int height = rectangle.Height;

        // Cap the number of work items by both the configured degree of parallelism
        // and the minimum number of pixels each task should process.
        int maxSteps = DivideCeil(width * (long)height, parallelSettings.MinimumPixelsProcessedPerTask);
        int numOfSteps = Math.Min(parallelSettings.MaxDegreeOfParallelism, maxSteps);

        // Avoid TPL overhead in this trivial case:
        if (numOfSteps == 1)
        {
            var rows = new RowInterval(top, bottom);
            Unsafe.AsRef(in operation).Invoke(in rows);

            return;
        }

        // Use the cached 'height' local (consistent with the buffered overloads).
        int verticalStep = DivideCeil(height, numOfSteps);
        var parallelOptions = new ParallelOptions { MaxDegreeOfParallelism = numOfSteps };
        var wrappingOperation = new RowIntervalOperationWrapper<T>(top, bottom, verticalStep, in operation);

        Parallel.For(
            0,
            numOfSteps,
            parallelOptions,
            wrappingOperation.Invoke);
    }

    /// <summary>
    /// Iterate through the rows of a rectangle in optimized batches defined by <see cref="RowInterval"/>-s
    /// instantiating a temporary buffer for each <paramref name="operation"/> invocation.
    /// </summary>
    /// <typeparam name="T">The type of row operation to perform.</typeparam>
    /// <typeparam name="TBuffer">The type of buffer elements.</typeparam>
    /// <param name="configuration">The <see cref="Configuration"/> to get the parallel settings from.</param>
    /// <param name="rectangle">The <see cref="Rectangle"/>.</param>
    /// <param name="operation">The operation defining the iteration logic on a single <see cref="RowInterval"/>.</param>
    public static void IterateRowIntervals<T, TBuffer>(Configuration configuration, Rectangle rectangle, in T operation)
        where T : struct, IRowIntervalOperation<TBuffer>
        where TBuffer : unmanaged
    {
        var parallelSettings = ParallelExecutionSettings.FromConfiguration(configuration);
        IterateRowIntervals<T, TBuffer>(rectangle, in parallelSettings, in operation);
    }

    /// <summary>
    /// Iterate through the rows of a rectangle in optimized batches defined by <see cref="RowInterval"/>-s
    /// instantiating a temporary buffer for each <paramref name="operation"/> invocation.
    /// </summary>
    /// <typeparam name="T">The type of row operation to perform.</typeparam>
    /// <typeparam name="TBuffer">The type of buffer elements.</typeparam>
    /// <param name="rectangle">The <see cref="Rectangle"/>.</param>
    /// <param name="parallelSettings">The <see cref="ParallelExecutionSettings"/>.</param>
    /// <param name="operation">The operation defining the iteration logic on a single <see cref="RowInterval"/>.</param>
    public static void IterateRowIntervals<T, TBuffer>(
        Rectangle rectangle,
        in ParallelExecutionSettings parallelSettings,
        in T operation)
        where T : struct, IRowIntervalOperation<TBuffer>
        where TBuffer : unmanaged
    {
        ValidateRectangle(rectangle);

        int top = rectangle.Top;
        int bottom = rectangle.Bottom;
        int width = rectangle.Width;
        int height = rectangle.Height;

        // Cap the number of work items by both the configured degree of parallelism
        // and the minimum number of pixels each task should process.
        int maxSteps = DivideCeil(width * (long)height, parallelSettings.MinimumPixelsProcessedPerTask);
        int numOfSteps = Math.Min(parallelSettings.MaxDegreeOfParallelism, maxSteps);

        MemoryAllocator allocator = parallelSettings.MemoryAllocator;
        int bufferLength = Unsafe.AsRef(in operation).GetRequiredBufferLength(rectangle);

        // Avoid TPL overhead in this trivial case:
        if (numOfSteps == 1)
        {
            var rows = new RowInterval(top, bottom);
            using IMemoryOwner<TBuffer> buffer = allocator.Allocate<TBuffer>(bufferLength);

            Unsafe.AsRef(in operation).Invoke(in rows, buffer.Memory.Span);

            return;
        }

        int verticalStep = DivideCeil(height, numOfSteps);
        var parallelOptions = new ParallelOptions { MaxDegreeOfParallelism = numOfSteps };
        var wrappingOperation = new RowIntervalOperationWrapper<T, TBuffer>(top, bottom, verticalStep, bufferLength, allocator, in operation);

        Parallel.For(
            0,
            numOfSteps,
            parallelOptions,
            wrappingOperation.Invoke);
    }

    // Ceiling division clamped to int.MaxValue; callers guarantee dividend >= 1.
    [MethodImpl(InliningOptions.ShortMethod)]
    private static int DivideCeil(long dividend, int divisor) => (int)Math.Min(1 + ((dividend - 1) / divisor), int.MaxValue);

    // Both dimensions must be positive; DivideCeil and the step math rely on this.
    private static void ValidateRectangle(Rectangle rectangle)
    {
        Guard.MustBeGreaterThan(
            rectangle.Width,
            0,
            $"{nameof(rectangle)}.{nameof(rectangle.Width)}");

        Guard.MustBeGreaterThan(
            rectangle.Height,
            0,
            $"{nameof(rectangle)}.{nameof(rectangle.Height)}");
    }
}
```
|
```python
import struct
import pytest
import moderngl
def test_1(ctx):
    # Transform feedback: add a per-vertex vec2 to a per-instance vec2.
    # With '2f/i' the single (10, 100) value of in_v2 is applied to every vertex.
    prog = ctx.program(
        vertex_shader='''
            #version 330
            in vec2 in_v1;
            in vec2 in_v2;
            out vec2 out_v;
            void main() {
                out_v = in_v1 + in_v2;
            }
        ''',
        varyings=['out_v'],
    )

    per_vertex = ctx.buffer(struct.pack('8f', 1, 2, 3, 4, 5, 6, 7, 8))
    per_instance = ctx.buffer(struct.pack('2f', 10, 100))
    out_buf = ctx.buffer(reserve=per_vertex.size)

    vao = ctx.vertex_array(prog, [
        (per_vertex, '2f', 'in_v1'),
        (per_instance, '2f/i', 'in_v2'),
    ])
    vao.transform(out_buf, moderngl.POINTS)

    result = struct.unpack('8f', out_buf.read())
    expected = (11.0, 102.0, 13.0, 104.0, 15.0, 106.0, 17.0, 108.0)
    for got, want in zip(result, expected):
        assert pytest.approx(got) == want
```
|
Mount Tarampa State School is a heritage-listed state school at 9 Profkes Road, Mount Tarampa, Somerset Region, Queensland, Australia. It was designed by Queensland Department of Public Works and built in 1906 by C Risdale. It was formerly known as Mount Tarampa Provisional School. It was added to the Queensland Heritage Register on 1 May 2015.
History
Mount Tarampa State School opened in 1906 as Mount Tarampa Provisional School on a site within the small agricultural settlement of Mount Tarampa in the Brisbane Valley, to serve the sparse but growing rural population. As settlement increased, the provisional school building was extended (1916) and a teacher's residence was built (1918). Other structures and landscape elements were added including a play shed (1912), a tennis court (pre-1925) and plantings. The school has been in continuous operation since establishment and has been a focus for the local community as a place for important social and cultural activity.
European occupation of the Brisbane Valley dates from 1841. Charles Cameron established Tarampa station, a large pastoral lease stretching from the D'Aguilar Range to Glenmore Grove and from Prenzlau to Coominya, in the 1840s. Settlement in the Mount Tarampa area occurred when the Tarampa Repurchased Estate, comprising divided into 144 properties, opened for selection in 1903. All of the blocks were occupied by 1909. Farmers in the area carried out mixed farming, dairying and grazing.
Mount Tarampa Provisional School opened on 23 July 1906 with 22 children attending in the first year. Construction of the Department of Public Works-designed provisional school building, on a site purchased from James West, was undertaken by C Risdale for .
The provision of state-administered education was important to the colonial governments of Australia. Following the introduction of the Education Act 1860, which standardised curriculum, training and facilities, Queensland's national and public schools grew from four in 1860 to 230 by 1875. The State Education Act 1875 provided for free, compulsory and secular primary education and the establishment of the Department of Public Instruction. This further standardised the provision of education, and despite difficulties, achieved the remarkable feat of bringing basic literacy to most Queensland children by 1900.
The establishment of schools was considered an essential step in the development of early communities and integral to their success. Locals often donated land and labour for a school's construction and the school community contributed to maintenance and development. Schools became a community focus, a symbol of progress and a source of pride, with enduring connections formed with past pupils, parents and teachers. The inclusion of war memorials and community halls reinforced these connections and provided a venue for a wide range of community events in schools across Queensland.
To help ensure consistency and economy, the Queensland Government developed standard plans for its school buildings. From the 1860s until the 1960s, Queensland school buildings were predominantly timber-framed, an easy and cost-effective approach that also enabled the government to provide facilities in remote areas. Standard designs were continually refined in response to changing needs and educational philosophy and Queensland school buildings were particularly innovative in climate control, lighting and ventilation. Standardisation produced distinctly similar schools across Queensland with complexes of typical components.
In colonial Queensland, provisional schools were a convenient means of providing an elementary education for the small, scattered and often transient rural population and became an integral part of the educational landscape. A provisional school could be opened with as few as 15 (later 12) pupils. The Board of Public Instruction gave financial assistance to local committees to set up and maintain these schools. The local committee provided a building and found a teacher, while the Board paid the teacher's salary relative to the number of pupils. If the local population declined, the provisional school closed at little expense to the Board. If the district or town developed, the provisional school was raised to state school status and provided with purpose-designed school buildings.
By 1892 the condition of provisional school buildings, which formed almost half of the colony's schools, was an embarrassment to the Department of Public Instruction. A recommended plan was introduced in 1892 to try to improve matters. This standard type (B/T6) was for a small low-set timber framed and clad building with a gable roof. It accommodated one large classroom with a front verandah, although a rear verandah was sometimes provided. The building was of single-skin construction and lined externally with chamferboards. The building had few windows and ventilation was provided by a high-level louvred vent in the gable end wall. These were often a huge improvement over the previous provisional school buildings and were constructed until . The Mount Tarampa building was a typical provisional school building (type B/T6) similar to the standard plan introduced by the Department of Public Instruction. It was a single room, gable roofed structure of the above dimensions, with a front verandah, , and a central doorway. A water tank and outbuildings were also provided.
Mount Tarampa Provisional School was re-designated as a state school in 1909. In that year, the Department of Public Instruction upgraded the majority of provisional schools to state school status by lowering the required minimum average number of pupils for a state school from 30 to 12, gradually providing these schools with new buildings designed and constructed to government standards. At Mount Tarampa, a rear verandah and a new central doorway were added to the school building, with partial corner enclosures for hat rooms. The school was also painted.
The Queensland education system recognised the importance of play in the school curriculum and the need for all-weather outdoor space. Playsheds were designed as free-standing shelters, with fixed timber seating between posts and earth or decomposed granite floors that provided covered play space and doubled as teaching space when required. These structures were timber-framed and generally open sided, although some were partially enclosed with timber boards or corrugated galvanised iron sheets. The hipped (or less frequently, gabled) roofs were clad with timber shingles or corrugated iron. Playsheds were a typical addition to state schools across Queensland between the 1890s and the 1950s, although they were less frequently constructed after the introduction of highset school buildings with understorey play areas. Built to standard designs, playsheds ranged in size relative to student numbers. A playshed, constructed at Mount Tarampa School by C Risdale, was completed by March 1912. The playshed was built to a standard 6-post design. It was constructed of exposed timber framing and had a hipped roof.
By 1914 there were 52 children attending Mount Tarampa State School, necessitating the use of the verandahs for teaching and prompting the School Committee to request an extension to the 1906 building. These extensions completed in July 1916 at a cost of £226 comprised: addition of a northern classroom, ; lining of the classrooms and rearrangement of windows reusing the original windows; centring the stairs at the front (east) and rear (west) of the extended building; and installing new wash basins at either end of the rear verandah. The tall, timber-framed casement windows from the eastern and western verandah walls were repositioned to the gable end walls. Sunshades protecting the gable end casement windows were extended from one to three bays wide to accommodate the additional windows. Two timber-framed casement windows were installed at a high level at the northern end of the western verandah wall, and two at the southern end of the eastern verandah wall. These alterations were designed to allow maximum natural light to enter from the left hand side of each student.
From 1893 the Department of Public Works greatly improved the natural ventilation and lighting of classroom interiors, experimenting with different combinations of roof ventilators, ceiling and wall vents, larger windows, dormer windows and ducting. Achieving an ideal or even adequate level of natural light in classrooms, without glare, was of critical importance to educators and became central to the design and layout of all school buildings. From around 1909 windows were rearranged and enlarged to provide a greater amount of gentle, southern light into the room and desks were rearranged so the light would fall onto students' left hand sides to avoid throwing shadows onto the pages; this presupposed that all students were right-handed. This often meant a complete transformation of the fenestration of existing buildings. Windows were larger and sills were lowered to let in more light generally. Smaller classrooms were preferred as they were easier to light correctly. Interiors became lighter and airier and met with immediate approval from educationalists.
In 1918 a teacher's residence was constructed for £588 and was ready for occupation by February 1919. This two bedroom teacher's residence (type C/R2) was one of three standard designs created between 1894 and 1914. From the outset, teacher's residences were built to a standard regulated by the Board of General Education rather than a specific design, so the form varied with each commissioned architect. Initially, residences were most often attached as annexes to the classroom building, but from the 1880s were built as detached residences. These residences were similar to the vernacular Queensland house with few, if any, education-specific requirements or features.
Residences designed by the Department of Public Works' architects, were typically of a higher-quality in design, materials and construction than most similarly-scaled private residences. The detached teacher's residence was located within the school grounds at a distance from the teaching buildings, usually with a separate, fenced yard with gardens and trees. The designs ranged from one to four bedrooms and evolved simultaneously with the teaching buildings to keep up with modern needs and styles.
Mount Tarampa State School's growing student population resulted in additions and improvements to the school grounds during the 1920s. The average number of children attending increased to 67 in 1925, prompting the Committee to purchase of land in 1922 for additional play space and a horse paddock. By 1925 a tennis court had been built, which was used for school grade tennis matches.
The provision of outdoor play space was a result of the early and continuing commitment to play-based education, particularly in primary school. Trees and gardens were planted as part of the beautification of the school. In the 1870s, schools inspector William Boyd was critical of tropical schools and amongst his recommendations was the importance of the addition of shade trees in the playground. In addition, Arbor Day celebrations began in Queensland in 1890. Landscape elements were often constructed to standard designs and were intrinsic to Queensland Government education philosophies. Educationalists believed gardening and Arbor Days instilled in young minds the value of hard work and activity, improved classroom discipline, developed aesthetic tastes, and inspired people to stay on the land. Aesthetically designed gardens were encouraged by regional inspectors. Some mature trees exist in the Mount Tarampa State School's grounds, including a Bunya tree (Araucaria Bidwillii) and a large bottle tree (Brachychiton rupestris). Three large date palms (Phoenix dactylifera) stand in the residence garden.
In the post-war years, there was little change to the school. Although planned in 1937, a verandah and bathroom addition on the northern side of the teacher's residence was not completed until about 1955, at a cost of £994. With this addition, windows along the northern wall were replaced by half-glazed, six-light timber French doors.
Between the 1960s and the 1980s a modernisation of Queensland education occurred. The Education Act 1964 was a turning point and the first major update of Queensland education's governing legislation since 1875. Effectively, a new era of state education evolved with new architectural responses needed. The Department of Education (as it had been renamed in 1957) continued to give the responsibility of building design to the architects of the Department of Public Works. Due to new materials, technologies, educational philosophies, government policies, architectural styles, and functional requirements, the evolution of standard designs became more fragmented. Rather than "improving" on the previous designs, architects began to design on a relatively clean slate, inspired by new precedents. Fundamentally, timber construction was no longer favoured and buildings were no longer predominantly high-set. The mid-1980s brought additions to Mount Tarampa State School with a modular building and an extension to library added in 1986, while in the following year a covered play area was constructed.
In 1981, 75th anniversary celebrations were held on 18 July, including an official luncheon and a book printed for the occasion. In 2006 the school celebrated its centenary and produced a history of the school.
Between 2005 and 2014, the playshed was enclosed on all sides with part-height corrugated metal sheeting. A glass sliding door was installed on the northern elevation and sliding glass windows were added to the eastern and western walls. The timber posts are still visible on the interior, although the roof framing is concealed by a ceiling of flat sheeting.
In 2015, Mount Tarampa State School continues to operate and retains the provisional school building, playshed and teacher's residence. The school is important to the area, having operated from the site since 1906 and as generations of Mount Tarampa students have been taught there. Since establishment, it has been a key social focus for the Mount Tarampa community with the grounds and buildings being the location of many social events.
Description
Mount Tarampa State School stands on an elevated, approximately site on the corner of main thoroughfares, Profkes and Mount Tarampa roads, in Mount Tarampa, approximately northwest of Ipswich. The school comprises a complex of small buildings, including an early provisional school building (1906, extended 1916), teacher's residence (1918, extended ), playshed (1912), tennis court (pre-1925) and established trees. The school is conspicuous in its rural setting.
Provisional School (1906)
Set well back from and facing east to Profkes Road, the early Provisional School building is a small one-storey, lowset, timber-framed structure; axially aligned with the school entrance. The building is clad in timber weatherboards and is sheltered by a gable roof clad with corrugated metal sheets. It has a front and rear verandah and accommodates two rooms. The northern and southern gable ends each retain original tall, timber-framed casement windows sheltered by timber hoods with battened cheeks. Stop-chamfered brackets supporting the centre of each hood are possibly original. Packed weatherboards at the gable end apexes vent the interior.
Both the front (east) and back (west) verandahs are accessed by centrally located timber stairs. The front verandah has exposed roof framing, square timber posts and a timber two-rail balustrade. A join in the fascia identifies the extent of the 1916 northern extension. The southern end of the front verandah is enclosed to form a staff kitchen; and the rear verandah is enclosed for use as staff offices. The front and rear verandah walls retain early timber-framed casement windows with high sills. Access to the interior is via original braced and ledged board doors (not in their original locations).
Originally one large classroom, the interior is divided into two rooms separated by a central hallway of part-height modern partitions. The interior walls and coved ceilings are lined with v-jointed timber boards. A continuous break in the ceiling boards over the central hallway delineates where the 1916 extension was attached. Timber tie beams are exposed within the spaces and timber lattice ceiling vents are in the northern and southern rooms.
Teacher's Residence (1918)
The teacher's residence is a highset timber-framed, weatherboard-clad building that stands on concrete stumps and is protected by a corrugated metal-clad hipped roof. The building addresses Profkes Road to the east, with bedrooms and living rooms along the north and east sides, and a kitchen with projecting stove alcove at the southwest corner. The interior is accessed via central front (east) and rear (west) timber stairs that lead to verandahs. The front verandah has timber posts and a two-rail battened balustrade, and the rear verandah has been enclosed with weatherboards to form a living space. The extension on the north side comprises a living space with a bathroom in the northwest corner. Verandah walls are single-skin, lined with v-jointed tongue-and-groove boards. The building has a variety of timber-framed windows, including original double-hung windows-those on the southern elevation are sheltered by original timber-framed hoods with battened cheeks. The understorey has timber batten screens fixed between concrete stumps; the southwest corner has been enclosed to form a utility room.
The internal layout of the residence is highly intact, comprising a central corridor running east-west between the entrance doors, with two rooms to either side. The front two rooms are bedrooms, and a bedroom and kitchen are at the rear. The kitchen has a stove recess and pantry. Internal walls are single skin with exposed studwork, and most walls are lined with v-jointed tongue-and-groove boards. Ceilings are lined in flat sheeting, and modern carpet and linoleum line the floors. Skirting, cornices and architraves are narrow with simple profiles. Retaining early hardware, internal doors are low-waisted with two-light fanlights, and doors to the northern extension are half-glazed, six-light French doors, with single-light centre-pivoting fanlights. The bathroom, toilet and kitchen fitouts are not of cultural heritage significance.
Playshed (1912)
The playshed is located to the south of the Provisional School building. It is a 6-post, timber-framed shelter with braced posts and a hipped roof clad with corrugated metal sheets. The formerly open-sided structure has been enclosed with modern part-height corrugated metal sheets. It has a concrete floor and a ceiling of flat sheet material has been inserted. Timber brackets are internally attached to the southwestern posts for storage of a wooden trestle table. Modern window and door openings are not of cultural heritage significance.
Grounds
The grounds of Mount Tarampa State School are well established and include mature trees. These include: three well-established date palms (Phoenix dactylifera) within the teacher's residence yard; a large bottle tree (Brachychiton rupestris) adjacent to the fence-line separating the school and residence; a mature Bunya pine (Araucaria bidwillii) at the front of the school near Profkes Road; and other established trees lining the boundary of the site. A tennis court, located northwest of the provisional school building, has a modern surface and is surrounded by high chain wire fencing.
Heritage listing
Mount Tarampa State School was listed on the Queensland Heritage Register on 1 May 2015 having satisfied the following criteria.
The place is important in demonstrating the evolution or pattern of Queensland's history.
Mount Tarampa State School (established in 1906 as Mount Tarampa Provisional School) is important in demonstrating the evolution of state education and its associated architecture in Queensland. It retains representative examples of standard government designs that were architectural responses to prevailing government educational philosophies. These are a Department of Public Works-designed provisional school building (1906), a playshed (1912), and a teacher's residence (1918) set within school grounds with significant landscape elements including mature plantings and a tennis court.
The place is important in demonstrating the principal characteristics of a particular class of cultural places.
Mount Tarampa State School is important in demonstrating the principal characteristics of Queensland state school complex, comprising buildings constructed to standard designs by the Department of Public Works, located on a large landscaped site with mature trees and a tennis court. The school is a good example of a modest, regional school with its small, simple teaching building that has been modified over time.
The Department of Public Works designed provisional school building (1906, extended 1916) is an excellent, intact example of its type that retains its lowset form with front and rear verandahs (now partly enclosed), timber-framed and -clad construction, gable roof, coved ceiling, early doors and windows, and natural lighting and ventilation features.
The playshed (1912) has a hipped timber-framed roof supported on braced, timber posts (all sides are now enclosed).
The teacher's residence (1918) is an excellent, intact example of the residence type of its period – a highset timber-framed and clad building with hipped roof, comprising three bedrooms, a kitchen with stove alcove, and front and rear verandahs (rear verandah is now enclosed).
The place has a strong or special association with a particular community or cultural group for social, cultural or spiritual reasons.
Schools have always played an important part in Queensland communities. They typically retain significant and enduring connections with former pupils, parents, and teachers; provide a venue for social interaction and volunteer work; and are a source of pride, symbolising local progress and aspirations.
Mount Tarampa State School has a strong and ongoing association with the Mount Tarampa community. Operating since 1906 generations of Mount Tarampa children have been taught there. The place is important for its contribution to the educational development of Mount Tarampa and is a prominent community focal point and gathering place for social events with widespread community support.
See also
History of state education in Queensland
List of schools in West Moreton
References
Attribution
Further reading
External links
Mount Tarampa State School Discover Queensland Buildings website
Queensland Heritage Register
Buildings and structures in Somerset Region
Public schools in Queensland
Articles incorporating text from the Queensland Heritage Register
|
```html
<html lang="en">
<head>
<title>Overlay Sample Program - Debugging with GDB</title>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<meta name="description" content="Debugging with GDB">
<meta name="generator" content="makeinfo 4.11">
<link title="Top" rel="start" href="index.html#Top">
<link rel="up" href="Overlays.html#Overlays" title="Overlays">
<link rel="prev" href="Automatic-Overlay-Debugging.html#Automatic-Overlay-Debugging" title="Automatic Overlay Debugging">
<link href="path_to_url" rel="generator-home" title="Texinfo Homepage">
<!--
Permission is granted to copy, distribute and/or modify this document
under the terms of the GNU Free Documentation License, Version 1.1 or
any later version published by the Free Software Foundation; with the
Invariant Sections being ``Free Software'' and ``Free Software Needs
Free Documentation'', with the Front-Cover Texts being ``A GNU Manual,''
and with the Back-Cover Texts as in (a) below.
(a) The FSF's Back-Cover Text is: ``You are free to copy and modify
this GNU Manual. Buying copies from GNU Press supports the FSF in
developing GNU and promoting software freedom.''
-->
<meta http-equiv="Content-Style-Type" content="text/css">
<style type="text/css"><!--
pre.display { font-family:inherit }
pre.format { font-family:inherit }
pre.smalldisplay { font-family:inherit; font-size:smaller }
pre.smallformat { font-family:inherit; font-size:smaller }
pre.smallexample { font-size:smaller }
pre.smalllisp { font-size:smaller }
span.sc { font-variant:small-caps }
span.roman { font-family:serif; font-weight:normal; }
span.sansserif { font-family:sans-serif; font-weight:normal; }
--></style>
</head>
<body>
<div class="node">
<p>
<a name="Overlay-Sample-Program"></a>
Previous: <a rel="previous" accesskey="p" href="Automatic-Overlay-Debugging.html#Automatic-Overlay-Debugging">Automatic Overlay Debugging</a>,
Up: <a rel="up" accesskey="u" href="Overlays.html#Overlays">Overlays</a>
<hr>
</div>
<h3 class="section">14.4 Overlay Sample Program</h3>
<p><a name="index-overlay-example-program-915"></a>
When linking a program which uses overlays, you must place the overlays
at their load addresses, while relocating them to run at their mapped
addresses. To do this, you must write a linker script (see <a href="../ld/Overlay-Description.html#Overlay-Description">Overlay Description</a>). Unfortunately,
since linker scripts are specific to a particular host system, target
architecture, and target memory layout, this manual cannot provide
portable sample code demonstrating <span class="sc">gdb</span>'s overlay support.
<p>However, the <span class="sc">gdb</span> source distribution does contain an overlaid
program, with linker scripts for a few systems, as part of its test
suite. The program consists of the following files from
<samp><span class="file">gdb/testsuite/gdb.base</span></samp>:
<dl>
<dt><samp><span class="file">overlays.c</span></samp><dd>The main program file.
<br><dt><samp><span class="file">ovlymgr.c</span></samp><dd>A simple overlay manager, used by <samp><span class="file">overlays.c</span></samp>.
<br><dt><samp><span class="file">foo.c</span></samp><dt><samp><span class="file">bar.c</span></samp><dt><samp><span class="file">baz.c</span></samp><dt><samp><span class="file">grbx.c</span></samp><dd>Overlay modules, loaded and used by <samp><span class="file">overlays.c</span></samp>.
<br><dt><samp><span class="file">d10v.ld</span></samp><dt><samp><span class="file">m32r.ld</span></samp><dd>Linker scripts for linking the test program on the <code>d10v-elf</code>
and <code>m32r-elf</code> targets.
</dl>
<p>You can build the test program using the <code>d10v-elf</code> GCC
cross-compiler like this:
<pre class="smallexample"> $ d10v-elf-gcc -g -c overlays.c
$ d10v-elf-gcc -g -c ovlymgr.c
$ d10v-elf-gcc -g -c foo.c
$ d10v-elf-gcc -g -c bar.c
$ d10v-elf-gcc -g -c baz.c
$ d10v-elf-gcc -g -c grbx.c
$ d10v-elf-gcc -g overlays.o ovlymgr.o foo.o bar.o \
baz.o grbx.o -Wl,-Td10v.ld -o overlays
</pre>
<p>The build process is identical for any other architecture, except that
you must substitute the appropriate compiler and linker script for the
target system for <code>d10v-elf-gcc</code> and <code>d10v.ld</code>.
</body></html>
```
|
```c++
///|/
///|/ PrusaSlicer is released under the terms of the AGPLv3 or higher
///|/
#ifndef libslic3r_Triangulation_hpp_
#define libslic3r_Triangulation_hpp_
#include <vector>
#include <set>
#include <libslic3r/Point.hpp>
#include <libslic3r/Polygon.hpp>
#include <libslic3r/ExPolygon.hpp>
namespace Slic3r {
class Triangulation
{
public:
Triangulation() = delete;
// Oriented connection of 2 vertices (each defined by its index).
using HalfEdge = std::pair<uint32_t, uint32_t>;
using HalfEdges = std::vector<HalfEdge>;
using Indices = std::vector<Vec3i>;
/// <summary>
/// Connect points by triangulation to create filled surface by triangles
/// Input points have to be unique
/// Inspiration for making points unique is Emboss::dilate_to_unique_points
/// </summary>
/// <param name="points">Points to connect</param>
/// <param name="edges">Constraint for edges, pair is from point(first) to
/// point(second), sorted lexicographically</param>
/// <returns>Triangles</returns>
static Indices triangulate(const Points &points,
const HalfEdges &half_edges);
static Indices triangulate(const Polygon &polygon);
static Indices triangulate(const Polygons &polygons);
static Indices triangulate(const ExPolygon &expolygon);
static Indices triangulate(const ExPolygons &expolygons);
// Map converting an original point index into the deduplicated point set.
// from_index<to_index>
using Changes = std::vector<uint32_t>;
/// <summary>
/// Create a conversion map from original indices into new ones,
/// accounting for duplicate points
/// </summary>
/// <param name="points">input set of points</param>
/// <param name="duplicits">duplicate points collected from points</param>
/// <returns>Conversion map for point index</returns>
static Changes create_changes(const Points &points, const Points &duplicits);
/// <summary>
/// Triangulation for expolygons, speed up when points are already collected
/// NOTE: Not working properly for ExPolygons with multiple points on the same coordinate
/// You should check it by "collect_changes"
/// </summary>
/// <param name="expolygons">Input shape to triangulation - define edges</param>
/// <param name="points">Points from expolygons</param>
/// <returns>Triangle indices</returns>
static Indices triangulate(const ExPolygons &expolygons, const Points& points);
/// <summary>
/// Triangulation for expolygons containing multiple points with same coordinate
/// </summary>
/// <param name="expolygons">Input shape to triangulation - define edge</param>
/// <param name="points">Points from expolygons</param>
/// <param name="changes">Index remapping applied to indices into points</param>
/// <returns>Triangle indices</returns>
static Indices triangulate(const ExPolygons &expolygons, const Points& points, const Changes& changes);
};
} // namespace Slic3r
#endif // libslic3r_Triangulation_hpp_
```
|
```powershell
# Inputs for building the SdkPlaceholder MSI with the WiX toolset (candle + light).
param(
[Parameter(Mandatory=$true)][string]$DotnetMSIOutput, # Full path of the .msi file to create
[Parameter(Mandatory=$true)][string]$WixRoot, # Directory containing candle.exe / light.exe
[Parameter(Mandatory=$true)][string]$ProductMoniker, # Product display name baked into the installer
[Parameter(Mandatory=$true)][string]$DotnetMSIVersion, # MSI build/ProductVersion value
[Parameter(Mandatory=$true)][string]$SDKBundleVersion, # SDK bundle version passed to WiX
[Parameter(Mandatory=$true)][string]$DotnetCLINugetVersion, # CLI NuGet package version
[Parameter(Mandatory=$true)][string]$VersionMajor, # Major version component
[Parameter(Mandatory=$true)][string]$VersionMinor, # Minor version component
[Parameter(Mandatory=$true)][string]$UpgradeCode, # MSI UpgradeCode GUID
[Parameter(Mandatory=$true)][string]$DependencyKeyName, # WixDependencyExtension provider key name
[Parameter(Mandatory=$true)][string]$Architecture # Target architecture (e.g. x86, x64)
)
# Compiles sdkplaceholder.wxs and provider.wxs into .wixobj files with candle.exe.
# Returns $true on success, $false when candle exits with a non-zero code.
function RunCandle
{
$result = $true
pushd "$WixRoot"
Write-Information "Running candle.."
# NOTE(review): $inputDir is never defined in this script (it is not in the
# param() block); it is presumably set in an outer scope by a caller that
# dot-sources this file — confirm, otherwise -dDotnetSrc expands to "".
$candleOutput = .\candle.exe -nologo `
-dDotnetSrc="$inputDir" `
-dMicrosoftEula="$PSScriptRoot\dummyeula.rtf" `
-dProductMoniker="$ProductMoniker" `
-dBuildVersion="$DotnetMSIVersion" `
-dSDKBundleVersion="$SDKBundleVersion" `
-dNugetVersion="$DotnetCLINugetVersion" `
-dVersionMajor="$VersionMajor" `
-dVersionMinor="$VersionMinor" `
-dUpgradeCode="$UpgradeCode" `
-dDependencyKeyName="$DependencyKeyName" `
-arch "$Architecture" `
-ext WixDependencyExtension.dll `
"$PSScriptRoot\sdkplaceholder.wxs" `
"$PSScriptRoot\provider.wxs"
Write-Information "Candle output: $candleOutput"
# $LastExitCode reflects the candle.exe invocation above.
if($LastExitCode -ne 0)
{
$result = $false
Write-Information "Candle failed with exit code $LastExitCode."
}
popd
return $result
}
# Links the .wixobj files produced by RunCandle into the final MSI with
# light.exe, reusing cached cabinets from $WixRoot\cabcache. Returns $true on
# success, $false when light exits non-zero.
function RunLight
{
$result = $true
pushd "$WixRoot"
Write-Information "Running light.."
$CabCache = Join-Path $WixRoot "cabcache"
$lightOutput = .\light.exe -nologo -ext WixUIExtension -ext WixDependencyExtension -ext WixUtilExtension `
-cultures:en-us `
sdkplaceholder.wixobj `
provider.wixobj `
-b "$PSScriptRoot" `
-reusecab `
-cc "$CabCache" `
-out $DotnetMSIOutput
Write-Information "Light output: $lightOutput"
# light.exe's exit code, not the captured output, decides success.
if($LastExitCode -ne 0)
{
$result = $false
Write-Information "Light failed with exit code $LastExitCode."
}
popd
return $result
}
# Entry point: validate inputs, compile (candle) and link (light) the MSI,
# then verify the output file exists.
Write-Information "Creating SdkPlaceholder MSI at $DotnetMSIOutput"
# Fail fast when the WiX toolset location is missing; nothing below can work.
if([string]::IsNullOrEmpty($WixRoot))
{
Exit -1
}
if(-Not (RunCandle))
{
Write-Information "Candle failed"
Exit -1
}
if(-Not (RunLight))
{
Write-Information "Light failed"
Exit -1
}
if(!(Test-Path $DotnetMSIOutput))
{
# 'throw' aborts the script on its own; the 'Exit -1' that used to follow
# it was unreachable dead code and has been removed.
throw "Unable to create the SdkPlaceholder MSI."
}
Write-Information "Successfully created SdkPlaceholder MSI - $DotnetMSIOutput"
exit $LastExitCode
```
|
Kosovo competed at the 2022 Winter Olympics in Beijing, China, from 4 to 20 February 2022.
Kosovo's team consisted of two alpine skiers (one man and one woman). This marked the first time Kosovo had a female competitor on its Winter Olympics team. Both skiers were named the country's flag bearers for the opening ceremony. However, because Kryeziu was identified as a close contact of one of the team's coaches infected with SARS-CoV-2 before the opening ceremony, only Tahiri was able to serve as the team's opening ceremony flag bearer. Meanwhile, a volunteer was the flag bearer during the closing ceremony.
Competitors
The following is the list of number of competitors participating at the Games per sport/discipline.
Alpine skiing
By meeting the basic qualification standards, Kosovo qualified one male and one female alpine skier.
References
Nations at the 2022 Winter Olympics
2022
Winter Olympics
|
```cpp
// Protocol Buffers - Google's data interchange format
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Authors: wink@google.com (Wink Saville),
// kenton@google.com (Kenton Varda)
// Based on original Protocol Buffers design by
// Sanjay Ghemawat, Jeff Dean, and others.
//
// Defines MessageLite, the abstract interface implemented by all (lite
// and non-lite) protocol message objects.
#ifndef GOOGLE_PROTOBUF_MESSAGE_LITE_H__
#define GOOGLE_PROTOBUF_MESSAGE_LITE_H__
#include <climits>
#include <google/protobuf/stubs/common.h>
#include <google/protobuf/stubs/logging.h>
#include <google/protobuf/stubs/once.h>
// Feature detection: only treat the toolchain as C++11-capable when LANG_CXX11
// is set and the compiler is not NVCC (whose C++11 support lagged).
#if LANG_CXX11 && !defined(__NVCC__)
#define PROTOBUF_CXX11 1
#else
#define PROTOBUF_CXX11 0
#endif
// 'final' is applied only when C++11 is available; otherwise it expands empty.
#if PROTOBUF_CXX11
#define PROTOBUF_FINAL final
#else
#define PROTOBUF_FINAL
#endif
// The export macro may be predefined by the build system; default to empty.
#ifndef LIBPROTOBUF_EXPORT
#define LIBPROTOBUF_EXPORT
#endif
// Deprecation annotations are compiled out in this configuration.
#define PROTOBUF_RUNTIME_DEPRECATED(message)
namespace google {
namespace protobuf {
class Arena;
namespace io {
class CodedInputStream;
class CodedOutputStream;
class ZeroCopyInputStream;
class ZeroCopyOutputStream;
}
namespace internal {
class WireFormatLite;
#ifndef SWIG
// We compute sizes as size_t but cache them as int. This function converts a
// computed size to a cached size. Since we don't proceed with serialization
// if the total size was > INT_MAX, it is not important what this function
// returns for inputs > INT_MAX. However this case should not error or
// GOOGLE_CHECK-fail, because the full size_t resolution is still returned from
// ByteSizeLong() and checked against INT_MAX; we can catch the overflow
// there.
// (Deliberate, unchecked narrowing -- see rationale above.)
inline int ToCachedSize(size_t size) { return static_cast<int>(size); }
// We mainly calculate sizes in terms of size_t, but some functions that
// compute sizes return "int". These int sizes are expected to always be
// positive. This function is more efficient than casting an int to size_t
// directly on 64-bit platforms because it avoids making the compiler emit a
// sign extending instruction, which we don't want and don't want to pay for.
inline size_t FromIntSize(int size) {
  // Widening an unsigned value is a plain zero-extension; no sign-extend
  // instruction is needed.
  const unsigned int as_unsigned = static_cast<unsigned int>(size);
  return as_unsigned;
}
// For cases where a legacy function returns an integer size. We GOOGLE_DCHECK()
// that the conversion will fit within an integer; if this is false then we
// are losing information.
inline int ToIntSize(size_t size) {
// Debug-only guard (presumably compiled out in release builds, per the usual
// DCHECK semantics); the cast below truncates silently if the guard is absent.
GOOGLE_DCHECK_LE(size, static_cast<size_t>(INT_MAX));
return static_cast<int>(size);
}
// This type wraps a variable whose constructor and destructor are explicitly
// called. It is particularly useful for a global variable, without its
// constructor and destructor run on start and end of the program lifetime.
// This circumvents the initial construction order fiasco, while keeping
// the address of the empty string a compile time constant.
//
// Pay special attention to the initialization state of the object.
// 1. The object is "uninitialized" to begin with.
// 2. Call DefaultConstruct() only if the object is uninitialized.
//    After the call, the object becomes "initialized".
// 3. Call get() and get_mutable() only if the object is initialized.
// 4. Call Destruct() only if the object is initialized.
//    After the call, the object becomes uninitialized.
template <typename T>
class ExplicitlyConstructed {
public:
// Placement-news a T into the raw storage. Call only when uninitialized.
void DefaultConstruct() {
new (&union_) T();
}
// Runs T's destructor in place. Call only when initialized.
void Destruct() {
get_mutable()->~T();
}
#if LANG_CXX11
constexpr
#endif
const T&
get() const {
return reinterpret_cast<const T&>(union_);
}
T* get_mutable() { return reinterpret_cast<T*>(&union_); }
private:
// Prefer c++14 aligned_storage, but for compatibility this will do.
// NOTE(review): alignment is approximated by the int64/pointer members; a T
// with stricter alignment than both would be under-aligned here.
union AlignedUnion {
char space[sizeof(T)];
int64 align_to_int64;
void* align_to_ptr;
} union_;
};
// Default empty string object. Don't use this directly. Instead, call
// GetEmptyString() to get the reference.
extern ExplicitlyConstructed< ::std::string> fixed_address_empty_string;
LIBPROTOBUF_EXPORT extern ProtobufOnceType empty_string_once_init_;
LIBPROTOBUF_EXPORT void InitEmptyString();
// Fast path: assumes InitEmptyString() has already run (e.g. via
// GetEmptyString() below); otherwise the returned reference names a
// not-yet-constructed object.
LIBPROTOBUF_EXPORT inline const ::std::string& GetEmptyStringAlreadyInited() {
return fixed_address_empty_string.get();
}
// Thread-safe accessor: lazily constructs the empty string exactly once via
// GoogleOnceInit, then returns the fixed-address reference.
LIBPROTOBUF_EXPORT inline const ::std::string& GetEmptyString() {
::google::protobuf::GoogleOnceInit(&empty_string_once_init_, &InitEmptyString);
return GetEmptyStringAlreadyInited();
}
// Heap space used by 'str', excluding sizeof(string) itself (per the name;
// implementation not visible here).
LIBPROTOBUF_EXPORT size_t StringSpaceUsedExcludingSelfLong(const string& str);
#endif // SWIG
} // namespace internal
// Interface to light weight protocol messages.
//
// This interface is implemented by all protocol message objects. Non-lite
// messages additionally implement the Message interface, which is a
// subclass of MessageLite. Use MessageLite instead when you only need
// the subset of features which it supports -- namely, nothing that uses
// descriptors or reflection. You can instruct the protocol compiler
// to generate classes which implement only MessageLite, not the full
// Message interface, by adding the following line to the .proto file:
//
// option optimize_for = LITE_RUNTIME;
//
// This is particularly useful on resource-constrained systems where
// the full protocol buffers runtime library is too big.
//
// Note that on non-constrained systems (e.g. servers) when you need
// to link in lots of protocol definitions, a better way to reduce
// total code footprint is to use optimize_for = CODE_SIZE. This
// will make the generated code smaller while still supporting all the
// same features (at the expense of speed). optimize_for = LITE_RUNTIME
// is best when you only have a small number of message types linked
// into your binary, in which case the size of the protocol buffers
// runtime itself is the biggest problem.
class LIBPROTOBUF_EXPORT MessageLite {
public:
inline MessageLite() {}
virtual ~MessageLite() {}
// Basic Operations ------------------------------------------------
// Get the name of this message type, e.g. "foo.bar.BazProto".
virtual string GetTypeName() const = 0;
// Construct a new instance of the same type. Ownership is passed to the
// caller.
virtual MessageLite* New() const = 0;
// Construct a new instance on the arena. Ownership is passed to the caller
// if arena is a NULL. Default implementation for backwards compatibility.
virtual MessageLite* New(::google::protobuf::Arena* arena) const;
// Get the arena, if any, associated with this message. Virtual method
// required for generic operations but most arena-related operations should
// use the GetArenaNoVirtual() generated-code method. Default implementation
// to reduce code size by avoiding the need for per-type implementations
// when types do not implement arena support.
virtual ::google::protobuf::Arena* GetArena() const { return NULL; }
// Get a pointer that may be equal to this message's arena, or may not be.
// If the value returned by this method is equal to some arena pointer, then
// this message is on that arena; however, if this message is on some arena,
// this method may or may not return that arena's pointer. As a tradeoff,
// this method may be more efficient than GetArena(). The intent is to allow
// underlying representations that use e.g. tagged pointers to sometimes
// store the arena pointer directly, and sometimes in a more indirect way,
// and allow a fastpath comparison against the arena pointer when it's easy
// to obtain.
virtual void* GetMaybeArenaPointer() const { return GetArena(); }
// Clear all fields of the message and set them to their default values.
// Clear() avoids freeing memory, assuming that any memory allocated
// to hold parts of the message will be needed again to hold the next
// message. If you actually want to free the memory used by a Message,
// you must delete it.
virtual void Clear() = 0;
// Quickly check if all required fields have values set.
virtual bool IsInitialized() const = 0;
// This is not implemented for Lite messages -- it just returns "(cannot
// determine missing fields for lite message)". However, it is implemented
// for full messages. See message.h.
virtual string InitializationErrorString() const;
// If |other| is the exact same class as this, calls MergeFrom(). Otherwise,
// results are undefined (probably crash).
virtual void CheckTypeAndMergeFrom(const MessageLite& other) = 0;
// Parsing ---------------------------------------------------------
// Methods for parsing in protocol buffer format. Most of these are
// just simple wrappers around MergeFromCodedStream(). Clear() will be
// called before merging the input.
// Fill the message with a protocol buffer parsed from the given input
// stream. Returns false on a read error or if the input is in the wrong
// format. A successful return does not indicate the entire input is
// consumed, ensure you call ConsumedEntireMessage() to check that if
// applicable.
bool ParseFromCodedStream(io::CodedInputStream* input);
// Like ParseFromCodedStream(), but accepts messages that are missing
// required fields.
bool ParsePartialFromCodedStream(io::CodedInputStream* input);
// Read a protocol buffer from the given zero-copy input stream. If
// successful, the entire input will be consumed.
bool ParseFromZeroCopyStream(io::ZeroCopyInputStream* input);
// Like ParseFromZeroCopyStream(), but accepts messages that are missing
// required fields.
bool ParsePartialFromZeroCopyStream(io::ZeroCopyInputStream* input);
// Read a protocol buffer from the given zero-copy input stream, expecting
// the message to be exactly "size" bytes long. If successful, exactly
// this many bytes will have been consumed from the input.
bool ParseFromBoundedZeroCopyStream(io::ZeroCopyInputStream* input, int size);
// Like ParseFromBoundedZeroCopyStream(), but accepts messages that are
// missing required fields.
bool ParsePartialFromBoundedZeroCopyStream(io::ZeroCopyInputStream* input,
int size);
// Parses a protocol buffer contained in a string. Returns true on success.
// This function takes a string in the (non-human-readable) binary wire
// format, matching the encoding output by MessageLite::SerializeToString().
// If you'd like to convert a human-readable string into a protocol buffer
// object, see google::protobuf::TextFormat::ParseFromString().
bool ParseFromString(const string& data);
// Like ParseFromString(), but accepts messages that are missing
// required fields.
bool ParsePartialFromString(const string& data);
// Parse a protocol buffer contained in an array of bytes.
bool ParseFromArray(const void* data, int size);
// Like ParseFromArray(), but accepts messages that are missing
// required fields.
bool ParsePartialFromArray(const void* data, int size);
// Reads a protocol buffer from the stream and merges it into this
// Message. Singular fields read from the input overwrite what is
// already in the Message and repeated fields are appended to those
// already present.
//
// It is the responsibility of the caller to call input->LastTagWas()
// (for groups) or input->ConsumedEntireMessage() (for non-groups) after
// this returns to verify that the message's end was delimited correctly.
//
// ParsefromCodedStream() is implemented as Clear() followed by
// MergeFromCodedStream().
bool MergeFromCodedStream(io::CodedInputStream* input);
// Like MergeFromCodedStream(), but succeeds even if required fields are
// missing in the input.
//
// MergeFromCodedStream() is just implemented as MergePartialFromCodedStream()
// followed by IsInitialized().
virtual bool MergePartialFromCodedStream(io::CodedInputStream* input) = 0;
// Serialization ---------------------------------------------------
// Methods for serializing in protocol buffer format. Most of these
// are just simple wrappers around ByteSize() and SerializeWithCachedSizes().
// Write a protocol buffer of this message to the given output. Returns
// false on a write error. If the message is missing required fields,
// this may GOOGLE_CHECK-fail.
bool SerializeToCodedStream(io::CodedOutputStream* output) const;
// Like SerializeToCodedStream(), but allows missing required fields.
bool SerializePartialToCodedStream(io::CodedOutputStream* output) const;
// Write the message to the given zero-copy output stream. All required
// fields must be set.
bool SerializeToZeroCopyStream(io::ZeroCopyOutputStream* output) const;
// Like SerializeToZeroCopyStream(), but allows missing required fields.
bool SerializePartialToZeroCopyStream(io::ZeroCopyOutputStream* output) const;
// Serialize the message and store it in the given string. All required
// fields must be set.
bool SerializeToString(string* output) const;
// Like SerializeToString(), but allows missing required fields.
bool SerializePartialToString(string* output) const;
// Serialize the message and store it in the given byte array. All required
// fields must be set.
bool SerializeToArray(void* data, int size) const;
// Like SerializeToArray(), but allows missing required fields.
bool SerializePartialToArray(void* data, int size) const;
// Make a string encoding the message. Is equivalent to calling
// SerializeToString() on a string and using that. Returns the empty
// string if SerializeToString() would have returned an error.
// Note: If you intend to generate many such strings, you may
// reduce heap fragmentation by instead re-using the same string
// object with calls to SerializeToString().
string SerializeAsString() const;
// Like SerializeAsString(), but allows missing required fields.
string SerializePartialAsString() const;
// Like SerializeToString(), but appends to the data to the string's existing
// contents. All required fields must be set.
bool AppendToString(string* output) const;
// Like AppendToString(), but allows missing required fields.
bool AppendPartialToString(string* output) const;
// Computes the serialized size of the message. This recursively calls
// ByteSizeLong() on all embedded messages.
//
// ByteSizeLong() is generally linear in the number of fields defined for the
// proto.
virtual size_t ByteSizeLong() const = 0;
// Legacy ByteSize() API.
PROTOBUF_RUNTIME_DEPRECATED("Please use ByteSizeLong() instead")
int ByteSize() const {
return internal::ToIntSize(ByteSizeLong());
}
// Serializes the message without recomputing the size. The message must not
// have changed since the last call to ByteSize(), and the value returned by
// ByteSize must be non-negative. Otherwise the results are undefined.
virtual void SerializeWithCachedSizes(
io::CodedOutputStream* output) const;
// Functions below here are not part of the public interface. It isn't
// enforced, but they should be treated as private, and will be private
// at some future time. Unfortunately the implementation of the "friend"
// keyword in GCC is broken at the moment, but we expect it will be fixed.
// Like SerializeWithCachedSizes, but writes directly to *target, returning
// a pointer to the byte immediately after the last byte written. "target"
// must point at a byte array of at least ByteSize() bytes. Whether to use
// deterministic serialization, e.g., maps in sorted order, is determined by
// CodedOutputStream::IsDefaultSerializationDeterministic().
virtual uint8* SerializeWithCachedSizesToArray(uint8* target) const;
// Returns the result of the last call to ByteSize(). An embedded message's
// size is needed both to serialize it (because embedded messages are
// length-delimited) and to compute the outer message's size. Caching
// the size avoids computing it multiple times.
//
// ByteSize() does not automatically use the cached size when available
// because this would require invalidating it every time the message was
// modified, which would be too hard and expensive. (E.g. if a deeply-nested
// sub-message is changed, all of its parents' cached sizes would need to be
// invalidated, which is too much work for an otherwise inlined setter
// method.)
virtual int GetCachedSize() const = 0;
// Like SerializeWithCachedSizesToArray(), but the 'deterministic' flag is
// supplied explicitly by the caller instead of being read from
// CodedOutputStream's default.
virtual uint8* InternalSerializeWithCachedSizesToArray(bool deterministic,
uint8* target) const;
private:
// TODO(gerbens) make this a pure abstract function
virtual const void* InternalGetTable() const { return NULL; }
friend class internal::WireFormatLite;
friend class Message;
// NOTE(review): macro presumably declares copy construction/assignment
// private (standard protobuf idiom; defined in stubs/common.h).
GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(MessageLite);
};
namespace internal {
// Global flag backing the proto3 unknown-field-preservation migration.
extern bool LIBPROTOBUF_EXPORT proto3_preserve_unknown_;
// DO NOT USE: For migration only. Will be removed when Proto3 defaults to
// preserve unknowns.
inline bool GetProto3PreserveUnknownsDefault() {
return proto3_preserve_unknown_;
}
// DO NOT USE: For migration only. Will be removed when Proto3 defaults to
// preserve unknowns.
void LIBPROTOBUF_EXPORT SetProto3PreserveUnknownsDefault(bool preserve);
} // namespace internal
} // namespace protobuf
} // namespace google
#endif // GOOGLE_PROTOBUF_MESSAGE_LITE_H__
```
|
Wolfgang Haase (born 1 May 1951) is a German sailor. He competed in the Flying Dutchman event at the 1980 Summer Olympics.
References
External links
1951 births
Living people
German male sailors (sport)
Olympic sailors for East Germany
Sailors at the 1980 Summer Olympics – Flying Dutchman
People from Ludwigslust-Parchim
|
The Zhou family of Runan () was a notable Chinese family which descended from Ji Lie (姬烈), the youngest son of King Ping of the Zhou dynasty in 8th century BCE China. Ji Lie's fief was at Runan County, which became the ancestral home of his descendants. Zhou Yong (周邕), an 18th-generation descendant of Ji Lie, is considered the founding father of the Zhou family of Runan. During the Eastern Jin dynasty, the Zhou family of Runan had their home located in the north of the Huai River region. The Zhou family of Runan continued to maintain its influence after the Tang dynasty.
References
Chinese clans
|
AIB Group (UK) p.l.c. is a subsidiary of Allied Irish Banks. It is registered in Northern Ireland. Regulated by the Financial Services Authority, it serves as the legal entity for the United Kingdom banking division of the group.
It trades under two names:
Allied Irish Bank (GB) in Great Britain, where it is primarily a business bank.
AIB (NI) in Northern Ireland, where it offers a full range of banking services, both to business and residential customers.
References
Banks of Northern Ireland
|
Wisconsin v. Illinois, 278 U.S. 367 (1929), also referred to as the Chicago Sanitary District Case, is an opinion of the Supreme Court of the United States which held that the equitable power of the United States can be used to impose positive action on one state in a situation in which nonaction would result in damage to the interests of other states. Pursuant to Article Three of the United States Constitution, the case was heard under the Supreme Court's original jurisdiction (i.e. it did not come to the Supreme Court from a lower court) because it involved a controversy between two states, Illinois and Wisconsin. Chief Justice William Howard Taft wrote the opinion for a unanimous Court.
The case
The city of Chicago increasingly was diverting Great Lakes waters to carry off sewage through a long-established drainage canal, the Chicago Sanitary and Ship Canal. Illinois claimed that these increasing amounts of diverted water were made necessary by Chicago's growth. Wisconsin, however, claimed that the diversion was lowering lake levels, thereby impairing its transportation facilities and abilities.
After exhaustive hearings, a special master was assigned by the Supreme Court of the United States to consider the facts of the case and fix maximum diversion at a point below that necessary for continued utilization of the drainage canal system alone, thereby requiring the construction of sewage disposal works, but the City of Chicago and the State of Illinois procrastinated. The State of Illinois took exception to the special master's findings, and the Supreme Court heard the case en banc over two days in April 1928.
Chief Justice Taft's opinion for the Court finally settled the question of the authority of the United States to intervene to enforce action by a state in such a situation. Taft later wrote, "In deciding the controversy between States, the authority of the Court to enjoin the continued perpetration of the wrong inflicted on the complainants, necessarily embraces the authority to require measures to be taken to end the conditions, within the control of the defendant state, which may stand in the way of the execution of the decree."
The Court entered its decree shortly thereafter.
References
Further reading
James Truslow Adams, Dictionary of American History, New York: Charles Scribner's Sons, 1940
External links
United States Supreme Court cases
United States Supreme Court cases of the Taft Court
1929 in United States case law
United States Supreme Court original jurisdiction cases
1929 in the environment
1929 in Wisconsin
1929 in Illinois
Lake Michigan
Water supply and sanitation in the United States
United States Constitution Article Three case law
|
```csharp
// This file is licensed under the BSD-Clause 2 license.
// See the license.txt file in the project root for more information.
using Markdig.Parsers;
namespace Markdig.Syntax;
/// <summary>
/// A list (Section 5.3 CommonMark specs)
/// </summary>
/// <seealso cref="ContainerBlock" />
public class ListBlock : ContainerBlock
{
/// <summary>
/// Initializes a new instance of the <see cref="ListBlock"/> class.
/// </summary>
/// <param name="parser">The parser used to create this block.</param>
public ListBlock(BlockParser parser) : base(parser)
{
}
/// <summary>
/// Gets or sets a value indicating whether the list is ordered.
/// </summary>
public bool IsOrdered { get; set; }
/// <summary>
/// Gets or sets the bullet character used by this list.
/// </summary>
public char BulletType { get; set; }
/// <summary>
/// Gets or sets the ordered start number (valid when <see cref="IsOrdered"/> is <c>true</c>)
/// </summary>
public string? OrderedStart { get; set; }
/// <summary>
/// Gets or sets the default ordered start ("1" for BulletType = '1')
/// </summary>
public string? DefaultOrderedStart { get; set; }
/// <summary>
/// Gets or sets the ordered delimiter character (usually `.` or `)`) found after an ordered list item.
/// </summary>
public char OrderedDelimiter { get; set; }
/// <summary>
/// Gets or sets a value indicating whether this instance is loose.
/// </summary>
public bool IsLoose { get; set; }
/// <summary>
/// Blank-line bookkeeping; presumably maintained by the list block parser
/// (not used within this class -- confirm semantics against the parser).
/// </summary>
internal int CountAllBlankLines { get; set; }
/// <summary>
/// Blank-line bookkeeping; presumably maintained by the list block parser
/// (not used within this class -- confirm semantics against the parser).
/// </summary>
internal int CountBlankLinesReset { get; set; }
}
```
|
```c++
// -*- C++ -*-
//
// This file is part of the GNU ISO C++ Library. This library is free
// software; you can redistribute it and/or modify it under the terms
// Foundation; either version 2, or (at your option) any later
// version.
// This library is distributed in the hope that it will be useful, but
// WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// along with this library; see the file COPYING. If not, write to
// the Free Software Foundation, 59 Temple Place - Suite 330, Boston,
// MA 02111-1307, USA.
// As a special exception, you may use this file as part of a free
// software library without restriction. Specifically, if other files
// instantiate templates or use macros or inline functions from this
// file, or you compile this file and link it with other files to
// produce an executable, this file does not by itself cause the
// resulting executable to be covered by the GNU General Public
// reasons why the executable file might be covered by the GNU General
// Permission to use, copy, modify, sell, and distribute this software
// is hereby granted without fee, provided that the above copyright
// notice appears in all copies, and that both that copyright notice
// and this permission notice appear in supporting documentation. None
// of the above authors, nor IBM Haifa Research Laboratories, make any
// representation about the suitability of this software for any
// purpose. It is provided "as is" without express or implied
// warranty.
/**
* @file basic_tree_policy_base.hpp
* Contains a base class for tree_like policies.
*/
#ifndef PB_DS_TREE_LIKE_POLICY_BASE_HPP
#define PB_DS_TREE_LIKE_POLICY_BASE_HPP
namespace pb_ds
{
namespace detail
{
// Shorthand for this class's full template-id, used by the const_cast below.
#define PB_DS_CLASS_C_DEC \
basic_tree_policy_base< \
Const_Node_Iterator, \
Node_Iterator, \
Allocator>
// Base for tree-like policies over map-like containers: the node iterator's
// value_type is a (key, mapped) pair, so the key is the pair's first member.
template<typename Const_Node_Iterator,
typename Node_Iterator,
typename Allocator>
struct basic_tree_policy_base
{
protected:
// Container iterator type and the value it points at.
typedef typename Node_Iterator::value_type it_type;
typedef typename std::iterator_traits< it_type>::value_type value_type;
typedef typename value_type::first_type key_type;
// const reference to the (non-const) key type, via the allocator rebind.
typedef
typename Allocator::template rebind<
typename remove_const<
key_type>::type>::other::const_reference
const_key_reference;
// const/mutable references and const pointer to the stored value.
typedef
typename Allocator::template rebind<
typename remove_const<
value_type>::type>::other::const_reference
const_reference;
typedef
typename Allocator::template rebind<
typename remove_const<
value_type>::type>::other::reference
reference;
typedef
typename Allocator::template rebind<
typename remove_const<
value_type>::type>::other::const_pointer
const_pointer;
// For a map entry the key is the pair's first member.
static inline const_key_reference
extract_key(const_reference r_val)
{
return (r_val.first);
}
virtual it_type
end() = 0;
// Calls the non-const virtual end() from a const context; the const_cast
// only retrieves an iterator and does not modify the container.
it_type
end_iterator() const
{
return (const_cast<PB_DS_CLASS_C_DEC* >(this)->end());
}
virtual
~basic_tree_policy_base()
{ }
};
// Specialization for set-like containers (both iterator parameters are the
// const iterator): the stored value is itself the key.
template<typename Const_Node_Iterator, typename Allocator>
struct basic_tree_policy_base<
Const_Node_Iterator,
Const_Node_Iterator,
Allocator>
{
protected:
typedef typename Const_Node_Iterator::value_type it_type;
typedef typename std::iterator_traits< it_type>::value_type value_type;
// In a set, value and key coincide.
typedef value_type key_type;
typedef
typename Allocator::template rebind<
typename remove_const<
key_type>::type>::other::const_reference
const_key_reference;
typedef
typename Allocator::template rebind<
typename remove_const<
value_type>::type>::other::const_reference
const_reference;
typedef
typename Allocator::template rebind<
typename remove_const<
value_type>::type>::other::reference
reference;
typedef
typename Allocator::template rebind<
typename remove_const<
value_type>::type>::other::const_pointer
const_pointer;
// Identity: the value is the key.
static inline const_key_reference
extract_key(const_reference r_val)
{
return (r_val);
}
// end() is const in this specialization, so no const_cast is needed below.
virtual it_type
end() const = 0;
it_type
end_iterator() const
{
return (end());
}
virtual
~basic_tree_policy_base()
{ }
};
#undef PB_DS_CLASS_C_DEC
} // namespace detail
} // namespace pb_ds
#endif // #ifndef PB_DS_TREE_LIKE_POLICY_BASE_HPP
```
|
BB11 can refer to:
BB11, a postcode district in the BB postcode area
Big Brother 11, a television programme in various versions
|
```typescript
import * as debug from 'debug';
import fetch from 'node-fetch';
import * as FormData from 'form-data';
import * as fs from 'fs';
import * as dns from 'dns';
import redis from './redis';
import * as tmp from 'tmp';
// Namespaced debug loggers (enabled via the DEBUG environment variable).
export const debugBase = debug('erxes-email-verifier:base');
export const debugCrons = debug('erxes-email-verifier:crons');
export const debugError = debug('erxes-email-verifier:error');
// Logs an incoming HTTP request (path, origin, body and query params)
// through the supplied debug instance.
export const debugRequest = (debugInstance, req) =>
debugInstance(`
Receiving ${req.path} request from ${req.headers.origin}
body: ${JSON.stringify(req.body || {})}
queryParams: ${JSON.stringify(req.query)}
`);
// Options accepted by sendRequest; roughly mirrors the fetch API surface.
interface IRequestParams {
url?: string;
// NOTE(review): 'path' and 'params' are declared but sendRequest never
// applies them to the URL -- confirm whether any caller relies on them.
path?: string;
method?: string;
headers?: { [key: string]: string };
params?: { [key: string]: string };
body?: { [key: string]: any };
form?: { [key: string]: any };
}
/**
 * Sends an HTTP request via `fetch` and returns the parsed response.
 *
 * - Responses with a JSON content-type are parsed as JSON; everything else
 *   is returned as text.
 * - Non-2xx responses throw an Error including the status and response body.
 * - `errorMessage` replaces the thrown error only for connection-level
 *   failures (ECONNREFUSED / ENOTFOUND).
 *
 * NOTE(review): `path`, `params` and `form` are only logged, never applied
 * to the request -- confirm whether that is intended.
 */
export const sendRequest = async (
{ url, method, headers, form, body, params }: IRequestParams,
errorMessage?: string,
) => {
debugBase(`
Sending request to
url: ${url}
method: ${method}
body: ${JSON.stringify(body)}
params: ${JSON.stringify(params)}
headers: ${JSON.stringify(headers)}
form: ${JSON.stringify(form)}
`);
try {
const options = {
method,
headers: { 'Content-Type': 'application/json', ...(headers || {}) },
} as any;
// GET requests must not carry a body.
if (method !== 'GET') {
options.body = JSON.stringify(body);
}
const response = await fetch(url, options);
if (!response.ok) {
const errorBody = await response.text();
throw new Error(
`Request failed with status ${response.status}. Response body: ${errorBody}`,
);
}
const contentType = response.headers.get('content-type');
if (contentType && contentType.includes('application/json')) {
return response.json();
} else if (contentType && contentType.includes('text/html')) {
return response.text();
} else {
// Unknown or missing content type: fall back to raw text.
return response.text();
}
} catch (e) {
// Connection-level failures surface the caller-provided message.
if (e.code === 'ECONNREFUSED' || e.code === 'ENOTFOUND') {
throw new Error(errorMessage);
} else {
const message = e.body || e.message;
throw new Error(message);
}
}
};
/**
 * Reads an environment variable by name.
 *
 * Resolution order: the variable's value when set and non-empty, then the
 * supplied default, then the empty string.
 */
export const getEnv = ({
  name,
  defaultValue,
}: {
  name: string;
  defaultValue?: string;
}): string => {
  const envValue = process.env[name];
  if (envValue) {
    return envValue;
  }
  return typeof defaultValue === 'undefined' ? '' : defaultValue;
};
/**
 * Checks whether an email's domain can receive mail, i.e. it has at least
 * one MX record. Positive results are cached in redis for 24 hours under
 * `verifier:<domain>`.
 *
 * Fix: the original resolved `true` even when the MX lookup succeeded with
 * an empty record list — inconsistent with its own caching condition, which
 * only cached domains that had at least one record.
 */
export const isValidDomain = async (email) => {
  const domain = email.split('@')[1];
  const cachedDomainResponse = await redis.get(`verifier:${domain}`);
  if (!cachedDomainResponse) {
    return new Promise((resolve) => {
      dns.resolveMx(domain, (err, addresses) => {
        if (err || addresses.length === 0) {
          // Lookup failed or no MX records: mail cannot be delivered here.
          // Negative results are intentionally not cached so transient DNS
          // failures do not stick for 24 hours.
          resolve(false);
        } else {
          redis.set(`verifier:${domain}`, 'valid', 'EX', 24 * 60 * 60);
          resolve(true);
        }
      });
    });
  }
  return cachedDomainResponse === 'valid';
};
/**
 * Syntactic email check: a local part of permitted special characters,
 * an @, then dot-separated alphanumeric/hyphen labels.
 */
export const isValidEmail = (email) => {
  const pattern =
    /^[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+@[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*$/;
  return pattern.test(email);
};
/**
 * Uploads `fileName` as multipart form data to `url` with a bearer token.
 * On success, records `{ listId, hostname }` in the redis-backed array at
 * `key` and removes the temporary file; on an API error, rethrows it.
 *
 * Fixes: the `try { ... } catch (e) { throw e; }` wrapper was a no-op and is
 * removed; `setArray` is async and is now awaited so the list update cannot
 * be lost; the pointless `await` on the synchronous `fs.unlinkSync` is gone.
 */
export const sendFile = async (
  url: string,
  token: string,
  fileName: string,
  hostname: string,
  key: string,
) => {
  const form = new FormData();
  form.append('file', fs.createReadStream(fileName));
  const result: any = await fetch(url, {
    method: 'POST',
    headers: {
      Authorization: `Bearer:${token}`,
    },
    body: form,
  }).then((r) => r.json());
  const { data, error } = result;
  if (data) {
    const listIds = await getArray(key);
    listIds.push({ listId: data.list_id, hostname });
    await setArray(key, listIds);
    tmp.setGracefulCleanup();
    fs.unlinkSync(fileName);
  } else if (error) {
    throw new Error(error.message);
  }
};
/**
 * Persists an array under `key` as a JSON string in redis.
 */
export const setArray = async (key, array) => {
  await redis.set(key, JSON.stringify(array));
};
/**
 * Loads the JSON array stored under `key`; returns [] when the key is unset.
 */
export const getArray = async (key) => {
  const stored = await redis.get(key);
  return stored ? JSON.parse(stored) : [];
};
```
|
Protodeltote muscosula, the large mossy lithacodia, is an owlet moth (family Noctuidae). The species was first described by Achille Guenée in 1852.
The MONA or Hodges number for Protodeltote muscosula is 9047.
References
Further reading
External links
Eustrotiinae
Articles created by Qbugbot
Moths described in 1852
|
```objective-c
/* Do not modify this file. Changes will be overwritten. */
/* Generated automatically by the ASN.1 to Wireshark dissector compiler */
/* packet-t124.h */
/* asn2wrs.py -p t124 -c ./t124.cnf -s ./packet-t124-template -D . -O ../.. GCC-PROTOCOL.asn ../t125/MCS-PROTOCOL.asn */
/* Input file: packet-t124-template.h */
#line 1 "./asn1/t124/packet-t124-template.h"
/* packet-t124.h
* Routines for t124 packet dissection
*
* Wireshark - Network traffic analyzer
* By Gerald Combs <gerald@wireshark.org>
*
* This program is free software; you can redistribute it and/or
* as published by the Free Software Foundation; either version 2
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
*
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#ifndef PACKET_T124_H
#define PACKET_T124_H
#include <epan/packet_info.h>
#include <epan/dissectors/packet-per.h>
extern int dissect_DomainMCSPDU_PDU(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_);
extern guint32 t124_get_last_channelId(void);
extern void t124_set_top_tree(proto_tree *tree);
extern void register_t124_ns_dissector(const char *nsKey, dissector_t dissector, int proto);
extern void register_t124_sd_dissector(packet_info *pinfo, guint32 channelId, dissector_t dissector, int proto);
/*--- Included file: packet-t124-exp.h ---*/
#line 1 "./asn1/t124/packet-t124-exp.h"
extern const value_string t124_ConnectGCCPDU_vals[];
int dissect_t124_ConnectData(tvbuff_t *tvb _U_, int offset _U_, asn1_ctx_t *actx _U_, proto_tree *tree _U_, int hf_index _U_);
int dissect_t124_ConnectGCCPDU(tvbuff_t *tvb _U_, int offset _U_, asn1_ctx_t *actx _U_, proto_tree *tree _U_, int hf_index _U_);
/*--- End of included file: packet-t124-exp.h ---*/
#line 38 "./asn1/t124/packet-t124-template.h"
#endif /* PACKET_T124_H */
```
|
```c++
/* -*- mode: C++; c-basic-offset: 4; indent-tabs-mode: nil -*- */
// vim: ft=cpp:expandtab:ts=8:sw=4:softtabstop=4:
#ident "$Id$"
/*
COPYING CONDITIONS NOTICE:
This program is free software; you can redistribute it and/or modify
published by the Free Software Foundation, and provided that the
following conditions are met:
* Redistributions of source code must retain this COPYING
CONDITIONS NOTICE, the COPYRIGHT NOTICE (below), the
DISCLAIMER (below), the UNIVERSITY PATENT NOTICE (below), the
PATENT MARKING NOTICE (below), and the PATENT RIGHTS
GRANT (below).
* Redistributions in binary form must reproduce this COPYING
CONDITIONS NOTICE, the COPYRIGHT NOTICE (below), the
DISCLAIMER (below), the UNIVERSITY PATENT NOTICE (below), the
PATENT MARKING NOTICE (below), and the PATENT RIGHTS
GRANT (below) in the documentation and/or other materials
provided with the distribution.
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
COPYRIGHT NOTICE:
TokuFT, Tokutek Fractal Tree Indexing Library.
DISCLAIMER:
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
UNIVERSITY PATENT NOTICE:
The technology is licensed by the Massachusetts Institute of
Technology, Rutgers State University of New Jersey, and the Research
Foundation of State University of New York at Stony Brook under
United States of America Serial No. 11/760379 and to the patents
and/or patent applications resulting from it.
PATENT MARKING NOTICE:
This software is covered by US Patent No. 8,185,551.
This software is covered by US Patent No. 8,489,638.
PATENT RIGHTS GRANT:
"THIS IMPLEMENTATION" means the copyrightable works distributed by
Tokutek as part of the Fractal Tree project.
"PATENT CLAIMS" means the claims of patents that are owned or
licensable by Tokutek, both currently or in the future; and that in
the absence of this license would be infringed by THIS
IMPLEMENTATION or by using or running THIS IMPLEMENTATION.
"PATENT CHALLENGE" shall mean a challenge to the validity,
patentability, enforceability and/or non-infringement of any of the
PATENT CLAIMS or otherwise opposing any of the PATENT CLAIMS.
Tokutek hereby grants to you, for the term and geographical scope of
the PATENT CLAIMS, a non-exclusive, no-charge, royalty-free,
irrevocable (except as stated in this section) patent license to
make, have made, use, offer to sell, sell, import, transfer, and
otherwise run, modify, and propagate the contents of THIS
IMPLEMENTATION, where such license applies only to the PATENT
CLAIMS. This grant does not include claims that would be infringed
only as a consequence of further modifications of THIS
IMPLEMENTATION. If you or your agent or licensee institute or order
or agree to the institution of patent litigation against any entity
(including a cross-claim or counterclaim in a lawsuit) alleging that
THIS IMPLEMENTATION constitutes direct or contributory patent
infringement, or inducement of patent infringement, then any rights
such litigation is filed. If you or your agent or exclusive
licensee institute or order or agree to the institution of a PATENT
CHALLENGE, then Tokutek may terminate any rights granted to you
*/
#include "test.h"
// The helgrind2 test performs a DB->get() in two different concurrent threads.
#include <arpa/inet.h>
#include <db.h>
#include <pthread.h>
#include <sys/stat.h>
#include <unistd.h>
#include <memory.h>
DB_ENV *env;
DB *db;
// Creates a fresh TOKU_TEST_FILENAME directory, opens an environment and
// test.db inside it, and seeds the DB with keys htonl(0..9), each mapped to
// a 10-byte zero-filled value (startb later verifies these zeros).
static void initialize (void) {
    int r;
    toku_os_recursive_delete(TOKU_TEST_FILENAME);
    toku_os_mkdir(TOKU_TEST_FILENAME, 0777);
    // setup environment
    {
        r = db_env_create(&env, 0); assert(r == 0);
        r = env->set_redzone(env, 0); CKERR(r);
        env->set_errfile(env, stdout);
        r = env->open(env, TOKU_TEST_FILENAME, DB_INIT_MPOOL + DB_PRIVATE + DB_CREATE, 0777);
        assert(r == 0);
    }
    // setup DB (txn == 0: no transaction)
    {
        DB_TXN *txn = 0;
        r = db_create(&db, env, 0); assert(r == 0);
        r = db->open(db, txn, "test.db", 0, DB_BTREE, DB_CREATE, 0777); assert(r == 0);
    }
    // Put some stuff in
    {
        char v[10];
        DB_TXN *txn = 0;
        int i;
        const int n = 10;
        memset(v, 0, sizeof(v));
        // htonl stores keys big-endian so the DB's byte order matches numeric order.
        for (i=0; i<n; i++) {
            int k = htonl(i);
            DBT key, val;
            r = db->put(db, txn, dbt_init(&key, &k, sizeof k), dbt_init(&val, v, sizeof(v)), 0);
            assert(r == 0);
        }
    }
}
// Tears down: closes the DB first, then the environment, asserting success.
static void finish (void) {
    int r;
    r = db->close(db, 0); assert(r==0);
    r = env->close(env, 0); assert(r==0);
}
// Thread A: puts one extra key (htonl(99)) into the DB while thread B reads.
//
// Fix: the original assigned `val.data = data` *before* `memset(&val, ...)`,
// so the pointer (and the later DB_DBT_MALLOC flag, which is meaningless for
// a put anyway) were wiped and a zero-length value was stored. The DBT is now
// initialized after zeroing, storing 10 zero bytes like initialize() does.
static void *starta(void* ignore __attribute__((__unused__))) {
    DB_TXN *txn = 0;
    DBT key, val;
    char data[10];
    memset(data, 0, sizeof(data));
    memset(&key, 0, sizeof(key));
    memset(&val, 0, sizeof(val));
    int k = htonl(99);
    int r = db->put(db, txn, dbt_init(&key, &k, sizeof k), dbt_init(&val, data, sizeof data), 0);
    assert(r==0);
    //printf("val.data=%p\n", val.data);
    return 0;
}
// Thread B: gets key htonl(0) (written by initialize) and verifies the value
// is 10 zero bytes. DB_DBT_MALLOC makes the DB allocate val.data, which is
// released with toku_free.
static void *startb(void* ignore __attribute__((__unused__))) {
    DB_TXN *txn = 0;
    DBT key, val;
    memset(&key, 0, sizeof(key));
    memset(&val, 0, sizeof(val));
    int k = htonl(0);
    val.flags |= DB_DBT_MALLOC;
    int r = db->get(db, txn, dbt_init(&key, &k, sizeof k), &val, 0);
    assert(r==0);
    //printf("val.data=%p\n", val.data);
    int i; for (i=0; i<10; i++) assert(((char*)val.data)[i]==0);
    toku_free(val.data);
    return 0;
}
// Entry point: runs a put (starta) and a get (startb) concurrently against
// the same DB to exercise thread-safety under helgrind (see header comment).
int
test_main (int argc, char * const argv[]) {
    parse_args(argc, argv);
    pthread_t a,b;
    initialize();
    { int x = pthread_create(&a, NULL, starta, NULL); assert(x==0); }
    { int x = pthread_create(&b, NULL, startb, NULL); assert(x==0); }
    { int x = pthread_join(a, NULL); assert(x==0); }
    { int x = pthread_join(b, NULL); assert(x==0); }
    finish();
    return 0;
}
```
|
```xml
<?xml version="1.0" encoding="UTF-8"?>
<!-- You may freely edit this file. See harness/README in the NetBeans platform -->
<!-- for some information on what you could do (e.g. targets to override). -->
<!-- If you delete this file and reopen the project it will be recreated. -->
<project name="org.graalvm.visualvm.modules.tracer" default="netbeans" basedir=".">
<description>Builds, tests, and runs the project org.graalvm.visualvm.modules.tracer.</description>
<import file="nbproject/build-impl.xml"/>
</project>
```
|
Live in Colombia is a live concert performance by the Alan Parsons Symphonic Project released on double CD, triple vinyl and as a DVD on 27 May 2016 on the earMUSIC label. The show was recorded live in the Parque de los Pies Descalzos (Barefoot Park) in Medellín, Colombia on 31 August 2013.
Track listing
All songs written by Eric Woolfson and Alan Parsons.
"I Robot" (Instrumental) – 6:24
"Damned If I Do" (Lead vocal P.J. Olsson) – 4:33
"Don't Answer Me" (Lead vocal Alan Parsons) – 4:36
"Breakdown" (Lead vocal Todd Cooper) – 4:04
"The Raven" (Lead vocal Alan Parsons) – 2:51
"Time" (Lead vocal P.J. Olsson) – 5:29
"I Wouldn't Want to Be Like You" (Lead vocal Alastair Greene) – 4:59
"La Sagrada Familia" (Lead vocal Todd Cooper) – 6:04
"I. The Turn of a Friendly Card (Part One)" (Lead vocal P.J. Olsson) – 2:53
"II. Snake Eyes" (Lead vocal P.J. Olsson) – 3:00
"III. The Ace of Swords" (Instrumental) – 2:47
"IV. Nothing Left to Lose" (Lead vocal Alan Parsons) – 4:34
"V. The Turn of a Friendly Card (Part Two)" (Lead vocal P.J. Olsson) – 4:22
"What Goes Up..." (Lead vocal Todd Cooper) – 4:37
"Luciferama" (Instrumental) – 5:21
"Silence and I" (Lead vocal P.J. Olsson) – 7:46
"Prime Time" (Lead vocal Alastair Greene) – 6:34
"Intermezzo" (Instrumental) – 1:38
"Sirius" (Instrumental) – 2:12
"Eye in the Sky" (Lead vocal Alan Parsons) – 5:11
"Old and Wise" (Lead vocal P.J. Olsson) – 5:39
"Games People Play" (Lead Vocal P.J. Olsson) – 4:54
Personnel
Alan Parsons – keyboards, guitar, vocals, producer
P.J. Olsson – vocals, guitar
Guy Erez – bass, vocals
Alastair Greene – guitar, vocals
Todd Cooper – saxophone, guitar, percussion, vocals
Danny Thompson – drums, vocals
Andy Kanavan – percussion
Tom Brooks – keyboards, vocals
Alejandro Posada – orchestra and choir conductor
References
Alan Parsons albums
Concept albums
2016 albums
Albums produced by Alan Parsons
|
```objective-c
/*
* This file is part of the SDWebImage package.
* (c) Olivier Poitrey <rs@dailymotion.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
#import <Foundation/Foundation.h>
#import "SDWebImageDownloader.h"
#import "SDWebImageOperation.h"
/**
Describes a downloader operation. If one wants to use a custom downloader op, it needs to inherit from `NSOperation` and conform to this protocol
For the description about these methods, see `SDWebImageDownloaderOperation`
@note If your custom operation class does not use `NSURLSession` at all, do not implement the optional methods and session delegate methods.
*/
@protocol SDWebImageDownloaderOperation <NSURLSessionTaskDelegate, NSURLSessionDataDelegate>
@required
/// Creates a download operation that runs `request` inside `session` with the given downloader options.
- (nonnull instancetype)initWithRequest:(nullable NSURLRequest *)request
                              inSession:(nullable NSURLSession *)session
                                options:(SDWebImageDownloaderOptions)options;
/// Variant that also carries an `SDWebImageContext` of extra options the options enum cannot hold.
- (nonnull instancetype)initWithRequest:(nullable NSURLRequest *)request
                              inSession:(nullable NSURLSession *)session
                                options:(SDWebImageDownloaderOptions)options
                                context:(nullable SDWebImageContext *)context;
/// Registers progress/completion callbacks; returns a token usable with `-cancel:`.
- (nullable id)addHandlersForProgress:(nullable SDWebImageDownloaderProgressBlock)progressBlock
                            completed:(nullable SDWebImageDownloaderCompletedBlock)completedBlock;
/// Cancels the callbacks identified by `token`.
- (BOOL)cancel:(nullable id)token;
/// The request used by the operation's task.
@property (strong, nonatomic, readonly, nullable) NSURLRequest *request;
/// The response returned by the operation's task.
@property (strong, nonatomic, readonly, nullable) NSURLResponse *response;
@optional
/// The operation's underlying session task.
@property (strong, nonatomic, readonly, nullable) NSURLSessionTask *dataTask;
/// Network metrics collected for the task.
@property (strong, nonatomic, readonly, nullable) NSURLSessionTaskMetrics *metrics API_AVAILABLE(macosx(10.12), ios(10.0), watchos(3.0), tvos(10.0));
/// Credential used for authentication challenges.
@property (strong, nonatomic, nullable) NSURLCredential *credential;
/// Minimum progress-percent delta between consecutive progress callbacks (0.0-1.0).
@property (assign, nonatomic) double minimumProgressInterval;
@end
/**
The download operation class for SDWebImageDownloader.
*/
@interface SDWebImageDownloaderOperation : NSOperation <SDWebImageDownloaderOperation>
/**
 * The request used by the operation's task.
 */
@property (strong, nonatomic, readonly, nullable) NSURLRequest *request;
/**
 * The response returned by the operation's task.
 */
@property (strong, nonatomic, readonly, nullable) NSURLResponse *response;
/**
 * The operation's task
 */
@property (strong, nonatomic, readonly, nullable) NSURLSessionTask *dataTask;
/**
 * The collected metrics from `-URLSession:task:didFinishCollectingMetrics:`.
 * This can be used to collect the network metrics like download duration, DNS lookup duration, SSL handshake duration, etc. See Apple's documentation: path_to_url
 */
@property (strong, nonatomic, readonly, nullable) NSURLSessionTaskMetrics *metrics API_AVAILABLE(macosx(10.12), ios(10.0), watchos(3.0), tvos(10.0));
/**
 * The credential used for authentication challenges in `-URLSession:task:didReceiveChallenge:completionHandler:`.
 *
 * This will be overridden by any shared credentials that exist for the username or password of the request URL, if present.
 */
@property (strong, nonatomic, nullable) NSURLCredential *credential;
/**
 * The minimum interval about progress percent during network downloading. Which means the next progress callback and current progress callback's progress percent difference should be larger or equal to this value. However, the final finish download progress callback is not affected.
 * The value should be 0.0-1.0.
 * @note If you're using progressive decoding feature, this will also affect the image refresh rate.
 * @note This value may enhance the performance if you don't want progress callback too frequently.
 * Defaults to 0, which means each time we receive the new data from URLSession, we callback the progressBlock immediately.
 */
@property (assign, nonatomic) double minimumProgressInterval;
/**
 * The options for the receiver.
 */
@property (assign, nonatomic, readonly) SDWebImageDownloaderOptions options;
/**
 * The context for the receiver.
 */
@property (copy, nonatomic, readonly, nullable) SDWebImageContext *context;
/**
 * Initializes a `SDWebImageDownloaderOperation` object
 *
 * @see SDWebImageDownloaderOperation
 *
 * @param request the URL request
 * @param session the URL session in which this operation will run
 * @param options downloader options
 *
 * @return the initialized instance
 */
- (nonnull instancetype)initWithRequest:(nullable NSURLRequest *)request
                              inSession:(nullable NSURLSession *)session
                                options:(SDWebImageDownloaderOptions)options;
/**
 * Initializes a `SDWebImageDownloaderOperation` object
 *
 * @see SDWebImageDownloaderOperation
 *
 * @param request the URL request
 * @param session the URL session in which this operation will run
 * @param options downloader options
 * @param context A context contains different options to perform specify changes or processes, see `SDWebImageContextOption`. This hold the extra objects which `options` enum can not hold.
 *
 * @return the initialized instance
 */
- (nonnull instancetype)initWithRequest:(nullable NSURLRequest *)request
                              inSession:(nullable NSURLSession *)session
                                options:(SDWebImageDownloaderOptions)options
                                context:(nullable SDWebImageContext *)context NS_DESIGNATED_INITIALIZER;
/**
 * Adds handlers for progress and completion. Returns a token that can be passed to -cancel: to cancel this set of
 * callbacks.
 *
 * @param progressBlock the block executed when a new chunk of data arrives.
 * @note the progress block is executed on a background queue
 * @param completedBlock the block executed when the download is done.
 * @note the completed block is executed on the main queue for success. If errors are found, there is a chance the block will be executed on a background queue
 *
 * @return the token to use to cancel this set of handlers
 */
- (nullable id)addHandlersForProgress:(nullable SDWebImageDownloaderProgressBlock)progressBlock
                            completed:(nullable SDWebImageDownloaderCompletedBlock)completedBlock;
/**
 * Cancels a set of callbacks. Once all callbacks are canceled, the operation is canceled.
 *
 * @param token the token representing a set of callbacks to cancel
 *
 * @return YES if the operation was stopped because this was the last token to be canceled. NO otherwise.
 */
- (BOOL)cancel:(nullable id)token;
@end
```
|
```yaml
args:
- auto: PREDEFINED
default: true
description: A comma-separated list of main domains. The subdomains will be compared to this list of main domains. For example, google.com.
isArray: true
name: main_domains
required: true
- auto: PREDEFINED
default: true
description: A comma-separated list of subdomains. These subdomains will be compared to the list of main domains.
isArray: true
name: possible_sub_domains_to_test
required: true
comment: This script accepts multiple values for both arguments and will iterate through each of the domains to check if the specified subdomains are located in at least one of the specified main domains. If the tested subdomain is in one of the main domains, the result will be true. For example, if the domain_to_check values are apps.paloaltonetworks.com and apps.paloaltonetworks.bla and the domains_to_compare values are paloaltonetworks.com and demisto.com, the result for apps.paloaltonetworks.com will be true since it is a part of the paloaltonetworks.com domain. The result for apps.paloaltonetworks.bla will be false since it is not a part of the paloaltonetworks.com or demisto.com domain.
commonfields:
id: IsInternalDomainName
version: -1
name: IsInternalDomainName
outputs:
- contextPath: IsInternalDomain.DomainToTest
description: The subdomain that was checked to see if it is part of the specified domains.
type: String
- contextPath: IsInternalDomain.IsInternal
description: True, if the subdomain is part of one of the specified domains. Otherwise, false.
type: Boolean
- contextPath: IsInternalDomain.DomainToCompare
description: The names of the main domains that were used to compare the subdomains to.
type: String
script: '-'
subtype: python3
tags:
- Utility
timeout: '0'
type: python
dockerimage: demisto/python3:3.10.13.89009
fromversion: 5.0.0
```
|
Peter Acheson (born 15 March 1990) is an Irish Gaelic football player who plays at inter-county level for Tipperary, and plays his club football for Moyle Rovers.
Career
He played minor football for Tipperary in 2007 and 2008, and under-21 football from 2009 until 2011. He also played minor hurling in 2007 and 2008 for Tipperary.
Acheson made his championship debut for the Tipperary footballers in 2010 against Kerry on 16 May in a 2-18 to 2-6 defeat where he scored a point.
On 31 July 2016, he started in midfield as Tipperary defeated Galway in the 2016 All-Ireland Quarter-finals at Croke Park to reach their first All-Ireland semi-final since 1935.
On 21 August 2016, Tipperary were beaten in the semi-final by Mayo on a 2-13 to 0-14 scoreline.
Acheson relocated to Dubai after the 2016 season.
In 2018, Acheson while still being based in Dubai commuted to play in the 2018 Tipperary Senior Football Championship with Moyle Rovers reaching the final on 28 October against Ardfinnan.
Honours
Tipperary
National Football League Division 4 Winners: 2014
Munster Under-21 Football Championship (1): 2010
Munster Minor Hurling Championship (1): 2007
All-Ireland Minor Hurling Championship (1): 2007
Moyle Rovers
Tipperary Senior Football Championship (3): 2007, 2009, 2018
References
External links
Tipperary GAA Profile
1990 births
Living people
Moyle Rovers Gaelic footballers
Tipperary inter-county Gaelic footballers
Place of birth missing (living people)
|
```vue
<!--{}-->
<script>
class A {
b
(
) {
return 42
}
}
</script>
```
|
```python
import attr
import pytest
import grafanalib.validators as validators
def create_attribute():
    # Builds a minimal attr.Attribute to satisfy the validator call signature
    # (validators receive (instance, attribute, value)); only `name` matters
    # for these tests, the rest are required constructor arguments.
    return attr.Attribute(
        name='x',
        default=None,
        validator=None,
        repr=True,
        cmp=None,
        eq=True,
        order=False,
        hash=True,
        init=True,
        inherited=False)
def test_is_in():
    """is_in accepts a value that is one of the allowed choices."""
    validate = validators.is_in((1, 2, 3))
    assert validate(None, create_attribute(), 1) is None
def test_is_in_raises():
    """is_in rejects a value that is not among the allowed choices."""
    validate = validators.is_in((1, 2, 3))
    with pytest.raises(ValueError):
        validate(None, create_attribute(), 0)
@pytest.mark.parametrize("item", (
    '24h', '7d', '1M', '+24h', '-24h', '60s', '2m'))
def test_is_interval(item):
    # Valid Grafana interval strings: optionally signed number plus a unit.
    assert validators.is_interval(None, create_attribute(), item) is None
def test_is_interval_raises():
    # A bare number without a unit is not a valid interval.
    with pytest.raises(ValueError):
        validators.is_interval(None, create_attribute(), '1')
@pytest.mark.parametrize("color", (
    "#111111", "#ffffff"))
def test_is_color_code(color):
    # Valid colors: '#' followed by exactly six hex digits.
    res = validators.is_color_code(None, create_attribute(), color)
    assert res is None
@pytest.mark.parametrize("color", (
    "111111", "#gggggg", "#1111111", "#11111"))
def test_is_color_code_raises(color):
    # Rejected: missing '#', non-hex digits, wrong length.
    with pytest.raises(ValueError):
        validators.is_color_code(None, create_attribute(), color)
def test_list_of():
    """is_list_of accepts a sequence whose elements all match the type."""
    validate = validators.is_list_of(int)
    assert validate(None, create_attribute(), (1, 2, 3)) is None
def test_list_of_raises():
    """is_list_of rejects a sequence containing a wrongly-typed element.

    Fixes: the original passed ``("a")`` -- a plain string, not the intended
    one-element tuple -- and constructed the validator inside the
    ``pytest.raises`` block, so a failure during construction would have
    satisfied the assertion too. The validator is now built outside and the
    tuple is spelled ``("a",)``.
    """
    validate = validators.is_list_of(int)
    with pytest.raises(ValueError):
        validate(None, create_attribute(), ("a",))
```
|
Jefferson Alfredo Intriago Mendoza (born 4 June 1996) is an Ecuadorian professional footballer who plays for Liga MX club Mazatlán.
Club career
He began his career with L.D.U. Quito in 2014.
International career
Intriago made his debut for Ecuador on October 5, 2017 against Chile.
Career statistics
Honours
LDU Quito
Ecuadorian Serie A: 2018
References
External links
Jefferson Intriago profile at Federación Ecuatoriana de Fútbol
1996 births
Living people
Men's association football midfielders
Ecuadorian men's footballers
Ecuadorian expatriate men's footballers
Ecuador men's international footballers
Ecuador men's under-20 international footballers
Ecuadorian Serie A players
Liga MX players
L.D.U. Quito footballers
FC Juárez footballers
People from Manabí Province
2019 Copa América players
Expatriate men's footballers in Mexico
|
Pike of Stickle, also known as Pike o’ Stickle, is a fell in the English Lake District. It reaches a height of and is situated in the central part of the national park in the valley of Great Langdale. The fell is one of three fells which make up the picturesque Langdale Pikes (the others being Harrison Stickle and Loft Crag), one of the best-known areas in Lakeland. A "stickle" is a hill with a steep prominent rocky top, while a "pike" is a hill with a peaked summit, the name being therefore partly tautological.
Topography
The Langdale Pikes form a raised rocky parapet around the southern and eastern edges of a high tableland centred upon Thunacar Knott. Pike of Stickle stands at the western end of this system and its crags fall south from the summit, presenting an arresting view from the valley floor below, or from further afield.
Loft Crag stands next along the rampart, with Thorn Crag and Harrison Stickle further to the east. 'Behind' Pike of Stickle to the north is the depression of Harrison Combe, beyond which are the twin tops of Thunacar Knott. Westward the height of the land gradually falls away to Martcrag Moor, a wide plateau with a few small tarns near the summit . Martcrag Moor represents the end of the Central Fells as defined by Alfred Wainwright, providing a high-level connection to Rossett Pike in the Southern Fells.
Geology
The rearward slopes show evidence of the Pavey Ark Member, pebbly sandstone and breccia. The Langdale face displays several strata: from the top The Lingmell Formation, Crinkle Member and Bad Step Tuff. These are composed, respectively, of tuff, lapilli-tuff and breccia; rhyolitic tuff and breccia; and rhyolitic lava-like tuff.
Summit
Despite the peaked profile the summit is wide enough for a sizeable cairn surrounded by a small level area. Loft Crag and Gimmer Crag steal the attention in the foreground while Bowfell impresses across Langdale. A wide swathe of the Southern Fells is in view, whilst even distant Skiddaw puts in an appearance.
Ascents
Pike of Stickle rises steeply from Langdale, culminating in a narrow tapering summit which gives excellent views of the head of the valley, the fells of Bow Fell and Crinkle Crags showing well. The fell is usually climbed from Great Langdale with either the New or Old Dungeon Ghyll Hotels as the starting points. There are a number of routes, the most common ascent being a path that slants across the hillside from the New hotel passing between Thorn Crag and Gimmer Crag and then turning left at the col. A quieter route is by Troughton Beck; the walker starts from the Old hotel and goes towards the head of the valley before bearing right and following a zigzag path at the side of Troughton Beck. This route gives the walker an unusual view of the fell from this unfrequented side. There is another route directly up the Stickle Stone Shoot: this route is steep and has become severely eroded in recent years and is no longer recommended as a means of ascent or descent. The Langdale or Borrowdale sides of Stake Pass can also be used, giving access onto Martcrag Moor.
Stone axe factory
Pike of Stickle is the site of one of the most important neolithic stone axe factories in Europe. The most prominent quarries are situated above the scree slopes on the steep southern face of the fell. The factory was set up here because of a vein of greenstone, a very hard volcanic rock, which comes to the surface around the head of the valley. Evidence of axe manufacture have been found in many areas of Great Langdale but it is the screes of Pike of Stickle which have yielded the most discoveries. There is a small cave at the top of the Stickle Stone Shoot near the summit of the fell which was part of the stone axe factory.
References
Fells of the Lake District
Hewitts of England
Nuttalls
South Lakeland District
|
Breaking Away is a 1979 film.
Breaking Away may also refer to:
Breaking Away (TV series), a 1980 series based on the film
Breaking Away (Tantrum album), 2005
Breaking Away (Jaki Graham album), 1986 which includes a song of the same name
Otryvayas (Breaking Away), an album by Mashina Vremeni
"Breaking Away", a song by Ratatat from Ratatat
"Breaking Away", a song by Status Quo from Whatever You Want
See also
Breakin' Away (disambiguation)
Breakaway (disambiguation)
|
```c
/**
* @license Apache-2.0
*
*
*
* path_to_url
*
* Unless required by applicable law or agreed to in writing, software
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
#include <stdlib.h>
#include <stdio.h>
#include <math.h>
#include <time.h>
#include <sys/time.h>
#define NAME "truncf"
#define ITERATIONS 1000000
#define REPEATS 3
/**
* Prints the TAP version.
*/
/**
* Prints the TAP version line.
*/
static void print_version( void ) {
	puts( "TAP version 13" );
}
/**
* Prints the TAP summary.
*
* @param total total number of tests
* @param passing total number of passing tests
*/
/**
* Prints the TAP summary (plan line plus total/pass counts).
*
* @param total total number of tests
* @param passing total number of passing tests
*/
static void print_summary( int total, int passing ) {
	printf( "#\n" );
	printf( "1..%d\n", total ); // TAP plan
	printf( "# total %d\n", total );
	printf( "# pass %d\n", passing );
	printf( "#\n" );
	printf( "# ok\n" );
}
/**
* Prints benchmarks results.
*
* @param elapsed elapsed time in seconds
*/
/**
* Prints benchmark results as a TAP YAML block (iterations, elapsed, rate).
*
* @param elapsed elapsed time in seconds
*/
static void print_results( double elapsed ) {
	// Throughput in iterations per second:
	double rate = (double)ITERATIONS / elapsed;
	printf( "  ---\n" );
	printf( "  iterations: %d\n", ITERATIONS );
	printf( "  elapsed: %0.9f\n", elapsed );
	printf( "  rate: %0.9f\n", rate );
	printf( "  ...\n" );
}
/**
* Returns a clock time.
*
* @return clock time
*/
/**
* Returns the current wall-clock time in fractional seconds.
*
* @return clock time
*/
static double tic( void ) {
	struct timeval tv;
	gettimeofday( &tv, NULL );
	return (double)tv.tv_sec + ( (double)tv.tv_usec / 1.0e6 );
}
/**
* Generates a random number on the interval [0,1).
*
* @return random number
*/
/**
* Generates a random number on the interval [0,1).
*
* @return random number
*/
static float rand_float( void ) {
	// Dividing by RAND_MAX+1 keeps the result strictly below 1.
	return (float)rand() / ( (float)RAND_MAX + 1.0f );
}
/**
* Runs a benchmark.
*
* @return elapsed time in seconds
*/
/**
* Runs a benchmark: times ITERATIONS calls of truncf on random inputs in
* [-500,500), checking each result against NaN via the `y != y` idiom.
*
* The second `y != y` check after timing reads the final result so the
* compiler cannot eliminate the loop as dead code. Note: `y` is only
* assigned inside the loop, so this relies on ITERATIONS >= 1.
*
* @return elapsed time in seconds
*/
static double benchmark( void ) {
	double elapsed;
	double t;
	float x;
	float y;
	int i;
	t = tic();
	for ( i = 0; i < ITERATIONS; i++ ) {
		x = ( 1000.0f * rand_float() ) - 500.0f;
		y = truncf( x );
		if ( y != y ) {
			printf( "should not return NaN\n" );
			break;
		}
	}
	elapsed = tic() - t;
	if ( y != y ) {
		printf( "should not return NaN\n" );
	}
	return elapsed;
}
/**
* Main execution sequence.
*/
/**
* Main execution sequence.
*
* Emits TAP output: a version line, one benchmark block per repeat, and a
* closing summary. Returns 0 on completion (the original relied on C99's
* implicit return from main; it is now explicit).
*/
int main( void ) {
	double elapsed;
	int i;
	// Use the current time to seed the random number generator:
	srand( time( NULL ) );
	print_version();
	for ( i = 0; i < REPEATS; i++ ) {
		printf( "# c::%s\n", NAME );
		elapsed = benchmark();
		print_results( elapsed );
		printf( "ok %d benchmark finished\n", i+1 );
	}
	print_summary( REPEATS, REPEATS );
	return 0;
}
```
|
Paragnetina immarginata, the beautiful stone, is a species of common stonefly in the family Perlidae. It is found in North America.
References
External links
Perlidae
Articles created by Qbugbot
Insects described in 1823
|
Over End may refer to:
Over End, Cambridgeshire
Over End, Derbyshire
|
A hazmat suit (hazardous materials suit) is a piece of personal protective equipment that consists of an impermeable whole-body garment worn as protection against hazardous materials.
Such suits are often combined with self-contained breathing apparatus (SCBA) to ensure a supply of breathable air. Hazmat suits are used by firefighters, emergency medical technicians, paramedics, researchers, personnel responding to toxic spills, specialists cleaning up contaminated facilities, and workers in toxic environments.
History
The hazmat suit is believed to originate from the Manchurian plague of 1910–1911, wherein Malayan physician Wu Lien-teh promoted the use of various forms of personal protective equipment to prevent the spread of the pneumonic plague.
Capabilities
Overview
The United States Department of Homeland Security defines a hazmat suit as "an overall garment worn to protect people from hazardous materials or substances, including chemicals, biological agents, or radioactive materials." More generally, hazmat suits may provide protection from:
Chemical agents through the use of appropriate barrier materials like teflon, heavy PVC or rubber and Tyvek
Nuclear agents possibly through radiation shielding in the lining, but more importantly by preventing direct contact with or inhalation of radioactive particles or gas
Biological agents through fully sealed systems—often at overpressure to prevent contamination even if the suit is damaged or using powered air purifying respirators with full hoods and protective suits to prevent exposure (level C protection level)
Fire/high temperatures usually by a combination of insulating and reflective materials which reduce the effects (see also fire proximity suit)
The hazmat suit generally includes breathing air supplies to provide clean, uncontaminated air for the wearer. In laboratory use, clean air may be supplied through attached hoses. This air is usually pumped into the suit at positive pressure with respect to the surroundings as an additional protective measure against the introduction of dangerous agents into a potentially ruptured or leaking suit.
Working in a hazmat suit is very strenuous, as the suits tend to be less flexible than conventional work garments. With the exception of laboratory versions, hazmat suits can be hot and poorly ventilated (if at all). Therefore, use is usually limited to short durations of up to 2 hours, depending on the difficulty of the work. Level A (United States) suits, for example, are limited by their air supply to around 15–20 minutes of very strenuous work (such as a firefighting rescue in a building).
However, OSHA/EPA protective level A suits/ensembles are not typically used in firefighting rescue, especially during a building/structure fire. National Fire Protection Association (NFPA) compliant "turnout gear", and NIOSH-certified SCBA, or CBRN SCBA, are the primary protection technologies for structure firefighting in the US.
Ratings
In the United States
Hazmat protective clothing is classified as any of Level A, B, C, or D, based upon the degree of protection they provide.
Level A The highest level of protection against vapors, gases, mists, and particles is Level A, which consists of a fully encapsulating chemical entry suit with a full-facepiece self-contained breathing apparatus (SCBA). A person must also wear boots with steel toes and shanks on the outside of the suit and specially selected chemical-resistant gloves for this level of protection. The breathing apparatus is worn inside (encapsulated within) the suit. To qualify as Level A protection, an intrinsically safe two-way radio is also worn inside the suit, often incorporating voice-operated microphones and an earpiece speaker for monitoring the operations channel.
Level B Level B protection requires a garment (including SCBA) that provides protection against splashes from a hazardous chemical. Since the breathing apparatus is sometimes worn on the outside of the garment, Level B protection is not vapor-protective. Level B suits can also be fully encapsulating, which helps prevent the SCBA from becoming contaminated. It is worn when vapor-protective clothing (Level A) is not required. Wrists, ankles, facepiece and hood, and waist are secured to prevent any entry of splashed liquid. Depending on the chemical being handled, specific types of gloves and boots are donned. These may or may not be attached to the garment. The garment itself may be one piece or a two-piece hooded suit. Level B protection also requires the wearing of chemical-resistant boots with steel toes and shanks on the outside of the garment. As with Level A, chemical-resistant gloves and two-way radio communications are also required.
Level C Level C protection differs from Level B in the area of equipment needed for respiratory protection. The same type of garment used for Level B protection is worn for Level C. Level C protection allows for the use of respiratory protection equipment other than SCBA. This protection includes any of the various types of air-purifying respirators. People should not use this level of protection unless the specific hazardous material is known and its concentration can be measured. Level C equipment does not offer the protection needed in an oxygen deficient atmosphere.
Level D Level D protection does not protect the person from chemical exposure. Therefore, this level of protection can only be used in situations where a person has no possibility of contact with chemicals. A pair of coveralls or other work-type garment along with chemical-resistant footwear with steel toes and shanks are all that is required to qualify as Level D protection. Most firefighter turnout gear is considered to be Level D.
In Europe
Most suits used in Europe are covered by a set of EU Norms, and divided into a total of six types (levels) of protection:
Type 1: Protects against liquid and gaseous chemicals. Gas tight. (EN 943 part 1). More or less equivalent to US level A.
Type 2: Protects against liquid and gaseous chemicals. Non gas tight. (EN 943 part 1). More or less equivalent to US level B.1
Type 3: Protects against liquid chemicals for a limited period. Liquid jet tight. (EN 14605)
Type 4: Protects against liquid chemicals for a limited period. Liquid saturation tight. (EN 14605). More or less equivalent to US level C.
Type 5: Protects against airborne dry particulates for a limited period. (EN ISO 13982-1).
Type 6: Protects against a light spray of liquid chemicals (EN 13034). More or less equivalent to US level D.
1: Can be used in places where the chemical in gaseous form isn't harmful to the body exterior.
GOST System
In the GOST system of norms, EN 943 is equivalent to GOST 12.4.284.2-2014.
In Brazil
Following the American standards, there are 4 different types of suits, from A to D. The suits are known to the Brazilian military as Roupa Protetora Permeável de Combate (Protective Permeable Combat Clothing). There are within the Brazilian military several specialized hazmat regiments. The regiments were deployed during the FIFA 2014 World Cup, 2016 Summer Olympics in Rio de Janeiro, and the COVID-19 pandemic.
Types
Hazmat suits come in two variations: splash protection and gastight suits. The splash protection suits are designed to prevent the wearer from coming into contact with a liquid. These suits do not protect against gases or dust. Gastight suits additionally protect against gases and dust.
Gas/vapor protection
Such suits (level A in the US) are gas or vapor-tight, providing total encapsulation and the highest level of protection against direct and airborne chemical contact. They are typically worn with a self-contained breathing apparatus (SCBA) enclosed within the suit.
These suits are typically constructed of several layers and, being airtight, include a release valve so the suit does not overinflate from air exhaled by the SCBA. The release valve does retain some air to keep some positive pressure ("overpressure") inside the suit. As noted, such suits are usually limited to just 15–20 minutes of use by their mobile air supply.
With each suit described here, there is a manufactured device designed to protect the respiratory system of the wearer (called a respirator) while the suit/ensemble is used to protect skin exposed to potential hazardous dermal agents. A respirator may be something as simple as a headband strap filtering facepiece respirator (FFR); to a head harness negative pressure full face respirator (air-purifying respirator/APR); to a full face, tight fitting, closed breathing air; or open circuit, self-contained breathing apparatus (CC-SCBA or SCBA).
Splash protection
Such suits (level B in the US) are not vapor-tight and thus provide a lesser level of protection. They are, however, worn with an SCBA, which may be located inside or outside of the suit, depending on the type of suit (encapsulating or non-encapsulating). They more closely resemble the one-piece Tyvek coveralls often seen used in construction and demolition work. Yet, Level B splash suits may also be fully encapsulating suits which are simply not airtight.
Lesser protection (level C in the US) suits may be coveralls of treated material, or multi-piece combinations, sealed with tape. This kind of protection is still "proof" against many non-invasive substances, such as anthrax.
See also
, new military equivalent of NBC (now with radiological)
, military equivalent
, military equivalent
, historical equivalent
References
External links
Environmental suits
Firefighting equipment
Nuclear safety and security
|
```php
<?php
declare(strict_types=1);
// Test fixture: each row is [expected result, input probability].
// The numeric expectations appear to be the inverse standard normal CDF
// (e.g. -0.67448975 for p = 0.25) -- NOTE(review): confirm against the consuming test.
// The '#VALUE!' row asserts the error for a non-numeric argument; the '#NUM!'
// rows assert the error for probabilities outside the valid domain (-0.9 and 1.9
// both fall outside (0, 1)).
return [
    [1.33333467227723, 0.908789],
    [-0.67448975022342, 0.25],
    [0.12566134687610, 0.55],
    [1.28155156414015, 0.9],
    ['#VALUE!', 'NaN'],
    ['#NUM!', -0.9],
    ['#NUM!', 1.9],
];
```
|
```shell
# Provision a Cloud Spanner instance/database, then import seed data via
# Apache Beam pipelines. Usage: ./setup.sh <REGION-SUFFIX>  ($1 completes the
# "regional-..." instance config name, e.g. "us-central1").

# Abort on the first failing command so the import steps never run against a
# half-provisioned instance:
set -e

echo "Creating Spanner Instance..."
gcloud spanner instances create challenge-lab-instance --config="regional-$1" --description="challenge-lab-instance" --processing-units=100

echo "Creating Spanner Database..."
gcloud spanner databases create orders-db --instance=challenge-lab-instance --database-dialect=GOOGLE_STANDARD_SQL --ddl-file=./orders-db-schema.sql

echo "Installing Apache Beam Prerequisites..."
# Quote the requirement specifiers: unquoted square brackets are shell glob
# patterns and can be expanded (or fail under failglob) before pip sees them.
pip install "apache-beam[gcp]==2.42.0"
pip install "apache-beam[dataframe]"

echo "Import customers table..."
python import-customers-to-spanner.py
echo "Import orders table..."
python import-orders-to-spanner.py
echo "Import details table..."
python import-details-to-spanner.py
echo "Import products table..."
python import-products-to-spanner.py
```
|
A constitutional referendum was held in Sierra Leone on 12 July 1978. The constitutional amendments were aimed at turning the country into a presidential one-party state, with the All People's Congress as the sole legal party. The new constitution had been adopted by Parliament in May, and was put to public approval in the referendum. With more than 97% of voters voting in favour according to official results, the referendum has been described as "heavily rigged".
APC leader and President Siaka Stevens had pushed for the adoption of one-party rule, contending that it was more "African" than Western-style democracy.
Results
Aftermath
Following the referendum, Stevens was sworn in for another seven-year term. In the next presidential elections in 1985, voters had the choice of supporting or opposing his successor, Joseph Saidu Momoh, with no opposition candidates allowed. The 15 Sierra Leone People's Party MPs elected in 1977 joined the APC.
The country would remain a one-party state until 1991, when a referendum repealed the 1978 constitution and returned the country to multi-party politics.
References
Sierra Leone
Referendums in Sierra Leone
1978 in Sierra Leone
Constitutional referendums
|
```go
package bech32
import (
"fmt"
"github.com/cosmos/btcutil/bech32"
)
// ConvertAndEncode converts from a base256 encoded byte string to base32 encoded byte string and then to bech32.
// Padding is enabled so any leftover bits are padded out to a full 5-bit group,
// which makes every input byte slice encodable.
func ConvertAndEncode(hrp string, data []byte) (string, error) {
	// Regroup the 8-bit bytes into the 5-bit groups required by the bech32 alphabet.
	converted, err := bech32.ConvertBits(data, 8, 5, true)
	if err != nil {
		return "", fmt.Errorf("encoding bech32 failed: %w", err)
	}
	return bech32.Encode(hrp, converted)
}
// DecodeAndConvert decodes a bech32 encoded string and converts to base256 encoded bytes.
// It returns the human-readable part (hrp) and the decoded payload, or an error if the
// string is malformed or its 5-bit groups cannot be regrouped into whole bytes.
func DecodeAndConvert(bech string) (string, []byte, error) {
	// 1023 caps the accepted string length -- well beyond bech32's nominal 90-char
	// limit, presumably to accommodate longer address formats; TODO confirm intent.
	hrp, data, err := bech32.Decode(bech, 1023)
	if err != nil {
		return "", nil, fmt.Errorf("decoding bech32 failed: %w", err)
	}
	// Regroup the 5-bit bech32 groups back into 8-bit bytes; padding disabled so
	// incomplete trailing groups are rejected rather than silently padded.
	converted, err := bech32.ConvertBits(data, 5, 8, false)
	if err != nil {
		return "", nil, fmt.Errorf("decoding bech32 failed: %w", err)
	}
	return hrp, converted, nil
}
```
|
Opistognathus is a genus of fish in the family Opistognathidae found in the Atlantic, Indian and Pacific Ocean.
Species
There are currently 72 recognized species in this genus:
Opistognathus adelus Smith-Vaniz, 2010 (Obscure jawfish)
Opistognathus afer Smith-Vaniz, 2010 (African jawfish)
Opistognathus albicaudatus Smith-Vaniz, 2011 (White-tail jawfish)
Opistognathus alleni Smith-Vaniz, 2004 (Abrolhos jawfish)
Opistognathus annulatus (Eibl-Eibesfeldt & Klausewitz, 1961)
Opistognathus aurifrons (D. S. Jordan & J. C. Thompson, 1905) (Yellow-head jawfish)
Opistognathus brasiliensis Smith-Vaniz, 1997 (Dark-fin jawfish)
Opistognathus brochus W. A. Bussing & Lavenberg, 2003
Opistognathus castelnaui Bleeker, 1860 (Castelnau's jawfish)
Opistognathus crassus Smith-Vaniz, 2010 (Stout jawfish)
Opistognathus cuvierii Valenciennes, 1836 (Bar-tail jawfish)
Opistognathus cyanospilotus Smith-Vaniz, 2009 (Blue-blotch jawfish)
Opistognathus darwiniensis W. J. Macleay, 1878 (Darwin jawfish)
Opistognathus decorus Smith-Vaniz & Yoshino, 1985
Opistognathus dendriticus (D. S. Jordan & R. E. Richardson, 1908) (Dendritic jawfish)
Opistognathus dipharus Smith-Vaniz, 2010 (Tail-beacon jawfish)
Opistognathus elizabethensis Smith-Vaniz, 2004 (Elizabeth Reef jawfish)
Opistognathus ensiferus Smith-Vaniz, 2016
Opistognathus evermanni (D. S. Jordan & Snyder, 1902)
Opistognathus eximius (J. D. Ogilby, 1908) (Harlequin jawfish)
Opistognathus fenmutis Acero P & Franke, 1993
Opistognathus fossoris W. A. Bussing & Lavenberg, 2003
Opistognathus galapagensis G. R. Allen & D. R. Robertson, 1991 (Galapagos jawfish)
Opistognathus gilberti J. E. Böhlke, 1967 (Yellow jawfish)
Opistognathus hongkongiensis W. L. Y. Chan, 1968
Opistognathus hopkinsi (D. S. Jordan & Snyder, 1902)
Opistognathus inornatus E. P. Ramsay & J. D. Ogilby, 1887 (Black jawfish)
Opistognathus iyonis (D. S. Jordan & W. F. Thompson, 1913)
Opistognathus jacksoniensis W. J. Macleay, 1881 (Jawfish)
Opistognathus latitabundus (Whitley, 1937) (Blotched jawfish)
Opistognathus leprocarus Smith-Vaniz, 1997 (Rough-cheek jawfish)
Opistognathus liturus Smith-Vaniz & Yoshino, 1985
Opistognathus lonchurus D. S. Jordan & C. H. Gilbert, 1882 (Moustache jawfish)
Opistognathus longinaris Smith-Vaniz, 2010 (Long-nostril jawfish)
Opistognathus macrognathus Poey, 1860 (Banded jawfish)
Opistognathus macrolepis W. K. H. Peters, 1866 (Big-scale jawfish)
Opistognathus margaretae Smith-Vaniz, 1983 (Half-scaled jawfish)
Opistognathus maxillosus Poey, 1860 (Mottled jawfish)
Opistognathus megalepis Smith-Vaniz, 1972 (Large-scale jawfish)
Opistognathus melachasme Smith-Vaniz, 1972 (Yellow-mouth jawfish)
Opistognathus mexicanus G. R. Allen & D. R. Robertson, 1991 (Mexican jawfish)
Opistognathus muscatensis Boulenger, 1888 (Robust jawfish)
Opistognathus nigromarginatus Rüppell, 1830 (Bridled jawfish)
Opistognathus nothus Smith-Vaniz, 1997
Opistognathus panamaensis G. R. Allen & D. R. Robertson, 1991 (Panamanian jawfish)
Opistognathus papuensis Bleeker, 1868 (Papuan jawfish)
Opistognathus pardus Smith-Vaniz, Bineesh & Akhilesh, 2012 (Leopard jawfish)
Opistognathus punctatus W. K. H. Peters, 1869 (Fine-spotted jawfish)
Opistognathus randalli Smith-Vaniz, 2009 (Gold-specs jawfish)
Opistognathus reticeps Smith-Vaniz, 2004
Opistognathus reticulatus (McKay, 1969) (Reticulated jawfish)
Opistognathus rhomaleus D. S. Jordan & C. H. Gilbert, 1882 (Giant jawfish)
Opistognathus robinsi Smith-Vaniz, 1997 (Spot-fin jawfish)
Opistognathus rosenbergii Bleeker, 1856 (Rosenberg's jawfish)
Opistognathus rosenblatti G. R. Allen & D. R. Robertson, 1991 (Blue-spotted jawfish)
Opistognathus rufilineatus Smith-Vaniz & G. R. Allen, 2007 (Red-lined jawfish)
Opistognathus scops (O. P. Jenkins & Evermann, 1889) (Bulls-eye jawfish)
Opistognathus seminudus Smith-Vaniz, 2004 (Half-naked jawfish)
Opistognathus signatus Smith-Vaniz, 1997
Opistognathus simus Smith-Vaniz, 2010 (Cargados jawfish)
Opistognathus smithvanizi W. A. Bussing & Lavenberg, 2003
Opistognathus solorensis Bleeker, 1853 (Solor jawfish)
Opistognathus stigmosus Smith-Vaniz, 2004 (Coral Sea jawfish)
Opistognathus trimaculatus Hiramatsu & Endo, 2013 (Five-banded jawfish)
Opistognathus variabilis Smith-Vaniz, 2009 (Variable jawfish)
Opistognathus verecundus Smith-Vaniz, 2004 (Bashful jawfish)
Opistognathus walkeri W. A. Bussing & Lavenberg, 2003
Opistognathus whitehursti (Longley, 1927) (Dusky jawfish)
Opistognathus ctenion Fujiwara, Motomura & Shinohara, 2023
Opistognathus schrieri Smith-Vaniz, 2017
Opistognathus thionyi Smith-Vaniz, Tornabene & Macieira, 2018
Opistognathus vicinus Smith-Vaniz, Tornabene & Macieira, 2018
Opistognathus ocellicaudatus Shinohara, 2021
Opistognathus wharekuriensis Schwarzhans, 2019
References
Opistognathidae
Marine fish genera
Taxa named by Georges Cuvier
|
```xml
<?xml version="1.0" encoding="utf-8"?>
<!-- Visual Studio filter definitions: maps the vcruntime managed-CRT sources into the "objs" solution folder. -->
<Project ToolsVersion="4.0" xmlns="path_to_url">
  <ItemGroup>
    <Filter Include="objs">
      <UniqueIdentifier>{677d1d1b-1df8-4374-bbde-cf2b1d75eea7}</UniqueIdentifier>
    </Filter>
  </ItemGroup>
  <ItemGroup>
    <ClCompile Include="..\..\vcruntime\managdeh.cpp">
      <Filter>objs</Filter>
    </ClCompile>
    <ClCompile Include="..\..\vcruntime\locale0_implib.cpp">
      <Filter>objs</Filter>
    </ClCompile>
    <ClCompile Include="..\..\vcruntime\pureMSILcode.cpp">
      <Filter>objs</Filter>
    </ClCompile>
    <ClCompile Include="..\..\vcruntime\mwcrtexe.cpp">
      <Filter>objs</Filter>
    </ClCompile>
    <ClCompile Include="..\..\vcruntime\mwcrtexew.cpp">
      <Filter>objs</Filter>
    </ClCompile>
    <ClCompile Include="..\..\vcruntime\mstartup.cpp">
      <Filter>objs</Filter>
    </ClCompile>
    <ClCompile Include="..\..\vcruntime\msilexit.cpp">
      <Filter>objs</Filter>
    </ClCompile>
    <ClCompile Include="..\..\vcruntime\mcrtexe.cpp">
      <Filter>objs</Filter>
    </ClCompile>
    <ClCompile Include="..\..\vcruntime\mcrtexew.cpp">
      <Filter>objs</Filter>
    </ClCompile>
    <ClCompile Include="..\..\vcruntime\ManagedMain.cpp">
      <Filter>objs</Filter>
    </ClCompile>
    <ClCompile Include="..\..\vcruntime\mqsort.cpp">
      <Filter>objs</Filter>
    </ClCompile>
    <ClCompile Include="..\..\vcruntime\mlfind.cpp">
      <Filter>objs</Filter>
    </ClCompile>
    <ClCompile Include="..\..\vcruntime\mlsearch.cpp">
      <Filter>objs</Filter>
    </ClCompile>
    <ClCompile Include="..\..\vcruntime\tlssup.cpp">
      <Filter>objs</Filter>
    </ClCompile>
    <ClCompile Include="..\..\..\mehvccctr.cpp">
      <Filter>objs</Filter>
    </ClCompile>
    <ClCompile Include="..\..\..\mehvcccvb.cpp">
      <Filter>objs</Filter>
    </ClCompile>
    <ClCompile Include="..\..\..\mehvecctr.cpp">
      <Filter>objs</Filter>
    </ClCompile>
    <ClCompile Include="..\..\..\mehveccvb.cpp">
      <Filter>objs</Filter>
    </ClCompile>
    <ClCompile Include="..\..\..\mehvecdtr.cpp">
      <Filter>objs</Filter>
    </ClCompile>
  </ItemGroup>
</Project>
```
|
```smalltalk
/* ====================================================================
contributor license agreements. See the NOTICE file distributed with
this work for Additional information regarding copyright ownership.
path_to_url
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
==================================================================== */
/*
* MulRKRecord.java
*
* Created on November 9, 2001, 4:53 PM
*/
namespace NPOI.HSSF.Record
{
using NPOI.Util;
using System;
using System.Text;
using NPOI.HSSF.Util;
/**
 * Used to store multiple RK numbers on a row. 1 MulRk = Multiple Cell values.
 * HSSF just Converts this into multiple NUMBER records. Read-ONLY SUPPORT!
 * REFERENCE: PG 330 Microsoft Excel 97 Developer's Kit (ISBN: 1-57231-498-2)
 * @author Andrew C. Oliver (acoliver at apache dot org)
 * @version 2.0-pre
 */
public class MulRKRecord : StandardRecord
{
    // BIFF record id for MULRK records.
    public const short sid = 0xbd;
    // Record layout: row index, first column, N encoded (xf, rk) pairs, last column.
    private int field_1_row;
    private short field_2_first_col;
    private RkRec[] field_3_rks;
    private short field_4_last_col;
    /** Creates new MulRKRecord */
    public MulRKRecord()
    {
    }
    /**
     * Constructs a MulRK record and Sets its fields appropriately.
     *
     * @param in1 the RecordInputStream to Read the record from
     */
    public MulRKRecord(RecordInputStream in1)
    {
        field_1_row = in1.ReadUShort();
        field_2_first_col = in1.ReadShort();
        // Everything between the first- and last-column fields is (xf, rk) pairs.
        field_3_rks = RkRec.ParseRKs(in1);
        field_4_last_col = in1.ReadShort();
    }
    //public short Row
    /** Zero-based row index this record applies to. */
    public int Row
    {
        get { return field_1_row; }
    }
    /**
     * starting column (first cell this holds in the row)
     * @return first column number
     */
    public short FirstColumn
    {
        get { return field_2_first_col; }
    }
    /**
     * ending column (last cell this holds in the row)
     * @return last column number
     */
    public short LastColumn
    {
        get { return field_4_last_col; }
    }
    /**
     * Get the number of columns this Contains (last-first +1)
     * @return number of columns (last - first +1)
     */
    public int NumColumns
    {
        get { return field_4_last_col - field_2_first_col + 1; }
    }
    /**
     * returns the xf index for column (coffset = column - field_2_first_col)
     * @return the XF index for the column
     */
    public short GetXFAt(int coffset)
    {
        return field_3_rks[coffset].xf;
    }
    /**
     * returns the rk number for column (coffset = column - field_2_first_col)
     * @return the value (decoded into a double)
     */
    public double GetRKNumberAt(int coffset)
    {
        // RK values are 30-bit compressed numbers; RKUtil expands them to doubles.
        return RKUtil.DecodeNumber(field_3_rks[coffset].rk);
    }
    //private ArrayList ParseRKs(RecordInputStream in1)
    //{
    //    ArrayList retval = new ArrayList();
    //    while ((in1.Remaining - 2) > 0)
    //    {
    //        RkRec rec = new RkRec();
    //        rec.xf = in1.ReadShort();
    //        rec.rk = in1.ReadInt();
    //        retval.Add(rec);
    //    }
    //    return retval;
    //}
    public override String ToString()
    {
        StringBuilder buffer = new StringBuilder();
        buffer.Append("[MULRK]\n");
        buffer.Append("	.row	 = ").Append(HexDump.ShortToHex(Row)).Append("\n");
        buffer.Append("	.firstcol= ").Append(StringUtil.ToHexString(FirstColumn)).Append("\n");
        buffer.Append("	.lastcol = ").Append(StringUtil.ToHexString(LastColumn)).Append("\n");
        for (int k = 0; k < NumColumns; k++)
        {
            buffer.Append("	xf[").Append(k).Append("] = ").Append(StringUtil.ToHexString(GetXFAt(k))).Append("\n");
            buffer.Append("	rk[").Append(k).Append("] = ").Append(GetRKNumberAt(k)).Append("\n");
        }
        buffer.Append("[/MULRK]\n");
        return buffer.ToString();
    }
    public override short Sid
    {
        get { return sid; }
    }
    // Write support was never implemented for this record type (read-only).
    public override void Serialize(ILittleEndianOutput out1)
    {
        throw new RecordFormatException("Sorry, you can't serialize MulRK in this release");
    }
    protected override int DataSize
    {
        get
        {
            throw new RecordFormatException("Sorry, you can't serialize MulRK in this release");
        }
    }
    /** One (xf index, encoded rk value) pair within the record body. */
    private class RkRec
    {
        // Bytes per encoded pair: 2 (xf) + 4 (rk).
        public const int ENCODED_SIZE = 6;
        public short xf;
        public int rk;
        private RkRec(RecordInputStream in1)
        {
            xf = in1.ReadShort();
            rk = in1.ReadInt();
        }
        public static RkRec[] ParseRKs(RecordInputStream in1)
        {
            // The trailing 2 bytes are the last-column field, not pair data.
            int nItems = (in1.Remaining - 2) / ENCODED_SIZE;
            RkRec[] retval = new RkRec[nItems];
            for (int i = 0; i < nItems; i++)
            {
                retval[i] = new RkRec(in1);
            }
            return retval;
        }
    }
}
}
```
|
Brett Carson (born November 29, 1985) is a Canadian professional ice hockey defenceman. He is currently a free agent.
He previously played in the National Hockey League (NHL) with the Carolina Hurricanes and the Calgary Flames.
Playing career
Carson began his junior hockey career with the Moose Jaw Warriors of the Western Hockey League (WHL), but was sent to the Calgary Hitmen midway through his first full season of 2002–03. Carson was invited to participate in the CHL Top Prospects Game in his draft year of 2003–04, during which he won the fastest skater event of the game's skills competition. At the 2004 NHL Entry Draft, he was selected by the Carolina Hurricanes in the fourth round, 109th overall. He remained with the Hitmen for two additional seasons. He was Calgary's captain in 2005–06, led the team with 40 points as a defenceman and was named a WHL East Division first-team all-star in a season where Calgary set a league record for fewest goals allowed in a 72-game season with 155.
Turning professional in 2006–07, Carson played three games with the ECHL's Florida Everblades before earning promotion to Carolina's top affiliate, the Albany River Rats of the American Hockey League (AHL). Remaining with Albany the following season, he led the team's defencemen with 24 points in 77 games. Carson made his NHL debut in 2008–09, playing his first of five games December 7, 2008, against the Washington Capitals. He spent the majority of the 2009–10 NHL season with Carolina, appearing in 54 games and recording 12 points. He scored his first NHL point on December 16, 2009, when he assisted on a Tuomo Ruutu goal against the Dallas Stars, and his first goal on January 10, 2010, against Pascal Leclaire of the Ottawa Senators.
Carson split much of 2010–11 between the Hurricanes and the AHL's Charlotte Checkers, but was claimed by the Calgary Flames off waivers on February 28, 2011. He appeared in only six games for the Flames, but was signed by the team to a two-year contract extension late in the year. He missed the first third of the 2011–12 season after suffering a back injury in summer training. He did not make his season debut with Calgary until December 14, 2011.
On September 8, 2013, he signed a one-year contract with AIK Stockholm of the Swedish Hockey League. In his first European season in 2013–14, Carson established a stay-at-home role with AIK, contributing with 3 goals and 10 points in 49 games from the blueline.
On July 9, 2014, Carson opted to leave the SHL and signed a one-year contract with Austrian club, the Vienna Capitals of the EBEL. In the 2014–15 season, Carson scored a team high 9 goals from the blueline with 24 points in 54 games. In the post-season he helped Vienna reach the Championship finals.
On August 6, 2015, Carson moved to the Finnish Liiga as a free agent, securing a one-year deal with SaiPa.
Career statistics
Awards and honours
References
External links
1985 births
Living people
Canadian ice hockey defencemen
Ice hockey people from Regina, Saskatchewan
Moose Jaw Warriors players
Calgary Hitmen players
Albany River Rats players
Florida Everblades players
Carolina Hurricanes draft picks
Carolina Hurricanes players
Charlotte Checkers (2010–) players
Calgary Flames players
Abbotsford Heat players
AIK IF players
Vienna Capitals players
SaiPa players
KooKoo players
HC Slovan Bratislava players
Bratislava Capitals players
Lausitzer Füchse players
|
```java
/*
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
*
* path_to_url
*
* Unless required by applicable law or agreed to in writing,
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* specific language governing permissions and limitations
*/
package org.apache.pulsar.common.stats;
import java.lang.management.GarbageCollectorMXBean;
import java.lang.management.ManagementFactory;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import lombok.SneakyThrows;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Default {@link JvmGCMetricsLogger} implementation. Reports per-collector GC
 * pause time/count deltas between refreshes, plus HotSpot safepoint statistics
 * exposed under the historical "full GC" metric names.
 *
 * <p>Safepoint data comes from the non-public
 * {@code sun.management.HotspotRuntimeMBean}, accessed reflectively; when that
 * bean is unavailable the safepoint-backed values degrade to {@code -1}.
 */
@SuppressWarnings({"checkstyle:JavadocType"})
public class JvmDefaultGCMetricsLogger implements JvmGCMetricsLogger {
    private static final Logger log = LoggerFactory.getLogger(JvmDefaultGCMetricsLogger.class);

    // Safepoint totals observed at the last refresh() and the deltas since the one before.
    private volatile long accumulatedFullGcCount = 0;
    private volatile long currentFullGcCount = 0;
    private volatile long accumulatedFullGcTime = 0;
    private volatile long currentFullGcTime = 0;

    private static Object /*sun.management.HotspotRuntimeMBean*/ runtime;
    private static Method getTotalSafepointTimeHandle;
    private static Method getSafepointCountHandle;

    private Map<String, GCMetrics> gcMetricsMap = new HashMap<>();

    static {
        try {
            runtime = Class.forName("sun.management.ManagementFactoryHelper")
                    .getMethod("getHotspotRuntimeMBean")
                    .invoke(null);
            getTotalSafepointTimeHandle = runtime.getClass().getMethod("getTotalSafepointTime");
            getTotalSafepointTimeHandle.setAccessible(true);
            getSafepointCountHandle = runtime.getClass().getMethod("getSafepointCount");
            getSafepointCountHandle.setAccessible(true);
            // Probe both methods once so a failure surfaces here rather than on every refresh.
            getTotalSafepointTimeHandle.invoke(runtime);
            getSafepointCountHandle.invoke(runtime);
        } catch (Throwable e) {
            log.warn("Failed to get Runtime bean", e);
            // Clear both handles so the getters below fall back to -1 instead of
            // invoking handles that are known (or partially known) to be broken.
            getTotalSafepointTimeHandle = null;
            getSafepointCountHandle = null;
        }
    }

    /**
     * Returns the accumulated time (ms) the application has been stopped for
     * safepoint operations, or -1 when the HotSpot runtime bean is unavailable.
     */
    @SneakyThrows
    static long getTotalSafepointTime() {
        if (getTotalSafepointTimeHandle == null) {
            return -1;
        }
        return (long) getTotalSafepointTimeHandle.invoke(runtime);
    }

    /**
     * Returns the number of safepoint operations, or -1 when the HotSpot runtime
     * bean is unavailable.
     */
    @SneakyThrows
    static long getSafepointCount() {
        // Fixed copy-paste bug: this previously guarded on getTotalSafepointTimeHandle,
        // which could invoke a null getSafepointCountHandle.
        if (getSafepointCountHandle == null) {
            return -1;
        }
        return (long) getSafepointCountHandle.invoke(runtime);
    }

    /**
     * Metrics for the Garbage Collector.
     */
    static class GCMetrics {
        volatile long accumulatedGcCount = 0;
        volatile long currentGcCount = 0;
        volatile long accumulatedGcTime = 0;
        volatile long currentGcTime = 0;
    }

    @Override
    public void logMetrics(Metrics metrics) {
        // Safepoint-derived stats are reported under the "full GC" metric names.
        metrics.put("jvm_full_gc_pause", currentFullGcTime);
        metrics.put("jvm_full_gc_count", currentFullGcCount);
        gcMetricsMap.forEach((name, metric) -> {
            metrics.put("jvm_" + name + "_gc_pause", metric.currentGcTime);
            metrics.put("jvm_" + name + "_gc_count", metric.currentGcCount);
        });
    }

    @SuppressWarnings("restriction")
    @Override
    public void refresh() {
        List<GarbageCollectorMXBean> gcBeans = ManagementFactory.getGarbageCollectorMXBeans();
        try {
            if (gcBeans != null) {
                for (GarbageCollectorMXBean gc : gcBeans) {
                    GCMetrics gcMetric = gcMetricsMap.computeIfAbsent(gc.getName(), gcName -> new GCMetrics());
                    long newGcTime = gc.getCollectionTime();
                    long newGcCount = gc.getCollectionCount();
                    // Record the deltas since the previous refresh, then roll totals forward.
                    gcMetric.currentGcCount = newGcCount - gcMetric.accumulatedGcCount;
                    gcMetric.currentGcTime = newGcTime - gcMetric.accumulatedGcTime;
                    gcMetric.accumulatedGcCount = newGcCount;
                    gcMetric.accumulatedGcTime = newGcTime;
                }
            }
            // Safepoint time/count track stop-the-world pauses; deltas are exposed
            // as the "full GC" pause/count metrics.
            long newSafePointTime = getTotalSafepointTime();
            long newSafePointCount = getSafepointCount();
            currentFullGcTime = newSafePointTime - accumulatedFullGcTime;
            currentFullGcCount = newSafePointCount - accumulatedFullGcCount;
            accumulatedFullGcTime = newSafePointTime;
            accumulatedFullGcCount = newSafePointCount;
        } catch (Exception e) {
            // Log the exception itself (not just the message) so the stack trace is preserved.
            log.error("Failed to collect GC stats", e);
        }
    }
}
```
|
```smalltalk
using System;
namespace Volo.Abp.Domain.Entities.Events;
/// <summary>
/// This type of event can be used to notify just after creation of an Entity.
/// </summary>
/// <typeparam name="TEntity">Entity type</typeparam>
[Serializable]
public class EntityCreatedEventData<TEntity> : EntityChangedEventData<TEntity>
{
    /// <summary>
    /// Constructor.
    /// </summary>
    /// <param name="entity">The entity which is created</param>
    public EntityCreatedEventData(TEntity entity)
        : base(entity)  // all state lives on the EntityChangedEventData base
    {
    }
}
```
|
The Faros del Panamá is a skyscraper complex currently cancelled in Panama City, Panama. The complex, designed by Chapman Taylor, includes Torre Central (Central Tower), an 84 floor building with a planned height of 346 metres (1,135 feet). Like its name states, Torre Central is in the middle of Faros de Panamá's three tower complex. Torre Central will be the tallest of the three, rising from a common podium with the other two towers. The two smaller towers, rising on either side of Torre Central will be similar in design. They are named, very simply, Torre Oeste (West Tower) on the west and Torre Este (East Tower) on the east. Despite being small compared to Torre Central, the East and West Tower will still be 290 m (951 feet) tall and have 73 floors. The name of the complex, Faros del Panamá, translates into English as Lighthouses of Panama.
The project was paralyzed by financial problems and has now been cancelled. It is extremely unlikely that the complex will ever be constructed. Had it been constructed, the Central Tower would likely have been the tallest building in Latin America.
In 2010 the project's foundation was completed, however construction has not progressed since then due to financing problems. It would have cost US$600 million to build. It is currently undergoing debt liquidation.
See also
List of tallest buildings in Panama City
References
https://web.archive.org/web/20101221215441/http://www.losfarosdepanama.com/newsletters/2009/marzo/en/avance.html
External links
Los Faros De Panama
Escapes2.com
Los Faros de Panama Condos
Skyscrapers in Panama City
Unbuilt buildings and structures in Panama
|
John Bramley (1898 – after 1927), also known as Jack Bramley, was an English professional footballer who played as a right half, centre half or right back. He made 29 appearances in the Football League without scoring.
Career
Born in East Kirkby, Bramley played for Mansfield Town, Welbeck Colliery Welfare, Bradford City, Rotherham County and Sutton Town. For Bradford City, he made six appearances in the Football League. He played 24 matches in senior competition for Rotherham County, including appearing in their last Football League match before the club amalgamated with Rotherham Town to form Rotherham United.
Sources
References
1898 births
Year of death missing
People from East Lindsey District
English men's footballers
Men's association football wing halves
Men's association football defenders
Mansfield Town F.C. players
Welbeck Welfare F.C. players
Bradford City A.F.C. players
Rotherham County F.C. players
Sutton Town A.F.C. players
English Football League players
Date of birth missing
Place of death missing
Footballers from Lincolnshire
|
```python
from rllab.algos.trpo import TRPO
from rllab.baselines.linear_feature_baseline import LinearFeatureBaseline
from rllab.envs.box2d.cartpole_env import CartpoleEnv
from rllab.envs.normalized_env import normalize
from rllab.policies.gaussian_mlp_policy import GaussianMLPPolicy
def run_task(v):
    """Train a TRPO agent on a normalized cartpole environment.

    Args:
        v: variant dict; only ``v["step_size"]`` is read, as the TRPO
            trust-region step size.
    """
    environment = normalize(CartpoleEnv())
    mlp_policy = GaussianMLPPolicy(
        env_spec=environment.spec,
        # Two hidden layers of 32 units each.
        hidden_sizes=(32, 32),
    )
    value_baseline = LinearFeatureBaseline(env_spec=environment.spec)
    trainer = TRPO(
        env=environment,
        policy=mlp_policy,
        baseline=value_baseline,
        batch_size=4000,
        max_path_length=100,
        n_itr=40,
        discount=0.99,
        step_size=v["step_size"],
        # plot=True,  # uncomment (with the plot flag at launch) to enable plotting
    )
    trainer.train()
```
|
```yaml
### YamlMime:Hub
title: Azure governance documentation
summary: Get the most advanced set of governance capabilities of any major cloud provider.
brand: azure
metadata:
title: Azure governance documentation
description: Get the most advanced set of governance capabilities of any major cloud provider.
ms.service: governance
ms.topic: hub-page
author: davidsmatlak
ms.author: davidsmatlak
ms.date: 06/07/2024
highlightedContent:
items:
- title: Governance overview
itemType: overview
url: ./management-groups/azure-management.md
- title: Governance YouTube channel
itemType: video
url: path_to_url
- title: Governance in the Cloud Adoption Framework
itemType: architecture
url: /azure/architecture/cloud-adoption/governance
- title: Build a cloud governance strategy on Azure
itemType: learn
url: /training/modules/build-cloud-governance-strategy-azure/
productDirectory:
title: Components and Services
items:
- title: Azure Management Groups
imageSrc: ./media/management-groups.svg
links:
- url: ./management-groups/overview.md
text: Overview
- url: ./management-groups/how-to/protect-resource-hierarchy.md
text: Protect your resource hierarchy
- url: ./management-groups/index.yml
text: See more >
- title: Azure Policy
imageSrc: ./media/azure-policy.svg
links:
- url: ./policy/overview.md
text: Overview
- url: ./policy/policy-glossary.md
text: Azure Policy glossary
- url: ./policy/concepts/definition-structure-basics.md
text: Policy definition structure
- url: ./policy/concepts/effect-basics.md
text: Azure Policy effect
- url: ./policy/index.yml
text: See more >
- title: Azure Blueprints
imageSrc: ./media/azure-blueprints.svg
links:
- url: ./blueprints/overview.md
text: Overview
- url: ./blueprints/tutorials/protect-new-resources.md
text: Protect resources
- url: ./blueprints/index.yml
text: See more >
- title: Azure Resource Graph
imageSrc: ./media/azure-resource-graph.svg
links:
- url: ./resource-graph/overview.md
text: Overview
- url: ./resource-graph/concepts/explore-resources.md
text: Explore your Azure resources
- url: ./resource-graph/how-to/get-resource-changes.md
text: Track changes
- url: ./resource-graph/index.yml
text: See more >
- title: Cost Management
imageSrc: ./media/cost-management.svg
links:
- url: ../cost-management-billing/cost-management-billing-overview.md
text: Overview
- url: ../cost-management-billing/costs/aws-integration-manage.md
text: Manage AWS cost and usage
- url: ../cost-management-billing/index.yml
text: See more >
tools:
title: Samples
summary: Policy definitions, compliance blueprints, resource queries and more
items:
- title: Azure Policy
imageSrc: ./media/azure-policy.svg
url: ./policy/samples/index.md
- title: Azure Blueprints
imageSrc: ./media/azure-blueprints.svg
url: ./blueprints/samples/index.md
- title: Azure Resource Graph
imageSrc: ./media/azure-resource-graph.svg
url: ./resource-graph/samples/starter.md
additionalContent:
sections:
- title: Developer Resources
items:
- title: Management Groups
links:
- text: Azure CLI
url: /cli/azure/account/management-group
- text: Azure PowerShell
url: /powershell/module/az.resources/#resources
- text: Azure SDK for .NET
url: /dotnet/api/overview/azure/management-groups
- text: Azure SDK for Go
url: path_to_url
- text: Azure SDK for JavaScript
url: /javascript/api/@azure/arm-managementgroups/managementgroups
- text: Azure SDK for Python
url: /python/api/azure-mgmt-managementgroups/azure.mgmt.managementgroups
- text: REST
url: /rest/api/managementgroups
- text: Resource Manager templates
url: /azure/templates/microsoft.management/managementgroups
- title: Azure Policy
links:
- text: Azure CLI
url: /cli/azure/policy
- text: Azure PowerShell
url: /powershell/module/az.resources/#policy
note: (Policy)
- text: Azure PowerShell
url: /powershell/module/az.policyinsights#policy-insights
note: (Policy Insights)
- text: Azure PowerShell
url: path_to_url
note: (Guest Configuration)
- text: REST
url: /rest/api/policy/
note: (Policy)
- text: REST
url: /rest/api/guestconfiguration/
note: (Guest Configuration)
- text: Resource Manager templates
url: /azure/templates/microsoft.authorization/allversions
- title: Azure Policy (more)
links:
- text: Azure SDK for .NET
url: /dotnet/api/microsoft.azure.management.resourcemanager.models.policyassignment
note: (Assignments)
- text: Azure SDK for .NET
url: /dotnet/api/microsoft.azure.management.resourcemanager.models.policydefinition
note: (Policy Definitions)
- text: Azure SDK for JavaScript
url: /javascript/api/@azure/arm-policy
note: (Policy)
- text: Azure SDK for JavaScript
url: /javascript/api/@azure/arm-policyinsights
note: (Policy Insights)
- text: Azure SDK for Python
url: /python/api/azure-mgmt-policyinsights/azure.mgmt.policyinsights
- title: Azure Blueprints
links:
- text: Blueprint functions
url: ./blueprints/reference/blueprint-functions.md
- text: Azure CLI
url: /cli/azure/blueprint
- text: Azure PowerShell
url: /powershell/module/az.blueprint/#blueprint
- text: Azure PowerShell
url: path_to_url
note: (PowerShell Gallery Module)
- text: Azure SDK for .NET
url: /dotnet/api/overview/azure/blueprint
- text: REST
url: /rest/api/blueprints/
- title: Azure Resource Graph
links:
- text: Azure CLI
url: /cli/azure/graph
- text: Azure PowerShell
url: /powershell/module/az.resourcegraph/#resourcegraph
- text: Azure SDK for .NET
url: /dotnet/api/azure.resourcemanager.resourcegraph
- text: Azure SDK for .NET
url: path_to_url
note: (NuGet)
- text: Azure SDK for Go
url: path_to_url
- text: Azure SDK for Java
url: /java/api/com.azure.resourcemanager.resourcegraph
- text: Azure SDK for Java
url: path_to_url
note: (Maven)
- text: Azure SDK for JavaScript
url: /javascript/api/@azure/arm-resourcegraph
- text: Azure SDK for Python
url: /python/api/azure-mgmt-resourcegraph/azure.mgmt.resourcegraph
- text: Azure SDK for Ruby
url: path_to_url
note: (Gem)
- text: REST
url: /rest/api/azure-resourcegraph/
- text: Resource Manager templates
url: /azure/templates/microsoft.resourcegraph/allversions
- title: Cost Management
links:
- text: REST
url: /rest/api/cost-management
note: (Cost Management)
- text: REST
url: /rest/api/consumption
note: (Consumption)
- text: Resource Manager templates
url: /azure/templates/microsoft.consumption/budgets
footer: "[UserVoice](path_to_url) | [Microsoft Tech Community - Azure Governance](path_to_url) | [Azure Support](path_to_url)"
```
|
(May 15 - ) is a Japanese voice actress represented by Arts Vision. She is a graduate of the Japan Narration Performance Institute.
Filmography
Television animation
2011
Tamayura ~hitotose~ (Aunt #B)
2012
Battle Spirits: Sword Eyes (Takato)
Detective Conan (Female announcer)
Dubbing
Live-action
The Accountant (Rita Blackburn (Jean Smart))
All Eyez on Me (Afeni Shakur (Danai Gurira))
All My Life (Megan Denhoff (Marielle Scott))
Black Lightning (Jennifer Pierce (China Anne McClain))
Carnage
Clerks (Caitlin Bree (Lisa Spoonauer))
Drop Dead Diva
Edge of Winter (Karen (Rachelle Lefevre))
Fantastic Four (Mrs. Grimm (Mary-Pat Green))
Gimme Shelter (June Bailey (Rosario Dawson))
Gone (Sharon Ames (Jennifer Carpenter))
Gunman in Joseon (Choi Hye-won (Jeon Hye-bin))
Jay and Silent Bob Strike Back (Netflix edition) (Sissy (Eliza Dushku))
The Letters (Shubashini Das (Priya Darshini))
Medium
Mortdecai (Detective (Jenna Russell))
Numb3rs
One Day
Orange Is the New Black (Tasha "Taystee" Jefferson (Danielle Brooks))
Pandemic (Denise (Missi Pyle))
The Perfect Host
Supernatural
Taj Mahal (Giovanna (Alba Rohrwacher))
Animation
The Angry Birds Movie (Betty Bird)
My Little Pony: Equestria Girls (Cup Cake)
My Little Pony: Equestria Girls – Rainbow Rocks (Octavia Melody)
My Little Pony: Friendship is Magic (Granny Smith, Cup Cake, Silver Spoon, Shadowbolt Leader ("Friendship is Magic"), Lily Valley ("Bridle Gossip"), Flitter ("Hurricane Fluttershy"))
References
External links
Arts Vision profile
Twitter account
Japanese voice actresses
Living people
Voice actresses from Osaka Prefecture
Year of birth missing (living people)
21st-century Japanese actresses
Arts Vision voice actors
|
The 1994 Brazilian Grand Prix (formally the XXIII Grande Prêmio do Brasil) was a Formula One motor race held on 27 March 1994 at the Autódromo José Carlos Pace, São Paulo. It was the first race of the 1994 Formula One World Championship.
The 71-lap race was won by German driver Michael Schumacher, driving a Benetton-Ford, after starting from second position. Local hero Ayrton Senna took pole position in his Williams-Renault and led before being overtaken by Schumacher at the first round of pit stops, after which he spun off. Senna's British teammate Damon Hill finished second, with Frenchman Jean Alesi third in a Ferrari.
Background
The first race of the 1994 season saw five drivers make their F1 debuts: Heinz-Harald Frentzen, Olivier Panis, Jos Verstappen, Olivier Beretta and Roland Ratzenberger. There were also two new teams: Simtek, who had previously been involved in F1 as the designers of the Andrea Moda S921 in 1992, and Pacific Racing, who had enjoyed much success in lower formulae.
Qualifying report
In his first race for Williams, Ayrton Senna took pole position by 0.3 of a second from Michael Schumacher's Benetton. Jean Alesi was third in his Ferrari, over a second behind Schumacher, with Damon Hill fourth in the other Williams. Frentzen impressed by taking fifth in his Sauber, with Gianni Morbidelli – back in F1 after contesting the 1993 Italian Superturismo Championship – taking sixth in his Footwork. The top ten was completed by Karl Wendlinger in the second Sauber, Mika Häkkinen in the McLaren, Verstappen in the second Benetton and Ukyo Katayama in the Tyrrell. Of the two new teams, Bertrand Gachot qualified 25th in his Pacific and David Brabham 26th in his Simtek, with their respective teammates Paul Belmondo and Ratzenberger failing to qualify.
Qualifying classification
Race report
At the start Senna led from pole position, while Alesi overtook Schumacher. Schumacher retook second place on lap 2 and started chasing after Senna, who had pulled a four-second lead in the opening laps. Before the pit stops Schumacher had reduced the gap to just over a second, and on lap 21 the leading pair entered the pit lane together. Despite both teams running the same fuel strategy, Schumacher's stop was faster than Senna's, and thus he took the lead.
On lap 35, there was a four-car pile-up as Eddie Irvine and Verstappen came up to lap the Ligier of Éric Bernard. All three were rapidly closing up on Martin Brundle's McLaren which had suffered an engine failure. Faced with the prospect of colliding with the McLaren, Irvine jinked left as Verstappen attempted an overtake on that side thus forcing Verstappen on to the grass; Verstappen then lost control of his car and somersaulted over the McLaren whilst Bernard took avoiding action to the right. Brundle's helmet was impacted by the airborne Benetton although he escaped serious injury. Irvine was subsequently sanctioned by the FIA with a one-race ban, which was increased to three after the appeal of Jordan failed; Irvine was already under investigation due to an incident with Senna in Suzuka the year before.
Schumacher increased his lead to 10 seconds after the second round of pit stops, after which Senna started to close. By lap 55 the pair had lapped third-place runner Hill and the gap was down to 5 seconds, but then Senna lost control of his Williams and retired. After Senna's retirement Schumacher won comfortably, a lap ahead of Hill in second place and Alesi finishing third. Rubens Barrichello, Katayama and Wendlinger completed the top six, Katayama scoring his first points.
Race classification
Championship standings after the race
Drivers' Championship standings
Constructors' Championship standings
References
Brazilian Grand Prix
Brazilian Grand Prix
Grand Prix
Brazilian Grand Prix
|
```typescript
/* eslint-disable @typescript-eslint/no-var-requires */
/* eslint-disable max-statements, complexity */
// ANSI-to-HTML converter used to render colored console output in the UI.
const AnsiConvert = require("ansi-to-html");
const convert = new AnsiConvert();
// Maps bunyan-style numeric log levels to textual level names.
// Both 60 and 50 collapse to "error"; 10 maps to "silly".
const BunyanLevelLookup = {
  60: "error",
  50: "error",
  40: "warn",
  30: "info",
  20: "debug",
  10: "silly"
};
// Maps substrings found in plain-text log lines to a log level.
// Keys must stay in sync with the detection regex in parse().
const tagLevelMap = {
  "warn:": "warn",
  "error:": "error",
  fail: "error",
  rejection: "error",
  unhandled: "error",
  exception: "error",
  "debugger listening on": "silly"
};
/**
 * Parse a raw log line into a structured entry.
 *
 * Lines that look like JSON are parsed and their bunyan numeric level is
 * translated; otherwise well-known tags (warn:, error:, fail, ...) in the
 * text decide the level. Warnings/errors get show=2, the debugger banner
 * gets show=1.
 *
 * @param str raw log line
 * @param last previously parsed entry, used to disambiguate entries that
 *             share the same millisecond timestamp
 */
export function parse(str: string, last: any) {
  let jsonData;
  let show;

  // Only attempt JSON parsing when the line plausibly starts a JSON value.
  const looksLikeJson = str[0] === "{" || str[0] === "[";
  if (looksLikeJson) {
    try {
      jsonData = JSON.parse(str);
    } catch {
      // Not valid JSON after all — fall back to tag-based detection below.
    }
  }

  let message;
  let level;
  if (jsonData) {
    level = BunyanLevelLookup[jsonData.level];
    message = str;
    if (level === "warn" || level === "error") {
      show = 2;
    }
  }

  if (!level) {
    const match = str.match(
      /warn\:|error\:|fail|rejection|unhandled|exception|debugger listening on/i
    );
    if (match) {
      const tag = match[0].toLowerCase();
      level = tagLevelMap[tag];
      show = tag === "debugger listening on" ? 1 : 2;
    }
  }

  const entry: any = {
    level: level || "info",
    ts: Date.now(),
    message: message || str,
    json: jsonData,
    show
  };
  // Entries landing in the same millisecond as the previous one get an
  // incrementing tiebreaker so ordering stays stable.
  if (last && entry.ts === last.ts) {
    entry.tx = (last.tx || 0) + 1;
  }
  return entry;
}
/**
 * Render a parsed log event's message as HTML, converting any ANSI color
 * escape sequences to HTML markup.
 *
 * @param event parsed log entry (see parse); only `message` is used
 * @returns HTML string for display
 */
export function getLogEventAsHtml(event) {
  // toHtml already returns a string; the template-literal wrapper was redundant.
  return convert.toHtml(event.message);
}
```
|
Fenner's is Cambridge University Cricket Club's ground.
History
Cambridge University Cricket Club had previously played at two grounds in Cambridge, the University Ground and Parker's Piece. In 1846, Francis Fenner leased a former cherry orchard from Gonville and Caius College for the purpose of constructing a cricket ground. In 1848 he sub-let the ground to Cambridge University Cricket Club. Fenner's first hosted first-class cricket in 1848, with Cambridge University playing against the Marylebone Cricket Club (MCC).
A 40-foot wooden pavilion, painted blue, with a slated roof had been erected by the 1856 season.
Fenner's is also home to the Cambridge MCC University side, a partnership between the University of Cambridge, Anglia Ruskin University and the Marylebone Cricket Club established ahead of the 2010 season.
Facilities
As well as the cricket ground, there is a 3-lane indoor cricket school.
The groundsman pioneered the art of mowing grass in strips to create patterns, a technique now common in sports stadiums around the world.
See also
The Parks, where first-class cricket is played in Oxford
University Ground, Barnwell, a former Cambridge University cricket ground
References
External links
Fenner's home page
Matches played at Fenner's from CricketArchive
Ground profile from Cricinfo
Cambridge University Cricket Club
1848 establishments in England
Cricket grounds in Cambridgeshire
Sport at the University of Cambridge
University of Cambridge sites
Sports venues in Cambridge
Parks and open spaces in Cambridge
Sports venues completed in 1848
University sports venues in the United Kingdom
|
Roy Meeus (born 24 May 1989) is a Belgian professional footballer.
Career
Born in Lommel, Belgium, Meeus has played for KVSK United, Club Brugge, Dender, and Lommel United.
Personal life
Meeus married Ashlee Bond, an American-Israeli Olympic show jumping rider who competes for Israel, in 2015 and they have a daughter named Scottie, born in 2016.
References
External links
1989 births
Living people
Belgian men's footballers
Club Brugge KV players
F.C.V. Dender E.H. players
Lommel S.K. players
K.F.C. Dessel Sport players
Orange County SC players
Belgian Pro League players
Challenger Pro League players
USL Championship players
People from Lommel
Men's association football midfielders
Belgian expatriate men's footballers
Belgian expatriates in the United States
Expatriate men's soccer players in the United States
Footballers from Limburg (Belgium)
|
```java
/*
* one or more contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright ownership.
*/
package io.camunda.operate.webapp.rest.dto.operation;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.camunda.operate.webapp.rest.dto.listview.SortValuesWrapper;
import io.swagger.v3.oas.annotations.media.Schema;
import java.util.Arrays;
import java.util.Objects;
/** The request to get the list of batch operations, created by current user. */
public class BatchOperationRequestDto {

  /** Search for the batch operations that goes exactly before the given sort values. */
  private SortValuesWrapper[] searchBefore;

  /** Search for the batch operations that goes exactly after the given sort values. */
  private SortValuesWrapper[] searchAfter;

  /** Page size. */
  private Integer pageSize;

  public BatchOperationRequestDto() {}

  public BatchOperationRequestDto(
      Integer pageSize, SortValuesWrapper[] searchAfter, SortValuesWrapper[] searchBefore) {
    this.pageSize = pageSize;
    this.searchAfter = searchAfter;
    this.searchBefore = searchBefore;
  }

  @Schema(
      description =
          "Array of two strings: copy/paste of sortValues field from one of the operations.",
      example = "[\"9223372036854775807\", \"1583836503404\"]")
  public SortValuesWrapper[] getSearchBefore() {
    return searchBefore;
  }

  public BatchOperationRequestDto setSearchBefore(SortValuesWrapper[] searchBefore) {
    this.searchBefore = searchBefore;
    return this;
  }

  /** Returns searchBefore converted to raw values for the query layer. */
  public Object[] getSearchBefore(ObjectMapper objectMapper) {
    return SortValuesWrapper.convertSortValues(searchBefore, objectMapper);
  }

  @Schema(
      description =
          "Array of two strings: copy/paste of sortValues field from one of the operations.",
      example = "[\"1583836151645\", \"1583836128180\"]")
  public SortValuesWrapper[] getSearchAfter() {
    return searchAfter;
  }

  public BatchOperationRequestDto setSearchAfter(SortValuesWrapper[] searchAfter) {
    this.searchAfter = searchAfter;
    return this;
  }

  /** Returns searchAfter converted to raw values for the query layer. */
  public Object[] getSearchAfter(ObjectMapper objectMapper) {
    return SortValuesWrapper.convertSortValues(searchAfter, objectMapper);
  }

  public Integer getPageSize() {
    return pageSize;
  }

  public BatchOperationRequestDto setPageSize(Integer pageSize) {
    this.pageSize = pageSize;
    return this;
  }

  @Override
  public int hashCode() {
    // Arrays.hashCode is correct here: element-wise hashing of the wrapper arrays.
    int result = Arrays.hashCode(searchBefore);
    result = 31 * result + Arrays.hashCode(searchAfter);
    result = 31 * result + Objects.hashCode(pageSize);
    return result;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    final BatchOperationRequestDto that = (BatchOperationRequestDto) o;
    // Arrays.equals performs the intended element-wise comparison of the wrapper arrays.
    return Arrays.equals(searchBefore, that.searchBefore)
        && Arrays.equals(searchAfter, that.searchAfter)
        && Objects.equals(pageSize, that.pageSize);
  }
}
```
|
Ian Matthews (born 20 June 1971) is an English musician, best known as the drummer for the rock band Kasabian.
Early career
Matthews' interest in drums was first sparked by his babysitter, a friend of his dad, whose family had a drumkit. He also played piano, as his dad was a pianist, but "realised that I wasn’t interested". He also took drum lessons from a local session drummer who taught him to play in a jazz style with brushes. Matthews played his first gig at his dad's social club when their drummer was ill; he "got £5. From that point I was hooked"
Matthews played gigs with his dad for several years during school and then joined a Bristol drum corps called The Troopers when he was ten; he also played in some school orchestras and in the band of the Avon and Somerset fire brigade, playing military-type material and doing lots of reading. He also did wedding gigs and jazz gigs.
Career
Matthews started his career when a teacher told him "if I wanna get on the scene I needed to make sure to make friends and connect to as many engineers as possible. That’s where you meet the musicians who are doing stuff and where you get a call of people who need a drummer." He got work in a studio run by a friend of a friend, called Big Bonk, where he used to be a session drummer; one group that recorded at that studio was Kasabian, who had some money to spend on a drummer and were recommended Matthews. Before his first session with the band he sprained his ankle and almost cancelled, but continued to do the session in pain; the band were "blown away" and Matthews did some more sessions with the band after that.
Matthews didn't join Kasabian straight away in 2002 as he had other commitments but he was later called up in 2004 and became a full band member in 2005. Prior to Kasabian, Matthews played with a number of noted local Bristol bands, such as K-Passa, CCQ, and Sissi.
When not touring or recording with Kasabian, he often plays small jazz/funk gigs in and around Bristol. He has recorded with Bristol artist James Morton's Porkchop on Don't You Worry 'Bout That album.
He cites Mitch Mitchell, Tony Williams and Buddy Rich as his strongest drumming influences.
In 2015, Matthews joined with Al Murray, Keith Keough, Stuart Warmington and Al Kitching in founding the British Drum Company.
Matthews continues to tour with Kasabian to this day and also does session work, most notably on The Cult's 2022 album Under the Midnight Sun.
Drum kits
Matthews has used various drum kits and cymbals over the years. Currently he is using British Drum Co Drums, Zildjian cymbals Remo Drum Heads, Vic Firth Sticks and Natal percussion.
Trivia
Matthews is left-handed. He broke his left hand when touring with Kasabian in Europe in February 2010 and was replaced by his drum technician Laurie Jenkins for a show and the NME Awards performance of "Vlad the Impaler".
He got his first drum kit when he was four and by the time he was seven, he played his first gig in a pub and was capable of laying "a pretty good waltz, quickstep, foxtrot, and tango".
In addition to his work with Kasabian, he has also recorded drums on the following: Soundisciples - Audio Manifesto (2002), Ilya - They Died for Beauty (2003), Bruce Parry Presents Amazon Tribe: Songs for Survival - track 9 with Skin at Robot Club feat. the Adi Tribe - "Simmer Down" (2008), BBC Earth Unplugged series Earth Files Ep2 "Salmon Strike" and Ep5 "Xmas Special" (2012), soundtrack to I Give It a Year (2013) and on the soundtrack to Fast and Furious 6 (2013).
References
External links
Kasabian.co.uk
Interview with iDrum magazine, issue 6
1971 births
Living people
British male drummers
English rock drummers
Musicians from Bristol
Kasabian members
21st-century British drummers
|
A list of films produced in South Korea in 1998:
External links
1998 in South Korea
1998 in South Korean music
1998 at www.koreanfilm.org
1998
South Korean
1998 in South Korea
|
```html
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<html dir="ltr">
<head>
<title>Text processor</title>
<meta http-equiv="Content-Type" content="text/html; charset=Windows-1252">
<meta name="vs_targetSchema" content="path_to_url">
<xml></xml>
<link rel="stylesheet" type="text/css" href="MSDN.css">
</head>
<body id="bodyID" class="dtBODY">
<div id="nsbanner">
<div id="bannerrow1">
<table class="bannerparthead" cellspacing="0">
<tbody>
<tr id="hdr">
<td class="runninghead">
<p align="left">CS-Script 3.8.2</p>
</td>
<td class="product">
</td>
</tr>
</tbody>
</table>
</div>
<div id="TitleRow">
<h1 class="dtH1" align="left">Text Processor</h1>
</div>
</div>
<div id="nstext">
<h4 class="dtH4">Step-by-step tutorial </h4>
<p>This tutorial will demonstrate how to create a simple "text editor" application (<span style="font-style: italic;">Text
Processor</span>), which uses user entered C# code for text
manipulations. The <a href="Type_sharing_patern.html">"type sharing" pattern</a>
in this tutorial is implemented with passing well-known type between the script
host and the actual script. This tutorial also contains implementation of <span style="font-style: italic;">
Local</span> and <span style="font-style: italic;">Remote</span> <a href="Dynamic_assembly_loading.html">
dynamic assembly loading</a>.</p>
<p>Only the code, which logically belongs to the script hosting implementation will
be discussed here. To accomplish this tutorial you will need to download <a style="font-style: italic;" href="Tutorial/TextProcessor/TextProcessor.zip">
TextProcessor.zip</a> file. Then extract the content of the zip file in the
convenient location.</p>
<p>The host application for this tutorial comes in two flavors: C# script (<span style="font-style: italic;">textprocessor.cs</span>)
and Visual Studio 2003 (VS7.1) project (content of extracted <span style="font-style: italic;">textprocessor</span>
folder). All code snippets in this tutorial are taken from <a href="Tutorial/TextProcessor/textprocessor.cs">
<span style="font-style: italic;">textprocessor.cs</span></a> script,
however they are applicable to the VS7.1 project to the same degree.</p>
<p>This tutorial demonstrates how to:</p>
<ul>
<li>
execute C# script (C# code) from the application
</li>
<li>
pass and retrieve well-known type (String) to and from C# script
</li>
<li>
load/unload script assembly to the application </li>
</ul>
<p>Firstly let's see the <span style="font-style: italic;">Text Processor</span> in
action:<br>
</p>
<ol>
<li>
Open command prompt. Make sure current directory is the directory where content
of <span style="font-style: italic;">TextProcessor.zip</span>
extracted.
</li>
<li>
Start <span style="font-style: italic;">Text Processor</span> by executing the
following command in command prompt:<br>
<div class="syntax">cscs textprocessor </div>
</li>
</ol>
<p>
<br>
</p>
<h4 class="dtH4">
Output </h4>
<p></p>
<p>The script will create and show the following dialog. </p>
<div style="text-align: center;"><img style="width: 577px; height: 406px;" alt="" src="Images/textprocessor.PNG"></div>
<p></p>
<p>The top panel (script panel) contains a C# code (script) template for
implementation of the text manipulation routine. The default C# script inserts
a TAB character at the start of the text in the bottom panel (text panel).</p>
<p>If you press "Process" button it will modify the text this way:</p>
<p style="text-align: center;"><img style="width: 577px; height: 116px;" alt="" src="Images/textprocessor1.PNG"></p>
<p></p>
<p>
Now change the script code to do replacing all space characters with underscore
as following:</p>
<pre class="code">using System;<br><br>public class Script<br>{<br> static public string Process(string text)<br> {<br> return text.Replace(" ", "_");<br> }<br>}</pre>
<p></p>
<p>If you now press "Undo" and then "Process" buttons it will modify text this way:</p>
<p style="text-align: center;"><img style="width: 574px; height: 112px;" alt="" src="Images/textprocessor2.PNG"></p>
<p>
<br>
</p>
<h4 class="dtH4">Code discussion </h4>
<p style="font-style: italic;">Calling static methods</p>
<p>The method ExecuteProcessLocaly() in <span style="font-style: italic;">textprocessor.cs</span>
contains actual implementation of loading and executing the script.
Calling <span style="color: rgb(51, 0, 153);">CSScript.LoadCode</span> is the most
important part of the method. </p>
<pre class="code">void ExecuteProcessLocaly()<br>{<br> try<br> {<br> Assembly scriptAsm = CSScript.LoadCode(textBoxScript.Text, null, true);<br> AsmHelper helper = new AsmHelper(scriptAsm);<br><br> textBoxText.Text=<br> (string)helper.Invoke("Script.Process", textBoxText.Text);<br> }<br> catch (Exception ex)<br> {<br> MessageBox.Show(ex.ToString());<br> }<br>}</pre>
<p></p>
<p>
LoadCode takes the string of C# script code, compiles it into assembly, loads
the assembly to the current <span style="color: rgb(51, 0, 153);">AppDomain</span>
and returns the loaded assembly. Now we can instantiate any public type or call
any public static method implemented in this assembly. You can use either <span style="color: rgb(51, 0, 153);">
Reflection</span> or <span style="color: rgb(51, 0, 153);">AsmHelper</span> class
from the <span style="color: rgb(51, 0, 153);">CSScriptLibrary</span> for this
purpose. This class is just a simple utility class, which simplifies assembly
browsing.
<br>
<br>
Code in the script panel defines type <span style="color: rgb(51, 0, 153);">Script</span>
and its static method <span style="color: rgb(51, 0, 153);">Process(string)</span>.
Thus, you may invoke this method by calling method <span style="color: rgb(51, 51, 255);">
Invoke</span> of the corresponding AsmHelper instance. In our example we
pass content of the text panel for conversion and assign it back when
conversion is complete.</p>
<pre class="code">textBoxText.Text = (string)helper.Invoke("Script.Process", textBoxText.Text);</pre>
<br>
<p style="font-style: italic;">Creating objects</p>
<p>Now let's instantiate a type implemented in a script. The only type in our
script is the class <span style="color: rgb(51, 0, 153);">Script</span>. It can be
instantiated either with <span style="color: rgb(51, 0, 153);">Assembly.CreateInstance()</span>
or <span style="color: rgb(51, 0, 153);">AsmHelper.CreateObject()</span> methods.
Both methods return an instance of <span style="color: rgb(51, 0, 153);">Object</span>
type, which has to be type casted to the actual type you want to use. The
only problem with this is that the type Script is not known by the host
application. In fact it cannot be known because the code for class <span style="color: rgb(51, 0, 153);">
Script</span> did not exist at the time the host application was
written/compiled. This restriction is not applicable for well-known types
(e.g. <span style="color: rgb(51, 0, 153);">String</span>). That is why we
can inherit our Script class from some interface of GAC in order to make it
possible for the host application to understand it. The advantages of using
well-known types are discussed in the <a href="Passing_well-known_type_between_script_and_host.html">"Passing
well-known type..."</a> section.</p>
<p>
Let's implement in our script some class that can be used for sorting
operations (the type must implement <span style="color: rgb(51, 0, 153);">IComparable</span>
interface).</p>
<pre class="code">using System;<br>using System.Windows.Forms;<br><br>public class Script : IComparable<br>{<br> public int CompareTo(object obj)<br> {<br> MessageBox.Show("Doing comparison...");<br> return 0;<br> }<br>}</pre>
<p></p>
<p>The implementation simulates actual comparison by showing the MessageBox.</p>
<p style="text-align: center;"><img style="width: 573px; height: 363px;" alt="" src="Images/textprocessor3.PNG"></p>
<p>
<br>
This is how Script type is instantiated in the event handler for the "Compare"
button. </p>
<pre class="code">Assembly scriptAsm = CSScript.LoadCode(textBoxScript.Text, null , true);<br>AsmHelper helper = new AsmHelper(scriptAsm);<br><br>IComparable comparer = (IComparable)helper.CreateObject("Script" );<br>comparer.CompareTo(textBoxText.Text);</pre>
<br>
<p>Of course the same can be done by using pure Reflection but in this case
readability of the code would suffer dramatically. </p>
<p></p>
<p style="font-style: italic;">
Loading/Unloading the script assembly</p>
<p>Another interesting point to discuss is the way how the compiled script is
loaded. In the <span style="color: rgb(51, 0, 153);">ExecuteProcessLocaly()</span>
method discussed above the script was compiled and loaded into current
AppDomain. The code snippet below is the implementation of the
ExecuteProcessRemotely(). This method is called when the "Process" button is
pressed and the "Unload when done" check box is checked. </p>
<pre class="code">void ExecuteProcessRemotely()<br>{<br> try<br> {<br> string asmFile = CSScript.CompileCode(textBoxScript.Text, null , true); <br> using (AsmHelper helper = new AsmHelper(asmFile, "", true))<br> {<br> textBoxText.Text =<br> (string) helper.Invoke("Script.Process" , textBoxText.Text);<br> }<br> }<br> catch(Exception ex)<br> {<br> MessageBox.Show(ex.ToString());<br> }<br>} </pre>
<p> </p>
<p>In this code AsmHelper loads the compiled script assembly (<span style="color: rgb(51, 0, 153);">asmFile</span>)
to a new temporary AppDomain (remote loading). After execution of
<br>
the "Script.Process" method the instance of AsmHelper is disposed. When<span style="color: rgb(51, 51, 255);">
<span style="color: rgb(51, 0, 153);">AsmHelper</span></span> is being
disposed it also unloads the whole temporary AppDomain with all its
assemblies. </p>
<p>
How AsmHelper will load assembly depends on which constructor was used to
instantiate this type (see <a href="ProgrammingReference.html">CSScriptLibrary
reference</a>):</p>
<p style="margin-left: 40px;"><span style="color: rgb(51, 0, 153);">public
AsmHelper(Assembly asm)</span> - loading to the current AppDomain.</p>
<p style="margin-left: 40px;"><span style="color: rgb(51, 0, 153);">public AsmHelper(string
asmFile, string domainName, bool deletOnExit)</span> - loading to the
temporary AppDomain</p>
<p style="margin-left: 40px;"></p>
<p>
This is done in order to overcome the limitation of the CLR with respect to
loaded assemblies:</p>
<p style="font-weight: bold; margin-left: 40px; font-style: italic;"><span style="font-weight: normal;">Once
the assembly loaded it cannot be unloaded any more.</span><br>
</p>
<p>
Use remote loading carefully as it introduces an important constraint on the
types implemented in your script:</p>
<p style="font-weight: bold; margin-left: 40px; font-style: italic;">
The type, which is to cross AppDomain boundaries must be either serializable or
inherited from MarshalByRefObject.</p>
<p>
Thus, if you repeat the test for creating <span style="color: rgb(51, 0, 153);">IComparable</span>
object with the "Unload when done" check box checked, you will have an
exception because class Script is neither serializable nor <span style="color: rgb(51, 51, 255);">
<span style="color: rgb(51, 0, 153);">MarshalByRefObject</span></span>.
<br>
</p>
<div style="text-align: center;"><img style="width: 482px; height: 134px;" alt="" src="Images/textprocessor4.PNG"></div>
<p><br>
</p>
<h4 class="dtH4">
Summary </h4>
<p>In this tutorial almost all possible execution scenarios were implemented just
to demonstrate all available implementation options. However it is unlikely you
would need to implement all of them in real development. The next tutorial (<a href="Image_processor.html">Image
Processor</a>) is an example of more practical/simple approach where the
host application is implemented according "<a href="Script_hosting_guideline_.html">Script
hosting guideline</a>".
<br>
</p>
<p></p>
<h4 class="dtH4">See Also
</h4>
<p><a href="Tutorial.html">CS-Script tutorials</a> | <a href="Type_sharing_patern.html">
"Type sharing" pattern</a> | <a href="Dynamic_assembly_loading.html">Dynamic
assembly loading </a></p>
<span style="text-decoration: underline;">
<br>
<br>
</span>
<object style="display: none;" type="application/x-oleobject" classid="clsid:1e2a7bd0-dab9-11d0-b93a-00c04fc99f9e" viewastext="true">
<param name="Keyword" value="Script Hosting tutorial">
</object>
</div>
</body>
</html>
```
|
In Slavic mythology (in particular Ukrainian, Czech and Slovak), the Raróg or Raroh () is a fire demon, often depicted as a fiery falcon.
According to Czech folklore, a raroh can hatch from an egg that was incubated on a stove for nine days and nights, and can appear either as a fiery falcon or a dragon. In Polish folklore, the rarog is a tiny bird that can be held in a pocket, and can bring people happiness.
A caldera on Jupiter's moon Io was named Rarog Patera, a massive eruption from which was recorded by the W. M. Keck Observatory and Japan's HISAKI (SPRINT-A) spacecraft on August 15, 2013.
The northern cardinal-shaped logo of the Polish video game company CD Projekt is called the Raróg.
See also
Hierofalco
Bennu, Egyptian firebird
Firebird (Slavic folklore) (Жар-Птица)
Huma (mythology), Persian firebird
Phoenix (mythology), sacred firebird found in the mythologies of many cultures
Simurgh
Slavic mythology
Svarog
References
Slavic legendary creatures
Legendary birds
|
Merwin Hancock Silverthorn (September 22, 1896 – August 14, 1985) was a highly decorated Lieutenant General in the United States Marine Corps, Navy Cross recipient.
He was an expert in amphibious warfare and taught courses at Marine Corps training facilities in the 1930s. He served in numerous conflicts including World War I as field commander and during World War II as a senior staff officer. Following World War II, Silverthorn served in many important capacities like Assistant Commandant of the Marine Corps, director of the Marine Corps Reserve or commanding general of the Marine Corps Recruit Depot Parris Island.
Early career
Silverthorn was born on 22 September 1896 in Minneapolis, Minnesota, as the son of Civil War veteran, Asahel C. Silverthorn (1844–1940) and his wife Emma C. Silverthorn (1861–1921). He studied at University of Minnesota, but left the university before graduation and enlisted in the Minnesota National Guard. Serving on Mexican Border during the Pancho Villa Expedition, Silverthorn subsequently enlisted in the Marine Corps as a private on 27 April 1917.
World War I
After basic training he was assigned to the newly created 5th Marine Regiment. Silverthorn sailed to France as a member of 16th Company, 3rd Battalion, 5th Marines. He was promoted to sergeant and transferred to the 45th Company of the same Battalion and participated in the Battle of Belleau Wood.
Silverthorn distinguished himself and received a battlefield commission as a second lieutenant on 9 June 1918. To his new rank, he was appointed platoon leader in the 20th Company, 3rd Battalion. Silverthorn subsequently led his platoon during the battle and was decorated with the Silver Star for gallantry in action.
He commanded his platoon during the Battle of Château-Thierry, Battle of Soissons and Second Battle of the Marne. Silverthorn was ordered to Army school for further training during the August 1918.
He returned in the middle of September and subsequently commanded his platoon during the Battle of Blanc Mont Ridge. Silverthorn was decorated with his second Silver Star for his leadership during the beginning of the battle. During the combats near the village of St. Etienne on 4 October 1918, he carried an important message, at a critical time, to his battalion commander under heavy machine-gun and shell fire, exhibiting extraordinary heroism and disregard for his personal safety. After being wounded, he then continued the attack. For his actions, Silverthorn was decorated with the Distinguished Service Cross and later with the Navy Cross. He was also decorated with the French Croix de guerre 1914–1918 with Gilt Star and promoted to the rank of first lieutenant.
Interwar period
After the war, Silverthorn remained in Europe and participated in the Allied occupation of the Rhineland and 'Pershing's Own' E Company, 3rd Army Composite Regiment until September 1919. He subsequently returned to the United States and was assigned to the Marine Barracks at Mare Island, California. Silverthorn served there until April 1921, when he was transferred to the Marine Barracks at Quantico. He was subsequently promoted to the rank of captain on 1 July 1921.
At the beginning of May 1923, Silverthorn was assigned to the 1st Brigade of Marines and sent to Haiti in March 1924, where he served as district commander within Gendarmerie d'Haïti at Aux Cayes and later as chief of police at Port-au-Prince. In this capacity, Silverthorn was responsible for the training and organizing of police units. He returned to the States in May 1926 and was appointed assistant quartermaster at Quantico Base, Virginia.
Silverthorn was transferred to the Marine Barracks at Guam in April 1930 and appointed to the same role as at Quantico.
World War II
At the time of the Japanese Attack on Pearl Harbor, Silverthorn served still in Washington, D.C., at War Plans Section – Operations Department, Navy Department under Admiral Ernest King. He was promoted to the rank of colonel in January 1942 and transferred as Naval member to the Joint U.S. Strategic Committee, Joint Chiefs of Staff. Silverthorn remained in this capacity until June 1943, when he was assigned to the Army Navy Staff College as Chief of the Amphibious Warfare Section. For his service in this capacity, he was decorated with the Army Commendation Medal at the end of his assignment in January 1944.
Silverthorn was then finally transferred to the Pacific theater and appointed chief of staff of I Marine Amphibious Corps under Lieutenant General Roy Geiger. He relieved Brigadier General Oscar R. Cauldwell in this capacity, while stationed at Guadalcanal. The I Marine Amphibious Corps was redesignated III Amphibious Corps in April 1944 and started to prepare for Mariana and Palau Islands campaign.
At the end of July 1944, Silverthorn participated in the Recapture of Guam and made several trips to the front line positions to secure necessary information and to give assistance and advice to organization commanders. For the planning and staff work during the Guam operation, Silverthorn received the Legion of Merit with Combat "V".
The next important battle came in the middle of September 1944, when III Marine Corps launched the Battle of Peleliu. Silverthorn distinguished himself again during the planning and execution of the campaign and was promoted to the rank of brigadier general in December 1944. He later participated in the Battle of Okinawa in April 1945 and received the Navy Distinguished Service Medal for his service during Peleliu and Okinawa campaign.
Later career
Silverthorn remained on active service after the war and was appointed to the capacity of chief of staff, Fleet Marine Force, Pacific. He served in this capacity until September 1946, when he was reassigned as commander of the Troop Training Unit, Training Command, Amphibious Forces, Atlantic Fleet, located at Naval Amphibious Base Little Creek, Virginia.
During September 1947, Silverthorn was transferred to Washington, D.C., where he was appointed a Marine Corps liaison officer with the Office of the Chief of Naval Operations under Fleet admiral Chester W. Nimitz. He remained in this capacity until May 1949, when he was promoted to the rank of major general and appointed director of the Marine Corps Reserve within Headquarters Marine Corps. Under his command, Marine Corps Reserve grew from 38,403 to 123,000 reservists.
A great honor and also responsibility came in July 1950, when Silverthorn was appointed Assistant Commandant of the Marine Corps, succeeding Major General Oliver P. Smith in this capacity. For his new duties, he was promoted to the temporary rank of lieutenant general on 22 February 1951. Final duties came in February 1952, when he was appointed commanding general of the Marine Corps Recruit Depot Parris Island, South Carolina.
Retirement
He was placed on the retired list on 30 June 1954 and was also advanced to the rank of lieutenant general for having been specially commended in combat.
After his retirement from the Marine Corps, Silverthorn served as an assistant director of the Office of Defense Mobilization within Executive Office of the President from July 1956. He resigned from this capacity during September 1957. He was later active in the International Christian Leadership and served as its vice president.
Lieutenant General Merwin Hancock Silverthorn died of cancer on 14 August 1985, in Bethesda Naval Hospital, Maryland and is buried in Arlington National Cemetery. Each of his sons was a career Marine officer:
Colonel Merwin H. Silverthorn Jr. (1920–2008)
Lieutenant Colonel Russell Lane Silverthorn (1922–2013)
Captain Robert Sterner Silverthorn (1928–2013)
Decorations
References
1896 births
1985 deaths
Military personnel from Minneapolis
United States Marine Corps generals
United States Marine Corps World War II generals
United States Marine Corps personnel of World War I
Assistant Commandants of the United States Marine Corps
Naval War College alumni
Recipients of the Navy Cross (United States)
Recipients of the Distinguished Service Cross (United States)
Recipients of the Navy Distinguished Service Medal
Recipients of the Legion of Merit
Recipients of the Silver Star
Recipients of the Croix de Guerre 1914–1918 (France)
Burials at Arlington National Cemetery
American military personnel of the Banana Wars
|
William Egerton Perdue (20 June 1850 – 17 January 1933) was a Canadian lawyer and judge. He was Chief Justice of Manitoba from 1918 to 1929.
References
External links
Memorable Manitobans: William Egerton Perdue (1850-1933)
People from Brampton
1933 deaths
University of Toronto alumni
Lawyers in Ontario
Lawyers in Manitoba
Judges in Manitoba
|
James Patrick Slattery Matthews (4 June 1880 – 19 May 1940) was an Australian rules footballer who played with Carlton in the Victorian Football League (VFL).
Notes
External links
Jim Matthews's profile at Blueseum
1880 births
1940 deaths
Australian rules footballers from Victoria (state)
Carlton Football Club players
|
```javascript
Data type comparison in `switch` statements
Truthiness
Infinity
Prototype methods
Detect online connection
```
|
```javascript
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Spreading copies the source's own enumerable properties into the literal.
var x = {a: 1};
var y = { ...x};
assertEquals(x, y);
// Empty objects and non-string primitives contribute nothing; strings expose
// their characters as own indexed properties; arrays spread by index.
assertEquals({}, y = { ...{} } );
assertEquals({}, y = { ...undefined });
assertEquals({}, y = { ...null });
assertEquals({}, y = { ...1 });
assertEquals({0: 'f', 1: 'o', 2: 'o'}, y = { ...'foo' });
assertEquals({0: 0, 1: 1}, y = { ...[0, 1] });
assertEquals({}, { ...new Proxy({}, {}) });
// The last definition of a property wins, whether literal or spread.
assertEquals({a: 2}, y = { ...x, a: 2 });
assertEquals({a: 1, b: 1}, y = { ...x, b: 1 });
assertEquals({a: 1}, y = { a: 2, ...x });
assertEquals({a: 1, b: 1}, y = { a:2, ...x, b: 1 });
assertEquals({a: 3}, y = { a: 2, ...x, a: 3 });
var z = { b: 1}
assertEquals({a: 1, b: 1}, y = { ...x, ...z });
assertEquals({a: 1, b: 1}, y = { a: 2, ...x, ...z });
assertEquals({a: 1, b: 1}, y = { b: 2, ...z, ...x });
assertEquals({a: 1, b: 1}, y = { a: 1, ...x, b: 2, ...z });
assertEquals({a: 1, b: 2}, y = { a: 1, ...x, ...z, b: 2 });
assertEquals({a: 2, b: 2}, y = { ...x, ...z, a:2, b: 2 });
// Non-enumerable properties are not copied.
var x = {}
Object.defineProperty(x, 'a', {
  enumerable: false,
  configurable: false,
  writable: false,
  value: 1
});
assertEquals({}, { ...x });
// Copied properties become plain data properties on the target: enumerable,
// configurable and writable even when the source property was locked down.
var x = {}
Object.defineProperty(x, 'a', {
  enumerable: true,
  configurable: false,
  writable: false,
  value: 1
});
var y = { ...x };
var prop = Object.getOwnPropertyDescriptor(y, 'a');
assertEquals(prop.value, 1);
assertTrue(prop.enumerable);
assertTrue(prop.configurable);
assertTrue(prop.writable);
// Inherited properties are not copied, only own ones.
var x = { __proto__: z }
assertEquals({}, { ...x });
// Getters on the source are invoked; setters on the source are not.
var x = {
  get a() { return 1; },
  set a(_) { assertUnreachable("setter called"); },
};
assertEquals({ a: 1 }, y = { ...x });
// Methods and generator methods are copied as ordinary properties.
var x = {
  method() { return 1; },
};
assertEquals(x, y = { ...x });
var x = {
  *gen() { return {value: 1, done: true} ; },
};
assertEquals(x, y = { ...x });
// Exceptions raised while reading the source propagate out of the literal.
var x = {
  get a() { throw new Error(); },
};
assertThrows(() => { y = { ...x } });
var p = new Proxy({}, {
  ownKeys() { throw new Error(); }
});
assertThrows(() => { y = { ...p } });
// The ownKeys trap must report a valid key so the get trap is actually
// reached and its exception observed. The previous `ownKeys() { [1]; }`
// returned undefined and threw before the trap under test could run.
var p = new Proxy({}, {
  ownKeys() { return ['a']; },
  getOwnPropertyDescriptor() { return { enumerable: true, configurable: true }; },
  get() { throw new Error(); }
});
assertThrows(() => { y = { ...p } });
// Same fix: report a key so getOwnPropertyDescriptor is consulted and throws.
var p = new Proxy({}, {
  ownKeys() { return ['a']; },
  getOwnPropertyDescriptor() { throw new Error(); }
});
assertThrows(() => { y = { ...p } });
// A well-behaved proxy is spread just like a plain object.
var p = new Proxy(z, {
  ownKeys() { return Object.keys(z); },
  get(_, prop) { return z[prop]; },
  getOwnPropertyDescriptor(_, prop) {
    return Object.getOwnPropertyDescriptor(z, prop);
  },
});
assertEquals(z, y = { ...p });
// Accessors defined in the literal before the spread are overwritten by the
// spread's data properties; they are never invoked.
var x = { a:1 };
assertEquals(x, y = { set a(_) { throw new Error(); }, ...x });
var x = { a:1 };
assertEquals(x, y = { get a() { throw new Error(); }, ...x });
```
|
```java
/*
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
*
* path_to_url
*
* Unless required by applicable law or agreed to in writing, software
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package org.apache.carbondata.examples.sdk;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.metadata.datatype.Field;
import org.apache.carbondata.sdk.file.CarbonSchemaReader;
import org.apache.carbondata.sdk.file.Schema;
import org.apache.hadoop.conf.Configuration;
import org.apache.log4j.Logger;
import static org.apache.hadoop.fs.s3a.Constants.*;
/**
 * Example for testing carbonSchemaReader on S3
 */
public class SDKS3SchemaReadExample {

  /**
   * Reads the schema of a carbon table stored on S3 twice — once via the
   * three-argument {@code readSchema} overload (boolean flag presumably
   * toggles schema validation; confirm against CarbonSchemaReader docs) and
   * once via the plain overload — printing field information for both.
   *
   * @param args access-key, secret-key, s3-endpoint and an optional table path
   * @throws Exception if the schema cannot be read from the given location
   */
  public static void main(String[] args) throws Exception {
    Logger logger = LogServiceFactory.getLogService(SDKS3SchemaReadExample.class.getName());

    // The three credential arguments are mandatory; the table path is optional.
    int parameterLength = 3;
    if (args == null || args.length < parameterLength) {
      logger.error("Usage: java CarbonS3Example: <access-key> <secret-key> "
          + "<s3-endpoint> [table-path-on-s3]");
      // Exit non-zero so scripts can detect the usage error (previously
      // exited with 0, signalling success on a bad invocation).
      System.exit(1);
    }

    // Default demo location, overridden by the optional fourth argument.
    String path = "s3a://obs-xubo4/sdkdata/test";
    if (args.length > parameterLength) {
      path = args[3];
    }

    // Hand the S3 credentials to the Hadoop filesystem layer.
    Configuration configuration = new Configuration();
    configuration.set(ACCESS_KEY, args[0]);
    configuration.set(SECRET_KEY, args[1]);
    configuration.set(ENDPOINT, args[2]);

    // method 1 to read schema
    Schema schema = CarbonSchemaReader.readSchema(path, true, configuration);
    System.out.println("Schema length is " + schema.getFieldsLength());
    for (Field field : schema.getFields()) {
      System.out.println(field + "\t");
    }

    // method 2 to read schema
    Schema schema2 = CarbonSchemaReader.readSchema(path, configuration);
    System.out.println("Schema length is " + schema2.getFieldsLength());
    for (Field field : schema2.getFields()) {
      System.out.println(field + "\t");
    }
  }
}
```
|
Marc Saikali (born 1965) is a French-Lebanese journalist. He was the Director of the international news television network France 24 from 2012 to 2021.
Biography
Saikali comes from a Lebanese family. He studied journalism and ethnology at the University of Bordeaux.
Career
Saikali started his career in 1988 as a television journalist for France 3 in Normandy, followed by various postings around France. Saikali has held many management positions across the French news media landscape since the early 2000s. From 2003 to 2006, he was the Editor in Chief of France 3's Foreign Desk.
From 2006 to 2008, he led editorial and training at Medi1 Sat. From 2008 to 2010, he was director of France 3's regional activities in Corsica, Via Stella. From 2010 to 2012 he was Director of News at France 3.
From 2012 to 2021, he was the Director of France 24, based in Paris. He oversaw its global expansion and the launch of a Spanish-language channel.
Personal life
He is fluent in Arabic, French and English. He is married and he has three children.
References
See also
Marc Saikali, Director of France 24 on YouTube.
Marc Saikali on YouTube.
1965 births
Living people
Lebanese emigrants to France
Lebanese journalists
French journalists
University of Bordeaux alumni
|
```smalltalk
using System;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Runtime.Serialization;
using AppKit;
using ObjCRuntime;
// Linker test fixture: checks that a profiler log file ("output.mlpd" —
// presumably Mono log-profiler output; confirm against the test harness setup)
// was produced next to the app, and reports the result via the Test harness.
namespace Xamarin.Mac.Linker.Test {
class SystemMono {
// Entry point: initializes AppKit, checks for the expected output file,
// writes a SUCCESS/FAIL line, then terminates the test run.
static void Main (string [] args)
{
NSApplication.Init ();
// Presence of the file is the sole pass criterion for this test.
if (File.Exists ("output.mlpd")) {
Test.Log.WriteLine ("SUCCESS: log output exists");
} else {
Test.Log.WriteLine ("FAIL: could not find 'output.mlpd'");
}
Test.Terminate ();
}
}
}
```
|
```xml
/*
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
*
* path_to_url
*
* Unless required by applicable law or agreed to in writing, software
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
const fs = require("fs");
const github = require("./shared/githubUtils");
const {
REPO_OWNER,
REPO,
PATH_TO_METRICS_CSV,
BOT_NAME,
} = require("./shared/constants");
// Statistics extracted from the GitHub API for a single merged pull request.
interface PrStats {
  // True when the author has no earlier PR within the analyzed window
  // (the last year of PRs — see checkIfFirstTimeContributor).
  firstTimeContribution: boolean;
  // Milliseconds from PR creation to the first non-author review/comment.
  timeToFirstReview: number;
  // Milliseconds from PR creation to merge.
  timeFromCreationToCompletion: number;
  // Reviewer login -> milliseconds from their earliest "R: @reviewer"
  // mention to the merge of the PR.
  timeFromReviewersMentionedToCompletion: { [key: string]: number };
  // When the PR was merged.
  mergedDate: Date;
}
// Metrics aggregated over a bucket of PrStats (averages are in milliseconds,
// matching the getTime() differences stored in PrStats).
interface AggregatedMetrics {
  // Number of PRs merged in the bucket.
  prsCompleted: number;
  // Subset of prsCompleted authored by first-time contributors.
  prsCompletedByNewContributors: number;
  averageTimeToFirstReview: number;
  // Same average restricted to first-time contributors (0 if there are none).
  averageTimeToNewContributorFirstReview: number;
  averageTimeCreationToCompletion: number;
  // Distinct reviewers tagged via "R: @..." across the bucket.
  numUsersPerformingReviews: number;
  numCommittersPerformingReviews: number;
  numNonCommittersPerformingReviews: number;
  // Gini inequality index over per-committer review counts (0 = equal).
  giniIndexCommittersPerformingReviews: number;
  averageTimeFromCommitterAssignmentToPrMerge: number;
}
// Fetches all closed PRs created within the last year, paging backwards
// through the GitHub API 100 at a time. Note: "closed" includes PRs that
// were closed without being merged; callers are expected to filter.
// Each failed page fetch is retried up to 3 times (no backoff) before the
// error is rethrown.
async function getCompletedPullsFromLastYear(): Promise<any[]> {
  const cutoffDate = new Date();
  cutoffDate.setFullYear(new Date().getFullYear() - 1);
  console.log(`Getting PRs newer than ${cutoffDate}`);
  const githubClient = github.getGitHubClient();
  // First page fetched up front so the loop condition can inspect its data.
  let result = await githubClient.rest.pulls.list({
    owner: REPO_OWNER,
    repo: REPO,
    state: "closed",
    per_page: 100, // max allowed
  });
  let page = 2;
  let retries = 0;
  let pulls = result.data;
  // Pages are returned newest-first, so stop once the oldest PR on the most
  // recent page falls before the cutoff.
  while (
    result.data.length > 0 &&
    new Date(result.data[result.data.length - 1].created_at) > cutoffDate
  ) {
    // Only log on the first attempt at a page, not on retries.
    if (retries === 0) {
      console.log(`Getting PRs, page: ${page}`);
      console.log(
        `Current oldest PR = ${new Date(
          result.data[result.data.length - 1].created_at
        )}`
      );
    }
    try {
      result = await githubClient.rest.pulls.list({
        owner: REPO_OWNER,
        repo: REPO,
        state: "closed",
        per_page: 100, // max allowed
        page: page,
      });
      pulls = pulls.concat(result.data);
      page++;
      retries = 0;
    } catch (err) {
      // Give up on this page (and the whole fetch) after 3 failed retries.
      if (retries >= 3) {
        throw err;
      }
      retries++;
    }
  }
  console.log("Got all PRs, moving to the processing stage");
  return pulls;
}
// A contributor counts as "first time" when the analyzed window contains no
// PR of theirs created before this one. Only the last year of PRs is
// consulted: checking the full repo history is expensive, and someone whose
// last contribution was over a year ago is effectively a newcomer anyway.
function checkIfFirstTimeContributor(
  pull: any,
  pullsFromLastYear: any[]
): boolean {
  const author = pull.user.login;
  for (const earlier of pullsFromLastYear) {
    // ISO-8601 created_at strings compare correctly as plain strings.
    if (earlier.user.login === author && earlier.created_at < pull.created_at) {
      return false;
    }
  }
  return true;
}
// Get time between pr creation and the first comment, approval, or merge that isn't done by:
// (a) the author
// (b) automated tooling
// Returns milliseconds. The merge time acts as the upper bound when no
// earlier review/comment qualifies.
function getTimeToFirstReview(
  pull: any,
  comments: any[],
  reviews: any[],
  creationDate: Date,
  mergedDate: Date
): number {
  // Baseline: if nothing qualifies, "first review" is the merge itself.
  let timeToFirstReview = mergedDate.getTime() - creationDate.getTime();
  // NOTE(review): reviews only exclude the author, while comments below also
  // exclude BOT_NAME — confirm whether bot-submitted reviews should be
  // filtered here too.
  const firstReviewed = reviews.find(
    (review) => review.user.login != pull.user.login
  );
  if (firstReviewed) {
    const firstReviewDate = new Date(firstReviewed.submitted_at);
    timeToFirstReview = Math.min(
      timeToFirstReview,
      firstReviewDate.getTime() - creationDate.getTime()
    );
  }
  // Take the earliest qualifying comment, skipping the author and the bot.
  for (const comment of comments) {
    if (
      comment.user.login != pull.user.login &&
      comment.user.login != BOT_NAME
    ) {
      let commentTime = new Date(comment.created_at);
      timeToFirstReview = Math.min(
        timeToFirstReview,
        commentTime.getTime() - creationDate.getTime()
      );
    }
  }
  return timeToFirstReview;
}
// Takes a R: @reviewer comment and extracts all reviewers tagged
// Returns an empty list if no reviewer can be extracted
function extractReviewersTaggedFromCommentBody(body: string): string[] {
  if (!body) {
    return [];
  }
  // Normalize case so "R: @User" and "r: @user" are treated the same.
  body = body.toLowerCase();
  if (body.indexOf("r: @") < 0) {
    return [];
  }
  let usernames: string[] = [];
  const reviewerStrings = body.split(" @");
  // Start at index 1 since we don't care about anything before the first @
  for (let i = 1; i < reviewerStrings.length; i++) {
    const curBlock = reviewerStrings[i];
    let usernameIndex = 0;
    let curUsername = "";
    // GitHub usernames are alphanumeric and may contain hyphens; the previous
    // character class ([0-9a-z]) truncated names like "john-doe" to "john".
    while (
      usernameIndex < curBlock.length &&
      curBlock[usernameIndex].match(/^[0-9a-z-]+$/)
    ) {
      curUsername += curBlock[usernameIndex];
      usernameIndex += 1;
    }
    // Filter out username from PR template
    if (curUsername && curUsername != "username") {
      usernames.push(curUsername);
    }
  }
  return usernames;
}
// Returns a dictionary mapping reviewers to the amount of time from their first comment to pr completion.
// Scans issue comments, review comments, and the PR body itself (pushed onto
// the list, since the description can also tag reviewers with "R: @...").
function getTimeFromReviewerMentionedToCompletion(
  pull: any,
  comments: any[],
  reviewComments: any[],
  mergedDate: Date
): { [key: string]: number } {
  // concat returns a new array, so the caller's arrays are not mutated.
  comments = comments.concat(reviewComments);
  comments.push(pull);
  let timeToCompletionPerReviewer = {};
  for (const comment of comments) {
    const reviewersTagged = extractReviewersTaggedFromCommentBody(comment.body);
    const commentCreationDate = new Date(comment.created_at);
    const timeToCompletion =
      mergedDate.getTime() - commentCreationDate.getTime();
    for (const reviewer of reviewersTagged) {
      if (reviewer in timeToCompletionPerReviewer) {
        // Math.max keeps the largest duration, i.e. the EARLIEST mention of
        // this reviewer, when they are tagged more than once.
        timeToCompletionPerReviewer[reviewer] = Math.max(
          timeToCompletion,
          timeToCompletionPerReviewer[reviewer]
        );
      } else {
        timeToCompletionPerReviewer[reviewer] = timeToCompletion;
      }
    }
  }
  return timeToCompletionPerReviewer;
}
// Builds the PrStats record for one pull request by fetching its reviews and
// comments from the GitHub API. Assumes pull.merged_at is set (i.e. callers
// pass only merged PRs) — TODO confirm upstream filtering.
async function extractPrStats(
  pull: any,
  pullsFromLastYear: any[]
): Promise<PrStats> {
  const githubClient = github.getGitHubClient();
  const creationDate = new Date(pull.created_at);
  const mergedDate = new Date(pull.merged_at);
  const reviews = (
    await githubClient.rest.pulls.listReviews({
      owner: REPO_OWNER,
      repo: REPO,
      pull_number: pull.number,
    })
  ).data;
  // GitHub has a concept of review comments (must be part of a review) and issue comments on a repo, so we need to look at both
  const comments = (
    await githubClient.rest.issues.listComments({
      owner: REPO_OWNER,
      repo: REPO,
      issue_number: pull.number,
    })
  ).data;
  const reviewComments = (
    await githubClient.rest.pulls.listReviewComments({
      owner: REPO_OWNER,
      repo: REPO,
      pull_number: pull.number,
    })
  ).data;
  // Assemble the per-PR statistics from the fetched data.
  const prStats: PrStats = {
    firstTimeContribution: checkIfFirstTimeContributor(pull, pullsFromLastYear),
    timeToFirstReview: getTimeToFirstReview(
      pull,
      comments,
      reviews,
      creationDate,
      mergedDate
    ),
    timeFromCreationToCompletion: mergedDate.getTime() - creationDate.getTime(),
    timeFromReviewersMentionedToCompletion:
      getTimeFromReviewerMentionedToCompletion(
        pull,
        comments,
        reviewComments,
        mergedDate
      ),
    mergedDate: mergedDate,
  };
  return prStats;
}
// Walks back from bucketEnd in 7-day steps until the bucket start is at or
// before the PR's merge date; returns that bucket-start timestamp in ms.
function getMetricBucketStartDate(pullStat: PrStats, bucketEnd: Date): number {
  // Copy the boundary first: Date#setDate mutates in place, and the previous
  // code aliased the caller's Date, silently rewinding it as a side effect.
  const bucketStart = new Date(bucketEnd.getTime());
  while (bucketStart.getTime() > pullStat.mergedDate.getTime()) {
    bucketStart.setDate(bucketStart.getDate() - 7);
  }
  return bucketStart.getTime();
}
// Collects the distinct reviewer logins tagged across all given PR stats,
// in first-seen order.
function distinctReviewers(pullStats: PrStats[]): string[] {
  const seen: Set<string> = new Set();
  pullStats.forEach((stat) => {
    Object.keys(stat.timeFromReviewersMentionedToCompletion).forEach((name) =>
      seen.add(name)
    );
  });
  return [...seen];
}
// Filters the given reviewer logins down to those who are committers,
// querying github.checkIfCommitter for each one. Lookups run sequentially —
// presumably to avoid hammering the API; confirm before parallelizing.
async function committersFromReviewers(users: string[]): Promise<string[]> {
  let committers: string[] = [];
  for (const user of users) {
    if (await github.checkIfCommitter(user)) {
      committers.push(user);
    }
  }
  return committers;
}
// Averages, over every (PR, committer) tagging, the milliseconds from the
// committer's mention to the PR's merge. Returns 0 when there are no
// committers or no committer was ever tagged.
function averageTimeFromCommitterAssignmentToPrMerge(
  pullStats: PrStats[],
  committers: string[]
): number {
  if (committers.length === 0) {
    return 0;
  }
  let reviewCount = 0;
  let timeSum = 0;
  for (const stat of pullStats) {
    const taggedReviewers = Object.keys(
      stat.timeFromReviewersMentionedToCompletion
    );
    for (const reviewer of taggedReviewers) {
      if (committers.includes(reviewer)) {
        reviewCount += 1;
        timeSum += stat.timeFromReviewersMentionedToCompletion[reviewer];
      }
    }
  }
  return reviewCount === 0 ? 0 : timeSum / reviewCount;
}
// Calculates a gini index of inequality for reviews.
// 0 is perfectly equally distributed, 1 is inequally distributed (with 1 person having all reviews)
function getGiniIndexForCommitterReviews(
  pullStats: PrStats[],
  committers: string[]
) {
  // Count how many PRs each committer was tagged on.
  let reviewsPerCommitter: { [key: string]: number } = {};
  for (const pullStat of pullStats) {
    for (const reviewer of Object.keys(
      pullStat.timeFromReviewersMentionedToCompletion
    )) {
      if (committers.indexOf(reviewer) > -1) {
        if (reviewer in reviewsPerCommitter) {
          reviewsPerCommitter[reviewer]++;
        } else {
          reviewsPerCommitter[reviewer] = 1;
        }
      }
    }
  }
  let reviewCounts = Object.values(reviewsPerCommitter);
  // Sort numerically ascending: the default Array.prototype.sort compares
  // elements as strings (e.g. [10, 2] stays [10, 2]), which corrupted the
  // ordering the Gini formula below depends on.
  reviewCounts.sort((a, b) => a - b);
  const n = reviewCounts.length;
  // With no committer reviews the formula divides by zero (NaN); report
  // perfect equality instead.
  if (n === 0) {
    return 0;
  }
  let giniNumerator = 0;
  let giniDenominator = 0;
  for (let i = 1; i <= reviewCounts.length; i++) {
    let yi = reviewCounts[i - 1];
    giniNumerator += (n + 1 - i) * yi;
    giniDenominator += yi;
  }
  // G = (1/n) * (n + 1 - 2 * sum((n+1-i)*y_i) / sum(y_i)), y ascending.
  return (1 / n) * (n + 1 - (2 * giniNumerator) / giniDenominator);
}
/**
 * Aggregates the per-PR stats of one bucket into summary metrics.
 * Assumes `pullStats` is non-empty (buckets are only created for merged
 * PRs, so the averages below never divide by zero).
 *
 * @param pullStats Stats for every PR whose merge date fell in the bucket.
 * @returns The aggregated metrics for the bucket.
 */
async function aggregateStatsForBucket(
  pullStats: PrStats[]
): Promise<AggregatedMetrics> {
  // Arithmetic mean of a non-empty list of numbers.
  const mean = (values: number[]): number =>
    values.reduce((sum, value) => sum + value, 0) / values.length;

  const reviewers = distinctReviewers(pullStats);
  const committers = await committersFromReviewers(reviewers);
  const firstTimePrs = pullStats.filter(
    (pullStat) => pullStat.firstTimeContribution
  );
  const averageTimeToNewContributorFirstReview =
    firstTimePrs.length > 0
      ? mean(firstTimePrs.map((p) => p.timeToFirstReview))
      : 0;
  return {
    prsCompleted: pullStats.length,
    prsCompletedByNewContributors: firstTimePrs.length,
    averageTimeToFirstReview: mean(pullStats.map((p) => p.timeToFirstReview)),
    averageTimeToNewContributorFirstReview,
    averageTimeCreationToCompletion: mean(
      pullStats.map((p) => p.timeFromCreationToCompletion)
    ),
    numUsersPerformingReviews: reviewers.length,
    numCommittersPerformingReviews: committers.length,
    numNonCommittersPerformingReviews: reviewers.length - committers.length,
    giniIndexCommittersPerformingReviews: getGiniIndexForCommitterReviews(
      pullStats,
      committers
    ),
    averageTimeFromCommitterAssignmentToPrMerge:
      averageTimeFromCommitterAssignmentToPrMerge(pullStats, committers),
  };
}
/**
 * Converts a duration in milliseconds to whole minutes, rounding down.
 *
 * @param milliseconds Duration in milliseconds.
 * @returns The duration in complete minutes.
 */
function convertMsToRoundedMinutes(milliseconds: number): number {
  const MS_PER_MINUTE = 60_000;
  return Math.floor(milliseconds / MS_PER_MINUTE);
}
/**
 * Prints the aggregated metrics of every bucket to the console and writes
 * the same data as CSV to PATH_TO_METRICS_CSV (one row per bucket).
 *
 * @param statBuckets Per-bucket PR stats, keyed by the bucket-start time
 *   in epoch milliseconds.
 */
async function reportMetrics(statBuckets: { [key: number]: PrStats[] }) {
  console.log("---------------------------------");
  console.log("PR Stats");
  console.log("---------------------------------");
  let csvOutput = "";
  // CSV header row; the column order must match the per-bucket appends below.
  csvOutput +=
    "Bucket start (bucketed by merge time),PRs Completed,PRs completed by first time contributors,Average time in minutes to first review,Average time in minutes to first review for new contributors,Average time in minutes from PR creation to completion,Total number of reviewers,Total number of committers performing reviews,Total number of non-committers performing reviews,Gini index (fairness) of committers performing reviews,Average time in minutes from committer assignment to PR merge";
  // Object.keys returns the bucket-start timestamps as strings.
  const startDates = Object.keys(statBuckets);
  for (let i = 0; i < startDates.length; i++) {
    let startDate = startDates[i];
    let aggregatedStats = await aggregateStatsForBucket(statBuckets[startDate]);
    console.log();
    const bucketStart = new Date(parseInt(startDate));
    console.log("Bucket start:", bucketStart);
    csvOutput += `\n${bucketStart.toDateString()}`;
    console.log("PRs completed:", aggregatedStats.prsCompleted);
    csvOutput += `,${aggregatedStats.prsCompleted}`;
    console.log(
      "PRs completed by first time contributors:",
      aggregatedStats.prsCompletedByNewContributors
    );
    csvOutput += `,${aggregatedStats.prsCompletedByNewContributors}`;
    // Durations are stored in ms; report them in whole minutes.
    console.log(
      "Average time in minutes to first review:",
      convertMsToRoundedMinutes(aggregatedStats.averageTimeToFirstReview)
    );
    csvOutput += `,${convertMsToRoundedMinutes(
      aggregatedStats.averageTimeToFirstReview
    )}`;
    console.log(
      "Average time in minutes to first review for new contributors:",
      convertMsToRoundedMinutes(
        aggregatedStats.averageTimeToNewContributorFirstReview
      )
    );
    csvOutput += `,${convertMsToRoundedMinutes(
      aggregatedStats.averageTimeToNewContributorFirstReview
    )}`;
    console.log(
      "Average time in minutes from PR creation to completion:",
      convertMsToRoundedMinutes(aggregatedStats.averageTimeCreationToCompletion)
    );
    csvOutput += `,${convertMsToRoundedMinutes(
      aggregatedStats.averageTimeCreationToCompletion
    )}`;
    console.log(
      "Total number of reviewers:",
      aggregatedStats.numUsersPerformingReviews
    );
    csvOutput += `,${aggregatedStats.numUsersPerformingReviews}`;
    console.log(
      "Total number of committers performing reviews:",
      aggregatedStats.numCommittersPerformingReviews
    );
    csvOutput += `,${aggregatedStats.numCommittersPerformingReviews}`;
    console.log(
      "Total number of non-committers performing reviews:",
      aggregatedStats.numNonCommittersPerformingReviews
    );
    csvOutput += `,${aggregatedStats.numNonCommittersPerformingReviews}`;
    // The gini index is reported raw (0..1), not in minutes.
    console.log(
      "Gini index (fairness) of committers performing reviews:",
      aggregatedStats.giniIndexCommittersPerformingReviews
    );
    csvOutput += `,${aggregatedStats.giniIndexCommittersPerformingReviews}`;
    console.log(
      "Average time in minutes from committer assignment to PR merge:",
      convertMsToRoundedMinutes(
        aggregatedStats.averageTimeFromCommitterAssignmentToPrMerge
      )
    );
    csvOutput += `,${convertMsToRoundedMinutes(
      aggregatedStats.averageTimeFromCommitterAssignmentToPrMerge
    )}`;
  }
  fs.writeFileSync(PATH_TO_METRICS_CSV, csvOutput);
  console.log(`Output written to ${PATH_TO_METRICS_CSV}`);
}
/**
 * Entry point for metric collection: fetches the last year of completed
 * PRs, extracts stats for those merged within the last 90 days, buckets
 * them into 7-day windows by merge time, and reports the results.
 */
async function gatherMetrics() {
  // We will only aggregate metrics from the last 90 days,
  // but will look further back to determine if this is a user's first contribution
  const pullsFromLastYear = await getCompletedPullsFromLastYear();
  let cutoffDate = new Date();
  cutoffDate.setDate(cutoffDate.getDate() - 90);
  let pullStats: PrStats[] = [];
  console.log("Extracting stats from pulls - this may take a while");
  for (let i = 0; i < pullsFromLastYear.length; i++) {
    let pull = pullsFromLastYear[i];
    // Only merged PRs created after the cutoff contribute stats.
    if (new Date(pull.created_at) > cutoffDate && pull.merged_at) {
      pullStats.push(await extractPrStats(pull, pullsFromLastYear));
    }
    // Progress indicator: one dot per ten PRs examined.
    if (i % 10 === 0) {
      process.stdout.write(".");
    }
  }
  console.log("\nDone extracting stats, formatting results");
  let statBuckets: { [key: number]: PrStats[] } = {};
  // Buckets are anchored to the end of the current UTC day.
  let bucketEnd = new Date();
  bucketEnd.setUTCHours(23, 59, 59, 999);
  // NOTE(review): getMetricBucketStartDate appears to alias and mutate
  // `bucketEnd` in place while walking backwards, so later iterations may
  // start from an already-rewound date — verify bucket boundaries.
  pullStats.forEach((pullStat) => {
    let bucketStart = getMetricBucketStartDate(pullStat, bucketEnd);
    if (bucketStart in statBuckets) {
      statBuckets[bucketStart].push(pullStat);
    } else {
      statBuckets[bucketStart] = [pullStat];
    }
  });
  await reportMetrics(statBuckets);
}
// Kick off metric collection; errors surface as unhandled rejections.
gatherMetrics();
// Marks this file as a module so top-level declarations stay file-scoped.
export {};
```
|
```go
//go:build gogit
package git
import (
"path"
"strings"
"github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/plumbing/filemode"
"github.com/go-git/go-git/v5/plumbing/object"
)
// GetTreeEntryByPath returns the tree entry located at relpath, resolving
// each path component against the tree's sub-trees. An empty relpath
// yields a synthetic directory entry representing the tree itself.
func (t *Tree) GetTreeEntryByPath(relpath string) (*TreeEntry, error) {
	if len(relpath) == 0 {
		// Root of the tree: fabricate a directory entry pointing at t.
		return &TreeEntry{
			ID: t.ID,
			// Type: ObjectTree,
			gogitTreeEntry: &object.TreeEntry{
				Name: "",
				Mode: filemode.Dir,
				Hash: plumbing.Hash(t.ID.RawValue()),
			},
		}, nil
	}
	relpath = path.Clean(relpath)
	parts := strings.Split(relpath, "/")
	var err error
	tree := t
	for i, name := range parts {
		if i == len(parts)-1 {
			// Last component: look it up among this tree's direct entries.
			entries, err := tree.ListEntries()
			if err != nil {
				// Map "object not found" onto the package's ErrNotExist.
				if err == plumbing.ErrObjectNotFound {
					return nil, ErrNotExist{
						RelPath: relpath,
					}
				}
				return nil, err
			}
			for _, v := range entries {
				if v.Name() == name {
					return v, nil
				}
			}
		} else {
			// Intermediate component: descend into the matching sub-tree.
			tree, err = tree.SubTree(name)
			if err != nil {
				if err == plumbing.ErrObjectNotFound {
					return nil, ErrNotExist{
						RelPath: relpath,
					}
				}
				return nil, err
			}
		}
	}
	// Fell through: the final component was not among the entries.
	return nil, ErrNotExist{"", relpath}
}
```
|
```php
<?php
declare(strict_types=1);
/**
* Passbolt ~ Open source password manager for teams
*
* For full copyright and license information, please see the LICENSE.txt
* Redistributions of files must retain the above copyright notice.
*
* @link path_to_url Passbolt(tm)
* @since 2.13.0
*/
namespace Passbolt\Folders\Model\Table;
use App\Model\Entity\Role;
use App\Model\Rule\IsNotSoftDeletedRule;
use App\Model\Traits\Cleanup\TableCleanupTrait;
use App\Utility\UserAccessControl;
use Cake\Http\Exception\InternalErrorException;
use Cake\ORM\RulesChecker;
use Cake\ORM\Table;
use Cake\Utility\Inflector;
use Cake\Validation\Validator;
use Passbolt\Folders\Model\Dto\FolderRelationDto;
use Passbolt\Folders\Model\Entity\FoldersRelation;
use Passbolt\Folders\Model\Traits\FoldersRelations\FoldersRelationsFindersTrait;
use Passbolt\Folders\Service\FoldersRelations\FoldersRelationsAddItemsToUserTreeService;
/**
* FoldersRelations Model
*
* @property \App\Model\Table\ResourcesTable&\Cake\ORM\Association\BelongsTo $Resources
* @property \Passbolt\Folders\Model\Table\FoldersTable&\Cake\ORM\Association\BelongsTo $Folders
* @property \Passbolt\Folders\Model\Table\FoldersTable&\Cake\ORM\Association\BelongsTo $FoldersParents
* @property \App\Model\Table\UsersTable&\Cake\ORM\Association\BelongsTo $Users
* @property \Passbolt\Folders\Model\Table\FoldersRelationsHistoryTable&\Cake\ORM\Association\BelongsTo $FoldersRelationsHistory
* @method \Passbolt\Folders\Model\Entity\FoldersRelation get($primaryKey, $options = [])
* @method \Passbolt\Folders\Model\Entity\FoldersRelation newEntity(array $data, array $options = [])
* @method \Passbolt\Folders\Model\Entity\FoldersRelation[] newEntities(array $data, array $options = [])
* @method \Passbolt\Folders\Model\Entity\FoldersRelation|false save(\Cake\Datasource\EntityInterface $entity, $options = [])
* @method \Passbolt\Folders\Model\Entity\FoldersRelation saveOrFail(\Cake\Datasource\EntityInterface $entity, $options = [])
* @method \Passbolt\Folders\Model\Entity\FoldersRelation patchEntity(\Cake\Datasource\EntityInterface $entity, array $data, array $options = [])
* @method \Passbolt\Folders\Model\Entity\FoldersRelation[] patchEntities(iterable $entities, array $data, array $options = [])
* @method \Passbolt\Folders\Model\Entity\FoldersRelation findOrCreate($search, ?callable $callback = null, $options = [])
* @method \Passbolt\Folders\Model\Entity\FoldersRelation newEmptyEntity()
* @method iterable<\Passbolt\Folders\Model\Entity\FoldersRelation>|iterable<\Cake\Datasource\EntityInterface>|false saveMany(iterable $entities, $options = [])
* @method iterable<\Passbolt\Folders\Model\Entity\FoldersRelation>|iterable<\Cake\Datasource\EntityInterface> saveManyOrFail(iterable $entities, $options = [])
* @method iterable<\Passbolt\Folders\Model\Entity\FoldersRelation>|iterable<\Cake\Datasource\EntityInterface>|false deleteMany(iterable $entities, $options = [])
* @method iterable<\Passbolt\Folders\Model\Entity\FoldersRelation>|iterable<\Cake\Datasource\EntityInterface> deleteManyOrFail(iterable $entities, $options = [])
* @method \Cake\ORM\Query findByForeignId(string $id)
* @method \Cake\ORM\Query findById(string $id)
* @method \Cake\ORM\Query findByUserId(string $userId)
* @method \Cake\ORM\Query findByFolderParentId(string $folderParentId)
* @method \Cake\ORM\Query findByUserIdAndForeignModel(string $userId, string $foreignModel)
* @method \Cake\ORM\Query findByForeignIdAndFolderParentId(string $foreignId, string $folderParentId)
* @method \Cake\ORM\Query findByUserIdAndFolderParentId(string $userId, string $folderParentId)
* @method \Cake\ORM\Query findMissingFoldersRelations(string $foreignModel)
* @mixin \Cake\ORM\Behavior\TimestampBehavior
*/
class FoldersRelationsTable extends Table
{
    use FoldersRelationsFindersTrait;
    use TableCleanupTrait;

    /**
     * List of allowed item models on which a folder relation can be plugged.
     */
    public const ALLOWED_FOREIGN_MODELS = [
        FoldersRelation::FOREIGN_MODEL_FOLDER,
        FoldersRelation::FOREIGN_MODEL_RESOURCE,
    ];

    /**
     * Initialize method
     *
     * @param array $config The configuration for the Table.
     * @return void
     */
    public function initialize(array $config): void
    {
        parent::initialize($config);

        $this->setTable('folders_relations');
        $this->setDisplayField('id');
        $this->setPrimaryKey('id');

        $this->addBehavior('Timestamp');

        // Resources and Folders share the polymorphic foreign_id column;
        // foreign_model discriminates between them.
        $this->belongsTo('Resources', [
            'foreignKey' => 'foreign_id',
        ]);
        $this->belongsTo('Passbolt/Folders.Folders', [
            'foreignKey' => 'foreign_id',
        ]);
        $this->belongsTo('Passbolt/Folders.FoldersParents', [
            'className' => 'Passbolt/Folders.Folders',
            'foreignKey' => 'folder_parent_id',
        ]);
        $this->belongsTo('Users');
    }

    /**
     * Default validation rules.
     *
     * @param \Cake\Validation\Validator $validator Validator instance.
     * @return \Cake\Validation\Validator
     */
    public function validationDefault(Validator $validator): \Cake\Validation\Validator
    {
        $validator
            ->uuid('id', __('The identifier should be a valid UUID.'))
            ->allowEmptyString('id', __('The identifier should not be empty.'), 'create');

        $validator
            ->inList('foreign_model', self::ALLOWED_FOREIGN_MODELS, __(
                'The child object type should be one of the following: {0}.',
                implode(', ', self::ALLOWED_FOREIGN_MODELS)
            ))
            ->requirePresence('foreign_model', 'create', __('The child object type is required.'))
            ->notEmptyString('foreign_model', __('The child object type should not be empty.'));

        $validator
            ->uuid('foreign_id', __('The child object identifier should be a valid UUID.'))
            ->requirePresence('foreign_id', 'create', __('The child object identifier required.'))
            ->notEmptyString('foreign_id', __('The child object identifier should not be empty.'), false);

        $validator
            ->uuid('user_id', __('The user identifier should be a valid UUID.'))
            ->requirePresence('user_id', 'create', __('A user identifier is required.'))
            ->notEmptyString('user_id', __('The user identifier should not be empty.'), false);

        $validator
            ->uuid('folder_parent_id', __('The folder parent identifier should be a valid UUID.'))
            ->allowEmptyString('folder_parent_id');

        return $validator;
    }

    /**
     * Returns a rules checker object that will be used for validating
     * application integrity.
     *
     * @param \Cake\ORM\RulesChecker $rules The rules object to be modified.
     * @return \Cake\ORM\RulesChecker
     */
    public function buildRules(RulesChecker $rules): \Cake\ORM\RulesChecker
    {
        $rules->addCreate(
            $rules->isUnique(
                ['foreign_id', 'user_id'],
                __('A folder relation already exists for the given child object and user.')
            ),
            'folder_relation_unique'
        );
        $rules->addCreate([$this, 'foreignIdExistsRule'], 'foreign_model_exists', [
            'errorField' => 'foreign_id',
            'message' => __('The child object does not exist.'),
        ]);
        $rules->addCreate($rules->existsIn(['user_id'], 'Users'), 'user_exists', [
            'errorField' => 'user_id',
            'message' => __('The user does not exist.'),
        ]);
        $rules->addCreate(new IsNotSoftDeletedRule(), 'user_is_not_soft_deleted', [
            'table' => 'Users',
            'errorField' => 'user_id',
            'message' => __('The user does not exist.'),
        ]);

        return $rules;
    }

    /**
     * Checks that the foreign id exists
     *
     * @param \Passbolt\Folders\Model\Entity\FoldersRelation $foldersRelation The folder_relation to test
     * @param array $options The additional options for this rule
     * @return bool
     */
    public function foreignIdExistsRule(FoldersRelation $foldersRelation, array $options): bool
    {
        $rules = new RulesChecker($options);
        $exist = false;

        switch ($foldersRelation->foreign_model) {
            case FoldersRelation::FOREIGN_MODEL_RESOURCE:
                // A resource must exist AND not be soft deleted.
                $rule = $rules->existsIn('foreign_id', 'Resources');
                $existIn = $rule($foldersRelation, $options);
                $rule = new IsNotSoftDeletedRule();
                $isNotSoftDeleted = $rule($foldersRelation, [
                    'table' => 'Resources',
                    'errorField' => 'foreign_id',
                ]);
                $exist = $existIn && $isNotSoftDeleted;
                break;
            case FoldersRelation::FOREIGN_MODEL_FOLDER:
                // Folders have no soft delete: existence is enough.
                $rule = $rules->existsIn('foreign_id', 'Folders');
                $exist = $rule($foldersRelation, $options);
                break;
        }

        return $exist;
    }

    /**
     * Delete all records where associated users are soft deleted
     *
     * @param bool|null $dryRun When true only count, do not delete. Default false.
     * @return int Number of affected records
     */
    public function cleanupSoftDeletedUsers(?bool $dryRun = false): int
    {
        return $this->cleanupSoftDeleted('Users', $dryRun);
    }

    /**
     * Delete all records where associated users are deleted
     *
     * @param bool|null $dryRun When true only count, do not delete. Default false.
     * @return int Number of affected records
     */
    public function cleanupHardDeletedUsers(?bool $dryRun = false): int
    {
        return $this->cleanupHardDeleted('Users', $dryRun);
    }

    /**
     * Delete all records where associated resources are soft deleted
     *
     * @param bool|null $dryRun When true only count, do not delete. Default false.
     * @return int Number of affected records
     */
    public function cleanupSoftDeletedResources(?bool $dryRun = false): int
    {
        return $this->cleanupSoftDeletedForeignId('Resources', $dryRun);
    }

    /**
     * Delete all association records where associated model entities are soft deleted
     *
     * @param string $modelName model
     * @param bool|null $dryRun When true only count, do not delete. Default false.
     * @return int Number of affected records
     */
    private function cleanupSoftDeletedForeignId(string $modelName, ?bool $dryRun = false): int
    {
        $query = $this->selectQuery()
            ->select(['id'])
            ->leftJoinWith($modelName)
            ->where([
                "$modelName.deleted" => true,
                'FoldersRelations.foreign_model' => ucfirst(Inflector::singularize($modelName)),
            ]);

        return $this->cleanupHardDeleted($modelName, $dryRun, $query);
    }

    /**
     * Delete all records where associated resources are deleted
     *
     * @param bool|null $dryRun When true only count, do not delete. Default false.
     * @return int Number of affected records
     */
    public function cleanupHardDeletedResources(?bool $dryRun = false): int
    {
        return $this->cleanupHardDeletedForeignId('Resources', $dryRun);
    }

    /**
     * Delete all association records where associated model entities are deleted
     *
     * @param string $modelName model
     * @param bool|null $dryRun When true only count, do not delete. Default false.
     * @return int Number of affected records
     */
    private function cleanupHardDeletedForeignId(string $modelName, ?bool $dryRun = false): int
    {
        $query = $this->selectQuery()
            ->select(['id'])
            ->leftJoinWith($modelName)
            ->whereNull($modelName . '.id')
            ->where(['FoldersRelations.foreign_model' => ucfirst(Inflector::singularize($modelName)),]);

        return $this->cleanupHardDeleted($modelName, $dryRun, $query);
    }

    /**
     * Delete all records where associated folders are deleted
     *
     * @param bool|null $dryRun When true only count, do not delete. Default false.
     * @return int Number of affected records
     */
    public function cleanupHardDeletedFolders(?bool $dryRun = false): int
    {
        return $this->cleanupHardDeletedForeignId('Folders', $dryRun);
    }

    /**
     * Move to root all folders relations where associated folders parents are deleted
     *
     * @param bool|null $dryRun When true only count, do not delete. Default false.
     * @return int Number of affected records
     */
    public function cleanupHardDeletedFoldersParents(?bool $dryRun = false): int
    {
        $query = $this->findByDeletedFolderParent()
            ->select('id');

        return $this->cleanupHardDeleted('FoldersParents', $dryRun, $query);
    }

    /**
     * Add missing folders relations for each resource the users have access to.
     *
     * @param bool|null $dryRun When true only count, do not create. Default false.
     * @return int Number of affected records
     * @throws \Exception If something unexpected occurred
     */
    public function cleanupMissingResourcesFoldersRelations(?bool $dryRun = false): int
    {
        return $this->cleanupMissingFoldersRelations(FoldersRelation::FOREIGN_MODEL_RESOURCE, $dryRun);
    }

    /**
     * Add a folder relation for each item users have access but don't have it in their trees.
     *
     * @param string $foreignModel The type of item. Can be Folder or Resource
     * @param bool|null $dryRun When true only count, do not create. Default false.
     * @return int Number of affected records
     * @throws \Exception If something unexpected occurred
     */
    public function cleanupMissingFoldersRelations(string $foreignModel, ?bool $dryRun = false): int
    {
        // Missing relations are re-created on behalf of the first admin.
        $admin = $this->Users->findFirstAdmin();
        $uac = new UserAccessControl(Role::ADMIN, $admin->id);
        $addItemsToUserTreeService = new FoldersRelationsAddItemsToUserTreeService();

        $missingFoldersRelations = $this->findMissingFoldersRelations($foreignModel)->all();
        if (!$dryRun) {
            // Group the missing relations by user, then repair each tree.
            $items = [];
            foreach ($missingFoldersRelations as $missingFolderRelation) {
                $folderRelationToCreateDto = new FolderRelationDto($foreignModel, $missingFolderRelation['foreign_id']);
                $items[$missingFolderRelation['user_id']][] = $folderRelationToCreateDto;
            }
            foreach ($items as $userId => $userItems) {
                $addItemsToUserTreeService->addItemsToUserTree($uac, $userId, $userItems);
            }
        }

        return count($missingFoldersRelations);
    }

    /**
     * Delete duplicated folders relations
     *
     * @param bool|null $dryRun When true only count, do not delete. Default false.
     * @return int Number of affected records
     */
    public function cleanupDuplicatedFoldersRelations(?bool $dryRun = false): int
    {
        $keys = ['user_id', 'foreign_model', 'foreign_id', 'folder_parent_id'];

        return $this->cleanupDuplicates($keys, $dryRun);
    }

    /**
     * Check if an item is in a user tree.
     *
     * @param string $userId The target user
     * @param string $foreignId The target item id
     * @param string|null $foreignModel The target item foreign model. If not given, the test won't check the item
     * model.
     * @return bool
     */
    public function isItemInUserTree(string $userId, string $foreignId, ?string $foreignModel = null): bool
    {
        $conditions = ['foreign_id' => $foreignId, 'user_id' => $userId];

        if (!is_null($foreignModel)) {
            $conditions['foreign_model'] = $foreignModel;
        }

        return $this->exists($conditions);
    }

    /**
     * Add missing folders relations for each folder the users have access to.
     *
     * @param bool|null $dryRun When true only count, do not create. Default false.
     * @return int Number of affected records
     * @throws \Exception If something unexpected occurred
     */
    public function cleanupMissingFoldersFoldersRelations(?bool $dryRun = false): int
    {
        return $this->cleanupMissingFoldersRelations(FoldersRelation::FOREIGN_MODEL_FOLDER, $dryRun);
    }

    /**
     * Count the number of occurrences of a given relation.
     *
     * @param string $foreignId The relation child id
     * @param string|null $folderParentId The relation parent id
     * @return int
     */
    public function countRelationUsage(string $foreignId, ?string $folderParentId = FoldersRelation::ROOT): int
    {
        $conditions = [
            'foreign_id' => $foreignId,
            'folder_parent_id' => $folderParentId,
        ];

        return $this->find()
            ->where($conditions)
            ->count();
    }

    /**
     * Get an item folder parent id in a user tree.
     *
     * @param string $userId The target user to look for
     * @param string $foreignId The item identifier
     * @return string|null The parent folder id, or null when the item is at the root or absent.
     */
    public function getItemFolderParentIdInUserTree(string $userId, string $foreignId): ?string
    {
        $foldersParentIds = $this->getItemFoldersParentIdsInUsersTrees([$userId], $foreignId);
        $parent = reset($foldersParentIds);
        if ($parent === false) {
            // reset() returns false on an empty array: no relation found.
            return null;
        }

        return $parent;
    }

    /**
     * Get an item folders parent ids in multiple users trees.
     *
     * @param array $usersIds The list of users to get the item folder parent id
     * @param string $foreignId The target entity id
     * @param bool|null $excludeRoot Exclude the root folder. Default false.
     * @return array Distinct folder parent ids (may contain null unless $excludeRoot).
     */
    public function getItemFoldersParentIdsInUsersTrees(
        array $usersIds,
        string $foreignId,
        ?bool $excludeRoot = false
    ): array {
        $conditions = [
            'user_id IN' => $usersIds,
            'foreign_id' => $foreignId,
        ];

        if ($excludeRoot) {
            // Root items are represented by a null folder_parent_id.
            $conditions[] = 'folder_parent_id IS NOT NULL';
        }

        return $this->find()
            ->where($conditions)
            ->select('folder_parent_id')
            ->distinct('folder_parent_id')
            ->all()
            ->extract('folder_parent_id')
            ->toArray();
    }

    /**
     * Return a list of users ids having access to a list of items.
     *
     * @param array $foreignIds The list of items to check for.
     * @return array
     * @throws \Cake\Http\Exception\InternalErrorException If $foreignIds is empty.
     */
    public function getUsersIdsHavingAccessToMultipleItems(array $foreignIds): array
    {
        if (empty($foreignIds)) {
            throw new InternalErrorException('The foreignIds parameter cannot be empty.');
        }

        // A user has access to ALL items when their relation count equals
        // the number of requested items.
        $itemsCount = count($foreignIds);

        return $this->find()
            ->select(['user_id'])
            ->where(['foreign_id IN' => $foreignIds])
            ->group('user_id')
            ->having("count(user_id) = $itemsCount")
            ->all()
            ->extract('user_id')
            ->toArray();
    }

    /**
     * Check if an item is personal.
     *
     * An item is considered personal when exactly one user has it in their tree.
     *
     * @param string|null $foreignId The item id
     * @return bool
     */
    public function isItemPersonal(?string $foreignId = null): bool
    {
        if (is_null($foreignId)) {
            return false;
        }

        return $this->findByForeignId($foreignId)
            ->count() === 1;
    }

    /**
     * Move an item from multiple locations to a target location.
     *
     * @param string $foreignId The target item
     * @param array $fromFoldersIds The list of folders ids to move from
     * @param string|null $folderParentId (optional) The destination folder location. Set it to null to move the
     * item to the root. Default null.
     * @return void
     */
    public function moveItemFrom(string $foreignId, array $fromFoldersIds, ?string $folderParentId = null): void
    {
        $fields = [
            'folder_parent_id' => $folderParentId,
        ];
        $conditions = [
            'foreign_id' => $foreignId,
            'folder_parent_id IN' => $fromFoldersIds,
        ];
        $this->updateAll($fields, $conditions);
    }

    /**
     * Move an item for users from wherever they are to a target location.
     *
     * @param string $foreignId The target item
     * @param array $forUsersIds The list of users to move the item for
     * @param string|null $folderParentId The destination folder
     * @return void
     */
    public function moveItemFor(string $foreignId, array $forUsersIds, ?string $folderParentId = null): void
    {
        if (empty($forUsersIds)) {
            return;
        }

        $fields = [
            'folder_parent_id' => $folderParentId,
        ];
        $conditions = [
            'foreign_id' => $foreignId,
            'user_id IN' => $forUsersIds,
        ];
        $this->updateAll($fields, $conditions);
    }
}
```
|
Akpınar is a village in the Borçka District, Artvin Province, Turkey. Its population is 232 (2021).
References
Villages in Borçka District
|
```java
package com.ctrip.xpipe.redis.console.service;
import com.ctrip.xpipe.redis.console.model.EventModel;
import java.util.List;
/**
 * Service exposing recently recorded alert mail events.
 *
 * @author chen.zhu
 * <p>
 * Apr 23, 2018
 */
public interface AlertMailEventService extends EventService {

    /**
     * Returns the alert mail events generated within the last hour.
     *
     * @return the matching events; presumably an empty list when none
     *         exist — TODO confirm against the implementation
     */
    List<EventModel> getLastHourAlertEvent();
}
```
|
Julodis cirrosa, common name Brush Jewel Beetle, is a species of beetles belonging to the Buprestidae family. This species occurs in Southern Africa.
Description
Julodis cirrosa reaches about in length. The coloration is metallic blue-green, the surface is punctured and covered by yellowish-orange wax-coated hairs.
List of Subspecies
Julodis cirrosa cirrosa (Schönherr, 1817)
Julodis cirrosa hirtiventris Laporte, 1835
References
Universal Biological Indexer
Biolib
S.M.V. Gussmann New species and subspecies of Julodis Eschscholtz (Coleoptera: Buprestidae) from southern Africa
External links
Roma scuola
Buprestidae
Beetles described in 1817
|
```python
# THIS FILE IS AUTO-GENERATED. DO NOT EDIT
from verta._swagger.base_type import BaseType
class ModeldbLineageEntry(BaseType):
  """Swagger model for a lineage entry (a typed reference to an entity).

  Attributes mirror the swagger schema: ``type`` (an enum value) and
  ``external_id`` (the referenced entity's id). ``type`` intentionally
  shadows the builtin because it mirrors the swagger field name.
  """

  def __init__(self, type=None, external_id=None):
    # The generator previously emitted backtick-quoted identifiers
    # (`type`), which is a syntax error in Python; the field is plain
    # ``type`` both as an attribute and as a JSON key.
    required = {
      "type": False,
      "external_id": False,
    }
    self.type = type
    self.external_id = external_id

    for k, v in required.items():
      # BaseType is assumed to provide dict-style access to attributes
      # via __getitem__ — TODO confirm against BaseType.
      if self[k] is None and v:
        raise ValueError('attribute {} is required'.format(k))

  @staticmethod
  def from_json(d):
    # Deferred import to avoid a circular dependency between generated types.
    from .LineageEntryEnumLineageEntryType import LineageEntryEnumLineageEntryType

    tmp = d.get('type', None)
    if tmp is not None:
      d['type'] = LineageEntryEnumLineageEntryType.from_json(tmp)
    tmp = d.get('external_id', None)
    if tmp is not None:
      d['external_id'] = tmp

    return ModeldbLineageEntry(**d)
```
|
Holdridge may refer to:
People:
Herbert C. Holdridge (1892–1974), American military general, father of John H. Holdridge and adoptive father of Cheryl Holdridge
John H. Holdridge, American foreign service officer and diplomat (1924–2001)
Leslie Holdridge, American botanist and climatologist (1907–1999)
Cheryl Holdridge, American actress (1944–2009)
Alex Holdridge, American film writer/director (born 1975)
Places:
Holdridge Island, Nunavut, Canada
Other:
14835 Holdridge, asteroid
|
Stormworld is a drama television series which first aired on Canadian TV channel Space on March 18, 2009 with a repeat broadcast in August 2009, and aired in Australia on the Nine Network in 2009.
Plot
The show follows best friends Jason (Andrew Jenkins) and Lee (Calum Worthy), after they are transported through a vortex while on a boat trip that started in Vancouver, Canada, to the alien world "Stormworld". The boys receive help from Flees (Valentina Barron), a seasoned survivor of Stormworld who uses a boat to navigate the hostile environment. The boys, as new arrivals or "access crashers" as the local inhabitants call them, find shelter at The Settlement.
Stormworld is a destination for people and objects from many different worlds. Transport to Stormworld happens when a vortex is created between Stormworld and another world by large beetle-like insects. The surface of Stormworld is a saltwater ocean with many islands that have a generally hot climate. Fresh water is a scarce resource that is central to the survival of all inhabitants, making it a sought-after resource. Trade and barter of the objects brought through the vortices operate as the basis of the inhabitants' economy.
There are three principal groups on Stormworld, between which conflict regularly occurs: The Settlement, The Arkoddians, and The Drogue.
The Settlement is a constitutionally-based society with democratic principles. The Settlement is located at the Sighing Peaks on an island. It was founded by Werrolda who wrote its constitution. When Jason and Lee arrive, Werrolda is the leader. High on a hill not far from the Settlement is a beacon that flashes a bright light at regular intervals to attract other "access crashers" after arriving in a Vortex.
The Arkoddians are a tightly hierarchical society run by the patriarchs. They live on an island a significant distance away from the Settlement. Each Arkoddian requires more water per day than a human. Some of the Arkoddians are armed with lazbolts. Unlike most of the other people on Stormworld, the Arkoddians appear to have arrived as a group on a large boat.
The Drogue are a small band of thugs that prey on the Settlement and collect objects from the vortices. They are equipped with flybikes that are armed with energy cannons.
The Abiders were a race of people who occupied Stormworld sometime ago. They placed the sighing peaks in order to create a map, and presumably were the creators of the portal that leads off the planet.
Notable Characters
Jason (Andrew Jenkins) – an athletic, high-energy, positive person who has strong leadership qualities. Lee is his best friend, also from Earth and arrived on Stormworld at the same time. Jason is the pilot of the boat named Cougar. In the final episode he decides to stay behind with Flees and Ogee and become leader of The Settlement.
Lee (Calum Worthy) – an intelligent, savvy and logical individual with a strong scientific mind. He arrived on Stormworld with his best friend, Jason. Lee wants very much to return to Earth, which he does in episode 26.
Flees (Valentina Barron) – arrived 7 years ago with her father, an engineer, after their small aircraft was drawn through a vortex from Earth. Flees' father disappeared four years after their arrival on Stormworld when he went out kayaking. Now 15 years old, she has continued an ongoing search to find her missing father for the past 3 years. In her searching, she has acquired an excellent knowledge of geography of Stormworld. Luce regards Flees as an older sister.
Ogee (voiced by Andrew Kavadas) – an extremely clever talking bowling ball.
Khelioz (Lim Kay Tong) – also known as The Navigator. On his home world of Maren, Khelioz was a fisherman. On Stormworld he is a trader with a small sailing vessel who is trying to collect information to learn where the portal is for people to return to their home planets. His methods are often viewed as being sneaky or self-serving.
Episodes
Stormworld consists of 26 episodes. The series aired weekly on Wednesdays over the course of 7 months in 2009, between March and September.
International syndication
Production
The series was shot in Australia and Canada in 2008. Post-production sound and effects was done by Kojo Productions, who sued the production company for more than $265,000 in 2010 for unpaid invoices.
Critical response
When it premiered in Canada, it was called "a hyper-flimsy sci-fi offering" with "cheesy low-budget special effects".
Andrew Jenkins, Calum Worthy, and Valentina Barron were all nominated for Best Performance in a TV Series at the 2010 Young Artist Awards, with Worthy winning the award for leading young actor in a drama series.
References
External links
Stormworld at the Australian Television Information Archive
Blackmagic Design provides Stormworld graphics
Nine Network original programming
CTV Sci-Fi Channel original programming
2009 Australian television series debuts
2009 Australian television series endings
2009 Canadian television series debuts
2009 Canadian television series endings
2000s Canadian children's television series
2000s Canadian science fiction television series
Canadian children's science fiction television series
Australian children's television series
Australian science fiction television series
Television series about teenagers
Television shows filmed in Burnaby
|
The Vidyavardhini's College of Engineering and Technology (VCET) is an engineering college in Vasai-Virar. The college is affiliated to the University of Mumbai and offers Bachelor's degrees in Engineering.
The college has been graded 'A' by the DTE. The five branches Computer Engineering, Electronics & Telecommunication Engineering, Information Technology Engineering, Instrumentation Engineering, and Mechanical Engineering have been accredited by the National Board of Accreditation (NBA) for a period of 3 years w.e.f. April 2012. The college has introduced two new branches: Artificial Intelligence and Data Science, and Computer Science and Engineering (Data Science).
Vidyavardhini’s College of Engineering and Technology, Vasai is located on the sprawling campus of Vidyavardhini, spread over an area of 12.27 acres. It is a short, two minutes walk from Vasai Road Railway Station.
Among the college's alumni are Mr. Rohit K Patil, entrepreneur and creator of online shopping portal Ekdontin.com, and Mr Victor Sinha, head of international sales and marketing at Indore Composites.
Courses
Under-Graduate Degrees Offered
Each department listed below offers courses in their respective disciplines towards a BE degree.
B. E. (Civil Engineering)
B. E. (Computer Engineering)
B. E. (Electronics And Telecommunication Engineering)
B. E. (Information Technology)
B. E. (Instrumentation Engineering)
B. E. (Mechanical Engineering)# Regular shift
B. E. (Mechanical Engineering)# Second shift
B. E. ( Artificial intelligence and data science)
B. E. (Computer science and Engineering-Data science)
Annual Extracurricular Events
Internship Fair: Provides Internship
E-Summit: Entrepreneurship Summit
Bizmaster: Business plan competition
SHABD: Annual Literary Festival
ZEAL: Cultural Festival
OCTAVES: Rock/Metal Music Festival
ORION: Technical festival
VISTA: Annual College Magazine
OPUS: National Level Project Competition
ANVESHAN: National Level Project Competition
Sports: Badminton, Volleyball, Box Cricket, Football, ThrowBall, Table Tennis, Carrom
SANRACHNA: Technical Festival
Student Bodies
The college has an Entrepreneurship Cell headed by Ecell Chairperson selected through a process of interviews.
The Students' Council headed by the General Secretary manages the Cultural activities and the Magazine Committee headed by the Magazine Secretary manages all the Literary Activities.
Moreover, each stream has its own student bodies -
Indian Society of Heating, Refrigerating and Air Conditioning Engineers (ISHRAE)
Civil Engineering Student Association (C.E.S.A.) (Nirmaan culture fest)
Computer Society Of India (CSI)
The Institute of Electrical and Electronics Engineers (IEEE)
Institution of Electronics and Telecommunication Engineering (IETE)
V.C.E.T's Mechanical Engineers Association (VMEA)
International Society of Automation (ISA)
Society of Automobile Engineers (SAE)
Under the SAE chapter of the college, a team of 22 students participated in SUPRA 2014, a prestigious F1 car design and manufacturing competition which is annually held by SAE INDIA.
2014 was Ethan Racing's very first entry to Supra.
See also
University of Mumbai
List of Mumbai Colleges
References
2) https://www.facebook.com/TeamEthanRacing
External links
Official site
Affiliates of the University of Mumbai
Engineering colleges in Mumbai
Education in Vasai-Virar
|
Sofia Cherone (born 21 May 1988) is a team handball player from Uruguay. She plays on the Uruguay women's national handball team, and participated at the 2011 World Women's Handball Championship in Brazil.
Individual awards
2019 South and Central American Women's Club Handball Championship: Top scorer
References
1988 births
Living people
Uruguayan female handball players
21st-century Uruguayan women
|
```c++
//
// legacy_conio_definitions.cpp
//
//
// Provides external definitions of the inline conio printf and scanf functions
// for use by objects compiled with older versions of the CRT headers.
//
// Our references to "unsafe" functions count as "use" so we need to suppress
// the deprecation warnings. Note that any clients using these symbols will
// also get these deprecation warnings.
#define _CRT_SECURE_NO_WARNINGS
#define _CRT_NON_CONFORMING_SWPRINTFS
#define _NO_CRT_STDIO_INLINE 1
#define _CRT_STDIO_ARBITRARY_WIDE_SPECIFIERS
#define _CRT_STDIO_LEGACY_WIDE_SPECIFIERS
#define _INC_SWPRINTF_INL_
#define _CRT_STDIO_INLINE extern
#include <conio.h>
#include <sdkddkver.h>
// Define the __imp_- or _imp__-prefixed IAT symbols, for compatibility with
// objects that were compiled with /MD or /MDd.
#include <vcstartup_internal.h>
#undef _NO_CRT_STDIO_INLINE
#undef _CRT_STDIO_INLINE
#define _CRT_STDIO_INLINE extern __inline
_CRT_BEGIN_C_HEADER
#if _CRT_NTDDI_MIN < NTDDI_WIN6
// === _cprintf family: narrow-character console printf forwarders. ===
// Each wrapper collects its varargs into a va_list and forwards to the
// matching _vcprintf* routine; the _VCRT_DEFINE_IAT_SYMBOL that follows each
// body emits the __imp_-prefixed alias for objects compiled with /MD or /MDd.

// Forwards to _vcprintf_l with the caller-supplied locale.
_Check_return_opt_
_CRT_STDIO_INLINE int __CRTDECL _cprintf_l(
_In_z_ _Printf_format_string_params_(0) char const* const _Format,
_In_opt_ _locale_t const _Locale,
...)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
int _Result;
va_list _ArgList;
__crt_va_start(_ArgList, _Locale);
_Result = _vcprintf_l(_Format, _Locale, _ArgList);
__crt_va_end(_ArgList);
return _Result;
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_cprintf_l);
// Forwards to _vcprintf_p_l with the default locale (NULL).
_Check_return_opt_
_CRT_STDIO_INLINE int __CRTDECL _cprintf_p(
_In_z_ _Printf_format_string_ char const* const _Format,
...)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
int _Result;
va_list _ArgList;
__crt_va_start(_ArgList, _Format);
_Result = _vcprintf_p_l(_Format, NULL, _ArgList);
__crt_va_end(_ArgList);
return _Result;
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_cprintf_p);
// Forwards to _vcprintf_p_l with the caller-supplied locale.
_Check_return_opt_
_CRT_STDIO_INLINE int __CRTDECL _cprintf_p_l(
_In_z_ _Printf_format_string_params_(0) char const* const _Format,
_In_opt_ _locale_t const _Locale,
...)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
int _Result;
va_list _ArgList;
__crt_va_start(_ArgList, _Locale);
_Result = _vcprintf_p_l(_Format, _Locale, _ArgList);
__crt_va_end(_ArgList);
return _Result;
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_cprintf_p_l);
// Forwards to _vcprintf_s_l with the default locale (NULL).
_Check_return_opt_
_CRT_STDIO_INLINE int __CRTDECL _cprintf_s(
_In_z_ _Printf_format_string_ char const* const _Format,
...)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
int _Result;
va_list _ArgList;
__crt_va_start(_ArgList, _Format);
_Result = _vcprintf_s_l(_Format, NULL, _ArgList);
__crt_va_end(_ArgList);
return _Result;
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_cprintf_s);
// Forwards to _vcprintf_s_l with the caller-supplied locale.
_Check_return_opt_
_CRT_STDIO_INLINE int __CRTDECL _cprintf_s_l(
_In_z_ _Printf_format_string_params_(0) char const* const _Format,
_In_opt_ _locale_t const _Locale,
...)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
int _Result;
va_list _ArgList;
__crt_va_start(_ArgList, _Locale);
_Result = _vcprintf_s_l(_Format, _Locale, _ArgList);
__crt_va_end(_ArgList);
return _Result;
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_cprintf_s_l);
// === _cscanf family: narrow-character console scanf forwarders. ===
// The non-_s reader is itself deprecated, so its forwarding call to the
// (also deprecated) _vcscanf_l is wrapped in a C4996 suppression.

// Forwards to _vcscanf_l with the caller-supplied locale (deprecated; use _cscanf_s_l).
_Check_return_opt_ _CRT_INSECURE_DEPRECATE(_cscanf_s_l)
_CRT_STDIO_INLINE int __CRTDECL _cscanf_l(
_In_z_ _Scanf_format_string_params_(0) char const* const _Format,
_In_opt_ _locale_t const _Locale,
...)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
int _Result;
va_list _ArgList;
__crt_va_start(_ArgList, _Locale);
#pragma warning(push)
#pragma warning(disable: 4996) // Deprecation
_Result = _vcscanf_l(_Format, _Locale, _ArgList);
#pragma warning(pop)
__crt_va_end(_ArgList);
return _Result;
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_cscanf_l);
// Forwards to _vcscanf_s_l with the default locale (NULL).
_Check_return_opt_
_CRT_STDIO_INLINE int __CRTDECL _cscanf_s(
_In_z_ _Scanf_format_string_ char const* const _Format,
...)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
int _Result;
va_list _ArgList;
__crt_va_start(_ArgList, _Format);
_Result = _vcscanf_s_l(_Format, NULL, _ArgList);
__crt_va_end(_ArgList);
return _Result;
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_cscanf_s);
// Forwards to _vcscanf_s_l with the caller-supplied locale.
_Check_return_opt_
_CRT_STDIO_INLINE int __CRTDECL _cscanf_s_l(
_In_z_ _Scanf_format_string_params_(0) char const* const _Format,
_In_opt_ _locale_t const _Locale,
...)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
int _Result;
va_list _ArgList;
__crt_va_start(_ArgList, _Locale);
_Result = _vcscanf_s_l(_Format, _Locale, _ArgList);
__crt_va_end(_ArgList);
return _Result;
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_cscanf_s_l);
// === _cwprintf family: wide-character console printf forwarders. ===
// Wide-character mirrors of the _cprintf wrappers above; each forwards its
// varargs to the matching _vcwprintf* routine.

// Forwards to _vcwprintf_l with the caller-supplied locale.
_Check_return_opt_
_CRT_STDIO_INLINE int __CRTDECL _cwprintf_l(
_In_z_ _Printf_format_string_params_(0) wchar_t const* const _Format,
_In_opt_ _locale_t const _Locale,
...)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
int _Result;
va_list _ArgList;
__crt_va_start(_ArgList, _Locale);
_Result = _vcwprintf_l(_Format, _Locale, _ArgList);
__crt_va_end(_ArgList);
return _Result;
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_cwprintf_l);
// Forwards to _vcwprintf_p_l with the default locale (NULL).
_Check_return_opt_
_CRT_STDIO_INLINE int __CRTDECL _cwprintf_p(
_In_z_ _Printf_format_string_ wchar_t const* const _Format,
...)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
int _Result;
va_list _ArgList;
__crt_va_start(_ArgList, _Format);
_Result = _vcwprintf_p_l(_Format, NULL, _ArgList);
__crt_va_end(_ArgList);
return _Result;
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_cwprintf_p);
// Forwards to _vcwprintf_p_l with the caller-supplied locale.
_Check_return_opt_
_CRT_STDIO_INLINE int __CRTDECL _cwprintf_p_l(
_In_z_ _Printf_format_string_params_(0) wchar_t const* const _Format,
_In_opt_ _locale_t const _Locale,
...)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
int _Result;
va_list _ArgList;
__crt_va_start(_ArgList, _Locale);
_Result = _vcwprintf_p_l(_Format, _Locale, _ArgList);
__crt_va_end(_ArgList);
return _Result;
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_cwprintf_p_l);
// Forwards to _vcwprintf_s_l with the default locale (NULL).
_Check_return_opt_
_CRT_STDIO_INLINE int __CRTDECL _cwprintf_s(
_In_z_ _Printf_format_string_ wchar_t const* const _Format,
...)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
int _Result;
va_list _ArgList;
__crt_va_start(_ArgList, _Format);
_Result = _vcwprintf_s_l(_Format, NULL, _ArgList);
__crt_va_end(_ArgList);
return _Result;
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_cwprintf_s);
// Forwards to _vcwprintf_s_l with the caller-supplied locale.
_Check_return_opt_
_CRT_STDIO_INLINE int __CRTDECL _cwprintf_s_l(
_In_z_ _Printf_format_string_params_(0) wchar_t const* const _Format,
_In_opt_ _locale_t const _Locale,
...)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
int _Result;
va_list _ArgList;
__crt_va_start(_ArgList, _Locale);
_Result = _vcwprintf_s_l(_Format, _Locale, _ArgList);
__crt_va_end(_ArgList);
return _Result;
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_cwprintf_s_l);
// === _cwscanf family: wide-character console scanf forwarders. ===
// Wide-character mirrors of the _cscanf wrappers above.

// Forwards to _vcwscanf_l with the caller-supplied locale (deprecated; use _cwscanf_s_l).
_Check_return_opt_ _CRT_INSECURE_DEPRECATE(_cwscanf_s_l)
_CRT_STDIO_INLINE int __CRTDECL _cwscanf_l(
_In_z_ _Scanf_format_string_params_(0) wchar_t const* const _Format,
_In_opt_ _locale_t const _Locale,
...)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
int _Result;
va_list _ArgList;
__crt_va_start(_ArgList, _Locale);
#pragma warning(push)
#pragma warning(disable: 4996) // Deprecation
_Result = _vcwscanf_l(_Format, _Locale, _ArgList);
#pragma warning(pop)
__crt_va_end(_ArgList);
return _Result;
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_cwscanf_l);
// Forwards to _vcwscanf_s_l with the default locale (NULL).
_Check_return_opt_
_CRT_STDIO_INLINE int __CRTDECL _cwscanf_s(
_In_z_ _Scanf_format_string_ wchar_t const* const _Format,
...)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
int _Result;
va_list _ArgList;
__crt_va_start(_ArgList, _Format);
_Result = _vcwscanf_s_l(_Format, NULL, _ArgList);
__crt_va_end(_ArgList);
return _Result;
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_cwscanf_s);
// Forwards to _vcwscanf_s_l with the caller-supplied locale.
_Check_return_opt_
_CRT_STDIO_INLINE int __CRTDECL _cwscanf_s_l(
_In_z_ _Scanf_format_string_params_(0) wchar_t const* const _Format,
_In_opt_ _locale_t const _Locale,
...)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
int _Result;
va_list _ArgList;
__crt_va_start(_ArgList, _Locale);
_Result = _vcwscanf_s_l(_Format, _Locale, _ArgList);
__crt_va_end(_ArgList);
return _Result;
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_cwscanf_s_l);
// === _vcprintf family: narrow-character va_list-based console printf. ===
// The _l variants call the __conio_common_vcprintf* engines directly with
// _CRT_INTERNAL_LOCAL_PRINTF_OPTIONS; the locale-less variants forward to
// their _l counterparts with a NULL (default) locale.

// Calls the common console printf engine with the caller-supplied locale.
_Check_return_opt_
_CRT_STDIO_INLINE int __CRTDECL _vcprintf_l(
_In_z_ _Printf_format_string_params_(2) char const* const _Format,
_In_opt_ _locale_t const _Locale,
va_list _ArgList
)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
return __conio_common_vcprintf(
_CRT_INTERNAL_LOCAL_PRINTF_OPTIONS,
_Format, _Locale, _ArgList);
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_vcprintf_l);
// Forwards to _vcprintf_p_l with the default locale (NULL).
_Check_return_opt_
_CRT_STDIO_INLINE int __CRTDECL _vcprintf_p(
_In_z_ char const* const _Format,
va_list _ArgList
)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
return _vcprintf_p_l(_Format, NULL, _ArgList);
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_vcprintf_p);
// Calls the positional-parameter console printf engine with the caller-supplied locale.
_Check_return_opt_
_CRT_STDIO_INLINE int __CRTDECL _vcprintf_p_l(
_In_z_ _Printf_format_string_params_(2) char const* const _Format,
_In_opt_ _locale_t const _Locale,
va_list _ArgList
)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
return __conio_common_vcprintf_p(
_CRT_INTERNAL_LOCAL_PRINTF_OPTIONS,
_Format, _Locale, _ArgList);
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_vcprintf_p_l);
// Forwards to _vcprintf_s_l with the default locale (NULL).
_Check_return_opt_
_CRT_STDIO_INLINE int __CRTDECL _vcprintf_s(
_In_z_ _Printf_format_string_ char const* const _Format,
va_list _ArgList
)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
return _vcprintf_s_l(_Format, NULL, _ArgList);
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_vcprintf_s);
// Calls the _s console printf engine with the caller-supplied locale.
_Check_return_opt_
_CRT_STDIO_INLINE int __CRTDECL _vcprintf_s_l(
_In_z_ _Printf_format_string_params_(2) char const* const _Format,
_In_opt_ _locale_t const _Locale,
va_list _ArgList
)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
return __conio_common_vcprintf_s(
_CRT_INTERNAL_LOCAL_PRINTF_OPTIONS,
_Format, _Locale, _ArgList);
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_vcprintf_s_l);
// === _vcwprintf family: wide-character va_list-based console printf. ===
// Wide-character mirrors of the _vcprintf wrappers above.

// Calls the common wide console printf engine with the caller-supplied locale.
_Check_return_opt_
_CRT_STDIO_INLINE int __CRTDECL _vcwprintf_l(
_In_z_ _Printf_format_string_params_(2) wchar_t const* const _Format,
_In_opt_ _locale_t const _Locale,
va_list _ArgList
)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
return __conio_common_vcwprintf(_CRT_INTERNAL_LOCAL_PRINTF_OPTIONS, _Format, _Locale, _ArgList);
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_vcwprintf_l);
// Forwards to _vcwprintf_p_l with the default locale (NULL).
_Check_return_opt_
_CRT_STDIO_INLINE int __CRTDECL _vcwprintf_p(
_In_z_ _Printf_format_string_ const wchar_t* const _Format,
va_list _ArgList
)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
return _vcwprintf_p_l(_Format, NULL, _ArgList);
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_vcwprintf_p);
// Calls the positional-parameter wide console printf engine with the caller-supplied locale.
_Check_return_opt_
_CRT_STDIO_INLINE int __CRTDECL _vcwprintf_p_l(
_In_z_ _Printf_format_string_params_(2) wchar_t const* const _Format,
_In_opt_ _locale_t const _Locale,
va_list _ArgList
)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
return __conio_common_vcwprintf_p(_CRT_INTERNAL_LOCAL_PRINTF_OPTIONS, _Format, _Locale, _ArgList);
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_vcwprintf_p_l);
// Forwards to _vcwprintf_s_l with the default locale (NULL).
_Check_return_opt_
_CRT_STDIO_INLINE int __CRTDECL _vcwprintf_s(
_In_z_ _Printf_format_string_ wchar_t const* const _Format,
va_list _ArgList
)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
return _vcwprintf_s_l(_Format, NULL, _ArgList);
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_vcwprintf_s);
// Calls the _s wide console printf engine with the caller-supplied locale.
_Check_return_opt_
_CRT_STDIO_INLINE int __CRTDECL _vcwprintf_s_l(
_In_z_ _Printf_format_string_params_(2) wchar_t const* const _Format,
_In_opt_ _locale_t const _Locale,
va_list _ArgList
)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
return __conio_common_vcwprintf_s(_CRT_INTERNAL_LOCAL_PRINTF_OPTIONS, _Format, _Locale, _ArgList);
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_vcwprintf_s_l);
#endif
// === _vcscanf family: narrow-character va_list-based console scanf. ===
// The _s_l variant passes _CRT_INTERNAL_SCANF_SECURECRT to the common engine;
// the deprecated non-_s forwarders suppress C4996 around their calls.

// Forwards to _vcscanf_l with the default locale (deprecated; use _vcscanf_s).
_Check_return_opt_ _CRT_INSECURE_DEPRECATE(_vcscanf_s)
_CRT_STDIO_INLINE int __CRTDECL _vcscanf(
_In_z_ _Scanf_format_string_params_(1) char const* const _Format,
va_list _ArgList
)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
#pragma warning(push)
#pragma warning(disable: 4996) // Deprecation
return _vcscanf_l(_Format, NULL, _ArgList);
#pragma warning(pop)
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_vcscanf);
// Calls the common console scanf engine with the caller-supplied locale (deprecated).
_Check_return_opt_ _CRT_INSECURE_DEPRECATE(_vcscanf_s_l)
_CRT_STDIO_INLINE int __CRTDECL _vcscanf_l(
_In_z_ _Scanf_format_string_params_(2) char const* const _Format,
_In_opt_ _locale_t const _Locale,
va_list _ArgList
)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
return __conio_common_vcscanf(
_CRT_INTERNAL_LOCAL_SCANF_OPTIONS,
_Format, _Locale, _ArgList);
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_vcscanf_l);
// Forwards to _vcscanf_s_l with the default locale (NULL).
_Check_return_opt_
_CRT_STDIO_INLINE int __CRTDECL _vcscanf_s(
_In_z_ _Scanf_format_string_params_(1) char const* const _Format,
va_list _ArgList
)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
return _vcscanf_s_l(_Format, NULL, _ArgList);
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_vcscanf_s);
// Calls the common console scanf engine with the SECURECRT option and the caller-supplied locale.
_Check_return_opt_
_CRT_STDIO_INLINE int __CRTDECL _vcscanf_s_l(
_In_z_ _Scanf_format_string_params_(2) char const* const _Format,
_In_opt_ _locale_t const _Locale,
va_list _ArgList
)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
return __conio_common_vcscanf(
_CRT_INTERNAL_LOCAL_SCANF_OPTIONS | _CRT_INTERNAL_SCANF_SECURECRT,
_Format, _Locale, _ArgList);
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_vcscanf_s_l);
// === _vcwscanf family: wide-character va_list-based console scanf. ===
// Wide-character mirrors of the _vcscanf wrappers above.

// Forwards to _vcwscanf_l with the default locale (deprecated; use _vcwscanf_s).
_Check_return_opt_ _CRT_INSECURE_DEPRECATE(_vcwscanf_s)
_CRT_STDIO_INLINE int __CRTDECL _vcwscanf(
_In_z_ _Scanf_format_string_params_(1) wchar_t const* const _Format,
va_list _ArgList
)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
#pragma warning(push)
#pragma warning(disable: 4996) // Deprecation
return _vcwscanf_l(_Format, NULL, _ArgList);
#pragma warning(pop)
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_vcwscanf);
// Calls the common wide console scanf engine with the caller-supplied locale (deprecated).
_Check_return_opt_ _CRT_INSECURE_DEPRECATE(_vcwscanf_s_l)
_CRT_STDIO_INLINE int __CRTDECL _vcwscanf_l(
_In_z_ _Scanf_format_string_params_(2) wchar_t const* const _Format,
_In_opt_ _locale_t const _Locale,
va_list _ArgList
)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
return __conio_common_vcwscanf(
_CRT_INTERNAL_LOCAL_SCANF_OPTIONS,
_Format, _Locale, _ArgList);
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_vcwscanf_l);
// Forwards to _vcwscanf_s_l with the default locale (NULL).
_Check_return_opt_
_CRT_STDIO_INLINE int __CRTDECL _vcwscanf_s(
_In_z_ _Scanf_format_string_params_(1) wchar_t const* const _Format,
va_list _ArgList
)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
return _vcwscanf_s_l(_Format, NULL, _ArgList);
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_vcwscanf_s);
// Calls the common wide console scanf engine with the SECURECRT option and the caller-supplied locale.
_Check_return_opt_
_CRT_STDIO_INLINE int __CRTDECL _vcwscanf_s_l(
_In_z_ _Scanf_format_string_params_(2) wchar_t const* const _Format,
_In_opt_ _locale_t const _Locale,
va_list _ArgList
)
#if defined _NO_CRT_STDIO_INLINE
;
#else
{
return __conio_common_vcwscanf(
_CRT_INTERNAL_LOCAL_SCANF_OPTIONS | _CRT_INTERNAL_SCANF_SECURECRT,
_Format, _Locale, _ArgList);
}
#endif
_VCRT_DEFINE_IAT_SYMBOL(_vcwscanf_s_l);
_CRT_END_C_HEADER
```
|
```java
/**
 * This file is part of Skript.
 *
 * Skript is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Skript is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with Skript. If not, see <http://www.gnu.org/licenses/>.
 */
/**
* Code related to loading, handling, and saving variables. Some code is found in <tt>lang</tt> as well (e.g. in {@link ch.njol.skript.lang.Variable}).
*
* @author Peter Güttinger
*/
@NonNullByDefault({DefaultLocation.PARAMETER, DefaultLocation.RETURN_TYPE, DefaultLocation.FIELD})
package ch.njol.skript.variables;
import org.eclipse.jdt.annotation.DefaultLocation;
import org.eclipse.jdt.annotation.NonNullByDefault;
```
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.