text
stringlengths 1
22.8M
|
|---|
```objective-c
/* $OpenBSD: utvfu.h,v 1.5 2021/11/24 22:03:05 mglocker Exp $ */
/*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions, and the following disclaimer,
* without modification.
* 2. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* Alternatively, this software may be distributed under the terms of the
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*
* Fushicai USBTV007 Audio-Video Grabber Driver
*
* Product web site:
* path_to_url
*
* No physical hardware was harmed running Windows during the
* reverse-engineering activity
*/
#ifndef _UTVFU_H_
#define _UTVFU_H_
#include <sys/rwlock.h>
#include <sys/queue.h>
#include <sys/videoio.h>
/* Hardware. */
/* USB IN endpoint addresses for the video and audio streams. */
#define UTVFU_VIDEO_ENDP 0x81
#define UTVFU_AUDIO_ENDP 0x83
/* Base value used with the vendor register-access request below. */
#define UTVFU_BASE 0xc000
#define UTVFU_REQUEST_REG 12
/* Interface alternate-setting indexes (default = idle, alt = streaming). */
#define UTVFU_DFLT_IFACE_IDX 0
#define UTVFU_ALT_IFACE_IDX 1
/*
* Number of concurrent isochronous urbs submitted.
* Higher numbers was seen to overly saturate the USB bus.
*/
#define UTVFU_ISOC_TRANSFERS 3
/* A video chunk is 256 bytes on the wire; 240 of those are payload. */
#define UTVFU_CHUNK_SIZE 256
#define UTVFU_CHUNK 240
/* Audio transfer geometry: urb size, per-packet header, ring buffer size. */
#define UTVFU_AUDIO_URBSIZE 20480
#define UTVFU_AUDIO_HDRSIZE 4
#define UTVFU_AUDIO_BUFFER 65536
/* Selectable video inputs. */
#define UTVFU_COMPOSITE_INPUT 0
#define UTVFU_SVIDEO_INPUT 1
/* Chunk header. */
/*
* Each chunk begins with a 32-bit header word:
* bits 31-24 magic (0x88), bits 23-16 frame id,
* bit 15 odd-field flag, bits 11-0 chunk number.
*/
#define UTVFU_MAGIC(hdr) (hdr & 0xff000000U)
#define UTVFU_MAGIC_OK(hdr) ((hdr & 0xff000000U) == 0x88000000U)
#define UTVFU_FRAME_ID(hdr) ((hdr & 0x00ff0000U) >> 16)
#define UTVFU_ODD(hdr) ((hdr & 0x0000f000U) >> 15)
#define UTVFU_CHUNK_NO(hdr) (hdr & 0x00000fffU)
/* TV standards advertised to V4L2 (NTSC-family plus PAL). */
#define UTVFU_TV_STD (V4L2_STD_525_60 | V4L2_STD_PAL)
/* parameters for supported TV norms */
struct utvfu_norm_params {
v4l2_std_id norm; /* V4L2 standard id this entry applies to */
int cap_width; /* capture width in pixels */
int cap_height; /* capture height in pixels */
int frame_len; /* size in bytes of one complete frame */
};
#define UTVFU_MAX_BUFFERS 32
/* One mmap()-able capture buffer handed to userland. */
struct utvfu_mmap {
SIMPLEQ_ENTRY(utvfu_mmap) q_frames;
uint8_t *buf;
struct v4l2_buffer v4l2_buf;
};
typedef SIMPLEQ_HEAD(, utvfu_mmap) q_mmap;
/* Reassembly state for the frame currently being received chunk by chunk. */
struct utvfu_frame_buf {
uint off; /* current write offset into buf */
uint size; /* total frame size in bytes */
uint16_t chunks_done; /* number of chunks received so far */
uint8_t fid; /* frame id from the chunk headers */
uint8_t last_odd; /* odd-field flag of the previous frame */
uint8_t *buf;
};
#define UTVFU_NFRAMES_MAX 40
/* Per-transfer context for one in-flight isochronous video xfer. */
struct utvfu_isoc_xfer {
struct utvfu_softc *sc;
struct usbd_xfer *xfer;
uint16_t size[UTVFU_NFRAMES_MAX];
};
/* Video streaming interface state. */
struct utvfu_vs_iface {
struct usbd_pipe *pipeh;
uint32_t psize; /* isoc packet size */
struct utvfu_isoc_xfer ixfer[UTVFU_ISOC_TRANSFERS];
};
/* Audio streaming interface state. */
struct utvfu_as_iface {
struct usbd_pipe *pipeh;
struct usbd_xfer *xfer;
};
/* Audio ring buffer plus the upper layer's interrupt callback. */
struct utvfu_audio_chan {
uint8_t *start; /* ring buffer start */
uint8_t *end; /* ring buffer end */
uint8_t *cur; /* current fill position */
int blksize;
void *intr_arg;
void (*intr)(void *);
struct utvfu_as_iface iface;
struct rwlock rwlock;
};
/* Per-device structure. */
struct utvfu_softc {
struct device sc_dev;
struct usbd_device *sc_udev;
struct usbd_interface *sc_uifaceh;
/* audio & video device */
struct device *sc_audiodev;
struct device *sc_videodev;
int sc_flags;
#define UTVFU_FLAG_MMAP 0x01
#define UTVFU_FLAG_AS_RUNNING 0x02
int sc_normi; /* index into the norm parameter table */
int sc_nchunks;
int sc_input; /* UTVFU_COMPOSITE_INPUT or UTVFU_SVIDEO_INPUT */
int sc_max_frame_sz;
int sc_nframes;
struct utvfu_vs_iface sc_iface;
struct utvfu_frame_buf sc_fb;
struct utvfu_audio_chan sc_audio;
/* mmap */
struct utvfu_mmap sc_mmap[UTVFU_MAX_BUFFERS];
uint8_t *sc_mmap_buffer;
q_mmap sc_mmap_q;
int sc_mmap_bufsz;
int sc_mmap_count;
/* uplayer */
void *sc_uplayer_arg;
int *sc_uplayer_fsize;
uint8_t *sc_uplayer_fbuffer;
void (*sc_uplayer_intr)(void *);
};
/* Prototypes; implementations live elsewhere in the driver. */
int utvfu_max_frame_size(void);
int utvfu_set_regs(struct utvfu_softc *, const uint16_t regs[][2], int);
void utvfu_image_chunk(struct utvfu_softc *, u_char *);
int utvfu_configure_for_norm(struct utvfu_softc *, v4l2_std_id);
int utvfu_start_capture(struct utvfu_softc *);
int utvfu_mmap_queue(struct utvfu_softc *, uint8_t *, int);
void utvfu_read(struct utvfu_softc *, uint8_t *, int);
void utvfu_audio_decode(struct utvfu_softc *, int);
int utvfu_audio_start(struct utvfu_softc *);
int utvfu_audio_stop(struct utvfu_softc *);
int utvfu_audio_start_chip(struct utvfu_softc *);
int utvfu_audio_stop_chip(struct utvfu_softc *);
#endif
```
|
```java
/*
*
*
* path_to_url
*
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
*/
package org.dartlang.vm.service.consumer;
// This is a generated file.
import org.dartlang.vm.service.element.AllocationProfile;
@SuppressWarnings({"WeakerAccess", "unused"})
public interface AllocationProfileConsumer extends Consumer {
  /** Invoked with the {@link AllocationProfile} element once the VM service responds. */
  void received(AllocationProfile response);
}
```
|
The Song of the Sun is a 1933 Italian-German comedy film directed by Max Neufeld and starring Vittorio De Sica.
Cast
Giacomo Lauri Volpi as himself
Vittorio De Sica as Paladino, l'avvocato
Lilliane Dietz as Frida Brandt
Eva Magni as Signora Bardelli
Livio Pavanelli as Il Giornalista
Umberto Melnati as Bardelli
Celeste Almieri as Il Segretario
References
External links
1933 films
1933 comedy films
German comedy films
Italian comedy films
Films of the Weimar Republic
1930s Italian-language films
German black-and-white films
Italian black-and-white films
Films directed by Max Neufeld
Italian multilingual films
German multilingual films
1933 multilingual films
1930s Italian films
1930s German films
|
```c++
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// For Open Source Computer Vision Library
//
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#ifndef OPENCV_STITCHING_SEAM_FINDERS_HPP
#define OPENCV_STITCHING_SEAM_FINDERS_HPP
#include <set>
#include "opencv2/core.hpp"
#include "opencv2/opencv_modules.hpp"
namespace cv {
namespace detail {
//! @addtogroup stitching_seam
//! @{
/** @brief Base class for a seam estimator.
*/
class CV_EXPORTS_W SeamFinder
{
public:
    CV_WRAP virtual ~SeamFinder() {}
    // Seam finder type identifiers accepted by createDefault().
    enum { NO, VORONOI_SEAM, DP_SEAM };
    /** @brief Estimates seams.
    @param src Source images
    @param corners Source image top-left corners
    @param masks Source image masks to update
    */
    CV_WRAP virtual void find(const std::vector<UMat> &src, const std::vector<Point> &corners,
        CV_IN_OUT std::vector<UMat> &masks) = 0;
    // Factory for the enum values above — presumably maps NO/VORONOI_SEAM/DP_SEAM
    // to the concrete classes below; confirm in the implementation file.
    CV_WRAP static Ptr<SeamFinder> createDefault(int type);
};
/** @brief Stub seam estimator which does nothing.
*/
class CV_EXPORTS_W NoSeamFinder : public SeamFinder
{
public:
    // Intentionally leaves the input masks untouched.
    CV_WRAP void find(const std::vector<UMat>&, const std::vector<Point>&, CV_IN_OUT std::vector<UMat>&) CV_OVERRIDE {}
};
/** @brief Base class for all pairwise seam estimators.
*/
class CV_EXPORTS_W PairwiseSeamFinder : public SeamFinder
{
public:
    CV_WRAP virtual void find(const std::vector<UMat> &src, const std::vector<Point> &corners,
        CV_IN_OUT std::vector<UMat> &masks) CV_OVERRIDE;
protected:
    // Drives findInPair() over the image pairs; see the implementation file
    // for the exact pairing/ROI strategy.
    void run();
    /** @brief Resolves masks intersection of two specified images in the given ROI.
    @param first First image index
    @param second Second image index
    @param roi Region of interest
    */
    virtual void findInPair(size_t first, size_t second, Rect roi) = 0;
    // State shared with subclasses while find()/run() is in progress.
    std::vector<UMat> images_;
    std::vector<Size> sizes_;
    std::vector<Point> corners_;
    std::vector<UMat> masks_;
};
/** @brief Voronoi diagram-based seam estimator.
*/
class CV_EXPORTS_W VoronoiSeamFinder : public PairwiseSeamFinder
{
public:
    CV_WRAP virtual void find(const std::vector<UMat> &src, const std::vector<Point> &corners,
        CV_IN_OUT std::vector<UMat> &masks) CV_OVERRIDE;
    // Overload that only needs image sizes, not pixel data.
    virtual void find(const std::vector<Size> &size, const std::vector<Point> &corners,
        std::vector<UMat> &masks);
private:
    void findInPair(size_t first, size_t second, Rect roi) CV_OVERRIDE;
};
class CV_EXPORTS_W DpSeamFinder : public SeamFinder
{
public:
    // Pixel cost used by the dynamic-programming seam search.
    enum CostFunction { COLOR, COLOR_GRAD };
    DpSeamFinder(CostFunction costFunc = COLOR);
    // String-based overload for the wrapper bindings.
    CV_WRAP DpSeamFinder(String costFunc );
    CostFunction costFunction() const { return costFunc_; }
    void setCostFunction(CostFunction val) { costFunc_ = val; }
    CV_WRAP void setCostFunction(String val);
    virtual void find(const std::vector<UMat> &src, const std::vector<Point> &corners,
        std::vector<UMat> &masks) CV_OVERRIDE;
private:
    // Bit flags classifying a connected component of the pairwise overlap:
    // belongs to the first image, the second, and/or their intersection.
    enum ComponentState
    {
        FIRST = 1, SECOND = 2, INTERS = 4,
        INTERS_FIRST = INTERS | FIRST,
        INTERS_SECOND = INTERS | SECOND
    };
    // Orders image pairs by the squared distance between their image centers.
    class ImagePairLess
    {
    public:
        ImagePairLess(const std::vector<Mat> &images, const std::vector<Point> &corners)
            : src_(&images[0]), corners_(&corners[0]) {}
        bool operator() (const std::pair<size_t, size_t> &l, const std::pair<size_t, size_t> &r) const
        {
            Point c1 = corners_[l.first] + Point(src_[l.first].cols / 2, src_[l.first].rows / 2);
            Point c2 = corners_[l.second] + Point(src_[l.second].cols / 2, src_[l.second].rows / 2);
            int d1 = (c1 - c2).dot(c1 - c2);
            c1 = corners_[r.first] + Point(src_[r.first].cols / 2, src_[r.first].rows / 2);
            c2 = corners_[r.second] + Point(src_[r.second].cols / 2, src_[r.second].rows / 2);
            int d2 = (c1 - c2).dot(c1 - c2);
            return d1 < d2;
        }
    private:
        const Mat *src_;
        const Point *corners_;
    };
    // Predicate: true when two points are closer than minDist (squared compare,
    // avoids the sqrt).
    class ClosePoints
    {
    public:
        ClosePoints(int minDist) : minDist_(minDist) {}
        bool operator() (const Point &p1, const Point &p2) const
        {
            int dist2 = (p1.x-p2.x) * (p1.x-p2.x) + (p1.y-p2.y) * (p1.y-p2.y);
            return dist2 < minDist_ * minDist_;
        }
    private:
        int minDist_;
    };
    // Pipeline helpers for one overlapping image pair; see the implementation
    // file for details of each stage.
    void process(
        const Mat &image1, const Mat &image2, Point tl1, Point tl2, Mat &mask1, Mat &mask2);
    void findComponents();
    void findEdges();
    void resolveConflicts(
        const Mat &image1, const Mat &image2, Point tl1, Point tl2, Mat &mask1, Mat &mask2);
    void computeGradients(const Mat &image1, const Mat &image2);
    bool hasOnlyOneNeighbor(int comp);
    bool closeToContour(int y, int x, const Mat_<uchar> &contourMask);
    bool getSeamTips(int comp1, int comp2, Point &p1, Point &p2);
    void computeCosts(
        const Mat &image1, const Mat &image2, Point tl1, Point tl2,
        int comp, Mat_<float> &costV, Mat_<float> &costH);
    bool estimateSeam(
        const Mat &image1, const Mat &image2, Point tl1, Point tl2, int comp,
        Point p1, Point p2, std::vector<Point> &seam, bool &isHorizontal);
    void updateLabelsUsingSeam(
        int comp1, int comp2, const std::vector<Point> &seam, bool isHorizontalSeam);
    CostFunction costFunc_;
    // processing images pair data
    Point unionTl_, unionBr_;
    Size unionSize_;
    Mat_<uchar> mask1_, mask2_;
    Mat_<uchar> contour1mask_, contour2mask_;
    Mat_<float> gradx1_, grady1_;
    Mat_<float> gradx2_, grady2_;
    // components data
    int ncomps_;
    Mat_<int> labels_;
    std::vector<ComponentState> states_;
    std::vector<Point> tls_, brs_;
    std::vector<std::vector<Point> > contours_;
    std::set<std::pair<int, int> > edges_;
};
/** @brief Base class for all minimum graph-cut-based seam estimators.
*/
class CV_EXPORTS GraphCutSeamFinderBase
{
public:
    // Edge-weight cost models shared by the CPU and GPU graph-cut finders.
    enum CostType { COST_COLOR, COST_COLOR_GRAD };
};
/** @brief Minimum graph cut-based seam estimator. See details in @cite V03 .
*/
class CV_EXPORTS_W GraphCutSeamFinder : public GraphCutSeamFinderBase, public SeamFinder
{
public:
    GraphCutSeamFinder(int cost_type = COST_COLOR_GRAD, float terminal_cost = 10000.f,
        float bad_region_penalty = 1000.f);
    // String-based overload for the wrapper bindings.
    CV_WRAP GraphCutSeamFinder(String cost_type,float terminal_cost = 10000.f,
        float bad_region_penalty = 1000.f);
    ~GraphCutSeamFinder();
    CV_WRAP void find(const std::vector<UMat> &src, const std::vector<Point> &corners,
        std::vector<UMat> &masks) CV_OVERRIDE;
private:
    // To avoid GCGraph dependency
    class Impl;
    Ptr<PairwiseSeamFinder> impl_;
};
#ifdef HAVE_OPENCV_CUDALEGACY
class CV_EXPORTS GraphCutSeamFinderGpu : public GraphCutSeamFinderBase, public PairwiseSeamFinder
{
public:
    GraphCutSeamFinderGpu(int cost_type = COST_COLOR_GRAD, float terminal_cost = 10000.f,
        float bad_region_penalty = 1000.f)
        : cost_type_(cost_type), terminal_cost_(terminal_cost),
          bad_region_penalty_(bad_region_penalty) {}
    void find(const std::vector<cv::UMat> &src, const std::vector<cv::Point> &corners,
        std::vector<cv::UMat> &masks) CV_OVERRIDE;
    void findInPair(size_t first, size_t second, Rect roi) CV_OVERRIDE;
private:
    // Builds the terminal/edge weight matrices consumed by the CUDA graph cut.
    void setGraphWeightsColor(const cv::Mat &img1, const cv::Mat &img2, const cv::Mat &mask1, const cv::Mat &mask2,
        cv::Mat &terminals, cv::Mat &leftT, cv::Mat &rightT, cv::Mat &top, cv::Mat &bottom);
    void setGraphWeightsColorGrad(const cv::Mat &img1, const cv::Mat &img2, const cv::Mat &dx1, const cv::Mat &dx2,
        const cv::Mat &dy1, const cv::Mat &dy2, const cv::Mat &mask1, const cv::Mat &mask2,
        cv::Mat &terminals, cv::Mat &leftT, cv::Mat &rightT, cv::Mat &top, cv::Mat &bottom);
    std::vector<Mat> dx_, dy_;
    int cost_type_;
    float terminal_cost_;
    float bad_region_penalty_;
};
#endif
//! @}
} // namespace detail
} // namespace cv
#endif // OPENCV_STITCHING_SEAM_FINDERS_HPP
```
|
Péter Pálos (born 31 August 1985) is a Hungarian para table tennis player. He is one of Hungary's top performing Paralympic table tennis players as a former World Number One in his sports class 11 on three occasions: April to September 2013, April to September 2014 and August to October 2015.
References
1985 births
Table tennis players from Budapest
Hungarian male table tennis players
Paralympic table tennis players for Hungary
Medalists at the 2004 Summer Paralympics
Medalists at the 2012 Summer Paralympics
Medalists at the 2016 Summer Paralympics
Table tennis players at the 2004 Summer Paralympics
Table tennis players at the 2012 Summer Paralympics
Table tennis players at the 2016 Summer Paralympics
Living people
Paralympic medalists in table tennis
Paralympic gold medalists for Hungary
Paralympic bronze medalists for Hungary
Table tennis players at the 2020 Summer Paralympics
21st-century Hungarian people
|
```go
//go:build !race
// +build !race
package gorilla
import (
"bytes"
"context"
"fmt"
"io"
"net/http"
"testing"
"time"
"github.com/luraproject/lura/v2/config"
"github.com/luraproject/lura/v2/logging"
"github.com/luraproject/lura/v2/proxy"
"github.com/luraproject/lura/v2/transport/http/server"
)
// TestDefaultFactory_ok starts a router with one endpoint per HTTP verb and
// verifies that every endpoint answers 200 with the canned proxy payload and
// the expected KrakenD headers.
func TestDefaultFactory_ok(t *testing.T) {
	buff := bytes.NewBuffer(make([]byte, 1024))
	logger, err := logging.NewLogger("ERROR", buff, "pref")
	if err != nil {
		t.Error("building the logger:", err.Error())
		return
	}
	ctx, cancel := context.WithCancel(context.Background())
	defer func() {
		cancel()
		time.Sleep(5 * time.Millisecond)
	}()
	r := DefaultFactory(noopProxyFactory(map[string]interface{}{"supu": "tupu"}), logger).NewWithContext(ctx)
	expectedBody := "{\"supu\":\"tupu\"}"
	serviceCfg := config.ServiceConfig{
		Port: 8082,
		Endpoints: []*config.EndpointConfig{
			{
				Endpoint: "/get/{id}",
				Method:   "GET",
				Timeout:  10,
				Backend: []*config.Backend{
					{},
				},
			},
			{
				Endpoint: "/post",
				Method:   "POST",
				Timeout:  10,
				Backend: []*config.Backend{
					{},
				},
			},
			{
				Endpoint: "/put",
				Method:   "PUT",
				Timeout:  10,
				Backend: []*config.Backend{
					{},
				},
			},
			{
				Endpoint: "/patch",
				Method:   "PATCH",
				Timeout:  10,
				Backend: []*config.Backend{
					{},
				},
			},
			{
				Endpoint: "/delete",
				Method:   "DELETE",
				Timeout:  10,
				Backend: []*config.Backend{
					{},
				},
			},
		},
	}
	go func() { r.Run(serviceCfg) }()
	time.Sleep(5 * time.Millisecond)
	for _, endpoint := range serviceCfg.Endpoints {
		// The format string needs a verb; a bare string plus extra args renders
		// as "%!(EXTRA ...)" and never hits the server under test.
		url := fmt.Sprintf("http://localhost:%d%s", serviceCfg.Port, endpoint.Endpoint)
		req, _ := http.NewRequest(endpoint.Method, url, http.NoBody)
		req.Header.Set("Content-Type", "application/json")
		resp, err := http.DefaultClient.Do(req)
		if err != nil {
			t.Error("Making the request:", err.Error())
			return
		}
		body, ioerr := io.ReadAll(resp.Body)
		// Close per iteration: a defer inside the loop would keep every response
		// body open (and its connection unavailable) until the test returns.
		resp.Body.Close()
		if ioerr != nil {
			t.Error("Reading the response:", ioerr.Error())
			return
		}
		content := string(body)
		if resp.Header.Get("Cache-Control") != "" {
			t.Error(endpoint.Endpoint, "Cache-Control error:", resp.Header.Get("Cache-Control"))
		}
		if resp.Header.Get(server.CompleteResponseHeaderName) != server.HeaderCompleteResponseValue {
			t.Error(server.CompleteResponseHeaderName, "error:", resp.Header.Get(server.CompleteResponseHeaderName))
		}
		if resp.Header.Get("Content-Type") != "application/json" {
			t.Error(endpoint.Endpoint, "Content-Type error:", resp.Header.Get("Content-Type"))
		}
		if resp.Header.Get("X-Krakend") != "Version undefined" {
			t.Error(endpoint.Endpoint, "X-Krakend error:", resp.Header.Get("X-Krakend"))
		}
		if resp.StatusCode != http.StatusOK {
			t.Error(endpoint.Endpoint, "Unexpected status code:", resp.StatusCode)
		}
		if content != expectedBody {
			t.Error(endpoint.Endpoint, "Unexpected body:", content, "expected:", expectedBody)
		}
	}
}
// TestDefaultFactory_ko registers endpoints that the factory must reject
// (unknown verb, no backends, multiple backends on a PUT) and verifies that
// requests against them get the router's 404 response.
func TestDefaultFactory_ko(t *testing.T) {
	buff := bytes.NewBuffer(make([]byte, 1024))
	logger, err := logging.NewLogger("ERROR", buff, "pref")
	if err != nil {
		t.Error("building the logger:", err.Error())
		return
	}
	ctx, cancel := context.WithCancel(context.Background())
	defer func() {
		cancel()
		time.Sleep(5 * time.Millisecond)
	}()
	r := DefaultFactory(noopProxyFactory(map[string]interface{}{"supu": "tupu"}), logger).NewWithContext(ctx)
	serviceCfg := config.ServiceConfig{
		Debug: true,
		Port:  8083,
		Endpoints: []*config.EndpointConfig{
			{
				Endpoint: "/ignored",
				Method:   "GETTT",
				Backend: []*config.Backend{
					{},
				},
			},
			{
				Endpoint: "/empty",
				Method:   "GETTT",
				Backend:  []*config.Backend{},
			},
			{
				Endpoint: "/also-ignored",
				Method:   "PUT",
				Backend: []*config.Backend{
					{},
					{},
				},
			},
		},
	}
	go func() { r.Run(serviceCfg) }()
	time.Sleep(5 * time.Millisecond)
	for _, subject := range [][]string{
		{"GET", "ignored"},
		{"GET", "empty"},
		{"PUT", "also-ignored"},
	} {
		// The format string needs verbs; a bare string plus extra args renders
		// as "%!(EXTRA ...)" instead of the intended URL.
		req, _ := http.NewRequest(subject[0], fmt.Sprintf("http://localhost:%d/%s", serviceCfg.Port, subject[1]), http.NoBody)
		req.Header.Set("Content-Type", "application/json")
		checkResponseIs404(t, req)
	}
}
// TestDefaultFactory_proxyFactoryCrash verifies that when the proxy factory
// fails for an endpoint, the router skips it and requests against it 404.
func TestDefaultFactory_proxyFactoryCrash(t *testing.T) {
	buff := bytes.NewBuffer(make([]byte, 1024))
	logger, err := logging.NewLogger("ERROR", buff, "pref")
	if err != nil {
		t.Error("building the logger:", err.Error())
		return
	}
	ctx, cancel := context.WithCancel(context.Background())
	defer func() {
		cancel()
		time.Sleep(5 * time.Millisecond)
	}()
	r := DefaultFactory(erroredProxyFactory{fmt.Errorf("%s", "crash!!!")}, logger).NewWithContext(ctx)
	serviceCfg := config.ServiceConfig{
		Debug: true,
		Echo:  true,
		Port:  8084,
		Endpoints: []*config.EndpointConfig{
			{
				Endpoint: "/ignored",
				Method:   "GET",
				Timeout:  10,
				Backend: []*config.Backend{
					{},
				},
			},
		},
	}
	go func() { r.Run(serviceCfg) }()
	time.Sleep(5 * time.Millisecond)
	for _, subject := range [][]string{{"GET", "ignored"}, {"PUT", "also-ignored"}} {
		// The format string needs verbs; a bare string plus extra args renders
		// as "%!(EXTRA ...)" instead of the intended URL.
		req, _ := http.NewRequest(subject[0], fmt.Sprintf("http://localhost:%d/%s", serviceCfg.Port, subject[1]), http.NoBody)
		req.Header.Set("Content-Type", "application/json")
		checkResponseIs404(t, req)
	}
}
// checkResponseIs404 performs the request and verifies the router answered
// with Go's standard 404 page, the KrakenD "incomplete response" marker, and
// none of the success-path headers.
func checkResponseIs404(t *testing.T, req *http.Request) {
	const expectedBody = "404 page not found\n"
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		t.Error("Making the request:", err.Error())
		return
	}
	defer resp.Body.Close()
	raw, readErr := io.ReadAll(resp.Body)
	if readErr != nil {
		t.Error("Reading the response:", readErr.Error())
		return
	}
	got := string(raw)
	if cc := resp.Header.Get("Cache-Control"); cc != "" {
		t.Error("Cache-Control error:", cc)
	}
	if complete := resp.Header.Get(server.CompleteResponseHeaderName); complete != server.HeaderIncompleteResponseValue {
		t.Error(req.URL.String(), server.CompleteResponseHeaderName, "error:", complete)
	}
	if ct := resp.Header.Get("Content-Type"); ct != "text/plain; charset=utf-8" {
		t.Error("Content-Type error:", ct)
	}
	if xk := resp.Header.Get("X-Krakend"); xk != "" {
		t.Error("X-Krakend error:", xk)
	}
	if resp.StatusCode != http.StatusNotFound {
		t.Error("Unexpected status code:", resp.StatusCode)
	}
	if got != expectedBody {
		t.Error("Unexpected body:", got, "expected:", expectedBody)
	}
}
// noopProxyFactory builds proxies that always succeed, replying with the map
// the factory value itself holds as the response data.
type noopProxyFactory map[string]interface{}

// New returns a proxy that ignores its request and answers with a complete,
// canned response.
func (n noopProxyFactory) New(_ *config.EndpointConfig) (proxy.Proxy, error) {
	return func(_ context.Context, _ *proxy.Request) (*proxy.Response, error) {
		return &proxy.Response{
			IsComplete: true,
			Data:       n,
		}, nil
	}, nil
}
// erroredProxyFactory is a proxy factory stub that always fails with the
// configured error, used to exercise the router's error path.
type erroredProxyFactory struct {
	Error error
}

// New always returns the configured error alongside a no-op proxy.
func (e erroredProxyFactory) New(_ *config.EndpointConfig) (proxy.Proxy, error) {
	return proxy.NoopProxy, e.Error
}
// identityMiddleware is a middleware stub whose Handler returns the wrapped
// handler unchanged.
type identityMiddleware struct{}

func (identityMiddleware) Handler(h http.Handler) http.Handler {
	return h
}
```
|
```yaml
name: Corda Node Explorer
description:
Interact with Corda Blockchain Nodes - explore network, vault, and
transactions.
website: path_to_url
category: Developer Tools
repository: path_to_url
keywords:
- java
- kotlin
- dlt
- blockchain
- rpc
- corda
- cordapp
- smart-contract
- enterprise
- r3
- network
license: Apache
screenshots:
  - imageUrl: 'path_to_url'
  - imageUrl: 'path_to_url'
  - imageUrl: 'path_to_url'
```
|
J. Ernest Browning was a cattle rancher in Arizona during the 1900s. He helped to organize the American Quarter Horse Association in 1940. In 1982 he was awarded the National Livestock Association's "Golden Spur Award" for his contributions to the nation's livestock and ranching industries. Also in 1982, he was inducted into the American Quarter Horse Hall of Fame. He was also co-founder of the National Cowboy Hall of Fame.
Browning was born in 1899 in Elk Canyon, New Mexico, and moved to Willcox, Arizona with his family in 1913. During the 1940s and 1950s he acquired several ranches, the High Lonesome, the Schilling, and the Muleshoe, creating a single huge ranch.
In 1967, Browning was inducted into the Arizona Horsemen's Hall of Fame, and in 1991, along with Barry Goldwater, he was inducted into the National Cowboy Hall of Fame.
Browning died on November 19, 1984.
References
Ranchers from Arizona
|
```smalltalk
using System;
using System.Collections.Generic;
using System.Linq;
using NewLife;
/*
*
* list = FindAll(ID > lastid, ID.Asc() & ID.Asc(), null, 0, 1000)
*/
namespace XCode.Transform
{
/// <summary>Incremental extracter that pages through a table by its identity column.</summary>
public class IdentityExtracter : ExtracterBase, IExtracter
{
    #region
    #endregion
    #region
    /// <summary>Initialize the extracter: default the tracking field to the table's identity column and order results by it ascending.</summary>
    public override void Init()
    {
        var fi = Field;
        // Fall back to the table's identity column when no field was configured.
        if (fi == null && FieldName.IsNullOrEmpty()) fi = Field = Factory.Table.Identity;
        base.Init();
        fi = Field;
        if (fi == null) throw new ArgumentNullException(nameof(FieldName), "");
        OrderBy = fi.Asc();
    }
    #endregion
    #region
    /// <summary>Fetch the next batch and advance the cursor to the last row's identity value.</summary>
    /// <param name="set">Extract settings carrying the current cursor (Row) and batch size.</param>
    /// <returns>The fetched entities; empty when the source is exhausted.</returns>
    public virtual IList<IEntity> Fetch(IExtractSetting set)
    {
        if (Field == null) throw new ArgumentNullException(nameof(FieldName), "");
        if (set == null) throw new ArgumentNullException(nameof(set), "");
        var start = set.Row;
        var size = set.BatchSize;
        if (size <= 0) size = 1000;
        // Query one page of rows past the cursor.
        var list = FetchData(start, size);
        // Remember the last identity seen so the next call resumes after it.
        if (list.Count > 0)
        {
            var last = (Int32)list.Last()[FieldName];
            set.Row = last;
        }
        return list;
    }
    /// <summary>Query up to <paramref name="maxRows"/> rows whose identity is &gt;= <paramref name="start"/>, combined with the configured Where clause.</summary>
    /// <param name="start">Lower bound (inclusive) on the identity column.</param>
    /// <param name="maxRows">Maximum number of rows to return.</param>
    /// <returns>The matching entities ordered by the identity column.</returns>
    protected virtual IList<IEntity> FetchData(Int32 start, Int32 maxRows)
    {
        var fi = Field;
        var exp = fi >= start;
        if (!Where.IsNullOrEmpty()) exp &= Where;
        return Factory.FindAll(exp, OrderBy, Selects, 0, maxRows);
    }
    #endregion
}
}
```
|
```c++
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of NVIDIA CORPORATION nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
#include "PxcScratchAllocator.h"
#include "ScConstraintProjectionManager.h"
#include "ScBodySim.h"
#include "ScConstraintSim.h"
#include "ScConstraintInteraction.h"
using namespace physx;
namespace physx
{
namespace Sc
{
// Singly-linked list of fixed-size element blocks carved out of a scratch
// allocator. Grows one block at a time in add(); add() and the constructor
// report allocation failure by returning/leaving a NULL block.
template<typename T, const PxU32 elementsPerBlock>
class ScratchAllocatorList
{
private:
	struct ElementBlock
	{
		PX_FORCE_INLINE ElementBlock() {}
		PX_FORCE_INLINE void init(PxU32 countAtStart) { next = NULL; count = countAtStart; }
		ElementBlock* next;
		PxU32 count;  // number of valid entries in elements[]
		T elements[elementsPerBlock];
	};
	// Private to make the list non-copyable. The definition must still return
	// a value: flowing off the end of a non-void function is undefined
	// behavior if it is ever invoked (e.g. from within the class itself).
	PX_FORCE_INLINE const ScratchAllocatorList& operator=(const ScratchAllocatorList&) { return *this; }
public:
	// Forward-only iterator over all stored elements, in insertion order.
	class Iterator
	{
		friend class ScratchAllocatorList;
	public:
		// Returns a pointer to the next element, or NULL when exhausted.
		T const* getNext()
		{
			if (mCurrentBlock)
			{
				if (mIndex < mCurrentBlock->count)
				{
					return &mCurrentBlock->elements[mIndex++];
				}
				else
				{
					if (mCurrentBlock->next)
					{
						// Only a full block may have a successor.
						PX_ASSERT(mCurrentBlock->count == elementsPerBlock);
						mCurrentBlock = mCurrentBlock->next;
						PX_ASSERT(mCurrentBlock->count > 0);
						mIndex = 1;
						return &mCurrentBlock->elements[0];
					}
					else
						return NULL;
				}
			}
			else
				return NULL;
		}
	private:
		Iterator(const ElementBlock* startBlock) : mCurrentBlock(startBlock), mIndex(0) {}
	private:
		const ElementBlock* mCurrentBlock;
		PxU32 mIndex;
	};
	// Allocates the first block up front; mFirstBlock stays NULL on failure
	// and add() will then always return false.
	PX_FORCE_INLINE ScratchAllocatorList(PxcScratchAllocator& scratchAllocator) : mScratchAllocator(scratchAllocator)
	{
		mFirstBlock = reinterpret_cast<ElementBlock*>(scratchAllocator.alloc(sizeof(ElementBlock), true));
		if (mFirstBlock)
			mFirstBlock->init(0);
		mCurrentBlock = mFirstBlock;
	}
	PX_FORCE_INLINE ~ScratchAllocatorList()
	{
		freeMemory();
	}
	// Copies the element into the tail block, allocating a new block when the
	// tail is full. Returns false on allocation failure.
	PX_FORCE_INLINE bool add(const T& element)
	{
		if (mCurrentBlock)
		{
			if (mCurrentBlock->count < elementsPerBlock)
			{
				mCurrentBlock->elements[mCurrentBlock->count] = element;
				mCurrentBlock->count++;
				return true;
			}
			else
			{
				PX_ASSERT(mCurrentBlock->next == NULL);
				PX_ASSERT(mCurrentBlock->count == elementsPerBlock);
				ElementBlock* newBlock = reinterpret_cast<ElementBlock*>(mScratchAllocator.alloc(sizeof(ElementBlock), true));
				if (newBlock)
				{
					newBlock->init(1);
					newBlock->elements[0] = element;
					mCurrentBlock->next = newBlock;
					mCurrentBlock = newBlock;
					return true;
				}
				else
					return false;
			}
		}
		else
			return false;
	}
	PX_FORCE_INLINE Iterator getIterator() const
	{
		return Iterator(mFirstBlock);
	}
	// Returns all blocks to the scratch allocator. Note: does not reset
	// mFirstBlock/mCurrentBlock, so the list must not be used afterwards
	// (the destructor is the expected caller).
	PX_FORCE_INLINE void freeMemory()
	{
		ElementBlock* block = mFirstBlock;
		while(block)
		{
			ElementBlock* blockToFree = block;
			block = block->next;
			mScratchAllocator.free(blockToFree);
		}
	}
private:
	PxcScratchAllocator& mScratchAllocator;
	ElementBlock* mFirstBlock;
	ElementBlock* mCurrentBlock;
};
}
}
// Constructor: names the node pool for memory debugging builds.
Sc::ConstraintProjectionManager::ConstraintProjectionManager() :
mNodePool(PX_DEBUG_EXP("projectionNodePool"))
{
}
// Queue a constraint for a deferred projection-group rebuild and mark it so
// it cannot be queued twice.
void Sc::ConstraintProjectionManager::addToPendingGroupUpdates(Sc::ConstraintSim& s)
{
	PX_ASSERT(!s.readFlag(ConstraintSim::ePENDING_GROUP_UPDATE));
	const bool inserted = mPendingGroupUpdates.insert(&s);
	PX_UNUSED(inserted);
	PX_ASSERT(inserted);
	s.setFlag(ConstraintSim::ePENDING_GROUP_UPDATE);
}
// Remove a previously queued constraint from the pending group-update set and
// clear its bookkeeping flag.
void Sc::ConstraintProjectionManager::removeFromPendingGroupUpdates(Sc::ConstraintSim& s)
{
	PX_ASSERT(s.readFlag(ConstraintSim::ePENDING_GROUP_UPDATE));
	const bool removed = mPendingGroupUpdates.erase(&s);
	PX_UNUSED(removed);
	PX_ASSERT(removed);
	s.clearFlag(ConstraintSim::ePENDING_GROUP_UPDATE);
}
// Queue a constraint-group root for a deferred projection-tree rebuild.
// Only roots may be queued, and only once.
void Sc::ConstraintProjectionManager::addToPendingTreeUpdates(ConstraintGroupNode& n)
{
	PX_ASSERT(&n == &n.getRoot());
	PX_ASSERT(!n.readFlag(ConstraintGroupNode::ePENDING_TREE_UPDATE));
	const bool inserted = mPendingTreeUpdates.insert(&n);
	PX_UNUSED(inserted);
	PX_ASSERT(inserted);
	n.raiseFlag(ConstraintGroupNode::ePENDING_TREE_UPDATE);
}
// Remove a previously queued group root from the pending tree-update set and
// clear its bookkeeping flag.
void Sc::ConstraintProjectionManager::removeFromPendingTreeUpdates(ConstraintGroupNode& n)
{
	PX_ASSERT(&n == &n.getRoot());
	PX_ASSERT(n.readFlag(ConstraintGroupNode::ePENDING_TREE_UPDATE));
	const bool removed = mPendingTreeUpdates.erase(&n);
	PX_UNUSED(removed);
	PX_ASSERT(removed);
	n.clearFlag(ConstraintGroupNode::ePENDING_TREE_UPDATE);
}
// Allocate a fresh union-find node from the pool and attach it to the body.
PX_INLINE Sc::ConstraintGroupNode* Sc::ConstraintProjectionManager::createGroupNode(BodySim& b)
{
	ConstraintGroupNode* const node = mNodePool.construct(b);
	b.setConstraintGroup(node);
	return node;
}
//
// Implementation of UNION of
// UNION-FIND algo.
// It also updates the group traversal
// linked list.
//
// Union by rank of two constraint groups, plus maintenance of the
// next/tail traversal list so the merged group can be walked linearly.
void Sc::ConstraintProjectionManager::groupUnion(ConstraintGroupNode& root0, ConstraintGroupNode& root1)
{
// Should only get called for the roots
PX_ASSERT(&root0 == root0.parent);
PX_ASSERT(&root1 == root1.parent);
if (&root0 != &root1) //different groups? If not, its already merged.
{
//UNION(this, other); //union-find algo unites groups.
ConstraintGroupNode* newRoot;
ConstraintGroupNode* otherRoot;
if (root0.rank > root1.rank)
{
//hisGroup appended to mygroup.
newRoot = &root0;
otherRoot = &root1;
}
else
{
//myGroup appended to hisGroup.
newRoot = &root1;
otherRoot = &root0;
//there is a chance that the two ranks were equal, in which case the tree depth just increased.
// NOTE(review): rank is bumped even when root1.rank > root0.rank; rank then
// overestimates depth, which is harmless for union-by-rank correctness.
root1.rank++;
}
PX_ASSERT(newRoot->parent == newRoot);
otherRoot->parent = newRoot;
//update traversal linked list:
// Splice the other group's list onto the new root's tail.
newRoot->tail->next = otherRoot;
newRoot->tail = otherRoot->tail;
}
}
//
// Add a body to a constraint projection group.
//
// Put body b (and, if present, the other constrained body) into the same
// constraint projection group, creating union-find nodes on demand. Any
// group whose membership changes gets its cached projection trees purged.
void Sc::ConstraintProjectionManager::addToGroup(BodySim& b, BodySim* other, ConstraintSim& c)
{
// If both bodies of the constraint are defined, we want to fetch the reference to the group root
// from body 0 by default (allows to avoid checking both)
PX_ASSERT(&b == c.getBody(0) || (c.getBody(0) == NULL && &b == c.getBody(1)));
PX_UNUSED(c);
ConstraintGroupNode* myRoot;
if (!b.getConstraintGroup())
myRoot = createGroupNode(b);
else
{
myRoot = &b.getConstraintGroup()->getRoot();
if (myRoot->hasProjectionTreeRoot())
myRoot->purgeProjectionTrees(); // If a new constraint gets added to a constraint group, projection trees need to be recreated
}
if (other)
{
ConstraintGroupNode* otherRoot;
if (!other->getConstraintGroup())
otherRoot = createGroupNode(*other);
else
{
otherRoot = &other->getConstraintGroup()->getRoot();
if (otherRoot->hasProjectionTreeRoot())
otherRoot->purgeProjectionTrees(); // If a new constraint gets added to a constraint group, projection trees need to be recreated
}
//merge the two groups, if disjoint.
groupUnion(*myRoot, *otherRoot);
}
}
//
// Add all projection constraints connected to the specified body to the pending update list but
// ignore the specified constraint.
//
void Sc::ConstraintProjectionManager::markConnectedConstraintsForUpdate(BodySim& b, ConstraintSim* c)
{
	Interaction** interactions = b.getActorInteractions();
	const PxU32 interactionCount = b.getActorInteractionCount();

	for (PxU32 i = 0; i < interactionCount; i++)
	{
		Interaction* interaction = interactions[i];
		if (interaction->getType() != InteractionType::eCONSTRAINTSHADER)
			continue;

		ConstraintSim* connected = static_cast<ConstraintInteraction*>(interaction)->getConstraint();

		// Queue every projecting constraint (other than the ignored one) that
		// is not already scheduled for a group update.
		if ((connected != c) && connected->needsProjection() && (!connected->readFlag(ConstraintSim::ePENDING_GROUP_UPDATE)))
			addToPendingGroupUpdates(*connected);
	}
}
//
// Add all constraints connected to the specified body to an array but
// ignore the specified constraint.
//
PX_FORCE_INLINE static void dumpConnectedConstraints(Sc::BodySim& b, Sc::ConstraintSim* c, Sc::ScratchAllocatorList<Sc::ConstraintSim*>& constraintList)
{
	Sc::Interaction** interactions = b.getActorInteractions();
	const PxU32 interactionCount = b.getActorInteractionCount();

	for (PxU32 i = 0; i < interactionCount; i++)
	{
		Sc::Interaction* interaction = interactions[i];
		if (interaction->getType() != Sc::InteractionType::eCONSTRAINTSHADER)
			continue;

		Sc::ConstraintSim* connected = static_cast<Sc::ConstraintInteraction*>(interaction)->getConstraint();

		// Collect the constraint unless it is the ignored one or is already
		// queued for a pending group update.
		if ((connected != c) && (!connected->readFlag(Sc::ConstraintSim::ePENDING_GROUP_UPDATE)))
		{
			const bool added = constraintList.add(connected);
			PX_UNUSED(added);
			PX_ASSERT(added);
		}
	}
}
//
// Clear the pending flag on a dirty constraint, collect constraints connected
// to its bodies, then add the bodies to a projection group (merging groups as
// needed).
//
PX_FORCE_INLINE void Sc::ConstraintProjectionManager::processConstraintForGroupBuilding(ConstraintSim* c, ScratchAllocatorList<ConstraintSim*>& constraintList)
{
	c->clearFlag(ConstraintSim::ePENDING_GROUP_UPDATE);

	// Find all constraints connected to the two bodies of the dirty constraint.
	// - Constraints to static anchors are ignored (note: kinematics can't be ignored because they might get switched to dynamics any time which
	//   does trigger a projection tree rebuild but not a constraint tree rebuild)
	// - Already processed bodies (those with a constraint group) are ignored as well
	for (PxU32 bodyIdx = 0; bodyIdx < 2; bodyIdx++)
	{
		BodySim* body = c->getBody(bodyIdx);
		if (body && !body->getConstraintGroup())
			dumpConnectedConstraints(*body, c, constraintList);
	}

	BodySim* anyBody = c->getAnyBody();
	PX_ASSERT(anyBody);
	addToGroup(*anyBody, c->getOtherBody(anyBody), *c);	//this will eventually merge some body's constraint groups.
}
//
// Flush the two pending work sets: rebuild dirty projection trees, then
// rebuild constraint groups (and their projection trees) from dirty
// constraints. Scratch memory is used for the temporary constraint list.
//
void Sc::ConstraintProjectionManager::processPendingUpdates(PxcScratchAllocator& scratchAllocator)
{
	//
	// if there are dirty projection trees, then rebuild them
	//
	const PxU32 nbProjectionTreesToUpdate = mPendingTreeUpdates.size();
	if (nbProjectionTreesToUpdate)
	{
		ConstraintGroupNode* const* projectionTreesToUpdate = mPendingTreeUpdates.getEntries();
		for(PxU32 i=0; i < nbProjectionTreesToUpdate; i++)
		{
			ConstraintGroupNode* n = projectionTreesToUpdate[i];
			PX_ASSERT(n == &n->getRoot());  // only root nodes should be in that list
			PX_ASSERT(n->readFlag(ConstraintGroupNode::ePENDING_TREE_UPDATE));
			n->clearFlag(ConstraintGroupNode::ePENDING_TREE_UPDATE);

			// note: it is valid to get here and not have a projection root. This is the case if all nodes of a constraint graph are kinematic
			//       at some point (hence no projection root) and later some of those get switched to dynamic.
			if (n->hasProjectionTreeRoot())
				n->purgeProjectionTrees();
			n->buildProjectionTrees();
		}

		mPendingTreeUpdates.clear();
	}

	//
	// if there are new/dirty constraints, update groups
	//
	const PxU32 nbProjectionConstraintsToUpdate = mPendingGroupUpdates.size();
	if (nbProjectionConstraintsToUpdate)
	{
		ScratchAllocatorList<ConstraintSim*> nonProjectionConstraintList(scratchAllocator);

		ConstraintSim* const* projectionConstraintsToUpdate = mPendingGroupUpdates.getEntries();
#if PX_DEBUG
		// At the beginning the list should only contain constraints with projection.
		// Further below other constraints, connected to the constraints with projection, will be added too.
		for(PxU32 i=0; i < nbProjectionConstraintsToUpdate; i++)
		{
			PX_ASSERT(projectionConstraintsToUpdate[i]->needsProjection());
		}
#endif
		// Pass 1: seed groups from the projecting constraints; connected
		// constraints get collected into nonProjectionConstraintList.
		for(PxU32 i=0; i < nbProjectionConstraintsToUpdate; i++)
		{
			processConstraintForGroupBuilding(projectionConstraintsToUpdate[i], nonProjectionConstraintList);
		}

		// Pass 2: process the collected constraints. The list can grow while
		// iterating (processConstraintForGroupBuilding appends to it), which
		// effectively flood-fills the connected constraint graph.
		ScratchAllocatorList<ConstraintSim*>::Iterator iter = nonProjectionConstraintList.getIterator();
		ConstraintSim* const* nextConstraint = iter.getNext();
		while(nextConstraint)
		{
			processConstraintForGroupBuilding(*nextConstraint, nonProjectionConstraintList);
			nextConstraint = iter.getNext();
		}

		// Now find all the newly made groups and build projection trees.
		// Don't need to iterate over the additionally constraints since the roots are supposed to be
		// fetchable from any node.
		for (PxU32 i=0; i < nbProjectionConstraintsToUpdate; i++)
		{
			ConstraintSim* c = projectionConstraintsToUpdate[i];
			BodySim* b = c->getAnyBody();
			PX_ASSERT(b);
			PX_ASSERT(b->getConstraintGroup());

			ConstraintGroupNode& root = b->getConstraintGroup()->getRoot();
			if (!root.hasProjectionTreeRoot())  // Build projection tree only once
				root.buildProjectionTrees();
		}

		mPendingGroupUpdates.clear();
	}
}
//
// Called if a body or a constraint gets deleted. All projecting constraints of the
// group (except the deleted one) are moved to the dirty list and all group nodes are destroyed.
//
void Sc::ConstraintProjectionManager::invalidateGroup(ConstraintGroupNode& node, ConstraintSim* deletedConstraint)
{
	ConstraintGroupNode* n = &node.getRoot();

	// The whole group is about to be destroyed, so a deferred projection tree
	// rebuild for its root must be unregistered first.
	if (n->readFlag(ConstraintGroupNode::ePENDING_TREE_UPDATE))
	{
		removeFromPendingTreeUpdates(*n);
	}

	while (n)	//go through nodes in constraint group
	{
		// Re-queue all projecting constraints attached to this node's body
		// (except the deleted one) so their groups get rebuilt later.
		markConnectedConstraintsForUpdate(*n->body, deletedConstraint);

		//destroy the body's constraint group information
		ConstraintGroupNode* next = n->next;	//save next node ptr before we destroy it!
		BodySim* b = n->body;
		b->setConstraintGroup(NULL);
		if (n->hasProjectionTreeRoot())
			n->purgeProjectionTrees();
		mNodePool.destroy(n);

		n = next;
	}
}
```
|
```xml
<project xmlns="path_to_url" xmlns:xsi="path_to_url" xsi:schemaLocation="path_to_url path_to_url">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.spotify</groupId>
<artifactId>dockerfile-maven</artifactId>
<version>1.4.14-SNAPSHOT</version>
</parent>
<artifactId>dockerfile-maven-plugin</artifactId>
<packaging>maven-plugin</packaging>
<name>Dockerfile Maven Plugin</name>
<description>Adds support for building Dockerfiles in Maven</description>
<properties>
<docker-client.version>8.16.0</docker-client.version>
</properties>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>com.google.code.findbugs</groupId>
<artifactId>jsr305</artifactId>
<version>2.0.1</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-compress</artifactId>
<version>1.19</version>
</dependency>
<dependency>
<groupId>org.codehaus.plexus</groupId>
<artifactId>plexus-interpolation</artifactId>
<version>1.24</version>
</dependency>
</dependencies>
</dependencyManagement>
<dependencies>
<dependency>
<groupId>com.spotify</groupId>
<artifactId>docker-client</artifactId>
<classifier>shaded</classifier>
<version>${docker-client.version}</version>
</dependency>
<dependency>
<groupId>com.google.auth</groupId>
<artifactId>google-auth-library-oauth2-http</artifactId>
<version>0.6.0</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>23.6.1-jre</version>
</dependency>
<dependency>
<groupId>com.spotify</groupId>
<artifactId>dockerfile-maven-extension</artifactId>
<version>1.4.14-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.apache.maven</groupId>
<artifactId>maven-plugin-api</artifactId>
<version>3.5.4</version>
</dependency>
<dependency>
<groupId>org.apache.maven</groupId>
<artifactId>maven-core</artifactId>
<version>3.5.4</version>
</dependency>
<dependency>
<groupId>org.apache.maven</groupId>
<artifactId>maven-archiver</artifactId>
<version>3.2.0</version>
</dependency>
<dependency>
<groupId>org.sonatype.plexus</groupId>
<artifactId>plexus-sec-dispatcher</artifactId>
<version>1.4</version>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>2.8.0</version>
</dependency>
<dependency>
<groupId>org.apache.maven.plugin-tools</groupId>
<artifactId>maven-plugin-annotations</artifactId>
<version>3.5.2</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<artifactId>maven-checkstyle-plugin</artifactId>
</plugin>
<plugin>
<artifactId>maven-enforcer-plugin</artifactId>
</plugin>
<plugin>
<artifactId>maven-failsafe-plugin</artifactId>
</plugin>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.1</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-plugin-plugin</artifactId>
<version>3.5.2</version>
<configuration>
<goalPrefix>dockerfile</goalPrefix>
<skipErrorNoDescriptorsFound>true</skipErrorNoDescriptorsFound>
</configuration>
<executions>
<execution>
<id>mojo-descriptor</id>
<goals>
<goal>descriptor</goal>
</goals>
</execution>
<execution>
<id>help-goal</id>
<goals>
<goal>helpmojo</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-invoker-plugin</artifactId>
<version>1.9</version>
<dependencies>
<dependency>
<groupId>com.spotify</groupId>
<artifactId>docker-client</artifactId>
<version>${docker-client.version}</version>
</dependency>
</dependencies>
<configuration>
<cloneProjectsTo>${project.build.directory}/it</cloneProjectsTo>
<pomIncludes>
<pomInclude>*/pom.xml</pomInclude>
</pomIncludes>
<postBuildHookScript>verify</postBuildHookScript>
<localRepositoryPath>${project.build.directory}/local-repo</localRepositoryPath>
<settingsFile>src/it/settings.xml</settingsFile>
<streamLogs>true</streamLogs>
<goals>
<goal>clean</goal>
<goal>verify</goal>
</goals>
</configuration>
<executions>
<execution>
<id>integration-test</id>
<goals>
<goal>install</goal>
<goal>integration-test</goal>
<goal>verify</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>
```
|
```stylus
@import "../copyright.styl"
/*! standalone - .bttn-material-circle */
@import "../base.styl"
@import "../bttns/material-circle.styl"
```
|
Schaller GmbH is a German manufacturer of musical instrument hardware based in Postbauer-Heng near Nuremberg, Bavaria. It designs, produces and sells guitar tuners, bridges, tremolos, strap locks, and other accessories primarily for guitars.
Schaller was founded in Feucht near Nuremberg in 1945 by Helmut Schaller as a radio repair shop. Since then, Schaller has developed into one of the most highly regarded suppliers for the music instrument industry.
Dr. Lars Bünning has been the company's owner since 2009.
History
The Schaller company was founded in 1945 by Helmut Schaller (1923–1999). Helmut Schaller was a toolmaker and a radio master mechanic. His radio repair shop prospered in the 1950s. During that time, another department was created, dedicated to the development of amplifiers and speakers, which turned out to be so successful that the entire company was restructured and renamed "Schaller Electronic". Echo and reverb devices were soon added to their portfolio.
By 1953, Schaller had begun manufacturing electronic guitar components such as pickups and switches for Fred Wilfer's guitar company Framus in Bubenreuth, Bavaria.
A couple of years later, other German guitar manufacturers such as Höfner, Hopf and Hoyer also became customers. By the 1960s, American guitar makers, including Fender, Gibson, C. F. Martin and Ovation, started to rely on Schaller products for their guitars and basses.
At the same time, Schaller extended its portfolio to tremolos (1961), bridges (1962) and machine heads (1966). The "M6" tuning machine made a mark as the world's first fully enclosed and self-locking precision tuner.
In 1968, Schaller moved about 15 kilometers from Feucht to Postbauer-Heng into a new site. A new production facility was set up in order to meet the rising demand for Schaller products.
The product portfolio was constantly expanded in the 1970s. The "M4" bass tuners, various bridges, including the "TOM" bridges for Gibson guitars, and numerous other variants of pickups ("Golden 50", "S6", "T6" etc.) were added.
In addition, Schaller was coming up with many product designs on its own. Renowned guitar makers therefore came in touch with Schaller and were convinced to equip their guitars with components from Schaller. In 1977, Floyd D. Rose uttered the idea of a double-locking tremolo system. After a joint workshop (that lasted for about three months) Schaller was able to develop this novel tremolo. The first prototype was produced soon after.
Due to the lack of demand, at the beginning of the 1980s, the production of loudspeakers, amplifiers and reverberators ceased. The core business shifted to metal components for guitars. In 1981 a new product was designed and patented in this respect: the security lock, a part that helps provide a secure connection between guitar and strap. It has been Schaller's most-sold product since then.
The product portfolio steadily expanded during the 1980s and 1990s. This came to a stop after both Helmut Schaller and his son René Schaller died (in 1999 and 1998, respectively). Until the late 1990s violin accessories were added to the portfolio as well as cables, and even special spinning machines for strings and ball ends; however, the newly introduced products did not achieve the same success as the earlier guitar components.
A vital period for the company was from 1999 until 2006, since a community of heirs ran the company then. Grete Schaller (1926–2007), Helmut Schaller's widow, tried to lead the company without being able to generate new impetus, without providing ideas for new products, and without modernizing the production facility at Postbauer-Heng. This led to the company stagnating.
New Schaller
In August 2006, a breakthrough was achieved, when the Schaller company was legally and financially restructured. It was renamed to a limited liability company ("GmbH") by a partnership. Subsequently, Dr. Lars Bünning became managing director of the company by December 2006. In January 2009, Dr. Bünning took over the shares of the GmbH belonging to the Schaller family, and has since then acted as both owner and CEO. Further restructuring occurred after 2007.
Current Schaller products
Machine heads
GrandTune Series
M4 Series
M6 Series
F-Series
Da Vinci
BM Series
Lyra
Hauser
Bridges and tailpieces
Signum
Hannes
TOM- and 3D-bridges
STM and GTM Gibson Les Paul replacement bridges, and accompanying tailpiece
Tremolos
LockMeister
Schaller
Vintage series
SureClaw spring tensioner
Accessories
S-Locks
"Flagship" preamp
"Oyster" piezo pickup
Megaswitches
Covers and frames for pickups
The production of pickups by Schaller was discontinued in 2017.
The shift towards machine heads and metal hardware resulted in the company being renamed from "Schaller Electronic" to "Schaller GmbH".
All Schaller products are manufactured in the Schaller factory in Postbauer-Heng. Schaller customers are both guitar manufacturers and musicians.
References
External links
Schaller 456 - Review
Companies based in Bavaria
Manufacturing companies established in 1950
Privately held companies of Germany
German luthiers
String instrument construction
|
Player's Secrets of Ilien is a supplement to the 2nd edition of the Advanced Dungeons & Dragons fantasy role-playing game.
Contents
Player's Secrets of Ilien is a sourcebook for the Birthright campaign setting, part of the "Player's Secrets" line.
Publication history
Player's Secrets of Ilien was published by TSR, Inc. in 1995.
Reception
Cliff Ramshaw reviewed Player's Secrets of Ilien for Arcane magazine, rating it a 6 out of 10 overall. He describes Ilien as a "weakish region, for a wizard to rule. The player inherits a stash of magical spells and items, along with a rival wizard and a shifty guildmaster who's importing arms faster than Iraq." Ramshaw comments on the series at a whole, that "these sourcebooks are a bit overpriced and some of the history behind the domains is hackneyed or tedious. But all the sourcebooks help you to create believable worlds, with enough conflicting agencies to create strong and imaginative plotlines for years to come."
References
Birthright (campaign setting) supplements
Role-playing game supplements introduced in 1995
|
Turneria pacifica is a species of ant in the genus Turneria. Described by William M. Mann in 1919, the species is endemic to the Solomon Islands and Vanuatu.
References
External links
Dolichoderinae
Insects described in 1919
|
```typescript
import { ModerationStatus } from "../ModerationStatus";
import { IUpdateListItem } from "./IUpdateListItem";

// List item payload extended with a moderation status field.
// OData__ModerationStatus is the REST/OData name of the moderation column —
// the double underscore is part of the field name, not a typo.
export interface IModeratedUpdateListItem extends IUpdateListItem {
OData__ModerationStatus: number;
}

export class ModeratedUpdateListItem implements IModeratedUpdateListItem {
// NOTE(review): Title is declared readonly but never assigned in this
// class — presumably populated elsewhere or intentionally left undefined;
// confirm against callers.
public readonly Title: string;
public OData__ModerationStatus: number;

constructor(moderationStatus: ModerationStatus) {
// NOTE(review): when moderationStatus is null/undefined the && short-circuit
// assigns that falsy value (not a number) — assumes consumers tolerate it;
// TODO confirm.
this.OData__ModerationStatus = moderationStatus && moderationStatus.value;
}
}
```
|
```java
/*
*
* This program and the accompanying materials are made
* which is available at path_to_url
*
*/
package org.eclipse.milo.opcua.stack.core.types.enumerated;
import org.eclipse.milo.opcua.stack.core.serialization.SerializationContext;
import org.eclipse.milo.opcua.stack.core.serialization.UaDecoder;
import org.eclipse.milo.opcua.stack.core.serialization.UaEncoder;
import org.eclipse.milo.opcua.stack.core.serialization.UaEnumeration;
import org.eclipse.milo.opcua.stack.core.serialization.codecs.GenericDataTypeCodec;
import org.eclipse.milo.opcua.stack.core.types.builtin.ExpandedNodeId;
import org.jetbrains.annotations.Nullable;
/**
 * OPC UA NamingRuleType enumeration with the three modelling-rule values
 * (Mandatory, Optional, Constraint), plus a codec for UA (de)serialization.
 */
public enum NamingRuleType implements UaEnumeration {
    Mandatory(1),

    Optional(2),

    Constraint(3);

    // Integer wire value used when encoding/decoding this enum.
    private final int value;

    NamingRuleType(int value) {
        this.value = value;
    }

    @Override
    public int getValue() {
        return value;
    }

    /**
     * Maps a raw integer value onto the corresponding enum constant.
     *
     * @param value the wire value to look up
     * @return the matching constant, or {@code null} if the value is unknown
     */
    @Nullable
    public static NamingRuleType from(int value) {
        switch (value) {
            case 1:
                return Mandatory;
            case 2:
                return Optional;
            case 3:
                return Constraint;
            default:
                return null;
        }
    }

    // Expanded node id identifying this data type in the OPC UA type system.
    public static ExpandedNodeId getTypeId() {
        return ExpandedNodeId.parse("nsu=path_to_url");
    }

    /** Codec that reads/writes NamingRuleType as a UA enumeration value. */
    public static class Codec extends GenericDataTypeCodec<NamingRuleType> {
        @Override
        public Class<NamingRuleType> getType() {
            return NamingRuleType.class;
        }

        @Override
        public NamingRuleType decode(SerializationContext context, UaDecoder decoder) {
            return decoder.readEnum(null, NamingRuleType.class);
        }

        @Override
        public void encode(SerializationContext context, UaEncoder encoder, NamingRuleType value) {
            encoder.writeEnum(null, value);
        }
    }
}
```
|
The PGA Philanthropy Tournament was a professional golf tournament that was held in Japan from 1991 to 2000. It was an event on the Japan Golf Tour and played at several different courses throughout Japan.
Winners
Notes
References
External links
Coverage on Japan Golf Tour's official site
Former Japan Golf Tour events
Defunct golf tournaments in Japan
Recurring sporting events established in 1991
Recurring sporting events disestablished in 2000
|
```go
//
//
// path_to_url
//
// Unless required by applicable law or agreed to in writing, software
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
package storage
import (
"log"
"testing"
"github.com/coreos/etcd/lease"
"github.com/coreos/etcd/storage/backend"
)
// BenchmarkStorePut benchmarks a bare Put on a fresh store backed by a
// temporary backend, using pre-generated random keys and values so the
// timed loop measures only the Put path.
func BenchmarkStorePut(b *testing.B) {
	be, tmpPath := backend.NewDefaultTmpBackend()
	s := NewStore(be, &lease.FakeLessor{})
	defer cleanup(s, be, tmpPath)

	// arbitrary number of bytes per key/value
	const entryLen = 64
	keys := createBytesSlice(entryLen, b.N)
	vals := createBytesSlice(entryLen, b.N)

	b.ResetTimer()
	for n := 0; n < b.N; n++ {
		s.Put(keys[n], vals[n], lease.NoLease)
	}
}
// BenchmarkStoreTxnPut benchmarks the Put operation
// with transaction begin and end, where transaction involves
// some synchronization operations, such as mutex locking.
func BenchmarkStoreTxnPut(b *testing.B) {
	be, tmpPath := backend.NewDefaultTmpBackend()
	s := NewStore(be, &lease.FakeLessor{})
	defer cleanup(s, be, tmpPath)

	// arbitrary number of bytes
	bytesN := 64
	// Pre-generate keys/values outside the timed section so the loop measures
	// only the txn-begin/put/txn-end path.
	keys := createBytesSlice(bytesN, b.N)
	vals := createBytesSlice(bytesN, b.N)

	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		id := s.TxnBegin()
		if _, err := s.TxnPut(id, keys[i], vals[i], lease.NoLease); err != nil {
			log.Fatalf("txn put error: %v", err)
		}
		s.TxnEnd(id)
	}
}
```
|
Yazid Heimur (born 18 November 2002) is a German professional footballer who most recently played as a midfielder for club Viktoria Berlin.
Career
Heimur was one of four players to leave 3. Liga club Viktoria Berlin in the winter transfer window 2021–22 due to "personal reasons".
References
External links
2002 births
Living people
German men's footballers
Men's association football midfielders
Tennis Borussia Berlin players
FC Viktoria 1889 Berlin players
3. Liga players
|
William Robert Ronald Leach (3 April 1883 – 1 November 1969) was an English first-class cricketer and Royal Navy officer.
The son of Major-General Edmund Leach and Frances Elizabeth Ince, he was born at Kensington in April 1883. He was commissioned as a sub-lieutenant in the Royal Navy in September 1902, with promotion to lieutenant following in April 1905. Leach later made a single appearance in first-class cricket for the Royal Navy against the British Army cricket team at Lord's in 1913. He took 3 wickets in the Army first-innings, dismissing Douglas Robinson, Arthur Turner and Francis Wilson to finish with figures of 3 for 61. He was dismissed by Francis Wyatt in both the Royal Navy innings', with the Army winning the match by 10 wickets.
After serving in the First World War, he was placed on the retired list at his own request in December 1919, at which point he held the rank of lieutenant commander. Although retired, he was made a commander in April 1923. Leach died at Eastbourne in November 1969. His brother Henry Leach was a Brigadier-General in the army.
References
External links
1883 births
1969 deaths
Cricketers from Kensington
Royal Navy officers
English cricketers
Royal Navy cricketers
Royal Navy personnel of World War I
Leach family
Military personnel from Middlesex
|
```xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="path_to_url" xmlns:xsi="path_to_url" xsi:schemaLocation="path_to_url path_to_url">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>blueocean-parent</artifactId>
<groupId>io.jenkins.blueocean</groupId>
<version>${revision}${changelist}</version>
</parent>
<artifactId>blueocean-jira</artifactId>
<packaging>hpi</packaging>
<name>JIRA Integration for Blue Ocean</name>
<url>path_to_url</url>
<properties>
<jacoco.haltOnFailure>true</jacoco.haltOnFailure>
</properties>
<dependencies>
<dependency>
<groupId>io.jenkins.blueocean</groupId>
<artifactId>blueocean-rest</artifactId>
</dependency>
<!-- JIRA plugin dependencies -->
<dependency>
<groupId>org.jenkins-ci.plugins</groupId>
<artifactId>jira</artifactId>
</dependency>
<dependency>
<groupId>org.jenkins-ci.plugins</groupId>
<artifactId>jackson2-api</artifactId>
</dependency>
<dependency>
<groupId>org.jenkins-ci.plugins</groupId>
<artifactId>apache-httpcomponents-client-4-api</artifactId>
</dependency>
<!-- FIXME removed once Guava has been removed from Jira plugin -->
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>32.0.0-jre</version>
<scope>test</scope>
</dependency>
</dependencies>
</project>
```
|
The Oriental bay owl (Phodilus badius) is a type of bay owl, usually classified with barn owls. It is completely nocturnal, and can be found throughout Southeast Asia and parts of India. It has several subspecies. It has a heart-shaped face with earlike extensions. The Congo bay owl (Phodilus prigoginei) was formerly classified as a subspecies of Oriental bay owl due to insufficient knowledge, but it has turned out that it might not even belong to the same genus. The Sri Lanka bay owl was also considered a subspecies.
A population of this species has apparently become extinct on Samar Island in the Philippines during the 20th century. It was described as Phodilus badius riverae and was only ever known from a single specimen, which was lost in a bombing raid in 1945. The validity of this taxon is uncertain; it is usually synonymized with the nominate subspecies (for reasons of biogeography) or the subspecies saturatus (from external appearance); it might have been a distinct species, however.
Distribution
The Oriental bay owl is typically found and distributed throughout Nepal, Sikkim, Assam, Nagaland, Manipur, Burma and Thailand, east to south China, and south through the Malay Peninsula to the Greater Sundas. They are most comfortable living in woodland, plantations and mangrove swamps at altitudes of up to , leading to them being scattered around India, Thailand, Singapore, the Philippines and other parts of Southeast Asia.
Habitat and ecology
The Oriental bay owl is very scarcely known, even in its area of origin. It is prevalent in the tropical moist forests as well as in southeast Asia. The species can be seen in the tropical wet evergreen forest at high altitudes, reaching heights of 1,040–1,050 m above sea level. These sightings of the owl are rare and occurred between February and June 1998 in Sengaltheri, Tamil Nadu, India.
Also located in the evergreen forest of India. There are not many in the population, and so they are not very well known. Their habitat is in two separate locations in India: northern in the Himalayan foothills and that general location, and in the southern areas, including the southern Western Ghats of Kerala/Tamil Nadu, as well as some areas of Sri Lanka (specifically the wetlands). This owl tends to be nocturnal and stays to itself.
This owl tends to reside at high elevations ranging from 200m to a peak of 2300m according to area in which it lives. At the foothills it tends to stay at areas between 200–1,000m elevation, but due to destruction of many of the foothills/ forests in the area (caused by mankind), some of the owls reside at higher altitudes reaching the peak of 2,300m in montane forests.
Breeding habits
This species is caring towards their offspring; both the male and female care for their young. Incubation lasts for approximately 36–42 days and there is usually a 100 percent survival rate for the zygote. The babies are quick to develop and are protected by both parents. They are more protective of the nestlings (young chicks) than of the older chicks. They feed their offspring an assortment of lizards, frogs, rodents and on occasion, insects.
Breeding time is usually from the months of March, April and May. The eggs tend to be laid between March and July. They tend to nest in hollowed out tree trunks as well as any other hole it can find. They can sometimes be found nesting in palms in Java. As mentioned above, both parents care for the offspring. At a time, around 3 to 5 eggs are laid; the appearance of the eggs are white and rather small. The measurements of the eggs on the oblong portion are around 38–40.6mm and on the smaller portion of the egg, the circumference is approximately 30–31 mm. Although both parents care for the offspring, only the mother incubated the egg starting at the 2 day mark. The father is the one who hunts and brings nourishment back for the offspring.
Morphology and vocal characteristics
Individuals of the Oriental bay owl are small animals with distinguishing features such as highly angular heads. The top and back half of their bodies are a deep chestnut color while the bottom of their bodies as well as their stomachs are a cream and tanish color. Oriental bay owls have large, striking black eyes which are highlighted due to the color contrast of their faces. Being that they are small and typically stand upright, they are very difficult to spot as they find comfort in their vast range of habitats. Their ability to resonate is remarkable, for they not only hoot, but also scream and create high pitched whistle songs. Their songs typically consist of four to seven melancholic whistles, lasting anywhere between two and eight seconds, and the pitch slightly increases towards the end. They sometimes alternate with a shorter whistle, that goes kleet-kleet-kleet or kleek-kleek-kleek, when moving between places.
The Oriental bay owl's measurements are as follows:
Size: Length 22.5–29 cm. Wing length 172-237mm. Tail length 168-239mm. Weight 255-308g.
Females are often larger than the males of this species.
Food
The Oriental bay owls prey upon small rodents, bats, birds, snakes, frogs, lizards, magpies and large arthropods such as beetles, grasshoppers, and spiders. This species hunts from a perch and flies through trees in order to catch its prey. Due to their short and rounded wings, this makes hunting easier, especially by the water. Being that the owls find themselves most comfortable in Southeast Asia and parts of India, they are surrounded by a very humid and tropical climate. This offers the species an immense food source and trees to hunt from. The owl feeds its young an assortment of rodents, lizards and frogs when caring for them.
References
Bruce, M. D. (1999): 15. Oriental Bay-owl. In: del Hoyo, J.; Elliott, A. & Sargatal, J. (eds): Handbook of Birds of the World, Volume 5: Barn-owls to Hummingbirds: 75, plate 3. Lynx Edicions, Barcelona.
Teng, Liwei, et al. “A Preliminary Observation of Breeding Habits of Oriental Bay Owl.” A Preliminary Observation of Breeding Habits of Oriental Bay Owl--《Journal of Northeast Forestry University》2004年01期, 2004, http://en.cnki.com.cn/Article_en/CJFDTotal-DBLY200401012.htm
External links
ARKive - images and movies of the Oriental bay owl (Phodilus badius)
Handbook of the Birds of the World
oriental bay owl
Birds of Southeast Asia
Birds of Yunnan
Birds of Hainan
oriental bay owl
oriental bay owl
|
The 44th Missile Wing (44 MW) is an inactive United States Air Force unit. Its last assignment was with Twentieth Air Force, being assigned to Ellsworth AFB, South Dakota. It was inactivated on 4 July 1994.
For over 40 years the 44th was a front-line Strategic Air Command wing, initially as a B-47 Stratojet medium bomber unit in the 1950s. With the phaseout of the B-47, the wing became a LGM-30 Minuteman ICBM unit in the 1960s, being inactivated in 1994 as part of the drawdown of U.S. strategic forces after the end of the Cold War.
During World War II, its predecessor unit, the 44th Bombardment Group was the first B-24 Liberator heavy bombardment group of VIII Bomber Command stationed in England. Colonel Leon W. Johnson, while commander of the 44th Bombardment Group, was awarded the Medal of Honor for his actions during the Ploesti Raid on 1 August 1943.
History
For additional history and lineage, see 44th Operations Group
Bombardment Wing
The 44th Bombardment Wing, Medium was established in late December 1950 as part of the postwar Hobson Plan. The 90th Bombardment Group, reactivated by Strategic Air Command (SAC) in 1947, was assigned as its combat group. The new wing was organized at March AFB, California, where it received B-29 Superfortresses along with some TB-29s. The wing depended on the 22d Bombardment Wing for its initial cadre and for help in becoming organized.
The wing was reassigned to Lake Charles AFB, Louisiana on 1 August 1951; its mission was to train B-29 and RB-29 aircrews and mechanics for combat duty with units engaged in Korean War combat duty with Far East Air Forces. From 10 October 1951 to 15 May 1952, trained all elements of the 68th Strategic Reconnaissance Wing.
Replaced the propeller-driven B-29s with new B-47E Stratojet swept-wing medium bombers in 1953, capable of flying at high subsonic speeds and primarily designed for penetrating the airspace of the Soviet Union. Deployed at Sidi Slimane Air Base, French Morocco, 19 January – 22 February 1953 and 19 April – 17 June 1954.
In the late 1950s, the B-47 was considered to be reaching obsolescence, and was being phased out of SAC's strategic arsenal as improved Soviet air defenses made the aircraft vulnerable. Began sending aircraft to other B-47 wings as replacements in late 1959, being phased down for inactivation. The 44th Bombardment Wing was inactivated on 15 June 1960; some aircraft and many personnel were reassigned to the 68th Bombardment Wing which remained at Lake Charles AFB flying B-47s until 1963.
Missile Wing
The history of the 44th Missile Wing begins two years before its activation; with the establishment of the 850th Strategic Missile Squadron on 1 December 1960. Assigned to the 28th Bombardment Wing at Ellsworth AFB, South Dakota, the 850th SMS operated the first-generation HGM-25A Titan I ICBM at three dispersed sites near Wicksville, Hermosa, and Sturgis SD. However the Titan I's life span was short in western South Dakota.
About the same time, work began on installations for the second-generation Minuteman missile. On 21 August 1961, construction began on the LGM-30B Minuteman I facilities. For more than a year this squadron prepared for the emplacement of the Minuteman which finally arrived in 1962, shortly after the activation of the 44th Strategic Missile Wing (SMW) in January. At that time Headquarters SAC also named the 44 SMW as host wing at Ellsworth. With its activation, the 850th SMS was reassigned to the 44th SMW, making the 28th Bombardment Wing a fully B-52 Stratofortress organization.
During 1962, three new strategic missile squadrons, the 66th, 67th, and 68th, were activated to support the new Minuteman I system. The 67th Strategic Missile Squadron joined the 44th in August, followed by the 68th Strategic Missile Squadron in September 1962. A 44th Missile Maintenance Squadron was established at the same time. Each strategic missile squadron supported five flights of Minuteman missiles with 50 missiles per squadron. A total of 150 launch facilities were constructed to house the missiles. The first Minuteman missile was positioned near Wall, SD in April 1963. All Minuteman I missiles were in place by the end of 1963.
On 19 November 1964, Secretary of Defense Robert McNamara announced the phase-out of remaining first-generation Titan I missiles by the end of June 1965. Consequently, the Titan Is of the 850th SMS were removed from alert status on 4 January 1965. The last missile was shipped out on 12 February. The Air Force subsequently inactivated the squadron on 25 March.
Ellsworth was slated to host a unique series of operational tests. Approved by the Secretary of Defense in November 1964, "Project Long Life" called for the short-range operational base launch of three modified Minuteman IB ICBMs to provide a realistic test for this system. Each missile would contain enough propellant for a 7-second flight and have inert upper stages and reentry vehicles. On 1 March 1965, "Operation Long Life" took place. This was the first of three scheduled launches of the Minuteman system. A missile with seven seconds of fuel was launched.
With the test proving successful, the additional two launches were canceled. This was the only test launch in US ICBM history to be fired from an operational site. It successfully demonstrated the ability of a SAC missile crew to launch an ICBM.
The 44 SMW played a key role in establishing the Airborne Launch Control System (ALCS) in the late 1960s. The ALCS was created to provide a survivable launch capability for the Minuteman ICBM force. From 1967 to 1970, one of the squadrons that ALCS missile crews belonged to was the 68th SMS at Ellsworth AFB, SD. These ALCS crews worked together with the 28th Air Refueling Squadron (AREFS) at Ellsworth AFB, who operated several EC-135 variants to include the EC-135A, EC-135G, and EC-135L, all of which had ALCS equipment installed on board. In 1970, the ALCS mission was transferred from the 68th SMS to the 4th Airborne Command and Control Squadron, which was assigned to the 28th Bombardment Wing at Ellsworth AFB, SD.
On 30 June 1971, the 44 SMW was named host unit at Ellsworth AFB when the 821st Strategic Aerospace Division was inactivated. The wing was reassigned under the 4th Air Division headquartered at F.E. Warren AFB, Wyoming. The wing was later assigned as part of the 57th Air Division headquartered at Minot AFB, North Dakota.
In October 1971, the transition from Minuteman I to LGM-30F Minuteman II began. The transition, known as "Force Modernization", was complete in March 1973. With these new missiles in place, Ellsworth was selected to host "Giant Pace Test 74-1", the first Simulated Electronic Launch-Minuteman (SELM) exercise. During this test, 11 SELM-configured Minuteman II ICBMs underwent successful simulated launch on command from both underground launch-control centers and the Airborne Launch Control System.
During February 1991, the Secretary of Defense announced that the Air Force would begin retirement of older weapon systems in response to the end of the Cold War and a declining defense budget. The deactivation of the Minuteman II missile system was announced on 15 April 1991. The schedule for Ellsworth included a one squadron per year draw-down beginning with the 67 SMS, followed by the 66 SMS, and finally the 68 SMS.
On 1 September 1991, under the "Objective Wing" concept adapted by the Air Force, the wing was renamed the 44th Missile Wing. The ICBM squadrons were reassigned to the newly established 44th Operations Group, along with the lineage, honors and history of the 44th Bombardment Group.
On 28 September 1991, in response to President Bush's directive to stand down the Minuteman II, personnel of the 44 OG worked around the clock to dissipate launch codes and pin safety control switches at 150 launch facilities. Removal of the first Minuteman II missile assigned to the 44 OG occurred at G-02, near Red Owl, South Dakota, on 3 December 1991. On 6 April 1992, the first launch control center shut down.
On 1 June 1992, the 44th Missile Wing was relieved of its emergency war order mission and its primary focus was deactivation of the Minuteman II weapon system. This day also marked the end of SAC and the beginning of Air Combat Command (ACC).
The 67th Missile Squadron (MS) was inactivated on 15 August 1992, and the 66 MS was inactivated on 1 September 1993. On 1 July 1993 the 44 Missile Wing changed hands from ACC to Air Force Space Command along with all other ICBM wings. Deactivation of the entire missile complex ended in April 1994.
With its mission complete, the 44th Missile Wing was formally inactivated on 4 July 1994.
Lineage
Established as 44th Bombardment Wing, Medium, on 20 December 1950.
Activated on 2 January 1951
Discontinued on 15 June 1960
Re-designated 44th Strategic Missile Wing (ICBM—Minuteman) on 24 November 1961
Organized on 1 January 1962.
Re-designated: 44th Missile Wing 1 September 1991
Inactivated 5 July 1994
Assignments
Fifteenth Air Force, 20 December 1950
12th Air Division, 10 February 1951
21st Air Division, 4 August 1951
806th Air Division, 16 June 1952
Attached to: 5th Air Division from 19 January to 22 February 1953
Attached to: 5th Air Division 9 April to 17 June 1954
Department of the Air Force, 15 June 1960 – 23 November 1961
821st Strategic Aerospace Division, 1 January 1962
4th Strategic Missile Division, 30 June 1971
57th Air Division, 1 May 1982
4th Air Division, 23 January 1987
12th Air Division, 15 July 1988
Strategic Warfare Center, 31 July 1990
Twentieth Air Force, 31 July 1991 – 5 July 1994
ComponentsGroup 44th Bombardment (later Operations) Group: 2 January 1951 – 16 June 1952 (not operational, 10 February 1951 – 16 June 1952), 1 September 1991 – 5 July 1994Squadrons' 44th Air Refueling Squadron: 20 April 1953 – 1 June 1960 (not operational, 20 April – c. 15 May 1953; detached 27 June – 11 October 1957).
66th Bombardment (later, Strategic Missile, later Missile) Squadron: 16 Jun 1952 – 15 Jun 1960; 1 Sep 1962 – 1 Sep 1991
67th Bombardment (later, Strategic Missile, later Missile) Squadron: 16 Jun 1952 – 15 Jun 1960; 1 Sep 1962 – 1 Sep 1991
68th Bombardment (later, Strategic Missile, later Missile) Squadron: 16 Jun 1952 – 15 Jun 1960; 1 Sep 1962 – 1 Sep 1991
506th Bombardment Squadron: 1 December 1958 – 15 June 1960
850th Strategic Missile Squadron: 1 January 1962 – 25 March 1965
Bases Assigned
March AFB, California, 2 January 1951 – 1 August 1951
Lake Charles (later Chennault) AFB, Louisiana 1 August 1951 – 15 June 1960
Deployed at Sidi Slimane Air Base, French Morocco, 19 January – 22 February 1953 and 19 April – 17 June 1954.
Ellsworth AFB, South Dakota, 24 November 1961 – 5 July 1994
Aircraft and missiles
TB-29 Superfortress, 1951; B-29 Superfortress, 1951–1953
B-47 Stratojet, 1953–1960
KC-97 Stratofreighter, 1953–1957, 1957–1960
HGM-25A Titan I, 1962–1965
850th Strategic Missile Squadron
Operated three missile sites: (1 Dec 1960 – 25 Mar 1965)
850-A, 4 miles NNW of Wicksville, South Dakota
850-B, 5 miles SSE of Hermosa, South Dakota
850-C, 10 miles SE of Sturgis, South Dakota
LGM-30B Minuteman I, 1963–1973
Airborne Launch Control System, 1967–1970
LGM-30F Minuteman II, 1971–1994
LGM-30F Minuteman III Missile Alert Facilities (MAF) (each controlling 10 missiles) are located as follows:
66th Missile Squadron
A-01 19.9 mi S of Howes, SD,
B-01 7.5 mi NxNW of Wall SD,
C-01 10.1 mi N of Philip SD,
*D-01 6.7 mi SxSW of Cottonwood SD,
*D-09 (Launch Facility) 4.4 mi SxSW of Quinn SD,
*Designated as part of the Minuteman Missile National Historic Site
E-01 6.3 mi NxNE of Kadoka SD,
67th Missile Squadron
F-01 61.0 mi NxNE of Ellsworth AFB, SD.
G-01 11.3 mi N of Union Center SD,
H-01 10.0 mi SW of Union Center SD,
I-01 5.7 mi E of White Owl SD,
J-01 13.8 mi SE of Maurine SD,
68th Missile Squadron
K-01 5.6 mi N of Spearfish SD,
L-01 6.2 mi SxSE of Vale SD,
M-01 17.7 mi NxNW of Belle Fourche SD,
N-01 6.7 mi NW of Newell SD,
O-01 38.5 mi W of Opal, SD,
References for commands and major units assigned, components and stations:Maurer, Maurer (1983). Air Force Combat Units of World War II. Maxwell AFB, Alabama: Office of Air Force History. .
See also
List of B-29 Superfortress operators
List of B-47 units of the United States Air Force
44th Missile Wing LGM-30 Minuteman Missile Launch Sites
References
Ellsworth AFB Minuteman Missile Site Coordinates
Mackay, Ron and Steve Adams. The 44th Bomb Group in World War II: The 'Flying Eight-Balls' Over Europe in the B-24''. Atglen, Pennsylvania: Schiffer Publishing, 2007. .
044
Military units and formations in South Dakota
1951 establishments in California
1994 disestablishments in South Dakota
|
```go
/*
*/
package comm
import (
"crypto/tls"
"crypto/x509"
"os"
"path/filepath"
"testing"
"time"
"github.com/hyperledger/fabric/common/crypto/tlsgen"
"github.com/stretchr/testify/require"
"google.golang.org/grpc"
"google.golang.org/grpc/keepalive"
)
// TestServerKeepaliveOptions verifies that the default keepalive settings
// translate into exactly one grpc.KeepaliveParams and one
// grpc.KeepaliveEnforcementPolicy server option, in that order.
func TestServerKeepaliveOptions(t *testing.T) {
	t.Parallel()

	// Build the option values we expect ServerKeepaliveOptions to produce.
	params := keepalive.ServerParameters{
		Time:    DefaultKeepaliveOptions.ServerInterval,
		Timeout: DefaultKeepaliveOptions.ServerTimeout,
	}
	policy := keepalive.EnforcementPolicy{
		MinTime:             DefaultKeepaliveOptions.ServerMinInterval,
		PermitWithoutStream: true,
	}
	want := []grpc.ServerOption{
		grpc.KeepaliveParams(params),
		grpc.KeepaliveEnforcementPolicy(policy),
	}

	got := DefaultKeepaliveOptions.ServerKeepaliveOptions()

	// gRPC option constructors return fresh closures and the server embeds
	// channels, so neither the options nor an applied server can be compared
	// for equality; fall back to comparing count and dynamic types.
	require.Len(t, got, len(want))
	for i, opt := range got {
		require.IsType(t, want[i], opt)
	}
}
// TestClientKeepaliveOptions verifies that the default keepalive settings
// translate into exactly one grpc.WithKeepaliveParams dial option.
func TestClientKeepaliveOptions(t *testing.T) {
	t.Parallel()

	params := keepalive.ClientParameters{
		Time:                DefaultKeepaliveOptions.ClientInterval,
		Timeout:             DefaultKeepaliveOptions.ClientTimeout,
		PermitWithoutStream: true,
	}
	want := []grpc.DialOption{grpc.WithKeepaliveParams(params)}

	got := DefaultKeepaliveOptions.ClientKeepaliveOptions()

	// Dial option constructors return fresh closures, so only the count and
	// the dynamic types can be compared — not the values themselves.
	require.Len(t, got, len(want))
	for i, opt := range got {
		require.IsType(t, want[i], opt)
	}
}
// TestClientConfigClone checks that a by-value copy of ClientConfig acts as a
// fully independent clone: after copying, mutating nested KaOpts/SecOpts
// fields or replacing byte-slice fields on either instance must not be
// observable through the other instance.
func TestClientConfigClone(t *testing.T) {
	origin := ClientConfig{
		KaOpts: KeepaliveOptions{
			ClientInterval: time.Second,
		},
		SecOpts: SecureOptions{
			Key: []byte{1, 2, 3},
		},
		DialTimeout:  time.Second,
		AsyncConnect: true,
	}
	// Plain struct assignment is the "clone" operation under test.
	clone := origin
	// Same content, different inner fields references.
	require.Equal(t, origin, clone)

	// We change the contents of the fields and ensure it doesn't
	// propagate across instances.
	origin.AsyncConnect = false
	origin.KaOpts.ServerInterval = time.Second
	origin.KaOpts.ClientInterval = time.Hour
	origin.SecOpts.Certificate = []byte{1, 2, 3}
	origin.SecOpts.Key = []byte{5, 4, 6}
	origin.DialTimeout = time.Second * 2
	clone.SecOpts.UseTLS = true
	clone.KaOpts.ServerMinInterval = time.Hour

	// Expected end state of each instance after the divergent mutations
	// above; any cross-contamination between origin and clone would make
	// one of the two equality checks below fail.
	expectedOriginState := ClientConfig{
		KaOpts: KeepaliveOptions{
			ClientInterval: time.Hour,
			ServerInterval: time.Second,
		},
		SecOpts: SecureOptions{
			Key:         []byte{5, 4, 6},
			Certificate: []byte{1, 2, 3},
		},
		DialTimeout: time.Second * 2,
	}
	expectedCloneState := ClientConfig{
		KaOpts: KeepaliveOptions{
			ClientInterval:    time.Second,
			ServerMinInterval: time.Hour,
		},
		SecOpts: SecureOptions{
			Key:    []byte{1, 2, 3},
			UseTLS: true,
		},
		DialTimeout:  time.Second,
		AsyncConnect: true,
	}
	require.Equal(t, expectedOriginState, origin)
	require.Equal(t, expectedCloneState, clone)
}
// TestSecureOptionsTLSConfig exercises SecureOptions.TLSConfig across the
// supported permutations: TLS disabled, plain TLS, server-name override,
// custom server root CAs, mutual-TLS client key pairs, and time shifting —
// plus the error paths for malformed root certificates and incomplete
// client key pairs.
func TestSecureOptionsTLSConfig(t *testing.T) {
	ca1, err := tlsgen.NewCA()
	require.NoError(t, err, "failed to create CA1")
	ca2, err := tlsgen.NewCA()
	require.NoError(t, err, "failed to create CA2")
	ckp, err := ca1.NewClientCertKeyPair()
	require.NoError(t, err, "failed to create client key pair")
	clientCert, err := tls.X509KeyPair(ckp.Cert, ckp.Key)
	require.NoError(t, err, "failed to create client certificate")

	// newCertPool builds an x509.CertPool from the given CAs' PEM bytes.
	newCertPool := func(cas ...tlsgen.CA) *x509.CertPool {
		cp := x509.NewCertPool()
		for _, ca := range cas {
			ok := cp.AppendCertsFromPEM(ca.CertBytes())
			require.True(t, ok, "failed to add cert to pool")
		}
		return cp
	}

	tests := []struct {
		desc        string
		so          SecureOptions // input options under test
		tc          *tls.Config   // expected resulting config (nil when TLS disabled)
		expectedErr string        // non-empty when TLSConfig is expected to fail
	}{
		{desc: "TLSDisabled"},
		{desc: "TLSEnabled", so: SecureOptions{UseTLS: true}, tc: &tls.Config{MinVersion: tls.VersionTLS12}},
		{
			desc: "ServerNameOverride",
			so:   SecureOptions{UseTLS: true, ServerNameOverride: "bob"},
			tc:   &tls.Config{MinVersion: tls.VersionTLS12, ServerName: "bob"},
		},
		{
			desc: "WithServerRootCAs",
			so:   SecureOptions{UseTLS: true, ServerRootCAs: [][]byte{ca1.CertBytes(), ca2.CertBytes()}},
			tc:   &tls.Config{MinVersion: tls.VersionTLS12, RootCAs: newCertPool(ca1, ca2)},
		},
		{
			desc: "BadServerRootCertificate",
			so: SecureOptions{
				UseTLS: true,
				ServerRootCAs: [][]byte{
					[]byte("-----BEGIN CERTIFICATE-----\nYm9ndXM=\n-----END CERTIFICATE-----"),
				},
			},
			expectedErr: "error adding root certificate",
		},
		{
			desc: "WithRequiredClientKeyPair",
			so:   SecureOptions{UseTLS: true, RequireClientCert: true, Key: ckp.Key, Certificate: ckp.Cert},
			tc:   &tls.Config{MinVersion: tls.VersionTLS12, Certificates: []tls.Certificate{clientCert}},
		},
		{
			desc:        "MissingClientKey",
			so:          SecureOptions{UseTLS: true, RequireClientCert: true, Certificate: ckp.Cert},
			expectedErr: "both Key and Certificate are required when using mutual TLS",
		},
		{
			desc:        "MissingClientCert",
			so:          SecureOptions{UseTLS: true, RequireClientCert: true, Key: ckp.Key},
			expectedErr: "both Key and Certificate are required when using mutual TLS",
		},
		{
			desc: "WithTimeShift",
			so:   SecureOptions{UseTLS: true, TimeShift: 2 * time.Hour},
			tc:   &tls.Config{MinVersion: tls.VersionTLS12},
		},
	}
	for _, tt := range tests {
		t.Run(tt.desc, func(t *testing.T) {
			tc, err := tt.so.TLSConfig()
			if tt.expectedErr != "" {
				require.ErrorContainsf(t, err, tt.expectedErr, "got %v, want %s", err, tt.expectedErr)
				return
			}
			require.NoError(t, err)

			// CertPools and the Time callback cannot be compared with
			// require.Equal directly, so verify each explicitly and zero it
			// out before the final whole-struct comparison.
			if len(tt.so.ServerRootCAs) != 0 {
				require.NotNil(t, tc.RootCAs)
				require.True(t, tt.tc.RootCAs.Equal(tc.RootCAs))
				tt.tc.RootCAs, tc.RootCAs = nil, nil
			}
			if tt.so.TimeShift != 0 {
				require.NotNil(t, tc.Time)
				// Time() should report "now" shifted backwards by TimeShift;
				// allow generous slack for slow test machines.
				require.WithinDuration(t, time.Now().Add(-1*tt.so.TimeShift), tc.Time(), 10*time.Second)
				tc.Time = nil
			}
			require.Equal(t, tt.tc, tc)
		})
	}
}
// TestClientConfigDialOptions_GoodConfig confirms DialOptions succeeds for
// three valid configurations: no TLS, server-side TLS, and mutual TLS.
func TestClientConfigDialOptions_GoodConfig(t *testing.T) {
	certs := LoadTestCerts(t)

	// An empty config must still produce usable dial options.
	cfg := ClientConfig{}
	dialOpts, err := cfg.DialOptions()
	require.NoError(t, err)
	require.NotEmpty(t, dialOpts)

	// Server-side TLS only: root CAs but no client certificate.
	cfg.SecOpts = SecureOptions{
		UseTLS:            true,
		ServerRootCAs:     [][]byte{certs.CAPEM},
		RequireClientCert: false,
	}
	dialOpts, err = cfg.DialOptions()
	require.NoError(t, err)
	require.NotEmpty(t, dialOpts)

	// Mutual TLS: a full client key pair is supplied and must parse to the
	// same certificate that was loaded from testdata.
	cfg.SecOpts = SecureOptions{
		Certificate:       certs.CertPEM,
		Key:               certs.KeyPEM,
		UseTLS:            true,
		ServerRootCAs:     [][]byte{certs.CAPEM},
		RequireClientCert: true,
	}
	cert, err := cfg.SecOpts.ClientCertificate()
	require.NoError(t, err)
	require.Equal(t, certs.ClientCert, cert)
	dialOpts, err = cfg.DialOptions()
	require.NoError(t, err)
	require.NotEmpty(t, dialOpts)
}
// TestClientConfigDialOptions_BadConfig confirms DialOptions rejects invalid
// TLS configurations with the expected error messages.
func TestClientConfigDialOptions_BadConfig(t *testing.T) {
	certs := LoadTestCerts(t)

	// A server root CA that is not valid PEM certificate data.
	cfg := ClientConfig{
		SecOpts: SecureOptions{
			UseTLS:        true,
			ServerRootCAs: [][]byte{[]byte(badPEM)},
		},
	}
	_, err := cfg.DialOptions()
	require.ErrorContains(t, err, "error adding root certificate")

	// Mutual TLS with the client key missing.
	cfg.SecOpts = SecureOptions{
		Certificate:       []byte("cert"),
		UseTLS:            true,
		RequireClientCert: true,
	}
	_, err = cfg.DialOptions()
	require.ErrorContains(t, err, "both Key and Certificate are required when using mutual TLS")

	// Mutual TLS with the client certificate missing.
	cfg.SecOpts = SecureOptions{
		Key:               []byte("key"),
		UseTLS:            true,
		RequireClientCert: true,
	}
	_, err = cfg.DialOptions()
	require.ErrorContains(t, err, "both Key and Certificate are required when using mutual TLS")

	// A valid certificate paired with an unparsable key.
	cfg.SecOpts = SecureOptions{
		Certificate:       certs.CertPEM,
		Key:               []byte(badPEM),
		UseTLS:            true,
		RequireClientCert: true,
	}
	_, err = cfg.DialOptions()
	require.ErrorContains(t, err, "failed to load client certificate")

	// An unparsable certificate paired with a valid key.
	cfg.SecOpts = SecureOptions{
		Certificate:       []byte(badPEM),
		Key:               certs.KeyPEM,
		UseTLS:            true,
		RequireClientCert: true,
	}
	_, err = cfg.DialOptions()
	require.ErrorContains(t, err, "failed to load client certificate")
}
// TestCerts bundles the PEM material and parsed key pairs that the client
// and server configuration tests in this package share.
type TestCerts struct {
	CAPEM      []byte          // root CA certificate, PEM-encoded
	CertPEM    []byte          // client certificate, PEM-encoded
	KeyPEM     []byte          // client private key, PEM-encoded
	ClientCert tls.Certificate // parsed client key pair (CertPEM + KeyPEM)
	ServerCert tls.Certificate // parsed server key pair loaded from testdata
}
// LoadTestCerts reads the Org1 test certificates from the package's
// testdata/certs directory and returns them both as raw PEM bytes and as
// parsed tls.Certificate key pairs. Any failure aborts the calling test
// immediately.
//
// Fix: the original mixed two error-handling styles for identical failure
// cases (manual `if err != nil { t.Fatalf(...) }` blocks and a single
// require.NoError at the end); unified on require.NoError, which also
// fails the test immediately and matches the rest of this file.
func LoadTestCerts(t *testing.T) TestCerts {
	t.Helper()

	var certs TestCerts
	var err error

	certs.CAPEM, err = os.ReadFile(filepath.Join("testdata", "certs", "Org1-cert.pem"))
	require.NoError(t, err, "unexpected error reading root cert for test")
	certs.CertPEM, err = os.ReadFile(filepath.Join("testdata", "certs", "Org1-client1-cert.pem"))
	require.NoError(t, err, "unexpected error reading cert for test")
	certs.KeyPEM, err = os.ReadFile(filepath.Join("testdata", "certs", "Org1-client1-key.pem"))
	require.NoError(t, err, "unexpected error reading key for test")
	certs.ClientCert, err = tls.X509KeyPair(certs.CertPEM, certs.KeyPEM)
	require.NoError(t, err, "unexpected error loading certificate for test")
	certs.ServerCert, err = tls.LoadX509KeyPair(
		filepath.Join("testdata", "certs", "Org1-server1-cert.pem"),
		filepath.Join("testdata", "certs", "Org1-server1-key.pem"),
	)
	require.NoError(t, err, "unexpected error loading server certificate for test")

	return certs
}
```
|
```java
package com.yahoo.security;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Objects;
import static com.yahoo.security.ArrayUtils.hex;
/**
* A SealedSharedKey represents the public part of a secure one-way ephemeral key exchange.
*
* It is "sealed" in the sense that it is expected to be computationally infeasible
* for anyone to derive the correct shared key from the sealed key without holding
* the correct private key.
*
* A SealedSharedKey can be converted to--and from--an opaque string token representation.
* This token representation is expected to be used as a convenient serialization
* form when communicating shared keys.
*/
public record SealedSharedKey(int version, KeyId keyId, byte[] enc, byte[] ciphertext) {

    /** Current encoding version of opaque sealed key tokens. Must be less than 256. */
    public static final int CURRENT_TOKEN_VERSION = 2;
    /**
     * Maximum length of the encryption context ({@code enc}). The length is serialized
     * as a single unsigned byte, so it can never exceed 255; for v1/v2 tokens the
     * context is always a 32-byte X25519 public key in practice.
     */
    public static final int MAX_ENC_CONTEXT_LENGTH = 255;
    // Expected max decoded size for v1 is 3 + 255 + 32 + 32 = 322. For simplicity, round this
    // up to 512 to effectively not have to care about the overhead of any reasonably chosen encoding.
    public static final int MAX_TOKEN_STRING_LENGTH = 512;

    // Compact canonical constructor: rejects encryption contexts whose length
    // cannot be represented in the single length byte of the serialized form.
    public SealedSharedKey {
        if (enc.length > MAX_ENC_CONTEXT_LENGTH) {
            throw new IllegalArgumentException("Encryption context is too large to be encoded (max is %d, got %d)"
                                               .formatted(MAX_ENC_CONTEXT_LENGTH, enc.length));
        }
    }

    /**
     * Creates an opaque URL-safe string token that contains enough information to losslessly
     * reconstruct the SealedSharedKey instance when passed verbatim to fromTokenString().
     */
    public String toTokenString() {
        return Base62.codec().encode(toSerializedBytes());
    }

    // Serializes this key to its binary wire format:
    //   u8 token version || u8 length(key id) || key id || u8 length(enc) || enc || ciphertext
    // The ciphertext carries no explicit length; it is whatever remains after enc.
    byte[] toSerializedBytes() {
        byte[] keyIdBytes = keyId.asBytes();
        // u8 token version || u8 length(key id) || key id || u8 length(enc) || enc || ciphertext
        ByteBuffer encoded = ByteBuffer.allocate(1 + 1 + keyIdBytes.length + 1 + enc.length + ciphertext.length);
        encoded.put((byte)version);
        encoded.put((byte)keyIdBytes.length);
        encoded.put(keyIdBytes);
        encoded.put((byte)enc.length);
        encoded.put(enc);
        encoded.put(ciphertext);
        encoded.flip();
        byte[] encBytes = new byte[encoded.remaining()];
        encoded.get(encBytes);
        return encBytes;
    }

    /**
     * Attempts to unwrap a SealedSharedKey opaque token representation that was previously
     * created by a call to toTokenString().
     */
    public static SealedSharedKey fromTokenString(String tokenString) {
        // Bound the input size before Base62-decoding to avoid wasting work on
        // strings that cannot possibly be valid tokens.
        verifyInputTokenStringNotTooLarge(tokenString);
        byte[] rawTokenBytes = Base62.codec().decode(tokenString);
        return fromSerializedBytes(rawTokenBytes);
    }

    // Inverse of toSerializedBytes(). Validates the version byte explicitly;
    // length bytes are read as unsigned so key ids/contexts up to 255 bytes decode.
    // NOTE(review): a token truncated after the version byte surfaces as a
    // java.nio.BufferUnderflowException from the get() calls below rather than
    // an IllegalArgumentException — confirm callers tolerate this.
    static SealedSharedKey fromSerializedBytes(byte[] rawTokenBytes) {
        if (rawTokenBytes.length < 1) {
            throw new IllegalArgumentException("Decoded token too small to contain a version");
        }
        ByteBuffer decoded = ByteBuffer.wrap(rawTokenBytes);
        // u8 token version || u8 length(key id) || key id || u8 length(enc) || enc || ciphertext
        int version = Byte.toUnsignedInt(decoded.get());
        if (version < 1 || version > CURRENT_TOKEN_VERSION) {
            throw new IllegalArgumentException("Token had unexpected version. Expected value in [1, %d], was %d"
                                               .formatted(CURRENT_TOKEN_VERSION, version));
        }
        int keyIdLen = Byte.toUnsignedInt(decoded.get());
        byte[] keyIdBytes = new byte[keyIdLen];
        decoded.get(keyIdBytes);
        int encLen = Byte.toUnsignedInt(decoded.get());
        byte[] enc = new byte[encLen];
        decoded.get(enc);
        // Everything after enc is ciphertext (no explicit length field).
        byte[] ciphertext = new byte[decoded.remaining()];
        decoded.get(ciphertext);
        return new SealedSharedKey(version, KeyId.ofBytes(keyIdBytes), enc, ciphertext);
    }

    /** Returns the serialization version of this token (in [1, CURRENT_TOKEN_VERSION]). */
    public int tokenVersion() { return version; }

    private static void verifyInputTokenStringNotTooLarge(String tokenString) {
        if (tokenString.length() > MAX_TOKEN_STRING_LENGTH) {
            throw new IllegalArgumentException("Token string is too long to possibly be a valid token");
        }
    }

    // Friendlier toString() with hex dump of enc/ciphertext fields
    @Override
    public String toString() {
        return "SealedSharedKey{" +
               "version=" + version +
               ", keyId=" + keyId +
               ", enc=" + hex(enc) +
               ", ciphertext=" + hex(ciphertext) +
               '}';
    }

    // Explicitly generated equals() and hashCode() to use _contents_ of
    // enc/ciphertext arrays, and not just their refs.
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        SealedSharedKey that = (SealedSharedKey) o;
        return version == that.version && keyId.equals(that.keyId) &&
               Arrays.equals(enc, that.enc) &&
               Arrays.equals(ciphertext, that.ciphertext);
    }

    @Override
    public int hashCode() {
        int result = Objects.hash(version, keyId);
        result = 31 * result + Arrays.hashCode(enc);
        result = 31 * result + Arrays.hashCode(ciphertext);
        return result;
    }
}
```
|
```python
import gym
from baselines import deepq
import tensorflow as tf


def callback(lcl, glb):
    """Early-stop hook for deepq.learn.

    Returns True (stop training) once more than 100 steps have elapsed and the
    mean reward over the last 100 completed episodes is at least 199.
    """
    return lcl['t'] > 100 and sum(lcl['episode_rewards'][-101:-1]) / 100 >= 199


# Single shared CartPole environment for the whole sweep.
env = gym.make("CartPole-v0")

# MLP architectures (hidden-layer widths) to sweep over.
architectures = [[64], [128], [64, 64], [128, 128], [256, 256]]

for arch in architectures:
    # Fresh graph per run so the sweeps don't share TF state.
    graph = tf.Graph()
    with graph.as_default():
        act = deepq.learn(
            env,
            q_func=deepq.models.mlp(arch),
            lr=1e-3,
            max_timesteps=10000,
            buffer_size=50000,
            exploration_fraction=0.1,
            exploration_final_eps=0.02,
            print_freq=10,
            callback=callback,
            outdir="/tmp/experiments/discrete/DQN/" + str(arch)
        )
        act.save("models/cartpole_model_DQN_" + str(arch) + ".pkl")
```
|
Comet Lake is a private reservoir in Summit County, Ohio located within the city of Green. The community of Comet sits on the northwestern end of the lake. The lake drains over the Comet Lake Dam into the upper Tuscarawas River by way of Nimisila Creek.
References
Summit County, Ohio
Reservoirs in Ohio
|
The Travels of Wiglington and Wenks is originally a series of story books for children, written by John Bittleston and Eliza Quek in 1987, which was later adapted into an online virtual world for children.
Book series
There are four books in the series: The Legacy: London and Paris, The Chase: Venice and Rome, The Crossroads: Istanbul and Turkey, and The Malta Connection: Malta and Singapore. They tell the story of two water rats from Hampshire who travel the world in search of their heritage. The books are illustrated by Lee Kowling.
Virtual world
The Travels of Wiglington and Wenks Virtual World was officially launched on Christmas Eve 2009. The site says that "Some of the features players can experience in the game are building and designing culture-inspired houses, owning exotic islands, throwing parties, performing a range of cool actions, adopting unique pets, wearing clothes from different countries, meeting famous people from the past and present and visiting famous landmarks around the world." By March 1, the number of registered users reached more than 80,000 from 150 countries. It was nominated one of the top five virtual worlds for kids out of 40 other virtual worlds in the Readers' Choice Awards conducted by About.com and came in second after Webkinz. The other three virtual worlds nominated for the top five positions were Club Penguin, Chobots and Webosaurs. Wiglington and Wenks grew, added new features, and billed itself as "the world's most massive virtual world for kids". The world contained over 100 different locations, more than 50 games and a vast amount of information on historical people, animals, plants, inventions, items, architectures and countries. However, the world is currently closed indefinitely due to lack of funds.
The virtual world was developed by Swag Soft - an app development company based in Singapore.
References
External links
The Travels of Wiglington and Wenks Virtual World
Series of children's books
Children's websites
Free online games
|
DSpace is an open source repository software package typically used for creating open access repositories for scholarly and/or published digital content. While DSpace shares some feature overlap with content management systems and document management systems, the DSpace repository software serves a specific need as a digital archives system, focused on the long-term storage, access and preservation of digital content. The optional DSpace registry lists almost three thousand repositories all over the world.
History
The first public version of DSpace was released in November 2002, as a joint effort between developers from MIT and HP Labs. Following the first user group meeting in March 2004, a group of interested institutions formed the DSpace Federation, which determined the governance of future software development by adopting the Apache Foundation's community development model as well as establishing the DSpace Committer Group. In July 2007 as the DSpace user community grew larger, HP and MIT jointly formed the DSpace Foundation, a not-for-profit organization that provided leadership and support. In May 2009 collaboration on related projects and growing synergies between the DSpace Foundation and the Fedora Commons organization led to the joining of the two organizations to pursue their common mission in a not-for-profit called DuraSpace. DuraSpace and LYRASIS merged in July 2019. Currently the DSpace software and user community receives leadership and guidance from LYRASIS.
Technology
DSpace is constructed with Java web applications, many programs, and an associated metadata store. The web applications provide interfaces for administration, deposit, ingest, search, and access. The asset store is maintained on a file system or similar storage system. The metadata, including access and configuration information, is stored in a relational database and supports the use of PostgreSQL and Oracle database. DSpace holdings are made available primarily via a web interface. More recent versions of DSpace also support faceted search and browse functionality using Apache Solr.
Features
Some of the most important features of DSpace are as follows.
Free open source software
Completely customizable to fit user needs
Manage and preserve all formats of digital content (PDF, Word, JPEG, MPEG, TIFF files)
Apache SOLR based search for metadata and full text contents
UTF-8 Support
Interface available in 22 languages
Granular group based access control, allowing setting permissions down to the level of individual files
Optimized for Google Scholar indexing
Integration with BASE, CORE, OpenAIRE, Unpaywall and WorldCat
Operating systems
DSpace software runs on Linux, Solaris, Unix, Ubuntu and Windows. It can also be installed on OS X.
Linux is by far the most common OS for DSpace.
Notable DSpace repositories
The World Bank - Open Knowledge Repository
Apollo - University of Cambridge Repository
Digital Access to Scholarship at Harvard
DSpace@MIT
Spiral - Imperial College London Repository
WHO Institutional Repository for Information Sharing
A full list of institutional repositories using DSpace software as well as others is available via the Registry of Open Access Repositories (ROAR) and at the DuraSpace Registry.
See also
Digital library
DuraCloud
Institutional repository
Fedora Commons
SWORD
DSpace Alternatives Free and Open Source Software
OPUS (software)
Islandora
Samvera
Omeka
EPrints
Invenio
Zenodo
CKAN
References
External links
– official site
2002 software
Digital library software
Free institutional repository software
Free software programmed in Java (programming language)
Massachusetts Institute of Technology software
Open-access archives
Software using the BSD license
Free and open-source software
|
Österreichs schlechtester Autofahrer ("Austria's Worst Driver") is an Austrian television series.
See also
List of Austrian television series
2007 Austrian television series debuts
2007 Austrian television series endings
2000s Austrian television series
ORF (broadcaster)
German-language television shows
|
Monte Rusta is a mountain of the Veneto, Italy. It has an elevation of 396 metres.
Mountains of Veneto
|
This is a list of inductees in the National Sprint Car Hall of Fame.
1990
Christopher J.C. Agajanian
Arthur Chevrolet
Louis Chevrolet
Larry Dickson
August Duesenberg
Fred Duesenberg
A. J. Foyt
Tommy Hinnershitz
Frank Lockhart
Rex Mays
Harry Arminius Miller
Barney Oldfield
Jan Opperman
Gus Schrader
Wilbur Shaw
Floyd "Pop" Dreyer
Jack Gunn
Ralph Hankinson
J. Alex Sloan
Floyd Trevis
1991
Ralph DePalma
Louis Meyer
Duke Nalon
Ted Horn
Parnelli Jones
Don Edmunds
Duane "Pancho" Carter
Ernie Triplett
Emory Collins
Hector Honore
Jerry Richert, Sr.
Art Sparks
Bud Winfield
Ed Winfield
Frank Funk
Fred Wagner
Al Sweeney
Marion Robinson
1992
Bobby Grim
Tommy Milton
Sheldon Kinser
Jud Larson
Eddie Rickenbacker
Bob Sall
Rich Vogler
Tony Willman
Art Pillsbury
John Vance
Alex Morales
Earl Gilmore
Ennis "Dizz" Wilson
Dick Gaines
T. E. "Pop" Myers
Sam Nunis
John Gerber
Ronnie Allyn
1993
Gary Bettenhausen
Duane Carter, Sr.
Joie Chitwood
Chris Economaki
Ira Hall
Jim Hurtubise
Roger McCluskey
Troy Ruttman
Myron Stevens
Ira Vail
A. J. Watson
Lloyd Axel
Walt James
Bob Trostle
Frank Winkley
1994
Don Branson
Jimmy Bryan
Sig Haugdahl
Frank Kurtis
George "Doc" MacKenzie
Fred Offenhauser
Elbert "Babe" Stapp
Jimmy Wilburn
Ralph Capitani
Earl Baltes
Deb Snyder
Leo Goossen
Karl Kinser
O. D. Lavely
Marshall "Shorty" Pritzbur
1995
Bob Sweikert
Pete DePaolo
Pat O'Connor
Johnny Rutherford
Bill Ambler
John Ambler
Pete Folse
Bill Hill
Rick Ferkel
Gaylord White
Frank Luptow
Richard "Mitch" Smith
Wally Meskowski
Don Smith
LaVern Nance
Louis Vermeil
1996
Emil Andres
Mario Andretti
Tom Bigelow
Mike Nazaruk
Johnny Thomson
Jerry Blundy
Lynn Paxton
Roy Richwine
Bill Pickens
Russ Clendenen
Rollie Beale
Willie Davis
Ted Halibrand
John Sloan
J. W. Hunt
Paul Weirick
1997
Joe James
Stubby Stubblefield
Bobby Unser
Travis "Spider" Webb
Bruce Bromme, Sr.
Tom Cherry
Charlie Curryer
Vern Fritch
Hiram Hillegass
Leo Krasek
Dick Tobias
Dick Wallen
Kenny Weld
Harry Wimmer
Gordon Woolley
1998
Sam Hanks
Harry Hartz
Norman "Bubby" Jones
Bill Schindler
Greg Weld
Bobby Allen
Gary Patterson
Dean Thompson
Grant King
Bob Weikert
Tom Holden
Ted Johnson
Gene Van Winkle
1999
Eddie Sachs
Johnny White
Al Gordon
Ray Lee Goodwin
Lealand McSpadden
Bob Kinser
Clarence "Hooker" Hood
LeRoy Van Conett
Russ Garnant
Steve Stapp
Granvel Henry
Don Basile
John Sawyer
Fred Loring
Larry Sullivan
2000
Joe Saldana
Al "Cotton" Farmer
Chester "Chet" Gardner
Earl Halaquist
Allen Heath
Bert Emick
Beryl Ward
Harold Leep
Jimmy Oskie
Steve Smith
Tom Marchese
Bob Russo
Paul Fromm
August "Gus" Hoffman
D. William "Speedy Bill" Smith
Chester "Chet" Wilson
L. A. "Les" Ward
2001
Emmett "Buzz" Barton
Brad Doty
Bob Hogle
Eddie Leavitt
Albert "Buddy" Taylor
Davey Brown, Sr.
Bob Estes
Gary Stanton
Don Martin
Jack Miller
Dick Sutcliffe
Don Mack
2002
Jack Hewitt
Jim McElreath
Everett Saylor
Dick Berggren - announcer, editor of Stock Car Racing magazine
Larry "Smokey" Snellbaker
J. Gordon Betz
Sam Traylor
Joe Scalzo
Lloyd Beckman
Ralph "Speedy" Helm
Maynard "Hungry" Clark
John Bagley
Galen Fox
2003
Sammy Sessions
Billy Winn
Clarence "Mutt" Anderson
Armin Krueger
John "Jack" Shillington Prince
Jay Woodside
Robert Roof
Bud Carson
Al Hamilton
Fred Horey
Ron Shuman (elected on first ballot, eligible on age)
Bill Utz
Doug Wolfgang (elected on first ballot, eligible on age)
2004
Bryan Saulpaugh
Chuck Amati
Sherman "Red" Campbell
Chuck Gurney
Keith Kauffman
Bob Slater
Billy Wilkerson
Jim Culbert
Ralph Morgan
"Boston" Louie Seymour
Walter E. Bull
Bruce Craig
R. Keith Hall
Don Peabody
2005
Steve Butler
Bob Carey
Elmer George
Bill Holland
Steve Kinser (elected on first ballot, eligible on age)
Robbie Stanley
Don Brown
Ray Tilley
Dick Simonek
John Mahoney
Jim Raper
Norm Witte
Kenny Woodruff
2006
Lee Kunzman
Sammy Swindell
Lanny Edwards
Johnny Hannon
Rickey Hood
Bob Pankratz
Jud Phillips
Francis Quinn
Newton "Buzz" Rose
Jimmy Sills
Granville "Buster" Warke
Taylor "Pappy" Weld
Ted Wilson
2007
Joe Jagersberger
Bayliss Levrett
Shane Carson
Jerry "Scratch" Daniels
Rajo Jack
Kenny Jacobs
Hal Minyard
Earl Gaerte
Glen Niebel
Ken Coles
Emmett Hahn
Bill White
2008
Tony Bettenhausen
Louis "Rusty" Espinoza
Glenn Fitzcharles
Bob Hampshire
Doug Howells
Dick Jordan
Brent Kaeding
John Padjen
Johnnie Parsons
Gordon Schroeder
Earl Wagner
Kramer Williamson
2009
Allan Brown
Jim Chini
Jack Elam
Lee Elkins
Jac Haudenschild
Jackie Holmes
Tommy Nicholson
Lee Osborne
Cavino Petillo
Roger Rager
Fred Rahmer
Louis Senter
Rodney "Rip" Williams
2010
Clyde Adams
Bobbie Adamson
Hank Arnold
George Bentel
Fred Brownfield
Ben Krasner
Fred Linder
Casey Luna
Frank Riddle
Hal Robson
Herman Schurch
Don Shepherd
2011
W.W. Bowen
Jimmy Boyd
Bruce Bromme, Jr.
Bob Burman
Wally Campbell
Andy Granatelli
Leonard Kerbs
Danny Lasoski
Gene Marderness
Della Rice
Emmett Shelley
Joe Sostilio
Gary Wright
2012
Johnny Anderson
Thad Dosher
Sam Hoffman
Harry Hosterman
Chuck Hulse
C. Henry Meyer
A. Earl Padgett
Colby Scroggin
Ron Shaver
Gary Sokola
Bill Vandewater
Bobby Ward
2013
Charles "Dutch" Baumann
Art Bisch
Lou Blaney
Jeff Bloom
Richard Hoffman
Harold "Red" Lempelius
Andy Linden
Jean Lynch
Ernest Moross
Brad Noffsinger
Edd Sheppard
C.W. Van Ranst
2014
Dave Argabright
Larry Beckett
Dave Blaney
Bobby Davis
Mark Kinser
William "Windy" McDonald
Chuck Merrill
George Nesler
2015
Mike Arthur
Roger Beck
"Tiger" Gene Brown
Brice Ellis
Don Kreitz
Danny Smith
Gil Sonner
Charlie Wiggins
2016
Dale Blaney
Doug Clark (Official at Knoxville Raceway)
Gene Crucean
Roy "Bud" Grimm Jr.
Shirley Kear Valentine
Frankie Kerr
Mark Light
Gus Linder
2017
Doug Auld
Earl Cooper
Dave Darland
Tony Elliott
Guy Forbrook
Terry McCarl
John Singer
Pat Sullivan
2018
Steve Beitler
Bryan Clauson (elected on first ballot eligible after death)
Lance Dewease
Oscar "Red" Garnant
Scott Gerkin
Emmett J. Malloy
Bob Mays
Dave Steele (elected on first ballot eligible after death)
2019
Jason Johnson (elected on first ballot eligible after death)
Stevie Smith
Richard Griffin
M. A. Brown
C. K. Spurlock
Tom Schmeh
Greg Stephens
Bill Endicott
2020/21
Bill Cummings
Walt Dyer
Greg Hodnett
Don Lamberti
Paul Leffler
L. Spencer Riggs
Tim Shaffer
Jeff Swindell
2022
Bob Frey
John Gibson
Eric Gordon
Terry Gray
Tim Green
Ralph Heintzelman Sr.
Jack Kromer
Robin Miller
Walter T. Ross
Dennis Roth
Walter "Slim" Rutherford
Tony Stewart
Reference for 2022 inductees:
2023
Johnny Capels
Max Dolder
Ken Hamilton
Paul Hazen
Chad Kemenah
Alan Kreitzer
Cory Kruseman
Bobby Marshall
Joie Ray
Joey Saldana
Tommy Sanders
Ralph Sheheen
Johnny Vance
Reference for 2023 inductees:
References
External links
Official website
List of inductees
Sports hall of fame inductees
|
```java
/*
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
*
* path_to_url
*
* Unless required by applicable law or agreed to in writing, software
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package org.apache.shardingsphere.db.protocol.postgresql.constant;
import org.apache.shardingsphere.db.protocol.constant.DatabaseProtocolDefaultVersionProvider;
/**
 * Default protocol version provider for PostgreSQL.
 */
public final class PostgreSQLProtocolDefaultVersionProvider implements DatabaseProtocolDefaultVersionProvider {
    
    /** Server version string reported to clients when none is configured. */
    private static final String DEFAULT_PROTOCOL_VERSION = "12.3";
    
    @Override
    public String provide() {
        return DEFAULT_PROTOCOL_VERSION;
    }
    
    @Override
    public String getDatabaseType() {
        return "PostgreSQL";
    }
}
```
|
El Castillo de Arcos de la Frontera (Spanish: Castillo de Arcos de la Frontera) is a castle located in Arcos de la Frontera, Spain. It was declared Bien de Interés Cultural in 1993.
References
Bien de Interés Cultural landmarks in the Province of Cádiz
Castles in Andalusia
Buildings and structures completed in the 11th century
Buildings and structures completed in the 15th century
|
```c++
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/date/dateparser.h"
#include "src/objects/objects-inl.h"
#include "src/strings/char-predicates-inl.h"
namespace v8 {
namespace internal {
// Assembles the accumulated date components into output[YEAR/MONTH/DAY].
// Returns false when the components cannot form a valid date.
bool DateParser::DayComposer::Write(double* output) {
  if (index_ < 1) return false;
  // Day and month defaults to 1.
  while (index_ < kSize) {
    comp_[index_++] = 1;
  }
  int year = 0; // Default year is 0 (=> 2000) for KJS compatibility.
  int month = kNone;
  int day = kNone;
  if (named_month_ == kNone) {
    // No textual month name was parsed: infer the ordering of the
    // purely numeric components.
    if (is_iso_date_ || (index_ == 3 && !IsDay(comp_[0]))) {
      // YMD
      year = comp_[0];
      month = comp_[1];
      day = comp_[2];
    } else {
      // MD(Y)
      month = comp_[0];
      day = comp_[1];
      if (index_ == 3) year = comp_[2];
    }
  } else {
    // A month name was seen; the remaining numerals are day and
    // (optionally) year, in either order.
    month = named_month_;
    if (index_ == 1) {
      // MD or DM
      day = comp_[0];
    } else if (!IsDay(comp_[0])) {
      // YMD, MYD, or YDM
      year = comp_[0];
      day = comp_[1];
    } else {
      // DMY, MDY, or DYM
      day = comp_[0];
      year = comp_[1];
    }
  }
  if (!is_iso_date_) {
    // Two-digit years map into 1950..2049 for non-ISO dates.
    if (Between(year, 0, 49))
      year += 2000;
    else if (Between(year, 50, 99))
      year += 1900;
  }
  if (!Smi::IsValid(year) || !IsMonth(month) || !IsDay(day)) return false;
  output[YEAR] = year;
  output[MONTH] = month - 1; // 0-based
  output[DAY] = day;
  return true;
}
// Assembles the accumulated time components into
// output[HOUR/MINUTE/SECOND/MILLISECOND].  Returns false for out-of-range
// values, except that 24:00:00.000 is accepted as a special case.
bool DateParser::TimeComposer::Write(double* output) {
  // All time slots default to 0
  while (index_ < kSize) {
    comp_[index_++] = 0;
  }
  // References so the AM/PM adjustment below writes back into comp_.
  int& hour = comp_[0];
  int& minute = comp_[1];
  int& second = comp_[2];
  int& millisecond = comp_[3];
  if (hour_offset_ != kNone) {
    // An AM/PM marker was seen: hour must be 1..12, then shift to 24h form.
    if (!IsHour12(hour)) return false;
    hour %= 12;
    hour += hour_offset_;
  }
  if (!IsHour(hour) || !IsMinute(minute) || !IsSecond(second) ||
      !IsMillisecond(millisecond)) {
    // A 24th hour is allowed if minutes, seconds, and milliseconds are 0
    if (hour != 24 || minute != 0 || second != 0 || millisecond != 0) {
      return false;
    }
  }
  output[HOUR] = hour;
  output[MINUTE] = minute;
  output[SECOND] = second;
  output[MILLISECOND] = millisecond;
  return true;
}
// Writes the parsed time-zone offset (signed, in seconds) into
// output[UTC_OFFSET]; NaN signals that no explicit zone was present.
bool DateParser::TimeZoneComposer::Write(double* output) {
  if (sign_ == kNone) {
    output[UTC_OFFSET] = std::numeric_limits<double>::quiet_NaN();
    return true;
  }
  if (hour_ == kNone) hour_ = 0;
  if (minute_ == kNone) minute_ = 0;
  // Avoid signed integer overflow (undefined behavior) by doing unsigned
  // arithmetic.
  const unsigned magnitude = hour_ * 3600U + minute_ * 60U;
  if (magnitude > Smi::kMaxValue) return false;
  int total_seconds = static_cast<int>(magnitude);
  if (sign_ < 0) total_seconds = -total_seconds;
  DCHECK(Smi::IsValid(total_seconds));
  output[UTC_OFFSET] = total_seconds;
  return true;
}
// Keyword lookup table.  Each row holds a kPrefixLength-character prefix
// (NUL-padded), the token type, and its value: month number for MONTH_NAME,
// hour offset for AM_PM, or UTC offset in hours for TIME_ZONE_NAME.
const int8_t
    DateParser::KeywordTable::array[][DateParser::KeywordTable::kEntrySize] = {
        {'j', 'a', 'n', DateParser::MONTH_NAME, 1},
        {'f', 'e', 'b', DateParser::MONTH_NAME, 2},
        {'m', 'a', 'r', DateParser::MONTH_NAME, 3},
        {'a', 'p', 'r', DateParser::MONTH_NAME, 4},
        {'m', 'a', 'y', DateParser::MONTH_NAME, 5},
        {'j', 'u', 'n', DateParser::MONTH_NAME, 6},
        {'j', 'u', 'l', DateParser::MONTH_NAME, 7},
        {'a', 'u', 'g', DateParser::MONTH_NAME, 8},
        {'s', 'e', 'p', DateParser::MONTH_NAME, 9},
        {'o', 'c', 't', DateParser::MONTH_NAME, 10},
        {'n', 'o', 'v', DateParser::MONTH_NAME, 11},
        {'d', 'e', 'c', DateParser::MONTH_NAME, 12},
        {'a', 'm', '\0', DateParser::AM_PM, 0},
        {'p', 'm', '\0', DateParser::AM_PM, 12},
        // UTC aliases.
        {'u', 't', '\0', DateParser::TIME_ZONE_NAME, 0},
        {'u', 't', 'c', DateParser::TIME_ZONE_NAME, 0},
        {'z', '\0', '\0', DateParser::TIME_ZONE_NAME, 0},
        {'g', 'm', 't', DateParser::TIME_ZONE_NAME, 0},
        // North American zone abbreviations (offset in hours from UTC).
        {'c', 'd', 't', DateParser::TIME_ZONE_NAME, -5},
        {'c', 's', 't', DateParser::TIME_ZONE_NAME, -6},
        {'e', 'd', 't', DateParser::TIME_ZONE_NAME, -4},
        {'e', 's', 't', DateParser::TIME_ZONE_NAME, -5},
        {'m', 'd', 't', DateParser::TIME_ZONE_NAME, -6},
        {'m', 's', 't', DateParser::TIME_ZONE_NAME, -7},
        {'p', 'd', 't', DateParser::TIME_ZONE_NAME, -7},
        {'p', 's', 't', DateParser::TIME_ZONE_NAME, -8},
        // ISO 8601 date/time separator.
        {'t', '\0', '\0', DateParser::TIME_SEPARATOR, 0},
        // Sentinel terminating the table.
        {'\0', '\0', '\0', DateParser::INVALID, 0},
};
// We could use perfect hashing here, but this is not a bottleneck.
// We could use perfect hashing here, but this is not a bottleneck.
// Linear search for the kPrefixLength-character prefix `pre` of a word of
// length `len`; returns the row index of the match, or the index of the
// INVALID sentinel row when nothing matches.
int DateParser::KeywordTable::Lookup(const uint32_t* pre, int len) {
  int row = 0;
  for (; array[row][kTypeOffset] != INVALID; row++) {
    int matched = 0;
    while (matched < kPrefixLength &&
           pre[matched] == static_cast<uint32_t>(array[row][matched])) {
      matched++;
    }
    // A full prefix match counts only when the word is no longer than the
    // keyword -- except month names, which may be spelled out in full.
    if (matched == kPrefixLength &&
        (len <= kPrefixLength || array[row][kTypeOffset] == MONTH_NAME)) {
      break;
    }
  }
  return row;
}
// Normalizes a fractional-seconds numeral to exactly three significant
// digits (milliseconds), using the token's digit count to account for
// leading zeros in the original text.
int DateParser::ReadMilliseconds(DateToken token) {
  int number = token.number();
  int length = token.length();
  // Fewer than three digits: scale up so the most significant digit lands
  // in the hundreds position.
  if (length == 1) return number * 100;
  if (length == 2) return number * 10;
  if (length <= 3) return number;
  // More than three digits: drop the extra low-order digits.
  if (length > kMaxSignificantDigits) length = kMaxSignificantDigits;
  int divisor = 1;
  while (length > 3) {
    DCHECK_LE(divisor, 100000000);  // divisor won't overflow.
    divisor *= 10;
    length--;
  }
  return number / divisor;
}
} // namespace internal
} // namespace v8
```
|
```xml
import BrepCurve from 'geom/curves/brepCurve';
import NurbsCurve from 'geom/curves/nurbsCurve';
import {makeAngle0_360} from 'math/commons'
import {normalizeCurveEnds} from 'geom/impl/nurbs-ext';
import Vector from 'math/vector';
import CSys from "math/csys";
import {distanceAB} from "math/distance";
import {isCCW} from "geom/euclidean";
import {OCCCommandInterface} from "cad/craft/e0/occCommandInterface";
const RESOLUTION = 20;
/**
 * Base class for all 2D sketch entities (segments, arcs, circles, ...).
 * Subclasses provide conversion to verb NURBS geometry and to OCC commands.
 */
export class SketchPrimitive {
  // Sketch-object identifier.
  id: string;
  // When true, the primitive's traversal direction is reversed.
  inverted: boolean;

  constructor(id) {
    this.id = id;
    this.inverted = false;
  }

  // Flips the traversal direction of this primitive.
  invert() {
    this.inverted = !this.inverted;
  }

  // Tessellates via the NURBS form in the sketch's own coordinate system.
  tessellate(resolution) {
    return this.toNurbs(CSys.ORIGIN).tessellate(resolution);
    // return brepCurve.impl.verb.tessellate().map(p => new Vector().set3(p) );
    // const tessellation = this.tessellateImpl(resolution);
    // if (this.inverted) {
    //   tessellation.reverse();
    // }
    // return tessellation;
  }

  // NOTE(review): relies on the runtime class name surviving minification;
  // consider an explicit flag instead — confirm build settings.
  get isCurve() {
    return this.constructor.name !== 'Segment';
  }

  get isSegment() {
    return !this.isCurve;
  }

  // Builds a BrepCurve in the given coordinate system, honoring inversion
  // and normalizing the NURBS parameterization at the ends.
  toNurbs(csys: CSys) {
    let verbNurbs = this.toVerbNurbs(csys.outTransformation.apply, csys);
    if (this.inverted) {
      verbNurbs = verbNurbs.reverse();
    }
    const data = verbNurbs.asNurbs();
    normalizeCurveEnds(data);
    verbNurbs = new verb.geom.NurbsCurve(data);
    return new BrepCurve(new NurbsCurve(verbNurbs));
  }

  // Subclass hook: produce the verb NURBS representation.
  toVerbNurbs(tr, csys): any {
    throw 'not implemented'
  }

  // Subclass hook: raw tessellation (currently unused; see tessellate()).
  tessellateImpl() {
    throw 'not implemented'
  }

  // Subclass hook: emit OCC commands that create this primitive.
  toOCCGeometry(oci: OCCCommandInterface, underName: string, csys: CSys) {
    throw 'not implemented'
  }

  // Rough size measure used to scale tessellation density; default fallback.
  massiveness() {
    return 50;
  }
}
/** Straight line segment between two sketch points. */
export class Segment extends SketchPrimitive {
  a: Vector;
  b: Vector;

  constructor(id, a, b) {
    super(id);
    this.a = a;
    this.b = b;
  }

  // A segment tessellates to exactly its two endpoints.
  tessellate(resolution) {
    return [this.a, this.b];
  }

  toVerbNurbs(tr) {
    return new verb.geom.Line(tr(this.a).data(), tr(this.b).data());
  }

  // Endpoints in traversal order, honoring inversion.
  toGenericForm() {
    return this.inverted ? [this.b, this.a] : [this.a, this.b];
  }

  toOCCGeometry(oci: OCCCommandInterface, underName: string, csys: CSys) {
    const [start, end] = this.toGenericForm().map(csys.outTransformation.apply);
    oci.point(underName + "_A", start.x, start.y, start.z);
    oci.point(underName + "_B", end.x, end.y, end.z);
    oci.gcarc(underName, "seg", underName + "_A", underName + "_B")
  }

  tangentAtStart(): Vector {
    return this.b.minus(this.a);
  }

  tangentAtEnd(): Vector {
    return this.a.minus(this.b);
  }

  // Segment length as the size measure for tessellation density.
  massiveness() {
    return this.a.minus(this.b).length();
  }
}
/**
 * Circular arc from point a to point b around center c
 * (traversed counter-clockwise in sketch space unless inverted).
 */
export class Arc extends SketchPrimitive {
  a: Vector;  // start point
  b: Vector;  // end point
  c: Vector;  // center
  constructor(id, a, b, c) {
    super(id);
    this.a = a;
    this.b = b;
    this.c = c;
  }
  toVerbNurbs(tr, csys) {
    const basisX = csys.x;
    const basisY = csys.y;
    // Endpoint angles around the center, normalized to [0, 2*pi).
    const startAngle = makeAngle0_360(Math.atan2(this.a.y - this.c.y, this.a.x - this.c.x));
    const endAngle = makeAngle0_360(Math.atan2(this.b.y - this.c.y, this.b.x - this.c.x));
    let angle = endAngle - startAngle;
    if (angle < 0) {
      // Sweep wrapped past 2*pi: take the positive complement.
      angle = Math.PI * 2 + angle;
    }
    function pointAtAngle(angle) {
      const dx = basisX.multiply(Math.cos(angle));
      const dy = basisY.multiply(Math.sin(angle));
      return dx.plus(dy);
    }
    // Rotate the csys basis so the arc's own x-axis points at the start point;
    // the verb arc then sweeps from parameter 0 to |angle|.
    const xAxis = pointAtAngle(startAngle);
    const yAxis = pointAtAngle(startAngle + Math.PI * 0.5);
    const arc = new verb.geom.Arc(tr(this.c).data(), xAxis.data(), yAxis.data(), distanceAB(this.c, this.a), 0, Math.abs(angle));
    // Snap the NURBS endpoints exactly onto a and b to kill numeric drift.
    return adjustEnds(arc, tr(this.a), tr(this.b))
  }
  toGenericForm() {
    const endpoints = [this.a, this.b];
    if (this.inverted) {
      endpoints.reverse();
    }
    const [a, b] = endpoints;
    const tangent = a.minus(this.c)._perpXY() //tangent vector
    if (this.inverted) {
      tangent._negate();
    }
    return [
      a, //from endpoint
      b, //to endpoint
      tangent //tangent vector
    ]
  }
  toOCCGeometry(oci: OCCCommandInterface, underName: string, csys: CSys) {
    const tr = csys.outTransformation.apply;
    const s = this;
    // Endpoints in traversal order, mapped into the target coordinate system.
    const a = tr(s.inverted ? s.b : s.a);
    const b = tr(s.inverted ? s.a : s.b);
    const c = tr(s.c);
    // Start tangent: radius vector crossed with the plane normal.
    const tangent = c.minus(a)._cross(csys.z);//._normalize();
    if (s.inverted) {
      tangent._negate();
    }
    const A_TAN = a.plus(tangent);
    oci.point(underName + "_A", a.x, a.y, a.z);
    oci.point(underName + "_B", b.x, b.y, b.z);
    oci.point(underName + "_T1", a.x, a.y, a.z);
    oci.point(underName + "_T2", A_TAN.x, A_TAN.y, A_TAN.z);
    oci.gcarc(underName, "cir", underName + "_A", underName + "_T1", underName + "_T2", underName + "_B")
  }
  // Chord length as the size measure; note this underestimates long arcs.
  massiveness() {
    return this.a.minus(this.b).length();
  }
}
/** Cubic Bezier curve: endpoints a/b with interior control points cp1/cp2. */
export class BezierCurve extends SketchPrimitive {
  a: Vector;
  b: Vector;
  cp1: Vector;
  cp2: Vector;

  constructor(id, a, b, cp1, cp2) {
    super(id);
    this.a = a;
    this.b = b;
    this.cp1 = cp1;
    this.cp2 = cp2;
  }

  toVerbNurbs(tr) {
    const controlPoints = [this.a, this.cp1, this.cp2, this.b].map(p => tr(p).data());
    return new verb.geom.BezierCurve(controlPoints, null);
  }

  // Chord length as the size measure for tessellation density.
  massiveness() {
    return this.a.minus(this.b).length();
  }
}
/** Arc of an ellipse from point a to point b. */
export class EllipticalArc extends SketchPrimitive {
  c: Vector;    // center
  rx: number;   // radius along the rotated x-axis
  ry: number;   // radius along the rotated y-axis
  rot: number   // rotation of the ellipse axes, radians
  a: Vector;    // start point
  b: Vector;    // end point

  constructor(id, c, rx, ry, rot, a, b) {
    super(id);
    this.c = c;
    this.rx = rx;
    this.ry = ry;
    this.rot = rot;
    this.a = a;
    this.b = b;
  }

  toVerbNurbs(tr, csys) {
    // Ellipse axes in sketch space, scaled by the radii.
    const ax = Math.cos(this.rot);
    const ay = Math.sin(this.rot);
    const xAxis = new Vector(ax, ay)._multiply(this.rx);
    const yAxis = new Vector(-ay, ax)._multiply(this.ry);
    // Endpoint angles measured in the ellipse's own (rotated) frame.
    // NOTE(review): startAngle may exceed endAngle here; presumably verb
    // normalizes the sweep — confirm against verb.geom.EllipseArc docs.
    const startAngle = Math.atan2(this.a.y - this.c.y, this.a.x - this.c.x) - this.rot;
    const endAngle = Math.atan2(this.b.y - this.c.y, this.b.x - this.c.x) - this.rot;
    // Build the arc in sketch space, then map it into the target csys.
    let arc = new verb.geom.EllipseArc(this.c.data(), xAxis.data(), yAxis.data(), startAngle, endAngle);
    arc = arc.transform(csys.outTransformation.toArray());
    return arc;
  }

  // Larger radius as the size measure for tessellation density.
  massiveness() {
    return Math.max(this.rx, this.ry);
  }
}
/** Full circle of radius r centered at c. */
export class Circle extends SketchPrimitive {
  c: Vector;
  r: number

  constructor(id, c, r) {
    super(id);
    this.c = c;
    this.r = r;
  }

  // Circle in the plane spanned by the csys x/y axes.
  toVerbNurbs(tr, csys) {
    return new verb.geom.Circle(tr(this.c).data(), csys.x.data(), csys.y.data(), this.r);
  }

  toOCCGeometry(oci: OCCCommandInterface, underName: string, csys: CSys) {
    const center = csys.outTransformation.apply(this.c);
    const normal = csys.z;
    oci.circle(underName, ...center.data(), ...normal.data(), this.r);
  }

  // Radius as the size measure for tessellation density.
  massiveness() {
    return this.r;
  }
}
/** Full ellipse centered at c with radii rx/ry, axes rotated by rot. */
export class Ellipse extends SketchPrimitive {
  c: Vector;
  rx: number;
  ry: number;
  rot: number

  constructor(id, c, rx, ry, rot) {
    super(id);
    this.c = c;
    this.rx = rx;
    this.ry = ry;
    this.rot = rot;
  }

  toVerbNurbs(tr) {
    const cos = Math.cos(this.rot);
    const sin = Math.sin(this.rot);
    const majorAxis = new Vector(cos, sin)._multiply(this.rx);
    const minorAxis = new Vector(-sin, cos)._multiply(this.ry);
    return new verb.geom.Ellipse(tr(this.c).data(), tr(majorAxis).data(), tr(minorAxis).data());
  }

  // Larger radius as the size measure for tessellation density.
  massiveness() {
    return Math.max(this.rx, this.ry);
  }
}
/** Ordered, closed loop of sketch primitives. */
export class Contour {
  segments: SketchPrimitive[];

  constructor() {
    this.segments = [];
  }

  // Lexicographically smallest segment id identifies the contour.
  get id() {
    return this.segments.reduce((prev, curr) => prev.id.localeCompare(curr.id) < 0 ? prev : curr).id;
  }

  add(obj) {
    this.segments.push(obj);
  }

  tessellateInCoordinateSystem(csys) {
    const points = [];
    for (const segment of this.segments) {
      segment.toNurbs(csys).tessellate().forEach(p => points.push(p));
      // Drop the last point: the contour is closed, so it coincides with
      // the next segment's first point.
      points.pop();
    }
    return points;
  }

  transferInCoordinateSystem(csys) {
    return this.segments.map(segment => segment.toNurbs(csys));
  }

  tessellate() {
    const tessellation = [];
    for (const segment of this.segments) {
      const segPoints = segment.tessellate(segment.massiveness() * 0.1);
      // Skip each segment's last point because the contour is closed.
      for (let i = 0; i < segPoints.length - 1; ++i) {
        tessellation.push(segPoints[i]);
      }
    }
    return tessellation;
  }

  isCCW() {
    return isCCW(this.tessellate());
  }

  // Reverses traversal: segment order and each segment's direction.
  reverse() {
    this.segments.reverse();
    this.segments.forEach(s => s.invert());
  }
}
// Snaps the first and last control points of an arc's NURBS form onto the
// exact endpoints a and b, preserving each point's homogeneous weight.
function adjustEnds(arc, a, b) {
  const data = arc.asNurbs();
  const overwrite = (homoPoint, v) => {
    const w = homoPoint[3];
    homoPoint[0] = v.x * w;
    homoPoint[1] = v.y * w;
    homoPoint[2] = v.z * w;
  };
  overwrite(data.controlPoints[0], a);
  overwrite(data.controlPoints[data.controlPoints.length - 1], b);
  return new verb.geom.NurbsCurve(data);
}
```
|
```go
package testfixtures
import (
"encoding/hex"
"fmt"
"strings"
)
// tryHexStringToBytes decodes a "0x"-prefixed hexadecimal string into its raw
// bytes. It fails when the prefix is missing or the payload is not valid hex.
func (l *Loader) tryHexStringToBytes(s string) ([]byte, error) {
	const prefix = "0x"
	if !strings.HasPrefix(s, prefix) {
		return nil, fmt.Errorf("not a hexadecimal string, must be prefix 0x")
	}
	return hex.DecodeString(s[len(prefix):])
}
```
|
Rana Riaz Ahmad is a Pakistani politician who was a member of the Provincial Assembly of the Punjab from August 2018 until January 2023.
Political career
He was elected to the Provincial Assembly of the Punjab as a candidate of Pakistan Muslim League (N) from Constituency PP-200 (Sahiwal-V) in 2018 Pakistani general election.
References
Living people
Pakistan Muslim League (N) MPAs (Punjab)
Year of birth missing (living people)
|
Cırdaxan or Jyrdakhan may refer to:
Cırdaxan, Barda, Azerbaijan
Cırdaxan, Samukh, Azerbaijan
Cırdaxan, Yevlakh, Azerbaijan
|
Carmel Catholic High School is a co-educational, college preparatory, Catholic high school run jointly by the priests and brothers of the Order of Carmelites and the Sisters of Charity of the Blessed Virgin Mary. Located in Mundelein, Illinois, Carmel serves all of Lake County, as well as some of the surrounding counties, and southern Wisconsin. An institution of the Roman Catholic Archdiocese of Chicago, Carmel Catholic is one of three Carmelite-run high schools in the Chicago area, the others being Joliet Catholic High School and Mount Carmel High School.
History
In the early 1960s, the Carmelites and the Sisters of Charity were asked to build separate but similar Catholic high schools for the northern part of the Archdiocese of Chicago; an area corresponding roughly to Lake County. The boys school opened in 1962, with the girls school opening the next year. Following a lengthy planning process, the decision was made by the Carmelites and the BVM Sisters to combine the two schools and establish a board of directors. This was done beginning in the 1988–89 school year.
Awards and recognition
In 1985, 1996, 2002, 2007, and 2021 Carmel Catholic High School was recognized with the Blue Ribbon School Award of Excellence by the United States Department of Education.
Academics
The school offers 20 Advanced Placement (AP) courses: Biology, Chemistry, Physics (C: Mechanics), U.S. Government and Politics, U.S. History, European History, World History, English Language, English Literature, Spanish Language, French Language, Latin, Studio Art, Music Theory, Calculus AB, Calculus BC, Statistics, Psychology, Microeconomics, and Macroeconomics.
Demographics
The demographic breakdown of the 1,318 students enrolled in 2015-16 was:
Native American/Alaskan - 0.2%
Asian - 5.0%
Black - 2.5%
Hispanic - 6.6%
White - 80.0%
Native Hawaiian/Pacific islanders - 0.5%
Multiracial - 5.2%
Athletics
Carmel's athletic teams are named Corsairs, and the school's colors are brown, gold, and white. Carmel competes in the East Suburban Catholic Conference in its interscholastic athletics program.
The school sponsors both men's and women's teams in basketball, cross country, golf, lacrosse, soccer, swimming, tennis, track and field, and volleyball. The school sponsors men's teams in baseball, football, and wrestling, and women's teams in cheerleading, gymnastics, pom poms, and softball. Although not sponsored by the IHSA, the school also sponsors a men's ice hockey team.
The following teams have won their respective IHSA sponsored state tournament:
Football: 2003
Girls gymnastics: 1992, 1993, 2010, 2011, 2012
Girls soccer: 2015
Girls basketball: 2022
Fine arts
Carmel Catholic's fine arts program includes chorus, band, drama, and visual arts.
The drama program produces one play and one musical per year. The school's current long-range strategic plan includes the construction of a new fine arts wing by 2012. The Fine Arts wing was opened in 2013. The drama program is a troupe of the International Thespian Society and has had students participate in the Illinois High School Theatre Festival.
The choral program has a number of different choirs for students to join: Concert Choir, Treble Choir, Advanced Choir, as well as one show choir, Cadence, and one jazz/ a cappella group, Parkway Singers.
In the band program there are many different groups: the Jazz Band, Jazz Ensemble, Concert Band and Wind Ensemble. During the football season, the Marching Band plays at all home games and at as many playoff games as they can get to.
Notable alumni
Marietta DePrima (1982) is an actress (The Hughleys).
Sean McGrath (2006) played for the NFL's Los Angeles Chargers
Brienne Minor (2015) is an NCAA champion tennis player who competed in the 2017 U.S. Open
Al Salvi (1978) was a former Illinois state legislator and 1996 Republican U.S. Senate nominee
Chris Salvi (2008) is a former football safety who played for Notre Dame
Rick Santorum (1976) was a United States senator (R—PA) (1995–2007)
Scott Stahoviak (1988) was a Major League Baseball first baseman and first round draft pick (1991) for the Minnesota Twins
Carol Tyler (1969) is an internationally known artist, cartoonist and humorist
Joe Tyler (1966) was an Olympic athlete who rode as brakeman on USA #1 bobsled in the 1980 Winter Olympics
Mike Wagner (1967) was an NFL safety for the Pittsburgh Steelers who played for their championship teams in Super Bowls IX, X, XIII, and XIV; he was a member of their "Steel Curtain" defense
Alex Young (2012) is a pitcher for the Cincinnati Reds of Major League Baseball.
Jeff Zgonina (1988) was an NFL player for the Houston Texans and is an assistant coach for the San Francisco 49ers
References
External links
Carmel Catholic High School official website
Educational institutions established in 1962
1962 establishments in Illinois
Roman Catholic Archdiocese of Chicago
Catholic secondary schools in Illinois
Carmelite educational institutions
Mundelein, Illinois
Schools in Lake County, Illinois
|
Asterisk indicates Sinn Féin MPs who do not take their seats in the Commons.
See also
Election results of women in United Kingdom general elections (1918–1945)
List of female members of the House of Commons of the United Kingdom
Mother of Parliament
Records of members of parliament of the United Kingdom § Women
Women in the House of Commons of the United Kingdom
References
Members of the Parliament of the United Kingdom
mps house of commons uk
Lists of women politicians
History of women in the United Kingdom
|
Capital punishment in Bhutan was abolished on March 20, 2004 and is prohibited by the 2008 Constitution. The prohibition appears among a number of fundamental rights guaranteed by the Constitution; while some fundamental rights—such as voting, land ownership, and equal pay—extend only to Bhutanese citizens, the prohibition on capital punishment applies to all people within the kingdom.
History
Under the reforms to the Tsa Yig by the first King of Bhutan, Ugyen Wangchuck, capital punishment was the penalty for murderers who fled the scene and for those who forged government documents. Under the National Security Act of 1992, the death penalty was designated for those guilty of "treasonable acts" or of overt acts "with intent to give aid and comfort to the enemy in order to deliberately and voluntarily betray" the royal government.
On April 5, 1964, Prime Minister Jigme Palden Dorji was assassinated in a dispute among competing political factions. The King's own uncle and head of the Royal Bhutan Army, Namgyal Bahadur, was among those executed for their role in the attempted coup.
See also
Law enforcement in Bhutan
Human rights in Bhutan
Religion and capital punishment
Judicial system of Bhutan
Constitution of Bhutan
References
External links
Bhutan
Law enforcement in Bhutan
Death in Bhutan
Human rights abuses in Bhutan
2004 disestablishments in Bhutan
|
```xml
<?xml version="1.0" encoding="utf-8"?>
<!-- Shape drawable: solid rounded rectangle filled with the theme's
     secondary color, using a 12dp corner radius. -->
<shape xmlns:android="path_to_url">
    <solid android:color="?attr/colorSecondary" />
    <corners android:radius="12dp" />
</shape>
```
|
```smalltalk
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Hosting;

// Minimal API sample demonstrating the int, min(), max(), and range() route constraints.
var app = WebApplication.CreateBuilder(new WebApplicationOptions { EnvironmentName = Environments.Development }).Build();

// Emit plain status-code pages so constraint mismatches show a visible 404.
app.UseStatusCodePages();

app.MapGet("/", () =>
{
// Landing page linking to each constrained endpoint.
// BUGFIX: the integrity attribute previously used a single " which terminated
// the verbatim string literal (compile error); quotes must be doubled ("") here.
// Also removed a stray ')' after {range(min, max)} in the heading.
return Results.Text(@"
<html>
<head>
<link rel=""stylesheet"" href=""path_to_url"" integrity=""your_sha256_hashMI3AhiU"" crossorigin=""anonymous"">
</head>
<body>
<h1>Sample for {int}, {min()}, {max()}, and {range(min, max)} route constraints</h1>
<ul>
<li>/{id:int} <a href=""/10"">/10</a></li>
<li>/{id:int} <a href=""/10f"">/10f</a> (404)</li>
<li>/{id:int} <a href=""/10_000"">/10_000</a> (404)</li>
<li>/{id:int} <a href=""/10.4"">/10.4</a> (404)</li>
<li>/min/{minId:min(1)} <a href=""/min/100"">/min/100</a></li>
<li>/min/{minId:min(1)} <a href=""/min/0"">/min/0</a> (404)</li>
<li>/max/{maxId:max(10)} <a href=""/max/10"">/max/10</a></li>
<li>/max/{maxId:max(10)} <a href=""/max/11"">/max/11</a> (404)</li>
<li>/range/{rangeId:range(1, 10)} <a href=""/range/1"">/range/1</a></li>
<li>/range/{rangeId:range(1, 10)} <a href=""/range/10"">/range/10</a></li>
<li>/range/{rangeId:range(1, 10)} <a href=""/range/0"">/range/0</a> (404)</li>
<li>/range/{rangeId:range(1, 10)} <a href=""/range/11"">/range/11</a> (404)</li>
</ul>
</body>
</html>
", "text/html");
});

// Constrained endpoints: a request failing its constraint does not match the route (404).
app.MapGet("/{id:int}", (int id) => id.ToString());
app.MapGet("/min/{minId:min(1)}", (int minId) => minId.ToString());
app.MapGet("/max/{maxId:max(10)}", (int maxId) => maxId.ToString());
app.MapGet("/range/{rangeId:range(1, 10)}", (int rangeId) => rangeId.ToString());

app.Run();
```
|
Mordellina testaceicolor is a species of beetle in the genus Mordellina. It was described in 1967.
References
Mordellidae
Beetles described in 1967
|
Harrison Mills may refer to:
Harrison Mills, British Columbia
Harrison Mills, Ohio
Harrison Mills, Missouri
Harrison Mills, member of American electronic music duo, Odesza
|
Palancavery, an important pilgrimage place, is situated in Pettavaithalai, India. The important temples are the Karumbai Amman Temple and the "Lord Shiva" temple at Devasthanam. Palancavery lies on the downstream side of the river Cauvery.
References
Villages in Tiruchirappalli district
|
Singapore General Hospital (SGH) is an academic health science centre and tertiary referral hospital in Singapore. It is located next to the Bukit Merah and Chinatown districts of the Central Region, close to the Outram Community Hospital (OCH), which functions as a supplementary community and rehabilitation hospital to SGH for newly-discharged patients. There is also the Outram Polyclinic to complement outpatient care. All of these institutions are operated by SingHealth, which comes under the purview of the Ministry of Health (MOH).
It is the largest and oldest hospital in Singapore, and functions as the country's national hospital. Its foundation of its first building was laid in 1821, before its first major expansion in 1926. Subsequent expansions as well as renovations were also made in the following decades.
SGH is the flagship hospital of SingHealth, the country's largest group of public healthcare institutions and the principal teaching hospital for the Duke–NUS Medical School, which is affiliated with the National University of Singapore. Its campus includes four national specialty centres, namely the Singapore National Eye Centre (SNEC), the National Heart Centre Singapore (NHCS), the National Cancer Centre Singapore (NCCS), and the National Dental Centre Singapore (NDCS). A fifth specialty centre, the Elective Care Centre Singapore (ECC), is currently under construction and it is expected to be completed in 2025.
SGH has been considered one of the best hospitals in the world, being consistently ranked in the top 10 by Newsweek and reaching its highest position of 3rd in 2019. It is the highest-ranked hospital in Asia, drawing in patients from around the region, and SGH performs close to the highest number of transplants in the country, including both solid organ and hematologic transplantation.
History
Early years
The Singapore General Hospital was established in 1821, when the first General Hospital was located in the cantonment for British troops near the Singapore River. It later shifted to Pearl Banks apartment and then to the Kandang Kerbau district, before finally settling at Sepoy Lines along Outram Road in 1882.
The modern history of Singapore General Hospital began on 29 March 1926, with the opening of 800 beds in the Bowyer, Stanley and Norris Blocks. Today, only the Bowyer Block with its historically distinctive clock tower remains. The Bowyer Block is now home to the Singapore General Hospital Museum or the SGH Museum.
In 1981, the hospital was rebuilt, with its current 8-block complex housing in-patient wards, ambulatory and support services, research laboratories and a postgraduate medical institute.
On 1 April 1989, the hospital was restructured, in an effort to modernise the organisation of the hospital, due to rapidly developing changes in healthcare services and patient expectations for better service. As a restructured hospital, the Singapore General Hospital is still 100 per cent government-owned and is a not-for-profit institution. More than 60 per cent of the beds are allocated for subsidised patients, giving them access to an internationally established standard of affordable healthcare.
21st century
On 31 March 2000, a major reorganisation of the public sector healthcare services was initiated by the Ministry of Health (MOH). Since then, the Singapore General Hospital came under the management of Singapore Health Services or SingHealth.
In 2018, SGH announced that it will be expanding its accident and emergency (A&E) facilities, which include constructing a new 12-storey specialised building that will be four times larger than the hospital's existing A&E facilities. The building will be connected to its specialty centres as well as the Outram Community Hospital. It is expected to begin operations in 2023.
The SGH Museum
The Singapore General Hospital Museum is a repository of artefacts and records, where visitors can trace the long and rich history of the Singapore General Hospital. It is also a place where one can learn about the development of medical specialties and medical education in Singapore, presented with the aid of audio-visual and multimedia technology.
The SGH Museum was officially opened by President SR Nathan on 20 May 2005. The museum adopts a thematic approach in presenting the hospital's long history, where visitors get not just an insight into the significant developments of the hospital through the years, but also an understanding of the impact these incidents have on the people and the community.
National specialty centres
Singapore National Eye Centre
Singapore National Eye Centre (SNEC) is a specialty centre for ophthalmological services and is the largest ophthalmology specialist centre in Singapore. It was founded in 1990 to lead and organise specialised ophthalmological services with special emphasis on research and education. Since its inauguration, SNEC has averaged an annual workload of 14,000 major eye surgeries and 13,000 laser procedures. The SNEC also actively participates in clinical trials and researches the causes to find treatments to eye conditions such as myopia and glaucoma.
SNEC's facilities includes 50 consultation suites, 9 operating theatres, research facilities, offices and a library in two adjoining buildings.
National Cancer Centre Singapore
The National Cancer Centre Singapore (NCCS) is the country's national specialty centre for the diagnosis, research and treatment of cancer. The centre has Singapore's largest concentration of oncologists. It was originally established in 1993 as a unit of Singapore General Hospital, and subsequently became an autonomous institution of SingHealth.
The centre's founding director is Professor Soo Khee Chee. NCCS provides a range of medical, educational and research activities within a single institution; and practices a multi-disciplinary approach to diagnosis and treatment. It is Southeast Asia's only full multi-disciplinary sub-specialist centre for cancer.
In addition, the centre is a teaching institution for post-graduate cancer education that trains and offers fellowships for many local and overseas doctors, nurses, para-medical professionals and researchers.
On 2 June 2017, construction for an additional building to the NCCS began, which include more facilities to cater to increased patients' access to cancer treatment as well as the specialty centre's capacity. Slated to begin operations in 2022, it will also include a new Proton Therapy Centre, which allows the NCCS to engage in proton therapy to treat cancer in their patients. It is the first and only hospital in the region to have such facilities.
National Heart Centre Singapore
The National Heart Centre Singapore (NHCS) is a specialist medical centre and a regional referral centre for cardiovascular diseases. Established in 1994 as the Singapore Heart Centre in the Singapore General Hospital, the heart centre took over the hospital's cardiac services and set up a cardiology laboratory in 1995. It was renamed in 1998. In 2014, NHCS completed its move to a new purpose-built building at 5 Hospital Drive. The new building not only includes facilities for outpatient clinics and non-invasive testing, but also has operating theatres and an invasive cardiac catheterisation laboratory.
With over 9,000 inpatient admissions every year, the 186-bed specialty centre for cardiovascular disease in Singapore offers treatments from preventive to rehabilitative cardiac services.
National Dental Centre Singapore
The National Dental Centre Singapore (NDCS) is a facility in Singapore for specialist oral healthcare services. It commenced operations on 1 March 1997 and claims to offer the largest concentration of specialist expertise in a single facility. Its specialist teams attend to over 700 patients daily, including walk-in patients and those being referred to the centre. The centre is equipped with 92-chair facility and a day surgery suite.
The centre has three specialist clinical departments, being the Departments of Oral and Maxillofacial Surgery, Orthodontics and Restorative Dentistry, which attend to a wide range of oral conditions. In addition, Department of Restorative Dentistry also has sub-units in Endodontics, Paediatric Dentistry, Periodontics and Prosthodontics. Sub-speciality multidisciplinary services are available through NDCS's Centres for Corrective Jaw Surgery, Maxillofacial Rehabilitation and Facial Pain.
The centre is active in research as well as training activities, especially focusing on the professional education of dentists. NDCS has been under the management of Singapore Health Services Pte Ltd since 2002.
In 2019, the National University Centre for Oral Health, Singapore (NUCOHS) started operations at one-north, becoming the second national facility in Singapore to offer specialised dental health services. Although not a part of SGH or SingHealth, it acts as a supplementary facility to keep up with the rising demand of public dental services in the country. A new and larger National Dental Centre Singapore is also currently being constructed, which is expected to more than double its capacity by 2025. These include expanding the Geriatric Special Care Dentistry Clinic, to meet the needs of the older generation in Singapore.
Elective Care Centre
The Elective Care Centre (ECC) is a facility that will focus on non-emergency surgeries, which aims to offload resources at the main hospital. It is currently under construction, and it is expected to begin operations in 2026.
Notes
References
External links
Singapore National Eye Centre
National Cancer Centre Singapore
National Heart Centre Singapore
National Dental Centre Singapore
Hospital buildings completed in 1926
Hospital buildings completed in 1981
Hospitals in Singapore
Medical education in Singapore
Teaching hospitals
National University of Singapore
Hospitals established in 1821
Bukit Merah
Buildings and structures in Central Region, Singapore
|
The Canal de la Somme is a canal in northern France. Its total length is 156.4 km with 25 locks, from the English Channel at Saint-Valéry-sur-Somme to the Canal de Saint-Quentin at Saint-Simon.
History
The Somme River was canalized beginning in 1770. The 54 km section from St. Simon to Bray was completed by 1772, but the rest was not finished until 1843.
Overview
The canal as originally built has seen substantial modifications since construction of the Canal du Nord in 1904–1965, and is now made up of four distinct sections:
and 1 lock from Saint-Valery-sur-Somme to Abbeville (the Canal maritime)
and 18 locks from Abbeville to Péronne
with 2 locks the section upgraded as part of the Canal du Nord
and 4 locks from Voyennes to Saint-Simon, closed upstream from Offoy since 2004.
Some authors distinguish the Grande Somme downstream from Péronne and the Petite Somme upstream from Voyennes. Since 2005 the latter section has been closed to navigation as a result of silt deposits.
In the 1960s, more than 300,000 tonnes of goods were transported on the canal. Today it is used largely by pleasure boats.
En Route
PK 156 Saint-Valéry-sur-Somme
PK 141 Abbeville
PK 92 Amiens
PK 34 Péronne
PK 16 Voyennes
PK 0 Saint-Simon
See also
List of canals in France
References
External links
Canal de la Somme information on places, ports and moorings on the canal, by the author of Inland Waterways of France, Imray
Navigation details for 80 French rivers and canals (French waterways website section)
Somme
Canals opened in 1772
Canals opened in 1843
1843 establishments in France
|
Willem Van der Tanerijen (died 1499) was a jurist in the Duchy of Brabant (the territory of which is now divided between the Netherlands and Belgium) whose manuscript treatise on the procedures of the major courts of the duchy is an important source for the legal history of the fifteenth century. He was also a proponent of university training in law.
Life
Sources on Van der Tanerijen's life are scarce. He was probably born in Antwerp, where he later served as an alderman. He was appointed to the Council of Brabant and later as master of requests of the Great Council of Mary of Burgundy.
Writings
Boeck van der loopender practijken der raidtcameren van Brabant, edited by E. I. Strubbe, 2 vols. (Brussels, Commission royale pour la publication des anciennes lois et ordonnances de la Belgique, 1952).
References
1499 deaths
Legal history of Belgium
Legal history of the Netherlands
People from the Duchy of Brabant
Lawyers from the Habsburg Netherlands
15th-century people from the Holy Roman Empire
15th-century lawyers
|
```c
/*
* Delphine Software International CIN File Demuxer
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
*
* You should have received a copy of the GNU Lesser General Public
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* Delphine Software International CIN file demuxer
*/
#include "libavutil/channel_layout.h"
#include "libavutil/intreadwrite.h"
#include "avformat.h"
#include "internal.h"
#include "avio_internal.h"
/* Fixed per-file header, parsed once by cin_read_file_header(). */
typedef struct CinFileHeader {
    int video_frame_size;   /* size in bytes of one video frame */
    int video_frame_width;
    int video_frame_height;
    int audio_frequency;    /* must be 22050 (validated on open) */
    int audio_bits;         /* must be 16 (validated on open) */
    int audio_stereo;       /* must be 0, i.e. mono (validated on open) */
    int audio_frame_size;
} CinFileHeader;

/* Per-frame header, refreshed for every frame in cin_read_packet(). */
typedef struct CinFrameHeader {
    int audio_frame_type;
    int video_frame_type;
    int pal_colors_count;   /* negative (as int16) selects the alternate palette layout */
    int audio_frame_size;
    int video_frame_size;
} CinFrameHeader;

/* Demuxer state, kept in AVFormatContext.priv_data. */
typedef struct CinDemuxContext {
    int audio_stream_index;
    int video_stream_index;
    CinFileHeader file_header;
    int64_t audio_stream_pts;
    int64_t video_stream_pts;
    CinFrameHeader frame_header;
    int audio_buffer_size;  /* pending audio bytes; nonzero => next packet is audio */
} CinDemuxContext;
/*
 * Probe for a Delphine Software CIN file: the fixed start-of-file marker
 * plus the constant audio parameters (22050 Hz, 16-bit, mono flag == 0)
 * that every known CIN file carries.
 */
static int cin_probe(AVProbeData *p)
{
    const uint8_t *b = p->buf;

    if (AV_RL32(&b[0])  == 0x55AA0000 &&
        AV_RL32(&b[12]) == 22050      &&
        b[16] == 16 && b[17] == 0)
        return AVPROBE_SCORE_MAX;
    return 0;
}
/*
 * Parse the fixed file header that opens every CIN file.
 * The avio reads are strictly sequential and must match the on-disk layout.
 * Returns 0 on success, AVERROR_INVALIDDATA on a bad magic or unsupported
 * audio parameters.
 */
static int cin_read_file_header(CinDemuxContext *cin, AVIOContext *pb) {
    CinFileHeader *hdr = &cin->file_header;

    /* file magic; same value cin_probe() checks */
    if (avio_rl32(pb) != 0x55AA0000)
        return AVERROR_INVALIDDATA;

    hdr->video_frame_size   = avio_rl32(pb);
    hdr->video_frame_width  = avio_rl16(pb);
    hdr->video_frame_height = avio_rl16(pb);
    hdr->audio_frequency    = avio_rl32(pb);
    hdr->audio_bits         = avio_r8(pb);
    hdr->audio_stereo       = avio_r8(pb);
    hdr->audio_frame_size   = avio_rl16(pb);

    /* only 22050 Hz, 16-bit, mono files are supported */
    if (hdr->audio_frequency != 22050 || hdr->audio_bits != 16 || hdr->audio_stereo != 0)
        return AVERROR_INVALIDDATA;

    return 0;
}
/*
 * Read the file header and register the two elementary streams.
 * Video runs at a fixed 12 fps; audio is always 22050 Hz mono, as enforced
 * by cin_read_file_header().
 */
static int cin_read_header(AVFormatContext *s)
{
    CinDemuxContext *cin = s->priv_data;
    CinFileHeader *hdr = &cin->file_header;
    AVStream *video, *audio;
    int rc;

    rc = cin_read_file_header(cin, s->pb);
    if (rc)
        return rc;

    cin->video_stream_pts  = 0;
    cin->audio_stream_pts  = 0;
    cin->audio_buffer_size = 0;

    /* video stream */
    video = avformat_new_stream(s, NULL);
    if (!video)
        return AVERROR(ENOMEM);
    avpriv_set_pts_info(video, 32, 1, 12);
    cin->video_stream_index  = video->index;
    video->codec->codec_type = AVMEDIA_TYPE_VIDEO;
    video->codec->codec_id   = AV_CODEC_ID_DSICINVIDEO;
    video->codec->codec_tag  = 0; /* no fourcc */
    video->codec->width      = hdr->video_frame_width;
    video->codec->height     = hdr->video_frame_height;

    /* audio stream */
    audio = avformat_new_stream(s, NULL);
    if (!audio)
        return AVERROR(ENOMEM);
    avpriv_set_pts_info(audio, 32, 1, 22050);
    cin->audio_stream_index  = audio->index;
    audio->codec->codec_type = AVMEDIA_TYPE_AUDIO;
    audio->codec->codec_id   = AV_CODEC_ID_DSICINAUDIO;
    audio->codec->codec_tag  = 0; /* no tag */
    audio->codec->channels   = 1;
    audio->codec->channel_layout = AV_CH_LAYOUT_MONO;
    audio->codec->sample_rate = 22050;
    audio->codec->bits_per_coded_sample = 8;
    audio->codec->bit_rate = audio->codec->sample_rate *
                             audio->codec->bits_per_coded_sample *
                             audio->codec->channels;

    return 0;
}
/*
 * Parse one per-frame header; reads are strictly sequential.
 * Returns 0 on success, AVERROR(EIO) on a short/failed read, or
 * AVERROR_INVALIDDATA on a bad sync word or negative chunk sizes.
 */
static int cin_read_frame_header(CinDemuxContext *cin, AVIOContext *pb) {
    CinFrameHeader *hdr = &cin->frame_header;

    hdr->video_frame_type = avio_r8(pb);
    hdr->audio_frame_type = avio_r8(pb);
    hdr->pal_colors_count = avio_rl16(pb);
    hdr->video_frame_size = avio_rl32(pb);
    hdr->audio_frame_size = avio_rl32(pb);

    /* detect truncated input before trusting the fields above */
    if (avio_feof(pb) || pb->error)
        return AVERROR(EIO);

    /* every frame header is terminated by this fixed sync word */
    if (avio_rl32(pb) != 0xAA55AA55)
        return AVERROR_INVALIDDATA;
    if (hdr->video_frame_size < 0 || hdr->audio_frame_size < 0)
        return AVERROR_INVALIDDATA;

    return 0;
}
/*
 * Demux one packet. Each CIN frame stores a video chunk followed by an audio
 * chunk: the frame header and video/palette data are returned first, and the
 * audio chunk is deferred to the next call via cin->audio_buffer_size.
 */
static int cin_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    CinDemuxContext *cin = s->priv_data;
    AVIOContext *pb = s->pb;
    CinFrameHeader *hdr = &cin->frame_header;
    int rc, palette_type, pkt_size;
    int ret;

    if (cin->audio_buffer_size == 0) {
        /* start of a new frame: parse its header first */
        rc = cin_read_frame_header(cin, pb);
        if (rc)
            return rc;

        /* a negative 16-bit palette count selects the alternate palette
         * layout; its magnitude is the number of palette entries */
        if ((int16_t)hdr->pal_colors_count < 0) {
            hdr->pal_colors_count = -(int16_t)hdr->pal_colors_count;
            palette_type = 1;
        } else {
            palette_type = 0;
        }

        /* palette and video packet: entries are 3 bytes (type 0) or
         * 4 bytes (type 1) each, followed by the video frame data;
         * clamp to what the input can actually provide */
        pkt_size = (palette_type + 3) * hdr->pal_colors_count + hdr->video_frame_size;
        pkt_size = ffio_limit(pb, pkt_size);

        /* 4-byte private header (palette type, entry count, frame type)
         * precedes the payload handed to the video decoder */
        ret = av_new_packet(pkt, 4 + pkt_size);
        if (ret < 0)
            return ret;
        pkt->stream_index = cin->video_stream_index;
        pkt->pts = cin->video_stream_pts++;
        pkt->data[0] = palette_type;
        pkt->data[1] = hdr->pal_colors_count & 0xFF;
        pkt->data[2] = hdr->pal_colors_count >> 8;
        pkt->data[3] = hdr->video_frame_type;
        ret = avio_read(pb, &pkt->data[4], pkt_size);
        if (ret < 0) {
            av_packet_unref(pkt);
            return ret;
        }
        /* tolerate a short read by shrinking the packet to what was read */
        if (ret < pkt_size)
            av_shrink_packet(pkt, 4 + ret);
        /* sound buffer will be processed on next read_packet() call */
        cin->audio_buffer_size = hdr->audio_frame_size;
        return 0;
    }

    /* audio packet */
    ret = av_get_packet(pb, pkt, cin->audio_buffer_size);
    if (ret < 0)
        return ret;
    pkt->stream_index = cin->audio_stream_index;
    pkt->pts = cin->audio_stream_pts;
    /* NOTE(review): the first audio packet's duration is shortened by one —
     * presumably one priming byte yields no sample; confirm against decoder */
    pkt->duration = cin->audio_buffer_size - (pkt->pts == 0);
    cin->audio_stream_pts += pkt->duration;
    cin->audio_buffer_size = 0;
    return 0;
}
/* Demuxer registration: content is detected by cin_probe() only
 * (no file-extension matching). */
AVInputFormat ff_dsicin_demuxer = {
    .name           = "dsicin",
    .long_name      = NULL_IF_CONFIG_SMALL("Delphine Software International CIN"),
    .priv_data_size = sizeof(CinDemuxContext),
    .read_probe     = cin_probe,
    .read_header    = cin_read_header,
    .read_packet    = cin_read_packet,
};
```
|
Compsoctena cyclatma is a moth in the family Eriocottidae. It was described by Edward Meyrick in 1908. It is found in Mozambique, South Africa (Limpopo), Zambia and Zimbabwe.
The wingspan is about 28 mm. The forewings are fuscous, somewhat sprinkled with whitish and irregularly and suffusedly irrorated (sprinkled) with blackish fuscous, the confluence of irroration forming several irregular broken longitudinal marks, and three or four spots on the posterior half of the costa. There is a rounded blotch of whitish suffusion on the dorsum before the middle, and an irregular streak of whitish suffusion along the posterior third of the dorsum and termen to the apex. The hindwings are grey.
References
Moths described in 1908
Compsoctena
Moths of Africa
|
```go
package otel
import (
"context"
"os"
"strconv"
"strings"
"time"
"go.opentelemetry.io/contrib/propagators/autoprop"
"go.opentelemetry.io/otel"
"go.opentelemetry.io/otel/exporters/otlp/otlptrace"
"go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc"
"go.opentelemetry.io/otel/sdk/resource"
sdktrace "go.opentelemetry.io/otel/sdk/trace"
semconv "go.opentelemetry.io/otel/semconv/v1.4.0"
"go.uber.org/zap"
"google.golang.org/grpc/credentials"
apiv1 "k8s.io/api/core/v1"
)
// Environment variables understood by this package; exporter settings follow
// the standard OTEL_* naming used by the OpenTelemetry SDK.
const (
	// OtelEnvPrefix selects which process env vars are forwarded to containers.
	OtelEnvPrefix = "OTEL_"
	// OtelEndpointEnvVar holds the OTLP gRPC endpoint; empty disables tracing.
	OtelEndpointEnvVar = "OTEL_EXPORTER_OTLP_ENDPOINT"
	// OtelInsecureEnvVar toggles TLS for the exporter connection.
	OtelInsecureEnvVar = "OTEL_EXPORTER_OTLP_INSECURE"
	// NOTE(review): identifier is misspelled ("Propagaters") but exported —
	// kept as-is for backward compatibility.
	OtelPropagaters = "OTEL_PROPAGATORS"
)
// OtelConfig holds the OTLP exporter settings parsed from the environment.
type OtelConfig struct {
	endpoint string // target from OTEL_EXPORTER_OTLP_ENDPOINT; empty disables tracing
	insecure bool   // from OTEL_EXPORTER_OTLP_INSECURE; true => dial without TLS
}
// parseOtelConfig parses the environment variables OTEL_EXPORTER_OTLP_ENDPOINT and
// parseOtelConfig reads the OTLP exporter settings from the environment:
// the endpoint from OTEL_EXPORTER_OTLP_ENDPOINT and the insecure flag from
// OTEL_EXPORTER_OTLP_INSECURE. The insecure flag defaults to true when the
// variable is unset or cannot be parsed as a boolean.
func parseOtelConfig() OtelConfig {
	cfg := OtelConfig{endpoint: os.Getenv(OtelEndpointEnvVar)}
	if v, err := strconv.ParseBool(os.Getenv(OtelInsecureEnvVar)); err == nil {
		cfg.insecure = v
	} else {
		cfg.insecure = true
	}
	return cfg
}
// getTraceExporter builds a gRPC OTLP trace exporter from the environment
// configuration. It returns (nil, nil) when no endpoint is configured, which
// callers treat as "tracing disabled" rather than an error.
func getTraceExporter(ctx context.Context, logger *zap.Logger) (*otlptrace.Exporter, error) {
	otelConfig := parseOtelConfig()
	if otelConfig.endpoint == "" {
		if logger != nil {
			logger.Info("OTEL_EXPORTER_OTLP_ENDPOINT not set, skipping OpenTelemetry tracing")
		}
		return nil, nil
	}
	grpcOpts := []otlptracegrpc.Option{
		otlptracegrpc.WithEndpoint(otelConfig.endpoint),
	}
	if otelConfig.insecure {
		grpcOpts = append(grpcOpts, otlptracegrpc.WithInsecure())
	} else {
		// nil cert pool => use the host's root CAs for server verification.
		grpcOpts = append(grpcOpts, otlptracegrpc.WithTLSCredentials(credentials.NewClientTLSFromCert(nil, "")))
	}
	exporter, err := otlptracegrpc.New(ctx, grpcOpts...)
	if err != nil {
		return nil, err
	}
	return exporter, nil
}
// Initializes an OTLP exporter, and configures the corresponding trace and metric providers.
// InitProvider initializes an OTLP trace exporter (when configured via the
// environment) and installs the global tracer provider and text-map
// propagator. It returns a shutdown function that flushes remaining spans;
// the function is returned even when tracing is disabled (no endpoint set).
func InitProvider(ctx context.Context, logger *zap.Logger, serviceName string) (func(context.Context), error) {
	// Resource labels every exported span with the service name.
	res, err := resource.New(ctx,
		resource.WithAttributes(
			semconv.ServiceNameKey.String(serviceName),
		),
	)
	if err != nil {
		return nil, err
	}
	tracerProvider := sdktrace.NewTracerProvider(
		sdktrace.WithResource(res),
	)
	traceExporter, err := getTraceExporter(ctx, logger)
	if err != nil {
		return nil, err
	}
	// traceExporter is nil when no endpoint is configured: the provider is
	// still installed globally but no spans are exported.
	if traceExporter != nil {
		bsp := sdktrace.NewBatchSpanProcessor(traceExporter)
		tracerProvider.RegisterSpanProcessor(bsp)
	}
	otel.SetTracerProvider(tracerProvider)
	// autoprop honors OTEL_PROPAGATORS for propagator selection.
	otel.SetTextMapPropagator(autoprop.NewTextMapPropagator())
	// Shutdown will flush any remaining spans and shut down the exporter.
	return func(ctx context.Context) {
		if ctx.Err() != nil {
			// if the context is already cancelled, create a new one with a timeout of 30 seconds
			ctxwithTimeout, cancel := context.WithTimeout(context.Background(), 30*time.Second)
			defer cancel()
			ctx = ctxwithTimeout
		}
		err := tracerProvider.Shutdown(ctx)
		if err != nil && logger != nil {
			logger.Error("error shutting down trace provider", zap.Error(err))
		}
		if traceExporter != nil {
			if err = traceExporter.Shutdown(ctx); err != nil && logger != nil {
				logger.Error("error shutting down trace exporter", zap.Error(err))
			}
		}
	}, nil
}
// OtelEnvForContainer returns a list of environment variables
// for the container, which start with prefix OTEL_
func OtelEnvForContainer() []apiv1.EnvVar {
	result := []apiv1.EnvVar{}
	for _, entry := range os.Environ() {
		if !strings.HasPrefix(entry, OtelEnvPrefix) {
			continue
		}
		// Split on the first '=' only; values may themselves contain '='.
		kv := strings.SplitN(entry, "=", 2)
		result = append(result, apiv1.EnvVar{
			Name:  kv[0],
			Value: kv[1],
		})
	}
	return result
}
```
|
Windermere is a suburb of the City of Maitland local government area in the Hunter Region of New South Wales, Australia, a short distance from the Maitland CBD. It is named after the Windermere estate.
References
Suburbs of Maitland, New South Wales
|
```graphql
# Minimal User object type with a single nullable String field.
type User {
  a: String
}
```
|
Douglas MacArthur High School is a public high school located in Decatur, Illinois. The school serves about 1,184 students from grades 9 to 12 in Decatur Public School District 61.
History
Built in 1957, MacArthur High School was named for General Douglas MacArthur, an American army officer. Correspondingly, the school's sports teams' nickname is the Generals.
Demographics
As of the 2020 school year, the enrollment was 1,090 students. The racial makeup of the school in 2020 was 54.5% African American, 37.3% White, 5.5% Hispanic, 1.4% Asian, 0.7% Two or more races, 0.3% Native American, and 0.3% Pacific Islander. 66.3% of the student population are low income students.
Academics
In 2008, 36.8% of the student population met or exceeded in all subjects. The school did not make Adequate Yearly Progress (AYP) as defined by federal and state laws in 2008. In 2008, the high school graduation rate was 92.3%, up from 76.6% in 2007. The dropout rate lowered from 10.8% in 2007 to 2.5% in 2008.
Athletics
MacArthur High School is a member of the Central State Eight Conference. MacArthur fields teams in Baseball, Basketball, Bowling, Cheerleading, Cross Country, Football, Golf, Soccer, Softball, Tennis, Track & Field, Volleyball and Wrestling.
Notable alumni
Brian Culbertson, musician, funk-based instrumentalist, jazz artist (Class of 1991)
Loren Coleman, cryptozoologist, author, television personality (Class of 1965)
L. Douglas Hagen (1946 - 1971), US Army Special Forces Green Beret and Medal of Honor recipient (Class of 1964)
Steve Hunter, musician, guitarist, played with Mitch Ryder, Lou Reed, Alice Cooper, Peter Gabriel and others (Class of 1966)
David Joyner, actor
James W. Loewen (1942–2021), sociologist, historian and author (Class of 1960)
References
Buildings and structures in Decatur, Illinois
Public high schools in Illinois
Schools in Macon County, Illinois
|
Folksonomy is a classification system in which end users apply public tags to online items, typically to make those items easier for themselves or others to find later. Over time, this can give rise to a classification system based on those tags and how often they are applied or searched for, in contrast to a taxonomic classification designed by the owners of the content and specified when it is published. This practice is also known as collaborative tagging, social classification, social indexing, and social tagging. Folksonomy was originally "the result of personal free tagging of information [...] for one's own retrieval", but online sharing and interaction expanded it into collaborative forms. Social tagging is the application of tags in an open online environment where the tags of other users are available to others. Collaborative tagging (also known as group tagging) is tagging performed by a group of users. This type of folksonomy is commonly used in cooperative and collaborative projects such as research, content repositories, and social bookmarking.
The term was coined by Thomas Vander Wal in 2004 as a portmanteau of folk and taxonomy. Folksonomies became popular as part of social software applications such as social bookmarking and photograph annotation that enable users to collectively classify and find information via shared tags. Some websites include tag clouds as a way to visualize tags in a folksonomy.
Folksonomies can be used for K–12 education, business, and higher education. More specifically, folksonomies may be implemented for social bookmarking, teacher resource repositories, e-learning systems, collaborative learning, collaborative research, professional development and teaching. Wikipedia is also a prime example of folksonomy.
Benefits and disadvantages
Folksonomies are a trade-off between traditional centralized classification and no classification at all, and have several advantages:
Tagging is easy to understand and do, even without training and previous knowledge in classification or indexing
The vocabulary in a folksonomy directly reflects the user's vocabulary
Folksonomies are flexible, in the sense that the user can add or remove tags
Tags consist of both popular content and long-tail content, enabling users to browse and discover new content even in narrow topics
Tags reflect the user's conceptual model without cultural, social, or political bias
Enable the creation of communities, in the sense that users who apply the same tag have a common interest
Folksonomies are multi-dimensional, in the sense that users can assign any number and combination of tags to express a concept
There are several disadvantages with the use of tags and folksonomies as well, and some of the advantages (see above) can lead to problems. For example, the simplicity in tagging can result in poorly applied tags. Further, while controlled vocabularies are exclusionary by nature, tags are often ambiguous and overly personalized. Users apply tags to documents in many different ways and tagging systems also often lack mechanisms for handling synonyms, acronyms and homonyms, and they also often lack mechanisms for handling spelling variations such as misspellings, singular/plural form, conjugated and compound words. Some tagging systems do not support tags consisting of multiple words, resulting in tags like "viewfrommywindow". Sometimes users choose specialized tags or tags without meaning to others.
Elements and types
A folksonomy emerges when users tag content or information, such as web pages, photos, videos, podcasts, tweets, scientific papers and others. Strohmaier et al. elaborate the concept: the term "tagging" refers to a "voluntary activity of users who are annotating resources with terms — so-called 'tags' — freely chosen from an unbounded and uncontrolled vocabulary". Others explain tags as an unstructured textual label or keywords, and that they appear as a simple form of metadata.
Folksonomies consist of three basic entities: users, tags, and resources. Users create tags to mark resources such as: web pages, photos, videos, and podcasts. These tags are used to manage, categorize and summarize online content. This collaborative tagging system also uses these tags as a way to index information, facilitate searches and navigate resources. Folksonomy also includes a set of URLs that are used to identify resources that have been referred to by users of different websites. These systems also include category schemes that have the ability to organize tags at different levels of granularity.
Vander Wal identifies two types of folksonomy: broad and narrow. A broad folksonomy arises when multiple users can apply the same tag to an item, providing information about which tags are the most popular. A narrow folksonomy occurs when users, typically fewer in number and often including the item's creator, tag an item with tags that can each be applied only once. While both broad and narrow folksonomies enable the searchability of content by adding an associated word or phrase to an object, a broad folksonomy allows for sorting based on the popularity of each tag, as well as the tracking of emerging trends in tag usage and developing vocabularies.
An example of a broad folksonomy is del.icio.us, a website where users can tag any online resource they find relevant with their own personal tags. The photo-sharing website Flickr is an oft-cited example of a narrow folksonomy.
Folksonomy versus taxonomy
'Taxonomy' refers to a hierarchical categorization in which relatively well-defined classes are nested under broader categories. A folksonomy establishes categories (each tag is a category) without stipulating or necessarily deriving a hierarchical structure of parent-child relations among different tags. (Work has been done on techniques for deriving at least loose hierarchies from clusters of tags.)
Supporters of folksonomies claim that they are often preferable to taxonomies because folksonomies democratize the way information is organized, they are more useful to users because they reflect current ways of thinking about domains, and they express more information about domains. Critics claim that folksonomies are messy and thus harder to use, and can reflect transient trends that may misrepresent what is known about a field.
An empirical analysis of the complex dynamics of tagging systems, published in 2007, has shown that consensus around stable distributions and shared vocabularies does emerge, even in the absence of a central controlled vocabulary. For content to be searchable, it should be categorized and grouped. While this was believed to require commonly agreed on sets of content describing tags (much like keywords of a journal article), some research has found that in large folksonomies common structures also emerge on the level of categorizations.
Accordingly, it is possible to devise mathematical models of collaborative tagging that allow for translating from personal tag vocabularies (personomies) to the vocabulary shared by most users.
Folksonomy is unrelated to folk taxonomy, a cultural practice that has been widely documented in anthropological and folkloristic work. Folk taxonomies are culturally supplied, intergenerationally transmitted, and relatively stable classification systems that people in a given culture use to make sense of the entire world around them (not just the Internet).
The study of the structuring or classification of folksonomy is termed folksontology. This branch of ontology deals with the intersection between highly structured taxonomies or hierarchies and loosely structured folksonomy, asking what best features can be taken by both for a system of classification. The strength of flat-tagging schemes is their ability to relate one item to others like it. Folksonomy allows large disparate groups of users to collaboratively label massive, dynamic information systems. The strength of taxonomies are their browsability: users can easily start from more generalized knowledge and target their queries towards more specific and detailed knowledge. Folksonomy looks to categorize tags and thus create browsable spaces of information that are easy to maintain and expand.
Social tagging for knowledge acquisition
Social tagging for knowledge acquisition is the specific use of tagging for finding and re-finding specific content for an individual or group. Social tagging systems differ from traditional taxonomies in that they are community-based systems lacking the traditional hierarchy of taxonomies. Rather than a top-down approach, social tagging relies on users to create the folksonomy from the bottom up.
Common uses of social tagging for knowledge acquisition include personal development for individual use and collaborative projects. Social tagging is used for knowledge acquisition in secondary, post-secondary, and graduate education as well as personal and business research. The benefits of finding/re-finding source information are applicable to a wide spectrum of users. Tagged resources are located through search queries rather than searching through a more traditional file folder system. The social aspect of tagging also allows users to take advantage of metadata from thousands of other users.
Users choose individual tags for stored resources. These tags reflect personal associations, categories, and concepts. All of which are individual representations based on meaning and relevance to that individual. The tags, or keywords, are designated by users. Consequently, tags represent a user's associations corresponding to the resource. Commonly tagged resources include videos, photos, articles, websites, and email. Tags are beneficial for a couple of reasons. First, they help to structure and organize large amounts of digital resources in a manner that makes them easily accessible when users attempt to locate the resource at a later time. The second aspect is social in nature, that is to say that users may search for new resources and content based on the tags of other users. Even the act of browsing through common tags may lead to further resources for knowledge acquisition.
Tags that occur more frequently with specific resources are said to be more strongly connected. Furthermore, tags may be connected to each other. This may be seen in the frequency in which they co-occur. The more often they co-occur, the stronger the connection. Tag clouds are often utilized to visualize connectivity between resources and tags. Font size increases as the strength of association increases.
Tags show interconnections of concepts that were formerly unknown to a user. Therefore, a user's current cognitive constructs may be modified or augmented by the metadata information found in aggregated social tags. This process promotes knowledge acquisition through cognitive irritation and equilibration. This theoretical framework is known as the co-evolution model of individual and collective knowledge.
The co-evolution model focuses on cognitive conflict in which a learner's prior knowledge and the information received from the environment are dissimilar to some degree. When this incongruence occurs, the learner must work through a process of cognitive equilibration in order to make personal cognitive constructs and outside information congruent. According to the coevolution model, this may require the learner to modify existing constructs or simply add to them. The additional cognitive effort promotes information processing which in turn allows individual learning to occur.
Examples
Archive of Our Own: fan fiction archive
BibSonomy: social bookmarking and publication-sharing system
del.icio.us: public tagging service
Diigo: social bookmarking website
Flickr: shared photos
Instagram: online photo-sharing and social networking service
Many libraries' online catalogs
Mendeley: social reference management software
OpenStreetMap: map database
Pinterest: photosharing and publishing website
Steam: video game store
StumbleUpon: content discovery engine
Twitter hashtags
The World Wide Web Consortium's Annotea project with user-generated tags in 2002.
WordPress: blogging tool and Content Management System
Tumblr tags
See also
Autotagging
Blogosphere
Collective intelligence
Enterprise bookmarking
Faceted classification
Hierarchical clustering
Semantic annotation
Semantic similarity
Thesaurus
Weak ontology
Wiki
References
External links
Folksonomies as a tool for professional scientific databases
"The Three Orders": 2005 explanation of tagging and folksonomies (Archived version)
Vanderwal's definition of folksonomy
Vanderwal's take on Wikipedia's definition of folksonomy
Classroom Collaboration Using Social Bookmarking Service Diigo
Collective intelligence
Knowledge representation
Metadata
Semantic Web
Social bookmarking
Taxonomy
Web 2.0 neologisms
Sociology of knowledge
Information architecture
Crowdsourcing
|
Orojabad (, also Romanized as Orojābād; also known as Orujābād) is a village in Dadin Rural District, Jereh and Baladeh District, Kazerun County, Fars Province, Iran. At the 2006 census, its population was 44, in 9 families.
References
Populated places in Kazerun County
|
```python
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
#
# path_to_url
#
# Unless required by applicable law or agreed to in writing,
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# specific language governing permissions and limitations
"""Example code to do convolution."""
import sys
import pytest
import numpy as np
import tvm
from tvm import autotvm, te, topi
import tvm.topi.testing
from tvm.contrib import cudnn
from tvm.topi.nn.utils import get_pad_tuple
from tvm.topi.utils import get_const_tuple
from tvm.topi.nn.conv2d import _get_workload
from tvm.topi.x86.conv2d_avx_common import _fallback_schedule
import tvm.testing
# Module-level pytest parameters: every test in this file runs for both
# dtypes; the reference data is generated from a fixed seed for determinism.
dtype = tvm.testing.parameter("float16", "float32")
random_seed = tvm.testing.parameter(0)
@tvm.testing.fixture
def input_shape(batch, in_channel, in_size):
    """NCHW input tensor shape with square spatial dimensions."""
    return (batch, in_channel, in_size, in_size)
@tvm.testing.fixture
def weight_shape(num_filter, in_channel, kernel):
    """OIHW convolution weight shape with a square kernel."""
    return (num_filter, in_channel, kernel, kernel)
@tvm.testing.fixture
def bias_shape(num_filter):
    """Per-channel bias shape, broadcastable over the NCHW output."""
    return (num_filter, 1, 1)
@tvm.testing.fixture(cache_return_value=True)
def ref_data(
    random_seed,
    input_shape,
    weight_shape,
    bias_shape,
    dtype,
    stride,
    padding,
    dilation,
    add_bias,
    apply_relu,
):
    """Generate (input, weight, bias, expected-output) reference arrays.

    The random draws are seeded and ordered deterministically so the cached
    result is reproducible across parametrizations.
    """
    np.random.seed(random_seed)
    # scipy.signal.convolve2d does not support float16 data types, and
    # the python fallback is too slow for general use. Computing
    # ref_data in float32 will have fewer rounding errors than the TVM
    # float16 compute, but those vary based on schedule anyways.
    compute_dtype = "float32" if dtype == "float16" else dtype
    input_np = np.random.uniform(size=input_shape).astype(dtype)
    weight_np = np.random.uniform(size=weight_shape).astype(dtype)
    bias_np = np.random.uniform(size=bias_shape).astype(dtype)
    dilated_weight = tvm.topi.testing.dilate_python(weight_np, (1, 1, dilation, dilation))
    expected = tvm.topi.testing.conv2d_nchw_python(
        input_np.astype(compute_dtype), dilated_weight.astype(compute_dtype), stride, padding
    ).astype(dtype)
    if add_bias:
        expected = expected + bias_np
    if apply_relu:
        expected = np.maximum(expected, 0)
    return input_np, weight_np, bias_np, expected
class BaseConv2DTests:
    """Shared conv2d NCHW test harness; subclasses supply workload parameters."""

    # Defaults; subclasses may override via their own tvm.testing.parameter calls.
    add_bias = tvm.testing.parameter(False)
    apply_relu = tvm.testing.parameter(False)
    dilation = tvm.testing.parameter(1)
    batch = tvm.testing.parameter(1)

    def test_conv2d_nchw(
        self,
        target,
        dev,
        batch,
        in_channel,
        in_size,
        num_filter,
        kernel,
        stride,
        padding,
        dtype,
        ref_data,
        dilation,
        add_bias,
        apply_relu,
    ):
        # Compile conv2d for `target`, run it on `dev`, and compare against
        # the numpy reference produced by the ref_data fixture.
        target = tvm.target.Target(target)
        is_cudnn_target = target.kind.name == "cuda" and "cudnn" in target.attrs.get("libs", [])
        if target.kind.name == "vulkan" and dtype == "float16":
            if not target.attrs.get("supports_float16", False) or not target.attrs.get(
                "supports_16bit_buffer", False
            ):
                pytest.xfail("Vulkan device does not support float16")
        if (
            target.kind.name == "cuda"
            and dtype == "float16"
            and not tvm.contrib.nvcc.have_fp16(dev.compute_version)
        ):
            pytest.xfail("CUDA float16 intrinsics not available")
        pad_top, pad_left, pad_bottom, pad_right = get_pad_tuple(padding, (kernel, kernel))
        padding_sum = pad_top + pad_left + pad_bottom + pad_right
        has_asymmetric_padding = (pad_top != pad_bottom) or (pad_left != pad_right)
        if is_cudnn_target and has_asymmetric_padding:
            pytest.xfail("CuDNN does not support asymmetric padding")
        a_np, w_np, b_np, c_np = ref_data
        A = te.placeholder(a_np.shape, name="A", dtype=dtype)
        W = te.placeholder(w_np.shape, name="W", dtype=dtype)
        bias = te.placeholder(b_np.shape, name="bias", dtype=dtype)
        # Tolerance is dtype-dependent: exact for ints, tight for float32,
        # scaled by the accumulation length for float16.
        if "int" in dtype:
            tol = {"atol": 0, "rtol": 0}
        elif dtype == "float32":
            tol = {"rtol": 1e-4, "atol": 2e-4}
        elif dtype == "float16":
            # A summation in float16 with a single accumulator very
            # quickly runs into large rounding errors. At some point,
            # this tolerance should be schedule-dependent to avoid
            # false negatives.
            num_values_summed = in_channel * kernel * kernel
            gap_size = np.nextafter(c_np.max(), np.inf, dtype=c_np.dtype) - c_np.max()
            tol = {"rtol": 1e-3, "atol": num_values_summed * gap_size / 2}
        with autotvm.tophub.context(target):  # load tophub pre-tuned parameters
            if is_cudnn_target:
                fcompute, fschedule = topi.cuda.conv2d_cudnn, topi.cuda.schedule_conv2d_cudnn
            else:
                fcompute, fschedule = tvm.topi.testing.get_conv2d_nchw_implement(target)
            with target:
                if is_cudnn_target:
                    # The cuDNN compute takes groups and layout arguments explicitly.
                    C = fcompute(
                        A, W, (stride, stride), padding, (dilation, dilation), 1, "NCHW", dtype
                    )
                else:
                    C = fcompute(A, W, (stride, stride), padding, (dilation, dilation), dtype)
                if add_bias:
                    C = topi.add(C, bias)
                if apply_relu:
                    C = topi.nn.relu(C)
                s = fschedule([C])
            a = tvm.nd.array(a_np, dev)
            w = tvm.nd.array(w_np, dev)
            b = tvm.nd.array(b_np, dev)
            c = tvm.nd.array(np.zeros(get_const_tuple(C.shape), dtype=C.dtype), dev)
            func = tvm.build(
                s,
                [A, W, bias, C],
                target,
                name="conv2d_{}_{}_{}_{}_{}_{}_{}_{}_{}".format(
                    dtype,
                    batch,
                    in_channel,
                    in_size,
                    num_filter,
                    kernel,
                    stride,
                    padding_sum,
                    dilation,
                ),
            )
            func(a, w, b, c)
            tvm.testing.assert_allclose(c.numpy(), c_np, **tol)

    @tvm.testing.parametrize_targets("llvm")
    def test_workload_padding(
        self,
        target,
        input_shape,
        weight_shape,
        stride,
        padding,
        dilation,
        dtype,
        ref_data,
    ):
        # Verify the x86 fallback schedule's tile_ow factor matches the
        # actual output width from the reference computation.
        a_np, w_np, b_np, c_np = ref_data
        _, _, out_height, out_width = c_np.shape
        A = te.placeholder(input_shape, name="A", dtype=dtype)
        W = te.placeholder(weight_shape, name="W", dtype=dtype)
        with tvm.target.Target(target):
            wkl = _get_workload(A, W, (stride, stride), padding, dilation, dtype)
            # check if tile_ow candidates are the factors of the right output weight.
            cfg = autotvm.get_config()
            _fallback_schedule(cfg, wkl)
            ow_tile = np.prod(cfg["tile_ow"].size)
            tvm.testing.assert_allclose(ow_tile, out_width)
class TestResNet18Workloads(BaseConv2DTests):
    # Conv layer shapes drawn from ResNet-18:
    # (in_channel, in_size, num_filter, kernel, stride, padding)
    in_channel, in_size, num_filter, kernel, stride, padding = tvm.testing.parameters(
        (3, 224, 64, 7, 2, 3),
        (64, 56, 64, 3, 1, 1),
        (64, 56, 64, 1, 1, 0),
        (64, 56, 128, 3, 2, 1),
        (64, 56, 128, 1, 2, 0),
        (128, 28, 128, 3, 1, 1),
        (128, 28, 256, 3, 2, 1),
        (128, 28, 256, 1, 2, 0),
        (256, 14, 256, 3, 1, 1),
        (256, 14, 512, 3, 2, 1),
        (256, 14, 512, 1, 2, 0),
        (512, 7, 512, 3, 1, 1),
    )
class TestInceptionV3Workloads(BaseConv2DTests):
    # Conv layer shapes drawn from Inception-V3:
    # (in_channel, in_size, num_filter, kernel, stride, padding)
    in_channel, in_size, num_filter, kernel, stride, padding = tvm.testing.parameters(
        (3, 299, 32, 3, 2, 0),
        (32, 149, 32, 3, 1, 0),
        (32, 147, 64, 3, 1, 1),
        (64, 73, 80, 1, 1, 0),
        (80, 73, 192, 3, 1, 0),
        (192, 35, 64, 1, 1, 0),
        (192, 35, 48, 1, 1, 0),
        (48, 35, 64, 5, 1, 2),
        (64, 35, 96, 3, 1, 1),
        (96, 35, 96, 3, 1, 1),
        (192, 35, 32, 1, 1, 0),
        (256, 35, 64, 1, 1, 0),
        (256, 35, 48, 1, 1, 0),
        (288, 35, 64, 1, 1, 0),
        (288, 35, 48, 1, 1, 0),
        (288, 35, 384, 3, 2, 0),
        (96, 35, 96, 3, 2, 0),
        (768, 17, 192, 1, 1, 0),
        (768, 17, 128, 1, 1, 0),
        (128, 17, 128, 1, 1, 0),
        (128, 17, 192, 7, 1, 3),
        (128, 17, 128, 7, 1, 3),
        (128, 17, 192, 1, 1, 0),
        (768, 17, 160, 1, 1, 0),
        # disable these tests due to some bugs of llvm with nvptx
        # (160, 17, 160, 1, 1, 0),
        (160, 17, 192, 7, 1, 3),
        (160, 17, 160, 7, 1, 3),
        (160, 17, 192, 1, 1, 0),
        (192, 17, 192, 1, 1, 0),
        (192, 17, 192, 7, 1, 3),
        (192, 17, 320, 3, 2, 0),
        (192, 17, 192, 3, 2, 0),
        (1280, 8, 320, 1, 1, 0),
        (1280, 8, 384, 1, 1, 0),
        (384, 8, 384, 1, 1, 0),
        (384, 8, 384, 3, 1, 1),
        (1280, 8, 448, 1, 1, 0),
        (448, 8, 384, 3, 1, 1),
        (1280, 8, 192, 1, 1, 0),
        (2048, 8, 320, 1, 1, 0),
        (2048, 8, 384, 1, 1, 0),
        (2048, 8, 448, 1, 1, 0),
        (2048, 8, 192, 1, 1, 0),
        (1024, 19, 84, 3, 1, 1),
        (2048, 10, 126, 3, 1, 1),
        (512, 5, 126, 3, 1, 1),
        (256, 3, 126, 3, 1, 1),
    )
class TestWeirdWorkloads(BaseConv2DTests):
    # Degenerate / unusual shapes to stress edge cases:
    # (batch, in_channel, in_size, num_filter, kernel, stride, padding)
    batch, in_channel, in_size, num_filter, kernel, stride, padding = tvm.testing.parameters(
        (2, 2, 2, 2, 2, 2, 2),
        (3, 3, 3, 3, 3, 3, 3),
        (4, 4, 4, 4, 4, 4, 4),
        (5, 5, 5, 5, 5, 5, 5),
        (6, 6, 6, 6, 6, 6, 6),
        # disable these tests due to some bugs of llvm with nvptx
        # (1, 1, 1, 1, 1, 1, 1),
        # (2, 13, 71, 59, 3, 1, 1),
    )
class TestAsymmetricPadding(BaseConv2DTests):
    # Padding is given in mixed formats: 4-tuple (top, left, bottom, right),
    # 2-tuple, or the string modes "VALID"/"SAME".
    dilation = tvm.testing.parameter(1, 2)
    in_channel, in_size, num_filter, kernel, stride, padding = tvm.testing.parameters(
        (3, 35, 64, 7, 2, (0, 0, 1, 1)),
        (64, 8, 128, 3, 1, (3, 3, 2, 2)),
        (64, 8, 64, 1, 1, (1, 2, 2, 1)),
        (64, 17, 192, 1, 1, (1, 2)),
        (64, 8, 64, 3, 1, (3, 1)),
        (128, 8, 384, 3, 1, (0, 2)),
        (64, 35, 64, 3, 1, (1, 2)),
        (64, 8, 64, 1, 1, "VALID"),
        (388, 8, 64, 3, 1, "VALID"),
        (64, 10, 48, 3, 1, "VALID"),
        (512, 19, 64, 1, 1, "SAME"),
        (64, 5, 32, 2, 1, "SAME"),
        (64, 8, 64, 3, 1, "SAME"),
        (64, 8, 64, 3, 1, (1, 2, 2, 1)),
        (64, 8, 64, 5, 2, (1, 3)),
        (64, 8, 64, 3, 1, "VALID"),
        (64, 8, 64, 24, 1, "SAME"),
        (32, 35, 64, 7, 2, (0, 0, 2, 2)),
    )
class TestBatchSize(BaseConv2DTests):
    # Fixed conv shape; varies only the batch dimension.
    in_channel, in_size, num_filter, kernel, stride, padding = tvm.testing.parameters(
        (64, 56, 64, 3, 1, 1),
    )
    batch = tvm.testing.parameter(1, 4, 9)
class TestBiasRelu(BaseConv2DTests):
    # Cross-product of bias and relu fusion over several padding styles.
    apply_relu = tvm.testing.parameter(True, False, ids=["relu", "no_relu"])
    add_bias = tvm.testing.parameter(True, False, ids=["bias", "no_bias"])
    in_channel, in_size, num_filter, kernel, stride, padding = tvm.testing.parameters(
        (64, 56, 64, 3, 1, 1),
        (64, 8, 64, 3, 1, (1, 2, 2, 1)),
        (64, 8, 64, 5, 2, (1, 3)),
        (64, 8, 64, 3, 1, "VALID"),
        (64, 8, 64, 24, 1, "SAME"),
    )
# Entry point so the file can be run directly as well as via pytest.
if __name__ == "__main__":
    tvm.testing.main()
```
|
```java
/*
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
*
* path_to_url
*
* Unless required by applicable law or agreed to in writing, software
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package org.apache.shardingsphere.infra.expr.groovy;
import com.google.common.base.Strings;
import com.google.common.collect.Sets;
import groovy.lang.Closure;
import groovy.lang.GString;
import groovy.lang.GroovyShell;
import groovy.lang.Script;
import groovy.util.Expando;
import org.apache.shardingsphere.infra.expr.spi.InlineExpressionParser;
import org.apache.shardingsphere.infra.util.groovy.GroovyUtils;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
/**
* Groovy inline expression parser.
*/
/**
 * Groovy inline expression parser.
 *
 * <p>Evaluates inline expressions (e.g. {@code ds_${0..1}}) as Groovy GStrings and
 * expands embedded collection values into the cartesian product of flat strings.</p>
 */
public final class GroovyInlineExpressionParser implements InlineExpressionParser {
    
    private static final String INLINE_EXPRESSION_KEY = "inlineExpression";
    
    /**
     * Cache of parsed Groovy scripts keyed by expression text. The cache is unbounded.
     * NOTE(review): cached Script instances are shared across callers, and Script.run()
     * is not documented as thread-safe — confirm concurrent use is acceptable here
     * (pre-existing behavior, unchanged by this revision).
     */
    private static final Map<String, Script> SCRIPTS = new ConcurrentHashMap<>();
    
    private static final GroovyShell SHELL = new GroovyShell();
    
    private String inlineExpression;
    
    @Override
    public void init(final Properties props) {
        inlineExpression = props.getProperty(INLINE_EXPRESSION_KEY);
    }
    
    @Override
    public String handlePlaceHolder() {
        return handlePlaceHolder(inlineExpression);
    }
    
    /**
     * Replace all inline expression placeholders.
     *
     * @param inlineExpression inline expression with {@code $->}
     * @return result inline expression with {@code $}
     */
    private String handlePlaceHolder(final String inlineExpression) {
        // Literal replacement is equivalent to the previous
        // replaceAll("\\$->\\{", "\\$\\{") and avoids regex compilation;
        // String.replace is a no-op when the placeholder is absent.
        return inlineExpression.replace("$->{", "${");
    }
    
    /**
     * Split and Evaluate inline expression. This function will replace all inline expression placeholders.
     *
     * @return result inline expression with {@code $}
     */
    @Override
    public List<String> splitAndEvaluate() {
        return Strings.isNullOrEmpty(inlineExpression) ? Collections.emptyList() : flatten(evaluate(GroovyUtils.split(handlePlaceHolder(inlineExpression))));
    }
    
    /**
     * Turn inline expression into Groovy Closure. This function will replace all inline expression placeholders.
     * For compatibility reasons, it does not check whether the unit of the input parameter map is null.
     *
     * @return The result of the Groovy Closure pattern.
     */
    @Override
    public String evaluateWithArgs(final Map<String, Comparable<?>> map) {
        // Rehydrate onto a fresh Expando with DELEGATE_ONLY so the closure can
        // resolve only the properties explicitly supplied by the caller's map.
        Closure<?> result = ((Closure<?>) evaluate("{it -> \"" + handlePlaceHolder(inlineExpression) + "\"}")).rehydrate(new Expando(), null, null);
        result.setResolveStrategy(Closure.DELEGATE_ONLY);
        map.forEach(result::setProperty);
        return result.call().toString();
    }
    
    private List<Object> evaluate(final List<String> inlineExpressions) {
        List<Object> result = new ArrayList<>(inlineExpressions.size());
        for (String each : inlineExpressions) {
            StringBuilder expression = new StringBuilder(handlePlaceHolder(each));
            // Wrap bare segments in quotes so Groovy parses them as GStrings.
            if (!each.startsWith("\"")) {
                expression.insert(0, '"');
            }
            if (!each.endsWith("\"")) {
                expression.append('"');
            }
            result.add(evaluate(expression.toString()));
        }
        return result;
    }
    
    private Object evaluate(final String expression) {
        // computeIfAbsent makes check-then-parse atomic; the previous
        // containsKey/get/put sequence could parse the same expression more
        // than once under concurrent access.
        return SCRIPTS.computeIfAbsent(expression, SHELL::parse).run();
    }
    
    private List<String> flatten(final List<Object> segments) {
        List<String> result = new ArrayList<>();
        for (Object each : segments) {
            // Only GStrings carry embedded values needing cartesian expansion;
            // plain strings pass through unchanged.
            if (each instanceof GString) {
                result.addAll(assemblyCartesianSegments((GString) each));
            } else {
                result.add(each.toString());
            }
        }
        return result;
    }
    
    private List<String> assemblyCartesianSegments(final GString segment) {
        Set<List<String>> cartesianValues = getCartesianValues(segment);
        List<String> result = new ArrayList<>(cartesianValues.size());
        for (List<String> each : cartesianValues) {
            result.add(assemblySegment(each, segment));
        }
        return result;
    }
    
    @SuppressWarnings("unchecked")
    private Set<List<String>> getCartesianValues(final GString segment) {
        // Build one value-set per embedded GString value (null values skipped),
        // then take the cartesian product across positions.
        List<Set<String>> result = new ArrayList<>(segment.getValues().length);
        for (Object each : segment.getValues()) {
            if (null == each) {
                continue;
            }
            if (each instanceof Collection) {
                result.add(((Collection<Object>) each).stream().map(Object::toString).collect(Collectors.toCollection(LinkedHashSet::new)));
            } else {
                result.add(Sets.newHashSet(each.toString()));
            }
        }
        return Sets.cartesianProduct(result);
    }
    
    private String assemblySegment(final List<String> cartesianValue, final GString segment) {
        // Interleave literal string parts with one chosen value per position.
        StringBuilder result = new StringBuilder();
        for (int i = 0; i < segment.getStrings().length; i++) {
            result.append(segment.getStrings()[i]);
            if (i < cartesianValue.size()) {
                result.append(cartesianValue.get(i));
            }
        }
        return result.toString();
    }
    
    @Override
    public String getType() {
        return "GROOVY";
    }
}
```
|
```c++
//
//
// path_to_url
//
// Unless required by applicable law or agreed to in writing, software
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// VulkanHpp Samples : ArrayProxy
// Compile test on using vk::ArrayProxy
#undef VULKAN_HPP_DISPATCH_LOADER_DYNAMIC
#define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 0
#include <iostream>
#include <vulkan/vulkan.hpp>
// Compile/run test: vk::Instance handle comparison against nullptr before and
// after instance creation. Exits non-zero on any Vulkan or unexpected error.
int main( int /*argc*/, char ** /*argv*/ )
{
  try
  {
    // test operator==() with vk-handle and nullptr
    vk::Instance instance;
    assert( instance == nullptr );
    assert( nullptr == instance );
    // A freshly created instance must compare unequal to nullptr.
    instance = vk::createInstance( {} );
    assert( instance != nullptr );
    assert( nullptr != instance );
    instance.destroy();
  }
  catch ( vk::SystemError const & err )
  {
    // Vulkan-Hpp wraps failing VkResult codes in vk::SystemError.
    std::cout << "vk::SystemError: " << err.what() << std::endl;
    exit( -1 );
  }
  catch ( ... )
  {
    std::cout << "unknown error\n";
    exit( -1 );
  }
  return 0;
}
```
|
```javascript
// Start offset of the node's span in the original source text.
function locStart(node) {
  const { start } = node.sourceSpan;
  return start.offset;
}
// End offset of the node's span in the original source text.
function locEnd(node) {
  const { end } = node.sourceSpan;
  return end.offset;
}
export { locEnd, locStart };
```
|
Dmitry Vadimovich Zelenin () (born 27 November 1962, in Moscow) is a Russian businessman and politician. During 2003-2011 he was governor of Tver Oblast, Russia.
Biography
Zelenin graduated from the Moscow Institute of Physics and Technology (Phystech) in 1986 and worked in the electronics industry until 1990 when he became commercial director of Resurs Bank and chief executive of this bank in 1995.
Zelenin was one of the top managers of Norilsk Nickel having joined in 1996 as first deputy general director. This company is one of the largest nickel producers in the world.
Dmitry Zelenin was elected governor of Tver Oblast in December 2003, bypassing incumbent Vladimir Platov, MVD officer Igor Zubov and communist Tatyana Astrakhankina. He was appointed for a second term by president Vladimir Putin in July 2007. In 2010, Zelenin caused a scandal when he posted photos of a salad containing an earthworm on his Twitter account, which was allegedly served to German President Christian Wulff. Sergei Prikhodko, foreign policy adviser to president Dmitry Medvedev, then asked him to resign. Zelenin resigned as governor in June 2011. The main reason was not the Kremlin incident, but the result of the Legislative Assembly election, where United Russia party suffered its second-worst electoral performance in the 2011 regional elections.
Zelenin is married with two daughters and a son.
References
External links
Personal website of Dmitry Zelenin
Moscow Institute of Physics and Technology alumni
1962 births
Governors of Tver Oblast
Living people
Politicians from Moscow
|
```java
/*
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
*
* path_to_url
*
* Unless required by applicable law or agreed to in writing, software
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package org.apache.arrow.vector.complex.impl;
import org.apache.arrow.vector.complex.AbstractContainerVector;
import org.apache.arrow.vector.complex.reader.FieldReader;
import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter;
import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter;
import org.apache.arrow.vector.types.Types.MinorType;
/** An implementation of {@link AbstractFieldReader} for lists vectors. */
@SuppressWarnings("unused")
public class SingleListReaderImpl extends AbstractFieldReader {

  // Name of the child field to resolve inside the container.
  private final String name;

  // Container vector holding the list child; its reader is resolved lazily.
  private final AbstractContainerVector container;

  // Lazily initialized delegate; null until reader() is first invoked.
  private FieldReader reader;

  /**
   * Constructs a new instance.
   *
   * @param name The name of field to read in container.
   * @param container The container holding a list.
   */
  public SingleListReaderImpl(String name, AbstractContainerVector container) {
    super();
    this.name = name;
    this.container = container;
  }

  @Override
  public void setPosition(int index) {
    super.setPosition(index);
    // Keep the delegate (if already resolved) in sync with this reader's position.
    if (reader != null) {
      reader.setPosition(index);
    }
  }

  @Override
  public Object readObject() {
    // FIX: resolve the delegate through reader() instead of touching the field
    // directly. Previously this threw a NullPointerException whenever
    // readObject() was called before reader() had initialized the delegate.
    return reader().readObject();
  }

  @Override
  public FieldReader reader() {
    // Lazy initialization: look up the child reader on first use and align it
    // with the current index.
    if (reader == null) {
      reader = container.getChild(name).getReader();
      setPosition(idx());
    }
    return reader;
  }

  @Override
  public MinorType getMinorType() {
    return MinorType.LIST;
  }

  @Override
  public boolean isSet() {
    // Always reports not-set; this reader does not track validity itself.
    return false;
  }

  @Override
  public void copyAsValue(ListWriter writer) {
    throw new UnsupportedOperationException(
        "Generic list copying not yet supported. Please resolve to typed list.");
  }

  @Override
  public void copyAsField(String name, StructWriter writer) {
    throw new UnsupportedOperationException(
        "Generic list copying not yet supported. Please resolve to typed list.");
  }
}
```
|
```javascript
/**
*
*
*
* path_to_url
*
* Unless required by applicable law or agreed to in writing, software
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
*/
import {
each, keyBy, map, max, min
} from 'lodash';
// Place one node on the orbit ring. Even node counts get a quarter-turn base
// rotation, odd counts five-sixths of a half-turn, so nodes avoid the axes.
function updatePosition (node, nodeCount, nodeIndex, orbitSize) {
  const baseRotation = nodeCount % 2 === 0 ? Math.PI / 4 : (5 / 6) * Math.PI;
  const angle = ((2 * Math.PI * nodeIndex) / nodeCount) + baseRotation;
  const radius = orbitSize / 2;
  node.updatePosition({
    x: radius * Math.cos(angle),
    y: radius * Math.sin(angle)
  });
}
// Arrange every non-entry node evenly around the orbit ring, keeping the
// entry node (and the degenerate single-node case) pinned at the origin.
function positionNodes (nodes, orbitSize) {
  const ringSlots = Object.keys(nodes).length - 1;
  const nodesByName = keyBy(nodes, 'name');
  // Sort names so the ring assignment is stable across renders.
  const orderedNames = map(nodes, 'name');
  orderedNames.sort();
  let slot = 0;
  each(orderedNames, (nodeName) => {
    const node = nodesByName[nodeName];
    if (!node.isEntryNode() && ringSlots > 0) {
      slot++;
      updatePosition(node, ringSlots, slot, orbitSize);
    } else {
      node.updatePosition({ x: 0, y: 0 });
    }
  });
}
// Shift every node downward by a common offset derived from the vertical
// extent of the layout.
function centerNodesVertically (nodes) {
  // Center the nodes vertically on the canvas
  // NOTE(review): the offset is |(|maxY| - |minY|)| / 2 rather than the usual
  // -(maxY + minY) / 2 midpoint shift — presumably deliberate for this
  // renderer's coordinate system; confirm before "simplifying".
  const yPositions = map(nodes, n => n.position.y);
  const yOffset = Math.abs(Math.abs(max(yPositions)) - Math.abs(min(yPositions))) / 2;
  each(nodes, (n) => {
    n.position.y += yOffset;
  });
}
// Shrink the orbit diameter if the laid-out nodes (plus node padding)
// overflow the available space; never grows it beyond the input orbitSize.
function recalculateOrbitSize (nodes, orbitSize, nodeSize) {
  const ys = map(nodes, n => n.position.y);
  const verticalSpread = max(ys) - min(ys);
  // 2.25x node size leaves headroom above and below the outermost nodes.
  const requiredHeight = (nodeSize * 2.25) + verticalSpread;
  const overflow = Math.max(requiredHeight - orbitSize, 0);
  return orbitSize - overflow;
}
// Layout strategy that places the entry node at the center of the canvas and
// distributes all other nodes on a single surrounding orbit sized to fit.
class RingCenterLayout {
  // Runs the layout over graph.nodes within the given {width, height}
  // dimensions, then invokes layoutComplete() once positions are assigned.
  run (graph, dimensions, layoutComplete) {
    const maxDimension = Math.min(dimensions.width, dimensions.height);
    let orbitSize = maxDimension;
    const nodeSize = min(map(graph.nodes, 'size'));
    if (Object.keys(graph.nodes).length > 0) {
      // Position the nodes based on the current orbitSize
      positionNodes(graph.nodes, orbitSize);
      // Now that the nodes are positioned, adjust orbit size accordingly so the nodes all fit
      orbitSize = recalculateOrbitSize(graph.nodes, maxDimension, nodeSize);
      // Position again with the proper orbitSize
      positionNodes(graph.nodes, orbitSize);
      centerNodesVertically(graph.nodes);
    }
    layoutComplete();
  }
}
export default RingCenterLayout;
```
|
Jean Delvaux (died 2 April 1595) was a Belgian Roman Catholic monk and an alleged practitioner of witchcraft.
In 1595, a scandal occurred among the monks at an abbey at Stavelot in the Ardennes. The monk Jean Delvaux claimed that, at the age of fifteen, he met a man in the woods who promised him riches if he would follow him. Delvaux complied and received two marks on his shoulders. The man told Delvaux to become a monk at Stavelot, and promised that he would become an abbot. Delvaux did indeed become a monk, and discovered many warlocks among the priests and monks. He said that there were nine convents of warlocks in the Ardennes, who met during the night with demons to eat, dance and engage in sex.
Delvaux was arrested on the order of the Prince-Bishop of Liège, and an investigation was begun. On the way to Stavelot, the carriage of the commission broke down, and Delvaux claimed that a demon had destroyed it; Delvaux was accused of being insane. Until 10 January 1597, lay and clerical people were questioned in connection with these accusations. Delvaux was tortured and handed over to the secular authorities. He was found guilty, under Exodus 22:18, and sentenced to death. The remorseful Delvaux begged for mercy, but he was executed by decapitation, and not by burning.
References
Literature
Procès pour sorcellerie en Ardenne, Walthère Jamar, Chevron dans le passé
1595 deaths
Belgian Christian monks
People executed for witchcraft
Year of birth unknown
Place of birth missing
|
```kotlin
package io.github.detekt.tooling.dsl
import io.github.detekt.tooling.api.spec.ReportsSpec
import java.nio.file.Path
/** DSL builder that accumulates report definitions and materializes a [ReportsSpec]. */
@ProcessingModelDsl
class ReportsSpecBuilder : Builder<ReportsSpec> {

    var reports: MutableCollection<ReportsSpec.Report> = mutableListOf()

    /** Registers one report from a lazily evaluated (type, path) pair. */
    fun report(init: () -> Pair<String, Path>) {
        reports += Report(init())
    }

    override fun build(): ReportsSpec = ReportsModel(reports)
}
// Immutable value-object implementation backing the built ReportsSpec.
private data class ReportsModel(override val reports: Collection<ReportsSpec.Report>) : ReportsSpec
// Report entry; the secondary constructor adapts the DSL's (type, path) pair.
private data class Report(override val type: String, override val path: Path) : ReportsSpec.Report {
constructor(values: Pair<String, Path>) : this(values.first, values.second)
}
```
|
Proton Electronic Industrial Co., Ltd. () is a Taiwanese company founded in 1964. The company's line of business includes the manufacturing of radio and television consumer electronics equipment.
History
Proton was founded in 1964 as an importer of hi-fi equipment. They moved into manufacturing in 1970 and OEM manufacturing in 1975. In the same year, the parent company Fulet Electronic Industrial Company Ltd. was founded to manage the increasingly complex operations. In 1985 the company employed more than 900 people and had an R&D department of 85.
In 2003 Proton Electronic formed a joint venture with the Chinese TV manufacturer Tsinghua Tongfang (清華同方) to gain exposure to the vast Chinese television market. Chinese growth at the time was driven by a government program which aimed to have all sets capable of receiving digital broadcasts by 2010.
References
External links
Electronics companies of Taiwan
|
Glyphipterix forsterella is a moth of the family Glyphipterigidae. It is found from most of Europe (except most of the Balkan Peninsula, Portugal and Ukraine), east to Japan.
The wingspan is . The forewings are rather broad, dark bronzy - fuscous ; five white streaks from posterior half of costa, second becoming silvery-metallic and reaching beyond middle ; a broader slightly curved oblique white mark from middle of dorsum, reaching half across wing a short white mark before tornus ; two or three silvery-metallic dots about tornus; a black apical spot enclosing a silvery- metallic dot ; dark line of cilia indented below apex ; a dark hook above apex. Hindwings are grey.
Adults are on wing from May to June and feed on the flowers of the larval host plant. There is one generation per year.
The larvae feed on the seeds of Carex species, including Carex vulpina and Carex remota. The species overwinters in the larval stage within the spikes of the host plant.
Subspecies
Glyphipterix forsterella forsterella
Glyphipterix forsterella albimaculella von Heinemann, 1876 (Central Europe)
Glyphipterix forsterella nivicaput Diakonoff, 1979 (Japan: Honshu)
References
Moths described in 1781
Glyphipterigidae
Moths of Asia
Moths of Europe
|
```javascript
Drawing with HTML5 `Canvas` API
Notifications API
Vibration API
Geolocation
Drag and Drop API
```
|
```shell
Interactively unstage changes
Specify a commit by its ancestry
Specify a range of commits using double dot syntax
Remember the results of previous hunk conflicts
Debug using binary search
```
|
```html
<!DOCTYPE HTML>
<html lang="zh-CN">
<head>
<meta charset="UTF-8">
<title>Archives: 2024/2 | Here. There.</title>
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=3, minimum-scale=1">
<meta name="author" content="">
<meta name="description" content="html5, js, angularjs, jQuery, ">
<link rel="alternate" href="/atom.xml" title="Here. There." type="application/atom+xml">
<link rel="icon" href="/img/favicon.ico">
<link rel="apple-touch-icon" href="/img/pacman.jpg">
<link rel="apple-touch-icon-precomposed" href="/img/pacman.jpg">
<link rel="stylesheet" href="/css/style.css">
<script type="text/javascript">
// Baidu Analytics bootstrap: initialize the command queue, then asynchronously
// inject the tracker script ahead of the first <script> tag on the page.
var _hmt = _hmt || [];
(function() {
var hm = document.createElement("script");
hm.src = "//hm.baidu.com/hm.js?3d902de4a19cf2bf179534ffd2dd7b7f";
var s = document.getElementsByTagName("script")[0];
s.parentNode.insertBefore(hm, s);
})();
</script>
<meta name="generator" content="Hexo 6.3.0"></head>
<body>
<header>
<div>
<div id="imglogo">
<a href="/"><img src="/img/sun.png" alt="Here. There." title="Here. There."/></a>
</div>
<div id="textlogo">
<h1 class="site-name"><a href="/" title="Here. There.">Here. There.</a></h1>
<h2 class="blog-motto">Love ice cream. Love sunshine. Love life. Love the world. Love myself. Love you.</h2>
</div>
<div class="navbar"><a class="navbutton navmobile" href="#" title="">
</a></div>
<nav class="animated">
<ul>
<li><a href="/"></a></li>
<li><a target="_blank" rel="noopener" href="path_to_url"></a></li>
<li><a href="/archives"></a></li>
<li><a href="/categories"></a></li>
<li><a href="path_to_url"></a></li>
<li><a href="/about"></a></li>
</ul>
</nav>
</div>
</header>
<div id="container">
<div class="archive-title" >
<h2 class="archive-icon">2024/2</h2>
<div class="archiveslist archive-float clearfix">
<ul class="archive-list"><li class="archive-list-item"><a class="archive-list-link" href="/archives/2024/08/"> 2024</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2024/07/"> 2024</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2024/06/"> 2024</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2024/05/"> 2024</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2024/04/"> 2024</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2024/03/"> 2024</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2024/02/"> 2024</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2024/01/"> 2024</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2023/12/"> 2023</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2023/11/"> 2023</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2023/10/"> 2023</a><span class="archive-list-count">2</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2023/09/"> 2023</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2023/08/"> 2023</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2023/07/"> 2023</a><span class="archive-list-count">2</span></li><li 
class="archive-list-item"><a class="archive-list-link" href="/archives/2023/06/"> 2023</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2023/05/"> 2023</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2023/04/"> 2023</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2023/03/"> 2023</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2023/01/"> 2023</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2022/12/"> 2022</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2022/11/"> 2022</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2022/10/"> 2022</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2022/09/"> 2022</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2022/08/"> 2022</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2022/07/"> 2022</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2022/06/"> 2022</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2022/05/"> 2022</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2022/04/"> 2022</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a 
class="archive-list-link" href="/archives/2022/03/"> 2022</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2022/02/"> 2022</a><span class="archive-list-count">2</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2022/01/"> 2022</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2021/12/"> 2021</a><span class="archive-list-count">3</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2021/11/"> 2021</a><span class="archive-list-count">4</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2021/10/"> 2021</a><span class="archive-list-count">3</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2021/09/"> 2021</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2021/08/"> 2021</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2021/07/"> 2021</a><span class="archive-list-count">2</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2021/06/"> 2021</a><span class="archive-list-count">2</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2021/05/"> 2021</a><span class="archive-list-count">3</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2021/04/"> 2021</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2021/03/"> 2021</a><span class="archive-list-count">2</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2021/02/"> 2021</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" 
href="/archives/2021/01/"> 2021</a><span class="archive-list-count">2</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2020/11/"> 2020</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2020/10/"> 2020</a><span class="archive-list-count">2</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2020/08/"> 2020</a><span class="archive-list-count">5</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2020/07/"> 2020</a><span class="archive-list-count">3</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2020/06/"> 2020</a><span class="archive-list-count">3</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2020/04/"> 2020</a><span class="archive-list-count">4</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2020/03/"> 2020</a><span class="archive-list-count">7</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2020/02/"> 2020</a><span class="archive-list-count">2</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2019/12/"> 2019</a><span class="archive-list-count">3</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2019/11/"> 2019</a><span class="archive-list-count">4</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2019/10/"> 2019</a><span class="archive-list-count">2</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2019/09/"> 2019</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2019/08/"> 2019</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2019/07/"> 
2019</a><span class="archive-list-count">4</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2019/06/"> 2019</a><span class="archive-list-count">3</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2019/05/"> 2019</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2019/04/"> 2019</a><span class="archive-list-count">2</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2019/03/"> 2019</a><span class="archive-list-count">2</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2019/02/"> 2019</a><span class="archive-list-count">2</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2019/01/"> 2019</a><span class="archive-list-count">3</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2018/12/"> 2018</a><span class="archive-list-count">4</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2018/11/"> 2018</a><span class="archive-list-count">4</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2018/10/"> 2018</a><span class="archive-list-count">1</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2018/09/"> 2018</a><span class="archive-list-count">3</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2018/08/"> 2018</a><span class="archive-list-count">3</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2018/07/"> 2018</a><span class="archive-list-count">3</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2018/06/"> 2018</a><span class="archive-list-count">3</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2018/05/"> 2018</a><span 
class="archive-list-count">5</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2018/04/"> 2018</a><span class="archive-list-count">4</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2018/03/"> 2018</a><span class="archive-list-count">7</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2018/02/"> 2018</a><span class="archive-list-count">4</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2018/01/"> 2018</a><span class="archive-list-count">9</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2017/12/"> 2017</a><span class="archive-list-count">5</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2017/11/"> 2017</a><span class="archive-list-count">3</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2017/10/"> 2017</a><span class="archive-list-count">5</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2017/09/"> 2017</a><span class="archive-list-count">6</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2017/08/"> 2017</a><span class="archive-list-count">11</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2017/07/"> 2017</a><span class="archive-list-count">9</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2017/06/"> 2017</a><span class="archive-list-count">10</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2017/05/"> 2017</a><span class="archive-list-count">15</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2017/04/"> 2017</a><span class="archive-list-count">7</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2017/03/"> 2017</a><span 
class="archive-list-count">10</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2017/02/"> 2017</a><span class="archive-list-count">41</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2017/01/"> 2017</a><span class="archive-list-count">6</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2016/12/"> 2016</a><span class="archive-list-count">7</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2016/11/"> 2016</a><span class="archive-list-count">9</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2016/10/"> 2016</a><span class="archive-list-count">5</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2016/09/"> 2016</a><span class="archive-list-count">7</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2016/08/"> 2016</a><span class="archive-list-count">9</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2016/07/"> 2016</a><span class="archive-list-count">14</span></li><li class="archive-list-item"><a class="archive-list-link" href="/archives/2016/06/"> 2016</a><span class="archive-list-count">9</span></li></ul>
</div>
</div>
<div id="main" class="all-list-box clearfix"><!--class: archive-part-->
<div id="archive-page" class=""><!--class: all-list-box-->
<section class="post" itemscope itemprop="blogPost">
<a href="/2024/02/21/front-end-performance-about-performanceobserver/" title=" PerformanceObserver" itemprop="url">
<h1 itemprop="name"> PerformanceObserver</h1>
<p itemprop="description" >PerformanceObserver
API
PerformanceObserverPerformanceObse</p>
<time datetime="2024-02-21T14:12:23.000Z" itemprop="datePublished">2024-02-21</time>
</a>
</section>
</div>
</div>
</div>
<footer><div id="footer" >
<section class="info">
<p> ^_^ </p>
</section>
<p class="copyright">Powered by <a href="path_to_url" target="_blank" title="hexo">hexo</a> and Theme by <a href="path_to_url" target="_blank" title="Pacman">Pacman</a> 2024
<a href="path_to_url" target="_blank" title=""></a>
</p>
</div>
</footer>
<script src="/js/jquery-2.1.0.min.js"></script>
<script type="text/javascript">
// Theme behavior wiring: mobile nav toggle, responsive aside show/hide,
// and a scroll-following "open aside" button.
$(document).ready(function(){
// Toggle the mobile navigation menu.
$('.navbar').click(function(){
$('header nav').toggleClass('shownav');
});
var myWidth = 0;
// Read the current viewport width into myWidth (cross-browser).
function getSize(){
if( typeof( window.innerWidth ) == 'number' ) {
myWidth = window.innerWidth;
} else if( document.documentElement && document.documentElement.clientWidth) {
myWidth = document.documentElement.clientWidth;
};
};
// Cached jQuery handles: main column, aside panel, close/open buttons.
var m = $('#main'),
a = $('#asidepart'),
c = $('.closeaside'),
o = $('.openaside');
// On resize: desktop widths hide the mobile nav; narrow widths restore the
// aside panel to its default visible state.
$(window).resize(function(){
getSize();
if (myWidth >= 1024) {
$('header nav').removeClass('shownav');
}else
{
m.removeClass('moveMain');
a.css('display', 'block').removeClass('fadeOut');
o.css('display', 'none');
}
});
// Collapse the aside and widen the main column.
c.click(function(){
a.addClass('fadeOut').css('display', 'none');
o.css('display', 'block').addClass('fadeIn');
m.addClass('moveMain');
});
// Reopen the aside and restore the main column width.
o.click(function(){
o.css('display', 'none').removeClass('beforeFadeIn');
a.css('display', 'block').removeClass('fadeOut').addClass('fadeIn');
m.removeClass('moveMain');
});
// Keep the open-aside button pinned near the top while scrolling (min 80px).
$(window).scroll(function(){
o.css("top",Math.max(80,260-$(this).scrollTop()));
});
});
</script>
</body>
</html>
```
|
Wallington, is a toponymic surname derived from a common English place name. The name "Wallington" derives from the Anglo Saxon "Waletone" meaning "village of the Britons".
People with the surname
George Wallington, American musician
James Wallington, American boxer
Mark Wallington (writer), British author
Nehemiah Wallington, English Puritan
|
Gao Qi may refer to:
Northern Qi (550–577), also known as Gao Qi, short-lived Chinese dynasty
Gao Qi (Ming dynasty) (1336–1374), early Ming dynasty writer
Gao Qi (musician) (born 1968), Chinese rock musician
See also
Gaoqi (disambiguation)
|
```javascript
/*eslint-env node*/
'use strict';
process.on('unhandledRejection', up => {
throw up;
});
const fs = require('fs');
const path = require('path');
const liveEditor = require('@gfxfundamentals/live-editor');
const liveEditorPath = path.dirname(require.resolve('@gfxfundamentals/live-editor'));
module.exports = function(grunt) {
require('load-grunt-tasks')(grunt);
// Filename filters used by the copy/build tasks below.
const s_ignoreRE = /\.(md|py|sh|enc)$/i;
// True for files that are not markdown/python/shell/encrypted sources.
function noMds(filename) {
return !s_ignoreRE.test(filename);
}
const s_isMdRE = /\.md$/i;
// True only for markdown files (lesson sources).
function mdsOnly(filename) {
return s_isMdRE.test(filename);
}
// True for regular files; false for directories.
function notFolder(filename) {
return !fs.statSync(filename).isDirectory();
}
// Combined filter: non-markdown regular files only.
function noMdsNoFolders(filename) {
return noMds(filename) && notFolder(filename);
}
grunt.initConfig({
eslint: {
lib: {
src: [
'webgl/resources/webgl-utils.js',
'webgl/resources/lessons-helper.js',
'webgl/resources/flattened-primitives.js',
'webgl/resources/2d-math.js',
'webgl/resources/3d-math.js',
'build/js/*.js',
],
},
examples: {
src: [
'webgl/*.html',
'!webgl/webgl-qna-*.html',
// 'webgl/lessons/*.md',
],
},
diagram: {
src: [
'webgl/lessons/resources/webgl-state-diagram/*.js',
],
},
},
jsdoc: {
docs: {
src: [
'webgl/resources/2d-math.js',
'webgl/resources/3d-math.js',
'webgl/resources/webgl-utils.js',
'docs.md',
],
options: {
destination: 'out/docs',
configure: 'build/conf/jsdoc.conf.json',
template: './node_modules/minami',
},
},
},
copy: {
main: {
files: [
{ expand: false, src: '*', dest: 'out/', filter: noMdsNoFolders, },
{ expand: true, cwd: `${liveEditor.monacoEditor}/`, src: 'min/**', dest: 'out/monaco-editor/', nonull: true, },
{ expand: true, cwd: `${liveEditorPath}/src/`, src: '**', dest: 'out/webgl/resources/', nonull: true, },
{ expand: true, src: 'webgl/**', dest: 'out/', filter: noMds, },
{ expand: true, src: '3rdparty/**', dest: 'out/', },
],
},
},
clean: [
'out/**/*',
],
buildlesson: {
main: {
files: [],
},
},
watch: {
main: {
files: [
'webgl/**',
'3rdparty/**',
'node_modules/@gfxfundamentals/live-editor/src/**',
],
tasks: ['copy'],
options: {
spawn: false,
},
},
lessons: {
files: [
'webgl/lessons/**/webgl*.md',
],
tasks: ['buildlesson'],
options: {
spawn: false,
},
},
},
});
let changedFiles = {};
const onChange = grunt.util._.debounce(function() {
grunt.config('copy.main.files', Object.keys(changedFiles).filter(noMds).map((file) => {
const copy = {
src: file,
dest: 'out/',
};
if (file.indexOf('live-editor') >= 0) {
copy.cwd = `${path.dirname(file)}/`;
copy.src = path.basename(file);
copy.expand = true;
copy.dest = 'out/webgl/resources/';
}
return copy;
}));
grunt.config('buildlesson.main.files', Object.keys(changedFiles).filter(mdsOnly).map((file) => {
return {
src: file,
};
}));
changedFiles = {};
}, 200);
grunt.event.on('watch', function(action, filepath) {
changedFiles[filepath] = action;
onChange();
});
// Shared configuration consumed by the lesson-builder tasks below.
const buildSettings = {
  outDir: 'out',
  // FIX: this string literal was unterminated (missing closing quote and
  // trailing comma), which made the entire Gruntfile a syntax error.
  baseUrl: 'path_to_url',
  rootFolder: 'webgl',
  lessonGrep: 'webgl*.md',
  siteName: 'WebGL2Fundamentals',
  siteThumbnail: 'webgl2fundamentals.jpg', // in rootFolder/lessons/resources
  templatePath: 'build/templates',
  owner: 'gfxfundamentals',
  repo: 'webgl2-fundamentals',
  thumbnailOptions: {
    thumbnailBackground: 'webgl2fundamentals.jpg',
    text: [
      {
        font: 'bold 100px lesson-font',
        verticalSpacing: 100,
        offset: [100, 120],
        textAlign: 'left',
        shadowOffset: [15, 15],
        strokeWidth: 15,
        textWrapWidth: 1000,
      },
    ],
  },
};
// just the hackiest way to get this working.
// Rebuilds only the lesson files listed in this task's `files` config
// (populated dynamically via grunt.config by the watch change handler).
grunt.registerMultiTask('buildlesson', 'build a lesson', function() {
  // Collect the unique set of source filenames across all file groups.
  const filenames = new Set();
  this.files.forEach((files) => {
    files.src.forEach((filename) => {
      filenames.add(filename);
    });
  });
  const buildStuff = require('@gfxfundamentals/lesson-builder');
  const settings = Object.assign({}, buildSettings, {
    filenames,
  });
  // Async grunt task: signal completion when the builder promise settles,
  // whether it resolved or rejected.
  const finish = this.async();
  buildStuff(settings).finally(finish);
});
// Build every lesson from scratch (no per-file filter).
grunt.registerTask('buildlessons', function() {
  const buildStuff = require('@gfxfundamentals/lesson-builder');
  const finish = this.async();
  buildStuff(buildSettings).finally(finish);
});
// Composite tasks: full build, build-then-watch, lint hook, and the default pipeline.
grunt.registerTask('build', ['clean', 'copy', 'buildlessons']);
grunt.registerTask('buildwatch', ['build', 'watch']);
grunt.registerTask('pre-push', ['eslint']);
grunt.registerTask('default', ['eslint', 'build', 'jsdoc']);
};
```
|
The Professional Footballers' Association Women's Young Player of the Year (commonly referred to as PFA Young Player of the Year) is an annual award given to the player who is voted to have been the best of the year in English women's football. The award has been presented since the 2013–14 season and the winner is chosen by a vote amongst the members of the players' trade union, the Professional Footballers' Association (PFA).
The current holder is Lauren James of Chelsea.
Winners
The women's award has been presented since 2014 while the men's PFA Players' Player of the Year has been awarded since 1974. The table below also indicates where the winning player also won one or more of the other major "player of the year" awards in English women's football, namely the PFA Women's Players' Player of the Year award.
Breakdown of winners
By country
By club
See also
List of sports awards honoring women
References
External links
English women's football trophies and awards
Awards established in 2014
2014 establishments in England
Women's association football trophies and awards
|
Countess Wear is a district within the city of Exeter, Devon, England. It lies about two miles south-east of the city centre, on the north bank of the estuary of the River Exe. Historically an estate known as Weare, part of the manor of Topsham, was in this area. From the late 13th century, the construction of weirs in the River Exe by the Countess, and later, the Earls of Devon damaged the prosperity of Exeter to the benefit of Topsham which was downstream of the obstructions, and was owned by the Earls.
The bridges over the river and the adjacent Exeter Ship Canal were for many years a traffic bottleneck, until the completion of the last section of the M5 motorway, further downstream, in 1977.
History
The manor of Topsham was granted by King Henry I to Richard de Redvers and became part of his feudal barony of Plympton. The estate, or sub-manor of Weare was part of this. The present manor house was built in Georgian style by Sir John Duckworth, 1st Baronet in about 1804. It is now the club house of Exeter Golf and Country Club.
The weir was commonly known as Countess Wear as early as the fourteenth century: it is named after a weir that Isabella de Fortibus, Countess of Devon is said to have erected in the river hereabouts in the late 13th century. The details of the weir's construction are uncertain: a source of 1290 states that the countess had it built in 1284 and thereby damaged the salmon fishing and prevented boats from reaching Exeter; but a later source claims that her weir was built before 1272, leaving a thirty-foot gap in the centre through which boats could pass, until it was blocked between 1307 and 1377 by her cousin Hugh de Courtenay, 9th Earl of Devon and his son, Hugh de Courtenay, 2nd/10th Earl of Devon. The blocking up in 1307 was recorded in Exeter City Council records, and noted as being as a result of a conflict between de Courtenay and then-Mayor of the city Roger Beynim over whether fish in the market should be given to the Earl or the cathedral.
The weirs built by the Earls of Devon across the river prevented ships reaching Exeter, thereby forcing merchants to land goods at their port of Topsham, which therefore prospered. Despite several petitions to the king by the people of Exeter, the weirs remained until 1538 when Henry Courtenay, 1st Marquess of Exeter was attainted which resulted in all his possessions reverting to the Crown. In 1540, an Act of Parliament was passed to remove the obstructions, but it was found to be impossible to restore the navigation, and work was soon started to build the Exeter Canal to bypass the blocked section of the river.
Countess Isabella also constructed a corn mill in the area in 1284. It was rebuilt as a paper mill in 1658, destroyed by fire in the early 19th century and restored and in continued operation until 1885. The remains of the building are still standing.
During World War II, Countess Wear was the location of a US Navy base, and the bridge over the canal was used for rehearsing a glider attack on the Pegasus and Horsa Bridges in Normandy by the Oxfordshire and Buckinghamshire Light Infantry. A plaque to commemorate this event was installed on the bridge in 1994. After the war, accommodation at the navy base was converted into temporary housing for people in Exeter whose homes had been damaged or destroyed.
Transport
Countess Wear lends its name to a nearby set of bridges across the River Exe and the Exeter Canal. The first bridge was built in 1774 and was originally tolled, charging 1 penny for foot traffic and 1 shilling for a coach and six horses. It originally had six arches; in 1842 the two central arches were replaced by a 60 ft arch to allow boats to pass through.
In 1935, the bridges were reconstructed for motor traffic as part of a bypass around Exeter, which had become increasingly congested as a bottleneck for holiday traffic heading to southwest England. The river bridge was widened to allow two-way traffic with a pair of footpaths. This bridge was Grade II listed in 2004. The canal was crossed by a new electric swing bridge. The new bridges were opened by the Minister of Transport, Leslie Burgin, on 22 February 1938 at a total cost of £230,000.
A bascule bridge was built alongside the swing bridge to increase traffic flow in 1972, and although this greatly improved traffic flow, the bridges were still inadequate for modern-day traffic requirements, leading to the parallel M5 motorway being constructed downstream. The replacement motorway was opened by the Prime Minister, James Callaghan in May 1977.
At the centre of the area is the Countess Wear roundabout, where the old Exeter Bypass meets the crossroads for the Topsham Road.
Education
There are three schools in the area:
Countess Wear Community School
Southbrook School
The West of England School for the Partially Sighted
Community buildings
There is one church in the area:
St Luke's Church (1837–38), Countess Wear Road.
The village hall was rebuilt in 2016. The previous hall was built in 1922 by local men returning from the war, on land given for that purpose by Lady Granger.
References
Citations
Sources
Pole, Sir William (died 1635), Collections Towards a Description of the County of Devon, Sir John-William de la Pole (ed.), London, 1791.
Risdon, Tristram (died 1640), Survey of Devon. With considerable additions. London, 1811.
Vivian, Lt.Col. J.L., (Ed.) The Visitations of the County of Devon: Comprising the Heralds' Visitations of 1531, 1564 & 1620. Exeter, 1895.
Areas of Exeter
|
```objective-c
// Compress/PPM/PPMDDecoder.h
#ifndef __COMPRESS_PPMD_DECODER_H
#define __COMPRESS_PPMD_DECODER_H
#include "../../../Common/MyCom.h"
#include "../../ICoder.h"
#include "../../Common/OutBuffer.h"
#include "../RangeCoder/RangeCoder.h"
#include "PPMDDecode.h"
namespace NCompress {
namespace NPPMD {
// PPMd decoder exposing the 7-Zip ICompressCoder interface.
class CDecoder :
  public ICompressCoder,
  public ICompressSetDecoderProperties2,
#ifndef NO_READ_FROM_CODER
  // When enabled, the decoder can also be pulled from directly as a stream.
  public ICompressSetInStream,
  public ICompressSetOutStreamSize,
  public ISequentialInStream,
#endif
  public CMyUnknownImp
{
  CRangeDecoder _rangeDecoder;   // range-coder input state
  COutBuffer _outStream;         // buffered output sink
  CDecodeInfo _info;             // PPMd model state
  Byte _order;                   // PPMd model order (set via SetDecoderProperties2)
  UInt32 _usedMemorySize;        // model memory budget (set via SetDecoderProperties2)
  int _remainLen;                // bytes remaining / decoder state between Read() calls
  UInt64 _outSize;               // expected output size; valid only when _outSizeDefined
  bool _outSizeDefined;
  UInt64 _processedSize;         // bytes produced so far

  HRESULT CodeSpec(UInt32 num, Byte *memStream);
public:
#ifndef NO_READ_FROM_CODER
  MY_UNKNOWN_IMP4(
      ICompressSetDecoderProperties2,
      ICompressSetInStream,
      ICompressSetOutStreamSize,
      ISequentialInStream)
#else
  MY_UNKNOWN_IMP1(
      ICompressSetDecoderProperties2)
#endif

  // Release both the input and output streams.
  void ReleaseStreams()
  {
    ReleaseInStream();
    _outStream.ReleaseStream();
  }

  // Flush any buffered output to the underlying stream.
  HRESULT Flush() { return _outStream.Flush(); }

  STDMETHOD(CodeReal)(ISequentialInStream *inStream,
      ISequentialOutStream *outStream,
      const UInt64 *inSize, const UInt64 *outSize,
      ICompressProgressInfo *progress);
  STDMETHOD(Code)(ISequentialInStream *inStream,
      ISequentialOutStream *outStream, const UInt64 *inSize, const UInt64 *outSize,
      ICompressProgressInfo *progress);
  STDMETHOD(SetDecoderProperties2)(const Byte *data, UInt32 size);
  STDMETHOD(SetInStream)(ISequentialInStream *inStream);
  STDMETHOD(ReleaseInStream)();
  STDMETHOD(SetOutStreamSize)(const UInt64 *outSize);
#ifndef NO_READ_FROM_CODER
  STDMETHOD(Read)(void *data, UInt32 size, UInt32 *processedSize);
#endif

  CDecoder(): _outSizeDefined(false) {}
};
}}
#endif
```
|
```xml
/*
* @license Apache-2.0
*
*
*
* path_to_url
*
* Unless required by applicable law or agreed to in writing, software
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
// TypeScript Version: 4.1

/**
* Maximum signed 16-bit integer.
*
* ## Notes
*
* -   The value is `2**15 - 1` (i.e., `32767`).
*
* @example
* var max = INT16_MAX;
* // returns 32767
*/
declare const INT16_MAX: number;


// EXPORTS //

export = INT16_MAX;
```
|
```gradle
apply plugin: 'com.github.jamorham.android.replace.token.preprocessor'

// Token replacements applied at build time to migrate legacy mixed-case
// package names to the current all-lowercase package layout.
replaceAndroidTokenPreprocessorSettings {

    // Adjust package names (fully-qualified form used in source files)
    replace 'com.eveningoutpost.dexdrip.Services': "com.eveningoutpost.dexdrip.services"
    replace 'com.eveningoutpost.dexdrip.UtilityModels': "com.eveningoutpost.dexdrip.utilitymodels"
    replace 'com.eveningoutpost.dexdrip.Models': "com.eveningoutpost.dexdrip.models"
    replace 'com.eveningoutpost.dexdrip.Tables': "com.eveningoutpost.dexdrip.tables"
    replace 'com.eveningoutpost.dexdrip.G5Model': "com.eveningoutpost.dexdrip.g5model"
    replace 'com.eveningoutpost.dexdrip.GlucoseMeter': "com.eveningoutpost.dexdrip.glucosemeter"
    //replace 'com.eveningoutpost.dexdrip.localeTasker': "com.eveningoutpost.dexdrip.localetasker"
    replace 'com.eveningoutpost.dexdrip.ImportedLibraries': "com.eveningoutpost.dexdrip.importedlibraries"

    // For AndroidManifest shorthand format (relative `".Package.Class"` references)
    replace '".UtilityModels.': '".utilitymodels.'
    //replace '".localeTasker.': '".localetasker.' // existing tasker implementations use the package name
    replace '".GlucoseMeter.': '".glucosemeter.'
    replace '".Services.': '".services.'
    replace '".Models.': '".models.'
    replace '".Tables.': '".tables.'
    replace '".ImportedLibraries.': '".importedlibraries.'
}
```
|
R v Dersch, [1993] 3 S.C.R. 768 is a leading Supreme Court of Canada decision on the right against unreasonable search and seizure under section 8 of the Canadian Charter of Rights and Freedoms. The Court held that sharing of personal information of patients, such as blood test results, between health care professionals or law enforcement violates section 8 of the Charter and should be excluded under section 24(2).
Background
In the evening of October 7, 1987, Wilfred Dersch was driving near Duncan, British Columbia. His car swerved into oncoming traffic and caused an accident, killing the other driver. When the police arrived Dersch refused the breathalyzer. The police noticed that his eyes were glazed and he appeared intoxicated. He was taken to the hospital where the doctor tried to give him an intravenous line which he resisted. Eventually he passed out and the intravenous was given and a blood sample was taken. The doctor tested the blood for alcohol content as it was necessary for treatment.
The police requested a blood sample from Dersch who refused. The police then asked for the medical report which the doctor gave them. It revealed that he was intoxicated at the time of treatment. Later, the police obtained a warrant to seize the blood sample.
At trial the judge found that there was no violation of section 8 as the blood sample was taken for medically necessary reasons. The decision was upheld in the British Columbia Court of Appeal.
The issues before the Supreme Court were:
whether the doctors and hospital were subject to the Charter.
whether the police's conduct violated section 8 and if so whether it was saved under section 1.
if a violation is found whether the evidence could be excluded under section 24(2).
The Court found that the doctor and hospital were not subject to the Charter. It was also found that the police's conduct was in violation of section 8, was not justified under section 1, and that the evidence should be excluded under section 24(2).
See also
List of Supreme Court of Canada cases (Lamer Court)
External links
case summary at mapleleafweb.com
Dersch
Dersch
Dersch
Dersch
|
```cmake
# Find the ROCTX library
#
# Input to the module:
#  ROCTRACER_ROOT_DIR - The root directory containing ROCTracer
#
# Output of the module:
#  ROCTX_INCLUDE_DIR - include directory for roctx.h
#  ROC::ROCTX - Imported library target for the ROCTX library

# Platform-dependent library naming conventions.
if(UNIX)
  set(LIB_PREFIX "lib")
  set(LIB_EXT ".a")
  set(DLL_EXT ".so")
  if(APPLE)
    set(DLL_EXT ".dylib")
  endif()
  set(LINK_PREFIX "-l")
  set(LINK_SUFFIX "")
else()
  set(LIB_PREFIX "")
  set(LIB_EXT ".lib")
  set(DLL_EXT "_dll.lib")
  set(LINK_PREFIX "")
  set(LINK_SUFFIX ".lib")
endif()

# Locate roctx.h (ROCm installs place it under <prefix>/include/roctracer).
find_path(ROCTX_INCLUDE_DIR roctx.h
  PATHS
    /usr/include
    /usr/local/include
    /opt/rocm/include
    ${ROCTRACER_ROOT_DIR}/include
  PATH_SUFFIXES
    roctracer)
if(ROCTX_INCLUDE_DIR)
  message(STATUS "Found roctx header ${ROCTX_INCLUDE_DIR}/roctx.h")
else()
  message(FATAL_ERROR "Unable to find roctx.h")
endif()

find_library(roctx_lib
  NAMES
    ${LIB_PREFIX}roctx64${DLL_EXT}
  PATHS
    /usr/lib
    /opt/rocm/lib
    ${ROCTRACER_ROOT_DIR}/lib/)
if(roctx_lib)
  message(STATUS "Found roctx shared library ${roctx_lib}")
  add_library(ROC::ROCTX SHARED IMPORTED)
  # Quote the path so installs under directories containing spaces work.
  set_target_properties(ROC::ROCTX PROPERTIES
    IMPORTED_LINK_INTERFACE_LANGUAGES "C"
    IMPORTED_LOCATION "${roctx_lib}")
else()
  message(FATAL_ERROR "Unable to locate roctx library")
endif()

# NOTE(review): target_include_directories on an IMPORTED target requires
# CMake >= 3.11 — confirm the project's cmake_minimum_required.
target_include_directories(ROC::ROCTX
  INTERFACE
    "${ROCTX_INCLUDE_DIR}")
```
|
Eddie Komboïgo is a politician from Burkina Faso who serves as the President of the Congress for Democracy and Progress, the main opposition party in Burkina Faso. He was also a presidential candidate in the 2020 Burkinabè general election, in which he finished second with 15 percent of the vote.
References
Living people
Year of birth missing (living people)
|
"Temporarily Yours" is a single by American country music artist Jeanne Pruett. Released in February 1980, and was the third single from the album Encore!. The song reached #5 on the Billboard Hot Country Singles chart, becoming her biggest hit single on that chart since 1973's "Satin Sheets".
Charts
Weekly charts
Year-end charts
References
1980 singles
Jeanne Pruett songs
Songs written by Sonny Throckmorton
1980 songs
|
```xml
import onetime from 'onetime';
import features from '../feature-manager.js';
import {linkifiedURLClass, shortenLink} from '../github-helpers/dom-formatters.js';
import observe from '../helpers/selector-observer.js';
/* This feature is currently so broad that it's not de-inited via signal, it's just run once for all pageloads #5889 */
function init(): void {
	// Shorten every link inside comment bodies that hasn't already been linkified.
	observe(`.comment-body a[href]:not(.${linkifiedURLClass})`, shortenLink);
}

void features.add(import.meta.url, {
	// `onetime` guards against double registration, since this runs for all pageloads.
	init: onetime(init),
});
/*
## Test URLs
path_to_url
path_to_url
*/
```
|
```javascript
import yaml from 'yaml';
export default {
  hooks: {
    parsers: {
      'yaml-parser': {
        // A custom parser will only run against filenames that match the pattern.
        // This pattern will match any file with the .yaml extension.
        // This allows you to mix different types of files in your token source.
        pattern: /\.yaml$/,
        // the parse function takes a single argument, which is an object with
        // 2 attributes: contents which is a string of the file contents, and
        // filePath which is the path of the file.
        // The function is expected to return a plain object.
        parser: ({ contents }) => yaml.parse(contents),
      },
    },
  },
  // Opt in to the parser registered above, by name.
  parsers: ['yaml-parser'],
  source: [`tokens/**/*.yaml`],
  platforms: {
    css: {
      transformGroup: 'css',
      buildPath: 'build/',
      files: [
        {
          // Emit all tokens as CSS custom properties in build/variables.css.
          destination: 'variables.css',
          format: 'css/variables',
        },
      ],
    },
  },
};
```
|
Gretchen Mary Rehberg is the ninth bishop of the Episcopal Diocese of Spokane.
Early life and education
Rehberg was born in Pullman, Washington on July 7, 1964. Her mother, Margaret Rehberg (nee: Boe) was a homemaker and her father, Wallace Rehberg, was a professor at Washington State University. The family had a small farm outside of town.
After graduating from Pullman High School in 1982, Rehberg attended Sewanee: The University of the South.
Rehberg's first doctorate was a Ph.D. in chemistry. She received her Master of Divinity degree from General Theological Seminary in 2002. She later received her Doctor of Ministry degree from Wesley Theological Seminary.
Career
Rehberg was a professor of organic chemistry at Bucknell University until the late 1990s, when she left to enter theological school. For over 20 years, Rehberg volunteered as an EMT and firefighter. She was in New York City on September 11, 2001 and provided first aid to survivors of the World Trade Center attack. Later, she volunteered with a hazmat team doing cleanup at Ground Zero.
Rehberg was rector of the Episcopal Church of the Nativity in Lewiston, Idaho for 11 years prior to her consecration as bishop. She had also held several diocesan leadership positions. On October 18, 2016, she was elected as the ninth bishop of the Episcopal Diocese of Spokane, and she was consecrated at the Episcopal Cathedral of St. John the Evangelist in Spokane on March 18, 2017.
Rehberg has pulmonary fibrosis, possibly caused by conditions she experienced in her work in New York after the World Trade Center attack. In 2022, she suffered a flare up that significantly reduced her lung capacity and as of May 2023, is awaiting a double-lung transplant while she continues to travel her diocese in her work as bishop.
References
External links
Gallery from ordination ceremony
Living people
Women Protestant religious leaders
People from Pullman, Washington
General Theological Seminary alumni
Wesley Theological Seminary alumni
Bucknell University faculty
21st-century American Episcopalians
Year of birth missing (living people)
Women Anglican bishops
Episcopal bishops of Spokane
|
```objective-c
// -*- mode: c++ -*-
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Original author: Ted Mielczarek <ted.mielczarek@gmail.com>
// elf_symbols_to_module.h: Exposes ELFSymbolsToModule, a function
// for reading ELF symbol tables and inserting exported symbol names
// into a google_breakpad::Module as Extern definitions.
#ifndef BREAKPAD_COMMON_LINUX_ELF_SYMBOLS_TO_MODULE_H_
#define BREAKPAD_COMMON_LINUX_ELF_SYMBOLS_TO_MODULE_H_
#include <stddef.h>
#include <stdint.h>
namespace google_breakpad {

class Module;

// Read the ELF symbol table in `symtab_section` (of `symtab_size` bytes),
// resolving names through `string_section`, and insert the exported symbol
// names into `module` as Extern definitions.  `big_endian` selects the byte
// order of the section data; `value_size` is the size of symbol values
// (presumably 4 for ELF32 and 8 for ELF64 — confirm at the call site).
// Returns true on success.
bool ELFSymbolsToModule(const uint8_t *symtab_section,
                        size_t symtab_size,
                        const uint8_t *string_section,
                        size_t string_size,
                        const bool big_endian,
                        size_t value_size,
                        Module *module);

}  // namespace google_breakpad
```
|
```java
package com.airbnb.epoxy;
import android.view.View;
import com.airbnb.epoxy.EpoxyModel.SpanSizeOverrideCallback;
import com.airbnb.epoxy.integrationtest.BuildConfig;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.Config;
import static org.junit.Assert.assertEquals;
@RunWith(RobolectricTestRunner.class)
public class EpoxyModelIntegrationTest {

  /** Model whose span size is fixed by overriding {@code getSpanSize} directly. */
  static class ModelWithSpanCount extends EpoxyModel<View> {

    @Override
    protected int getDefaultLayout() {
      return 0;
    }

    @Override
    public int getSpanSize(int totalSpanCount, int position, int itemCount) {
      return 6;
    }
  }

  @Test
  public void modelReturnsSpanCount() {
    // spanSize(...) should delegate to the overridden getSpanSize.
    ModelWithSpanCount fixedSpanModel = new ModelWithSpanCount();
    assertEquals(6, fixedSpanModel.spanSize(0, 0, 0));
  }

  /** Model with no getSpanSize override; the span comes from a callback instead. */
  static class ModelWithSpanCountCallback extends EpoxyModel<View> {

    @Override
    protected int getDefaultLayout() {
      return 0;
    }
  }

  @Test
  public void modelReturnsSpanCountFromCallback() {
    ModelWithSpanCountCallback callbackModel = new ModelWithSpanCountCallback();
    SpanSizeOverrideCallback sevenSpans = new SpanSizeOverrideCallback() {
      @Override
      public int getSpanSize(int totalSpanCount, int position, int itemCount) {
        return 7;
      }
    };
    callbackModel.spanSizeOverride(sevenSpans);
    assertEquals(7, callbackModel.spanSize(0, 0, 0));
  }
}
```
|
```python
# refined api Python2
import tensorflow as tf
from sklearn.metrics import confusion_matrix
import numpy as np
class Network():
def __init__(self, train_batch_size, test_batch_size, pooling_scale,
             dropout_rate, base_learning_rate, decay_rate,
             optimizeMethod='adam', save_path='model/default.ckpt'):
    """
    Small CNN wrapper around the TF1 graph/session API.

    @train_batch_size / @test_batch_size: chunk sizes used by the data iterators
    @pooling_scale: kernel size (and stride) of the max-pool layers
    @dropout_rate: value passed as the 2nd argument of tf.nn.dropout in define_model
    @base_learning_rate / @decay_rate: exponential learning-rate decay schedule
    @optimizeMethod: one of 'gradient', 'momentum', 'adam'
    @save_path: checkpoint location used by train()/test()
    """
    self.optimizeMethod = optimizeMethod
    self.dropout_rate = dropout_rate
    self.base_learning_rate = base_learning_rate
    self.decay_rate = decay_rate
    self.train_batch_size = train_batch_size
    self.test_batch_size = test_batch_size

    # Hyper Parameters: layer configs accumulated by add_conv()/add_fc()
    self.conv_config = []  # list of dict
    self.fc_config = []  # list of dict
    self.conv_weights = []
    self.conv_biases = []
    self.fc_weights = []
    self.fc_biases = []
    self.pooling_scale = pooling_scale
    self.pooling_stride = pooling_scale  # stride deliberately equals kernel size

    # Graph Related: input placeholders, created later in define_inputs()
    self.tf_train_samples = None
    self.tf_train_labels = None
    self.tf_test_samples = None
    self.tf_test_labels = None

    # TensorBoard summary writer and merged-summary handles
    self.writer = None
    self.merged = None
    self.train_summaries = []
    self.test_summaries = []

    # save: checkpoint saver is created in define_model()
    self.saver = None
    self.save_path = save_path
def add_conv(self, *, patch_size, in_depth, out_depth, activation='relu', pooling=False, name):
    """Register a convolutional layer and create its variables.

    This only records the layer configuration in self.conv_config and
    allocates the weight/bias variables; the actual conv ops are built
    later in define_model().
    """
    layer = {
        'patch_size': patch_size,
        'in_depth': in_depth,
        'out_depth': out_depth,
        'activation': activation,
        'pooling': pooling,
        'name': name,
    }
    self.conv_config.append(layer)
    with tf.name_scope(name):
        kernel = tf.Variable(
            tf.truncated_normal([patch_size, patch_size, in_depth, out_depth], stddev=0.1),
            name=name + '_weights')
        bias = tf.Variable(tf.constant(0.1, shape=[out_depth]), name=name + '_biases')
        self.conv_weights.append(kernel)
        self.conv_biases.append(bias)
def add_fc(self, *, in_num_nodes, out_num_nodes, activation='relu', name):
    """Register a fully-connected layer and create its variables.

    Records the layer configuration in self.fc_config, allocates the
    weight/bias variables, and attaches histogram summaries for TensorBoard.
    """
    layer = {
        'in_num_nodes': in_num_nodes,
        'out_num_nodes': out_num_nodes,
        'activation': activation,
        'name': name,
    }
    self.fc_config.append(layer)
    with tf.name_scope(name):
        w = tf.Variable(tf.truncated_normal([in_num_nodes, out_num_nodes], stddev=0.1))
        b = tf.Variable(tf.constant(0.1, shape=[out_num_nodes]))
        self.fc_weights.append(w)
        self.fc_biases.append(b)
        self.train_summaries.append(tf.summary.histogram(str(len(self.fc_weights)) + '_weights', w))
        self.train_summaries.append(tf.summary.histogram(str(len(self.fc_biases)) + '_biases', b))
def apply_regularization(self, _lambda):
    """Return the L2 penalty over all fully-connected weights and biases, scaled by _lambda."""
    # L2 regularization for the fully connected parameters only
    # (convolutional parameters are intentionally not penalized).
    penalty = 0.0
    for w, b in zip(self.fc_weights, self.fc_biases):
        penalty += tf.nn.l2_loss(w) + tf.nn.l2_loss(b)
    # 1e5
    return _lambda * penalty
# should make the definition as an exposed API, instead of implemented in the function
def define_inputs(self, *, train_samples_shape, train_labels_shape, test_samples_shape):
    """Create the graph's float32 input placeholders for training and test data."""
    with tf.name_scope('inputs'):
        self.tf_train_samples = tf.placeholder(tf.float32, shape=train_samples_shape, name='tf_train_samples')
        self.tf_train_labels = tf.placeholder(tf.float32, shape=train_labels_shape, name='tf_train_labels')
        self.tf_test_samples = tf.placeholder(tf.float32, shape=test_samples_shape, name='tf_test_samples')
def define_model(self):
    """Build the full TF1 graph: conv/fc forward pass, loss, optimizer,
    train/test/single-sample prediction ops, merged summaries, and saver.

    Must be called after add_conv()/add_fc()/define_inputs().
    """
    def model(data_flow, train=True):
        """
        Forward pass shared by train/test/single-input graphs.

        @data_flow: original inputs
        @train: when False, also attach filter-map image summaries for TensorBoard
        @return: logits
        """
        # Define Convolutional Layers
        for i, (weights, biases, config) in enumerate(zip(self.conv_weights, self.conv_biases, self.conv_config)):
            with tf.name_scope(config['name'] + '_model'):
                with tf.name_scope('convolution'):
                    # default 1,1,1,1 stride and SAME padding
                    data_flow = tf.nn.conv2d(data_flow, filter=weights, strides=[1, 1, 1, 1], padding='SAME')
                    data_flow = data_flow + biases
                    if not train:
                        # display_size assumes 32x32 inputs halved by each pooling pair
                        # — NOTE(review): confirm against the actual input shape.
                        self.visualize_filter_map(data_flow, how_many=config['out_depth'],
                                                  display_size=32 // (i // 2 + 1), name=config['name'] + '_conv')
                if config['activation'] == 'relu':
                    data_flow = tf.nn.relu(data_flow)
                    if not train:
                        self.visualize_filter_map(data_flow, how_many=config['out_depth'],
                                                  display_size=32 // (i // 2 + 1), name=config['name'] + '_relu')
                else:
                    raise Exception('Activation Func can only be Relu right now. You passed', config['activation'])
                if config['pooling']:
                    data_flow = tf.nn.max_pool(
                        data_flow,
                        ksize=[1, self.pooling_scale, self.pooling_scale, 1],
                        strides=[1, self.pooling_stride, self.pooling_stride, 1],
                        padding='SAME')
                    if not train:
                        self.visualize_filter_map(data_flow, how_many=config['out_depth'],
                                                  display_size=32 // (i // 2 + 1) // 2,
                                                  name=config['name'] + '_pooling')
        # Define Fully Connected Layers
        for i, (weights, biases, config) in enumerate(zip(self.fc_weights, self.fc_biases, self.fc_config)):
            if i == 0:
                # Flatten the conv feature maps before the first fc layer.
                shape = data_flow.get_shape().as_list()
                data_flow = tf.reshape(data_flow, [shape[0], shape[1] * shape[2] * shape[3]])
            with tf.name_scope(config['name'] + 'model'):
                ### Dropout: training only, applied before the final fc layer
                if train and i == len(self.fc_weights) - 1:
                    data_flow = tf.nn.dropout(data_flow, self.dropout_rate, seed=4926)
                ###
                data_flow = tf.matmul(data_flow, weights) + biases
                if config['activation'] == 'relu':
                    data_flow = tf.nn.relu(data_flow)
                elif config['activation'] is None:
                    pass
                else:
                    raise Exception('Activation Func can only be Relu or None right now. You passed',
                                    config['activation'])
        return data_flow

    # Training computation.
    logits = model(self.tf_train_samples)
    with tf.name_scope('loss'):
        self.loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=logits, labels=self.tf_train_labels))
        self.loss += self.apply_regularization(_lambda=5e-4)
        self.train_summaries.append(tf.summary.scalar('Loss', self.loss))

    # learning rate decay
    global_step = tf.Variable(0)
    learning_rate = tf.train.exponential_decay(
        learning_rate=self.base_learning_rate,
        global_step=global_step * self.train_batch_size,
        decay_steps=100,
        decay_rate=self.decay_rate,
        staircase=True
    )

    # Optimizer.
    with tf.name_scope('optimizer'):
        if (self.optimizeMethod == 'gradient'):
            self.optimizer = tf.train \
                .GradientDescentOptimizer(learning_rate) \
                .minimize(self.loss)
        elif (self.optimizeMethod == 'momentum'):
            self.optimizer = tf.train \
                .MomentumOptimizer(learning_rate, 0.5) \
                .minimize(self.loss)
        elif (self.optimizeMethod == 'adam'):
            self.optimizer = tf.train \
                .AdamOptimizer(learning_rate) \
                .minimize(self.loss)

    # Predictions for the training, validation, and test data.
    with tf.name_scope('train'):
        self.train_prediction = tf.nn.softmax(logits, name='train_prediction')
        tf.add_to_collection("prediction", self.train_prediction)
    with tf.name_scope('test'):
        self.test_prediction = tf.nn.softmax(model(self.tf_test_samples, train=False), name='test_prediction')
        tf.add_to_collection("prediction", self.test_prediction)

    # Single-sample inference path (fixed 1x32x32x1 input).
    single_shape = (1, 32, 32, 1)
    single_input = tf.placeholder(tf.float32, shape=single_shape, name='single_input')
    self.single_prediction = tf.nn.softmax(model(single_input, train=False), name='single_prediction')
    tf.add_to_collection("prediction", self.single_prediction)

    self.merged_train_summary = tf.summary.merge(self.train_summaries)
    self.merged_test_summary = tf.summary.merge(self.test_summaries)

    # Graph: saver over every variable, used by train()/test()
    self.saver = tf.train.Saver(tf.all_variables())
def run(self, train_samples, train_labels, test_samples, test_labels, *, train_data_iterator, iteration_steps,
        test_data_iterator):
    """
    Train and then evaluate the model inside a single Session.

    :data_iterator: a function that yields chuck of data as
        (step_index, samples, labels) tuples.
    """
    self.writer = tf.summary.FileWriter('./board', tf.get_default_graph())
    with tf.Session(graph=tf.get_default_graph()) as session:
        tf.initialize_all_variables().run()

        ### Training phase
        print('Start Training')
        # batch 1000
        for i, samples, labels in train_data_iterator(train_samples, train_labels, iteration_steps=iteration_steps,
                                                      chunkSize=self.train_batch_size):
            _, l, predictions, summary = session.run(
                [self.optimizer, self.loss, self.train_prediction, self.merged_train_summary],
                feed_dict={self.tf_train_samples: samples, self.tf_train_labels: labels}
            )
            self.writer.add_summary(summary, i)
            # labels is True Labels
            accuracy, _ = self.accuracy(predictions, labels)
            if i % 50 == 0:
                print('Minibatch loss at step %d: %f' % (i, l))
                print('Minibatch accuracy: %.1f%%' % accuracy)
        ###

        ### Test phase: accumulate per-chunk accuracy and confusion matrices
        accuracies = []
        confusionMatrices = []
        for i, samples, labels in test_data_iterator(test_samples, test_labels, chunkSize=self.test_batch_size):
            result, summary = session.run(
                [self.test_prediction, self.merged_test_summary],
                feed_dict={self.tf_test_samples: samples}
            )
            self.writer.add_summary(summary, i)
            accuracy, cm = self.accuracy(result, labels, need_confusion_matrix=True)
            accuracies.append(accuracy)
            confusionMatrices.append(cm)
            print('Test Accuracy: %.1f%%' % accuracy)
        print(' Average Accuracy:', np.average(accuracies))
        print('Standard Deviation:', np.std(accuracies))
        self.print_confusion_matrix(np.add.reduce(confusionMatrices))
        ###
def train(self, train_samples, train_labels, *, data_iterator, iteration_steps):
    """
    Train the model and save a checkpoint to self.save_path.

    :train_samples / train_labels: full training set, fed in chunks
    :data_iterator: a function that yields (step, samples, labels) chunks
    :iteration_steps: total number of training steps to run
    """
    self.writer = tf.summary.FileWriter('./board', tf.get_default_graph())
    with tf.Session(graph=tf.get_default_graph()) as session:
        tf.initialize_all_variables().run()

        ###
        print('Start Training')
        # batch 1000
        for i, samples, labels in data_iterator(train_samples, train_labels, iteration_steps=iteration_steps,
                                                chunkSize=self.train_batch_size):
            _, l, predictions, summary = session.run(
                [self.optimizer, self.loss, self.train_prediction, self.merged_train_summary],
                feed_dict={self.tf_train_samples: samples, self.tf_train_labels: labels}
            )
            self.writer.add_summary(summary, i)
            # labels is True Labels
            accuracy, _ = self.accuracy(predictions, labels)
            if i % 50 == 0:
                print('Minibatch loss at step %d: %f' % (i, l))
                print('Minibatch accuracy: %.1f%%' % accuracy)
        ###

        # Persist the trained model, creating the checkpoint directory first
        # if needed. Using os.path.dirname handles arbitrarily nested
        # save paths (the previous split('/')[0] logic only supported a
        # single-level directory and duplicated the save call).
        import os
        save_dir = os.path.dirname(self.save_path)
        if save_dir and not os.path.isdir(save_dir):
            os.makedirs(save_dir)
        save_path = self.saver.save(session, self.save_path)
        print("Model saved in file: %s" % save_path)
def test(self, test_samples, test_labels, *, data_iterator):
    """Restore the saved checkpoint and evaluate accuracy on the test set."""
    if self.saver is None:
        self.define_model()
    if self.writer is None:
        self.writer = tf.summary.FileWriter('./board', tf.get_default_graph())
    print('Before session')
    with tf.Session(graph=tf.get_default_graph()) as session:
        self.saver.restore(session, self.save_path)
        batch_accuracies = []
        batch_matrices = []
        for step, samples, labels in data_iterator(test_samples, test_labels,
                                                   chunkSize=self.test_batch_size):
            predictions = session.run(
                self.test_prediction,
                feed_dict={self.tf_test_samples: samples}
            )
            # self.writer.add_summary(summary, step)
            batch_accuracy, batch_cm = self.accuracy(predictions, labels,
                                                     need_confusion_matrix=True)
            batch_accuracies.append(batch_accuracy)
            batch_matrices.append(batch_cm)
            print('Test Accuracy: %.1f%%' % batch_accuracy)
        print(' Average Accuracy:', np.average(batch_accuracies))
        print('Standard Deviation:', np.std(batch_accuracies))
        self.print_confusion_matrix(np.add.reduce(batch_matrices))
def accuracy(self, predictions, labels, need_confusion_matrix=False):
    """
    @return: accuracy and confusionMatrix as a tuple
    """
    pred_idx = np.argmax(predictions, 1)
    true_idx = np.argmax(labels, 1)
    matrix = None
    if need_confusion_matrix:
        matrix = confusion_matrix(true_idx, pred_idx)
    # '==' on numpy arrays is elementwise; sum counts correct predictions.
    correct = np.sum(pred_idx == true_idx)
    return 100.0 * correct / predictions.shape[0], matrix
def visualize_filter_map(self, tensor, *, how_many, display_size, name):
    """Add an image summary for one activation map (last sample of the batch).

    Args:
        tensor: activation tensor; assumes layout (batch, height, width,
            channels) -- TODO confirm against the model definition.
        how_many: number of channel maps to emit as images.
        display_size: height/width of each displayed map.
        name: tag under which the summary appears in TensorBoard.
    """
    # print(tensor.get_shape)
    # Take the last sample in the batch, then move channels to the front
    # so each channel becomes one grayscale image.
    filter_map = tensor[-1]
    # print(filter_map.get_shape())
    filter_map = tf.transpose(filter_map, perm=[2, 0, 1])
    # print(filter_map.get_shape())
    filter_map = tf.reshape(filter_map, (how_many, display_size, display_size, 1))
    # print(how_many)
    self.test_summaries.append(tf.summary.image(name, tensor=filter_map, max_outputs=how_many))
def print_confusion_matrix(self, confusionMatrix, total_samples=26000):
    """Print per-class recall/precision stats for a confusion matrix.

    Args:
        confusionMatrix: square ndarray; rows = true labels, columns =
            predicted labels.
        total_samples: dataset size used to weight the per-column accuracy
            sum. Defaults to 26000, the value previously hard-coded here,
            so existing callers see identical output.
    """
    print('Confusion Matrix:')
    for i, line in enumerate(confusionMatrix):
        # Row view: fraction of true-class-i samples predicted correctly.
        print(line, line[i] / np.sum(line))
    a = 0
    for i, column in enumerate(np.transpose(confusionMatrix, (1, 0))):
        # Column view: precision of class i, weighted by the column's
        # share of total_samples.
        a += (column[i] / np.sum(column)) * (np.sum(column) / total_samples)
        print(column[i] / np.sum(column), )
    print('\n', np.sum(confusionMatrix), a)
```
|
Spiranthes casei, or Case's lady's tresses, is a species of orchid native to the northeastern United States and Canada.
Description
Spiranthes casei plants are 7–44 cm tall. They have both basal and stem leaves, and the basal leaves can still be present when flowering in August and September. As with all Spiranthes, the flowers are arranged in a spiral around the stem, and each flower has 3 petals and 3 sepals which together give it a tube-like shape. The petals and sepals have an ivory to yellowish white or greenish cream color.
Spiranthes casei is very closely related to and looks similar to Spiranthes ochroleuca but has smaller flowers, the dorsal (top) sepal and tips of the side petals are not recurved, and they have a comparatively reduced labellum.
Distribution and habitat
Spiranthes casei has been found in Maine, Michigan, Minnesota, New Hampshire, New York, Pennsylvania, Vermont and Wisconsin in the US and in Nova Scotia, Ontario and Quebec in Canada. It grows in forest, shrubland and grassland, mostly restricted to the lichen and bracken barrens of the Great Lakes Basin and the Canadian maritime provinces.
Taxonomy
Spiranthes casei was first described by P. M. Catling & J. E. Cruise in 1975. They named the new species after orchid researcher Frederick W. Case II. Before Catling & Cruise described it as a new species specimens of Spiranthes casei were often labeled as a hybrid between Spiranthes cernua and Spiranthes lacera or as a northern version of Spiranthes vernalis. The oldest herbarium specimen they could find was collected in Ontario in 1904.
References
casei
Orchids of the United States
|
The Swan Coastal Plain Shrublands and Woodlands is a sclerophyll-woodland vegetation community that stretch from Kalbarri in the north to Busselton in the south, passing through Perth, in Western Australia. Situated on the Swan Coastal Plain, it is listed as Endangered under the Commonwealth Environment Protection and Biodiversity Conservation Act 1999. The woodlands also incorporate Perth–Gingin Shrublands and Woodlands.
Geography
The community sits on poorly drained plains with greyish sandy benches and disjunctive swamps, in addition to areas that lie on bog iron ore, marl or solonetz soils, as well as heavy clay soils. The Perth–Gingin Shrublands and Woodlands are located on inundated ironstone and heavy clay soils. The community occurs at parts near Kalbarri, City of Swan, City of Gosnells, Eneabba, the Perth area, at Gingin and Busselton and in the Scott River area, even though 97% of it is cleared.
Ecology
Native species include shrubs and small trees such as Melaleuca viminea, Grevillea curviloba, Kunzea recurva, Grevillea evanescens, Dryandra sessilis, Acacia saligna, Grevillea curviloba, Jacksonia furcellata, with Rhodanthe spp, being in the understorey, and the herbs Rhodanthe manglesii and Tribonanthes australis.
Other species in the community include Banksia attenuata, Banksia menziesii, sometimes with Allocasuarina fraseriana with a shrub layer that may include Adenanthos cygnorum, Hibbertia huegelii, Scaevola repens, Allocasuarina humilis, Bossiaea eriocarpa, Hibbertia hypericoides and Stirlingia latifolia. Herbs and forbs include Conostylis aurea, Burchardia congesta and Patersonia occidentalis.
References
Endangered ecological communities
Remnant urban bushland
Vegetation of Australia
Ecoregions of Western Australia
Temperate grasslands, savannas, and shrublands
Sclerophyll forests
Mediterranean forests, woodlands, and scrub
|
```csharp
/****************************************************************************
*
* path_to_url
* path_to_url
* path_to_url
****************************************************************************/
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Text;
using UnityEngine;
using UnityEngine.Events;
using UnityEngine.UI;
namespace QFramework
{
/// <summary>
/// Helpers for locating the main game assembly ("Assembly-CSharp").
/// </summary>
public class AssemblyUtil
{
    /// <summary>
    /// The "Assembly-CSharp" assembly loaded in the current AppDomain,
    /// or null when it is not loaded.
    /// </summary>
    [Obsolete("Do not used", APIVersion.Force)]
    public static Assembly DefaultCSharpAssembly
    {
        get
        {
            return AppDomain.CurrentDomain.GetAssemblies()
                .SingleOrDefault(a => a.GetName().Name == "Assembly-CSharp");
        }
    }
    /// <summary>
    /// Resolves a type by name from <see cref="DefaultCSharpAssembly"/>.
    /// </summary>
    /// <param name="typeName">Fully qualified type name.</param>
    /// <returns>The resolved type, or null when not found.</returns>
    [Obsolete("Do not used", APIVersion.Force)]
    public static Type GetDefaultAssemblyType(string typeName)
    {
        return DefaultCSharpAssembly.GetType(typeName);
    }
}
/// <summary>
/// Legacy probability / invocation helpers. (The historical misspelling of
/// "ProbabilityHelper" is retained because renaming would break callers.)
/// </summary>
public static class ProbilityHelper
{
    // Invokes the action when non-null; returns whether it was invoked.
    [Obsolete(" action?.Invoke(),please use action?.Invoke() instead", APIVersion.Force)]
    public static bool InvokeGracefully(this UnityAction selfAction)
    {
        if (null != selfAction)
        {
            selfAction();
            return true;
        }
        return false;
    }
    // One-argument variant of InvokeGracefully.
    [Obsolete(" action?.Invoke(),please use action?.Invoke() instead", APIVersion.Force)]
    public static bool InvokeGracefully<T>(this UnityAction<T> selfAction, T t)
    {
        if (null != selfAction)
        {
            selfAction(t);
            return true;
        }
        return false;
    }
    // Two-argument variant of InvokeGracefully.
    [Obsolete(" action?.Invoke(),please use action?.Invoke() instead", APIVersion.Force)]
    public static bool InvokeGracefully<T, K>(this UnityAction<T, K> selfAction, T t, K k)
    {
        if (null != selfAction)
        {
            selfAction(t, k);
            return true;
        }
        return false;
    }
    // Uniformly random element from the supplied values.
    [Obsolete("Do not used", APIVersion.Force)]
    public static T RandomValueFrom<T>(params T[] values)
    {
        return values[UnityEngine.Random.Range(0, values.Length)];
    }
    /// <summary>
    /// Returns true with the given percent probability.
    /// </summary>
    /// <param name="percent"> 0 ~ 100 </param>
    /// <returns>true in roughly <paramref name="percent"/>% of calls.</returns>
    [Obsolete("Do not used", APIVersion.Force)]
    public static bool PercentProbability(int percent)
    {
        // BUG FIX: the original compared against a hard-coded 50% and
        // ignored the 'percent' argument entirely.
        return UnityEngine.Random.Range(0, 1000) * 0.001f < percent * 0.01f;
    }
}
public static class ReflectionExtension
{
    /// <summary>
    /// Returns the loaded "Assembly-CSharp" assembly, or null when absent.
    /// </summary>
    public static Assembly GetAssemblyCSharp()
    {
        var loaded = AppDomain.CurrentDomain.GetAssemblies();
        for (var i = 0; i < loaded.Length; i++)
        {
            if (loaded[i].FullName.StartsWith("Assembly-CSharp,"))
            {
                return loaded[i];
            }
        }
        // Log.E(">>>>>>>Error: Can\'t find Assembly-CSharp.dll");
        return null;
    }
    /// <summary>
    /// Returns the loaded "Assembly-CSharp-Editor" assembly, or null when absent.
    /// </summary>
    public static Assembly GetAssemblyCSharpEditor()
    {
        var loaded = AppDomain.CurrentDomain.GetAssemblies();
        for (var i = 0; i < loaded.Length; i++)
        {
            if (loaded[i].FullName.StartsWith("Assembly-CSharp-Editor,"))
            {
                return loaded[i];
            }
        }
        // Log.E(">>>>>>>Error: Can\'t find Assembly-CSharp-Editor.dll");
        return null;
    }
}
/// <summary>
/// Write in unity 2017 .Net 3.5
/// after unity 2018 .Net 4.x and new C# version are more powerful
/// </summary>
public static class DeprecatedExtension
{
/// <summary>
/// Weighted random index: index i is chosen with probability
/// powers[i] / sum(powers). Returns -1 when the list is empty or all
/// weights are zero.
/// </summary>
[Obsolete("Do not used", APIVersion.Force)]
public static int GetRandomWithPower(this List<int> powers)
{
    var sum = 0;
    foreach (var power in powers)
    {
        sum += power;
    }
    if (sum <= 0)
    {
        LogKit.E("GetRandomWithPower: power list is empty or sums to zero");
        return -1;
    }
    var randomNum = UnityEngine.Random.Range(0, sum);
    var currentSum = 0;
    for (var i = 0; i < powers.Count; i++)
    {
        var nextSum = currentSum + powers[i];
        // BUG FIX: use the half-open interval [currentSum, nextSum).
        // The original '<= nextSum' double-counted bucket boundaries and
        // allowed zero-weight entries to be selected.
        if (randomNum >= currentSum && randomNum < nextSum)
        {
            return i;
        }
        currentSum = nextSum;
    }
    LogKit.E("GetRandomWithPower: failed to pick an index");
    return -1;
}
/// <summary>
/// Picks a dictionary key with probability proportional to its weight.
/// </summary>
[Obsolete("Do not used", APIVersion.Force)]
public static T GetRandomWithPower<T>(this Dictionary<T, int> powersDict)
{
    var candidateKeys = new List<T>();
    var weights = new List<int>();
    foreach (var pair in powersDict)
    {
        candidateKeys.Add(pair.Key);
        weights.Add(pair.Value);
    }
    var chosenIndex = weights.GetRandomWithPower();
    return candidateKeys[chosenIndex];
}
// Adds parameterName to parameterList when the animator declares it.
[Obsolete("Do not used", APIVersion.Force)]
public static void AddAnimatorParameterIfExists(this Animator animator, string parameterName,
    AnimatorControllerParameterType type, List<string> parameterList)
{
    if (animator.HasParameterOfType(parameterName, type))
    {
        parameterList.Add(parameterName);
    }
}
// Sets the bool parameter only when it is present in parameterList.
[Obsolete("Do not used", APIVersion.Force)]
public static void UpdateAnimatorBool(this Animator self, string parameterName, bool value,
    List<string> parameterList)
{
    if (parameterList.Contains(parameterName))
    {
        self.SetBool(parameterName, value);
    }
}
// Fires the trigger only when it is present in parameterList.
[Obsolete("Do not used", APIVersion.Force)]
public static void UpdateAnimatorTrigger(this Animator self, string parameterName, List<string> parameterList)
{
    if (parameterList.Contains(parameterName))
    {
        self.SetTrigger(parameterName);
    }
}
// Alias of UpdateAnimatorTrigger(list) — identical behavior.
[Obsolete("Do not used", APIVersion.Force)]
public static void SetAnimatorTrigger(this Animator self, string parameterName, List<string> parameterList)
{
    if (parameterList.Contains(parameterName))
    {
        self.SetTrigger(parameterName);
    }
}
// Sets the float parameter only when it is present in parameterList.
[Obsolete("Do not used", APIVersion.Force)]
public static void UpdateAnimatorFloat(this Animator self, string parameterName, float value,
    List<string> parameterList)
{
    if (parameterList.Contains(parameterName))
    {
        self.SetFloat(parameterName, value);
    }
}
// Sets the int parameter only when it is present in parameterList.
[Obsolete("Do not used", APIVersion.Force)]
public static void UpdateAnimatorInteger(this Animator self, string parameterName, int value,
    List<string> parameterList)
{
    if (parameterList.Contains(parameterName))
    {
        self.SetInteger(parameterName, value);
    }
}
// Unchecked pass-through to Animator.SetBool.
[Obsolete("Do not used", APIVersion.Force)]
public static void UpdateAnimatorBool(this Animator self, string parameterName, bool value)
{
    self.SetBool(parameterName, value);
}
// Unchecked pass-through to Animator.SetTrigger.
[Obsolete("Do not used", APIVersion.Force)]
public static void UpdateAnimatorTrigger(this Animator self, string parameterName)
{
    self.SetTrigger(parameterName);
}
// Alias of UpdateAnimatorTrigger — identical behavior.
[Obsolete("Do not used", APIVersion.Force)]
public static void SetAnimatorTrigger(this Animator self, string parameterName)
{
    self.SetTrigger(parameterName);
}
// Unchecked pass-through to Animator.SetFloat.
[Obsolete("Do not used", APIVersion.Force)]
public static void UpdateAnimatorFloat(this Animator self, string parameterName, float value)
{
    self.SetFloat(parameterName, value);
}
// Unchecked pass-through to Animator.SetInteger.
[Obsolete("Do not used", APIVersion.Force)]
public static void UpdateAnimatorInteger(this Animator self, string parameterName, int value)
{
    self.SetInteger(parameterName, value);
}
// Sets the bool parameter only when the animator actually declares it.
[Obsolete("Do not used", APIVersion.Force)]
public static void UpdateAnimatorBoolIfExists(this Animator self, string parameterName, bool value)
{
    if (self.HasParameterOfType(parameterName, AnimatorControllerParameterType.Bool))
    {
        self.SetBool(parameterName, value);
    }
}
// Fires the trigger only when the animator actually declares it.
[Obsolete("Do not used", APIVersion.Force)]
public static void UpdateAnimatorTriggerIfExists(this Animator self, string parameterName)
{
    if (self.HasParameterOfType(parameterName, AnimatorControllerParameterType.Trigger))
    {
        self.SetTrigger(parameterName);
    }
}
// Alias of UpdateAnimatorTriggerIfExists — identical behavior.
[Obsolete("Do not used", APIVersion.Force)]
public static void SetAnimatorTriggerIfExists(this Animator self, string parameterName)
{
    if (self.HasParameterOfType(parameterName, AnimatorControllerParameterType.Trigger))
    {
        self.SetTrigger(parameterName);
    }
}
// Sets the float parameter only when the animator actually declares it.
[Obsolete("Do not used", APIVersion.Force)]
public static void UpdateAnimatorFloatIfExists(this Animator self, string parameterName, float value)
{
    if (self.HasParameterOfType(parameterName, AnimatorControllerParameterType.Float))
    {
        self.SetFloat(parameterName, value);
    }
}
// Sets the int parameter only when the animator actually declares it.
[Obsolete("Do not used", APIVersion.Force)]
public static void UpdateAnimatorIntegerIfExists(this Animator self, string parameterName, int value)
{
    if (self.HasParameterOfType(parameterName, AnimatorControllerParameterType.Int))
    {
        self.SetInteger(parameterName, value);
    }
}
// True when the animator declares a parameter with this name and type.
// Note: reads self.parameters each call, which allocates in Unity.
[Obsolete("Do not used", APIVersion.Force)]
public static bool HasParameterOfType(this Animator self, string name, AnimatorControllerParameterType type)
{
    if (string.IsNullOrEmpty(name))
    {
        return false;
    }
    var parameters = self.parameters;
    return parameters.Any(currParam => currParam.type == type && currParam.name == name);
}
// Enables UI interaction on the selectable; returns it for chaining.
[Obsolete("Do not used", APIVersion.Force)]
public static T EnableInteract<T>(this T selfSelectable) where T : Selectable
{
    selfSelectable.interactable = true;
    return selfSelectable;
}
// Disables UI interaction on the selectable; returns it for chaining.
[Obsolete("Do not used", APIVersion.Force)]
public static T DisableInteract<T>(this T selfSelectable) where T : Selectable
{
    selfSelectable.interactable = false;
    return selfSelectable;
}
// Turns off all visual transitions on the selectable; returns it for chaining.
[Obsolete("Do not used", APIVersion.Force)]
public static T CancelAllTransitions<T>(this T selfSelectable) where T : Selectable
{
    selfSelectable.transition = Selectable.Transition.None;
    return selfSelectable;
}
// Registers an onValueChanged listener on the toggle.
[Obsolete("Do not used", APIVersion.Force)]
public static void RegOnValueChangedEvent(this Toggle selfToggle, UnityAction<bool> onValueChangedEvent)
{
    selfToggle.onValueChanged.AddListener(onValueChangedEvent);
}
// Center of this RectTransform's bounds expressed relative to rootTrans.
[Obsolete("Do not used", APIVersion.Force)]
public static Vector2 GetPosInRootTrans(this RectTransform selfRectTransform, Transform rootTrans)
{
    return RectTransformUtility.CalculateRelativeRectTransformBounds(rootTrans, selfRectTransform).center;
}
// Sets only the X component of the anchored position; returns self.
[Obsolete("Do not used", APIVersion.Force)]
public static RectTransform AnchorPosX(this RectTransform selfRectTrans, float anchorPosX)
{
    var anchorPos = selfRectTrans.anchoredPosition;
    anchorPos.x = anchorPosX;
    selfRectTrans.anchoredPosition = anchorPos;
    return selfRectTrans;
}
// Sets only the Y component of the anchored position; returns self.
[Obsolete("Do not used", APIVersion.Force)]
public static RectTransform AnchorPosY(this RectTransform selfRectTrans, float anchorPosY)
{
    var anchorPos = selfRectTrans.anchoredPosition;
    anchorPos.y = anchorPosY;
    selfRectTrans.anchoredPosition = anchorPos;
    return selfRectTrans;
}
// Sets only the width component of sizeDelta; returns self.
[Obsolete("Do not used", APIVersion.Force)]
public static RectTransform SetSizeWidth(this RectTransform selfRectTrans, float sizeWidth)
{
    var sizeDelta = selfRectTrans.sizeDelta;
    sizeDelta.x = sizeWidth;
    selfRectTrans.sizeDelta = sizeDelta;
    return selfRectTrans;
}
// Sets only the height component of sizeDelta; returns self.
[Obsolete("Do not used", APIVersion.Force)]
public static RectTransform SetSizeHeight(this RectTransform selfRectTrans, float sizeHeight)
{
    var sizeDelta = selfRectTrans.sizeDelta;
    sizeDelta.y = sizeHeight;
    selfRectTrans.sizeDelta = sizeDelta;
    return selfRectTrans;
}
// Size of the RectTransform's bounds relative to itself.
[Obsolete("Do not used", APIVersion.Force)]
public static Vector2 GetWorldSize(this RectTransform selfRectTrans)
{
    return RectTransformUtility.CalculateRelativeRectTransformBounds(selfRectTrans).size;
}
// Sets the scene ambient light from an HTML color string (e.g. "#FFAA00").
[Obsolete("Do not used", APIVersion.Force)]
public static void SetAmbientLightHTMLStringColor(string htmlStringColor)
{
    RenderSettings.ambientLight = htmlStringColor.HtmlStringToColor();
}
/// <summary>
/// Switches a Unity Standard-shader material into its Transparent
/// rendering mode (same state changes the Standard shader inspector makes).
/// Reference: path_to_url
/// </summary>
/// <param name="self"></param>
[Obsolete("Do not used", APIVersion.Force)]
public static void SetStandardMaterialToTransparentMode(this Material self)
{
    self.SetFloat("_Mode", 3);
    self.SetInt("_SrcBlend", (int)UnityEngine.Rendering.BlendMode.SrcAlpha);
    self.SetInt("_DstBlend", (int)UnityEngine.Rendering.BlendMode.OneMinusSrcAlpha);
    self.SetInt("_ZWrite", 0);
    self.DisableKeyword("_ALPHATEST_ON");
    self.EnableKeyword("_ALPHABLEND_ON");
    self.DisableKeyword("_ALPHAPREMULTIPLY_ON");
    // 3000 = Unity's "Transparent" render queue.
    self.renderQueue = 3000;
}
// True when the game object's layer is contained in the mask.
[Obsolete(" gameObj.IsInLayerMask(layerMask)use gameObj.IsInLayerMask(layerMask) instead", true)]
public static bool ContainsGameObject(this LayerMask selfLayerMask, GameObject gameObject)
{
    return gameObject.IsInLayerMask(selfLayerMask);
}
// Wraps the whole texture in a Sprite with a centered pivot.
[Obsolete("Do not used", APIVersion.Force)]
public static Sprite CreateSprite(this Texture2D self)
{
    return Sprite.Create(self, new Rect(0, 0, self.width, self.height), Vector2.one * 0.5f);
}
// Depth-first search for a descendant transform with the given name.
[Obsolete("Do not used", APIVersion.Force)]
public static Transform SeekTrans(this Transform selfTransform, string uniqueName)
{
    var childTrans = selfTransform.Find(uniqueName);
    if (null != childTrans)
        return childTrans;
    foreach (Transform trans in selfTransform)
    {
        childTrans = trans.SeekTrans(uniqueName);
        if (null != childTrans)
            return childTrans;
    }
    return null;
}
// Shows the child found at tranformPath; throws if the path is missing.
[Obsolete("Do not used", APIVersion.Force)]
public static T ShowChildTransByPath<T>(this T selfComponent, string tranformPath) where T : Component
{
    selfComponent.transform.Find(tranformPath).gameObject.Show();
    return selfComponent;
}
// Hides the child found at tranformPath; throws if the path is missing.
[Obsolete("Do not used", APIVersion.Force)]
public static T HideChildTransByPath<T>(this T selfComponent, string tranformPath) where T : Component
{
    selfComponent.transform.Find(tranformPath).Hide();
    return selfComponent;
}
// Copies parent and local position/rotation/scale from another transform.
[Obsolete("Do not used", APIVersion.Force)]
public static void CopyDataFromTrans(this Transform selfTrans, Transform fromTrans)
{
    selfTrans.SetParent(fromTrans.parent);
    selfTrans.localPosition = fromTrans.localPosition;
    selfTrans.localRotation = fromTrans.localRotation;
    selfTrans.localScale = fromTrans.localScale;
}
// Applies action to this transform and every descendant (pre-order).
[Obsolete("Do not used", APIVersion.Force)]
public static void ActionRecursion(this Transform tfParent, Action<Transform> action)
{
    action(tfParent);
    foreach (Transform tfChild in tfParent)
    {
        tfChild.ActionRecursion(action);
    }
}
// Pre-order search for a transform whose name matches (self included).
[Obsolete("Do not used", APIVersion.Force)]
public static Transform FindChildRecursion(this Transform tfParent, string name,
    StringComparison stringComparison = StringComparison.Ordinal)
{
    if (tfParent.name.Equals(name, stringComparison))
    {
        //Debug.Log("Hit " + tfParent.name);
        return tfParent;
    }
    foreach (Transform tfChild in tfParent)
    {
        Transform tfFinal = null;
        tfFinal = tfChild.FindChildRecursion(name, stringComparison);
        if (tfFinal)
        {
            return tfFinal;
        }
    }
    return null;
}
// Pre-order search using a predicate; logs each hit via LogKit.
[Obsolete("Do not used", APIVersion.Force)]
public static Transform FindChildRecursion(this Transform tfParent, Func<Transform, bool> predicate)
{
    if (predicate(tfParent))
    {
        LogKit.I("Hit " + tfParent.name);
        return tfParent;
    }
    foreach (Transform tfChild in tfParent)
    {
        Transform tfFinal = null;
        tfFinal = tfChild.FindChildRecursion(predicate);
        if (tfFinal)
        {
            return tfFinal;
        }
    }
    return null;
}
// Builds the scene-hierarchy path of a transform, e.g. "Root/Child/Leaf",
// by walking parents up to the root.
[Obsolete("Do not used", APIVersion.Force)]
public static string GetPath(this Transform transform)
{
    var sb = new System.Text.StringBuilder();
    var t = transform;
    while (true)
    {
        sb.Insert(0, t.name);
        t = t.parent;
        if (t)
        {
            sb.Insert(0, "/");
        }
        else
        {
            return sb.ToString();
        }
    }
}
// Finds a child by path, accepting '.' as an alternative separator to '/'.
[Obsolete("Do not used", APIVersion.Force)]
public static Transform FindByPath(this Transform selfTrans, string path)
{
    return selfTrans.Find(path.Replace(".", "/"));
}
// The four methods below are synonymous legacy shims: all return lossyScale.
[Obsolete(" Scale(), use Scale() instead", APIVersion.Force)]
public static Vector3 GetGlobalScale<T>(this T selfComponent) where T : Component
{
    return selfComponent.transform.lossyScale;
}
[Obsolete(" Scale(), use Scale() instead", APIVersion.Force)]
public static Vector3 GetScale<T>(this T selfComponent) where T : Component
{
    return selfComponent.transform.lossyScale;
}
[Obsolete(" Scale(), use Scale() instead", APIVersion.Force)]
public static Vector3 GetWorldScale<T>(this T selfComponent) where T : Component
{
    return selfComponent.transform.lossyScale;
}
[Obsolete(" Scale(), use Scale() instead", APIVersion.Force)]
public static Vector3 GetLossyScale<T>(this T selfComponent) where T : Component
{
    return selfComponent.transform.lossyScale;
}
// World-space rotation shim.
[Obsolete(" Rotation(), use Rotation() instead", APIVersion.Force)]
public static Quaternion GetRotation<T>(this T selfComponent) where T : Component
{
    return selfComponent.transform.rotation;
}
// Legacy alias for DestroyChildren().
[Obsolete(" DestroyChildren,use DestroyChildren() instead")]
public static T DestroyAllChild<T>(this T selfComponent) where T : Component
{
    return selfComponent.DestroyChildren();
}
// Legacy alias for DestroyChildren() on a GameObject.
[Obsolete(" DestroyChildren,use DestroyChildren() instead")]
public static GameObject DestroyAllChild(this GameObject selfGameObj)
{
    return selfGameObj.DestroyChildren();
}
// World-space position shim.
[Obsolete(" Position(), use Position() instead", APIVersion.Force)]
public static Vector3 GetPosition<T>(this T selfComponent) where T : Component
{
    return selfComponent.transform.position;
}
// Local scale shim.
[Obsolete(" LocalScale(), use LocalScale() instead", APIVersion.Force)]
public static Vector3 GetLocalScale<T>(this T selfComponent) where T : Component
{
    return selfComponent.transform.localScale;
}
// Local rotation shim.
[Obsolete(" LocalRotation(), use LocalRotation() instead", APIVersion.Force)]
public static Quaternion GetLocalRotation<T>(this T selfComponent) where T : Component
{
    return selfComponent.transform.localRotation;
}
// Local position shim.
[Obsolete(" LocalPosition(), use LocalPosition() instead", APIVersion.Force)]
public static Vector3 GetLocalPosition<T>(this T selfComponent) where T : Component
{
    return selfComponent.transform.localPosition;
}
// Applies toFunction to the object (when non-null) and returns the object.
[Obsolete(" Self, use Self instead", APIVersion.Force)]
public static T ApplySelfTo<T>(this T selfObj, System.Action<T> toFunction) where T : UnityEngine.Object
{
    toFunction.InvokeGracefully(selfObj);
    return selfObj;
}
// First attribute of type T on a method, or null when absent.
[Obsolete(
    " GetAttribute<T>(),please use GetAttribute<T>() instead",
    APIVersion.Force)]
public static T GetFirstAttribute<T>(this MethodInfo method, bool inherit) where T : Attribute
{
    return method.GetCustomAttributes<T>(inherit).FirstOrDefault();
}
// First attribute of type T on a field, or null when absent.
[Obsolete(
    " GetAttribute<T>(),please use GetAttribute<T>() instead",
    APIVersion.Force)]
public static T GetFirstAttribute<T>(this FieldInfo field, bool inherit) where T : Attribute
{
    return field.GetCustomAttributes<T>(inherit).FirstOrDefault();
}
// First attribute of type T on a property, or null when absent.
[Obsolete(
    " GetAttribute<T>(),please use GetAttribute<T>() instead",
    APIVersion.Force)]
public static T GetFirstAttribute<T>(this PropertyInfo prop, bool inherit) where T : Attribute
{
    return prop.GetCustomAttributes<T>(inherit).FirstOrDefault();
}
// First attribute of type T on a type, or null when absent.
[Obsolete(
    " GetAttribute<T>(),please use GetAttribute<T>() instead",
    APIVersion.Force)]
public static T GetFirstAttribute<T>(this Type type, bool inherit) where T : Attribute
{
    return type.GetCustomAttributes<T>(inherit).FirstOrDefault();
}
/// <summary>
/// Reads a public field's value by name via reflection.
/// </summary>
/// <param name="obj"></param>
/// <param name="fieldName"></param>
/// <returns>The field value, or null when the field does not exist.</returns>
[Obsolete("Do not used", APIVersion.Force)]
public static object GetFieldByReflect(this object obj, string fieldName)
{
    var fieldInfo = obj.GetType().GetField(fieldName);
    return fieldInfo == null ? null : fieldInfo.GetValue(obj);
}
/// <summary>
/// Reads a public property's value by name via reflection.
/// </summary>
/// <param name="obj"></param>
/// <param name="propertyName"></param>
/// <returns>The property value, or null when the property does not exist.</returns>
[Obsolete("Do not used", APIVersion.Force)]
public static object GetPropertyByReflect(this object obj, string propertyName, object[] index = null)
{
    var propertyInfo = obj.GetType().GetProperty(propertyName);
    return propertyInfo == null ? null : propertyInfo.GetValue(obj, index);
}
// Invokes a public method by name; returns null when the method is absent.
[Obsolete("Do not used", APIVersion.Force)]
public static object InvokeByReflect(this object obj, string methodName, params object[] args)
{
    var methodInfo = obj.GetType().GetMethod(methodName);
    return methodInfo == null ? null : methodInfo.Invoke(obj, args);
}
// default(T) for a runtime Type: zero value for value types, null otherwise.
[Obsolete("Do not used", APIVersion.Force)]
public static object DefaultForType(this Type targetType)
{
    return targetType.IsValueType ? Activator.CreateInstance(targetType) : null;
}
// Last '/'-separated segment of a URL or path.
[Obsolete("Do not used", APIVersion.Force)]
public static string LastWord(this string selfUrl)
{
    return selfUrl.Split('/').Last();
}
/// <summary>
/// Legacy shim: formats and appends onto a new StringBuilder seeded
/// with selfStr. Disabled (Obsolete error) — use a StringBuilder directly.
/// </summary>
/// <param name="selfStr"></param>
/// <param name="toAppend"></param>
/// <param name="args"></param>
[Obsolete(
    " str.Builder().AppendFormat(template,args).ToString(),please use str.Builder().AppendFormat(template,args).ToString() instead",
    true)]
public static StringBuilder AppendFormat(this string selfStr, string toAppend, params object[] args)
{
    return new StringBuilder(selfStr).AppendFormat(toAppend, args);
}
/// <summary>
/// Legacy shim: returns toPrefix + selfStr. Disabled (Obsolete error).
/// </summary>
/// <param name="selfStr"></param>
/// <param name="toPrefix"></param>
/// <returns></returns>
[Obsolete(
    " str.Builder().AddPrefix(***).ToString(),please use str.Builder().AddPrefix(***).ToString() instead",
    true)]
public static string AddPrefix(this string selfStr, string toPrefix)
{
    return new StringBuilder(toPrefix).Append(selfStr).ToString();
}
/// <summary>
/// Legacy shim: appends onto a new StringBuilder seeded with selfStr.
/// Disabled (Obsolete error).
/// </summary>
/// <param name="selfStr"></param>
/// <param name="toAppend"></param>
/// <returns></returns>
[Obsolete(
    " str.Builder().Append(***).ToString(),please use str.Builder().Append(***).ToString() instead",
    true)]
public static StringBuilder Append(this string selfStr, string toAppend)
{
    return new StringBuilder(selfStr).Append(toAppend);
}
/// <summary>
/// Upper-cases the first character of the string.
/// </summary>
/// <param name="str"></param>
/// <returns>The string with its first character upper-cased; null/empty input is returned unchanged.</returns>
[Obsolete("Do not used", APIVersion.Force)]
public static string UppercaseFirst(this string str)
{
    // Guard: the original indexed str[0] and threw on null/empty input.
    if (string.IsNullOrEmpty(str))
    {
        return str;
    }
    return char.ToUpper(str[0]) + str.Substring(1);
}
/// <summary>
/// Lower-cases the first character of the string.
/// </summary>
/// <param name="str"></param>
/// <returns>The string with its first character lower-cased; null/empty input is returned unchanged.</returns>
[Obsolete("Do not used", APIVersion.Force)]
public static string LowercaseFirst(this string str)
{
    // Guard: the original indexed str[0] and threw on null/empty input.
    if (string.IsNullOrEmpty(str))
    {
        return str;
    }
    return char.ToLower(str[0]) + str.Substring(1);
}
/// <summary>
/// Normalizes all line endings (\r\n and \r) to \n.
/// </summary>
/// <param name="str"></param>
/// <returns></returns>
[Obsolete("Do not used", APIVersion.Force)]
public static string ToUnixLineEndings(this string str)
{
    return str.Replace("\r\n", "\n").Replace("\r", "\n");
}
/// <summary>
/// Joins non-empty, trimmed values into a comma-separated string.
/// </summary>
/// <param name="values"></param>
/// <returns></returns>
[Obsolete("Do not used", APIVersion.Force)]
public static string ToCSV(this string[] values)
{
    return string.Join(", ", values
        .Where(value => !string.IsNullOrEmpty(value))
        .Select(value => value.Trim())
        .ToArray()
    );
}
// Splits a comma-separated string into trimmed, non-empty entries.
[Obsolete("Do not used", APIVersion.Force)]
public static string[] ArrayFromCSV(this string values)
{
    return values
        .Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries)
        .Select(value => value.Trim())
        .ToArray();
}
// "CamelCaseText" -> "Camel Case Text" (first char forced to upper case).
[Obsolete("Do not used", APIVersion.Force)]
public static string ToSpacedCamelCase(this string text)
{
    // Guard: the original indexed text[0] and threw on null/empty input.
    if (string.IsNullOrEmpty(text))
    {
        return text;
    }
    var sb = new StringBuilder(text.Length * 2);
    sb.Append(char.ToUpper(text[0]));
    for (var i = 1; i < text.Length; i++)
    {
        if (char.IsUpper(text[i]) && text[i - 1] != ' ')
        {
            sb.Append(' ');
        }
        sb.Append(text[i]);
    }
    return sb.ToString();
}
/// <summary>
/// Determines whether the type implements the specified interface
/// and is not an interface itself.
/// </summary>
/// <returns><c>true</c>, if interface was implementsed, <c>false</c> otherwise.</returns>
/// <param name="type">Type.</param>
/// <typeparam name="T">The 1st type parameter.</typeparam>
[Obsolete("Do not used", APIVersion.Force)]
public static bool ImplementsInterface<T>(this Type type)
{
    return !type.IsInterface && type.GetInterfaces().Contains(typeof(T));
}
/// <summary>
/// Determines whether the object's runtime type implements the specified
/// interface and is not an interface itself.
/// </summary>
/// <returns><c>true</c>, if interface was implementsed, <c>false</c> otherwise.</returns>
/// <param name="obj">Object whose runtime type is inspected.</param>
/// <typeparam name="T">The 1st type parameter.</typeparam>
[Obsolete("Do not used", APIVersion.Force)]
public static bool ImplementsInterface<T>(this object obj)
{
    var type = obj.GetType();
    return !type.IsInterface && type.GetInterfaces().Contains(typeof(T));
}
/// <summary>
/// Ensures the parent directory of the given file path exists.
/// </summary>
/// <param name="path">A file path whose directory part should exist.</param>
[Obsolete("Do not used", APIVersion.Force)]
public static void MakeFileDirectoryExist(string path)
{
    string root = Path.GetDirectoryName(path);
    if (!Directory.Exists(root))
    {
        Directory.CreateDirectory(root);
    }
}
/// <summary>
/// Directory part of a path (empty string for null/empty input).
/// </summary>
/// <param name="path"></param>
/// <returns></returns>
[Obsolete("Do not used", APIVersion.Force)]
public static string GetFolderName(this string path)
{
    if (string.IsNullOrEmpty(path))
    {
        return string.Empty;
    }
    return Path.GetDirectoryName(path);
}
// "'\''/'"
// "Normalize paths by removing whitespace and converting all '\' to '/'"
[Obsolete("Do not used", APIVersion.Force)]
public static string MakePathStandard(string path)
{
    return path.Trim().Replace("\\", "/");
}
/// <summary>
/// Strips the extension from a file path ("a/b.txt" -> "a/b").
/// </summary>
/// <param name="fileName"></param>
[Obsolete("Do not used", APIVersion.Force)]
public static string GetFilePathWithoutExtension(string fileName)
{
    if (fileName.Contains("."))
        return fileName.Substring(0, fileName.LastIndexOf('.'));
    return fileName;
}
/// <summary>
/// File name without its directory part and without its extension.
/// </summary>
/// <param name="fileName"></param>
/// <param name="separator"></param>
/// <returns></returns>
[Obsolete("Do not used", APIVersion.Force)]
public static string GetFileNameWithoutExtension(string fileName, char separator = '/')
{
    return GetFilePathWithoutExtension(GetFileName(fileName, separator));
}
/// <summary>
/// Last path segment after the separator (path is normalized first).
/// </summary>
/// <param name="path"></param>
/// <param name="separator"></param>
/// <returns></returns>
[Obsolete("Do not used", APIVersion.Force)]
public static string GetFileName(string path, char separator = '/')
{
    path = MakePathStandard(path);
    return path.Substring(path.LastIndexOf(separator) + 1);
}
/// <summary>
/// Directory part of a normalized path, without the trailing '/'.
/// </summary>
/// <param name="fileName"></param>
/// <returns></returns>
[Obsolete("Do not used", APIVersion.Force)]
public static string GetDirectoryName(string fileName)
{
    fileName = MakePathStandard(fileName);
    // NOTE(review): throws when the path contains no '/' (LastIndexOf
    // returns -1) — confirm callers always pass paths with a directory part.
    return fileName.Substring(0, fileName.LastIndexOf('/'));
}
// Directory part including the trailing '/' ("a/b/c.txt" -> "a/b/").
[Obsolete("Do not used", APIVersion.Force)]
public static string GetDirPath(this string absOrAssetsPath)
{
    var name = absOrAssetsPath.Replace("\\", "/");
    var lastIndex = name.LastIndexOf("/");
    return name.Substring(0, lastIndex + 1);
}
// Name of the deepest directory ("a/b/" -> "b", "a/b/c.txt" -> "c.txt").
[Obsolete("Do not used", APIVersion.Force)]
public static string GetLastDirName(this string absOrAssetsPath)
{
    var name = absOrAssetsPath.Replace("\\", "/");
    var dirs = name.Split('/');
    return absOrAssetsPath.EndsWith("/") ? dirs[dirs.Length - 2] : dirs[dirs.Length - 1];
}
// Absolute paths of all files under dirABSPath, optionally recursive and
// filtered by a case-insensitive suffix. Missing directory -> empty list.
[Obsolete("Do not used", APIVersion.Force)]
public static List<string> GetDirSubFilePathList(this string dirABSPath, bool isRecursive = true,
    string suffix = "")
{
    var pathList = new List<string>();
    var di = new DirectoryInfo(dirABSPath);
    if (!di.Exists)
    {
        return pathList;
    }
    var files = di.GetFiles();
    foreach (var fi in files)
    {
        if (!string.IsNullOrEmpty(suffix))
        {
            if (!fi.FullName.EndsWith(suffix, System.StringComparison.CurrentCultureIgnoreCase))
            {
                continue;
            }
        }
        pathList.Add(fi.FullName);
    }
    if (isRecursive)
    {
        var dirs = di.GetDirectories();
        foreach (var d in dirs)
        {
            pathList.AddRange(GetDirSubFilePathList(d.FullName, isRecursive, suffix));
        }
    }
    return pathList;
}
// Names (not paths) of the immediate subdirectories of dirABSPath.
[Obsolete("Do not used", APIVersion.Force)]
public static List<string> GetDirSubDirNameList(this string dirABSPath)
{
    var di = new DirectoryInfo(dirABSPath);
    var dirs = di.GetDirectories();
    return dirs.Select(d => d.Name).ToList();
}
/// <summary>
/// Creates the directory when it does not already exist.
/// </summary>
/// <param name="path"></param>
[Obsolete("Do not used", APIVersion.Force)]
public static void MakeDirectoryExist(string path)
{
    if (!Directory.Exists(path))
    {
        Directory.CreateDirectory(path);
    }
}
/// <summary>
/// Opens the folder in the OS file browser (macOS Finder / Windows Explorer);
/// no-op on other platforms.
/// </summary>
/// <param name="path"></param>
[Obsolete("Do not used", APIVersion.Force)]
public static void OpenFolder(string path)
{
#if UNITY_STANDALONE_OSX
    System.Diagnostics.Process.Start("open", path);
#elif UNITY_STANDALONE_WIN
    System.Diagnostics.Process.Start("explorer.exe", path);
#endif
}
/// <summary>
/// Returns the parent directory of <paramref name="path"/> via
/// Path.GetDirectoryName, or the empty string for a null/empty input.
/// </summary>
/// <param name="path">File or directory path.</param>
/// <returns>Parent folder path, or "" when the input is null/empty.</returns>
[Obsolete("Do not used", APIVersion.Force)]
public static string GetPathParentFolder(this string path)
{
    return string.IsNullOrEmpty(path)
        ? string.Empty
        : Path.GetDirectoryName(path);
}
/// <summary>
/// Ensures the parent directory of a file path exists, then returns the
/// original path unchanged (fluent style).
/// </summary>
/// <param name="path">File path whose parent directory should exist.</param>
[Obsolete("Do not used", APIVersion.Force)]
public static string CreateDirIfNotExists4FilePath(this string path)
{
    var direct = path.GetPathParentFolder();
    // Bug fix: for a bare file name the parent folder is "" and
    // Directory.CreateDirectory("") throws ArgumentException, so guard on
    // the empty/null parent before touching the filesystem.
    if (!string.IsNullOrEmpty(direct) && !Directory.Exists(direct))
    {
        Directory.CreateDirectory(direct);
    }
    return path;
}
/// <summary>
/// Returns the string form of a type argument.
/// <code>
/// var typeName = GenericExtention.GetTypeName&lt;string&gt;();
/// typeName.LogInfo(); // string
/// </code>
/// </summary>
/// <typeparam name="T">Type whose name is wanted.</typeparam>
/// <returns>The result of typeof(T).ToString().</returns>
[Obsolete("Do not used", APIVersion.Force)]
public static string GetTypeName<T>()
{
    var type = typeof(T);
    return type.ToString();
}
/// <summary>
/// Invokes <paramref name="action"/> with the receiver when the receiver is
/// non-null; otherwise does nothing.
/// </summary>
[Obsolete("Do not used", APIVersion.Force)]
public static void DoIfNotNull<T>(this T selfObj, Action<T> action) where T : class
{
    if (selfObj == null)
    {
        return;
    }
    action(selfObj);
}
/// <summary>
/// Equality shorthand: <c>this.Is(player)</c> is equivalent to
/// <c>Object.Equals(this, player)</c>.
/// <code>
/// if (this.Is(player))
/// {
///     ...
/// }
/// </code>
/// </summary>
/// <param name="selfObj">Left operand.</param>
/// <param name="value">Right operand.</param>
/// <returns>Whether the two values compare equal.</returns>
[Obsolete(" Object.Equals(A,B)please use Object.Equals() isntead", true)]
public static bool Is<T>(this T selfObj, T value)
{
    return object.Equals(selfObj, value);
}
/// <summary>
/// Predicate shorthand: applies <paramref name="condition"/> to the
/// receiver and returns the result.
/// </summary>
[Obsolete(" Object.Equals(A,B)please use Object.Equals() isntead", true)]
public static bool Is<T>(this T selfObj, Func<T, bool> condition)
{
    return condition.Invoke(selfObj);
}
/// <summary>
/// Runs <paramref name="action"/> only when the condition is true, then
/// returns the condition so calls can be chained.
/// <code>
/// (1 == 1).Do(()=>Debug.Log("1 == 1");
/// </code>
/// </summary>
/// <param name="selfCondition">Condition controlling the call.</param>
/// <param name="action">Callback executed when the condition holds.</param>
/// <returns>The unchanged condition value.</returns>
[Obsolete("Do not used", APIVersion.Force)]
public static bool Do(this bool selfCondition, Action action)
{
    if (selfCondition) action();
    return selfCondition;
}
/// <summary>
/// Always passes the condition to <paramref name="action"/> and returns it
/// unchanged.
/// <code>
/// (1 == 1).Do((result)=>Debug.Log("1 == 1:" + result);
/// </code>
/// </summary>
/// <param name="selfCondition">Condition forwarded to the callback.</param>
/// <param name="action">Callback receiving the condition value.</param>
/// <returns>The unchanged condition value.</returns>
[Obsolete("Do not used", APIVersion.Force)]
public static bool Do(this bool selfCondition, Action<bool> action)
{
    action.Invoke(selfCondition);
    return selfCondition;
}
/// <summary>
/// Invokes the func when it is non-null, otherwise returns default(T).
/// <code>
/// Func<int> func = ()=> 1;
/// var number = func.InvokeGracefully(); // if (func != null) number = func();
/// </code>
/// </summary>
/// <param name="selfFunc">Func to call, may be null.</param>
/// <typeparam name="T">Result type.</typeparam>
/// <returns>The func's result, or default(T) when the func is null.</returns>
[Obsolete(" someFunc?.Invoke() , please use someFunc?.Invoke() instead", APIVersion.Force)]
public static T InvokeGracefully<T>(this Func<T> selfFunc)
{
    if (selfFunc == null)
    {
        return default(T);
    }
    return selfFunc();
}
/// <summary>
/// Invokes the action when it is non-null.
/// <code>
/// System.Action action = () => Log.I("action called");
/// action.InvokeGracefully(); // if (action != null) action();
/// </code>
/// </summary>
/// <param name="selfAction">Action to call, may be null.</param>
/// <returns>True when the action was invoked, false when it was null.</returns>
[Obsolete(" someFunc?.Invoke() , please use someFunc?.Invoke() instead", APIVersion.Force)]
public static bool InvokeGracefully(this Action selfAction)
{
    var callable = selfAction != null;
    if (callable)
    {
        selfAction();
    }
    return callable;
}
/// <summary>
/// Invokes the one-argument action when it is non-null.
/// <code>
/// System.Action<int> action = (number) => Log.I("action called" + number);
/// action.InvokeGracefully(10); // if (action != null) action(10);
/// </code>
/// </summary>
/// <param name="selfAction">Action to call, may be null.</param>
/// <typeparam name="T">Argument type.</typeparam>
/// <returns>True when the action was invoked, false when it was null.</returns>
[Obsolete(" someFunc?.Invoke() , please use someFunc?.Invoke() instead", APIVersion.Force)]
public static bool InvokeGracefully<T>(this Action<T> selfAction, T t)
{
    var callable = selfAction != null;
    if (callable)
    {
        selfAction(t);
    }
    return callable;
}
/// <summary>
/// Invokes the two-argument action when it is non-null.
/// <code>
/// System.Action<int,string> action = (number,name) => Log.I("action called" + number + name);
/// action.InvokeGracefully(10,"qframework"); // if (action != null) action(10,"qframework");
/// </code>
/// </summary>
/// <param name="selfAction">Action to call, may be null.</param>
/// <returns>True when the action was invoked, false when it was null.</returns>
[Obsolete(" someFunc?.Invoke() , please use someFunc?.Invoke() instead", APIVersion.Force)]
public static bool InvokeGracefully<T, K>(this Action<T, K> selfAction, T t, K k)
{
    var callable = selfAction != null;
    if (callable)
    {
        selfAction(t, k);
    }
    return callable;
}
/// <summary>
/// Invokes an arbitrary delegate (via DynamicInvoke) when it is non-null.
/// <code>
/// // delegate
/// TestDelegate testDelegate = () => { };
/// testDelegate.InvokeGracefully();
/// </code>
/// </summary>
/// <param name="selfAction">Delegate to call, may be null.</param>
/// <returns>True when the delegate was invoked, false when it was null.</returns>
[Obsolete(" someFunc?.Invoke() , please use someFunc?.Invoke() instead", APIVersion.Force)]
public static bool InvokeGracefully(this Delegate selfAction, params object[] args)
{
    var callable = selfAction != null;
    if (callable)
    {
        // DynamicInvoke is late-bound and slower than a typed call, but it
        // is the only way to invoke an arbitrary Delegate with object[] args.
        selfAction.DynamicInvoke(args);
    }
    return callable;
}
}
}
```
|
```ruby
# frozen_string_literal: true
# truffleruby_primitives: true
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# 3. Neither the name of the library nor the names of its contributors may be
# used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
class Addrinfo
  # Address family, protocol family, socket type and protocol number of this
  # address, as Integer constants from Socket.
  attr_reader :afamily, :pfamily, :socktype, :protocol

  # Canonical host name; only populated by getaddrinfo when AI_CANONNAME
  # was requested.
  attr_reader :canonname

  # Resolves nodename/service and wraps every raw Socket.getaddrinfo result
  # in an Addrinfo instance.
  def self.getaddrinfo(nodename, service, family = nil, socktype = nil,
                       protocol = nil, flags = nil, timeout: nil)
    # NOTE: timeout is ignored currently. On MRI it's ignored but only for platforms without getaddrinfo_a().
    raw = Socket
      .getaddrinfo(nodename, service, family, socktype, protocol, flags)

    raw.map do |pair|
      lfamily, lport, lhost, laddress, _, lsocktype, lprotocol = pair
      sockaddr = Socket.pack_sockaddr_in(lport, laddress)
      addr = Addrinfo.new(sockaddr, lfamily, lsocktype, lprotocol)

      # Bug fix: the original tested "flags | Socket::AI_CANONNAME", which is
      # a non-zero Integer (hence truthy) for ANY flag combination, so the
      # canonical name was recorded whenever any flags were supplied. Test
      # the AI_CANONNAME bit explicitly instead.
      if flags and (flags & Socket::AI_CANONNAME) != 0
        addr.instance_variable_set(:@canonname, lhost)
      end

      addr
    end
  end

  # Builds an Addrinfo for a bare IP address (port 0).
  def self.ip(ip)
    sockaddr = Socket.sockaddr_in(0, ip)
    family = Truffle::Socket::Foreign::Sockaddr.family_of_string(sockaddr)

    new(sockaddr, family)
  end

  # Builds a TCP (SOCK_STREAM/IPPROTO_TCP) Addrinfo for ip:port.
  def self.tcp(ip, port)
    sockaddr = Socket.sockaddr_in(port, ip)
    pfamily = Truffle::Socket::Foreign::Sockaddr.family_of_string(sockaddr)

    new(sockaddr, pfamily, Socket::SOCK_STREAM, Socket::IPPROTO_TCP)
  end

  # Builds a UDP (SOCK_DGRAM/IPPROTO_UDP) Addrinfo for ip:port.
  def self.udp(ip, port)
    sockaddr = Socket.sockaddr_in(port, ip)
    pfamily = Truffle::Socket::Foreign::Sockaddr.family_of_string(sockaddr)

    new(sockaddr, pfamily, Socket::SOCK_DGRAM, Socket::IPPROTO_UDP)
  end

  # Builds a UNIX-domain Addrinfo for the given socket path; socket type
  # defaults to SOCK_STREAM.
  def self.unix(socket, socktype = nil)
    socktype ||= Socket::SOCK_STREAM

    new(Socket.pack_sockaddr_un(socket), Socket::PF_UNIX, socktype)
  end

  # Addrinfo#initialize has a bunch of checks that prevent us from setting
  # certain address families (e.g. AF_PACKET). Meanwhile methods such as
  # Socket.getifaddrs need to create Addrinfo instances with exactly those
  # address families.
  #
  # Because modifying #initialize would break compatibility we have to define a
  # separate new-like method that completely ignores #initialize. You can thank
  # Ruby for being such a well designed language.
  #
  # For the sake of simplicity `family` **must** be an Integer, a String based
  # address family is not supported.
  def self.raw_with_family(family)
    instance = allocate

    instance.instance_variable_set(:@afamily, family)

    instance
  end

  # Accepts either a packed sockaddr String or an MRI-style Array of the
  # form [family, port, host, address]; validates family/socktype/protocol
  # combinations the same way MRI does.
  def initialize(sockaddr, pfamily = nil, socktype = 0, protocol = 0)
    if Primitive.is_a?(sockaddr, Array)
      @afamily    = Truffle::Socket.address_family(sockaddr[0])
      @ip_port    = sockaddr[1]
      @ip_address = sockaddr[3]

      # When using AF_INET6 the protocol family can only be PF_INET6
      if @afamily == Socket::AF_INET6 and !pfamily
        pfamily = Socket::PF_INET6
      end
    else
      @afamily = Truffle::Socket::Foreign::Sockaddr.family_of_string(sockaddr)

      case @afamily
      when Socket::AF_UNIX
        @unix_path = Socket.unpack_sockaddr_un(sockaddr)
      when Socket::AF_INET
        @ip_port, @ip_address = Socket.unpack_sockaddr_in(sockaddr)
      when Socket::AF_INET6
        @ip_port, @ip_address = Socket.unpack_sockaddr_in(sockaddr)
      end
    end

    @pfamily ||= Truffle::Socket.protocol_family(pfamily)

    @socktype = Truffle::Socket.socket_type(socktype || 0)
    @protocol = protocol || 0

    # Per MRI behaviour setting the protocol family should also set the address
    # family, but only if the address and protocol families are compatible.
    if @pfamily && @pfamily != 0
      if @afamily == Socket::AF_INET6 and
        @pfamily != Socket::PF_INET and
        @pfamily != Socket::PF_INET6
        raise SocketError, 'The given protocol and address families are incompatible'
      end

      @afamily = @pfamily
    end

    # MRI only checks this if "sockaddr" is an Array.
    if Primitive.is_a?(sockaddr, Array)
      if @afamily == Socket::AF_INET6
        if Socket.sockaddr_in(0, @ip_address).bytesize != 28
          raise SocketError, "Invalid IPv6 address: #{@ip_address.inspect}"
        end
      end
    end

    # Based on MRI's (re-)implementation of getaddrinfo()
    if @afamily != Socket::AF_UNIX and
      @afamily != Socket::AF_UNSPEC and
      @afamily != Socket::AF_INET and
      @afamily != Socket::AF_INET6
      raise(
        SocketError,
        'Address family must be AF_UNIX, AF_INET, AF_INET6, PF_INET or PF_INET6'
      )
    end

    # Per MRI this validation should only happen when "sockaddr" is an Array.
    if Primitive.is_a?(sockaddr, Array)
      case @socktype
      when 0, nil
        if @protocol != 0 and !Primitive.nil?(@protocol) and @protocol != Socket::IPPROTO_UDP
          raise SocketError, 'Socket protocol must be IPPROTO_UDP or left unset'
        end
      when Socket::SOCK_RAW
        # nothing to do
      when Socket::SOCK_DGRAM
        if @protocol != Socket::IPPROTO_UDP and @protocol != 0
          raise SocketError, 'Socket protocol must be IPPROTO_UDP or left unset'
        end
      when Socket::SOCK_STREAM
        if @protocol != Socket::IPPROTO_TCP and @protocol != 0
          raise SocketError, 'Socket protocol must be IPPROTO_TCP or left unset'
        end
      # Based on MRI behaviour, though MRI itself doesn't seem to explicitly
      # handle this case (possibly handled by getaddrinfo()).
      when Socket::SOCK_SEQPACKET
        if @protocol != 0
          raise SocketError, 'SOCK_SEQPACKET can not be used with an explicit protocol'
        end
      else
        raise SocketError, 'Unsupported socket type'
      end
    end
  end

  def unix?
    @afamily == Socket::AF_UNIX
  end

  def ipv4?
    @afamily == Socket::AF_INET
  end

  def ipv6?
    @afamily == Socket::AF_INET6
  end

  def ip?
    ipv4? || ipv6?
  end

  # Raises SocketError for non-IP families, per MRI.
  def ip_address
    raise SocketError, 'need IPv4 or IPv6 address' unless ip?

    @ip_address
  end

  # Raises SocketError for non-IP families, per MRI.
  def ip_port
    raise SocketError, 'need IPv4 or IPv6 address' unless ip?

    @ip_port
  end

  def unix_path
    unless unix?
      raise SocketError, 'The address family must be AF_UNIX'
    end

    @unix_path
  end

  # Re-packs this address into a raw sockaddr String.
  def to_sockaddr
    if unix?
      Socket.sockaddr_un(@unix_path)
    else
      Socket.sockaddr_in(@ip_port.to_i, @ip_address.to_s)
    end
  end

  alias_method :to_s, :to_sockaddr

  def getnameinfo(flags = 0)
    Socket.getnameinfo(to_sockaddr, flags)
  end

  # Human-readable form of just the address portion, mirroring MRI's
  # formatting ("host:port", "[v6]:port", a UNIX path, or the family name).
  def inspect_sockaddr
    if ipv4?
      if ip_port and ip_port != 0
        "#{ip_address}:#{ip_port}"
      elsif ip_address
        ip_address.dup
      else
        'UNKNOWN'
      end
    elsif ipv6?
      if ip_port and ip_port != 0
        "[#{ip_address}]:#{ip_port}"
      else
        ip_address.dup
      end
    elsif unix?
      if unix_path.start_with?(File::SEPARATOR)
        unix_path.dup
      else
        "UNIX #{unix_path}"
      end
    else
      Truffle::Socket.address_family_name(afamily)
    end
  end

  def inspect
    if socktype and socktype != 0
      if ip?
        case socktype
        when Socket::SOCK_STREAM
          suffix = 'TCP'
        when Socket::SOCK_DGRAM
          suffix = 'UDP'
        else
          suffix = Truffle::Socket.socket_type_name(socktype)
        end
      else
        suffix = Truffle::Socket.socket_type_name(socktype)
      end

      "#<Addrinfo: #{inspect_sockaddr} #{suffix}>"
    else
      "#<Addrinfo: #{inspect_sockaddr}>"
    end
  end

  def ip_unpack
    unless ip?
      raise SocketError, 'An IPv4/IPv6 address is required'
    end

    [ip_address, ip_port]
  end

  # 127.0.0.0/8
  def ipv4_loopback?
    return false unless ipv4?

    Truffle::Socket::Foreign.inet_network(ip_address) & 0xff000000 == 0x7f000000
  end

  # 224.0.0.0/4
  def ipv4_multicast?
    return false unless ipv4?

    Truffle::Socket::Foreign.inet_network(ip_address) & 0xf0000000 == 0xe0000000
  end

  # RFC 1918 ranges: 10/8, 172.16/12, 192.168/16.
  def ipv4_private?
    return false unless ipv4?

    num = Truffle::Socket::Foreign.inet_network(ip_address)

    num & 0xff000000 == 0x0a000000 ||
      num & 0xfff00000 == 0xac100000 ||
      num & 0xffff0000 == 0xc0a80000
  end

  def ipv6_loopback?
    return false unless ipv6?

    Truffle::Socket::Foreign.ip_to_bytes(afamily, ip_address) ==
      Truffle::Socket::IPv6::LOOPBACK
  end

  # NOTE(review): link-local is fe80::/10, i.e. (bytes[1] & 0xc0) == 0x80;
  # the ">= 0x80" test here also matches fec0::/10 site-local addresses —
  # kept as-is to preserve existing behaviour, but worth confirming.
  def ipv6_linklocal?
    return false unless ipv6?

    bytes = Truffle::Socket::Foreign.ip_to_bytes(afamily, ip_address)

    bytes[0] == 0xfe && bytes[1] >= 0x80
  end

  def ipv6_multicast?
    return false unless ipv6?

    bytes = Truffle::Socket::Foreign.ip_to_bytes(afamily, ip_address)

    bytes[0] == 0xff
  end

  # NOTE(review): site-local is fec0::/10, i.e. (bytes[1] & 0xc0) == 0xc0;
  # the ">= 0xe0" test is narrower — kept as-is, verify against MRI.
  def ipv6_sitelocal?
    return false unless ipv6?

    bytes = Truffle::Socket::Foreign.ip_to_bytes(afamily, ip_address)

    bytes[0] == 0xfe && bytes[1] >= 0xe0
  end

  def ipv6_mc_global?
    ipv6_mc_flag?(0xe)
  end

  def ipv6_mc_linklocal?
    ipv6_mc_flag?(0x2)
  end

  def ipv6_mc_nodelocal?
    ipv6_mc_flag?(0x1)
  end

  def ipv6_mc_orglocal?
    ipv6_mc_flag?(0x8)
  end

  def ipv6_mc_sitelocal?
    ipv6_mc_flag?(0x5)
  end

  # True when the address is multicast (ff00::/8) and its scope nibble
  # (low 4 bits of the second byte) equals "value".
  def ipv6_mc_flag?(value)
    return false unless ipv6?

    bytes = Truffle::Socket::Foreign.ip_to_bytes(afamily, ip_address)

    bytes[0] == 0xff && bytes[1] & 0xf == value
  end
  private :ipv6_mc_flag?

  # Returns the embedded IPv4 address as a new Addrinfo, or nil when this
  # IPv6 address does not embed one.
  def ipv6_to_ipv4
    return unless ipv6?

    bytes = Truffle::Socket::Foreign.ip_to_bytes(afamily, ip_address)

    if Truffle::Socket::IPv6.ipv4_embedded?(bytes)
      Addrinfo.ip(bytes.last(4).join('.'))
    else
      nil
    end
  end

  def ipv6_unspecified?
    return false unless ipv6?

    bytes = Truffle::Socket::Foreign.ip_to_bytes(afamily, ip_address)

    bytes == Truffle::Socket::IPv6::UNSPECIFIED
  end

  def ipv6_v4compat?
    return false unless ipv6?

    bytes = Truffle::Socket::Foreign.ip_to_bytes(afamily, ip_address)

    Truffle::Socket::IPv6.ipv4_compatible?(bytes)
  end

  def ipv6_v4mapped?
    return false unless ipv6?

    bytes = Truffle::Socket::Foreign.ip_to_bytes(afamily, ip_address)

    Truffle::Socket::IPv6.ipv4_mapped?(bytes)
  end

  # fc00::/7 unique-local addresses.
  def ipv6_unique_local?
    return false unless ipv6?

    bytes = Truffle::Socket::Foreign.ip_to_bytes(afamily, ip_address)

    bytes[0] == 0xfc || bytes[0] == 0xfd
  end

  # Serializes to symbolic names so the dump is portable across platforms
  # with different numeric constants.
  def marshal_dump
    if unix?
      address = unix_path
    else
      address = [ip_address, ip_port.to_s]
    end

    if unix?
      protocol = 0
    else
      protocol = Truffle::Socket.protocol_name(self.protocol)
    end

    [
      Truffle::Socket.address_family_name(afamily),
      address,
      Truffle::Socket.protocol_family_name(pfamily),
      Truffle::Socket.socket_type_name(socktype),
      protocol,
      canonname
    ]
  end

  def marshal_load(array)
    afamily, address, pfamily, socktype, protocol, canonname = array

    @afamily  = Truffle::Socket.address_family(afamily)
    @pfamily  = Truffle::Socket.protocol_family(pfamily)
    @socktype = Truffle::Socket.socket_type(socktype)

    if protocol and protocol != 0
      @protocol = ::Socket.const_get(protocol)
    else
      @protocol = protocol
    end

    if unix?
      @unix_path = address
    else
      @ip_address = address[0]
      @ip_port    = address[1].to_i
      @canonname  = canonname
    end
  end
end
```
|
```objective-c
//
//
#pragma once
//
// Include a bundled header-only copy of fmtlib or an external one.
// By default spdlog include its own copy.
//
#include <spdlog/tweakme.h>
#if defined(SPDLOG_USE_STD_FORMAT) // SPDLOG_USE_STD_FORMAT is defined - use std::format
#include <format>
#elif !defined(SPDLOG_FMT_EXTERNAL)
#if !defined(SPDLOG_COMPILED_LIB) && !defined(FMT_HEADER_ONLY)
#define FMT_HEADER_ONLY
#endif
#ifndef FMT_USE_WINDOWS_H
#define FMT_USE_WINDOWS_H 0
#endif
#include <spdlog/fmt/bundled/core.h>
#include <spdlog/fmt/bundled/format.h>
#else // SPDLOG_FMT_EXTERNAL is defined - use external fmtlib
#include <fmt/core.h>
#include <fmt/format.h>
#endif
```
|
```rust
//! Tests for time resource instrumentation.
//!
//! These tests ensure that the instrumentation for tokio
//! synchronization primitives is correct.
use std::time::Duration;
use tracing_mock::{expect, subscriber};
#[tokio::test]
async fn test_sleep_creates_span() {
    // Resource span emitted when a Sleep is constructed.
    let sleep_span = expect::span()
        .named("runtime.resource")
        .with_target("tokio::time::sleep");

    // The requested 7ms duration is reported as 8ms — presumably the time
    // driver rounds deadlines up to the next tick; TODO confirm against
    // tokio's time-driver rounding behaviour.
    let state_update = expect::event()
        .with_target("runtime::resource::state_update")
        .with_fields(
            expect::field("duration")
                .with_value(&(7_u64 + 1))
                .and(expect::field("duration.op").with_value(&"override")),
        );

    let async_op_span = expect::span()
        .named("runtime.resource.async_op")
        .with_target("tokio::time::sleep");

    let async_op_poll_span = expect::span()
        .named("runtime.resource.async_op.poll")
        .with_target("tokio::time::sleep");

    // Expected sequence: the resource span is created and entered, the state
    // update fires, then the async_op span (tagged with its construction
    // site, Sleep::new_timeout) and its poll span are created under it.
    let (subscriber, handle) = subscriber::mock()
        .new_span(sleep_span.clone().with_explicit_parent(None))
        .enter(sleep_span.clone())
        .event(state_update)
        .new_span(
            async_op_span
                .clone()
                .with_contextual_parent(Some("runtime.resource"))
                .with_field(expect::field("source").with_value(&"Sleep::new_timeout")),
        )
        .exit(sleep_span.clone())
        .enter(async_op_span.clone())
        .new_span(
            async_op_poll_span
                .clone()
                .with_contextual_parent(Some("runtime.resource.async_op")),
        )
        .exit(async_op_span.clone())
        .drop_span(async_op_span)
        .drop_span(async_op_poll_span)
        .drop_span(sleep_span)
        .run_with_handle();

    {
        let _guard = tracing::subscriber::set_default(subscriber);

        // The future is constructed and immediately dropped (never awaited):
        // the instrumentation under test fires during construction.
        _ = tokio::time::sleep(Duration::from_millis(7));
    }

    handle.assert_finished();
}
```
|
```objective-c
/*************************************************
* Unicode Property Table handler *
*************************************************/
#ifndef _UCP_H
#define _UCP_H
/* This file contains definitions of the property values that are returned by
the UCD access macros. New values that are added for new releases of Unicode
should always be at the end of each enum, for backwards compatibility. */
/* These are the general character categories. */
#ifdef GLIB_COMPILATION
#include "gunicode.h"
#else
#include <glib.h>
#endif
/* General category groups. NOTE: these are plain sequential values (0..6),
 * unlike the following enums which alias glib constants; new entries must
 * be appended to preserve the existing values. */
enum {
  ucp_C,     /* Other */
  ucp_L,     /* Letter */
  ucp_M,     /* Mark */
  ucp_N,     /* Number */
  ucp_P,     /* Punctuation */
  ucp_S,     /* Symbol */
  ucp_Z      /* Separator */
};
/* These are the particular character types. Each value aliases the
 * corresponding GUnicodeType constant so PCRE's UCD categories can be
 * compared directly against glib's character classification. */
enum {
  ucp_Cc = G_UNICODE_CONTROL,                  /* Control */
  ucp_Cf = G_UNICODE_FORMAT,                   /* Format */
  ucp_Cn = G_UNICODE_UNASSIGNED,               /* Unassigned */
  ucp_Co = G_UNICODE_PRIVATE_USE,              /* Private use */
  ucp_Cs = G_UNICODE_SURROGATE,                /* Surrogate */
  ucp_Ll = G_UNICODE_LOWERCASE_LETTER,         /* Lower case letter */
  ucp_Lm = G_UNICODE_MODIFIER_LETTER,          /* Modifier letter */
  ucp_Lo = G_UNICODE_OTHER_LETTER,             /* Other letter */
  ucp_Lt = G_UNICODE_TITLECASE_LETTER,         /* Title case letter */
  ucp_Lu = G_UNICODE_UPPERCASE_LETTER,         /* Upper case letter */
  ucp_Mc = G_UNICODE_SPACING_MARK,             /* Spacing mark */
  ucp_Me = G_UNICODE_ENCLOSING_MARK,           /* Enclosing mark */
  ucp_Mn = G_UNICODE_NON_SPACING_MARK,         /* Non-spacing mark */
  ucp_Nd = G_UNICODE_DECIMAL_NUMBER,           /* Decimal number */
  ucp_Nl = G_UNICODE_LETTER_NUMBER,            /* Letter number */
  ucp_No = G_UNICODE_OTHER_NUMBER,             /* Other number */
  ucp_Pc = G_UNICODE_CONNECT_PUNCTUATION,      /* Connector punctuation */
  ucp_Pd = G_UNICODE_DASH_PUNCTUATION,         /* Dash punctuation */
  ucp_Pe = G_UNICODE_CLOSE_PUNCTUATION,        /* Close punctuation */
  ucp_Pf = G_UNICODE_FINAL_PUNCTUATION,        /* Final punctuation */
  ucp_Pi = G_UNICODE_INITIAL_PUNCTUATION,      /* Initial punctuation */
  ucp_Po = G_UNICODE_OTHER_PUNCTUATION,        /* Other punctuation */
  ucp_Ps = G_UNICODE_OPEN_PUNCTUATION,         /* Open punctuation */
  ucp_Sc = G_UNICODE_CURRENCY_SYMBOL,          /* Currency symbol */
  ucp_Sk = G_UNICODE_MODIFIER_SYMBOL,          /* Modifier symbol */
  ucp_Sm = G_UNICODE_MATH_SYMBOL,              /* Mathematical symbol */
  ucp_So = G_UNICODE_OTHER_SYMBOL,             /* Other symbol */
  ucp_Zl = G_UNICODE_LINE_SEPARATOR,           /* Line separator */
  ucp_Zp = G_UNICODE_PARAGRAPH_SEPARATOR,      /* Paragraph separator */
  ucp_Zs = G_UNICODE_SPACE_SEPARATOR           /* Space separator */
};
/* These are the script identifications, aliased to glib's GUnicodeScript
 * constants. Per the file-top comment, new scripts for each Unicode release
 * are appended at the end to keep existing values stable. */
enum {
  ucp_Common = G_UNICODE_SCRIPT_COMMON,
  ucp_Inherited = G_UNICODE_SCRIPT_INHERITED,
  ucp_Arabic = G_UNICODE_SCRIPT_ARABIC,
  ucp_Armenian = G_UNICODE_SCRIPT_ARMENIAN,
  ucp_Bengali = G_UNICODE_SCRIPT_BENGALI,
  ucp_Bopomofo = G_UNICODE_SCRIPT_BOPOMOFO,
  ucp_Braille = G_UNICODE_SCRIPT_BRAILLE,
  ucp_Buginese = G_UNICODE_SCRIPT_BUGINESE,
  ucp_Buhid = G_UNICODE_SCRIPT_BUHID,
  ucp_Canadian_Aboriginal = G_UNICODE_SCRIPT_CANADIAN_ABORIGINAL,
  ucp_Cherokee = G_UNICODE_SCRIPT_CHEROKEE,
  ucp_Coptic = G_UNICODE_SCRIPT_COPTIC,
  ucp_Cypriot = G_UNICODE_SCRIPT_CYPRIOT,
  ucp_Cyrillic = G_UNICODE_SCRIPT_CYRILLIC,
  ucp_Deseret = G_UNICODE_SCRIPT_DESERET,
  ucp_Devanagari = G_UNICODE_SCRIPT_DEVANAGARI,
  ucp_Ethiopic = G_UNICODE_SCRIPT_ETHIOPIC,
  ucp_Georgian = G_UNICODE_SCRIPT_GEORGIAN,
  ucp_Glagolitic = G_UNICODE_SCRIPT_GLAGOLITIC,
  ucp_Gothic = G_UNICODE_SCRIPT_GOTHIC,
  ucp_Greek = G_UNICODE_SCRIPT_GREEK,
  ucp_Gujarati = G_UNICODE_SCRIPT_GUJARATI,
  ucp_Gurmukhi = G_UNICODE_SCRIPT_GURMUKHI,
  ucp_Han = G_UNICODE_SCRIPT_HAN,
  ucp_Hangul = G_UNICODE_SCRIPT_HANGUL,
  ucp_Hanunoo = G_UNICODE_SCRIPT_HANUNOO,
  ucp_Hebrew = G_UNICODE_SCRIPT_HEBREW,
  ucp_Hiragana = G_UNICODE_SCRIPT_HIRAGANA,
  ucp_Kannada = G_UNICODE_SCRIPT_KANNADA,
  ucp_Katakana = G_UNICODE_SCRIPT_KATAKANA,
  ucp_Kharoshthi = G_UNICODE_SCRIPT_KHAROSHTHI,
  ucp_Khmer = G_UNICODE_SCRIPT_KHMER,
  ucp_Lao = G_UNICODE_SCRIPT_LAO,
  ucp_Latin = G_UNICODE_SCRIPT_LATIN,
  ucp_Limbu = G_UNICODE_SCRIPT_LIMBU,
  ucp_Linear_B = G_UNICODE_SCRIPT_LINEAR_B,
  ucp_Malayalam = G_UNICODE_SCRIPT_MALAYALAM,
  ucp_Mongolian = G_UNICODE_SCRIPT_MONGOLIAN,
  ucp_Myanmar = G_UNICODE_SCRIPT_MYANMAR,
  ucp_New_Tai_Lue = G_UNICODE_SCRIPT_NEW_TAI_LUE,
  ucp_Ogham = G_UNICODE_SCRIPT_OGHAM,
  ucp_Old_Italic = G_UNICODE_SCRIPT_OLD_ITALIC,
  ucp_Old_Persian = G_UNICODE_SCRIPT_OLD_PERSIAN,
  ucp_Oriya = G_UNICODE_SCRIPT_ORIYA,
  ucp_Osmanya = G_UNICODE_SCRIPT_OSMANYA,
  ucp_Runic = G_UNICODE_SCRIPT_RUNIC,
  ucp_Shavian = G_UNICODE_SCRIPT_SHAVIAN,
  ucp_Sinhala = G_UNICODE_SCRIPT_SINHALA,
  ucp_Syloti_Nagri = G_UNICODE_SCRIPT_SYLOTI_NAGRI,
  ucp_Syriac = G_UNICODE_SCRIPT_SYRIAC,
  ucp_Tagalog = G_UNICODE_SCRIPT_TAGALOG,
  ucp_Tagbanwa = G_UNICODE_SCRIPT_TAGBANWA,
  ucp_Tai_Le = G_UNICODE_SCRIPT_TAI_LE,
  ucp_Tamil = G_UNICODE_SCRIPT_TAMIL,
  ucp_Telugu = G_UNICODE_SCRIPT_TELUGU,
  ucp_Thaana = G_UNICODE_SCRIPT_THAANA,
  ucp_Thai = G_UNICODE_SCRIPT_THAI,
  ucp_Tibetan = G_UNICODE_SCRIPT_TIBETAN,
  ucp_Tifinagh = G_UNICODE_SCRIPT_TIFINAGH,
  ucp_Ugaritic = G_UNICODE_SCRIPT_UGARITIC,
  ucp_Yi = G_UNICODE_SCRIPT_YI,
  /* New for Unicode 5.0: */
  ucp_Balinese = G_UNICODE_SCRIPT_BALINESE,
  ucp_Cuneiform = G_UNICODE_SCRIPT_CUNEIFORM,
  ucp_Nko = G_UNICODE_SCRIPT_NKO,
  ucp_Phags_Pa = G_UNICODE_SCRIPT_PHAGS_PA,
  ucp_Phoenician = G_UNICODE_SCRIPT_PHOENICIAN,
  /* New for Unicode 5.1: */
  ucp_Carian = G_UNICODE_SCRIPT_CARIAN,
  ucp_Cham = G_UNICODE_SCRIPT_CHAM,
  ucp_Kayah_Li = G_UNICODE_SCRIPT_KAYAH_LI,
  ucp_Lepcha = G_UNICODE_SCRIPT_LEPCHA,
  ucp_Lycian = G_UNICODE_SCRIPT_LYCIAN,
  ucp_Lydian = G_UNICODE_SCRIPT_LYDIAN,
  ucp_Ol_Chiki = G_UNICODE_SCRIPT_OL_CHIKI,
  ucp_Rejang = G_UNICODE_SCRIPT_REJANG,
  ucp_Saurashtra = G_UNICODE_SCRIPT_SAURASHTRA,
  ucp_Sundanese = G_UNICODE_SCRIPT_SUNDANESE,
  ucp_Vai = G_UNICODE_SCRIPT_VAI,
  /* New for Unicode 5.2: */
  ucp_Avestan = G_UNICODE_SCRIPT_AVESTAN,
  ucp_Bamum = G_UNICODE_SCRIPT_BAMUM,
  ucp_Egyptian_Hieroglyphs = G_UNICODE_SCRIPT_EGYPTIAN_HIEROGLYPHS,
  ucp_Imperial_Aramaic = G_UNICODE_SCRIPT_IMPERIAL_ARAMAIC,
  ucp_Inscriptional_Pahlavi = G_UNICODE_SCRIPT_INSCRIPTIONAL_PAHLAVI,
  ucp_Inscriptional_Parthian = G_UNICODE_SCRIPT_INSCRIPTIONAL_PARTHIAN,
  ucp_Javanese = G_UNICODE_SCRIPT_JAVANESE,
  ucp_Kaithi = G_UNICODE_SCRIPT_KAITHI,
  ucp_Lisu = G_UNICODE_SCRIPT_LISU,
  ucp_Meetei_Mayek = G_UNICODE_SCRIPT_MEETEI_MAYEK,
  ucp_Old_South_Arabian = G_UNICODE_SCRIPT_OLD_SOUTH_ARABIAN,
  ucp_Old_Turkic = G_UNICODE_SCRIPT_OLD_TURKIC,
  ucp_Samaritan = G_UNICODE_SCRIPT_SAMARITAN,
  ucp_Tai_Tham = G_UNICODE_SCRIPT_TAI_THAM,
  ucp_Tai_Viet = G_UNICODE_SCRIPT_TAI_VIET,
  /* New for Unicode 6.0.0: */
  ucp_Batak = G_UNICODE_SCRIPT_BATAK,
  ucp_Brahmi = G_UNICODE_SCRIPT_BRAHMI,
  ucp_Mandaic = G_UNICODE_SCRIPT_MANDAIC,
  /* New for Unicode 6.1.0: */
  ucp_Chakma = G_UNICODE_SCRIPT_CHAKMA,
  ucp_Meroitic_Cursive = G_UNICODE_SCRIPT_MEROITIC_CURSIVE,
  ucp_Meroitic_Hieroglyphs = G_UNICODE_SCRIPT_MEROITIC_HIEROGLYPHS,
  ucp_Miao = G_UNICODE_SCRIPT_MIAO,
  ucp_Sharada = G_UNICODE_SCRIPT_SHARADA,
  ucp_Sora_Sompeng = G_UNICODE_SCRIPT_SORA_SOMPENG,
  /* Fix: dropped the trailing comma after the final enumerator — it is a
     constraint violation in C90 and was inconsistent with the other enums
     in this header. */
  ucp_Takri = G_UNICODE_SCRIPT_TAKRI
};
#endif
/* End of ucp.h */
```
|
```objective-c
/* $OpenBSD: fdpass.h,v 1.4 2021/11/01 14:43:25 ratchov Exp $ */
/*
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#ifndef FDPASS_H
#define FDPASS_H
struct fileops;
struct fdpass *fdpass_new(int sock, struct fileops *ops);
void fdpass_close(struct fdpass *f);
extern struct fileops worker_fileops, helper_fileops;
extern struct fdpass *fdpass_peer;
struct sio_hdl *fdpass_sio_open(int, unsigned int);
struct mio_hdl *fdpass_mio_open(int, unsigned int);
struct sioctl_hdl *fdpass_sioctl_open(int, unsigned int);
#endif /* !defined(FDPASS_H) */
```
|
```go
//
//
// path_to_url
//
// Unless required by applicable law or agreed to in writing, software
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
package calc
import (
"fmt"
"sync"
v3 "github.com/projectcalico/calico/libcalico-go/lib/apis/v3"
"github.com/projectcalico/calico/libcalico-go/lib/backend/api"
"github.com/projectcalico/calico/libcalico-go/lib/backend/model"
)
// NewNodeCounter returns a NodeCounter that tracks Node resources flowing
// through the syncer pipeline and forwards all callbacks to sink.
func NewNodeCounter(sink api.SyncerCallbacks) *NodeCounter {
	return &NodeCounter{
		sink:    sink,
		nodeMap: map[string]bool{},
	}
}
// NodeCounter is a pass-through syncer callback decorator that maintains a
// count of known Node resources.
type NodeCounter struct {
	// NOTE(review): embedding sync.Mutex exports Lock/Unlock on the type;
	// a named unexported field would hide them — confirm whether external
	// callers rely on the exported methods before changing.
	sync.Mutex
	sink    api.SyncerCallbacks // downstream receiver of all callbacks
	inSync  bool                // set once the datastore reports InSync
	nodeMap map[string]bool     // set of node names currently known
}
// OnStatusUpdated records when the datastore reaches the in-sync state and
// forwards the status to the downstream sink.
func (c *NodeCounter) OnStatusUpdated(status api.SyncStatus) {
	if status == api.InSync {
		c.Lock()
		c.inSync = true
		c.Unlock()
	}
	// Forward after updating local state so the sink observes a counter
	// that is already marked in-sync.
	c.sink.OnStatusUpdated(status)
}
// OnUpdates tracks Node resource additions and deletions, then forwards the
// full batch of updates to the downstream sink unchanged.
func (c *NodeCounter) OnUpdates(updates []api.Update) {
	for _, update := range updates {
		key, ok := update.Key.(model.ResourceKey)
		if !ok || key.Kind != v3.KindNode {
			continue
		}
		switch update.UpdateType {
		case api.UpdateTypeKVNew:
			c.setNode(key.Name)
		case api.UpdateTypeKVDeleted:
			c.deleteNode(key.Name)
		}
	}
	c.sink.OnUpdates(updates)
}
// GetNumNodes returns the number of known nodes, or an error while the
// counter has not yet seen the datastore reach the in-sync state.
func (c *NodeCounter) GetNumNodes() (int, error) {
	c.Lock()
	defer c.Unlock()
	if c.inSync {
		return len(c.nodeMap), nil
	}
	return 0, fmt.Errorf("Node counter not yet in sync")
}
// setNode records that the named node exists; idempotent.
func (c *NodeCounter) setNode(name string) {
	c.Lock()
	c.nodeMap[name] = true
	c.Unlock()
}
// deleteNode forgets the named node; a no-op when it was never recorded.
func (c *NodeCounter) deleteNode(name string) {
	c.Lock()
	delete(c.nodeMap, name)
	c.Unlock()
}
```
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.