text
stringlengths 2
14k
| meta
dict |
|---|---|
require 'rails_helper'

feature "Registration", :type => :feature do
  # Signs up via the registration form and, when Devise's :confirmable
  # module is enabled, follows the confirmation link from the delivered
  # email.  Extracted because two scenarios below need an identical,
  # fully-confirmed account before exercising their real behaviour.
  def sign_up(email: "test@example.com", password: "123456789")
    visit new_user_registration_path
    within "#new_user" do
      fill_in "user_email", with: email
      fill_in "user_password", with: password
      fill_in "user_password_confirmation", with: password
    end
    click_button "Sign up"
    if User.devise_modules.include? :confirmable
      expect( page.body ).to include( 'A message with a confirmation link has been sent to your email address.' )
      body = ActionMailer::Base.deliveries.last.body
      md = body.encoded.match /(\/users\/confirmation.*) /
      # RSpec expectation (not Test::Unit's assert) so the failure is
      # reported through the spec runner with a helpful message.
      expect( md ).not_to be_nil, "Confirmation URL not found in message"
      visit md[1]
      expect( page.body ).to include( "Your email address has been successfully confirmed." )
    else
      expect( page.body ).to include( "Welcome! You have signed up successfully." )
    end
  end

  it "should let you create a new user" do
    sign_up
    click_link "Profile"
  end

  it "should require a user to have an email address" do
    visit new_user_registration_path
    within "#new_user" do
      # Deliberately leave the email blank.
      fill_in "user_password", with: "123456789"
      fill_in "user_password_confirmation", with: "123456789"
    end
    click_button "Sign up"
    expect( page.body ).to_not include( "Welcome! You have signed up successfully." )
  end

  it "should let a user change their password if they enter in their existing password" do
    sign_up
    click_link "Profile"
    # Without the current password the update must be rejected...
    within "#edit_user" do
      fill_in "user_password", with: "012345678"
      fill_in "user_password_confirmation", with: "012345678"
    end
    click_button "Update"
    expect( page.body ).to include( "we need your current password to confirm your changes" )
    # ...and with the current password supplied it must succeed.
    within "#edit_user" do
      fill_in "user_password", with: "012345678"
      fill_in "user_password_confirmation", with: "012345678"
      fill_in "user_current_password", with: "123456789"
    end
    click_button "Update"
    expect( page.body ).to include( "Your account has been updated successfully." )
  end

  it "following a forgot password link should let you reset your password and log in" do
    user = create :user
    visit new_user_password_path
    within "#new_user" do
      fill_in "user_email", with: user.email
    end
    click_button "Send me reset password instructions"
    expect( page.body ).to include( "You will receive an email with instructions on how to reset your password in a few minutes." )
    body = ActionMailer::Base.deliveries.last.body
    md = body.encoded.match /(\/users\/password\/edit\?reset.*)/
    expect( md ).not_to be_nil, "Reset password URL not found in message"
    visit md[1]
    within "#new_user" do
      fill_in "user_password", with: "new_password"
      fill_in "user_password_confirmation", with: "new_password"
    end
    click_button "Change my password"
    expect( page.body ).to_not include( "Email can't be blank" )
    visit edit_user_registration_path
    expect( page.body ).to include( "Sign Out")
    click_link "Sign Out"
    expect( page.body ).to include( "Signed out successfully." )
    visit new_user_session_path
    within "#new_user" do
      fill_in "user_email", with: user.email
      fill_in "user_password", with: "new_password"
    end
    click_button "Log in"
    expect( page.body ).to include( "Signed in successfully.")
  end
end
|
{
"pile_set_name": "Github"
}
|
import React from 'react';
import { CollapsibleSection, HealthCounts } from '@spinnaker/core';
import { IAmazonServerGroupDetailsSectionProps } from './IAmazonServerGroupDetailsSectionProps';
export class HealthDetailsSection extends React.Component<IAmazonServerGroupDetailsSectionProps> {
  public render(): JSX.Element {
    const { instanceCounts } = this.props.serverGroup;

    // Hide the section entirely when the server group has no instances.
    if (!(instanceCounts.total > 0)) {
      return null;
    }

    return (
      <CollapsibleSection heading="Health" defaultExpanded={true}>
        <dl className="dl-horizontal dl-narrow">
          <dt>Instances</dt>
          <dd>
            <HealthCounts container={instanceCounts} className="pull-left" />
          </dd>
        </dl>
      </CollapsibleSection>
    );
  }
}
|
{
"pile_set_name": "Github"
}
|
<?php
/**
 * This script alters the session variable 'tree', expanding it
 * at the dn specified in the query string.
 *
 * Note: this script is equal and opposite to collapse.php
 *
 * @package phpLDAPadmin
 * @subpackage Tree
 * @see collapse.php
 */

/**
 */

require './common.php';

// DN whose tree node should be expanded (required GET parameter).
$dn = get_request('dn','GET',true);

// Fetch the cached tree for the current server, mark the requested entry
// as open, and write the modified tree back into the cache.
$tree = get_cached_item($app['server']->getIndex(),'tree');
$entry = $tree->getEntry($dn);
$entry->open();
set_cached_item($app['server']->getIndex(),'tree','null',$tree);

// Redirect back to the tree view; the 'junk' parameter defeats browser
// caching and the fragment jumps to the expanded entry.
// NOTE(review): random_junk() and htmlid() are presumably defined in
// common.php — confirm before refactoring.
header(sprintf('Location:index.php?server_id=%s&junk=%s#%s',
    $app['server']->getIndex(),random_junk(),htmlid($app['server']->getIndex(),$dn)));
die();
?>
|
{
"pile_set_name": "Github"
}
|
/*---------------------------------------------------------------------------*\
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration | Website: https://openfoam.org
\\ / A nd | Copyright (C) 2011-2020 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
OpenFOAM is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with OpenFOAM. If not, see <http://www.gnu.org/licenses/>.
\*---------------------------------------------------------------------------*/
#include "cyclicPolyPatch.H"
#include "addToRunTimeSelectionTable.H"
#include "polyBoundaryMesh.H"
#include "polyMesh.H"
#include "demandDrivenData.H"
#include "OFstream.H"
#include "matchPoints.H"
#include "EdgeMap.H"
#include "Time.H"
#include "transformField.H"
#include "SubField.H"
#include "unitConversion.H"
// * * * * * * * * * * * * * * Static Data Members * * * * * * * * * * * * * //
namespace Foam
{
    // Run-time type information and selection-table registration so a
    // cyclicPolyPatch can be constructed by type name from a word or
    // a dictionary.
    defineTypeNameAndDebug(cyclicPolyPatch, 0);
    addToRunTimeSelectionTable(polyPatch, cyclicPolyPatch, word);
    addToRunTimeSelectionTable(polyPatch, cyclicPolyPatch, dictionary);
}
// * * * * * * * * * * * * Protected Member Functions * * * * * * * * * * * //
void Foam::cyclicPolyPatch::initCalcGeometry(PstreamBuffers& pBufs)
{
    // Nothing cyclic-specific to initialise; defer to the base patch.
    polyPatch::initCalcGeometry(pBufs);
}
void Foam::cyclicPolyPatch::initCalcGeometry
(
    const primitivePatch& referPatch,
    pointField& nbrCtrs,
    vectorField& nbrAreas,
    pointField& nbrCc
)
{
    // Intentionally empty: this overload does not exchange neighbour
    // geometry for cyclic patches.
}
void Foam::cyclicPolyPatch::calcGeometry(PstreamBuffers& pBufs)
{
    // Recompute the cyclic transformation from the current geometry of
    // this patch and of its neighbour (both patches are valid by the time
    // calcGeometry is called).
    static_cast<cyclicTransform&>(*this) =
        cyclicTransform
        (
            name(),
            faceCentres(),
            faceAreas(),
            *this,
            nbrPatchName(),
            nbrPatch().faceCentres(),
            nbrPatch().faceAreas(),
            nbrPatch(),
            matchTolerance()
        );
}
void Foam::cyclicPolyPatch::initMovePoints
(
    PstreamBuffers& pBufs,
    const pointField& p
)
{
    // No cyclic-specific motion preparation; defer to the base patch.
    polyPatch::initMovePoints(pBufs, p);
}
void Foam::cyclicPolyPatch::movePoints
(
    PstreamBuffers& pBufs,
    const pointField& p
)
{
    // No cyclic-specific motion handling; defer to the base patch.
    polyPatch::movePoints(pBufs, p);
}
void Foam::cyclicPolyPatch::initUpdateMesh(PstreamBuffers& pBufs)
{
    // Nothing cyclic-specific to prepare; defer to the base patch.
    polyPatch::initUpdateMesh(pBufs);
}
void Foam::cyclicPolyPatch::updateMesh(PstreamBuffers& pBufs)
{
    polyPatch::updateMesh(pBufs);

    // Topology may have changed: invalidate the cached point and edge
    // couplings so they are rebuilt on demand.
    deleteDemandDrivenData(coupledPointsPtr_);
    deleteDemandDrivenData(coupledEdgesPtr_);
}
// * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * * //
// Construct from components, without a neighbour patch name (nbrPatchName_
// left null, nbrPatchID_ unresolved).
Foam::cyclicPolyPatch::cyclicPolyPatch
(
    const word& name,
    const label size,
    const label start,
    const label index,
    const polyBoundaryMesh& bm,
    const word& patchType
)
:
    coupledPolyPatch(name, size, start, index, bm, patchType),
    cyclicTransform(false),
    nbrPatchName_(word::null),
    nbrPatchID_(-1),
    coupledPointsPtr_(nullptr),
    coupledEdgesPtr_(nullptr),
    ownToNbrOrderDataPtr_(nullptr),
    ownToNbrCyclicOrderDataPtr_(nullptr),
    ownToNbrDebugOrderDataPtr_(nullptr)
{
    // Neighbour patch might not be valid yet so no transformation
    // calculation possible.
}
// Construct from components with an explicit neighbour patch name.
Foam::cyclicPolyPatch::cyclicPolyPatch
(
    const word& name,
    const label size,
    const label start,
    const label index,
    const polyBoundaryMesh& bm,
    const word& patchType,
    const word& nbrPatchName
)
:
    coupledPolyPatch(name, size, start, index, bm, patchType),
    cyclicTransform(false),
    nbrPatchName_(nbrPatchName),
    nbrPatchID_(-1),
    coupledPointsPtr_(nullptr),
    coupledEdgesPtr_(nullptr),
    ownToNbrOrderDataPtr_(nullptr),
    ownToNbrCyclicOrderDataPtr_(nullptr),
    ownToNbrDebugOrderDataPtr_(nullptr)
{
    // Neighbour patch might not be valid yet so no transformation
    // calculation possible.
}
// Construct from a dictionary, reading the optional "neighbourPatch" entry
// and the couple group; validates that the patch can be paired.
Foam::cyclicPolyPatch::cyclicPolyPatch
(
    const word& name,
    const dictionary& dict,
    const label index,
    const polyBoundaryMesh& bm,
    const word& patchType
)
:
    coupledPolyPatch(name, dict, index, bm, patchType),
    cyclicTransform(dict, false),
    nbrPatchName_(dict.lookupOrDefault("neighbourPatch", word::null)),
    coupleGroup_(dict),
    nbrPatchID_(-1),
    coupledPointsPtr_(nullptr),
    coupledEdgesPtr_(nullptr),
    ownToNbrOrderDataPtr_(nullptr),
    ownToNbrCyclicOrderDataPtr_(nullptr),
    ownToNbrDebugOrderDataPtr_(nullptr)
{
    // A cyclic patch must be pairable: either an explicit neighbourPatch
    // entry or membership of a valid coupleGroup is required.
    if (nbrPatchName_ == word::null && !coupleGroup_.valid())
    {
        FatalIOErrorInFunction
        (
            dict
        ) << "No \"neighbourPatch\" provided." << endl
            << exit(FatalIOError);
    }

    // A patch cannot be coupled to itself.
    if (nbrPatchName_ == name)
    {
        FatalIOErrorInFunction(dict)
            << "Neighbour patch name " << nbrPatchName_
            << " cannot be the same as this patch " << name
            << exit(FatalIOError);
    }

    // Neighbour patch might not be valid yet so no transformation
    // calculation possible.
}
// Copy construct, resetting the boundary mesh reference.  Cached coupling
// data is not copied; it is rebuilt on demand.
Foam::cyclicPolyPatch::cyclicPolyPatch
(
    const cyclicPolyPatch& pp,
    const polyBoundaryMesh& bm
)
:
    coupledPolyPatch(pp, bm),
    cyclicTransform(pp),
    nbrPatchName_(pp.nbrPatchName_),
    coupleGroup_(pp.coupleGroup_),
    nbrPatchID_(-1),
    coupledPointsPtr_(nullptr),
    coupledEdgesPtr_(nullptr),
    ownToNbrOrderDataPtr_(nullptr),
    ownToNbrCyclicOrderDataPtr_(nullptr),
    ownToNbrDebugOrderDataPtr_(nullptr)
{
    // Neighbour patch might not be valid yet so no transformation
    // calculation possible.
}
Foam::cyclicPolyPatch::cyclicPolyPatch
(
const cyclicPolyPatch& pp,
const polyBoundaryMesh& bm,
const label index,
const label newSize,
const label newStart,
const word& neiName
)
:
coupledPolyPatch(pp, bm, index, newSize, newStart),
cyclicTransform(pp),
nbrPatchName_(neiName),
coupleGroup_(pp.coupleGroup_),
nbrPatchID_(-1),
coupledPointsPtr_(nullptr),
coupledEdgesPtr_(nullptr),
ownToNbrOrderDataPtr_(nullptr),
ownToNbrCycl
|
{
"pile_set_name": "Github"
}
|
To customize the config traders, copy this CfgServerTrader folder into your mission folder.
Then in description.ext replace this line:
#include "\z\addons\dayz_code\Configs\CfgServerTrader\CfgServerTrader.hpp"
with this:
#include "CfgServerTrader\CfgServerTrader.hpp"
|
{
"pile_set_name": "Github"
}
|
/*
* Copyright (C) 2018 Orange.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
* limitations under the License.
*
*/
/* Parsing OpenVswitch syntax for rules is inherently hard. The main reason
* is the lack of clear lexical class for a few characters notably colon that
* is both an important action token and a character in IPv6 and mac addresses.
*
* We have decided to follow a two phase approach. First we split the text in
* main components so that filters and individual actions are recognized,
* then we split the elementary filters and actions in simpler components. For
* filters it only means checking if there is a mask. For actions, parsing may
* be more involved.
*/
package jsonof
import (
"bufio"
"bytes"
"encoding/json"
"errors"
"fmt"
"io"
"strconv"
"strings"
"github.com/skydive-project/skydive/graffiti/logging"
)
// JSONRule is an openflow rule ready for JSON export
type JSONRule struct {
Cookie uint64 `json:"Cookie"` // cookie value of the rule
Table int `json:"Table"` // table containing the rule
Priority int `json:"Priority"` // priority of rule
Meta []*Meta `json:"Meta,omitempty"` // anything that is not a filter.
Filters []*Filter `json:"Filters"` // all the filter
Actions []*Action `json:"Actions"` // all the actions
UUID string `json:"-"` // UUID used by skydive
RawFilter string `json:"-"` // Kept to be respawned
}
// JSONGroup is an openflow group ready for JSON export
type JSONGroup struct {
GroupID uint `json:"GroupId"` // id of the group
Type string `json:"Type"` // group type
Meta []*Meta `json:"Meta,omitempty"` // anything that is not a bucket
Buckets []*Bucket `json:"Buckets"` // buckets
UUID string `json:"-"` // UUID used by skydive
}
// Bucket is the representation of a bucket in an openflow group
type Bucket struct {
ID uint `json:"Id"` // id of bucket
Meta []*Meta `json:"Meta,omitempty"` // anything that is not an action
Actions []*Action `json:"Actions"` // action list
}
// Action represents an atomic action in an openflow rule
type Action struct {
Action string `json:"Function"` // Action name
Arguments []*Action `json:"Arguments,omitempty"` // Arguments if it exists
Key string `json:"Key,omitempty"` // Key for aguments such as k=v
}
// Filter is an elementary filter in an openflow rule
type Filter struct {
Key string `json:"Key"` // left hand side
Value string `json:"Value"` // right hand side
Mask string `json:"Mask,omitempty"` // mask if used
}
// Meta is anything not a filter or an action always as a pair key/value
type Meta struct {
Key string `json:"Key"` // key
Value string `json:"Value"` // raw value
}
// Token is a lexical entity
type Token int
const (
// Token values as recognized by scan
tNt Token = iota
tEOF
tText
tSpace
tComma
tEqual
tClosePar
)
const (
kwActions = "actions"
kwBucket = "bucket"
kwBucketID = "bucket_id"
kwCookie = "cookie"
kwGroupID = "group_id"
kwLoad = "load"
kwMove = "move"
kwPriority = "priority"
kwSetField = "set_field"
kwTable = "table"
kwType = "type"
)
// TokenNames is the array of printable names for Token.
var TokenNames = []string{
"NT",
"EOF",
"TEXT",
"SPACE",
"COMMA",
"EQUAL",
"CPAR",
}
var eof = rune(0)
// Stream represents a text buffer that can be scanned
type Stream struct {
r *bufio.Reader
last rune
token Token
value string
}
// NewStream returns a new instance of Stream wrapping r, with no
// pushed-back rune (last == eof) and no pushed-back token (token == tNt).
func NewStream(r io.Reader) *Stream {
	s := &Stream{
		r:     bufio.NewReader(r),
		last:  eof,
		token: tNt,
	}
	return s
}
// isWhitespace reports whether ch is one of the separators recognised by
// the scanner: space, tab or newline.
func isWhitespace(ch rune) bool {
	switch ch {
	case ' ', '\t', '\n':
		return true
	default:
		return false
	}
}
// read returns the next rune from the input, preferring a rune previously
// pushed back via unread.  Returns eof (rune(0)) on any read error,
// including io.EOF.
func (s *Stream) read() rune {
	if s.last != eof {
		pushed := s.last
		s.last = eof
		return pushed
	}
	next, _, err := s.r.ReadRune()
	if err != nil {
		return eof
	}
	return next
}
// unread places the previously read rune back on the reader.
// Only a single rune of push-back is buffered (in s.last).
func (s *Stream) unread(r rune) { s.last = r }
// unscan puts back the previously read token; the next call to scan will
// return tok/lit instead of reading new input.  Only one token of
// push-back is buffered.
func (s *Stream) unscan(tok Token, lit string) {
	s.token = tok
	s.value = lit
}
// scan returns the next token and literal value.
// nolint: gocyclo
func (s *Stream) scan() (tok Token, lit string) {
	// Serve a token pushed back by unscan, if any.
	if s.token != tNt {
		tok := s.token
		s.token = tNt
		return tok, s.value
	}
	// Read the next rune.
	ch := s.read()
	// If we see whitespace then consume all contiguous whitespace.
	// If we see a letter then consume as an ident or reserved word.
	switch ch {
	case eof:
		return tEOF, ""
	case ' ', '\t', '\n':
		// Collapse a run of whitespace into a single tSpace token.
		for {
			ch = s.read()
			if ch == eof {
				break
			} else if !isWhitespace(ch) {
				s.unread(ch)
				break
			}
		}
		return tSpace, ""
	case '=':
		return tEqual, ""
	case ',':
		return tComma, ""
	case ')':
		return tClosePar, ""
	default:
		// Anything else starts a text token.  An opening parenthesis
		// switches fill into bracket-matching mode so nested arguments
		// are kept together in one token.
		var buf bytes.Buffer
		buf.WriteRune(ch)
		if ch == '(' {
			s.fill(&buf, 1)
		} else {
			s.fill(&buf, 0)
		}
		return tText, buf.String()
	}
}
// fill accumulates the runes of a text token into buf.  depth counts the
// unmatched '(' seen so far: inside parentheses, separators (whitespace,
// ',' and '=') are ordinary characters; at depth zero they terminate the
// token and are pushed back, as does an unmatched ')'.
func (s *Stream) fill(buf *bytes.Buffer, depth int) {
scanLoop:
	for {
		ch := s.read()
		switch ch {
		case eof:
			break scanLoop
		case ' ', '\t', '\n', ',', '=':
			if depth == 0 {
				s.unread(ch)
				break scanLoop
			}
		case '(':
			depth++
		case ')':
			if depth == 0 {
				s.unread(ch)
				break scanLoop
			}
			depth--
		}
		if _, err := buf.WriteRune(ch); err != nil {
			logging.GetLogger().Errorf(
				"jsonof: fill cannot write into buffer: %s", err)
		}
	}
}
// ParseRule
|
{
"pile_set_name": "Github"
}
|
package org.opencv.core;
import java.util.Arrays;
import java.util.List;
/**
 * A Mat specialisation holding a column vector of 64-bit floats
 * (1-channel CV_64F), with conversions to and from Java arrays and lists.
 */
public class MatOfDouble extends Mat {
    // 64FC(x)
    private static final int _depth = CvType.CV_64F;
    private static final int _channels = 1;

    /** Creates an empty vector. */
    public MatOfDouble() {
        super();
    }

    /**
     * Wraps an existing native Mat; rejects non-empty mats that are not a
     * compatible 1-channel CV_64F vector.
     */
    protected MatOfDouble(long addr) {
        super(addr);
        if (!empty() && checkVector(_channels, _depth) < 0)
            throw new IllegalArgumentException("Incompatible Mat");
        //FIXME: do we need release() here?
    }

    /** Factory wrapper around the native-address constructor. */
    public static MatOfDouble fromNativeAddr(long addr) {
        return new MatOfDouble(addr);
    }

    /** Wraps the full range of an existing Mat, validating compatibility. */
    public MatOfDouble(Mat m) {
        super(m, Range.all());
        if (!empty() && checkVector(_channels, _depth) < 0)
            throw new IllegalArgumentException("Incompatible Mat");
        //FIXME: do we need release() here?
    }

    /** Builds a vector from the given values. */
    public MatOfDouble(double... values) {
        super();
        fromArray(values);
    }

    /** Allocates storage for elemNumber elements (no-op when not positive). */
    public void alloc(int elemNumber) {
        if (elemNumber > 0)
            super.create(elemNumber, 1, CvType.makeType(_depth, _channels));
    }

    /** Replaces the vector contents with the given values (no-op if empty). */
    public void fromArray(double... values) {
        if (values == null || values.length == 0)
            return;
        int num = values.length / _channels;
        alloc(num);
        put(0, 0, values); //TODO: check ret val!
    }

    /** Copies the vector contents into a new primitive array. */
    public double[] toArray() {
        int num = checkVector(_channels, _depth);
        if (num < 0)
            throw new RuntimeException("Native Mat has unexpected type or size: " + toString());
        double[] out = new double[num * _channels];
        if (num == 0)
            return out;
        get(0, 0, out); //TODO: check ret val!
        return out;
    }

    /** Replaces the vector contents with the unboxed values of lb. */
    public void fromList(List<Double> lb) {
        if (lb == null || lb.size() == 0)
            return;
        double[] values = new double[lb.size()];
        int idx = 0;
        for (Double boxed : lb)
            values[idx++] = boxed;
        fromArray(values);
    }

    /** Returns the vector contents as a fixed-size list of boxed doubles. */
    public List<Double> toList() {
        double[] primitives = toArray();
        Double[] boxed = new Double[primitives.length];
        for (int i = 0; i < primitives.length; i++)
            boxed[i] = primitives[i];
        return Arrays.asList(boxed);
    }
}
|
{
"pile_set_name": "Github"
}
|
<?xml version="1.0" encoding="utf-8"?>
<!--
#
# %CopyrightBegin%
#
# Copyright Ericsson AB 2009-2018. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# %CopyrightEnd%
-->
<xsl:stylesheet version="1.0"
xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
xmlns:exsl="http://exslt.org/common"
xmlns:func="http://exslt.org/functions"
xmlns:erl="http://erlang.org"
extension-element-prefixes="exsl func"
xmlns:fn="http://www.w3.org/2005/02/xpath-functions">
<xsl:include href="db_html_params.xsl"/>
<xsl:include href="db_funcs.xsl"/>
<!-- erl:flip_first_char(in): returns $in with the case of its first
     character reversed (upper to lower, lower to upper).  Used to build
     sort keys; see the long comment before erl:get_sort_field. -->
<func:function name="erl:flip_first_char">
  <xsl:param name="in"/>
  <xsl:variable name="uppercase" select="'ABCDEFGHIJKLMNOPQRSTUVWXYZ'"/>
  <xsl:variable name="lowercase" select="'abcdefghijklmnopqrstuvwxyz'"/>
  <xsl:variable name="first-char" select="substring($in, 1, 1)"/>
  <xsl:variable name="result">
    <xsl:choose>
      <xsl:when test="contains($uppercase, $first-char)">
        <xsl:value-of select="concat(translate($first-char, $uppercase, $lowercase), substring($in, 2))"/>
      </xsl:when>
      <xsl:otherwise>
        <xsl:value-of select="concat(translate($first-char, $lowercase, $uppercase), substring($in, 2))"/>
      </xsl:otherwise>
    </xsl:choose>
  </xsl:variable>
  <func:result select="$result"/>
</func:function>
<!-- erl:lower-case(str): ASCII-only lower-casing via translate(). -->
<func:function name="erl:lower-case">
  <xsl:param name="str"/>
  <xsl:variable name="uppercase" select="'ABCDEFGHIJKLMNOPQRSTUVWXYZ'"/>
  <xsl:variable name="lowercase" select="'abcdefghijklmnopqrstuvwxyz'"/>
  <xsl:variable name="result">
    <xsl:value-of select="translate($str, $uppercase, $lowercase)"/>
  </xsl:variable>
  <func:result select="$result"/>
</func:function>
<!-- erl:to-link(text): lower-cases $text and replaces characters unsafe
     in link anchors with '-'. -->
<func:function name="erl:to-link">
  <xsl:param name="text"/>
  <func:result select="translate(erl:lower-case($text),'?: /()" ','--------')"/>
</func:function>
<!-- Used from template menu.funcs to sort a module's functions for the lefthand index list,
from the module's .xml file. Returns a value on which to sort the entity in question
(a <name> element).
Some functions are listed with the name as an attribute, as in string.xml:
<name name="join" arity="2"/>
Others use the element value for the name, as in gen_server.xml:
<name>start_link(Module, Args, Options) -> Result</name>
Additionally, callbacks may be included, as in gen_server.xml:
<name>Module:handle_call(Request, From, State) -> Result</name>
For C reference pages the name tag has a substructure where the nametext tag
is used in the sort, as in erl_nif.xml
<name><ret>void *</ret><nametext>enif_alloc(size_t size)</nametext></name>
So first, get the name from either the attribute or the element value.
Then, reverse the case of the first character. This is because xsltproc, used for processing,
orders uppercase before lowercase (even when the 'case-order="lower-first"' option
is given). But we want the Module callback functions listed after a module's regular
functions, as they are now. This doesn't affect the actual value used in the output, but
just the value used as a sort key. To then ensure that uppercase is indeed sorted before
lower, as we now want it to be, the 'case-order="upper-first"' option is used.
This processing only affect the lefthand index list- the body of the doc page is not
affected.
-->
<func:function name="erl:get_sort_field">
  <xsl:param name="elem"/>
  <xsl:variable name="base">
    <xsl:choose>
      <!-- C reference pages: sort on the nametext sub-element. -->
      <xsl:when test="ancestor::cref">
        <xsl:value-of select="$elem/nametext"/>
      </xsl:when>
      <xsl:otherwise>
        <xsl:choose>
          <!-- Name given as an attribute, e.g. <name name="join" arity="2"/>. -->
          <xsl:when test="string-length($elem/@name) > 0">
            <xsl:value-of select="$elem/@name"/>
          </xsl:when>
          <!-- Name given as element text: take everything before '('. -->
          <xsl:otherwise>
            <xsl:value-of select="substring-before($elem, '(')"/>
          </xsl:otherwise>
        </xsl:choose>
      </xsl:otherwise>
    </xsl:choose>
  </xsl:variable>
  <!-- Reverse the first character's case; rationale in the comment above
       this function. -->
  <func:result select="erl:flip_first_char($base)"/>
</func:function>
<!-- Start of Dialyzer type/spec tags.
See also the templates matching "name" and "seealso" as well as
the template "menu.funcs"
-->
<xsl:param name="specs_file" select="''"/>
<xsl:variable name="i" select="document($specs_file)"></xsl:variable>
<xsl:param name="mod2app_file" select="''"/>
<xsl:variable name="m2a" select="document($mod2app_file)"></xsl:variable>
<xsl:key name="mod2app" match="module" use="@name"/>
<xsl:key
name="mfa"
match="func/name[string-length(@arity) > 0 and ancestor::erlref]"
use="concat(ancestor::erlref/module,':',@name, '/', @arity)"/>
<!-- err: emits a fatal error message and terminates processing.
     Parameters: $f file, $m module, $n name, $a arity, $s description. -->
<xsl:template name="err">
  <xsl:param name="f"/>
  <xsl:param name="m"/>
  <xsl:param name="n"/>
  <xsl:param name="a"/>
  <xsl:param name="s"/>
  <xsl:message terminate="yes">
    Error <xsl:if test="$f != ''">in <xsl:value-of select ="$f"/>:</xsl:if>
    <xsl:if test="$m != ''"><xsl:value-of select ="$m"/>:</xsl:if>
    <xsl:value-of select="$n"/>
    <xsl:if test="$a != ''">/<xsl:value-of
    select ="$a"/></xsl:if>: <xsl:value-of select="$s"/>
  </xsl:message>
</xsl:template>
<xsl:template name="find_spec">
<xsl:variable name="curModule" select="ancestor::erlref/module"/>
<xsl:variable name="mod" select="@mod"/>
<xsl:variable name="name" select="@name"/>
<xsl:variable name="arity" select="@arity"/>
<xsl:variable name="clause_i" select="@clause_i"/>
<xsl:variable name="spec0" select=
"$i/specs/module[@name=$curModule]/spec
[name=$name and arity=$arity
and (string-length($mod) = 0 or module = $mod)]"/>
<xsl:variable name="spec" select="$spec0[string-length($clause_i) = 0
or position()
|
{
"pile_set_name": "Github"
}
|
package org.jboss.resteasy.test.resource.param.resource;
import org.jboss.resteasy.test.resource.param.MatrixParamAsPrimitiveTest;
import org.junit.Assert;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.MatrixParam;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
/**
 * Test resource: each endpoint takes a boxed primitive {@code @MatrixParam}
 * with a {@code @DefaultValue} and asserts that the value sent by the test
 * client overrides that default (e.g. default "1" vs expected 127 for
 * Byte).  The {@code @Produces} media type selects which overload serves a
 * given request.
 */
@Path("/wrappers/default/override")
public class MatrixParamAsPrimitiveWrappersDefaultOverride {
    /** Expects boolean=true, overriding the default of false. */
    @GET
    @Produces("application/boolean")
    public String doGet(@MatrixParam("boolean") @DefaultValue("false") Boolean v) {
        Assert.assertEquals(MatrixParamAsPrimitiveTest.ERROR_MESSAGE, true, v.booleanValue());
        return "content";
    }

    /** Expects byte=127 (Byte.MAX_VALUE), overriding the default of 1. */
    @GET
    @Produces("application/byte")
    public String doGet(@MatrixParam("byte") @DefaultValue("1") Byte v) {
        Assert.assertTrue(MatrixParamAsPrimitiveTest.ERROR_MESSAGE, (byte) 127 == v.byteValue());
        return "content";
    }

    /** Expects short=32767 (Short.MAX_VALUE), overriding the default of 1. */
    @GET
    @Produces("application/short")
    public String doGet(@MatrixParam("short") @DefaultValue("1") Short v) {
        Assert.assertTrue(MatrixParamAsPrimitiveTest.ERROR_MESSAGE, (short) 32767 == v.shortValue());
        return "content";
    }

    /** Expects int=2147483647 (Integer.MAX_VALUE), overriding the default of 1. */
    @GET
    @Produces("application/int")
    public String doGet(@MatrixParam("int") @DefaultValue("1") Integer v) {
        Assert.assertEquals(MatrixParamAsPrimitiveTest.ERROR_MESSAGE, 2147483647, v.intValue());
        return "content";
    }

    /** Expects long=Long.MAX_VALUE, overriding the default of 1. */
    @GET
    @Produces("application/long")
    public String doGet(@MatrixParam("long") @DefaultValue("1") Long v) {
        Assert.assertEquals(MatrixParamAsPrimitiveTest.ERROR_MESSAGE, 9223372036854775807L, v.longValue());
        return "content";
    }

    /** Expects float=3.14159265, overriding the default of 0.0. */
    @GET
    @Produces("application/float")
    public String doGet(@MatrixParam("float") @DefaultValue("0.0") Float v) {
        Assert.assertEquals(MatrixParamAsPrimitiveTest.ERROR_MESSAGE, 3.14159265f, v.floatValue(), 0.0f);
        return "content";
    }

    /** Expects double=3.14159265358979, overriding the default of 0.0. */
    @GET
    @Produces("application/double")
    public String doGet(@MatrixParam("double") @DefaultValue("0.0") Double v) {
        Assert.assertEquals(MatrixParamAsPrimitiveTest.ERROR_MESSAGE, 3.14159265358979d, v.doubleValue(), 0.0);
        return "content";
    }

    /** Expects char='a', overriding the default of 'b'. */
    @GET
    @Produces("application/char")
    public String doGet(@MatrixParam("char") @DefaultValue("b") Character v) {
        Assert.assertEquals(MatrixParamAsPrimitiveTest.ERROR_MESSAGE, 'a', v.charValue());
        return "content";
    }
}
|
{
"pile_set_name": "Github"
}
|
//
// ViewController.m
// LMSideBarControllerDemo
//
// Created by LMinh on 10/11/15.
// Copyright © 2015 LMinh. All rights reserved.
//
#import "SideBarController.h"
#import "LeftMenuViewController.h"
#import "RightMenuViewController.h"
#import "MainNavigationController.h"
#import "LMSideBarDepthStyle.h"
@implementation SideBarController

// Configures the side bar once loaded from the storyboard: a shared depth
// style for both sides, menu and content controllers instantiated by
// storyboard identifier, pan gesture enabled and self set as delegate.
- (void)awakeFromNib
{
    [super awakeFromNib];

    // Init side bar styles
    LMSideBarDepthStyle *sideBarDepthStyle = [LMSideBarDepthStyle new];
    sideBarDepthStyle.menuWidth = 220;

    // Init view controllers
    LeftMenuViewController *leftMenuViewController = [self.storyboard instantiateViewControllerWithIdentifier:@"leftMenuViewController"];
    RightMenuViewController *rightMenuViewController = [self.storyboard instantiateViewControllerWithIdentifier:@"rightMenuViewController"];
    MainNavigationController *navigationController = [self.storyboard instantiateViewControllerWithIdentifier:@"mainNavigationController"];

    // Setup side bar controller
    [self setPanGestureEnabled:YES];
    [self setDelegate:self];
    [self setMenuViewController:leftMenuViewController forDirection:LMSideBarControllerDirectionLeft];
    [self setMenuViewController:rightMenuViewController forDirection:LMSideBarControllerDirectionRight];
    [self setSideBarStyle:sideBarDepthStyle forDirection:LMSideBarControllerDirectionLeft];
    [self setSideBarStyle:sideBarDepthStyle forDirection:LMSideBarControllerDirectionRight];
    [self setContentViewController:navigationController];
}

#pragma mark - SIDE BAR DELEGATE

// NOTE(review): the delegate implementations below are intentionally empty
// (demo placeholders); add behaviour here to react to menu show/hide.
- (void)sideBarController:(LMSideBarController *)sideBarController willShowMenuViewController:(UIViewController *)menuViewController
{
}

- (void)sideBarController:(LMSideBarController *)sideBarController didShowMenuViewController:(UIViewController *)menuViewController
{
}

- (void)sideBarController:(LMSideBarController *)sideBarController willHideMenuViewController:(UIViewController *)menuViewController
{
}

- (void)sideBarController:(LMSideBarController *)sideBarController didHideMenuViewController:(UIViewController *)menuViewController
{
}

@end
|
{
"pile_set_name": "Github"
}
|
const { PeopleResolver } = require('../../people');
const { FIELD_TYPES } = require('../../people/constants');
const { OBJECT_TYPE } = require('../constants');
const { findByEmail } = require('../index');
const denormalizer = require('./denormalizer');
const PRIORITY = 100;
module.exports = new PeopleResolver(OBJECT_TYPE, resolver, denormalizer, PRIORITY);
// Resolves an email-address field to a local user within the caller's
// domain.  Resolves to undefined for any other field type.
// (Kept as a function declaration: module.exports above relies on hoisting.)
function resolver({ fieldType, value, context }) {
  if (fieldType !== FIELD_TYPES.EMAIL_ADDRESS) {
    return Promise.resolve();
  }

  const options = { domainId: context.domain._id };

  return new Promise((resolve, reject) => {
    findByEmail(value, options, (err, user) => (err ? reject(err) : resolve(user)));
  });
}
|
{
"pile_set_name": "Github"
}
|
<?xml version="1.0" encoding="UTF-8"?>
<MediaContainer
allowSync="1"
identifier="com.plexapp.plugins.library"
librarySectionID="4"
librarySectionUUID="efbdd7b618d67cdae2fd42cf68a8427f9526d7fa"
mediaTagPrefix="/system/bundle/media/flags/"
mediaTagVersion="1393422068"
size="1" >
<Video
addedAt="1365260822"
art="/library/metadata/572/art/1379374156"
contentRating="R"
duration="10506750"
guid="com.plexapp.agents.imdb://tt0187393?lang=en"
key="/library/metadata/572"
lastViewedAt="1388768131"
originallyAvailableAt="2000-06-28"
rating="7.5999999046325701"
ratingKey="572"
studio="Centropolis Entertainment"
summary="After proving himself on the field of battle in the French and Indian War, Benjamin Martin wants nothing more to do with such things, preferring the simple life of a farmer. But when his son Gabriel enlists in the army to defend their new nation, America, against the British, Benjamin reluctantly returns to his old life to protect his son."
tagline="Some things are worth fighting for."
thumb="/library/metadata/572/thumb/1379374156"
title="The Patriot"
titleSort="Patriot"
type="movie"
updatedAt="1379374156"
viewCount="92"
viewOffset="389055"
year="2000" >
<Media
id="557"
aspectRatio="2.35"
audioChannels="2"
audioCodec="aac"
bitrate="1704"
container="mp4"
duration="10506750"
has64bitOffsets="0"
height="800"
optimizedForStreaming="1"
videoCodec="h264"
videoFrameRate="24p"
videoResolution="1080"
width="1920" >
<Part
id="589"
container="mp4"
duration="10506750"
file="/var/lib/plexmediaserver/Movies/The Patriot Extended Cut.mp4"
has64bitOffsets="0"
key="/library/parts/589/file.mp4"
optimizedForStreaming="1"
size="2238591499" >
<Stream
id="2591"
codec="srt"
format="srt"
key="/library/streams/2591"
language="English"
languageCode="eng"
streamType="3" />
<Stream
id="3023"
bitDepth="8"
bitrate="1640"
cabac="1"
chromaSubsampling="4:2:0"
codec="h264"
codecID="avc1"
colorSpace="yuv"
duration="10506412"
frameRate="23.976"
frameRateMode="cfr"
hasScalingMatrix="0"
height="800"
index="0"
level="40"
profile="high"
refFrames="5"
scanType="progressive"
streamType="1"
width="1920" />
<Stream
id="3024"
bitrate="64"
bitrateMode="vbr"
channels="2"
codec="aac"
codecID="40"
duration="10506750"
index="1"
profile="he-aac / lc"
samplingRate="22050"
selected="1"
streamType="2" />
</Part>
</Media>
<Genre
id="19"
tag="Action" />
<Genre
id="104"
tag="Drama" />
<Genre
id="509"
tag="History" />
<Genre
id="510"
tag="War" />
<Writer
id="1889"
tag="Robert Rodat" />
<Director
id="108"
tag="Roland Emmerich" />
<Producer
id="3779"
tag="Dean Devlin" />
<Producer
id="3780"
tag="Mark Gordon" />
<Producer
id="3781"
tag="Gary Levinsohn" />
<Country
id="24"
tag="USA" />
<Role
id="3573"
role="Benjamin Martin"
tag="Mel Gibson"
thumb="http://d3gtl9l2a4fn1j.cloudfront.net/t/p/original/8s48F6yFPOOcUfGKH1dNvztiHZz.jpg" />
<Role
id="3574"
role="Gabriel Martin"
tag="Heath Ledger"
thumb="http://d3gtl9l2a4fn1j.cloudfront.net/t/p/original/47b8wJySE9r6gWMcTGSa0EuiDV.jpg" />
<Role
id="3575"
role="Charlotte Selton"
tag="Joely Richardson"
thumb="http://d3gtl9l2a4fn1j.cloudfront.net/t/p/original/eIp0OGjOBmdd7FuprJYKDWYvZxH.jpg" />
<Role
id="543"
role="Col. William Tavington"
tag="Jason Isaacs"
thumb="http://d3gtl9l2a4fn1j.cloudfront.net/t/p/original/xUlka8zpREdB0xIAiolOSC1t4CB.jpg" />
<Role
id="1622"
role="Jean Villeneuve"
tag="Tchéky Karyo"
thumb="http://d3gtl9l2a4fn1j.cloudfront.net/t/p/original/kh5Gb5luqfAOxYZvquXueXHXXa.jpg" />
<Role
id="1887"
role="Col. Harry Burwell"
tag="Chris Cooper"
thumb="http://d3gtl9l2a4fn1j.cloudfront.net/t/p/original/idYN2ItTwPbGL55cBZO4y8rdcGu.jpg" />
<Role
id="3576"
role="Anne Howard"
tag="Lisa Brenner" />
<Role
id="3577"
role="Gen. Cornwallis"
tag="Tom Wilkinson"
thumb="http://d3gtl9l2a4fn1j.cloudfront.net/t/p/original/ammDpnQRD1JVY0aMnt2HBJfucgZ.jpg" />
<Role
id="3578"
role="John Billings"
tag="Leon Rippy"
thumb="http://d3gtl9l2a4fn1j.cloudfront.net/t/p/original/o0mQPjR67UJzzKbm3nVdOj7fdVm.jpg" />
<Role
id="1721"
role="Dan Scott"
tag="Donal Logue"
thumb="http://d3gtl9l2a4fn1j.cloudfront.net/t/p/original/vwSHbtsOPCJDBeSCANFlWgsgN6A.jpg" />
<Role
id="2303"
role="Capt. Wilkins"
tag="Adam Baldwin"
thumb="http://d3gtl9l2a4fn1j.cloudfront.net/t/p/original/w76VLhGjRELFkETeFF0iPMJO9eJ.jpg" />
<Role
id="3579"
role="Occam"
tag="Jay Arlen Jones" />
<Role
id="3580"
role="Peter Howard"
tag="Joey D. Vieira" />
<Role
|
{
"pile_set_name": "Github"
}
|
import csv
from itertools import count
from pathlib import Path
from typing import Any, Dict, List
from rotkehlchen.assets.asset import Asset
from rotkehlchen.constants.assets import A_USD
from rotkehlchen.constants.misc import ZERO
from rotkehlchen.db.dbhandler import DBHandler
from rotkehlchen.errors import DeserializationError, UnknownAsset
from rotkehlchen.exchanges.data_structures import AssetMovement, AssetMovementCategory, Trade
from rotkehlchen.serialization.deserialize import (
deserialize_asset_amount,
deserialize_asset_amount_force_positive,
deserialize_asset_movement_category,
deserialize_fee,
deserialize_timestamp_from_date,
)
from rotkehlchen.typing import AssetAmount, Fee, Location, Price, TradePair, TradeType
def remap_header(fieldnames: List[str]) -> List[str]:
    """Disambiguate cointracking's repeated 'Cur.' columns.

    The exported CSV names the buy, sell and fee currency columns all
    'Cur.'. Rename them, in order of appearance, to 'Cur.Buy',
    'Cur.Sell' and 'Cur.Fee' so each row can be zipped into a dict
    with unique keys.
    """
    ordinal = count(1)
    suffix_for = {1: 'Buy', 2: 'Sell', 3: 'Fee'}
    remapped = []
    for field in fieldnames:
        if field.startswith('Cur.'):
            remapped.append(f'Cur.{suffix_for[next(ordinal)]}')
        else:
            remapped.append(field)
    return remapped
class UnsupportedCointrackingEntry(Exception):
"""Thrown for Cointracking CSV export entries we can't support to import"""
class UnsupportedCryptocomEntry(Exception):
"""Thrown for Cryptocom CSV export entries we can't support to import"""
def exchange_row_to_location(entry: str) -> Location:
    """Takes the exchange row entry of Cointracking exported trades list and returns a location

    Raises UnsupportedCointrackingEntry for on-chain transaction rows (for
    which cointracking does not export enough data) and for any exchange
    name that is not recognized.
    """
    known_exchanges = {
        'no exchange': Location.EXTERNAL,
        'Kraken': Location.KRAKEN,
        'Poloniex': Location.POLONIEX,
        'Bittrex': Location.BITTREX,
        'Binance': Location.BINANCE,
        'Bitmex': Location.BITMEX,
        'Coinbase': Location.COINBASE,
        # TODO: Check if this is the correct string for CoinbasePro from cointracking
        'CoinbasePro': Location.COINBASEPRO,
        # TODO: Check if this is the correct string for Gemini from cointracking
        'Gemini': Location.GEMINI,
    }
    location = known_exchanges.get(entry)
    if location is not None:
        return location

    if entry == 'ETH Transaction':
        raise UnsupportedCointrackingEntry(
            'Not importing ETH Transactions from Cointracking. Cointracking does not '
            'export enough data for them. Simply enter your ethereum accounts and all '
            'your transactions will be auto imported directly from the chain',
        )
    if entry == 'BTC Transaction':
        raise UnsupportedCointrackingEntry(
            'Not importing BTC Transactions from Cointracking. Cointracking does not '
            'export enough data for them. Simply enter your BTC accounts and all '
            'your transactions will be auto imported directly from the chain',
        )
    raise UnsupportedCointrackingEntry(
        f'Unknown Exchange "{entry}" encountered during a cointracking import. Ignoring it',
    )
class DataImporter():
def __init__(self, db: DBHandler) -> None:
    """Keep the database handler used to persist imported trades and asset movements."""
    self.db = db
def _consume_cointracking_entry(self, csv_row: Dict[str, Any]) -> None:
    """Consumes a cointracking entry row from the CSV and adds it into the database

    Can raise:
        - DeserializationError if something is wrong with the format of the expected values
        - UnsupportedCointrackingEntry if importing of this entry is not supported.
        - IndexError if the CSV file is corrupt
        - KeyError if an expected CSV key is missing
        - UnknownAsset if one of the assets found in the entry are not supported
    """
    row_type = csv_row['Type']
    timestamp = deserialize_timestamp_from_date(
        date=csv_row['Date'],
        formatstr='%d.%m.%Y %H:%M:%S',
        location='cointracking.info',
    )
    notes = csv_row['Comment']
    location = exchange_row_to_location(csv_row['Exchange'])

    fee = Fee(ZERO)
    fee_currency = A_USD  # whatever (used only if there is no fee)
    if csv_row['Fee'] != '':
        fee = deserialize_fee(csv_row['Fee'])
        fee_currency = Asset(csv_row['Cur.Fee'])

    if row_type in ('Gift/Tip', 'Trade', 'Income'):
        base_asset = Asset(csv_row['Cur.Buy'])
        quote_asset = None if csv_row['Cur.Sell'] == '' else Asset(csv_row['Cur.Sell'])
        if quote_asset is None and row_type not in ('Gift/Tip', 'Income'):
            raise DeserializationError('Got a trade entry with an empty quote asset')
        if quote_asset is None:
            # Really makes no difference as this is just a gift and the amount is zero
            quote_asset = A_USD
        pair = TradePair(f'{base_asset.identifier}_{quote_asset.identifier}')
        base_amount_bought = deserialize_asset_amount(csv_row['Buy'])
        if csv_row['Sell'] != '-':
            quote_amount_sold = deserialize_asset_amount(csv_row['Sell'])
        else:
            quote_amount_sold = AssetAmount(ZERO)
        if base_amount_bought == ZERO:
            # Guard against a malformed row crashing the whole import with an
            # uncaught ZeroDivisionError; the caller already handles this error type.
            raise DeserializationError(
                'Got a trade entry with a zero bought amount. Can not calculate a rate',
            )
        rate = Price(quote_amount_sold / base_amount_bought)
        trade = Trade(
            timestamp=timestamp,
            location=location,
            pair=pair,
            trade_type=TradeType.BUY,  # It's always a buy during cointracking import
            amount=base_amount_bought,
            rate=rate,
            fee=fee,
            fee_currency=fee_currency,
            link='',
            notes=notes,
        )
        self.db.add_trades([trade])
    elif row_type in ('Deposit', 'Withdrawal'):
        category = deserialize_asset_movement_category(row_type.lower())
        if category == AssetMovementCategory.DEPOSIT:
            amount = deserialize_asset_amount(csv_row['Buy'])
            asset = Asset(csv_row['Cur.Buy'])
        else:
            # Withdrawals are exported as negative numbers; force positive
            amount = deserialize_asset_amount_force_positive(csv_row['Sell'])
            asset = Asset(csv_row['Cur.Sell'])
        asset_movement = AssetMovement(
            location=location,
            category=category,
            address=None,
            transaction_id=None,
            timestamp=timestamp,
            asset=asset,
            amount=amount,
            fee=fee,
            fee_asset=fee_currency,
            link='',
        )
        self.db.add_asset_movements([asset_movement])
    else:
        # NOTE(review): fixed "entrype type" typo in the original error message
        raise UnsupportedCointrackingEntry(
            f'Unknown entry type "{row_type}" encountered during cointracking '
            f'data import. Ignoring entry',
        )
def import_cointracking_csv(self, filepath: Path) -> None:
with open(filepath, 'r', encoding='utf-8-sig') as csvfile:
data = csv.reader(csvfile, delimiter=',', quotechar='"')
header = remap_header(next(data))
for row in data:
try:
self._consume_cointracking_entry(dict(zip(header, row)))
except UnknownAsset as e:
self.db.msg_aggregator.add_warning(
f'During cointracking CSV import found action with unknown '
f'asset {e.asset_name}. Ignoring entry',
)
continue
except IndexError:
self.db.msg_aggregator.add_warning(
'During cointracking CSV import found entry with '
'unexpected number of columns',
)
continue
except DeserializationError as e:
self.db.msg_aggregator.add_warning(
f'Error during cointracking CSV import deserialization. '
f'Error was {
|
{
"pile_set_name": "Github"
}
|
/**
* Marlin 3D Printer Firmware
* Copyright (C) 2019 MarlinFirmware [https://github.com/MarlinFirmware/Marlin]
*
* Based on Sprinter and grbl.
* Copyright (C) 2011 Camiel Gubbels / Erik van der Zalm
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
#ifndef HAL_PINSDEBUG_TEENSY_H
#define HAL_PINSDEBUG_TEENSY_H  // NOTE(review): this define was missing, so the include guard never took effect

#define NUMBER_PINS_TOTAL NUM_DIGITAL_PINS
#define MULTI_NAME_PAD 16 // space needed to be pretty if not first name assigned to a pin
#define FTM0_CH0_PIN 22
#define FTM0_CH1_PIN 23
#define FTM0_CH2_PIN 9
#define FTM0_CH3_PIN 10
#define FTM0_CH4_PIN 6
#define FTM0_CH5_PIN 20
#define FTM0_CH6_PIN 21
#define FTM0_CH7_PIN 5
#define FTM1_CH0_PIN 3
#define FTM1_CH1_PIN 4
#define FTM2_CH0_PIN 29
#define FTM2_CH1_PIN 30
#define FTM3_CH0_PIN 2
#define FTM3_CH1_PIN 14
#define FTM3_CH2_PIN 7
#define FTM3_CH3_PIN 8
#define FTM3_CH4_PIN 35
#define FTM3_CH5_PIN 36
#define FTM3_CH6_PIN 37
#define FTM3_CH7_PIN 38
#ifdef __MK66FX1M0__ // Teensy3.6
#define TPM1_CH0_PIN 16
#define TPM1_CH1_PIN 17
#endif
#define IS_ANALOG(P) ((P) >= analogInputToDigitalPin(0) && (P) <= analogInputToDigitalPin(9)) || ((P) >= analogInputToDigitalPin(12) && (P) <= analogInputToDigitalPin(20))
// Format the analog-channel alias of digital pin `pin` (e.g. "(A 3) ")
// into `buffer`. Pins <= 23 map to channel pin-14, pins <= 39 to pin-19;
// pins above 39 leave `buffer` untouched.
// NOTE(review): assumes callers only pass analog-capable pins (>= 14) —
// a smaller pin would fall into the first branch and print a negative
// channel number. TODO confirm with call sites.
void HAL_print_analog_pin(char buffer[], int8_t pin) {
  if (pin <= 23) sprintf_P(buffer, PSTR("(A%2d) "), int(pin - 14));
  else if (pin <= 39) sprintf_P(buffer, PSTR("(A%2d) "), int(pin - 19));
}
// Sample the analog channel behind digital pin `pin` via analogRead and
// format the reading into `buffer` (same pin -> channel offsets as
// HAL_print_analog_pin). Pins above 39 leave `buffer` untouched.
// NOTE(review): like HAL_print_analog_pin, this presumes pin >= 14;
// smaller values would read a negative channel — TODO confirm.
void HAL_analog_pin_state(char buffer[], int8_t pin) {
  if (pin <= 23) sprintf_P(buffer, PSTR("Analog in =% 5d"), analogRead(pin - 14));
  else if (pin <= 39) sprintf_P(buffer, PSTR("Analog in =% 5d"), analogRead(pin - 19));
}
// Print a PWM compare value into the caller-scoped `buffer` and echo it.
// NOTE(review): the original took V but printed the hard-coded constant 22,
// so every PWM pin reported the same "PWM:   22". Print the actual value.
#define PWM_PRINT(V) do{ sprintf_P(buffer, PSTR("PWM: %4d"), int(V)); SERIAL_ECHO(buffer); }while(0)

// Expand to a switch case for FTM timer N, channel Z: if the channel value
// register is nonzero the pin is treated as actively producing PWM — print
// the value and return true; otherwise return false.
#define FTM_CASE(N,Z) \
  case FTM##N##_CH##Z##_PIN: \
    if (FTM##N##_C##Z##V) { \
      PWM_PRINT(FTM##N##_C##Z##V); \
      return true; \
    } else return false
/**
 * Print a pin's PWM status.
 * Return true if it's currently a PWM pin.
 */
bool HAL_pwm_status(int8_t pin) {
  char buffer[20]; // target of the sprintf_P inside PWM_PRINT
  switch (pin) {
    FTM_CASE(0,0);
    FTM_CASE(0,1);
    FTM_CASE(0,2);
    FTM_CASE(0,3);
    FTM_CASE(0,4);
    FTM_CASE(0,5);
    FTM_CASE(0,6);
    FTM_CASE(0,7);
    FTM_CASE(1,0);
    FTM_CASE(1,1);
    FTM_CASE(2,0);
    FTM_CASE(2,1);
    FTM_CASE(3,0);
    FTM_CASE(3,1);
    FTM_CASE(3,2);
    FTM_CASE(3,3);
    FTM_CASE(3,4);
    FTM_CASE(3,5);
    FTM_CASE(3,6);
    FTM_CASE(3,7);
    case NOT_ON_TIMER:
    default:
      return false;
  }
  // NOTE(review): removed the trailing SERIAL_ECHOPGM(" ") from the
  // original — every switch branch returns, so it was unreachable.
}
static void HAL_pwm_details(uint8_t pin) { /* TODO */ }
#endif
|
{
"pile_set_name": "Github"
}
|
{% extends "socialaccount/base.html" %}
{% load i18n recaptcha2 %}
{% block head_title %}{% trans "Register" %}{% endblock %}
{% block head %}
{% recaptcha_init request.LANGUAGE_CODE %}
{% endblock %}
{% block content %}
<div class="central-form">
<h1>{% trans "Register" %}</h1>
<p>{% blocktrans with provider_name=account.get_provider.name site_name=site.name %}You are about to use your {{provider_name}} account to login to
{{site_name}}. As a final step, please complete the following form:{% endblocktrans %}</p>
<form class="signup" id="signup_form" method="post" action="{% url 'socialaccount_signup' %}">
{% csrf_token %}
{% include 'form-fields.html' %}
<button class="btn btn-primary" type="submit">{% trans "Register" %} »</button>
</form>
</div>
{% endblock %}
|
{
"pile_set_name": "Github"
}
|
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
var gTestfile = 'regress-465483.js';
//-----------------------------------------------------------------------------
var BUGNUMBER = 465483;
var summary = 'Type instability leads to undefined being added as a string instead of as a number';
var actual = '';
var expect = '';
//-----------------------------------------------------------------------------
test();
//-----------------------------------------------------------------------------
// Regression test for bug 465483: when a loop is type-unstable
// (number -> string -> undefined), the tracing JIT must add undefined as a
// number (yielding NaN), not append it as the string "undefined".
// NOTE: relies on legacy SpiderMonkey-shell-only features (E4X `for each`,
// jit(), enterFunc/exitFunc/printBugNumber); it will not run elsewhere,
// which is why the code is left exactly as-is.
function test()
{
  enterFunc ('test');            // harness bookkeeping
  printBugNumber(BUGNUMBER);
  printStatus (summary);

  expect = 'NaN';                // only the LAST assigned `actual` is compared

  jit(true);                     // force the tracing JIT on to exercise the unstable trace

  // final iteration computes (void 0) + (void 0) == NaN -> actual = 'NaN'
  for each (i in [4, 'a', 'b', (void 0)]) print(actual = '' + (i + i));

  jit(false);

  reportCompare(expect, actual, summary);

  exitFunc ('test');
}
|
{
"pile_set_name": "Github"
}
|
#
# Copyright (c) 2010-2020. Axon Framework
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
name=AxonTestConfiguration
appenders = console
appender.console.type = Console
appender.console.name = STDOUT
appender.console.layout.type = PatternLayout
appender.console.layout.pattern = %d [%t] %-5p %-30.30c{1} %x - %m%n
rootLogger.level = info
rootLogger.appenderRefs = stdout
rootLogger.appenderRef.stdout.ref = STDOUT
logger.axon.name = org.axonframework
logger.axon.level = info
logger.axon.additivity = false
logger.axon.appenderRefs = stdout
logger.axon.appenderRef.stdout.ref = STDOUT
|
{
"pile_set_name": "Github"
}
|
<!doctype HTML>
<html>
<meta charset="utf8">
<title>Content Visibility: navigating to a text fragment.</title>
<link rel="author" title="Vladimir Levin" href="mailto:vmpstr@chromium.org">
<link rel="help" href="https://drafts.csswg.org/css-contain/#content-visibility">
<meta name="timeout" content="long">
<meta name="assert" content="content-visibility: auto subtrees are 'searchable' by text fragment links">
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/resources/testdriver.js"></script>
<script src="/resources/testdriver-vendor.js"></script>
<script src="/common/utils.js"></script>
<script src="/scroll-to-text-fragment/stash.js"></script>
<script>
promise_test(t => new Promise((resolve, reject) => {
const fragment = '#:~:text=hiddentext';
const key = token();
test_driver.bless("Open a URL with a text fragment directive", () => {
window.open(`resources/text-fragment-target-auto.html?key=${key}${fragment}`,
'_blank',
'noopener');
});
fetchResults(key, resolve, reject);
}).then(data => {
assert_equals(data.scrollPosition, "text");
assert_equals(data.target, "text");
}), "Fragment navigation with content-visibility; single text");
promise_test(t => new Promise((resolve, reject) => {
const fragment = '#:~:text=start,end';
const key = token();
test_driver.bless("Open a URL with a text fragment directive", () => {
window.open(`resources/text-fragment-target-auto.html?key=${key}${fragment}`,
'_blank',
'noopener');
});
fetchResults(key, resolve, reject);
}).then(data => {
assert_equals(data.scrollPosition, "text2");
assert_equals(data.target, "text2and3ancestor");
}), "Fragment navigation with content-visibility; range across blocks");
</script>
|
{
"pile_set_name": "Github"
}
|
<!DOCTYPE html>
<html lang="en-US">
<head>
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1">
<!-- Begin Jekyll SEO tag v2.5.0 -->
<title>Using the Demo Generator | Azure DevOps Demo Generator</title>
<meta name="generator" content="Jekyll v3.7.4" />
<meta property="og:title" content="Using the Demo Generator" />
<meta property="og:locale" content="en_US" />
<meta name="description" content="Azure DevOps Demo Generator" />
<meta property="og:description" content="Azure DevOps Demo Generator" />
<link rel="canonical" href="http://localhost:4000/using.html" />
<meta property="og:url" content="http://localhost:4000/using.html" />
<meta property="og:site_name" content="Azure DevOps Demo Generator" />
<script type="application/ld+json">
{"@type":"WebPage","url":"http://localhost:4000/using.html","headline":"Using the Demo Generator","description":"Azure DevOps Demo Generator","@context":"http://schema.org"}</script>
<!-- End Jekyll SEO tag -->
<link rel="stylesheet" href="/assets/css/style.css?v=71de869d35c4585adaf0b064bf89fd3929f3aa6a">
</head>
<body>
<div class="container-lg px-3 my-5 markdown-body">
<h1 id="using-the-azure-devops-demo-generator">Using the Azure DevOps Demo Generator</h1>
<hr />
<ol>
<li>
<p>Browse to the <a href="https://azuredevopsdemogenerator.azurewebsites.net/">Azure DevOps Demo Generator site</a> by click the link, or copy <code class="highlighter-rouge">https://azuredevopsdemogenerator.azurewebsites.net/</code> into your browser’s URL field.</p>
</li>
<li>
<p>Click <strong>Sign In</strong> and provide the Microsoft or Azure AD account credentials associated with an organization in Azure DevOps Services. If you don’t have an organization, click on <strong>Get Started for Free</strong> to create one and then log in with your credentials.</p>
</li>
</ol>
<p><img src="/About-Azure-DevOps-Demo-Generator/images/homepage.png" alt="Image of VSTS Demo Generator V2 login" /></p>
<ol>
<li>
<p>After you sign in, select <strong>Accept</strong> to grant the Demo Generator permissions to access your Azure DevOps account.</p>
</li>
<li>
<p>Select the organization you will use to host the project created by the Azure DevOps Demo Generator. (You may have multiple accounts of which you are a member, and which are associated with your login, so choose carefully.) Provide a name for your project (such as “MyProjectDemo” ) that you and other contributors can use to identify it as a demo project.</p>
</li>
</ol>
<p><img src="/About-Azure-DevOps-Demo-Generator/images/mainpage.png" alt="Image of the generator main page" /></p>
<p>Lastly, select the demo project template you want to provision by clicking <strong>…</strong> (Browse) button.</p>
<p><img src="/About-Azure-DevOps-Demo-Generator/images/templateselection.png" alt="Image of VSTS Demo Generator template selection screen" /></p>
<blockquote>
<p>The default template is <strong>SmartHotel360</strong>, which contains complete ASP.NET 2 web mobile and desktop business apps for a hotel, and can be deployed using Docker containers. Other templates include <strong>MyHealthClinic</strong>, which defines a team project for an ASP.NET Core app that deploys to Azure App Service; <strong>PartsUnlimited</strong>, which defines an ASP.NET app with customized CI/CD pipelines; and <strong>MyShuttle</strong>, which defines a Java app and Azure App service deployment.</p>
</blockquote>
<blockquote>
<p>All four templates provide fictional Azure DevOps users and pre-populated Agile planning and tracking work items and data, along with source code in an Azure Repos Git repo, as well as access to Azure Pipelines.</p>
</blockquote>
<ol>
<li>
<p>Some templates may require additional extensions to be installed to your organization. The demo generation process checks to see if these extensions are already installed. If the extension is already installed, a green check will be displayed in front of the extension name. If the extension is <strong>not</strong> installed, select the empty check boxes to install the extension(s) to your account. When ready, click on <strong>Create Project</strong> button.</p>
<blockquote>
<p>If you want to manually install the extensions, click on the provided link for a specific extension, which takes you to the extension’s page on Azure DevOps Marketplace. From there, you can install the extension.</p>
</blockquote>
</li>
<li>
<p>Your project may take a couple of minutes for the Demo Generator to provision. When it completes, you will be provided with a link to the demo project.</p>
</li>
</ol>
<p><img src="_img/projectcreated.png" alt="Image of Azure DevOps Demo Generator project created screen" /></p>
<ol>
<li>Select the link to go to the new demo Azure DevOps Services project and confirm it was successfully provisioned.</li>
</ol>
<p><img src="_img/projecthomepage.png" alt="Image of Azure DevOps Demo Generator provision confirmation screen" /></p>
<blockquote>
<p>You must provide your own information such as URLs, logins, password, and others for the configuration of demo endpoints that use Azure resources.</p>
</blockquote>
<hr />
<p>Next: <a href="/About-Azure-DevOps-Demo-Generator/Build-your-own-template">Building your own template</a></p>
</div>
<script src="https://cdnjs.cloudflare.com/ajax/libs/anchor-js/4.1.0/anchor.min.js" integrity="sha256-lZaRhKri35AyJSypXXs4o6OPFTbTmUoltBbDCbdzegg=" crossorigin="anonymous"></script>
<script>anchors.add();</script>
</body>
</html>
|
{
"pile_set_name": "Github"
}
|
/*
This file is a part of libcds - Concurrent Data Structures library
(C) Copyright Maxim Khizhinsky (libcds.dev@gmail.com) 2006-2016
Source code repo: http://github.com/khizmax/libcds/
Download: http://sourceforge.net/projects/libcds/files/
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef CDSLIB_CONTAINER_DETAILS_LAZY_LIST_BASE_H
#define CDSLIB_CONTAINER_DETAILS_LAZY_LIST_BASE_H
#include <cds/container/details/base.h>
#include <cds/intrusive/details/lazy_list_base.h>
#include <cds/urcu/options.h>
namespace cds { namespace container {
/// LazyList ordered list related definitions
/** @ingroup cds_nonintrusive_helper
*/
namespace lazy_list {
/// LazyList traits
/**
Either \p compare or \p less or both must be specified.
*/
struct traits
{
/// allocator used to allocate new node
typedef CDS_DEFAULT_ALLOCATOR allocator;
/// Key comparing functor
/**
No default functor is provided. If the option is not specified, the \p less is used.
*/
typedef opt::none compare;
/// Specifies binary predicate used for key comparing
/**
Default is \p std::less<T>.
*/
typedef opt::none less;
/// Specifies binary functor used for comparing keys for equality
/**
No default functor is provided. If \p equal_to option is not specified, \p compare is used, if \p compare is not
specified, \p less is used.
*/
typedef opt::none equal_to;
/// Specifies list ordering policy.
/**
If \p sort is \p true, than list maintains items in sorted order, otherwise items are unordered. Default is \p true.
Note that if \p sort is \p false then lookup operations scan entire list.
*/
static const bool sort = true;
/// Lock type used to lock modifying items
/**
Default is cds::sync::spin
*/
typedef cds::sync::spin lock_type;
/// back-off strategy used
typedef cds::backoff::Default back_off;
/// Item counting feature; by default, disabled. Use \p cds::atomicity::item_counter to enable item counting
typedef atomicity::empty_item_counter item_counter;
/// C++ memory ordering model
/**
Can be \p opt::v::relaxed_ordering (relaxed memory model, the default)
or \p opt::v::sequential_consistent (sequentially consistent memory model).
*/
typedef opt::v::relaxed_ordering memory_model;
/// RCU deadlock checking policy (only for \ref cds_intrusive_LazyList_rcu "RCU-based LazyList")
/**
List of available options see \p opt::rcu_check_deadlock
*/
typedef opt::v::rcu_throw_deadlock rcu_check_deadlock;
//@cond
// LazyKVList: supporting for split-ordered list
// key accessor (opt::none = internal key type is equal to user key type)
typedef opt::none key_accessor;
//@endcond
};
/// Metafunction converting option list to \p lazy_list::traits
/**
\p Options are:
- \p opt::lock_type - lock type for node-level locking. Default \p is cds::sync::spin. Note that <b>each</b> node
of the list has member of type \p lock_type, therefore, heavy-weighted locking primitive is not
acceptable as candidate for \p lock_type.
- \p opt::compare - key compare functor. No default functor is provided.
If the option is not specified, the \p opt::less is used.
- \p opt::less - specifies binary predicate used for key compare. Default is \p std::less<T>.
- \p opt::equal_to - specifies binary functor for comparing keys for equality. This option is applicable only for unordered list.
No default is provided. If \p equal_to is not specified, \p compare is used, if \p compare is not specified, \p less is used.
- \p opt::sort - specifies ordering policy. Default value is \p true, i.e. the list is ordered.
Note: unordering feature is not fully supported yet.
- \p opt::back_off - back-off strategy used. If the option is not specified, \p cds::backoff::Default is used.
- \p opt::item_counter - the type of item counting feature. Default is disabled (\p atomicity::empty_item_counter).
To enable item counting use \p atomicity::item_counter.
- \p opt::allocator - the allocator used for creating and freeing list's item. Default is \ref CDS_DEFAULT_ALLOCATOR macro.
- \p opt::memory_model - C++ memory ordering model. Can be \p opt::v::relaxed_ordering (relaxed memory model, the default)
or \p opt::v::sequential_consistent (sequentially consistent memory model).
*/
template <typename... Options>
struct make_traits {
# ifdef CDS_DOXYGEN_INVOKED
typedef implementation_defined type ; ///< Metafunction result
# else
typedef typename cds::opt::make_options<
typename cds::opt::find_type_traits< traits, Options... >::type
,Options...
>::type type;
#endif
};
} // namespace lazy_list
// Forward declarations
template <typename GC, typename T, typename Traits=lazy_list::traits>
class LazyList;
template <typename GC, typename Key, typename Value, typename Traits=lazy_list::traits>
class LazyKVList;
// Tag for selecting lazy list implementation
/**
This struct is empty and it is used only as a tag for selecting LazyList
as ordered list implementation in declaration of some classes.
See \p split_list::traits::ordered_list as an example.
*/
struct lazy_list_tag
{};
}} // namespace cds::container
#endif // #ifndef CDSLIB_CONTAINER_DETAILS_LAZY_LIST_BASE_H
|
{
"pile_set_name": "Github"
}
|
#######################################################################
##
## Corresponding documentation:
##
## https://redmine.lighttpd.net/projects/lighttpd/wiki/Docs_ModAccesslog
##
server.modules += ( "mod_accesslog" )
##
## Default access log.
##
accesslog.filename = log_root + "/lighttpd-access.log"
##
## The default format produces CLF compatible output.
## For available parameters see access.txt
##
#accesslog.format = "%h %l %u %t \"%r\" %b %>s \"%{User-Agent}i\" \"%{Referer}i\""
##
## If you want to log to syslog you have to unset the
## accesslog.use-syslog setting and uncomment the next line.
##
#accesslog.use-syslog = "enable"
#
#######################################################################
|
{
"pile_set_name": "Github"
}
|
//
// ReleaseLeopardOrLater.xcconfig
//
// Xcode configuration file for building a Release configuration of a project
// on Leopard or later.
//
// This is a _Configuration_ Xcode config file for use in the "Based on" popup
// of the project configuration editor. Do _not_ use this as the config base
// and individual Xcode target, there are other configuration files for that
// purpose.
//
// Copyright 2006-2008 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy
// of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
//
// This file will be going away, please migrate off it. Instead Apple wants
// you to use the "current" SDK, use ReleaseMacOSX.xcconfig and set your min
// supported OS version in your project file.
// Pull in the general settings
#include "../subconfig/General.xcconfig"
// Leopard or later
#include "../subconfig/LeopardOrLater.xcconfig"
// Release settings
#include "../subconfig/Release.xcconfig"
// Merge settings
#include "../subconfig/GTMMerge.xcconfig"
|
{
"pile_set_name": "Github"
}
|
<?xml version="1.0" encoding="utf-8"?>
<resources>
<style name="ActionUI" parent="@android:style/Theme.Translucent.NoTitleBar">
</style>
</resources>
|
{
"pile_set_name": "Github"
}
|

> Roadmap to becoming a game developer in 2020, inspired by [web-developer-roadmap](https://github.com/kamranahmedse/developer-roadmap).
Below you'll find a set of charts demonstrating the paths that you can take and the technologies that you would want to adopt in order to become a client, server, or QA game developer. I made these charts for an old professor of mine who wanted something to share with his college students to give them a perspective; sharing them here to help the community.
> Check out my [Github](https://github.com/utilForever) and say "hi" on [Twitter](https://twitter.com/utilForever).
***
<h3 align="center"><strong>Purpose of these Roadmaps</strong></h3>
> The purpose of these roadmaps is to give you an idea about the landscape and to guide you if you are confused about what to learn next and not to encourage you to pick what is hip and trendy. You should grow some understanding of why one tool would be better suited for some cases than the other and remember hip and trendy never means best suited for the job.
<h3 align="center"><strong>Note to Beginners</strong></h3>
> These roadmaps cover everything that is there to learn for the paths listed below. Don't feel overwhelmed, you don't need to learn it all in the beginning if you are just getting started. We are working on the beginner versions of these and will release it soon after we are done with the 2020 release of roadmaps.
***
If you think that these can be improved in any way, please do suggest.
## Introduction

## Client Roadmap

## Server Roadmap

## QA Roadmap

## 🚦 Wrap Up
If you think any of the roadmaps can be improved, please do open a PR with any updates and submit any issues. Also, I will continue to improve this, so you might want to watch/star this repository to revisit.
## 🙌 Contribution
The roadmaps are built using [Balsamiq](https://balsamiq.com/products/mockups/). Project file can be found at `/project-files` directory. To modify any of the roadmaps, open Balsamiq, click **Project > Import > Mockup JSON**, it will open the roadmap for you, update it, upload and update the images in readme and create a PR.
- Open pull request with improvements
- Discuss ideas in issues
- Spread the word
- Reach out to me directly at utilforever@gmail.com or [](https://twitter.com/utilForever)
## License
<img align="right" src="http://opensource.org/trademarks/opensource/OSI-Approved-License-100x137.png">
The class is licensed under the [MIT License](http://opensource.org/licenses/MIT):
Copyright © 2020 [Chris Ohk](http://www.github.com/utilForever).
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
{
"pile_set_name": "Github"
}
|
// Copyright 2015 The Neugram Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package stmt defines data structures representing Neugram statements.
package stmt
import (
"neugram.io/ng/syntax/expr"
"neugram.io/ng/syntax/src"
"neugram.io/ng/syntax/tipe"
"neugram.io/ng/syntax/token"
)
// Stmt is implemented by every statement node in this package. The
// unexported stmt method restricts implementations to this package.
type Stmt interface {
	stmt()
	Pos() src.Pos // implements syntax.Node
}
// Import is a single package import, binding Path to a local Name.
type Import struct {
	Position src.Pos
	Name     string
	Path     string
}

// ImportSet is a grouped (factored) list of import declarations.
type ImportSet struct {
	Position src.Pos
	Imports  []*Import
}

// TypeDecl declares a named type.
type TypeDecl struct {
	Position src.Pos
	Name     string
	Type     *tipe.Named
}

// TypeDeclSet is a grouped list of type declarations.
type TypeDeclSet struct {
	Position  src.Pos
	TypeDecls []*TypeDecl
}

// MethodikDecl declares a named type together with its methods.
type MethodikDecl struct {
	Position src.Pos
	Name     string
	Type     *tipe.Named
	Methods  []*expr.FuncLiteral
}

// TODO InterfaceLiteral struct { Name string, MethodNames []string, Methods []*tipe.Func }

// Const declares one or more constants sharing a type and value list.
type Const struct {
	Position src.Pos
	NameList []string
	Type     tipe.Type
	Values   []expr.Expr
}

// ConstSet is a grouped list of constant declarations.
type ConstSet struct {
	Position src.Pos
	Consts   []*Const
}

// VarSet is a grouped list of variable declarations.
type VarSet struct {
	Position src.Pos
	Vars     []*Var
}

// Var declares one or more variables sharing a type and value list.
type Var struct {
	Position src.Pos
	NameList []string
	Type     tipe.Type
	Values   []expr.Expr
}

// Assign is an assignment statement; Decl reports whether it is a
// short variable declaration (":=").
type Assign struct {
	Position src.Pos
	Decl     bool
	Left     []expr.Expr
	Right    []expr.Expr // TODO: give up on multiple rhs values for now.
}

// Block is a brace-delimited sequence of statements.
type Block struct {
	Position src.Pos
	Stmts    []Stmt
}

// If is an if statement with an optional init statement and else branch.
type If struct {
	Position src.Pos
	Init     Stmt
	Cond     expr.Expr
	Body     Stmt // always *BlockStmt
	Else     Stmt
}

// For is a for loop; Init, Cond and Post may each be nil.
type For struct {
	Position src.Pos
	Init     Stmt
	Cond     expr.Expr
	Post     Stmt
	Body     Stmt // always *BlockStmt
}

// Switch is an expression switch statement.
type Switch struct {
	Position src.Pos
	Init     Stmt
	Cond     expr.Expr
	Cases    []SwitchCase
}

// SwitchCase is one case (or default) clause of a Switch.
type SwitchCase struct {
	Position src.Pos
	Conds    []expr.Expr
	Default  bool
	Body     *Block
}

// TypeSwitch is a type switch statement.
type TypeSwitch struct {
	Position src.Pos
	Init     Stmt // initialization statement; or nil
	Assign   Stmt // x := y.(type) or y.(type)
	Cases    []TypeSwitchCase
}

// TypeSwitchCase is one case (or default) clause of a TypeSwitch.
type TypeSwitchCase struct {
	Position src.Pos
	Default  bool
	Types    []tipe.Type
	Body     *Block
}

// Go is a go statement, launching Call in a new goroutine.
type Go struct {
	Position src.Pos
	Call     *expr.Call
}

// Range is a for-range loop over Expr; Decl reports ":=" bindings.
type Range struct {
	Position src.Pos
	Decl     bool
	Key      expr.Expr
	Val      expr.Expr
	Expr     expr.Expr
	Body     Stmt // always *BlockStmt
}

// Return is a return statement.
type Return struct {
	Position src.Pos
	Exprs    []expr.Expr
}

// Defer is a defer statement.
type Defer struct {
	Position src.Pos
	Expr     expr.Expr
}

// Simple is an expression used as a statement.
type Simple struct {
	Position src.Pos
	Expr     expr.Expr
}

// Send is channel send statement, "a <- b".
type Send struct {
	Position src.Pos
	Chan     expr.Expr
	Value    expr.Expr
}

// Branch is a continue, break, goto or fallthrough statement with an
// optional label.
type Branch struct {
	Position src.Pos
	Type     token.Token // Continue, Break, Goto, or Fallthrough
	Label    string
}

// Labeled is a labeled statement, "Label: Stmt".
type Labeled struct {
	Position src.Pos
	Label    string
	Stmt     Stmt
}

// Select is a select statement over channel operations.
type Select struct {
	Position src.Pos
	Cases    []SelectCase
}

// SelectCase is one case (or default) clause of a Select.
type SelectCase struct {
	Position src.Pos
	Default  bool
	Stmt     Stmt // a recv- or send-stmt
	Body     *Block
}

// Bad is a placeholder for a statement that failed to parse.
type Bad struct {
	Position src.Pos
	Error    error
}
// stmt is the unexported marker satisfying the Stmt interface; it
// prevents types outside this package from implementing Stmt.
func (s *Import) stmt()         {}
func (s *ImportSet) stmt()      {}
func (s *TypeDecl) stmt()       {}
func (s *TypeDeclSet) stmt()    {}
func (s *MethodikDecl) stmt()   {}
func (s *Const) stmt()          {}
func (s *ConstSet) stmt()       {}
func (s *Var) stmt()            {}
func (s *VarSet) stmt()         {}
func (s *Assign) stmt()         {}
func (s *Block) stmt()          {}
func (s *If) stmt()             {}
func (s *For) stmt()            {}
func (s *Switch) stmt()         {}
func (s *SwitchCase) stmt()     {}
func (s *TypeSwitch) stmt()     {}
func (s *TypeSwitchCase) stmt() {}
func (s *Go) stmt()             {}
func (s *Range) stmt()          {}
func (s *Return) stmt()         {}
func (s *Defer) stmt()          {}
func (s *Simple) stmt()         {}
func (s *Send) stmt()           {}
func (s *Branch) stmt()         {}
func (s *Labeled) stmt()        {}
func (s *Select) stmt()         {}
func (s *Bad) stmt()            {}
// Pos implements syntax.Node, returning the statement's source position.
//
// NOTE(review): SwitchCase, TypeSwitchCase and SelectCase use value
// receivers here while everything else (including their own stmt()
// methods) uses pointer receivers — confirm this asymmetry is intended
// before unifying, as changing receivers alters the types' method sets.
func (s *Import) Pos() src.Pos         { return s.Position }
func (s *ImportSet) Pos() src.Pos      { return s.Position }
func (s *TypeDecl) Pos() src.Pos       { return s.Position }
func (s *TypeDeclSet) Pos() src.Pos    { return s.Position }
func (s *MethodikDecl) Pos() src.Pos   { return s.Position }
func (s *Const) Pos() src.Pos          { return s.Position }
func (s *ConstSet) Pos() src.Pos       { return s.Position }
func (s *Var) Pos() src.Pos            { return s.Position }
func (s *VarSet) Pos() src.Pos         { return s.Position }
func (s *Assign) Pos() src.Pos         { return s.Position }
func (s *Block) Pos() src.Pos          { return s.Position }
func (s *If) Pos() src.Pos             { return s.Position }
func (s *For) Pos() src.Pos            { return s.Position }
func (s *Switch) Pos() src.Pos         { return s.Position }
func (s SwitchCase) Pos() src.Pos      { return s.Position }
func (s *TypeSwitch) Pos() src.Pos     { return s.Position }
func (s TypeSwitchCase) Pos() src.Pos  { return s.Position }
func (s *Go) Pos() src.Pos             { return s.Position }
func (s *Range) Pos() src.Pos          { return s.Position }
func (s *Return) Pos() src.Pos         { return s.Position }
func (s *Defer) Pos() src.Pos          { return s.Position }
func (s *Simple) Pos() src.Pos         { return s.Position }
func (s *Send) Pos() src.Pos           { return s.Position }
func (s *Branch) Pos() src.Pos         { return s.Position }
func (s *Labeled) Pos() src.Pos        { return s.Position }
func (s *Select) Pos() src.Pos         { return s.Position }
func (s SelectCase) Pos() src.Pos      { return s.Position }
func (s *Bad) Pos() src.Pos            { return s.Position }
|
{
"pile_set_name": "Github"
}
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/"
debug="false">
<appender name="dbexample" class="org.apache.log4j.RollingFileAppender">
<param name="File" value="${catalina.home}/logs/dbexample.log"/>
<param name="Append" value="true" />
<param name="ImmediateFlush" value="true" />
<param name="MaxFileSize" value="20MB" />
<param name="MaxBackupIndex" value="10" />
<layout class="org.apache.log4j.PatternLayout">
<param name="ConversionPattern" value="%-4r [%t] %-5p %c %x - %m%n" />
</layout>
</appender>
<logger name="com.journaldev" additivity="false">
<level value="DEBUG" />
<appender-ref ref="dbexample"/>
</logger>
<root>
<level value="debug" />
<appender-ref ref="dbexample" />
</root>
</log4j:configuration>
|
{
"pile_set_name": "Github"
}
|
<?xml version="1.0" encoding="UTF-8"?>
<!--
The contents of this file are subject to the terms of the Common Development and
Distribution License (the License). You may not use this file except in compliance with the
License.
You can obtain a copy of the License at legal/CDDLv1.0.txt. See the License for the
specific language governing permission and limitations under the License.
When distributing Covered Software, include this CDDL Header Notice in each file and include
the License file at legal/CDDLv1.0.txt. If applicable, add the following below the CDDL
Header, with the fields enclosed by brackets [] replaced by your own identifying
information: "Portions copyright [year] [name of copyright owner]".
Copyright 2014 ForgeRock AS.
-->
<!DOCTYPE ModuleProperties PUBLIC "=//iPlanet//Authentication Module Properties XML Interface 1.0 DTD//EN"
"jar://com/sun/identity/authentication/Auth_Module_Properties.dtd">
<ModuleProperties moduleName="Scripted" version="1.0" >
<Callbacks length="0" order="1" timeout="600" header="#WILL NOT BE SHOWN#" />
<Callbacks length="2" order="2" timeout="120" header="Sign in to OpenAM" >
<HiddenValueCallback>
<Id>clientScriptOutputData</Id>
</HiddenValueCallback>
<TextOutputCallback messageType="script">PLACEHOLDER</TextOutputCallback>
</Callbacks>
</ModuleProperties>
|
{
"pile_set_name": "Github"
}
|
/*
* Copyright 2017, GeoSolutions Sas.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*/
var expect = require('expect');
var {
SET_SEARCH_CONFIG_PROP,
RESET_SEARCH_CONFIG,
UPDATE_SERVICE,
setSearchConfigProp,
restServiceConfig,
updateService
} = require('../searchconfig');
// Action-creator unit tests: each creator must return a plain action
// object tagged with the right type constant and carrying its arguments
// through unchanged.
describe('Test correctness of the searchconfig actions', () => {
    it('resetServiceConfig', () => {
        const testPage = 1;
        const action = restServiceConfig(testPage);
        expect(action).toExist();
        expect(action.type).toBe(RESET_SEARCH_CONFIG);
        expect(action.page).toBe(testPage);
    });
    it('setSearchConfigProp', () => {
        const testProperty = 'prop';
        const testValue = 'val';
        const action = setSearchConfigProp(testProperty, testValue);
        expect(action).toExist();
        expect(action.type).toBe(SET_SEARCH_CONFIG_PROP);
        expect(action.property).toBe(testProperty);
        expect(action.value).toBe(testValue);
    });
    it('updateService', () => {
        const testService = "service";
        const testIdx = 1;
        const action = updateService(testService, testIdx);
        expect(action).toExist();
        expect(action.type).toBe(UPDATE_SERVICE);
        expect(action.service).toBe(testService);
        expect(action.idx).toBe(testIdx);
    });
});
|
{
"pile_set_name": "Github"
}
|
###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2020, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparison_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
    """
    Test file created by XlsxWriter against a file created by Excel.
    """

    def setUp(self):
        self.set_filename('chart_gradient06.xlsx')

    def test_create_file(self):
        """Test the creation of a simple XlsxWriter file."""
        workbook = Workbook(self.got_filename)
        worksheet = workbook.add_worksheet()

        chart = workbook.add_chart({'type': 'column'})
        chart.axis_ids = [61363328, 61364864]

        # Three data columns written into A1:C5, one per series.
        series_data = [
            [1, 2, 3, 4, 5],
            [2, 4, 6, 8, 10],
            [3, 6, 9, 12, 15],
        ]
        for cell, column in zip(('A1', 'B1', 'C1'), series_data):
            worksheet.write_column(cell, column)

        # First series carries a path-type gradient fill; the remaining
        # two series use default formatting.
        chart.add_series({
            'values': '=Sheet1!$A$1:$A$5',
            'gradient': {
                'colors': ['#DDEBCF', '#9CB86E', '#156B13'],
                'type': 'path'
            }
        })
        chart.add_series({'values': '=Sheet1!$B$1:$B$5'})
        chart.add_series({'values': '=Sheet1!$C$1:$C$5'})

        worksheet.insert_chart('E9', chart)
        workbook.close()

        self.assertExcelEqual()
|
{
"pile_set_name": "Github"
}
|
"""
author: shreyansh kushwaha
made on : 08.09.2020
"""
# The board is keyed by the strings '1'..'9'; a space marks a free square.
board = {'1' : ' ','2' : ' ','3' : ' ','4' : ' ','5' : ' ','6' : ' ','7' : ' ','8' : ' ','9' : ' '}
import random, os, check, time

# Player 1 always plays X, player 2 always plays O.
p1 = input("Enter your name player 1 (symbol X):\n")
p2 = input("Enter your name player 2 (symbol O):\n")

# One entry is appended per win/tie; the list lengths are the scores.
score1, score2, score_tie = [], [], []
total_moves =0

# Coin toss decides who moves first.
player = random.randint(1, 2)
time.sleep(2.6)
os.system("cls")  # NOTE(review): "cls" is Windows-only — confirm target platform
if player == 1:
    print(f" {p1} won the toss.")
else:
    print(f" {p2} won the toss")
time.sleep(3)
print(" Let us begin")
time.sleep(2)
def toggaleplayer(player):
    """Return the other player's number: 1 -> 2, 2 -> 1.

    Bug fix: the original only reassigned its local ``player`` variable
    and implicitly returned None, so callers doing
    ``a = toggaleplayer(player)`` always received None. Any value other
    than 1 or 2 still yields None, as before.
    """
    if player == 1:
        return 2
    elif player == 2:
        return 1
def playagain():
    # Ask for a rematch. "Y": flip the turn order and start a new round;
    # "N": print the final score tally and exit; anything else aborts.
    inp = input("Do you want to play again??(Y/N)\n")
    if inp.upper() == "Y":
        a = toggaleplayer(player)
        restart(a)
    elif inp.upper() == "N":
        os.system("cls")
        print("Thanks for playing")
        print(f"Number of times {p1} won : {len(score1)}.")
        print(f"Number of times {p2} won : {len(score2)}.")
        print(f"Number of ties: {len(score_tie)}.")
        abc = input()  # keep the console window open until Enter is pressed
        quit()
    else:
        print("Invalid input")
        quit()
def restart(a):
    # Start a fresh round with player `a` to move. This mirrors the
    # module-level game loop below, but tracks the current player in the
    # local `a` instead of the global `player`.
    total_moves, board =0, {'1' : ' ','2' : ' ','3' : ' ','4' : ' ','5' : ' ','6' : ' ','7' : ' ','8' : ' ','9' : ' '}
    while True:
        os.system("cls")
        print(board['1'] + '|' + board['2'] + '|' + board['3'] )
        print('-+-+-')
        print(board['4'] + '|' + board['5'] + '|' + board['6'] )
        print('-+-+-')
        print(board['7'] + '|' + board['8'] + '|' + board['9'] )
        # check() inspects the board for a win/tie and, when the round is
        # over, records the score and calls playagain.
        check.check(total_moves,score1, score2, score_tie, playagain, board, p1, p2)
        # Re-prompt the current player until a free square is chosen.
        while True:
            if a == 1:
                p1_input = input(f"Its {p1}'s chance..\nwhere do you want to place your move:")
                if p1_input.upper() in board and board[p1_input.upper()] == " ":
                    board[p1_input.upper()] = 'X'
                    a = 2
                    break
                else: # on wrong input
                    print("Invalid input \n Enter again. ")
                    continue
            else:
                p2_input = input(f"Its {p2}'s chance..\nwhere do you want to place your move:")
                if p2_input.upper() in board and board[p2_input.upper()] == " ":
                    board[p2_input.upper()] = 'O'
                    a = 1
                    break
                else:
                    print("Invalid Input")
                    continue
        total_moves += 1
# Main game loop: redraw the board, check for a result, then prompt the
# current player until a free square is chosen.
while True:
    os.system("cls")
    print(board['1'] + '|' + board['2'] + '|' + board['3'] )
    print('-+-+-')
    print(board['4'] + '|' + board['5'] + '|' + board['6'] )
    print('-+-+-')
    print(board['7'] + '|' + board['8'] + '|' + board['9'] )
    # check() inspects the board for a win/tie and, when the round is
    # over, records the score and calls playagain.
    check.check(total_moves,score1, score2, score_tie, playagain, board, p1, p2)
    while True:
        if player == 1:
            p1_input = input(f"Its {p1}'s chance..\nwhere do you want to place your move:")
            if p1_input.upper() in board and board[p1_input.upper()] == " ":
                board[p1_input.upper()] = 'X'
                player = 2
                break
            else: # on wrong input
                print("Invalid input ")
                continue
        else:
            p2_input = input(f"Its {p2}'s chance..\nwhere do you want to place your move:")
            if p2_input.upper() in board and board[p2_input.upper()] == " ":
                board[p2_input.upper()] = 'O'
                player = 1
                break
            else:
                print("Invalid Input")
                continue
    total_moves += 1
|
{
"pile_set_name": "Github"
}
|
//
// Generated by class-dump 3.5 (64 bit) (Debug version compiled Oct 15 2018 10:31:50).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by Steve Nygard.
//
@class NSData;
@protocol SETransceiver
- (NSData *)transceive:(NSData *)arg1 error:(id *)arg2;
@end
|
{
"pile_set_name": "Github"
}
|
// Base jqxgrid options and utils
// Defaults shared by every grid on the site; draw_grid() layers
// caller-supplied overrides on top of these via $.extend.
var base_jqxgrid_options = {
    altrows: true,
    columnsheight: 40,
    rowsheight: 40,
    filterrowheight: 40,
    width: '100%',
    editable: false,
    columnsresize: true,
    groupable: false,
    sortable: true,
    filterable: true,
    showstatusbar: false,
    statusbarheight: 50,
    showaggregates: true,
    pageable: true,
    pagermode: 'default',
    autoheight: true,
    autorowheight: false, // auto?
    enabletooltips: true, // another solution: http://www.jqwidgets.com/community/topic/grid-tooltip/
    pagesize: 10,
    pagesizeoptions: ['10', '15', '20', '30', '40', '50', '100'],
    virtualmode: false
};
// Build and render a jqxGrid into `selector`.
//
//   datafields / columns  - jqx data-source field and column definitions
//   server_pagination     - truthy enables server-side (virtual-mode) paging
//   custom_source_data    - optional overrides merged into the jqx source
//   custom_grid_options   - optional overrides merged into the grid options
//
// The data source fetches JSON from the current page URL; rows live under
// the `data` root and are keyed by `pk`.
var draw_grid = function(selector, datafields, columns, server_pagination,
    custom_source_data, custom_grid_options) {
    var source = {
        url: window.location.href,
        datafields: datafields,
        datatype: "json",
        id: 'pk',
        root: 'data',
        data: { enable_pagination: false },
        beforeprocessing: function (data) {
            // for server-side pagination
            source.totalrecords = data.total_records;
            render_error(data);
        },
        loadError: function (xhr, status, error){
            // The backend signals an expired session with a bare "logout" body.
            if(xhr.responseText=='logout'){
                window.location.href = login_url
            }
        }
    };
    if (custom_source_data) {
        if (custom_source_data.badgeClass){
            // Mirror the record count into the caller's badge element.
            custom_source_data.loadComplete = function(data){
                $('.'+custom_source_data.badgeClass).text(data.total_records || 0)
            }
        }
        source = $.extend({}, source, custom_source_data);
    }
    var dataAdapter = new $.jqx.dataAdapter(source);
    var current_jqxgrid_options = {
        source: dataAdapter,
        columns: columns,
        // for server-side pagination
        rendergridrows: function () {
            return dataAdapter.records;
        },
        // Restrict the filter dropdowns to the operators the backend supports.
        updatefilterconditions: function (type, defaultconditions) {
            var stringcomparisonoperators = ['EMPTY', 'NOT_EMPTY', 'CONTAINS', 'CONTAINS_CASE_SENSITIVE', 'FULL_TEXT_SEARCH',
                'DOES_NOT_CONTAIN', 'DOES_NOT_CONTAIN_CASE_SENSITIVE', 'STARTS_WITH', 'STARTS_WITH_CASE_SENSITIVE',
                'ENDS_WITH', 'ENDS_WITH_CASE_SENSITIVE', 'EQUAL', 'EQUAL_CASE_SENSITIVE', 'NULL', 'NOT_NULL'];
            var numericcomparisonoperators = ['EQUAL', 'NOT_EQUAL', 'LESS_THAN', 'LESS_THAN_OR_EQUAL', 'GREATER_THAN', 'GREATER_THAN_OR_EQUAL', 'NULL', 'NOT_NULL'];
            var datecomparisonoperators = ['EQUAL', 'NOT_EQUAL', 'LESS_THAN', 'LESS_THAN_OR_EQUAL', 'GREATER_THAN', 'GREATER_THAN_OR_EQUAL', 'NULL', 'NOT_NULL'];
            var booleancomparisonoperators = ['EQUAL', 'NOT_EQUAL'];
            switch (type) {
                case 'stringfilter':
                    return stringcomparisonoperators;
                case 'numericfilter':
                    return numericcomparisonoperators;
                case 'datefilter':
                    return datecomparisonoperators;
                case 'booleanfilter':
                    return booleancomparisonoperators;
            }
        }
    };
    if (custom_grid_options) {
        current_jqxgrid_options = $.extend({}, current_jqxgrid_options, custom_grid_options);
    }
    if (typeof server_pagination !== 'undefined' && server_pagination) {
        current_jqxgrid_options.virtualmode = true;
        current_jqxgrid_options.autoheight = true;
        current_jqxgrid_options.source._source.data.enable_pagination = server_pagination;
        current_jqxgrid_options.source._source.sort = function () {
            // update the grid and send a request to the server.
            $(selector).jqxGrid('updatebounddata');
        };
        current_jqxgrid_options.source._source.filter = function () {
            // update the grid and send a request to the server.
            $(selector).jqxGrid('updatebounddata', 'filter');
        };
    }
    var opts = $.extend({}, base_jqxgrid_options, current_jqxgrid_options);
    // needed to add custom filters (full text search)
    $(selector).bind('bindingcomplete', function (a, b) {
        var localizationObject = {};
        // Human-readable labels matching the operator lists above, in order.
        filterstringcomparisonoperators = ['empty', 'not empty', 'contains', 'contains(match case)', 'full text search(or contains)',
            'does not contain', 'does not contain(match case)', 'starts with', 'starts with(match case)',
            'ends with', 'ends with(match case)', 'equal', 'equal(match case)', 'null', 'not null'];
        filternumericcomparisonoperators = ['equal', 'not equal', 'less than', 'less than or equal', 'greater than', 'greater than or equal', 'null', 'not null'];
        filterdatecomparisonoperators = ['equal', 'not equal', 'less than', 'less than or equal', 'greater than', 'greater than or equal', 'null', 'not null'];
        filterbooleancomparisonoperators = ['equal', 'not equal'];
        localizationObject.filterstringcomparisonoperators = filterstringcomparisonoperators;
        localizationObject.filternumericcomparisonoperators = filternumericcomparisonoperators;
        localizationObject.filterdatecomparisonoperators = filterdatecomparisonoperators;
        localizationObject.filterbooleancomparisonoperators = filterbooleancomparisonoperators;
        // change default message for empty data set if user_projects_selected is empty
        if (!window.user_projects_selected.length){
            localizationObject.emptydatastring = "Select Project(s) Above for Viewing Data";
            // Blink the project selector to draw the user's attention.
            $(".project_selection label")
                .fadeOut(300)
                .fadeIn(300)
                .fadeOut(300)
                .fadeIn(300)
        }
        else if (window.table_warning) {
            localizationObject.emptydatastring = window.table_warning;
        }
        else {
            localizationObject.emptydatastring = "No data to display";
        }
        $(selector).jqxGrid('localizestrings', localizationObject);
    });
    $(selector).jqxGrid(opts)
};
// custom cell renderer for jqxgrid
// Replace the contents of a default-rendered jqxGrid cell with
// `new_value` and return the resulting outer-HTML string.
function renderCell(defaulthtml, new_value) {
    var cell = $(defaulthtml);
    cell.html(new_value);
    // outerHTML trick: clone, wrap in a throwaway <div>, read parent html.
    var wrapped = cell.clone().wrap('<div>').parent();
    return wrapped.html();
}
// Render `val` as a link to `url` inside a grid cell. When `new_window`
// is set (or this page itself lives in a popup named 'new'), the link
// opens in a popup sized to 80% of the viewport. `button_options`
// optionally styles the link as a full-cell button.
function linkFormatter(defaulthtml, url, val, new_window, button_options) {
    var new_value;
    if (new_window || window.name == 'new') {
        var popup_width = $(window).width()*0.8;
        var popup_height = $(window).height()*0.8;
        new_value = '<a href="' + url +
            '" onclick="window.open(this.href, \'new\', \'width=' +
            popup_width + ', height=' + popup_height + ',scrollbars\'); return false;">' +
            val + '</a>';
    } else {
        new_value = '<a href="' + url + '">' + val + '</a>';
    }
    if (button_options) {
        new_value = $(new_value)
            .addClass('btn')
            .addClass(button_options.klass)
            .css({margin: 0, height: '100%', width: '100%'});
        if (button_options.css) {
            new_value.css(button_options.css)
        }
        defaulthtml = $(defaulthtml);
        defaulthtml.css({margin: 0, padding: '2px', height: '100%'})
    }
    return renderCell(defaulthtml, new_value);
}
function default
|
{
"pile_set_name": "Github"
}
|
import helper
from distutils.version import LooseVersion
class check_configuration_ssl_allow_invalid_cert():
    """
    check_configuration_ssl_allow_invalid_cert:
    Bypasses the validation checks for TLS/SSL certificates on other servers in
    the cluster and allows the use of invalid certificates. When using the
    allowInvalidCertificates setting, MongoDB logs as a warning the use of the
    invalid certificate.
    MongoDB versions 2.6.4 and above, check the net.ssl.weakCertificateValidation configuration option.
    """
    # References:
    # https://docs.mongodb.org/v2.6/reference/configuration-options/#net.ssl.allowInvalidCertificates
    TITLE = 'Allow Invalid Certificate'
    CATEGORY = 'Configuration'
    TYPE = 'configuration_file'
    SQL = None  # SQL not needed... because this is NoSQL.

    # Class-level defaults only; __init__ gives each instance its own
    # values so that check instances never share mutable state.
    verbose = False
    skip = False
    result = {}
    db = None

    def do_check(self, configuration_file):
        """Evaluate the check and return a dict with 'level'
        (GREEN/RED/GRAY) and a human-readable 'output' message.

        configuration_file -- path to the mongod YAML configuration file.
        """
        option = None
        version_number = self.db.server_info()['version']
        if LooseVersion(version_number) >= LooseVersion("2.6.4"):
            option = 'net.ssl.allowInvalidCertificates'
            value = helper.get_yaml_config_value(configuration_file, option)
            if value is None:
                # Option absent: secure default, validation is enforced.
                self.result['level'] = 'GREEN'
                self.result['output'] = '%s not found, not enabled.' % (option)
            elif value == False:  # noqa: E712 -- deliberately ==, matches falsy YAML values
                self.result['level'] = 'GREEN'
                self.result['output'] = '%s is (%s) not enabled.' % (option, value)
            else:
                self.result['level'] = 'RED'
                self.result['output'] = '%s is (%s) enabled.' % (option, value)
        else:
            self.result['level'] = 'GRAY'
            self.result['output'] = 'This check does not apply to MongoDB versions below 2.6.4.'
        return self.result

    def __init__(self, parent):
        """Bind the check to its runner: copy verbosity and the DB handle.

        Bug fix: ``result`` was previously only a shared class attribute,
        so every instance of every check wrote into the same dict; each
        instance now gets its own.
        """
        print('Performing check: ' + self.TITLE)
        self.result = {}
        self.verbose = parent.verbose
        self.db = parent.db
|
{
"pile_set_name": "Github"
}
|
/**
* Copyright (c) 2015 Bosch Software Innovations GmbH and others.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.eclipse.hawkbit.repository.jpa.model.helper;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.hawkbit.repository.jpa.EntityInterceptor;
import org.springframework.beans.factory.annotation.Autowired;
/**
* A singleton bean which holds the {@link EntityInterceptor} to have all
* interceptors in spring beans.
*
*/
public final class EntityInterceptorHolder {

    private static final EntityInterceptorHolder SINGLETON = new EntityInterceptorHolder();

    // Populated by Spring when EntityInterceptor beans exist; stays an
    // empty list otherwise (required = false).
    @Autowired(required = false)
    private final List<EntityInterceptor> entityInterceptors = new ArrayList<>();

    private EntityInterceptorHolder() {
    }

    /**
     * @return the entity interceptor holder singleton instance
     */
    public static EntityInterceptorHolder getInstance() {
        return SINGLETON;
    }

    /**
     * @return all registered {@link EntityInterceptor}s; never {@code null}
     */
    public List<EntityInterceptor> getEntityInterceptors() {
        return entityInterceptors;
    }
}
|
{
"pile_set_name": "Github"
}
|
github.com/hashicorp/errwrap v0.0.0-20141028054710-7554cd9344ce h1:prjrVgOk2Yg6w+PflHoszQNLTUh4kaByUcEWM/9uin4=
github.com/hashicorp/errwrap v0.0.0-20141028054710-7554cd9344ce/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
|
{
"pile_set_name": "Github"
}
|
/* Soot - a J*va Optimization Framework
* Copyright (C) 2004 Jennifer Lhotak
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
package soot.javaToJimple;
import java.util.*;
public abstract class AbstractJimpleBodyBuilder {
protected soot.jimple.JimpleBody body;
public void ext(AbstractJimpleBodyBuilder ext){
this.ext = ext;
if (ext.ext != null){
throw new RuntimeException("Extensions created in wrong order.");
}
ext.base = this.base;
}
public AbstractJimpleBodyBuilder ext(){
if (ext == null) return this;
return ext;
}
private AbstractJimpleBodyBuilder ext = null;
public void base(AbstractJimpleBodyBuilder base){
this.base = base;
}
public AbstractJimpleBodyBuilder base(){
return base;
}
private AbstractJimpleBodyBuilder base = this;
protected soot.jimple.JimpleBody createJimpleBody(polyglot.ast.Block block, List formals, soot.SootMethod sootMethod){
return ext().createJimpleBody(block, formals, sootMethod);
}
/*protected soot.Value createExpr(polyglot.ast.Expr expr){
return ext().createExpr(expr);
}*/
protected soot.Value createAggressiveExpr(polyglot.ast.Expr expr, boolean reduceAggressively, boolean reverseCondIfNec){
//System.out.println("in abstract");
return ext().createAggressiveExpr(expr, reduceAggressively, reverseCondIfNec);
}
protected void createStmt(polyglot.ast.Stmt stmt){
ext().createStmt(stmt);
}
protected boolean needsAccessor(polyglot.ast.Expr expr){
return ext().needsAccessor(expr);
}
protected soot.Local handlePrivateFieldAssignSet(polyglot.ast.Assign assign){
return ext().handlePrivateFieldAssignSet(assign);
}
protected soot.Local handlePrivateFieldUnarySet(polyglot.ast.Unary unary){
return ext().handlePrivateFieldUnarySet(unary);
}
protected soot.Value getAssignRightLocal(polyglot.ast.Assign assign, soot.Local leftLocal){
return ext().getAssignRightLocal(assign, leftLocal);
}
protected soot.Value getSimpleAssignRightLocal(polyglot.ast.Assign assign){
return ext().getSimpleAssignRightLocal(assign);
}
protected soot.Local handlePrivateFieldSet(polyglot.ast.Expr expr, soot.Value right, soot.Value base){
return ext().handlePrivateFieldSet(expr, right, base);
}
protected soot.SootMethodRef getSootMethodRef(polyglot.ast.Call call){
return ext().getSootMethodRef(call);
}
protected soot.Local generateLocal(soot.Type sootType){
return ext().generateLocal(sootType);
}
protected soot.Local generateLocal(polyglot.types.Type polyglotType){
return ext().generateLocal(polyglotType);
}
protected soot.Local getThis(soot.Type sootType){
return ext().getThis(sootType);
}
protected soot.Value getBaseLocal(polyglot.ast.Receiver receiver){
return ext().getBaseLocal(receiver);
}
protected soot.Value createLHS(polyglot.ast.Expr expr){
return ext().createLHS(expr);
}
protected soot.jimple.FieldRef getFieldRef(polyglot.ast.Field field){
return ext().getFieldRef(field);
}
protected soot.jimple.Constant getConstant(soot.Type sootType, int val){
return ext().getConstant(sootType, val);
}
}
|
{
"pile_set_name": "Github"
}
|
/*
* PRCMU clock implementation for ux500 platform.
*
* Copyright (C) 2012 ST-Ericsson SA
* Author: Ulf Hansson <ulf.hansson@linaro.org>
*
* License terms: GNU General Public License (GPL) version 2
*/
#include <linux/clk-provider.h>
#include <linux/mfd/dbx500-prcmu.h>
#include <linux/slab.h>
#include <linux/io.h>
#include <linux/err.h>
#include "clk.h"
/* Retrieve the driver-private clk_prcmu wrapper from a generic clk_hw. */
#define to_clk_prcmu(_hw) container_of(_hw, struct clk_prcmu, hw)

/*
 * struct clk_prcmu - a clock managed by the PRCMU firmware
 * @hw:            common clock framework handle
 * @cg_sel:        PRCMU clock-gate selector for this clock
 * @is_prepared:   cached prepare state (set after a successful prepare)
 * @is_enabled:    cached soft enable state (enable/disable do no HW work)
 * @opp_requested: set while an APE OPP/voltage requirement is held
 */
struct clk_prcmu {
	struct clk_hw hw;
	u8 cg_sel;
	int is_prepared;
	int is_enabled;
	int opp_requested;
};
/* PRCMU clock operations. */
/*
 * Gating is performed at prepare/unprepare time via the PRCMU firmware;
 * enable/disable only flip the cached soft state flag.
 */
static int clk_prcmu_prepare(struct clk_hw *hw)
{
	int ret;
	struct clk_prcmu *clk = to_clk_prcmu(hw);

	ret = prcmu_request_clock(clk->cg_sel, true);
	if (!ret)
		clk->is_prepared = 1;

	return ret;
}

static void clk_prcmu_unprepare(struct clk_hw *hw)
{
	struct clk_prcmu *clk = to_clk_prcmu(hw);

	/* On failure keep is_prepared set so the cached state stays true. */
	if (prcmu_request_clock(clk->cg_sel, false))
		pr_err("clk_prcmu: %s failed to disable %s.\n", __func__,
			__clk_get_name(hw->clk));
	else
		clk->is_prepared = 0;
}

static int clk_prcmu_is_prepared(struct clk_hw *hw)
{
	struct clk_prcmu *clk = to_clk_prcmu(hw);
	return clk->is_prepared;
}

static int clk_prcmu_enable(struct clk_hw *hw)
{
	struct clk_prcmu *clk = to_clk_prcmu(hw);
	clk->is_enabled = 1;
	return 0;
}

static void clk_prcmu_disable(struct clk_hw *hw)
{
	struct clk_prcmu *clk = to_clk_prcmu(hw);
	clk->is_enabled = 0;
}

static int clk_prcmu_is_enabled(struct clk_hw *hw)
{
	struct clk_prcmu *clk = to_clk_prcmu(hw);
	return clk->is_enabled;
}

/* Rate queries and changes are delegated directly to the PRCMU firmware. */
static unsigned long clk_prcmu_recalc_rate(struct clk_hw *hw,
		unsigned long parent_rate)
{
	struct clk_prcmu *clk = to_clk_prcmu(hw);
	return prcmu_clock_rate(clk->cg_sel);
}

static long clk_prcmu_round_rate(struct clk_hw *hw, unsigned long rate,
		unsigned long *parent_rate)
{
	struct clk_prcmu *clk = to_clk_prcmu(hw);
	return prcmu_round_clock_rate(clk->cg_sel, rate);
}

static int clk_prcmu_set_rate(struct clk_hw *hw, unsigned long rate,
		unsigned long parent_rate)
{
	struct clk_prcmu *clk = to_clk_prcmu(hw);
	return prcmu_set_clock_rate(clk->cg_sel, rate);
}
/*
 * OPP variants: before ungating the clock, request the 100% APE
 * operating point (or its voltage) through the PRCMU QoS layer, and
 * drop the requirement again when the clock is unprepared.
 */
static int clk_prcmu_opp_prepare(struct clk_hw *hw)
{
	int err;
	struct clk_prcmu *clk = to_clk_prcmu(hw);

	if (!clk->opp_requested) {
		err = prcmu_qos_add_requirement(PRCMU_QOS_APE_OPP,
						(char *)__clk_get_name(hw->clk),
						100);
		if (err) {
			pr_err("clk_prcmu: %s fail req APE OPP for %s.\n",
				__func__, __clk_get_name(hw->clk));
			return err;
		}
		clk->opp_requested = 1;
	}

	err = prcmu_request_clock(clk->cg_sel, true);
	if (err) {
		/* Roll back the OPP requirement on failure. */
		prcmu_qos_remove_requirement(PRCMU_QOS_APE_OPP,
					(char *)__clk_get_name(hw->clk));
		clk->opp_requested = 0;
		return err;
	}

	clk->is_prepared = 1;
	return 0;
}

static void clk_prcmu_opp_unprepare(struct clk_hw *hw)
{
	struct clk_prcmu *clk = to_clk_prcmu(hw);

	/* If gating fails, keep both the OPP requirement and prepare state. */
	if (prcmu_request_clock(clk->cg_sel, false)) {
		pr_err("clk_prcmu: %s failed to disable %s.\n", __func__,
			__clk_get_name(hw->clk));
		return;
	}

	if (clk->opp_requested) {
		prcmu_qos_remove_requirement(PRCMU_QOS_APE_OPP,
					(char *)__clk_get_name(hw->clk));
		clk->opp_requested = 0;
	}

	clk->is_prepared = 0;
}

static int clk_prcmu_opp_volt_prepare(struct clk_hw *hw)
{
	int err;
	struct clk_prcmu *clk = to_clk_prcmu(hw);

	if (!clk->opp_requested) {
		err = prcmu_request_ape_opp_100_voltage(true);
		if (err) {
			pr_err("clk_prcmu: %s fail req APE OPP VOLT for %s.\n",
				__func__, __clk_get_name(hw->clk));
			return err;
		}
		clk->opp_requested = 1;
	}

	err = prcmu_request_clock(clk->cg_sel, true);
	if (err) {
		/* Roll back the voltage requirement on failure. */
		prcmu_request_ape_opp_100_voltage(false);
		clk->opp_requested = 0;
		return err;
	}

	clk->is_prepared = 1;
	return 0;
}

static void clk_prcmu_opp_volt_unprepare(struct clk_hw *hw)
{
	struct clk_prcmu *clk = to_clk_prcmu(hw);

	/* If gating fails, keep both the voltage requirement and prepare state. */
	if (prcmu_request_clock(clk->cg_sel, false)) {
		pr_err("clk_prcmu: %s failed to disable %s.\n", __func__,
			__clk_get_name(hw->clk));
		return;
	}

	if (clk->opp_requested) {
		prcmu_request_ape_opp_100_voltage(false);
		clk->opp_requested = 0;
	}

	clk->is_prepared = 0;
}
/* Ops for scalable clocks: gating plus rate query/round/set support. */
static struct clk_ops clk_prcmu_scalable_ops = {
	.prepare = clk_prcmu_prepare,
	.unprepare = clk_prcmu_unprepare,
	.is_prepared = clk_prcmu_is_prepared,
	.enable = clk_prcmu_enable,
	.disable = clk_prcmu_disable,
	.is_enabled = clk_prcmu_is_enabled,
	.recalc_rate = clk_prcmu_recalc_rate,
	.round_rate = clk_prcmu_round_rate,
	.set_rate = clk_prcmu_set_rate,
};
static struct clk_ops clk_prcmu_gate_ops = {
.prepare = clk_prcmu_prepare,
.unprepare = clk_prcmu_unprepare,
.is_prepared = clk_prcmu_is_prepared,
.enable = clk_prcmu_enable,
.disable = clk_prcmu_disable,
.is_enabled = clk_prcmu_is_enabled,
.recalc_rate = clk_pr
|
{
"pile_set_name": "Github"
}
|
#import "GPUImageColorInvertFilter.h"

// Fragment shader: writes (1.0 - rgb) with the original alpha, inverting
// every color channel. The OpenGL ES variant carries precision
// qualifiers (highp/lowp), which are invalid in desktop GLSL — hence the
// separate desktop build below.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageInvertFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;

 uniform sampler2D inputImageTexture;

 void main()
 {
    lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);

    gl_FragColor = vec4((1.0 - textureColor.rgb), textureColor.w);
 }
);
#else
NSString *const kGPUImageInvertFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;

 uniform sampler2D inputImageTexture;

 void main()
 {
    vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);

    gl_FragColor = vec4((1.0 - textureColor.rgb), textureColor.w);
 }
);
#endif

@implementation GPUImageColorInvertFilter

// Initializes the filter with the invert fragment shader; returns nil if
// the superclass fails to compile/link the shader program.
- (id)init;
{
    if (!(self = [super initWithFragmentShaderFromString:kGPUImageInvertFragmentShaderString]))
    {
        return nil;
    }

    return self;
}

@end
|
{
"pile_set_name": "Github"
}
|
/* libFLAC - Free Lossless Audio Codec library
* Copyright (C) 2000,2001,2002,2003,2004,2005,2006,2007 Josh Coalson
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* - Neither the name of the Xiph.org Foundation nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#if HAVE_CONFIG_H
# include <config.h>
#endif
#include "private/crc.h"
/* CRC-8, poly = x^8 + x^2 + x^1 + x^0, init = 0 */
FLAC__byte const FLAC__crc8_table[256] = {
0x00, 0x07, 0x0E, 0x09, 0x1C, 0x1B, 0x12, 0x15,
0x38, 0x3F, 0x36, 0x31, 0x24, 0x23, 0x2A, 0x2D,
0x70, 0x77, 0x7E, 0x79, 0x6C, 0x6B, 0x62, 0x65,
0x48, 0x4F, 0x46, 0x41, 0x54, 0x53, 0x5A, 0x5D,
0xE0, 0xE7, 0xEE, 0xE9, 0xFC, 0xFB, 0xF2, 0xF5,
0xD8, 0xDF, 0xD6, 0xD1, 0xC4, 0xC3, 0xCA, 0xCD,
0x90, 0x97, 0x9E, 0x99, 0x8C, 0x8B, 0x82, 0x85,
0xA8, 0xAF, 0xA6, 0xA1, 0xB4, 0xB3, 0xBA, 0xBD,
0xC7, 0xC0, 0xC9, 0xCE, 0xDB, 0xDC, 0xD5, 0xD2,
0xFF, 0xF8, 0xF1, 0xF6, 0xE3, 0xE4, 0xED, 0xEA,
0xB7, 0xB0, 0xB9, 0xBE, 0xAB, 0xAC, 0xA5, 0xA2,
0x8F, 0x88, 0x81, 0x86, 0x93, 0x94, 0x9D, 0x9A,
0x27, 0x20, 0x29, 0x2E, 0x3B, 0x3C, 0x35, 0x32,
0x1F, 0x18, 0x11, 0x16, 0x03, 0x04, 0x0D, 0x0A,
0x57, 0x50, 0x59, 0x5E, 0x4B, 0x4C, 0x45, 0x42,
0x6F, 0x68, 0x61, 0x66, 0x73, 0x74, 0x7D, 0x7A,
0x89, 0x8E, 0x87, 0x80, 0x95, 0x92, 0x9B, 0x9C,
0xB1, 0xB6, 0xBF, 0xB8, 0xAD, 0xAA, 0xA3, 0xA4,
0xF9, 0xFE, 0xF7, 0xF0, 0xE5, 0xE2, 0xEB, 0xEC,
0xC1, 0xC6, 0xCF, 0xC8, 0xDD, 0xDA, 0xD3, 0xD4,
0x69, 0x6E, 0x67, 0x60, 0x75, 0x72, 0x7B, 0x7C,
0x51, 0x56, 0x5F, 0x58, 0x4D, 0x4A, 0x43, 0x44,
0x19, 0x1E, 0x17, 0x10, 0x05, 0x02, 0x0B, 0x0C,
0x21, 0x26, 0x2F, 0x28, 0x3D, 0x3A, 0x33, 0x34,
0x4E, 0x49, 0x40, 0x47, 0x52, 0x55, 0x5C, 0x5B,
0x76, 0x71, 0x78, 0x7F, 0x6A, 0x6D, 0x64, 0x63,
0x3E, 0x39, 0x30, 0x37, 0x22, 0x25, 0x2C, 0x2B,
0x06, 0x01, 0x08, 0x0F, 0x1A, 0x1D, 0x14, 0x13,
0xAE, 0xA9, 0xA0, 0xA7, 0xB2, 0xB5, 0xBC, 0xBB,
0x96, 0x91, 0x98, 0x9F, 0x8A, 0x8D, 0x84, 0x83,
0xDE, 0xD9, 0xD0, 0xD7, 0xC2, 0xC5, 0xCC, 0xCB,
0xE6, 0xE1, 0xE8, 0xEF, 0xFA, 0xFD, 0xF4, 0xF3
};
/* CRC-16, poly = x^16 + x^15 + x^2 + x^0, init = 0 */
unsigned FLAC__crc16_table[256] = {
0x0000, 0x8005, 0x800f, 0x000a, 0x801b, 0x001e, 0x0014, 0x8011,
0x8033, 0x0036, 0x003c, 0x8039, 0x0028, 0x802d, 0x8027, 0x0022,
0x8063, 0x0066, 0x006c, 0x8069, 0x0078, 0x807d, 0x8077, 0x0072,
0x0050, 0x8055, 0x805f, 0x005a, 0x804b, 0x004e, 0x0044, 0x8041,
0x80c3, 0x00c6, 0x00cc, 0x80c9, 0x00d8, 0x80dd, 0x80d7, 0x00d2,
0x00f0, 0x80f5, 0x80ff, 0x00fa, 0x80eb, 0x00ee, 0x00e4, 0x80e1,
0x00a0, 0x80a5, 0x80af, 0x00aa, 0x80bb, 0x00be, 0x00b4, 0x80b1,
0x8093, 0x0096, 0x009c, 0x8099, 0x0088, 0x808d, 0x8087, 0x0082,
0x8183, 0x
|
{
"pile_set_name": "Github"
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This code is a modified version of the original Spark 1.0.2 implementation.
*
*/
package com.massivedatascience.clusterer
import com.massivedatascience.clusterer.MultiKMeansClusterer.ClusteringWithDistortion
import com.massivedatascience.linalg.{ MutableWeightedVector, WeightedVector }
import org.apache.spark.SparkContext._
import org.apache.spark.rdd.RDD
import scala.collection.mutable.ArrayBuffer
/**
* A K-Means clustering implementation that performs multiple K-means clusterings simultaneously,
* returning the one with the lowest cost.
*
*/
//scalastyle:off
@deprecated("use ColumnTrackingKMeans", "1.2.0")
class MultiKMeans extends MultiKMeansClusterer {
// Runs Lloyd's algorithm for several sets of initial centers ("runs") at
// once, sharing each pass over the data, and returns one clustering (with
// its distortion) per run.
def cluster(
maxIterations: Int,
pointOps: BregmanPointOps,
data: RDD[BregmanPoint],
c: Seq[IndexedSeq[BregmanCenter]]): Seq[ClusteringWithDistortion] = {
// Mutable per-run copies of the centers, updated in place each iteration.
val centers = c.map(_.toArray).toArray
def cluster(): Seq[ClusteringWithDistortion] = {
val runs = centers.length
// active(r) is true while run r has not yet converged.
val active = Array.fill(runs)(true)
val costs = Array.fill(runs)(0.0)
var activeRuns = new ArrayBuffer[Int] ++ (0 until runs)
var iteration = 0
/*
* Execute iterations of Lloyd's algorithm until all runs have converged.
*/
while (iteration < maxIterations && activeRuns.nonEmpty) {
// remove the empty clusters
logInfo(s"iteration $iteration")
val activeCenters = activeRuns.map(r => centers(r)).toArray
if (log.isInfoEnabled) {
for (r <- 0 until activeCenters.length)
logInfo(s"run ${activeRuns(r)} has ${activeCenters(r).length} centers")
}
// Find the sum and count of points mapping to each center
val (centroids: Array[((Int, Int), WeightedVector)], runDistortion) = getCentroids(data, activeCenters)
if (log.isInfoEnabled) {
for (run <- activeRuns) logInfo(s"run $run distortion ${runDistortion(run)}")
}
// Assume convergence; a run is re-activated below if any of its
// centers moved or became empty.
for (run <- activeRuns) active(run) = false
for (((runIndex: Int, clusterIndex: Int), cn: MutableWeightedVector) <- centroids) {
val run = activeRuns(runIndex)
if (cn.weight == 0.0) {
// Empty cluster: mark its slot for removal and keep the run active.
active(run) = true
centers(run)(clusterIndex) = null.asInstanceOf[BregmanCenter]
} else {
val centroid = cn.asImmutable
active(run) = active(run) || pointOps.centerMoved(pointOps.toPoint(centroid), centers(run)(clusterIndex))
centers(run)(clusterIndex) = pointOps.toCenter(centroid)
}
}
// filter out null centers
for (r <- activeRuns) centers(r) = centers(r).filter(_ != null)
// update distortions and print log message if run completed during this iteration
for ((run, runIndex) <- activeRuns.zipWithIndex) {
costs(run) = runDistortion(runIndex)
if (!active(run)) logInfo(s"run $run finished in ${iteration + 1} iterations")
}
activeRuns = activeRuns.filter(active(_))
iteration += 1
}
costs.zip(centers).map { case (x, y) => ClusteringWithDistortion(x, y.toIndexedSeq) }
}
// One distributed pass over the data: for every (run, cluster) pair,
// accumulate the weighted sum of the points assigned to that cluster, and
// sum each run's distortion into an accumulator.
def getCentroids(
data: RDD[BregmanPoint],
activeCenters: Array[Array[BregmanCenter]]): (Array[((Int, Int), WeightedVector)], Array[Double]) = {
val sc = data.sparkContext
val runDistortion = Array.fill(activeCenters.length)(sc.accumulator(0.0))
val bcActiveCenters = sc.broadcast(activeCenters)
val result = data.mapPartitions[((Int, Int), WeightedVector)] { points =>
val bcCenters = bcActiveCenters.value
// Per-partition mutable accumulators, one per (run, cluster).
val centers = bcCenters.map(c => Array.fill(c.length)(pointOps.make()))
for (point <- points; (clusters, run) <- bcCenters.zipWithIndex) {
val (cluster, cost) = pointOps.findClosest(clusters, point)
runDistortion(run) += cost
centers(run)(cluster).add(point)
}
// Emit one ((run, cluster), partial centroid) record per cluster.
val contribution = for (
(clusters, run) <- bcCenters.zipWithIndex;
(contrib, cluster) <- clusters.zipWithIndex
) yield {
((run, cluster), centers(run)(cluster).asImmutable)
}
contribution.iterator
}.aggregateByKey(pointOps.make())(
(x, y) => x.add(y),
(x, y) => x.add(y)
).map(x => (x._1, x._2.asImmutable)).collect()
bcActiveCenters.unpersist()
(result, runDistortion.map(x => x.localValue))
}
cluster()
}
}
//scalastyle:on
|
{
"pile_set_name": "Github"
}
|
// DLLExports.cpp
//
// Notes:
// Win2000:
// If I register at HKCR\Folder\ShellEx then DLL is locked.
// otherwise it unloads after explorer closing.
// but if I call menu for desktop items it's locked all the time
#include "../../../Common/MyWindows.h"
#include <OleCtl.h>
#include "../../../Common/MyInitGuid.h"
#include "../../../Common/ComTry.h"
#include "../../../Windows/DLL.h"
#include "../../../Windows/ErrorMsg.h"
#include "../../../Windows/NtCheck.h"
#include "../../../Windows/Registry.h"
#include "../FileManager/IFolder.h"
#include "ContextMenu.h"
static LPCTSTR k_ShellExtName = TEXT("7-Zip Shell Extension");
static LPCTSTR k_Approved = TEXT("Software\\Microsoft\\Windows\\CurrentVersion\\Shell Extensions\\Approved");
// {23170F69-40C1-278A-1000-000100020000}
static LPCTSTR k_Clsid = TEXT("{23170F69-40C1-278A-1000-000100020000}");
DEFINE_GUID(CLSID_CZipContextMenu,
k_7zip_GUID_Data1,
k_7zip_GUID_Data2,
k_7zip_GUID_Data3_Common,
0x10, 0x00, 0x00, 0x01, 0x00, 0x02, 0x00, 0x00);
using namespace NWindows;
HINSTANCE g_hInstance = 0;
HWND g_HWND = 0;
LONG g_DllRefCount = 0; // Reference count of this DLL.
// #define ODS(sz) OutputDebugString(L#sz)
// COM class factory for CZipContextMenu.  Each live instance bumps the
// DLL-wide reference count so DllCanUnloadNow() keeps the DLL loaded.
class CShellExtClassFactory:
public IClassFactory,
public CMyUnknownImp
{
public:
CShellExtClassFactory() { InterlockedIncrement(&g_DllRefCount); }
~CShellExtClassFactory() { InterlockedDecrement(&g_DllRefCount); }
MY_UNKNOWN_IMP1_MT(IClassFactory)
STDMETHODIMP CreateInstance(LPUNKNOWN, REFIID, void**);
STDMETHODIMP LockServer(BOOL);
};
// IClassFactory::CreateInstance: create a CZipContextMenu and return the
// interface requested via riid.  Aggregation is not supported.
STDMETHODIMP CShellExtClassFactory::CreateInstance(LPUNKNOWN pUnkOuter,
REFIID riid, void **ppvObj)
{
// ODS("CShellExtClassFactory::CreateInstance()\r\n");
*ppvObj = NULL;
if (pUnkOuter)
return CLASS_E_NOAGGREGATION;
CZipContextMenu *shellExt;
try
{
shellExt = new CZipContextMenu();
}
catch(...) { return E_OUTOFMEMORY; }
if (!shellExt)
return E_OUTOFMEMORY;
// If the requested interface is not supported, destroy the orphaned object.
HRESULT res = shellExt->QueryInterface(riid, ppvObj);
if (res != S_OK)
delete shellExt;
return res;
}
// IClassFactory::LockServer: lock requests are ignored here; DLL lifetime
// is governed solely by g_DllRefCount.
STDMETHODIMP CShellExtClassFactory::LockServer(BOOL /* fLock */)
{
return S_OK; // Check it
}
#define NT_CHECK_FAIL_ACTION return FALSE;
// DLL entry point: records the module handle on process attach and runs the
// NT version check (NT_CHECK expands NT_CHECK_FAIL_ACTION, i.e. returns
// FALSE, on unsupported systems).
extern "C"
BOOL WINAPI DllMain(
#ifdef UNDER_CE
HANDLE hInstance
#else
HINSTANCE hInstance
#endif
, DWORD dwReason, LPVOID)
{
if (dwReason == DLL_PROCESS_ATTACH)
{
g_hInstance = (HINSTANCE)hInstance;
// ODS("In DLLMain, DLL_PROCESS_ATTACH\r\n");
NT_CHECK
}
else if (dwReason == DLL_PROCESS_DETACH)
{
// ODS("In DLLMain, DLL_PROCESS_DETACH\r\n");
}
return TRUE;
}
// Used to determine whether the DLL can be unloaded by OLE
STDAPI DllCanUnloadNow(void)
{
// ODS("In DLLCanUnloadNow\r\n");
// Safe to unload only when no factory/extension objects remain alive.
return (g_DllRefCount == 0 ? S_OK : S_FALSE);
}
// Standard COM entry point: hand out the class factory for our CLSID.
STDAPI DllGetClassObject(REFCLSID rclsid, REFIID riid, LPVOID* ppv)
{
// ODS("In DllGetClassObject\r\n");
*ppv = NULL;
if (IsEqualIID(rclsid, CLSID_CZipContextMenu))
{
CShellExtClassFactory *cf;
try
{
cf = new CShellExtClassFactory;
}
catch(...) { return E_OUTOFMEMORY; }
if (!cf)
return E_OUTOFMEMORY;
// On QI failure destroy the factory so its DLL-refcount bump is undone.
HRESULT res = cf->QueryInterface(riid, ppv);
if (res != S_OK)
delete cf;
return res;
}
return CLASS_E_CLASSNOTAVAILABLE;
// return _Module.GetClassObject(rclsid, riid, ppv);
}
// Writes the registry entries for the shell extension:
//   HKCR\CLSID\{clsid}                 -> friendly name
//   HKCR\CLSID\{clsid}\InprocServer32  -> path of this DLL, Apartment model
//   HKLM\...\Shell Extensions\Approved -> marks the extension as approved
// Returns FALSE if the module path or a mandatory key cannot be written.
static BOOL RegisterServer()
{
FString modulePath;
if (!NDLL::MyGetModuleFileName(modulePath))
return FALSE;
const UString modulePathU = fs2us(modulePath);
CSysString clsidString = k_Clsid;
CSysString s = TEXT("CLSID\\");
s += clsidString;
{
NRegistry::CKey key;
if (key.Create(HKEY_CLASSES_ROOT, s, NULL, REG_OPTION_NON_VOLATILE, KEY_WRITE) != NOERROR)
return FALSE;
key.SetValue(NULL, k_ShellExtName);
NRegistry::CKey keyInproc;
if (keyInproc.Create(key, TEXT("InprocServer32"), NULL, REG_OPTION_NON_VOLATILE, KEY_WRITE) != NOERROR)
return FALSE;
keyInproc.SetValue(NULL, modulePathU);
keyInproc.SetValue(TEXT("ThreadingModel"), TEXT("Apartment"));
}
#if !defined(_WIN64) && !defined(UNDER_CE)
if (IsItWindowsNT())
#endif
{
// Best effort: failure to write the "Approved" value is not fatal.
NRegistry::CKey key;
if (key.Create(HKEY_LOCAL_MACHINE, k_Approved, NULL, REG_OPTION_NON_VOLATILE, KEY_WRITE) == NOERROR)
key.SetValue(clsidString, k_ShellExtName);
}
return TRUE;
}
// regsvr32 entry point: register the shell extension.
STDAPI DllRegisterServer(void)
{
return RegisterServer() ? S_OK: SELFREG_E_CLASS;
}
// Removes the registry entries created by RegisterServer().  Deletion
// failures are deliberately ignored (best effort); always reports TRUE.
static BOOL UnregisterServer()
{
const CSysString clsidString = k_Clsid;
CSysString s = TEXT("CLSID\\");
s += clsidString;
CSysString s2 = s;
s2.AddAscii("\\InprocServer32");
// Delete the subkey first: RegDeleteKey cannot remove a key with children.
RegDeleteKey(HKEY_CLASSES_ROOT, s2);
RegDeleteKey(HKEY_CLASSES_ROOT, s);
#if !defined(_WIN64) && !defined(UNDER_CE)
if (IsItWindowsNT())
#endif
{
HKEY hKey;
if (RegOpenKeyEx(HKEY_LOCAL_MACHINE, k_Approved, 0, KEY_SET_VALUE, &hKey) == NOERROR)
{
RegDeleteValue(hKey, clsidString);
RegCloseKey(hKey);
}
}
return TRUE;
}
// regsvr32 /u entry point: unregister the shell extension.
STDAPI DllUnregisterServer(void)
{
return UnregisterServer() ? S_OK: SELFREG_E_CLASS;
}
|
{
"pile_set_name": "Github"
}
|
/**
* Marlin 3D Printer Firmware
* Copyright (c) 2020 MarlinFirmware [https://github.com/MarlinFirmware/Marlin]
*
* Based on Sprinter and grbl.
* Copyright (c) 2011 Camiel Gubbels / Erik van der Zalm
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*
*/
#pragma once
// Screen ids used by the DGUS (T5UID) display.  The values are dictated by
// the display's UI project file, hence non-contiguous and out of order.
enum DGUSLCD_Screens : uint8_t {
DGUSLCD_SCREEN_BOOT = 0,
DGUSLCD_SCREEN_MAIN = 1,
// STATUS/STATUS2 intentionally alias the MAIN screen id.
DGUSLCD_SCREEN_STATUS = 1,
DGUSLCD_SCREEN_STATUS2 = 1,
DGUSLCD_SCREEN_TEMPERATURE = 10,
DGUSLCD_SCREEN_PREHEAT = 18,
DGUSLCD_SCREEN_POWER_LOSS = 100,
DGUSLCD_SCREEN_MANUALMOVE = 192,
DGUSLCD_SCREEN_UTILITY = 120,
DGUSLCD_SCREEN_FILAMENT_HEATING = 146,
DGUSLCD_SCREEN_FILAMENT_LOADING = 148,
DGUSLCD_SCREEN_FILAMENT_UNLOADING = 158,
DGUSLCD_SCREEN_MANUALEXTRUDE = 160,
DGUSLCD_SCREEN_SDFILELIST = 71,
DGUSLCD_SCREEN_SDPRINTMANIPULATION = 73,
DGUSLCD_SCREEN_SDPRINTTUNE = 75,
DGUSLCD_SCREEN_FLC_PREHEAT = 94,
DGUSLCD_SCREEN_FLC_PRINTING = 96,
DGUSLCD_SCREEN_STEPPERMM = 212,
DGUSLCD_SCREEN_PID_E = 214,
DGUSLCD_SCREEN_PID_BED = 218,
DGUSLCD_SCREEN_Z_OFFSET = 222,
DGUSLCD_SCREEN_INFOS = 36,
DGUSLCD_SCREEN_CONFIRM = 240,
DGUSLCD_SCREEN_KILL = 250, ///< Kill Screen. Must always be 250 (to be able to display "Error wrong LCD Version")
DGUSLCD_SCREEN_WAITING = 251,
DGUSLCD_SCREEN_POPUP = 252, ///< special target, popup screen will also return this code to say "return to previous screen"
DGUSLDC_SCREEN_UNUSED = 255 ///< NOTE(review): "DGUSLDC" looks like a typo of "DGUSLCD" — name kept, callers may reference it
};
// Display Memory layout used (T5UID)
// Except system variables this is arbitrary, just to organize stuff....
// 0x0000 .. 0x0FFF -- System variables and reserved by the display
// 0x1000 .. 0x1FFF -- Variables to never change location, regardless of UI Version
// 0x2000 .. 0x2FFF -- Controls (VPs that will trigger some action)
// 0x3000 .. 0x4FFF -- Marlin Data to be displayed
// 0x5000 .. -- SPs (if we want to modify display elements, e.g change color or like) -- currently unused
// As there is plenty of space (at least most displays have >8k RAM), we do not pack them too tight,
// so that we can keep variables nicely together in the address space.
// UI Version always on 0x1000...0x1002 so that the firmware can check this and bail out.
constexpr uint16_t VP_UI_VERSION_MAJOR = 0x1000; // Major -- incremented when incompatible
constexpr uint16_t VP_UI_VERSION_MINOR = 0x1001; // Minor -- incremented on new features, but compatible
constexpr uint16_t VP_UI_VERSION_PATCH = 0x1002; // Patch -- fixed which do not change functionality.
constexpr uint16_t VP_UI_FLAVOUR = 0x1010; // lets reserve 16 bytes here to determine if UI is suitable for this Marlin. tbd.
// Storage space for the Killscreen messages. 0x1100 - 0x1200 . Reused for the popup.
constexpr uint16_t VP_MSGSTR1 = 0x1100;
constexpr uint8_t VP_MSGSTR1_LEN = 0x20; // might be more place for it...
constexpr uint16_t VP_MSGSTR2 = 0x1140;
constexpr uint8_t VP_MSGSTR2_LEN = 0x20;
constexpr uint16_t VP_MSGSTR3 = 0x1180;
constexpr uint8_t VP_MSGSTR3_LEN = 0x20;
constexpr uint16_t VP_MSGSTR4 = 0x11C0;
constexpr uint8_t VP_MSGSTR4_LEN = 0x20;
// Screenchange request for screens that only make sense when printer is idle.
// e.g movement is only allowed if printer is not printing.
// Marlin must confirm by setting the screen manually.
constexpr uint16_t VP_SCREENCHANGE_ASK = 0x2000;
constexpr uint16_t VP_SCREENCHANGE = 0x2001; // Key-Return button to new menu pressed. Data contains target screen in low byte and info in high byte.
constexpr uint16_t VP_TEMP_ALL_OFF = 0x2002; // Turn all heaters off. Value arbitrary ;)=
constexpr uint16_t VP_SCREENCHANGE_WHENSD = 0x2003; // "Print" Button touched -- go only there if there is an SD Card.
constexpr uint16_t VP_CONFIRMED = 0x2010; // OK on confirm screen.
// Buttons on the SD-Card File listing.
constexpr uint16_t VP_SD_ScrollEvent = 0x2020; // Data: 0 for "up a directory", numbers are the amount to scroll, e.g -1 one up, 1 one down
constexpr uint16_t VP_SD_FileSelected = 0x2022; // Number of file field selected.
constexpr uint16_t VP_SD_FileSelectConfirm = 0x2024; // (This is a virtual VP and emulated by the Confirm Screen when a file has been confirmed)
constexpr uint16_t VP_SD_ResumePauseAbort = 0x2026; // Resume(Data=0), Pause(Data=1), Abort(Data=2) SD Card prints
constexpr uint16_t VP_SD_AbortPrintConfirmed = 0x2028; // Abort print confirmation (virtual, will be injected by the confirm dialog)
constexpr uint16_t VP_SD_Print_Setting = 0x2040;
constexpr uint16_t VP_SD_Print_LiveAdjustZ = 0x2050; // Data: 0 down, 1 up
// Controls for movement (we can't use the incremental / decremental feature of the display at this feature works only with 16 bit values
// (which would limit us to 655.35mm, which is likely not a problem for common setups, but i don't want to rule out hangprinters support)
// A word about the coding: The VP will be per axis and the return code will be an signed 16 bit value in 0.01 mm resolution, telling us
// the relative travel amount t he user wants to do. So eg. if the display sends us VP=2100 with value 100, the user wants us to move X by +1 mm.
constexpr uint16_t VP_MOVE_X = 0x2100;
constexpr uint16_t VP_MOVE_Y = 0x2102;
constexpr uint16_t VP_MOVE_Z = 0x2104;
constexpr uint16_t VP_MOVE_E0 = 0x2110;
constexpr uint16_t VP_MOVE_E1 = 0x2112;
//constexpr uint
|
{
"pile_set_name": "Github"
}
|
/* This file is part of the Pangolin Project.
* http://github.com/stevenlovegrove/Pangolin
*
* Copyright (c) 2011 Steven Lovegrove
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
#pragma once
#include <Python.h>
#include <pangolin/platform.h>
namespace pangolin
{
/// Reference-counted wrapper around a PyObject*.
/// The wrapped object is appropriately Py_INCREF'd / Py_DECREF'd, and the
/// held reference is dropped on destruction.
class PyUniqueObj
{
public:
    inline
    PyUniqueObj()
        : obj(0)
    {
    }

    /// Assumption: PythonObject has already been appropriately INCREF'd;
    /// this wrapper takes ownership of that reference.
    inline
    PyUniqueObj(PyObject* obj)
        : obj(obj)
    {
    }

    inline
    PyUniqueObj(const PyUniqueObj& other)
        : obj(other.obj)
    {
        if(obj) Py_INCREF(obj);
    }

    inline
    PyUniqueObj(PyUniqueObj&& other)
        : obj(other.obj)
    {
        other.obj = 0;
    }

    inline
    ~PyUniqueObj()
    {
        if(obj) Py_DECREF(obj);
    }

    /// Copy assignment: take a new reference to other's object.
    /// FIX: this was previously implicitly deleted (a user-declared move
    /// assignment suppresses the implicit copy assignment), so lvalue
    /// assignment failed to compile even though a copy constructor existed.
    inline
    PyUniqueObj& operator=(const PyUniqueObj& other)
    {
        if(this != &other) {
            // INCREF before Release so self-referencing chains stay alive.
            if(other.obj) Py_INCREF(other.obj);
            Release();
            obj = other.obj;
        }
        return *this;
    }

    /// Move assignment: steal other's reference.
    /// FIX: self-move is now a no-op (previously it dropped the reference).
    inline
    void operator=(PyUniqueObj&& other)
    {
        if(this != &other) {
            Release();
            obj = other.obj;
            other.obj = 0;
        }
    }

    /// Take ownership of an already-INCREF'd object, releasing any
    /// previously held reference.
    inline
    void operator=(PyObject* obj)
    {
        Release();
        this->obj = obj;
    }

    /// Drop the held reference (if any) and reset to empty.
    inline
    void Release() {
        if(obj) {
            Py_DECREF(obj);
            obj = 0;
        }
    }

    inline
    PyObject* operator*() {
        return obj;
    }

    inline
    operator PyObject*() {
        return obj;
    }

private:
    PyObject* obj;
};
}
|
{
"pile_set_name": "Github"
}
|
/*!
\mainpage
This is an example library that provides a very rudimentary OBJ file format
plugin for Usd. It is intentionally not particularly functional. The aim is to
provide nearly the bare minimum plugin structure for learning purposes. For a
far more advanced example, including an implementation of a low-level layer
backing store plugin, see the usdAbc alembic plugin.
The plugin is structured by functional component:
- \b fileFormat contains the SdfFileFormat plugin interface implementation.
- \b stream and \b streamIO contain the OBJ parser and data representation.
- \b translator contains the logic that translates OBJ data to Usd data.
*/
|
{
"pile_set_name": "Github"
}
|
// increment
// Action creator: flag the post at position `index` for a like increment.
export function increment(index) {
  const action = { type: 'INCREMENT_LIKES' };
  action.index = index;
  return action;
}
// add comment
// Action creator: attach `comment` written by `author` to post `postId`.
export function addComment(postId, author, comment) {
  return Object.assign(
    { type: 'ADD_COMMENT' },
    { postId, author, comment }
  );
}
// remove comment
// Action creator: remove the comment at position `i` from post `postId`.
export function removeComment(postId, i) {
  const action = { type: 'REMOVE_COMMENT' };
  action.i = i;
  action.postId = postId;
  return action;
}
|
{
"pile_set_name": "Github"
}
|
/*
* Copyright (C) 2005-2018 Team Kodi
* This file is part of Kodi - https://kodi.tv
*
* SPDX-License-Identifier: GPL-2.0-or-later
* See LICENSES/README.md for more information.
*/
#include "GUIRenderingControl.h"
#include "threads/SingleLock.h"
#include "guilib/IRenderingCallback.h"
#ifdef TARGET_WINDOWS
#include "rendering/dx/DeviceResources.h"
#endif
#define LABEL_ROW1 10
#define LABEL_ROW2 11
#define LABEL_ROW3 12
// Construct with explicit geometry; the render callback attaches later via
// InitCallback().
CGUIRenderingControl::CGUIRenderingControl(int parentID, int controlID, float posX, float posY, float width, float height)
: CGUIControl(parentID, controlID, posX, posY, width, height)
{
ControlType = GUICONTROL_RENDERADDON;
m_callback = NULL;
}
// Copy constructor: the render callback is deliberately NOT copied — the
// new control starts detached.
CGUIRenderingControl::CGUIRenderingControl(const CGUIRenderingControl &from)
: CGUIControl(from)
{
ControlType = GUICONTROL_RENDERADDON;
m_callback = NULL;
}
// Attach a rendering callback: computes the control's final on-screen rect,
// clamps it to the backbuffer, and asks the callback to create its rendering
// resources.  Returns false if callback is null or creation failed.
bool CGUIRenderingControl::InitCallback(IRenderingCallback *callback)
{
  if (!callback)
    return false;

  CSingleLock lock(m_rendering);
  CServiceBroker::GetWinSystem()->GetGfxContext().CaptureStateBlock();

  // Map the control rect into final (scaled) screen coordinates.
  float x = CServiceBroker::GetWinSystem()->GetGfxContext().ScaleFinalXCoord(GetXPosition(), GetYPosition());
  float y = CServiceBroker::GetWinSystem()->GetGfxContext().ScaleFinalYCoord(GetXPosition(), GetYPosition());
  float w = CServiceBroker::GetWinSystem()->GetGfxContext().ScaleFinalXCoord(GetXPosition() + GetWidth(), GetYPosition() + GetHeight()) - x;
  float h = CServiceBroker::GetWinSystem()->GetGfxContext().ScaleFinalYCoord(GetXPosition() + GetWidth(), GetYPosition() + GetHeight()) - y;

  // Clamp to the backbuffer bounds.
  if (x < 0) x = 0;
  if (y < 0) y = 0;
  if (x + w > CServiceBroker::GetWinSystem()->GetGfxContext().GetWidth()) w = CServiceBroker::GetWinSystem()->GetGfxContext().GetWidth() - x;
  if (y + h > CServiceBroker::GetWinSystem()->GetGfxContext().GetHeight()) h = CServiceBroker::GetWinSystem()->GetGfxContext().GetHeight() - y;

  void *device = NULL;
#ifdef TARGET_WINDOWS // FIX: was '#if' — matches the '#ifdef TARGET_WINDOWS' at the top of this file and doesn't rely on the macro expanding to a value
  device = DX::DeviceResources::Get()->GetD3DDevice();
#endif
  if (callback->Create((int)(x+0.5f), (int)(y+0.5f), (int)(w+0.5f), (int)(h+0.5f), device))
    m_callback = callback;
  else
    return false; // NOTE(review): the captured state block is not re-applied on this path — confirm intended

  CServiceBroker::GetWinSystem()->GetGfxContext().ApplyStateBlock();
  return true;
}
// Standard visibility processing; additionally releases the rendering
// callback once the control becomes invisible.
void CGUIRenderingControl::UpdateVisibility(const CGUIListItem *item)
{
// if made invisible, start timer, only free addonptr after
// some period, configurable by window class
CGUIControl::UpdateVisibility(item);
if (!IsVisible() && m_callback)
FreeResources();
}
// Per-frame processing: mark this control's region dirty when the callback
// reports changed content, then delegate to the base class.
void CGUIRenderingControl::Process(unsigned int currentTime, CDirtyRegionList &dirtyregions)
{
//! @todo Add processing to the addon so it could mark when actually changing
CSingleLock lock(m_rendering);
if (m_callback && m_callback->IsDirty())
MarkDirtyRegion();
CGUIControl::Process(currentTime, dirtyregions);
}
// Render the callback's content inside a viewport confined to the control's
// rect, preserving the surrounding GFX state around the callback's drawing.
void CGUIRenderingControl::Render()
{
CSingleLock lock(m_rendering);
if (m_callback)
{
// set the viewport - note: We currently don't have any control over how
// the addon renders, so the best we can do is attempt to define
// a viewport??
CServiceBroker::GetWinSystem()->GetGfxContext().SetViewPort(m_posX, m_posY, m_width, m_height);
CServiceBroker::GetWinSystem()->GetGfxContext().CaptureStateBlock();
m_callback->Render();
CServiceBroker::GetWinSystem()->GetGfxContext().ApplyStateBlock();
CServiceBroker::GetWinSystem()->GetGfxContext().RestoreViewPort();
}
CGUIControl::Render();
}
// Stop the callback and detach it.  The 'immediately' flag is not used in
// this body — release is always synchronous here.
void CGUIRenderingControl::FreeResources(bool immediately)
{
CSingleLock lock(m_rendering);
if (!m_callback) return;
CServiceBroker::GetWinSystem()->GetGfxContext().CaptureStateBlock(); //! @todo locking
m_callback->Stop();
CServiceBroker::GetWinSystem()->GetGfxContext().ApplyStateBlock();
m_callback = NULL;
}
// Focusable by mouse position when visible and the point hits the control.
bool CGUIRenderingControl::CanFocusFromPoint(const CPoint &point) const
{ // mouse is allowed to focus this control, but it doesn't actually receive focus
return IsVisible() && HitTest(point);
}
|
{
"pile_set_name": "Github"
}
|
# Unix SMB/CIFS implementation.
# Copyright (C) 2014 Catalyst.Net Ltd
#
# Auto generate param_functions.c
#
# ** NOTE! The following LGPL license applies to the ldb
# ** library. This does NOT imply that all of Samba is released
# ** under the LGPL
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, see <http://www.gnu.org/licenses/>.
#
import os
import xml.etree.ElementTree as ET
import optparse
# parse command line arguments
parser = optparse.OptionParser()
parser.add_option("-f", "--file", dest="filename",
help="input file", metavar="FILE")
parser.add_option("-o", "--output", dest="output",
help='output file', metavar="FILE")
parser.add_option("--mode", type="choice", metavar="<FUNCTIONS|S3PROTO|LIBPROTO|PARAMDEFS|PARAMTABLE>",
choices=["FUNCTIONS", "S3PROTO", "LIBPROTO", "PARAMDEFS", "PARAMTABLE"], default="FUNCTIONS")
parser.add_option("--scope", metavar="<GLOBAL|LOCAL>",
choices=["GLOBAL", "LOCAL"], default="GLOBAL")
(options, args) = parser.parse_args()
if options.filename is None:
parser.error("No input file specified")
if options.output is None:
parser.error("No output file specified")
def iterate_all(path):
    """Iterate and yield all the parameters defined in a parameters XML file.

    :param path: path to parameters xml file
    :raises Exception: if the file cannot be read, or a parameter lacks its
        mandatory name/type/context attributes
    """
    # Read the whole document up front; 'with' guarantees the handle is
    # closed (the previous version leaked the open file object).
    try:
        with open(path, 'r') as p:
            out = p.read()
    except IOError as e:
        # Include the underlying cause instead of discarding it.
        raise Exception("Error opening parameters file: %s" % e)

    # parse the parameters xml file
    root = ET.fromstring(out)
    for parameter in root:
        name = parameter.attrib.get("name")
        param_type = parameter.attrib.get("type")
        context = parameter.attrib.get("context")
        func = parameter.attrib.get("function")
        synonym = parameter.attrib.get("synonym")
        removed = parameter.attrib.get("removed")
        generated = parameter.attrib.get("generated_function")
        handler = parameter.attrib.get("handler")
        enumlist = parameter.attrib.get("enumlist")
        deprecated = parameter.attrib.get("deprecated")
        synonyms = parameter.findall('synonym')

        # Parameters marked removed are skipped entirely.
        if removed == "1":
            continue

        constant = parameter.attrib.get("constant")
        substitution = parameter.attrib.get("substitution")
        parm = parameter.attrib.get("parm")
        if name is None or param_type is None or context is None:
            # str() guards against concatenating None (was a TypeError when
            # the name attribute itself was missing).
            raise Exception("Error parsing parameter: " + str(name))
        if func is None:
            # Default C function name: lowercase, spaces -> underscores.
            func = name.replace(" ", "_").lower()
        if enumlist is None:
            enumlist = "NULL"
        if handler is None:
            handler = "NULL"
        yield {'name': name,
               'type': param_type,
               'context': context,
               'function': func,
               'constant': (constant == '1'),
               'substitution': (substitution == '1'),
               'parm': (parm == '1'),
               'synonym' : synonym,
               'generated' : generated,
               'enumlist' : enumlist,
               'handler' : handler,
               'deprecated' : deprecated,
               'synonyms' : synonyms }
# map doc attributes to a section of the generated function
# context: "G" (global) or "S" (per-share) selects the macro family suffix.
context_dict = {"G": "_GLOBAL", "S": "_LOCAL"}
# XML parameter type -> suffix of the generated accessor macro.  Several XML
# types collapse onto the same representation (enum/bytes/octal -> integer).
param_type_dict = {
"boolean" : "_BOOL",
"list" : "_LIST",
"string" : "_STRING",
"integer" : "_INTEGER",
"enum" : "_INTEGER",
"char" : "_CHAR",
"boolean-auto" : "_INTEGER",
"cmdlist" : "_LIST",
"bytes" : "_INTEGER",
"octal" : "_INTEGER",
"ustring" : "_STRING",
}
def generate_functions(path_in, path_out):
    """Write the FN_* accessor macro invocations for every parameter.

    :param path_in: path to the parameters XML file
    :param path_out: path of the C source file to generate
    :raises Exception: on a parameter with an unknown context or type
    """
    # 'with' replaces the try/finally close.
    with open(path_out, 'w') as f:
        f.write('/* This file was automatically generated by generate_param.py. DO NOT EDIT */\n\n')
        # FIX: iterate path_in — previously this read options.filename and
        # silently ignored its own path_in parameter.
        for parameter in iterate_all(path_in):
            # filter out parametric options
            if ':' in parameter['name']:
                continue
            if parameter['synonym'] == "1":
                continue
            if parameter['generated'] == "0":
                continue
            output_string = "FN"
            temp = context_dict.get(parameter['context'])
            if temp is None:
                raise Exception(parameter['name'] + " has an invalid context " + parameter['context'])
            output_string += temp
            # Strings get a variant depending on whether %-substitution applies.
            if parameter['type'] == "string" or parameter['type'] == "ustring":
                if parameter['substitution']:
                    output_string += "_SUBSTITUTED"
                else:
                    output_string += "_CONST"
            if parameter['parm']:
                output_string += "_PARM"
            temp = param_type_dict.get(parameter['type'])
            if temp is None:
                raise Exception(parameter['name'] + " has an invalid param type " + parameter['type'])
            output_string += temp
            f.write(output_string + "(" + parameter['function'] + ", " + parameter['function'] + ')\n')
# XML parameter type -> C declaration type used in generated prototypes.
mapping = {
'boolean' : 'bool ',
'string' : 'char *',
'integer' : 'int ',
'char' : 'char ',
'list' : 'const char **',
'enum' : 'int ',
'boolean-auto' : 'int ',
'cmdlist' : 'const char **',
'bytes' : 'int ',
'octal' : 'int ',
'ustring' : 'char *',
}
def make_s3_param_proto(path_in, path_out):
file_out = open(path_out, 'w')
try:
file_out.write('/* This file was automatically generated by generate_param.py. DO NOT EDIT */\n\n')
header = get_header(path_out)
file_out.write("#ifndef %s\n" % header)
file_out.write("#define %s\n\n" % header)
for parameter in iterate_all(path_in):
# filter out parameteric options
if ':' in parameter['name']:
continue
if parameter['synonym'] == "1":
continue
if parameter['generated'] == "0":
continue
output_string = ""
param_type = mapping.get(parameter['type'])
if param_type is None:
raise Exception(parameter['name'] + " has an invalid context " + parameter['context'])
output_string += param_type
output_string += "lp_%s" % parameter['function']
param = None
if parameter['parm']:
param = "const struct share_params *p"
else:
param = "int"
if parameter['type'] == 'string' or parameter['type'] == 'ustring':
if parameter['substitution']:
if parameter['context'] == 'G':
output_string += '(TALLOC_CTX *ctx, const struct loadparm_substitution *lp_sub);\n'
elif parameter['context'] == 'S':
output_string += '(TALLOC_CTX *ctx, const struct loadpar
|
{
"pile_set_name": "Github"
}
|
var R = require('../source');
var eq = require('./shared/eq');
// Unit tests for R.dropWhile: drop leading elements while the predicate
// holds, then return the remainder of the list (or string) unchanged.
describe('dropWhile', function() {
it('skips elements while the function reports `true`', function() {
eq(R.dropWhile(function(x) {return x < 5;}, [1, 3, 5, 7, 9]), [5, 7, 9]);
});
it('returns an empty list for an empty list', function() {
eq(R.dropWhile(function() { return false; }, []), []);
eq(R.dropWhile(function() { return true; }, []), []);
});
it('starts at the right arg and acknowledges undefined', function() {
// `void 0` is `undefined`; dropping stops at the first undefined element.
var sublist = R.dropWhile(function(x) {return x !== void 0;}, [1, 3, void 0, 5, 7]);
eq(sublist.length, 3);
eq(sublist[0], void 0);
eq(sublist[1], 5);
eq(sublist[2], 7);
});
it('can operate on strings', function() {
eq(R.dropWhile(function(x) { return x !== 'd'; }, 'Ramda'), 'da');
});
});
|
{
"pile_set_name": "Github"
}
|
fileFormatVersion: 2
guid: 61757cf57a877ce46a12429a7e8996dd
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:
|
{
"pile_set_name": "Github"
}
|
GLOBAL SCOPE:
(1,1) -> class Math
(50,1) -> class Main
CLASS SCOPE OF 'Math':
(2,16) -> static function abs : int->int
(10,16) -> static function pow : int->int->int
(20,16) -> static function log : int->int
(33,16) -> static function max : int->int->int
(41,16) -> static function min : int->int->int
FORMAL SCOPE OF 'abs':
(2,24) -> variable @a : int
LOCAL SCOPE:
FORMAL SCOPE OF 'pow':
(10,24) -> variable @a : int
(10,31) -> variable @b : int
LOCAL SCOPE:
(11,13) -> variable i : int
(12,13) -> variable result : int
FORMAL SCOPE OF 'log':
(20,24) -> variable @a : int
LOCAL SCOPE:
(24,13) -> variable result : int
FORMAL SCOPE OF 'max':
(33,24) -> variable @a : int
(33,31) -> variable @b : int
LOCAL SCOPE:
FORMAL SCOPE OF 'min':
(41,24) -> variable @a : int
(41,31) -> variable @b : int
LOCAL SCOPE:
CLASS SCOPE OF 'Main':
(51,17) -> static function main : void
FORMAL SCOPE OF 'main':
LOCAL SCOPE:
|
{
"pile_set_name": "Github"
}
|
/*
* Copyright (C) 2006-2020 Istituto Italiano di Tecnologia (IIT)
* Copyright (C) 2006-2010 RobotCub Consortium
* All rights reserved.
*
* This software may be modified and distributed under the terms of the
* BSD-3-Clause license. See the accompanying LICENSE file for details.
*/
/*
* Most of this file is from the output_example.c of ffmpeg -
* copyright/copypolicy statement follows --
*/
/*
* Libavformat API example: Output a media file in any supported
* libavformat format. The default codecs are used.
*
* Copyright (c) 2003 Fabrice Bellard
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#include "FfmpegWriter.h"
#include "ffmpeg_api.h"
#include <yarp/os/all.h>
#include <yarp/sig/all.h>
#include <yarp/os/Log.h>
#include <yarp/os/LogComponent.h>
#include <cstdlib>
#include <cstring>
#include <cmath>
#ifndef M_PI
#define M_PI 3.1415926535897931
#endif
#define STREAM_FRAME_RATE 25 /* 25 images/s */
#define STREAM_PIX_FMT AV_PIX_FMT_YUV420P /* default pix_fmt */
#define STREAM_PIX_WORK AV_PIX_FMT_RGB24
using namespace yarp::os;
using namespace yarp::dev;
using namespace yarp::sig;
using namespace yarp::sig::file;
namespace {
YARP_LOG_COMPONENT(FFMPEGWRITER, "yarp.device.ffmpeg_writer")
}
/**************************************************************/
/* audio output */
float t, tincr, tincr2;
int16_t *samples;
int samples_size;
int samples_at;
int samples_channels;
uint8_t *audio_outbuf;
int audio_outbuf_size;
int audio_input_frame_size;
/*
* add an audio output stream
*/
/*
 * Allocate a new audio stream on the muxer context `oc`, configured for the
 * codec identified by `codec_id`.  Aborts via yCFatal if the stream cannot
 * be allocated.  Returns the freshly created stream.
 */
static AVStream *add_audio_stream(AVFormatContext *oc, AVCodecID codec_id)
{
    AVStream *stream = avformat_new_stream(oc, NULL);
    if (!stream) {
        yCFatal(FFMPEGWRITER, "Could not alloc stream");
    }

    AVCodecContext *ctx = stream->codec;
    ctx->codec_id = codec_id;
    ctx->codec_type = AVMEDIA_TYPE_AUDIO;

    /* sample parameters: 64 kbit/s, 44.1 kHz, stereo */
    ctx->bit_rate = 64000;
    ctx->sample_rate = 44100;
    ctx->channels = 2;

    return stream;
}
/*
 * Open the encoder for audio stream `st`, initialise the test-tone signal
 * generator state (globals t/tincr/tincr2), and allocate the output and
 * sample buffers (globals audio_outbuf, samples, samples_size, ...).
 * Aborts via yCFatal if the codec is missing or cannot be opened.
 */
static void open_audio(AVFormatContext *oc, AVStream *st)
{
    yCInfo(FFMPEGWRITER, "Opening audio stream");

    AVCodecContext *c;
    AVCodec *codec;
    c = st->codec;

    /* find the audio encoder */
    codec = avcodec_find_encoder(c->codec_id);
    if (!codec) {
        yCFatal(FFMPEGWRITER, "Audio codec not found");
    }

    /* open it */
    if (avcodec_open2(c, codec, nullptr) < 0) {
        yCFatal(FFMPEGWRITER, "Could not open codec");
    }

    /* init signal generator */
    t = 0;
    tincr = 2 * M_PI * 110.0 / c->sample_rate;
    /* increment frequency by 110 Hz per second */
    tincr2 = 2 * M_PI * 110.0 / c->sample_rate / c->sample_rate;

    audio_outbuf_size = 10000;
    audio_outbuf = (uint8_t*)av_malloc(audio_outbuf_size);

    /* ugly hack for PCM codecs (will be removed ASAP with new PCM
       support to compute the input frame size in samples */
    if (c->frame_size <= 1) {
        audio_input_frame_size = audio_outbuf_size / c->channels;
        switch(st->codec->codec_id) {
        case AV_CODEC_ID_PCM_S16LE:
        case AV_CODEC_ID_PCM_S16BE:
        case AV_CODEC_ID_PCM_U16LE:
        case AV_CODEC_ID_PCM_U16BE:
            /* 16-bit samples: halve the frame size expressed in samples */
            audio_input_frame_size >>= 1;
            break;
        default:
            break;
        }
    } else {
        audio_input_frame_size = c->frame_size;
    }

    samples_size = audio_input_frame_size;
    samples_at = 0;
    samples_channels = c->channels;
    samples = (int16_t*)av_malloc(samples_size*2*samples_channels);

    /* BUGFIX: this trace was previously emitted with yCFatal, which aborts
       the process right after the codec was opened successfully.  It is
       purely informational, so log it at info level instead. */
    yCInfo(FFMPEGWRITER,
           "FRAME SIZE is %d / samples size is %d\n",
           c->frame_size,
           samples_size);
}
/* prepare a 16 bit dummy audio frame of 'frame_size' samples and
'nb_channels' channels */
/*
 * Fill `samples` with `frame_size` frames of a 16-bit sine sweep, the same
 * value duplicated across `nb_channels` channels.  Advances the global
 * oscillator state (t, tincr, tincr2) as it goes.
 */
static void get_audio_frame(int16_t *samples, int frame_size, int nb_channels)
{
    int16_t *dst = samples;
    for (int f = 0; f < frame_size; f++) {
        int value = (int)(sin(t) * 10000);
        for (int ch = 0; ch < nb_channels; ch++) {
            *dst++ = value;
        }
        t += tincr;
        tincr += tincr2;   /* frequency rises over time */
    }
}
/*
 * Allocate an AVFrame sized for one frame of audio from codec context `c`,
 * allocate a matching raw sample buffer, and wire the buffer into the
 * frame's data pointers.  Both `frame` and `samples` are output parameters;
 * allocation failures abort via yCFatal.
 */
static void make_audio_frame(AVCodecContext *c, AVFrame * &frame,
                             void *&samples) {
    frame = av_frame_alloc();
    if (!frame) {
        yCFatal(FFMPEGWRITER, "Could not allocate audio frame");
    }

    /* describe the frame from the codec's negotiated parameters */
    frame->nb_samples = c->frame_size;
    frame->format = c->sample_fmt;
    frame->channel_layout = c->channel_layout;

    int needed_bytes = av_samples_get_buffer_size(nullptr, c->channels,
                                                  c->frame_size,
                                                  c->sample_fmt, 0);
    if (needed_bytes < 0) {
        /* non-fatal, matching the original behaviour: execution continues */
        yCError(FFMPEGWRITER, "Could not get sample buffer size");
    }

    samples = av_malloc(needed_bytes);
    if (!samples) {
        yCFatal(FFMPEGWRITER,
                "Could not allocate %d bytes for samples buffer",
                needed_bytes);
    }

    /* setup the data pointers in the AVFrame */
    int rc = avcodec_fill_audio_frame(frame, c->channels, c->sample_fmt,
                                      (const uint8_t*)samples, needed_bytes, 0);
    if (rc < 0) {
        yCFatal(FFMPEGWRITER, "Could not setup audio frame");
    }
}
static void write_audio_frame(AVFormatContext *oc, AVStream *st)
{
AVCodecContext *c;
AVPacket pkt;
av_init_packet(&pkt);
c = st->codec;
get_audio_
|
{
"pile_set_name": "Github"
}
|
<!DOCTYPE html>
<html lang="en" ng-app="jpm">
<head>
<meta charset="utf-8" />
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<link href="/releases/3.5.0/css/style.css" rel="stylesheet" />
<script src="https://code.jquery.com/jquery-3.4.1.min.js"></script>
<script src="/js/releases.js"></script>
<!-- Begin Jekyll SEO tag v2.5.0 -->
<title>Bnd Gradle Plugins</title>
<meta name="generator" content="Jekyll v3.8.5" />
<meta property="og:title" content="Bnd Gradle Plugins" />
<meta property="og:locale" content="en_US" />
<meta name="description" content="See the documentation on GitHub for details on how to configure and use the Bnd Gradle plugins." />
<meta property="og:description" content="See the documentation on GitHub for details on how to configure and use the Bnd Gradle plugins." />
<script type="application/ld+json">
{"@type":"WebPage","url":"/releases/3.5.0/tools/bnd-gradle.html","headline":"Bnd Gradle Plugins","description":"See the documentation on GitHub for details on how to configure and use the Bnd Gradle plugins.","@context":"http://schema.org"}</script>
<!-- End Jekyll SEO tag -->
</head>
<body>
<ul class="container12 menu-bar">
<li span=11><a class=menu-link href="/releases/3.5.0/"><img
class=menu-logo src='/releases/3.5.0/img/bnd-80x40-white.png'></a>
<a href="/releases/3.5.0/chapters/110-introduction.html">Intro
</a><a href="/releases/3.5.0/chapters/800-headers.html">Headers
</a><a href="/releases/3.5.0/chapters/820-instructions.html">Instructions
</a><a href="/releases/3.5.0/chapters/850-macros.html">Macros
</a><div class="releases"><button class="dropbtn">3.5.0</button><div class="dropdown-content"></div></div>
<li class=menu-link span=1>
<a href="https://github.com/bndtools/bnd" target="_"><img
style="position:absolute;top:0;right:0;margin:0;padding:0;z-index:100"
src="https://camo.githubusercontent.com/38ef81f8aca64bb9a64448d0d70f1308ef5341ab/68747470733a2f2f73332e616d617a6f6e6177732e636f6d2f6769746875622f726962626f6e732f666f726b6d655f72696768745f6461726b626c75655f3132313632312e706e67"
alt="Fork me on GitHub"
data-canonical-src="https://s3.amazonaws.com/github/ribbons/forkme_right_darkblue_121621.png"></a>
</ul>
<ul class=container12>
<li span=3>
<div>
<ul class="side-nav">
<li><a href="/releases/3.5.0/chapters/100-release.html">Release</a>
<li><a href="/releases/3.5.0/chapters/110-introduction.html">Introduction</a>
<li><a href="/releases/3.5.0/chapters/120-install.html">How to install bnd</a>
<li><a href="/releases/3.5.0/chapters/123-tour-workspace.html">Guided Tour</a>
<li><a href="/releases/3.5.0/chapters/125-tour-features.html">Guided Tour Workspace & Projects</a>
<li><a href="/releases/3.5.0/chapters/130-concepts.html">Concepts</a>
<li><a href="/releases/3.5.0/chapters/140-best-practices.html">Best practices</a>
<li><a href="/releases/3.5.0/chapters/150-build.html">Build</a>
<li><a href="/releases/3.5.0/chapters/160-jars.html">Generating JARs</a>
<li><a href="/releases/3.5.0/chapters/170-versioning.html">Versioning</a>
<li><a href="/releases/3.5.0/chapters/180-baselining.html">Baselining</a>
<li><a href="/releases/3.5.0/chapters/200-components.html">Service Components</a>
<li><a href="/releases/3.5.0/chapters/210-metatype.html">Metatype</a>
<li><a href="/releases/3.5.0/chapters/220-contracts.html">Contracts</a>
<li><a href="/releases/3.5.0/chapters/230-manifest-annotations.html">Manifest Annotations</a>
<li><a href="/releases/3.5.0/chapters/250-resolving.html">Resolving Dependencies</a>
<li><a href="/releases/3.5.0/chapters/300-launching.html">Launching</a>
<li><a href="/releases/3.5.0/chapters/310-testing.html">Testing</a>
<li><a href="/releases/3.5.0/chapters/320-packaging.html">Packaging Applications</a>
<li><a href="/releases/3.5.0/chapters/390-wrapping.html">Wrapping Libraries to OSGi Bundles</a>
<li><a href="/releases/3.5.0/chapters/400-commandline.html">From the command line</a>
<li><a href="/releases/3.5.0/chapters/600-developer.html">For Developers</a>
<li><a href="/releases/3.5.0/chapters/610-plugin.html">Plugins</a>
<li><a href="/releases/3.5.0/chapters/700-tools.html">Tools bound to bnd</a>
<li><a href="/releases/3.5.0/chapters/790-format.html">File Format</a>
<li><a href="/releases/3.5.0/chapters/800-headers.html">Header Reference</a>
<li><a href="/releases/3.5.0/chapters/820-instructions.html">Instruction</a>
<li><a href="/releases/3.5.0/chapters/825-instructions-ref.html">Instruction Index</a>
<li><a href="/releases/3.5.0/chapters/850-macros.html">Macro Reference</a>
<li><a href="/releases/3.5.0/chapters/860-commands.html">Command Reference</a>
<li><a href="/releases/3.5.0/chapters/870-plugins.html">Plugins Reference</a>
<li><a href="/releases/3.5.0/chapters/880-settings.html">Settings</a>
<li><a href="/releases/3.5.0/chapters/900-errors.html">Errors</a>
<li><a href="/releases/3.5.0/chapters/910-warnings.html">Warnings</a>
<li><a href="/releases/3.5.0/chapters/920-faq.html">Frequently Asked Questions</a>
</ul>
<div class=enroute><
|
{
"pile_set_name": "Github"
}
|
// Marker inventory items, one per blood/splash effect variant.
// Each is capped at a single copy (inventory.maxamount 1); presumably the
// mod's ACS/DECORATE logic checks for possession of these tokens to decide
// which effect to spawn — TODO confirm against the scripts that query them.
Actor RedBloodSplatterz : Inventory
{
	inventory.maxamount 1
}

Actor GreenBloodSplatterz : Inventory
{
	inventory.maxamount 1
}

Actor BlueBloodSplatterz : Inventory
{
	inventory.maxamount 1
}

Actor BloodSplasherz : Inventory
{
	inventory.maxamount 1
}

Actor WaterSplasherz : Inventory
{
	inventory.maxamount 1
}

Actor SlimeSplasherz : Inventory
{
	inventory.maxamount 1
}

Actor SludgeSplasherz : Inventory
{
	inventory.maxamount 1
}

Actor LavaSplasherz : Inventory
{
	inventory.maxamount 1
}

Actor BloodSplasherBlueBlood : Inventory
{
	inventory.maxamount 1
}
// Invisible projectile used as a gore-effect emitter.  The actual
// A_SpawnItem("SuperGore") call in Spawn is commented out here; the derived
// spawner actors below override the Spawn state to emit gore.
// damage 0 + THRUACTORS make it harmless and non-blocking; it only carries
// the BloodSuper decal and its ballistic motion (gravity 0.7, -NOGRAVITY).
Actor SuperGoreSpawner
{
	Projectile
	+RANDOMIZE
	+MISSILE
	+FORCEXYBILLBOARD
//	+BLOODSPLATTER
	+THRUACTORS
	Decal BloodSuper
	damage 0
	radius 2
	height 0
	speed 10
	renderstyle ADD
	alpha 0.9
	scale .15
	gravity 0.7
	-NOGRAVITY
	states
	{
	Spawn:
		//TNT1 AAAAAAAA 3 A_SpawnItem("SuperGore")
		Stop
	Death:
		Stop
	XDeath:
		TNT1 A 0
		Stop
	}
}
Actor SuperGoreSpawner2: SuperGoreSpawner
{
states
{
Spawn:
TNT1 AAAAAAAAA 2 A_SpawnItem("SuperGore")
Stop
}
}
Actor UltraGoreSpawner: SuperGoreSpawner
{
speed 60
states
{
Spawn:
TNT1 AAAAAAAAAAAAAA 1 A_SpawnItem("SuperGore")
Stop
}
}
// Visible gore particle: a translucent, ghost-like sprite that falls slowly
// (Gravity 0.05) and fades out frame by frame (A_FadeOut(0.01)).
// When spawned underwater (waterlevel > 1) it instead emits two Underblood3
// missiles at random angles and disappears.  CLIENTSIDEONLY keeps it purely
// cosmetic in network games.
actor SuperGore
{
	Decal BloodSplat
	game Doom
	Alpha 0.45
	+FORCEXYBILLBOARD
	+GHOST
	+NOBLOCKMAP
	Gravity 0.05
	+DontSplash
	-EXPLODEONWATER
	-ALLOWPARTICLES
	+CLIENTSIDEONLY
	-NOGRAVITY
	+THRUACTORS
	Scale 0.57
	states
	{
	Spawn:
		TNT1 A 0
		TNT1 A 0 A_JumpIf(waterlevel > 1, "SpawnUnderwater")
		// Long F/G sprite run at 1 tic each: slow fade of the blood sprite.
		BLER FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGG 1 A_FadeOut(0.01)
		stop
	SpawnUnderwater:
		TNT1 A 0
		TNT1 AA 0 A_CustomMissile ("Underblood3", 32, 0, random (0, 360), 2, random (0, 160))
		Stop
	}
}
Actor SuperGoreMist: SuperGore
{
Scale 1.0
Speed 8
Gravity 0.3
}
Actor SuperGoreMistSmall: SuperGore
{
Scale 0.35
Speed 2
Gravity 0.1
}
Actor SuperGoreMistTiny: SuperGore
{
Scale 0.15
Speed 1
Gravity 0.1
}
actor BlueSuperGore: SuperGore
{
Alpha 0.4
translation "168:191=192:207","16:47=240:247"
States
{
SpawnUnderwater:
TNT1 A 0
TNT1 AA 0 A_CustomMissile ("UnderbloodBlue3", 32, 0, random (0, 360), 2, random (0, 160))
Stop
}
}
Actor BlueSuperGoreSpawner: SuperGoreSpawner
{
speed 20
states
{
Spawn:
TNT1 AAAAAAAA 1 A_SpawnItem("BlueSuperGore")
Stop
}
}
actor GreenSuperGore: SuperGore
{
translation "168:191=112:127","16:47=120:127"
Decal GreenBloodSplat
States
{
SpawnUnderwater:
TNT1 A 0
TNT1 AA 0 A_CustomMissile ("UnderbloodGreen3", 32, 0, random (0, 360), 2, random (0, 160))
Stop
}
}
Actor GreenSuperGoreSpawner: SuperGoreSpawner
{
states
{
Spawn:
TNT1 AAAA 1 A_SpawnItem("GreenSuperGore")
Stop
}
}
ACTOR StealthBloodLol
{
game Doom
scale 1.1
speed 6
health 1
radius 8
height 1
Gravity 0.8
damage 0
Renderstyle Translucent
Alpha 0.7
DamageType Blood
Decal BloodSuper
+MISSILE
+CLIENTSIDEONLY
+NOTELEPORT
+NOBLOCKMAP
+THRUACTORS
+BLOODLESSIMPACT
+FORCEXYBILLBOARD
+NODAMAGETHRUST
+MOVEWITHSECTOR
+CORPSE
-DONTSPLASH
States
{
Spawn:
TNT1 A 0 A_JumpIf(waterlevel > 1, "Splash")
//XDT1 ABCD 4// A_SpawnItem ("BloodTr",0,0,0,1)
TNT1 A 1
loop
Splash:
BLOD A 0
stop
Death:
//TNT1 A 0 A_CustomMissile ("SmallBloodSplasher", 0, 0, random (0, 360), 2, random (0, 160))
TNT1 A 0 A_PlaySound("blooddrop")
TNT1 A 0 A_SpawnItem ("Brutal_Bloodspot", 5)
XDT1 EFGHIJK 0
Stop
}
}
actor BloodCloud
{
game Doom
scale 1.4
mass 1
renderstyle Translucent
alpha 0.9
Decal BloodSplat
+LOWGRAVITY
+NOTELEPORT
+NOBLOCKMAP
+NOCLIP
+FORCEXYBILLBOARD
+CLIENTSIDEONLY
+DontSplash
+MISSILE
-NOGRAVITY
Speed 2
states
{
Spawn:
TNT1 A 0 A_JumpIf(waterlevel > 1, "Splash")
//BTRL ABCD 4
BLOD ABCDDEEFFF 4
stop
Death:
TNT1 A 0
Stop
Splash:
BLOD A 0
stop
}
}
ACTOR BloodSplasher2
{
Game Doom
damagefactor "Trample", 0.0
DamageType Blood
Health 1
Radius 1
Height 1
Mass 1
+NOCLIP
+NOGRAVITY
+ACTIVATEMCROSS
+WINDTHRUST
+NODAMAGETHRUST
+PIERCEARMOR
+BLOODLESSIMPACT
DeathSound "None"
States
{
Spawn:
BTRL A 1
//TNT1 A 0 A_Explode(3,200)
Stop
}
}
ACTOR BloodSplasher3: BloodSplasher2
{
States
{
Spawn:
BTRL A 1
//TNT1 A 0 A_Explode(3,600)
Stop
}
}
actor SmallSuperGore
{
Decal BloodSplat
game Doom
Alpha 0.5
+FORCEXYBILLBOARD
+GHOST
+NOBLOCKMAP
+DontSplash
-EXPLODEONWATER
-ALLOWPARTICLES
+CLIENTSIDEONLY
+THRUACTORS
+MISSILE
+NOGRAVITY
Speed 16
Scale 2
states
{
Spawn:
TNT1 A 0
TNT1 A 0 A_JumpIf(waterlevel > 1, "SpawnUnderwater")
|
{
"pile_set_name": "Github"
}
|
/*
* freeglut_internal.h
*
* The freeglut library private include file.
*
* Copyright (c) 1999-2000 Pawel W. Olszta. All Rights Reserved.
* Written by Pawel W. Olszta, <olszta@sourceforge.net>
* Creation date: Thu Dec 2 1999
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* PAWEL W. OLSZTA BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
#ifndef FREEGLUT_INTERNAL_H
#define FREEGLUT_INTERNAL_H
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif
/* XXX Update these for each release! */
#define VERSION_MAJOR 2
#define VERSION_MINOR 7
#define VERSION_PATCH 0
/* Freeglut is intended to function under all Unix/X11 and Win32 platforms. */
/* XXX: Don't all MS-Windows compilers (except Cygwin) have _WIN32 defined?
* XXX: If so, remove the first set of defined()'s below.
*/
#if !defined(TARGET_HOST_POSIX_X11) && !defined(TARGET_HOST_MS_WINDOWS) && !defined(TARGET_HOST_MAC_OSX) && !defined(TARGET_HOST_SOLARIS)
#if defined(_MSC_VER) || defined(__WATCOMC__) || defined(__MINGW32__) \
|| defined(_WIN32) || defined(_WIN32_WCE) \
|| ( defined(__CYGWIN__) && defined(X_DISPLAY_MISSING) )
# define TARGET_HOST_MS_WINDOWS 1
#elif defined(__posix__) || defined(__unix__) || defined(__linux__) || defined(__sun)
# define TARGET_HOST_POSIX_X11 1
#elif defined(__APPLE__)
/* This is a placeholder until we get native OSX support ironed out -- JFF 11/18/09 */
# define TARGET_HOST_POSIX_X11 1
/* # define TARGET_HOST_MAC_OSX 1 */
#else
# error "Unrecognized target host!"
#endif
#endif
/* Detect both SunPro and gcc compilers on Sun Solaris */
#if defined (__SVR4) && defined (__sun)
# define TARGET_HOST_SOLARIS 1
#endif
#ifndef TARGET_HOST_MS_WINDOWS
# define TARGET_HOST_MS_WINDOWS 0
#endif
#ifndef TARGET_HOST_POSIX_X11
# define TARGET_HOST_POSIX_X11 0
#endif
#ifndef TARGET_HOST_MAC_OSX
# define TARGET_HOST_MAC_OSX 0
#endif
#ifndef TARGET_HOST_SOLARIS
# define TARGET_HOST_SOLARIS 0
#endif
/* -- FIXED CONFIGURATION LIMITS ------------------------------------------- */
#define FREEGLUT_MAX_MENUS 3
/* -- PLATFORM-SPECIFIC INCLUDES ------------------------------------------- */
/* All Win32 headers depend on the huge windows.h recursive include.
* Note: Lower-case header names are used, for best cross-platform
* compatibility.
*/
#if TARGET_HOST_MS_WINDOWS && !defined(_WIN32_WCE)
# include <windows.h>
# include <windowsx.h>
# include <mmsystem.h>
/* CYGWIN does not have tchar.h, but has TEXT(x), defined in winnt.h. */
# ifndef __CYGWIN__
# include <tchar.h>
# else
# define _TEXT(x) TEXT(x)
# define _T(x) TEXT(x)
# endif
#elif TARGET_HOST_POSIX_X11
# include <GL/glx.h>
# include <X11/Xlib.h>
# include <X11/Xatom.h>
# include <X11/keysym.h>
# include <X11/extensions/XI.h>
# ifdef HAVE_X11_EXTENSIONS_XF86VMODE_H
# include <X11/extensions/xf86vmode.h>
# endif
# ifdef HAVE_X11_EXTENSIONS_XRANDR_H
# include <X11/extensions/Xrandr.h>
# endif
/* If GLX is too old, we will fail during runtime when multisampling
is requested, but at least freeglut compiles. */
# ifndef GLX_SAMPLE_BUFFERS
# define GLX_SAMPLE_BUFFERS 0x80A8
# endif
# ifndef GLX_SAMPLES
# define GLX_SAMPLES 0x80A9
# endif
#endif
/* These files should be available on every platform. */
#include <stdio.h>
#include <string.h>
#include <math.h>
#include <stdlib.h>
#include <stdarg.h>
/* These are included based on autoconf directives. */
#ifdef HAVE_SYS_TYPES_H
# include <sys/types.h>
#endif
#include <unistd.h>
#ifdef TIME_WITH_SYS_TIME
# include <sys/time.h>
# include <time.h>
#elif defined(HAVE_SYS_TIME_H)
# include <sys/time.h>
#else
# include <time.h>
#endif
/* -- AUTOCONF HACKS --------------------------------------------------------*/
/* XXX: Update autoconf to avoid these.
* XXX: Are non-POSIX platforms intended not to use autoconf?
* If so, perhaps there should be a config_guess.h for them. Alternatively,
* config guesses could be placed above, just after the config.h exclusion.
*/
#if defined(__FreeBSD__) || defined(__NetBSD__)
# define HAVE_USB_JS 1
# if defined(__NetBSD__) || ( defined(__FreeBSD__) && __FreeBSD_version >= 500000)
# define HAVE_USBHID_H 1
# endif
#endif
#if TARGET_HOST_MS_WINDOWS
# define HAVE_VFPRINTF 1
#endif
/* MinGW may lack a prototype for ChangeDisplaySettingsEx() (depending on the version?) */
#if TARGET_HOST_MS_WINDOWS && !defined(ChangeDisplaySettingsEx)
LONG WINAPI ChangeDisplaySettingsExA(LPCSTR,LPDEVMODEA,HWND,DWORD,LPVOID);
LONG WINAPI ChangeDisplaySettingsExW(LPCWSTR,LPDEVMODEW,HWND,DWORD,LPVOID);
# ifdef UNICODE
# define ChangeDisplaySettingsEx ChangeDisplaySettingsExW
# else
# define ChangeDisplaySettingsEx ChangeDisplaySettingsExA
# endif
#endif
#if defined(_MSC_VER) || defined(__WATCOMC__)
/* strdup() is non-standard, for all but POSIX-2001 */
#define strdup _strdup
#endif
/* M_PI is non-standard (defined by BSD, not ISO-C) */
#ifndef M_PI
# define M_PI 3.14159265358979323846
#endif
#ifdef HAVE_STDBOOL_H
# include <stdbool.h>
# ifndef TRUE
# define TRUE true
# endif
# ifndef FALSE
# define FALSE false
# endif
#else
# ifndef TRUE
# define TRUE
|
{
"pile_set_name": "Github"
}
|
<?xml version='1.0' encoding='UTF-8'?>
<org.jenkinsci.plugins.terraform.TerraformBuildWrapper_-DescriptorImpl plugin="terraform@1.0.9">
<installations>
<org.jenkinsci.plugins.terraform.TerraformInstallation>
<name>Terraform 0.9.11</name>
<home></home>
<properties>
<hudson.tools.InstallSourceProperty>
<installers>
<org.jenkinsci.plugins.terraform.TerraformInstaller>
<id>0.9.11-linux-amd64</id>
</org.jenkinsci.plugins.terraform.TerraformInstaller>
</installers>
</hudson.tools.InstallSourceProperty>
</properties>
</org.jenkinsci.plugins.terraform.TerraformInstallation>
</installations>
</org.jenkinsci.plugins.terraform.TerraformBuildWrapper_-DescriptorImpl>
|
{
"pile_set_name": "Github"
}
|
#!/usr/bin/env bash
# 1o is as 1k, but putting the dropout on (c,m), i.e. the output
# of the LstmNonlinearityComponent, which I believe is the same as
# putting it on (i,f) which Gaofeng found worked well in the non-fast Lstm
# component; and using schedule which maxes out at 0.3, not 0.7.
# [note: this was a little worse. turns out it was not the same as
# what gaofeng did because he had separate masks on (i,f).
# note: I've since removed the script-level support for this.
# local/chain/compare_wer_general.sh --looped exp/chain_cleaned/tdnn_lstm1{e,k,l,m,n,o}_sp_bi
# System tdnn_lstm1e_sp_bi tdnn_lstm1k_sp_bi tdnn_lstm1l_sp_bi tdnn_lstm1m_sp_bi tdnn_lstm1n_sp_bi tdnn_lstm1o_sp_bi
# WER on dev(orig) 9.0 8.7 8.9 9.0 8.8 8.8
# [looped:] 9.0 8.6 8.9 8.9 8.8 8.9
# WER on dev(rescored) 8.4 7.9 8.2 8.2 8.1 8.1
# [looped:] 8.4 7.8 8.2 8.3 8.1 8.2
# WER on test(orig) 8.8 8.8 8.9 8.9 8.7 8.7
# [looped:] 8.8 8.7 8.8 8.8 8.7 8.7
# WER on test(rescored) 8.4 8.3 8.2 8.5 8.3 8.2
# [looped:] 8.3 8.3 8.3 8.5 8.3 8.2
# Final train prob -0.0648 -0.0693 -0.0768 -0.0807 -0.0702 -0.0698
# Final valid prob -0.0827 -0.0854 -0.0943 -0.0931 -0.0836 -0.0858
# Final train prob (xent) -0.8372 -0.8848 -0.9371 -0.9807 -0.8719 -0.8998
# Final valid prob (xent) -0.9497 -0.9895 -1.0546 -1.0629 -0.9732 -1.0084
# 1e is as 1b, but reducing decay-time from 40 to 20.
# 1d is as 1b, but adding decay-time=40 to the fast-lstmp-layers. note: it
# uses egs from 1b, remember to remove that before I commit.
# steps/info/chain_dir_info.pl exp/chain_cleaned/tdnn_lstm1a_sp_bi
# exp/chain_cleaned/tdnn_lstm1a_sp_bi: num-iters=253 nj=2..12 num-params=9.5M dim=40+100->3607 combine=-0.07->-0.07 xent:train/valid[167,252,final]=(-0.960,-0.859,-0.852/-1.05,-0.999,-0.997) logprob:train/valid[167,252,final]=(-0.076,-0.064,-0.062/-0.099,-0.092,-0.091)
# This is as run_lstm1e.sh except adding TDNN layers in between; also comparing below
# with run_lstm1d.sh which had a larger non-recurrent-projection-dim and which had
# better results. Note: these results are not with the updated LM (the LM data-prep
# for this setup was changed in Nov 2016 but this was with an older directory).
#
# local/chain/compare_wer_general.sh exp/chain_cleaned/lstm1d_sp_bi exp/chain_cleaned/lstm1e_sp_bi exp/chain_cleaned/tdnn_lstm1a_sp_bi
# System lstm1d_sp_bi lstm1e_sp_bi tdnn_lstm1a_sp_bi
# WER on dev(orig) 10.3 10.7 9.7
# WER on dev(rescored) 9.8 10.1 9.3
# WER on test(orig) 9.7 9.8 9.1
# WER on test(rescored) 9.2 9.4 8.7
# Final train prob -0.0812 -0.0862 -0.0625
# Final valid prob -0.1049 -0.1047 -0.0910
# Final train prob (xent) -1.1334 -1.1763 -0.8518
# Final valid prob (xent) -1.2263 -1.2427 -0.9972
## how you run this (note: this assumes that the run_tdnn_lstm.sh soft link points here;
## otherwise call it directly in its location).
# by default, with cleanup:
# local/chain/run_tdnn_lstm.sh
# without cleanup:
# local/chain/run_tdnn_lstm.sh --train-set train --gmm tri3 --nnet3-affix "" &
# note, if you have already run one of the non-chain nnet3 systems
# (e.g. local/nnet3/run_tdnn.sh), you may want to run with --stage 14.
# run_tdnn_lstm_1a.sh was modified from run_lstm_1e.sh, which is a fairly
# standard, LSTM, except that some TDNN layers were added in between the
# LSTM layers. I was looking at egs/ami/s5b/local/chain/tuning/run_tdnn_lstm_1i.sh, but
# this isn't exactly copied from there.
set -e -o pipefail
# First the options that are passed through to run_ivector_common.sh
# (some of which are also used in this script directly).
stage=0
nj=30
decode_nj=30
min_seg_len=1.55
label_delay=5
xent_regularize=0.1
train_set=train_cleaned
gmm=tri3_cleaned # the gmm for the target data
num_threads_ubm=32
nnet3_affix=_cleaned # cleanup affix for nnet3 and chain dirs, e.g. _cleaned
# training options
chunk_left_context=40
chunk_right_context=0
chunk_left_context_initial=0
chunk_right_context_final=0
# decode options
extra_left_context=50
extra_right_context=0
extra_left_context_initial=0
extra_right_context_final=0
frames_per_chunk=140,100,160
frames_per_chunk_primary=140
# The rest are configs specific to this script. Most of the parameters
# are just hardcoded at this level, in the commands below.
train_stage=-10
tree_affix= # affix for tree directory, e.g. "a" or "b", in case we change the configuration.
tdnn_lstm_affix=1o #affix for TDNN-LSTM directory, e.g. "a" or "b", in case we change the configuration.
common_egs_dir=exp/chain_cleaned/tdnn_lstm1b_sp_bi/egs # you can set this to use previously dumped egs.
# End configuration section.
echo "$0 $@" # Print the command line for logging
. ./cmd.sh
. ./path.sh
. ./utils/parse_options.sh
if ! cuda-compiled; then
cat <<EOF && exit 1
This script is intended to be used with GPUs but you have
|
{
"pile_set_name": "Github"
}
|
{
"tests": [
{
"description": "update: ServerTimestamp with data",
"comment": "A key with the special ServerTimestamp sentinel is removed from\nthe data in the update operation. Instead it appears in a separate Transform operation.\nNote that in these tests, the string \"ServerTimestamp\" should be replaced with the\nspecial ServerTimestamp value.",
"update": {
"docRefPath": "projects/projectID/databases/(default)/documents/C/d",
"jsonData": "{\"a\": 1, \"b\": \"ServerTimestamp\"}",
"request": {
"database": "projects/projectID/databases/(default)",
"writes": [
{
"update": {
"name": "projects/projectID/databases/(default)/documents/C/d",
"fields": {
"a": {
"integerValue": "1"
}
}
},
"updateMask": {
"fieldPaths": [
"a"
]
},
"currentDocument": {
"exists": true
}
},
{
"transform": {
"document": "projects/projectID/databases/(default)/documents/C/d",
"fieldTransforms": [
{
"fieldPath": "b",
"setToServerValue": "REQUEST_TIME"
}
]
}
}
]
}
}
}
]
}
|
{
"pile_set_name": "Github"
}
|
// Copyright 2016 CoreOS, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package v2_3
import (
"reflect"
"github.com/coreos/ignition/config/v2_3/types"
)
// Append appends newConfig to oldConfig and returns the result. Appending one
// config to another is accomplished by iterating over every field in the
// config structure, appending slices, recursively appending structs, and
// overwriting old values with new values for all other types.
// Append appends newConfig to oldConfig and returns the result. Appending one
// config to another is accomplished by iterating over every field in the
// config structure, appending slices, recursively appending structs, and
// overwriting old values with new values for all other types.
func Append(oldConfig, newConfig types.Config) types.Config {
	merged := appendStruct(reflect.ValueOf(oldConfig), reflect.ValueOf(newConfig))
	return merged.Interface().(types.Config)
}
// appendStruct is an internal helper function to AppendConfig. Given two values
// of structures (assumed to be the same type), recursively iterate over every
// field in the struct, appending slices, recursively appending structs, and
// overwriting old values with the new for all other types. Some individual
// struct fields have alternate merge strategies, determined by the field name.
// Currently these fields are "ignition.version", which uses the old value, and
// "ignition.config" which uses the new value.
func appendStruct(vOld, vNew reflect.Value) reflect.Value {
tOld := vOld.Type()
vRes := reflect.New(tOld)
for i := 0; i < tOld.NumField(); i++ {
vfOld := vOld.Field(i)
vfNew := vNew.Field(i)
vfRes := vRes.Elem().Field(i)
switch tOld.Field(i).Name {
case "Version":
vfRes.Set(vfOld)
continue
case "Config":
vfRes.Set(vfNew)
continue
}
switch vfOld.Type().Kind() {
case reflect.Struct:
vfRes.Set(appendStruct(vfOld, vfNew))
case reflect.Slice:
vfRes.Set(reflect.AppendSlice(vfOld, vfNew))
default:
if vfNew.Kind() == reflect.Ptr && vfNew.IsNil() {
vfRes.Set(vfOld)
} else {
vfRes.Set(vfNew)
}
}
}
return vRes.Elem()
}
|
{
"pile_set_name": "Github"
}
|
/*
** 2001 September 16
**
** The author disclaims copyright to this source code. In place of
** a legal notice, here is a blessing:
**
** May you do good and not evil.
** May you find forgiveness for yourself and forgive others.
** May you share freely, never taking more than you give.
**
******************************************************************************
**
** This header file (together with is companion C source-code file
** "os.c") attempt to abstract the underlying operating system so that
** the SQLite library will work on both POSIX and windows systems.
**
** This header file is #include-ed by sqliteInt.h and thus ends up
** being included by every source file.
*/
#ifndef _SQLITE_OS_H_
#define _SQLITE_OS_H_
/*
** Attempt to automatically detect the operating system and setup the
** necessary pre-processor macros for it.
*/
#include "os_setup.h"
/* If the SET_FULLSYNC macro is not defined above, then make it
** a no-op
*/
#ifndef SET_FULLSYNC
# define SET_FULLSYNC(x,y)
#endif
/*
** The default size of a disk sector
*/
#ifndef SQLITE_DEFAULT_SECTOR_SIZE
# define SQLITE_DEFAULT_SECTOR_SIZE 4096
#endif
/*
** Temporary files are named starting with this prefix followed by 16 random
** alphanumeric characters, and no file extension. They are stored in the
** OS's standard temporary file directory, and are deleted prior to exit.
** If sqlite is being embedded in another program, you may wish to change the
** prefix to reflect your program's name, so that if your program exits
** prematurely, old temporary files can be easily identified. This can be done
** using -DSQLITE_TEMP_FILE_PREFIX=myprefix_ on the compiler command line.
**
** 2006-10-31: The default prefix used to be "sqlite_". But then
** Mcafee started using SQLite in their anti-virus product and it
** started putting files with the "sqlite" name in the c:/temp folder.
** This annoyed many windows users. Those users would then do a
** Google search for "sqlite", find the telephone numbers of the
** developers and call to wake them up at night and complain.
** For this reason, the default name prefix is changed to be "sqlite"
** spelled backwards. So the temp files are still identified, but
** anybody smart enough to figure out the code is also likely smart
** enough to know that calling the developer will not help get rid
** of the file.
*/
#ifndef SQLITE_TEMP_FILE_PREFIX
# define SQLITE_TEMP_FILE_PREFIX "etilqs_"
#endif
/*
** The following values may be passed as the second argument to
** sqlite3OsLock(). The various locks exhibit the following semantics:
**
** SHARED: Any number of processes may hold a SHARED lock simultaneously.
** RESERVED: A single process may hold a RESERVED lock on a file at
** any time. Other processes may hold and obtain new SHARED locks.
** PENDING: A single process may hold a PENDING lock on a file at
** any one time. Existing SHARED locks may persist, but no new
** SHARED locks may be obtained by other processes.
** EXCLUSIVE: An EXCLUSIVE lock precludes all other locks.
**
** PENDING_LOCK may not be passed directly to sqlite3OsLock(). Instead, a
** process that requests an EXCLUSIVE lock may actually obtain a PENDING
** lock. This can be upgraded to an EXCLUSIVE lock by a subsequent call to
** sqlite3OsLock().
*/
#define NO_LOCK 0
#define SHARED_LOCK 1
#define RESERVED_LOCK 2
#define PENDING_LOCK 3
#define EXCLUSIVE_LOCK 4
/*
** File Locking Notes: (Mostly about windows but also some info for Unix)
**
** We cannot use LockFileEx() or UnlockFileEx() on Win95/98/ME because
** those functions are not available. So we use only LockFile() and
** UnlockFile().
**
** LockFile() prevents not just writing but also reading by other processes.
** A SHARED_LOCK is obtained by locking a single randomly-chosen
** byte out of a specific range of bytes. The lock byte is obtained at
** random so two separate readers can probably access the file at the
** same time, unless they are unlucky and choose the same lock byte.
** An EXCLUSIVE_LOCK is obtained by locking all bytes in the range.
** There can only be one writer. A RESERVED_LOCK is obtained by locking
** a single byte of the file that is designated as the reserved lock byte.
** A PENDING_LOCK is obtained by locking a designated byte different from
** the RESERVED_LOCK byte.
**
** On WinNT/2K/XP systems, LockFileEx() and UnlockFileEx() are available,
** which means we can use reader/writer locks. When reader/writer locks
** are used, the lock is placed on the same range of bytes that is used
** for probabilistic locking in Win95/98/ME. Hence, the locking scheme
** will support two or more Win95 readers or two or more WinNT readers.
** But a single Win95 reader will lock out all WinNT readers and a single
** WinNT reader will lock out all other Win95 readers.
**
** The following #defines specify the range of bytes used for locking.
** SHARED_SIZE is the number of bytes available in the pool from which
** a random byte is selected for a shared lock. The pool of bytes for
** shared locks begins at SHARED_FIRST.
**
** The same locking strategy and
** byte ranges are used for Unix. This leaves open the possibility of having
** clients on win95, winNT, and unix all talking to the same shared file
** and all locking correctly. To do so would require that samba (or whatever
** tool is being used for file sharing) implements locks correctly between
** windows and unix. I'm guessing that isn't likely to happen, but by
** using the same locking range we are at least open to the possibility.
**
** Locking in windows is mandatory. For this reason, we cannot store
** actual data in the bytes used for locking. The pager never allocates
** the pages involved in locking therefore. SHARED_SIZE is selected so
** that all locks will fit on a single page even at the minimum page size.
** PENDING_BYTE defines the beginning of the locks. By default PENDING_BYTE
** is set high so that we don't have to allocate an unused page except
** for very large databases. But one should test the page skipping logic
** by setting PENDING_BYTE low and running the entire regression suite.
**
** Changing the value of PENDING_BYTE results in a subtly incompatible
** file format. Depending on how it is changed, you might not notice
** the incompatibility right away, even running a full regression test.
** The default location of PENDING_BYTE is the first byte past the
** 1GB boundary.
**
*/
#ifdef SQLITE_OMIT_WSD
/* No writable static data: the pending byte is fixed at the 1GB boundary. */
# define PENDING_BYTE (0x40000000)
#else
/* Normally the offset lives in a global variable (sqlite3PendingByte) so
** that, as the comment above notes, it can be lowered for testing the
** page-skipping logic. */
# define PENDING_BYTE sqlite3PendingByte
#endif
/* Byte used for RESERVED locks, immediately after the pending byte. */
#define RESERVED_BYTE (PENDING_BYTE+1)
/* First byte of the pool from which a SHARED lock byte is chosen. */
#define SHARED_FIRST (PENDING_BYTE+2)
/* Size in bytes of the SHARED lock pool (fits in one minimum-size page). */
#define SHARED_SIZE 510
/*
** Wrapper around OS specific sqlite3_os_init() function.
*/
int sqlite3OsInit(void);
/*
** Functions for accessing sqlite3_file methods
*/
void sqlite3OsClose(sqlite3_file*);
int sqlite3OsRead(sqlite3_file*, void*, int amt, i64 offset);
int sqlite3OsWrite(sqlite3_file*, const void*, int amt, i64 offset);
int sqlite3OsTruncate(sqlite3_file*, i64 size);
int sqlite3OsSync(sqlite3_file*, int);
int sqlite3OsFileSize(sqlite3_file*, i64 *pSize);
int sqlite3OsLock(sqlite3_file*, int);
int sqlite3OsUnlock(sqlite3_file*, int);
int sqlite3OsCheckReservedLock(sqlite3_file *id, int *pResOut);
int sqlite3OsFileControl(sqlite3_file*,int,void*);
void sqlite3OsFileControlHint(sqlite3
|
{
"pile_set_name": "Github"
}
|
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/certificate_transparency/log_dns_client.h"
#include <memory>
#include <numeric>
#include <string>
#include <utility>
#include <vector>
#include "base/format_macros.h"
#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
#include "base/strings/string_number_conversions.h"
#include "base/strings/stringprintf.h"
#include "base/test/test_timeouts.h"
#include "components/certificate_transparency/mock_log_dns_traffic.h"
#include "crypto/sha2.h"
#include "net/base/net_errors.h"
#include "net/cert/merkle_audit_proof.h"
#include "net/cert/signed_certificate_timestamp.h"
#include "net/dns/dns_client.h"
#include "net/dns/dns_config_service.h"
#include "net/dns/dns_protocol.h"
#include "net/log/net_log.h"
#include "net/test/gtest_util.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace certificate_transparency {
namespace {
using ::testing::AllOf;
using ::testing::Eq;
using ::testing::IsEmpty;
using ::testing::Le;
using ::testing::Not;
using ::testing::NotNull;
using net::test::IsError;
using net::test::IsOk;
// Sample Merkle leaf hashes.
const char* const kLeafHashes[] = {
"\x1f\x25\xe1\xca\xba\x4f\xf9\xb8\x27\x24\x83\x0f\xca\x60\xe4\xc2\xbe\xa8"
"\xc3\xa9\x44\x1c\x27\xb0\xb4\x3e\x6a\x96\x94\xc7\xb8\x04",
"\x2c\x26\xb4\x6b\x68\xff\xc6\x8f\xf9\x9b\x45\x3c\x1d\x30\x41\x34\x13\x42"
"\x2d\x70\x64\x83\xbf\xa0\xf9\x8a\x5e\x88\x62\x66\xe7\xae",
"\xfc\xde\x2b\x2e\xdb\xa5\x6b\xf4\x08\x60\x1f\xb7\x21\xfe\x9b\x5c\x33\x8d"
"\x10\xee\x42\x9e\xa0\x4f\xae\x55\x11\xb6\x8f\xbf\x8f\xb9",
};
// DNS query names for looking up the leaf index associated with each hash in
// |kLeafHashes|. Assumes the log domain is "ct.test".
const char* const kLeafIndexQnames[] = {
"D4S6DSV2J743QJZEQMH4UYHEYK7KRQ5JIQOCPMFUHZVJNFGHXACA.hash.ct.test.",
"FQTLI23I77DI76M3IU6B2MCBGQJUELLQMSB37IHZRJPIQYTG46XA.hash.ct.test.",
"7TPCWLW3UVV7ICDAD63SD7U3LQZY2EHOIKPKAT5OKUI3ND57R64Q.hash.ct.test.",
};
// Leaf indices and tree sizes for use with |kLeafHashes|.
const uint64_t kLeafIndices[] = {0, 1, 2};
const uint64_t kTreeSizes[] = {100, 10000, 1000000};
// Only 7 audit proof nodes can fit into a DNS response, because they are sent
// in a TXT RDATA string, which has a maximum size of 255 bytes, and each node
// is a SHA-256 hash (32 bytes), i.e. (255 / 32) == 7.
// This means audit proofs consisting of more than 7 nodes require multiple DNS
// requests to retrieve.
const size_t kMaxProofNodesPerDnsResponse = 7;
// Returns an example Merkle audit proof containing |length| nodes.
// The proof cannot be used for cryptographic purposes; it is merely a
// placeholder.
std::vector<std::string> GetSampleAuditProof(size_t length) {
  std::vector<std::string> proof;
  proof.reserve(length);
  // Give every node a distinct byte pattern so that tests can detect
  // out-of-order reconstruction of the proof.
  for (size_t node_index = 0; node_index < length; ++node_index) {
    std::string node;
    node.reserve(crypto::kSHA256Length);
    // Each node is 32 bytes, with each byte having a different value.
    for (size_t byte_index = 0; byte_index < crypto::kSHA256Length;
         ++byte_index) {
      node.push_back(static_cast<char>((-127 + node_index + byte_index) % 128));
    }
    proof.push_back(std::move(node));
  }
  return proof;
}
} // namespace
// Test fixture for LogDnsClient. Parameterized on net::IoMode so that every
// test case runs with both synchronous and asynchronous socket reads.
class LogDnsClientTest : public ::testing::TestWithParam<net::IoMode> {
 protected:
  LogDnsClientTest()
      : network_change_notifier_(net::NetworkChangeNotifier::CreateMock()) {
    // Configure the mock DNS layer (socket read mode + DNS config) before
    // any LogDnsClient is built from it.
    mock_dns_.SetSocketReadMode(GetParam());
    mock_dns_.InitializeDnsConfig();
  }
  // Creates a LogDnsClient wired to the mock DNS traffic.
  // NOTE(review): callers below pass 0 for |max_concurrent_queries|;
  // presumably that means "unlimited" — confirm against LogDnsClient.
  std::unique_ptr<LogDnsClient> CreateLogDnsClient(
      size_t max_concurrent_queries) {
    return std::make_unique<LogDnsClient>(mock_dns_.CreateDnsClient(),
                                          net::NetLogWithSource(),
                                          max_concurrent_queries);
  }
  // Convenience function for calling QueryAuditProof synchronously.
  // Returns the immediate result, or waits on the completion callback when
  // the query reports ERR_IO_PENDING.
  template <typename... Types>
  net::Error QueryAuditProof(Types&&... args) {
    std::unique_ptr<LogDnsClient> log_client = CreateLogDnsClient(0);
    net::TestCompletionCallback callback;
    const net::Error result = log_client->QueryAuditProof(
        std::forward<Types>(args)..., callback.callback());
    return result != net::ERR_IO_PENDING
               ? result
               : static_cast<net::Error>(callback.WaitForResult());
  }
  // This will be the NetworkChangeNotifier singleton for the duration of the
  // test. It is accessed statically by LogDnsClient.
  std::unique_ptr<net::NetworkChangeNotifier> network_change_notifier_;
  // Queues and handles asynchronous DNS tasks. Indirectly used by LogDnsClient,
  // the underlying net::DnsClient, and NetworkChangeNotifier.
  base::MessageLoopForIO message_loop_;
  // Allows mock DNS sockets to be setup.
  MockLogDnsTraffic mock_dns_;
};
// Verifies that an NXDOMAIN answer to the leaf-index lookup surfaces as
// ERR_NAME_NOT_RESOLVED from QueryAuditProof.
TEST_P(LogDnsClientTest, QueryAuditProofReportsThatLogDomainDoesNotExist) {
  // Respond to the leaf-index DNS query with NXDOMAIN (name does not exist).
  ASSERT_TRUE(mock_dns_.ExpectRequestAndErrorResponse(
      kLeafIndexQnames[0], net::dns_protocol::kRcodeNXDOMAIN));
  std::unique_ptr<LogDnsClient::AuditProofQuery> query;
  ASSERT_THAT(QueryAuditProof("ct.test", kLeafHashes[0], kTreeSizes[0], &query),
              IsError(net::ERR_NAME_NOT_RESOLVED));
}
TEST_P(LogDnsClientTest,
QueryAuditProofReportsServerFailuresDuringLeafIndexRequests) {
ASSERT_TRUE(mock_dns_.ExpectRequestAndErrorResponse(
kLeafIndexQnames[0
|
{
"pile_set_name": "Github"
}
|
{
"name": "@mostly-adequate/support",
"version": "2.0.1",
"description": "Support functions and data-structures from the Mostly Adequate Guide to Functional Programming",
"license": "MIT",
"main": "index.js",
"repository": {
"type": "git",
"url": "https://github.com/MostlyAdequate/mostly-adequate-guide"
},
"author": "@mostly-adequate",
"bugs": {
"url": "https://github.com/MostlyAdequate/mostly-adequate-guide/issues"
},
"homepage": "https://github.com/MostlyAdequate/mostly-adequate-guide/support",
"keywords": [
"functional programming",
"mostly adequate",
"guide",
"fp"
],
"dependencies": {},
"devDependencies": {
"eslint": "^5.9.0",
"eslint-config-airbnb": "^16.1.0",
"eslint-plugin-import": "^2.8.0",
"eslint-plugin-jsx-a11y": "^6.0.2",
"eslint-plugin-react": "^7.5.1"
},
"scripts": {
"lint": "eslint ."
}
}
|
{
"pile_set_name": "Github"
}
|
package com.salesforce.phoenix.filter;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import junit.framework.TestCase;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.filter.Filter.ReturnCode;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import com.google.common.base.Function;
import com.google.common.collect.Lists;
import com.salesforce.phoenix.query.KeyRange;
import com.salesforce.phoenix.query.QueryConstants;
import com.salesforce.phoenix.schema.ColumnModifier;
import com.salesforce.phoenix.schema.PDataType;
import com.salesforce.phoenix.schema.PDatum;
import com.salesforce.phoenix.schema.RowKeySchema.RowKeySchemaBuilder;
import com.salesforce.phoenix.util.ByteUtil;
//reset()
//filterAllRemaining() -> true indicates the scan is over; false means keep going.
//filterRowKey(byte[],int,int) -> true to drop this row; if false, we will also call
//filterKeyValue(KeyValue) -> true to drop this key/value
//filterRow(List) -> allows direct modification of the final list to be submitted
//filterRow() -> last chance to drop entire row based on the sequence of filterValue() calls. Eg: filter a row if it doesn't contain a specified column.
@RunWith(Parameterized.class)
public class SkipScanFilterTest extends TestCase {
private final SkipScanFilter skipper;
private final List<List<KeyRange>> cnf;
private final List<Expectation> expectations;
/**
 * Builds the test fixture for one parameterized case: stores the CNF key
 * ranges and expectations, then constructs a SkipScanFilter over a row-key
 * schema with one field per entry in {@code widths}. A width <= 0 denotes a
 * nullable variable-length (VARCHAR) field; a positive width denotes a
 * fixed-width CHAR field of that byte size.
 */
public SkipScanFilterTest(List<List<KeyRange>> cnf, int[] widths, List<Expectation> expectations) {
    this.expectations = expectations;
    this.cnf = cnf;
    RowKeySchemaBuilder builder = new RowKeySchemaBuilder(widths.length);
    for (final int width : widths) {
        builder.addField(
            // Anonymous PDatum describing a single row-key column derived
            // from its width.
            new PDatum() {
                @Override
                public boolean isNullable() {
                    return width <= 0;
                }
                @Override
                public PDataType getDataType() {
                    return width <= 0 ? PDataType.VARCHAR : PDataType.CHAR;
                }
                @Override
                public Integer getByteSize() {
                    // Variable-length fields have no fixed byte size.
                    return width <= 0 ? null : width;
                }
                @Override
                public Integer getMaxLength() {
                    return getByteSize();
                }
                @Override
                public Integer getScale() {
                    // Scale is only meaningful for decimal types.
                    return null;
                }
                @Override
                public ColumnModifier getColumnModifier() {
                    return null;
                }
            }, width <= 0, null);
    }
    skipper = new SkipScanFilter(cnf, builder.build());
}
@Test
public void test() {
    // Echo the scenario so a failure is easy to attribute to a data() row.
    final String scenario = "CNF: " + cnf + "\n" + "Expectations: " + expectations;
    System.out.println(scenario);
    // Run every expectation against the filter under test, in order.
    for (int n = 0; n < expectations.size(); n++) {
        expectations.get(n).examine(skipper);
    }
}
@Parameters(name="{0} {1} {2}")
public static Collection<Object> data() {
List<Object> testCases = Lists.newArrayList();
testCases.addAll(
foreach(new KeyRange[][]{{
PDataType.CHAR.getKeyRange(Bytes.toBytes("abc"), true, Bytes.toBytes("def"), true),
PDataType.CHAR.getKeyRange(Bytes.toBytes("dzy"), false, Bytes.toBytes("xyz"), false),
},
{
PDataType.CHAR.getKeyRange(Bytes.toBytes("AA"), true, Bytes.toBytes("AB"), false),
},
{
PDataType.CHAR.getKeyRange(Bytes.toBytes("AA"), true, Bytes.toBytes("AB"), false),
},
{
PDataType.CHAR.getKeyRange(Bytes.toBytes("AA"), true, Bytes.toBytes("AB"), false),
},
{
PDataType.CHAR.getKeyRange(Bytes.toBytes("AA"), true, Bytes.toBytes("AB"), false),
}},
new int[]{3,2,2,2,2},
//new SeekNext("abcABABABAB", "abdAAAAAAAA"),
new SeekNext("defAAABABAB", "dzzAAAAAAAA"),
new Finished("xyyABABABAB"))
);
testCases.addAll(
foreach(new KeyRange[][]{{
PDataType.VARCHAR.getKeyRange(Bytes.toBytes("j"), false, Bytes.toBytes("k"), true),
}},
new int[]{0},
new SeekNext(Bytes.toBytes("a"), ByteUtil.nextKey(new byte[] {'j',QueryConstants.SEPARATOR_BYTE})),
new Include("ja"),
new Include("jz"),
new Include("k"),
new Finished("ka")));
testCases.addAll(
foreach(new KeyRange[][]{{
PDataType.CHAR.getKeyRange(Bytes.toBytes("aaa"), true, Bytes.toBytes("aaa"), true),
PDataType.CHAR.getKeyRange(Bytes.toBytes("aac"), true, Bytes.toBytes("aad"), true),
PDataType.CHAR.getKeyRange(Bytes.toBytes("abc"), true, Bytes.toBytes("def"), true)
}},
new int[]{3},
new SeekNext("aab", "aac"),
new SeekNext("abb", "abc"),
new Include("abc"),
new Include("abe"),
new Include("def"),
new Finished("deg")));
testCases.addAll(
foreach(new KeyRange[][]{{
PDataType.CHAR.getKeyRange(Bytes.toBytes("aaa"), true, Bytes.toBytes("aaa"), true),
PDataType.CHAR.getKeyRange(Bytes.toBytes("abc"), false, Bytes.toBytes("def"), true)
}},
new int[]{3},
new SeekNext("aba", "abd"),
new Include("abe"),
new Include("def"),
new Finished("deg")));
testCases.addAll(
foreach(new KeyRange[][]{{
PDataType.CHAR.getKeyRange(Bytes.toBytes("aaa"), true, Bytes.toBytes("aaa"), true),
PDataType.CHAR.getKeyRange(Bytes.toBytes("abc"), false, Bytes.toBytes("def"), false)
}},
new int[]{3},
new SeekNext("aba", "abd"),
new Finished("def"))
);
testCases.addAll(
foreach(new KeyRange[][]{{
PDataType.CHAR.getKeyRange(Bytes.toBytes("abc"), true, Bytes.toBytes("def"), true),
PDataType.CHAR.getKeyRange(Bytes.toBytes("dzy"), false, Bytes.toBytes("xyz"), false),
}},
new int[]{3},
new Include("def"),
new SeekNext("deg", "dzz"),
new Include("eee"),
new Finished("xyz"))
);
testCases.addAll(
foreach(new KeyRange[][]{{
PDataType.CHAR.getKeyRange(Bytes.toBytes("aaa"), true, Bytes.toBytes("aaa"), true),
PDataType.CHAR.getKeyRange(Bytes.toBytes("abc"), true, Bytes.toBytes("abc"), true),
PDataType.CHAR.getKeyRange(Bytes.toBytes("def"), true, Bytes.toBytes("def"), true),
},
{
PDataType.CHAR.getKeyRange(Bytes.toBytes("AB"), true, Bytes.toBytes("AX"), true),
PDataType.CHAR.getKeyRange(Bytes.toBytes("EA"), false, Bytes.toBytes("EZ"), false),
PDataType.CHAR.getKeyRange(Bytes.toBytes("PO"), true, Bytes.toBytes("PP"),
|
{
"pile_set_name": "Github"
}
|
// Application bootstrap module (AMD): attaches FastClick, hard-codes the
// locales, installs a CSRF-token interceptor on the API layer, and builds
// and returns the Marionette application instance.
define([
  'logger',
  'backbone',
  'injector',
  'marionette',
  'vent',
  '#qt_core/controllers/api',
  'commands/index',
  'controllers/nav',
  'controllers/initdata',
  'routers/router',
  'views/layout',
  '#beans/beans',
  "modernizr",
  'fastclick',
  '#qt_core/controllers/config'
],
function (logger, Backbone, injector,Marionette,vent,api,commandoPool,NavController,InitDataController,Router, LayoutView, beans,
  Modernizr, FastClick,Cfg) {
  'use strict';
  // Done
  // Remove the 300ms tap delay on touch devices.
  FastClick.attach(document.body);
  console.error('WARN : mainLocale done in code');
  // Locales are hard-coded here for now (see the warning logged above).
  injector.set(injector.cfg.currentMainLocale,'fr');
  injector.set(injector.cfg.currentSubLocales,[]);
  // TODO: move this elsewhere (original comment: "A déplacer!!!!")
  // Interceptor run after every API call: re-reads the CSRF token cookie
  // and re-registers it as a request header on the API controller.
  api.setInterceptEndCallFunction(function(res) {
    try {
      // Get CSRF Token value from Cookie using jQuery
      function getCookie(name) {
        var cookieValue = null;
        if (document.cookie && document.cookie !== '') {
          var cookies = document.cookie.split(';');
          for (var i = 0; i < cookies.length; i++) {
            var cookie = jQuery.trim(cookies[i]);
            // Does this cookie string begin with the name we want?
            if (cookie.substring(0, name.length + 1) === (name + '=')) {
              cookieValue = decodeURIComponent(cookie.substring(name.length + 1));
              break;
            }
          }
        }
        return cookieValue;
      }
      var csrfToken = getCookie('csrftoken');
      //var csrfToken = res.xhr.getResponseHeader("X-CSRF-TOKEN");
      if (csrfToken) {
        //console.log("Ok CSRF : "+csrfToken);
        // Replace any previously registered token header with the fresh one.
        injector.set(injector.cfg.csrfToken,csrfToken);
        var apiController = injector.get(injector.cfg.apiController);
        apiController.removeHeader("X-CSRF-TOKEN");
        apiController.addHeader("X-CSRF-TOKEN",csrfToken);
      }
      else {
        console.log('No CSRF Token?');
      }
    }
    catch (e) {
      console.error("error while catching end call XHR : "+(e ? JSON.stringify(e) : "NULL?"));
    }
  });
  // End of the section to be moved (original comment: "end déplacer")
  var app = new Marionette.Application();
  app.addInitializer(function () {
    this.commandoPool = commandoPool;
    // temp eric : trace when error — log command failures with their stack.
    this.commandoPool.commandError =function(error) {
      console.log('Error : '+error);
      if (error.stack)
        console.log(error.stack);
    };
    // NOTE(review): globals exposed for debugging — presumably temporary.
    window.quemaInjector = injector;
    window.quemaVent = vent;
    injector.set(injector.config['commando.pool'], commandoPool);
    this.apiController = api;
    injector.set(injector.config.api, api.api);
    injector.set(injector.config.apiController, api);
  });
  return app;
});
|
{
"pile_set_name": "Github"
}
|
/**
******************************************************************************
* @file system_stm32f4xx.c
* @author MCD Application Team
* @version V1.4.0
* @date 04-August-2014
* @brief CMSIS Cortex-M4 Device Peripheral Access Layer System Source File.
* This file contains the system clock configuration for STM32F4xx devices.
*
* 1. This file provides two functions and one global variable to be called from
* user application:
* - SystemInit(): Setups the system clock (System clock source, PLL Multiplier
* and Divider factors, AHB/APBx prescalers and Flash settings),
* depending on the configuration made in the clock xls tool.
* This function is called at startup just after reset and
* before branch to main program. This call is made inside
* the "startup_stm32f4xx.s" file.
*
* - SystemCoreClock variable: Contains the core clock (HCLK), it can be used
* by the user application to setup the SysTick
* timer or configure other parameters.
*
* - SystemCoreClockUpdate(): Updates the variable SystemCoreClock and must
* be called whenever the core clock is changed
* during program execution.
*
* 2. After each device reset the HSI (16 MHz) is used as system clock source.
* Then SystemInit() function is called, in "startup_stm32f4xx.s" file, to
* configure the system clock before to branch to main program.
*
* 3. If the system clock source selected by user fails to startup, the SystemInit()
* function will do nothing and HSI still used as system clock source. User can
* add some code to deal with this issue inside the SetSysClock() function.
*
* 4. The default value of HSE crystal is set to 25MHz, refer to "HSE_VALUE" define
* in "stm32f4xx.h" file. When HSE is used as system clock source, directly or
* through PLL, and you are using different crystal you have to adapt the HSE
* value to your own configuration.
*
* 5. This file configures the system clock as follows:
*=============================================================================
*=============================================================================
* Supported STM32F40xxx/41xxx devices
*-----------------------------------------------------------------------------
* System Clock source | PLL (HSE)
*-----------------------------------------------------------------------------
* SYSCLK(Hz) | 168000000
*-----------------------------------------------------------------------------
* HCLK(Hz) | 168000000
*-----------------------------------------------------------------------------
* AHB Prescaler | 1
*-----------------------------------------------------------------------------
* APB1 Prescaler | 4
*-----------------------------------------------------------------------------
* APB2 Prescaler | 2
*-----------------------------------------------------------------------------
* HSE Frequency(Hz) | 25000000
*-----------------------------------------------------------------------------
* PLL_M | 25
*-----------------------------------------------------------------------------
* PLL_N | 336
*-----------------------------------------------------------------------------
* PLL_P | 2
*-----------------------------------------------------------------------------
* PLL_Q | 7
*-----------------------------------------------------------------------------
* PLLI2S_N | NA
*-----------------------------------------------------------------------------
* PLLI2S_R | NA
*-----------------------------------------------------------------------------
* I2S input clock | NA
*-----------------------------------------------------------------------------
* VDD(V) | 3.3
*-----------------------------------------------------------------------------
* Main regulator output voltage | Scale1 mode
*-----------------------------------------------------------------------------
* Flash Latency(WS) | 5
*-----------------------------------------------------------------------------
* Prefetch Buffer | ON
*-----------------------------------------------------------------------------
* Instruction cache | ON
*-----------------------------------------------------------------------------
* Data cache | ON
*-----------------------------------------------------------------------------
* Require 48MHz for USB OTG FS, | Disabled
* SDIO and RNG clock |
*-----------------------------------------------------------------------------
*=============================================================================
*=============================================================================
* Supported STM32F42xxx/43xxx devices
*-----------------------------------------------------------------------------
* System Clock source | PLL (HSE)
*-----------------------------------------------------------------------------
* SYSCLK(Hz) | 180000000
*-----------------------------------------------------------------------------
* HCLK(Hz) | 180000000
*-----------------------------------------------------------------------------
* AHB Prescaler | 1
*-----------------------------------------------------------------------------
* APB1 Prescaler | 4
*-----------------------------------------------------------------------------
* APB2 Prescaler | 2
*-----------------------------------------------------------------------------
* HSE Frequency(Hz) | 25000000
*-----------------------------------------------------------------------------
* PLL_M | 25
*-----------------------------------------------------------------------------
* PLL_N | 360
*-----------------------------------------------------------------------------
* PLL_P | 2
*-----------------------------------------------------------------------------
* PLL_Q | 7
*-----------------------------------------------------------------------------
* PLLI2S_N | NA
*-----------------------------------------------------------------------------
* PLLI2S_R | NA
*-----------------------------------------------------------------------------
* I2S input clock | NA
*-----------------------------------------------------------------------------
* VDD(V) | 3.3
*-----------------------------------------------------------------------------
* Main regulator output voltage | Scale1 mode
*-----------------------------------------------------------------------------
* Flash Latency(WS) | 5
*-----------------------------------------------------------------------------
* Prefetch Buffer | ON
*-----------------------------------------------------------------------------
* Instruction cache | ON
*-----------------------------------------------------------------------------
* Data cache | ON
*-----------------------------------------------------------------------------
* Require 48MHz for USB OTG FS, | Disabled
* SDIO and RNG clock |
*-----------------------------------------------------------------------------
*=============================================================================
*=============================================================================
* Supported STM32F401xx devices
*-----------------------------------------------------------------------------
* System Clock source | PLL (HSE)
*-----------------------------------------------------------------------------
* SYSCLK(Hz) | 84000000
*-----------------------------------------------------------------------------
* HCLK(Hz) | 84000000
*-----------------------------------------------------------------------------
* AHB Prescaler | 1
*-----------------------------------------------------------------------------
* APB1 Prescaler | 2
*-----------------------------------------------------------------------------
* APB2 Prescaler | 1
*-----------------------------------------------------------------------------
* HSE Frequency(Hz) | 25000000
*-----------------------------------------------------------------------------
* PLL_M | 25
*-----------------------------------------------------------------------------
* PLL_N | 336
*-----------------------------------------------------------------------------
* PLL_P | 4
*-----------------------------------------------------------------------------
* PLL_Q | 7
*-----------------------------------------------------------------------------
* PLLI2S_N | NA
*-----------------------------------------------------------------------------
* PLLI2S_R | NA
*-----------------------------------------------------------------------------
* I2S input clock | NA
*-----------------------------------------------------------------------------
* VDD(V) | 3.3
*-----------------------------------------------------------------------------
* Main regulator output voltage | Scale1 mode
*-----------------------------------------------------------------------------
* Flash Latency(WS) | 2
*-----------------------------------------------------------------------------
* Prefetch Buffer | ON
*-----------------------------------------------------------------------------
* Instruction cache | ON
*-----------------------------------------------------------------------------
* Data cache | ON
*-----------------------------------------------------------------------------
* Require 48MHz for USB OTG FS, | Disabled
* SDIO and RNG clock |
*-----------------------------------------------------------------------------
*=============================================================================
*=============================================================================
* Supported STM32F411xx devices
*-----------------------------------------------------------------------------
* System Clock source | PLL (HSI)
*-----------------------------------------------------------------------------
* SYSCLK(Hz) | 100000000
*-----------------------------------------------------------------------------
* HCLK(Hz) | 100000000
*-----------------------------------------------------------------------------
* AHB Prescaler | 1
*-----------------------------------------------------------------------------
* APB1 Prescaler | 2
*-----------------------------------------------------------------------------
* APB2 Prescaler | 1
*-----------------------------------------------------------------------------
* HSI Frequency(Hz) | 16000000
*-----------------------------------------------------------------------------
* PLL_M | 16
*-----------------------------------------------------------------------------
* PLL_N | 400
*-----------------------------------------------------------------------------
* PLL_P | 4
*-----------------------------------------------------------------------------
* PLL_Q | 7
*-----------------------------------------------------------------------------
* PLLI2S_N | NA
*----------------------------------------------------------------
|
{
"pile_set_name": "Github"
}
|
<?xml version="1.0" encoding="UTF-8"?>
<rsp stat="ok">
<photo id="44070187" secret="5e5a50b675" server="32" farm="1" dateuploaded="1126980258" isfavorite="0" license="4"
safety_level="0" rotation="0" originalsecret="5e5a50b675" originalformat="jpg" views="5031" media="photo">
<owner nsid="94571281@N00" username="jonrawlinson" realname="Jon Rawlinson" location="" iconserver="1"
iconfarm="1"/>
<title>$650,000 ride! Enzo, Ferrari</title>
<description>ahhh, only in Italy! The incredible Enzo Ferrari!</description>
<visibility ispublic="1" isfriend="0" isfamily="0"/>
<dates posted="1126980258" taken="2005-09-17 22:49:09" takengranularity="0" lastupdate="1303625900"/>
<editability cancomment="0" canaddmeta="0"/>
<publiceditability cancomment="1" canaddmeta="0"/>
<usage candownload="1" canblog="0" canprint="0" canshare="1"/>
<comments>12</comments>
<notes/>
<people haspeople="0"/>
<tags>
<tag id="104308-44070187-2411732" author="94571281@N00" raw="jonrawlinson" machine_tag="0">jonrawlinson
</tag>
<tag id="104308-44070187-121" author="94571281@N00" raw="travel" machine_tag="0">travel</tag>
<tag id="104308-44070187-4835592" author="94571281@N00" raw="radblog" machine_tag="0">radblog</tag>
<tag id="104308-44070187-292353" author="94571281@N00" raw="rawlinson" machine_tag="0">rawlinson</tag>
<tag id="104308-44070187-4771215" author="94571281@N00" raw="theradblog" machine_tag="0">theradblog</tag>
<tag id="104308-44070187-1178648" author="94571281@N00" raw="enzoferrari" machine_tag="0">enzoferrari</tag>
<tag id="104308-44070187-16596" author="94571281@N00" raw="enzo" machine_tag="0">enzo</tag>
<tag id="104308-44070187-9505" author="94571281@N00" raw="ferrari" machine_tag="0">ferrari</tag>
<tag id="104308-44070187-227" author="94571281@N00" raw="red" machine_tag="0">red</tag>
<tag id="104308-44070187-501" author="94571281@N00" raw="hot" machine_tag="0">hot</tag>
<tag id="104308-44070187-525" author="94571281@N00" raw="rome" machine_tag="0">rome</tag>
<tag id="104308-44070187-297" author="94571281@N00" raw="italy" machine_tag="0">italy</tag>
<tag id="104308-44070187-733200" author="94571281@N00" raw="650000" machine_tag="0">650000</tag>
<tag id="104308-44070187-4009" author="94571281@N00" raw="ride" machine_tag="0">ride</tag>
<tag id="104308-44070187-16971" author="94571281@N00" raw="whip" machine_tag="0">whip</tag>
<tag id="104308-44070187-49721" author="94571281@N00" raw="smokin" machine_tag="0">smokin</tag>
<tag id="104308-44070187-8117" author="94571281@N00" raw="fast" machine_tag="0">fast</tag>
<tag id="104308-44070187-53450" author="94571281@N00" raw="supercar" machine_tag="0">supercar</tag>
<tag id="104308-44070187-37194751" author="94571281@N00" raw="jonrawlinson.com" machine_tag="0">
jonrawlinsoncom
</tag>
<tag id="104308-44070187-41629469" author="94571281@N00" raw="theradblog.com" machine_tag="0">
theradblogcom
</tag>
</tags>
<urls>
<url type="photopage">http://www.flickr.com/photos/london/44070187/</url>
</urls>
</photo>
</rsp>
|
{
"pile_set_name": "Github"
}
|
{{- if .Values.compass.enabled }}
# Service exposing the compass component on each port listed under
# .Values.compass.service.ports (targetPort mirrors port).
apiVersion: v1
kind: Service
metadata:
  labels:
    app: {{ .Values.compass.name }}
    service: {{ .Values.compass.name }}
  name: {{ .Values.compass.name }}
  namespace: {{ .Release.Namespace }}
spec:
  ports:
  {{ range $i, $var := .Values.compass.service.ports -}}
  - name: {{ $var.name }}
    port: {{ $var.port }}
    targetPort: {{ $var.port }}
  {{ end }}
  selector:
    app: {{ .Values.compass.name }}
  type: {{ .Values.compass.service.type }}
{{- end }}
|
{
"pile_set_name": "Github"
}
|
// Copyright 2013 Google Inc. All Rights Reserved.
// Copyright 2017 The Cockroach Authors.
//
// Use of this software is governed by the Business Source License
// included in the file licenses/BSL.txt.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0, included in the file
// licenses/APL.txt.
// This code originated in the github.com/golang/glog package.
package log
import (
"testing"
"time"
"github.com/cockroachdb/cockroach/pkg/util/timeutil"
)
// TestLogFilenameParsing ensures that logName and parseLogFilename work as
// advertised: a filename generated for a given time must round-trip back to
// that time (at second resolution) through ParseLogFilename.
func TestLogFilenameParsing(t *testing.T) {
	cases := []time.Time{
		timeutil.Now(),
		timeutil.Now().AddDate(-10, 0, 0),
		timeutil.Now().AddDate(0, 0, -1),
	}
	for i, ts := range cases {
		name, _ := logName(program, ts)
		details, err := ParseLogFilename(name)
		if err != nil {
			t.Fatal(err)
		}
		expected := ts.Format(time.RFC3339)
		actual := timeutil.Unix(0, details.Time).Format(time.RFC3339)
		if actual != expected {
			t.Errorf("%d: Times do not match, expected:%s - actual:%s", i, expected, actual)
		}
	}
}
// TestSelectFiles checks that selectFiles correctly filters and orders
// filesInfos: results must be capped at the given end timestamp and come
// back newest-first.
func TestSelectFiles(t *testing.T) {
	year2000 := time.Date(2000, time.January, 1, 1, 0, 0, 0, time.UTC)
	year2050 := time.Date(2050, time.January, 1, 1, 0, 0, 0, time.UTC)
	year2200 := time.Date(2200, time.January, 1, 1, 0, 0, 0, time.UTC)

	// Build one log FileInfo per year from 2000 through 2099.
	testFiles := []FileInfo{}
	for i := 0; i < 100; i++ {
		ts := year2000.AddDate(i, 0, 0)
		name, _ := logName(program, ts)
		testFiles = append(testFiles, FileInfo{
			Name:    name,
			Details: FileDetails{Time: ts.UnixNano()},
		})
	}

	testCases := []struct {
		EndTimestamp  int64
		ExpectedCount int
	}{
		{year2200.UnixNano(), 100},
		{year2050.UnixNano(), 51},
		{year2000.UnixNano(), 1},
	}
	for i, tc := range testCases {
		selected := selectFiles(testFiles, tc.EndTimestamp)
		if len(selected) != tc.ExpectedCount {
			t.Errorf("%d: expected %d files, actual %d", i, tc.ExpectedCount, len(selected))
		}
		// Walk the results verifying descending order and the time cutoff.
		prev := year2200.UnixNano()
		for _, f := range selected {
			if f.Details.Time > prev {
				t.Errorf("%d: returned files are not in the correct order", i)
			}
			if f.Details.Time > tc.EndTimestamp {
				t.Errorf("%d: did not filter by endTime", i)
			}
			prev = f.Details.Time
		}
	}
}
|
{
"pile_set_name": "Github"
}
|
"use strict";
exports.__esModule = true;
var _hasInstance = require("../core-js/symbol/has-instance");
var _hasInstance2 = _interopRequireDefault(_hasInstance);
var _symbol = require("../core-js/symbol");
var _symbol2 = _interopRequireDefault(_symbol);
/** Normalize module interop: an ES module namespace object passes through
 *  unchanged, while a plain CommonJS export is wrapped as `{ default: obj }`. */
function _interopRequireDefault(obj) {
  if (obj && obj.__esModule) {
    return obj;
  }
  return { default: obj };
}
exports.default = function (left, right) {
if (right != null && typeof _symbol2.default !== "undefined" && right[_hasInstance2.default]) {
return right[_hasInstance2.default](left);
} else {
return left instanceof right;
}
};
|
{
"pile_set_name": "Github"
}
|
/*
* Copyright (c) 2014, The Linux Foundation. All rights reserved.
* Copyright (C) 2013 Red Hat
* Author: Rob Clark <robdclark@gmail.com>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 as published by
* the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along with
* this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include "mdp5_kms.h"
#include "mdp5_smp.h"
/*
 * Bookkeeping for the pool of shared memory blocks ("MMBs") that pipes
 * fetch scanout data through, plus a cache of the watermark registers.
 */
struct mdp5_smp {
	struct drm_device *dev;

	uint8_t reserved[MAX_CLIENTS]; /* fixed MMBs allocation per client */

	int blk_cnt;	/* number of MMBs in the pool (bitmap length) */
	int blk_size;	/* size of one MMB, in bytes */

	/* register cache */
	u32 alloc_w[22];
	u32 alloc_r[22];
	u32 pipe_reqprio_fifo_wm0[SSPP_MAX];
	u32 pipe_reqprio_fifo_wm1[SSPP_MAX];
	u32 pipe_reqprio_fifo_wm2[SSPP_MAX];
};
/* Resolve the mdp5_kms device that this SMP instance belongs to. */
static inline
struct mdp5_kms *get_kms(struct mdp5_smp *smp)
{
	struct msm_drm_private *priv;

	priv = smp->dev->dev_private;

	return to_mdp5_kms(to_mdp_kms(priv->kms));
}
/* Map a (pipe, plane) pair to the SMP client id that fetches it. */
static inline u32 pipe2client(enum mdp5_pipe pipe, int plane)
{
#define CID_UNUSED	0

	/* Out-of-range plane index: report and return the "unused" client. */
	if (WARN_ON(plane >= pipe2nclients(pipe)))
		return CID_UNUSED;

	/*
	 * Note on SMP clients:
	 * For ViG pipes, fetch Y/Cr/Cb-components clients are always
	 * consecutive, and in that order.
	 *
	 * e.g.:
	 * if mdp5_cfg->smp.clients[SSPP_VIG0] = N,
	 *	Y  plane's client ID is N
	 *	Cr plane's client ID is N + 1
	 *	Cb plane's client ID is N + 2
	 */
	return mdp5_cfg->smp.clients[pipe] + plane;
}
/* allocate blocks for the specified request: */
static int smp_request_block(struct mdp5_smp *smp,
		struct mdp5_smp_state *state,
		u32 cid, int nblks)
{
	void *cs = state->client_state[cid];	/* this client's block bitmap */
	int i, avail, cnt = smp->blk_cnt;
	uint8_t reserved;

	/* we shouldn't be requesting blocks for an in-use client: */
	WARN_ON(bitmap_weight(cs, cnt) > 0);

	/* Blocks statically reserved for this client count toward the
	 * request before anything is taken from the shared pool. */
	reserved = smp->reserved[cid];

	if (reserved) {
		nblks = max(0, nblks - reserved);
		DBG("%d MMBs allocated (%d reserved)", nblks, reserved);
	}

	/* The remaining demand must fit in the globally-free blocks. */
	avail = cnt - bitmap_weight(state->state, cnt);
	if (nblks > avail) {
		dev_err(smp->dev->dev, "out of blks (req=%d > avail=%d)\n",
				nblks, avail);
		return -ENOSPC;
	}

	/* Claim one free block at a time, marking it both in the client's
	 * bitmap and in the global allocation bitmap. */
	for (i = 0; i < nblks; i++) {
		int blk = find_first_zero_bit(state->state, cnt);
		set_bit(blk, cs);
		set_bit(blk, state->state);
	}

	return 0;
}
/*
 * Update the cached REQPRIO FIFO watermark levels for `pipe`, derived from
 * the number of SMP blocks it fetches through: watermarks are placed at
 * 1/4, 2/4 and 3/4 of the pipe's share of SMP entries.
 */
static void set_fifo_thresholds(struct mdp5_smp *smp,
		enum mdp5_pipe pipe, int nblks)
{
	u32 entries_per_blk, quarter;

	/* each SMP entry is 128 bits wide */
	entries_per_blk = smp->blk_size / (128 / BITS_PER_BYTE);
	quarter = (nblks * entries_per_blk) / 4;

	smp->pipe_reqprio_fifo_wm0[pipe] = quarter;
	smp->pipe_reqprio_fifo_wm1[pipe] = 2 * quarter;
	smp->pipe_reqprio_fifo_wm2[pipe] = 3 * quarter;
}
/*
 * NOTE: looks like if horizontal decimation is used (if we supported that)
 * then the width used to calculate SMP block requirements is the post-
 * decimated width. Ie. SMP buffering sits downstream of decimation (which
 * presumably happens during the dma from scanout buffer).
 */

/*
 * Compute the SMP block requirement for fetching `width` pixels of the
 * given format.  Returns a "blkcfg" value with the per-plane block count
 * packed 8 bits per plane (plane i occupies bits [8*i .. 8*i+7]), in the
 * layout consumed by mdp5_smp_assign().
 */
uint32_t mdp5_smp_calculate(struct mdp5_smp *smp,
		const struct mdp_format *format,
		u32 width, bool hdecim)
{
	struct mdp5_kms *mdp5_kms = get_kms(smp);
	int rev = mdp5_cfg_get_hw_rev(mdp5_kms->cfg);
	int i, hsub, nplanes, nlines;
	u32 fmt = format->base.pixel_format;
	uint32_t blkcfg = 0;

	nplanes = drm_format_num_planes(fmt);
	hsub = drm_format_horz_chroma_subsampling(fmt);

	/* different if BWC (compressed framebuffer?) enabled: */
	nlines = 2;

	/* Newer MDPs have split/packing logic, which fetches sub-sampled
	 * U and V components (splits them from Y if necessary) and packs
	 * them together, writes to SMP using a single client.
	 */
	if ((rev > 0) && (format->chroma_sample > CHROMA_FULL)) {
		fmt = DRM_FORMAT_NV24;
		nplanes = 2;

		/* if decimation is enabled, HW decimates less on the
		 * sub sampled chroma components
		 */
		if (hdecim && (hsub > 1))
			hsub = 1;
	}

	for (i = 0; i < nplanes; i++) {
		int n, fetch_stride, cpp;

		cpp = drm_format_plane_cpp(fmt, i);
		/* chroma planes (i > 0) are horizontally sub-sampled */
		fetch_stride = width * cpp / (i ? hsub : 1);

		/* blocks needed to buffer `nlines` lines of this plane */
		n = DIV_ROUND_UP(fetch_stride * nlines, smp->blk_size);

		/* for hw rev v1.00 */
		if (rev == 0)
			n = roundup_pow_of_two(n);

		blkcfg |= (n << (8 * i));
	}

	return blkcfg;
}
/*
 * Assign SMP blocks to every client of `pipe` according to the packed
 * per-plane block counts in `blkcfg` (as produced by mdp5_smp_calculate()).
 * Returns 0 on success, or a negative errno (e.g. -ENOSPC from
 * smp_request_block()) on failure.
 */
int mdp5_smp_assign(struct mdp5_smp *smp, struct mdp5_smp_state *state,
		enum mdp5_pipe pipe, uint32_t blkcfg)
{
	struct mdp5_kms *mdp5_kms = get_kms(smp);
	struct drm_device *dev = mdp5_kms->dev;
	int i, ret;

	for (i = 0; i < pipe2nclients(pipe); i++) {
		u32 cid = pipe2client(pipe, i);
		int n = blkcfg & 0xff;	/* block count for this plane/client */

		if (!n)
			continue;

		DBG("%s[%d]: request %d SMP blocks", pipe2name(pipe), i, n);
		ret = smp_request_block(smp, state, cid, n);
		if (ret) {
			dev_err(dev->dev, "Cannot allocate %d SMP blocks: %d\n",
					n, ret);
			return ret;
		}

		/* advance to the next plane's 8-bit count */
		blkcfg >>= 8;
	}

	/* record that this pipe now holds an SMP assignment */
	state->assigned |= (1 << pipe);

	return 0;
}
/* Release SMP blocks for all clients of the pipe */
void mdp5_smp_release(struct mdp5_smp *smp, struct mdp5_smp_state *state
|
{
"pile_set_name": "Github"
}
|
/******************************************************************************
* Copyright 2017 The Apollo Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*****************************************************************************/
#pragma once
#include "modules/drivers/canbus/can_comm/protocol_data.h"
#include "modules/drivers/proto/conti_radar.pb.h"
namespace apollo {
namespace drivers {
namespace conti_radar {
using apollo::drivers::ContiRadar;
// Protocol parser for the Continental radar "cluster quality info" CAN
// message (0x702, per the class name); decodes raw frame bytes into the
// ContiRadar protobuf.
class ClusterQualityInfo702
    : public apollo::drivers::canbus::ProtocolData<ContiRadar> {
 public:
  // CAN message ID handled by this parser.
  static const uint32_t ID;
  ClusterQualityInfo702();

  // Decodes `length` bytes of frame payload into `conti_radar`.
  void Parse(const std::uint8_t* bytes, int32_t length,
             ContiRadar* conti_radar) const override;

 private:
  // Signal extractors: each decodes one field from the raw frame bytes.
  int target_id(const std::uint8_t* bytes, int32_t length) const;
  int longitude_dist_rms(const std::uint8_t* bytes, int32_t length) const;
  int lateral_dist_rms(const std::uint8_t* bytes, int32_t length) const;
  int longitude_vel_rms(const std::uint8_t* bytes, int32_t length) const;
  int pdh0(const std::uint8_t* bytes, int32_t length) const;
  int ambig_state(const std::uint8_t* bytes, int32_t length) const;
  int invalid_state(const std::uint8_t* bytes, int32_t length) const;
  int lateral_vel_rms(const std::uint8_t* bytes, int32_t length) const;
};
} // namespace conti_radar
} // namespace drivers
} // namespace apollo
|
{
"pile_set_name": "Github"
}
|
/*
** Copyright (C) 2006-2012 Erik de Castro Lopo <erikd@mega-nerd.com>
**
** This program is free software; you can redistribute it and/or modify
** it under the terms of the GNU General Public License as published by
** the Free Software Foundation; either version 2 of the License, or
** (at your option) any later version.
**
** This program is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
** GNU General Public License for more details.
**
** You should have received a copy of the GNU General Public License
** along with this program; if not, write to the Free Software
** Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
#include "sfconfig.h"
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <sndfile.hh>
#include "utils.h"
/* Scratch I/O buffers shared by the write/read tests below; one per
** sample type that SndfileHandle::write/read accepts. */
static short sbuffer [100] ;
static int ibuffer [100] ;
static float fbuffer [100] ;
static double dbuffer [100] ;
static void
ceeplusplus_wchar_test (void)
{
/* NOTE(review): the entire body is compiled out via "#if 0"; it exercises
** the wchar_t filename constructor using Win32-only APIs
** (GetFileAttributesW / DeleteFileW), so it presumably only ever ran on
** Windows builds. */
#if 0
	LPCWSTR filename = L"wchar_test.wav" ;

	print_test_name (__func__, "ceeplusplus_wchar_test.wav") ;

	/* Use this scope to make sure the created file is closed. */
	{
		SndfileHandle file (filename, SFM_WRITE, SF_FORMAT_WAV | SF_FORMAT_PCM_16, 2, 44100) ;

		if (file.refCount () != 1)
		{	printf ("\n\n%s %d : Error : Reference count (%d) should be 1.\n\n", __func__, __LINE__, file.refCount ()) ;
			exit (1) ;
			} ;

		/* This should check that the file did in fact get created with a
		** wchar_t * filename.
		*/
		exit_if_true (
			GetFileAttributesW (filename) == INVALID_FILE_ATTRIBUTES,
			"\n\nLine %d : GetFileAttributes failed.\n\n", __LINE__
			) ;
	}

	/* Use this because the file was created with CreateFileW. */
	DeleteFileW (filename) ;

	puts ("ok") ;
#endif
} /* ceeplusplus_wchar_test */
static void
create_file (const char * filename, int format)
{ SndfileHandle file ;
if (file.refCount () != 0)
{ printf ("\n\n%s %d : Error : Reference count (%d) should be zero.\n\n", __func__, __LINE__, file.refCount ()) ;
exit (1) ;
} ;
file = SndfileHandle (filename, SFM_WRITE, format, 2, 48000) ;
if (file.refCount () != 1)
{ printf ("\n\n%s %d : Error : Reference count (%d) should be 1.\n\n", __func__, __LINE__, file.refCount ()) ;
exit (1) ;
} ;
file.setString (SF_STR_TITLE, filename) ;
/* Item write. */
file.write (sbuffer, ARRAY_LEN (sbuffer)) ;
file.write (ibuffer, ARRAY_LEN (ibuffer)) ;
file.write (fbuffer, ARRAY_LEN (fbuffer)) ;
file.write (dbuffer, ARRAY_LEN (dbuffer)) ;
/* Frame write. */
file.writef (sbuffer, ARRAY_LEN (sbuffer) / file.channels ()) ;
file.writef (ibuffer, ARRAY_LEN (ibuffer) / file.channels ()) ;
file.writef (fbuffer, ARRAY_LEN (fbuffer) / file.channels ()) ;
file.writef (dbuffer, ARRAY_LEN (dbuffer) / file.channels ()) ;
/* RAII takes care of the SndfileHandle. */
} /* create_file */
/* Verify that the file's SF_STR_TITLE metadata equals `filename`
** (create_file sets the title to the file's own name). */
static void
check_title (const SndfileHandle & file, const char * filename)
{	const char * title = file.getString (SF_STR_TITLE) ;

	if (title == NULL)
	{	printf ("\n\n%s %d : Error : No title.\n\n", __func__, __LINE__) ;
		exit (1) ;
		} ;

	if (strcmp (filename, title) != 0)
	{	printf ("\n\n%s %d : Error : title '%s' should be '%s'\n\n", __func__, __LINE__, title, filename) ;
		exit (1) ;
		} ;
} /* check_title */
static void
read_file (const char * filename, int format)
{ SndfileHandle file ;
sf_count_t count ;
if (file)
{ printf ("\n\n%s %d : Error : should not be here.\n\n", __func__, __LINE__) ;
exit (1) ;
} ;
file = SndfileHandle (filename) ;
if (1)
{ SndfileHandle file2 = file ;
if (file.refCount () != 2 || file2.refCount () != 2)
{ printf ("\n\n%s %d : Error : Reference count (%d) should be two.\n\n", __func__, __LINE__, file.refCount ()) ;
exit (1) ;
} ;
} ;
if (file.refCount () != 1)
{ printf ("\n\n%s %d : Error : Reference count (%d) should be one.\n\n", __func__, __LINE__, file.refCount ()) ;
exit (1) ;
} ;
if (! file)
{ printf ("\n\n%s %d : Error : should not be here.\n\n", __func__, __LINE__) ;
exit (1) ;
} ;
if (file.format () != format)
{ printf ("\n\n%s %d : Error : format 0x%08x should be 0x%08x.\n\n", __func__, __LINE__, file.format (), format) ;
exit (1) ;
} ;
if (file.channels () != 2)
{ printf ("\n\n%s %d : Error : channels %d should be 2.\n\n", __func__, __LINE__, file.channels ()) ;
exit (1) ;
} ;
if (file.frames () != ARRAY_LEN (sbuffer) * 4)
{ printf ("\n\n%s %d : Error : frames %ld should be %lu.\n\n", __func__, __LINE__,
(long) file.frames (), (long) ARRAY_LEN (sbuffer) * 4 / 2) ;
exit (1) ;
} ;
switch (format & SF_FORMAT_TYPEMASK)
{ case SF_FORMAT_AU :
break ;
default :
check_title (file, filename) ;
break ;
} ;
/* Item read. */
file.read (sbuffer, ARRAY_LEN (sbuffer)) ;
file.read (ibuffer, ARRAY_LEN (ibuffer)) ;
file.read (fbuffer, ARRAY_LEN (fbuffer)) ;
file.read (dbuffer, ARRAY_LEN (dbuffer)) ;
/* Frame read. */
file.readf (sbuffer, ARRAY_LEN (sbuffer) / file.channels ()) ;
file.readf (ibuffer, ARRAY_LEN (ibuffer) / file.channels ()) ;
file.readf (fbuffer, ARRAY_LEN (fbuffer) / file.channels ()) ;
file.readf (dbuffer, ARRAY_LEN (dbuffer) / file.channels ()) ;
count = file.seek (file.frames () - 10, SEEK_SET) ;
if (count != file.frames () - 10)
{ printf ("\n\n%s %d : Error : offset (%ld) should be %ld\n\n", __func__, __LINE__,
(long) count, (long
|
{
"pile_set_name": "Github"
}
|
# Monolog - Logging for PHP [](https://travis-ci.org/Seldaek/monolog)
[](https://packagist.org/packages/monolog/monolog)
[](https://packagist.org/packages/monolog/monolog)
[](https://www.versioneye.com/php/monolog:monolog/references)
Monolog sends your logs to files, sockets, inboxes, databases and various
web services. See the complete list of handlers below. Special handlers
allow you to build advanced logging strategies.
This library implements the [PSR-3](https://github.com/php-fig/fig-standards/blob/master/accepted/PSR-3-logger-interface.md)
interface that you can type-hint against in your own libraries to keep
a maximum of interoperability. You can also use it in your applications to
make sure you can always use another compatible logger at a later time.
As of 1.11.0 Monolog public APIs will also accept PSR-3 log levels.
Internally Monolog still uses its own level scheme since it predates PSR-3.
## Installation
Install the latest version with
```bash
$ composer require monolog/monolog
```
## Basic Usage
```php
<?php
use Monolog\Logger;
use Monolog\Handler\StreamHandler;
// create a log channel
$log = new Logger('name');
$log->pushHandler(new StreamHandler('path/to/your.log', Logger::WARNING));
// add records to the log
$log->addWarning('Foo');
$log->addError('Bar');
```
## Documentation
- [Usage Instructions](doc/01-usage.md)
- [Handlers, Formatters and Processors](doc/02-handlers-formatters-processors.md)
- [Utility classes](doc/03-utilities.md)
- [Extending Monolog](doc/04-extending.md)
## Third Party Packages
Third party handlers, formatters and processors are
[listed in the wiki](https://github.com/Seldaek/monolog/wiki/Third-Party-Packages). You
can also add your own there if you publish one.
## About
### Requirements
- Monolog works with PHP 5.3 or above, and is also tested to work with HHVM.
### Submitting bugs and feature requests
Bugs and feature requests are tracked on [GitHub](https://github.com/Seldaek/monolog/issues)
### Framework Integrations
- Frameworks and libraries using [PSR-3](https://github.com/php-fig/fig-standards/blob/master/accepted/PSR-3-logger-interface.md)
can be used very easily with Monolog since it implements the interface.
- [Symfony2](http://symfony.com) comes out of the box with Monolog.
- [Silex](http://silex.sensiolabs.org/) comes out of the box with Monolog.
- [Laravel 4 & 5](http://laravel.com/) come out of the box with Monolog.
- [Lumen](http://lumen.laravel.com/) comes out of the box with Monolog.
- [PPI](http://www.ppi.io/) comes out of the box with Monolog.
- [CakePHP](http://cakephp.org/) is usable with Monolog via the [cakephp-monolog](https://github.com/jadb/cakephp-monolog) plugin.
- [Slim](http://www.slimframework.com/) is usable with Monolog via the [Slim-Monolog](https://github.com/Flynsarmy/Slim-Monolog) log writer.
- [XOOPS 2.6](http://xoops.org/) comes out of the box with Monolog.
- [Aura.Web_Project](https://github.com/auraphp/Aura.Web_Project) comes out of the box with Monolog.
- [Nette Framework](http://nette.org/en/) can be used with Monolog via [Kdyby/Monolog](https://github.com/Kdyby/Monolog) extension.
- [Proton Micro Framework](https://github.com/alexbilbie/Proton) comes out of the box with Monolog.
### Author
Jordi Boggiano - <j.boggiano@seld.be> - <http://twitter.com/seldaek><br />
See also the list of [contributors](https://github.com/Seldaek/monolog/contributors) which participated in this project.
### License
Monolog is licensed under the MIT License - see the `LICENSE` file for details
### Acknowledgements
This library is heavily inspired by Python's [Logbook](http://packages.python.org/Logbook/)
library, although most concepts have been adjusted to fit to the PHP world.
|
{
"pile_set_name": "Github"
}
|
// Targeted by JavaCPP version 1.5.4: DO NOT EDIT THIS FILE
package org.bytedeco.arrow;
import java.nio.*;
import org.bytedeco.javacpp.*;
import org.bytedeco.javacpp.annotation.*;
import static org.bytedeco.javacpp.presets.javacpp.*;
import static org.bytedeco.arrow.global.arrow.*;
/** \brief Status outcome object (success or error)
*
* The Status object is an object holding the outcome of an operation.
* The outcome is represented as a StatusCode, either success
* (StatusCode::OK) or an error (any other of the StatusCode enumeration values).
*
* Additionally, if an error occurred, a specific error message is generally
* attached. */
@Namespace("arrow") @NoOffset @Properties(inherit = org.bytedeco.arrow.presets.arrow.class)
public class Status extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public Status(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public Status(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public Status position(long position) {
return (Status)super.position(position);
}
@Override public Status getPointer(long i) {
return new Status(this).position(position + i);
}
// Create a success status.
public Status() { super((Pointer)null); allocate(); }
@NoException private native void allocate();
public Status(StatusCode code, @StdString String msg) { super((Pointer)null); allocate(code, msg); }
private native void allocate(StatusCode code, @StdString String msg);
public Status(@Cast("arrow::StatusCode") byte code, @StdString BytePointer msg) { super((Pointer)null); allocate(code, msg); }
private native void allocate(@Cast("arrow::StatusCode") byte code, @StdString BytePointer msg);
/** \brief Pluggable constructor for use by sub-systems. detail cannot be null. */
public Status(StatusCode code, @StdString String msg, @SharedPtr StatusDetail detail) { super((Pointer)null); allocate(code, msg, detail); }
private native void allocate(StatusCode code, @StdString String msg, @SharedPtr StatusDetail detail);
public Status(@Cast("arrow::StatusCode") byte code, @StdString BytePointer msg, @SharedPtr StatusDetail detail) { super((Pointer)null); allocate(code, msg, detail); }
private native void allocate(@Cast("arrow::StatusCode") byte code, @StdString BytePointer msg, @SharedPtr StatusDetail detail);
// Copy the specified status.
public Status(@Const @ByRef Status s) { super((Pointer)null); allocate(s); }
private native void allocate(@Const @ByRef Status s);
public native @ByRef @Name("operator =") Status put(@Const @ByRef Status s);
// Move the specified status.
public native @Cast("bool") boolean Equals(@Const @ByRef Status s);
// AND the statuses.
public native @ByVal @Name("operator &") @NoException Status and(@Const @ByRef Status s);
public native @ByRef @Name("operator &=") @NoException Status andPut(@Const @ByRef Status s);
/** Return a success status */
public static native @ByVal Status OK();
/** Return an error status for out-of-memory conditions */
/** Return an error status for failed key lookups (e.g. column name in a table) */
/** Return an error status for type errors (such as mismatching data types) */
/** Return an error status for unknown errors */
/** Return an error status when an operation or a combination of operation and
* data types is unimplemented */
/** Return an error status for invalid data (for example a string that fails parsing) */
/** Return an error status when an index is out of bounds */
/** Return an error status when a container's capacity would exceed its limits */
/** Return an error status when some IO-related operation failed */
/** Return an error status when some (de)serialization operation failed */
/** Return true iff the status indicates success. */
public native @Cast("bool") boolean ok();
/** Return true iff the status indicates an out-of-memory error. */
public native @Cast("bool") boolean IsOutOfMemory();
/** Return true iff the status indicates a key lookup error. */
public native @Cast("bool") boolean IsKeyError();
/** Return true iff the status indicates invalid data. */
public native @Cast("bool") boolean IsInvalid();
/** Return true iff the status indicates an IO-related failure. */
public native @Cast("bool") boolean IsIOError();
/** Return true iff the status indicates a container reaching capacity limits. */
public native @Cast("bool") boolean IsCapacityError();
/** Return true iff the status indicates an out of bounds index. */
public native @Cast("bool") boolean IsIndexError();
/** Return true iff the status indicates a type error. */
public native @Cast("bool") boolean IsTypeError();
/** Return true iff the status indicates an unknown error. */
public native @Cast("bool") boolean IsUnknownError();
/** Return true iff the status indicates an unimplemented operation. */
public native @Cast("bool") boolean IsNotImplemented();
/** Return true iff the status indicates a (de)serialization failure */
public native @Cast("bool") boolean IsSerializationError();
/** Return true iff the status indicates a R-originated error. */
public native @Cast("bool") boolean IsRError();
public native @Cast("bool") boolean IsCodeGenError();
public native @Cast("bool") boolean IsExpressionValidationError();
public native @Cast("bool") boolean IsExecutionError();
///
public native @Cast("bool") boolean IsAlreadyExists();
/** \brief Return a string representation of this status suitable for printing.
*
* The string "OK" is returned for success. */
public native @StdString String ToString();
/** \brief Return a string representation of the status code, without the message
* text or POSIX code information. */
public native @StdString String CodeAsString();
public static native @StdString String CodeAsString(StatusCode arg0);
public static native @StdString BytePointer CodeAsString(@Cast("arrow::StatusCode") byte arg0);
/** \brief Return the StatusCode value attached to this status. */
public native StatusCode code();
/** \brief Return the specific error message attached to this status. */
public native @StdString String message();
/** \brief Return the status detail attached to this message. */
public native @SharedPtr StatusDetail detail();
/** \brief Return a new Status copying the existing status, but
* updating with the existing detail. */
public native @ByVal Status WithDetail(@SharedPtr StatusDetail new_detail);
/** \brief Return a new Status with changed message, copying the
* existing status code and detail. */
public native void Abort(@StdString String message);
public native void Abort(@StdString BytePointer message);
}
|
{
"pile_set_name": "Github"
}
|
<?xml version="1.0" encoding="utf-8"?>
<!--
~ Copyright (c) 2019 Hai Zhang <dreaming.in.code.zh@gmail.com>
~ All Rights Reserved.
-->
<View
xmlns:android="http://schemas.android.com/apk/res/android"
android:id="@+id/swatch"
android:layout_width="36dp"
android:layout_height="36dp"
android:background="@drawable/color_preference_widget_background" />
|
{
"pile_set_name": "Github"
}
|
/**
* @file wizwiki_w7500.c
* @brief board ID for the WIZnet WIZwiki-W7500 board
*
* DAPLink Interface Firmware
* Copyright (c) 2009-2016, ARM Limited, All Rights Reserved
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* Board ID for the WIZnet WIZwiki-W7500 board (see file header). */
const char *board_id = "2201";
|
{
"pile_set_name": "Github"
}
|
###########################################################
#
# strace
#
###########################################################
# You must replace "strace" and "STRACE" with the lower case name and
# upper case name of your new package. Some places below will say
# "Do not change this" - that does not include this global change,
# which must always be done to ensure we have unique names.
#
# STRACE_VERSION, STRACE_SITE and STRACE_SOURCE define
# the upstream location of the source code for the package.
# STRACE_DIR is the directory which is created when the source
# archive is unpacked.
# STRACE_UNZIP is the command used to unzip the source.
# It is usually "zcat" (for .gz) or "bzcat" (for .bz2)
#
# You should change all these variables to suit your package.
#
STRACE_VERSION ?= 4.10
STRACE_IPK_VERSION ?= 1
STRACE_SITE=http://$(SOURCEFORGE_MIRROR)/sourceforge/strace
STRACE_SOURCE?=strace-$(STRACE_VERSION).tar.xz
STRACE_DIR=strace-$(STRACE_VERSION)
STRACE_UNZIP?=xzcat
STRACE_MAINTAINER=Christopher <edmondsc@onid.orst.edu>
STRACE_DESCRIPTION=Traces all system calls a program makes.
STRACE_SECTION=utility
STRACE_PRIORITY=optional
STRACE_DEPENDS=
STRACE_SUGGESTS=
STRACE_CONFLICTS=
#
# STRACE_PATCHES should list any patches, in the order in
# which they should be applied to the source code.
#
#
STRACE_PATCHES?=#$(STRACE_SOURCE_DIR)/CTL_PROC.patch
#
# If the compilation of the package requires additional
# compilation or linking flags, then list them here.
#
STRACE_CPPFLAGS=
STRACE_CPPFLAGS_PRE?=-I$(SOURCE_DIR)/strace/include
STRACE_LDFLAGS=
#
# STRACE_BUILD_DIR is the directory in which the build is done.
# STRACE_SOURCE_DIR is the directory which holds all the
# patches and ipkg control files.
# STRACE_IPK_DIR is the directory in which the ipk is built.
# STRACE_IPK is the name of the resulting ipk files.
#
# You should not change any of these variables.
#
STRACE_BUILD_DIR=$(BUILD_DIR)/strace
STRACE_SOURCE_DIR=$(SOURCE_DIR)/strace
STRACE_IPK_DIR=$(BUILD_DIR)/strace-$(STRACE_VERSION)-ipk
STRACE_IPK=$(BUILD_DIR)/strace_$(STRACE_VERSION)-$(STRACE_IPK_VERSION)_$(TARGET_ARCH).ipk
.PHONY: strace-source strace-unpack strace strace-stage strace-ipk strace-clean strace-dirclean strace-check
#
# This is the dependency on the source code. If the source is missing,
# then it will be fetched from the site using wget.
#
$(DL_DIR)/$(STRACE_SOURCE):
$(WGET) -P $(@D) $(STRACE_SITE)/$(@F) || \
$(WGET) -P $(@D) $(SOURCES_NLO_SITE)/$(@F)
#
# The source code depends on it existing within the download directory.
# This target will be called by the top level Makefile to download the
# source code's archive (.tar.gz, .bz2, etc.)
#
strace-source: $(DL_DIR)/$(STRACE_SOURCE) $(STRACE_PATCHES)
#
# This target unpacks the source code in the build directory.
# If the source archive is not .tar.gz or .tar.bz2, then you will need
# to change the commands here. Patches to the source code are also
# applied in this target as required.
#
# This target also configures the build within the build directory.
# Flags such as LDFLAGS and CPPFLAGS should be passed into configure
# and NOT $(MAKE) below. Passing it to configure causes configure to
# correctly BUILD the Makefile with the right paths, where passing it
# to Make causes it to override the default search paths of the compiler.
#
# If the compilation of the package requires other packages to be staged
# first, then do that first (e.g. "$(MAKE) <bar>-stage <baz>-stage").
#
$(STRACE_BUILD_DIR)/.configured: $(DL_DIR)/$(STRACE_SOURCE) $(STRACE_PATCHES) make/strace.mk
rm -rf $(BUILD_DIR)/$(STRACE_DIR) $(@D)
$(STRACE_UNZIP) $(DL_DIR)/$(STRACE_SOURCE) | tar -C $(BUILD_DIR) -xvf -
if test -n "$(STRACE_PATCHES)" ; \
then cat $(STRACE_PATCHES) | \
$(PATCH) -d $(BUILD_DIR)/$(STRACE_DIR) -p1 ; \
fi
mv $(BUILD_DIR)/$(STRACE_DIR) $(@D)
(cd $(@D); \
$(TARGET_CONFIGURE_OPTS) \
CPPFLAGS="$(STRACE_CPPFLAGS_PRE) $(STAGING_CPPFLAGS) $(STRACE_CPPFLAGS)" \
LDFLAGS="$(STAGING_LDFLAGS) $(STRACE_LDFLAGS)" \
./configure \
--build=$(GNU_HOST_NAME) \
--host=$(GNU_TARGET_NAME) \
--target=$(GNU_TARGET_NAME) \
--prefix=$(TARGET_PREFIX) \
--disable-nls \
--disable-static \
)
touch $@
strace-unpack: $(STRACE_BUILD_DIR)/.configured
#
# This builds the actual binary. You should change the target to refer
# directly to the main binary which is built.
#
$(STRACE_BUILD_DIR)/.built: $(STRACE_BUILD_DIR)/.configured
rm -f $@
$(MAKE) -C $(@D)
touch $@
#
# You should change the dependency to refer directly to the main binary
# which is built.
#
strace: $(STRACE_BUILD_DIR)/.built
#
# If you are building a library, then you need to stage it too.
#
#
# This rule creates a control file for ipkg. It is no longer
# necessary to create a separate control file under sources/strace
#
#
# Writes the ipkg CONTROL/control metadata file, one field per line,
# from the STRACE_* variables defined at the top of this makefile.
#
$(STRACE_IPK_DIR)/CONTROL/control:
@$(INSTALL) -d $(@D)
@rm -f $@
@echo "Package: strace" >>$@
@echo "Architecture: $(TARGET_ARCH)" >>$@
@echo "Priority: $(STRACE_PRIORITY)" >>$@
@echo "Section: $(STRACE_SECTION)" >>$@
@echo "Version: $(STRACE_VERSION)-$(STRACE_IPK_VERSION)" >>$@
@echo "Maintainer: $(STRACE_MAINTAINER)" >>$@
@echo "Source: $(STRACE_SITE)/$(STRACE_SOURCE)" >>$@
@echo "Description: $(STRACE_DESCRIPTION)" >>$@
@echo "Depends: $(STRACE_DEPENDS)" >>$@
@echo "Suggests: $(STRACE_SUGGESTS)" >>$@
@echo "Conflicts: $(STRACE_CONFLICTS)" >>$@
#
# This builds the IPK file.
#
# Binaries should be installed into $(STRACE_IPK_DIR)$(TARGET_PREFIX)/sbin or $(STRACE_IPK_DIR)$(TARGET_PREFIX)/bin
# (use the location in a well-known Linux distro as a guide for choosing sbin or bin).
# Libraries and include files should be installed into $(STRACE_IPK_DIR)$(TARGET_PREFIX)/{lib,include}
# Configuration files should be installed in $(STRACE_IPK_DIR)$(TARGET_PREFIX)/etc/strace/...
# Documentation files should be installed in $(STRACE_IPK_DIR)$(TARGET_PREFIX)/doc/strace/...
# Daemon startup scripts should be installed in $(STRACE_IPK_DIR)$(TARGET_PREFIX)/etc/init.d/S??strace
#
# You may need to patch your application to
|
{
"pile_set_name": "Github"
}
|
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto3";
package google.ads.googleads.v4.resources;
import "google/ads/googleads/v4/common/matching_function.proto";
import "google/ads/googleads/v4/enums/feed_link_status.proto";
import "google/ads/googleads/v4/enums/placeholder_type.proto";
import "google/api/field_behavior.proto";
import "google/api/resource.proto";
import "google/protobuf/wrappers.proto";
import "google/api/annotations.proto";
option csharp_namespace = "Google.Ads.GoogleAds.V4.Resources";
option go_package = "google.golang.org/genproto/googleapis/ads/googleads/v4/resources;resources";
option java_multiple_files = true;
option java_outer_classname = "AdGroupFeedProto";
option java_package = "com.google.ads.googleads.v4.resources";
option objc_class_prefix = "GAA";
option php_namespace = "Google\\Ads\\GoogleAds\\V4\\Resources";
option ruby_package = "Google::Ads::GoogleAds::V4::Resources";
// Proto file describing the AdGroupFeed resource.
// An ad group feed.
message AdGroupFeed {
option (google.api.resource) = {
type: "googleads.googleapis.com/AdGroupFeed"
pattern: "customers/{customer}/adGroupFeeds/{ad_group_feed}"
};
// Immutable. The resource name of the ad group feed.
// Ad group feed resource names have the form:
//
// `customers/{customer_id}/adGroupFeeds/{ad_group_id}~{feed_id}`
string resource_name = 1 [
(google.api.field_behavior) = IMMUTABLE,
(google.api.resource_reference) = {
type: "googleads.googleapis.com/AdGroupFeed"
}
];
// Immutable. The feed being linked to the ad group.
google.protobuf.StringValue feed = 2 [
(google.api.field_behavior) = IMMUTABLE,
(google.api.resource_reference) = {
type: "googleads.googleapis.com/Feed"
}
];
// Immutable. The ad group being linked to the feed.
google.protobuf.StringValue ad_group = 3 [
(google.api.field_behavior) = IMMUTABLE,
(google.api.resource_reference) = {
type: "googleads.googleapis.com/AdGroup"
}
];
// Indicates which placeholder types the feed may populate under the connected
// ad group. Required.
repeated google.ads.googleads.v4.enums.PlaceholderTypeEnum.PlaceholderType placeholder_types = 4;
// Matching function associated with the AdGroupFeed.
// The matching function is used to filter the set of feed items selected.
// Required.
google.ads.googleads.v4.common.MatchingFunction matching_function = 5;
// Output only. Status of the ad group feed.
// This field is read-only.
google.ads.googleads.v4.enums.FeedLinkStatusEnum.FeedLinkStatus status = 6 [(google.api.field_behavior) = OUTPUT_ONLY];
}
|
{
"pile_set_name": "Github"
}
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# WSS4J crypto configuration: use the Merlin provider backed by a JKS
# keystore (keys/bob.jks, key alias "bob").
# NOTE(review): the keystore password is stored in clear text here;
# acceptable only for test fixtures.
org.apache.ws.security.crypto.provider=org.apache.ws.security.components.crypto.Merlin
org.apache.ws.security.crypto.merlin.keystore.type=jks
org.apache.ws.security.crypto.merlin.keystore.password=password
org.apache.ws.security.crypto.merlin.keystore.alias=bob
org.apache.ws.security.crypto.merlin.keystore.file=keys/bob.jks
|
{
"pile_set_name": "Github"
}
|
using System;
using System.Web.Mvc;
using Glimpse.Mvc.Model;
using Glimpse.Test.Common;
using Xunit;
using Xunit.Extensions;
namespace Glimpse.Test.Mvc.Model
{
/// <summary>
/// Unit tests for <see cref="ViewModelSummary"/>: model type capture,
/// view/temp data key exposure and display-mode propagation.
/// </summary>
public class ViewModelSummaryShould
{
[Theory, AutoMock]
public void SetModelType(ViewDataDictionary viewData, TempDataDictionary tempData, string displayMode, Type displayModeType)
{
var sut = new ViewModelSummary(viewData, tempData, typeof(ViewModelSummary), true, displayMode, displayModeType);
// Assert.True instead of Assert.Equal(true, ...) — boolean checks
// should use the dedicated assertion (xUnit analyzer rule xUnit2004).
Assert.True(sut.IsValid);
Assert.Equal(typeof(ViewModelSummary), sut.ModelType);
}
[Theory, AutoMock]
public void ReturnViewDataKeys(TempDataDictionary tempData, string displayMode, Type displayModeType)
{
var viewData = new ViewDataDictionary { { "A", 1 }, { "B", 2 }, { "C", 3 } };
var sut = new ViewModelSummary(viewData, tempData, typeof(ViewModelSummary), true, displayMode, displayModeType);
Assert.Contains("A", sut.ViewDataKeys);
Assert.Contains("B", sut.ViewDataKeys);
Assert.Contains("C", sut.ViewDataKeys);
}
[Theory, AutoMock]
public void ReturnTempDataKeys(ViewDataDictionary viewData, string displayMode, Type displayModeType)
{
var tempData = new TempDataDictionary { { "A", 1 }, { "B", 2 }, { "C", 3 } };
var sut = new ViewModelSummary(viewData, tempData, typeof(ViewModelSummary), true, displayMode, displayModeType);
Assert.Contains("A", sut.TempDataKeys);
Assert.Contains("B", sut.TempDataKeys);
Assert.Contains("C", sut.TempDataKeys);
}
[Theory, AutoMock]
public void SetDisplayMode(ViewDataDictionary viewData, TempDataDictionary tempData, string displayMode, Type displayModeType)
{
var sut = new ViewModelSummary(viewData, tempData, typeof(ViewModelSummary), true, displayMode, displayModeType);
Assert.Equal(displayMode, sut.DisplayModeId);
}
[Theory, AutoMock]
public void SetDisplayModeType(ViewDataDictionary viewData, TempDataDictionary tempData, string displayMode, Type displayModeType)
{
var sut = new ViewModelSummary(viewData, tempData, typeof(ViewModelSummary), true, displayMode, displayModeType);
Assert.Equal(displayModeType, sut.DisplayModeType);
Assert.True(sut.HasDisplayMode);
}
}
}
|
{
"pile_set_name": "Github"
}
|
class Empty(Exception):
    """Signals that data was requested from a collection with no elements."""
|
{
"pile_set_name": "Github"
}
|
/*
* \brief Protective MBR partition table definitions
* \author Josef Soentgen
* \date 2018-05-03
*/
/*
* Copyright (C) 2018 Genode Labs GmbH
*
* This file is part of the Genode OS framework, which is distributed
* under the terms of the GNU Affero General Public License version 3.
*/
#ifndef _PMBR_H_
#define _PMBR_H_
/* Genode includes */
#include <base/fixed_stdint.h>
namespace Protective_mbr
{
	/* Partition type id 0xEE marks the disk as protected for GPT use */
	enum { TYPE_PROTECTIVE = 0xEE, };
	/**
	 * Partition table entry format
	 *
	 * Only the fields needed to describe a protective partition are
	 * named; the CHS address bytes are kept as opaque padding.
	 * Layout is the on-disk 16-byte MBR entry, hence the packing.
	 */
	struct Partition
	{
	Genode::uint8_t type { }; /* partition type id, e.g. TYPE_PROTECTIVE */
	Genode::uint32_t lba { }; /* first sector of the partition */
	Genode::uint32_t sectors { }; /* length of the partition in sectors */
	} __attribute__((packed));
	/**
	 * Master boot record header
	 *
	 * 512-byte sector image: boot code area (unused here), four
	 * partition entries, and the 0xAA55 boot signature.
	 */
	struct Header
	{
	Genode::uint8_t unused[446] { };
	Partition partitions[4] { };
	Genode::uint16_t magic { 0xaa55 }; /* MBR boot signature */
	} __attribute__((packed));
}
#endif /* _PMBR_H_ */
|
{
"pile_set_name": "Github"
}
|
-- Copyright 2020 Stanford University
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
-- Regression test for task inlining: a task marked __demand(__inline)
-- must execute in its caller's context (same Legion unique context id),
-- while a __forbid(__inline) task gets a context of its own.
import "regent"
local c = regentlib.c
-- Bundles a computed value with the unique id of the context the
-- producing task executed in.
struct ret
{
v : int,
id : uint64,
}
__demand(__inline)
task inc1(x : int) : int
return x + 1
end
__demand(__inline)
task dec1(x : int) : ret
return ret { v = x - 1, id = c.legion_context_get_unique_id(__context()) }
end
-- f(x) computes ((x + 5) + 1) - 1 == x + 5 through two inlined tasks.
__demand(__inline)
task f(x : int) : ret
return dec1(inc1(x + 5))
end
-- g(x) computes x + 5 directly, but is forbidden from being inlined.
__forbid(__inline)
task g(x : int) : ret
return ret { v = x + 5, id = c.legion_context_get_unique_id(__context()) }
end
__demand(__inline)
task h()
regentlib.c.printf("called h\n")
return c.legion_context_get_unique_id(__context())
end
task main()
var id_main = c.legion_context_get_unique_id(__context())
var id_h = h()
-- inlined task shares the caller's context
regentlib.assert(id_h == id_main, "test failed")
for i = 0, 10 do
var ret_f, ret_g = f(i), g(i)
-- same value, but only f ran in main's context
regentlib.assert(ret_f.v == ret_g.v, "test failed")
regentlib.assert(id_main == ret_f.id, "test failed")
regentlib.assert(id_main ~= ret_g.id, "test failed")
end
end
regentlib.start(main)
|
{
"pile_set_name": "Github"
}
|
/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package v1beta1
import (
authorizationapi "k8s.io/api/authorization/v1beta1"
)
// LocalSubjectAccessReviewExpansion declares the hand-written Create
// call added on top of the generated localSubjectAccessReviews client.
type LocalSubjectAccessReviewExpansion interface {
Create(sar *authorizationapi.LocalSubjectAccessReview) (result *authorizationapi.LocalSubjectAccessReview, err error)
}
// Create POSTs the given LocalSubjectAccessReview to the
// "localsubjectaccessreviews" resource in the client's namespace and
// decodes the server response into a fresh object.
func (c *localSubjectAccessReviews) Create(sar *authorizationapi.LocalSubjectAccessReview) (result *authorizationapi.LocalSubjectAccessReview, err error) {
result = &authorizationapi.LocalSubjectAccessReview{}
err = c.client.Post().
Namespace(c.ns).
Resource("localsubjectaccessreviews").
Body(sar).
Do().
Into(result)
return
}
|
{
"pile_set_name": "Github"
}
|
<!DOCTYPE HTML>
<!--
Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/
-->
<html class="reftest-wait"><head>
<meta charset="utf-8">
<title>CSS Grid Test: test 027 dynamic remove/insert second item</title>
<link rel="author" title="Mats Palmgren" href="https://bugzilla.mozilla.org/show_bug.cgi?id=1144096">
<link rel="help" href="https://drafts.csswg.org/css-grid/#pagination">
<link rel="match" href="grid-fragmentation-027-ref.html">
<script src="support/dyn.js"></script>
<script>
// Replace the body with the fetched test markup, run the dynamic
// mutation helper (dyn3) on the grid, then remove "reftest-wait" to
// tell the harness the page is ready for comparison.
function runTest(text) {
document.body.innerHTML = text;
dyn3('.grid');
document.documentElement.removeAttribute("class");
}
</script>
</head>
<body onload='dynamicTest("grid-fragmentation-027.html", runTest)'></body>
</html>
|
{
"pile_set_name": "Github"
}
|
/* $Id$
*
* Copyright (c) 2010 Anders Wallin (anders.e.e.wallin "at" gmail.com).
*
* This file is part of OpenCAMlib
* (see https://github.com/aewallin/opencamlib).
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 2.1 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <sstream>
#include <cmath>
// uncomment to disable assert() calls
// #define NDEBUG
#include <cassert>
#include "point.hpp"
#include "ellipseposition.hpp"
#include "ellipse.hpp"
#include "numeric.hpp"
namespace ocl
{
//******** EllipsePosition ********************** */
// Default position: diangle 0, i.e. the direction (1,0).
EllipsePosition::EllipsePosition() {
    diangle = 0.0;
    setD();
}
// Set the position from a "diamond angle" and refresh the cached (s,t)
// unit vector.  Diamond angles encode a direction without trigonometry;
// see the references in setD().
void EllipsePosition::setDiangle(double dia) {
    assert( !std::isnan(dia) );
    diangle = dia;
    setD();
}
// Recompute the unit vector (s,t) from the stored diangle.
void EllipsePosition::setD() {
    // set (s,t) to angle corresponding to diangle
    // see: http://www.freesteel.co.uk/wpblog/2009/06/encoding-2d-angles-without-trigonometry/
    // see: http://www.anderswallin.net/2010/07/radians-vs-diamondangle/
    // return P2( (a < 2 ? 1-a : a-3),
    //           (a < 3 ? ((a > 1) ? 2-a : a) : a-4)
    double d = diangle;
    assert( !std::isnan(d) );
    // wrap d into [0,4]: the diamond-angle parameterization is
    // periodic with period 4 (one full turn)
    while ( d > 4.0 ) // make d a diangle in [0,4]
        d -= 4.0;
    while ( d < 0.0)
        d+=4.0;
    assert( d >= 0.0 && d <= 4.0 ); // now we should be in [0,4]
    // piecewise-linear point on the unit "diamond" |x|+|y| = 1
    Point p( (d < 2 ? 1-d : d-3) ,
             (d < 3 ? ((d > 1) ? 2-d : d) : d-4) );
    // now we have a vector pointing in the right direction
    // but it is not normalized
    p.normalize(); // project the diamond point onto the unit circle
    s = p.x;
    t = p.y;
    assert( this->isValid() );
}
// Sanity check: (s,t) must lie on the unit circle within tolerance.
// On failure the offending position and its residual are printed.
bool EllipsePosition::isValid() const {
    const double residual = square(s) + square(t) - 1.0;
    if ( !isZero_tol( residual ) ) {
        std::cout << " EllipsePosition=" << *this << "\n";
        std::cout << " square(s) + square(t) - 1.0 = " << residual << " !!\n";
        return false;
    }
    return true;
}
// Copy assignment: duplicate (s,t) and the diangle.
EllipsePosition& EllipsePosition::operator=(const EllipsePosition &pos) {
    s = pos.s;
    t = pos.t;
    diangle = pos.diangle;
    return *this;
}
// String representation; delegates to operator<<.
std::string EllipsePosition::str() const {
    std::ostringstream o;
    o << *this;
    return o.str();
}
// Stream a position in the form "(s ,t)".
std::ostream& operator<<(std::ostream &stream, EllipsePosition pos) {
    stream << "("<< pos.s <<" ," << pos.t << ")";
    return stream;
}
}//end namespace
//end file ellipseposition.cpp
|
{
"pile_set_name": "Github"
}
|
! Front-end test: exercises the three accepted spellings of the WHERE
! construct's else/end keywords — "else where"/"end where",
! "elsewhere"/"end where", and "elsewhere"/"endwhere".  All three
! blocks are semantically identical; the point is that they all parse.
! NOTE(review): 'a' is never initialized, so this looks like a
! compile-only test rather than a runtime test.
program where_01
implicit none
real :: a(10), b(10)
where (a >= 0)
b = 1
else where
b = 0
end where
where (a >= 0)
b = 1
elsewhere
b = 0
end where
where (a >= 0)
b = 1
elsewhere
b = 0
endwhere
end program
|
{
"pile_set_name": "Github"
}
|
//===-- ARMBuildAttributes.h - ARM Build Attributes -------------*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file contains enumerations and support routines for ARM build attributes
// as defined in ARM ABI addenda document (ABI release 2.08).
//
// ELF for the ARM Architecture r2.09 - November 30, 2012
//
// http://infocenter.arm.com/help/topic/com.arm.doc.ihi0044e/IHI0044E_aaelf.pdf
//
//===----------------------------------------------------------------------===//
#ifndef LLVM_SUPPORT_ARMBUILDATTRIBUTES_H
#define LLVM_SUPPORT_ARMBUILDATTRIBUTES_H
namespace llvm {
class StringRef;
namespace ARMBuildAttrs {
enum SpecialAttr {
// This is for the .cpu asm attr. It translates into one or more
// AttrType (below) entries in the .ARM.attributes section in the ELF.
SEL_CPU
};
enum AttrType {
// Rest correspond to ELF/.ARM.attributes
File = 1,
CPU_raw_name = 4,
CPU_name = 5,
CPU_arch = 6,
CPU_arch_profile = 7,
ARM_ISA_use = 8,
THUMB_ISA_use = 9,
FP_arch = 10,
WMMX_arch = 11,
Advanced_SIMD_arch = 12,
PCS_config = 13,
ABI_PCS_R9_use = 14,
ABI_PCS_RW_data = 15,
ABI_PCS_RO_data = 16,
ABI_PCS_GOT_use = 17,
ABI_PCS_wchar_t = 18,
ABI_FP_rounding = 19,
ABI_FP_denormal = 20,
ABI_FP_exceptions = 21,
ABI_FP_user_exceptions = 22,
ABI_FP_number_model = 23,
ABI_align_needed = 24,
ABI_align_preserved = 25,
ABI_enum_size = 26,
ABI_HardFP_use = 27,
ABI_VFP_args = 28,
ABI_WMMX_args = 29,
ABI_optimization_goals = 30,
ABI_FP_optimization_goals = 31,
compatibility = 32,
CPU_unaligned_access = 34,
FP_HP_extension = 36,
ABI_FP_16bit_format = 38,
MPextension_use = 42, // recoded from 70 (ABI r2.08)
DIV_use = 44,
also_compatible_with = 65,
conformance = 67,
Virtualization_use = 68,
/// Legacy Tags
Section = 2, // deprecated (ABI r2.09)
Symbol = 3, // deprecated (ABI r2.09)
ABI_align8_needed = 24, // renamed to ABI_align_needed (ABI r2.09)
ABI_align8_preserved = 25, // renamed to ABI_align_preserved (ABI r2.09)
nodefaults = 64, // deprecated (ABI r2.09)
T2EE_use = 66, // deprecated (ABI r2.09)
MPextension_use_old = 70 // recoded to MPextension_use (ABI r2.08)
};
StringRef AttrTypeAsString(unsigned Attr, bool HasTagPrefix = true);
StringRef AttrTypeAsString(AttrType Attr, bool HasTagPrefix = true);
int AttrTypeFromString(StringRef Tag);
// Magic numbers for .ARM.attributes
enum AttrMagic {
Format_Version = 0x41
};
// Legal Values for CPU_arch, (=6), uleb128
enum CPUArch {
Pre_v4 = 0,
v4 = 1, // e.g. SA110
v4T = 2, // e.g. ARM7TDMI
v5T = 3, // e.g. ARM9TDMI
v5TE = 4, // e.g. ARM946E_S
v5TEJ = 5, // e.g. ARM926EJ_S
v6 = 6, // e.g. ARM1136J_S
v6KZ = 7, // e.g. ARM1176JZ_S
v6T2 = 8, // e.g. ARM1156T2F_S
v6K = 9, // e.g. ARM1136J_S
v7 = 10, // e.g. Cortex A8, Cortex M3
v6_M = 11, // e.g. Cortex M1
v6S_M = 12, // v6_M with the System extensions
v7E_M = 13, // v7_M with DSP extensions
v8 = 14 // v8, AArch32
};
enum CPUArchProfile { // (=7), uleb128
Not_Applicable = 0, // pre v7, or cross-profile code
ApplicationProfile = (0x41), // 'A' (e.g. for Cortex A8)
RealTimeProfile = (0x52), // 'R' (e.g. for Cortex R4)
MicroControllerProfile = (0x4D), // 'M' (e.g. for Cortex M3)
SystemProfile = (0x53) // 'S' Application or real-time profile
};
// The following have a lot of common use cases
enum {
Not_Allowed = 0,
Allowed = 1,
// Tag_ARM_ISA_use (=8), uleb128
// Tag_THUMB_ISA_use, (=9), uleb128
AllowThumb32 = 2, // 32-bit Thumb (implies 16-bit instructions)
// Tag_FP_arch (=10), uleb128 (formerly Tag_VFP_arch = 10)
AllowFPv2 = 2, // v2 FP ISA permitted (implies use of the v1 FP ISA)
AllowFPv3A = 3, // v3 FP ISA permitted (implies use of the v2 FP ISA)
AllowFPv3B = 4, // v3 FP ISA permitted, but only D0-D15, S0-S31
AllowFPv4A = 5, // v4 FP ISA permitted (implies use of v3 FP ISA)
AllowFPv4B = 6, // v4 FP ISA was permitted, but only D0-D15, S0-S31
AllowFPARMv8A = 7, // Use of the ARM v8-A FP ISA was permitted
AllowFPARMv8B = 8, // Use of the ARM v8-A FP ISA was permitted, but only
// D0-D15, S0-S31
// Tag_WMMX_arch, (=11), uleb128
AllowWMMXv1 = 1, // The user permitted this entity to use WMMX v1
AllowWMMXv2 = 2, // The user permitted this entity to use WMMX v2
// Tag_Advanced_SIMD_arch, (=12), uleb128
AllowNeon = 1, // SIMDv1 was permitted
AllowNeon2 = 2, // SIMDv2 was permitted (Half-precision FP, MAC operations)
AllowNeonARMv8 = 3, // ARM v8-A SIMD was permitted
// Tag_ABI_PCS_R9_use, (=14), uleb128
R9IsGPR = 0, // R9 used as v6 (just another callee-saved register)
R9IsSB = 1, // R9 used as a global static base register
R9IsTLSPo
|
{
"pile_set_name": "Github"
}
|
<?xml version="1.0" encoding="ISO-8859-1"?>
<!--
-
- This file is part of the OpenLink Software Virtuoso Open-Source (VOS)
- project.
-
- Copyright (C) 1998-2020 OpenLink Software
-
- This project is free software; you can redistribute it and/or modify it
- under the terms of the GNU General Public License as published by the
- Free Software Foundation; only version 2 of the License, dated June 1991.
-
- This program is distributed in the hope that it will be useful, but
- WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- General Public License for more details.
-
- You should have received a copy of the GNU General Public License along
- with this program; if not, write to the Free Software Foundation, Inc.,
- 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
-
-
-->
<refentry id="fn_udt_defines_field">
<refmeta>
<refentrytitle>udt_defines_field</refentrytitle>
<refmiscinfo>type</refmiscinfo>
</refmeta>
<refnamediv>
<refname>udt_defines_field</refname>
<refpurpose>Determines whether a user defined type contains a specified member.</refpurpose>
</refnamediv>
<refsynopsisdiv>
<funcsynopsis id="fsyn_udt_defines_field">
<funcprototype id="fproto_udt_defines_field">
<funcdef>integer <function>udt_defines_field</function></funcdef>
<paramdef>in <parameter>udt</parameter> any</paramdef>
<paramdef>in <parameter>member_name</parameter> varchar</paramdef>
</funcprototype>
</funcsynopsis>
</refsynopsisdiv>
<refsect1 id="desc_udt_defines_field">
<title>Description</title>
<para>This function is used to determine whether the supplied member_name
is a member contained by the supplied udt.</para>
</refsect1>
<refsect1 id="params_udt_defines_field">
<title>Parameters</title>
<refsect2><title>udt</title>
<para>A user defined type name as varchar or type instance.</para>
</refsect2>
<refsect2><title>member_name</title>
<para>The requested member name.</para>
</refsect2>
</refsect1>
<refsect1 id="ret_udt_defines_field"><title>Return Types</title>
<para>This function returns either 1 (true) or 0 (false). 1 (true) is
returned if the udt contains a member whose name is equal to the
value of member_name, or 0 otherwise.</para>
</refsect1>
<!--
<refsect1 id="errors_udt_defines_field">
<title>Errors</title>
<para>This function can generate the following errors:</para>
<errorcode></errorcode>
</refsect1>
-->
<refsect1 id="examples_udt_defines_field">
<title>Examples</title>
<example id="ex_udt_defines_field"><title>Simple Use</title>
<screen><![CDATA[
select udt_defines_field (new SER_UDT(), 'A');
]]></screen>
<para>returns 1</para>
<screen><![CDATA[
select udt_defines_field (new SER_UDT_SUB(), 'A');
]]></screen>
<para>returns 1</para>
<screen><![CDATA[
select udt_defines_field (new SER_UDT(), 'B');
]]></screen>
<para>returns 0;</para>
</example>
</refsect1>
<refsect1 id="seealso_udt_defines_field">
<title>See Also</title>
<para><link linkend="fn_udt_get"><function>udt_get()</function></link></para>
<para><link linkend="fn_udt_implements_method"><function>udt_implements_method()</function></link></para>
<para><link linkend="fn_udt_instance_of"><function>udt_instance_of()</function></link></para>
<para><link linkend="fn_udt_set"><function>udt_set()</function></link></para>
</refsect1>
</refentry>
|
{
"pile_set_name": "Github"
}
|
/**
* Mupen64 - tlb.c
* Copyright (C) 2002 Hacktarux
*
* Mupen64 homepage: http://mupen64.emulation64.com
* email address: hacktarux@yahoo.fr
*
* If you want to contribute to the project please contact
* me first (maybe someone is already making what you are
* planning to do).
*
*
* This program is free software; you can redistribute it and/
* or modify it under the terms of the GNU General Public Li-
* cence as published by the Free Software Foundation; either
* version 2 of the Licence, or any later version.
*
* This program is distributed in the hope that it will be use-
* ful, but WITHOUT ANY WARRANTY; without even the implied war-
* ranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU General Public Licence for more details.
*
* You should have received a copy of the GNU General Public
* Licence along with this program; if not, write to the Free
* Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139,
* USA.
*
**/
#include "r4300.h"
#include "macros.h"
#include "ops.h"
#include "recomph.h"
#include "interupt.h"
#include "Invalid_Code.h"
#include "../main/md5.h"
#include "../gc_memory/memory.h"
#include "../gc_memory/TLB-Cache.h"
#include "ARAM-blocks.h"
#include <zlib.h>
uLong ZEXPORT adler32(uLong adler, const Bytef *buf, uInt len);
// TLBR: emulate the MIPS COP0 "TLB Read" instruction — read the TLB
// entry selected by the low 5 bits of the Index register back into the
// PageMask/EntryHi/EntryLo0/EntryLo1 registers, then advance the PC.
// NOTE(review): the shift amounts below mirror the R4300 COP0 register
// packing (mask<<13, vpn2<<13|asid, pfn<<6|c<<3|d<<2|v<<1|g) — verify
// against the R4300 manual if changing.
void TLBR()
{
   int index;
   index = Index & 0x1F;
   PageMask = tlb_e[index].mask << 13;
   EntryHi = ((tlb_e[index].vpn2 << 13) | tlb_e[index].asid);
   EntryLo0 = (tlb_e[index].pfn_even << 6) | (tlb_e[index].c_even << 3)
     | (tlb_e[index].d_even << 2) | (tlb_e[index].v_even << 1)
       | tlb_e[index].g;
   EntryLo1 = (tlb_e[index].pfn_odd << 6) | (tlb_e[index].c_odd << 3)
     | (tlb_e[index].d_odd << 2) | (tlb_e[index].v_odd << 1)
       | tlb_e[index].g;
   PC++;
}
void TLBWI()
{
unsigned int i;
PowerPC_block* temp_block;
if (tlb_e[Index&0x3F].v_even)
{
for (i=tlb_e[Index&0x3F].start_even>>12; i<=tlb_e[Index&0x3F].end_even>>12; i++)
{
temp_block = blocks_get(i);
#ifdef USE_TLB_CACHE
unsigned long paddr = TLBCache_get_r(i);
if(!invalid_code_get(i) && (invalid_code_get(paddr>>12) ||
invalid_code_get((paddr>>12)+0x20000)))
#else
if(!invalid_code_get(i) &&(invalid_code_get(tlb_LUT_r[i]>>12) ||
invalid_code_get((tlb_LUT_r[i]>>12)+0x20000)))
#endif
invalid_code_set(i, 1);
if (!invalid_code_get(i))
{
/*int j;
md5_state_t state;
md5_byte_t digest[16];
md5_init(&state);
md5_append(&state,
(const md5_byte_t*)&rdram[(tlb_LUT_r[i]&0x7FF000)/4],
0x1000);
md5_finish(&state, digest);
for (j=0; j<16; j++) blocks[i]->md5[j] = digest[j];*/
#ifdef USE_TLB_CACHE
temp_block->adler32 = adler32(0, (const Bytef*)&rdram[(paddr&0x7FF000)/4], 0x1000);
#else
temp_block->adler32 = adler32(0, (const Bytef*)&rdram[(tlb_LUT_r[i]&0x7FF000)/4], 0x1000);
#endif
invalid_code_set(i, 1);
}
else if (temp_block)
{
/*int j;
for (j=0; j<16; j++) blocks[i]->md5[j] = 0;*/
temp_block->adler32 = 0;
}
#ifdef USE_TLB_CACHE
TLBCache_set_r(i, 0);
#else
tlb_LUT_r[i] = 0;
#endif
}
if (tlb_e[Index&0x3F].d_even)
for (i=tlb_e[Index&0x3F].start_even>>12; i<=tlb_e[Index&0x3F].end_even>>12; i++)
#ifdef USE_TLB_CACHE
TLBCache_set_w(i, 0);
#else
tlb_LUT_w[i] = 0;
#endif
}
if (tlb_e[Index&0x3F].v_odd)
{
for (i=tlb_e[Index&0x3F].start_odd>>12; i<=tlb_e[Index&0x3F].end_odd>>12; i++)
{
temp_block = blocks_get(i);
#ifdef USE_TLB_CACHE
unsigned long paddr = TLBCache_get_r(i);
if(!invalid_code_get(i) && (invalid_code_get(paddr>>12) ||
invalid_code_get((paddr>>12)+0x20000)))
#else
if(!invalid_code_get(i) &&(invalid_code_get(tlb_LUT_r[i]>>12) ||
invalid_code_get((tlb_LUT_r[i]>>12)+0x20000)))
#endif
invalid_code_set(i, 1);
if (!invalid_code_get(i))
{
/*int j;
md5_state_t state;
md5_byte_t digest[16];
md5_init(&state);
md5_append(&state,
(const md5_byte_t*)&rdram[(tlb_LUT_r[i]&0x7FF000)/4],
0x1000);
md5_finish(&state, digest);
for (j=0; j<16; j++) blocks[i]->md5[j] = digest[j];*/
#ifdef USE_TLB_CACHE
temp_block->adler32 = adler32(0, (const Bytef*)&rdram[(paddr&0x7FF000)/4], 0x1000);
#else
temp_block->adler32 = adler32(0, (const Bytef*)&rdram[(tlb_LUT_r[i]&0x7FF000)/4], 0x1000);
#endif
invalid_code_set(i, 1);
}
else if (temp_block)
{
/*int j;
for (j=0; j<16; j++) blocks[i]->md5[j] = 0;*/
temp_block->adler32 = 0;
}
#ifdef USE_TLB_CACHE
TLBCache_set_r(i, 0);
#else
tlb_LUT_r[i] = 0;
#endif
}
if (tlb_e[Index&0x3F].d_odd)
for (i=tlb_e[Index&0x3F].start_odd>>12; i<=tlb_e[Index&0x3F
|
{
"pile_set_name": "Github"
}
|
package org.dcache.services.info.gathers;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Required;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import dmg.cells.nucleus.CellLifeCycleAware;
import dmg.cells.nucleus.EnvironmentAware;
import dmg.cells.nucleus.UOID;
import org.dcache.util.NDC;
import org.dcache.services.info.base.StateExhibitor;
import org.dcache.services.info.base.StateUpdateManager;
import static com.google.common.base.Preconditions.checkState;
/**
* This thread is responsible for scheduling various data-gathering activity.
* Multiple DataGatheringActivity instances can be registered, each will operate
* independently. The frequency at which they trigger, or even whether they are
* periodic, is completely under the control of the DGA.
* <p>
* These DataGatheringActivities can (in principle) do anything when
* triggered, but will typically send one or more messages to dCache.
*
* @author Paul Millar <paul.millar@desy.de>
*/
public class DataGatheringScheduler implements Runnable, EnvironmentAware, CellLifeCycleAware
{
private static final long FIVE_MINUTES = 5*60*1000;
private static final Logger LOGGER_SCHED = LoggerFactory.getLogger(DataGatheringScheduler.class);
private static final Logger LOGGER_RA = LoggerFactory.getLogger(RegisteredActivity.class);
private boolean _timeToQuit;
private final List<RegisteredActivity> _activity = new ArrayList<>();
private Map<String,Object> _environment;
private Iterable<DgaFactoryService> _factories;
private StateUpdateManager _sum;
private StateExhibitor _exhibitor;
private MessageSender _sender;
private MessageMetadataRepository<UOID> _repository;
private Thread _thread;
/**
* Class holding a periodically repeated DataGatheringActivity
* @author Paul Millar <paul.millar@desy.de>
*/
private static class RegisteredActivity
{
/** Min. delay (in ms). We prevent Schedulables from triggering more frequently than this */
private static final long MINIMUM_DGA_DELAY = 50;
private final Schedulable _dga;
/** The delay until this DataGatheringActivity should be next triggered */
private Date _nextTriggered;
/** Whether we should include this activity when scheduling next activity */
private boolean _enabled = true;
/**
 * Create a new RegisteredActivity wrapping the given
 * DataGatheringActivity and compute its first trigger time (the DGA
 * itself decides when it wants to run; see updateNextTrigger).
 * @param dga the DataGatheringActivity to be triggered
 */
RegisteredActivity(Schedulable dga)
{
_dga = dga;
updateNextTrigger();
}
/**
 * Ask the DGA when it next wants to be triggered and store that time.
 * Guards against a null reply (falls back to five minutes from now)
 * and against re-triggering sooner than MINIMUM_DGA_DELAY, so a
 * misbehaving DGA cannot flood the system with queries.
 */
private void updateNextTrigger()
{
Date nextTrigger = _dga.shouldNextBeTriggered();
if (nextTrigger == null) {
LOGGER_RA.error("registered dga returned null Date");
nextTrigger = new Date(System.currentTimeMillis() + FIVE_MINUTES);
} else {
// Safety! Check we wont trigger too quickly
if (nextTrigger.getTime() - System.currentTimeMillis() < MINIMUM_DGA_DELAY) {
LOGGER_RA.warn("DGA {} triggering too quickly ({}ms): engaging safety.",
_dga, nextTrigger.getTime() - System.currentTimeMillis());
nextTrigger = new Date (System.currentTimeMillis() + MINIMUM_DGA_DELAY);
}
}
_nextTriggered = nextTrigger;
}
/**
* Update this PeriodicActivity so its trigger time is <i>now</i>.
*/
public void shouldTriggerNow()
{
_nextTriggered = new Date();
}
/**
 * Check the status of this activity. If the activity is enabled and
 * its trigger time has elapsed, trigger the DataGatheringActivity
 * (under an NDC entry naming it) and compute the next trigger time.
 * @param now the instant to compare the trigger time against
 * @return true if the DataGatheringActivity was triggered.
 */
boolean checkAndTrigger(Date now)
{
if (!_enabled) {
return false;
}
if (now.before(_nextTriggered)) {
return false;
}
NDC.push(_dga.toString());
try {
_dga.trigger();
} finally {
// Always pop the NDC and reschedule, even if trigger() throws;
// the original code leaked the NDC entry on exception.
NDC.pop();
updateNextTrigger();
}
return true;
}
/**
* Calculate the duration until the event has triggered.
* @return duration, in milliseconds, until event or zero if it
* should have been triggered already.
*/
long getDelay()
{
long delay = _nextTriggered.getTime() - System.currentTimeMillis();
return delay > 0 ? delay : 0;
}
/**
* Return the time this will be next triggered.
* @return
*/
long getNextTriggered()
{
return _nextTriggered.getTime();
}
boolean isEnabled()
{
return _enabled;
}
void disable()
{
_enabled = false;
}
/**
* Enable a periodic activity.
*/
void enable()
{
if (!_enabled) {
_enabled = true;
updateNextTrigger();
}
}
/**
 * A human-understandable name for this activity.
 * @return the underlying DGA's string representation.
 */
@Override
public String toString()
{
return _dga.toString();
}
/**
 * Render current status into a human-understandable form.
 * @return single-line String describing current status.
 */
public String getStatus()
{
    String state = _enabled ? "enabled" : "disabled";
    StringBuilder status = new StringBuilder(toString());
    status.append(" [").append(state);
    if (_enabled) {
        // Show the remaining delay with one decimal place, in seconds.
        status.append(String.format(", next %1$.1fs", getDelay() / 1000.0));
    }
    status.append("]");
    return status.toString();
}
}
/**
 * Start the scheduler.  Wraps every DGA produced by the configured
 * factories in a RegisteredActivity (propagating the environment to
 * factories that ask for it) and launches the scheduler thread.
 * Fails via checkState if called more than once.
 */
@Override
public synchronized void afterStart()
{
checkState(_thread == null, "DataGatheringScheduler already started");
for (DgaFactoryService factory : _factories) {
// Give environment-aware factories their context before asking for DGAs.
if (factory instanceof EnvironmentAware) {
((EnvironmentAware)factory).setEnvironment(_environment);
}
for (Schedulable dga : factory.createDgas(_exhibitor, _sender,
_sum, _repository)) {
_activity.add(new RegisteredActivity(dga));
}
}
_thread = new Thread(this);
_thread.setName("DGA-Scheduler");
_thread.start();
}
/** Store the environment later handed to EnvironmentAware DGA factories. */
@Override
public void setEnvironment(Map<String,Object> environment)
{
_environment = environment;
}
/** The factories that supply the DGAs this scheduler runs. */
@Required
public void setDgaFactories(Iterable<DgaFactoryService> factories)
{
_factories = factories;
}
/** The StateUpdateManager passed to each factory's createDgas call. */
@Required
public void setStateUpdateManager(StateUpdateManager sum)
{
_sum = sum;
}
/** The StateExhibitor passed to each factory's createDgas call. */
@Required
public void setStateExhibitor(StateExhibitor exhibitor)
{
_exhibitor = exhibitor;
}
/** The MessageSender passed to each factory's createDgas call. */
@Required
public void setMessageSender(MessageSender sender)
{
_sender = sender;
}
/** The message-metadata repository passed to each factory's createDgas call. */
@Required
public void setMessageMetadataRepository(MessageMetadataRepository<UOID> repository)
{
_repository = repository;
}
/**
* Main loop for this thread triggering DataGatheringActivity.
*/
|
{
"pile_set_name": "Github"
}
|
{
"@context": "https://linkedsoftwaredependencies.org/bundles/npm/@comunica/actor-rdf-parse-html-script/^1.0.0/components/context.jsonld",
"@id": "npmd:@comunica/actor-rdf-parse-html-script",
"@type": "Module",
"requireName": "@comunica/actor-rdf-parse-html-script",
"import": [
"files-carphs:components/Actor/RdfParse/HtmlScript.jsonld"
]
}
|
{
"pile_set_name": "Github"
}
|
/*
[auto_generated]
boost/numeric/odeint/integrate/detail/integrate_n_steps.hpp
[begin_description]
integrate steps implementation
[end_description]
Copyright 2012-2015 Mario Mulansky
Copyright 2012 Christoph Koke
Copyright 2012 Karsten Ahnert
Distributed under the Boost Software License, Version 1.0.
(See accompanying file LICENSE_1_0.txt or
copy at http://www.boost.org/LICENSE_1_0.txt)
*/
#ifndef BOOST_NUMERIC_ODEINT_INTEGRATE_DETAIL_INTEGRATE_N_STEPS_HPP_INCLUDED
#define BOOST_NUMERIC_ODEINT_INTEGRATE_DETAIL_INTEGRATE_N_STEPS_HPP_INCLUDED
#include <boost/numeric/odeint/util/unwrap_reference.hpp>
#include <boost/numeric/odeint/stepper/stepper_categories.hpp>
#include <boost/numeric/odeint/integrate/detail/integrate_adaptive.hpp>
#include <boost/numeric/odeint/util/unit_helper.hpp>
#include <boost/numeric/odeint/util/detail/less_with_sign.hpp>
namespace boost {
namespace numeric {
namespace odeint {
namespace detail {
// forward declaration
template< class Stepper , class System , class State , class Time , class Observer >
size_t integrate_adaptive_checked(
Stepper stepper , System system , State &start_state ,
Time &start_time , Time end_time , Time &dt ,
Observer observer, controlled_stepper_tag
);
/* basic version */
/*
 * Perform exactly num_of_steps steps of fixed size dt with a basic
 * (non-controlled) stepper.  The observer is called with the state
 * before each step and once more after the final step.
 * Returns the time reached: start_time + num_of_steps * dt.
 */
template< class Stepper , class System , class State , class Time , class Observer>
Time integrate_n_steps(
Stepper stepper , System system , State &start_state ,
Time start_time , Time dt , size_t num_of_steps ,
Observer observer , stepper_tag )
{
// unwrap boost::reference_wrapper, if any, to get the real objects
typename odeint::unwrap_reference< Observer >::type &obs = observer;
typename odeint::unwrap_reference< Stepper >::type &st = stepper;
Time time = start_time;
for( size_t step = 0; step < num_of_steps ; ++step )
{
obs( start_state , time );
st.do_step( system , start_state , time , dt );
// direct computation of the time avoids error propagation happening when using time += dt
// we need clumsy type analysis to get boost units working here
time = start_time + static_cast< typename unit_value_type<Time>::type >( step+1 ) * dt;
}
obs( start_state , time );
return time;
}
/* controlled version */
/*
 * Perform num_of_steps observer intervals of length dt with a controlled
 * stepper: each interval is integrated adaptively (dt is only the initial
 * internal step size and may be adjusted in place).  The observer sees the
 * state at every interval boundary, including start and end.
 */
template< class Stepper , class System , class State , class Time , class Observer >
Time integrate_n_steps(
Stepper stepper , System system , State &start_state ,
Time start_time , Time dt , size_t num_of_steps ,
Observer observer , controlled_stepper_tag )
{
typename odeint::unwrap_reference< Observer >::type &obs = observer;
Time time = start_time;
Time time_step = dt;
for( size_t step = 0; step < num_of_steps ; ++step )
{
obs( start_state , time );
// NOTE(review): the comment below mentions integrate_adaptive_checked,
// but the code calls detail::integrate_adaptive — confirm which is intended.
// integrate_adaptive_checked uses the given checker to throw if an overflow occurs
detail::integrate_adaptive(stepper, system, start_state, time, static_cast<Time>(time + time_step), dt,
null_observer(), controlled_stepper_tag());
// direct computation of the time avoids error propagation happening when using time += dt
// we need clumsy type analysis to get boost units working here
time = start_time + static_cast< typename unit_value_type<Time>::type >(step+1) * time_step;
}
obs( start_state , time );
return time;
}
/* dense output version */
/*
 * Perform num_of_steps observer intervals of length dt with a dense-output
 * stepper.  The stepper advances with its own internal (possibly larger)
 * steps; observation points are interpolated via calc_state, so the
 * observer sees the state at every multiple of dt up to the end time.
 */
template< class Stepper , class System , class State , class Time , class Observer >
Time integrate_n_steps(
Stepper stepper , System system , State &start_state ,
Time start_time , Time dt , size_t num_of_steps ,
Observer observer , dense_output_stepper_tag )
{
typename odeint::unwrap_reference< Observer >::type &obs = observer;
typename odeint::unwrap_reference< Stepper >::type &st = stepper;
Time time = start_time;
const Time end_time = start_time + static_cast< typename unit_value_type<Time>::type >(num_of_steps) * dt;
st.initialize( start_state , time , dt );
size_t step = 0;
while( step < num_of_steps )
{
// interpolate and observe at every dt-multiple already covered by the
// stepper's current internal step
while( less_with_sign( time , st.current_time() , st.current_time_step() ) )
{
st.calc_state( time , start_state );
obs( start_state , time );
++step;
// direct computation of the time avoids error propagation happening when using time += dt
// we need clumsy type analysis to get boost units working here
time = start_time + static_cast< typename unit_value_type<Time>::type >(step) * dt;
}
// we have not reached the end, do another real step
if( less_with_sign( static_cast<Time>(st.current_time()+st.current_time_step()) ,
end_time ,
st.current_time_step() ) )
{
st.do_step( system );
}
else if( less_with_sign( st.current_time() , end_time , st.current_time_step() ) )
{ // do the last step ending exactly on the end point
st.initialize( st.current_state() , st.current_time() , static_cast<Time>(end_time - st.current_time()) );
st.do_step( system );
}
}
// make sure we really end exactly where we should end
while( st.current_time() < end_time )
{
if( less_with_sign( end_time ,
static_cast<Time>(st.current_time()+st.current_time_step()) ,
st.current_time_step() ) )
st.initialize( st.current_state() , st.current_time() , static_cast<Time>(end_time - st.current_time()) );
st.do_step( system );
}
// observation at final point
obs( st.current_state() , end_time );
return time;
}
}
}
}
}
#endif /* BOOST_NUMERIC_ODEINT_INTEGRATE_DETAIL_INTEGRATE_N_STEPS_HPP_INCLUDED */
|
{
"pile_set_name": "Github"
}
|
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://bitbucket.org/ned/coveragepy/src/default/NOTICE.txt
# Makefile for utility work on coverage.py.
default:
	@echo "* No default action *"
# Remove every build artifact, byte-compiled file, and coverage residue.
clean:
	-rm -f *.pyd */*.pyd
	-rm -f *.so */*.so
	-PYTHONPATH=. python tests/test_farm.py clean
	-rm -rf build coverage.egg-info dist htmlcov
	-rm -f *.pyc */*.pyc */*/*.pyc */*/*/*.pyc */*/*/*/*.pyc */*/*/*/*/*.pyc
	-rm -f *.pyo */*.pyo */*/*.pyo */*/*/*.pyo */*/*/*/*.pyo */*/*/*/*/*.pyo
	-rm -f *.bak */*.bak */*/*.bak */*/*/*.bak */*/*/*/*.bak */*/*/*/*/*.bak
	-rm -f *$$py.class */*$$py.class */*/*$$py.class */*/*/*$$py.class */*/*/*/*$$py.class */*/*/*/*/*$$py.class
	-rm -rf __pycache__ */__pycache__ */*/__pycache__ */*/*/__pycache__ */*/*/*/__pycache__ */*/*/*/*/__pycache__
	-rm -f coverage/*,cover
	-rm -f MANIFEST
	-rm -f .coverage .coverage.* coverage.xml .metacov* .noseids
	-rm -f tests/zipmods.zip
	-rm -rf tests/eggsrc/build tests/eggsrc/dist tests/eggsrc/*.egg-info
	-rm -f setuptools-*.egg distribute-*.egg distribute-*.tar.gz
	-rm -rf doc/_build doc/_spell
# Like clean, but also drop the tox environments.
sterile: clean
	-rm -rf .tox*
# Static analysis targets.
LINTABLE = coverage igor.py setup.py tests ci/*.py
lint:
	-pylint $(LINTABLE)
	python -m tabnanny $(LINTABLE)
	python igor.py check_eol
spell:
	-pylint --disable=all --enable=spelling $(LINTABLE)
pep8:
	pep8 --filename=*.py --repeat $(LINTABLE)
test:
	tox -e py27,py34 $(ARGS)
# Measure coverage of coverage.py itself ("meta-coverage").
metacov:
	COVERAGE_COVERAGE=yes tox $(ARGS)
metahtml:
	python igor.py combine_html
# Kitting
kit:
	python setup.py sdist --formats=gztar,zip
wheel:
	tox -c tox_wheels.ini $(ARGS)
kit_upload:
	twine upload dist/*
kit_local:
	cp -v dist/* `awk -F "=" '/find-links/ {print $$2}' ~/.pip/pip.conf`
	# pip caches wheels of things it has installed. Clean them out so we
	# don't go crazy trying to figure out why our new code isn't installing.
	find ~/Library/Caches/pip/wheels -name 'coverage-*' -delete
download_appveyor:
	python ci/download_appveyor.py nedbat/coveragepy
pypi:
	python setup.py register
build_ext:
	python setup.py build_ext
install:
	python setup.py install
uninstall:
	-rm -rf $(PYHOME)/lib/site-packages/coverage*
	-rm -rf $(PYHOME)/scripts/coverage*
# Documentation
SPHINXBUILD = sphinx-build
SPHINXOPTS = -a -E doc
WEBHOME = ~/web/stellated/
WEBSAMPLE = $(WEBHOME)/files/sample_coverage_html
WEBSAMPLEBETA = $(WEBHOME)/files/sample_coverage_html_beta
docreqs:
	pip install -r doc/requirements.pip
dochtml:
	$(SPHINXBUILD) -b html $(SPHINXOPTS) doc/_build/html
	@echo
	@echo "Build finished. The HTML pages are in doc/_build/html."
docspell:
	$(SPHINXBUILD) -b spelling $(SPHINXOPTS) doc/_spell
# Publish the sample HTML report to the website.
publish:
	rm -f $(WEBSAMPLE)/*.*
	mkdir -p $(WEBSAMPLE)
	cp doc/sample_html/*.* $(WEBSAMPLE)
publishbeta:
	rm -f $(WEBSAMPLEBETA)/*.*
	mkdir -p $(WEBSAMPLEBETA)
	cp doc/sample_html_beta/*.* $(WEBSAMPLEBETA)
|
{
"pile_set_name": "Github"
}
|
using CubeWorld.Tiles;
using CubeWorld.Utils;
using CubeWorld.Serialization;
namespace CubeWorld.Tiles.Rules
{
    /// <summary>
    /// Rule condition that passes when at least <c>minValue</c> of the tiles
    /// at Manhattan distance 1 from the tested position have the given type.
    /// (Class name keeps the original "Amout" spelling for compatibility.)
    /// </summary>
    public class TileRuleConditionNearTypeAmout : TileRuleCondition
    {
        public int minValue;
        public byte tileType;

        public TileRuleConditionNearTypeAmout()
        {
        }

        public TileRuleConditionNearTypeAmout(int minValue, byte tileType)
        {
            this.minValue = minValue;
            this.tileType = tileType;
        }

        public override bool Validate(TileManager tileManager, Tile tile, TilePosition pos)
        {
            tileManager.world.stats.checkedConditions++;

            // Count neighbours of the requested type among the tiles one
            // Manhattan step away, skipping positions outside the world.
            int matches = 0;
            foreach (TilePosition offset in Manhattan.GetTilesAtDistance(1))
            {
                TilePosition neighbour = pos + offset;
                if (tileManager.IsValidTile(neighbour) && tileManager.GetTileType(neighbour) == tileType)
                    matches++;
            }
            return matches >= minValue;
        }

        public override void Serialize(Serializer serializer)
        {
            base.Serialize(serializer);
            serializer.Serialize(ref minValue, "minValue");
            serializer.Serialize(ref tileType, "tileType");
        }
    }
}
|
{
"pile_set_name": "Github"
}
|
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="12118" systemVersion="16E195" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" colorMatched="YES">
<device id="retina4_7" orientation="portrait">
<adaptation id="fullscreen"/>
</device>
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="12086"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<objects>
<placeholder placeholderIdentifier="IBFilesOwner" id="-1" userLabel="File's Owner" customClass="ReferenceViewController">
<connections>
<outlet property="view" destination="i5M-Pr-FkT" id="sfx-zR-JGt"/>
</connections>
</placeholder>
<placeholder placeholderIdentifier="IBFirstResponder" id="-2" customClass="UIResponder"/>
<view clearsContextBeforeDrawing="NO" contentMode="scaleToFill" id="i5M-Pr-FkT">
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="游戏介绍" textAlignment="center" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="tqv-hb-vlQ">
<rect key="frame" x="149" y="66" width="77.5" height="21"/>
<constraints>
<constraint firstAttribute="width" constant="77.5" id="HJM-XC-37x"/>
<constraint firstAttribute="height" constant="21" id="Nk8-ng-QBO"/>
</constraints>
<fontDescription key="fontDescription" type="system" pointSize="19"/>
<color key="textColor" white="0.33333333333333331" alpha="1" colorSpace="calibratedWhite"/>
<nil key="highlightedColor"/>
</label>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text=" 通过滑动手势上下左右控制蛇的方向,寻找吃的东西,每吃一口就能得到一定的积分,而且蛇的身子会越吃越长,身子越长玩的难度就越大,不能碰墙,不能咬到自己的身体,更不能咬自己的尾巴。
本游戏不设关卡。
向经典致敬。" lineBreakMode="clip" numberOfLines="0" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="NhL-aB-nim">
<rect key="frame" x="37.5" y="113" width="300" height="169"/>
<constraints>
<constraint firstAttribute="width" constant="300" id="f4p-rl-MAS"/>
<constraint firstAttribute="height" constant="169" id="kKy-UU-JgT"/>
</constraints>
<fontDescription key="fontDescription" name=".AppleSystemUIFont" family=".AppleSystemUIFont" pointSize="16"/>
<color key="textColor" white="0.33333333333333331" alpha="1" colorSpace="calibratedWhite"/>
<nil key="highlightedColor"/>
</label>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="svQ-qY-LSe">
<rect key="frame" x="170" y="582" width="35" height="35"/>
<constraints>
<constraint firstAttribute="height" constant="35" id="kI3-00-VnM"/>
<constraint firstAttribute="width" constant="35" id="ndr-Nd-dhj"/>
</constraints>
<state key="normal" image="close.png"/>
<connections>
<action selector="dismissAction:" destination="-1" eventType="touchUpInside" id="2NQ-1K-oHJ"/>
</connections>
</button>
</subviews>
<color key="backgroundColor" red="1" green="0.80784313730000001" blue="0.94509803920000002" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstItem="NhL-aB-nim" firstAttribute="centerX" secondItem="i5M-Pr-FkT" secondAttribute="centerX" id="T6t-UM-7IG"/>
<constraint firstItem="svQ-qY-LSe" firstAttribute="centerX" secondItem="i5M-Pr-FkT" secondAttribute="centerX" id="e9U-hE-vo0"/>
<constraint firstItem="NhL-aB-nim" firstAttribute="top" secondItem="tqv-hb-vlQ" secondAttribute="bottom" constant="26" id="eB4-5B-lM7"/>
<constraint firstAttribute="bottom" secondItem="svQ-qY-LSe" secondAttribute="bottom" constant="50" id="ue0-Js-ds6"/>
<constraint firstItem="tqv-hb-vlQ" firstAttribute="centerX" secondItem="i5M-Pr-FkT" secondAttribute="centerX" id="ujf-i4-RS3"/>
<constraint firstItem="tqv-hb-vlQ" firstAttribute="top" secondItem="i5M-Pr-FkT" secondAttribute="top" constant="66" id="ule-HV-pZR"/>
</constraints>
<point key="canvasLocation" x="234.5" y="41.5"/>
</view>
</objects>
<resources>
<image name="close.png" width="128" height="128"/>
</resources>
</document>
|
{
"pile_set_name": "Github"
}
|
loopback_users.guest = false
listeners.tcp.default = 5672
management.listener.port = 15672
management.listener.ssl = false
auth_backends.1 = http
## This configures rabbitmq_auth_backend_cache that delegates to
## the HTTP backend. If using this, make sure to comment the
## auth_backends.1 line above.
##
# auth_backends.1 = cache
#
# auth_cache.cached_backend = http
# auth_cache.cache_ttl = 5000
auth_http.http_method = get
auth_http.user_path = http://auth-backend:8000/auth/user
auth_http.vhost_path = http://auth-backend:8000/auth/vhost
auth_http.resource_path = http://auth-backend:8000/auth/resource
auth_http.topic_path = http://auth-backend:8000/auth/topic
|
{
"pile_set_name": "Github"
}
|
#include <stdbool.h>
#include <uv.h>
#include "queue.h"
// client message
#define INPUT '0'
#define RESIZE_TERMINAL '1'
#define JSON_DATA '{'
// server message
#define OUTPUT '0'
#define SET_WINDOW_TITLE '1'
#define SET_PREFERENCES '2'
// url paths
/* URL paths the HTTP/websocket server responds on. */
struct endpoints {
char *ws;
char *index;
char *token;
char *parent;
};
/* Globals shared across the server's translation units. */
extern volatile bool force_exit;
extern struct lws_context *context;
extern struct server *server;
extern struct endpoints endpoints;
/* Lifecycle of a child process: running, being killed, exited. */
typedef enum { STATE_INIT, STATE_KILL, STATE_EXIT } proc_state;
/* Per-connection state for plain HTTP requests (buffer being served). */
/* NOTE(review): "pss" presumably follows the libwebsockets per-session-storage
   convention — confirm against the lws protocol definitions. */
struct pss_http {
char path[128];
char *buffer;
char *ptr;
size_t len;
};
/* One child process attached to a PTY, linked into server->procs. */
struct pty_proc {
char **args;
int argc;
pid_t pid;
int status;
proc_state state;
int pty;
char *pty_buffer;
ssize_t pty_len;
uv_pipe_t pipe;
LIST_ENTRY(pty_proc) entry;
};
/* Per-connection state for a terminal websocket session. */
struct pss_tty {
bool initialized;
int initial_cmd_index;
bool authenticated;
char address[50];
char path[20];
struct lws *wsi;
char *buffer;
size_t len;
struct pty_proc *proc;
};
struct server {
int client_count; // client count
char *prefs_json; // client preferences
char *credential; // encoded basic auth credential
char *index; // custom index.html
char *command; // full command line
char **argv; // command with arguments
int argc; // command + arguments count
int sig_code; // close signal
char sig_name[20]; // human readable signal string
bool url_arg; // allow client to send cli arguments in URL
bool readonly; // whether not allow clients to write to the TTY
bool check_origin; // whether allow websocket connection from different origin
int max_clients; // maximum clients to support
bool once; // whether accept only one client and exit on disconnection
char socket_path[255]; // UNIX domain socket path
char terminal_type[30]; // terminal type to report
uv_loop_t *loop; // the libuv event loop
uv_signal_t watcher; // SIGCHLD watcher
LIST_HEAD(proc, pty_proc) procs; // started process list
};
|
{
"pile_set_name": "Github"
}
|
# Makefile for the dss1_divert ISDN module
# Each configuration option enables a list of files.
obj-$(CONFIG_ISDN_DIVERSION) += dss1_divert.o
# Multipart objects.
# dss1_divert.o is linked from the three component objects below.
dss1_divert-y := isdn_divert.o divert_procfs.o divert_init.o
|
{
"pile_set_name": "Github"
}
|
{
"domain": "ci",
"tags": [
"country",
"geo"
],
"whoisServer": "whois.nic.ci",
"nameServers": [
"any.nic.ci",
"censvrns0001.ird.fr",
"ci.hosting.nic.fr",
"ns-ci.afrinic.net",
"ns.nic.ci",
"phloem.uoregon.edu"
],
"policies": [
{
"type": "idn-disallowed"
}
]
}
|
{
"pile_set_name": "Github"
}
|
/*
* CAAM/SEC 4.x transport/backend driver
* JobR backend functionality
*
* Copyright 2008-2012 Freescale Semiconductor, Inc.
*/
#include <linux/of_irq.h>
#include <linux/of_address.h>
#include "compat.h"
#include "ctrl.h"
#include "regs.h"
#include "jr.h"
#include "desc.h"
#include "intern.h"
struct jr_driver_data {
/* List of Physical JobR's with the Driver */
struct list_head jr_list;
spinlock_t jr_alloc_lock; /* jr_list lock */
} ____cacheline_aligned;
/* Driver-wide bookkeeping of all registered job rings. */
static struct jr_driver_data driver_data;
/*
 * Flush and then reset one job ring.  Interrupts are masked while we
 * busy-poll for completion; both the flush and the reset poll loops are
 * bounded by a 100000-iteration timeout.
 * Returns 0 on success, -EIO if either phase times out or fails to halt.
 */
static int caam_reset_hw_jr(struct device *dev)
{
struct caam_drv_private_jr *jrp = dev_get_drvdata(dev);
unsigned int timeout = 100000;
/*
 * mask interrupts since we are going to poll
 * for reset completion status
 */
clrsetbits_32(&jrp->rregs->rconfig_lo, 0, JRCFG_IMSK);
/* initiate flush (required prior to reset) */
wr_reg32(&jrp->rregs->jrcommand, JRCR_RESET);
while (((rd_reg32(&jrp->rregs->jrintstatus) & JRINT_ERR_HALT_MASK) ==
JRINT_ERR_HALT_INPROGRESS) && --timeout)
cpu_relax();
if ((rd_reg32(&jrp->rregs->jrintstatus) & JRINT_ERR_HALT_MASK) !=
JRINT_ERR_HALT_COMPLETE || timeout == 0) {
dev_err(dev, "failed to flush job ring %d\n", jrp->ridx);
return -EIO;
}
/* initiate reset */
timeout = 100000;
wr_reg32(&jrp->rregs->jrcommand, JRCR_RESET);
while ((rd_reg32(&jrp->rregs->jrcommand) & JRCR_RESET) && --timeout)
cpu_relax();
if (timeout == 0) {
dev_err(dev, "failed to reset job ring %d\n", jrp->ridx);
return -EIO;
}
/* unmask interrupts */
clrsetbits_32(&jrp->rregs->rconfig_lo, JRCFG_IMSK, 0);
return 0;
}
/*
 * Shutdown JobR independent of platform property code
 */
/*
 * Resets the hardware ring, stops the dequeue tasklet, releases the IRQ
 * and frees the input/output rings and the software entry-info table.
 * Returns the result of the hardware reset.
 * NOTE(review): the rings are torn down and freed even if caam_reset_hw_jr
 * failed; only the return value reports the failure — confirm intended.
 */
static int caam_jr_shutdown(struct device *dev)
{
struct caam_drv_private_jr *jrp = dev_get_drvdata(dev);
dma_addr_t inpbusaddr, outbusaddr;
int ret;
ret = caam_reset_hw_jr(dev);
tasklet_kill(&jrp->irqtask);
/* Release interrupt */
free_irq(jrp->irq, dev);
/* Free rings */
inpbusaddr = rd_reg64(&jrp->rregs->inpring_base);
outbusaddr = rd_reg64(&jrp->rregs->outring_base);
dma_free_coherent(dev, sizeof(dma_addr_t) * JOBR_DEPTH,
jrp->inpring, inpbusaddr);
dma_free_coherent(dev, sizeof(struct jr_outentry) * JOBR_DEPTH,
jrp->outring, outbusaddr);
kfree(jrp->entinfo);
return ret;
}
/*
 * Platform-device remove hook: refuses while the ring is in use
 * (tfm_count non-zero), otherwise unlinks the ring from the driver's
 * list, shuts the ring down and disposes of its IRQ mapping.
 */
static int caam_jr_remove(struct platform_device *pdev)
{
int ret;
struct device *jrdev;
struct caam_drv_private_jr *jrpriv;
jrdev = &pdev->dev;
jrpriv = dev_get_drvdata(jrdev);
/*
 * Return EBUSY if job ring already allocated.
 */
if (atomic_read(&jrpriv->tfm_count)) {
dev_err(jrdev, "Device is busy\n");
return -EBUSY;
}
/* Remove the node from Physical JobR list maintained by driver */
spin_lock(&driver_data.jr_alloc_lock);
list_del(&jrpriv->list_node);
spin_unlock(&driver_data.jr_alloc_lock);
/* Release ring */
ret = caam_jr_shutdown(jrdev);
if (ret)
dev_err(jrdev, "Failed to shut down job ring\n");
irq_dispose_mapping(jrpriv->irq);
return ret;
}
/* Main per-ring interrupt handler */
/*
 * Reads the ring's interrupt status; returns IRQ_NONE if no bits are
 * set (shared-IRQ etiquette), BUGs on a ring error, otherwise masks the
 * ring's interrupts, acknowledges the status and schedules the dequeue
 * tasklet to do the actual completion processing.
 */
static irqreturn_t caam_jr_interrupt(int irq, void *st_dev)
{
struct device *dev = st_dev;
struct caam_drv_private_jr *jrp = dev_get_drvdata(dev);
u32 irqstate;
/*
 * Check the output ring for ready responses, kick
 * tasklet if jobs done.
 */
irqstate = rd_reg32(&jrp->rregs->jrintstatus);
if (!irqstate)
return IRQ_NONE;
/*
 * If JobR error, we got more development work to do
 * Flag a bug now, but we really need to shut down and
 * restart the queue (and fix code).
 */
if (irqstate & JRINT_JR_ERROR) {
dev_err(dev, "job ring error: irqstate: %08x\n", irqstate);
BUG();
}
/* mask valid interrupts */
clrsetbits_32(&jrp->rregs->rconfig_lo, 0, JRCFG_IMSK);
/* Have valid interrupt at this point, just ACK and trigger */
wr_reg32(&jrp->rregs->jrintstatus, irqstate);
preempt_disable();
tasklet_schedule(&jrp->irqtask);
preempt_enable();
return IRQ_HANDLED;
}
/* Deferred service handler, run as interrupt-fired tasklet */
static void caam_jr_dequeue(unsigned long devarg)
{
int hw_idx, sw_idx, i, head, tail;
struct device *dev = (struct device *)devarg;
struct caam_drv_private_jr *jrp = dev_get_drvdata(dev);
void (*usercall)(struct device *dev, u32 *desc, u32 status, void *arg);
u32 *userdesc, userstatus;
void *userarg;
while (rd_reg32(&jrp->rregs->outring_used)) {
head = ACCESS_ONCE(jrp->head);
spin_lock(&jrp->outlock);
sw_idx = tail = jrp->tail;
hw_idx = jrp->out_ring_read_index;
for (i = 0; CIRC_CNT(head, tail + i, JOBR_DEPTH) >= 1; i++) {
sw_idx = (tail + i) & (JOBR_DEPTH - 1);
if (jrp->outring[hw_idx].desc ==
caam_dma_to_cpu(jrp->entinfo[sw_idx].desc_addr_dma))
break; /* found */
}
/* we should never fail to find a matching descriptor */
BUG_ON(CIRC_CNT(head, tail + i, JOBR_DEPTH) <= 0);
/* Unmap just-run descriptor so we can post-process */
dma_unmap_single(dev,
caam_dma_to_cpu(jrp->outring[hw_idx].desc),
jrp->entinfo[sw_idx].desc_size,
DMA_TO_DEVICE);
/* mark completed, avoid matching on a recycled desc addr */
jrp->entinfo[sw_idx].desc_addr_dma = 0;
/* Stash callback params for use outside of lock */
usercall = jrp->entinfo[sw_idx].callbk;
userarg = jrp->entinfo[sw_idx].cbkarg;
user
|
{
"pile_set_name": "Github"
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.