code stringlengths 3 1.01M | repo_name stringlengths 5 116 | path stringlengths 3 311 | language stringclasses 30
values | license stringclasses 15
values | size int64 3 1.01M |
|---|---|---|---|---|---|
/* Locale settings section on the contributor settings page. */
#locale-settings {
  margin-top: 30px;
}
#preferred-locale {
  margin-top: 10px;
}
/* Right-aligned label column next to the locale selector. */
#locale-settings .label {
  color: #aaa;
  display: inline-block;
  font-size: 16px;
  font-weight: 300;
  margin: 6px 10px 0 0;
  text-align: right;
  width: 280px;
  vertical-align: top;
}
#locale-settings .locale-selector {
  display: inline-block;
}
/* Custom locale dropdown widget. */
#locale-settings .locale-selector .locale.select {
  width: 280px;
}
#locale-settings .locale-selector .locale.select .button {
  background: #272a2f;
  color: #aaaaaa;
  font-size: 16px;
  font-weight: 400;
  height: 36px;
  margin: 0;
  padding: 8px 12px;
  width: 100%;
}
/* Dropdown menu: shifted -1px to line up with the 1px border,
 * 282px = button width + both borders, raised above siblings. */
#locale-settings .locale-selector .locale.select .menu {
  background: #272a2f;
  border: 1px solid #333941;
  border-top: none;
  top: 36px;
  left: -1px;
  width: 282px;
  z-index: 30;
}
/* General form layout inside #main. */
#main form {
  margin: 0 auto;
}
#main form section {
  margin: 0 auto 70px;
}
#main form section h3 {
  margin-bottom: 20px;
}
#main .controls .cancel {
  float: none;
  margin: 9px;
}
/* Profile form. */
#profile-form {
  display: block;
  position: relative;
  text-align: left;
  width: 620px;
}
#profile-form .field {
  text-align: left;
}
#profile-form .field:not(:last-child) {
  margin-bottom: 20px;
}
#profile-form .field input {
  color: #ffffff;
  background: #333941;
  border: 1px solid #4d5967;
  border-radius: 3px;
  float: none;
  width: 290px;
  padding: 4px;
  -moz-box-sizing: border-box;
  box-sizing: border-box;
}
#profile-form button {
  margin-top: 10px;
}
#profile-form .help {
  color: #888888;
  font-style: italic;
  margin-top: 5px;
}
/* Django form validation errors. */
.errorlist {
  color: #f36;
  list-style: none;
  margin: 0;
  margin-top: 5px;
  text-align: left;
}
.check-list {
  cursor: pointer;
}
| mathjazz/pontoon | pontoon/contributors/static/css/settings.css | CSS | bsd-3-clause | 1,702 |
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE23_Relative_Path_Traversal__char_connect_socket_w32CreateFile_66a.cpp
Label Definition File: CWE23_Relative_Path_Traversal.label.xml
Template File: sources-sink-66a.tmpl.cpp
*/
/*
* @description
* CWE: 23 Relative Path Traversal
* BadSource: connect_socket Read data using a connect socket (client side)
* GoodSource: Use a fixed file name
* Sinks: w32CreateFile
* BadSink : Open the file named in data using CreateFile()
* Flow Variant: 66 Data flow: data passed in an array from one function to another in different source files
*
* */
#include "std_testcase.h"
#ifdef _WIN32
#define BASEPATH "c:\\temp\\"
#else
#include <wchar.h>
#define BASEPATH "/tmp/"
#endif
#ifdef _WIN32
#include <winsock2.h>
#include <windows.h>
#include <direct.h>
#pragma comment(lib, "ws2_32") /* include ws2_32.lib when linking */
#define CLOSE_SOCKET closesocket
#else /* NOT _WIN32 */
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <unistd.h>
#define INVALID_SOCKET -1
#define SOCKET_ERROR -1
#define CLOSE_SOCKET close
#define SOCKET int
#endif
#define TCP_PORT 27015
#define IP_ADDRESS "127.0.0.1"
namespace CWE23_Relative_Path_Traversal__char_connect_socket_w32CreateFile_66
{
#ifndef OMITBAD
/* bad function declaration */
void badSink(char * dataArray[]);
/* bad(): CWE-23 flaw driver (deliberately vulnerable test case).
 * Builds a file path by appending untrusted bytes received from a TCP
 * connection onto BASEPATH, then passes the path (in slot 2 of a
 * 5-element array) to badSink() in the companion 66b file, which opens
 * it with CreateFile().  Do not "fix" the taint flow: the flaw is the
 * point of the test. */
void bad()
{
    char * data;
    char * dataArray[5];
    /* Buffer starts as the base directory; data aliases it. */
    char dataBuffer[FILENAME_MAX] = BASEPATH;
    data = dataBuffer;
    {
#ifdef _WIN32
        WSADATA wsaData;
        int wsaDataInit = 0;
#endif
        int recvResult;
        struct sockaddr_in service;
        char *replace;
        SOCKET connectSocket = INVALID_SOCKET;
        size_t dataLen = strlen(data);
        /* do/while(0) gives a single-exit cleanup path via break. */
        do
        {
#ifdef _WIN32
            if (WSAStartup(MAKEWORD(2,2), &wsaData) != NO_ERROR)
            {
                break;
            }
            wsaDataInit = 1;
#endif
            /* POTENTIAL FLAW: Read data using a connect socket */
            connectSocket = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
            if (connectSocket == INVALID_SOCKET)
            {
                break;
            }
            memset(&service, 0, sizeof(service));
            service.sin_family = AF_INET;
            service.sin_addr.s_addr = inet_addr(IP_ADDRESS);
            service.sin_port = htons(TCP_PORT);
            if (connect(connectSocket, (struct sockaddr*)&service, sizeof(service)) == SOCKET_ERROR)
            {
                break;
            }
            /* Abort on error or the connection was closed, make sure to recv one
             * less char than is in the recv_buf in order to append a terminator */
            /* Abort on error or the connection was closed */
            recvResult = recv(connectSocket, (char *)(data + dataLen), sizeof(char) * (FILENAME_MAX - dataLen - 1), 0);
            if (recvResult == SOCKET_ERROR || recvResult == 0)
            {
                break;
            }
            /* Append null terminator */
            data[dataLen + recvResult / sizeof(char)] = '\0';
            /* Eliminate CRLF */
            replace = strchr(data, '\r');
            if (replace)
            {
                *replace = '\0';
            }
            replace = strchr(data, '\n');
            if (replace)
            {
                *replace = '\0';
            }
        }
        while (0);
        /* Cleanup: close the socket and (on Windows) tear down Winsock. */
        if (connectSocket != INVALID_SOCKET)
        {
            CLOSE_SOCKET(connectSocket);
        }
#ifdef _WIN32
        if (wsaDataInit)
        {
            WSACleanup();
        }
#endif
    }
    /* put data in array */
    dataArray[2] = data;
    badSink(dataArray);
}
#endif /* OMITBAD */
#ifndef OMITGOOD
/* good function declarations */
/* goodG2B uses the GoodSource with the BadSink */
void goodG2BSink(char * dataArray[]);
/* goodG2B(): pairs the safe source (a fixed file name, no untrusted
 * input) with the bad sink in the companion 66b file. */
static void goodG2B()
{
    char pathBuffer[FILENAME_MAX] = BASEPATH;
    char * path = pathBuffer;
    char * pathArray[5];
    /* FIX: Use a fixed file name */
    strcat(path, "file.txt");
    /* The sink convention for flow variant 66 reads element 2. */
    pathArray[2] = path;
    goodG2BSink(pathArray);
}
/* good(): entry point that runs the good (GoodSource-to-BadSink) variant. */
void good()
{
    goodG2B();
}
#endif /* OMITGOOD */
} /* close namespace */
/* Below is the main(). It is only used when building this testcase on
its own for testing or for building a binary to use in testing binary
analysis tools. It is not used when compiling all the testcases as one
application, which is how source code analysis tools are tested. */
#ifdef INCLUDEMAIN
using namespace CWE23_Relative_Path_Traversal__char_connect_socket_w32CreateFile_66; /* so that we can use good and bad easily */
/* Standalone driver: only compiled when this test case is built on its
 * own (INCLUDEMAIN defined), not when all cases build as one app. */
int main(int argc, char * argv[])
{
    /* seed randomness */
    srand( (unsigned)time(NULL) );
#ifndef OMITGOOD
    printLine("Calling good()...");
    good();
    printLine("Finished good()");
#endif /* OMITGOOD */
#ifndef OMITBAD
    printLine("Calling bad()...");
    bad();
    printLine("Finished bad()");
#endif /* OMITBAD */
    return 0;
}
#endif
| JianpingZeng/xcc | xcc/test/juliet/testcases/CWE23_Relative_Path_Traversal/s01/CWE23_Relative_Path_Traversal__char_connect_socket_w32CreateFile_66a.cpp | C++ | bsd-3-clause | 5,179 |
/*
* Copyright (c) 2014 ARM Limited
* All rights reserved
*
* The license below extends only to copyright in the software and shall
* not be construed as granting a license to any other intellectual
* property including but not limited to intellectual property relating
* to a hardware implementation of the functionality of the software
* licensed hereunder. You may use the software subject to the license
* terms below provided that you ensure that this notice is replicated
* unmodified and in its entirety in all distributions of the software,
* modified or unmodified, in source code or in binary form.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met: redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer;
* redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution;
* neither the name of the copyright holders nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* Authors: Andreas Sandberg
*/
#include "debug/VIOPci.hh"
#include "dev/virtio/pci.hh"
#include "mem/packet_access.hh"
#include "params/PciVirtIO.hh"
/** PCI transport glue for a VirtIO device model.
 *
 * The PCI subsystem ID is overridden to advertise the VirtIO device
 * type, and BAR0 is sized to cover the legacy VirtIO header plus the
 * device's own configuration space.  The device model kicks us (via
 * callbackKick) when it wants an interrupt delivered to the guest.
 */
PciVirtIO::PciVirtIO(const Params *params)
    : PciDevice(params), queueNotify(0), interruptDeliveryPending(false),
      vio(*params->vio), callbackKick(this)
{
    // Override the subsystem ID with the device ID from VirtIO
    config.subsystemID = htole(vio.deviceId);
    BARSize[0] = BAR0_SIZE_BASE + vio.configSize;
    vio.registerKickCallback(&callbackKick);
}

PciVirtIO::~PciVirtIO()
{
}
/** Handle a guest read of a BAR0 register.
 *
 * Offsets at or above OFF_VIO_DEVICE fall through to the device
 * model's config space; everything below is a legacy VirtIO transport
 * register decoded here.  Reading ISR_STATUS is destructive: it
 * returns and clears the pending-interrupt flag.
 */
Tick
PciVirtIO::read(PacketPtr pkt)
{
    const unsigned M5_VAR_USED size(pkt->getSize());
    int bar;
    Addr offset;
    if (!getBAR(pkt->getAddr(), bar, offset))
        panic("Invalid PCI memory access to unmapped memory.\n");
    assert(bar == 0);

    DPRINTF(VIOPci, "Reading offset 0x%x [len: %i]\n", offset, size);

    // Forward device configuration reads to the device VirtIO model
    if (offset >= OFF_VIO_DEVICE) {
        vio.readConfig(pkt, offset - OFF_VIO_DEVICE);
        return 0;
    }

    pkt->makeResponse();

    switch(offset) {
      case OFF_DEVICE_FEATURES:
        DPRINTF(VIOPci, "   DEVICE_FEATURES request\n");
        assert(size == sizeof(uint32_t));
        pkt->set<uint32_t>(vio.deviceFeatures);
        break;

      case OFF_GUEST_FEATURES:
        DPRINTF(VIOPci, "   GUEST_FEATURES request\n");
        assert(size == sizeof(uint32_t));
        pkt->set<uint32_t>(vio.getGuestFeatures());
        break;

      case OFF_QUEUE_ADDRESS:
        DPRINTF(VIOPci, "   QUEUE_ADDRESS request\n");
        assert(size == sizeof(uint32_t));
        pkt->set<uint32_t>(vio.getQueueAddress());
        break;

      case OFF_QUEUE_SIZE:
        DPRINTF(VIOPci, "   QUEUE_SIZE request\n");
        assert(size == sizeof(uint16_t));
        pkt->set<uint16_t>(vio.getQueueSize());
        break;

      case OFF_QUEUE_SELECT:
        DPRINTF(VIOPci, "   QUEUE_SELECT\n");
        assert(size == sizeof(uint16_t));
        pkt->set<uint16_t>(vio.getQueueSelect());
        break;

      case OFF_QUEUE_NOTIFY:
        DPRINTF(VIOPci, "   QUEUE_NOTIFY request\n");
        assert(size == sizeof(uint16_t));
        pkt->set<uint16_t>(queueNotify);
        break;

      case OFF_DEVICE_STATUS:
        DPRINTF(VIOPci, "   DEVICE_STATUS request\n");
        assert(size == sizeof(uint8_t));
        pkt->set<uint8_t>(vio.getDeviceStatus());
        break;

      case OFF_ISR_STATUS: {
        DPRINTF(VIOPci, "   ISR_STATUS\n");
        assert(size == sizeof(uint8_t));
        // Read-to-clear: the pending flag is consumed by this read.
        uint8_t isr_status(interruptDeliveryPending ? 1 : 0);
        interruptDeliveryPending = false;
        pkt->set<uint8_t>(isr_status);
      } break;

      default:
        panic("Unhandled read offset (0x%x)\n", offset);
    }

    return 0;
}
/** Handle a guest write to a BAR0 register.
 *
 * Offsets at or above OFF_VIO_DEVICE fall through to the device
 * model's config space; everything below is a legacy VirtIO transport
 * register decoded here.  Read-only registers (device features, queue
 * size, ISR status) produce a warning rather than a panic so a buggy
 * guest driver cannot take down the simulator.
 *
 * Fix: the default-case panic previously said "Unhandled read offset"
 * (copy-pasted from read()); it now correctly reports a write.
 */
Tick
PciVirtIO::write(PacketPtr pkt)
{
    const unsigned M5_VAR_USED size(pkt->getSize());
    int bar;
    Addr offset;
    if (!getBAR(pkt->getAddr(), bar, offset))
        panic("Invalid PCI memory access to unmapped memory.\n");
    assert(bar == 0);

    DPRINTF(VIOPci, "Writing offset 0x%x [len: %i]\n", offset, size);

    // Forward device configuration writes to the device VirtIO model
    if (offset >= OFF_VIO_DEVICE) {
        vio.writeConfig(pkt, offset - OFF_VIO_DEVICE);
        return 0;
    }

    pkt->makeResponse();

    switch(offset) {
      case OFF_DEVICE_FEATURES:
        warn("Guest tried to write device features.");
        break;

      case OFF_GUEST_FEATURES:
        DPRINTF(VIOPci, "   WRITE GUEST_FEATURES request\n");
        assert(size == sizeof(uint32_t));
        vio.setGuestFeatures(pkt->get<uint32_t>());
        break;

      case OFF_QUEUE_ADDRESS:
        DPRINTF(VIOPci, "   WRITE QUEUE_ADDRESS\n");
        assert(size == sizeof(uint32_t));
        vio.setQueueAddress(pkt->get<uint32_t>());
        break;

      case OFF_QUEUE_SIZE:
        panic("Guest tried to write queue size.");
        break;

      case OFF_QUEUE_SELECT:
        DPRINTF(VIOPci, "   WRITE QUEUE_SELECT\n");
        assert(size == sizeof(uint16_t));
        vio.setQueueSelect(pkt->get<uint16_t>());
        break;

      case OFF_QUEUE_NOTIFY:
        DPRINTF(VIOPci, "   WRITE QUEUE_NOTIFY\n");
        assert(size == sizeof(uint16_t));
        // Remember the last notified queue and tell the device model
        // it has new buffers to process.
        queueNotify = pkt->get<uint16_t>();
        vio.onNotify(queueNotify);
        break;

      case OFF_DEVICE_STATUS: {
        assert(size == sizeof(uint8_t));
        uint8_t status(pkt->get<uint8_t>());
        DPRINTF(VIOPci, "VirtIO set status: 0x%x\n", status);
        vio.setDeviceStatus(status);
      } break;

      case OFF_ISR_STATUS:
        warn("Guest tried to write ISR status.");
        break;

      default:
        panic("Unhandled write offset (0x%x)\n", offset);
    }

    return 0;
}
/** Kick callback from the VirtIO device model: latch the ISR pending
 * flag (cleared by the guest reading ISR_STATUS) and raise the PCI
 * interrupt line. */
void
PciVirtIO::kick()
{
    DPRINTF(VIOPci, "kick(): Sending interrupt...\n");
    interruptDeliveryPending = true;
    intrPost();
}

/** Python-config factory hook: instantiate the device from its params. */
PciVirtIO *
PciVirtIOParams::create()
{
    return new PciVirtIO(this);
}
| etashjian/ECE757-final | src/dev/virtio/pci.cc | C++ | bsd-3-clause | 7,129 |
/* $NetBSD: ecoff_machdep.h,v 1.1 2002/03/13 05:03:18 simonb Exp $ */
#include <mips/ecoff_machdep.h>
| MarginC/kame | netbsd/sys/arch/evbmips/include/ecoff_machdep.h | C | bsd-3-clause | 103 |
\documentclass{jarticle}
\usepackage{plext}
\usepackage{calc}
\begin{document}
%% cf. latex.ltx macros (from ltboxes)
ใใใใใ\makebox[60pt]{ใใใ}ใใใใใ\par
ใใใใใ\makebox[-30pt]{ใใใ}ใใใใใ\par
% calc extension check
ใใใใใ\makebox[60pt+10pt]{ใใใ}ใใใใใ\par
ใใใใใ\makebox[-30pt/2*3]{ใใใ}ใใใใใ\par
% robustness
\section{ใใใใใ\makebox[60pt]{ใใใ}ใใใใใ}
\section{ใใใใใ\makebox[-30pt]{ใใใ}ใใใใใ}
%% plext.sty macros
ใใใใใ\pbox[60pt]{ใใใ}ใใใใใ\par
ใใใใใ\pbox[-30pt]{ใใใ}ใใใใใ\par % natural width
% calc extension check
ใใใใใ\pbox[60pt+10pt]{ใใใ}ใใใใใ\par
ใใใใใ\pbox[-30pt/2*3]{ใใใ}ใใใใใ\par % natural width
% robustness
\section{ใใใใใ\pbox<t>[60pt]{ใใใ}ใใใใใ}
\section{ใใใใใ\pbox<t>[-30pt]{ใใใ}ใใใใใ}
\end{document}
| h-kitagawa/platex | tests/plext_pbox.tex | TeX | bsd-3-clause | 1,020 |
<?php
/**
* Magento
*
* NOTICE OF LICENSE
*
* This source file is subject to the Open Software License (OSL 3.0)
* that is bundled with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://opensource.org/licenses/osl-3.0.php
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@magentocommerce.com so we can send you a copy immediately.
*
* DISCLAIMER
*
* Do not edit or add to this file if you wish to upgrade Magento to newer
* versions in the future. If you wish to customize Magento for your
* needs please refer to http://www.magentocommerce.com for more information.
*
* @category Mage
* @package Mage_Adminhtml
* @copyright Copyright (c) 2011 Magento Inc. (http://www.magentocommerce.com)
* @license http://opensource.org/licenses/osl-3.0.php Open Software License (OSL 3.0)
*/
/**
 * Permissions role "users" tab.
 *
 * Renders the list of admin users (via the rolesusers template) for
 * the role currently being edited, delegating the tabular listing to
 * a child user-grid block.
 */
class Mage_Adminhtml_Block_Permissions_Tab_Rolesusers extends Mage_Adminhtml_Block_Widget_Tabs {
    /**
     * Load all admin users and bind them, plus the role id from the
     * 'rid' request parameter (false when absent), to the template.
     */
    public function __construct()
    {
        parent::__construct();
        $roleId = $this->getRequest()->getParam('rid', false);
        $users = Mage::getModel("admin/user")->getCollection()->load();
        $this->setTemplate('permissions/rolesusers.phtml')
            ->assign('users', $users->getItems())
            ->assign('roleId', $roleId);
    }

    /**
     * Create the child user grid block used by the template.
     */
    protected function _prepareLayout()
    {
        $this->setChild('userGrid', $this->getLayout()->createBlock('adminhtml/permissions_role_grid_user', 'roleUsersGrid'));
        return parent::_prepareLayout();
    }

    /** @return string rendered HTML of the child user grid */
    protected function _getGridHtml()
    {
        return $this->getChildHtml('userGrid');
    }

    /** @return string JS object name of the child grid (for template JS) */
    protected function _getJsObjectName()
    {
        return $this->getChild('userGrid')->getJsObjectName();
    }
}
| 5452/durex | includes/src/Mage_Adminhtml_Block_Permissions_Tab_Rolesusers.php | PHP | bsd-3-clause | 1,869 |
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE762_Mismatched_Memory_Management_Routines__new_free_long_84_goodB2G.cpp
Label Definition File: CWE762_Mismatched_Memory_Management_Routines__new_free.label.xml
Template File: sources-sinks-84_goodB2G.tmpl.cpp
*/
/*
* @description
* CWE: 762 Mismatched Memory Management Routines
* BadSource: Allocate data using new
* GoodSource: Allocate data using malloc()
* Sinks:
* GoodSink: Deallocate data using delete
* BadSink : Deallocate data using free()
* Flow Variant: 84 Data flow: data passed to class constructor and destructor by declaring the class object on the heap and deleting it after use
*
* */
#ifndef OMITGOOD
#include "std_testcase.h"
#include "CWE762_Mismatched_Memory_Management_Routines__new_free_long_84.h"
namespace CWE762_Mismatched_Memory_Management_Routines__new_free_long_84
{
/* Constructor (goodB2G variant): stores dataCopy, then immediately
 * replaces it with memory from new — the "bad source" of the test.
 * The overwrite of dataCopy is intentional template behavior. */
CWE762_Mismatched_Memory_Management_Routines__new_free_long_84_goodB2G::CWE762_Mismatched_Memory_Management_Routines__new_free_long_84_goodB2G(long * dataCopy)
{
    data = dataCopy;
    /* POTENTIAL FLAW: Allocate memory with a function that requires delete to free the memory */
    data = new long;
}

/* Destructor: the "good sink" — releases the new-allocated memory with
 * delete, matching the allocator (no new/free mismatch). */
CWE762_Mismatched_Memory_Management_Routines__new_free_long_84_goodB2G::~CWE762_Mismatched_Memory_Management_Routines__new_free_long_84_goodB2G()
{
    /* FIX: Deallocate the memory using delete */
    delete data;
}
}
#endif /* OMITGOOD */
| JianpingZeng/xcc | xcc/test/juliet/testcases/CWE762_Mismatched_Memory_Management_Routines/s07/CWE762_Mismatched_Memory_Management_Routines__new_free_long_84_goodB2G.cpp | C++ | bsd-3-clause | 1,448 |
'use strict';
angular.module("ngLocale", [], ["$provide", function ($provide) {
var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"};
// Count the digits after the decimal point in n's default string form.
function getDecimals(n) {
  var text = String(n);
  var dot = text.indexOf('.');
  if (dot === -1) {
    return 0;
  }
  return text.length - dot - 1;
}

// CLDR plural operands: v = number of visible fraction digits (capped
// at 3 unless opt_precision is given), f = those digits as an integer.
function getVF(n, opt_precision) {
  var digits = opt_precision;
  if (digits === undefined) {
    digits = Math.min(getDecimals(n), 3);
  }
  var scale = Math.pow(10, digits);
  var fraction = ((n * scale) | 0) % scale;
  return {v: digits, f: fraction};
}
$provide.value("$locale", {
"DATETIME_FORMATS": {
"AMPMS": [
"AM",
"PM"
],
"DAY": [
"Sunday",
"Monday",
"Tuesday",
"Wednesday",
"Thursday",
"Friday",
"Saturday"
],
"ERANAMES": [
"Before Christ",
"Anno Domini"
],
"ERAS": [
"BC",
"AD"
],
"FIRSTDAYOFWEEK": 6,
"MONTH": [
"January",
"February",
"March",
"April",
"May",
"June",
"July",
"August",
"September",
"October",
"November",
"December"
],
"SHORTDAY": [
"Sun",
"Mon",
"Tue",
"Wed",
"Thu",
"Fri",
"Sat"
],
"SHORTMONTH": [
"Jan",
"Feb",
"Mar",
"Apr",
"May",
"Jun",
"Jul",
"Aug",
"Sep",
"Oct",
"Nov",
"Dec"
],
"STANDALONEMONTH": [
"January",
"February",
"March",
"April",
"May",
"June",
"July",
"August",
"September",
"October",
"November",
"December"
],
"WEEKENDRANGE": [
5,
6
],
"fullDate": "EEEE, d MMMM y",
"longDate": "d MMMM y",
"medium": "d MMM y h:mm:ss a",
"mediumDate": "d MMM y",
"mediumTime": "h:mm:ss a",
"short": "d/M/yy h:mm a",
"shortDate": "d/M/yy",
"shortTime": "h:mm a"
},
"NUMBER_FORMATS": {
"CURRENCY_SYM": "$",
"DECIMAL_SEP": ".",
"GROUP_SEP": ",",
"PATTERNS": [
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 3,
"minFrac": 0,
"minInt": 1,
"negPre": "-",
"negSuf": "",
"posPre": "",
"posSuf": ""
},
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 2,
"minFrac": 2,
"minInt": 1,
"negPre": "-\u00a4",
"negSuf": "",
"posPre": "\u00a4",
"posSuf": ""
}
]
},
"id": "en-jm",
"localeID": "en_JM",
"pluralCat": function (n, opt_precision) {
var i = n | 0;
var vf = getVF(n, opt_precision);
if (i == 1 && vf.v == 0) {
return PLURAL_CATEGORY.ONE;
}
return PLURAL_CATEGORY.OTHER;
}
});
}]);
| mudunuriRaju/tlr-live | tollbackend/web/js/angular-1.5.5/i18n/angular-locale_en-jm.js | JavaScript | bsd-3-clause | 4,012 |
/*
** License Applicability. Except to the extent portions of this file are
** made subject to an alternative license as permitted in the SGI Free
** Software License B, Version 1.1 (the "License"), the contents of this
** file are subject only to the provisions of the License. You may not use
** this file except in compliance with the License. You may obtain a copy
** of the License at Silicon Graphics, Inc., attn: Legal Services, 1600
** Amphitheatre Parkway, Mountain View, CA 94043-1351, or at:
**
** http://oss.sgi.com/projects/FreeB
**
** Note that, as provided in the License, the Software is distributed on an
** "AS IS" basis, with ALL EXPRESS AND IMPLIED WARRANTIES AND CONDITIONS
** DISCLAIMED, INCLUDING, WITHOUT LIMITATION, ANY IMPLIED WARRANTIES AND
** CONDITIONS OF MERCHANTABILITY, SATISFACTORY QUALITY, FITNESS FOR A
** PARTICULAR PURPOSE, AND NON-INFRINGEMENT.
**
** Original Code. The Original Code is: OpenGL Sample Implementation,
** Version 1.2.1, released January 26, 2000, developed by Silicon Graphics,
** Inc. The Original Code is Copyright (c) 1991-2000 Silicon Graphics, Inc.
** Copyright in any portions created by third parties is as indicated
** elsewhere herein. All Rights Reserved.
**
** Additional Notice Provisions: The application programming interfaces
** established by SGI in conjunction with the Original Code are The
** OpenGL(R) Graphics System: A Specification (Version 1.2.1), released
** April 1, 1999; The OpenGL(R) Graphics System Utility Library (Version
** 1.3), released November 4, 1998; and OpenGL(R) Graphics with the X
** Window System(R) (Version 1.3), released October 19, 1998. This software
** was created using the OpenGL(R) version 1.2.1 Sample Implementation
** published by SGI, but has not been independently verified as being
** compliant with the OpenGL(R) version 1.2.1 Specification.
*/
/*
* bufpool.c++
*
* $Date: 2004/05/12 15:29:36 $ $Revision: 1.2 $
* $Header: /home/krh/git/sync/mesa-cvs-repo/Mesa/src/glu/sgi/libnurbs/internals/bufpool.cc,v 1.2 2004/05/12 15:29:36 brianp Exp $
*/
#include "glimports.h"
#include "myassert.h"
#include "bufpool.h"
/*-----------------------------------------------------------------------------
* Pool - allocate a new pool of buffers
*-----------------------------------------------------------------------------
*/
/*-----------------------------------------------------------------------------
 * Pool - allocate a new pool of buffers
 *
 * Buffers are at least sizeof(Buffer) so a freed buffer can be reused
 * as a free-list node.  No block is allocated here; the first grow()
 * happens lazily on the first allocation.
 *-----------------------------------------------------------------------------
 */
Pool::Pool( int _buffersize, int initpoolsize, const char *n )
{
    /* Clamp the buffer size so free-list links always fit. */
    if((unsigned)_buffersize < sizeof(Buffer))
        buffersize = sizeof(Buffer);
    else
        buffersize = _buffersize;
    initsize = initpoolsize * buffersize;
    nextsize = initsize;   /* doubles on every grow() */
    name = n;
    magic = is_allocated;  /* sanity tag checked by the other methods */
    nextblock = 0;
    curblock = 0;
    freelist = 0;
    nextfree = 0;
}
/*-----------------------------------------------------------------------------
 * ~Pool - free a pool of buffers and the pool itself
 *-----------------------------------------------------------------------------
 */
Pool::~Pool( void )
{
    /* NOTE(review): the (this != 0) term is always true in standard
     * C++ (and UB to rely on); kept as-is from the original SGI code. */
    assert( (this != 0) && (magic == is_allocated) );

    while( nextblock ) {
        delete [] blocklist[--nextblock];
        blocklist[nextblock] = 0;
    }
    magic = is_free;
}

/* grow - allocate a fresh block twice the size of the previous one and
 * make it the current allocation arena. */
void Pool::grow( void )
{
    assert( (this != 0) && (magic == is_allocated) );
    curblock = new char[nextsize];
    blocklist[nextblock++] = curblock;
    nextfree = nextsize;   /* bytes still unallocated in curblock */
    nextsize *= 2;
}

/*-----------------------------------------------------------------------------
 * Pool::clear - free buffers associated with pool but keep pool
 *
 * Also halves nextsize (down to initsize) so a pool that ballooned once
 * does not keep its peak growth rate forever.
 *-----------------------------------------------------------------------------
 */
void
Pool::clear( void )
{
    assert( (this != 0) && (magic == is_allocated) );

    while( nextblock ) {
        delete [] blocklist[--nextblock];
        blocklist[nextblock] = 0;
    }
    curblock = 0;
    freelist = 0;
    nextfree = 0;
    if( nextsize > initsize )
        nextsize /= 2;
}
| anasazi/POP-REU-Project | pkgs/libs/mesa/src/src/glu/sgi/libnurbs/internals/bufpool.cc | C++ | bsd-3-clause | 3,853 |
#ifndef fl_gl_cyclic_color_wheel_h
#define fl_gl_cyclic_color_wheel_h

#include <FL/gl.h> /* GLfloat GLubyte GLuint GLenum */
#include <FL/Fl.H>
#include <FL/Fl_Gl_Window.H>
#include <FL/Fl_Value_Input.H>
#include <FL/Fl_Check_Button.H>

/* OpenGL widget drawing a cyclic hue color wheel with draggable hue
 * min/max range handles bound to external FLTK widgets.
 * (Original comments were Japanese and arrived mis-encoded; English
 * text below is reconstructed from member names — verify against the
 * implementation.) */
class fl_gl_cyclic_color_wheel : public Fl_Gl_Window {
public:
	fl_gl_cyclic_color_wheel(int x,int y ,int w,int h ,const char*l=0);
	void init_widget_set(
		 const int number
		,Fl_Value_Input* valinp_hue_min
		,Fl_Value_Input* valinp_hue_max
		,Fl_Check_Button* chebut_enable_sw
		,Fl_Check_Button* chebut_rotate360_sw
	);
	void init_number_and_is_max( const int number ,const bool is_max);
	void set_min_or_max(const bool is_max );
	void set_reset(void);
private:
	/* Mouse position captured on push (drag start). */
	int	 mouse_x_when_push_
		,mouse_y_when_push_;
	/* Horizontal/hue offset where the color wheel drawing starts. */
	double x_offset_;
	double hue_offset_;
	/* External widgets shown as a guide for each hue min/max range. */
	class guide_widget_set_ {
	public:
		Fl_Value_Input* valinp_hue_min;
		Fl_Value_Input* valinp_hue_max;
		Fl_Check_Button* chebut_enable_sw;
		Fl_Check_Button* chebut_rotate360_sw;
	};
	std::vector< guide_widget_set_ > guide_widget_sets_;
	int hue_range_number_;   /* which range a drag is editing */
	bool hue_range_is_max_;  /* true: dragging the max handle */
	//----------
	void draw();
	void draw_object_();
	int handle(int event);
	void handle_push_( const int mx ,const int my );
	void handle_updownleftright_( const int mx ,const int my );
	void handle_keyboard_( const int key , const char* text );
	double xpos_from_hue_(const double hue);
	double hue_from_xpos_(const double xpos);
	double limit_new_hue_( double hue_o_new ,bool& rotate360_sw );
	void set_min_or_max_to_gui_( const bool rot360_sw );
};

#endif /* !fl_gl_cyclic_color_wheel_h */
| masafumi-inoue/GTS | sources/libcpp83gts_callback_and_action/fl_gl_cyclic_color_wheel.h | C | bsd-3-clause | 1,711 |
from django.apps import AppConfig
class ContentStoreAppConfig(AppConfig):
    """App configuration for the ``contentstore`` app.

    Imports the app's signal module once the app registry is fully
    populated so its receivers get registered.
    """

    name = "contentstore"

    def ready(self):
        # Imported for its side effect (signal receiver registration).
        # The original code followed the import with a bare
        # "contentstore.signals" expression — a no-op used to silence
        # unused-import linting; replaced with an explicit noqa marker.
        import contentstore.signals  # noqa: F401
| praekelt/seed-stage-based-messaging | contentstore/apps.py | Python | bsd-3-clause | 190 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "base/prefs/json_pref_store.h"
#include <algorithm>
#include "base/bind.h"
#include "base/callback.h"
#include "base/file_util.h"
#include "base/json/json_file_value_serializer.h"
#include "base/json/json_string_value_serializer.h"
#include "base/memory/ref_counted.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/sequenced_task_runner.h"
#include "base/threading/sequenced_worker_pool.h"
#include "base/values.h"
namespace {
// Some extensions we'll tack on to copies of the Preferences files.
const base::FilePath::CharType* kBadExtension = FILE_PATH_LITERAL("bad");
// Differentiates file loading between origin thread and passed
// (aka file) thread.
// Loads and deserializes the preferences file on a background (file)
// sequence and reports the result back on the origin thread.
// Ref-counted because it is shared across the two threads for the
// duration of the asynchronous read.
class FileThreadDeserializer
    : public base::RefCountedThreadSafe<FileThreadDeserializer> {
 public:
  FileThreadDeserializer(JsonPrefStore* delegate,
                         base::SequencedTaskRunner* sequenced_task_runner)
      : no_dir_(false),
        error_(PersistentPrefStore::PREF_READ_ERROR_NONE),
        delegate_(delegate),
        sequenced_task_runner_(sequenced_task_runner),
        origin_loop_proxy_(base::MessageLoopProxy::current()) {
  }

  // Kicks off the asynchronous read; must be called on the origin thread.
  void Start(const base::FilePath& path) {
    DCHECK(origin_loop_proxy_->BelongsToCurrentThread());
    sequenced_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&FileThreadDeserializer::ReadFileAndReport,
                   this, path));
  }

  // Deserializes JSON on the sequenced task runner.
  void ReadFileAndReport(const base::FilePath& path) {
    DCHECK(sequenced_task_runner_->RunsTasksOnCurrentThread());

    value_.reset(DoReading(path, &error_, &no_dir_));

    origin_loop_proxy_->PostTask(
        FROM_HERE,
        base::Bind(&FileThreadDeserializer::ReportOnOriginThread, this));
  }

  // Reports deserialization result on the origin thread.
  void ReportOnOriginThread() {
    DCHECK(origin_loop_proxy_->BelongsToCurrentThread());
    delegate_->OnFileRead(value_.release(), error_, no_dir_);
  }

  // Synchronously reads |path|.  Returns the parsed value (NULL on
  // failure), fills |error|, and sets |no_dir| when the file's parent
  // directory does not exist.
  static base::Value* DoReading(const base::FilePath& path,
                                PersistentPrefStore::PrefReadError* error,
                                bool* no_dir) {
    int error_code;
    std::string error_msg;
    JSONFileValueSerializer serializer(path);
    base::Value* value = serializer.Deserialize(&error_code, &error_msg);
    HandleErrors(value, path, error_code, error_msg, error);
    *no_dir = !base::PathExists(path.DirName());
    return value;
  }

  static void HandleErrors(const base::Value* value,
                           const base::FilePath& path,
                           int error_code,
                           const std::string& error_msg,
                           PersistentPrefStore::PrefReadError* error);

 private:
  friend class base::RefCountedThreadSafe<FileThreadDeserializer>;
  ~FileThreadDeserializer() {}

  bool no_dir_;  // True when the prefs file's directory is missing.
  PersistentPrefStore::PrefReadError error_;
  scoped_ptr<base::Value> value_;
  const scoped_refptr<JsonPrefStore> delegate_;
  const scoped_refptr<base::SequencedTaskRunner> sequenced_task_runner_;
  const scoped_refptr<base::MessageLoopProxy> origin_loop_proxy_;
};
// static
// Maps a JSONFileValueSerializer failure (or a successfully parsed
// non-dictionary value) to a PrefReadError.  On parse errors the
// corrupt file is moved aside to "<path>.bad" so the user starts from
// empty prefs while the evidence is preserved.
void FileThreadDeserializer::HandleErrors(
    const base::Value* value,
    const base::FilePath& path,
    int error_code,
    const std::string& error_msg,
    PersistentPrefStore::PrefReadError* error) {
  *error = PersistentPrefStore::PREF_READ_ERROR_NONE;
  if (!value) {
    DVLOG(1) << "Error while loading JSON file: " << error_msg
             << ", file: " << path.value();
    switch (error_code) {
      case JSONFileValueSerializer::JSON_ACCESS_DENIED:
        *error = PersistentPrefStore::PREF_READ_ERROR_ACCESS_DENIED;
        break;
      case JSONFileValueSerializer::JSON_CANNOT_READ_FILE:
        *error = PersistentPrefStore::PREF_READ_ERROR_FILE_OTHER;
        break;
      case JSONFileValueSerializer::JSON_FILE_LOCKED:
        *error = PersistentPrefStore::PREF_READ_ERROR_FILE_LOCKED;
        break;
      case JSONFileValueSerializer::JSON_NO_SUCH_FILE:
        *error = PersistentPrefStore::PREF_READ_ERROR_NO_FILE;
        break;
      default:
        *error = PersistentPrefStore::PREF_READ_ERROR_JSON_PARSE;
        // JSON errors indicate file corruption of some sort.
        // Since the file is corrupt, move it to the side and continue with
        // empty preferences.  This will result in them losing their settings.
        // We keep the old file for possible support and debugging assistance
        // as well as to detect if they're seeing these errors repeatedly.
        // TODO(erikkay) Instead, use the last known good file.
        base::FilePath bad = path.ReplaceExtension(kBadExtension);
        // If they've ever had a parse error before, put them in another bucket.
        // TODO(erikkay) if we keep this error checking for very long, we may
        // want to differentiate between recent and long ago errors.
        if (base::PathExists(bad))
          *error = PersistentPrefStore::PREF_READ_ERROR_JSON_REPEAT;
        base::Move(path, bad);
        break;
    }
  } else if (!value->IsType(base::Value::TYPE_DICTIONARY)) {
    // Parsed fine, but the top-level value must be a dictionary.
    *error = PersistentPrefStore::PREF_READ_ERROR_JSON_TYPE;
  }
}
} // namespace
// static
// Returns a shutdown-blocking task runner whose sequence token is
// derived from |filename|, so all work for the same prefs file runs on
// one sequence and pending writes complete before shutdown.
scoped_refptr<base::SequencedTaskRunner> JsonPrefStore::GetTaskRunnerForFile(
    const base::FilePath& filename,
    base::SequencedWorkerPool* worker_pool) {
  std::string token("json_pref_store-");
  token.append(filename.AsUTF8Unsafe());
  return worker_pool->GetSequencedTaskRunnerWithShutdownBehavior(
      worker_pool->GetNamedSequenceToken(token),
      base::SequencedWorkerPool::BLOCK_SHUTDOWN);
}

// Starts with empty prefs and PREF_READ_ERROR_OTHER until one of the
// ReadPrefs variants runs.
JsonPrefStore::JsonPrefStore(const base::FilePath& filename,
                             base::SequencedTaskRunner* sequenced_task_runner)
    : path_(filename),
      sequenced_task_runner_(sequenced_task_runner),
      prefs_(new base::DictionaryValue()),
      read_only_(false),
      writer_(filename, sequenced_task_runner),
      initialized_(false),
      read_error_(PREF_READ_ERROR_OTHER) {}
// Looks up |key| in the in-memory dictionary.  Returns true when the
// pref exists; |result| (which may be NULL) then receives a pointer to
// the stored value, still owned by the store.
bool JsonPrefStore::GetValue(const std::string& key,
                             const base::Value** result) const {
  base::Value* stored = NULL;
  const bool present = prefs_->Get(key, &stored);
  if (present && result)
    *result = stored;
  return present;
}
// Registers |observer| for pref-change / init-complete notifications.
void JsonPrefStore::AddObserver(PrefStore::Observer* observer) {
  observers_.AddObserver(observer);
}

void JsonPrefStore::RemoveObserver(PrefStore::Observer* observer) {
  observers_.RemoveObserver(observer);
}

bool JsonPrefStore::HasObservers() const {
  return observers_.might_have_observers();
}

bool JsonPrefStore::IsInitializationComplete() const {
  return initialized_;
}

// Like GetValue() but yields a mutable pointer; |result| must be
// non-NULL here (no null check, unlike GetValue()).
bool JsonPrefStore::GetMutableValue(const std::string& key,
                                    base::Value** result) {
  return prefs_->Get(key, result);
}
// Stores |value| (taking ownership) under |key|, notifying observers
// and scheduling a write — but only if the value actually changed.
void JsonPrefStore::SetValue(const std::string& key, base::Value* value) {
  DCHECK(value);
  scoped_ptr<base::Value> new_value(value);
  base::Value* old_value = NULL;
  prefs_->Get(key, &old_value);
  if (!old_value || !value->Equals(old_value)) {
    prefs_->Set(key, new_value.release());
    ReportValueChanged(key);
  }
}

// Same as SetValue() but schedules the write without notifying
// observers of the change.
void JsonPrefStore::SetValueSilently(const std::string& key,
                                     base::Value* value) {
  DCHECK(value);
  scoped_ptr<base::Value> new_value(value);
  base::Value* old_value = NULL;
  prefs_->Get(key, &old_value);
  if (!old_value || !value->Equals(old_value)) {
    prefs_->Set(key, new_value.release());
    if (!read_only_)
      writer_.ScheduleWrite(this);
  }
}
// Removes |key|; observers are only notified if something was removed.
void JsonPrefStore::RemoveValue(const std::string& key) {
  if (prefs_->Remove(key, NULL))
    ReportValueChanged(key);
}

// Marks |key| as a pref that must be serialized even when empty.
void JsonPrefStore::MarkNeedsEmptyValue(const std::string& key) {
  keys_need_empty_value_.insert(key);
}

bool JsonPrefStore::ReadOnly() const {
  return read_only_;
}

PersistentPrefStore::PrefReadError JsonPrefStore::GetReadError() const {
  return read_error_;
}
// Synchronously reads and parses the prefs file on the calling thread,
// delivering the result through OnFileRead().  Returns the read error.
PersistentPrefStore::PrefReadError JsonPrefStore::ReadPrefs() {
  if (path_.empty()) {
    OnFileRead(NULL, PREF_READ_ERROR_FILE_NOT_SPECIFIED, false);
    return PREF_READ_ERROR_FILE_NOT_SPECIFIED;
  }

  PrefReadError error;
  bool no_dir;
  base::Value* value =
      FileThreadDeserializer::DoReading(path_, &error, &no_dir);
  OnFileRead(value, error, no_dir);
  return error;
}

// Asynchronous variant: parsing happens on the file sequence and the
// result is posted back here.  Takes ownership of |error_delegate|.
void JsonPrefStore::ReadPrefsAsync(ReadErrorDelegate *error_delegate) {
  initialized_ = false;
  error_delegate_.reset(error_delegate);
  if (path_.empty()) {
    OnFileRead(NULL, PREF_READ_ERROR_FILE_NOT_SPECIFIED, false);
    return;
  }

  // Start async reading of the preferences file. It will delete itself
  // in the end.
  scoped_refptr<FileThreadDeserializer> deserializer(
      new FileThreadDeserializer(this, sequenced_task_runner_.get()));
  deserializer->Start(path_);
}
void JsonPrefStore::CommitPendingWrite() {
if (writer_.HasPendingWrite() && !read_only_)
writer_.DoScheduledWrite();
}
void JsonPrefStore::ReportValueChanged(const std::string& key) {
FOR_EACH_OBSERVER(PrefStore::Observer, observers_, OnPrefValueChanged(key));
if (!read_only_)
writer_.ScheduleWrite(this);
}
// Completion handler for both synchronous and asynchronous reads.  Takes
// ownership of |value_owned|.  |no_dir| indicates the containing directory
// is missing, in which case initialization is reported as failed.
void JsonPrefStore::OnFileRead(base::Value* value_owned,
                               PersistentPrefStore::PrefReadError error,
                               bool no_dir) {
  scoped_ptr<base::Value> value(value_owned);
  read_error_ = error;
  if (no_dir) {
    FOR_EACH_OBSERVER(PrefStore::Observer,
                      observers_,
                      OnInitializationCompleted(false));
    return;
  }
  initialized_ = true;
  switch (error) {
    case PREF_READ_ERROR_ACCESS_DENIED:
    case PREF_READ_ERROR_FILE_OTHER:
    case PREF_READ_ERROR_FILE_LOCKED:
    case PREF_READ_ERROR_JSON_TYPE:
    case PREF_READ_ERROR_FILE_NOT_SPECIFIED:
      // The file exists but cannot be trusted; keep defaults and never
      // write back over it.
      read_only_ = true;
      break;
    case PREF_READ_ERROR_NONE:
      DCHECK(value.get());
      prefs_.reset(static_cast<base::DictionaryValue*>(value.release()));
      break;
    case PREF_READ_ERROR_NO_FILE:
      // If the file just doesn't exist, maybe this is first run. In any case
      // there's no harm in writing out default prefs in this case.
      break;
    case PREF_READ_ERROR_JSON_PARSE:
    case PREF_READ_ERROR_JSON_REPEAT:
      break;
    default:
      NOTREACHED() << "Unknown error: " << error;
  }
  if (error_delegate_.get() && error != PREF_READ_ERROR_NONE)
    error_delegate_->OnError(error);
  FOR_EACH_OBSERVER(PrefStore::Observer,
                    observers_,
                    OnInitializationCompleted(true));
}
// Flushes any pending write before the store goes away.
JsonPrefStore::~JsonPrefStore() {
  CommitPendingWrite();
}
// Serializes the current preferences into |output| as pretty-printed JSON.
// Invoked by |writer_| when a scheduled write fires.
bool JsonPrefStore::SerializeData(std::string* output) {
  // TODO(tc): Do we want to prune webkit preferences that match the default
  // value?
  JSONStringValueSerializer serializer(output);
  serializer.set_pretty_print(true);
  scoped_ptr<base::DictionaryValue> copy(
      prefs_->DeepCopyWithoutEmptyChildren());
  // Iterates |keys_need_empty_value_| and if the key exists in |prefs_|,
  // ensure its empty ListValue or DictonaryValue is preserved.
  for (std::set<std::string>::const_iterator
      it = keys_need_empty_value_.begin();
      it != keys_need_empty_value_.end();
      ++it) {
    const std::string& key = *it;
    // DeepCopyWithoutEmptyChildren() stripped empty containers; restore
    // those explicitly registered via MarkNeedsEmptyValue().
    base::Value* value = NULL;
    if (!prefs_->Get(key, &value))
      continue;
    if (value->IsType(base::Value::TYPE_LIST)) {
      const base::ListValue* list = NULL;
      if (value->GetAsList(&list) && list->empty())
        copy->Set(key, new base::ListValue);
    } else if (value->IsType(base::Value::TYPE_DICTIONARY)) {
      const base::DictionaryValue* dict = NULL;
      if (value->GetAsDictionary(&dict) && dict->empty())
        copy->Set(key, new base::DictionaryValue);
    }
  }
  return serializer.Serialize(*(copy.get()));
}
| cvsuser-chromium/chromium | base/prefs/json_pref_store.cc | C++ | bsd-3-clause | 11,988 |
<?php
/**
* Zym Framework
*
* LICENSE
*
* This source file is subject to the new BSD license that is bundled
* with this package in the file LICENSE.txt.
*
* @category Zym
* @package Zym_View
* @subpackage Helper
* @copyright Copyright (c) 2008 Zym. (http://www.zym-project.com/)
* @license http://www.zym-project.com/license New BSD License
*/
/**
* @see Zend_Controller_Front
*/
require_once 'Zend/Controller/Front.php';
/**
 * View helper exposing the front controller's response object.
 *
 * @author     Geoffrey Tran
 * @license    http://www.zym-project.com/license New BSD License
 * @package    Zym_View
 * @subpackage Helper
 * @copyright  Copyright (c) 2008 Zym. (http://www.zym-project.com/)
 */
class Zym_View_Helper_GetResponse
{
    /**
     * Retrieve the response object from the front controller singleton
     *
     * @return Zend_Controller_Response_Abstract
     */
    public function getResponse()
    {
        $frontController = Zend_Controller_Front::getInstance();
        return $frontController->getResponse();
    }
}
| robinsk/zym | library/Zym/View/Helper/GetResponse.php | PHP | bsd-3-clause | 928 |
<?php
/**
* Magento
*
* NOTICE OF LICENSE
*
* This source file is subject to the Open Software License (OSL 3.0)
* that is bundled with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://opensource.org/licenses/osl-3.0.php
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@magentocommerce.com so we can send you a copy immediately.
*
* DISCLAIMER
*
* Do not edit or add to this file if you wish to upgrade Magento to newer
* versions in the future. If you wish to customize Magento for your
* needs please refer to http://www.magentocommerce.com for more information.
*
* @category Mage
* @package Mage_XmlConnect
* @copyright Copyright (c) 2011 Magento Inc. (http://www.magentocommerce.com)
* @license http://opensource.org/licenses/osl-3.0.php Open Software License (OSL 3.0)
*/
/**
 * Checkbox element for XmlConnect simplexml forms.
 *
 * @category    Mage
 * @package     Mage_XmlConnect
 * @author      Magento Core Team <core@magentocommerce.com>
 */
class Mage_XmlConnect_Model_Simplexml_Form_Element_Checkbox
    extends Mage_XmlConnect_Model_Simplexml_Form_Element_Abstract
{
    /**
     * Checkbox element constructor
     *
     * Performs the base element initialization and fixes the type
     * to "checkbox".
     *
     * @param array $attributes
     */
    public function __construct($attributes = array())
    {
        parent::__construct($attributes);
        $this->setType('checkbox');
    }

    /**
     * Append the checkbox state to the given XML node
     *
     * The element value is cast to an integer before being written as
     * the "value" attribute.
     *
     * @param Mage_XmlConnect_Model_Simplexml_Element $xmlObj
     * @return Mage_XmlConnect_Model_Simplexml_Form_Element_Abstract
     */
    protected function _addValue(Mage_XmlConnect_Model_Simplexml_Element $xmlObj)
    {
        $checkedFlag = (int)$this->getValue();
        $xmlObj->addAttribute('value', $checkedFlag);
        return $this;
    }
}
| 5452/durex | includes/src/Mage_XmlConnect_Model_Simplexml_Form_Element_Checkbox.php | PHP | bsd-3-clause | 1,850 |
<?php
/**
* Magento
*
* NOTICE OF LICENSE
*
* This source file is subject to the Open Software License (OSL 3.0)
* that is bundled with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://opensource.org/licenses/osl-3.0.php
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@magentocommerce.com so we can send you a copy immediately.
*
* DISCLAIMER
*
* Do not edit or add to this file if you wish to upgrade Magento to newer
* versions in the future. If you wish to customize Magento for your
* needs please refer to http://www.magentocommerce.com for more information.
*
* @category Mage
* @package Mage_Catalog
* @copyright Copyright (c) 2011 Magento Inc. (http://www.magentocommerce.com)
* @license http://opensource.org/licenses/osl-3.0.php Open Software License (OSL 3.0)
*/
/**
 * Catalog product option values collection
 *
 * Loads custom-option values together with their store-scoped titles and
 * prices.  Store-specific rows take precedence over the default-store
 * (store 0) rows via COALESCE-style SQL expressions.
 *
 * @category    Mage
 * @package     Mage_Catalog
 * @author      Magento Core Team <core@magentocommerce.com>
 */
class Mage_Catalog_Model_Resource_Product_Option_Value_Collection
    extends Mage_Core_Model_Resource_Db_Collection_Abstract
{
    /**
     * Resource initialization
     */
    protected function _construct()
    {
        $this->_init('catalog/product_option_value');
    }
    /**
     * Add price, title to result
     *
     * @param int $storeId
     * @return Mage_Catalog_Model_Resource_Product_Option_Value_Collection
     */
    public function getValues($storeId)
    {
        $this->addPriceToResult($storeId)
            ->addTitleToResult($storeId);
        return $this;
    }
    /**
     * Add titles to result
     *
     * Joins both price and title tables twice (default store + requested
     * store) and exposes coalesced "price", "price_type" and "title"
     * columns that prefer the store-specific row.
     *
     * @param int $storeId
     * @return Mage_Catalog_Model_Resource_Product_Option_Value_Collection
     */
    public function addTitlesToResult($storeId)
    {
        $adapter = $this->getConnection();
        $optionTypePriceTable = $this->getTable('catalog/product_option_type_price');
        $optionTitleTable     = $this->getTable('catalog/product_option_type_title');
        // Fallback expressions: use the store-specific value when present,
        // otherwise the default-store value.
        $priceExpr = $adapter->getCheckSql(
            'store_value_price.price IS NULL',
            'default_value_price.price',
            'store_value_price.price'
        );
        $priceTypeExpr = $adapter->getCheckSql(
            'store_value_price.price_type IS NULL',
            'default_value_price.price_type',
            'store_value_price.price_type'
        );
        $titleExpr = $adapter->getCheckSql(
            'store_value_title.title IS NULL',
            'default_value_title.title',
            'store_value_title.title'
        );
        $joinExprDefaultPrice = 'default_value_price.option_type_id = main_table.option_type_id AND '
            . $adapter->quoteInto('default_value_price.store_id = ?', Mage_Catalog_Model_Abstract::DEFAULT_STORE_ID);
        $joinExprStorePrice = 'store_value_price.option_type_id = main_table.option_type_id AND '
            . $adapter->quoteInto('store_value_price.store_id = ?', $storeId);
        $joinExprTitle = 'store_value_title.option_type_id = main_table.option_type_id AND '
            . $adapter->quoteInto('store_value_title.store_id = ?', $storeId);
        $this->getSelect()
            ->joinLeft(
                array('default_value_price' => $optionTypePriceTable),
                $joinExprDefaultPrice,
                array('default_price'=>'price','default_price_type'=>'price_type')
            )
            ->joinLeft(
                array('store_value_price' => $optionTypePriceTable),
                $joinExprStorePrice,
                array(
                    'store_price'      => 'price',
                    'store_price_type' => 'price_type',
                    'price'            => $priceExpr,
                    'price_type'       => $priceTypeExpr
                )
            )
            ->join(
                array('default_value_title' => $optionTitleTable),
                'default_value_title.option_type_id = main_table.option_type_id',
                array('default_title' => 'title')
            )
            ->joinLeft(
                array('store_value_title' => $optionTitleTable),
                $joinExprTitle,
                array(
                    'store_title' => 'title',
                    'title'       => $titleExpr)
            )
            ->where('default_value_title.store_id = ?', Mage_Catalog_Model_Abstract::DEFAULT_STORE_ID);
        return $this;
    }
    /**
     * Add title result
     *
     * Inner-joins the default-store title (required) and left-joins the
     * store-specific title, exposing a coalesced "title" column.
     *
     * @param int $storeId
     * @return Mage_Catalog_Model_Resource_Product_Option_Value_Collection
     */
    public function addTitleToResult($storeId)
    {
        $optionTitleTable = $this->getTable('catalog/product_option_type_title');
        $titleExpr = $this->getConnection()
            ->getCheckSql('store_value_title.title IS NULL', 'default_value_title.title', 'store_value_title.title');
        $joinExpr = 'store_value_title.option_type_id = main_table.option_type_id AND '
            . $this->getConnection()->quoteInto('store_value_title.store_id = ?', $storeId);
        $this->getSelect()
            ->join(
                array('default_value_title' => $optionTitleTable),
                'default_value_title.option_type_id = main_table.option_type_id',
                array('default_title' => 'title')
            )
            ->joinLeft(
                array('store_value_title' => $optionTitleTable),
                $joinExpr,
                array(
                    'store_title' => 'title',
                    'title'       => $titleExpr
                )
            )
            ->where('default_value_title.store_id = ?', Mage_Catalog_Model_Abstract::DEFAULT_STORE_ID);
        return $this;
    }
    /**
     * Add price to result
     *
     * Left-joins default-store and store-specific price rows and exposes
     * coalesced "price"/"price_type" columns.
     *
     * @param int $storeId
     * @return Mage_Catalog_Model_Resource_Product_Option_Value_Collection
     */
    public function addPriceToResult($storeId)
    {
        $optionTypeTable = $this->getTable('catalog/product_option_type_price');
        $priceExpr = $this->getConnection()
            ->getCheckSql('store_value_price.price IS NULL', 'default_value_price.price', 'store_value_price.price');
        $priceTypeExpr = $this->getConnection()
            ->getCheckSql(
                'store_value_price.price_type IS NULL',
                'default_value_price.price_type',
                'store_value_price.price_type'
            );
        $joinExprDefault = 'default_value_price.option_type_id = main_table.option_type_id AND '
            . $this->getConnection()->quoteInto('default_value_price.store_id = ?', Mage_Catalog_Model_Abstract::DEFAULT_STORE_ID);
        $joinExprStore = 'store_value_price.option_type_id = main_table.option_type_id AND '
            . $this->getConnection()->quoteInto('store_value_price.store_id = ?', $storeId);
        $this->getSelect()
            ->joinLeft(
                array('default_value_price' => $optionTypeTable),
                $joinExprDefault,
                array(
                    'default_price' => 'price',
                    'default_price_type'=>'price_type'
                )
            )
            ->joinLeft(
                array('store_value_price' => $optionTypeTable),
                $joinExprStore,
                array(
                    'store_price'      => 'price',
                    'store_price_type' => 'price_type',
                    'price'            => $priceExpr,
                    'price_type'       => $priceTypeExpr
                )
            );
        return $this;
    }
    /**
     * Add option filter
     *
     * NOTE(review): $storeId is accepted but never used here; callers
     * presumably add titles/prices separately -- confirm before relying
     * on store-scoped data from this method alone.
     *
     * @param array $optionIds
     * @param int $storeId
     * @return Mage_Catalog_Model_Resource_Product_Option_Value_Collection
     */
    public function getValuesByOption($optionIds, $storeId = null)
    {
        if (!is_array($optionIds)) {
            $optionIds = array($optionIds);
        }
        return $this->addFieldToFilter('main_table.option_type_id', array('in' => $optionIds));
    }
    /**
     * Add option to filter
     *
     * Accepts an id, an array of ids, or an option model instance.
     *
     * @param array|Mage_Catalog_Model_Product_Option|int $option
     * @return Mage_Catalog_Model_Resource_Product_Option_Value_Collection
     */
    public function addOptionToFilter($option)
    {
        if (empty($option)) {
            $this->addFieldToFilter('option_id', '');
        } elseif (is_array($option)) {
            $this->addFieldToFilter('option_id', array('in' => $option));
        } elseif ($option instanceof Mage_Catalog_Model_Product_Option) {
            $this->addFieldToFilter('option_id', $option->getId());
        } else {
            $this->addFieldToFilter('option_id', $option);
        }
        return $this;
    }
}
| 5452/durex | includes/src/Mage_Catalog_Model_Resource_Product_Option_Value_Collection.php | PHP | bsd-3-clause | 8,912 |
# -*- coding: utf-8 -*-
"""Git tools."""
from shlex import split
from plumbum import ProcessExecutionError
from plumbum.cmd import git
# Name of the long-lived integration branch used by this workflow.
DEVELOPMENT_BRANCH = "develop"
def run_git(*args, dry_run=False, quiet=False):
    """Run a git command, print it before executing and capture the output.

    :param args: pieces of the git command line, joined with spaces.
    :param dry_run: when True, only print the command and return "".
    :param quiet: when True, suppress printing of the command and output.
    :return: the captured command output (empty string on dry run).
    """
    command = git[split(" ".join(args))]
    if not quiet:
        prefix = "[DRY-RUN] " if dry_run else ""
        print("{}{}".format(prefix, command))
    if dry_run:
        return ""
    output = command()
    if not quiet and output:
        print(output)
    return output
def branch_exists(branch):
    """Return True if the branch exists."""
    try:
        run_git("rev-parse --verify {}".format(branch), quiet=True)
    except ProcessExecutionError:
        return False
    return True
def get_current_branch():
    """Get the name of the branch HEAD currently points to."""
    branch_name = run_git("rev-parse --abbrev-ref HEAD", quiet=True)
    return branch_name.strip()
| andreoliw/clitoolkit | clit/git.py | Python | bsd-3-clause | 899 |
package testclasses;
import de.unifreiburg.cs.proglang.jgs.support.DynamicLabel;
import util.printer.SecurePrinter;
/**
 * Test case: printing medium-labelled and low-labelled data on the medium
 * sink is expected to succeed (no illegal information flow).
 */
public class PrintMediumSuccess {

    public static void main(String[] args) {
        // Attach a "medium" security label, then print via the medium sink.
        String mediumText = "This is medium information";
        mediumText = DynamicLabel.makeMedium(mediumText);
        SecurePrinter.printMedium(mediumText);

        // A "low" labelled value may also flow to the medium sink.
        String lowText = "This is low information";
        lowText = DynamicLabel.makeLow(lowText);
        SecurePrinter.printMedium(lowText);
    }
}
| luminousfennell/jgs | DynamicAnalyzer/src/main/java/testclasses/PrintMediumSuccess.java | Java | bsd-3-clause | 430 |
// Jasmine spec for the pat-autoscale pattern.  Covers browser-specific
// method forcing in _setup(), init() behaviour, and the scale/zoom
// transforms applied by scale().
define(["pat-autoscale", "jquery"], function(pattern, jQuery) {
    describe("pat-autoscale", function() {
        // Fresh scratch element per test; drop any leftover window handlers.
        beforeEach(function() {
            $("<div/>", {id: "lab"}).appendTo(document.body);
            $(window).off(".autoscale");
        });
        afterEach(function() {
            $("#lab").remove();
        });
        // _setup() picks a forced scaling method from the browser sniffing
        // data; save and restore the globals it reads/writes.
        describe("setup", function() {
            var force_method, mozilla, msie, version;
            beforeEach(function() {
                force_method=pattern.force_method;
                mozilla=jQuery.browser.mozilla;
                msie=jQuery.browser.msie;
                version=jQuery.browser.version;
                pattern.force_method=null;
                jQuery.browser.mozilla=false;
                jQuery.browser.msie=false;
            });
            afterEach(function() {
                pattern.force_method=force_method;
                jQuery.browser.mozilla=mozilla;
                jQuery.browser.msie=msie;
                jQuery.browser.version=version;
            });
            it("Force zoom on old IE versions", function() {
                jQuery.browser.msie=true;
                jQuery.browser.version="8.192.921";
                pattern._setup();
                expect(pattern.force_method).toBe("zoom");
            });
            it("Force nothing on recent IE versions", function() {
                jQuery.browser.msie=true;
                jQuery.browser.version="9.0.19A";
                pattern._setup();
                expect(pattern.force_method).toBe(null);
            });
            it("Force scale on gecko", function() {
                // See https://bugzilla.mozilla.org/show_bug.cgi?id=390936
                jQuery.browser.mozilla=true;
                pattern._setup();
                expect(pattern.force_method).toBe("scale");
            });
            it("Force nothing on other browsers", function() {
                pattern._setup();
                expect(pattern.force_method).toBe(null);
            });
        });
        describe("init", function() {
            var force_method;
            beforeEach(function() {
                force_method=pattern.force_method;
            });
            afterEach(function() {
                pattern.force_method=force_method;
            });
            it("Return jQuery object", function() {
                var jq = jasmine.createSpyObj("jQuery", ["each"]);
                jq.each.andReturn(jq);
                expect(pattern.init(jq)).toBe(jq);
            });
            it("Perform initial scaling", function() {
                $("<div/>", {id: "parent"}).css({width: "200px"})
                    .append($("<div/>", {id: "child", "data-pat-auto-scale": "scale"})
                            .css({width: "50px"}))
                    .appendTo("#lab");
                var $child = $("#child");
                spyOn(pattern, "scale");
                pattern.init($child);
                expect(pattern.scale).toHaveBeenCalled();
            });
            it("Honour method override", function() {
                $("<div/>", {id: "parent"}).css({width: "200px"})
                    .append($("<div/>", {id: "child", "data-pat-auto-scale": "scale"})
                            .css({width: "50px"}))
                    .appendTo("#lab");
                var $child = $("#child");
                pattern.force_method = "forced";
                pattern.init($child);
                expect($child.data("patterns.auto-scale").method).toBe("forced");
            });
        });
        // scale() resizes a child to fill its parent, using either a CSS
        // transform ("scale") or the IE zoom property ("zoom").
        describe("scale", function() {
            it("Scale element", function() {
                $("<div/>", {id: "parent"}).css({width: "200px"})
                    .append($("<div/>", {id: "child"}).css({width: "50px"}))
                    .appendTo("#lab");
                var child = document.getElementById("child");
                $(child).data("patterns.auto-scale", {method: "scale", minWidth: 0, maxWidth: 1000});
                pattern.scale.apply(child, []);
                expect(child.getAttribute("style")).toMatch(/transform: scale\(4\);/);
            });
            it("Zoom element", function() {
                $("<div/>", {id: "parent"}).css({width: "200px"})
                    .append($("<div/>", {id: "child"}).css({width: "50px"}))
                    .appendTo("#lab");
                var child = document.getElementById("child");
                $(child).data("patterns.auto-scale", {method: "zoom", minWidth: 0, maxWidth: 1000});
                pattern.scale.apply(child, []);
                expect(child.style.zoom).toBe("4");
            });
            it("Honour minimum width", function() {
                $("<div/>", {id: "parent"}).css({width: "100px"})
                    .append($("<div/>", {id: "child"}).css({width: "400px"}))
                    .appendTo("#lab");
                var child = document.getElementById("child");
                $(child).data("patterns.auto-scale", {method: "zoom", minWidth: 200, maxWidth: 1000});
                pattern.scale.apply(child, []);
                expect(child.style.zoom).toBe("0.5");
            });
            it("Honour maximum width", function() {
                $("<div/>", {id: "parent"}).css({width: "200px"})
                    .append($("<div/>", {id: "child"}).css({width: "50px"}))
                    .appendTo("#lab");
                var child = document.getElementById("child");
                $(child).data("patterns.auto-scale", {method: "zoom", minWidth: 0, maxWidth: 100});
                pattern.scale.apply(child, []);
                expect(child.style.zoom).toBe("2");
            });
            it("Add scaled class", function() {
                $("<div/>", {id: "parent"}).css({width: "200px"})
                    .append($("<div/>", {id: "child"}).css({width: "50px"}))
                    .appendTo("#lab");
                var child = document.getElementById("child");
                $(child).data("patterns.auto-scale", {method: "zoom", minWidth: 0, maxWidth: 1000});
                pattern.scale.apply(child, []);
                expect($(child).hasClass("scaled")).toBeTruthy();
            });
        });
    });
});
| Patternslib/Patterns-archive | tests/specs/pat/autoscale.js | JavaScript | bsd-3-clause | 6,201 |
require 'spec_helper'
# Spec for the PayU-decorated checkout controller: selecting the PayU
# payment method at the payment step should create a remote PayU order
# (stubbed with WebMock below) and redirect the customer to PayU.
RSpec.describe Spree::CheckoutController, type: :controller do
  # copied from original checkout controller spec
  let(:token) { 'some_token' }
  let(:user) { FactoryGirl.create(:user) }
  let(:order) { OrderWalkthrough.up_to(:delivery) }
  before do
    allow_any_instance_of(ActionDispatch::Request).to receive(:remote_ip).and_return("128.0.0.1")
    allow(controller).to receive(:try_spree_current_user).and_return(user)
    allow(controller).to receive(:spree_current_user).and_return(user)
    allow(controller).to receive(:current_order).and_return(order)
  end
  describe "PATCH /checkout/update/payment" do
    context "when payment_method is PayU" do
      let(:payment_method) { FactoryGirl.create :payu_payment_method }
      let(:payment_params) do
        {
          state: "payment",
          order: { payments_attributes: [{ payment_method_id: payment_method.id }] }
        }
      end
      subject { spree_post :update, payment_params }
      before do
        # we need to fake it because it's returned back with order
        allow(SecureRandom).to receive(:uuid).and_return("36332498-294f-41a1-980c-7b2ec0e3a8a4")
        allow(OpenPayU::Configuration).to receive(:merchant_pos_id).and_return("145278")
        allow(OpenPayU::Configuration).to receive(:signature_key).and_return("S3CRET_KEY")
      end
      let(:payu_order_create_status) { "SUCCESS" }
      # WebMock stub of the PayU "create order" REST call; the expected
      # request body pins the exact payload the integration must send.
      let!(:payu_order_create) do
        stub_request(:post, "https://145278:S3CRET_KEY@secure.payu.com/api/v2/orders")
          .with(body:
            {
              merchantPosId: "145278",
              customerIp: "128.0.0.1",
              extOrderId: order.id,
              description: "Order from Spree Test Store",
              currencyCode: "USD",
              totalAmount: 2000,
              orderUrl: "http://test.host/orders/#{order.number}",
              notifyUrl: "http://test.host/payu/notify",
              continueUrl: "http://test.host/orders/#{order.number}",
              buyer: {
                email: user.email,
                phone: "555-555-0199",
                firstName: "John",
                lastName: "Doe",
                language: "PL",
                delivery: {
                  street: "10 Lovely Street",
                  postalCode: "35005",
                  city: "Herndon",
                  countryCode: "US"
                }
              },
              products: {
                products: [
                  { name: order.line_items.first.product.name, unitPrice: 1000, quantity: 1 }
                ]
              },
              reqId: "{36332498-294f-41a1-980c-7b2ec0e3a8a4}"
            },
            headers: { 'Content-Type' => 'application/json', 'User-Agent' => 'Ruby' }
          )
          .to_return(
            status: 200,
            body: {
              status: { statusCode: payu_order_create_status },
              redirect_uri: "http://payu.com/redirect/url/4321"
            }.to_json,
            headers: {}
          )
      end
      it "creates new PayU order" do
        expect { subject }.to_not raise_error
        expect(payu_order_create).to have_been_requested
      end
      context "when PayU order creation succeeded" do
        it "updates order payment" do
          subject
          payment = order.payments.last
          expect(payment.payment_method).to eq(payment_method)
          expect(payment).to be_pending
          expect(payment.amount).to eq(order.total)
        end
        it "redirects to payu redirect url" do
          expect(subject).to redirect_to("http://payu.com/redirect/url/4321")
        end
        context "when payment save failed" do
          before do
            allow_any_instance_of(Spree::Payment).to receive(:save).and_return(false)
            allow_any_instance_of(Spree::Payment).to receive(:errors)
              .and_return(double(full_messages: ["payment save failed"]))
          end
          it "logs errors" do
            subject
            expect(flash[:error]).to include("payment save failed")
          end
          it "renders checkout state with redirect" do
            expect(subject).to redirect_to "http://test.host/checkout/payment"
          end
        end
        context "when order transition failed" do
          before do
            allow(order).to receive(:next).and_return(false)
            allow(order).to(receive(:errors)
              .and_return(double(full_messages: ["order cannot transition to this state"])))
          end
          it "logs errors" do
            subject
            expect(flash[:error]).to include("order cannot transition to this state")
          end
          it "renders checkout state with redirect" do
            expect(subject).to redirect_to "http://test.host/checkout/payment"
          end
        end
      end
      context "when PayU order creation returns unexpected status" do
        let(:payu_order_create_status) { "FAIL" }
        it "logs error in order" do
          subject
          expect(assigns(:order).errors[:base]).to include("PayU error ")
        end
        it "renders :edit page" do
          expect(subject).to render_template(:edit)
        end
      end
      context "when something failed inside PayU order creation" do
        before do
          allow(OpenPayU::Order).to receive(:create).and_raise(RuntimeError.new("Payment timeout!"))
        end
        it "logs error in order" do
          subject
          expect(assigns(:order).errors[:base]).to include("PayU error Payment timeout!")
        end
        it "renders :edit page" do
          expect(subject).to render_template(:edit)
        end
      end
    end
    context "when order attributes are missing" do
      let(:payment_params) { { state: "payment", order: { some: "details" } } }
      subject { spree_post :update, payment_params }
      it "renders checkout state with redirect" do
        expect(subject).to redirect_to "http://test.host/checkout/payment"
      end
      it "logs error" do
        subject
        expect(flash[:error]).to include("No payment found")
      end
    end
  end
end
| netguru/spree_payu_integration | spec/controllers/spree/checkout_controller_decorator_spec.rb | Ruby | bsd-3-clause | 6,155 |
/*
* Copyright 2014 MongoDB, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MONGOC_BULK_OPERATION_H
#define MONGOC_BULK_OPERATION_H
#include <bson.h>
#include "mongoc-macros.h"
#include "mongoc-write-concern.h"
#define MONGOC_BULK_WRITE_FLAGS_INIT \
   {                                 \
      true, MONGOC_BYPASS_DOCUMENT_VALIDATION_DEFAULT, 0 \
   }
BSON_BEGIN_DECLS
/* forward decl */
struct _mongoc_client_session_t;
typedef struct _mongoc_bulk_operation_t mongoc_bulk_operation_t;
typedef struct _mongoc_bulk_write_flags_t mongoc_bulk_write_flags_t;
/* Frees the bulk operation and any queued commands. */
MONGOC_EXPORT (void)
mongoc_bulk_operation_destroy (mongoc_bulk_operation_t *bulk);
/* Executes all queued operations.  Returns the id of the server the
 * commands were sent to, or 0 on failure (in which case |error| is set).
 * |reply| is always initialized and must be destroyed by the caller. */
MONGOC_EXPORT (uint32_t)
mongoc_bulk_operation_execute (mongoc_bulk_operation_t *bulk,
                               bson_t *reply,
                               bson_error_t *error);
/* Deprecated remove variants; prefer the *_remove functions below. */
MONGOC_EXPORT (void)
mongoc_bulk_operation_delete (mongoc_bulk_operation_t *bulk,
                              const bson_t *selector)
   BSON_GNUC_DEPRECATED_FOR (mongoc_bulk_operation_remove);
MONGOC_EXPORT (void)
mongoc_bulk_operation_delete_one (mongoc_bulk_operation_t *bulk,
                                  const bson_t *selector)
   BSON_GNUC_DEPRECATED_FOR (mongoc_bulk_operation_remove_one);
/* Queue an insert of |document|.  The *_with_opts variants validate their
 * arguments eagerly and report problems through |error|. */
MONGOC_EXPORT (void)
mongoc_bulk_operation_insert (mongoc_bulk_operation_t *bulk,
                              const bson_t *document);
MONGOC_EXPORT (bool)
mongoc_bulk_operation_insert_with_opts (mongoc_bulk_operation_t *bulk,
                                        const bson_t *document,
                                        const bson_t *opts,
                                        bson_error_t *error); /* OUT */
/* Queue removal of all documents matching |selector| (or just one for the
 * *_remove_one variants). */
MONGOC_EXPORT (void)
mongoc_bulk_operation_remove (mongoc_bulk_operation_t *bulk,
                              const bson_t *selector);
MONGOC_EXPORT (bool)
mongoc_bulk_operation_remove_many_with_opts (mongoc_bulk_operation_t *bulk,
                                             const bson_t *selector,
                                             const bson_t *opts,
                                             bson_error_t *error); /* OUT */
MONGOC_EXPORT (void)
mongoc_bulk_operation_remove_one (mongoc_bulk_operation_t *bulk,
                                  const bson_t *selector);
MONGOC_EXPORT (bool)
mongoc_bulk_operation_remove_one_with_opts (mongoc_bulk_operation_t *bulk,
                                            const bson_t *selector,
                                            const bson_t *opts,
                                            bson_error_t *error); /* OUT */
/* Queue replacement of one matching document with |document|. */
MONGOC_EXPORT (void)
mongoc_bulk_operation_replace_one (mongoc_bulk_operation_t *bulk,
                                   const bson_t *selector,
                                   const bson_t *document,
                                   bool upsert);
MONGOC_EXPORT (bool)
mongoc_bulk_operation_replace_one_with_opts (mongoc_bulk_operation_t *bulk,
                                             const bson_t *selector,
                                             const bson_t *document,
                                             const bson_t *opts,
                                             bson_error_t *error); /* OUT */
/* Queue an update of all matching documents (or one, for *_update_one). */
MONGOC_EXPORT (void)
mongoc_bulk_operation_update (mongoc_bulk_operation_t *bulk,
                              const bson_t *selector,
                              const bson_t *document,
                              bool upsert);
MONGOC_EXPORT (bool)
mongoc_bulk_operation_update_many_with_opts (mongoc_bulk_operation_t *bulk,
                                             const bson_t *selector,
                                             const bson_t *document,
                                             const bson_t *opts,
                                             bson_error_t *error); /* OUT */
MONGOC_EXPORT (void)
mongoc_bulk_operation_update_one (mongoc_bulk_operation_t *bulk,
                                  const bson_t *selector,
                                  const bson_t *document,
                                  bool upsert);
MONGOC_EXPORT (bool)
mongoc_bulk_operation_update_one_with_opts (mongoc_bulk_operation_t *bulk,
                                            const bson_t *selector,
                                            const bson_t *document,
                                            const bson_t *opts,
                                            bson_error_t *error); /* OUT */
MONGOC_EXPORT (void)
mongoc_bulk_operation_set_bypass_document_validation (
   mongoc_bulk_operation_t *bulk, bool bypass);
/*
 * The following functions are really only useful by language bindings and
 * those wanting to replay a bulk operation to a number of clients or
 * collections.
 */
MONGOC_EXPORT (mongoc_bulk_operation_t *)
mongoc_bulk_operation_new (bool ordered);
MONGOC_EXPORT (void)
mongoc_bulk_operation_set_write_concern (
   mongoc_bulk_operation_t *bulk, const mongoc_write_concern_t *write_concern);
MONGOC_EXPORT (void)
mongoc_bulk_operation_set_database (mongoc_bulk_operation_t *bulk,
                                    const char *database);
MONGOC_EXPORT (void)
mongoc_bulk_operation_set_collection (mongoc_bulk_operation_t *bulk,
                                      const char *collection);
MONGOC_EXPORT (void)
mongoc_bulk_operation_set_client (mongoc_bulk_operation_t *bulk, void *client);
MONGOC_EXPORT (void)
mongoc_bulk_operation_set_client_session (
   mongoc_bulk_operation_t *bulk,
   struct _mongoc_client_session_t *client_session);
/* These names include the term "hint" for backward compatibility, should be
 * mongoc_bulk_operation_get_server_id, mongoc_bulk_operation_set_server_id. */
MONGOC_EXPORT (void)
mongoc_bulk_operation_set_hint (mongoc_bulk_operation_t *bulk,
                                uint32_t server_id);
MONGOC_EXPORT (uint32_t)
mongoc_bulk_operation_get_hint (const mongoc_bulk_operation_t *bulk);
MONGOC_EXPORT (const mongoc_write_concern_t *)
mongoc_bulk_operation_get_write_concern (const mongoc_bulk_operation_t *bulk);
BSON_END_DECLS
#endif /* MONGOC_BULK_OPERATION_H */
| AmiArt/treefrog-framework | 3rdparty/mongo-c-driver-1.9.5/src/mongoc/mongoc-bulk-operation.h | C | bsd-3-clause | 6,633 |
#include "pri.h"
#include "calcu_erase_dot_noise.h"
#include "iip_erase_dot_noise.h"
/* Scans the area (l_area_xpos, l_area_ypos, l_area_xsize, l_area_ysize)
 * of an l_width-wide image with l_channels bytes per pixel, and for every
 * pixel whose 3x3 neighbourhood identifies it as single-dot noise, replaces
 * the output pixel's RGB with the value chosen by calcu_erase_dot_noise.
 * ucharp_in is read-only; corrections are written into ucharp_out.
 * (Original comments were Japanese/mojibake; translated to English.)
 */
void iip_erase_dot_noise::_exec_uchar( long l_width, long l_height, long l_area_xpos, long l_area_ypos, long l_area_xsize, long l_area_ysize, long l_channels, unsigned char *ucharp_in, unsigned char *ucharp_out )
{
	long l_start, l_scansize;
	long xx,yy;
	unsigned char *ucharp_in_y1,*ucharp_in_y2,*ucharp_in_y3;
	unsigned char *ucharp_in_x11,*ucharp_in_x12,*ucharp_in_x13,
		*ucharp_in_x21,*ucharp_in_x22,*ucharp_in_x23,
		*ucharp_in_x31,*ucharp_in_x32,*ucharp_in_x33;
	unsigned char *ucharp_out_y1,*ucharp_out_y2;
	unsigned char *ucharp_out_x1,*ucharp_out_x2;
	unsigned char *ucharp_tmp;
	calcu_erase_dot_noise cl_dot;

	l_height; /* unused parameter; referenced only to silence warnings */

	/* Initial values: byte offset of the area origin and of one scanline. */
	l_scansize = l_width * l_channels;
	l_start = l_area_ypos * l_scansize + l_area_xpos * l_channels;
	ucharp_in += l_start;
	ucharp_out += l_start;

	/* Initialize the vertical (row) pointers. */
	ucharp_in_y1 = ucharp_in;
	ucharp_in_y2 = ucharp_in_y3 = NULL;
	ucharp_out_y1 = ucharp_out;
	ucharp_out_y2 = NULL;

	/* Loop over rows. */
	for (yy = 0L; yy < l_area_ysize; ++yy,
		/* Advance the three consecutive row pointers. */
		ucharp_in_y3 = ucharp_in_y2,
		ucharp_in_y2 = ucharp_in_y1,
		ucharp_in_y1 += l_scansize,
		ucharp_out_y2 = ucharp_out_y1,
		ucharp_out_y1 += l_scansize
	) {
		/* Progress callback, when enabled. */
		if (ON == this->get_i_cv_sw()) { pri_funct_cv_run(yy); }

		/* Skip until three consecutive rows are available. */
		if (NULL == ucharp_in_y3) { continue; }

		/* Initialize the horizontal (column) pointers. */
		ucharp_in_x11 = ucharp_in_y1;
		ucharp_in_x12 = ucharp_in_x13 = NULL;
		ucharp_in_x21 = ucharp_in_y2;
		ucharp_in_x22 = ucharp_in_x23 = NULL;
		ucharp_in_x31 = ucharp_in_y3;
		ucharp_in_x32 = ucharp_in_x33 = NULL;
		ucharp_out_x1 = ucharp_out_y2;
		ucharp_out_x2 = NULL;

		/* Loop over columns. */
		for (xx = 0L; xx < l_area_xsize; ++xx,
			/* Advance the 3x3 neighbourhood pointers. */
			ucharp_in_x33 = ucharp_in_x32,
			ucharp_in_x32 = ucharp_in_x31,
			ucharp_in_x31 += l_channels,
			ucharp_in_x23 = ucharp_in_x22,
			ucharp_in_x22 = ucharp_in_x21,
			ucharp_in_x21 += l_channels,
			ucharp_in_x13 = ucharp_in_x12,
			ucharp_in_x12 = ucharp_in_x11,
			ucharp_in_x11 += l_channels,
			ucharp_out_x2 = ucharp_out_x1,
			ucharp_out_x1 += l_channels
		) {
			/* Skip until three consecutive columns are available. */
			if (NULL == ucharp_in_x13) { continue; }

			/* Decide whether the centre pixel is dot noise; returns the
			   replacement pixel, or NULL when it is not noise. */
			ucharp_tmp = cl_dot.get_ucharp(
				ucharp_in_x11,ucharp_in_x12,ucharp_in_x13,
				ucharp_in_x21,ucharp_in_x22,ucharp_in_x23,
				ucharp_in_x31,ucharp_in_x32,ucharp_in_x33
			);

			/* Dot noise: overwrite the output pixel's RGB. */
			if (NULL != ucharp_tmp) {
				ucharp_out_x2[CH_RED] = ucharp_tmp[CH_RED];
				ucharp_out_x2[CH_GRE] = ucharp_tmp[CH_GRE];
				ucharp_out_x2[CH_BLU] = ucharp_tmp[CH_BLU];
			}
		}
	}
}
| masafumi-inoue/GTS | sources/libcpp72iip_erase_dot_noise/iip_erase_dot_noise_uchar.cpp | C++ | bsd-3-clause | 2,775 |
// Copyright (c) 2013-2015 The btcsuite developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package wire_test
import (
"bytes"
"io"
)
// fixedWriter implements the io.Writer interface and intentially allows
// testing of error paths by forcing short writes.
type fixedWriter struct {
	b   []byte // fixed-capacity destination buffer (len == cap == max)
	pos int    // number of bytes written so far
}
// Write copies p into the fixed-size buffer.  If appending p would exceed
// the buffer's capacity, nothing is written and io.ErrShortWrite is
// returned, leaving the writer unchanged.
//
// This satisfies the io.Writer interface.
func (w *fixedWriter) Write(p []byte) (n int, err error) {
	if w.pos+len(p) > cap(w.b) {
		return 0, io.ErrShortWrite
	}
	n = copy(w.b[w.pos:], p)
	w.pos += n
	return n, nil
}
// Bytes returns the bytes already written to the fixed writer.
// Note: the returned slice is the writer's backing buffer (length max),
// not a copy, so callers must not mutate it while writes continue.
func (w *fixedWriter) Bytes() []byte {
	return w.b
}
// newFixedWriter returns a new io.Writer that will error once more bytes than
// the specified max have been written.
func newFixedWriter(max int) io.Writer {
	// make([]byte, max) already yields cap == len == max; the explicit
	// capacity argument in the original was redundant (gosimple S1019).
	return &fixedWriter{b: make([]byte, max), pos: 0}
}
// fixedReader implements the io.Reader interface and intentionally allows
// testing of error paths by forcing short reads.
type fixedReader struct {
	buf   []byte        // backing storage handed to iobuf at construction
	pos   int           // running total of bytes read so far
	iobuf *bytes.Buffer // buffer that Read calls are delegated to
}
// Read delegates to the underlying bytes.Buffer and tracks the running
// total of bytes consumed in fr.pos. Once the buffer is exhausted, further
// reads fail exactly as bytes.Buffer.Read does (io.EOF).
//
// This satisfies the io.Reader interface.
func (fr *fixedReader) Read(p []byte) (int, error) {
	n, err := fr.iobuf.Read(p)
	fr.pos += n
	return n, err
}
// newFixedReader returns a new io.Reader that will error once more bytes than
// the specified max have been read.
func newFixedReader(max int, buf []byte) io.Reader {
	// Redundant capacity argument removed: make([]byte, max) already has
	// cap == max (gosimple S1019). copy stops at min(max, len(buf)), so a
	// longer buf is truncated to max bytes exactly as before.
	b := make([]byte, max)
	if buf != nil {
		copy(b, buf)
	}
	return &fixedReader{buf: b, pos: 0, iobuf: bytes.NewBuffer(b)}
}
| hectorj/btcd | wire/fixedIO_test.go | GO | isc | 2,030 |
java_import 'org.apollo.game.action.DistancedAction'
# A distanced action which opens a door.
class OpenDoorAction < DistancedAction
  include DoorConstants
  # The door object this action will toggle.
  attr_reader :door
  # mob  - the mob performing the action.
  # door - the door object to toggle.
  # Delay 0, immediate execution, triggered once the mob is within
  # DOOR_SIZE squares of the door's position.
  def initialize(mob, door)
    super(0, true, mob, door.position, DOOR_SIZE)
    @door = door
  end
  # Turns the mob to face the door, toggles its open/closed state via
  # DoorUtil, then stops so the action fires only once.
  def executeAction
    mob.turn_to(@door.position)
    DoorUtil.toggle(@door)
    stop
  end
  # Equality contract (Java-style): same concrete class and same door.
  def equals(other)
    get_class == other.get_class && @door == other.door
  end
end
# MessageListener for opening and closing doors.
# Intercepts the first object action and, when the clicked object is a
# known door, queues an OpenDoorAction for the player.
on :message, :first_object_action do |player, message|
  next unless DoorUtil.door?(message.id)
  door = DoorUtil.get_door_object(message.position, message.id)
  player.start_action(OpenDoorAction.new(player, door)) unless door.nil?
end
| garyttierney/apollo | data/plugins/navigation/door/door.rb | Ruby | isc | 753 |
import * as React from 'react';
import { BsPrefixComponent } from './helpers';
interface NavbarToggleProps {
  /**
   * Text label for the toggle — presumably used as the button's accessible
   * label; confirm against the component implementation.
   */
  label?: string;
}
/**
 * Type declaration for the NavbarToggle component.
 * Renders as a `button` element by default (the `As` type parameter);
 * shared prop typing comes from BsPrefixComponent.
 */
declare class NavbarToggle<
  As extends React.ReactType = 'button'
> extends BsPrefixComponent<As, NavbarToggleProps> {}
export default NavbarToggle;
| glenjamin/react-bootstrap | types/components/NavbarToggle.d.ts | TypeScript | mit | 284 |
๏ปฟusing System;
using System.ComponentModel;
using System.Drawing;
using System.Windows.Forms;
namespace DotSpatial.Symbology.Forms
{
    /// <summary>
    /// Designer-generated half of the LineSymbolDialog form: component
    /// container, Dispose override, and WinForms layout/wiring code.
    /// </summary>
    public partial class LineSymbolDialog
    {
        /// <summary>
        /// Required designer variable.
        /// </summary>
        private System.ComponentModel.IContainer components = null;
        /// <summary>
        /// Clean up any resources being used.
        /// </summary>
        /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
        protected override void Dispose(bool disposing)
        {
            if (disposing && (components != null))
            {
                components.Dispose();
            }
            base.Dispose(disposing);
        }
        #region Windows Form Designer generated code
        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
            this.components = new System.ComponentModel.Container();
            System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(LineSymbolDialog));
            this.lblSymbologyType = new System.Windows.Forms.Label();
            this.lblPredefinedSymbol = new System.Windows.Forms.Label();
            this.lblSymbolPreview = new System.Windows.Forms.Label();
            this.btnSymbolDetails = new System.Windows.Forms.Button();
            this.cmbCategories = new System.Windows.Forms.ComboBox();
            this.predefinedLineSymbolControl1 = new DotSpatial.Symbology.Forms.PredefinedLineSymbolControl();
            this.symbolPreview1 = new DotSpatial.Symbology.Forms.SymbolPreview();
            this.dialogButtons1 = new DotSpatial.Symbology.Forms.DialogButtons();
            this.SuspendLayout();
            //
            // lblSymbologyType
            //
            resources.ApplyResources(this.lblSymbologyType, "lblSymbologyType");
            this.lblSymbologyType.Name = "lblSymbologyType";
            //
            // lblPredefinedSymbol
            //
            resources.ApplyResources(this.lblPredefinedSymbol, "lblPredefinedSymbol");
            this.lblPredefinedSymbol.Name = "lblPredefinedSymbol";
            //
            // lblSymbolPreview
            //
            resources.ApplyResources(this.lblSymbolPreview, "lblSymbolPreview");
            this.lblSymbolPreview.Name = "lblSymbolPreview";
            //
            // btnSymbolDetails
            //
            resources.ApplyResources(this.btnSymbolDetails, "btnSymbolDetails");
            this.btnSymbolDetails.Name = "btnSymbolDetails";
            this.btnSymbolDetails.UseVisualStyleBackColor = true;
            this.btnSymbolDetails.Click += new System.EventHandler(this.BtnSymbolDetailsClick);
            //
            // cmbCategories
            //
            resources.ApplyResources(this.cmbCategories, "cmbCategories");
            this.cmbCategories.FormattingEnabled = true;
            this.cmbCategories.Name = "cmbCategories";
            this.cmbCategories.SelectedIndexChanged += new System.EventHandler(this.CmbCategoriesSelectedIndexChanged);
            //
            // predefinedLineSymbolControl1
            //
            resources.ApplyResources(this.predefinedLineSymbolControl1, "predefinedLineSymbolControl1");
            this.predefinedLineSymbolControl1.BackColor = System.Drawing.Color.White;
            this.predefinedLineSymbolControl1.CategoryFilter = String.Empty;
            this.predefinedLineSymbolControl1.CellMargin = 8;
            this.predefinedLineSymbolControl1.CellSize = new System.Drawing.Size(62, 62);
            this.predefinedLineSymbolControl1.ControlRectangle = new System.Drawing.Rectangle(0, 0, 272, 253);
            this.predefinedLineSymbolControl1.DefaultCategoryFilter = "All";
            this.predefinedLineSymbolControl1.DynamicColumns = true;
            this.predefinedLineSymbolControl1.IsInitialized = false;
            this.predefinedLineSymbolControl1.IsSelected = true;
            this.predefinedLineSymbolControl1.Name = "predefinedLineSymbolControl1";
            this.predefinedLineSymbolControl1.SelectedIndex = -1;
            this.predefinedLineSymbolControl1.SelectionBackColor = System.Drawing.Color.LightGray;
            this.predefinedLineSymbolControl1.SelectionForeColor = System.Drawing.Color.White;
            this.predefinedLineSymbolControl1.ShowSymbolNames = true;
            this.predefinedLineSymbolControl1.TextFont = new System.Drawing.Font("Arial", 8F);
            this.predefinedLineSymbolControl1.VerticalScrollEnabled = true;
            //
            // symbolPreview1
            //
            resources.ApplyResources(this.symbolPreview1, "symbolPreview1");
            this.symbolPreview1.BackColor = System.Drawing.Color.White;
            this.symbolPreview1.Name = "symbolPreview1";
            //
            // dialogButtons1
            //
            resources.ApplyResources(this.dialogButtons1, "dialogButtons1");
            this.dialogButtons1.Name = "dialogButtons1";
            //
            // LineSymbolDialog
            //
            resources.ApplyResources(this, "$this");
            this.Controls.Add(this.dialogButtons1);
            this.Controls.Add(this.predefinedLineSymbolControl1);
            this.Controls.Add(this.cmbCategories);
            this.Controls.Add(this.symbolPreview1);
            this.Controls.Add(this.btnSymbolDetails);
            this.Controls.Add(this.lblSymbolPreview);
            this.Controls.Add(this.lblPredefinedSymbol);
            this.Controls.Add(this.lblSymbologyType);
            this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedDialog;
            this.HelpButton = true;
            this.MaximizeBox = false;
            this.MinimizeBox = false;
            this.Name = "LineSymbolDialog";
            this.ResumeLayout(false);
            this.PerformLayout();
        }
        #endregion
        /// <summary>Button whose Click is wired to <see cref="BtnSymbolDetailsClick"/>.</summary>
        private Button btnSymbolDetails;
        /// <summary>Category selector; changes handled by <see cref="CmbCategoriesSelectedIndexChanged"/>.</summary>
        private ComboBox cmbCategories;
        /// <summary>Dialog button strip added to the bottom of the control stack.</summary>
        private DialogButtons dialogButtons1;
        /// <summary>Label for the predefined-symbol picker.</summary>
        private Label lblPredefinedSymbol;
        /// <summary>Label for the symbol preview area.</summary>
        private Label lblSymbolPreview;
        /// <summary>Label for the symbology type selector.</summary>
        private Label lblSymbologyType;
        /// <summary>Scrollable grid of predefined line symbols.</summary>
        private PredefinedLineSymbolControl predefinedLineSymbolControl1;
        /// <summary>Preview pane showing the currently selected symbol.</summary>
        private SymbolPreview symbolPreview1;
    }
} | CGX-GROUP/DotSpatial | Source/DotSpatial.Symbology.Forms/LineSymbolDialog.Designer.cs | C# | mit | 6,632 |
function* f() {
var x;
try {
x = yield 1;
} catch (ex) {
yield ex;
}
return 2;
}
// Drive the generator twice: once straight to completion, once injecting an
// error via throw() to exercise the catch branch.
let gen = f();
expect(gen.next()).toEqual({ value: 1, done: false });
expect(gen.next()).toEqual({ value: 2, done: true });

gen = f();
expect(gen.next()).toEqual({ value: 1, done: false });
expect(gen.throw(3)).toEqual({ value: 3, done: false });
expect(gen.next()).toEqual({ value: 2, done: true });
| kellyselden/babel | packages/babel-preset-es2015/test/fixtures/traceur/Yield/YieldAssignThrow.js | JavaScript | mit | 378 |
package events
import (
"errors"
"github.com/miketheprogrammer/go-thrust/lib/commands"
"github.com/miketheprogrammer/go-thrust/lib/dispatcher"
)
// NewHandler creates a ThrustEventHandler for the given event and registers
// it with the dispatcher.
//
// fn must be a func(commands.CommandResponse) or a func(commands.EventResult);
// any other signature returns an error and the handler is NOT registered.
// (Previously the handler was registered with the dispatcher even when
// SetHandleFunc failed, installing a permanent no-op handler.)
func NewHandler(event string, fn interface{}) (ThrustEventHandler, error) {
	h := ThrustEventHandler{
		Event: event,
		Type:  "event",
	}
	if err := h.SetHandleFunc(fn); err != nil {
		return h, err
	}
	dispatcher.RegisterHandler(h)
	return h, nil
}
/**
Begin Thrust Handler Code.
**/
// Handler describes the behaviour expected of an event handler.
//
// NOTE(review): ThrustEventHandler.SetHandleFunc returns an error and no
// Register method is defined on it, so ThrustEventHandler does not satisfy
// this interface as written — confirm whether this interface is still used.
type Handler interface {
	Handle(cr commands.CommandResponse)
	Register()
	SetHandleFunc(fn interface{})
}
// ThrustEventHandler forwards matching CommandResponses to a user-supplied
// callback.
type ThrustEventHandler struct {
	Type    string      // set to "event" by NewHandler
	Event   string      // event name to match; "*" matches every event (see Handle)
	Handler interface{} // func(commands.CommandResponse) or func(commands.EventResult)
}
// Handle forwards cr to the registered callback when cr is an event whose
// type matches this handler's Event (or the handler subscribes to "*").
// Non-event responses and unrecognised callback signatures are ignored.
func (teh ThrustEventHandler) Handle(cr commands.CommandResponse) {
	if cr.Action != "event" {
		return
	}
	if cr.Type != teh.Event && teh.Event != "*" {
		return
	}
	cr.Event.Type = cr.Type
	switch fn := teh.Handler.(type) {
	case func(commands.CommandResponse):
		fn(cr)
	case func(commands.EventResult):
		fn(cr.Event)
	}
}
// SetHandleFunc installs fn as this handler's callback. Only
// func(commands.CommandResponse) and func(commands.EventResult) signatures
// are accepted; anything else yields an error and leaves the handler unset.
func (teh *ThrustEventHandler) SetHandleFunc(fn interface{}) error {
	switch f := fn.(type) {
	case func(commands.CommandResponse):
		teh.Handler = f
		return nil
	case func(commands.EventResult):
		teh.Handler = f
		return nil
	default:
		return errors.New("Invalid Handler Definition")
	}
}
| FPurchess/blank | vendor/src/github.com/miketheprogrammer/go-thrust/lib/events/eventhandler.go | GO | mit | 1,347 |
package edu.pacificu.cs493f15_1.paperorplasticapp;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* To work on unit tests, switch the Test Artifact in the Build Variants view.
*/
public class ExampleUnitTest
{
@Test
public void addition_isCorrect() throws Exception
{
assertEquals(4, 2 + 2);
}
} | eheydemann/PaperOrPlastic | PaperOrPlasticApp/app/src/test/java/edu/pacificu/cs493f15_1/paperorplasticapp/ExampleUnitTest.java | Java | mit | 326 |
from __future__ import unicode_literals
from .atomicparsley import AtomicParsleyPP
from .ffmpeg import (
FFmpegPostProcessor,
FFmpegAudioFixPP,
FFmpegEmbedSubtitlePP,
FFmpegExtractAudioPP,
FFmpegFixupStretchedPP,
FFmpegMergerPP,
FFmpegMetadataPP,
FFmpegVideoConvertorPP,
)
from .xattrpp import XAttrMetadataPP
from .execafterdownload import ExecAfterDownloadPP
def get_postprocessor(key):
    """Resolve a post-processor class from its short name.

    ``key`` is the class name without the ``PP`` suffix (e.g. ``'FFmpegMerger'``
    for ``FFmpegMergerPP``).  Raises ``KeyError`` for unknown names.
    """
    class_name = '%sPP' % key
    return globals()[class_name]
# Explicit public API of the postprocessor package; kept alphabetically sorted.
__all__ = [
    'AtomicParsleyPP',
    'ExecAfterDownloadPP',
    'FFmpegAudioFixPP',
    'FFmpegEmbedSubtitlePP',
    'FFmpegExtractAudioPP',
    'FFmpegFixupStretchedPP',
    'FFmpegMergerPP',
    'FFmpegMetadataPP',
    'FFmpegPostProcessor',
    'FFmpegVideoConvertorPP',
    'XAttrMetadataPP',
]
| janusnic/youtube-dl-GUI | youtube_dl/postprocessor/__init__.py | Python | mit | 760 |
package com.hearthsim.test.minion;
import com.hearthsim.card.Card;
import com.hearthsim.card.CharacterIndex;
import com.hearthsim.card.basic.minion.BoulderfistOgre;
import com.hearthsim.card.basic.minion.RaidLeader;
import com.hearthsim.card.classic.minion.common.ScarletCrusader;
import com.hearthsim.card.classic.minion.rare.Abomination;
import com.hearthsim.card.minion.Minion;
import com.hearthsim.exception.HSException;
import com.hearthsim.model.BoardModel;
import com.hearthsim.model.PlayerModel;
import com.hearthsim.model.PlayerSide;
import com.hearthsim.util.tree.HearthTreeNode;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.*;
/**
 * Tests for the Abomination minion card. Each test starts from the board
 * built in {@link #setup()}: current player has a Raid Leader and a
 * Boulderfist Ogre on board and an Abomination in hand; the waiting player
 * has a Scarlet Crusader, a Raid Leader and a Boulderfist Ogre.
 *
 * The only behavioural change from the original file is the idiomatic
 * {@code assertNotNull(ret)} in place of {@code assertFalse(ret == null)},
 * which gives clearer failure messages.
 */
public class TestAbomination {
    private HearthTreeNode board;
    private PlayerModel currentPlayer;
    private PlayerModel waitingPlayer;

    @Before
    public void setup() throws HSException {
        board = new HearthTreeNode(new BoardModel());
        currentPlayer = board.data_.getCurrentPlayer();
        waitingPlayer = board.data_.getWaitingPlayer();
        board.data_.placeMinion(PlayerSide.CURRENT_PLAYER, new RaidLeader());
        board.data_.placeMinion(PlayerSide.CURRENT_PLAYER, new BoulderfistOgre());
        board.data_.placeMinion(PlayerSide.WAITING_PLAYER, new ScarletCrusader());
        board.data_.placeMinion(PlayerSide.WAITING_PLAYER, new RaidLeader());
        board.data_.placeMinion(PlayerSide.WAITING_PLAYER, new BoulderfistOgre());
        Card fb = new Abomination();
        currentPlayer.placeCardHand(fb);
        currentPlayer.setMana((byte) 8);
        waitingPlayer.setMana((byte) 8);
    }

    /** Targeting the enemy hero with the card is rejected: useOn returns null and the board is unchanged. */
    @Test
    public void test0() throws HSException {
        Card theCard = currentPlayer.getHand().get(0);
        HearthTreeNode ret = theCard.useOn(PlayerSide.WAITING_PLAYER, CharacterIndex.HERO, board);
        assertNull(ret);
        assertEquals(currentPlayer.getHand().size(), 1);
        assertEquals(currentPlayer.getNumMinions(), 2);
        assertEquals(waitingPlayer.getNumMinions(), 3);
        assertEquals(currentPlayer.getMana(), 8);
        assertEquals(waitingPlayer.getMana(), 8);
        assertEquals(currentPlayer.getHero().getHealth(), 30);
        assertEquals(waitingPlayer.getHero().getHealth(), 30);
        assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_1).getHealth(), 2);
        assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_2).getHealth(), 7);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_1).getHealth(), 1);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_2).getHealth(), 2);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_3).getHealth(), 7);
        assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_1).getTotalAttack(), 2);
        assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_2).getTotalAttack(), 7);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_1).getTotalAttack(), 4);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_2).getTotalAttack(), 2);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_3).getTotalAttack(), 7);
        assertTrue(waitingPlayer.getCharacter(CharacterIndex.MINION_1).getDivineShield());
    }

    /** Playing the card on the current player's side succeeds: it leaves the hand, costs 5 mana, and gains the Raid Leader buff. */
    @Test
    public void test1() throws HSException {
        Card theCard = currentPlayer.getHand().get(0);
        HearthTreeNode ret = theCard.useOn(PlayerSide.CURRENT_PLAYER, CharacterIndex.HERO, board);
        assertNotNull(ret);
        assertEquals(currentPlayer.getHand().size(), 0);
        assertEquals(currentPlayer.getNumMinions(), 3);
        assertEquals(waitingPlayer.getNumMinions(), 3);
        assertEquals(currentPlayer.getMana(), 3);
        assertEquals(waitingPlayer.getMana(), 8);
        assertEquals(currentPlayer.getHero().getHealth(), 30);
        assertEquals(waitingPlayer.getHero().getHealth(), 30);
        assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_1).getHealth(), 4);
        assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_2).getHealth(), 2);
        assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_3).getHealth(), 7);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_1).getHealth(), 1);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_2).getHealth(), 2);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_3).getHealth(), 7);
        assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_1).getTotalAttack(), 5);
        assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_2).getTotalAttack(), 2);
        assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_3).getTotalAttack(), 7);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_1).getTotalAttack(), 4);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_2).getTotalAttack(), 2);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_3).getTotalAttack(), 7);
        assertTrue(waitingPlayer.getCharacter(CharacterIndex.MINION_1).getDivineShield());
    }

    /** After playing the Abomination, attacking the enemy Ogre triggers its deathrattle, clearing the boards except the Scarlet Crusader. */
    @Test
    public void test2() throws HSException {
        Card theCard = currentPlayer.getHand().get(0);
        HearthTreeNode ret = theCard.useOn(PlayerSide.CURRENT_PLAYER, CharacterIndex.HERO, board);
        assertNotNull(ret);
        assertEquals(currentPlayer.getHand().size(), 0);
        assertEquals(currentPlayer.getNumMinions(), 3);
        assertEquals(waitingPlayer.getNumMinions(), 3);
        assertEquals(currentPlayer.getMana(), 3);
        assertEquals(waitingPlayer.getMana(), 8);
        assertEquals(currentPlayer.getHero().getHealth(), 30);
        assertEquals(waitingPlayer.getHero().getHealth(), 30);
        assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_1).getTotalHealth(), 4);
        assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_2).getTotalHealth(), 2);
        assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_3).getTotalHealth(), 7);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_1).getTotalHealth(), 1);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_2).getTotalHealth(), 2);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_3).getTotalHealth(), 7);
        assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_1).getTotalAttack(), 5);
        assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_2).getTotalAttack(), 2);
        assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_3).getTotalAttack(), 7);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_1).getTotalAttack(), 4);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_2).getTotalAttack(), 2);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_3).getTotalAttack(), 7);
        assertTrue(waitingPlayer.getCharacter(CharacterIndex.MINION_1).getDivineShield());
        //attack the Ogre... should kill everything except the Scarlet Crusader
        Minion attacker = currentPlayer.getCharacter(CharacterIndex.MINION_1);
        attacker.hasAttacked(false);
        ret = attacker.attack(PlayerSide.WAITING_PLAYER, CharacterIndex.MINION_3, ret);
        assertNotNull(ret);
        assertEquals(currentPlayer.getHand().size(), 0);
        assertEquals(currentPlayer.getNumMinions(), 1);
        assertEquals(waitingPlayer.getNumMinions(), 1);
        assertEquals(currentPlayer.getMana(), 3);
        assertEquals(waitingPlayer.getMana(), 8);
        assertEquals(currentPlayer.getHero().getHealth(), 28);
        assertEquals(waitingPlayer.getHero().getHealth(), 28);
        assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_1).getHealth(), 5);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_1).getHealth(), 1);
        assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_1).getTotalAttack(), 6);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_1).getTotalAttack(), 3);
        assertFalse(waitingPlayer.getCharacter(CharacterIndex.MINION_1).getDivineShield());
    }

    /** Silencing the Abomination removes its deathrattle, so attacking the Ogre only trades the two minions. */
    @Test
    public void test3() throws HSException {
        Card theCard = currentPlayer.getHand().get(0);
        HearthTreeNode ret = theCard.useOn(PlayerSide.CURRENT_PLAYER, CharacterIndex.HERO, board);
        assertNotNull(ret);
        assertEquals(currentPlayer.getHand().size(), 0);
        assertEquals(currentPlayer.getNumMinions(), 3);
        assertEquals(waitingPlayer.getNumMinions(), 3);
        assertEquals(currentPlayer.getMana(), 3);
        assertEquals(waitingPlayer.getMana(), 8);
        assertEquals(currentPlayer.getHero().getHealth(), 30);
        assertEquals(waitingPlayer.getHero().getHealth(), 30);
        assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_1).getHealth(), 4);
        assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_2).getHealth(), 2);
        assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_3).getHealth(), 7);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_1).getHealth(), 1);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_2).getHealth(), 2);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_3).getHealth(), 7);
        assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_1).getTotalAttack(), 5);
        assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_2).getTotalAttack(), 2);
        assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_3).getTotalAttack(), 7);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_1).getTotalAttack(), 4);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_2).getTotalAttack(), 2);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_3).getTotalAttack(), 7);
        assertTrue(waitingPlayer.getCharacter(CharacterIndex.MINION_1).getDivineShield());
        //Silence the Abomination first, then attack with it
        Minion attacker = currentPlayer.getCharacter(CharacterIndex.MINION_1);
        attacker.silenced(PlayerSide.CURRENT_PLAYER, board);
        attacker.hasAttacked(false);
        attacker.attack(PlayerSide.WAITING_PLAYER, CharacterIndex.MINION_3, ret);
        assertEquals(currentPlayer.getHand().size(), 0);
        assertEquals(currentPlayer.getNumMinions(), 2);
        assertEquals(waitingPlayer.getNumMinions(), 3);
        assertEquals(currentPlayer.getMana(), 3);
        assertEquals(waitingPlayer.getMana(), 8);
        assertEquals(currentPlayer.getHero().getHealth(), 30);
        assertEquals(waitingPlayer.getHero().getHealth(), 30);
        assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_1).getHealth(), 2);
        assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_2).getHealth(), 7);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_1).getHealth(), 1);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_2).getHealth(), 2);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_3).getHealth(), 2);
        assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_1).getTotalAttack(), 2);
        assertEquals(currentPlayer.getCharacter(CharacterIndex.MINION_2).getTotalAttack(), 7);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_1).getTotalAttack(), 4);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_2).getTotalAttack(), 2);
        assertEquals(waitingPlayer.getCharacter(CharacterIndex.MINION_3).getTotalAttack(), 7);
        assertTrue(waitingPlayer.getCharacter(CharacterIndex.MINION_1).getDivineShield());
    }
}
| slaymaker1907/HearthSim | src/test/java/com/hearthsim/test/minion/TestAbomination.java | Java | mit | 11,770 |
// Type definitions for yargs 17.0
// Project: https://github.com/chevex/yargs, https://yargs.js.org
// Definitions by: Martin Poelstra <https://github.com/poelstra>
// Mizunashi Mana <https://github.com/mizunashi-mana>
// Jeffery Grajkowski <https://github.com/pushplay>
// Jimi (Dimitris) Charalampidis <https://github.com/JimiC>
// Steffen Viken Valvรฅg <https://github.com/steffenvv>
// Emily Marigold Klassen <https://github.com/forivall>
// ExE Boss <https://github.com/ExE-Boss>
// Aankhen <https://github.com/Aankhen>
// Ben Coe <https://github.com/bcoe>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 3.0
// The following TSLint rules have been disabled:
// unified-signatures: Because there is useful information in the argument names of the overloaded signatures
// Convention:
// Use 'union types' when:
// - parameter types have similar signature type (i.e. 'string | ReadonlyArray<string>')
// - parameter names have the same semantic meaning (i.e. ['command', 'commands'] , ['key', 'keys'])
// An example for not using 'union types' is the declaration of 'env' where `prefix` and `enable` parameters
// have different semantics. On the other hand, in the declaration of 'usage', a `command: string` parameter
// has the same semantic meaning with declaring an overload method by using `commands: ReadonlyArray<string>`,
// thus it's preferred to use `command: string | ReadonlyArray<string>`
// Use parameterless declaration instead of declaring all parameters optional,
// when all parameters are optional and more than one
import { DetailedArguments, Configuration } from 'yargs-parser';
declare namespace yargs {
type BuilderCallback<T, R> = ((args: Argv<T>) => PromiseLike<Argv<R>>) | ((args: Argv<T>) => Argv<R>) | ((args: Argv<T>) => void);
type ParserConfigurationOptions = Configuration & {
/** Sort commands alphabetically. Default is `false` */
'sort-commands': boolean;
};
/**
* The type parameter `T` is the expected shape of the parsed options.
* `Arguments<T>` is those options plus `_` and `$0`, and an indexer falling
* back to `unknown` for unknown options.
*
* For the return type / `argv` property, we create a mapped type over
* `Arguments<T>` to simplify the inferred type signature in client code.
*/
interface Argv<T = {}> {
(): { [key in keyof Arguments<T>]: Arguments<T>[key] } | Promise<{ [key in keyof Arguments<T>]: Arguments<T>[key] }>;
(args: ReadonlyArray<string>, cwd?: string): Argv<T>;
/**
* Set key names as equivalent such that updates to a key will propagate to aliases and vice-versa.
*
* Optionally `.alias()` can take an object that maps keys to aliases.
* Each key of this object should be the canonical version of the option, and each value should be a string or an array of strings.
*/
// Aliases for previously declared options can inherit the types of those options.
alias<K1 extends keyof T, K2 extends string>(shortName: K1, longName: K2 | ReadonlyArray<K2>): Argv<T & { [key in K2]: T[K1] }>;
alias<K1 extends keyof T, K2 extends string>(shortName: K2, longName: K1 | ReadonlyArray<K1>): Argv<T & { [key in K2]: T[K1] }>;
alias(shortName: string | ReadonlyArray<string>, longName: string | ReadonlyArray<string>): Argv<T>;
alias(aliases: { [shortName: string]: string | ReadonlyArray<string> }): Argv<T>;
/**
* Get the arguments as a plain old object.
*
* Arguments without a corresponding flag show up in the `argv._` array.
*
* The script name or node command is available at `argv.$0` similarly to how `$0` works in bash or perl.
*
* If `yargs` is executed in an environment that embeds node and there's no script name (e.g. Electron or nw.js),
* it will ignore the first parameter since it expects it to be the script name. In order to override
* this behavior, use `.parse(process.argv.slice(1))` instead of .argv and the first parameter won't be ignored.
*/
argv: { [key in keyof Arguments<T>]: Arguments<T>[key] } | Promise<{ [key in keyof Arguments<T>]: Arguments<T>[key] }>;
/**
* Tell the parser to interpret `key` as an array.
* If `.array('foo')` is set, `--foo foo bar` will be parsed as `['foo', 'bar']` rather than as `'foo'`.
* Also, if you use the option multiple times all the values will be flattened in one array so `--foo foo --foo bar` will be parsed as `['foo', 'bar']`
*
* When the option is used with a positional, use `--` to tell `yargs` to stop adding values to the array.
*/
array<K extends keyof T>(key: K | ReadonlyArray<K>): Argv<Omit<T, K> & { [key in K]: ToArray<T[key]> }>;
array<K extends string>(key: K | ReadonlyArray<K>): Argv<T & { [key in K]: Array<string | number> | undefined }>;
/**
* Interpret `key` as a boolean. If a non-flag option follows `key` in `process.argv`, that string won't get set as the value of `key`.
*
* `key` will default to `false`, unless a `default(key, undefined)` is explicitly set.
*
* If `key` is an array, interpret all the elements as booleans.
*/
boolean<K extends keyof T>(key: K | ReadonlyArray<K>): Argv<Omit<T, K> & { [key in K]: boolean | undefined }>;
boolean<K extends string>(key: K | ReadonlyArray<K>): Argv<T & { [key in K]: boolean | undefined }>;
/**
* Check that certain conditions are met in the provided arguments.
* @param func Called with two arguments, the parsed `argv` hash and an array of options and their aliases.
* If `func` throws or returns a non-truthy value, show the thrown error, usage information, and exit.
* @param global Indicates whether `check()` should be enabled both at the top-level and for each sub-command.
*/
check(func: (argv: Arguments<T>, aliases: { [alias: string]: string }) => any, global?: boolean): Argv<T>;
/**
* Limit valid values for key to a predefined set of choices, given as an array or as an individual value.
* If this method is called multiple times, all enumerated values will be merged together.
* Choices are generally strings or numbers, and value matching is case-sensitive.
*
* Optionally `.choices()` can take an object that maps multiple keys to their choices.
*
* Choices can also be specified as choices in the object given to `option()`.
*/
choices<K extends keyof T, C extends ReadonlyArray<any>>(key: K, values: C): Argv<Omit<T, K> & { [key in K]: C[number] | undefined }>;
choices<K extends string, C extends ReadonlyArray<any>>(key: K, values: C): Argv<T & { [key in K]: C[number] | undefined }>;
choices<C extends { [key: string]: ReadonlyArray<any> }>(choices: C): Argv<Omit<T, keyof C> & { [key in keyof C]: C[key][number] | undefined }>;
/**
* Provide a synchronous function to coerce or transform the value(s) given on the command line for `key`.
*
* The coercion function should accept one argument, representing the parsed value from the command line, and should return a new value or throw an error.
* The returned value will be used as the value for `key` (or one of its aliases) in `argv`.
*
* If the function throws, the error will be treated as a validation failure, delegating to either a custom `.fail()` handler or printing the error message in the console.
*
* Coercion will be applied to a value after all other modifications, such as `.normalize()`.
*
* Optionally `.coerce()` can take an object that maps several keys to their respective coercion function.
*
* You can also map the same function to several keys at one time. Just pass an array of keys as the first argument to `.coerce()`.
*
* If you are using dot-notion or arrays, .e.g., `user.email` and `user.password`, coercion will be applied to the final object that has been parsed
*/
coerce<K extends keyof T, V>(key: K | ReadonlyArray<K>, func: (arg: any) => V): Argv<Omit<T, K> & { [key in K]: V | undefined }>;
coerce<K extends string, V>(key: K | ReadonlyArray<K>, func: (arg: any) => V): Argv<T & { [key in K]: V | undefined }>;
coerce<O extends { [key: string]: (arg: any) => any }>(opts: O): Argv<Omit<T, keyof O> & { [key in keyof O]: ReturnType<O[key]> | undefined }>;
/**
* Define the commands exposed by your application.
* @param command Should be a string representing the command or an array of strings representing the command and its aliases.
* @param description Use to provide a description for each command your application accepts (the values stored in `argv._`).
* Set `description` to false to create a hidden command. Hidden commands don't show up in the help output and aren't available for completion.
* @param [builder] Object to give hints about the options that your command accepts.
* Can also be a function. This function is executed with a yargs instance, and can be used to provide advanced command specific help.
*
* Note that when `void` is returned, the handler `argv` object type will not include command-specific arguments.
* @param [handler] Function, which will be executed with the parsed `argv` object.
*/
command<U = T>(
command: string | ReadonlyArray<string>,
description: string,
builder?: BuilderCallback<T, U>,
handler?: (args: Arguments<U>) => void,
middlewares?: MiddlewareFunction[],
deprecated?: boolean | string,
): Argv<U>;
command<O extends { [key: string]: Options }>(
command: string | ReadonlyArray<string>,
description: string,
builder?: O,
handler?: (args: Arguments<InferredOptionTypes<O>>) => void,
middlewares?: MiddlewareFunction[],
deprecated?: boolean | string,
): Argv<T>;
command<U>(command: string | ReadonlyArray<string>, description: string, module: CommandModule<T, U>): Argv<U>;
command<U = T>(
command: string | ReadonlyArray<string>,
showInHelp: false,
builder?: BuilderCallback<T, U>,
handler?: (args: Arguments<U>) => void,
middlewares?: MiddlewareFunction[],
deprecated?: boolean | string,
): Argv<T>;
command<O extends { [key: string]: Options }>(
command: string | ReadonlyArray<string>,
showInHelp: false,
builder?: O,
handler?: (args: Arguments<InferredOptionTypes<O>>) => void,
): Argv<T>;
command<U>(command: string | ReadonlyArray<string>, showInHelp: false, module: CommandModule<T, U>): Argv<U>;
command<U>(module: CommandModule<T, U>): Argv<U>;
// Advanced API
/** Apply command modules from a directory relative to the module calling this method. */
commandDir(dir: string, opts?: RequireDirectoryOptions): Argv<T>;
/**
* Enable bash/zsh-completion shortcuts for commands and options.
*
* If invoked without parameters, `.completion()` will make completion the command to output the completion script.
*
* @param [cmd] When present in `argv._`, will result in the `.bashrc` or `.zshrc` completion script being outputted.
* To enable bash/zsh completions, concat the generated script to your `.bashrc` or `.bash_profile` (or `.zshrc` for zsh).
* @param [description] Provide a description in your usage instructions for the command that generates the completion scripts.
* @param [func] Rather than relying on yargs' default completion functionality, which shiver me timbers is pretty awesome, you can provide your own completion method.
*/
completion(): Argv<T>;
completion(cmd: string, func?: AsyncCompletionFunction): Argv<T>;
completion(cmd: string, func?: SyncCompletionFunction): Argv<T>;
completion(cmd: string, func?: PromiseCompletionFunction): Argv<T>;
completion(cmd: string, description?: string | false, func?: AsyncCompletionFunction): Argv<T>;
completion(cmd: string, description?: string | false, func?: SyncCompletionFunction): Argv<T>;
completion(cmd: string, description?: string | false, func?: PromiseCompletionFunction): Argv<T>;
/**
* Tells the parser that if the option specified by `key` is passed in, it should be interpreted as a path to a JSON config file.
* The file is loaded and parsed, and its properties are set as arguments.
* Because the file is loaded using Node's require(), the filename MUST end in `.json` to be interpreted correctly.
*
* If invoked without parameters, `.config()` will make --config the option to pass the JSON config file.
*
* @param [description] Provided to customize the config (`key`) option in the usage string.
* @param [explicitConfigurationObject] An explicit configuration `object`
*/
config(): Argv<T>;
config(key: string | ReadonlyArray<string>, description?: string, parseFn?: (configPath: string) => object): Argv<T>;
config(key: string | ReadonlyArray<string>, parseFn: (configPath: string) => object): Argv<T>;
config(explicitConfigurationObject: object): Argv<T>;
/**
* Given the key `x` is set, the key `y` must not be set. `y` can either be a single string or an array of argument names that `x` conflicts with.
*
* Optionally `.conflicts()` can accept an object specifying multiple conflicting keys.
*/
conflicts(key: string, value: string | ReadonlyArray<string>): Argv<T>;
conflicts(conflicts: { [key: string]: string | ReadonlyArray<string> }): Argv<T>;
/**
* Interpret `key` as a boolean flag, but set its parsed value to the number of flag occurrences rather than `true` or `false`. Default value is thus `0`.
*/
count<K extends keyof T>(key: K | ReadonlyArray<K>): Argv<Omit<T, K> & { [key in K]: number }>;
count<K extends string>(key: K | ReadonlyArray<K>): Argv<T & { [key in K]: number }>;
/**
* Set `argv[key]` to `value` if no option was specified in `process.argv`.
*
* Optionally `.default()` can take an object that maps keys to default values.
*
* The default value can be a `function` which returns a value. The name of the function will be used in the usage string.
*
* Optionally, `description` can also be provided and will take precedence over displaying the value in the usage instructions.
*/
default<K extends keyof T, V>(key: K, value: V, description?: string): Argv<Omit<T, K> & { [key in K]: V }>;
default<K extends string, V>(key: K, value: V, description?: string): Argv<T & { [key in K]: V }>;
default<D extends { [key: string]: any }>(defaults: D, description?: string): Argv<Omit<T, keyof D> & D>;
/**
* @deprecated since version 6.6.0
* Use '.demandCommand()' or '.demandOption()' instead
*/
demand<K extends keyof T>(key: K | ReadonlyArray<K>, msg?: string | true): Argv<Defined<T, K>>;
demand<K extends string>(key: K | ReadonlyArray<K>, msg?: string | true): Argv<T & { [key in K]: unknown }>;
demand(key: string | ReadonlyArray<string>, required?: boolean): Argv<T>;
demand(positionals: number, msg: string): Argv<T>;
demand(positionals: number, required?: boolean): Argv<T>;
demand(positionals: number, max: number, msg?: string): Argv<T>;
/**
* @param key If is a string, show the usage information and exit if key wasn't specified in `process.argv`.
* If is an array, demand each element.
* @param msg If string is given, it will be printed when the argument is missing, instead of the standard error message.
* @param demand Controls whether the option is demanded; this is useful when using .options() to specify command line parameters.
*/
demandOption<K extends keyof T>(key: K | ReadonlyArray<K>, msg?: string | true): Argv<Defined<T, K>>;
demandOption<K extends string>(key: K | ReadonlyArray<K>, msg?: string | true): Argv<T & { [key in K]: unknown }>;
demandOption(key: string | ReadonlyArray<string>, demand?: boolean): Argv<T>;
/**
* Demand in context of commands.
* You can demand a minimum and a maximum number a user can have within your program, as well as provide corresponding error messages if either of the demands is not met.
*/
demandCommand(): Argv<T>;
demandCommand(min: number, minMsg?: string): Argv<T>;
demandCommand(min: number, max?: number, minMsg?: string, maxMsg?: string): Argv<T>;
/**
* Shows a [deprecated] notice in front of the option
*/
deprecateOption(option: string, msg?: string): Argv<T>;
/**
* Describe a `key` for the generated usage information.
*
* Optionally `.describe()` can take an object that maps keys to descriptions.
*/
describe(key: string | ReadonlyArray<string>, description: string): Argv<T>;
describe(descriptions: { [key: string]: string }): Argv<T>;
/** Should yargs attempt to detect the os' locale? Defaults to `true`. */
detectLocale(detect: boolean): Argv<T>;
/**
* Tell yargs to parse environment variables matching the given prefix and apply them to argv as though they were command line arguments.
*
* Use the "__" separator in the environment variable to indicate nested options. (e.g. prefix_nested__foo => nested.foo)
*
* If this method is called with no argument or with an empty string or with true, then all env vars will be applied to argv.
*
* Program arguments are defined in this order of precedence:
* 1. Command line args
* 2. Env vars
* 3. Config file/objects
* 4. Configured defaults
*
* Env var parsing is disabled by default, but you can also explicitly disable it by calling `.env(false)`, e.g. if you need to undo previous configuration.
*/
env(): Argv<T>;
env(prefix: string): Argv<T>;
env(enable: boolean): Argv<T>;
/** A message to print at the end of the usage instructions */
epilog(msg: string): Argv<T>;
/** A message to print at the end of the usage instructions */
epilogue(msg: string): Argv<T>;
/**
* Give some example invocations of your program.
* Inside `cmd`, the string `$0` will get interpolated to the current script name or node command for the present script similar to how `$0` works in bash or perl.
* Examples will be printed out as part of the help message.
*/
example(command: string, description: string): Argv<T>;
example(command: ReadonlyArray<[string, string?]>): Argv<T>;
/** Manually indicate that the program should exit, and provide context about why we wanted to exit. Follows the behavior set by `.exitProcess().` */
exit(code: number, err: Error): void;
/**
* By default, yargs exits the process when the user passes a help flag, the user uses the `.version` functionality, validation fails, or the command handler fails.
* Calling `.exitProcess(false)` disables this behavior, enabling further actions after yargs have been validated.
*/
exitProcess(enabled: boolean): Argv<T>;
/**
* Method to execute when a failure occurs, rather than printing the failure message.
* @param func Is called with the failure message that would have been printed, the Error instance originally thrown and yargs state when the failure occurred.
*/
fail(func: ((msg: string, err: Error, yargs: Argv<T>) => any) | boolean): Argv<T>;
/**
* Allows to programmatically get completion choices for any line.
* @param args An array of the words in the command line to complete.
* @param done The callback to be called with the resulting completions.
*/
getCompletion(args: ReadonlyArray<string>, done: (completions: ReadonlyArray<string>) => void): Argv<T>;
/**
* Indicate that an option (or group of options) should not be reset when a command is executed
*
* Options default to being global.
*/
global(key: string | ReadonlyArray<string>): Argv<T>;
/** Given a key, or an array of keys, places options under an alternative heading when displaying usage instructions */
group(key: string | ReadonlyArray<string>, groupName: string): Argv<T>;
/** Hides a key from the generated usage information. Unless a `--show-hidden` option is also passed with `--help` (see `showHidden()`). */
hide(key: string): Argv<T>;
/**
* Configure an (e.g. `--help`) and implicit command that displays the usage string and exits the process.
* By default yargs enables help on the `--help` option.
*
* Note that any multi-char aliases (e.g. `help`) used for the help option will also be used for the implicit command.
* If there are no multi-char aliases (e.g. `h`), then all single-char aliases will be used for the command.
*
* If invoked without parameters, `.help()` will use `--help` as the option and help as the implicit command to trigger help output.
*
* @param [description] Customizes the description of the help option in the usage string.
* @param [enableExplicit] If `false` is provided, it will disable --help.
*/
help(): Argv<T>;
help(enableExplicit: boolean): Argv<T>;
help(option: string, enableExplicit: boolean): Argv<T>;
help(option: string, description?: string, enableExplicit?: boolean): Argv<T>;
    /**
     * Given the key `x` is set, it is required that the key `y` is set.
     * `y` can either be the name of an argument to imply, a number indicating the position of an argument or an array of multiple implications to associate with `x`.
     *
     * Optionally `.implies()` can accept an object specifying multiple implications.
     */
    implies(key: string, value: string | ReadonlyArray<string>): Argv<T>;
    implies(implies: { [key: string]: string | ReadonlyArray<string> }): Argv<T>;
/**
* Return the locale that yargs is currently using.
*
* By default, yargs will auto-detect the operating system's locale so that yargs-generated help content will display in the user's language.
*/
locale(): string;
/**
* Override the auto-detected locale from the user's operating system with a static locale.
* Note that the OS locale can be modified by setting/exporting the `LC_ALL` environment variable.
*/
locale(loc: string): Argv<T>;
    /**
     * Define global middleware functions to be called first, in list order, for all CLI commands.
     * @param callbacks Can be a function or a list of functions. Each callback gets passed a reference to argv.
     * @param [applyBeforeValidation] Set to `true` to apply middleware before validation. This will execute the middleware prior to validation checks, but after parsing.
     */
    middleware(callbacks: MiddlewareFunction<T> | ReadonlyArray<MiddlewareFunction<T>>, applyBeforeValidation?: boolean): Argv<T>;
/**
* The number of arguments that should be consumed after a key. This can be a useful hint to prevent parsing ambiguity.
*
* Optionally `.nargs()` can take an object of `key`/`narg` pairs.
*/
nargs(key: string, count: number): Argv<T>;
nargs(nargs: { [key: string]: number }): Argv<T>;
/** The key provided represents a path and should have `path.normalize()` applied. */
normalize<K extends keyof T>(key: K | ReadonlyArray<K>): Argv<Omit<T, K> & { [key in K]: ToString<T[key]> }>;
normalize<K extends string>(key: K | ReadonlyArray<K>): Argv<T & { [key in K]: string | undefined }>;
/**
* Tell the parser to always interpret key as a number.
*
* If `key` is an array, all elements will be parsed as numbers.
*
* If the option is given on the command line without a value, `argv` will be populated with `undefined`.
*
* If the value given on the command line cannot be parsed as a number, `argv` will be populated with `NaN`.
*
* Note that decimals, hexadecimals, and scientific notation are all accepted.
*/
number<K extends keyof T>(key: K | ReadonlyArray<K>): Argv<Omit<T, K> & { [key in K]: ToNumber<T[key]> }>;
number<K extends string>(key: K | ReadonlyArray<K>): Argv<T & { [key in K]: number | undefined }>;
/**
* Method to execute when a command finishes successfully.
* @param func Is called with the successful result of the command that finished.
*/
onFinishCommand(func: (result: any) => void): Argv<T>;
/**
* This method can be used to make yargs aware of options that could exist.
* You can also pass an opt object which can hold further customization, like `.alias()`, `.demandOption()` etc. for that option.
*/
option<K extends keyof T, O extends Options>(key: K, options: O): Argv<Omit<T, K> & { [key in K]: InferredOptionType<O> }>;
option<K extends string, O extends Options>(key: K, options: O): Argv<T & { [key in K]: InferredOptionType<O> }>;
option<O extends { [key: string]: Options }>(options: O): Argv<Omit<T, keyof O> & InferredOptionTypes<O>>;
/**
* This method can be used to make yargs aware of options that could exist.
* You can also pass an opt object which can hold further customization, like `.alias()`, `.demandOption()` etc. for that option.
*/
options<K extends keyof T, O extends Options>(key: K, options: O): Argv<Omit<T, K> & { [key in K]: InferredOptionType<O> }>;
options<K extends string, O extends Options>(key: K, options: O): Argv<T & { [key in K]: InferredOptionType<O> }>;
options<O extends { [key: string]: Options }>(options: O): Argv<Omit<T, keyof O> & InferredOptionTypes<O>>;
/**
* Parse `args` instead of `process.argv`. Returns the `argv` object. `args` may either be a pre-processed argv array, or a raw argument string.
*
* Note: Providing a callback to parse() disables the `exitProcess` setting until after the callback is invoked.
* @param [context] Provides a useful mechanism for passing state information to commands
*/
parse(): { [key in keyof Arguments<T>]: Arguments<T>[key] } | Promise<{ [key in keyof Arguments<T>]: Arguments<T>[key] }>;
parse(arg: string | ReadonlyArray<string>, context?: object, parseCallback?: ParseCallback<T>): { [key in keyof Arguments<T>]: Arguments<T>[key] }
| Promise<{ [key in keyof Arguments<T>]: Arguments<T>[key] }>;
parseSync(): { [key in keyof Arguments<T>]: Arguments<T>[key] };
parseSync(arg: string | ReadonlyArray<string>, context?: object, parseCallback?: ParseCallback<T>): { [key in keyof Arguments<T>]: Arguments<T>[key] };
parseAsync(): Promise<{ [key in keyof Arguments<T>]: Arguments<T>[key] }>;
parseAsync(arg: string | ReadonlyArray<string>, context?: object, parseCallback?: ParseCallback<T>): Promise<{ [key in keyof Arguments<T>]: Arguments<T>[key] }>;
/**
* If the arguments have not been parsed, this property is `false`.
*
* If the arguments have been parsed, this contain detailed parsed arguments.
*/
parsed: DetailedArguments | false;
/** Allows to configure advanced yargs features. */
parserConfiguration(configuration: Partial<ParserConfigurationOptions>): Argv<T>;
/**
* Similar to `config()`, indicates that yargs should interpret the object from the specified key in package.json as a configuration object.
* @param [cwd] If provided, the package.json will be read from this location
*/
pkgConf(key: string | ReadonlyArray<string>, cwd?: string): Argv<T>;
    /**
     * Allows you to configure a command's positional arguments with an API similar to `.option()`.
     * `.positional()` should be called in a command's builder function, and is not available on the top-level yargs instance; calling it there will throw an error.
     */
    positional<K extends keyof T, O extends PositionalOptions>(key: K, opt: O): Argv<Omit<T, K> & { [key in K]: InferredOptionType<O> }>;
    positional<K extends string, O extends PositionalOptions>(key: K, opt: O): Argv<T & { [key in K]: InferredOptionType<O> }>;
/** Should yargs provide suggestions regarding similar commands if no matching command is found? */
recommendCommands(): Argv<T>;
/**
* @deprecated since version 6.6.0
* Use '.demandCommand()' or '.demandOption()' instead
*/
require<K extends keyof T>(key: K | ReadonlyArray<K>, msg?: string | true): Argv<Defined<T, K>>;
require(key: string, msg: string): Argv<T>;
require(key: string, required: boolean): Argv<T>;
require(keys: ReadonlyArray<number>, msg: string): Argv<T>;
require(keys: ReadonlyArray<number>, required: boolean): Argv<T>;
require(positionals: number, required: boolean): Argv<T>;
require(positionals: number, msg: string): Argv<T>;
/**
* @deprecated since version 6.6.0
* Use '.demandCommand()' or '.demandOption()' instead
*/
required<K extends keyof T>(key: K | ReadonlyArray<K>, msg?: string | true): Argv<Defined<T, K>>;
required(key: string, msg: string): Argv<T>;
required(key: string, required: boolean): Argv<T>;
required(keys: ReadonlyArray<number>, msg: string): Argv<T>;
required(keys: ReadonlyArray<number>, required: boolean): Argv<T>;
required(positionals: number, required: boolean): Argv<T>;
required(positionals: number, msg: string): Argv<T>;
requiresArg(key: string | ReadonlyArray<string>): Argv<T>;
/** Set the name of your script ($0). Default is the base filename executed by node (`process.argv[1]`) */
scriptName($0: string): Argv<T>;
/**
* Generate a bash completion script.
* Users of your application can install this script in their `.bashrc`, and yargs will provide completion shortcuts for commands and options.
*/
showCompletionScript(): Argv<T>;
/**
* Configure the `--show-hidden` option that displays the hidden keys (see `hide()`).
* @param option If `boolean`, it enables/disables this option altogether. i.e. hidden keys will be permanently hidden if first argument is `false`.
* If `string` it changes the key name ("--show-hidden").
* @param description Changes the default description ("Show hidden options")
*/
showHidden(option?: string | boolean): Argv<T>;
showHidden(option: string, description?: string): Argv<T>;
/**
* Print the usage data using the console function consoleLevel for printing.
* @param [consoleLevel='error']
*/
showHelp(consoleLevel?: string): Argv<T>;
/**
* Provide the usage data as a string.
* @param printCallback a function with a single argument.
*/
showHelp(printCallback: (s: string) => void): Argv<T>;
/**
* By default, yargs outputs a usage string if any error is detected.
* Use the `.showHelpOnFail()` method to customize this behavior.
* @param enable If `false`, the usage string is not output.
* @param [message] Message that is output after the error message.
*/
showHelpOnFail(enable: boolean, message?: string): Argv<T>;
/** Specifies either a single option key (string), or an array of options. If any of the options is present, yargs validation is skipped. */
skipValidation(key: string | ReadonlyArray<string>): Argv<T>;
/**
* Any command-line argument given that is not demanded, or does not have a corresponding description, will be reported as an error.
*
* Unrecognized commands will also be reported as errors.
*/
strict(): Argv<T>;
strict(enabled: boolean): Argv<T>;
/**
* Similar to .strict(), except that it only applies to unrecognized commands.
* A user can still provide arbitrary options, but unknown positional commands
* will raise an error.
*/
strictCommands(): Argv<T>;
strictCommands(enabled: boolean): Argv<T>;
/**
* Similar to `.strict()`, except that it only applies to unrecognized options. A
* user can still provide arbitrary positional options, but unknown options
* will raise an error.
*/
strictOptions(): Argv<T>;
strictOptions(enabled: boolean): Argv<T>;
/**
* Tell the parser logic not to interpret `key` as a number or boolean. This can be useful if you need to preserve leading zeros in an input.
*
* If `key` is an array, interpret all the elements as strings.
*
* `.string('_')` will result in non-hyphenated arguments being interpreted as strings, regardless of whether they resemble numbers.
*/
string<K extends keyof T>(key: K | ReadonlyArray<K>): Argv<Omit<T, K> & { [key in K]: ToString<T[key]> }>;
string<K extends string>(key: K | ReadonlyArray<K>): Argv<T & { [key in K]: string | undefined }>;
// Intended to be used with '.wrap()'
terminalWidth(): number;
updateLocale(obj: { [key: string]: string }): Argv<T>;
    /**
     * Set a usage message to show which commands to use.
     * Inside `message`, the string `$0` will get interpolated to the current script name or node command for the present script similar to how `$0` works in bash or perl.
     *
     * If the optional `description`/`builder`/`handler` are provided, `.usage()` acts as an alias for `.command()`.
     * This allows you to use `.usage()` to configure the default command that will be run as an entry-point to your application
     * and allows you to provide configuration for the positional arguments accepted by your program:
     */
    usage(message: string): Argv<T>;
    usage<U>(command: string | ReadonlyArray<string>, description: string, builder?: (args: Argv<T>) => Argv<U>, handler?: (args: Arguments<U>) => void): Argv<T>;
    usage<U>(command: string | ReadonlyArray<string>, showInHelp: boolean, builder?: (args: Argv<T>) => Argv<U>, handler?: (args: Arguments<U>) => void): Argv<T>;
    usage<O extends { [key: string]: Options }>(command: string | ReadonlyArray<string>, description: string, builder?: O, handler?: (args: Arguments<InferredOptionTypes<O>>) => void): Argv<T>;
    usage<O extends { [key: string]: Options }>(command: string | ReadonlyArray<string>, showInHelp: boolean, builder?: O, handler?: (args: Arguments<InferredOptionTypes<O>>) => void): Argv<T>;
/**
* Add an option (e.g. `--version`) that displays the version number (given by the version parameter) and exits the process.
* By default yargs enables version for the `--version` option.
*
* If no arguments are passed to version (`.version()`), yargs will parse the package.json of your module and use its version value.
*
* If the boolean argument `false` is provided, it will disable `--version`.
*/
version(): Argv<T>;
version(version: string): Argv<T>;
version(enable: boolean): Argv<T>;
version(optionKey: string, version: string): Argv<T>;
version(optionKey: string, description: string, version: string): Argv<T>;
/**
* Format usage output to wrap at columns many columns.
*
* By default wrap will be set to `Math.min(80, windowWidth)`. Use `.wrap(null)` to specify no column limit (no right-align).
* Use `.wrap(yargs.terminalWidth())` to maximize the width of yargs' usage instructions.
*/
wrap(columns: number | null): Argv<T>;
}
    /** The parsed `argv` object: the user-defined option types `T` merged with the yargs built-ins (`_` and `$0`). */
    type Arguments<T = {}> = T & {
        /** Non-option arguments */
        _: Array<string | number>;
        /** The script name or node command */
        $0: string;
        /** All remaining options */
        [argName: string]: unknown;
    };
interface RequireDirectoryOptions {
/** Look for command modules in all subdirectories and apply them as a flattened (non-hierarchical) list. */
recurse?: boolean;
/** The types of files to look for when requiring command modules. */
extensions?: ReadonlyArray<string>;
/**
* A synchronous function called for each command module encountered.
* Accepts `commandObject`, `pathToFile`, and `filename` as arguments.
* Returns `commandObject` to include the command; any falsy value to exclude/skip it.
*/
visit?: (commandObject: any, pathToFile?: string, filename?: string) => any;
/** Whitelist certain modules */
include?: RegExp | ((pathToFile: string) => boolean);
/** Blacklist certain modules. */
exclude?: RegExp | ((pathToFile: string) => boolean);
}
interface Options {
/** string or array of strings, alias(es) for the canonical option key, see `alias()` */
alias?: string | ReadonlyArray<string>;
/** boolean, interpret option as an array, see `array()` */
array?: boolean;
/** boolean, interpret option as a boolean flag, see `boolean()` */
boolean?: boolean;
/** value or array of values, limit valid option arguments to a predefined set, see `choices()` */
choices?: Choices;
/** function, coerce or transform parsed command line values into another value, see `coerce()` */
coerce?: (arg: any) => any;
/** boolean, interpret option as a path to a JSON config file, see `config()` */
config?: boolean;
/** function, provide a custom config parsing function, see `config()` */
configParser?: (configPath: string) => object;
/** string or object, require certain keys not to be set, see `conflicts()` */
conflicts?: string | ReadonlyArray<string> | { [key: string]: string | ReadonlyArray<string> };
/** boolean, interpret option as a count of boolean flags, see `count()` */
count?: boolean;
/** value, set a default value for the option, see `default()` */
default?: any;
/** string, use this description for the default value in help content, see `default()` */
defaultDescription?: string;
/**
* @deprecated since version 6.6.0
* Use 'demandOption' instead
*/
demand?: boolean | string;
/** boolean or string, mark the argument as deprecated, see `deprecateOption()` */
deprecate?: boolean | string;
/** boolean or string, mark the argument as deprecated, see `deprecateOption()` */
deprecated?: boolean | string;
/** boolean or string, demand the option be given, with optional error message, see `demandOption()` */
demandOption?: boolean | string;
/** string, the option description for help content, see `describe()` */
desc?: string;
/** string, the option description for help content, see `describe()` */
describe?: string;
/** string, the option description for help content, see `describe()` */
description?: string;
/** boolean, indicate that this key should not be reset when a command is invoked, see `global()` */
global?: boolean;
/** string, when displaying usage instructions place the option under an alternative group heading, see `group()` */
group?: string;
/** don't display option in help output. */
hidden?: boolean;
/** string or object, require certain keys to be set, see `implies()` */
implies?: string | ReadonlyArray<string> | { [key: string]: string | ReadonlyArray<string> };
/** number, specify how many arguments should be consumed for the option, see `nargs()` */
nargs?: number;
/** boolean, apply path.normalize() to the option, see `normalize()` */
normalize?: boolean;
/** boolean, interpret option as a number, `number()` */
number?: boolean;
/**
* @deprecated since version 6.6.0
* Use 'demandOption' instead
*/
require?: boolean | string;
/**
* @deprecated since version 6.6.0
* Use 'demandOption' instead
*/
required?: boolean | string;
/** boolean, require the option be specified with a value, see `requiresArg()` */
requiresArg?: boolean;
/** boolean, skips validation if the option is present, see `skipValidation()` */
skipValidation?: boolean;
/** boolean, interpret option as a string, see `string()` */
string?: boolean;
type?: "array" | "count" | PositionalOptionsType;
}
interface PositionalOptions {
/** string or array of strings, see `alias()` */
alias?: string | ReadonlyArray<string>;
/** boolean, interpret option as an array, see `array()` */
array?: boolean;
/** value or array of values, limit valid option arguments to a predefined set, see `choices()` */
choices?: Choices;
/** function, coerce or transform parsed command line values into another value, see `coerce()` */
coerce?: (arg: any) => any;
/** string or object, require certain keys not to be set, see `conflicts()` */
conflicts?: string | ReadonlyArray<string> | { [key: string]: string | ReadonlyArray<string> };
/** value, set a default value for the option, see `default()` */
default?: any;
/** boolean or string, demand the option be given, with optional error message, see `demandOption()` */
demandOption?: boolean | string;
/** string, the option description for help content, see `describe()` */
desc?: string;
/** string, the option description for help content, see `describe()` */
describe?: string;
/** string, the option description for help content, see `describe()` */
description?: string;
/** string or object, require certain keys to be set, see `implies()` */
implies?: string | ReadonlyArray<string> | { [key: string]: string | ReadonlyArray<string> };
/** boolean, apply path.normalize() to the option, see normalize() */
normalize?: boolean;
type?: PositionalOptionsType;
}
    /** Remove keys K in T (local helper; predates the built-in `Omit` introduced in TypeScript 3.5). */
    type Omit<T, K> = { [key in Exclude<keyof T, K>]: T[key] };
    /** Remove undefined as a possible value for keys K in T */
    type Defined<T, K extends keyof T> = Omit<T, K> & { [key in K]: Exclude<T[key], undefined> };
    /** Convert T to T[] and T | undefined to T[] | undefined */
    type ToArray<T> = Array<Exclude<T, undefined>> | Extract<T, undefined>;
    /** Gives string[] if T is an array type, otherwise string. Preserves | undefined. */
    type ToString<T> = (Exclude<T, undefined> extends any[] ? string[] : string) | Extract<T, undefined>;
    /** Gives number[] if T is an array type, otherwise number. Preserves | undefined. */
    type ToNumber<T> = (Exclude<T, undefined> extends any[] ? number[] : number) | Extract<T, undefined>;
    /**
     * Resolves the runtime type contributed by a single option specification.
     * When the option is demanded (`required`/`require`/`demand`/`demandOption` set to `true` or a
     * message string), `undefined` is stripped from the inferred type.
     */
    type InferredOptionType<O extends Options | PositionalOptions> =
        O extends (
        | { required: string | true }
        | { require: string | true }
        | { demand: string | true }
        | { demandOption: string | true }
    ) ?
        Exclude<InferredOptionTypeInner<O>, undefined> :
        InferredOptionTypeInner<O>;
    /** Infers the option type before the demanded-ness adjustment: an explicit `default`/`coerce` wins, then `count`, then the declared type (made optional with `| undefined`). */
    type InferredOptionTypeInner<O extends Options | PositionalOptions> =
        O extends { default: any, coerce: (arg: any) => infer T } ? T :
        O extends { default: infer D } ? D :
        O extends { type: "count" } ? number :
        O extends { count: true } ? number :
        RequiredOptionType<O> | undefined;
    /** Maps the declarative option flags (`type`, `array`, `string`, `number`, `boolean`, `normalize`, `choices`, `coerce`) to the corresponding TypeScript type. Combined flags (e.g. array-of-string) are matched before single flags. */
    type RequiredOptionType<O extends Options | PositionalOptions> =
        O extends { type: "array", string: true } ? string[] :
        O extends { type: "array", number: true } ? number[] :
        O extends { type: "array", normalize: true } ? string[] :
        O extends { type: "string", array: true } ? string[] :
        O extends { type: "number", array: true } ? number[] :
        O extends { string: true, array: true } ? string[] :
        O extends { number: true, array: true } ? number[] :
        O extends { normalize: true, array: true } ? string[] :
        O extends { type: "array" } ? Array<string | number> :
        O extends { type: "boolean" } ? boolean :
        O extends { type: "number" } ? number :
        O extends { type: "string" } ? string :
        O extends { array: true } ? Array<string | number> :
        O extends { boolean: true } ? boolean :
        O extends { number: true } ? number :
        O extends { string: true } ? string :
        O extends { normalize: true } ? string :
        O extends { choices: ReadonlyArray<infer C> } ? C :
        O extends { coerce: (arg: any) => infer T } ? T :
        unknown;
    /** Applies `InferredOptionType` to every entry of an options map. */
    type InferredOptionTypes<O extends { [key: string]: Options }> = { [key in keyof O]: InferredOptionType<O[key]> };
    /** A self-contained command definition that can be registered via `.command(module)` or discovered by `commandDir()`. */
    interface CommandModule<T = {}, U = {}> {
        /** array of strings (or a single string) representing aliases of `exports.command`, positional args defined in an alias are ignored */
        aliases?: ReadonlyArray<string> | string;
        /** object declaring the options the command accepts, or a function accepting and returning a yargs instance */
        builder?: CommandBuilder<T, U>;
        /** string (or array of strings) that executes this command when given on the command line, first string may contain positional args */
        command?: ReadonlyArray<string> | string;
        /** boolean (or string) to show deprecation notice */
        deprecated?: boolean | string;
        /** string used as the description for the command in help text, use `false` for a hidden command */
        describe?: string | false;
        /** a function which will be passed the parsed argv. */
        handler: (args: Arguments<U>) => void;
    }
    /** Callback passed to `.parse()`: receives the parse error (if any), the resulting argv and the generated help/error output. */
    type ParseCallback<T = {}> = (err: Error | undefined, argv: Arguments<T>|Promise<Arguments<T>>, output: string) => void;
    /** A command's builder: either an options map or a function that receives a yargs instance and returns one (possibly asynchronously). */
    type CommandBuilder<T = {}, U = {}> = { [key: string]: Options } | ((args: Argv<T>) => Argv<U>) | ((args: Argv<T>) => PromiseLike<Argv<U>>);
    /** Synchronous custom completion handler, see `completion()`. */
    type SyncCompletionFunction = (current: string, argv: any) => string[];
    /** Asynchronous (callback-style) custom completion handler, see `completion()`. */
    type AsyncCompletionFunction = (current: string, argv: any, done: (completion: ReadonlyArray<string>) => void) => void;
    /** Promise-returning custom completion handler, see `completion()`. */
    type PromiseCompletionFunction = (current: string, argv: any) => Promise<string[]>;
    /** Middleware callback registered via `middleware()`, invoked with the parsed argv. */
    type MiddlewareFunction<T = {}> = (args: Arguments<T>) => void;
    /** Valid values for an option's `choices` restriction. */
    type Choices = ReadonlyArray<string | number | true | undefined>;
    /** Valid values for the `type` key of positional options. */
    type PositionalOptionsType = "boolean" | "number" | "string";
}
// The single global yargs instance, exported CommonJS-style (`export =`) so it
// can be consumed via `import yargs = require('yargs')`.
declare var yargs: yargs.Argv;
export = yargs;
| georgemarshall/DefinitelyTyped | types/yargs/index.d.ts | TypeScript | mit | 48,896 |
๏ปฟusing Mono.Cecil;
namespace Cake.Web.Docs.Reflection.Model
{
    /// <summary>
    /// Represents reflected method information: the method's identity string,
    /// its Mono.Cecil definition and its associated documentation metadata.
    /// </summary>
    public interface IMethodInfo
    {
        /// <summary>
        /// Gets the method identity.
        /// </summary>
        /// <value>The method identity.</value>
        string Identity { get; }
        /// <summary>
        /// Gets the method definition.
        /// </summary>
        /// <value>
        /// The method definition.
        /// </value>
        MethodDefinition Definition { get; }
        /// <summary>
        /// Gets the associated documentation metadata.
        /// </summary>
        /// <value>The associated metadata.</value>
        IDocumentationMetadata Metadata { get; }
    }
}
| naasking/website | src/Cake.Web.Docs/Reflection/Model/IMethodInfo.cs | C# | mit | 701 |
package im.actor.server.api.rpc.service
import im.actor.api.rpc.Implicits._
import im.actor.api.rpc._
import im.actor.api.rpc.counters.UpdateCountersChanged
import im.actor.api.rpc.groups.{ UpdateGroupInvite, UpdateGroupUserInvited }
import im.actor.api.rpc.messaging._
import im.actor.api.rpc.misc.ResponseVoid
import im.actor.api.rpc.peers.{ GroupOutPeer, PeerType }
import im.actor.server._
import im.actor.server.api.rpc.service.groups.{ GroupInviteConfig, GroupsServiceImpl }
import im.actor.server.group.GroupOffice
import im.actor.server.presences.{ GroupPresenceManager, PresenceManager }
import im.actor.server.util.ACLUtils
import scala.concurrent.Future
import scala.util.Random
// Integration spec for message history: loading history and dialogs, and
// propagating received/read state (with sequence updates) for both private
// and group peers. Relies on real timing (Thread.sleep / futureSleep), so
// statement order and delays are significant.
class MessagingServiceHistorySpec extends BaseAppSuite with GroupsServiceHelpers
  with ImplicitFileStorageAdapter
  with ImplicitSessionRegionProxy
  with ImplicitGroupRegions
  with ImplicitAuthService
  with ImplicitSequenceService
  with SequenceMatchers {
  behavior of "MessagingServiceHistoryService"
  "Private messaging" should "load history" in s.privat
  it should "load dialogs" in s.dialogs // TODO: remove this test's dependency on previous example
  it should "mark messages received and send updates" in s.historyPrivate.markReceived
  it should "mark messages read and send updates" in s.historyPrivate.markRead
  "Group messaging" should "mark messages received and send updates" in s.historyGroup.markReceived
  it should "mark messages read and send updates" in s.historyGroup.markRead
  it should "Load all history in public groups" in s.public
  implicit private val presenceManagerRegion = PresenceManager.startRegion()
  implicit private val groupPresenceManagerRegion = GroupPresenceManager.startRegion()
  private val groupInviteConfig = GroupInviteConfig("http://actor.im")
  implicit private val service = messaging.MessagingServiceImpl(mediator)
  implicit private val groupsService = new GroupsServiceImpl(groupInviteConfig)
  // Shared fixtures (two users with peers resolved both ways) plus the
  // scenario implementations referenced by the examples above.
  private object s {
    val (user1, authId1, _) = createUser()
    val sessionId1 = createSessionId()
    val (user2, authId2, _) = createUser()
    val sessionId2 = createSessionId()
    val clientData1 = ClientData(authId1, sessionId1, Some(user1.id))
    val clientData2 = ClientData(authId2, sessionId2, Some(user2.id))
    val user1Model = getUserModel(user1.id)
    val user1AccessHash = ACLUtils.userAccessHash(authId2, user1.id, user1Model.accessSalt)
    val user1Peer = peers.OutPeer(PeerType.Private, user1.id, user1AccessHash)
    val user2Model = getUserModel(user2.id)
    val user2AccessHash = ACLUtils.userAccessHash(authId1, user2.id, user2Model.accessSalt)
    val user2Peer = peers.OutPeer(PeerType.Private, user2.id, user2AccessHash)
    // user1 sends four private messages; user2 marks the second received and
    // the first read; user1 then loads history before message 3 and expects
    // states Sent / Received / Read for the three visible messages.
    def privat() = {
      val step = 100L
      val (message1Date, message2Date, message3Date) = {
        implicit val clientData = clientData1
        val message1Date = whenReady(service.handleSendMessage(user2Peer, Random.nextLong(), TextMessage("Hi Shiva 1", Vector.empty, None)))(_.toOption.get.date)
        Thread.sleep(step)
        val message2Date = whenReady(service.handleSendMessage(user2Peer, Random.nextLong(), TextMessage("Hi Shiva 2", Vector.empty, None)))(_.toOption.get.date)
        Thread.sleep(step)
        val message3Date = whenReady(service.handleSendMessage(user2Peer, Random.nextLong(), TextMessage("Hi Shiva 3", Vector.empty, None)))(_.toOption.get.date)
        Thread.sleep(step)
        whenReady(service.handleSendMessage(user2Peer, Random.nextLong(), TextMessage("Hi Shiva 4", Vector.empty, None)))(_ โ ())
        (message1Date, message2Date, message3Date)
      }
      Thread.sleep(300)
      {
        implicit val clientData = clientData2
        whenReady(service.handleMessageReceived(user1Peer, message2Date)) { resp โ
          resp should matchPattern {
            case Ok(ResponseVoid) โ
          }
        }
        whenReady(service.handleMessageRead(user1Peer, message1Date)) { resp โ
          resp should matchPattern {
            case Ok(ResponseVoid) โ
          }
        }
      }
      Thread.sleep(1000)
      {
        implicit val clientData = clientData1
        whenReady(service.handleLoadHistory(user2Peer, message3Date, 100)) { resp โ
          resp should matchPattern {
            case Ok(_) โ
          }
          val respBody = resp.toOption.get
          respBody.users.length should ===(0)
          respBody.history.length should ===(3)
          respBody.history.map(_.state) should ===(Seq(Some(MessageState.Sent), Some(MessageState.Received), Some(MessageState.Read)))
        }
      }
    }
    // Checks dialog lists and unread counters for both users.
    // NOTE(review): depends on privat() having already run (see TODO above).
    def dialogs() = {
      {
        implicit val clientData = clientData1
        whenReady(service.handleLoadDialogs(0, 100)) { resp โ
          resp should matchPattern {
            case Ok(_) โ
          }
          val respBody = resp.toOption.get
          respBody.dialogs.length should ===(1)
          val dialog = respBody.dialogs.head
          dialog.unreadCount should ===(0)
          respBody.users.length should ===(2)
        }
      }
      {
        implicit val clientData = clientData2
        whenReady(service.handleLoadDialogs(0, 100)) { resp โ
          resp should matchPattern {
            case Ok(_) โ
          }
          val respBody = resp.toOption.get
          respBody.dialogs.length should ===(1)
          val dialog = respBody.dialogs.head
          dialog.unreadCount should ===(3)
          respBody.users.length should ===(1)
        }
      }
    }
    // A third user creates a public group; user1 and user2 enter it and each
    // send a message; user2 then loads history and expects all four entries
    // (two texts plus two service messages).
    def public() = {
      val groupId = Random.nextInt
      val (pubUser, pubAuthId, _) = createUser()
      val accessHash = whenReady(GroupOffice.create(groupId, pubUser.id, pubAuthId, "Public group", Random.nextLong, Set.empty))(_.accessHash)
      whenReady(GroupOffice.makePublic(groupId, "Public group description"))(identity)
      val groupOutPeer = GroupOutPeer(groupId, accessHash)
      val firstMessage = TextMessage("First", Vector.empty, None)
      val secondMessage = TextMessage("Second", Vector.empty, None)
      {
        implicit val clientData = clientData1
        whenReady(groupsService.handleEnterGroup(groupOutPeer))(identity)
        whenReady(service.handleSendMessage(groupOutPeer.asOutPeer, Random.nextLong(), firstMessage))(identity)
      }
      {
        implicit val clientData = clientData2
        whenReady(groupsService.handleEnterGroup(groupOutPeer))(identity)
        whenReady(service.handleSendMessage(groupOutPeer.asOutPeer, Random.nextLong(), secondMessage))(identity)
        Thread.sleep(2000)
        whenReady(service.handleLoadHistory(groupOutPeer.asOutPeer, 0, 100)) { resp โ
          val history = resp.toOption.get.history
          //history does not contain message about group creation, as group was not created by Zero user
          history.length shouldEqual 4
          history.map(_.message) should contain allOf (firstMessage, secondMessage)
        }
      }
    }
    // Private-peer receive/read propagation scenarios with fresh users.
    object historyPrivate {
      // NOTE(review): this user1/authId1 shadow s.user1/s.authId1 — confirm intended.
      val (user1, authId1, _) = createUser()
      // user1 sends three messages at staggered times; user2 marks "received"
      // at startDate+2000 and the db dialog plus user1's update stream are checked.
      def markReceived() = {
        val (user2, authId2, _) = createUser()
        // NOTE(review): the local `sessionId` below is unused — clientData1/2
        // reuse s.sessionId1/s.sessionId2; confirm this is intended.
        val sessionId = createSessionId()
        val clientData1 = ClientData(authId1, sessionId1, Some(user1.id))
        val clientData2 = ClientData(authId2, sessionId2, Some(user2.id))
        val user1AccessHash = ACLUtils.userAccessHash(authId2, user1.id, getUserModel(user1.id).accessSalt)
        val user1Peer = peers.OutPeer(PeerType.Private, user1.id, user1AccessHash)
        val user2AccessHash = ACLUtils.userAccessHash(authId1, user2.id, getUserModel(user2.id).accessSalt)
        val user2Peer = peers.OutPeer(PeerType.Private, user2.id, user2AccessHash)
        val startDate = {
          implicit val clientData = clientData1
          val startDate = System.currentTimeMillis()
          val sendMessages = Future.sequence(Seq(
            service.handleSendMessage(user2Peer, Random.nextLong(), TextMessage("Hi Shiva 1", Vector.empty, None)),
            futureSleep(1500).flatMap(_ โ service.handleSendMessage(user2Peer, Random.nextLong(), TextMessage("Hi Shiva 2", Vector.empty, None))),
            futureSleep(3000).flatMap(_ โ service.handleSendMessage(user2Peer, Random.nextLong(), TextMessage("Hi Shiva 3", Vector.empty, None)))
          ))
          whenReady(sendMessages)(_ โ ())
          startDate
        }
        {
          implicit val clientData = clientData2
          whenReady(service.handleMessageReceived(user1Peer, startDate + 2000)) { resp โ
            resp should matchPattern {
              case Ok(ResponseVoid) โ
            }
          }
          Thread.sleep(100) // Let peer managers write to db
          whenReady(db.run(persist.Dialog.find(user1.id, models.Peer.privat(user2.id)))) { dialogOpt โ
            dialogOpt.get.lastReceivedAt.getMillis should be < startDate + 3000
            dialogOpt.get.lastReceivedAt.getMillis should be > startDate + 1000
          }
        }
        {
          implicit val clientData = clientData1
          expectUpdatesOrdered(failUnmatched)(0, Array.empty, List(
            UpdateMessageSent.header,
            UpdateMessageSent.header,
            UpdateMessageSent.header,
            UpdateMessageReceived.header
          )) {
            case (UpdateMessageSent.header, update) โ parseUpdate[UpdateMessageSent](update)
            case (UpdateMessageReceived.header, update) โ parseUpdate[UpdateMessageReceived](update)
          }
        }
      }
      // Same flow for "read": user2 (first device) marks read; db state, the
      // unread counter and the update streams of all three devices are checked.
      def markRead() = {
        val (user1, authId1, _) = createUser()
        val (user2, authId21, _) = createUser()
        val sessionId = createSessionId()
        val clientData1 = ClientData(authId1, sessionId, Some(user1.id))
        val clientData21 = ClientData(authId21, sessionId, Some(user2.id))
        val clientData22 = ClientData(createAuthId(user2.id), sessionId, Some(user2.id))
        val user1AccessHash = ACLUtils.userAccessHash(authId21, user1.id, getUserModel(user1.id).accessSalt)
        val user1Peer = peers.OutPeer(PeerType.Private, user1.id, user1AccessHash)
        val user2AccessHash = ACLUtils.userAccessHash(authId1, user2.id, getUserModel(user2.id).accessSalt)
        val user2Peer = peers.OutPeer(PeerType.Private, user2.id, user2AccessHash)
        val startDate = {
          implicit val clientData = clientData1
          val startDate = System.currentTimeMillis()
          val sendMessages = Future.sequence(Seq(
            service.handleSendMessage(user2Peer, Random.nextLong(), TextMessage("Hi Shiva 1", Vector.empty, None)),
            futureSleep(1500).flatMap(_ โ service.handleSendMessage(user2Peer, Random.nextLong(), TextMessage("Hi Shiva 2", Vector.empty, None))),
            futureSleep(3000).flatMap(_ โ service.handleSendMessage(user2Peer, Random.nextLong(), TextMessage("Hi Shiva 3", Vector.empty, None)))
          ))
          whenReady(sendMessages)(_ โ ())
          startDate
        }
        {
          implicit val clientData = clientData21
          whenReady(service.handleMessageRead(user1Peer, startDate + 2000)) { resp โ
            resp should matchPattern {
              case Ok(ResponseVoid) โ
            }
          }
          Thread.sleep(100) // Let peer managers write to db
          whenReady(db.run(persist.Dialog.find(user1.id, models.Peer.privat(user2.id)))) { optDialog โ
            val dialog = optDialog.get
            dialog.lastReadAt.getMillis should be < startDate + 3000
            dialog.lastReadAt.getMillis should be > startDate + 1000
          }
          whenReady(service.handleLoadDialogs(Long.MaxValue, 100)) { resp โ
            val dialog = resp.toOption.get.dialogs.head
            dialog.unreadCount shouldEqual 1
          }
        }
        {
          implicit val clientData = clientData1
          expectUpdatesOrdered(ignoreUnmatched)(0, Array.empty, List(
            UpdateMessageSent.header,
            UpdateMessageSent.header,
            UpdateMessageSent.header,
            UpdateMessageRead.header
          )) {
            case (UpdateMessageRead.header, update) โ parseUpdate[UpdateMessageRead](update)
          }
        }
        {
          implicit val clientData = clientData21
          expectUpdatesOrdered(ignoreUnmatched)(0, Array.empty, List(
            UpdateCountersChanged.header,
            UpdateMessage.header,
            UpdateCountersChanged.header,
            UpdateMessage.header,
            UpdateCountersChanged.header,
            UpdateMessage.header,
            UpdateCountersChanged.header
          )) {
            case _ โ
          }
        }
        {
          //UpdateMessageReadByMe sent to user2 second device
          implicit val clientData = clientData22
          expectUpdatesOrdered(ignoreUnmatched)(0, Array.empty, List(
            UpdateCountersChanged.header,
            UpdateMessage.header,
            UpdateCountersChanged.header,
            UpdateMessage.header,
            UpdateCountersChanged.header,
            UpdateMessage.header,
            UpdateCountersChanged.header,
            UpdateMessageReadByMe.header
          )) {
            case _ โ
          }
        }
      }
    }
    // Group-peer receive/read propagation scenarios with fresh users/groups.
    object historyGroup {
      // user1 creates a group with user2, sends three messages; user2 marks
      // "received" and db state plus user1's (unordered) updates are checked.
      def markReceived() = {
        val (user1, authId1, _) = createUser()
        val (user2, authId2, _) = createUser()
        val sessionId = createSessionId()
        val clientData1 = ClientData(authId1, sessionId, Some(user1.id))
        val clientData2 = ClientData(authId2, sessionId, Some(user2.id))
        val groupOutPeer = {
          implicit val clientData = clientData1
          createGroup("Fun group", Set(user2.id)).groupPeer
        }
        val startDate = System.currentTimeMillis()
        {
          implicit val clientData = clientData1
          val sendMessages = Future.sequence(Seq(
            service.handleSendMessage(groupOutPeer.asOutPeer, Random.nextLong(), TextMessage("Hi Shiva 1", Vector.empty, None)),
            futureSleep(1500).flatMap(_ โ service.handleSendMessage(groupOutPeer.asOutPeer, Random.nextLong(), TextMessage("Hi Shiva 2", Vector.empty, None))),
            futureSleep(3000).flatMap(_ โ service.handleSendMessage(groupOutPeer.asOutPeer, Random.nextLong(), TextMessage("Hi Shiva 3", Vector.empty, None)))
          ))
          whenReady(sendMessages)(_ โ ())
        }
        {
          implicit val clientData = clientData2
          whenReady(service.handleMessageReceived(groupOutPeer.asOutPeer, startDate + 2000)) { resp โ
            resp should matchPattern {
              case Ok(ResponseVoid) โ
            }
          }
          Thread.sleep(100) // Let peer managers write to db
          whenReady(db.run(persist.Dialog.find(user1.id, models.Peer.group(groupOutPeer.groupId)))) { dialogOpt โ
            dialogOpt.get.lastReceivedAt.getMillis should be < startDate + 3000
            dialogOpt.get.lastReceivedAt.getMillis should be > startDate + 1000
          }
        }
        {
          implicit val clientData = clientData1
          expectUpdatesUnorderedOnly(ignoreUnmatched)(0, Array.empty, List(
            UpdateGroupUserInvited.header,
            UpdateGroupInvite.header,
            UpdateMessageSent.header,
            UpdateMessageSent.header,
            UpdateMessageSent.header,
            UpdateMessageReceived.header
          )) {
            case _ โ
          }
        }
      }
      // Same group flow for "read"; checks db state, unread counter and the
      // (unordered) update streams of both participants.
      def markRead() = {
        val (user1, authId1, _) = createUser()
        val (user2, authId2, _) = createUser()
        val sessionId = createSessionId()
        val clientData1 = ClientData(authId1, sessionId, Some(user1.id))
        val clientData2 = ClientData(authId2, sessionId, Some(user2.id))
        val groupOutPeer = {
          implicit val clientData = clientData1
          createGroup("Fun group", Set(user2.id)).groupPeer
        }
        val startDate = System.currentTimeMillis()
        {
          implicit val clientData = clientData1
          val sendMessages = Future.sequence(Seq(
            service.handleSendMessage(groupOutPeer.asOutPeer, Random.nextLong(), TextMessage("Hi Shiva 1", Vector.empty, None)),
            futureSleep(1500).flatMap(_ โ service.handleSendMessage(groupOutPeer.asOutPeer, Random.nextLong(), TextMessage("Hi Shiva 2", Vector.empty, None))),
            futureSleep(3000).flatMap(_ โ service.handleSendMessage(groupOutPeer.asOutPeer, Random.nextLong(), TextMessage("Hi Shiva 3", Vector.empty, None)))
          ))
          whenReady(sendMessages)(_ โ ())
        }
        Thread.sleep(300)
        {
          implicit val clientData = clientData2
          whenReady(service.handleMessageRead(groupOutPeer.asOutPeer, startDate + 2000)) { resp โ
            resp should matchPattern {
              case Ok(ResponseVoid) โ
            }
          }
          Thread.sleep(300)
          whenReady(db.run(persist.Dialog.find(user1.id, models.Peer.group(groupOutPeer.groupId)))) { dialogOpt โ
            dialogOpt.get.lastReadAt.getMillis should be < startDate + 3000
            dialogOpt.get.lastReadAt.getMillis should be > startDate + 1000
          }
          whenReady(service.handleLoadDialogs(Long.MaxValue, 100)) { resp โ
            val dialog = resp.toOption.get.dialogs.head
            dialog.unreadCount shouldEqual 1
          }
        }
        {
          implicit val clientData = clientData1
          expectUpdatesUnorderedOnly(ignoreUnmatched)(0, Array.empty, List(
            UpdateGroupUserInvited.header,
            UpdateGroupInvite.header,
            UpdateMessageSent.header,
            UpdateMessageSent.header,
            UpdateMessageSent.header,
            UpdateMessageRead.header,
            UpdateMessage.header,
            UpdateCountersChanged.header
          )) {
            case (UpdateMessageRead.header, update) โ parseUpdate[UpdateMessageRead](update)
          }
        }
        {
          implicit val clientData = clientData2
          expectUpdatesUnorderedOnly(ignoreUnmatched)(0, Array.empty, List(
            UpdateGroupInvite.header,
            UpdateCountersChanged.header,
            UpdateMessage.header,
            UpdateCountersChanged.header,
            UpdateMessage.header,
            UpdateCountersChanged.header,
            UpdateMessage.header,
            UpdateMessageSent.header, //sent message with GroupServiceMessages.userJoined
            UpdateMessageReadByMe.header,
            UpdateCountersChanged.header
          )) {
            case (UpdateMessageReadByMe.header, update) โ parseUpdate[UpdateMessageReadByMe](update)
            case (UpdateMessageSent.header, update) โ parseUpdate[UpdateMessageSent](update)
          }
        }
      }
    }
  }
}
| nguyenhongson03/actor-platform | actor-server/actor-tests/src/test/scala/im/actor/server/api/rpc/service/MessagingServiceHistorySpec.scala | Scala | mit | 18,840 |
๏ปฟusing System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// Assembly-level COM interop metadata for this project.
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("2d357d86-395a-43e1-9c1b-da7a228f2e0a")]
<?php
/**
* This file is part of the Zephir.
*
* (c) Phalcon Team <team@zephir-lang.com>
*
* For the full copyright and license information, please view
* the LICENSE file that was distributed with this source code.
*/
namespace Zephir\Detectors;
/**
 * ForValueUseDetector.
 *
 * Detects whether the traversed variable is modified within the 'for's block.
 */
class ForValueUseDetector extends WriteDetector
{
    /**
     * ForValueUseDetector constructor.
     *
     * Initializes the detector with safe defaults: no detection flags are
     * set (DETECT_NONE), so nothing is flagged until flags are enabled.
     */
    public function __construct()
    {
        $this->setDetectionFlags(self::DETECT_NONE);
    }
}
| phalcon/zephir | Library/Detectors/ForValueUseDetector.php | PHP | mit | 630 |
<?php
namespace Platformsh\Cli\Command\Integration;
use Platformsh\Cli\Command\PlatformCommand;
use Symfony\Component\Console\Input\InputArgument;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Output\OutputInterface;
class IntegrationDeleteCommand extends PlatformCommand
{
    /**
     * {@inheritdoc}
     */
    protected function configure()
    {
        $this->setName('integration:delete');
        $this->addArgument('id', InputArgument::REQUIRED, 'The integration ID');
        $this->setDescription('Delete an integration from a project');
        $this->addProjectOption();
    }

    /**
     * Deletes the chosen integration from the selected project, asking the
     * user for confirmation first.
     *
     * @return int 0 on success, 1 on failure or cancellation.
     */
    protected function execute(InputInterface $input, OutputInterface $output)
    {
        $this->validateInput($input);

        $id = $input->getArgument('id');
        $integration = $this->getSelectedProject()->getIntegration($id);

        // Bail out early when the integration does not exist or cannot be removed.
        if (!$integration) {
            $this->stdErr->writeln("Integration not found: <error>$id</error>");

            return 1;
        }
        if (!$integration->operationAvailable('delete')) {
            $this->stdErr->writeln("The integration <error>$id</error> cannot be deleted");

            return 1;
        }

        // Ask for confirmation, showing the integration's type for context.
        $type = $integration->getProperty('type');
        $confirmed = $this->getHelper('question')->confirm(
            "Delete the integration <info>$id</info> (type: $type)?",
            $input,
            $this->stdErr
        );
        if (!$confirmed) {
            return 1;
        }

        $integration->delete();
        $this->stdErr->writeln("Deleted integration <info>$id</info>");

        return 0;
    }
}
| Fredplais/platformsh-cli | src/Command/Integration/IntegrationDeleteCommand.php | PHP | mit | 1,647 |
# Wind River Workbench generated Makefile.
# Do not edit!!!
#
# The file ".wrmakefile" is the template used by the Wind River Workbench to
# generate the makefiles of this project. Add user-specific build targets and
# make rules only(!) in this project's ".wrmakefile" file. These will then be
# automatically dumped into the makefiles.
WIND_HOME := $(subst \,/,$(WIND_HOME))
WIND_BASE := $(subst \,/,$(WIND_BASE))
WIND_USR := $(subst \,/,$(WIND_USR))
WRVX_COMPBASE := $(subst \,/,$(WRVX_COMPBASE))
all : pre_build main_all post_build
_clean ::
@echo "make: removing targets and objects of `pwd`"
TRACE=0
TRACEON=$(TRACE:0=@)
TRACE_FLAG=$(TRACEON:1=)
JOBS?=1
TARGET_JOBS?=$(JOBS)
MAKEFILE := Makefile
FLEXIBLE_BUILD := 1
BUILD_SPEC = SIMNTdiab
DEBUG_MODE = 1
ifeq ($(DEBUG_MODE),1)
MODE_DIR := Debug
else
MODE_DIR := NonDebug
endif
OBJ_DIR := .
WS_ROOT_DIR := C:/_HP
PRJ_ROOT_DIR := $(WS_ROOT_DIR)/StateMachine
#Global Build Macros
PROJECT_TYPE = DKM
DEFINES =
EXPAND_DBG = 0
#BuildSpec specific Build Macros
VX_CPU_FAMILY = simpc
CPU = SIMNT
TOOL_FAMILY = diab
TOOL = diab
TOOL_PATH =
CC_ARCH_SPEC = -tX86LH:vxworks69
VSB_DIR = $(WIND_BASE)/target/lib
VSB_CONFIG_FILE = $(VSB_DIR)/h/config/vsbConfig.h
LIBPATH =
LIBS =
IDE_INCLUDES = -I$(WIND_BASE)/target/h -I$(WIND_BASE)/target/h/wrn/coreip
IDE_LIBRARIES =
IDE_DEFINES = -DCPU=_VX_$(CPU) -DTOOL_FAMILY=$(TOOL_FAMILY) -DTOOL=$(TOOL) -D_WRS_KERNEL -D_VSB_CONFIG_FILE=\"$(VSB_DIR)/h/config/vsbConfig.h\" -DIP_PORT_VXWORKS=69
#BuildTool flags
ifeq ($(DEBUG_MODE),1)
DEBUGFLAGS_C-Compiler = -g
DEBUGFLAGS_C++-Compiler = -g
DEBUGFLAGS_Linker = -g
DEBUGFLAGS_Partial-Image-Linker =
DEBUGFLAGS_Librarian =
DEBUGFLAGS_Assembler = -g
else
DEBUGFLAGS_C-Compiler = -XO -Xsize-opt
DEBUGFLAGS_C++-Compiler = -XO -Xsize-opt
DEBUGFLAGS_Linker = -XO -Xsize-opt
DEBUGFLAGS_Partial-Image-Linker =
DEBUGFLAGS_Librarian =
DEBUGFLAGS_Assembler = -XO -Xsize-opt
endif
#Project Targets
PROJECT_TARGETS = StateMachine/$(MODE_DIR)/StateMachine.out \
StateMachine_partialImage/$(MODE_DIR)/StateMachine_partialImage.o
#Rules
# StateMachine
ifeq ($(DEBUG_MODE),1)
StateMachine/$(MODE_DIR)/% : DEBUGFLAGS_C-Compiler = -g
StateMachine/$(MODE_DIR)/% : DEBUGFLAGS_C++-Compiler = -g
StateMachine/$(MODE_DIR)/% : DEBUGFLAGS_Linker = -g
StateMachine/$(MODE_DIR)/% : DEBUGFLAGS_Partial-Image-Linker =
StateMachine/$(MODE_DIR)/% : DEBUGFLAGS_Librarian =
StateMachine/$(MODE_DIR)/% : DEBUGFLAGS_Assembler = -g
else
StateMachine/$(MODE_DIR)/% : DEBUGFLAGS_C-Compiler = -XO -Xsize-opt
StateMachine/$(MODE_DIR)/% : DEBUGFLAGS_C++-Compiler = -XO -Xsize-opt
StateMachine/$(MODE_DIR)/% : DEBUGFLAGS_Linker = -XO -Xsize-opt
StateMachine/$(MODE_DIR)/% : DEBUGFLAGS_Partial-Image-Linker =
StateMachine/$(MODE_DIR)/% : DEBUGFLAGS_Librarian =
StateMachine/$(MODE_DIR)/% : DEBUGFLAGS_Assembler = -XO -Xsize-opt
endif
StateMachine/$(MODE_DIR)/% : IDE_INCLUDES = -I$(WIND_BASE)/target/h -I$(WIND_BASE)/target/h/wrn/coreip
StateMachine/$(MODE_DIR)/% : IDE_LIBRARIES =
StateMachine/$(MODE_DIR)/% : IDE_DEFINES = -DCPU=_VX_$(CPU) -DTOOL_FAMILY=$(TOOL_FAMILY) -DTOOL=$(TOOL) -D_WRS_KERNEL -D_VSB_CONFIG_FILE=\"$(VSB_DIR)/h/config/vsbConfig.h\" -DIP_PORT_VXWORKS=69
StateMachine/$(MODE_DIR)/% : PROJECT_TYPE = DKM
StateMachine/$(MODE_DIR)/% : DEFINES =
StateMachine/$(MODE_DIR)/% : EXPAND_DBG = 0
StateMachine/$(MODE_DIR)/% : VX_CPU_FAMILY = simpc
StateMachine/$(MODE_DIR)/% : CPU = SIMNT
StateMachine/$(MODE_DIR)/% : TOOL_FAMILY = diab
StateMachine/$(MODE_DIR)/% : TOOL = diab
StateMachine/$(MODE_DIR)/% : TOOL_PATH =
StateMachine/$(MODE_DIR)/% : CC_ARCH_SPEC = -tX86LH:vxworks69
StateMachine/$(MODE_DIR)/% : VSB_DIR = $(WIND_BASE)/target/lib
StateMachine/$(MODE_DIR)/% : VSB_CONFIG_FILE = $(VSB_DIR)/h/config/vsbConfig.h
StateMachine/$(MODE_DIR)/% : LIBPATH =
StateMachine/$(MODE_DIR)/% : LIBS =
StateMachine/$(MODE_DIR)/% : OBJ_DIR := StateMachine/$(MODE_DIR)
OBJECTS_StateMachine = StateMachine_partialImage/$(MODE_DIR)/StateMachine_partialImage.o
ifeq ($(TARGET_JOBS),1)
StateMachine/$(MODE_DIR)/StateMachine.out : $(OBJECTS_StateMachine)
$(TRACE_FLAG)if [ ! -d "`dirname "$@"`" ]; then mkdir -p "`dirname "$@"`"; fi;echo "building $@";rm -f "$@";ddump -Ng $(OBJECTS_StateMachine) | tclsh $(WIND_BASE)/host/resource/hutils/tcl/munch.tcl -c pentium -tags $(VSB_DIR)/tags/simpc/SIMNT/common/dkm.tags > $(OBJ_DIR)/ctdt.c; $(TOOL_PATH)dcc $(DEBUGFLAGS_Linker) $(CC_ARCH_SPEC) -Xdollar-in-ident -ei1518,4177,4223,4301,4550,5409,1606 -ei4193,4826,4381,4237,1573,4007,4082,4177,4223,4260,4550,5361,5828,2273,5387,5388 -ei1522,4092,4111,4152,4167,4171,4174,4186,4188,4191,4192,4223,4231,4236,4284,4375,4494,4513,5152,5457 -Xforce-declarations $(ADDED_CFLAGS) $(IDE_INCLUDES) $(ADDED_INCLUDES) $(IDE_DEFINES) $(DEFINES) -o $(OBJ_DIR)/ctdt.o -c $(OBJ_DIR)/ctdt.c; $(TOOL_PATH)dld -tX86LH:vxworks69 -X -r5 -f 0x90,1,1 -r4 -o "$@" $(OBJ_DIR)/ctdt.o $(OBJECTS_StateMachine) $(IDE_LIBRARIES) $(LIBPATH) $(LIBS) $(ADDED_LIBPATH) $(ADDED_LIBS) && if [ "$(EXPAND_DBG)" = "1" ]; then plink "$@";fi
else
StateMachine/$(MODE_DIR)/StateMachine.out : StateMachine/$(MODE_DIR)/StateMachine.out_jobs
endif
StateMachine/$(MODE_DIR)/StateMachine_compile_file : $(FILE) ;
_clean :: StateMachine/$(MODE_DIR)/StateMachine_clean
StateMachine/$(MODE_DIR)/StateMachine_clean :
$(TRACE_FLAG)if [ -d "StateMachine" ]; then cd "StateMachine"; rm -rf $(MODE_DIR); fi
# StateMachine_partialImage
ifeq ($(DEBUG_MODE),1)
StateMachine_partialImage/$(MODE_DIR)/% : DEBUGFLAGS_C-Compiler = -g
StateMachine_partialImage/$(MODE_DIR)/% : DEBUGFLAGS_C++-Compiler = -g
StateMachine_partialImage/$(MODE_DIR)/% : DEBUGFLAGS_Linker = -g
StateMachine_partialImage/$(MODE_DIR)/% : DEBUGFLAGS_Partial-Image-Linker =
StateMachine_partialImage/$(MODE_DIR)/% : DEBUGFLAGS_Librarian =
StateMachine_partialImage/$(MODE_DIR)/% : DEBUGFLAGS_Assembler = -g
else
StateMachine_partialImage/$(MODE_DIR)/% : DEBUGFLAGS_C-Compiler = -XO -Xsize-opt
StateMachine_partialImage/$(MODE_DIR)/% : DEBUGFLAGS_C++-Compiler = -XO -Xsize-opt
StateMachine_partialImage/$(MODE_DIR)/% : DEBUGFLAGS_Linker = -XO -Xsize-opt
StateMachine_partialImage/$(MODE_DIR)/% : DEBUGFLAGS_Partial-Image-Linker =
StateMachine_partialImage/$(MODE_DIR)/% : DEBUGFLAGS_Librarian =
StateMachine_partialImage/$(MODE_DIR)/% : DEBUGFLAGS_Assembler = -XO -Xsize-opt
endif
StateMachine_partialImage/$(MODE_DIR)/% : IDE_INCLUDES = -I$(WIND_BASE)/target/h -I$(WIND_BASE)/target/h/wrn/coreip
StateMachine_partialImage/$(MODE_DIR)/% : IDE_LIBRARIES =
StateMachine_partialImage/$(MODE_DIR)/% : IDE_DEFINES = -DCPU=_VX_$(CPU) -DTOOL_FAMILY=$(TOOL_FAMILY) -DTOOL=$(TOOL) -D_WRS_KERNEL -D_VSB_CONFIG_FILE=\"$(VSB_DIR)/h/config/vsbConfig.h\" -DIP_PORT_VXWORKS=69
StateMachine_partialImage/$(MODE_DIR)/% : PROJECT_TYPE = DKM
StateMachine_partialImage/$(MODE_DIR)/% : DEFINES =
StateMachine_partialImage/$(MODE_DIR)/% : EXPAND_DBG = 0
StateMachine_partialImage/$(MODE_DIR)/% : VX_CPU_FAMILY = simpc
StateMachine_partialImage/$(MODE_DIR)/% : CPU = SIMNT
StateMachine_partialImage/$(MODE_DIR)/% : TOOL_FAMILY = diab
StateMachine_partialImage/$(MODE_DIR)/% : TOOL = diab
StateMachine_partialImage/$(MODE_DIR)/% : TOOL_PATH =
StateMachine_partialImage/$(MODE_DIR)/% : CC_ARCH_SPEC = -tX86LH:vxworks69
StateMachine_partialImage/$(MODE_DIR)/% : VSB_DIR = $(WIND_BASE)/target/lib
StateMachine_partialImage/$(MODE_DIR)/% : VSB_CONFIG_FILE = $(VSB_DIR)/h/config/vsbConfig.h
StateMachine_partialImage/$(MODE_DIR)/% : LIBPATH =
StateMachine_partialImage/$(MODE_DIR)/% : LIBS =
StateMachine_partialImage/$(MODE_DIR)/% : OBJ_DIR := StateMachine_partialImage/$(MODE_DIR)
StateMachine_partialImage/$(MODE_DIR)/Objects/StateMachine/diaTimer.o : $(PRJ_ROOT_DIR)/diaTimer.cpp $(FORCE_FILE_BUILD)
$(TRACE_FLAG)if [ ! -d "`dirname "$@"`" ]; then mkdir -p "`dirname "$@"`"; fi;echo "building $@"; $(TOOL_PATH)dcc $(DEBUGFLAGS_C++-Compiler) $(CC_ARCH_SPEC) -W:c:,-Xclib-optim-off -Xansi -Xlocal-data-area-static-only -W:c++:.CPP -ei1518,4177,4223,4301,4550,5409,1606 -ei4193,4826,4381,4237,1573,4007,4082,4177,4223,4260,4550,5361,5828,2273,5387,5388 -ei1522,4092,4111,4152,4167,4171,4174,4186,4188,4191,4192,4223,4231,4236,4284,4375,4494,4513,5152,5457 -Xforce-declarations -Xmake-dependency=0xd $(IDE_DEFINES) $(DEFINES) $(ADDED_C++FLAGS) $(IDE_INCLUDES) $(ADDED_INCLUDES) -o "$@" -c "$<"
StateMachine_partialImage/$(MODE_DIR)/Objects/StateMachine/hardware/disp.o : $(PRJ_ROOT_DIR)/hardware/disp.c $(FORCE_FILE_BUILD)
$(TRACE_FLAG)if [ ! -d "`dirname "$@"`" ]; then mkdir -p "`dirname "$@"`"; fi;echo "building $@"; $(TOOL_PATH)dcc $(DEBUGFLAGS_C-Compiler) $(CC_ARCH_SPEC) -W:c:,-Xclib-optim-off -Xansi -Xlocal-data-area-static-only -W:c++:.CPP -Xc-new -Xdialect-c89 -ei1518,4177,4223,4301,4550,5409,1606 -ei4193,4826,4381,4237,1573,4007,4082,4177,4223,4260,4550,5361,5828,2273,5387,5388 -ei1522,4092,4111,4152,4167,4171,4174,4186,4188,4191,4192,4223,4231,4236,4284,4375,4494,4513,5152,5457 -Xforce-declarations -Xmake-dependency=0xd $(IDE_DEFINES) $(DEFINES) $(ADDED_CFLAGS) $(IDE_INCLUDES) $(ADDED_INCLUDES) -o "$@" -c "$<"
StateMachine_partialImage/$(MODE_DIR)/Objects/StateMachine/hardware/hwFunc.o : $(PRJ_ROOT_DIR)/hardware/hwFunc.c $(FORCE_FILE_BUILD)
$(TRACE_FLAG)if [ ! -d "`dirname "$@"`" ]; then mkdir -p "`dirname "$@"`"; fi;echo "building $@"; $(TOOL_PATH)dcc $(DEBUGFLAGS_C-Compiler) $(CC_ARCH_SPEC) -W:c:,-Xclib-optim-off -Xansi -Xlocal-data-area-static-only -W:c++:.CPP -Xc-new -Xdialect-c89 -ei1518,4177,4223,4301,4550,5409,1606 -ei4193,4826,4381,4237,1573,4007,4082,4177,4223,4260,4550,5361,5828,2273,5387,5388 -ei1522,4092,4111,4152,4167,4171,4174,4186,4188,4191,4192,4223,4231,4236,4284,4375,4494,4513,5152,5457 -Xforce-declarations -Xmake-dependency=0xd $(IDE_DEFINES) $(DEFINES) $(ADDED_CFLAGS) $(IDE_INCLUDES) $(ADDED_INCLUDES) -o "$@" -c "$<"
StateMachine_partialImage/$(MODE_DIR)/Objects/StateMachine/hardware/kbd.o : $(PRJ_ROOT_DIR)/hardware/kbd.c $(FORCE_FILE_BUILD)
$(TRACE_FLAG)if [ ! -d "`dirname "$@"`" ]; then mkdir -p "`dirname "$@"`"; fi;echo "building $@"; $(TOOL_PATH)dcc $(DEBUGFLAGS_C-Compiler) $(CC_ARCH_SPEC) -W:c:,-Xclib-optim-off -Xansi -Xlocal-data-area-static-only -W:c++:.CPP -Xc-new -Xdialect-c89 -ei1518,4177,4223,4301,4550,5409,1606 -ei4193,4826,4381,4237,1573,4007,4082,4177,4223,4260,4550,5361,5828,2273,5387,5388 -ei1522,4092,4111,4152,4167,4171,4174,4186,4188,4191,4192,4223,4231,4236,4284,4375,4494,4513,5152,5457 -Xforce-declarations -Xmake-dependency=0xd $(IDE_DEFINES) $(DEFINES) $(ADDED_CFLAGS) $(IDE_INCLUDES) $(ADDED_INCLUDES) -o "$@" -c "$<"
StateMachine_partialImage/$(MODE_DIR)/Objects/StateMachine/keyboard.o : $(PRJ_ROOT_DIR)/keyboard.cpp $(FORCE_FILE_BUILD)
$(TRACE_FLAG)if [ ! -d "`dirname "$@"`" ]; then mkdir -p "`dirname "$@"`"; fi;echo "building $@"; $(TOOL_PATH)dcc $(DEBUGFLAGS_C++-Compiler) $(CC_ARCH_SPEC) -W:c:,-Xclib-optim-off -Xansi -Xlocal-data-area-static-only -W:c++:.CPP -ei1518,4177,4223,4301,4550,5409,1606 -ei4193,4826,4381,4237,1573,4007,4082,4177,4223,4260,4550,5361,5828,2273,5387,5388 -ei1522,4092,4111,4152,4167,4171,4174,4186,4188,4191,4192,4223,4231,4236,4284,4375,4494,4513,5152,5457 -Xforce-declarations -Xmake-dependency=0xd $(IDE_DEFINES) $(DEFINES) $(ADDED_C++FLAGS) $(IDE_INCLUDES) $(ADDED_INCLUDES) -o "$@" -c "$<"
StateMachine_partialImage/$(MODE_DIR)/Objects/StateMachine/main.o : $(PRJ_ROOT_DIR)/main.cpp $(FORCE_FILE_BUILD)
$(TRACE_FLAG)if [ ! -d "`dirname "$@"`" ]; then mkdir -p "`dirname "$@"`"; fi;echo "building $@"; $(TOOL_PATH)dcc $(DEBUGFLAGS_C++-Compiler) $(CC_ARCH_SPEC) -W:c:,-Xclib-optim-off -Xansi -Xlocal-data-area-static-only -W:c++:.CPP -ei1518,4177,4223,4301,4550,5409,1606 -ei4193,4826,4381,4237,1573,4007,4082,4177,4223,4260,4550,5361,5828,2273,5387,5388 -ei1522,4092,4111,4152,4167,4171,4174,4186,4188,4191,4192,4223,4231,4236,4284,4375,4494,4513,5152,5457 -Xforce-declarations -Xmake-dependency=0xd $(IDE_DEFINES) $(DEFINES) $(ADDED_C++FLAGS) $(IDE_INCLUDES) $(ADDED_INCLUDES) -o "$@" -c "$<"
StateMachine_partialImage/$(MODE_DIR)/Objects/StateMachine/setMyIP.o : $(PRJ_ROOT_DIR)/setMyIP.cpp $(FORCE_FILE_BUILD)
$(TRACE_FLAG)if [ ! -d "`dirname "$@"`" ]; then mkdir -p "`dirname "$@"`"; fi;echo "building $@"; $(TOOL_PATH)dcc $(DEBUGFLAGS_C++-Compiler) $(CC_ARCH_SPEC) -W:c:,-Xclib-optim-off -Xansi -Xlocal-data-area-static-only -W:c++:.CPP -ei1518,4177,4223,4301,4550,5409,1606 -ei4193,4826,4381,4237,1573,4007,4082,4177,4223,4260,4550,5361,5828,2273,5387,5388 -ei1522,4092,4111,4152,4167,4171,4174,4186,4188,4191,4192,4223,4231,4236,4284,4375,4494,4513,5152,5457 -Xforce-declarations -Xmake-dependency=0xd $(IDE_DEFINES) $(DEFINES) $(ADDED_C++FLAGS) $(IDE_INCLUDES) $(ADDED_INCLUDES) -o "$@" -c "$<"
StateMachine_partialImage/$(MODE_DIR)/Objects/StateMachine/stateMachine.o : $(PRJ_ROOT_DIR)/stateMachine.cpp $(FORCE_FILE_BUILD)
$(TRACE_FLAG)if [ ! -d "`dirname "$@"`" ]; then mkdir -p "`dirname "$@"`"; fi;echo "building $@"; $(TOOL_PATH)dcc $(DEBUGFLAGS_C++-Compiler) $(CC_ARCH_SPEC) -W:c:,-Xclib-optim-off -Xansi -Xlocal-data-area-static-only -W:c++:.CPP -ei1518,4177,4223,4301,4550,5409,1606 -ei4193,4826,4381,4237,1573,4007,4082,4177,4223,4260,4550,5361,5828,2273,5387,5388 -ei1522,4092,4111,4152,4167,4171,4174,4186,4188,4191,4192,4223,4231,4236,4284,4375,4494,4513,5152,5457 -Xforce-declarations -Xmake-dependency=0xd $(IDE_DEFINES) $(DEFINES) $(ADDED_C++FLAGS) $(IDE_INCLUDES) $(ADDED_INCLUDES) -o "$@" -c "$<"
StateMachine_partialImage/$(MODE_DIR)/Objects/StateMachine/stateTable.o : $(PRJ_ROOT_DIR)/stateTable.cpp $(FORCE_FILE_BUILD)
$(TRACE_FLAG)if [ ! -d "`dirname "$@"`" ]; then mkdir -p "`dirname "$@"`"; fi;echo "building $@"; $(TOOL_PATH)dcc $(DEBUGFLAGS_C++-Compiler) $(CC_ARCH_SPEC) -W:c:,-Xclib-optim-off -Xansi -Xlocal-data-area-static-only -W:c++:.CPP -ei1518,4177,4223,4301,4550,5409,1606 -ei4193,4826,4381,4237,1573,4007,4082,4177,4223,4260,4550,5361,5828,2273,5387,5388 -ei1522,4092,4111,4152,4167,4171,4174,4186,4188,4191,4192,4223,4231,4236,4284,4375,4494,4513,5152,5457 -Xforce-declarations -Xmake-dependency=0xd $(IDE_DEFINES) $(DEFINES) $(ADDED_C++FLAGS) $(IDE_INCLUDES) $(ADDED_INCLUDES) -o "$@" -c "$<"
StateMachine_partialImage/$(MODE_DIR)/Objects/StateMachine/systemManager.o : $(PRJ_ROOT_DIR)/systemManager.cpp $(FORCE_FILE_BUILD)
$(TRACE_FLAG)if [ ! -d "`dirname "$@"`" ]; then mkdir -p "`dirname "$@"`"; fi;echo "building $@"; $(TOOL_PATH)dcc $(DEBUGFLAGS_C++-Compiler) $(CC_ARCH_SPEC) -W:c:,-Xclib-optim-off -Xansi -Xlocal-data-area-static-only -W:c++:.CPP -ei1518,4177,4223,4301,4550,5409,1606 -ei4193,4826,4381,4237,1573,4007,4082,4177,4223,4260,4550,5361,5828,2273,5387,5388 -ei1522,4092,4111,4152,4167,4171,4174,4186,4188,4191,4192,4223,4231,4236,4284,4375,4494,4513,5152,5457 -Xforce-declarations -Xmake-dependency=0xd $(IDE_DEFINES) $(DEFINES) $(ADDED_C++FLAGS) $(IDE_INCLUDES) $(ADDED_INCLUDES) -o "$@" -c "$<"
OBJECTS_StateMachine_partialImage = StateMachine_partialImage/$(MODE_DIR)/Objects/StateMachine/diaTimer.o \
StateMachine_partialImage/$(MODE_DIR)/Objects/StateMachine/hardware/disp.o \
StateMachine_partialImage/$(MODE_DIR)/Objects/StateMachine/hardware/hwFunc.o \
StateMachine_partialImage/$(MODE_DIR)/Objects/StateMachine/hardware/kbd.o \
StateMachine_partialImage/$(MODE_DIR)/Objects/StateMachine/keyboard.o \
StateMachine_partialImage/$(MODE_DIR)/Objects/StateMachine/main.o \
StateMachine_partialImage/$(MODE_DIR)/Objects/StateMachine/setMyIP.o \
StateMachine_partialImage/$(MODE_DIR)/Objects/StateMachine/stateMachine.o \
StateMachine_partialImage/$(MODE_DIR)/Objects/StateMachine/stateTable.o \
StateMachine_partialImage/$(MODE_DIR)/Objects/StateMachine/systemManager.o
# Link rule for the partial image: when this make instance owns the jobs
# (TARGET_JOBS=1) the image is linked directly from its object files;
# otherwise the work is delegated to the generic %_jobs re-invocation rule.
ifeq ($(TARGET_JOBS),1)
StateMachine_partialImage/$(MODE_DIR)/StateMachine_partialImage.o : $(OBJECTS_StateMachine_partialImage)
$(TRACE_FLAG)if [ ! -d "`dirname "$@"`" ]; then mkdir -p "`dirname "$@"`"; fi;echo "building $@"; $(TOOL_PATH)dld -tX86LH:vxworks69 -X -r5 -f 0x90,1,1 -o "$@" $(OBJECTS_StateMachine_partialImage) $(ADDED_OBJECTS) $(IDE_LIBRARIES) $(LIBPATH) $(LIBS) $(ADDED_LIBPATH) $(ADDED_LIBS) && if [ "$(EXPAND_DBG)" = "1" ]; then plink "$@";fi
else
StateMachine_partialImage/$(MODE_DIR)/StateMachine_partialImage.o : StateMachine_partialImage/$(MODE_DIR)/StateMachine_partialImage.o_jobs
endif
# Convenience target for compiling a single file selected via $(FILE).
StateMachine_partialImage/$(MODE_DIR)/StateMachine_partialImage_compile_file : $(FILE) ;
# Hook the per-image clean target into the global double-colon _clean target.
_clean :: StateMachine_partialImage/$(MODE_DIR)/StateMachine_partialImage_clean
StateMachine_partialImage/$(MODE_DIR)/StateMachine_partialImage_clean :
$(TRACE_FLAG)if [ -d "StateMachine_partialImage" ]; then cd "StateMachine_partialImage"; rm -rf $(MODE_DIR); fi
force :
# Error stub: only used if the template was not updated; the real
# parallel-build rule overrides this variable further down.
TARGET_JOBS_RULE?=echo "Update the makefile template via File > Import > Build Settings : Update makefile template";exit 1
%_jobs :
$(TRACE_FLAG)$(TARGET_JOBS_RULE)
# Auto-generated dependency fragments (one .d per object); the leading '-'
# on the include below keeps make from failing before the first build.
DEP_FILES := StateMachine_partialImage/$(MODE_DIR)/Objects/StateMachine/diaTimer.d StateMachine_partialImage/$(MODE_DIR)/Objects/StateMachine/hardware/disp.d StateMachine_partialImage/$(MODE_DIR)/Objects/StateMachine/hardware/hwFunc.d \
 StateMachine_partialImage/$(MODE_DIR)/Objects/StateMachine/hardware/kbd.d StateMachine_partialImage/$(MODE_DIR)/Objects/StateMachine/keyboard.d StateMachine_partialImage/$(MODE_DIR)/Objects/StateMachine/main.d \
 StateMachine_partialImage/$(MODE_DIR)/Objects/StateMachine/setMyIP.d StateMachine_partialImage/$(MODE_DIR)/Objects/StateMachine/stateMachine.d StateMachine_partialImage/$(MODE_DIR)/Objects/StateMachine/stateTable.d \
 StateMachine_partialImage/$(MODE_DIR)/Objects/StateMachine/systemManager.d
-include $(DEP_FILES)
# Normalize Windows-style backslashes in the ScopeTools installation path.
WIND_SCOPETOOLS_BASE := $(subst \,/,$(WIND_SCOPETOOLS_BASE))
clean_scopetools :
$(TRACE_FLAG)rm -rf $(PRJ_ROOT_DIR)/.coveragescope/db
CLEAN_STEP := clean_scopetools
# Optional user extension makefiles.
-include $(PRJ_ROOT_DIR)/*.makefile
-include *.makefile
# Real parallel-build rule: re-invoke make for the requested target with the
# configured job count (overrides the error stub defined earlier).
TARGET_JOBS_RULE=$(MAKE) -f $(MAKEFILE) --jobs $(TARGET_JOBS) $(MFLAGS) $* TARGET_JOBS=1
ifeq ($(JOBS),1)
main_all : external_build $(PROJECT_TARGETS)
@echo "make: built targets of `pwd`"
else
main_all : external_build
@$(MAKE) -f $(MAKEFILE) --jobs $(JOBS) $(MFLAGS) $(PROJECT_TARGETS) TARGET_JOBS=1 &&\
echo "make: built targets of `pwd`"
endif
# entry point for extending the build
external_build ::
@echo ""
# main entry point for pre processing prior to the build
pre_build :: $(PRE_BUILD_STEP) generate_sources
@echo ""
# entry point for generating sources prior to the build
generate_sources ::
@echo ""
# main entry point for post processing after the build
post_build :: $(POST_BUILD_STEP) deploy_output
@echo ""
# entry point for deploying output after the build
deploy_output ::
@echo ""
clean :: external_clean $(CLEAN_STEP) _clean
# entry point for extending the build clean
external_clean ::
@echo ""
| dudomatik/conveyor-belt | StateMachineTelnet/SIMNTdiab/Makefile | Makefile | mit | 18,791 |
__author__ = 'Nishanth'
from juliabox.cloud import JBPluginCloud
from juliabox.jbox_util import JBoxCfg, retry_on_errors
from googleapiclient.discovery import build
from oauth2client.client import GoogleCredentials
import threading
class JBoxGCD(JBPluginCloud):
    """Google Cloud DNS plugin for JuliaBox.

    Registers and removes per-instance A records in a Cloud DNS managed
    zone.  The API client is created lazily, once per thread.
    """
    provides = [JBPluginCloud.JBP_DNS, JBPluginCloud.JBP_DNS_GCD]
    threadlocal = threading.local()

    # Populated from configuration on first use.
    INSTALLID = None
    REGION = None
    DOMAIN = None

    @staticmethod
    def configure():
        """Load installation id, managed zone and domain from JBoxCfg."""
        conf = JBoxCfg.get('cloud_host')
        JBoxGCD.INSTALLID = conf['install_id']
        JBoxGCD.REGION = conf['region']
        JBoxGCD.DOMAIN = conf['domain']

    @staticmethod
    def domain():
        """Return the configured DNS domain, loading config on first call."""
        if JBoxGCD.DOMAIN is None:
            JBoxGCD.configure()
        return JBoxGCD.DOMAIN

    @staticmethod
    def connect():
        """Return a per-thread Cloud DNS API client, creating it lazily."""
        conn = getattr(JBoxGCD.threadlocal, 'conn', None)
        if conn is not None:
            return conn
        JBoxGCD.configure()
        creds = GoogleCredentials.get_application_default()
        conn = build("dns", "v1", credentials=creds)
        JBoxGCD.threadlocal.conn = conn
        return conn

    @staticmethod
    @retry_on_errors(retries=2)
    def add_cname(name, value):
        """Create a DNS record pointing `name` at `value`.

        Note: despite the method name (fixed by the plugin interface) this
        registers an A record, not a CNAME.
        """
        record = {'rrdatas': [value],
                  'kind': 'dns#resourceRecordSet',
                  'type': 'A',
                  'name': name,
                  'ttl': 300}
        JBoxGCD.connect().changes().create(
            project=JBoxGCD.INSTALLID, managedZone=JBoxGCD.REGION,
            body={'kind': 'dns#change',
                  'additions': [record]}).execute()

    @staticmethod
    @retry_on_errors(retries=2)
    def delete_cname(name):
        """Delete the A record for `name`, if one is registered."""
        existing = JBoxGCD.connect().resourceRecordSets().list(
            project=JBoxGCD.INSTALLID, managedZone=JBoxGCD.REGION,
            name=name, type='A').execute()
        rrsets = existing['rrsets']
        if len(rrsets) == 0:
            JBoxGCD.log_debug('No prior dns registration found for %s', name)
            return
        # Reconstruct the existing record exactly (same data and ttl) so the
        # deletion matches what the zone currently holds.
        cname = rrsets[0]['rrdatas'][0]
        ttl = rrsets[0]['ttl']
        record = {'rrdatas': [str(cname)],
                  'kind': 'dns#resourceRecordSet',
                  'type': 'A',
                  'name': name,
                  'ttl': ttl}
        JBoxGCD.connect().changes().create(
            project=JBoxGCD.INSTALLID, managedZone=JBoxGCD.REGION,
            body={'kind': 'dns#change',
                  'deletions': [record]}).execute()
        JBoxGCD.log_warn('Prior dns registration was found for %s', name)
| JuliaLang/JuliaBox | engine/src/juliabox/plugins/dns_gcd/impl_gcd.py | Python | mit | 2,592 |
require 'rails'
# Hooks the gem's Stripe locale files into the Rails I18n load path at boot.
module StripeI18n
  class Railtie < ::Rails::Railtie
    initializer 'stripe-i18n' do |app|
      StripeI18n::Railtie.instance_eval do
        locales = app.config.i18n.available_locales
        add("rails/locale/#{pattern_from(locales)}.yml")
      end
    end

    protected

    # Append every locale file matching +pattern+ (relative to the gem root)
    # to I18n's load path.
    def self.add(pattern)
      glob = File.join(File.dirname(__FILE__), '../..', pattern)
      I18n.load_path.concat(Dir.glob(glob))
    end

    # Build a glob fragment from the configured locales: '*' when none are
    # configured, otherwise e.g. "{en,de,fr}".
    def self.pattern_from(args)
      list = Array(args || [])
      list.blank? ? '*' : "{#{list.join(',')}}"
    end
  end
end
| ekosz/stripe-i18n | lib/stripe_i18n/railtie.rb | Ruby | mit | 567 |
/*
* The MIT License
*
* Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.node_monitors;
import hudson.Extension;
import hudson.FilePath;
import hudson.Functions;
import hudson.model.Computer;
import hudson.model.Node;
import hudson.node_monitors.DiskSpaceMonitorDescriptor.DiskSpace;
import hudson.remoting.Callable;
import jenkins.model.Jenkins;
import org.kohsuke.stapler.DataBoundConstructor;
import java.io.IOException;
import java.text.ParseException;
/**
 * Checks available disk space of the remote FS root.
 * Requires Mustang.
 *
 * @author Kohsuke Kawaguchi
 * @since 1.123
 */
public class DiskSpaceMonitor extends AbstractDiskSpaceMonitor {
    /**
     * @param freeSpaceThreshold
     *      textual free-space threshold, parsed by the superclass.
     * @throws ParseException if the threshold cannot be parsed.
     */
    @DataBoundConstructor
    public DiskSpaceMonitor(String freeSpaceThreshold) throws ParseException {
        super(freeSpaceThreshold);
    }

    public DiskSpaceMonitor() {}

    /**
     * Returns the last recorded free-space measurement for the given computer,
     * or null if none has been taken yet.
     */
    public DiskSpace getFreeSpace(Computer c) {
        return DESCRIPTOR.get(c);
    }

    @Override
    public String getColumnCaption() {
        // Hide this column from non-admins
        return Jenkins.getInstance().hasPermission(Jenkins.ADMINISTER) ? super.getColumnCaption() : null;
    }

    public static final DiskSpaceMonitorDescriptor DESCRIPTOR = new DiskSpaceMonitorDescriptor() {
        public String getDisplayName() {
            return Messages.DiskSpaceMonitor_DisplayName();
        }

        @Override
        protected Callable<DiskSpace, IOException> createCallable(Computer c) {
            // The node may have been removed from the configuration while its
            // Computer object is still alive; getNode() then returns null and
            // the previous unconditional dereference caused an NPE in the
            // monitoring thread. Returning null skips monitoring this computer.
            Node node = c.getNode();
            if (node == null) return null;
            FilePath p = node.getRootPath();
            if (p == null) return null;
            return p.asCallableWith(new GetUsableSpace());
        }
    };

    /** Registers the monitor only on JVMs that provide File.getUsableSpace() (Java 6+). */
    @Extension
    public static DiskSpaceMonitorDescriptor install() {
        if (Functions.isMustangOrAbove()) return DESCRIPTOR;
        return null;
    }
}
| chenDoInG/jenkins | core/src/main/java/hudson/node_monitors/DiskSpaceMonitor.java | Java | mit | 2,868 |
# Uploader for files stored in a model's personal namespace under
# "<root>/-/system".
class PersonalFileUploader < FileUploader
  # Absolute filesystem directory that holds the given model's uploads.
  def self.dynamic_path_segment(model)
    File.join(CarrierWave.root, model_path(model))
  end

  # Base directory shared by all personal uploads.
  def self.base_dir
    File.join(root_dir, '-', 'system')
  end

  private

  # URL path of the stored file, namespaced by the model and a secret token.
  def secure_url
    File.join(self.class.model_path(model), secret, file.filename)
  end

  # "/<base_dir>/<model_type>/<id>" for a given model, or the shared temp
  # directory when no model is supplied.
  def self.model_path(model)
    segments = if model
                 [model.class.to_s.underscore, model.id.to_s]
               else
                 ['temp']
               end
    File.join("/#{base_dir}", *segments)
  end
end
| t-zuehlsdorff/gitlabhq | app/uploaders/personal_file_uploader.rb | Ruby | mit | 492 |
<?php
/**
* @copyright 2006-2013, Miles Johnson - http://milesj.me
* @license http://opensource.org/licenses/mit-license.php
* @link http://milesj.me/code/php/transit
*/
namespace Transit\Transformer\Image;
use Transit\File;
use \InvalidArgumentException;
/**
 * Crops a photo, but resizes and keeps aspect ratio depending on which side is larger.
 *
 * The crop window is scaled so that it covers the requested output box and is
 * then positioned inside the source image according to the `location` setting
 * (or taken verbatim when `location` is an array of coordinates).
 *
 * @package Transit\Transformer\Image
 */
class CropTransformer extends AbstractImageTransformer {

    const TOP = 'top';
    const BOTTOM = 'bottom';
    const LEFT = 'left';
    const RIGHT = 'right';
    const CENTER = 'center';

    /**
     * Configuration.
     *
     * @type array {
     *      @type string|array $location    Edge/center to crop from, or an explicit
     *                                      [x, y, width, height] window in source pixels
     *      @type int $quality              Quality of JPEG image
     *      @type int $width                Width of output image (derived from height if omitted)
     *      @type int $height               Height of output image (derived from width if omitted)
     * }
     */
    protected $_config = array(
        'location' => self::CENTER,
        'quality' => 100,
        'width' => null,
        'height' => null
    );

    /**
     * {@inheritdoc}
     *
     * @param \Transit\File $file Source image
     * @param bool $self Forwarded to _process() as the 'overwrite' option
     * @throws \InvalidArgumentException when neither width nor height is numeric
     */
    public function transform(File $file, $self = false) {
        $config = $this->getConfig();
        $baseWidth = $file->width();
        $baseHeight = $file->height();
        $width = $config['width'];
        $height = $config['height'];

        // Derive the missing output dimension from the source aspect ratio.
        if (is_numeric($width) && !$height) {
            $height = round(($baseHeight / $baseWidth) * $width);
        } else if (is_numeric($height) && !$width) {
            $width = round(($baseWidth / $baseHeight) * $height);
        } else if (!is_numeric($height) && !is_numeric($width)) {
            throw new InvalidArgumentException('Invalid width and height for crop');
        }

        $location = $config['location'];
        // Per-axis ratio of source size to target size; the larger ratio is
        // the axis that must be cropped.
        $widthScale = $baseWidth / $width;
        $heightScale = $baseHeight / $height;
        $src_x = 0;
        $src_y = 0;
        $src_w = $baseWidth;
        $src_h = $baseHeight;

        // If an array is passed, use those dimensions verbatim
        if (is_array($location)) {
            list($src_x, $src_y, $src_w, $src_h) = $location;

        } else {
            // Source is proportionally wider than the target: crop horizontally,
            // using the height scale as the base.
            if ($widthScale > $heightScale) {
                $src_w = $width * $heightScale;

                // Position horizontally in the middle
                if ($location === self::CENTER) {
                    $src_x = ($baseWidth / 2) - (($width / 2) * $heightScale);

                // Position at the far right (BOTTOM is treated as an alias for
                // the trailing edge; TOP/LEFT keep the default offset 0)
                } else if ($location === self::RIGHT || $location === self::BOTTOM) {
                    $src_x = $baseWidth - $src_w;
                }

            // Source is proportionally taller: crop vertically, using the
            // width scale as the base.
            } else {
                $src_h = $height * $widthScale;

                // Position vertically in the middle
                if ($location === self::CENTER) {
                    $src_y = ($baseHeight / 2) - (($height / 2) * $widthScale);

                // Position at the bottom (RIGHT is likewise an alias here)
                } else if ($location === self::RIGHT || $location === self::BOTTOM) {
                    $src_y = $baseHeight - $src_h;
                }
            }
        }

        return $this->_process($file, array(
            'dest_w' => $width,
            'dest_h' => $height,
            'source_x' => $src_x,
            'source_y' => $src_y,
            'source_w' => $src_w,
            'source_h' => $src_h,
            'quality' => $config['quality'],
            'overwrite' => $self
        ));
    }

}
---
title: Putain de code !
url: http://putaindecode.io/
source: https://github.com/putaindecode/putaindecode.io
showcaseTags:
- open-source
- community
- learning
- multi-languages
---
| MoOx/phenomic | website/content/showcase/entry/putaindecode.io.md | Markdown | mit | 194 |
teleirc
=======
Telegram <-> IRC gateway.
* Uses the [node-telegram-bot](https://github.com/orzFly/node-telegram-bot) library for Telegram communication
* IRC communication via martynsmith's [node-irc](https://github.com/martynsmith/node-irc) module
* All Telegram messages are sent to IRC channel
* IRC messages are sent to Telegram only when the bot is highlighted (configurable)
Setup
-----
git clone https://github.com/FruitieX/teleirc
cd teleirc
npm install
cp teleirc_config.js.example ~/.teleirc_config.js
Next, set up your bot via the [BotFather](https://telegram.me/botfather) Telegram user.
Save your bot token in `~/.teleirc_config.js`. Remember to allow the bot to see all messages via the
`/setprivacy` command to `BotFather`, otherwise only messages starting with a
slash are visible to teleirc.
Now read through the rest of `~/.teleirc_config.js` and change the configuration as appropriate.
When you're done, launch teleirc with:
npm start
Optional:
- For your convenience, there is an included systemd unit file in `teleirc.service`.
- You can change your Telegram Bot's profile picture with the `/setuserpic` BotFather command.
Special thanks
--------------
Thanks to [warbaque](https://github.com/warbaque) for an implementation using Telegram Bot API!
| jeffry1829/teleirc | README.md | Markdown | mit | 1,298 |
require 'spec_helper'
# Verifies that the php::5_5_4 wrapper class declares the matching
# php::version resource.
describe "php::5_5_4" do
  let(:facts) { default_test_facts }

  it { should contain_php__version("5.5.4") }
end
| webflo/puppet-php | spec/classes/php_5_5_4_spec.rb | Ruby | mit | 145 |
<?php
/*
* $Id: MssqlPlatform.php 3752 2007-04-11 09:11:18Z fabien $
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* This software consists of voluntary contributions made by many individuals
* and is licensed under the LGPL. For more information please see
* <http://propel.phpdb.org>.
*/
require_once 'propel/engine/platform/DefaultPlatform.php';
include_once 'propel/engine/database/model/Domain.php';
/**
 * MS SQL Platform implementation.
 *
 * Maps generic Propel column types onto their MSSQL equivalents and encodes
 * MSSQL-specific SQL conventions (bracket identifier quoting, explicit
 * NULL/NOT NULL spelling, native delete triggers).
 *
 * @author Hans Lellelid <hans@xmpl.org> (Propel)
 * @author Martin Poeschl <mpoeschl@marmot.at> (Torque)
 * @version $Revision: 536 $
 * @package propel.engine.platform
 */
class MssqlPlatform extends DefaultPlatform {

    /**
     * Initializes db specific domain mapping.
     *
     * Overrides the generic type mappings with MSSQL spellings:
     * LONGVARCHAR/CLOB -> TEXT, all date/time types -> DATETIME,
     * variable binary types -> IMAGE.
     */
    protected function initialize()
    {
        parent::initialize();
        $this->setSchemaDomainMapping(new Domain(PropelTypes::INTEGER, "INT"));
        $this->setSchemaDomainMapping(new Domain(PropelTypes::BOOLEAN, "INT"));
        $this->setSchemaDomainMapping(new Domain(PropelTypes::DOUBLE, "FLOAT"));
        $this->setSchemaDomainMapping(new Domain(PropelTypes::LONGVARCHAR, "TEXT"));
        $this->setSchemaDomainMapping(new Domain(PropelTypes::CLOB, "TEXT"));
        $this->setSchemaDomainMapping(new Domain(PropelTypes::DATE, "DATETIME"));
        $this->setSchemaDomainMapping(new Domain(PropelTypes::BU_DATE, "DATETIME"));
        $this->setSchemaDomainMapping(new Domain(PropelTypes::TIME, "DATETIME"));
        $this->setSchemaDomainMapping(new Domain(PropelTypes::TIMESTAMP, "DATETIME"));
        $this->setSchemaDomainMapping(new Domain(PropelTypes::BU_TIMESTAMP, "DATETIME"));
        $this->setSchemaDomainMapping(new Domain(PropelTypes::BINARY, "BINARY(7132)"));
        $this->setSchemaDomainMapping(new Domain(PropelTypes::VARBINARY, "IMAGE"));
        $this->setSchemaDomainMapping(new Domain(PropelTypes::LONGVARBINARY, "IMAGE"));
        $this->setSchemaDomainMapping(new Domain(PropelTypes::BLOB, "IMAGE"));
    }

    /**
     * @see Platform#getMaxColumnNameLength()
     * @return int Maximum identifier length supported by MSSQL (128).
     */
    public function getMaxColumnNameLength()
    {
        return 128;
    }

    /**
     * @see Platform#getNullString(boolean)
     * @param boolean $notNull
     * @return string "NOT NULL", or an explicit "NULL" when null values are
     *                allowed (as recommended by Microsoft).
     */
    public function getNullString($notNull)
    {
        return ($notNull ? "NOT NULL" : "NULL");
    }

    /**
     * @see Platform::supportsNativeDeleteTrigger()
     * @return boolean Always true for MSSQL.
     */
    public function supportsNativeDeleteTrigger()
    {
        return true;
    }

    /**
     * @see Platform::hasSize(String)
     * @param string $sqlType
     * @return boolean False for INT and TEXT, which take no size specifier
     *                 on MSSQL; true otherwise.
     */
    public function hasSize($sqlType)
    {
        return !("INT" == $sqlType || "TEXT" == $sqlType);
    }

    /**
     * @see Platform::quoteIdentifier()
     * @param string $text
     * @return string The identifier wrapped in MSSQL square brackets.
     *                NOTE(review): a ']' inside $text is not escaped — verify
     *                identifiers never contain brackets before relying on this.
     */
    public function quoteIdentifier($text)
    {
        return '[' . $text . ']';
    }

}
| vincent03460/fxcmiscc-partner | lib/symfony/vendor/propel-generator/classes/propel/engine/platform/MssqlPlatform.php | PHP | mit | 3,539 |
//
// NSDictionary+Enumerable.h
// MRCEnumerable
//
// Created by Marcus Crafter on 17/11/10.
// Copyright 2010 Red Artisan. All rights reserved.
//
#import <Foundation/Foundation.h>
// Ruby-style Enumerable helpers for NSDictionary, driven by blocks.
// (Declarations only — semantics below follow the conventional Enumerable
// contract suggested by the names; confirm against the implementation.)
@interface NSDictionary (Enumerable)

// Invokes the block once for every key/value pair.
- (void)each:(void (^)(id key, id obj))block;

// Fold over all pairs: the block receives the running memo `m` plus each
// key/value pair; its return value presumably becomes the next memo.
- (id)inject:(id)m :(id (^)(id m, id key, id obj))block;

// Dictionary of the pairs for which the block returns YES.
- (NSDictionary *)select:(BOOL (^)(id key, id obj))block;

// Dictionary with the pairs for which the block returns YES removed.
- (NSDictionary *)reject:(BOOL (^)(id key, id obj))block;

// A value whose pair satisfies the block, or nil if none does. NSDictionary
// enumeration order is unspecified, so "first match" is arbitrary.
- (id)detect:(BOOL (^)(id key, id obj))block;

@end
| matthewzimmer/bit-miner | wrappers/ios/bitminer/Pods/MRCEnumerable/Classes/NSDictionary+Enumerable.h | C | mit | 499 |
/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is
* regenerated.
*/
'use strict';
const models = require('./index');
/**
* Properties of an artifact source.
*
* @extends models['Resource']
*/
class ArtifactSource extends models['Resource'] {
/**
* Create a ArtifactSource.
* @member {string} [displayName] The artifact source's display name.
* @member {string} [uri] The artifact source's URI.
* @member {string} [sourceType] The artifact source's type. Possible values
* include: 'VsoGit', 'GitHub'
* @member {string} [folderPath] The folder containing artifacts.
* @member {string} [armTemplateFolderPath] The folder containing Azure
* Resource Manager templates.
* @member {string} [branchRef] The artifact source's branch reference.
* @member {string} [securityToken] The security token to authenticate to the
* artifact source.
* @member {string} [status] Indicates if the artifact source is enabled
* (values: Enabled, Disabled). Possible values include: 'Enabled',
* 'Disabled'
* @member {date} [createdDate] The artifact source's creation date.
* @member {string} [provisioningState] The provisioning status of the
* resource.
* @member {string} [uniqueIdentifier] The unique immutable identifier of a
* resource (Guid).
*/
constructor() {
super();
}
/**
* Defines the metadata of ArtifactSource
*
* @returns {object} metadata of ArtifactSource
*
*/
mapper() {
return {
required: false,
serializedName: 'ArtifactSource',
type: {
name: 'Composite',
className: 'ArtifactSource',
modelProperties: {
id: {
required: false,
readOnly: true,
serializedName: 'id',
type: {
name: 'String'
}
},
name: {
required: false,
readOnly: true,
serializedName: 'name',
type: {
name: 'String'
}
},
type: {
required: false,
readOnly: true,
serializedName: 'type',
type: {
name: 'String'
}
},
location: {
required: false,
serializedName: 'location',
type: {
name: 'String'
}
},
tags: {
required: false,
serializedName: 'tags',
type: {
name: 'Dictionary',
value: {
required: false,
serializedName: 'StringElementType',
type: {
name: 'String'
}
}
}
},
displayName: {
required: false,
serializedName: 'properties.displayName',
type: {
name: 'String'
}
},
uri: {
required: false,
serializedName: 'properties.uri',
type: {
name: 'String'
}
},
sourceType: {
required: false,
serializedName: 'properties.sourceType',
type: {
name: 'String'
}
},
folderPath: {
required: false,
serializedName: 'properties.folderPath',
type: {
name: 'String'
}
},
armTemplateFolderPath: {
required: false,
serializedName: 'properties.armTemplateFolderPath',
type: {
name: 'String'
}
},
branchRef: {
required: false,
serializedName: 'properties.branchRef',
type: {
name: 'String'
}
},
securityToken: {
required: false,
serializedName: 'properties.securityToken',
type: {
name: 'String'
}
},
status: {
required: false,
serializedName: 'properties.status',
type: {
name: 'String'
}
},
createdDate: {
required: false,
readOnly: true,
serializedName: 'properties.createdDate',
type: {
name: 'DateTime'
}
},
provisioningState: {
required: false,
serializedName: 'properties.provisioningState',
type: {
name: 'String'
}
},
uniqueIdentifier: {
required: false,
serializedName: 'properties.uniqueIdentifier',
type: {
name: 'String'
}
}
}
}
};
}
}
module.exports = ArtifactSource;
| lmazuel/azure-sdk-for-node | lib/services/devTestLabs/lib/models/artifactSource.js | JavaScript | mit | 5,125 |
// implementation for cube_support.h
#include <utils/multiindex.h>
#include <utils/fixed_array1d.h>
using MathTL::multi_degree;
using MathTL::FixedArray1D;
namespace WaveletTL
{
// Free-function wrapper: delegates to CubeBasis::support() to compute the
// support of the wavelet/generator indexed by lambda. The Support fields are
// used elsewhere in this file as dyadic bounds on level supp.j, with
// per-coordinate limits supp.a[i], supp.b[i] (see intersect_supports below).
template <class IBASIS, unsigned int DIM>
inline
void
support(const CubeBasis<IBASIS,DIM>& basis,
        const typename CubeBasis<IBASIS,DIM>::Index& lambda,
        typename CubeBasis<IBASIS,DIM>::Support& supp)
{
  basis.support(lambda, supp);
}
template <class IBASIS, unsigned int DIM>
bool
intersect_supports(const CubeBasis<IBASIS,DIM>& basis,
const typename CubeBasis<IBASIS,DIM>::Index& lambda,
const typename CubeBasis<IBASIS,DIM>::Index& mu,
typename CubeBasis<IBASIS,DIM>::Support& supp)
{
typename CubeBasis<IBASIS,DIM>::Support supp_lambda;
WaveletTL::support<IBASIS,DIM>(basis, lambda, supp_lambda);
typename CubeBasis<IBASIS,DIM>::Support supp_mu;
WaveletTL::support<IBASIS,DIM>(basis, mu, supp_mu);
// determine support intersection granularity,
// adjust single support granularities if necessary
supp.j = std::max(supp_lambda.j, supp_mu.j);
if (supp_lambda.j > supp_mu.j)
{
const int adjust = 1<<(supp_lambda.j-supp_mu.j);
for (unsigned int i = 0; i < DIM; i++)
{
supp_mu.a[i] *= adjust;
supp_mu.b[i] *= adjust;
}
}
else
{
const int adjust = 1<<(supp_mu.j-supp_lambda.j);
for (unsigned int i = 0; i < DIM; i++)
{
supp_lambda.a[i] *= adjust;
supp_lambda.b[i] *= adjust;
}
}
for (unsigned int i = 0; i < DIM; i++)
{
supp.a[i] = std::max(supp_lambda.a[i],supp_mu.a[i]);
supp.b[i] = std::min(supp_lambda.b[i],supp_mu.b[i]);
if (supp.a[i] >= supp.b[i])
{
return false;
}
}
return true;
}
// Collect into `intersecting` all generator (generators==true) or wavelet
// (generators==false) indices on level j whose supports intersect the support
// of psi_lambda.
//
// The active implementation (#if 1) exploits the tensor product structure:
// the intersecting cube indices form a cartesian product of the intersecting
// 1D indices in each coordinate direction. The #else branch and the trailing
// #if 0 block preserve two older implementations (an index-arithmetic variant
// and a brute-force scan) for reference; neither is compiled.
template <class IBASIS, unsigned int DIM>
void intersecting_wavelets(const CubeBasis<IBASIS,DIM>& basis,
                           const typename CubeBasis<IBASIS,DIM>::Index& lambda,
                           const int j, const bool generators,
                           std::list<typename CubeBasis<IBASIS,DIM>::Index>& intersecting)
{
  typedef typename CubeBasis<IBASIS,DIM>::Index Index;

  intersecting.clear();

#if 1
  // the set of intersecting wavelets is a cartesian product from d sets from the 1D case,
  // so we only have to compute the relevant 1D indices
  typedef typename IBASIS::Index Index1D;
  FixedArray1D<std::list<Index1D>,DIM>
    intersecting_1d_generators, intersecting_1d_wavelets;

  // prepare all intersecting wavelets and generators in the i-th coordinate direction
  for (unsigned int i = 0; i < DIM; i++)
    {
      intersecting_wavelets(*basis.bases()[i],
                            Index1D(lambda.j(),
                                    lambda.e()[i],
                                    lambda.k()[i],
                                    basis.bases()[i]),
                            j, true, intersecting_1d_generators[i]);
      if (!(generators))
        {
          intersecting_wavelets(*basis.bases()[i],
                                Index1D(lambda.j(),
                                        lambda.e()[i],
                                        lambda.k()[i],
                                        basis.bases()[i]),
                                j, false, intersecting_1d_wavelets[i]);
        }
    }

  // generate all relevant tensor product indices with either e=(0,...,0) or e!=(0,...,0);
  // the lists are built dimension by dimension, extending each partial index
  // by every admissible 1D index in the next coordinate direction
  typedef std::list<FixedArray1D<Index1D,DIM> > list_type;
  list_type indices;
  FixedArray1D<Index1D,DIM> helpindex;
  if (DIM > 1 || (DIM == 1 && generators))
    {
      for (typename std::list<Index1D>::const_iterator it(intersecting_1d_generators[0].begin()),
             itend(intersecting_1d_generators[0].end());
           it != itend; ++it)
        {
          helpindex[0] = *it;
          indices.push_back(helpindex);
        }
    }
  if (!(generators))
    {
      for (typename std::list<Index1D>::const_iterator it(intersecting_1d_wavelets[0].begin()),
             itend(intersecting_1d_wavelets[0].end());
           it != itend; ++it)
        {
          helpindex[0] = *it;
          indices.push_back(helpindex);
        }
    }
  for (unsigned int i = 1; i < DIM; i++)
    {
      list_type sofar;
      sofar.swap(indices);
      for (typename list_type::const_iterator itready(sofar.begin()), itreadyend(sofar.end());
           itready != itreadyend; ++itready)
        {
          helpindex = *itready;
          unsigned int esum = 0;
          for (unsigned int k = 0; k < i; k++)
            {
              esum += helpindex[k].e();
            }
          // in the wavelet case the last coordinate may only stay a pure
          // generator if some earlier coordinate already carries a wavelet
          // component (esum > 0), so that e != (0,...,0) overall
          if (generators || (i < DIM-1 || (i == (DIM-1) && esum > 0)))
            {
              for (typename std::list<Index1D>::const_iterator it(intersecting_1d_generators[i].begin()),
                     itend(intersecting_1d_generators[i].end());
                   it != itend; ++it)
                {
                  helpindex[i] = *it;
                  indices.push_back(helpindex);
                }
            }
          if (!(generators))
            {
              for (typename std::list<Index1D>::const_iterator it(intersecting_1d_wavelets[i].begin()),
                     itend(intersecting_1d_wavelets[i].end());
                   it != itend; ++it)
                {
                  helpindex[i] = *it;
                  indices.push_back(helpindex);
                }
            }
        }
    }

  // compose the results
  typename Index::type_type help_e;
  typename Index::translation_type help_k;
  for (typename list_type::const_iterator it(indices.begin()), itend(indices.end());
       it != itend; ++it)
    {
      for (unsigned int i = 0; i < DIM; i++)
        {
          help_e[i] = (*it)[i].e();
          help_k[i] = (*it)[i].k();
        }
      intersecting.push_back(Index(j, help_e, help_k, &basis));
    }
#else
  // NOT COMPILED: older implementation that computes the positions of the
  // intersecting indices in the global enumeration via index arithmetic.
  // NOTE(review): the descending loops `for (unsigned int i = DIM-2; i >= 0; i--)`
  // below can never terminate via their condition (unsigned underflow); they
  // rely on the `break`s — another reason this branch stays disabled.
  typedef typename CubeBasis<IBASIS,DIM>::Index Index;
  int k = -1;
  if ( generators ) {
    k=0;
  }
  else {
    k=j-basis.j0()+1;
  }
  //std::list<typename Frame::Index> intersect_diff;
  //! generators
  if (true) {
    FixedArray1D<int,DIM>
      minkwavelet, maxkwavelet, minkgen, maxkgen;
    typedef typename IBASIS::Index Index1D;
    int minkkkk;
    int maxkkkk;

    // prepare all intersecting wavelets and generators in the i-th coordinate direction
    for (unsigned int i = 0; i < DIM; i++) {
      get_intersecting_wavelets_on_level(*basis.bases()[i],
                                         Index1D(lambda.j(),
                                                 lambda.e()[i],
                                                 lambda.k()[i],
                                                 basis.bases()[i]),
                                         j, true, minkkkk,maxkkkk);
      minkgen[i]=minkkkk;
      maxkgen[i] = maxkkkk;
      if (!(generators))
        get_intersecting_wavelets_on_level(*basis.bases()[i],
                                           Index1D(lambda.j(),
                                                   lambda.e()[i],
                                                   lambda.k()[i],
                                                   basis.bases()[i]),
                                           j, false, minkkkk,maxkkkk);
      minkwavelet[i] = minkkkk;
      maxkwavelet[i] = maxkkkk;
    } // end for

    unsigned int result = 0;
    int deltaresult = 0;
    int genfstlvl = 0;
    bool gen = 0;
    //const Array1D<Index>* full_collection = &basis.full_collection;
    MultiIndex<int,DIM> type;
    type[DIM-1] = 1;
    unsigned int tmp = 1;
    bool exit = 0;

    // determine how many wavelets there are on all the levels
    // below the level of this index
    if (! gen) {
      result = 0;
      genfstlvl =1;
      //generators on level j0
      for (unsigned int i = 0; i< DIM; i++)
        genfstlvl *= (basis.bases()[i])->Deltasize((basis.bases()[i])->j0());
      //additional wavelets on level j
      // =(#Gen[1]+#Wav[1])*...*(#Gen[Dim-1]+#Wav[Dim-1])
      //  -#Gen[1]*...*#Gen[Dim-1]
      for (int lvl= 0 ;
           lvl < (j -basis.j0());
           lvl++){
        int genCurLvl = 1;
        int addWav = 1;
        for (unsigned int i = 0; i< DIM; i++) {
          unsigned int curJ = basis.bases()[i]->j0()+lvl;
          int genCurDim = (basis.bases()[i])->Deltasize(curJ);
          genCurLvl *= genCurDim;
          addWav *= genCurDim+ (basis.bases()[i])->Nablasize(curJ);
        }
        result += addWav-genCurLvl;
      }
      result += genfstlvl;
    }

    while(!exit){
      FixedArray1D<int,DIM> help1, help2;
      for(unsigned int i = 0; i<DIM; i++)
        help1[i]=0;

      // count how many indices of the current type have a translation index
      // too small for their wavelets to intersect (offset of the first hit)
      unsigned int result2 = 0;
      for (unsigned int i = 0; i < DIM; i++) { // begin for1
        int tmp = 1;
        for (unsigned int l = i+1; l < DIM; l++) {
          if (type[l] == 0)
            tmp *= (basis.bases())[l]->Deltasize(j);
          else
            tmp *= (basis.bases())[l]->Nablasize(j);
        }
        help2[i] = tmp;
        if (type[i] == 0) {
          if (minkgen[i] == (basis.bases())[i]->DeltaLmin())
            continue;
        }
        else
          if (minkwavelet[i] == (basis.bases())[i]->Nablamin())
            continue;
        if (type[i] == 0) {
          tmp *= minkgen[i]-(basis.bases())[i]->DeltaLmin();
        }
        else
          tmp *= minkwavelet[i]-(basis.bases())[i]->Nablamin();
        result2 += tmp;
      } // end for1

      int tmp = 0;
      if (type[DIM-1] == 0) {
        tmp = maxkgen[DIM-1] - minkgen[DIM-1]+1;
      }
      else{
        tmp = maxkwavelet[DIM-1] - minkwavelet[DIM-1]+1;
      }
      bool exit2 = 0;

      while(!exit2){
        // insert the indices that overlap
        for (unsigned int i = result + result2; i < result + result2 + tmp; i++) {
          const Index* ind = basis.get_wavelet(i); //&((*full_collection)[i]);
          intersecting.push_back(*ind);
        }
        for (unsigned int i = DIM-2; i >= 0; i--) {
          if(type[i]==0){
            if ( help1[i] < maxkgen[i]-minkgen[i]) {
              help1[i]++;
              result2 = result2 + help2[i];
              for (unsigned int j = i+1; j<=DIM-2;j++){
                if(type[i] == 0){
                  result2 = result2 - help2[j]*(maxkgen[j] - minkgen[j]+1);
                }
                else
                  result2 = result2 - help2[j]*(maxkwavelet[j] - minkwavelet[j]+1);
              }
              break;
            }
            else {
              help1[i]=0;
              exit2 = (i==0);
              break;
            }
          }
          else {
            if ( help1[i] < maxkwavelet[i] - minkwavelet[i]) {
              help1[i]++;
              result2 = result2 + help2[i];
              for (unsigned int j = i+1; j<=DIM-2;j++){
                if(type[i] == 0){
                  result2 = result2 - help2[j]*(maxkgen[j] - minkgen[j]+1);
                }
                else
                  result2 = result2 - help2[j]*(maxkwavelet[j] - minkwavelet[j]+1);
              }
              break;
            }
            else {
              help1[i]=0;
              exit2 = (i==0);
              break;
            }
          }
        } //end for
      } //end while 2

      // count how many indices of the current type there are on level j
      // (the original comment also mentioned "patches p" — residue from the
      // frame variant of this code)
      tmp = 1;
      for (unsigned int i = 0; i < DIM; i++) {
        if (type[i] == 0)
          tmp *= (basis.bases())[i]->Deltasize(j);
        else
          tmp *= (basis.bases())[i]->Nablasize(j);
      }
      result += tmp;

      // advance to the next type vector e
      for (unsigned int i = DIM-1; i >= 0; i--) {
        if ( type[i] == 1 ) {
          type[i] = 0;
          exit = (i == 0);
          if(exit)
            break;
        }
        else {
          type[i]++;
          break;
        }
      } //end for
    } // end while 1
  } // end if
  // } // end if
  else { // if generators
    // a brute force solution
    typedef typename CubeBasis<IBASIS,DIM>::Support Support;
    Support supp;
    if (generators) {
      for (Index mu = basis.first_generator (j);; ++mu) {
        if (intersect_supports(basis, lambda, mu, supp))
          intersecting.push_back(mu);
        if (mu == basis.last_generator(j)) break;
      }
    }
  }
  //*/
#endif
//#else
#if 0
  // NOT COMPILED: a brute force solution kept for reference
  typedef typename CubeBasis<IBASIS,DIM>::Support Support;
  Support supp;
  if (generators) {
    for (Index mu = first_generator<IBASIS,DIM>(&basis, j);; ++mu) {
      if (intersect_supports(basis, lambda, mu, supp))
        intersecting.push_back(mu);
      if (mu == last_generator<IBASIS,DIM>(&basis, j)) break;
    }
  } else {
    for (Index mu = first_wavelet<IBASIS,DIM>(&basis, j);; ++mu) {
      if (intersect_supports(basis, lambda, mu, supp))
        intersecting.push_back(mu);
      if (mu == last_wavelet<IBASIS,DIM>(&basis, j)) break;
    }
  }
#endif
}
template <class IBASIS, unsigned int DIM>
bool intersect_singular_support(const CubeBasis<IBASIS,DIM>& basis,
const typename CubeBasis<IBASIS,DIM>::Index& lambda,
const typename CubeBasis<IBASIS,DIM>::Index& mu)
{
// we have intersection of the singular supports if and only if
// one of the components have this property in one dimension
typedef typename IBASIS::Index Index1D;
for (unsigned int i = 0; i < DIM; i++) {
if (intersect_singular_support
(*basis.bases()[i],
Index1D(lambda.j(), lambda.e()[i], lambda.k()[i], basis.bases()[i]),
Index1D(mu.j(), mu.e()[i], mu.k()[i], basis.bases()[i])))
return true;
}
return false;
}
}
| agnumerikunimarburg/Marburg_Software_Library | WaveletTL/cube/cube_support.cpp | C++ | mit | 12,820 |
<?php
/**
 * Thin wrapper around a parsed INI configuration file.
 *
 * Settings are exposed as read-only "virtual" properties via __get(),
 * e.g. $config->db_host. Passing array('file' => 'name') to the
 * constructor loads APP/Configs/name.ini immediately.
 **/
class Config extends Application
{
    // Parsed key/value pairs from the INI file (always an array).
    private $config = array();
    public function __construct($param = array())
    {
        if (!empty($param['file'])) {
            $this -> load($param['file']);
        }
    }
    /**
     * Drop all previously loaded settings.
     */
    public function reset()
    {
        $this -> config = array();
    }
    /**
     * Load (and replace) settings from APP/Configs/<filename>.ini.
     * Keeps the current settings if the file is missing or unparsable.
     */
    public function load($filename = '')
    {
        $file = APP . DIRECTORY_SEPARATOR . 'Configs' . DIRECTORY_SEPARATOR . $filename . '.ini';
        if (file_exists($file)) {
            $parsed = parse_ini_file($file);
            // parse_ini_file() returns false on failure; never store a non-array.
            if ($parsed !== false) {
                $this -> config = $parsed;
            }
        }
    }
    /**
     * Read a configuration value; returns null for unknown keys instead of
     * raising an "undefined index" notice.
     */
    public function __get($value) {
        return isset($this -> config[$value]) ? $this -> config[$value] : null;
    }
}
| Claudiu/Moongrace | Moongrace/Lib/config.php | PHP | mit | 542 |
# frozen_string_literal: true
# Reworks course material folders: replaces the self-referential
# parent_folder_id column with a plain parent_id, ties each folder to a
# course, and adds a flag controlling whether students may upload into it.
class AddFieldsToCourseMaterialFolders < ActiveRecord::Migration[4.2]
  def change
    remove_column :course_material_folders, :parent_folder_id, :integer,
                  foreign_key: { references: :course_material_folders }

    add_column :course_material_folders, :parent_id, :integer
    add_column :course_material_folders, :course_id, :integer, null: false
    add_column :course_material_folders, :can_student_upload, :boolean,
               null: false, default: false

    # Sibling folders must have unique names (case-insensitively).
    add_index :course_material_folders, %i[parent_id name],
              unique: true, case_sensitive: false
  end
end
| cysjonathan/coursemology2 | db/migrate/20150812024950_add_fields_to_course_material_folders.rb | Ruby | mit | 604 |
MoeDownloader
======
ๅบไบpython็็ฆๅฉๅพๅ
ๆขๅจ๏ผ็ฎๅๅฏไปฅๅ
ๆข่ๆฆดใ็
่ๅไบๆฌก่ใจใญ็ปๅใใญใฐ่ฟไธไธช็ฝ็ซ็ๅพ็๏ผๅฆๆ้่ฆๅ ๅ
ฅๅ
ถไป็ฝ็ซไนๆฏ่พๅฎนๆใ
ๅบๆฌ็จๆณ:
======
"`
python catch.py [topic]
`"
ๅ
ถไธญ๏ผ๏ผปtopic๏ผฝๅฏไปฅๆฏcaoliuใmoeimgใjandanไธไธช้้กนไนไธ
ๆดๅค็็จๆณ่ฏท่พๅ
ฅ
"`
python catch.py -h
`"
ๆฅๆฅ็
| shanlinfeiniao/MoeDownloader | README.md | Markdown | mit | 385 |
# CPU-side emulation of HSA kernel intrinsics.
#
# Device kernels call intrinsics such as get_global_id(); for host execution
# add_emulation() rewrites those calls so that they take an explicit
# EmulationContext argument carrying the simulated work-item state.
module Emulation
export run_cpu
# Names of the device intrinsics that have a CPU implementation below;
# only calls to these get the extra context argument injected.
const EMULATED_INTRINSICS = [
    :get_global_id,
    :get_global_size
]
# Per-invocation work-item state: the current global id and the total
# global size, one entry per dimension (x, y, z).
type EmulationContext
    global_id
    global_size
    function EmulationContext()
        new(
        [0,0,0],
        [0,0,0]
        )
    end
end
# Emulated version of intrinsic functions
# mainly for testing and execution on the CPU
# NOTE: dim is 0-based (OpenCL/HSA convention), Julia arrays are 1-based.
function get_global_id(ctx::EmulationContext, dim::Int32)
    return ctx.global_id[dim + 1]
end
function get_global_size(ctx::EmulationContext, dim::Int32)
    return ctx.global_size[dim + 1]
end
# Helpers for adding an emulation overload for a kernel
# Symbol used for the injected context parameter; gensym'd to avoid
# colliding with user variable names in the kernel body.
const CTX = gensym("ctx")
# Depth-first traversal applying f to every node of the expression tree.
function visit_ast(f :: Function, ast)
    f(ast)
    if isa(ast, Expr)
        for a in ast.args
            visit_ast(f, a)
        end
    end
end
# AST visitor: rewrites `get_global_id(d)` into
# `Emulation.get_global_id(ctx, d)` (and likewise for the other intrinsics).
function add_intrinsics_ctx_arg(ex)
    if isa(ex, Expr) && ex.head == :call
        fname = ex.args[1]
        if in(fname, EMULATED_INTRINSICS)
            # add context as first argument (after the function name)
            insert!(ex.args, 2, CTX)
            # qualify the call to the Emulation module
            ex.args[1] = :(Emulation.$fname)
        end
    end
end
# Mutates a kernel function definition in place so it can run on the CPU:
# prepends an EmulationContext parameter and rewrites all intrinsic calls.
function add_emulation(fun::Expr)
    sig = fun.args[1]
    # insert ctx as first argument (after the function name)
    insert!(sig.args, 2, :($CTX :: HSA.Emulation.EmulationContext))
    visit_ast(add_intrinsics_ctx_arg, fun)
end
# Sequentially executes `kernel` once per point of the 3D range `rng`,
# passing a context whose global_id reflects the current (x,y,z).
function run_cpu(rng::Tuple{Int,Int,Int}, kernel::Function, args...)
    ctx = Emulation.EmulationContext()
    ctx.global_size = [rng...]
    for x = 0:rng[1]-1
    for y = 0:rng[2]-1
    for z = 0:rng[3]-1
        ctx.global_id[1:3] = [x,y,z]
        kernel(ctx, args...)
    end
    end
    end
end
end # module Emulation
export @hsa_kernel
"""
Marks a function as implementing an HSA kernel
That means that it needs to be handled differently from a host function during
code generation. Also, this macro enables emulation support for the kernel by
adding a method that takes an EmulationContext as an additional argument.
"""
macro hsa_kernel(fun::Expr)
    if(fun.head != :function)
        error("@hsa_kernel must be applied to a function definition")
    end
    # Build the CPU-emulation overload on a copy, leaving the original
    # definition untouched for device code generation.
    emu_fun = copy(fun)
    Emulation.add_emulation(emu_fun)
    if has_hsa_codegen()
        device_fun = HSA.Execution.hsa_kernel(fun)
    else
        # No codegen available: emit nothing for the device side.
        device_fun = quote
        end
    end
    # Splice both methods (device + emulation) into the caller's scope.
    return quote
        $(esc(device_fun))
        $(esc(emu_fun))
    end
end
# if codegen is not available, we need to emulate @hsa
if !has_hsa_codegen()
    include("execution.jl")
end
| JuliaGPU/HSA.jl | src/emulation/emulation.jl | Julia | mit | 2,595 |
package bf.io.openshop.entities;
public class Page {
private long id;
private String title;
private String text;
public Page() {
}
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String getText() {
return text;
}
public void setText(String text) {
this.text = text;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Page page = (Page) o;
if (id != page.id) return false;
if (title != null ? !title.equals(page.title) : page.title != null) return false;
return !(text != null ? !text.equals(page.text) : page.text != null);
}
@Override
public int hashCode() {
int result = (int) (id ^ (id >>> 32));
result = 31 * result + (title != null ? title.hashCode() : 0);
result = 31 * result + (text != null ? text.hashCode() : 0);
return result;
}
@Override
public String toString() {
return "Page{" +
"id=" + id +
", title='" + title + '\'' +
", text='" + text + '\'' +
'}';
}
}
| openshopio/openshop.io-android | app/src/main/java/bf/io/openshop/entities/Page.java | Java | mit | 1,413 |
require 'abstract_unit'
require 'controller/fake_models'
# Empty controller; exists so the routes drawn in the tests below
# (map.resources :customers) can resolve to a real controller class.
class CustomersController < ActionController::Base
end
# Namespaced controller used to verify template lookup under a module
# path (fun/games/hello_world).
module Fun
  class GamesController < ActionController::Base
    # Renders the default template fun/games/hello_world implicitly.
    def hello_world
    end
  end
end
# Helper mixed into the test controller; proves that helper methods
# contributed by a module are callable inside RJS update blocks.
module NewRenderTestHelper
  def rjs_helper_method_from_module
    page.visual_effect :highlight
  end
end
# Custom form builder subclass; partial_with_form_builder_subclass renders
# the partial named after this class (test/_labelling_form).
class LabellingFormBuilder < ActionView::Helpers::FormBuilder
end
# Fixture controller exercising every render/redirect/head variant covered by
# NewRenderTest below: templates, :text/:xml/:inline, partials (with locals,
# collections, spacers, form builders), render_to_string, RJS updates, head
# responses and layout selection. The action bodies are intentionally minimal;
# the assertions in the test class depend on them verbatim.
class NewRenderTestController < ActionController::Base
  layout :determine_layout
  def self.controller_name; "test"; end
  def self.controller_path; "test"; end
  def hello_world
  end
  def render_hello_world
    render :template => "test/hello_world"
  end
  def render_hello_world_from_variable
    @person = "david"
    render :text => "hello #{@person}"
  end
  def render_action_hello_world
    render :action => "hello_world"
  end
  def render_action_hello_world_as_symbol
    render :action => :hello_world
  end
  def render_text_hello_world
    render :text => "hello world"
  end
  def render_text_hello_world_with_layout
    @variable_for_layout = ", I'm here!"
    render :text => "hello world", :layout => true
  end
  def hello_world_with_layout_false
    render :layout => false
  end
  def render_custom_code
    render :text => "hello world", :status => "404 Moved"
  end
  def render_file_with_instance_variables
    @secret = 'in the sauce'
    path = File.join(File.dirname(__FILE__), '../fixtures/test/render_file_with_ivar.erb')
    render :file => path
  end
  def render_file_with_locals
    path = File.join(File.dirname(__FILE__), '../fixtures/test/render_file_with_locals.erb')
    render :file => path, :locals => {:secret => 'in the sauce'}
  end
  def render_file_not_using_full_path
    @secret = 'in the sauce'
    render :file => 'test/render_file_with_ivar', :use_full_path => true
  end
  def render_file_not_using_full_path_with_dot_in_path
    @secret = 'in the sauce'
    render :file => 'test/dot.directory/render_file_with_ivar', :use_full_path => true
  end
  def render_xml_hello
    @name = "David"
    render :template => "test/hello"
  end
  def greeting
    # let's just rely on the template
  end
  def layout_test
    render :action => "hello_world"
  end
  def layout_test_with_different_layout
    render :action => "hello_world", :layout => "standard"
  end
  def rendering_without_layout
    render :action => "hello_world", :layout => false
  end
  def layout_overriding_layout
    render :action => "hello_world", :layout => "standard"
  end
  def rendering_nothing_on_layout
    render :nothing => true
  end
  def builder_layout_test
    render :action => "hello"
  end
  def partials_list
    @test_unchanged = 'hello'
    @customers = [ Customer.new("david"), Customer.new("mary") ]
    render :action => "list"
  end
  def partial_only
    render :partial => true
  end
  def partial_only_with_layout
    render :partial => "partial_only", :layout => true
  end
  def partial_with_locals
    render :partial => "customer", :locals => { :customer => Customer.new("david") }
  end
  def partial_with_form_builder
    render :partial => ActionView::Helpers::FormBuilder.new(:post, nil, @template, {}, Proc.new {})
  end
  def partial_with_form_builder_subclass
    render :partial => LabellingFormBuilder.new(:post, nil, @template, {}, Proc.new {})
  end
  def partial_collection
    render :partial => "customer", :collection => [ Customer.new("david"), Customer.new("mary") ]
  end
  def partial_collection_with_spacer
    render :partial => "customer", :spacer_template => "partial_only", :collection => [ Customer.new("david"), Customer.new("mary") ]
  end
  def partial_collection_with_counter
    render :partial => "customer_counter", :collection => [ Customer.new("david"), Customer.new("mary") ]
  end
  def partial_collection_with_locals
    render :partial => "customer_greeting", :collection => [ Customer.new("david"), Customer.new("mary") ], :locals => { :greeting => "Bonjour" }
  end
  def partial_collection_shorthand_with_locals
    render :partial => [ Customer.new("david"), Customer.new("mary") ], :locals => { :greeting => "Bonjour" }
  end
  def partial_collection_shorthand_with_different_types_of_records
    render :partial => [
        BadCustomer.new("mark"),
        GoodCustomer.new("craig"),
        BadCustomer.new("john"),
        GoodCustomer.new("zach"),
        GoodCustomer.new("brandon"),
        BadCustomer.new("dan") ],
      :locals => { :greeting => "Bonjour" }
  end
  def partial_collection_shorthand_with_different_types_of_records_with_counter
    partial_collection_shorthand_with_different_types_of_records
  end
  def empty_partial_collection
    render :partial => "customer", :collection => []
  end
  def partial_with_hash_object
    render :partial => "hash_object", :object => {:first_name => "Sam"}
  end
  def partial_hash_collection
    render :partial => "hash_object", :collection => [ {:first_name => "Pratik"}, {:first_name => "Amy"} ]
  end
  def partial_hash_collection_with_locals
    render :partial => "hash_greeting", :collection => [ {:first_name => "Pratik"}, {:first_name => "Amy"} ], :locals => { :greeting => "Hola" }
  end
  def partial_with_implicit_local_assignment
    @customer = Customer.new("Marcel")
    render :partial => "customer"
  end
  def missing_partial
    render :partial => 'thisFileIsntHere'
  end
  def hello_in_a_string
    @customers = [ Customer.new("david"), Customer.new("mary") ]
    render :text => "How's there? " << render_to_string(:template => "test/list")
  end
  def render_to_string_with_assigns
    @before = "i'm before the render"
    render_to_string :text => "foo"
    @after = "i'm after the render"
    render :action => "test/hello_world"
  end
  def render_to_string_with_partial
    @partial_only = render_to_string :partial => "partial_only"
    @partial_with_locals = render_to_string :partial => "customer", :locals => { :customer => Customer.new("david") }
    render :action => "test/hello_world"
  end
  def render_to_string_with_exception
    render_to_string :file => "exception that will not be caught - this will certainly not work", :use_full_path => true
  end
  def render_to_string_with_caught_exception
    @before = "i'm before the render"
    begin
      render_to_string :file => "exception that will be caught- hope my future instance vars still work!", :use_full_path => true
    rescue
    end
    @after = "i'm after the render"
    render :action => "test/hello_world"
  end
  def accessing_params_in_template
    render :inline => "Hello: <%= params[:name] %>"
  end
  def accessing_params_in_template_with_layout
    render :layout => nil, :inline => "Hello: <%= params[:name] %>"
  end
  def render_with_explicit_template
    render :template => "test/hello_world"
  end
  # The next three actions deliberately call render/redirect twice; the test
  # class asserts that a DoubleRenderError is raised.
  def double_render
    render :text => "hello"
    render :text => "world"
  end
  def double_redirect
    redirect_to :action => "double_render"
    redirect_to :action => "double_render"
  end
  def render_and_redirect
    render :text => "hello"
    redirect_to :action => "double_render"
  end
  def render_to_string_and_render
    @stuff = render_to_string :text => "here is some cached stuff"
    render :text => "Hi web users! #{@stuff}"
  end
  def rendering_with_conflicting_local_vars
    @name = "David"
    def @template.name() nil end
    render :action => "potential_conflicts"
  end
  def hello_world_from_rxml_using_action
    render :action => "hello_world_from_rxml.builder"
  end
  def hello_world_from_rxml_using_template
    render :template => "test/hello_world_from_rxml.builder"
  end
  def head_with_location_header
    head :location => "/foo"
  end
  def head_with_symbolic_status
    head :status => params[:status].intern
  end
  def head_with_integer_status
    head :status => params[:status].to_i
  end
  def head_with_string_status
    head :status => params[:status]
  end
  def head_with_custom_header
    head :x_custom_header => "something"
  end
  def head_with_status_code_first
    head :forbidden, :x_custom_header => "something"
  end
  def render_with_location
    render :xml => "<hello/>", :location => "http://example.com", :status => 201
  end
  def render_with_object_location
    customer = Customer.new("Some guy", 1)
    render :xml => "<customer/>", :location => customer_url(customer), :status => :created
  end
  def render_with_to_xml
    to_xmlable = Class.new do
      def to_xml
        "<i-am-xml/>"
      end
    end.new
    render :xml => to_xmlable
  end
  # Helpers for the RJS tests: one via module, one via inline block.
  helper NewRenderTestHelper
  helper do
    def rjs_helper_method(value)
      page.visual_effect :highlight, value
    end
  end
  def enum_rjs_test
    render :update do |page|
      page.select('.product').each do |value|
        page.rjs_helper_method_from_module
        page.rjs_helper_method(value)
        page.sortable(value, :url => { :action => "order" })
        page.draggable(value)
      end
    end
  end
  def delete_with_js
    @project_id = 4
  end
  def render_js_with_explicit_template
    @project_id = 4
    render :template => 'test/delete_with_js'
  end
  def render_js_with_explicit_action_template
    @project_id = 4
    render :action => 'delete_with_js'
  end
  def update_page
    render :update do |page|
      page.replace_html 'balance', '$37,000,000.00'
      page.visual_effect :highlight, 'balance'
    end
  end
  def update_page_with_instance_variables
    @money = '$37,000,000.00'
    @div_id = 'balance'
    render :update do |page|
      page.replace_html @div_id, @money
      page.visual_effect :highlight, @div_id
    end
  end
  def action_talk_to_layout
    # Action template sets variable that's picked up by layout
  end
  def render_text_with_assigns
    @hello = "world"
    render :text => "foo"
  end
  def yield_content_for
    render :action => "content_for", :layout => "yield"
  end
  def render_content_type_from_body
    response.content_type = Mime::RSS
    render :text => "hello world!"
  end
  def render_call_to_partial_with_layout
    render :action => "calling_partial_with_layout"
  end
  def render_call_to_partial_with_layout_in_main_layout_and_within_content_for_layout
    render :action => "calling_partial_with_layout"
  end
  def render_using_layout_around_block
    render :action => "using_layout_around_block"
  end
  def render_using_layout_around_block_in_main_layout_and_within_content_for_layout
    render :action => "using_layout_around_block"
  end
  def rescue_action(e) raise end
  private
    # Maps each action name to the layout the tests expect; actions not
    # listed here render without an explicit layout.
    def determine_layout
      case action_name
        when "hello_world", "layout_test", "rendering_without_layout",
             "rendering_nothing_on_layout", "render_text_hello_world",
             "render_text_hello_world_with_layout",
             "hello_world_with_layout_false",
             "partial_only", "partial_only_with_layout",
             "accessing_params_in_template",
             "accessing_params_in_template_with_layout",
             "render_with_explicit_template",
             "render_js_with_explicit_template",
             "render_js_with_explicit_action_template",
             "delete_with_js", "update_page", "update_page_with_instance_variables"
          "layouts/standard"
        when "builder_layout_test"
          "layouts/builder"
        when "action_talk_to_layout", "layout_overriding_layout"
          "layouts/talk_from_action"
        when "render_call_to_partial_with_layout_in_main_layout_and_within_content_for_layout"
          "layouts/partial_with_layout"
        when "render_using_layout_around_block_in_main_layout_and_within_content_for_layout"
          "layouts/block_with_layout"
      end
    end
end
# Point both fixture controllers at the shared template fixtures directory.
NewRenderTestController.view_paths = [ File.dirname(__FILE__) + "/../fixtures/" ]
Fun::GamesController.view_paths = [ File.dirname(__FILE__) + "/../fixtures/" ]
class NewRenderTest < Test::Unit::TestCase
def setup
@controller = NewRenderTestController.new
# enable a logger so that (e.g.) the benchmarking stuff runs, so we can get
# a more accurate simulation of what happens in "real life".
@controller.logger = Logger.new(nil)
@request = ActionController::TestRequest.new
@response = ActionController::TestResponse.new
@request.host = "www.nextangle.com"
end
def test_simple_show
get :hello_world
assert_response :success
assert_template "test/hello_world"
assert_equal "<html>Hello world!</html>", @response.body
end
def test_do_with_render
get :render_hello_world
assert_template "test/hello_world"
end
def test_do_with_render_from_variable
get :render_hello_world_from_variable
assert_equal "hello david", @response.body
end
def test_do_with_render_action
get :render_action_hello_world
assert_template "test/hello_world"
end
def test_do_with_render_action_as_symbol
get :render_action_hello_world_as_symbol
assert_template "test/hello_world"
end
def test_do_with_render_text
get :render_text_hello_world
assert_equal "hello world", @response.body
end
def test_do_with_render_text_and_layout
get :render_text_hello_world_with_layout
assert_equal "<html>hello world, I'm here!</html>", @response.body
end
def test_do_with_render_action_and_layout_false
get :hello_world_with_layout_false
assert_equal 'Hello world!', @response.body
end
def test_do_with_render_custom_code
get :render_custom_code
assert_response :missing
end
def test_render_file_with_instance_variables
get :render_file_with_instance_variables
assert_equal "The secret is in the sauce\n", @response.body
end
def test_render_file_not_using_full_path
get :render_file_not_using_full_path
assert_equal "The secret is in the sauce\n", @response.body
end
def test_render_file_not_using_full_path_with_dot_in_path
get :render_file_not_using_full_path_with_dot_in_path
assert_equal "The secret is in the sauce\n", @response.body
end
def test_render_file_with_locals
get :render_file_with_locals
assert_equal "The secret is in the sauce\n", @response.body
end
def test_attempt_to_access_object_method
assert_raises(ActionController::UnknownAction, "No action responded to [clone]") { get :clone }
end
def test_private_methods
assert_raises(ActionController::UnknownAction, "No action responded to [determine_layout]") { get :determine_layout }
end
def test_access_to_request_in_view
view_internals_old_value = ActionController::Base.view_controller_internals
ActionController::Base.view_controller_internals = false
ActionController::Base.protected_variables_cache = nil
get :hello_world
assert !assigns.include?('_request'), '_request should not be in assigns'
assert !assigns.include?('request'), 'request should not be in assigns'
ActionController::Base.view_controller_internals = true
ActionController::Base.protected_variables_cache = nil
get :hello_world
assert !assigns.include?('request'), 'request should not be in assigns'
assert_kind_of ActionController::AbstractRequest, assigns['_request']
assert_kind_of ActionController::AbstractRequest, @response.template.request
ensure
ActionController::Base.view_controller_internals = view_internals_old_value
ActionController::Base.protected_variables_cache = nil
end
def test_render_xml
get :render_xml_hello
assert_equal "<html>\n <p>Hello David</p>\n<p>This is grand!</p>\n</html>\n", @response.body
end
def test_enum_rjs_test
get :enum_rjs_test
assert_equal <<-EOS.strip, @response.body
$$(".product").each(function(value, index) {
new Effect.Highlight(element,{});
new Effect.Highlight(value,{});
Sortable.create(value, {onUpdate:function(){new Ajax.Request('/test/order', {asynchronous:true, evalScripts:true, parameters:Sortable.serialize(value)})}});
new Draggable(value, {});
});
EOS
end
def test_render_xml_with_default
get :greeting
assert_equal "<p>This is grand!</p>\n", @response.body
end
def test_render_with_default_from_accept_header
@request.env["HTTP_ACCEPT"] = "text/javascript"
get :greeting
assert_equal "$(\"body\").visualEffect(\"highlight\");", @response.body
end
def test_render_rjs_with_default
get :delete_with_js
assert_equal %!Element.remove("person");\nnew Effect.Highlight(\"project-4\",{});!, @response.body
end
def test_render_rjs_template_explicitly
get :render_js_with_explicit_template
assert_equal %!Element.remove("person");\nnew Effect.Highlight(\"project-4\",{});!, @response.body
end
def test_rendering_rjs_action_explicitly
get :render_js_with_explicit_action_template
assert_equal %!Element.remove("person");\nnew Effect.Highlight(\"project-4\",{});!, @response.body
end
def test_layout_rendering
get :layout_test
assert_equal "<html>Hello world!</html>", @response.body
end
def test_layout_test_with_different_layout
get :layout_test_with_different_layout
assert_equal "<html>Hello world!</html>", @response.body
end
def test_rendering_without_layout
get :rendering_without_layout
assert_equal "Hello world!", @response.body
end
def test_layout_overriding_layout
get :layout_overriding_layout
assert_no_match %r{<title>}, @response.body
end
def test_rendering_nothing_on_layout
get :rendering_nothing_on_layout
assert_equal " ", @response.body
end
def test_render_xml_with_layouts
get :builder_layout_test
assert_equal "<wrapper>\n<html>\n <p>Hello </p>\n<p>This is grand!</p>\n</html>\n</wrapper>\n", @response.body
end
def test_partial_only
get :partial_only
assert_equal "only partial", @response.body
end
def test_partial_only_with_layout
get :partial_only_with_layout
assert_equal "<html>only partial</html>", @response.body
end
def test_render_to_string
assert_not_deprecated { get :hello_in_a_string }
assert_equal "How's there? goodbyeHello: davidHello: marygoodbye\n", @response.body
end
def test_render_to_string_doesnt_break_assigns
get :render_to_string_with_assigns
assert_equal "i'm before the render", assigns(:before)
assert_equal "i'm after the render", assigns(:after)
end
def test_render_to_string_partial
get :render_to_string_with_partial
assert_equal "only partial", assigns(:partial_only)
assert_equal "Hello: david", assigns(:partial_with_locals)
end
def test_bad_render_to_string_still_throws_exception
assert_raises(ActionController::MissingTemplate) { get :render_to_string_with_exception }
end
def test_render_to_string_that_throws_caught_exception_doesnt_break_assigns
assert_nothing_raised { get :render_to_string_with_caught_exception }
assert_equal "i'm before the render", assigns(:before)
assert_equal "i'm after the render", assigns(:after)
end
def test_nested_rendering
get :hello_world
assert_equal "Living in a nested world", Fun::GamesController.process(@request, @response).body
end
def test_accessing_params_in_template
get :accessing_params_in_template, :name => "David"
assert_equal "Hello: David", @response.body
end
def test_accessing_params_in_template_with_layout
get :accessing_params_in_template_with_layout, :name => "David"
assert_equal "<html>Hello: David</html>", @response.body
end
def test_render_with_explicit_template
get :render_with_explicit_template
assert_response :success
end
def test_double_render
assert_raises(ActionController::DoubleRenderError) { get :double_render }
end
def test_double_redirect
assert_raises(ActionController::DoubleRenderError) { get :double_redirect }
end
def test_render_and_redirect
assert_raises(ActionController::DoubleRenderError) { get :render_and_redirect }
end
# specify the one exception to double render rule - render_to_string followed by render
def test_render_to_string_and_render
get :render_to_string_and_render
assert_equal("Hi web users! here is some cached stuff", @response.body)
end
def test_rendering_with_conflicting_local_vars
get :rendering_with_conflicting_local_vars
assert_equal("First: David\nSecond: Stephan\nThird: David\nFourth: David\nFifth: ", @response.body)
end
def test_action_talk_to_layout
get :action_talk_to_layout
assert_equal "<title>Talking to the layout</title>\nAction was here!", @response.body
end
def test_partials_list
get :partials_list
assert_equal "goodbyeHello: davidHello: marygoodbye\n", @response.body
end
def test_partial_with_locals
get :partial_with_locals
assert_equal "Hello: david", @response.body
end
def test_partial_with_form_builder
get :partial_with_form_builder
assert_match(/<label/, @response.body)
assert_template('test/_form')
end
def test_partial_with_form_builder_subclass
get :partial_with_form_builder_subclass
assert_match(/<label/, @response.body)
assert_template('test/_labelling_form')
end
def test_partial_collection
get :partial_collection
assert_equal "Hello: davidHello: mary", @response.body
end
def test_partial_collection_with_counter
get :partial_collection_with_counter
assert_equal "david1mary2", @response.body
end
def test_partial_collection_with_locals
get :partial_collection_with_locals
assert_equal "Bonjour: davidBonjour: mary", @response.body
end
def test_partial_collection_with_spacer
get :partial_collection_with_spacer
assert_equal "Hello: davidonly partialHello: mary", @response.body
end
def test_partial_collection_shorthand_with_locals
get :partial_collection_shorthand_with_locals
assert_equal "Bonjour: davidBonjour: mary", @response.body
end
def test_partial_collection_shorthand_with_different_types_of_records
get :partial_collection_shorthand_with_different_types_of_records
assert_equal "Bonjour bad customer: mark1Bonjour good customer: craig2Bonjour bad customer: john3Bonjour good customer: zach4Bonjour good customer: brandon5Bonjour bad customer: dan6", @response.body
end
def test_empty_partial_collection
get :empty_partial_collection
assert_equal " ", @response.body
end
def test_partial_with_hash_object
get :partial_with_hash_object
assert_equal "Sam\nmaS\n", @response.body
end
def test_hash_partial_collection
get :partial_hash_collection
assert_equal "Pratik\nkitarP\nAmy\nymA\n", @response.body
end
def test_partial_hash_collection_with_locals
get :partial_hash_collection_with_locals
assert_equal "Hola: PratikHola: Amy", @response.body
end
def test_partial_with_implicit_local_assignment
get :partial_with_implicit_local_assignment
assert_equal "Hello: Marcel", @response.body
end
def test_render_missing_partial_template
assert_raises(ActionView::ActionViewError) do
get :missing_partial
end
end
def test_render_text_with_assigns
get :render_text_with_assigns
assert_equal "world", assigns["hello"]
end
def test_update_page
get :update_page
assert_template nil
assert_equal 'text/javascript; charset=utf-8', @response.headers['type']
assert_equal 2, @response.body.split($/).length
end
def test_update_page_with_instance_variables
get :update_page_with_instance_variables
assert_template nil
assert_equal 'text/javascript; charset=utf-8', @response.headers['type']
assert_match /balance/, @response.body
assert_match /\$37/, @response.body
end
def test_yield_content_for
assert_not_deprecated { get :yield_content_for }
assert_equal "<title>Putting stuff in the title!</title>\n\nGreat stuff!\n", @response.body
end
def test_overwritting_rendering_relative_file_with_extension
get :hello_world_from_rxml_using_template
assert_equal "<html>\n <p>Hello</p>\n</html>\n", @response.body
get :hello_world_from_rxml_using_action
assert_equal "<html>\n <p>Hello</p>\n</html>\n", @response.body
end
def test_head_with_location_header
get :head_with_location_header
assert @response.body.blank?
assert_equal "/foo", @response.headers["Location"]
assert_response :ok
end
def test_head_with_custom_header
get :head_with_custom_header
assert @response.body.blank?
assert_equal "something", @response.headers["X-Custom-Header"]
assert_response :ok
end
def test_head_with_symbolic_status
get :head_with_symbolic_status, :status => "ok"
assert_equal "200 OK", @response.headers["Status"]
assert_response :ok
get :head_with_symbolic_status, :status => "not_found"
assert_equal "404 Not Found", @response.headers["Status"]
assert_response :not_found
ActionController::StatusCodes::SYMBOL_TO_STATUS_CODE.each do |status, code|
get :head_with_symbolic_status, :status => status.to_s
assert_equal code, @response.response_code
assert_response status
end
end
def test_head_with_integer_status
ActionController::StatusCodes::STATUS_CODES.each do |code, message|
get :head_with_integer_status, :status => code.to_s
assert_equal message, @response.message
end
end
def test_head_with_string_status
get :head_with_string_status, :status => "404 Eat Dirt"
assert_equal 404, @response.response_code
assert_equal "Eat Dirt", @response.message
assert_response :not_found
end
def test_head_with_status_code_first
get :head_with_status_code_first
assert_equal 403, @response.response_code
assert_equal "Forbidden", @response.message
assert_equal "something", @response.headers["X-Custom-Header"]
assert_response :forbidden
end
def test_rendering_with_location_should_set_header
get :render_with_location
assert_equal "http://example.com", @response.headers["Location"]
end
def test_rendering_xml_should_call_to_xml_if_possible
get :render_with_to_xml
assert_equal "<i-am-xml/>", @response.body
end
def test_rendering_with_object_location_should_set_header_with_url_for
ActionController::Routing::Routes.draw do |map|
map.resources :customers
map.connect ':controller/:action/:id'
end
get :render_with_object_location
assert_equal "http://www.nextangle.com/customers/1", @response.headers["Location"]
end
def test_render_call_to_partial_with_layout
get :render_call_to_partial_with_layout
assert_equal "Before (David)\nInside from partial (David)\nAfter", @response.body
end
def test_render_call_to_partial_with_layout_in_main_layout_and_within_content_for_layout
get :render_call_to_partial_with_layout_in_main_layout_and_within_content_for_layout
assert_equal "Before (Anthony)\nInside from partial (Anthony)\nAfter\nBefore (David)\nInside from partial (David)\nAfter\nBefore (Ramm)\nInside from partial (Ramm)\nAfter", @response.body
end
def test_using_layout_around_block
get :render_using_layout_around_block
assert_equal "Before (David)\nInside from block\nAfter", @response.body
end
# Layout-around-block should also work when invoked several times from
# within the main layout.
def test_using_layout_around_block_in_main_layout_and_within_content_for_layout
  get :render_using_layout_around_block_in_main_layout_and_within_content_for_layout
  assert_equal "Before (Anthony)\nInside from first block in layout\nAfter\nBefore (David)\nInside from block\nAfter\nBefore (Ramm)\nInside from second block in layout\nAfter\n", @response.body
end
end
| chip/signal-wiki | vendor/rails/actionpack/test/controller/new_render_test.rb | Ruby | mit | 27,616 |
/**
* Bootstrap
* (sails.config.bootstrap)
*
* An asynchronous bootstrap function that runs before your Sails app gets lifted.
* This gives you an opportunity to set up your data model, run jobs, or perform some special logic.
*
* For more information on bootstrapping your app, check out:
* http://sailsjs.org/#!/documentation/reference/sails.config/sails.config.bootstrap.html
*/
module.exports.bootstrap = function(cb) {
  // Register the passport authentication strategies before the app lifts.
  sails.services.passport.loadStrategies();

  // CRON JOBS FOR INFLUENCERS, HASHTAGS, MENTIONS
  // Each key in sails.config.cron is a cron pattern; its value is the task
  // to execute on that schedule. Jobs start immediately (4th arg = true).
  const TIMEZONE = 'America/Los_Angeles';
  const CronJob = require('cron').CronJob;
  for (const pattern of Object.keys(sails.config.cron)) {
    const task = sails.config.cron[pattern];
    new CronJob(pattern, task, null, true, TIMEZONE);
  }

  // Start consuming the twitter stream.
  sails.config.twitterstream();

  // It's very important to trigger this callback method when finished with
  // the bootstrap (otherwise the server never lifts).
  cb();
};
| piket/twitter-mafia | config/bootstrap.js | JavaScript | mit | 1,211 |
/*
* Exchange Web Services Managed API
*
* Copyright (c) Microsoft Corporation
* All rights reserved.
*
* MIT License
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this
* software and associated documentation files (the "Software"), to deal in the Software
* without restriction, including without limitation the rights to use, copy, modify, merge,
* publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
* to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or
* substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
* INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
* PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
* FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
* OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
namespace Microsoft.Exchange.WebServices.Data
{
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Xml;
/// <summary>
/// Represents the response to a folder search operation.
/// </summary>
/// <summary>
/// Represents the response to a folder search operation.
/// </summary>
internal sealed class FindFolderResponse : ServiceResponse
{
    private FindFoldersResults results = new FindFoldersResults();
    private PropertySet propertySet;

    /// <summary>
    /// Reads response elements from XML.
    /// </summary>
    /// <param name="reader">The reader.</param>
    internal override void ReadElementsFromXml(EwsServiceXmlReader reader)
    {
        reader.ReadStartElement(XmlNamespace.Messages, XmlElementNames.RootFolder);

        this.results.TotalCount = reader.ReadAttributeValue<int>(XmlAttributeNames.TotalItemsInView);
        this.results.MoreAvailable = !reader.ReadAttributeValue<bool>(XmlAttributeNames.IncludesLastItemInRange);

        // Ignore IndexedPagingOffset attribute if MoreAvailable is false.
        this.results.NextPageOffset = results.MoreAvailable ? reader.ReadNullableAttributeValue<int>(XmlAttributeNames.IndexedPagingOffset) : null;

        reader.ReadStartElement(XmlNamespace.Types, XmlElementNames.Folders);
        if (!reader.IsEmptyElement)
        {
            do
            {
                reader.Read();

                if (reader.NodeType == XmlNodeType.Element)
                {
                    // Use the factory helper (previously dead code duplicating this
                    // call inline) so folder instantiation happens in one place.
                    Folder folder = this.CreateFolderInstance(reader.Service, reader.LocalName);

                    if (folder == null)
                    {
                        // Unknown folder element: skip it rather than failing.
                        reader.SkipCurrentElement();
                    }
                    else
                    {
                        folder.LoadFromXml(
                            reader,
                            true, /* clearPropertyBag */
                            this.propertySet,
                            true /* summaryPropertiesOnly */);

                        this.results.Folders.Add(folder);
                    }
                }
            }
            while (!reader.IsEndElement(XmlNamespace.Types, XmlElementNames.Folders));
        }

        reader.ReadEndElement(XmlNamespace.Messages, XmlElementNames.RootFolder);
    }

    /// <summary>
    /// Creates a folder instance from an XML element name; returns null for
    /// element names that do not map to a known folder class.
    /// </summary>
    /// <param name="service">The service.</param>
    /// <param name="xmlElementName">Name of the XML element.</param>
    /// <returns>Folder</returns>
    private Folder CreateFolderInstance(ExchangeService service, string xmlElementName)
    {
        return EwsUtilities.CreateEwsObjectFromXmlElementName<Folder>(service, xmlElementName);
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="FindFolderResponse"/> class.
    /// </summary>
    /// <param name="propertySet">The property set from, the request.</param>
    internal FindFolderResponse(PropertySet propertySet)
        : base()
    {
        this.propertySet = propertySet;

        EwsUtilities.Assert(
            this.propertySet != null,
            "FindFolderResponse.ctor",
            "PropertySet should not be null");
    }

    /// <summary>
    /// Gets the results of the search operation.
    /// </summary>
    public FindFoldersResults Results
    {
        get { return this.results; }
    }
}
} | axelitus/fork-ews-managed-api | Core/Responses/FindFolderResponse.cs | C# | mit | 5,021 |
// Board identification strings reported by MicroPython.
#define MICROPY_HW_BOARD_NAME "CustomPCB"
#define MICROPY_HW_MCU_NAME "STM32F439"

// Peripheral feature switches for this board.
#define MICROPY_HW_HAS_FLASH (1)
#define MICROPY_HW_ENABLE_RNG (1)
#define MICROPY_HW_ENABLE_RTC (1)
#define MICROPY_HW_ENABLE_DAC (1)
#define MICROPY_HW_ENABLE_USB (1)
#define MICROPY_HW_ENABLE_SDCARD (1) // works with no SD card too

// SD card detect switch
#if MICROPY_HW_ENABLE_SDCARD
#define MICROPY_HW_SDCARD_DETECT_PIN (pin_A8)
#define MICROPY_HW_SDCARD_DETECT_PULL (GPIO_PULLUP)
#define MICROPY_HW_SDCARD_DETECT_PRESENT (1)
#endif

// HSE is 8MHz
#define MICROPY_HW_CLK_PLLM (8) //divide external clock by this to get 1MHz
#define MICROPY_HW_CLK_PLLN (384) //this number is the PLL clock in MHz
#define MICROPY_HW_CLK_PLLP (RCC_PLLP_DIV2) //divide PLL clock by this to get core clock
#define MICROPY_HW_CLK_PLLQ (8) //divide core clock by this to get 48MHz

// USB config
#define MICROPY_HW_USB_FS (1)

// UART config
#define MICROPY_HW_UART1_TX (pin_A9)
#define MICROPY_HW_UART1_RX (pin_A10)
#define MICROPY_HW_UART2_TX (pin_D5)
#define MICROPY_HW_UART2_RX (pin_D6)
#define MICROPY_HW_UART2_RTS (pin_D1)
#define MICROPY_HW_UART2_CTS (pin_D0)
#define MICROPY_HW_UART3_TX (pin_D8)
#define MICROPY_HW_UART3_RX (pin_D9)
#define MICROPY_HW_UART3_RTS (pin_D12)
#define MICROPY_HW_UART3_CTS (pin_D11)
#define MICROPY_HW_UART4_TX (pin_A0)
#define MICROPY_HW_UART4_RX (pin_A1)
#define MICROPY_HW_UART6_TX (pin_C6)
#define MICROPY_HW_UART6_RX (pin_C7)

// I2C buses
#define MICROPY_HW_I2C1_SCL (pin_A8)
#define MICROPY_HW_I2C1_SDA (pin_C9)

// SPI buses
#define MICROPY_HW_SPI1_NSS (pin_A4)
#define MICROPY_HW_SPI1_SCK (pin_A5)
#define MICROPY_HW_SPI1_MISO (pin_A6)
#define MICROPY_HW_SPI1_MOSI (pin_A7)
#if MICROPY_HW_USB_HS_IN_FS
// The HS USB uses B14 & B15 for D- and D+
#else
#define MICROPY_HW_SPI2_NSS (pin_B12)
#define MICROPY_HW_SPI2_SCK (pin_B13)
#define MICROPY_HW_SPI2_MISO (pin_B14)
#define MICROPY_HW_SPI2_MOSI (pin_B15)
#endif
#define MICROPY_HW_SPI3_NSS (pin_E11)
#define MICROPY_HW_SPI3_SCK (pin_E12)
#define MICROPY_HW_SPI3_MISO (pin_E13)
#define MICROPY_HW_SPI3_MOSI (pin_E14)
//#define MICROPY_HW_SPI4_NSS (pin_E11)
//#define MICROPY_HW_SPI4_SCK (pin_E12)
//#define MICROPY_HW_SPI4_MISO (pin_E13)
//#define MICROPY_HW_SPI4_MOSI (pin_E14)
//#define MICROPY_HW_SPI5_NSS (pin_F6)
//#define MICROPY_HW_SPI5_SCK (pin_F7)
//#define MICROPY_HW_SPI5_MISO (pin_F8)
//#define MICROPY_HW_SPI5_MOSI (pin_F9)
//#define MICROPY_HW_SPI6_NSS (pin_G8)
//#define MICROPY_HW_SPI6_SCK (pin_G13)
//#define MICROPY_HW_SPI6_MISO (pin_G12)
//#define MICROPY_HW_SPI6_MOSI (pin_G14)

// CAN buses
#define MICROPY_HW_CAN1_TX (pin_B9)
#define MICROPY_HW_CAN1_RX (pin_B8)
#define MICROPY_HW_CAN2_TX (pin_B13)
#define MICROPY_HW_CAN2_RX (pin_B12)

// USRSW is pulled low. Pressing the button makes the input go high.
#define MICROPY_HW_USRSW_PIN (pin_A0)
#define MICROPY_HW_USRSW_PULL (GPIO_NOPULL)
#define MICROPY_HW_USRSW_EXTI_MODE (GPIO_MODE_IT_RISING)
#define MICROPY_HW_USRSW_PRESSED (1)
| bvernoux/micropython | ports/stm32/boards/STM32F439/mpconfigboard.h | C | mit | 3,172 |
const latestIncome = require('./latestIncome')
const latestSpending = require('./latestSpending')
// Builds the per-search aggregation resolvers for charity finances.
// Each property is a thunk, so the underlying aggregation only runs when
// the corresponding field is actually requested.
function aggFinances(search) {
  const resolvers = {
    latestIncome: function() { return latestIncome(search) },
    latestSpending: function() { return latestSpending(search) },
  }
  return resolvers
}

module.exports = aggFinances
| tithebarn/charity-base | graphql/resolvers/query/CHC/getCharities/aggregate/finances/index.js | JavaScript | mit | 273 |
#ifndef LEON_RTEMS_CONFIG_H_
#define LEON_RTEMS_CONFIG_H_

#ifndef _RTEMS_CONFIG_H_
#define _RTEMS_CONFIG_H_

#include <OsDrvCpr.h>

#if defined(__RTEMS__)

#if !defined (__CONFIG__)
#define __CONFIG__

/* ask the system to generate a configuration table */
#define CONFIGURE_INIT

#ifndef RTEMS_POSIX_API
#define RTEMS_POSIX_API
#endif

/* Scheduler tick granularity and timeslice length. */
#define CONFIGURE_MICROSECONDS_PER_TICK 1000 /* 1 millisecond */
#define CONFIGURE_TICKS_PER_TIMESLICE 50 /* 50 milliseconds */

#define CONFIGURE_APPLICATION_NEEDS_CONSOLE_DRIVER
#define CONFIGURE_APPLICATION_NEEDS_CLOCK_DRIVER
#define CONFIGURE_POSIX_INIT_THREAD_TABLE
#define CONFIGURE_MINIMUM_TASK_STACK_SIZE (8192)

/* Object limits for the classic and POSIX APIs. */
#define CONFIGURE_MAXIMUM_TASKS 20
#define CONFIGURE_MAXIMUM_POSIX_THREADS 5
#define CONFIGURE_MAXIMUM_POSIX_MUTEXES 8
#define CONFIGURE_MAXIMUM_POSIX_KEYS 8
#define CONFIGURE_MAXIMUM_POSIX_SEMAPHORES 8
#define CONFIGURE_MAXIMUM_POSIX_MESSAGE_QUEUES 8
#define CONFIGURE_MAXIMUM_POSIX_TIMERS 4
#define CONFIGURE_MAXIMUM_TIMERS 4

/* Entry point of the POSIX init thread. */
void POSIX_Init( void *args );

/* Fatal-error hook installed through the initial extensions table below. */
static void Fatal_extension( Internal_errors_Source the_source,
                             bool is_internal,
                             uint32_t the_error );

#define CONFIGURE_MAXIMUM_USER_EXTENSIONS 1
#define CONFIGURE_INITIAL_EXTENSIONS { .fatal = Fatal_extension }

#include <SDCardIORTEMSConfig.h>
#include <rtems/confdefs.h>

#endif // __CONFIG__
#endif // __RTEMS__

// Set the system clocks
BSP_SET_CLOCK( DEFAULT_REFCLOCK, 200000, 1, 1,
               DEFAULT_RTEMS_CSS_LOS_CLOCKS, DEFAULT_RTEMS_MSS_LRT_CLOCKS,
               0, 0, 0 );

// Set L2 cache behaviour
BSP_SET_L2C_CONFIG( 1, DEFAULT_RTEMS_L2C_REPLACEMENT_POLICY,
                    DEFAULT_RTEMS_L2C_WAYS, DEFAULT_RTEMS_L2C_MODE, 0, 0 );

#endif // _RTEMS_CONFIG_H_
#endif // LEON_RTEMS_CONFIG_H_
| EyesOfThings/Software | Platform Software/myriad/apps/sdcard_wifi_example/leon/rtems_config.h | C | mit | 1,928 |
require 'spec_helper'
require 'generator_spec/test_case'
require 'generators/refinery/engine/engine_generator'
module Refinery
  # Exercises the engine generator when a second resource is generated into
  # an already-existing extension directory (--extension/--namespace/--skip).
  describe EngineGenerator do
    include GeneratorSpec::TestCase
    destination File.expand_path("../../../../../../tmp", __FILE__)

    before do
      prepare_destination
      # First run creates the rspec_product_tests extension from scratch.
      run_generator %w{ rspec_product_test title:string description:text image:image brochure:resource }
    end

    context "when generating a resource inside existing extensions dir" do
      before do
        # Second run adds another resource into the same extension.
        run_generator %w{ rspec_item_test title:string --extension rspec_product_tests --namespace rspec_product_tests --skip }
      end

      it "creates a new migration with the new resource" do
        # The new migration must be numbered after the original one.
        destination_root.should have_structure {
          directory "vendor" do
            directory "extensions" do
              directory "rspec_product_tests" do
                directory "db" do
                  directory "migrate" do
                    file "2_create_rspec_product_tests_rspec_item_tests.rb"
                  end
                end
              end
            end
          end
        }
      end

      it "appends routes to the routes file" do
        # One route entry for the admin namespace and one for the frontend.
        File.open("#{destination_root}/vendor/extensions/rspec_product_tests/config/routes.rb") do |file|
          file.grep(%r{rspec_item_tests}).count.should eq(2)
        end
      end
    end
  end
end
| resolve/refinerycms | core/spec/lib/generators/refinery/engine/engine_generator_multiple_resources_spec.rb | Ruby | mit | 1,394 |
---
title: The Title of the Book
date: 30/12/2018
---
According to Rev. 1:1, the title of the book is "The Revelation of Jesus Christ". It is a self-revelation of Him to His people and an expression of His care for them.
The book is the unveiling of Jesus Christ and it is both from Jesus and about Him. He is the focus of its content and its central figure.
It begins where the four Gospels end, with Jesus's resurrection and ascension into heaven and the continuation of his work of salvation.
Together with the Epistle to the Hebrews, Revelation emphasizes Jesus's heavenly ministry. Without Revelation or Hebrews, our knowledge of Christ's high-priestly ministry in heaven on behalf of His people would be very limited.
| imasaru/sabbath-school-lessons | src/en/2019-01-45-sec/01/02.md | Markdown | mit | 729 |
package inputs
import (
"fmt"
)
// DiskIO is based on telegraf DiskIO.
// It carries no configuration of its own; all behaviour comes from the
// embedded baseInput plus the sample TOML emitted by TOML().
type DiskIO struct {
	baseInput
}
// PluginName is based on telegraf plugin name.
func (d *DiskIO) PluginName() string {
	const pluginName = "diskio"
	return pluginName
}
// UnmarshalTOML decodes the parsed data to the object.
// DiskIO keeps no local configuration state, so there is nothing to decode
// and the input is intentionally ignored.
func (d *DiskIO) UnmarshalTOML(data interface{}) error {
	return nil
}
// TOML encodes to toml string.
// The returned snippet is the default telegraf stanza for the diskio input;
// every option is commented out so telegraf gathers stats for all devices.
func (d *DiskIO) TOML() string {
	return fmt.Sprintf(`[[inputs.%s]]
  ## By default, telegraf will gather stats for all devices including
  ## disk partitions.
  ## Setting devices will restrict the stats to the specified devices.
  # devices = ["sda", "sdb", "vd*"]
  ## Uncomment the following line if you need disk serial numbers.
  # skip_serial_number = false
  #
  ## On systems which support it, device metadata can be added in the form of
  ## tags.
  ## Currently only Linux is supported via udev properties. You can view
  ## available properties for a device by running:
  ## 'udevadm info -q property -n /dev/sda'
  ## Note: Most, but not all, udev properties can be accessed this way. Properties
  ## that are currently inaccessible include DEVTYPE, DEVNAME, and DEVPATH.
  # device_tags = ["ID_FS_TYPE", "ID_FS_USAGE"]
  #
  ## Using the same metadata source as device_tags, you can also customize the
  ## name of the device via templates.
  ## The 'name_templates' parameter is a list of templates to try and apply to
  ## the device. The template may contain variables in the form of '$PROPERTY' or
  ## '${PROPERTY}'. The first template which does not contain any variables not
  ## present for the device is used as the device name tag.
  ## The typical use case is for LVM volumes, to get the VG/LV name instead of
  ## the near-meaningless DM-0 name.
  # name_templates = ["$ID_FS_LABEL","$DM_VG_NAME/$DM_LV_NAME"]
`, d.PluginName())
}
| influxdb/influxdb | telegraf/plugins/inputs/diskio.go | GO | mit | 1,832 |
// This file was generated based on 'C:\ProgramData\Uno\Packages\UnoCore\0.19.6\Source\Uno\UX\Attributes\$.uno'.
// WARNING: Changes might be lost if you edit this file directly.
#pragma once
#include <Uno.Attribute.h>
// Forward declaration of the generated attribute type.
namespace g{namespace Uno{namespace UX{struct UXSourceFileNameAttribute;}}}

namespace g{
namespace Uno{
namespace UX{

// public sealed class UXSourceFileNameAttribute :234
// {
// Runtime type descriptor accessor for the generated type.
uType* UXSourceFileNameAttribute_typeof();

// Generated thunks: default constructor body and factory function.
void UXSourceFileNameAttribute__ctor_1_fn(UXSourceFileNameAttribute* __this);
void UXSourceFileNameAttribute__New1_fn(UXSourceFileNameAttribute** __retval);

// Generated C++ counterpart of the Uno UXSourceFileNameAttribute class.
struct UXSourceFileNameAttribute : ::g::Uno::Attribute
{
    void ctor_1();
    static UXSourceFileNameAttribute* New1();
};
// }

}}} // ::g::Uno::UX
| blyk/BlackCode-Fuse | TestApp/.build/Simulator/Android/include/Uno.UX.UXSourceFileNameAttribute.h | C | mit | 751 |
<?php namespace App;
use Illuminate\Database\Eloquent\Model;
/**
 * Eloquent model backing the tasks table.
 *
 * No custom behaviour yet; all attributes use Eloquent defaults.
 */
class Task extends Model {

	//

}
| voiceBits/bitz-admin | voicebitsapps/app/Task.php | PHP | mit | 98 |
# This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from datetime import timedelta
from flask import flash, redirect, request, session
from indico.core.db import db
from indico.modules.admin import RHAdminBase
from indico.modules.news import logger, news_settings
from indico.modules.news.forms import NewsForm, NewsSettingsForm
from indico.modules.news.models.news import NewsItem
from indico.modules.news.util import get_recent_news
from indico.modules.news.views import WPManageNews, WPNews
from indico.util.date_time import now_utc
from indico.util.i18n import _
from indico.web.flask.util import url_for
from indico.web.forms.base import FormDefaults
from indico.web.rh import RH
from indico.web.util import jsonify_data, jsonify_form
class RHNews(RH):
    """Public page listing all news items, newest first."""

    @staticmethod
    def _is_new(item):
        # An item counts as "new" if it was created within the configured
        # number of days; a falsy setting disables the "new" badge entirely.
        days = news_settings.get('new_days')
        if not days:
            return False
        return item.created_dt.date() >= (now_utc() - timedelta(days=days)).date()

    def _process(self):
        news = NewsItem.query.order_by(NewsItem.created_dt.desc()).all()
        return WPNews.render_template('news.html', news=news, _is_new=self._is_new)
class RHNewsItem(RH):
    """Public page showing a single news item."""

    # Redirect requests to the canonical slugged form of the item's URL.
    normalize_url_spec = {
        'locators': {
            lambda self: self.item.locator.slugged
        }
    }

    def _process_args(self):
        self.item = NewsItem.get_or_404(request.view_args['news_id'])

    def _process(self):
        return WPNews.render_template('news_item.html', item=self.item)
class RHManageNewsBase(RHAdminBase):
    """Base class for news management pages (admin area only)."""
    pass
class RHManageNews(RHManageNewsBase):
    """Admin overview listing all news items, newest first."""

    def _process(self):
        items = (NewsItem.query
                 .order_by(NewsItem.created_dt.desc())
                 .all())
        return WPManageNews.render_template('admin/news.html', 'news', news=items)
class RHNewsSettings(RHManageNewsBase):
    """Edit the global news settings."""

    def _process(self):
        form = NewsSettingsForm(obj=FormDefaults(**news_settings.get_all()))
        if form.validate_on_submit():
            news_settings.set_multi(form.data)
            # Settings influence what counts as "recent", so drop the cache.
            get_recent_news.clear_cached()
            flash(_('Settings have been saved'), 'success')
            return jsonify_data()
        return jsonify_form(form)
class RHCreateNews(RHManageNewsBase):
    """Create a new news item."""

    def _process(self):
        form = NewsForm()
        if form.validate_on_submit():
            item = NewsItem()
            form.populate_obj(item)
            db.session.add(item)
            # Flush so the item gets an ID before logging/flashing.
            db.session.flush()
            get_recent_news.clear_cached()
            logger.info('News %r created by %s', item, session.user)
            flash(_("News '{title}' has been posted").format(title=item.title), 'success')
            return jsonify_data(flash=False)
        return jsonify_form(form)
class RHManageNewsItemBase(RHManageNewsBase):
    """Base class for management pages operating on a single news item."""

    def _process_args(self):
        RHManageNewsBase._process_args(self)
        self.item = NewsItem.get_or_404(request.view_args['news_id'])
class RHEditNews(RHManageNewsItemBase):
    """Edit an existing news item."""

    def _process(self):
        form = NewsForm(obj=self.item)
        if form.validate_on_submit():
            # Keep the pre-edit title for the flash message below.
            old_title = self.item.title
            form.populate_obj(self.item)
            db.session.flush()
            get_recent_news.clear_cached()
            logger.info('News %r modified by %s', self.item, session.user)
            flash(_("News '{title}' has been updated").format(title=old_title), 'success')
            return jsonify_data(flash=False)
        return jsonify_form(form)
class RHDeleteNews(RHManageNewsItemBase):
    """Delete a news item."""

    def _process(self):
        db.session.delete(self.item)
        get_recent_news.clear_cached()
        flash(_("News '{title}' has been deleted").format(title=self.item.title), 'success')
        # Consistency fix: the create/edit handlers log the acting user with
        # %s, so use the same format here instead of %r.
        logger.info('News %r deleted by %s', self.item, session.user)
        return redirect(url_for('news.manage'))
| pferreir/indico | indico/modules/news/controllers.py | Python | mit | 3,954 |
package swarm
import (
"testing"
"time"
ma "github.com/ipfs/go-ipfs/Godeps/_workspace/src/github.com/jbenet/go-multiaddr"
context "github.com/ipfs/go-ipfs/Godeps/_workspace/src/golang.org/x/net/context"
inet "github.com/ipfs/go-ipfs/p2p/net"
)
// TestNotifications verifies that swarm notifiees receive exactly one
// callback (on unbuffered channels, so delivery is synchronous) for every
// connection open/close and stream open/close across a 5-swarm mesh.
func TestNotifications(t *testing.T) {
	ctx := context.Background()
	swarms := makeSwarms(ctx, t, 5)
	defer func() {
		for _, s := range swarms {
			s.Close()
		}
	}()

	timeout := 5 * time.Second

	// signup notifs
	notifiees := make([]*netNotifiee, len(swarms))
	for i, swarm := range swarms {
		n := newNetNotifiee()
		swarm.Notify(n)
		notifiees[i] = n
	}

	connectSwarms(t, ctx, swarms)
	<-time.After(time.Millisecond)
	// should've gotten 5 by now.

	// test everyone got the correct connection opened calls
	for i, s := range swarms {
		n := notifiees[i]
		for _, s2 := range swarms {
			if s == s2 {
				continue
			}

			// Drain "connected" notifications until we have as many as the
			// swarm reports for this peer.
			var actual []inet.Conn
			for len(s.ConnectionsToPeer(s2.LocalPeer())) != len(actual) {
				select {
				case c := <-n.connected:
					actual = append(actual, c)
				case <-time.After(timeout):
					t.Fatal("timeout")
				}
			}

			// Every notified conn must appear in the swarm's own conn list.
			expect := s.ConnectionsToPeer(s2.LocalPeer())
			for _, c1 := range actual {
				found := false
				for _, c2 := range expect {
					if c1 == c2 {
						found = true
						break
					}
				}
				if !found {
					t.Error("connection not found")
				}
			}
		}
	}

	// complement finds the swarm/notifiee/conn on the *other* side of a
	// given connection by matching local/remote multiaddrs.
	complement := func(c inet.Conn) (*Swarm, *netNotifiee, *Conn) {
		for i, s := range swarms {
			for _, c2 := range s.Connections() {
				if c.LocalMultiaddr().Equal(c2.RemoteMultiaddr()) &&
					c2.LocalMultiaddr().Equal(c.RemoteMultiaddr()) {
					return s, notifiees[i], c2
				}
			}
		}
		t.Fatal("complementary conn not found", c)
		return nil, nil, nil
	}

	// testOCStream asserts that notifiee n saw exactly stream s being
	// opened and then closed.
	testOCStream := func(n *netNotifiee, s inet.Stream) {
		var s2 inet.Stream
		select {
		case s2 = <-n.openedStream:
			t.Log("got notif for opened stream")
		case <-time.After(timeout):
			t.Fatal("timeout")
		}
		if s != s2 {
			t.Fatal("got incorrect stream", s.Conn(), s2.Conn())
		}

		select {
		case s2 = <-n.closedStream:
			t.Log("got notif for closed stream")
		case <-time.After(timeout):
			t.Fatal("timeout")
		}
		if s != s2 {
			t.Fatal("got incorrect stream", s.Conn(), s2.Conn())
		}
	}

	// Remote side hands each incoming stream to the test and closes it.
	streams := make(chan inet.Stream)
	for _, s := range swarms {
		s.SetStreamHandler(func(s inet.Stream) {
			streams <- s
			s.Close()
		})
	}

	// open a streams in each conn
	for i, s := range swarms {
		for _, c := range s.Connections() {
			_, n2, _ := complement(c)

			st1, err := c.NewStream()
			if err != nil {
				t.Error(err)
			} else {
				st1.Write([]byte("hello"))
				st1.Close()
				testOCStream(notifiees[i], st1)
				st2 := <-streams
				testOCStream(n2, st2)
			}
		}
	}

	// close conns
	for i, s := range swarms {
		n := notifiees[i]
		for _, c := range s.Connections() {
			_, n2, c2 := complement(c)
			c.Close()
			c2.Close()

			// Both endpoints must observe the disconnect of their own conn.
			var c3, c4 inet.Conn
			select {
			case c3 = <-n.disconnected:
			case <-time.After(timeout):
				t.Fatal("timeout")
			}
			if c != c3 {
				t.Fatal("got incorrect conn", c, c3)
			}

			select {
			case c4 = <-n2.disconnected:
			case <-time.After(timeout):
				t.Fatal("timeout")
			}
			if c2 != c4 {
				t.Fatal("got incorrect conn", c, c2)
			}
		}
	}
}
// netNotifiee is a test inet.Notifiee: every callback forwards its argument
// on an unbuffered channel so the test can observe and synchronize with
// each network event.
type netNotifiee struct {
	listen       chan ma.Multiaddr
	listenClose  chan ma.Multiaddr
	connected    chan inet.Conn
	disconnected chan inet.Conn
	openedStream chan inet.Stream
	closedStream chan inet.Stream
}
// newNetNotifiee builds a netNotifiee whose channels are all unbuffered,
// so each notification blocks until the test consumes it.
func newNetNotifiee() *netNotifiee {
	nn := netNotifiee{
		listen:       make(chan ma.Multiaddr),
		listenClose:  make(chan ma.Multiaddr),
		connected:    make(chan inet.Conn),
		disconnected: make(chan inet.Conn),
		openedStream: make(chan inet.Stream),
		closedStream: make(chan inet.Stream),
	}
	return &nn
}
// Listen forwards the listen notification to the test.
func (nn *netNotifiee) Listen(n inet.Network, a ma.Multiaddr) {
	nn.listen <- a
}
// ListenClose forwards the listen-close notification to the test.
func (nn *netNotifiee) ListenClose(n inet.Network, a ma.Multiaddr) {
	nn.listenClose <- a
}
// Connected forwards the new-connection notification to the test.
func (nn *netNotifiee) Connected(n inet.Network, v inet.Conn) {
	nn.connected <- v
}
// Disconnected forwards the closed-connection notification to the test.
func (nn *netNotifiee) Disconnected(n inet.Network, v inet.Conn) {
	nn.disconnected <- v
}
// OpenedStream forwards the stream-opened notification to the test.
func (nn *netNotifiee) OpenedStream(n inet.Network, v inet.Stream) {
	nn.openedStream <- v
}
// ClosedStream forwards the stream-closed notification to the test.
func (nn *netNotifiee) ClosedStream(n inet.Network, v inet.Stream) {
	nn.closedStream <- v
}
| sbruce/go-ipfs | p2p/net/swarm/swarm_notif_test.go | GO | mit | 4,319 |
## Infiniti Samples
This repository contains examples for [Intelledox Infiniti](http://intelledox.com).
Directory | Description
--------- | -----------
[Extension Examples](ExtensionExamples) | A collection of sample code that helps you learn and explore Infiniti extensions.
[API Examples](ApiExamples) | A collection of sample API client code that helps demonstrate calling Infiniti REST services.
## Resources
+ **Website:** [intelledox.com](http://intelledox.com)
+ **Documentation:** [Infiniti Knowledge Base](http://ixsupport.intelledox.com/kb)
+ **Blog:** [Intelledox Blog](http://intelledox.com/ix-connect/blog) | Intelledox/Infiniti | README.md | Markdown | mit | 624 |
๏ปฟ// --------------------------------------------------------------------------------------------
// <copyright file="TransformVisitor.UnionAll.cs" company="Effort Team">
// Copyright (C) 2011-2014 Effort Team
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
// </copyright>
// --------------------------------------------------------------------------------------------
namespace Effort.Internal.DbCommandTreeTransformation
{
using System;
using System.Collections.Generic;
#if !EFOLD
using System.Data.Entity.Core.Common.CommandTrees;
#else
using System.Data.Common.CommandTrees;
#endif
using System.Linq;
using System.Linq.Expressions;
using System.Reflection;
using Effort.Internal.Common;
internal partial class TransformVisitor
{
    /// <summary>
    /// Transforms a UNION ALL command tree node into a LINQ Concat call over
    /// the transformed left and right inputs.
    /// </summary>
    /// <param name="expression">The UNION ALL expression to transform.</param>
    /// <returns>The equivalent LINQ expression.</returns>
    public override Expression Visit(DbUnionAllExpression expression)
    {
        Type resultType = edmTypeConverter.Convert(expression.ResultType);

        Expression left = this.Visit(expression.Left);
        Expression right = this.Visit(expression.Right);

        var resultElemType = TypeHelper.GetElementType(resultType);
        // Both inputs must expose the same element type before Concat.
        this.UnifyCollections(resultElemType, ref left, ref right);

        return queryMethodExpressionBuilder.Concat(left, right);
    }
}
} | wertzui/effort | Main/Source/Effort/Internal/DbCommandTreeTransformation/TransformVisitor.UnionAll.cs | C# | mit | 2,438 |
<?php
namespace Kunstmaan\NodeSearchBundle\Helper\FormWidgets;
use Doctrine\ORM\EntityManager;
use Kunstmaan\AdminBundle\Helper\FormWidgets\FormWidget;
use Kunstmaan\NodeBundle\Entity\Node;
use Kunstmaan\NodeSearchBundle\Entity\NodeSearch;
use Symfony\Component\Form\FormBuilderInterface;
use Symfony\Component\HttpFoundation\Request;
/**
 * Form widget that lets an editor tune the search boost of a node.
 */
class SearchFormWidget extends FormWidget
{
    /** @var Node The node being edited. */
    private $node;

    /** @var NodeSearch|null Existing search settings for the node, if any. */
    private $nodeSearch;

    /**
     * @param Node          $node
     * @param EntityManager $em
     */
    public function __construct(Node $node, EntityManager $em)
    {
        $this->node = $node;
        $this->nodeSearch = $em->getRepository('KunstmaanNodeSearchBundle:NodeSearch')->findOneByNode($this->node);
    }

    /**
     * Adds the node's current NodeSearch entity to the form data.
     *
     * @param FormBuilderInterface $builder The form builder
     */
    public function buildForm(FormBuilderInterface $builder)
    {
        parent::buildForm($builder);

        $data = $builder->getData();
        $data['node_search'] = $this->nodeSearch;

        $builder->setData($data);
    }

    /**
     * Extracts the submitted boost value from the raw request payload.
     *
     * @param Request $request
     */
    public function bindRequest(Request $request)
    {
        $form = $request->request->get('form');
        $this->data['node_search'] = $form['node_search']['boost'];
    }

    /**
     * Persists the (possibly newly created) NodeSearch entity with the
     * submitted boost value.
     *
     * @param EntityManager $em
     */
    public function persist(EntityManager $em)
    {
        // Re-fetch so we always work with an entity managed by this EM.
        $nodeSearch = $em->getRepository('KunstmaanNodeSearchBundle:NodeSearch')->findOneByNode($this->node);
        if ($this->data['node_search'] !== null) {
            if ($nodeSearch === null) {
                $nodeSearch = new NodeSearch();
                $nodeSearch->setNode($this->node);
            }
            $nodeSearch->setBoost($this->data['node_search']);
            $em->persist($nodeSearch);
        }
    }
}
| mwoynarski/KunstmaanBundlesCMS | src/Kunstmaan/NodeSearchBundle/Helper/FormWidgets/SearchFormWidget.php | PHP | mit | 1,850 |
๏ปฟusing System.Reflection;
[assembly: AssemblyTitle("Rainbow")]
[assembly: AssemblyDescription("Rainbow serialization library")] | kamsar/Rainbow | src/Rainbow/Properties/AssemblyInfo.cs | C# | mit | 130 |
# Declare targets that do not produce files of the same name, so a stray
# file called "run", "debug" or "clean" can never shadow them.
.PHONY: run debug clean

# Build the solution binary with warnings and optimizations enabled.
main: main.cc
	g++ -O2 -Wall -Wextra -o main main.cc

# Build (if needed) and execute.
run: main
	./main

# Run under valgrind with full leak reporting.
debug: main
	valgrind --leak-check=full --show-leak-kinds=all --track-origins=yes ./main

# Remove the build artifact.
clean:
	rm -fv main
| PysKa-Ratzinger/personal_project_euler_solutions | solutions/051-075/54/Makefile | Makefile | mit | 183 |
define(function(require, exports, module) {
var Notify = require('common/bootstrap-notify');
var FileChooser = require('../widget/file/file-chooser3');
exports.run = function() {
var $form = $("#course-material-form");
var materialChooser = new FileChooser({
element: '#material-file-chooser'
});
materialChooser.on('change', function(item) {
$form.find('[name="fileId"]').val(item.id);
});
$form.on('click', '.delete-btn', function(){
var $btn = $(this);
if (!confirm(Translator.trans('็็่ฆๅ ้ค่ฏฅ่ตๆๅ๏ผ'))) {
return ;
}
$.post($btn.data('url'), function(){
$btn.parents('.list-group-item').remove();
Notify.success(Translator.trans('่ตๆๅทฒๅ ้ค'));
});
});
$form.on('submit', function(){
if ($form.find('[name="fileId"]').val().length == 0) {
Notify.danger(Translator.trans('่ฏทๅ
ไธไผ ๆไปถๆๆทปๅ ่ตๆ็ฝ็ป้พๆฅ๏ผ'));
return false;
}
$.post($form.attr('action'), $form.serialize(), function(html){
Notify.success(Translator.trans('่ตๆๆทปๅ ๆๅ๏ผ'));
$("#material-list").append(html).show();
$form.find('.text-warning').hide();
$form.find('[name="fileId"]').val('');
$form.find('[name="link"]').val('');
$form.find('[name="description"]').val('');
materialChooser.open();
}).fail(function(){
Notify.success(Translator.trans('่ตๆๆทปๅ ๅคฑ่ดฅ๏ผ่ฏท้่ฏ๏ผ'));
});
return false;
});
$('.modal').on('hidden.bs.modal', function(){
window.location.reload();
});
};
}); | richtermark/SMEAGOnline | web/bundles/topxiaweb/js/controller/course-manage/material-modal.js | JavaScript | mit | 1,892 |
#!/bin/sh
# CocoaPods "Embed Pods Frameworks" build-phase script: copies built/vendored
# frameworks into the app bundle, strips invalid architectures and re-signs.

# Abort on any error, on use of an unset variable, and on pipeline failures.
set -e
set -u
set -o pipefail

# Report file:line of the failing command so Xcode can surface it as an error.
function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
# $1: framework path, absolute or relative to BUILT_PRODUCTS_DIR.
install_framework()
{
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"

  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM
# $1: path of a .framework.dSYM bundle; ends up in DWARF_DSYM_FOLDER_PATH.
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into the target's temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"

    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi

    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}
# Copies the bcsymbolmap files of a vendored framework
# $1: path of the .bcsymbolmap file to copy into BUILT_PRODUCTS_DIR.
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"
    # Escape the inner quotes in the logged command (matching the style of
    # install_framework/install_dsym) so the echoed line is copy-pasteable.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${bcsymbolmap_path}\" \"${destination}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}
# Signs a framework with the provided identity
# $1: path of the bundle/binary to (re)sign. No-op when signing is disabled.
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"

    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      # Background the job; the script waits for all signing jobs at the end.
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
# $1: path of a Mach-O binary. Removes slices not present in $ARCHS in-place.
# Sets STRIP_BINARY_RETVAL to 1 when the binary was processed, 0 when it
# supports none of the requested architectures.
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}
# Embed the Pods frameworks for the active build configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/ForecastIO-iOS/ForecastIO.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/OHHTTPStubs-iOS/OHHTTPStubs.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/ForecastIO-iOS/ForecastIO.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/OHHTTPStubs-iOS/OHHTTPStubs.framework"
fi
# Wait for any codesign jobs that were backgrounded for parallel signing.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
| ghodasara/ForecastIO | Pods/Target Support Files/Pods-ForecastIO iOS Tests/Pods-ForecastIO iOS Tests-frameworks.sh | Shell | mit | 8,050 |
/*
-----------------------------------------------------------------------------
This source file is part of OGRE
(Object-oriented Graphics Rendering Engine)
For the latest info, see http://www.ogre3d.org
Copyright (c) 2000-2013 Torus Knot Software Ltd
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
-----------------------------------------------------------------------------
*/
#ifndef _RTShaderSRSSegmentedLights_
#define _RTShaderSRSSegmentedLights_
#include "OgreShaderPrerequisites.h"
#include "OgreShaderParameter.h"
#include "OgreShaderSubRenderState.h"
#include "OgreVector4.h"
#include "OgreLight.h"
#include "OgreCommon.h"
/** Segmented lighting sub render state.
 * This sub render state handles lighting in the scene.
 * It is heavily based on PerPixelLighting.
 */
class RTShaderSRSSegmentedLights : public Ogre::RTShader::SubRenderState
{
// Interface.
public:
    /** Class default constructor */
    RTShaderSRSSegmentedLights();

    /**
    @see SubRenderState::getType.
    */
    virtual const Ogre::String& getType() const;

    /**
    @see SubRenderState::getExecutionOrder.
    */
    virtual int getExecutionOrder() const;

    /**
    @see SubRenderState::updateGpuProgramsParams.
    */
    virtual void updateGpuProgramsParams(Ogre::Renderable* rend, Ogre::Pass* pass, const Ogre::AutoParamDataSource* source, const Ogre::LightList* pLightList);

    /**
    @see SubRenderState::copyFrom.
    */
    virtual void copyFrom(const Ogre::RTShader::SubRenderState& rhs);

    /**
    @see SubRenderState::preAddToRenderState.
    */
    virtual bool preAddToRenderState(const Ogre::RTShader::RenderState* renderState, Ogre::Pass* srcPass, Ogre::Pass* dstPass);

    // Type name used to register/look up this sub render state.
    static Ogre::String Type;

// Protected types:
protected:

    // Per light parameters.
    struct LightParams
    {
        Ogre::Light::LightTypes mType;                        // Light type.
        Ogre::RTShader::UniformParameterPtr mPosition;        // Light position.
        Ogre::RTShader::UniformParameterPtr mDirection;       // Light direction.
        Ogre::RTShader::UniformParameterPtr mSpotParams;      // Spot light parameters.
        Ogre::RTShader::UniformParameterPtr mDiffuseColour;   // Diffuse colour.
        Ogre::RTShader::UniformParameterPtr mSpecularColour;  // Specular colour.
    };

    typedef Ogre::vector<LightParams>::type LightParamsList;
    typedef LightParamsList::iterator LightParamsIterator;
    typedef LightParamsList::const_iterator LightParamsConstIterator;

// Protected methods
protected:
    /**
    Set the track per vertex colour type. Ambient, Diffuse, Specular and Emissive lighting components source
    can be the vertex colour component. To establish such a link one should provide the matching flags to this
    sub render state.
    */
    void setTrackVertexColourType(Ogre::TrackVertexColourType type) { mTrackVertexColourType = type; }

    /**
    Return the current track per vertex type.
    */
    Ogre::TrackVertexColourType getTrackVertexColourType() const { return mTrackVertexColourType; }

    /**
    Set the light count per light type that this sub render state will generate.
    @see ShaderGenerator::setLightCount.
    */
    void setLightCount(const int lightCount[3]);

    /**
    Get the light count per light type that this sub render state will generate.
    @see ShaderGenerator::getLightCount.
    */
    void getLightCount(int lightCount[3]) const;

    /**
    Set the specular component state. If set to true this sub render state will compute a specular
    lighting component in addition to the diffuse component.
    @param enable Pass true to enable specular component computation.
    */
    void setSpecularEnable(bool enable) { mSpecularEnable = enable; }

    /**
    Get the specular component state.
    */
    bool getSpecularEnable() const { return mSpecularEnable; }

    /**
    @see SubRenderState::resolveParameters.
    */
    virtual bool resolveParameters(Ogre::RTShader::ProgramSet* programSet);

    /** Resolve global lighting parameters */
    bool resolveGlobalParameters(Ogre::RTShader::ProgramSet* programSet);

    /** Resolve per light parameters */
    bool resolvePerLightParameters(Ogre::RTShader::ProgramSet* programSet);

    /**
    @see SubRenderState::resolveDependencies.
    */
    virtual bool resolveDependencies(Ogre::RTShader::ProgramSet* programSet);

    /**
    @see SubRenderState::addFunctionInvocations.
    */
    virtual bool addFunctionInvocations(Ogre::RTShader::ProgramSet* programSet);

    /**
    Internal method that adds related vertex shader functions invocations.
    */
    bool addVSInvocation(Ogre::RTShader::Function* vsMain, const int groupOrder, int& internalCounter);

    /**
    Internal method that adds global illumination component functions invocations.
    */
    bool addPSGlobalIlluminationInvocationBegin(Ogre::RTShader::Function* psMain, const int groupOrder, int& internalCounter);
    bool addPSGlobalIlluminationInvocationEnd(Ogre::RTShader::Function* psMain, const int groupOrder, int& internalCounter);

    /**
    Internal method that adds per light illumination component functions invocations.
    */
    bool addPSIlluminationInvocation(LightParams* curLightParams, Ogre::RTShader::Function* psMain, const int groupOrder, int& internalCounter);

    /**
    Internal method that adds light illumination component calculated from the segmented texture.
    */
    bool addPSSegmentedTextureLightInvocation(Ogre::RTShader::Function* psMain, const int groupOrder, int& internalCounter);

    /**
    Internal method that adds the final colour assignments.
    */
    bool addPSFinalAssignmentInvocation(Ogre::RTShader::Function* psMain, const int groupOrder, int& internalCounter);

// Attributes.
protected:
    Ogre::TrackVertexColourType mTrackVertexColourType;     // Track per vertex colour type.
    bool mSpecularEnable;                                   // Specular component enabled/disabled.
    LightParamsList mLightParamsList;                       // Light list.
    Ogre::RTShader::UniformParameterPtr mWorldMatrix;       // World view matrix parameter.
    Ogre::RTShader::UniformParameterPtr mWorldITMatrix;     // World view matrix inverse transpose parameter.
    Ogre::RTShader::ParameterPtr mVSInPosition;             // Vertex shader input position parameter.
    Ogre::RTShader::ParameterPtr mVSOutWorldPos;            // Vertex shader output view position (position in camera space) parameter.
    Ogre::RTShader::ParameterPtr mPSInWorldPos;             // Pixel shader input view position (position in camera space) parameter.
    Ogre::RTShader::ParameterPtr mVSInNormal;               // Vertex shader input normal.
    Ogre::RTShader::ParameterPtr mVSOutNormal;              // Vertex shader output normal.
    Ogre::RTShader::ParameterPtr mPSInNormal;               // Pixel shader input normal.
    Ogre::RTShader::ParameterPtr mPSLocalNormal;            // Pixel shader local normal working parameter.
    Ogre::RTShader::ParameterPtr mPSTempDiffuseColour;      // Pixel shader temporary diffuse calculation parameter.
    Ogre::RTShader::ParameterPtr mPSTempSpecularColour;     // Pixel shader temporary specular calculation parameter.
    Ogre::RTShader::ParameterPtr mPSDiffuse;                // Pixel shader input/local diffuse parameter.
    Ogre::RTShader::ParameterPtr mPSSpecular;               // Pixel shader input/local specular parameter.
    Ogre::RTShader::ParameterPtr mPSOutDiffuse;             // Pixel shader output diffuse parameter.
    Ogre::RTShader::ParameterPtr mPSOutSpecular;            // Pixel shader output specular parameter.
    Ogre::RTShader::UniformParameterPtr mDerivedSceneColour;        // Derived scene colour parameter.
    Ogre::RTShader::UniformParameterPtr mLightAmbientColour;        // Ambient light colour parameter.
    Ogre::RTShader::UniformParameterPtr mDerivedAmbientLightColour; // Derived ambient light colour parameter.
    Ogre::RTShader::UniformParameterPtr mSurfaceAmbientColour;      // Surface ambient colour parameter.
    Ogre::RTShader::UniformParameterPtr mSurfaceDiffuseColour;      // Surface diffuse colour parameter.
    Ogre::RTShader::UniformParameterPtr mSurfaceSpecularColour;     // Surface specular colour parameter.
    Ogre::RTShader::UniformParameterPtr mSurfaceEmissiveColour;     // Surface emissive colour parameter.
    Ogre::RTShader::UniformParameterPtr mSurfaceShininess;          // Surface shininess parameter.

    //Segmented texture
    bool mUseSegmentedLightTexture;                  // Whether the segmented light texture path is used.
    bool mIsDebugMode;                               // Debug rendering toggle.
    unsigned short m_LightSamplerIndex;              // Sampler index assigned to the segmented light texture.
    Ogre::RTShader::UniformParameterPtr mPSLightTextureIndexLimit;
    Ogre::RTShader::UniformParameterPtr mPSLightTextureLightBounds;
    Ogre::RTShader::UniformParameterPtr mPSSegmentedLightTexture;
    //Ogre::RTShader::UniformParameterPtr mPSLightAreaBounds;

    static Ogre::Light msBlankLight;    // Shared blank light.
};
/**
A factory that enables creation of RTShaderSRSSegmentedLights instances.
@remarks Sub class of SubRenderStateFactory
*/
class RTShaderSRSSegmentedLightsFactory : public Ogre::RTShader::SubRenderStateFactory
{
public:
    /**
    @see SubRenderStateFactory::getType.
    */
    virtual const Ogre::String& getType() const;

    /**
    @see SubRenderStateFactory::createInstance.
    */
    virtual Ogre::RTShader::SubRenderState* createInstance(Ogre::ScriptCompiler* compiler, Ogre::PropertyAbstractNode* prop, Ogre::Pass* pass, Ogre::RTShader::SGScriptTranslator* translator);

    /**
    @see SubRenderStateFactory::writeInstance.
    */
    virtual void writeInstance(Ogre::MaterialSerializer* ser, Ogre::RTShader::SubRenderState* subRenderState, Ogre::Pass* srcPass, Ogre::Pass* dstPass);

protected:
    /**
    @see SubRenderStateFactory::createInstanceImpl.
    */
    virtual Ogre::RTShader::SubRenderState* createInstanceImpl();
};
| vbudovski/ogre | Samples/ShaderSystemMultiLight/include/RTShaderSRSSegmentedLights.h | C | mit | 10,199 |
// Auto-generated caniuse-lite browser-support data for the "Download attribute"
// feature (B:1, C:"Download attribute"). Do not edit by hand.
module.exports={A:{A:{"2":"H D G E A B FB"},B:{"1":"p z J L N I","2":"C"},C:{"1":"0 2 3 5 6 8 9 P Q R S T U V W X Y Z a c d e f g h i j k l m n o M q r s t u v w x y CB AB","2":"4 aB F K H D G E A B C p z J L N I O YB SB"},D:{"1":"0 2 3 5 6 8 9 z J L N I O P Q R S T U V W X Y Z a c d e f g h i j k l m n o M q r s t u v w x y CB AB MB cB GB b HB IB JB KB","2":"F K H D G E A B C p"},E:{"1":"1 B C RB TB","2":"F K H D G E A LB DB NB OB PB QB"},F:{"1":"2 J L N I O P Q R S T U V W X Y Z a c d e f g h i j k l m n o M q r s t u v w x y","2":"1 E B C UB VB WB XB BB ZB EB"},G:{"2":"7 G C DB bB dB eB fB gB hB iB jB kB lB mB"},H:{"2":"nB"},I:{"1":"b sB tB","2":"4 7 F oB pB qB rB"},J:{"1":"A","2":"D"},K:{"1":"M","2":"1 A B C BB EB"},L:{"1":"b"},M:{"1":"0"},N:{"2":"A B"},O:{"1":"uB"},P:{"1":"F K vB wB"},Q:{"1":"xB"},R:{"1":"yB"}},B:1,C:"Download attribute"};
| stephaniejn/stephaniejn.github.io | node_modules/caniuse-lite/data/features/download.js | JavaScript | mit | 857 |
<?php
/**
 * Front controller for the Zend Framework application.
 *
 * This makes our life easier when dealing with paths. Everything is relative
 * to the application root now.
 */
chdir(dirname(__DIR__));

// Decline static file requests back to the PHP built-in webserver
if (php_sapi_name() === 'cli-server' && is_file(__DIR__ . parse_url($_SERVER['REQUEST_URI'], PHP_URL_PATH))) {
    return false;
}

// Setup autoloading (path is relative to the application root chdir'd above)
include 'vendor/autoload.php';

if (!defined('APPLICATION_PATH')) {
    define('APPLICATION_PATH', realpath(__DIR__ . '/../'));
}

// Load the base config and overlay local development-only settings when a
// development.config.php is present.
$appConfig = include APPLICATION_PATH . '/config/application.config.php';
if (file_exists(APPLICATION_PATH . '/config/development.config.php')) {
    $appConfig = Zend\Stdlib\ArrayUtils::merge($appConfig, include APPLICATION_PATH . '/config/development.config.php');
}

// Run the application!
Zend\Mvc\Application::init($appConfig)->run();
// ---------------------------------------------------------------------------------------------
#region // Copyright (c) 2014, SIL International. All Rights Reserved.
// <copyright from='2008' to='2014' company='SIL International'>
// Copyright (c) 2014, SIL International. All Rights Reserved.
//
// Distributable under the terms of the MIT License (http://sil.mit-license.org/)
// </copyright>
#endregion
//
// This class originated in FieldWorks (under the GNU Lesser General Public License), but we
// have decided to make it avaialble in SIL.ScriptureUtils as part of Palaso so it will be more
// readily available to other projects.
// ---------------------------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
namespace SIL.Scripture
{
/// <summary>
/// Manipulate information for standard chapter/verse schemes
/// </summary>
public class VersificationTable
{
	private readonly ScrVers scrVers;
	// For each book (0-based index), the verse count of each chapter.
	private List<int[]> bookList;
	// Verse mappings to/from the standard versification, keyed by reference string.
	private Dictionary<string, string> toStandard;
	private Dictionary<string, string> fromStandard;

	// Folder containing the .vrs files; set once via Initialize.
	private static string baseDir;
	private static VersificationTable[] versifications = null;

	// Names of the versification files. These are in "\My Paratext Projects"
	private static string[] versificationFiles = new string[] { "",
		"org.vrs", "lxx.vrs", "vul.vrs", "eng.vrs", "rsc.vrs", "rso.vrs", "oth.vrs",
		"oth2.vrs", "oth3.vrs", "oth4.vrs", "oth5.vrs", "oth6.vrs", "oth7.vrs", "oth8.vrs",
		"oth9.vrs", "oth10.vrs", "oth11.vrs", "oth12.vrs", "oth13.vrs", "oth14.vrs",
		"oth15.vrs", "oth16.vrs", "oth17.vrs", "oth18.vrs", "oth19.vrs", "oth20.vrs",
		"oth21.vrs", "oth22.vrs", "oth23.vrs", "oth24.vrs" };

	/// ------------------------------------------------------------------------------------
	/// <summary>
	/// This method should be called once before an application accesses anything that
	/// requires versification info.
	/// TODO: Paratext needs to call this with ScrTextCollection.SettingsDirectory.
	/// </summary>
	/// <param name="vrsFolder">Path to the folder containing the .vrs files</param>
	/// ------------------------------------------------------------------------------------
	public static void Initialize(string vrsFolder)
	{
		baseDir = vrsFolder;
	}

	/// ------------------------------------------------------------------------------------
	/// <summary>
	/// Get the versification table for this versification
	/// </summary>
	/// <param name="vers">Versification scheme (must not be Unknown)</param>
	/// <returns>The (lazily loaded and cached) table for the scheme</returns>
	/// ------------------------------------------------------------------------------------
	public static VersificationTable Get(ScrVers vers)
	{
		Debug.Assert(vers != ScrVers.Unknown);

		if (versifications == null)
			versifications = new VersificationTable[versificationFiles.GetUpperBound(0)];

		// Read versification table if not already read
		if (versifications[(int)vers] == null)
		{
			versifications[(int)vers] = new VersificationTable(vers);
			ReadVersificationFile(FileName(vers), versifications[(int)vers]);
		}

		return versifications[(int)vers];
	}

	/// ------------------------------------------------------------------------------------
	/// <summary>
	/// Read versification file and "add" its entries.
	/// At the moment we only do this once. Eventually we will call this twice.
	/// Once for the standard versification, once for custom entries in versification.vrs
	/// file for this project.
	/// </summary>
	/// <param name="fileName">Full path of the .vrs file to read</param>
	/// <param name="versification">Table to populate</param>
	/// ------------------------------------------------------------------------------------
	private static void ReadVersificationFile(string fileName, VersificationTable versification)
	{
		using (TextReader reader = new StreamReader(fileName))
		{
			for (string line = reader.ReadLine(); line != null; line = reader.ReadLine())
			{
				line = line.Trim();
				if (line == "" || line[0] == '#')
					continue;

				// Lines containing '=' are verse mappings; others give chapter/verse counts.
				if (line.Contains("="))
					ParseMappingLine(fileName, versification, line);
				else
					ParseChapterVerseLine(fileName, versification, line);
			}
		}
	}

	// Parse lines giving the number of verses for each chapter like
	// GEN 1:10 2:23 ...
	// (NOTE: this comment and the one on ParseMappingLine were previously swapped.)
	private static void ParseChapterVerseLine(string fileName, VersificationTable versification, string line)
	{
		string[] parts = line.Split(' ');
		int bookNum = BCVRef.BookToNumber(parts[0]);
		if (bookNum == -1)
			return; // Deuterocanonical books not supported

		if (bookNum == 0)
			throw new Exception("Invalid [" + parts[0] + "] " + fileName);

		// Pad the book list with single-chapter placeholders up to this book.
		while (versification.bookList.Count < bookNum)
			versification.bookList.Add(new int[1] { 1 });

		List<int> verses = new List<int>();

		for (int i = 1; i <= parts.GetUpperBound(0); ++i)
		{
			string[] pieces = parts[i].Split(':');
			int verseCount;
			if (pieces.GetUpperBound(0) != 1 ||
				!int.TryParse(pieces[1], out verseCount) || verseCount <= 0)
			{
				throw new Exception("Invalid [" + line + "] " + fileName);
			}

			verses.Add(verseCount);
		}

		versification.bookList[bookNum - 1] = verses.ToArray();
	}

	// Parse lines mapping verses in this versification to the standard versification:
	// GEN 1:10 = GEN 2:11
	// GEN 1:10-13 = GEN 2:11-14
	private static void ParseMappingLine(string fileName, VersificationTable versification, string line)
	{
		try
		{
			string[] parts = line.Split('=');
			string[] leftPieces = parts[0].Trim().Split('-');
			string[] rightPieces = parts[1].Trim().Split('-');

			BCVRef left = new BCVRef(leftPieces[0]);
			// End of the verse range on the left side; 0 when no range was given.
			int leftLimit = leftPieces.GetUpperBound(0) == 0 ? 0 : int.Parse(leftPieces[1]);

			BCVRef right = new BCVRef(rightPieces[0]);

			// Record the mapping for every verse in the range, both directions.
			while (true)
			{
				versification.toStandard[left.ToString()] = right.ToString();
				versification.fromStandard[right.ToString()] = left.ToString();

				if (left.Verse >= leftLimit)
					break;

				left.Verse = left.Verse + 1;
				right.Verse = right.Verse + 1;
			}
		}
		catch
		{
			// ENHANCE: Make it so the TE version of Localizer can have its own resources for stuff
			// like this.
			throw new Exception("Invalid [" + line + "] " + fileName);
		}
	}

	/// <summary>
	/// Gets the name of this requested versification file.
	/// </summary>
	/// <param name="vers">Versification scheme</param>
	public static string GetFileNameForVersification(ScrVers vers)
	{
		return versificationFiles[(int)vers];
	}

	// Get path of this versification file.
	// Fall back to eng.vrs if not present.
	private static string FileName(ScrVers vers)
	{
		if (baseDir == null)
			throw new InvalidOperationException("VersificationTable.Initialize must be called first");

		string fileName = Path.Combine(baseDir, GetFileNameForVersification(vers));

		if (!File.Exists(fileName))
			fileName = Path.Combine(baseDir, GetFileNameForVersification(ScrVers.English));

		return fileName;
	}

	// Create empty versification table
	private VersificationTable(ScrVers vers)
	{
		this.scrVers = vers;
		bookList = new List<int[]>();
		toStandard = new Dictionary<string, string>();
		fromStandard = new Dictionary<string, string>();
	}

	/// <summary>
	/// Number of books known to this versification.
	/// </summary>
	public int LastBook()
	{
		return bookList.Count;
	}

	/// <summary>
	/// Last chapter number in this book.
	/// </summary>
	/// <param name="bookNum">1-based book number</param>
	/// <returns>Chapter count, or 0/1 for out-of-range books</returns>
	public int LastChapter(int bookNum)
	{
		if (bookNum <= 0)
			return 0;

		if (bookNum - 1 >= bookList.Count)
			return 1;

		int[] chapters = bookList[bookNum - 1];
		return chapters.GetUpperBound(0) + 1;
	}

	/// <summary>
	/// Last verse number in this book/chapter.
	/// </summary>
	/// <param name="bookNum">1-based book number</param>
	/// <param name="chapterNum">1-based chapter number</param>
	/// <returns>Verse count, or 0/1 for out-of-range books/chapters</returns>
	public int LastVerse(int bookNum, int chapterNum)
	{
		if (bookNum <= 0)
			return 0;

		if (bookNum - 1 >= bookList.Count)
			return 1;

		int[] chapters = bookList[bookNum - 1];

		// Chapter "0" is the intro material. Pretend that it has 1 verse.
		if (chapterNum - 1 > chapters.GetUpperBound(0) || chapterNum < 1)
			return 1;

		return chapters[chapterNum - 1];
	}

	/// <summary>
	/// Change the passed VerseRef to be this versification.
	/// </summary>
	/// <param name="vref">Reference to convert in place via the standard versification</param>
	public void ChangeVersification(IVerseReference vref)
	{
		if (vref.Versification == scrVers)
			return;

		// Map from existing to standard versification
		string verse = vref.ToString();
		string verse2;
		Get(vref.Versification).toStandard.TryGetValue(verse, out verse2);
		if (verse2 == null)
			verse2 = verse;

		// Map from standard versification to this versification
		string verse3;
		fromStandard.TryGetValue(verse2, out verse3);
		if (verse3 == null)
			verse3 = verse2;

		// If verse has changed, parse new value
		if (verse != verse3)
			vref.Parse(verse3);

		vref.Versification = scrVers;
	}
}
}
| mccarthyrb/libpalaso | SIL.Scripture/VersificationTable.cs | C# | mit | 9,031 |
{{< layout}}
{{$pageTitle}}Upload your photo{{/pageTitle}}
{{$header}}
<h1>Take your photo</h1>
{{/header}}
{{$content}}
<p>If you need to, <a href="/priority_service_170215/photoguide-short/">read the photo guide again</a>.</p>
<p>
We’ll store all photos for up to 30 days in line with our <a href="" rel="external">privacy policy</a>.
</p>
<!-- browse button
<div id="photo-group" class="form-group">
<label for="photo" class="button photo-upload-label" role="button">
<span id="photo-label-text" class="">Upload your photo</span>
</label>
<input type="file" accept="image/jpeg" id="photo" class="photo-choose-file" name="photo" aria-controls="progress-container" aria-required="true" style="display:none">
</div>
-->
<a href="/prototype_170123/uploadphoto/processing-image" class="button">Upload your photo</a><br/><br/>
<details>
<summary><span class="summary">I have a printed photo</span></summary>
<div class="panel panel-border-narrow">
<p>
You can’t use a printed photo with this service. You’ll either need to <a href="">get a digital photo</a>, or <a href="">use a different service</a> to renew your passport.
</p>
</div>
</details>
<h3>How to take a good passport photo</h3>
<div class="column-half">
<ol class="list-number">
<li>Get a friend to take your photo.</li>
<li>Use a plain background.</li>
<li>Don’t crop your photo – include your face, shoulders and upper body.</li>
<li>Keep your hair away from your face and brushed down.</li>
<li>Make sure there are no shadows on your face or behind you.</li>
</ol>
</div>
<object data="/public/images/woman-with-reddish-hair_ex@2x.jpg" type="image/jpeg" class="svg" tabindex="-1" style="width:100%;padding-left:0px;padding-top:30px">
  <img src="/public/images/woman-with-reddish-hair_ex@2x.jpg" width="206" alt="">
</object>
{{/content}}
{{/ layout}}
| maxinedivers/pass-max | views/priority_service_170215/photoguide-short/short-upload-photo.html | HTML | mit | 2,245 |
require 'sprockets/autoload'
require 'sprockets/path_utils'
module Sprockets
  # Sprockets processor that transpiles JavaScript with the Babel transpiler.
  # Transpiled output is cached, keyed on the transpiler/source versions,
  # the processor VERSION and the effective options.
  class BabelProcessor
    # Bump to invalidate previously cached transpilation results.
    VERSION = '1'

    # Shared memoized instance used by the `.call` processor entry point.
    def self.instance
      @instance ||= new
    end

    # Sprockets processor interface; delegates to the shared instance.
    def self.call(input)
      instance.call(input)
    end

    # options - Hash of Babel options. `useStrict` is always blacklisted and
    #           source maps are always disabled.
    def initialize(options = {})
      @options = options.merge({
        'blacklist' => (options['blacklist'] || []) + ['useStrict'],
        'sourceMap' => false
      }).freeze

      @cache_key = [
        self.class.name,
        Autoload::Babel::Transpiler::VERSION,
        Autoload::Babel::Source::VERSION,
        VERSION,
        @options
      ].freeze
    end

    # input - Sprockets input Hash (:data, :cache, :load_path, :filename).
    # Returns the transpiled JavaScript source String.
    def call(input)
      data = input[:data]

      result = input[:cache].fetch(@cache_key + [data]) do
        Autoload::Babel::Transpiler.transform(data, @options.merge(
          'sourceRoot' => input[:load_path],
          'moduleRoot' => '',
          'filename' => input[:filename],
          'filenameRelative' => PathUtils.split_subpath(input[:load_path], input[:filename])
        ))
      end

      result['code']
    end
  end
end
| Andreis13/sprockets | lib/sprockets/babel_processor.rb | Ruby | mit | 1,054 |
'use strict';
const EventEmitter = require('events');
const uuid = require('node-uuid');
const ItemType = require('./ItemType');
const { Inventory, InventoryFullError } = require('./Inventory');
const Logger = require('./Logger');
const Player = require('./Player');
/**
 * @property {Area} area Area the item belongs to (warning: this is not the area it is currently in but the
* area it belongs to on a fresh load)
* @property {object} properties Essentially a blob of whatever attrs the item designer wanted to add
* @property {array|string} behaviors Single or list of behaviors this object uses
* @property {string} description Long description seen when looking at it
* @property {number} id vnum
* @property {boolean} isEquipped Whether or not item is currently equipped
* @property {Map} inventory Current items this item contains
* @property {string} name Name shown in inventory and when equipped
* @property {Room} room Room the item is currently in
* @property {string} roomDesc Description shown when item is seen in a room
* @property {string} script A custom script for this item
* @property {ItemType|string} type
* @property {string} uuid UUID differentiating all instances of this item
*/
class Item extends EventEmitter {
constructor (area, item) {
super();
const validate = ['keywords', 'name', 'id'];
for (const prop of validate) {
if (!(prop in item)) {
throw new ReferenceError(`Item in area [${area.name}] missing required property [${prop}]`);
}
}
this.area = area;
this.properties = item.properties || {};
this.behaviors = item.behaviors || {};
this.defaultItems = item.items || [];
this.description = item.description || 'Nothing special.';
this.entityReference = item.entityReference; // EntityFactory key
this.id = item.id;
this.maxItems = item.maxItems || Infinity;
this.inventory = item.inventory ? new Inventory(item.inventory) : null;
if (this.inventory) {
this.inventory.setMax(this.maxItems);
}
this.isEquipped = item.isEquipped || false;
this.keywords = item.keywords;
this.level = item.level || 1;
this.itemLevel = item.itemLevel || this.level;
this.name = item.name;
this.quality = item.quality || 'common';
this.room = item.room || null;
this.roomDesc = item.roomDesc || '';
this.script = item.script || null;
this.slot = item.slot || null;
this.type = typeof item.type === 'string' ? ItemType[item.type] : (item.type || ItemType.OBJECT);
this.uuid = item.uuid || uuid.v4();
}
hasKeyword(keyword) {
return this.keywords.indexOf(keyword) !== -1;
}
/**
* @param {string} name
* @return {boolean}
*/
hasBehavior(name) {
if (!(this.behaviors instanceof Map)) {
throw new Error("Item has not been hydrated. Cannot access behaviors.");
}
return this.behaviors.has(name);
}
/**
* @param {string} name
* @return {*}
*/
getBehavior(name) {
if (!(this.behaviors instanceof Map)) {
throw new Error("Item has not been hydrated. Cannot access behaviors.");
}
return this.behaviors.get(name);
}
addItem(item) {
this._setupInventory();
this.inventory.addItem(item);
item.belongsTo = this;
}
removeItem(item) {
this.inventory.removeItem(item);
// if we removed the last item unset the inventory
// This ensures that when it's reloaded it won't try to set
// its default inventory. Instead it will persist the fact
// that all the items were removed from it
if (!this.inventory.size) {
this.inventory = null;
}
item.belongsTo = null;
}
isInventoryFull() {
this._setupInventory();
return this.inventory.isFull;
}
_setupInventory() {
if (!this.inventory) {
this.inventory = new Inventory({
items: [],
max: this.maxItems
});
}
}
get qualityColors() {
return ({
poor: ['bold', 'black'],
common: ['bold', 'white'],
uncommon: ['bold', 'green'],
rare: ['bold', 'blue'],
epic: ['bold', 'magenta'],
legendary: ['bold', 'red'],
artifact: ['yellow'],
})[this.quality];
}
/**
* Friendly display colorized by quality
*/
get display() {
return this.qualityColorize(`[${this.name}]`);
}
/**
* Colorize the given string according to this item's quality
* @param {string} string
* @return string
*/
qualityColorize(string) {
const colors = this.qualityColors;
const open = '<' + colors.join('><') + '>';
const close = '</' + colors.reverse().join('></') + '>';
return open + string + close;
}
/**
* For finding the player who has the item in their possession.
* @return {Player|null} owner
*/
findOwner() {
let found = null;
let owner = this.belongsTo;
while (owner) {
if (owner instanceof Player) {
found = owner;
break;
}
owner = owner.belongsTo;
}
return found;
}
hydrate(state, serialized = {}) {
if (typeof this.area === 'string') {
this.area = state.AreaManager.getArea(this.area);
}
// if the item was saved with a custom inventory hydrate it
if (this.inventory) {
this.inventory.hydrate(state);
} else {
// otherwise load its default inv
this.defaultItems.forEach(defaultItemId => {
Logger.verbose(`\tDIST: Adding item [${defaultItemId}] to item [${this.name}]`);
const newItem = state.ItemFactory.create(this.area, defaultItemId);
newItem.hydrate(state);
state.ItemManager.add(newItem);
this.addItem(newItem);
});
}
// perform deep copy if behaviors is set to prevent sharing of the object between
// item instances
const behaviors = JSON.parse(JSON.stringify(serialized.behaviors || this.behaviors));
this.behaviors = new Map(Object.entries(behaviors));
for (let [behaviorName, config] of this.behaviors) {
let behavior = state.ItemBehaviorManager.get(behaviorName);
if (!behavior) {
return;
}
// behavior may be a boolean in which case it will be `behaviorName: true`
config = config === true ? {} : config;
behavior.attach(this, config);
}
}
serialize() {
let behaviors = {};
for (const [key, val] of this.behaviors) {
behaviors[key] = val;
}
return {
entityReference: this.entityReference,
inventory: this.inventory && this.inventory.serialize(),
// behaviors are serialized in case their config was modified during gameplay
// and that state needs to persist (charges of a scroll remaining, etc)
behaviors,
};
}
}
module.exports = Item;
| CodeOtter/tech-career | src/Item.js | JavaScript | mit | 6,892 |
๏ปฟusing System;
using System.Collections.Generic;
using Foundation.ObjectHydrator.Interfaces;
namespace Foundation.ObjectHydrator.Generators
{
public class UnitedKingdomCityGenerator : IGenerator<string>
{
private readonly Random _random;
private IList<string> _citynames = new List<string>();
public UnitedKingdomCityGenerator()
{
_random = RandomSingleton.Instance.Random;
LoadCityNames();
}
private void LoadCityNames()
{
_citynames = new List<string>()
{
"Aberaeron",
"Aberdare",
"Aberdeen",
"Aberfeldy",
"Abergavenny",
"Abergele",
"Abertillery",
"Aberystwyth",
"Abingdon",
"Accrington",
"Adlington",
"Airdrie",
"Alcester",
"Aldeburgh",
"Aldershot",
"Aldridge",
"Alford",
"Alfreton",
"Alloa",
"Alnwick",
"Alsager",
"Alston",
"Amesbury",
"Amlwch",
"Ammanford",
"Ampthill",
"Andover",
"Annan",
"Antrim",
"Appleby in Westmorland",
"Arbroath",
"Armagh",
"Arundel",
"Ashbourne",
"Ashburton",
"Ashby de la Zouch",
"Ashford",
"Ashington",
"Ashton in Makerfield",
"Atherstone",
"Auchtermuchty",
"Axminster",
"Aylesbury",
"Aylsham",
"Ayr",
"Bacup",
"Bakewell",
"Bala",
"Ballater",
"Ballycastle",
"Ballyclare",
"Ballymena",
"Ballymoney",
"Ballynahinch",
"Banbridge",
"Banbury",
"Banchory",
"Banff",
"Bangor",
"Barmouth",
"Barnard Castle",
"Barnet",
"Barnoldswick",
"Barnsley",
"Barnstaple",
"Barrhead",
"Barrow in Furness",
"Barry",
"Barton upon Humber",
"Basildon",
"Basingstoke",
"Bath",
"Bathgate",
"Batley",
"Battle",
"Bawtry",
"Beaconsfield",
"Bearsden",
"Beaumaris",
"Bebington",
"Beccles",
"Bedale",
"Bedford",
"Bedlington",
"Bedworth",
"Beeston",
"Bellshill",
"Belper",
"Berkhamsted",
"Berwick upon Tweed",
"Betws y Coed",
"Beverley",
"Bewdley",
"Bexhill on Sea",
"Bicester",
"Biddulph",
"Bideford",
"Biggar",
"Biggleswade",
"Billericay",
"Bilston",
"Bingham",
"Birkenhead",
"Birmingham",
"Bishop Auckland",
"Blackburn",
"Blackheath",
"Blackpool",
"Blaenau Ffestiniog",
"Blandford Forum",
"Bletchley",
"Bloxwich",
"Blyth",
"Bodmin",
"Bognor Regis",
"Bollington",
"Bolsover",
"Bolton",
"Bootle",
"Borehamwood",
"Boston",
"Bourne",
"Bournemouth",
"Brackley",
"Bracknell",
"Bradford",
"Bradford on Avon",
"Brading",
"Bradley Stoke",
"Bradninch",
"Braintree",
"Brechin",
"Brecon",
"Brentwood",
"Bridge of Allan",
"Bridgend",
"Bridgnorth",
"Bridgwater",
"Bridlington",
"Bridport",
"Brigg",
"Brighouse",
"Brightlingsea",
"Brighton",
"Bristol",
"Brixham",
"Broadstairs",
"Bromsgrove",
"Bromyard",
"Brynmawr",
"Buckfastleigh",
"Buckie",
"Buckingham",
"Buckley",
"Bude",
"Budleigh Salterton",
"Builth Wells",
"Bungay",
"Buntingford",
"Burford",
"Burgess Hill",
"Burnham on Crouch",
"Burnham on Sea",
"Burnley",
"Burntisland",
"Burntwood",
"Burry Port",
"Burton Latimer",
"Bury",
"Bushmills",
"Buxton",
"Caernarfon",
"Caerphilly",
"Caistor",
"Caldicot",
"Callander",
"Calne",
"Camberley",
"Camborne",
"Cambridge",
"Camelford",
"Campbeltown",
"Cannock",
"Canterbury",
"Cardiff",
"Cardigan",
"Carlisle",
"Carluke",
"Carmarthen",
"Carnforth",
"Carnoustie",
"Carrickfergus",
"Carterton",
"Castle Douglas",
"Castlederg",
"Castleford",
"Castlewellan",
"Chard",
"Charlbury",
"Chatham",
"Chatteris",
"Chelmsford",
"Cheltenham",
"Chepstow",
"Chesham",
"Cheshunt",
"Chester",
"Chester le Street",
"Chesterfield",
"Chichester",
"Chippenham",
"Chipping Campden",
"Chipping Norton",
"Chipping Sodbury",
"Chorley",
"Christchurch",
"Church Stretton",
"Cinderford",
"Cirencester",
"Clacton on Sea",
"Cleckheaton",
"Cleethorpes",
"Clevedon",
"Clitheroe",
"Clogher",
"Clydebank",
"Coalisland",
"Coalville",
"Coatbridge",
"Cockermouth",
"Coggeshall",
"Colchester",
"Coldstream",
"Coleraine",
"Coleshill",
"Colne",
"Colwyn Bay",
"Comber",
"Congleton",
"Conwy",
"Cookstown",
"Corbridge",
"Corby",
"Coventry",
"Cowbridge",
"Cowdenbeath",
"Cowes",
"Craigavon",
"Cramlington",
"Crawley",
"Crayford",
"Crediton",
"Crewe",
"Crewkerne",
"Criccieth",
"Crickhowell",
"Crieff",
"Cromarty",
"Cromer",
"Crowborough",
"Crowthorne",
"Crumlin",
"Cuckfield",
"Cullen",
"Cullompton",
"Cumbernauld",
"Cupar",
"Cwmbran",
"Dalbeattie",
"Dalkeith",
"Darlington",
"Dartford",
"Dartmouth",
"Darwen",
"Daventry",
"Dawlish",
"Deal",
"Denbigh",
"Denton",
"Derby",
"Dereham",
"Devizes",
"Dewsbury",
"Didcot",
"Dingwall",
"Dinnington",
"Diss",
"Dolgellau",
"Donaghadee",
"Doncaster",
"Dorchester",
"Dorking",
"Dornoch",
"Dover",
"Downham Market",
"Downpatrick",
"Driffield",
"Dronfield",
"Droylsden",
"Dudley",
"Dufftown",
"Dukinfield",
"Dumbarton",
"Dumfries",
"Dunbar",
"Dunblane",
"Dundee",
"Dunfermline",
"Dungannon",
"Dunoon",
"Duns",
"Dunstable",
"Durham",
"Dursley",
"Easingwold",
"East Grinstead",
"East Kilbride",
"Eastbourne",
"Eastleigh",
"Eastwood",
"Ebbw Vale",
"Edenbridge",
"Edinburgh",
"Egham",
"Elgin",
"Ellesmere",
"Ellesmere Port",
"Ely",
"Enniskillen",
"Epping",
"Epsom",
"Erith",
"Esher",
"Evesham",
"Exeter",
"Exmouth",
"Eye",
"Eyemouth",
"Failsworth",
"Fairford",
"Fakenham",
"Falkirk",
"Falkland",
"Falmouth",
"Fareham",
"Faringdon",
"Farnborough",
"Farnham",
"Farnworth",
"Faversham",
"Felixstowe",
"Ferndown",
"Filey",
"Fintona",
"Fishguard",
"Fivemiletown",
"Fleet",
"Fleetwood",
"Flint",
"Flitwick",
"Folkestone",
"Fordingbridge",
"Forfar",
"Forres",
"Fort William",
"Fowey",
"Framlingham",
"Fraserburgh",
"Frodsham",
"Frome",
"Gainsborough",
"Galashiels",
"Gateshead",
"Gillingham",
"Glasgow",
"Glastonbury",
"Glossop",
"Gloucester",
"Godalming",
"Godmanchester",
"Goole",
"Gorseinon",
"Gosport",
"Gourock",
"Grange over Sands",
"Grangemouth",
"Grantham",
"Grantown on Spey",
"Gravesend",
"Grays",
"Great Yarmouth",
"Greenock",
"Grimsby",
"Guildford",
"Haddington",
"Hadleigh",
"Hailsham",
"Halesowen",
"Halesworth",
"Halifax",
"Halstead",
"Haltwhistle",
"Hamilton",
"Harlow",
"Harpenden",
"Harrogate",
"Hartlepool",
"Harwich",
"Haslemere",
"Hastings",
"Hatfield",
"Havant",
"Haverfordwest",
"Haverhill",
"Hawarden",
"Hawick",
"Hay on Wye",
"Hayle",
"Haywards Heath",
"Heanor",
"Heathfield",
"Hebden Bridge",
"Helensburgh",
"Helston",
"Hemel Hempstead",
"Henley on Thames",
"Hereford",
"Herne Bay",
"Hertford",
"Hessle",
"Heswall",
"Hexham",
"High Wycombe",
"Higham Ferrers",
"Highworth",
"Hinckley",
"Hitchin",
"Hoddesdon",
"Holmfirth",
"Holsworthy",
"Holyhead",
"Holywell",
"Honiton",
"Horley",
"Horncastle",
"Hornsea",
"Horsham",
"Horwich",
"Houghton le Spring",
"Hove",
"Howden",
"Hoylake",
"Hucknall",
"Huddersfield",
"Hungerford",
"Hunstanton",
"Huntingdon",
"Huntly",
"Hyde",
"Hythe",
"Ilford",
"Ilfracombe",
"Ilkeston",
"Ilkley",
"Ilminster",
"Innerleithen",
"Inveraray",
"Inverkeithing",
"Inverness",
"Inverurie",
"Ipswich",
"Irthlingborough",
"Irvine",
"Ivybridge",
"Jarrow",
"Jedburgh",
"Johnstone",
"Keighley",
"Keith",
"Kelso",
"Kempston",
"Kendal",
"Kenilworth",
"Kesgrave",
"Keswick",
"Kettering",
"Keynsham",
"Kidderminster",
"Kilbarchan",
"Kilkeel",
"Killyleagh",
"Kilmarnock",
"Kilwinning",
"Kinghorn",
"Kingsbridge",
"Kington",
"Kingussie",
"Kinross",
"Kintore",
"Kirkby",
"Kirkby Lonsdale",
"Kirkcaldy",
"Kirkcudbright",
"Kirkham",
"Kirkwall",
"Kirriemuir",
"Knaresborough",
"Knighton",
"Knutsford",
"Ladybank",
"Lampeter",
"Lanark",
"Lancaster",
"Langholm",
"Largs",
"Larne",
"Laugharne",
"Launceston",
"Laurencekirk",
"Leamington Spa",
"Leatherhead",
"Ledbury",
"Leeds",
"Leek",
"Leicester",
"Leighton Buzzard",
"Leiston",
"Leominster",
"Lerwick",
"Letchworth",
"Leven",
"Lewes",
"Leyland",
"Lichfield",
"Limavady",
"Lincoln",
"Linlithgow",
"Lisburn",
"Liskeard",
"Lisnaskea",
"Littlehampton",
"Liverpool",
"Llandeilo",
"Llandovery",
"Llandrindod Wells",
"Llandudno",
"Llanelli",
"Llanfyllin",
"Llangollen",
"Llanidloes",
"Llanrwst",
"Llantrisant",
"Llantwit Major",
"Llanwrtyd Wells",
"Loanhead",
"Lochgilphead",
"Lockerbie",
"Londonderry",
"Long Eaton",
"Longridge",
"Looe",
"Lossiemouth",
"Lostwithiel",
"Loughborough",
"Loughton",
"Louth",
"Lowestoft",
"Ludlow",
"Lurgan",
"Luton",
"Lutterworth",
"Lydd",
"Lydney",
"Lyme Regis",
"Lymington",
"Lynton",
"Mablethorpe",
"Macclesfield",
"Machynlleth",
"Maesteg",
"Magherafelt",
"Maidenhead",
"Maidstone",
"Maldon",
"Malmesbury",
"Malton",
"Malvern",
"Manchester",
"Manningtree",
"Mansfield",
"March",
"Margate",
"Market Deeping",
"Market Drayton",
"Market Harborough",
"Market Rasen",
"Market Weighton",
"Markethill",
"Markinch",
"Marlborough",
"Marlow",
"Maryport",
"Matlock",
"Maybole",
"Melksham",
"Melrose",
"Melton Mowbray",
"Merthyr Tydfil",
"Mexborough",
"Middleham",
"Middlesbrough",
"Middlewich",
"Midhurst",
"Midsomer Norton",
"Milford Haven",
"Milngavie",
"Milton Keynes",
"Minehead",
"Moffat",
"Mold",
"Monifieth",
"Monmouth",
"Montgomery",
"Montrose",
"Morecambe",
"Moreton in Marsh",
"Moretonhampstead",
"Morley",
"Morpeth",
"Motherwell",
"Musselburgh",
"Nailsea",
"Nailsworth",
"Nairn",
"Nantwich",
"Narberth",
"Neath",
"Needham Market",
"Neston",
"New Mills",
"New Milton",
"Newbury",
"Newcastle",
"Newcastle Emlyn",
"Newcastle upon Tyne",
"Newent",
"Newhaven",
"Newmarket",
"Newport",
"Newport Pagnell",
"Newport on Tay",
"Newquay",
"Newry",
"Newton Abbot",
"Newton Aycliffe",
"Newton Stewart",
"Newton le Willows",
"Newtown",
"Newtownabbey",
"Newtownards",
"Normanton",
"North Berwick",
"North Walsham",
"Northallerton",
"Northampton",
"Northwich",
"Norwich",
"Nottingham",
"Nuneaton",
"Oakham",
"Oban",
"Okehampton",
"Oldbury",
"Oldham",
"Oldmeldrum",
"Olney",
"Omagh",
"Ormskirk",
"Orpington",
"Ossett",
"Oswestry",
"Otley",
"Oundle",
"Oxford",
"Padstow",
"Paignton",
"Painswick",
"Paisley",
"Peebles",
"Pembroke",
"Penarth",
"Penicuik",
"Penistone",
"Penmaenmawr",
"Penrith",
"Penryn",
"Penzance",
"Pershore",
"Perth",
"Peterborough",
"Peterhead",
"Peterlee",
"Petersfield",
"Petworth",
"Pickering",
"Pitlochry",
"Pittenweem",
"Plymouth",
"Pocklington",
"Polegate",
"Pontefract",
"Pontypridd",
"Poole",
"Port Talbot",
"Portadown",
"Portaferry",
"Porth",
"Porthcawl",
"Porthmadog",
"Portishead",
"Portrush",
"Portsmouth",
"Portstewart",
"Potters Bar",
"Potton",
"Poulton le Fylde",
"Prescot",
"Prestatyn",
"Presteigne",
"Preston",
"Prestwick",
"Princes Risborough",
"Prudhoe",
"Pudsey",
"Pwllheli",
"Ramsgate",
"Randalstown",
"Rayleigh",
"Reading",
"Redcar",
"Redditch",
"Redhill",
"Redruth",
"Reigate",
"Retford",
"Rhayader",
"Rhuddlan",
"Rhyl",
"Richmond",
"Rickmansworth",
"Ringwood",
"Ripley",
"Ripon",
"Rochdale",
"Rochester",
"Rochford",
"Romford",
"Romsey",
"Ross on Wye",
"Rostrevor",
"Rothbury",
"Rotherham",
"Rothesay",
"Rowley Regis",
"Royston",
"Rugby",
"Rugeley",
"Runcorn",
"Rushden",
"Rutherglen",
"Ruthin",
"Ryde",
"Rye",
"Saffron Walden",
"Saintfield",
"Salcombe",
"Sale",
"Salford",
"Salisbury",
"Saltash",
"Saltcoats",
"Sandbach",
"Sandhurst",
"Sandown",
"Sandwich",
"Sandy",
"Sawbridgeworth",
"Saxmundham",
"Scarborough",
"Scunthorpe",
"Seaford",
"Seaton",
"Sedgefield",
"Selby",
"Selkirk",
"Selsey",
"Settle",
"Sevenoaks",
"Shaftesbury",
"Shanklin",
"Sheerness",
"Sheffield",
"Shepshed",
"Shepton Mallet",
"Sherborne",
"Sheringham",
"Shildon",
"Shipston on Stour",
"Shoreham by Sea",
"Shrewsbury",
"Sidmouth",
"Sittingbourne",
"Skegness",
"Skelmersdale",
"Skipton",
"Sleaford",
"Slough",
"Smethwick",
"Soham",
"Solihull",
"Somerton",
"South Molton",
"South Shields",
"South Woodham Ferrers",
"Southam",
"Southampton",
"Southborough",
"Southend on Sea",
"Southport",
"Southsea",
"Southwell",
"Southwold",
"Spalding",
"Spennymoor",
"Spilsby",
"Stafford",
"Staines",
"Stamford",
"Stanley",
"Staveley",
"Stevenage",
"Stirling",
"Stockport",
"Stockton on Tees",
"Stoke on Trent",
"Stone",
"Stowmarket",
"Strabane",
"Stranraer",
"Stratford upon Avon",
"Strood",
"Stroud",
"Sudbury",
"Sunderland",
"Sutton Coldfield",
"Sutton in Ashfield",
"Swadlincote",
"Swanage",
"Swanley",
"Swansea",
"Swindon",
"Tadcaster",
"Tadley",
"Tain",
"Talgarth",
"Tamworth",
"Taunton",
"Tavistock",
"Teignmouth",
"Telford",
"Tenby",
"Tenterden",
"Tetbury",
"Tewkesbury",
"Thame",
"Thatcham",
"Thaxted",
"Thetford",
"Thirsk",
"Thornbury",
"Thrapston",
"Thurso",
"Tilbury",
"Tillicoultry",
"Tipton",
"Tiverton",
"Tobermory",
"Todmorden",
"Tonbridge",
"Torpoint",
"Torquay",
"Totnes",
"Totton",
"Towcester",
"Tredegar",
"Tregaron",
"Tring",
"Troon",
"Trowbridge",
"Truro",
"Tunbridge Wells",
"Tywyn",
"Uckfield",
"Ulverston",
"Uppingham",
"Usk",
"Uttoxeter",
"Ventnor",
"Verwood",
"Wadebridge",
"Wadhurst",
"Wakefield",
"Wallasey",
"Wallingford",
"Walsall",
"Waltham Abbey",
"Waltham Cross",
"Walton on Thames",
"Walton on the Naze",
"Wantage",
"Ware",
"Wareham",
"Warminster",
"Warrenpoint",
"Warrington",
"Warwick",
"Washington",
"Watford",
"Wednesbury",
"Wednesfield",
"Wellingborough",
"Wellington",
"Wells",
"Wells next the Sea",
"Welshpool",
"Welwyn Garden City",
"Wem",
"Wendover",
"West Bromwich",
"Westbury",
"Westerham",
"Westhoughton",
"Weston super Mare",
"Wetherby",
"Weybridge",
"Weymouth",
"Whaley Bridge",
"Whitby",
"Whitchurch",
"Whitehaven",
"Whitley Bay",
"Whitnash",
"Whitstable",
"Whitworth",
"Wick",
"Wickford",
"Widnes",
"Wigan",
"Wigston",
"Wigtown",
"Willenhall",
"Wincanton",
"Winchester",
"Windermere",
"Winsford",
"Winslow",
"Wisbech",
"Witham",
"Withernsea",
"Witney",
"Woburn",
"Woking",
"Wokingham",
"Wolverhampton",
"Wombwell",
"Woodbridge",
"Woodstock",
"Wootton Bassett",
"Worcester",
"Workington",
"Worksop",
"Worthing",
"Wotton under Edge",
"Wrexham",
"Wymondham",
"Yarm",
"Yarmouth",
"Yate",
"Yateley",
"Yeadon",
"Yeovil",
"York"
};
}
public string Generate()
{
return _citynames[_random.Next(0, _citynames.Count)];
}
}
} | flightlog/flsserver | src/ObjectHydrator-master/Foundation.ObjectHydrator/Generators/UnitedKingdomCityGenerator.cs | C# | mit | 29,362 |
<fieldset class="col-sm-12 bordure">
<legend class="legende">{{ 'Verifyrecord' | translate }}</legend>
<div ng-include src="'partials/message-include.html'"></div>
<form class="well form-horizontal">
<div class="form-group">
<label for="verifyrecord_uid" class="col-sm-2 control-label">{{ 'verifyrecord.uid' | translate }}</label>
<div class="col-sm-10">
<input type="text" id="verifyrecord_uid" name="uid" ng-model="verifyrecord.uid" class="form-control" maxLength="20" ng-disabled="mode != 'create'"/>
</div>
</div>
<div class="form-group">
<label for="verifyrecord_verifytype" class="col-sm-2 control-label">{{ 'verifyrecord.verifytype' | translate }}</label>
<div class="col-sm-10">
<input type="text" id="verifyrecord_verifytype" name="verifytype" ng-model="verifyrecord.verifytype" class="form-control" maxLength="4" />
</div>
</div>
<div class="form-group">
<label for="verifyrecord_userid" class="col-sm-2 control-label">{{ 'verifyrecord.userid' | translate }}</label>
<div class="col-sm-10">
<input type="text" id="verifyrecord_userid" name="userid" ng-model="verifyrecord.userid" class="form-control" maxLength="20" />
</div>
</div>
<div class="form-group">
<label for="verifyrecord_username" class="col-sm-2 control-label">{{ 'verifyrecord.username' | translate }}</label>
<div class="col-sm-10">
<input type="text" id="verifyrecord_username" name="username" ng-model="verifyrecord.username" class="form-control" maxLength="50" />
</div>
</div>
<div class="form-group">
<label for="verifyrecord_email" class="col-sm-2 control-label">{{ 'verifyrecord.email' | translate }}</label>
<div class="col-sm-10">
<input type="text" id="verifyrecord_email" name="email" ng-model="verifyrecord.email" class="form-control" maxLength="100" />
</div>
</div>
<div class="form-group">
<label for="verifyrecord_randomcode" class="col-sm-2 control-label">{{ 'verifyrecord.randomcode' | translate }}</label>
<div class="col-sm-10">
<input type="text" id="verifyrecord_randomcode" name="randomcode" ng-model="verifyrecord.randomcode" class="form-control" maxLength="30" />
</div>
</div>
<div class="form-group">
<label for="verifyrecord_expiretime" class="col-sm-2 control-label">{{ 'verifyrecord.expiretime' | translate }}</label>
<div class="col-sm-10">
<input id="verifyrecord_expiretime" bs-datepicker data-autoclose="1" ng-model="verifyrecord.expiretime" name="expiretime" class="form-control" />
</div>
</div>
<div class="form-group">
<label for="verifyrecord_resetflg" class="col-sm-2 control-label">{{ 'verifyrecord.resetflg' | translate }}</label>
<div class="col-sm-10">
<input type="text" id="verifyrecord_resetflg" name="resetflg" ng-model="verifyrecord.resetflg" class="form-control" maxLength="4" />
</div>
</div>
<div class="form-group">
<label for="verifyrecord_createdate" class="col-sm-2 control-label">{{ 'verifyrecord.createdate' | translate }}</label>
<div class="col-sm-10">
<input id="verifyrecord_createdate" bs-datepicker data-autoclose="1" ng-model="verifyrecord.createdate" name="createdate" class="form-control" />
</div>
</div>
<div class="form-group">
<label for="verifyrecord_modifydate" class="col-sm-2 control-label">{{ 'verifyrecord.modifydate' | translate }}</label>
<div class="col-sm-10">
<input id="verifyrecord_modifydate" bs-datepicker data-autoclose="1" ng-model="verifyrecord.modifydate" name="modifydate" class="form-control" />
</div>
</div>
<div class="form-group">
<label for="verifyrecord_createuser" class="col-sm-2 control-label">{{ 'verifyrecord.createuser' | translate }}</label>
<div class="col-sm-10">
<input type="text" id="verifyrecord_createuser" name="createuser" ng-model="verifyrecord.createuser" class="form-control" maxLength="50" />
</div>
</div>
<div class="form-group">
<label for="verifyrecord_modifyuser" class="col-sm-2 control-label">{{ 'verifyrecord.modifyuser' | translate }}</label>
<div class="col-sm-10">
<input type="text" id="verifyrecord_modifyuser" name="modifyuser" ng-model="verifyrecord.modifyuser" class="form-control" maxLength="50" />
</div>
</div>
<div class="form-group">
<label for="verifyrecord_delflg" class="col-sm-2 control-label">{{ 'verifyrecord.delflg' | translate }}</label>
<div class="col-sm-10">
<input type="text" id="verifyrecord_delflg" name="delflg" ng-model="verifyrecord.delflg" class="form-control" maxLength="4" />
</div>
</div>
<div class="form-group">
<label for="verifyrecord_platform" class="col-sm-2 control-label">{{ 'verifyrecord.platform' | translate }}</label>
<div class="col-sm-10">
<input type="text" id="verifyrecord_platform" name="platform" ng-model="verifyrecord.platform" class="form-control" maxLength="50" />
</div>
</div>
<!-- ACTION BUTTONS -->
<div class="form-group">
<div class="col-sm-offset-2 col-sm-2">
<a role="button" class="btn btn-danger btn-block" ng-click="delete(verifyrecord.uid)" ng-show="mode != 'create'">{{ 'delete' | translate }}</a>
</div>
<div class="col-sm-offset-4 col-sm-2">
<a role="button" class="btn btn-default btn-block" href="#/verifyrecord">{{ 'cancel' | translate }}</a>
</div>
<div class="col-sm-2">
<button type="submit" class="btn btn-primary btn-lg btn-block" ng-click="save()">{{ 'save' | translate }}</button>
</div>
</div>
</form>
</fieldset>
| cytochromewangdong/HippoStart | src/main/webapp/app_back/partials/verifyrecord/verifyrecord_form.html | HTML | mit | 5,603 |
// This file was generated based on 'C:\ProgramData\Uno\Packages\Fuse.Controls\0.18.8\$.uno'.
// WARNING: Changes might be lost if you edit this file directly.
#pragma once
#include <Fuse.Animations.IResize.h>
#include <Fuse.Controls.TextBlock.h>
#include <Fuse.IActualPlacement.h>
#include <Fuse.Navigation.INavigationPanel.h>
#include <Fuse.Node.h>
#include <Fuse.Scripting.INameScope.h>
#include <Fuse.Triggers.Actions.ICollapse.h>
#include <Fuse.Triggers.Actions.IHide.h>
#include <Fuse.Triggers.Actions.IShow.h>
#include <Fuse.Triggers.IAddRemove-1.h>
#include <Fuse.Triggers.IValue-1.h>
#include <Uno.String.h>
namespace g{namespace Fuse{namespace Controls{struct Text;}}}
namespace g{
namespace Fuse{
namespace Controls{
// public sealed class Text :4254
// {
::g::Fuse::Controls::TextControl_type* Text_typeof();
void Text__ctor_6_fn(Text* __this);
void Text__New2_fn(Text** __retval);
void Text__OnRooted_fn(Text* __this);
void Text__OnUnrooted_fn(Text* __this);
struct Text : ::g::Fuse::Controls::TextBlock
{
void ctor_6();
static Text* New2();
};
// }
}}} // ::g::Fuse::Controls
| blyk/BlackCode-Fuse | TestApp/.build/Simulator/Android/include/Fuse.Controls.Text.h | C | mit | 1,103 |
var JobsList = React.createClass({displayName: "JobsList",
render: function() {
return (
React.createElement(JobItem, {title: "Trabalho Python", desc: "Descricao aqui"})
);
}
});
var JobItem = React.createClass({displayName: "JobItem",
render: function() {
React.createElement("div", {className: "panel panel-default"},
React.createElement("div", {className: "panel-heading"}, this.params.job.title),
React.createElement("div", {className: "panel-body"},
this.params.job.desc
)
)
}
}) | raonyguimaraes/pyjobs | pyjobs/web/static/js/.module-cache/4ae00001aee8e40f0fb90fff1d2d3b85d7f734e2.js | JavaScript | mit | 600 |
/*************************************************************************/
/* skeleton_modification_stack_3d.h */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* https://godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2021 Juan Linietsky, Ariel Manzur. */
/* Copyright (c) 2014-2021 Godot Engine contributors (cf. AUTHORS.md). */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
#ifndef SKELETONMODIFICATIONSTACK3D_H
#define SKELETONMODIFICATIONSTACK3D_H
#include "core/templates/local_vector.h"
#include "scene/3d/skeleton_3d.h"
class Skeleton3D;
class SkeletonModification3D;
class SkeletonModificationStack3D : public Resource {
GDCLASS(SkeletonModificationStack3D, Resource);
friend class Skeleton3D;
friend class SkeletonModification3D;
protected:
static void _bind_methods();
virtual void _get_property_list(List<PropertyInfo> *p_list) const;
virtual bool _set(const StringName &p_path, const Variant &p_value);
virtual bool _get(const StringName &p_path, Variant &r_ret) const;
public:
Skeleton3D *skeleton = nullptr;
bool is_setup = false;
bool enabled = false;
real_t strength = 1.0;
enum EXECUTION_MODE {
execution_mode_process,
execution_mode_physics_process,
};
LocalVector<Ref<SkeletonModification3D>> modifications = LocalVector<Ref<SkeletonModification3D>>();
int modifications_count = 0;
virtual void setup();
virtual void execute(real_t p_delta, int p_execution_mode);
void enable_all_modifications(bool p_enable);
Ref<SkeletonModification3D> get_modification(int p_mod_idx) const;
void add_modification(Ref<SkeletonModification3D> p_mod);
void delete_modification(int p_mod_idx);
void set_modification(int p_mod_idx, Ref<SkeletonModification3D> p_mod);
void set_modification_count(int p_count);
int get_modification_count() const;
void set_skeleton(Skeleton3D *p_skeleton);
Skeleton3D *get_skeleton() const;
bool get_is_setup() const;
void set_enabled(bool p_enabled);
bool get_enabled() const;
void set_strength(real_t p_strength);
real_t get_strength() const;
SkeletonModificationStack3D();
};
#endif // SKELETONMODIFICATIONSTACK3D_H
| DmitriySalnikov/godot | scene/resources/skeleton_modification_stack_3d.h | C | mit | 3,932 |
Merge Search Result Templates
================
With these templates you are able to merge search results from multiple search web parts into one view.

File | Desciption
--- | ---
__Control_Combine.html__ | This is the control display template that enables you to merge search results.
__Item_Combine.html__ | This is the item display template that pushes the item HTML to the control template.
Related blog post
-------
Check the following post to get more information about how you need to configure this on your environment: [Merge search results from multiple search web parts together](http://www.eliostruyf.com/merge-search-results-from-multiple-search-web-parts-together/)
| johnmeilleur/DisplayTemplates | Search Display Templates/Merge Search Result Templates/README.md | Markdown | mit | 842 |
/* Copyright (c) 2007-2016, The Tor Project, Inc. */
/* See LICENSE for licensing information */
/**
* \file geoip.c
* \brief Functions related to maintaining an IP-to-country database;
* to summarizing client connections by country to entry guards, bridges,
* and directory servers; and for statistics on answering network status
* requests.
*
* There are two main kinds of functions in this module: geoip functions,
* which map groups of IPv4 and IPv6 addresses to country codes, and
* statistical functions, which collect statistics about different kinds of
* per-country usage.
*
* The geoip lookup tables are implemented as sorted lists of disjoint address
* ranges, each mapping to a singleton geoip_country_t. These country objects
* are also indexed by their names in a hashtable.
*
* The tables are populated from disk at startup by the geoip_load_file()
* function. For more information on the file format they read, see that
* function. See the scripts and the README file in src/config for more
* information about how those files are generated.
*
* Tor uses GeoIP information in order to implement user requests (such as
* ExcludeNodes {cc}), and to keep track of how much usage relays are getting
* for each country.
*/
#define GEOIP_PRIVATE
#include "or.h"
#include "ht.h"
#include "config.h"
#include "control.h"
#include "dnsserv.h"
#include "geoip.h"
#include "routerlist.h"
/* Forward declaration: lazily invoked the first time country data is used. */
static void init_geoip_countries(void);
/** An entry from the GeoIP IPv4 file: maps an IPv4 range to a country. */
typedef struct geoip_ipv4_entry_t {
  uint32_t ip_low; /**< The lowest IP in the range, in host order */
  uint32_t ip_high; /**< The highest IP in the range, in host order */
  intptr_t country; /**< An index into geoip_countries */
} geoip_ipv4_entry_t;
/** An entry from the GeoIP IPv6 file: maps an IPv6 range to a country. */
typedef struct geoip_ipv6_entry_t {
  struct in6_addr ip_low; /**< The lowest IP in the range, in host order */
  struct in6_addr ip_high; /**< The highest IP in the range, in host order */
  intptr_t country; /**< An index into geoip_countries */
} geoip_ipv6_entry_t;
/** A per-country record for GeoIP request history. */
typedef struct geoip_country_t {
  char countrycode[3]; /**< Lowercased two-letter code, NUL-terminated. */
  uint32_t n_v3_ns_requests; /**< v3 networkstatus requests from this cc. */
} geoip_country_t;
/** A list of geoip_country_t */
static smartlist_t *geoip_countries = NULL;
/** A map from lowercased country codes to their position in geoip_countries.
 * The index is encoded in the pointer, and 1 is added so that NULL can mean
 * not found. */
static strmap_t *country_idxplus1_by_lc_code = NULL;
/** Lists of all known geoip_ipv4_entry_t and geoip_ipv6_entry_t, sorted
 * by their respective ip_low. */
static smartlist_t *geoip_ipv4_entries = NULL, *geoip_ipv6_entries = NULL;
/** SHA1 digest of the GeoIP IPv4 file, for extra-info descriptors. */
static char geoip_digest[DIGEST_LEN];
/** SHA1 digest of the GeoIP IPv6 file, for extra-info descriptors. */
static char geoip6_digest[DIGEST_LEN];
/** Look up <b>country</b>, a 2-letter country code, in the GeoIP country
 * list, and return its index there; return -1 if the code is unknown. */
MOCK_IMPL(country_t,
geoip_get_country,(const char *country))
{
  /* The map stores index+1 so that NULL can mean "not present". */
  void *pos_plus_one = strmap_get_lc(country_idxplus1_by_lc_code, country);
  if (pos_plus_one == NULL)
    return -1;
  return (country_t) (((uintptr_t) pos_plus_one) - 1);
}
/** Add an entry to a GeoIP table, mapping all IP addresses between <b>low</b>
 * and <b>high</b>, inclusive, to the 2-letter country code <b>country</b>.
 * Creates the country record (and its index-map entry) on first sight of a
 * code.  Does nothing for malformed ranges. */
static void
geoip_add_entry(const tor_addr_t *low, const tor_addr_t *high,
                const char *country)
{
  intptr_t idx;
  void *idxplus1_;
  /* Reject nonsensical ranges: mixed address families, or high < low. */
  IF_BUG_ONCE(tor_addr_family(low) != tor_addr_family(high))
    return;
  IF_BUG_ONCE(tor_addr_compare(high, low, CMP_EXACT) < 0)
    return;
  idxplus1_ = strmap_get_lc(country_idxplus1_by_lc_code, country);
  if (!idxplus1_) {
    /* First time we see this code: append a new geoip_country_t and store
     * its index (plus one, so NULL can mean "absent") in the code map. */
    geoip_country_t *c = tor_malloc_zero(sizeof(geoip_country_t));
    strlcpy(c->countrycode, country, sizeof(c->countrycode));
    tor_strlower(c->countrycode);
    smartlist_add(geoip_countries, c);
    idx = smartlist_len(geoip_countries) - 1;
    strmap_set_lc(country_idxplus1_by_lc_code, country, (void*)(idx+1));
  } else {
    idx = ((uintptr_t)idxplus1_)-1;
  }
  {
    /* Sanity check: the index must map back to the same country code. */
    geoip_country_t *c = smartlist_get(geoip_countries, idx);
    tor_assert(!strcasecmp(c->countrycode, country));
  }
  if (tor_addr_family(low) == AF_INET) {
    geoip_ipv4_entry_t *ent = tor_malloc_zero(sizeof(geoip_ipv4_entry_t));
    ent->ip_low = tor_addr_to_ipv4h(low);
    ent->ip_high = tor_addr_to_ipv4h(high);
    ent->country = idx;
    smartlist_add(geoip_ipv4_entries, ent);
  } else if (tor_addr_family(low) == AF_INET6) {
    geoip_ipv6_entry_t *ent = tor_malloc_zero(sizeof(geoip_ipv6_entry_t));
    ent->ip_low = *tor_addr_to_in6_assert(low);
    ent->ip_high = *tor_addr_to_in6_assert(high);
    ent->country = idx;
    smartlist_add(geoip_ipv6_entries, ent);
  }
}
/** Add an entry to the GeoIP table indicated by <b>family</b>, parsing it
 * from <b>line</b>.  The format is as for geoip_load_file().  Comment lines
 * (starting with '#') and blank lines are skipped and count as success.
 * Return 0 on success or skip, -1 on parse failure. */
STATIC int
geoip_parse_entry(const char *line, sa_family_t family)
{
  tor_addr_t low_addr, high_addr;
  char c[3];
  char *country = NULL;
  /* NOTE: buf must stay at function scope: in the IPv6 branch,
   * <b>country</b> points into it and is still used at the
   * geoip_add_entry() call below. */
  char buf[512];
  if (!geoip_countries)
    init_geoip_countries();
  if (family == AF_INET) {
    if (!geoip_ipv4_entries)
      geoip_ipv4_entries = smartlist_new();
  } else if (family == AF_INET6) {
    if (!geoip_ipv6_entries)
      geoip_ipv6_entries = smartlist_new();
  } else {
    log_warn(LD_GENERAL, "Unsupported family: %d", family);
    return -1;
  }
  while (TOR_ISSPACE(*line))
    ++line;
  /* Skip comments AND blank lines: geoip_load_file() documents both as
   * ignorable, but previously a blank line fell through to the parser and
   * produced a spurious warning. */
  if (*line == '#' || *line == '\0')
    return 0;
  if (family == AF_INET) {
    unsigned int low, high;
    if (tor_sscanf(line,"%u,%u,%2s", &low, &high, c) == 3 ||
        tor_sscanf(line,"\"%u\",\"%u\",\"%2s\",", &low, &high, c) == 3) {
      tor_addr_from_ipv4h(&low_addr, low);
      tor_addr_from_ipv4h(&high_addr, high);
    } else
      goto fail;
    country = c;
  } else { /* AF_INET6 */
    char *low_str, *high_str;
    struct in6_addr low, high;
    char *strtok_state;
    strlcpy(buf, line, sizeof(buf));
    low_str = tor_strtok_r(buf, ",", &strtok_state);
    if (!low_str)
      goto fail;
    high_str = tor_strtok_r(NULL, ",", &strtok_state);
    if (!high_str)
      goto fail;
    country = tor_strtok_r(NULL, "\n", &strtok_state);
    if (!country)
      goto fail;
    if (strlen(country) != 2)
      goto fail;
    if (tor_inet_pton(AF_INET6, low_str, &low) <= 0)
      goto fail;
    tor_addr_from_in6(&low_addr, &low);
    if (tor_inet_pton(AF_INET6, high_str, &high) <= 0)
      goto fail;
    tor_addr_from_in6(&high_addr, &high);
  }
  geoip_add_entry(&low_addr, &high_addr, country);
  return 0;
 fail:
  log_warn(LD_GENERAL, "Unable to parse line from GEOIP %s file: %s",
           family == AF_INET ? "IPv4" : "IPv6", escaped(line));
  return -1;
}
/** Sorting helper for smartlist_sort(): order two geoip_ipv4_entry_t
 * values by their ip_low fields, returning -1, 0, or 1. */
static int
geoip_ipv4_compare_entries_(const void **_a, const void **_b)
{
  const geoip_ipv4_entry_t *first = *_a, *second = *_b;
  if (first->ip_low == second->ip_low)
    return 0;
  return (first->ip_low < second->ip_low) ? -1 : 1;
}
/** bsearch helper for smartlist_bsearch(): compare an IPv4 address
 * (<b>_key</b>, a pointer to a host-order uint32_t) against the range of a
 * geoip_ipv4_entry_t.  Returns 0 when the address falls inside the range. */
static int
geoip_ipv4_compare_key_to_entry_(const void *_key, const void **_member)
{
  /* No alignment issue here, since _key really is a pointer to uint32_t */
  const uint32_t addr = *(uint32_t *)_key;
  const geoip_ipv4_entry_t *entry = *_member;
  if (addr > entry->ip_high)
    return 1;
  if (addr < entry->ip_low)
    return -1;
  return 0;
}
/** Sorting helper for smartlist_sort(): order two geoip_ipv6_entry_t
 * values by a byte-wise comparison of their ip_low addresses. */
static int
geoip_ipv6_compare_entries_(const void **_a, const void **_b)
{
  const geoip_ipv6_entry_t *first = *_a;
  const geoip_ipv6_entry_t *second = *_b;
  return fast_memcmp(first->ip_low.s6_addr, second->ip_low.s6_addr,
                     sizeof(struct in6_addr));
}
/** bsearch helper for smartlist_bsearch(): compare an IPv6 address
 * (<b>_key</b>, a pointer to a struct in6_addr) against the range of a
 * geoip_ipv6_entry_t.  Returns 0 when the address falls inside the range. */
static int
geoip_ipv6_compare_key_to_entry_(const void *_key, const void **_member)
{
  const struct in6_addr *addr = (struct in6_addr *)_key;
  const geoip_ipv6_entry_t *entry = *_member;
  int cmp_low = fast_memcmp(addr->s6_addr, entry->ip_low.s6_addr,
                            sizeof(struct in6_addr));
  if (cmp_low < 0)
    return -1;
  {
    int cmp_high = fast_memcmp(addr->s6_addr, entry->ip_high.s6_addr,
                               sizeof(struct in6_addr));
    if (cmp_high > 0)
      return 1;
  }
  return 0;
}
/** Return 1 if we should collect geoip stats on bridge users, and
 * include them in our extrainfo descriptor. Else return 0. */
int
should_record_bridge_info(const or_options_t *options)
{
  if (!options->BridgeRelay)
    return 0;
  return options->BridgeRecordUsageByCountry ? 1 : 0;
}
/** Set up a fresh (empty) GeoIP country list and country-code index map.
 * Index 0 is always the special "??" entry, used for addresses that could
 * not be resolved to any country. */
static void
init_geoip_countries(void)
{
  geoip_country_t *unknown_country;
  geoip_countries = smartlist_new();
  country_idxplus1_by_lc_code = strmap_new();
  /* Reserve index 0 for unresolved lookups. */
  unknown_country = tor_malloc_zero(sizeof(geoip_country_t));
  strlcpy(unknown_country->countrycode, "??",
          sizeof(unknown_country->countrycode));
  smartlist_add(geoip_countries, unknown_country);
  /* Stored as index+1, so "??" (index 0) is recorded as 1. */
  strmap_set_lc(country_idxplus1_by_lc_code, "??", (void*)(1));
}
/** Clear appropriate GeoIP database, based on <b>family</b>, and
 * reload it from the file <b>filename</b>. Return 0 on success, -1 on
 * failure.
 *
 * Recognized line formats for IPv4 are:
 *   INTIPLOW,INTIPHIGH,CC
 * and
 *   "INTIPLOW","INTIPHIGH","CC","CC3","COUNTRY NAME"
 * where INTIPLOW and INTIPHIGH are IPv4 addresses encoded as 4-byte unsigned
 * integers, and CC is a country code.
 *
 * Recognized line format for IPv6 is:
 *   IPV6LOW,IPV6HIGH,CC
 * where IPV6LOW and IPV6HIGH are IPv6 addresses and CC is a country code.
 *
 * It also recognizes, and skips over, blank lines and lines that start
 * with '#' (comments).
 */
int
geoip_load_file(sa_family_t family, const char *filename)
{
  FILE *f;
  const char *msg = "";
  const or_options_t *options = get_options();
  /* A missing file is only worth a warning when the current configuration
   * actually needs GeoIP data. */
  int severity = options_need_geoip_info(options, &msg) ? LOG_WARN : LOG_INFO;
  crypto_digest_t *geoip_digest_env = NULL;
  tor_assert(family == AF_INET || family == AF_INET6);
  if (!(f = tor_fopen_cloexec(filename, "r"))) {
    log_fn(severity, LD_GENERAL, "Failed to open GEOIP file %s. %s",
           filename, msg);
    return -1;
  }
  if (!geoip_countries)
    init_geoip_countries();
  /* Throw away any previously loaded table for this family only; the
   * country objects themselves are shared between families and kept. */
  if (family == AF_INET) {
    if (geoip_ipv4_entries) {
      SMARTLIST_FOREACH(geoip_ipv4_entries, geoip_ipv4_entry_t *, e,
                        tor_free(e));
      smartlist_free(geoip_ipv4_entries);
    }
    geoip_ipv4_entries = smartlist_new();
  } else { /* AF_INET6 */
    if (geoip_ipv6_entries) {
      SMARTLIST_FOREACH(geoip_ipv6_entries, geoip_ipv6_entry_t *, e,
                        tor_free(e));
      smartlist_free(geoip_ipv6_entries);
    }
    geoip_ipv6_entries = smartlist_new();
  }
  geoip_digest_env = crypto_digest_new();
  log_notice(LD_GENERAL, "Parsing GEOIP %s file %s.",
             (family == AF_INET) ? "IPv4" : "IPv6", filename);
  while (!feof(f)) {
    char buf[512];
    if (fgets(buf, (int)sizeof(buf), f) == NULL)
      break;
    /* Digest the raw line, so extra-info descriptors can identify exactly
     * which file was loaded. */
    crypto_digest_add_bytes(geoip_digest_env, buf, strlen(buf));
    /* FFFF track full country name. */
    geoip_parse_entry(buf, family);
  }
  /*XXXX abort and return -1 if no entries/illformed?*/
  fclose(f);
  /* Sort list and remember file digests so that we can include it in
   * our extra-info descriptors.  Sorting is required for the bsearch
   * helpers used by the lookup functions. */
  if (family == AF_INET) {
    smartlist_sort(geoip_ipv4_entries, geoip_ipv4_compare_entries_);
    /* Okay, now we need to maybe change our mind about what is in
     * which country. We do this for IPv4 only since that's what we
     * store in node->country. */
    refresh_all_country_info();
    crypto_digest_get_digest(geoip_digest_env, geoip_digest, DIGEST_LEN);
  } else {
    /* AF_INET6 */
    smartlist_sort(geoip_ipv6_entries, geoip_ipv6_compare_entries_);
    crypto_digest_get_digest(geoip_digest_env, geoip6_digest, DIGEST_LEN);
  }
  crypto_digest_free(geoip_digest_env);
  return 0;
}
/** Given an IPv4 address <b>ipaddr</b> in host order, return a number
 * representing its country: -1 when no GeoIP database is loaded, 0 for the
 * 'unknown country', otherwise an index below geoip_get_n_countries().
 * Decode the result with geoip_get_country_name(). */
STATIC int
geoip_get_country_by_ipv4(uint32_t ipaddr)
{
  geoip_ipv4_entry_t *found;
  if (geoip_ipv4_entries == NULL)
    return -1;
  found = smartlist_bsearch(geoip_ipv4_entries, &ipaddr,
                            geoip_ipv4_compare_key_to_entry_);
  if (!found)
    return 0;
  return (int) found->country;
}
/** Given an IPv6 address, return a number representing the country to
* which that address belongs, -1 for "No geoip information available", or
* 0 for the 'unknown country'. The return value will always be less than
* geoip_get_n_countries(). To decode it, call geoip_get_country_name().
*/
STATIC int
geoip_get_country_by_ipv6(const struct in6_addr *addr)
{
geoip_ipv6_entry_t *ent;
if (!geoip_ipv6_entries)
return -1;
ent = smartlist_bsearch(geoip_ipv6_entries, addr,
geoip_ipv6_compare_key_to_entry_);
return ent ? (int)ent->country : 0;
}
/** Given an IP address of either family, return a number representing the
 * country to which it belongs: -1 for "No geoip information available" (or
 * an unsupported address family), 0 for the 'unknown country', otherwise an
 * index below geoip_get_n_countries().  Decode the result with
 * geoip_get_country_name(). */
MOCK_IMPL(int,
geoip_get_country_by_addr,(const tor_addr_t *addr))
{
  switch (tor_addr_family(addr)) {
    case AF_INET:
      return geoip_get_country_by_ipv4(tor_addr_to_ipv4h(addr));
    case AF_INET6:
      return geoip_get_country_by_ipv6(tor_addr_to_in6(addr));
    default:
      return -1;
  }
}
/** Return the number of countries recognized by the GeoIP country list,
 * initializing the list (with its "??" entry) on first use. */
MOCK_IMPL(int,
geoip_get_n_countries,(void))
{
  if (geoip_countries == NULL)
    init_geoip_countries();
  return (int) smartlist_len(geoip_countries);
}
/** Return the two-letter country code associated with the number <b>num</b>,
 * or "??" for an unknown or out-of-range value. */
const char *
geoip_get_country_name(country_t num)
{
  const geoip_country_t *c;
  if (!geoip_countries)
    return "??";
  if (num < 0 || num >= smartlist_len(geoip_countries))
    return "??";
  c = smartlist_get(geoip_countries, num);
  return c->countrycode;
}
/** Return true iff we have loaded a GeoIP database for address family
 * <b>family</b> (AF_INET or AF_INET6). */
MOCK_IMPL(int,
geoip_is_loaded,(sa_family_t family))
{
  tor_assert(family == AF_INET || family == AF_INET6);
  if (geoip_countries == NULL)
    return 0;
  return (family == AF_INET) ? (geoip_ipv4_entries != NULL)
                             : (geoip_ipv6_entries != NULL);
}
/** Return the hex-encoded SHA1 digest of the GeoIP file loaded for
 * <b>family</b>.  The result does not need to be deallocated, but will be
 * overwritten by the next call of hex_str(). */
const char *
geoip_db_digest(sa_family_t family)
{
  const char *digest;
  tor_assert(family == AF_INET || family == AF_INET6);
  digest = (family == AF_INET) ? geoip_digest : geoip6_digest;
  return hex_str(digest, DIGEST_LEN);
}
/** Entry in a map from IP address to the last time we've seen an incoming
 * connection from that IP address. Used by bridges only, to track which
 * countries have them blocked. */
typedef struct clientmap_entry_t {
  HT_ENTRY(clientmap_entry_t) node; /**< Hashtable linkage. */
  tor_addr_t addr; /**< Address of the client. */
  /* Name of pluggable transport used by this client. NULL if no
     pluggable transport was used.  Heap-allocated and owned by this
     entry (freed in clientmap_entry_free()). */
  char *transport_name;
  /** Time when we last saw this IP address, in MINUTES since the epoch.
   *
   * (This will run out of space around 4011 CE. If Tor is still in use around
   * 4000 CE, please remember to add more bits to last_seen_in_minutes.) */
  unsigned int last_seen_in_minutes:30;
  unsigned int action:2; /**< geoip_client_action_t that this entry counts. */
} clientmap_entry_t;
/** Largest allowable value for last_seen_in_minutes. (It's a 30-bit field,
 * so it can hold up to (1u<<30)-1, or 0x3fffffffu.
 */
#define MAX_LAST_SEEN_IN_MINUTES 0X3FFFFFFFu
/** Map from client IP address to last time seen. */
static HT_HEAD(clientmap, clientmap_entry_t) client_history =
     HT_INITIALIZER();
/** Hashtable helper: compute a hash of a clientmap_entry_t from its address
 * and (when present) its transport name. */
static inline unsigned
clientmap_entry_hash(const clientmap_entry_t *ent)
{
  unsigned hashval = (unsigned) tor_addr_hash(&ent->addr);
  if (ent->transport_name != NULL) {
    hashval += (unsigned) siphash24g(ent->transport_name,
                                     strlen(ent->transport_name));
  }
  return hashval;
}
/** Hashtable helper: return nonzero iff two clientmap_entry_t values have
 * the same transport name, action, and address. */
static inline int
clientmap_entries_eq(const clientmap_entry_t *a, const clientmap_entry_t *b)
{
  if (strcmp_opt(a->transport_name, b->transport_name) != 0)
    return 0;
  if (a->action != b->action)
    return 0;
  return tor_addr_compare(&a->addr, &b->addr, CMP_EXACT) == 0;
}
/* Generate prototypes and implementations for the client-history hashtable,
 * keyed by clientmap_entry_hash() / clientmap_entries_eq(). */
HT_PROTOTYPE(clientmap, clientmap_entry_t, node, clientmap_entry_hash,
             clientmap_entries_eq)
HT_GENERATE2(clientmap, clientmap_entry_t, node, clientmap_entry_hash,
             clientmap_entries_eq, 0.6, tor_reallocarray_, tor_free_)
/** Release <b>ent</b> and the transport-name string it owns.  Accepts
 * NULL. */
static void
clientmap_entry_free(clientmap_entry_t *ent)
{
  if (ent) {
    tor_free(ent->transport_name);
    tor_free(ent);
  }
}
/** Clear history of connecting clients used by entry and bridge stats.
 * Only entries recorded with GEOIP_CLIENT_CONNECT are removed; entries
 * recorded with other actions are left in place. */
static void
client_history_clear(void)
{
  clientmap_entry_t **ent, **next, *this;
  for (ent = HT_START(clientmap, &client_history); ent != NULL;
       ent = next) {
    if ((*ent)->action == GEOIP_CLIENT_CONNECT) {
      this = *ent;
      /* HT_NEXT_RMV both advances the iterator and unlinks the current
       * entry, so freeing it afterwards is safe. */
      next = HT_NEXT_RMV(clientmap, &client_history, ent);
      clientmap_entry_free(this);
    } else {
      next = HT_NEXT(clientmap, &client_history, ent);
    }
  }
}
/** Note that we've seen a client connect from the IP <b>addr</b>
 * at time <b>now</b>. Ignored by all but bridges and directories if
 * configured accordingly.
 *
 * <b>transport_name</b> is the pluggable-transport name for this client, or
 * NULL for a plain OR connection; it is only borrowed here and copied when
 * a new entry is created. */
void
geoip_note_client_seen(geoip_client_action_t action,
                       const tor_addr_t *addr,
                       const char *transport_name,
                       time_t now)
{
  const or_options_t *options = get_options();
  clientmap_entry_t lookup, *ent;
  memset(&lookup, 0, sizeof(clientmap_entry_t));
  if (action == GEOIP_CLIENT_CONNECT) {
    /* Only remember statistics as entry guard or as bridge. */
    if (!options->EntryStatistics &&
        (!(options->BridgeRelay && options->BridgeRecordUsageByCountry)))
      return;
  } else {
    /* Only gather directory-request statistics if configured, and
     * forcibly disable them on bridge authorities. */
    if (!options->DirReqStatistics || options->BridgeAuthoritativeDir)
      return;
  }
  log_debug(LD_GENERAL, "Seen client from '%s' with transport '%s'.",
            safe_str_client(fmt_addr((addr))),
            transport_name ? transport_name : "<no transport>");
  /* Build a stack-allocated lookup key; it borrows transport_name (cast
   * away const), but the string is never modified or freed through it. */
  tor_addr_copy(&lookup.addr, addr);
  lookup.action = (int)action;
  lookup.transport_name = (char*) transport_name;
  ent = HT_FIND(clientmap, &client_history, &lookup);
  if (! ent) {
    /* First sighting of this (addr, transport, action) tuple: insert a
     * heap-allocated entry that owns a copy of the transport name. */
    ent = tor_malloc_zero(sizeof(clientmap_entry_t));
    tor_addr_copy(&ent->addr, addr);
    if (transport_name)
      ent->transport_name = tor_strdup(transport_name);
    ent->action = (int)action;
    HT_INSERT(clientmap, &client_history, ent);
  }
  /* last_seen_in_minutes is a 30-bit field; clamp out-of-range or negative
   * times to 0 rather than overflow. */
  if (now / 60 <= (int)MAX_LAST_SEEN_IN_MINUTES && now >= 0)
    ent->last_seen_in_minutes = (unsigned)(now/60);
  else
    ent->last_seen_in_minutes = 0;
  if (action == GEOIP_CLIENT_NETWORKSTATUS) {
    int country_idx = geoip_get_country_by_addr(addr);
    if (country_idx < 0)
      country_idx = 0; /* Unresolved requests are counted at index 0. */
    if (country_idx >= 0 && country_idx < smartlist_len(geoip_countries)) {
      geoip_country_t *country = smartlist_get(geoip_countries, country_idx);
      ++country->n_v3_ns_requests;
    }
  }
}
/** HT_FOREACH helper: free <b>ent</b> and return 1 (remove from table) if
 * it was last seen before *<b>_cutoff</b> (a time_t in seconds); otherwise
 * return 0 (keep it). */
static int
remove_old_client_helper_(struct clientmap_entry_t *ent, void *_cutoff)
{
  const time_t cutoff_in_minutes = *(time_t*)_cutoff / 60;
  if (ent->last_seen_in_minutes >= cutoff_in_minutes)
    return 0;
  clientmap_entry_free(ent);
  return 1;
}
/** Forget about every client whose last connection was before
 * <b>cutoff</b>. */
void
geoip_remove_old_clients(time_t cutoff)
{
  clientmap_HT_FOREACH_FN(&client_history, remove_old_client_helper_,
                          &cutoff);
}
/** How many responses are we giving to clients requesting v3 network
 * statuses?  Indexed by geoip_ns_response_t; zero-initialized because it
 * has static storage duration. */
static uint32_t ns_v3_responses[GEOIP_NS_RESPONSE_NUM];
/** Note that we've given a client a response of kind <b>response</b> to a
 * request for a v3 network status.  Does nothing unless DirReqStatistics
 * is enabled. */
void
geoip_note_ns_response(geoip_ns_response_t response)
{
  if (!get_options()->DirReqStatistics)
    return;
  /* The lazy "arrays_initialized" memset that used to live here was dead
   * code: objects with static storage duration (like ns_v3_responses) are
   * already zero-initialized by the C standard. */
  tor_assert(response < GEOIP_NS_RESPONSE_NUM);
  ns_v3_responses[response]++;
}
/** Do not mention any country from which fewer than this number of IPs have
 * connected. This conceivably avoids reporting information that could
 * deanonymize users, though analysis is lacking. */
#define MIN_IPS_TO_NOTE_COUNTRY 1
/** Do not report any geoip data at all if we have fewer than this number of
 * IPs to report about. */
#define MIN_IPS_TO_NOTE_ANYTHING 1
/** When reporting geoip data about countries, round up to the nearest
 * multiple of this value. */
#define IP_GRANULARITY 8
/** Helper type: used to sort per-country totals by value.  Totals are
 * rounded to IP_GRANULARITY multiples before sorting, so the sort order
 * itself cannot leak finer-grained counts. */
typedef struct c_hist_t {
  char country[3]; /**< Two-letter country code. */
  unsigned total; /**< Total IP addresses seen in this country. */
} c_hist_t;
/** Sorting helper for smartlist_sort(): order two c_hist_t records in
 * descending order of total, breaking ties by country code. */
static int
c_hist_compare_(const void **_a, const void **_b)
{
  const c_hist_t *first = *_a, *second = *_b;
  if (first->total != second->total)
    return (first->total > second->total) ? -1 : 1;
  return strcmp(first->country, second->country);
}
/** When there are incomplete directory requests at the end of a 24-hour
 * period, consider those requests running for longer than this timeout as
 * failed, the others as still running. */
#define DIRREQ_TIMEOUT (10*60)
/** Entry in a map from either chan->global_identifier for direct requests
 * or a unique circuit identifier for tunneled requests to request time,
 * response size, and completion time of a network status request. Used to
 * measure download times of requests to derive average client
 * bandwidths. */
typedef struct dirreq_map_entry_t {
  HT_ENTRY(dirreq_map_entry_t) node; /**< Hashtable linkage. */
  /** Unique identifier for this network status request; this is either the
   * chan->global_identifier of the dir channel (direct request) or a new
   * locally unique identifier of a circuit (tunneled request). This ID is
   * only unique among other direct or tunneled requests, respectively. */
  uint64_t dirreq_id;
  unsigned int state:3; /**< State of this directory request. */
  unsigned int type:1; /**< Is this a direct or a tunneled request? */
  unsigned int completed:1; /**< Is this request complete? */
  /** When did we receive the request and started sending the response? */
  struct timeval request_time;
  size_t response_size; /**< What is the size of the response in bytes? */
  struct timeval completion_time; /**< When did the request succeed? */
} dirreq_map_entry_t;
/** Map of all directory requests asking for v2 or v3 network statuses in
 * the current geoip-stats interval. Values are
 * of type *<b>dirreq_map_entry_t</b>. */
static HT_HEAD(dirreqmap, dirreq_map_entry_t) dirreq_map =
     HT_INITIALIZER();
/** Hashtable helper: return nonzero iff two dirreq_map_entry_t records
 * have the same identifier and request type. */
static int
dirreq_map_ent_eq(const dirreq_map_entry_t *a,
                  const dirreq_map_entry_t *b)
{
  if (a->type != b->type)
    return 0;
  return a->dirreq_id == b->dirreq_id;
}
/** Hashtable helper: compute a hash of a dirreq_map_entry_t by mixing the
 * request type into bit 20 of the (truncated) request identifier. */
static unsigned
dirreq_map_ent_hash(const dirreq_map_entry_t *entry)
{
  unsigned hashval = (unsigned) entry->dirreq_id;
  return hashval + (entry->type << 20);
}
/* Generate prototypes and implementations for the directory-request
 * hashtable, keyed by dirreq_map_ent_hash() / dirreq_map_ent_eq(). */
HT_PROTOTYPE(dirreqmap, dirreq_map_entry_t, node, dirreq_map_ent_hash,
             dirreq_map_ent_eq)
HT_GENERATE2(dirreqmap, dirreq_map_entry_t, node, dirreq_map_ent_hash,
             dirreq_map_ent_eq, 0.6, tor_reallocarray_, tor_free_)
/** Helper: Insert <b>entry</b> into the directory-request map under the
 * key (<b>type</b>, <b>dirreq_id</b>).  If an entry for that key already
 * existed, log a BUG warning. */
static void
dirreq_map_put_(dirreq_map_entry_t *entry, dirreq_type_t type,
                uint64_t dirreq_id)
{
  dirreq_map_entry_t *previous;
  tor_assert(entry->type == type);
  tor_assert(entry->dirreq_id == dirreq_id);
  /* XXXX we could switch this to HT_INSERT some time, since it seems that
   * this bug doesn't happen. But since this function doesn't seem to be
   * critical-path, it's sane to leave it alone. */
  previous = HT_REPLACE(dirreqmap, &dirreq_map, entry);
  if (previous != NULL && previous != entry) {
    log_warn(LD_BUG, "Error when putting directory request into local "
             "map. There was already an entry for the same identifier.");
    return;
  }
}
/** Helper: Return the entry in the directory-request map with key
 * (<b>type</b>, <b>dirreq_id</b>), or NULL if there is none. */
static dirreq_map_entry_t *
dirreq_map_get_(dirreq_type_t type, uint64_t dirreq_id)
{
  dirreq_map_entry_t key;
  /* Only the fields used by the hash and equality helpers are set. */
  key.type = type;
  key.dirreq_id = dirreq_id;
  return HT_FIND(dirreqmap, &dirreq_map, &key);
}
/** Note that a directory request — direct or tunneled, per <b>type</b> —
 * for a v3 network status with unique ID <b>dirreq_id</b> and response size
 * <b>response_size</b> has started.  Does nothing unless DirReqStatistics
 * is enabled. */
void
geoip_start_dirreq(uint64_t dirreq_id, size_t response_size,
                   dirreq_type_t type)
{
  dirreq_map_entry_t *ent;
  if (!get_options()->DirReqStatistics)
    return;
  ent = tor_malloc_zero(sizeof(dirreq_map_entry_t));
  ent->dirreq_id = dirreq_id;
  ent->type = type;
  ent->response_size = response_size;
  tor_gettimeofday(&ent->request_time);
  dirreq_map_put_(ent, type, dirreq_id);
}
/** Change the state of the either direct or tunneled (see <b>type</b>)
 * directory request with <b>dirreq_id</b> to <b>new_state</b> and
 * possibly mark it as completed. If no entry can be found for the given
 * key parts (e.g., if this is a directory request that we are not
 * measuring, or one that was started in the previous measurement period),
 * or if the state cannot be advanced to <b>new_state</b>, do nothing. */
void
geoip_change_dirreq_state(uint64_t dirreq_id, dirreq_type_t type,
                          dirreq_state_t new_state)
{
  dirreq_map_entry_t *ent;
  if (!get_options()->DirReqStatistics)
    return;
  ent = dirreq_map_get_(type, dirreq_id);
  if (!ent)
    return;
  if (new_state == DIRREQ_IS_FOR_NETWORK_STATUS)
    return;
  /* States advance one step at a time; requests trying to skip a step are
   * left unchanged. */
  if (new_state - 1 != ent->state)
    return;
  ent->state = new_state;
  /* The final state differs per request type; reaching it records the
   * completion time used for download-time statistics. */
  if ((type == DIRREQ_DIRECT &&
       new_state == DIRREQ_FLUSHING_DIR_CONN_FINISHED) ||
      (type == DIRREQ_TUNNELED &&
       new_state == DIRREQ_CHANNEL_BUFFER_FLUSHED)) {
    tor_gettimeofday(&ent->completion_time);
    ent->completed = 1;
  }
}
/** Return the bridge-ip-transports string that should be inserted in
 * our extra-info descriptor. Return NULL if the bridge-ip-transports
 * line should be empty.  The returned string is newly allocated. */
char *
geoip_get_transport_history(void)
{
  unsigned granularity = IP_GRANULARITY;
  /* String hash table (name of transport) -> (number of users).  Counts are
   * stored directly in the value pointer (as uintptr_t), not allocated. */
  strmap_t *transport_counts = strmap_new();
  /* Smartlist that contains copies of the names of the transports
     that have been used. */
  smartlist_t *transports_used = smartlist_new();
  /* Special string to signify that no transport was used for this
     connection. Pluggable transport names can't have symbols in their
     names, so this string will never collide with a real transport. */
  static const char* no_transport_str = "<OR>";
  clientmap_entry_t **ent;
  smartlist_t *string_chunks = smartlist_new();
  char *the_string = NULL;
  /* If we haven't seen any clients yet, return NULL. */
  if (HT_EMPTY(&client_history))
    goto done;
  /* We do the following steps to form the transport history string:
   *  a) Foreach client that uses a pluggable transport, we increase the
   *  times that transport was used by one. If the client did not use
   *  a transport, we increase the number of times someone connected
   *  without obfuscation.
   *  b) Foreach transport we observed, we write its transport history
   *  string and push it to string_chunks. So, for example, if we've
   *  seen 665 obfs2 clients, we write "obfs2=665".
   *  c) We concatenate string_chunks to form the final string.
   */
  log_debug(LD_GENERAL,"Starting iteration for transport history. %d clients.",
            HT_SIZE(&client_history));
  /* Loop through all clients. */
  HT_FOREACH(ent, clientmap, &client_history) {
    uintptr_t val;
    void *ptr;
    const char *transport_name = (*ent)->transport_name;
    if (!transport_name)
      transport_name = no_transport_str;
    /* Increase the count for this transport name. */
    ptr = strmap_get(transport_counts, transport_name);
    val = (uintptr_t)ptr;
    val++;
    ptr = (void*)val;
    strmap_set(transport_counts, transport_name, ptr);
    /* If it's the first time we see this transport, note it. */
    if (val == 1)
      smartlist_add_strdup(transports_used, transport_name);
    log_debug(LD_GENERAL, "Client from '%s' with transport '%s'. "
              "I've now seen %d clients.",
              safe_str_client(fmt_addr(&(*ent)->addr)),
              transport_name ? transport_name : "<no transport>",
              (int)val);
  }
  /* Sort the transport names (helps with unit testing). */
  smartlist_sort_strings(transports_used);
  /* Loop through all seen transports. */
  SMARTLIST_FOREACH_BEGIN(transports_used, const char *, transport_name) {
    void *transport_count_ptr = strmap_get(transport_counts, transport_name);
    uintptr_t transport_count = (uintptr_t) transport_count_ptr;
    log_debug(LD_GENERAL, "We got "U64_FORMAT" clients with transport '%s'.",
              U64_PRINTF_ARG((uint64_t)transport_count), transport_name);
    /* Counts are rounded up to a multiple of <b>granularity</b> before
     * being published, to blur exact client counts. */
    smartlist_add_asprintf(string_chunks, "%s="U64_FORMAT,
                           transport_name,
                           U64_PRINTF_ARG(round_uint64_to_next_multiple_of(
                               (uint64_t)transport_count,
                               granularity)));
  } SMARTLIST_FOREACH_END(transport_name);
  the_string = smartlist_join_strings(string_chunks, ",", 0, NULL);
  log_debug(LD_GENERAL, "Final bridge-ip-transports string: '%s'", the_string);
 done:
  /* The counts map holds no allocated values, so free with NULL. */
  strmap_free(transport_counts, NULL);
  SMARTLIST_FOREACH(transports_used, char *, s, tor_free(s));
  smartlist_free(transports_used);
  SMARTLIST_FOREACH(string_chunks, char *, s, tor_free(s));
  smartlist_free(string_chunks);
  return the_string;
}
/** Return a newly allocated comma-separated string containing statistics
 * on network status downloads. The string contains the number of completed
 * requests, timeouts, and still running requests as well as the download
 * times by deciles and quartiles. Return NULL if we have not observed
 * requests for long enough.
 *
 * Side effect: all dirreq_map entries of the given <b>type</b> are removed
 * from the map (this tallies and resets the current interval). */
static char *
geoip_get_dirreq_history(dirreq_type_t type)
{
  char *result = NULL;
  smartlist_t *dirreq_completed = NULL;
  uint32_t complete = 0, timeouts = 0, running = 0;
  int bufsize = 1024, written;
  dirreq_map_entry_t **ptr, **next;
  struct timeval now;
  tor_gettimeofday(&now);
  dirreq_completed = smartlist_new();
  /* Walk the map, removing every entry of our type: completed entries are
   * collected for the download-time statistics below, incomplete ones are
   * classified as timeout or still-running and freed immediately. */
  for (ptr = HT_START(dirreqmap, &dirreq_map); ptr; ptr = next) {
    dirreq_map_entry_t *ent = *ptr;
    if (ent->type != type) {
      next = HT_NEXT(dirreqmap, &dirreq_map, ptr);
      continue;
    } else {
      if (ent->completed) {
        smartlist_add(dirreq_completed, ent);
        complete++;
        next = HT_NEXT_RMV(dirreqmap, &dirreq_map, ptr);
      } else {
        if (tv_mdiff(&ent->request_time, &now) / 1000 > DIRREQ_TIMEOUT)
          timeouts++;
        else
          running++;
        next = HT_NEXT_RMV(dirreqmap, &dirreq_map, ptr);
        tor_free(ent);
      }
    }
  }
  /* Round the published counts to blur exact figures. */
#define DIR_REQ_GRANULARITY 4
  complete = round_uint32_to_next_multiple_of(complete,
                                              DIR_REQ_GRANULARITY);
  timeouts = round_uint32_to_next_multiple_of(timeouts,
                                              DIR_REQ_GRANULARITY);
  running = round_uint32_to_next_multiple_of(running,
                                             DIR_REQ_GRANULARITY);
  result = tor_malloc_zero(bufsize);
  written = tor_snprintf(result, bufsize, "complete=%u,timeout=%u,"
                         "running=%u", complete, timeouts, running);
  if (written < 0) {
    tor_free(result);
    goto done;
  }
#define MIN_DIR_REQ_RESPONSES 16
  if (complete >= MIN_DIR_REQ_RESPONSES) {
    uint32_t *dltimes;
    /* We may have rounded 'completed' up. Here we want to use the
     * real value. */
    complete = smartlist_len(dirreq_completed);
    dltimes = tor_calloc(complete, sizeof(uint32_t));
    SMARTLIST_FOREACH_BEGIN(dirreq_completed, dirreq_map_entry_t *, ent) {
      uint32_t bytes_per_second;
      uint32_t time_diff = (uint32_t) tv_mdiff(&ent->request_time,
                                               &ent->completion_time);
      if (time_diff == 0)
        time_diff = 1; /* Avoid DIV/0; "instant" answers are impossible
                        * by law of nature or something, but a millisecond
                        * is a bit greater than "instantly" */
      bytes_per_second = (uint32_t)(1000 * ent->response_size / time_diff);
      dltimes[ent_sl_idx] = bytes_per_second;
    } SMARTLIST_FOREACH_END(ent);
    median_uint32(dltimes, complete); /* sorts as a side effect. */
    /* Append deciles and quartiles of the sorted download rates; 'written'
     * is reused here and afterwards only checked for error. */
    written = tor_snprintf(result + written, bufsize - written,
                           ",min=%u,d1=%u,d2=%u,q1=%u,d3=%u,d4=%u,md=%u,"
                           "d6=%u,d7=%u,q3=%u,d8=%u,d9=%u,max=%u",
                           dltimes[0],
                           dltimes[1*complete/10-1],
                           dltimes[2*complete/10-1],
                           dltimes[1*complete/4-1],
                           dltimes[3*complete/10-1],
                           dltimes[4*complete/10-1],
                           dltimes[5*complete/10-1],
                           dltimes[6*complete/10-1],
                           dltimes[7*complete/10-1],
                           dltimes[3*complete/4-1],
                           dltimes[8*complete/10-1],
                           dltimes[9*complete/10-1],
                           dltimes[complete-1]);
    if (written<0)
      tor_free(result);
    tor_free(dltimes);
  }
 done:
  SMARTLIST_FOREACH(dirreq_completed, dirreq_map_entry_t *, ent,
                    tor_free(ent));
  smartlist_free(dirreq_completed);
  return result;
}
/** Store a newly allocated comma-separated string in
 * *<b>country_str</b> containing entries for all the countries from
 * which we've seen enough clients connect as a bridge, directory
 * server, or entry guard. The entry format is cc=num where num is the
 * number of IPs we've seen connecting from that country, and cc is a
 * lowercased country code. *<b>country_str</b> is set to NULL if
 * we're not ready to export per country data yet.
 *
 * Store a newly allocated comma-separated string in *<b>ipver_str</b>
 * containing entries for clients connecting over IPv4 and IPv6. The
 * format is family=num where num is the number of IPs we've seen
 * connecting over that protocol family, and family is 'v4' or 'v6'.
 * Either output pointer may be NULL to skip that output.
 *
 * Return 0 on success and -1 if we're missing geoip data. */
int
geoip_get_client_history(geoip_client_action_t action,
                         char **country_str, char **ipver_str)
{
  unsigned granularity = IP_GRANULARITY;
  smartlist_t *entries = NULL;
  int n_countries = geoip_get_n_countries();
  int i;
  clientmap_entry_t **cm_ent;
  unsigned *counts = NULL;
  unsigned total = 0;
  unsigned ipv4_count = 0, ipv6_count = 0;
  if (!geoip_is_loaded(AF_INET) && !geoip_is_loaded(AF_INET6))
    return -1;
  /* Tally clients per country index and per address family, restricted to
   * entries recorded with the requested action. */
  counts = tor_calloc(n_countries, sizeof(unsigned));
  HT_FOREACH(cm_ent, clientmap, &client_history) {
    int country;
    if ((*cm_ent)->action != (int)action)
      continue;
    country = geoip_get_country_by_addr(&(*cm_ent)->addr);
    if (country < 0)
      country = 0; /* Unresolved requests are counted at index 0. */
    tor_assert(0 <= country && country < n_countries);
    ++counts[country];
    ++total;
    switch (tor_addr_family(&(*cm_ent)->addr)) {
      case AF_INET:
        ipv4_count++;
        break;
      case AF_INET6:
        ipv6_count++;
        break;
    }
  }
  if (ipver_str) {
    smartlist_t *chunks = smartlist_new();
    smartlist_add_asprintf(chunks, "v4=%u",
                           round_to_next_multiple_of(ipv4_count, granularity));
    smartlist_add_asprintf(chunks, "v6=%u",
                           round_to_next_multiple_of(ipv6_count, granularity));
    *ipver_str = smartlist_join_strings(chunks, ",", 0, NULL);
    SMARTLIST_FOREACH(chunks, char *, c, tor_free(c));
    smartlist_free(chunks);
  }
  /* Don't record per country data if we haven't seen enough IPs. */
  if (total < MIN_IPS_TO_NOTE_ANYTHING) {
    tor_free(counts);
    if (country_str)
      *country_str = NULL;
    return 0;
  }
  /* Make a list of c_hist_t */
  entries = smartlist_new();
  for (i = 0; i < n_countries; ++i) {
    unsigned c = counts[i];
    const char *countrycode;
    c_hist_t *ent;
    /* Only report a country if it has a minimum number of IPs. */
    if (c >= MIN_IPS_TO_NOTE_COUNTRY) {
      c = round_to_next_multiple_of(c, granularity);
      countrycode = geoip_get_country_name(i);
      ent = tor_malloc(sizeof(c_hist_t));
      strlcpy(ent->country, countrycode, sizeof(ent->country));
      ent->total = c;
      smartlist_add(entries, ent);
    }
  }
  /* Sort entries. Note that we must do this _AFTER_ rounding, or else
   * the sort order could leak info. */
  smartlist_sort(entries, c_hist_compare_);
  if (country_str) {
    smartlist_t *chunks = smartlist_new();
    SMARTLIST_FOREACH(entries, c_hist_t *, ch, {
        smartlist_add_asprintf(chunks, "%s=%u", ch->country, ch->total);
      });
    *country_str = smartlist_join_strings(chunks, ",", 0, NULL);
    SMARTLIST_FOREACH(chunks, char *, c, tor_free(c));
    smartlist_free(chunks);
  }
  SMARTLIST_FOREACH(entries, c_hist_t *, c, tor_free(c));
  smartlist_free(entries);
  tor_free(counts);
  return 0;
}
/** Return a newly allocated string holding the per-country request history
 * for v3 network statuses in a format suitable for an extra-info document,
 * or NULL on failure. */
char *
geoip_get_request_history(void)
{
  smartlist_t *entries, *strings;
  char *result;
  unsigned granularity = IP_GRANULARITY;
  /* No country list loaded: nothing we can report. */
  if (!geoip_countries)
    return NULL;
  /* Collect one rounded-up entry per country with a nonzero count. */
  entries = smartlist_new();
  SMARTLIST_FOREACH_BEGIN(geoip_countries, geoip_country_t *, c) {
    uint32_t tot = 0;
    c_hist_t *ent;
    tot = c->n_v3_ns_requests;
    if (!tot)
      continue;
    ent = tor_malloc_zero(sizeof(c_hist_t));
    strlcpy(ent->country, c->countrycode, sizeof(ent->country));
    ent->total = round_to_next_multiple_of(tot, granularity);
    smartlist_add(entries, ent);
  } SMARTLIST_FOREACH_END(c);
  /* Sort after rounding, so that ordering cannot leak exact counts. */
  smartlist_sort(entries, c_hist_compare_);
  /* Format the sorted entries as "CC=N,CC=N,...". */
  strings = smartlist_new();
  SMARTLIST_FOREACH(entries, c_hist_t *, ent, {
      smartlist_add_asprintf(strings, "%s=%u", ent->country, ent->total);
    });
  result = smartlist_join_strings(strings, ",", 0, NULL);
  SMARTLIST_FOREACH(strings, char *, cp, tor_free(cp));
  SMARTLIST_FOREACH(entries, c_hist_t *, ent, tor_free(ent));
  smartlist_free(strings);
  smartlist_free(entries);
  return result;
}
/** Start time of directory request stats or 0 if we're not collecting
 * directory request statistics. */
static time_t start_of_dirreq_stats_interval;

/** Initialize directory request stats: begin a new measurement interval
 * at <b>now</b>. */
void
geoip_dirreq_stats_init(time_t now)
{
  start_of_dirreq_stats_interval = now;
}
/** Reset counters for dirreq stats: zero the per-country v3 request
 * counters, drop networkstatus entries from the client history, clear
 * the response counters and the dirreq map, and restart the measurement
 * interval at <b>now</b>. */
void
geoip_reset_dirreq_stats(time_t now)
{
  SMARTLIST_FOREACH(geoip_countries, geoip_country_t *, c, {
      c->n_v3_ns_requests = 0;
    });
  {
    /* Remove only the GEOIP_CLIENT_NETWORKSTATUS entries; other client
     * history (e.g. direct bridge connects) is left untouched. */
    clientmap_entry_t **ent, **next, *this;
    for (ent = HT_START(clientmap, &client_history); ent != NULL;
         ent = next) {
      if ((*ent)->action == GEOIP_CLIENT_NETWORKSTATUS) {
        this = *ent;
        next = HT_NEXT_RMV(clientmap, &client_history, ent);
        clientmap_entry_free(this);
      } else {
        next = HT_NEXT(clientmap, &client_history, ent);
      }
    }
  }
  memset(ns_v3_responses, 0, sizeof(ns_v3_responses));
  {
    /* Empty the directory-request map entirely. */
    dirreq_map_entry_t **ent, **next, *this;
    for (ent = HT_START(dirreqmap, &dirreq_map); ent != NULL; ent = next) {
      this = *ent;
      next = HT_NEXT_RMV(dirreqmap, &dirreq_map, ent);
      tor_free(this);
    }
  }
  start_of_dirreq_stats_interval = now;
}
/** Stop collecting directory request stats in a way that we can re-start
 * doing so in geoip_dirreq_stats_init().  Resetting with a 0 timestamp
 * marks the stats as uninitialized. */
void
geoip_dirreq_stats_term(void)
{
  geoip_reset_dirreq_stats(0);
}
/** Return a newly allocated string containing the dirreq statistics
 * until <b>now</b>, or NULL if we're not collecting dirreq stats. Caller
 * must ensure start_of_dirreq_stats_interval is in the past. */
char *
geoip_format_dirreq_stats(time_t now)
{
  char t[ISO_TIME_LEN+1];
  int i;
  char *v3_ips_string = NULL, *v3_reqs_string = NULL,
       *v3_direct_dl_string = NULL, *v3_tunneled_dl_string = NULL;
  char *result = NULL;
  if (!start_of_dirreq_stats_interval)
    return NULL; /* Not initialized. */
  tor_assert(now >= start_of_dirreq_stats_interval);
  format_iso_time(t, now);
  geoip_get_client_history(GEOIP_CLIENT_NETWORKSTATUS, &v3_ips_string, NULL);
  v3_reqs_string = geoip_get_request_history();
  /* Round the response counters in place so that the exported numbers
   * never expose exact counts. */
#define RESPONSE_GRANULARITY 8
  for (i = 0; i < GEOIP_NS_RESPONSE_NUM; i++) {
    ns_v3_responses[i] = round_uint32_to_next_multiple_of(
                             ns_v3_responses[i], RESPONSE_GRANULARITY);
  }
#undef RESPONSE_GRANULARITY
  v3_direct_dl_string = geoip_get_dirreq_history(DIRREQ_DIRECT);
  v3_tunneled_dl_string = geoip_get_dirreq_history(DIRREQ_TUNNELED);
  /* Put everything together into a single string. */
  tor_asprintf(&result, "dirreq-stats-end %s (%d s)\n"
               "dirreq-v3-ips %s\n"
               "dirreq-v3-reqs %s\n"
               "dirreq-v3-resp ok=%u,not-enough-sigs=%u,unavailable=%u,"
               "not-found=%u,not-modified=%u,busy=%u\n"
               "dirreq-v3-direct-dl %s\n"
               "dirreq-v3-tunneled-dl %s\n",
               t,
               (unsigned) (now - start_of_dirreq_stats_interval),
               v3_ips_string ? v3_ips_string : "",
               v3_reqs_string ? v3_reqs_string : "",
               ns_v3_responses[GEOIP_SUCCESS],
               ns_v3_responses[GEOIP_REJECT_NOT_ENOUGH_SIGS],
               ns_v3_responses[GEOIP_REJECT_UNAVAILABLE],
               ns_v3_responses[GEOIP_REJECT_NOT_FOUND],
               ns_v3_responses[GEOIP_REJECT_NOT_MODIFIED],
               ns_v3_responses[GEOIP_REJECT_BUSY],
               v3_direct_dl_string ? v3_direct_dl_string : "",
               v3_tunneled_dl_string ? v3_tunneled_dl_string : "");
  /* Free partial strings. */
  tor_free(v3_ips_string);
  tor_free(v3_reqs_string);
  tor_free(v3_direct_dl_string);
  tor_free(v3_tunneled_dl_string);
  return result;
}
/** If 24 hours have passed since the beginning of the current dirreq
 * stats period, write dirreq stats to $DATADIR/stats/dirreq-stats
 * (possibly overwriting an existing file) and reset counters. Return
 * when we would next want to write dirreq stats or 0 if we never want to
 * write. */
time_t
geoip_dirreq_stats_write(time_t now)
{
  char *str = NULL;
  if (!start_of_dirreq_stats_interval)
    return 0; /* Not initialized. */
  if (start_of_dirreq_stats_interval + WRITE_STATS_INTERVAL > now)
    goto done; /* Not ready to write. */
  /* Discard all items in the client history that are too old. */
  geoip_remove_old_clients(start_of_dirreq_stats_interval);
  /* Generate history string .*/
  str = geoip_format_dirreq_stats(now);
  if (! str)
    goto done;
  /* Write dirreq-stats string to disk.  (A zero return from
   * check_or_create_data_subdir() is treated as success here.) */
  if (!check_or_create_data_subdir("stats")) {
    write_to_data_subdir("stats", "dirreq-stats", str, "dirreq statistics");
    /* Reset measurement interval start. */
    geoip_reset_dirreq_stats(now);
  }
 done:
  tor_free(str);
  return start_of_dirreq_stats_interval + WRITE_STATS_INTERVAL;
}
/** Start time of bridge stats or 0 if we're not collecting bridge
 * statistics. */
static time_t start_of_bridge_stats_interval;

/** Initialize bridge stats: begin a new measurement interval at
 * <b>now</b>. */
void
geoip_bridge_stats_init(time_t now)
{
  start_of_bridge_stats_interval = now;
}
/** Stop collecting bridge stats in a way that we can re-start doing so in
 * geoip_bridge_stats_init().  Clears the collected client history and
 * marks the interval as uninitialized. */
void
geoip_bridge_stats_term(void)
{
  client_history_clear();
  start_of_bridge_stats_interval = 0;
}
/** Validate a bridge statistics string as it would be written to a
 * current extra-info descriptor. Return 1 if the string is valid and
 * recent enough, or 0 otherwise. */
static int
validate_bridge_stats(const char *stats_str, time_t now)
{
  char stats_end_str[ISO_TIME_LEN+1], stats_start_str[ISO_TIME_LEN+1],
       *eos;

  const char *BRIDGE_STATS_END = "bridge-stats-end ";
  const char *BRIDGE_IPS = "bridge-ips ";
  const char *BRIDGE_IPS_EMPTY_LINE = "bridge-ips\n";
  const char *BRIDGE_TRANSPORTS = "bridge-ip-transports ";
  const char *BRIDGE_TRANSPORTS_EMPTY_LINE = "bridge-ip-transports\n";
  const char *tmp;
  time_t stats_end_time;
  int seconds;
  tor_assert(stats_str);

  /* Parse timestamp and number of seconds from
     "bridge-stats-end YYYY-MM-DD HH:MM:SS (N s)" */
  tmp = find_str_at_start_of_line(stats_str, BRIDGE_STATS_END);
  if (!tmp)
    return 0;
  tmp += strlen(BRIDGE_STATS_END);
  if (strlen(tmp) < ISO_TIME_LEN + 6)
    return 0;
  strlcpy(stats_end_str, tmp, sizeof(stats_end_str));
  if (parse_iso_time(stats_end_str, &stats_end_time) < 0)
    return 0;
  /* Reject stats that ended more than 25 hours ago, or that claim to end
   * more than one hour in the future. */
  if (stats_end_time < now - (25*60*60) ||
      stats_end_time > now + (1*60*60))
    return 0;
  /* The measurement interval must cover at least 23 hours. */
  seconds = (int)strtol(tmp + ISO_TIME_LEN + 2, &eos, 10);
  if (!eos || seconds < 23*60*60)
    return 0;
  /* NOTE(review): stats_start_str is computed here but never used below. */
  format_iso_time(stats_start_str, stats_end_time - seconds);
  /* Parse: "bridge-ips CC=N,CC=N,..." */
  tmp = find_str_at_start_of_line(stats_str, BRIDGE_IPS);
  if (!tmp) {
    /* Look if there is an empty "bridge-ips" line */
    tmp = find_str_at_start_of_line(stats_str, BRIDGE_IPS_EMPTY_LINE);
    if (!tmp)
      return 0;
  }
  /* Parse: "bridge-ip-transports PT=N,PT=N,..." */
  tmp = find_str_at_start_of_line(stats_str, BRIDGE_TRANSPORTS);
  if (!tmp) {
    /* Look if there is an empty "bridge-ip-transports" line */
    tmp = find_str_at_start_of_line(stats_str, BRIDGE_TRANSPORTS_EMPTY_LINE);
    if (!tmp)
      return 0;
  }
  return 1;
}
/** Most recent bridge statistics formatted to be written to extra-info
 * descriptors.  Owned by this module; see load_bridge_stats() and
 * geoip_bridge_stats_write(). */
static char *bridge_stats_extrainfo = NULL;

/** Return a newly allocated string holding our bridge usage stats by country
 * in a format suitable for inclusion in an extrainfo document. Return NULL on
 * failure. */
char *
geoip_format_bridge_stats(time_t now)
{
  char *out = NULL;
  char *country_data = NULL, *ipver_data = NULL, *transport_data = NULL;
  long duration = now - start_of_bridge_stats_interval;
  char written[ISO_TIME_LEN+1];
  /* Refuse to report an interval with negative length. */
  if (duration < 0)
    return NULL;
  if (!start_of_bridge_stats_interval)
    return NULL; /* Not initialized. */

  format_iso_time(written, now);
  geoip_get_client_history(GEOIP_CLIENT_CONNECT, &country_data, &ipver_data);
  transport_data = geoip_get_transport_history();

  tor_asprintf(&out,
               "bridge-stats-end %s (%ld s)\n"
               "bridge-ips %s\n"
               "bridge-ip-versions %s\n"
               "bridge-ip-transports %s\n",
               written, duration,
               country_data ? country_data : "",
               ipver_data ? ipver_data : "",
               transport_data ? transport_data : "");
  tor_free(country_data);
  tor_free(ipver_data);
  tor_free(transport_data);
  return out;
}
/** Return a newly allocated string holding our bridge usage stats by country
 * in a format suitable for the answer to a controller request. Return NULL on
 * failure. */
static char *
format_bridge_stats_controller(time_t now)
{
  char *out = NULL, *country_data = NULL, *ipver_data = NULL;
  char started[ISO_TIME_LEN+1];
  (void) now; /* Unused: the reply reports the interval start time. */

  format_iso_time(started, start_of_bridge_stats_interval);
  geoip_get_client_history(GEOIP_CLIENT_CONNECT, &country_data, &ipver_data);

  tor_asprintf(&out,
               "TimeStarted=\"%s\" CountrySummary=%s IPVersions=%s",
               started,
               country_data ? country_data : "",
               ipver_data ? ipver_data : "");
  tor_free(country_data);
  tor_free(ipver_data);
  return out;
}
/** Build a newly allocated heartbeat-log sentence reporting how many
 * unique client addresses connected directly to this bridge during the
 * last six hours.  Return NULL if bridge stats collection is inactive. */
char *
format_client_stats_heartbeat(time_t now)
{
  const int n_hours = 6;
  const time_t window_start = now - n_hours * 3600;
  /* last_seen_in_minutes is stored in minutes, so convert the cutoff too. */
  const unsigned cutoff = (unsigned)(window_start / 60);
  clientmap_entry_t **cm_ent;
  int unique_clients = 0;
  char *summary = NULL;

  if (!start_of_bridge_stats_interval)
    return NULL; /* Stats collection has not been started. */

  /* Count distinct, directly-connecting clients seen recently enough. */
  HT_FOREACH(cm_ent, clientmap, &client_history) {
    if ((*cm_ent)->action == GEOIP_CLIENT_CONNECT &&
        (*cm_ent)->last_seen_in_minutes >= cutoff)
      ++unique_clients;
  }

  tor_asprintf(&summary, "Heartbeat: "
               "In the last %d hours, I have seen %d unique clients.",
               n_hours,
               unique_clients);
  return summary;
}
/** Write bridge statistics to $DATADIR/stats/bridge-stats and return
 * when we should next try to write statistics. */
time_t
geoip_bridge_stats_write(time_t now)
{
  char *val = NULL;
  /* Check if 24 hours have passed since starting measurements. */
  if (now < start_of_bridge_stats_interval + WRITE_STATS_INTERVAL)
    return start_of_bridge_stats_interval + WRITE_STATS_INTERVAL;
  /* Discard all items in the client history that are too old. */
  geoip_remove_old_clients(start_of_bridge_stats_interval);
  /* Generate formatted string */
  val = geoip_format_bridge_stats(now);
  if (val == NULL)
    goto done;
  /* Update the stored value.  Unlike the other *_stats_write functions,
   * ownership of <b>val</b> is transferred to the module-level cache
   * bridge_stats_extrainfo rather than being freed here. */
  tor_free(bridge_stats_extrainfo);
  bridge_stats_extrainfo = val;
  start_of_bridge_stats_interval = now;
  /* Write it to disk. */
  if (!check_or_create_data_subdir("stats")) {
    write_to_data_subdir("stats", "bridge-stats",
                         bridge_stats_extrainfo, "bridge statistics");
    /* Tell the controller, "hey, there are clients!" */
    {
      char *controller_str = format_bridge_stats_controller(now);
      if (controller_str)
        control_event_clients_seen(controller_str);
      tor_free(controller_str);
    }
  }
 done:
  return start_of_bridge_stats_interval + WRITE_STATS_INTERVAL;
}
/** Try to load the most recent bridge statistics from disk, unless we
 * have finished a measurement interval lately, and check whether they
 * are still recent enough. */
static void
load_bridge_stats(time_t now)
{
  char *fname, *contents;
  /* An in-memory copy from a recently finished interval wins. */
  if (bridge_stats_extrainfo)
    return;

  fname = get_datadir_fname2("stats", "bridge-stats");
  contents = read_file_to_str(fname, RFTS_IGNORE_MISSING, NULL);
  if (contents && validate_bridge_stats(contents, now)) {
    /* Ownership of the file contents moves to the module-level cache. */
    bridge_stats_extrainfo = contents;
  } else {
    tor_free(contents);
  }
  tor_free(fname);
}
/** Return most recent bridge statistics for inclusion in extra-info
 * descriptors, or NULL if we don't have recent bridge statistics.
 * The returned string is owned by this module; callers must not free it. */
const char *
geoip_get_bridge_stats_extrainfo(time_t now)
{
  load_bridge_stats(now);
  return bridge_stats_extrainfo;
}
/** Return a new string containing the recent bridge statistics to be returned
 * to controller clients, or NULL if we don't have any bridge statistics.
 * Unlike geoip_get_bridge_stats_extrainfo(), the caller owns (and must
 * eventually free) the result. */
char *
geoip_get_bridge_stats_controller(time_t now)
{
  return format_bridge_stats_controller(now);
}
/** Start time of entry stats or 0 if we're not collecting entry
 * statistics. */
static time_t start_of_entry_stats_interval;

/** Initialize entry stats: begin a new measurement interval at
 * <b>now</b>. */
void
geoip_entry_stats_init(time_t now)
{
  start_of_entry_stats_interval = now;
}
/** Reset counters for entry stats: drop the whole client history and
 * restart the measurement interval at <b>now</b>. */
void
geoip_reset_entry_stats(time_t now)
{
  client_history_clear();
  start_of_entry_stats_interval = now;
}
/** Stop collecting entry stats in a way that we can re-start doing so in
 * geoip_entry_stats_init().  Resetting with a 0 timestamp marks the
 * stats as uninitialized. */
void
geoip_entry_stats_term(void)
{
  geoip_reset_entry_stats(0);
}
/** Assemble the "entry-stats-end"/"entry-ips" report covering the period
 * from start_of_entry_stats_interval up to <b>now</b>.  The returned
 * string is newly allocated; NULL means entry stats are not being
 * collected.  The caller must guarantee the interval started in the
 * past. */
char *
geoip_format_entry_stats(time_t now)
{
  char written[ISO_TIME_LEN+1];
  char *country_data = NULL;
  char *out;

  if (!start_of_entry_stats_interval)
    return NULL; /* Not initialized. */
  tor_assert(now >= start_of_entry_stats_interval);

  format_iso_time(written, now);
  geoip_get_client_history(GEOIP_CLIENT_CONNECT, &country_data, NULL);
  tor_asprintf(&out,
               "entry-stats-end %s (%u s)\n"
               "entry-ips %s\n",
               written, (unsigned) (now - start_of_entry_stats_interval),
               country_data ? country_data : "");
  tor_free(country_data);
  return out;
}
/** If 24 hours have passed since the beginning of the current entry stats
 * period, write entry stats to $DATADIR/stats/entry-stats (possibly
 * overwriting an existing file) and reset counters. Return when we would
 * next want to write entry stats or 0 if we never want to write. */
time_t
geoip_entry_stats_write(time_t now)
{
  char *str = NULL;
  if (!start_of_entry_stats_interval)
    return 0; /* Not initialized. */
  if (start_of_entry_stats_interval + WRITE_STATS_INTERVAL > now)
    goto done; /* Not ready to write. */
  /* Discard all items in the client history that are too old. */
  geoip_remove_old_clients(start_of_entry_stats_interval);
  /* Generate history string. */
  str = geoip_format_entry_stats(now);
  /* Bugfix/consistency: guard against a NULL history string before
   * handing it to write_to_data_subdir(), exactly as
   * geoip_dirreq_stats_write() does. */
  if (!str)
    goto done;
  /* Write entry-stats string to disk. */
  if (!check_or_create_data_subdir("stats")) {
    write_to_data_subdir("stats", "entry-stats", str, "entry statistics");
    /* Reset measurement interval start. */
    geoip_reset_entry_stats(now);
  }
 done:
  tor_free(str);
  return start_of_entry_stats_interval + WRITE_STATS_INTERVAL;
}
/** Helper used to implement GETINFO ip-to-country/... controller command.
 * On success sets *<b>answer</b> to a newly allocated country name and
 * returns 0; on bad input sets *<b>errmsg</b> and returns -1.  Questions
 * with any other prefix are left unhandled (0 with *answer unset). */
int
getinfo_helper_geoip(control_connection_t *control_conn,
                     const char *question, char **answer,
                     const char **errmsg)
{
  (void)control_conn; /* Unused. */
  if (!strcmpstart(question, "ip-to-country/")) {
    int c;
    sa_family_t family;
    tor_addr_t addr;
    question += strlen("ip-to-country/");
    family = tor_addr_parse(&addr, question);
    if (family != AF_INET && family != AF_INET6) {
      *errmsg = "Invalid address family";
      return -1;
    }
    if (!geoip_is_loaded(family)) {
      *errmsg = "GeoIP data not loaded";
      return -1;
    }
    /* Dispatch on address family; each database is looked up separately. */
    if (family == AF_INET)
      c = geoip_get_country_by_ipv4(tor_addr_to_ipv4h(&addr));
    else /* AF_INET6 */
      c = geoip_get_country_by_ipv6(tor_addr_to_in6(&addr));
    *answer = tor_strdup(geoip_get_country_name(c));
  }
  return 0;
}
/** Release all storage held by the GeoIP databases and country list. */
STATIC void
clear_geoip_db(void)
{
if (geoip_countries) {
SMARTLIST_FOREACH(geoip_countries, geoip_country_t *, c, tor_free(c));
smartlist_free(geoip_countries);
}
strmap_free(country_idxplus1_by_lc_code, NULL);
if (geoip_ipv4_entries) {
SMARTLIST_FOREACH(geoip_ipv4_entries, geoip_ipv4_entry_t *, ent,
tor_free(ent));
smartlist_free(geoip_ipv4_entries);
}
if (geoip_ipv6_entries) {
SMARTLIST_FOREACH(geoip_ipv6_entries, geoip_ipv6_entry_t *, ent,
tor_free(ent));
smartlist_free(geoip_ipv6_entries);
}
geoip_countries = NULL;
country_idxplus1_by_lc_code = NULL;
geoip_ipv4_entries = NULL;
geoip_ipv6_entries = NULL;
}
/** Release all storage held in this file. */
void
geoip_free_all(void)
{
  {
    /* Free every client-history entry, then clear the hash table itself. */
    clientmap_entry_t **ent, **next, *this;
    for (ent = HT_START(clientmap, &client_history); ent != NULL; ent = next) {
      this = *ent;
      next = HT_NEXT_RMV(clientmap, &client_history, ent);
      clientmap_entry_free(this);
    }
    HT_CLEAR(clientmap, &client_history);
  }
  {
    /* Likewise for the directory-request map. */
    dirreq_map_entry_t **ent, **next, *this;
    for (ent = HT_START(dirreqmap, &dirreq_map); ent != NULL; ent = next) {
      this = *ent;
      next = HT_NEXT_RMV(dirreqmap, &dirreq_map, ent);
      tor_free(this);
    }
    HT_CLEAR(dirreqmap, &dirreq_map);
  }

  clear_geoip_db();
  tor_free(bridge_stats_extrainfo);
}
| BreakoutCoin/Breakout-Chain-Client | src/tor/or/geoip.c | C | mit | 58,156 |
# WebSocket
[![Swift][swift-badge]][swift-url]
[![Zewo][zewo-badge]][zewo-url]
[![Platform][platform-badge]][platform-url]
[![License][mit-badge]][mit-url]
[![Slack][slack-badge]][slack-url]
[![Travis][travis-badge]][travis-url]
[![Codebeat][codebeat-badge]][codebeat-url]
> :warning: This module contains no networking. To create a WebSocket Server, see [WebSocketServer](https://github.com/Zewo/WebSocketServer). To create a WebSocket Client, see [WebSocketClient](https://github.com/Zewo/WebSocketClient).
## Installation
```swift
import PackageDescription
let package = Package(
dependencies: [
.Package(url: "https://github.com/Zewo/WebSocket.git", majorVersion: 0, minor: 14),
]
)
```
## Support
If you need any help you can join our [Slack](http://slack.zewo.io) and go to the **#help** channel. Or you can create a Github [issue](https://github.com/Zewo/Zewo/issues/new) in our main repository. When stating your issue, be sure to add enough details, specify what module is causing the problem, and include reproduction steps.
## Community
[![Slack][slack-image]][slack-url]
The entire Zewo code base is licensed under MIT. By contributing to Zewo you are contributing to an open and engaged community of brilliant Swift programmers. Join us on [Slack](http://slack.zewo.io) to get to know us!
## License
This project is released under the MIT license. See [LICENSE](LICENSE) for details.
[swift-badge]: https://img.shields.io/badge/Swift-3.0-orange.svg?style=flat
[swift-url]: https://swift.org
[zewo-badge]: https://img.shields.io/badge/Zewo-0.14-FF7565.svg?style=flat
[zewo-url]: http://zewo.io
[platform-badge]: https://img.shields.io/badge/Platforms-OS%20X%20--%20Linux-lightgray.svg?style=flat
[platform-url]: https://swift.org
[mit-badge]: https://img.shields.io/badge/License-MIT-blue.svg?style=flat
[mit-url]: https://tldrlegal.com/license/mit-license
[slack-image]: http://s13.postimg.org/ybwy92ktf/Slack.png
[slack-badge]: https://zewo-slackin.herokuapp.com/badge.svg
[slack-url]: http://slack.zewo.io
[travis-badge]: https://travis-ci.org/Zewo/WebSocket.svg?branch=master
[travis-url]: https://travis-ci.org/Zewo/WebSocket
[codebeat-badge]: https://codebeat.co/badges/7b271ac4-f447-45a5-8cd0-f0f4c2e57690
[codebeat-url]: https://codebeat.co/projects/github-com-zewo-websocket
| NickAger/elm-slider | ServerSlider/WebSocketServer/Packages/WebSocket-0.14.3/README.md | Markdown | mit | 2,315 |
#ifndef __ABSPRITE_H
#define __ABSPRITE_H

#include <Arduino.h>
#include <Adafruit_GFX.h>
#include <Adafruit_ST7735.h>
#include <SPI.h>

#include "ab_lcd_image.h"
#include "abImage.h"

// Default sprite size and movement step, in pixels.
#define AB_SPRITE_SIZE 15
#define DEFAULT_MOVE_DIST 20
// Display dimensions (128x160 matches common ST7735 panels).
// NOTE(review): presumably used to bound sprite movement -- the
// implementation lives in the .cpp, so confirm there.
#define MAX_SCREEN_WIDTH 128
#define MAX_SCREEN_HEIGHT 160

// A movable sprite drawn on an Adafruit ST7735 TFT.  Tracks both its
// current and previous position so the previous location can be erased
// (undraw_old) before the sprite is redrawn at the new one.
class abSprite {
  private:
    int width;           // sprite width in pixels
    int height;          // sprite height in pixels
    abImage image;       // pixel data used by draw()
  public:
    abSprite();
    int x_old;           // previous x position (erased by undraw_old)
    int x;               // current x position
    int y_old;           // previous y position (erased by undraw_old)
    int y;               // current y position
    int current_sheet;   // NOTE(review): likely an animation-frame index -- confirm in .cpp
    int max_sheet;
    void setImage(abImage image);
    abImage getImage();
    void setCoordinates(int x, int y);
    void setSize(int width, int height);
    // Move by an implementation-defined default step (presumably
    // DEFAULT_MOVE_DIST; defined in the .cpp).
    void moveRight();
    void moveLeft();
    void moveUp();
    void moveDown();
    // Move by an explicit distance in pixels.
    void moveRight(int dist);
    void moveLeft(int dist);
    void moveUp(int dist);
    void moveDown(int dist);
    // Restore the background image over the sprite's previous position.
    void undraw_old(Adafruit_ST7735 *tft, lcd_image_t *bg);
    // Render the sprite at its current position.
    void draw(Adafruit_ST7735 *tft);
};

#endif
/*
* @package jsDAV
* @subpackage CardDAV
* @copyright Copyright(c) 2013 Mike de Boer. <info AT mikedeboer DOT nl>
* @author Mike de Boer <info AT mikedeboer DOT nl>
* @license http://github.com/mikedeboer/jsDAV/blob/master/LICENSE MIT License
*/
"use strict";
var jsDAV_Plugin = require("./../DAV/plugin");
var jsDAV_Property_Href = require("./../DAV/property/href");
var jsDAV_Property_HrefList = require("./../DAV/property/hrefList");
var jsDAV_Property_iHref = require("./../DAV/interfaces/iHref");
var jsCardDAV_iAddressBook = require("./interfaces/iAddressBook");
var jsCardDAV_iCard = require("./interfaces/iCard");
var jsCardDAV_iDirectory = require("./interfaces/iDirectory");
var jsCardDAV_UserAddressBooks = require("./userAddressBooks");
var jsCardDAV_AddressBookQueryParser = require("./addressBookQueryParser");
var jsDAVACL_iPrincipal = require("./../DAVACL/interfaces/iPrincipal");
var jsVObject_Reader = require("./../VObject/reader");
var AsyncEventEmitter = require("./../shared/asyncEvents").EventEmitter;
var Exc = require("./../shared/exceptions");
var Util = require("./../shared/util");
var Xml = require("./../shared/xml");
var Async = require("asyncjs");
/**
 * CardDAV plugin
 *
 * The CardDAV plugin adds CardDAV functionality to the WebDAV server
 */
var jsCardDAV_Plugin = module.exports = jsDAV_Plugin.extend({
    /**
     * Plugin name
     *
     * @var String
     */
    name: "carddav",

    /**
     * Url to the addressbooks
     */
    ADDRESSBOOK_ROOT: "addressbooks",

    /**
     * xml namespace for CardDAV elements
     */
    NS_CARDDAV: "urn:ietf:params:xml:ns:carddav",

    /**
     * Add urls to this property to have them automatically exposed as
     * 'directories' to the user.
     *
     * @var array
     */
    directories: null,

    /**
     * Handler class; assigned in initialize().
     *
     * @var jsDAV_Handler
     */
    handler: null,
    /**
     * Initializes the plugin: registers the event listeners, declares the
     * CardDAV XML namespace, maps the addressbook/directory resourcetypes
     * and protects the CardDAV properties clients may never change.
     *
     * @param DAV\Server server
     * @return void
     */
    initialize: function(handler) {
        this.directories = [];

        // Events
        handler.addEventListener("beforeGetProperties", this.beforeGetProperties.bind(this));
        // NOTE(review): afterGetProperties is not defined in this part of
        // the file -- confirm it exists elsewhere in the plugin.
        handler.addEventListener("afterGetProperties", this.afterGetProperties.bind(this));
        handler.addEventListener("updateProperties", this.updateProperties.bind(this));
        handler.addEventListener("report", this.report.bind(this));
        handler.addEventListener("onHTMLActionsPanel", this.htmlActionsPanel.bind(this), AsyncEventEmitter.PRIO_HIGH);
        handler.addEventListener("onBrowserPostAction", this.browserPostAction.bind(this), AsyncEventEmitter.PRIO_HIGH);
        handler.addEventListener("beforeWriteContent", this.beforeWriteContent.bind(this));
        handler.addEventListener("beforeCreateFile", this.beforeCreateFile.bind(this));

        // Namespaces
        Xml.xmlNamespaces[this.NS_CARDDAV] = "card";

        // Mapping Interfaces to {DAV:}resourcetype values
        handler.resourceTypeMapping["{" + this.NS_CARDDAV + "}addressbook"] = jsCardDAV_iAddressBook;
        handler.resourceTypeMapping["{" + this.NS_CARDDAV + "}directory"] = jsCardDAV_iDirectory;

        // Adding properties that may never be changed
        handler.protectedProperties.push(
            "{" + this.NS_CARDDAV + "}supported-address-data",
            "{" + this.NS_CARDDAV + "}max-resource-size",
            "{" + this.NS_CARDDAV + "}addressbook-home-set",
            "{" + this.NS_CARDDAV + "}supported-collation-set"
        );
        handler.protectedProperties = Util.makeUnique(handler.protectedProperties);

        handler.propertyMap["{http://calendarserver.org/ns/}me-card"] = jsDAV_Property_Href;

        this.handler = handler;
    },
/**
* Returns a list of supported features.
*
* This is used in the DAV: header in the OPTIONS and PROPFIND requests.
*
* @return array
*/
getFeatures: function() {
return ["addressbook"];
},
    /**
     * Returns a list of reports this plugin supports.
     *
     * This will be used in the {DAV:}supported-report-set property.
     * Note that you still need to subscribe to the 'report' event to actually
     * implement them
     *
     * @param {String} uri
     * @return array
     */
    getSupportedReportSet: function(uri, callback) {
        var self = this;
        this.handler.getNodeForPath(uri, function(err, node) {
            if (err)
                return callback(err);
            // Both addressbook collections and individual cards support the
            // two CardDAV REPORTs; any other node supports none.
            if (node.hasFeature(jsCardDAV_iAddressBook) || node.hasFeature(jsCardDAV_iCard)) {
                return callback(null, [
                     "{" + self.NS_CARDDAV + "}addressbook-multiget",
                     "{" + self.NS_CARDDAV + "}addressbook-query"
                ]);
            }
            return callback(null, []);
        });
    },
    /**
     * Adds all CardDAV-specific properties
     *
     * @param {String} path
     * @param DAV\INode node
     * @param {Array} requestedProperties
     * @param {Array} returnedProperties
     * @return void
     */
    beforeGetProperties: function(e, path, node, requestedProperties, returnedProperties) {
        var self = this;
        if (node.hasFeature(jsDAVACL_iPrincipal)) {
            // calendar-home-set property
            var addHome = "{" + this.NS_CARDDAV + "}addressbook-home-set";
            if (requestedProperties[addHome]) {
                var principalId = node.getName();
                var addressbookHomePath = this.ADDRESSBOOK_ROOT + "/" + principalId + "/";
                delete requestedProperties[addHome];
                returnedProperties["200"][addHome] = jsDAV_Property_Href.new(addressbookHomePath);
            }

            var directories = "{" + this.NS_CARDDAV + "}directory-gateway";
            if (this.directories && requestedProperties[directories]) {
                delete requestedProperties[directories];
                returnedProperties["200"][directories] = jsDAV_Property_HrefList.new(this.directories);
            }
        }

        if (node.hasFeature(jsCardDAV_iCard)) {
            // The address-data property is not supposed to be a 'real'
            // property, but in large chunks of the spec it does act as such.
            // Therefore we simply expose it as a property.
            var addressDataProp = "{" + this.NS_CARDDAV + "}address-data";
            if (requestedProperties[addressDataProp]) {
                delete requestedProperties[addressDataProp];
                // Fetching the card body is async; the remaining work
                // continues in afterICard() once it completes.
                node.get(function(err, val) {
                    if (err)
                        return e.next(err);
                    returnedProperties["200"][addressDataProp] = val.toString("utf8");
                    afterICard();
                });
            }
            else
                afterICard();
        }
        else
            afterICard();

        // Second stage: expose {cs:}me-card on addressbook home collections,
        // then hand control back to the event chain via e.next().
        function afterICard() {
            if (node.hasFeature(jsCardDAV_UserAddressBooks)) {
                var meCardProp = "{http://calendarserver.org/ns/}me-card";
                if (requestedProperties[meCardProp]) {
                    self.handler.getProperties(node.getOwner(), ["{http://ajax.org/2005/aml}vcard-url"], function(err, props) {
                        if (err)
                            return e.next(err);

                        if (props["{http://ajax.org/2005/aml}vcard-url"]) {
                            returnedProperties["200"][meCardProp] = jsDAV_Property_Href.new(
                                props["{http://ajax.org/2005/aml}vcard-url"]
                            );
                            delete requestedProperties[meCardProp];
                        }
                        e.next();
                    });
                }
                else
                    e.next();
            }
            else
                e.next();
        }
    },
/**
* This event is triggered when a PROPPATCH method is executed
*
* @param {Array} mutations
* @param {Array} result
* @param DAV\INode node
* @return bool
*/
updateProperties: function(e, mutations, result, node) {
if (!node.hasFeature(jsCardDAV_UserAddressBooks))
return e.next();
var meCard = "{http://calendarserver.org/ns/}me-card";
// The only property we care about
if (!mutations[meCard])
return e.next();
var value = mutations[meCard];
delete mutations[meCard];
if (value.hasFeature(jsDAV_Property_iHref)) {
value = this.handler.calculateUri(value.getHref());
}
else if (!value) {
result["400"][meCard] = null;
return e.stop();
}
this.server.updateProperties(node.getOwner(), {"{http://ajax.org/2005/aml}vcard-url": value}, function(err, innerResult) {
if (err)
return e.next(err);
var closureResult = false;
var props;
for (var status in innerResult) {
props = innerResult[status];
if (props["{http://ajax.org/2005/aml}vcard-url"]) {
result[status][meCard] = null;
status = parseInt(status);
closureResult = (status >= 200 && status < 300);
}
}
if (!closureResult)
return e.stop();
e.next();
});
},
/**
* This functions handles REPORT requests specific to CardDAV
*
* @param {String} reportName
* @param DOMNode dom
* @return bool
*/
report: function(e, reportName, dom) {
switch(reportName) {
case "{" + this.NS_CARDDAV + "}addressbook-multiget" :
this.addressbookMultiGetReport(e, dom);
break;
case "{" + this.NS_CARDDAV + "}addressbook-query" :
this.addressBookQueryReport(e, dom);
break;
default :
return e.next();
}
},
    /**
     * This function handles the addressbook-multiget REPORT.
     *
     * This report is used by the client to fetch the content of a series
     * of urls. Effectively avoiding a lot of redundant requests.
     *
     * @param DOMNode dom
     * @return void
     */
    addressbookMultiGetReport: function(e, dom) {
        var properties = Object.keys(Xml.parseProperties(dom));

        var hrefElems = dom.getElementsByTagNameNS("urn:DAV", "href");
        var propertyList = {};

        var self = this;
        Async.list(hrefElems)
            .each(function(elem, next) {
                // Resolve each <d:href> against the server root and merge
                // its properties into the single multistatus body.
                var uri = self.handler.calculateUri(elem.firstChild.nodeValue);
                //propertyList[uri]
                self.handler.getPropertiesForPath(uri, properties, 0, function(err, props) {
                    if (err)
                        return next(err);
                    Util.extend(propertyList, props);
                    next();
                });
            })
            .end(function(err) {
                if (err)
                    return e.next(err);
                var prefer = self.handler.getHTTPPrefer();
                // Halt further event processing; we write the response here.
                e.stop();

                self.handler.httpResponse.writeHead(207, {
                    "content-type": "application/xml; charset=utf-8",
                    "vary": "Brief,Prefer"
                });
                self.handler.httpResponse.end(self.handler.generateMultiStatus(propertyList, prefer["return-minimal"]));
            });
    },
/**
* This method is triggered before a file gets updated with new content.
*
* This plugin uses this method to ensure that Card nodes receive valid
* vcard data.
*
* @param {String} path
* @param jsDAV_iFile node
* @param resource data
* @return void
*/
beforeWriteContent: function(e, path, node) {
if (!node.hasFeature(jsCardDAV_iCard))
return e.next();
var self = this;
this.handler.getRequestBody("utf8", null, false, function(err, data) {
if (err)
return e.next(err);
try {
self.validateVCard(data);
}
catch (ex) {
return e.next(ex);
}
e.next();
});
},
    /**
     * This method is triggered before a new file is created.
     *
     * This plugin uses this method to ensure that Card nodes receive valid
     * vcard data.
     *
     * @param {String} path
     * @param resource data
     * @param jsDAV_iCollection parentNode
     * @return void
     */
    beforeCreateFile: function(e, path, data, enc, parentNode) {
        // Only files created inside an addressbook must be valid vcards.
        if (!parentNode.hasFeature(jsCardDAV_iAddressBook))
            return e.next();

        try {
            this.validateVCard(data);
        }
        catch (ex) {
            // Propagate the validation error, halting the create.
            return e.next(ex);
        }

        e.next();
    },
/**
* Checks if the submitted iCalendar data is in fact, valid.
*
* An exception is thrown if it's not.
*
* @param resource|string data
* @return void
*/
validateVCard: function(data) {
// If it's a stream, we convert it to a string first.
if (Buffer.isBuffer(data))
data = data.toString("utf8");
var vobj;
try {
vobj = jsVObject_Reader.read(data);
}
catch (ex) {
throw new Exc.UnsupportedMediaType("This resource only supports valid vcard data. Parse error: " + ex.message);
}
if (vobj.name != "VCARD")
throw new Exc.UnsupportedMediaType("This collection can only support vcard objects.");
if (!vobj.UID)
throw new Exc.BadRequest("Every vcard must have a UID.");
},
/**
* This function handles the addressbook-query REPORT
*
* This report is used by the client to filter an addressbook based on a
* complex query.
*
* @param DOMNode dom
* @return void
*/
    addressbookQueryReport: function(e, dom) {
        var query = jsCardDAV_AddressBookQueryParser.new(dom);
        try {
            query.parse();
        }
        catch(ex) {
            return e.next(ex);
        }
        // Depth 0 targets the node at the request URI itself; any other depth
        // evaluates the query against that node's direct children.
        var depth = this.handler.getHTTPDepth(0);
        if (depth === 0) {
            this.handler.getNodeForPath(this.handler.getRequestUri(), function(err, node) {
                if (err)
                    return e.next(err);
                afterCandidates([node]);
            })
        }
        else {
            this.handler.server.tree.getChildren(this.handler.getRequestUri(), function(err, children) {
                if (err)
                    return e.next(err);
                afterCandidates(children);
            });
        }
        var self = this;
        // Runs the parsed filters over each candidate card, then answers the
        // REPORT with a 207 multistatus of the requested properties.
        function afterCandidates(candidateNodes) {
            var validNodes = [];
            Async.list(candidateNodes)
                .each(function(node, next) {
                    // Non-card children (e.g. sub-collections) are skipped.
                    if (!node.hasFeature(jsCardDAV_iCard))
                        return next();
                    node.get(function(err, blob) {
                        if (err)
                            return next(err);
                        // Drop cards that do not satisfy the query filters.
                        if (!self.validateFilters(blob.toString("utf8"), query.filters, query.test))
                            return next();
                        validNodes.push(node);
                        if (query.limit && query.limit <= validNodes.length) {
                            // We hit the maximum number of items, we can stop now.
                            return next(Async.STOP);
                        }
                        next();
                    });
                })
                .end(function(err) {
                    if (err)
                        return e.next(err);
                    var result = {};
                    // Collect the requested properties for every matching card.
                    Async.list(validNodes)
                        .each(function(validNode, next) {
                            var href = self.handler.getRequestUri();
                            if (depth !== 0)
                                href = href + "/" + validNode.getName();
                            self.handler.getPropertiesForPath(href, query.requestedProperties, 0, function(err, props) {
                                if (err)
                                    return next(err);
                                Util.extend(result, props);
                                next();
                            });
                        })
                        .end(function(err) {
                            if (err)
                                return e.next(err);
                            // Take over the response: no later handler runs.
                            e.stop();
                            var prefer = self.handler.getHTTPPRefer();
                            self.handler.httpResponse.writeHead(207, {
                                "content-type": "application/xml; charset=utf-8",
                                "vary": "Brief,Prefer"
                            });
                            self.handler.httpResponse.end(self.handler.generateMultiStatus(result, prefer["return-minimal"]));
                        });
                });
        }
    },
/**
* Validates if a vcard makes it throught a list of filters.
*
* @param {String} vcardData
* @param {Array} filters
* @param {String} test anyof or allof (which means OR or AND)
* @return bool
*/
    validateFilters: function(vcardData, filters, test) {
        var vcard;
        try {
            vcard = jsVObject_Reader.read(vcardData);
        }
        catch (ex) {
            // Unparseable cards never match any filter.
            return false;
        }
        if (!filters)
            return true;
        var filter, isDefined, success, vProperties, results, texts;
        for (var i = 0, l = filters.length; i < l; ++i) {
            filter = filters[i];
            isDefined = vcard.get(filter.name);
            if (filter["is-not-defined"]) {
                // Inverted match: succeed only when the property is absent.
                if (isDefined)
                    success = false;
                else
                    success = true;
            }
            else if ((!filter["param-filters"] && !filter["text-matches"]) || !isDefined) {
                // We only need to check for existence
                success = isDefined;
            }
            else {
                vProperties = vcard.select(filter.name);
                results = [];
                if (filter["param-filters"])
                    results.push(this.validateParamFilters(vProperties, filter["param-filters"], filter.test));
                if (filter["text-matches"]) {
                    texts = vProperties.map(function(vProperty) {
                        return vProperty.value;
                    });
                    results.push(this.validateTextMatches(texts, filter["text-matches"], filter.test));
                }
                if (results.length === 1) {
                    success = results[0];
                }
                else {
                    // Combine the param-filter and text-match verdicts per the
                    // filter's own anyof/allof test.
                    if (filter.test == "anyof")
                        success = results[0] || results[1];
                    else
                        success = results[0] && results[1];
                }
            } // else
            // There are two conditions where we can already determine whether
            // or not this filter succeeds.
            if (test == "anyof" && success)
                return true;
            if (test == "allof" && !success)
                return false;
        } // foreach
        // If we got all the way here, it means we haven't been able to
        // determine early if the test failed or not.
        //
        // This implies for 'anyof' that the test failed, and for 'allof' that
        // we succeeded. Sounds weird, but makes sense.
        return test === "allof";
    },
/**
* Validates if a param-filter can be applied to a specific property.
*
* @todo currently we're only validating the first parameter of the passed
* property. Any subsequence parameters with the same name are
* ignored.
* @param {Array} vProperties
* @param {Array} filters
* @param {String} test
* @return bool
*/
    validateParamFilters: function(vProperties, filters, test) {
        var filter, isDefined, success, j, l2, vProperty;
        for (var i = 0, l = filters.length; i < l; ++i) {
            filter = filters[i];
            isDefined = false;
            // The parameter counts as defined when any property carries it.
            for (j = 0, l2 = vProperties.length; j < l2; ++j) {
                vProperty = vProperties[j];
                isDefined = !!vProperty.get(filter.name);
                if (isDefined)
                    break;
            }
            if (filter["is-not-defined"]) {
                success = !isDefined;
                // If there's no text-match, we can just check for existence
            }
            else if (!filter["text-match"] || !isDefined) {
                success = isDefined;
            }
            else {
                // Text-match required: succeed on the first property whose
                // parameter value matches.
                success = false;
                for (j = 0, l2 = vProperties.length; j < l2; ++j) {
                    vProperty = vProperties[j];
                    // If we got all the way here, we'll need to validate the
                    // text-match filter.
                    success = Util.textMatch(vProperty.get(filter.name).value, filter["text-match"].value, filter["text-match"]["match-type"]);
                    if (success)
                        break;
                }
                if (filter["text-match"]["negate-condition"])
                    success = !success;
            } // else
            // There are two conditions where we can already determine whether
            // or not this filter succeeds.
            if (test == "anyof" && success)
                return true;
            if (test == "allof" && !success)
                return false;
        }
        // If we got all the way here, it means we haven't been able to
        // determine early if the test failed or not.
        //
        // This implies for 'anyof' that the test failed, and for 'allof' that
        // we succeeded. Sounds weird, but makes sense.
        return test == "allof";
    },
/**
* Validates if a text-filter can be applied to a specific property.
*
* @param {Array} texts
* @param {Array} filters
* @param {String} test
* @return bool
*/
validateTextMatches: function(texts, filters, test) {
var success, filter, j, l2, haystack;
for (var i = 0, l = filters.length; i < l; ++i) {
filter = filters[i];
success = false;
for (j = 0, l2 = texts.length; j < l2; ++j) {
haystack = texts[j];
success = Util.textMatch(haystack, filter.value, filter["match-type"]);
// Breaking on the first match
if (success)
break;
}
if (filter["negate-condition"])
success = !success;
if (success && test == "anyof")
return true;
if (!success && test == "allof")
return false;
}
// If we got all the way here, it means we haven't been able to
// determine early if the test failed or not.
//
// This implies for 'anyof' that the test failed, and for 'allof' that
// we succeeded. Sounds weird, but makes sense.
return test == "allof";
},
/**
* This event is triggered after webdav-properties have been retrieved.
*
* @return bool
*/
afterGetProperties: function(e, uri, properties) {
// If the request was made using the SOGO connector, we must rewrite
// the content-type property. By default jsDAV will send back
// text/x-vcard; charset=utf-8, but for SOGO we must strip that last
// part.
if (!properties["200"]["{DAV:}getcontenttype"])
return e.next();
if (this.handler.httpRequest.headers["user-agent"].indexOf("Thunderbird") === -1)
return e.next();
if (properties["200"]["{DAV:}getcontenttype"].indexOf("text/x-vcard") === 0)
properties["200"]["{DAV:}getcontenttype"] = "text/x-vcard";
e.next();
},
/**
* This method is used to generate HTML output for the
* Sabre\DAV\Browser\Plugin. This allows us to generate an interface users
* can use to create new calendars.
*
* @param DAV\INode node
* @param {String} output
* @return bool
*/
htmlActionsPanel: function(e, node, output) {
if (!node.hasFeature(jsCardDAV_UserAddressBooks))
return e.next();
output.html = '<tr><td colspan="2"><form method="post" action="">' +
'<h3>Create new address book</h3>' +
'<input type="hidden" name="jsdavAction" value="mkaddressbook" />' +
'<label>Name (uri):</label> <input type="text" name="name" /><br />' +
'<label>Display name:</label> <input type="text" name="{DAV:}displayname" /><br />' +
'<input type="submit" value="create" />' +
'</form>' +
'</td></tr>';
e.stop();
},
/**
* This method allows us to intercept the 'mkcalendar' sabreAction. This
* action enables the user to create new calendars from the browser plugin.
*
* @param {String} uri
* @param {String} action
* @param {Array} postVars
* @return bool
*/
browserPostAction: function(e, uri, action, postVars) {
if (action != "mkaddressbook")
return e.next();
var resourceType = ["{DAV:}collection", "{urn:ietf:params:xml:ns:carddav}addressbook"];
var properties = {};
if (postVars["{DAV:}displayname"])
properties["{DAV:}displayname"] = postVars["{DAV:}displayname"];
this.handler.createCollection(uri + "/" + postVars.name, resourceType, properties, function(err) {
if (err)
return e.next(err);
e.stop();
});
}
});
| pascience/cloxp-install | win/life_star/node_modules/lively-davfs/node_modules/jsDAV/lib/CardDAV/plugin.js | JavaScript | mit | 26,510 |
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {Rule, SchematicsException, Tree, UpdateRecorder} from '@angular-devkit/schematics';
import {relative} from 'path';
import {getProjectTsConfigPaths} from '../../utils/project_tsconfig_paths';
import {canMigrateFile, createMigrationProgram} from '../../utils/typescript/compiler_host';
import {migrateFile} from './util';
/**
 * Entry point of the Typed Forms migration: runs the migration over every
 * build and test tsconfig of the workspace.
 */
export default function(): Rule {
  return async (tree: Tree) => {
    const projectPaths = await getProjectTsConfigPaths(tree);
    const tsconfigPaths = [...projectPaths.buildPaths, ...projectPaths.testPaths];
    const basePath = process.cwd();
    if (tsconfigPaths.length === 0) {
      throw new SchematicsException(
          'Could not find any tsconfig file. Cannot migrate to Typed Forms.');
    }
    tsconfigPaths.forEach(tsconfigPath => runTypedFormsMigration(tree, tsconfigPath, basePath));
  };
}
/**
 * Runs the Typed Forms migration over all migratable source files of one
 * TypeScript project.
 *
 * @param tree Schematics tree used to read files and record updates.
 * @param tsconfigPath Path of the project's tsconfig file.
 * @param basePath Base directory that file paths are made relative to.
 */
function runTypedFormsMigration(tree: Tree, tsconfigPath: string, basePath: string) {
  const {program} = createMigrationProgram(tree, tsconfigPath, basePath);
  const typeChecker = program.getTypeChecker();
  const sourceFiles =
      program.getSourceFiles().filter(sourceFile => canMigrateFile(basePath, sourceFile, program));
  for (const sourceFile of sourceFiles) {
    let update: UpdateRecorder|null = null;
    // Replaces the text span [startPos, startPos + origLength) with `text`.
    const rewriter = (startPos: number, origLength: number, text: string) => {
      if (update === null) {
        // Lazily initialize update, because most files will not require migration.
        update = tree.beginUpdate(relative(basePath, sourceFile.fileName));
      }
      update.remove(startPos, origLength);
      update.insertLeft(startPos, text);
    };
    migrateFile(sourceFile, typeChecker, rewriter);
    // Only commit when the rewriter actually produced edits for this file.
    if (update !== null) {
      tree.commitUpdate(update);
    }
  }
}
| gkalpak/angular | packages/core/schematics/migrations/typed-forms/index.ts | TypeScript | mit | 1,942 |
// Legacy TestCafe fixture exercising screenshot behavior.
'@fixture click';
'@page http://example.com';
'@test'['Take a screenshot'] = {
    // Explicitly takes a screenshot through the act API.
    '1.Click on non-existing element': function () {
        act.screenshot();
    },
};
'@test'['Screenshot on test code error'] = {
    // Throws so the runner captures a screenshot on a test-code error.
    '1.Click on non-existing element': function () {
        throw new Error('STOP');
    },
};
| VasilyStrelyaev/testcafe | test/functional/legacy-fixtures/screenshots/testcafe-fixtures/screenshots.test.js | JavaScript | mit | 312 |
๏ปฟ/////////////////////////////////
// Rich Newman
// http://richnewman.wordpress.com/about/code-listings-and-diagrams/hslcolor-class/
//
using System;
using System.Drawing;
namespace AldursLab.Essentials.Extensions.DotNet.Drawing
{
/// <summary>
/// Color with Hue/Saturation/Luminescense representation.
/// </summary>
    public class HslColor
    {
        // Private data members below are on scale 0-1
        // They are scaled for use externally based on scale
        private double hue = 1.0;
        private double saturation = 1.0;
        private double luminosity = 1.0;
        // External scale (0-240), matching the classic Windows color picker.
        private const double scale = 240.0;
        /// <summary>Hue on the 0-240 scale (stored internally as 0-1).</summary>
        public double Hue
        {
            get { return hue * scale; }
            set { hue = CheckRange(value / scale); }
        }
        /// <summary>Saturation on the 0-240 scale (stored internally as 0-1).</summary>
        public double Saturation
        {
            get { return saturation * scale; }
            set { saturation = CheckRange(value / scale); }
        }
        /// <summary>Luminosity on the 0-240 scale (stored internally as 0-1).</summary>
        public double Luminosity
        {
            get { return luminosity * scale; }
            set { luminosity = CheckRange(value / scale); }
        }
        // Clamps an internal 0-1 component into its valid range.
        private double CheckRange(double value)
        {
            if (value < 0.0)
                value = 0.0;
            else if (value > 1.0)
                value = 1.0;
            return value;
        }
        /// <summary>Formats the color as its scaled H/S/L values.</summary>
        public override string ToString()
        {
            return String.Format("H: {0:#0.##} S: {1:#0.##} L: {2:#0.##}", Hue, Saturation, Luminosity);
        }
        /// <summary>Formats the color as its R/G/B equivalent.</summary>
        public string ToRGBString()
        {
            Color color = (Color)this;
            return String.Format("R: {0:#0.##} G: {1:#0.##} B: {2:#0.##}", color.R, color.G, color.B);
        }
        #region Casts to/from System.Drawing.Color
        /// <summary>Converts HSL to RGB via the standard HSL-to-RGB formula.</summary>
        public static implicit operator Color(HslColor hslColor)
        {
            double r = 0, g = 0, b = 0;
            if (hslColor.luminosity != 0)
            {
                if (hslColor.saturation == 0)
                    // Zero saturation is pure grey: all channels equal luminosity.
                    r = g = b = hslColor.luminosity;
                else
                {
                    double temp2 = GetTemp2(hslColor);
                    double temp1 = 2.0 * hslColor.luminosity - temp2;
                    // The three channels sample the hue wheel 120 degrees apart.
                    r = GetColorComponent(temp1, temp2, hslColor.hue + 1.0 / 3.0);
                    g = GetColorComponent(temp1, temp2, hslColor.hue);
                    b = GetColorComponent(temp1, temp2, hslColor.hue - 1.0 / 3.0);
                }
            }
            return Color.FromArgb((int)(255 * r), (int)(255 * g), (int)(255 * b));
        }
        // Piecewise hue-to-channel mapping of the HSL->RGB conversion.
        private static double GetColorComponent(double temp1, double temp2, double temp3)
        {
            temp3 = MoveIntoRange(temp3);
            if (temp3 < 1.0 / 6.0)
                return temp1 + (temp2 - temp1) * 6.0 * temp3;
            else if (temp3 < 0.5)
                return temp2;
            else if (temp3 < 2.0 / 3.0)
                return temp1 + ((temp2 - temp1) * ((2.0 / 3.0) - temp3) * 6.0);
            else
                return temp1;
        }
        // Wraps a hue offset back into the [0, 1] range.
        private static double MoveIntoRange(double temp3)
        {
            if (temp3 < 0.0)
                temp3 += 1.0;
            else if (temp3 > 1.0)
                temp3 -= 1.0;
            return temp3;
        }
        private static double GetTemp2(HslColor hslColor)
        {
            double temp2;
            // Both branches yield the same value at luminosity == 0.5,
            // so '<' versus '<=' makes no numerical difference here.
            if (hslColor.luminosity < 0.5) //<=??
                temp2 = hslColor.luminosity * (1.0 + hslColor.saturation);
            else
                temp2 = hslColor.luminosity + hslColor.saturation - (hslColor.luminosity * hslColor.saturation);
            return temp2;
        }
        /// <summary>Converts an RGB color to its HSL representation.</summary>
        public static implicit operator HslColor(Color color)
        {
            HslColor hslColor = new HslColor();
            hslColor.hue = color.GetHue() / 360.0; // we store hue as 0-1 as opposed to 0-360
            hslColor.luminosity = color.GetBrightness();
            hslColor.saturation = color.GetSaturation();
            return hslColor;
        }
        #endregion
        /// <summary>Replaces this color with the HSL equivalent of the given RGB triple.</summary>
        public void SetRGB(int red, int green, int blue)
        {
            HslColor hslColor = (HslColor)Color.FromArgb(red, green, blue);
            this.hue = hslColor.hue;
            this.saturation = hslColor.saturation;
            this.luminosity = hslColor.luminosity;
        }
        public HslColor() { }
        public HslColor(Color color)
        {
            SetRGB(color.R, color.G, color.B);
        }
        public HslColor(int red, int green, int blue)
        {
            SetRGB(red, green, blue);
        }
        /// <summary>Creates a color from scaled (0-240) H/S/L values.</summary>
        public HslColor(double hue, double saturation, double luminosity)
        {
            this.Hue = hue;
            this.Saturation = saturation;
            this.Luminosity = luminosity;
        }
    }
}
| mdsolver/WurmAssistant3 | src/Common/Essentials/Extensions/DotNet/Drawing/HslColor.cs | C# | mit | 4,774 |
# RegionalSettings.Properties WorkDaysSpecified
**Namespace:** [OfficeDevPnP.Core.Framework.Provisioning.Providers.Xml.V201508](OfficeDevPnP.Core.Framework.Provisioning.Providers.Xml.V201508.md)
**Assembly:** OfficeDevPnP.Core.dll
## Syntax
```C#
public bool WorkDaysSpecified { get; set; }
```
### Property Value
Type: System.Boolean
## See also
- [RegionalSettings](OfficeDevPnP.Core.Framework.Provisioning.Providers.Xml.V201508.RegionalSettings.md)
- [OfficeDevPnP.Core.Framework.Provisioning.Providers.Xml.V201508](OfficeDevPnP.Core.Framework.Provisioning.Providers.Xml.V201508.md)
| PaoloPia/PnP-Guidance | sitescore/OfficeDevPnP.Core.Framework.Provisioning.Providers.Xml.V201508.RegionalSettings.WorkDaysSpecified.md | Markdown | mit | 600 |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// <auto-generated/>
#nullable disable
using System.Collections.Generic;
using System.Text.Json;
using Azure.Core;
namespace Azure.ResourceManager.Sql.Models
{
    public partial class TransparentDataEncryptionActivityListResult
    {
        // Deserializes the wire JSON shape {"value":[...]} into the model.
        // Unknown top-level properties are ignored.
        internal static TransparentDataEncryptionActivityListResult DeserializeTransparentDataEncryptionActivityListResult(JsonElement element)
        {
            IReadOnlyList<TransparentDataEncryptionActivity> value = default;
            foreach (var property in element.EnumerateObject())
            {
                if (property.NameEquals("value"))
                {
                    // Each array entry is one activity record.
                    List<TransparentDataEncryptionActivity> array = new List<TransparentDataEncryptionActivity>();
                    foreach (var item in property.Value.EnumerateArray())
                    {
                        array.Add(TransparentDataEncryptionActivity.DeserializeTransparentDataEncryptionActivity(item));
                    }
                    value = array;
                    continue;
                }
            }
            return new TransparentDataEncryptionActivityListResult(value);
        }
    }
| brjohnstmsft/azure-sdk-for-net | sdk/sqlmanagement/Azure.ResourceManager.Sql/src/Generated/Models/TransparentDataEncryptionActivityListResult.Serialization.cs | C# | mit | 1,257 |
using System;
using Marten.Services;
using Marten.Testing.Documents;
using Marten.Testing.Harness;
using Xunit;
namespace Marten.Testing.CoreFunctionality
{
    public class foreign_key_persisting_Tests: IntegrationContext
    {
        // A nullable FK can be set, overwritten, and cleared across sessions.
        [Fact]
        public void persist_and_overwrite_foreign_key()
        {
            StoreOptions(_ =>
            {
                _.Schema.For<Issue>().ForeignKey<User>(x => x.AssigneeId);
            });
            var issue = new Issue();
            var user = new User();
            using (var session = theStore.OpenSession())
            {
                session.Store(user);
                session.Store(issue);
                session.SaveChanges();
            }
            // Point the issue at the stored user.
            issue.AssigneeId = user.Id;
            using (var session = theStore.OpenSession())
            {
                session.Store(issue);
                session.SaveChanges();
            }
            // Clearing the reference must also be accepted.
            issue.AssigneeId = null;
            using (var session = theStore.OpenSession())
            {
                session.Store(issue);
                session.SaveChanges();
            }
        }
        // Deleting a user that an issue still references must violate the FK.
        [Fact]
        public void throws_exception_if_trying_to_delete_referenced_user()
        {
            StoreOptions(_ =>
            {
                _.Schema.For<Issue>()
                    .ForeignKey<User>(x => x.AssigneeId);
            });
            var issue = new Issue();
            var user = new User();
            issue.AssigneeId = user.Id;
            using (var session = theStore.OpenSession())
            {
                session.Store(user);
                session.Store(issue);
                session.SaveChanges();
            }
            Exception<Marten.Exceptions.MartenCommandException>.ShouldBeThrownBy(() =>
            {
                using (var session = theStore.OpenSession())
                {
                    session.Delete(user);
                    session.SaveChanges();
                }
            });
        }
        // A null FK value must not require a referenced row.
        [Fact]
        public void persist_without_referenced_user()
        {
            StoreOptions(_ =>
            {
                _.Schema.For<Issue>()
                    .ForeignKey<User>(x => x.AssigneeId);
            });
            using (var session = theStore.OpenSession())
            {
                session.Store(new Issue());
                session.SaveChanges();
            }
        }
        // Inserts must be ordered so the referenced user row exists first,
        // even when the documents are stored in the opposite order.
        [Fact]
        public void order_inserts()
        {
            StoreOptions(_ =>
            {
                _.Schema.For<Issue>()
                    .ForeignKey<User>(x => x.AssigneeId);
            });
            var issue = new Issue();
            var user = new User();
            issue.AssigneeId = user.Id;
            using (var session = theStore.OpenSession())
            {
                session.Store(issue);
                session.Store(user);
                session.SaveChanges();
            }
        }
        // A circular FK chain cannot be ordered and must be rejected.
        [Fact]
        public void throws_exception_on_cyclic_dependency()
        {
            Exception<InvalidOperationException>.ShouldBeThrownBy(() =>
            {
                StoreOptions(_ =>
                {
                    _.Schema.For<Node1>().ForeignKey<Node3>(x => x.Link);
                    _.Schema.For<Node2>().ForeignKey<Node1>(x => x.Link);
                    _.Schema.For<Node3>().ForeignKey<Node2>(x => x.Link);
                });
            }).Message.ShouldContain("Cyclic");
        }
        // Minimal documents used to build the cyclic FK chain above.
        public class Node1
        {
            public Guid Id { get; set; }
            public Guid Link { get; set; }
        }
        public class Node2
        {
            public Guid Id { get; set; }
            public Guid Link { get; set; }
        }
        public class Node3
        {
            public Guid Id { get; set; }
            public Guid Link { get; set; }
        }
        public foreign_key_persisting_Tests(DefaultStoreFixture fixture) : base(fixture)
        {
            DocumentTracking = DocumentTracking.IdentityOnly;
        }
    }
}
| ericgreenmix/marten | src/Marten.Testing/CoreFunctionality/foreign_key_persisting_Tests.cs | C# | mit | 4,074 |
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {global} from '../../src/util/global';
// Not yet available in TypeScript: https://github.com/Microsoft/TypeScript/pull/29332
declare var globalThis: any /** TODO #9100 */;
{
  describe('global', () => {
    // `global` must be the same object as the JS global `this`.
    it('should be global this value', () => {
      const _global = new Function('return this')();
      expect(global).toBe(_global);
    });
    if (typeof globalThis !== 'undefined') {
      // On runtimes that define it, `global` should alias `globalThis`.
      it('should use globalThis as global reference', () => {
        expect(global).toBe(globalThis);
      });
    }
  });
}
| wKoza/angular | packages/core/test/util/global_spec.ts | TypeScript | mit | 732 |
๏ปฟusing System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("Dos.Tools")]
[assembly: AssemblyDescription("ITdos.com")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("ITdos.com")]
[assembly: AssemblyProduct("Dos.Tools")]
[assembly: AssemblyCopyright("Copyright ยฉ 2009-2016")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly invisible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is the ID of the typelib if this project is exposed to COM.
[assembly: Guid("014d6eaa-6f9d-4b8a-a4dc-9b992cd94cfa")]
// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Build and Revision
// numbers by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("2.0.6.0")]
[assembly: AssemblyFileVersion("2.0.6.0")]
| itdos/Dos.Tool | Properties/AssemblyInfo.cs | C# | mit | 1,337 |
#include "Halide.h"
#include <tiramisu/utils.h>
#include <cstdlib>
#include <iostream>
#include "wrapper_test_71.h"
#ifdef __cplusplus
extern "C" {
#endif
#ifdef __cplusplus
} // extern "C"
#endif
// Reference implementation of SAXPY: B := alpha * A + B.
// Assumes both vectors use an increment (stride) of 1.
void reference_saxpy(int N1, float alpha, float *A, float *B)
{
    for (int idx = 0; idx < N1; ++idx)
        B[idx] += alpha * A[idx];
}
int main(int, char **)
{
    // `a` holds the scalar alpha as a 1-element buffer; x and y are the
    // input/output vectors of y = a*x + y.
    Halide::Buffer<float> a(1, "a");
    Halide::Buffer<float> x(SIZE, "x");
    Halide::Buffer<float> y_ref(SIZE, "y_ref");
    Halide::Buffer<float> y(SIZE, "y");
    // Initialize every operand to 1 so the reference and generated results
    // are directly comparable.
    init_buffer(x, (float)1);
    init_buffer(y, (float)1);
    init_buffer(y_ref, (float)1);
    init_buffer(a, (float)1);
    // Expected result computed by the plain C++ reference kernel.
    reference_saxpy(SIZE, 1, x.data(), y_ref.data());
    // Run the Tiramisu-generated kernel on the same data.
    tiramisu_generated_code(a.raw_buffer(), x.raw_buffer(), y.raw_buffer());
    // Reports a failure for this test if the two outputs differ.
    compare_buffers("test_" + std::string(TEST_NUMBER_STR) + "_" + std::string(TEST_NAME_STR), y, y_ref);
    return 0;
}
| rbaghdadi/tiramisu | tests/wrapper_test_71.cpp | C++ | mit | 933 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.