code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9
values | license stringclasses 15
values | size int32 3 1.05M |
|---|---|---|---|---|---|
package http1
import (
"errors"
"fmt"
"io"
"strconv"
"github.com/JOTPOT-UK/JOTPOT-Server/jps/pipe"
"github.com/JOTPOT-UK/JOTPOT-Server/util"
"github.com/JOTPOT-UK/JOTPOT-Server/http"
"github.com/JOTPOT-UK/JOTPOT-Server/jps"
"github.com/JOTPOT-UK/JOTPOT-Server/http/header"
)
// Sentinel errors returned by the body writers in this file.
// Note: "exceded" is a historic misspelling kept because the error string is
// part of observable behavior.
var ErrBodyLengthExceded = errors.New("body length exceded")
var ErrNoBody = errors.New("no body")
// writerWrapper lazily prepends a generated header to the first write on an
// underlying WriteFlusher, and delegates closing to a caller-supplied Closer.
type writerWrapper struct {
	WriteFlusher util.WriteFlusher
	Closer       func() error  // invoked by Close; owns the underlying writer's teardown
	HeaderCB     func() []byte // produces the header bytes; called lazily
	HeadWritten  bool          // set once the header has been fully written
}
// writeHeader generates the header via HeaderCB and writes it in full to the
// underlying WriteFlusher, marking HeadWritten on success. A partial write is
// reported as io.ErrShortWrite.
func (ww *writerWrapper) writeHeader() error {
	head := ww.HeaderCB()
	switch written, err := ww.WriteFlusher.Write(head); {
	case err != nil:
		return fmt.Errorf("failed to write header: %w", err)
	case written != len(head):
		return fmt.Errorf("failed to write header: %w", io.ErrShortWrite)
	default:
		ww.HeadWritten = true
		return nil
	}
}
// Write writes src to the underlying WriteFlusher, prepending the generated
// header on the first call. The returned count covers only bytes of src, as
// required by the io.Writer contract.
func (ww *writerWrapper) Write(src []byte) (int, error) {
	if ww.HeadWritten {
		return ww.WriteFlusher.Write(src)
	}
	head := ww.HeaderCB()
	n, err := ww.WriteFlusher.Write(append(head, src...))
	n -= len(head)
	if n < 0 {
		// The header itself was only partially written. Fixed: previously the
		// negative count was returned directly, violating the io.Writer
		// contract (0 <= n <= len(p)); report zero bytes of src consumed and
		// surface a short-write error if the writer did not return one.
		if err == nil {
			err = fmt.Errorf("failed to write header: %w", io.ErrShortWrite)
		}
		return 0, err
	}
	// Header fully written; src may still be partial, in which case err is set.
	ww.HeadWritten = true
	return n, err
}
// Flush writes the header if it has not been sent yet, then flushes the
// underlying writer. On a header failure the flush is still attempted
// (best effort) before the error is returned.
func (ww *writerWrapper) Flush() error {
	if ww.HeadWritten {
		return ww.WriteFlusher.Flush()
	}
	if err := ww.writeHeader(); err != nil {
		ww.WriteFlusher.Flush()
		return err
	}
	return ww.WriteFlusher.Flush()
}
// Close writes the header if it has not been sent yet, then runs the Closer.
// If the header write fails, the Closer still runs (best effort) but the
// header error takes precedence.
func (ww *writerWrapper) Close() error {
	if ww.HeadWritten {
		return ww.Closer()
	}
	if err := ww.writeHeader(); err != nil {
		ww.Closer()
		return err
	}
	return ww.Closer()
}
// nilBody is the body writer for responses that must not carry a body:
// flush/close pass through to the embedded CloseFlusher, but every write
// fails with ErrNoBody.
type nilBody struct {
	util.CloseFlusher
}

// Write always fails: this response has no body.
func (_ nilBody) Write(_ []byte) (int, error) {
	return 0, ErrNoBody
}
// LimitWriteFlushCloser wraps a WriteFlushCloser and rejects writes beyond a
// fixed total byte budget with ErrBodyLengthExceded.
type LimitWriteFlushCloser struct {
	i, l int64 // i: running byte count; l: maximum allowed total
	w    jps.WriteFlushCloser
}
// Write forwards buf to the wrapped writer, truncating at the configured
// limit and returning ErrBodyLengthExceded once the limit is reached.
//
// NOTE(review): this is a VALUE receiver, so the update to w.i is lost when
// the method returns and the running total never accumulates across calls —
// this looks like a bug. Fixing it requires a pointer receiver plus storing
// *LimitWriteFlushCloser wherever the value is placed into an interface
// (see BodyWriter.Body). TODO confirm intended semantics before changing.
func (w LimitWriteFlushCloser) Write(buf []byte) (int, error) {
	w.i += int64(len(buf))
	if w.i > w.l {
		// Only the portion of buf that fits under the limit is written.
		overflow := len(buf) - int(w.i-w.l)
		w.i = w.l
		toWrite := len(buf) - overflow
		if toWrite == 0 {
			return 0, ErrBodyLengthExceded
		}
		n, err := w.w.Write(buf[:toWrite])
		if n == toWrite || err == nil {
			err = ErrBodyLengthExceded
		}
		return n, err
	}
	return w.w.Write(buf)
}
// Close closes the wrapped writer; the byte limit does not apply here.
func (w LimitWriteFlushCloser) Close() error {
	return w.w.Close()
}

// Flush flushes the wrapped writer.
func (w LimitWriteFlushCloser) Flush() error {
	return w.w.Flush()
}
// CloseCBWriteFlushCloser forwards writes/flushes to passon until closed.
// Close runs the close callback, which returns the error to report and
// whether the wrapper should now be considered closed.
type CloseCBWriteFlushCloser struct {
	closed bool
	close  func() (error, bool)
	passon jps.WriteFlushCloser
}
// Write forwards src to the wrapped writer; once closed it fails with
// io.ErrClosedPipe.
func (w *CloseCBWriteFlushCloser) Write(src []byte) (int, error) {
	if !w.closed {
		return w.passon.Write(src)
	}
	return 0, io.ErrClosedPipe
}
// Flush forwards to the wrapped writer; once closed it fails with
// io.ErrClosedPipe.
func (w *CloseCBWriteFlushCloser) Flush() error {
	if !w.closed {
		return w.passon.Flush()
	}
	return io.ErrClosedPipe
}
// Close invokes the close callback; the callback returns the error to report
// and whether the wrapper is now closed. Subsequent calls return
// io.ErrClosedPipe.
// NOTE(review): the wrapped passon writer is deliberately NOT closed here
// (see the commented-out code and TODO) — presumably the callback owns that;
// confirm before relying on passon being released.
func (w *CloseCBWriteFlushCloser) Close() (err error) {
	if w.closed {
		return io.ErrClosedPipe
	}
	//TODO: Fix properly!!!
	/*if err = w.passon.Close(); err != nil {
		return
	}*/
	err, w.closed = w.close()
	return
}
// BodyWriter manages an HTTP/1.x response body: it resolves the declared
// body length / transfer encodings from the header and lazily builds the
// matching writer chain on top of finalWriter (which emits the head first).
type BodyWriter struct {
	ses         jps.Session
	config      *http.Config
	header      *header.Header
	req         *http.Request // the request being responded to; may be nil
	hasBody     func() bool   // whether this response is allowed a body
	finalWriter writerWrapper // bottom of the chain; writes head before body
	writer      jps.WriteFlushCloser // cached result of Body()
}
// NewBodyWriter assembles a BodyWriter whose final writer emits the header
// produced by headerGenerator before any body bytes, and which invokes close
// when the body is closed.
func NewBodyWriter(
	ses jps.Session,
	config *http.Config,
	header *header.Header,
	req *http.Request,
	hasBody func() bool,
	rawWriter jps.WriteFlushCloser,
	headerGenerator func() []byte,
	close func() error,
) BodyWriter {
	bw := BodyWriter{
		ses:     ses,
		config:  config,
		header:  header,
		req:     req,
		hasBody: hasBody,
	}
	bw.finalWriter = writerWrapper{
		WriteFlusher: rawWriter,
		Closer:       close,
		HeaderCB:     headerGenerator,
	}
	return bw
}
// Session returns the session this body writer belongs to.
func (w *BodyWriter) Session() jps.Session {
	return w.ses
}
// BodyLength reports the declared body length: 0 for HEAD responses, -1 when
// no Content-Length header is present (length unknown / chunked), the parsed
// value for exactly one header, and -2 with an error for malformed or
// duplicated Content-Length headers.
func (w *BodyWriter) BodyLength() (int64, error) {
	if w.req != nil && w.req.MethodStr == "HEAD" {
		return 0, nil
	}
	lens := w.header.GetValues("Content-Length")
	switch len(lens) {
	case 0:
		return -1, nil
	case 1:
		// bitSize 63 keeps the value representable as a non-negative int64.
		parsed, err := strconv.ParseUint(lens[0], 10, 63)
		if err != nil {
			return -2, http.MakeErrMalformedContentLength(err)
		}
		return int64(parsed), nil
	default:
		return -2, http.ErrTooManyContentLength
	}
}
// SetBodyLength declares the body length in the response header.
// length >= 0 sets Content-Length and drops a trailing chunked
// transfer-encoding; length == -1 removes Content-Length and ensures
// chunked is the final transfer-encoding. Any other value panics.
func (w *BodyWriter) SetBodyLength(length int64) error {
	if length >= 0 {
		w.header.Set("Content-Length", strconv.FormatUint(uint64(length), 10))
		codes := w.header.GetValues("Transfer-Encoding")
		last := len(codes) - 1
		if last > -1 && codes[last] == "chunked" {
			// A known length makes chunked framing redundant.
			w.header.SetValues("Transfer-Encoding", codes[:last])
		}
	} else if length == -1 {
		w.header.Del("Content-Length")
		codes := w.header.GetValues("Transfer-Encoding")
		last := len(codes) - 1
		if last < 0 || codes[last] != "chunked" {
			// Fixed: the header name was previously misspelled
			// "Transfer-Encodings", so chunked was never actually enabled here.
			w.header.Add("Transfer-Encoding", "chunked")
		}
	} else {
		panic("body length cannot be less than -1")
	}
	return nil
}
// Body lazily constructs (once) and returns the writer for the response body.
// Bodyless responses get a nilBody that rejects writes; a declared
// Content-Length gets a limit-enforcing writer; otherwise the configured
// transfer-encoding pipeline (which must end in "chunked") is assembled on
// top of the final writer.
func (w *BodyWriter) Body() (jps.WriteFlushCloser, error) {
	if w.writer == nil {
		if w.hasBody() {
			//TIMER:start := time.Now()
			l, err := w.BodyLength()
			if err != nil {
				return nil, err
			}
			//TIMER:jps.HBLengthTimes = append(jps.HBLengthTimes, time.Since(start))
			//TIMER:start = time.Now()
			if l == -1 {
				// No Content-Length: the body is framed by transfer encodings.
				codes := w.header.GetValuesRawKey("Transfer-Encoding")
				lm1 := len(codes) - 1
				// "chunked" may only appear as the final encoding; the last
				// element is deliberately skipped by this scan.
				for i := 0; i < lm1; i++ {
					if codes[i] == "chunked" {
						return nil, http.ErrMustChunkLast
					}
				}
				//TODO: Close connection if chunked is not last
				pipes, ok := w.config.TransferEncodings.GetWriterPipeGenerators(codes)
				if !ok {
					return nil, http.ErrUnsupportedTransferEncoding
				}
				if w.writer, err = pipe.To(&w.finalWriter, pipes); err != nil {
					return w.writer, err
				}
			} else {
				//TIMER:jps.HBHeadWriteTimes = append(jps.HBHeadWriteTimes, time.Since(start))
				// Known length: enforce it so the handler cannot overrun.
				w.writer = LimitWriteFlushCloser{
					l: l,
					w: &w.finalWriter,
				}
			}
		} else {
			// Bodyless response (e.g. HEAD): writes are rejected.
			w.writer = nilBody{&w.finalWriter}
		}
	}
	return w.writer, nil
}
| JOTPOT-UK/JOTPOT-Server | http/http1/bodyWriter.go | GO | mit | 5,805 |
package prob547.friend.circles;
/**
* Created by yanya04 on 2/20/2018.
*/
/**
 * LeetCode 547 "Friend Circles": counts the connected components of the
 * friendship adjacency matrix using a union-find (disjoint-set) structure.
 */
public class Solution {

    /** Union-find over {0..m-1} with path halving; lower index wins as root. */
    private static class UFSet {
        private int[] parents;

        UFSet(int size) {
            parents = new int[size];
            for (int node = 0; node < size; node++) {
                parents[node] = node;
            }
        }

        /** Returns the root of p, halving the path as it walks up. */
        private int find(int p) {
            while (parents[p] != p) {
                parents[p] = parents[parents[p]];
                p = parents[p];
            }
            return p;
        }

        /** Merges the sets containing i and j; false if already joined. */
        boolean union(int i, int j) {
            int rootA = find(i);
            int rootB = find(j);
            if (rootA == rootB) {
                return false;
            }
            if (rootA < rootB) {
                parents[rootB] = rootA;
            } else {
                parents[rootA] = rootB;
            }
            return true;
        }
    }

    /** Returns the number of friend circles described by adjacency matrix M. */
    public int findCircleNum(int[][] M) {
        if (M == null || M.length == 0) {
            return 0;
        }
        int n = M.length;
        int circles = n;
        UFSet set = new UFSet(n);
        for (int row = 0; row < n; row++) {
            // The matrix is symmetric, so only the upper triangle is scanned.
            for (int col = row + 1; col < n; col++) {
                if (M[row][col] == 1 && set.union(row, col)) {
                    circles--;
                }
            }
        }
        return circles;
    }
}
package basic;
/**
* Created by bonismo
* 14/10/17 上午9:43
*/
// Demonstrates which members a subclass can access from its parent
// (ModifierDemo, defined elsewhere). NOTE(review): the parent class is not
// visible here; presumably i/i1/i3 and name/name1/name3 carry different
// access modifiers — confirm against ModifierDemo.
public class SubModifierDemo extends ModifierDemo {
    public SubModifierDemo() {
    }
    public static void main(String[] args) {
        SubModifierDemo subModifierDemo = new SubModifierDemo();
        System.out.println(subModifierDemo.i);
        System.out.println(subModifierDemo.i1);
        // private: even though the subclass inherits from the parent, it still
        // cannot access the parent's private fields or methods
        System.out.println(subModifierDemo.i3);
        System.out.println(subModifierDemo.name);
        System.out.println(subModifierDemo.name1);
        System.out.println(subModifierDemo.name3);
    }
}
| StayHungryStayFoolish/stayhungrystayfoolish.github.com | JavaSE/src/main/java/basic/SubModifierDemo.java | Java | mit | 662 |
# -*- coding: utf-8 -*-
import wx
import win32clipboard
import win32con
import gui
import treeInterceptorHandler
import textInfos
import globalVars
def getSelectedText():
	"""Return the text currently selected in the focused NVDA object.

	When the focus object has an active browse-mode document interceptor
	(and it is not in pass-through mode), the selection is taken from the
	interceptor instead. Returns None when there is no selection or the
	object cannot produce a text info.
	"""
	obj = globalVars.focusObject
	if isinstance(obj.treeInterceptor, treeInterceptorHandler.DocumentTreeInterceptor) and not obj.treeInterceptor.passThrough:
		obj = obj.treeInterceptor
	try:
		info = obj.makeTextInfo(textInfos.POSITION_SELECTION)
	except (RuntimeError, NotImplementedError):
		info = None
	if not info or info.isCollapsed:
		return None
	return info.text
def getClipboardText():
	"""Return the Windows clipboard contents as unicode text, or None if the
	clipboard cannot be opened or holds no unicode text."""
	try:
		win32clipboard.OpenClipboard()
	except win32clipboard.error:
		return None
	try:
		# Best effort: any failure (wrong format, empty clipboard) yields None.
		return win32clipboard.GetClipboardData(win32con.CF_UNICODETEXT)
	except:
		return None
	finally:
		win32clipboard.CloseClipboard()
def setClipboardText(text):
	"""Place unicode text on the Windows clipboard.

	Rejects non-unicode, empty, or whitespace-only input. Returns True on
	success, False otherwise.
	"""
	if not isinstance(text, unicode) or len(text)==0 or text.isspace():
		return False
	try:
		win32clipboard.OpenClipboard()
	except win32clipboard.error:
		return False
	try:
		win32clipboard.EmptyClipboard()
		win32clipboard.SetClipboardData(win32con.CF_UNICODETEXT, text)
	except:
		# Clipboard could not be written; close it and report failure.
		win32clipboard.CloseClipboard()
		return False
	win32clipboard.CloseClipboard()
	return True
class TextWindow(wx.Frame):
	"""Maximized frame displaying text in a (by default read-only) multi-line
	control; pressing Escape closes the window."""

	def __init__(self, text, title, readOnly=True):
		super(TextWindow, self).__init__(gui.mainFrame, title=title)
		sizer = wx.BoxSizer(wx.VERTICAL)
		style = wx.TE_MULTILINE | wx.TE_RICH
		if readOnly:
			style |= wx.TE_READONLY
		self.outputCtrl = wx.TextCtrl(self, style=style)
		self.outputCtrl.Bind(wx.EVT_KEY_DOWN, self.onOutputKeyDown)
		sizer.Add(self.outputCtrl, proportion=1, flag=wx.EXPAND)
		self.SetSizer(sizer)
		sizer.Fit(self)
		self.outputCtrl.SetValue(text)
		self.outputCtrl.SetFocus()
		self.Raise()
		self.Maximize()
		self.Show()

	def onOutputKeyDown(self, event):
		# Escape closes the window; all other keys pass through.
		if event.GetKeyCode() == wx.WXK_ESCAPE:
			self.Close()
		event.Skip()
| kvark128/yandexTranslate | globalPlugins/yandexTranslate/helper.py | Python | mit | 1,890 |
package get
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"fmt"
"io"
"github.com/go-openapi/runtime"
strfmt "github.com/go-openapi/strfmt"
"github.com/codedellemc/gorackhd/models"
)
// GetPollersReader is a Reader for the GetPollers structure.
// (Generated swagger client code — regenerate rather than hand-edit.)
type GetPollersReader struct {
	formats strfmt.Registry
}
// ReadResponse reads a server response into the received o.
func (o *GetPollersReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {
	switch response.Code() {
	case 200:
		// Success: decode the list of pollers.
		result := NewGetPollersOK()
		if err := result.readResponse(response, consumer, o.formats); err != nil {
			return nil, err
		}
		return result, nil
	default:
		// Any other status: decode into the generic error envelope and return
		// it as the error.
		result := NewGetPollersDefault(response.Code())
		if err := result.readResponse(response, consumer, o.formats); err != nil {
			return nil, err
		}
		return nil, result
	}
}
// NewGetPollersOK creates a GetPollersOK with default headers values
func NewGetPollersOK() *GetPollersOK {
	return &GetPollersOK{}
}

/*GetPollersOK handles this case with default header values.

list of all pollers
*/
type GetPollersOK struct {
	Payload []interface{} // decoded JSON array of poller objects
}

func (o *GetPollersOK) Error() string {
	return fmt.Sprintf("[GET /pollers][%d] getPollersOK %+v", 200, o.Payload)
}

// readResponse decodes the response body into Payload; io.EOF (empty body)
// is tolerated.
func (o *GetPollersOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
	// response payload
	if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF {
		return err
	}
	return nil
}
// NewGetPollersDefault creates a GetPollersDefault with default headers values
func NewGetPollersDefault(code int) *GetPollersDefault {
	return &GetPollersDefault{
		_statusCode: code,
	}
}

/*GetPollersDefault handles this case with default header values.

Unexpected error
*/
type GetPollersDefault struct {
	_statusCode int // HTTP status code of the unexpected response

	Payload *models.Error
}

// Code gets the status code for the get pollers default response
func (o *GetPollersDefault) Code() int {
	return o._statusCode
}

func (o *GetPollersDefault) Error() string {
	return fmt.Sprintf("[GET /pollers][%d] GetPollers default %+v", o._statusCode, o.Payload)
}

// readResponse decodes the error envelope; io.EOF (empty body) is tolerated.
func (o *GetPollersDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
	o.Payload = new(models.Error)

	// response payload
	if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
		return err
	}
	return nil
}
| emccode/gorackhd | client/get/get_pollers_responses.go | GO | mit | 2,569 |
/*
* Copyright (C) 2010-2015 FBReader.ORG Limited <contact@fbreader.org>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
package org.geometerplus.android.fbreader;
import android.content.Intent;
import android.content.ActivityNotFoundException;
import android.net.Uri;
import org.geometerplus.zlibrary.core.resources.ZLResource;
import org.geometerplus.fbreader.fbreader.FBReaderApp;
/**
 * Action that opens the FBReader FAQ web page in an external browser.
 * The activity launch is posted to the UI thread from a short-lived
 * background thread; a missing browser is logged and otherwise ignored.
 */
class OpenWebHelpAction extends FBAndroidAction {
	OpenWebHelpAction(FBReader baseActivity, FBReaderApp fbreader) {
		super(baseActivity, fbreader);
	}

	@Override
	protected void run(Object ... params) {
		final String faqUrl = ZLResource.resource("links").getResource("faqPage").getValue();
		final Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse(faqUrl));
		final Runnable launcher = new Runnable() {
			public void run() {
				try {
					BaseActivity.startActivity(browserIntent);
				} catch (ActivityNotFoundException e) {
					e.printStackTrace();
				}
			}
		};
		new Thread(new Runnable() {
			public void run() {
				BaseActivity.runOnUiThread(launcher);
			}
		}).start();
	}
}
| zhqhzhqh/FbreaderJ | app/src/main/java/org/geometerplus/android/fbreader/OpenWebHelpAction.java | Java | mit | 1,734 |
namespace DCSExt.Xml.Tests.TestClasses.WithoutAttributes {
    /// <summary>
    /// Serialization fixture without DataContract attributes: two private
    /// properties and a public field, plus helpers yielding the expected XML
    /// in declaration order and in a shuffled order.
    /// </summary>
    public class TestCase10 {
        private string Prop1 { get; set; }
        private int Prop2 { get; set; }
        public char Field1;

        public string GetProp1() {
            return Prop1;
        }

        public int GetProp2() {
            return Prop2;
        }

        /// <summary>Expected XML with members in declaration order.</summary>
        public static string OrderedXml() {
            return "<TestCase10 xmlns:i=\"http://www.w3.org/2001/XMLSchema-instance\" xmlns=\"http://schemas.datacontract.org/2004/07/DCSExt.Xml.Tests.TestClasses.WithoutAttributes\">"
                + "<Prop1>Prop1</Prop1>"
                + "<Prop2>4</Prop2>"
                + "<Field1>99</Field1>" + // 99 = 'c'
                "</TestCase10>";
        }

        /// <summary>Expected XML with members out of declaration order.</summary>
        public static string UnorderedXml() {
            return "<TestCase10 xmlns:i=\"http://www.w3.org/2001/XMLSchema-instance\" xmlns=\"http://schemas.datacontract.org/2004/07/DCSExt.Xml.Tests.TestClasses.WithoutAttributes\">"
                + "<Field1>99</Field1>" // 99 = 'c'
                + "<Prop2>4</Prop2>"
                + "<Prop1>Prop1</Prop1>" +
                "</TestCase10>";
        }
    }
} | StefanOssendorf/DCSExt | test/DCSExt.Xml.Tests/TestClasses/WithoutAttributes/TestCase10.cs | C# | mit | 1,221 |
/**
* CS 240: Introduction to Data Structures
* Professor: Edwin Rodríguez
*
* Programming Assignment #5
*
* Assignment #5 is meant to grant a thorough understanding of the hash table
* data structure, the hash functions used by hash tables, as well as the stack
* and queue data structures. Expected is an implementation of a hash table able
* to utilize different hash functions, an implementation of a 'scrambled' version
* of rotational hashing using a stack and a queue, and a simple program which
* uses a hash table as a registry of student records.
*
* Carlos Marquez
*/
package edu.cpp.cs.cs240.prog_assgnmnt_5;
public class Student implements Comparable<Student> {
private String ID;
private String name;
private String grade;
Student(String ID, String name, String grade) {
this.ID = ID;
this.name = name;
this.grade = grade;
}
public String getID() {
return this.ID;
}
public String getKey() {
return this.ID;
}
public String getName() {
return this.name;
}
public String getGrade() {
return this.grade;
}
@Override
public boolean equals(Object o) {
if (o.getClass() != String.class) { //if o is not a String (key not passed)
Student s = (Student) o;
if ((this.ID.equals(s.getID())) && (this.name.equals(s.getName())) && (this.grade.equals(s.getGrade()))) {
return true;
}
}
if (o.getClass() == String.class) { //a key was passed as argument
if (this.getID().equals(o)) {
return true;
}
}
return false;
}
@Override
public int compareTo(Student s) {
String ID1 = this.ID;
String ID2 = s.ID;
return ID1.compareTo(ID2);
}
} | noveris/HashTableDemo | HashTableDemo/src/edu/cpp/cs/cs240/prog_assgnmnt_5/Student.java | Java | mit | 1,717 |
package controlador;
import java.util.ArrayList;
import java.util.HashMap;
import modelo.Palavras;
import modelo.Texto;
import persistencia.PalavrasDAO;
import persistencia.TextoDAO;
import spark.ModelAndView;
import spark.Request;
import spark.Response;
import spark.TemplateViewRoute;
public class ListaControlador implements TemplateViewRoute {
private PalavrasDAO dao = new PalavrasDAO();
public ModelAndView handle(Request req, Response resp) {
ArrayList<Palavras> sinonimos = dao.findAll();
HashMap mapa = new HashMap();
mapa.put("sinonimos", sinonimos);
return new ModelAndView(mapa, "lista.html");
}
}
| Thaylles/Projeto_Corretor | src/controlador/ListaControlador.java | Java | mit | 630 |
#include <cpp3ds/Config.hpp>
#include <map>
namespace cpp3ds {
namespace priv {

// Descriptor for an embedded (compiled-in) resource: a pointer into static
// data plus its length in bytes.
struct ResourceInfo {
	ResourceInfo() : data(nullptr), size(0) { }
	ResourceInfo(const Uint8 *x, const Uint32 y) : data(x), size(y) { }
	const Uint8 *data;
	const Uint32 size;
};

// Defined by source file generated by res_compile.py
extern std::map<std::string, ResourceInfo> core_resources;

} // namespace priv
} // namespace cpp3ds | cpp3ds/cpp3ds | include/cpp3ds/Resources.hpp | C++ | mit | 495 |
// Copyright (c) 2009-2010 Satoshi Nakamoto
// Copyright (c) 2009-2012 The Bitcoin developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "irc.h"
#include "db.h"
#include "net.h"
#include "init.h"
#include "addrman.h"
#include "ui_interface.h"
#include "script.h"
#ifdef WIN32
#include <string.h>
#endif
#ifdef USE_UPNP
#include <miniupnpc/miniwget.h>
#include <miniupnpc/miniupnpc.h>
#include <miniupnpc/upnpcommands.h>
#include <miniupnpc/upnperrors.h>
#endif
// Dump addresses to peers.dat every 15 minutes (900s)
#define DUMP_ADDRESSES_INTERVAL 900
using namespace std;
using namespace boost;
static const int MAX_OUTBOUND_CONNECTIONS = 8;

bool OpenNetworkConnection(const CAddress& addrConnect, CSemaphoreGrant *grantOutbound = NULL, const char *strDest = NULL, bool fOneShot = false);

// Score/port pair tracked per known local address; higher score means
// preferred when advertising ourselves.
struct LocalServiceInfo {
    int nScore;
    int nPort;
};

//
// Global state variables
//
bool fDiscover = true;
uint64 nLocalServices = NODE_NETWORK;
static CCriticalSection cs_mapLocalHost;
static map<CNetAddr, LocalServiceInfo> mapLocalHost;  // our own addresses; guarded by cs_mapLocalHost
static bool vfReachable[NET_MAX] = {};                // networks we believe we can reach
static bool vfLimited[NET_MAX] = {};                  // networks explicitly made off-limits
static CNode* pnodeLocalHost = NULL;
static CNode* pnodeSync = NULL;                       // current block-download sync peer
uint64 nLocalHostNonce = 0;
static std::vector<SOCKET> vhListenSocket;
CAddrMan addrman;
int nMaxConnections = 125;

vector<CNode*> vNodes;                                // all connected nodes; guarded by cs_vNodes
CCriticalSection cs_vNodes;
map<CInv, CDataStream> mapRelay;
deque<pair<int64, CInv> > vRelayExpiration;
CCriticalSection cs_mapRelay;
limitedmap<CInv, int64> mapAlreadyAskedFor(MAX_INV_SZ);

static deque<string> vOneShots;                       // one-shot connect targets; guarded by cs_vOneShots
CCriticalSection cs_vOneShots;

set<CNetAddr> setservAddNodeAddresses;
CCriticalSection cs_setservAddNodeAddresses;

vector<std::string> vAddedNodes;                      // -addnode targets; guarded by cs_vAddedNodes
CCriticalSection cs_vAddedNodes;

static CSemaphore *semOutbound = NULL;                // bounds concurrent outbound connections
// Queue a host (name or address string) for a single outbound connection
// attempt by the connection-opening thread.
void AddOneShot(string strDest)
{
    LOCK(cs_vOneShots);
    vOneShots.push_back(strDest);
}

// Port we listen on: the -port argument, or the network default.
unsigned short GetListenPort()
{
    return (unsigned short)(GetArg("-port", GetDefaultPort()));
}

// Ask this peer for blocks starting after pindexBegin up to hashEnd,
// suppressing back-to-back duplicate requests for the same range.
void CNode::PushGetBlocks(CBlockIndex* pindexBegin, uint256 hashEnd)
{
    // Filter out duplicate requests
    if (pindexBegin == pindexLastGetBlocksBegin && hashEnd == hashLastGetBlocksEnd)
        return;
    pindexLastGetBlocksBegin = pindexBegin;
    hashLastGetBlocksEnd = hashEnd;
    PushMessage("getblocks", CBlockLocator(pindexBegin), hashEnd);
}
// find 'best' local address for a particular peer
// find 'best' local address for a particular peer
// Chooses the local address with the highest reachability class relative to
// paddrPeer, breaking ties by score. Returns false when listening is off or
// no address is known.
bool GetLocal(CService& addr, const CNetAddr *paddrPeer)
{
    if (fNoListen)
        return false;

    int nBestScore = -1;
    int nBestReachability = -1;
    {
        LOCK(cs_mapLocalHost);
        for (map<CNetAddr, LocalServiceInfo>::iterator it = mapLocalHost.begin(); it != mapLocalHost.end(); it++)
        {
            int nScore = (*it).second.nScore;
            int nReachability = (*it).first.GetReachabilityFrom(paddrPeer);
            // Reachability dominates; score is the tie-breaker.
            if (nReachability > nBestReachability || (nReachability == nBestReachability && nScore > nBestScore))
            {
                addr = CService((*it).first, (*it).second.nPort);
                nBestReachability = nReachability;
                nBestScore = nScore;
            }
        }
    }
    return nBestScore >= 0;
}
// get best local address for a particular peer as a CAddress
CAddress GetLocalAddress(const CNetAddr *paddrPeer)
{
CAddress ret(CService("0.0.0.0",0),0);
CService addr;
if (GetLocal(addr, paddrPeer))
{
ret = CAddress(addr);
ret.nServices = nLocalServices;
ret.nTime = GetAdjustedTime();
}
return ret;
}
// Read one CR-terminated line from hSocket into strLine, one byte at a time.
// LF bytes are skipped. Returns true on a complete line (also when the line
// hits the 9000-byte safety cap, or when the connection ends mid-line with
// data buffered); false on a clean close or socket error with no data.
bool RecvLine(SOCKET hSocket, string& strLine)
{
    strLine = "";
    loop
    {
        char c;
        int nBytes = recv(hSocket, &c, 1, 0);
        if (nBytes > 0)
        {
            if (c == '\n')
                continue;
            if (c == '\r')
                return true;
            strLine += c;
            if (strLine.size() >= 9000)
                return true; // cap runaway lines
        }
        else if (nBytes <= 0)
        {
            boost::this_thread::interruption_point();
            if (nBytes < 0)
            {
                int nErr = WSAGetLastError();
                if (nErr == WSAEMSGSIZE)
                    continue;
                if (nErr == WSAEWOULDBLOCK || nErr == WSAEINTR || nErr == WSAEINPROGRESS)
                {
                    // Transient condition on a non-blocking socket: back off
                    // briefly and retry.
                    MilliSleep(10);
                    continue;
                }
            }
            if (!strLine.empty())
                return true; // connection ended mid-line; return what we have
            if (nBytes == 0)
            {
                // socket closed
                printf("socket closed\n");
                return false;
            }
            else
            {
                // socket error
                int nErr = WSAGetLastError();
                printf("recv failed: %d\n", nErr);
                return false;
            }
        }
    }
}
// used when scores of local addresses may have changed
// pushes better local address to peers
// used when scores of local addresses may have changed
// pushes better local address to peers
void static AdvertizeLocal()
{
    LOCK(cs_vNodes);
    BOOST_FOREACH(CNode* pnode, vNodes)
    {
        if (pnode->fSuccessfullyConnected)
        {
            CAddress addrLocal = GetLocalAddress(&pnode->addr);
            if (addrLocal.IsRoutable() && (CService)addrLocal != (CService)pnode->addrLocal)
            {
                // Our preferred address for this peer changed: re-advertise.
                pnode->PushAddress(addrLocal);
                pnode->addrLocal = addrLocal;
            }
        }
    }
}

// Mark a network class reachable/unreachable. Marking IPv6 reachable also
// marks IPv4 reachable.
void SetReachable(enum Network net, bool fFlag)
{
    LOCK(cs_mapLocalHost);
    vfReachable[net] = fFlag;
    if (net == NET_IPV6 && fFlag)
        vfReachable[NET_IPV4] = true;
}
// learn a new local address
// learn a new local address
// Records addr as one of our own with the given score. Rejects unroutable
// addresses, limited networks, and — when discovery is off — anything below
// LOCAL_MANUAL. Re-adding an address bumps its score. On success the address
// is advertised to connected peers.
bool AddLocal(const CService& addr, int nScore)
{
    if (!addr.IsRoutable())
        return false;

    if (!fDiscover && nScore < LOCAL_MANUAL)
        return false;

    if (IsLimited(addr))
        return false;

    printf("AddLocal(%s,%i)\n", addr.ToString().c_str(), nScore);

    {
        LOCK(cs_mapLocalHost);
        bool fAlready = mapLocalHost.count(addr) > 0;
        LocalServiceInfo &info = mapLocalHost[addr];
        if (!fAlready || nScore >= info.nScore) {
            // Seeing the same address again increases confidence in it.
            info.nScore = nScore + (fAlready ? 1 : 0);
            info.nPort = addr.GetPort();
        }
        SetReachable(addr.GetNetwork());
    }

    AdvertizeLocal();

    return true;
}

// Convenience overload: pairs addr with our listen port.
bool AddLocal(const CNetAddr &addr, int nScore)
{
    return AddLocal(CService(addr, GetListenPort()), nScore);
}
/** Make a particular network entirely off-limits (no automatic connects to it) */
void SetLimited(enum Network net, bool fLimited)
{
if (net == NET_UNROUTABLE)
return;
LOCK(cs_mapLocalHost);
vfLimited[net] = fLimited;
}
bool IsLimited(enum Network net)
{
LOCK(cs_mapLocalHost);
return vfLimited[net];
}
bool IsLimited(const CNetAddr &addr)
{
return IsLimited(addr.GetNetwork());
}
/** vote for a local address */
bool SeenLocal(const CService& addr)
{
{
LOCK(cs_mapLocalHost);
if (mapLocalHost.count(addr) == 0)
return false;
mapLocalHost[addr].nScore++;
}
AdvertizeLocal();
return true;
}
/** check whether a given address is potentially local */
bool IsLocal(const CService& addr)
{
LOCK(cs_mapLocalHost);
return mapLocalHost.count(addr) > 0;
}
/** check whether a given address is in a network we can probably connect to */
bool IsReachable(const CNetAddr& addr)
{
LOCK(cs_mapLocalHost);
enum Network net = addr.GetNetwork();
return vfReachable[net] && !vfLimited[net];
}
// Connect to addrConnect, send the raw HTTP request pszGet, skip the response
// headers (up to the first blank line), then scan body lines for pszKeyword
// and parse the text after it as our external address. pszKeyword == NULL
// means the first body line IS the address. On success stores the parsed,
// routable address in ipRet.
bool GetMyExternalIP2(const CService& addrConnect, const char* pszGet, const char* pszKeyword, CNetAddr& ipRet)
{
    SOCKET hSocket;
    if (!ConnectSocket(addrConnect, hSocket))
        return error("GetMyExternalIP() : connection to %s failed", addrConnect.ToString().c_str());

    send(hSocket, pszGet, strlen(pszGet), MSG_NOSIGNAL);

    string strLine;
    while (RecvLine(hSocket, strLine))
    {
        if (strLine.empty()) // HTTP response is separated from headers by blank line
        {
            loop
            {
                if (!RecvLine(hSocket, strLine))
                {
                    closesocket(hSocket);
                    return false;
                }
                if (pszKeyword == NULL)
                    break;
                if (strLine.find(pszKeyword) != string::npos)
                {
                    // Keep only the text following the keyword.
                    strLine = strLine.substr(strLine.find(pszKeyword) + strlen(pszKeyword));
                    break;
                }
            }
            closesocket(hSocket);
            // Strip trailing markup, then surrounding whitespace.
            if (strLine.find("<") != string::npos)
                strLine = strLine.substr(0, strLine.find("<"));
            strLine = strLine.substr(strspn(strLine.c_str(), " \t\n\r"));
            while (strLine.size() > 0 && isspace(strLine[strLine.size()-1]))
                strLine.resize(strLine.size()-1);
            CService addr(strLine,0,true);
            printf("GetMyExternalIP() received [%s] %s\n", strLine.c_str(), addr.ToString().c_str());
            if (!addr.IsValid() || !addr.IsRoutable())
                return false;
            ipRet.SetIP(addr);
            return true;
        }
    }
    closesocket(hSocket);
    return error("GetMyExternalIP() : connection closed");
}
// Discover our external IP by querying public "what is my IP" web services.
// Tries each host first by hard-coded IP, then by DNS lookup, stopping at the
// first success.
bool GetMyExternalIP(CNetAddr& ipRet)
{
    CService addrConnect;
    const char* pszGet;
    const char* pszKeyword;

    for (int nLookup = 0; nLookup <= 1; nLookup++)
    for (int nHost = 1; nHost <= 2; nHost++)
    {
        // We should be phasing out our use of sites like these. If we need
        // replacements, we should ask for volunteers to put this simple
        // php file on their web server that prints the client IP:
        //  <?php echo $_SERVER["REMOTE_ADDR"]; ?>
        if (nHost == 1)
        {
            addrConnect = CService("91.198.22.70", 80); // checkip.dyndns.org

            if (nLookup == 1)
            {
                // Second pass: resolve the hostname instead of the fixed IP.
                CService addrIP("checkip.dyndns.org", 80, true);
                if (addrIP.IsValid())
                    addrConnect = addrIP;
            }

            pszGet = "GET / HTTP/1.1\r\n"
                     "Host: checkip.dyndns.org\r\n"
                     "User-Agent: Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)\r\n"
                     "Connection: close\r\n"
                     "\r\n";

            pszKeyword = "Address:";
        }
        else if (nHost == 2)
        {
            addrConnect = CService("74.208.43.192", 80); // www.showmyip.com

            if (nLookup == 1)
            {
                CService addrIP("www.showmyip.com", 80, true);
                if (addrIP.IsValid())
                    addrConnect = addrIP;
            }

            pszGet = "GET /simple/ HTTP/1.1\r\n"
                     "Host: www.showmyip.com\r\n"
                     "User-Agent: Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)\r\n"
                     "Connection: close\r\n"
                     "\r\n";

            pszKeyword = NULL; // Returns just IP address
        }

        if (GetMyExternalIP2(addrConnect, pszGet, pszKeyword, ipRet))
            return true;
    }

    return false;
}
// Thread entry point: detect our external IP over HTTP and record it as a
// local address with LOCAL_HTTP score.
void ThreadGetMyExternalIP(void* parg)
{
    // Make this thread recognisable as the external IP detection thread
    RenameThread("bitcoin-ext-ip");

    CNetAddr addrLocalHost;
    if (GetMyExternalIP(addrLocalHost))
    {
        printf("GetMyExternalIP() returned %s\n", addrLocalHost.ToStringIP().c_str());
        AddLocal(addrLocalHost, LOCAL_HTTP);
    }
}

// Note a successful interaction with this peer address in the address manager.
void AddressCurrentlyConnected(const CService& addr)
{
    addrman.Connected(addr);
}
// Find a connected node by IP (ignoring port); NULL if not connected.
CNode* FindNode(const CNetAddr& ip)
{
    LOCK(cs_vNodes);
    BOOST_FOREACH(CNode* pnode, vNodes)
        if ((CNetAddr)pnode->addr == ip)
            return (pnode);
    return NULL;
}

// Find a connected node by the name/address string it was connected with.
CNode* FindNode(std::string addrName)
{
    LOCK(cs_vNodes);
    BOOST_FOREACH(CNode* pnode, vNodes)
        if (pnode->addrName == addrName)
            return (pnode);
    return NULL;
}

// Find a connected node by full address (IP and port); NULL if not connected.
CNode* FindNode(const CService& addr)
{
    LOCK(cs_vNodes);
    BOOST_FOREACH(CNode* pnode, vNodes)
        if ((CService)pnode->addr == addr)
            return (pnode);
    return NULL;
}
// Connect to a peer (by address, or by name when pszDest is given), reusing
// an existing connection to the same address when possible. Returns the node
// with an extra reference held, or NULL on failure. Never connects to
// ourselves when given a plain address.
CNode* ConnectNode(CAddress addrConnect, const char *pszDest)
{
    if (pszDest == NULL) {
        if (IsLocal(addrConnect))
            return NULL;

        // Look for an existing connection
        CNode* pnode = FindNode((CService)addrConnect);
        if (pnode)
        {
            pnode->AddRef();
            return pnode;
        }
    }

    /// debug print
    printf("trying connection %s lastseen=%.1fhrs\n",
        pszDest ? pszDest : addrConnect.ToString().c_str(),
        pszDest ? 0 : (double)(GetAdjustedTime() - addrConnect.nTime)/3600.0);

    // Connect
    SOCKET hSocket;
    if (pszDest ? ConnectSocketByName(addrConnect, hSocket, pszDest, GetDefaultPort()) : ConnectSocket(addrConnect, hSocket))
    {
        // Record the attempt so addrman can age this address.
        addrman.Attempt(addrConnect);

        /// debug print
        printf("connected %s\n", pszDest ? pszDest : addrConnect.ToString().c_str());

        // Set to non-blocking
#ifdef WIN32
        u_long nOne = 1;
        if (ioctlsocket(hSocket, FIONBIO, &nOne) == SOCKET_ERROR)
            printf("ConnectSocket() : ioctlsocket non-blocking setting failed, error %d\n", WSAGetLastError());
#else
        if (fcntl(hSocket, F_SETFL, O_NONBLOCK) == SOCKET_ERROR)
            printf("ConnectSocket() : fcntl non-blocking setting failed, error %d\n", errno);
#endif

        // Add node
        CNode* pnode = new CNode(hSocket, addrConnect, pszDest ? pszDest : "", false);
        pnode->AddRef();

        {
            LOCK(cs_vNodes);
            vNodes.push_back(pnode);
        }

        pnode->nTimeConnected = GetTime();
        return pnode;
    }
    else
    {
        return NULL;
    }
}
// Mark this node for disconnection, close its socket, drop any pending
// received messages, and clear the sync-node designation if it held it.
void CNode::CloseSocketDisconnect()
{
    fDisconnect = true;
    if (hSocket != INVALID_SOCKET)
    {
        printf("disconnecting node %s\n", addrName.c_str());
        closesocket(hSocket);
        hSocket = INVALID_SOCKET;
    }

    // in case this fails, we'll empty the recv buffer when the CNode is deleted
    TRY_LOCK(cs_vRecvMsg, lockRecv);
    if (lockRecv)
        vRecvMsg.clear();

    // if this was the sync node, we'll need a new one
    if (this == pnodeSync)
        pnodeSync = NULL;
}

// Intentionally empty; kept as a disconnection hook.
void CNode::Cleanup()
{
}

// Send our "version" handshake message to this peer, with a fresh random
// nonce used to detect connecting to ourselves.
void CNode::PushVersion()
{
    /// when NTP implemented, change to just nTime = GetAdjustedTime()
    int64 nTime = (fInbound ? GetAdjustedTime() : GetTime());
    CAddress addrYou = (addr.IsRoutable() && !IsProxy(addr) ? addr : CAddress(CService("0.0.0.0",0)));
    CAddress addrMe = GetLocalAddress(&addr);
    RAND_bytes((unsigned char*)&nLocalHostNonce, sizeof(nLocalHostNonce));
    printf("send version message: version %d, blocks=%d, us=%s, them=%s, peer=%s\n", PROTOCOL_VERSION, nBestHeight, addrMe.ToString().c_str(), addrYou.ToString().c_str(), addr.ToString().c_str());
    PushMessage("version", PROTOCOL_VERSION, nLocalServices, nTime, addrYou, addrMe,
                nLocalHostNonce, FormatSubVersion(CLIENT_NAME, CLIENT_VERSION, std::vector<string>()), nBestHeight);
}
// Map of banned addresses to ban-expiry times; guarded by cs_setBanned.
std::map<CNetAddr, int64> CNode::setBanned;
CCriticalSection CNode::cs_setBanned;

// Forget all bans.
void CNode::ClearBanned()
{
    setBanned.clear();
}

// Whether ip is currently banned (ban present and not yet expired).
bool CNode::IsBanned(CNetAddr ip)
{
    bool fResult = false;
    {
        LOCK(cs_setBanned);
        std::map<CNetAddr, int64>::iterator i = setBanned.find(ip);
        if (i != setBanned.end())
        {
            int64 t = (*i).second;
            if (GetTime() < t)
                fResult = true;
        }
    }
    return fResult;
}

// Add howmuch to this peer's misbehavior score. Once the score reaches
// -banscore (default 100), the peer is banned for -bantime (default 24h) and
// disconnected; returns true in that case. Local peers are never banned.
bool CNode::Misbehaving(int howmuch)
{
    if (addr.IsLocal())
    {
        printf("Warning: Local node %s misbehaving (delta: %d)!\n", addrName.c_str(), howmuch);
        return false;
    }

    nMisbehavior += howmuch;
    if (nMisbehavior >= GetArg("-banscore", 100))
    {
        int64 banTime = GetTime()+GetArg("-bantime", 60*60*24);  // Default 24-hour ban
        printf("Misbehaving: %s (%d -> %d) DISCONNECTING\n", addr.ToString().c_str(), nMisbehavior-howmuch, nMisbehavior);
        {
            LOCK(cs_setBanned);
            if (setBanned[addr] < banTime)
                setBanned[addr] = banTime;
        }
        CloseSocketDisconnect();
        return true;
    } else
        printf("Misbehaving: %s (%d -> %d)\n", addr.ToString().c_str(), nMisbehavior-howmuch, nMisbehavior);
    return false;
}
#undef X
#define X(name) stats.name = name
// Snapshot this node's statistics into stats; the X macro copies each
// same-named member across.
void CNode::copyStats(CNodeStats &stats)
{
    X(nServices);
    X(nLastSend);
    X(nLastRecv);
    X(nTimeConnected);
    X(addrName);
    X(nVersion);
    X(cleanSubVer);
    X(fInbound);
    X(nStartingHeight);
    X(nMisbehavior);
    X(nSendBytes);
    X(nRecvBytes);
    X(nBlocksRequested);
    stats.fSyncNode = (this == pnodeSync);
}
#undef X
// requires LOCK(cs_vRecvMsg)
// Absorb nBytes of raw network data into the incoming message queue,
// starting a new CNetMessage whenever the previous one completes. Returns
// false on a malformed or oversized message header.
bool CNode::ReceiveMsgBytes(const char *pch, unsigned int nBytes)
{
    while (nBytes > 0) {

        // get current incomplete message, or create a new one
        if (vRecvMsg.empty() ||
            vRecvMsg.back().complete())
            vRecvMsg.push_back(CNetMessage(SER_NETWORK, nRecvVersion));

        CNetMessage& msg = vRecvMsg.back();

        // absorb network data
        int handled;
        if (!msg.in_data)
            handled = msg.readHeader(pch, nBytes);
        else
            handled = msg.readData(pch, nBytes);

        if (handled < 0)
            return false;

        pch += handled;
        nBytes -= handled;
    }

    return true;
}

// Accumulate bytes of the fixed 24-byte message header; once complete,
// deserialize it and validate the declared payload size, then switch to
// reading message data. Returns bytes consumed, or -1 on a bad header.
int CNetMessage::readHeader(const char *pch, unsigned int nBytes)
{
    // copy data to temporary parsing buffer
    unsigned int nRemaining = 24 - nHdrPos;
    unsigned int nCopy = std::min(nRemaining, nBytes);

    memcpy(&hdrbuf[nHdrPos], pch, nCopy);
    nHdrPos += nCopy;

    // if header incomplete, exit
    if (nHdrPos < 24)
        return nCopy;

    // deserialize to CMessageHeader
    try {
        hdrbuf >> hdr;
    }
    catch (std::exception &e) {
        return -1;
    }

    // reject messages larger than MAX_SIZE
    if (hdr.nMessageSize > MAX_SIZE)
        return -1;

    // switch state to reading message data
    in_data = true;
    vRecv.resize(hdr.nMessageSize);

    return nCopy;
}
// Consume payload bytes for the message body. Copies at most the number of
// bytes still missing from the announced payload, advances the write
// cursor, and returns how many input bytes were consumed.
int CNetMessage::readData(const char *pch, unsigned int nBytes)
{
    const unsigned int nWanted = hdr.nMessageSize - nDataPos;
    const unsigned int nTake = (nBytes < nWanted) ? nBytes : nWanted;

    memcpy(&vRecv[nDataPos], pch, nTake);
    nDataPos += nTake;

    return nTake;
}
// requires LOCK(cs_vSend)
// Drain as much of the node's queued outgoing data as the socket will take
// without blocking. Partially-sent messages are resumed via nSendOffset on
// the next call. Fully-sent messages are erased from the queue at the end.
void SocketSendData(CNode *pnode)
{
    std::deque<CSerializeData>::iterator it = pnode->vSendMsg.begin();

    while (it != pnode->vSendMsg.end()) {
        const CSerializeData &data = *it;
        // nSendOffset points into the front message only; it must never
        // reach the message length here (fully-sent messages are advanced
        // past below).
        assert(data.size() > pnode->nSendOffset);
        int nBytes = send(pnode->hSocket, &data[pnode->nSendOffset], data.size() - pnode->nSendOffset, MSG_NOSIGNAL | MSG_DONTWAIT);
        if (nBytes > 0) {
            pnode->nLastSend = GetTime();
            pnode->nSendBytes += nBytes;
            pnode->nSendOffset += nBytes;
            if (pnode->nSendOffset == data.size()) {
                pnode->nSendOffset = 0;
                pnode->nSendSize -= data.size();
                it++;
            } else {
                // could not send full message; stop sending more
                break;
            }
        } else {
            if (nBytes < 0) {
                // error (transient EWOULDBLOCK-style errors are tolerated;
                // anything else disconnects the peer)
                int nErr = WSAGetLastError();
                if (nErr != WSAEWOULDBLOCK && nErr != WSAEMSGSIZE && nErr != WSAEINTR && nErr != WSAEINPROGRESS)
                {
                    printf("socket send error %d\n", nErr);
                    pnode->CloseSocketDisconnect();
                }
            }
            // couldn't send anything at all
            break;
        }
    }

    if (it == pnode->vSendMsg.end()) {
        // queue fully drained: offset and size accounting must be back to zero
        assert(pnode->nSendOffset == 0);
        assert(pnode->nSendSize == 0);
    }
    pnode->vSendMsg.erase(pnode->vSendMsg.begin(), it);
}
// Nodes removed from vNodes but not yet deleted; they are parked here until
// every thread has dropped its reference (see ThreadSocketHandler).
static list<CNode*> vNodesDisconnected;
// Main network I/O loop: reaps disconnected peers, select()s over all
// sockets, accepts inbound connections, shuttles bytes between sockets and
// per-node buffers, and enforces inactivity timeouts. Runs forever; relies
// on boost thread interruption points for shutdown.
void ThreadSocketHandler()
{
    unsigned int nPrevNodeCount = 0;
    loop
    {
        //
        // Disconnect nodes
        //
        {
            LOCK(cs_vNodes);
            // Disconnect unused nodes
            vector<CNode*> vNodesCopy = vNodes;
            BOOST_FOREACH(CNode* pnode, vNodesCopy)
            {
                if (pnode->fDisconnect ||
                    (pnode->GetRefCount() <= 0 && pnode->vRecvMsg.empty() && pnode->nSendSize == 0 && pnode->ssSend.empty()))
                {
                    // remove from vNodes
                    vNodes.erase(remove(vNodes.begin(), vNodes.end(), pnode), vNodes.end());

                    // release outbound grant (if any)
                    pnode->grantOutbound.Release();

                    // close socket and cleanup
                    pnode->CloseSocketDisconnect();
                    pnode->Cleanup();

                    // hold in disconnected pool until all refs are released
                    if (pnode->fNetworkNode || pnode->fInbound)
                        pnode->Release();
                    vNodesDisconnected.push_back(pnode);
                }
            }

            // Delete disconnected nodes
            list<CNode*> vNodesDisconnectedCopy = vNodesDisconnected;
            BOOST_FOREACH(CNode* pnode, vNodesDisconnectedCopy)
            {
                // wait until threads are done using it
                if (pnode->GetRefCount() <= 0)
                {
                    bool fDelete = false;
                    {
                        // only delete when no other thread holds any of the
                        // node's locks
                        TRY_LOCK(pnode->cs_vSend, lockSend);
                        if (lockSend)
                        {
                            TRY_LOCK(pnode->cs_vRecvMsg, lockRecv);
                            if (lockRecv)
                            {
                                TRY_LOCK(pnode->cs_inventory, lockInv);
                                if (lockInv)
                                    fDelete = true;
                            }
                        }
                    }
                    if (fDelete)
                    {
                        vNodesDisconnected.remove(pnode);
                        delete pnode;
                    }
                }
            }
        }
        if (vNodes.size() != nPrevNodeCount)
        {
            nPrevNodeCount = vNodes.size();
            uiInterface.NotifyNumConnectionsChanged(vNodes.size());
        }

        //
        // Find which sockets have data to receive
        //
        struct timeval timeout;
        timeout.tv_sec  = 0;
        timeout.tv_usec = 50000; // frequency to poll pnode->vSend

        fd_set fdsetRecv;
        fd_set fdsetSend;
        fd_set fdsetError;
        FD_ZERO(&fdsetRecv);
        FD_ZERO(&fdsetSend);
        FD_ZERO(&fdsetError);
        SOCKET hSocketMax = 0;
        bool have_fds = false;

        BOOST_FOREACH(SOCKET hListenSocket, vhListenSocket) {
            FD_SET(hListenSocket, &fdsetRecv);
            hSocketMax = max(hSocketMax, hListenSocket);
            have_fds = true;
        }
        {
            LOCK(cs_vNodes);
            BOOST_FOREACH(CNode* pnode, vNodes)
            {
                if (pnode->hSocket == INVALID_SOCKET)
                    continue;
                FD_SET(pnode->hSocket, &fdsetError);
                hSocketMax = max(hSocketMax, pnode->hSocket);
                have_fds = true;

                // Implement the following logic:
                // * If there is data to send, select() for sending data. As this only
                //   happens when optimistic write failed, we choose to first drain the
                //   write buffer in this case before receiving more. This avoids
                //   needlessly queueing received data, if the remote peer is not themselves
                //   receiving data. This means properly utilizing TCP flow control signalling.
                // * Otherwise, if there is no (complete) message in the receive buffer,
                //   or there is space left in the buffer, select() for receiving data.
                // * (if neither of the above applies, there is certainly one message
                //   in the receiver buffer ready to be processed).
                // Together, that means that at least one of the following is always possible,
                // so we don't deadlock:
                // * We send some data.
                // * We wait for data to be received (and disconnect after timeout).
                // * We process a message in the buffer (message handler thread).
                {
                    TRY_LOCK(pnode->cs_vSend, lockSend);
                    if (lockSend && !pnode->vSendMsg.empty()) {
                        FD_SET(pnode->hSocket, &fdsetSend);
                        continue;
                    }
                }
                {
                    TRY_LOCK(pnode->cs_vRecvMsg, lockRecv);
                    if (lockRecv && (
                        pnode->vRecvMsg.empty() || !pnode->vRecvMsg.front().complete() ||
                        pnode->GetTotalRecvSize() <= ReceiveFloodSize()))
                        FD_SET(pnode->hSocket, &fdsetRecv);
                }
            }
        }

        int nSelect = select(have_fds ? hSocketMax + 1 : 0,
                             &fdsetRecv, &fdsetSend, &fdsetError, &timeout);
        boost::this_thread::interruption_point();

        if (nSelect == SOCKET_ERROR)
        {
            if (have_fds)
            {
                // recovery: treat every descriptor as readable so the
                // per-socket recv() below surfaces the real error
                int nErr = WSAGetLastError();
                printf("socket select error %d\n", nErr);
                for (unsigned int i = 0; i <= hSocketMax; i++)
                    FD_SET(i, &fdsetRecv);
            }
            FD_ZERO(&fdsetSend);
            FD_ZERO(&fdsetError);
            MilliSleep(timeout.tv_usec/1000);
        }

        //
        // Accept new connections
        //
        BOOST_FOREACH(SOCKET hListenSocket, vhListenSocket)
        if (hListenSocket != INVALID_SOCKET && FD_ISSET(hListenSocket, &fdsetRecv))
        {
#ifdef USE_IPV6
            struct sockaddr_storage sockaddr;
#else
            struct sockaddr sockaddr;
#endif
            socklen_t len = sizeof(sockaddr);
            SOCKET hSocket = accept(hListenSocket, (struct sockaddr*)&sockaddr, &len);
            CAddress addr;
            int nInbound = 0;

            if (hSocket != INVALID_SOCKET)
                if (!addr.SetSockAddr((const struct sockaddr*)&sockaddr))
                    printf("Warning: Unknown socket family\n");

            {
                LOCK(cs_vNodes);
                BOOST_FOREACH(CNode* pnode, vNodes)
                    if (pnode->fInbound)
                        nInbound++;
            }

            if (hSocket == INVALID_SOCKET)
            {
                int nErr = WSAGetLastError();
                if (nErr != WSAEWOULDBLOCK)
                    printf("socket error accept failed: %d\n", nErr);
            }
            else if (nInbound >= nMaxConnections - MAX_OUTBOUND_CONNECTIONS)
            {
                // NOTE(review): when the inbound limit is reached and addr IS
                // in setservAddNodeAddresses, the accepted socket is neither
                // closed nor turned into a CNode — it falls through and leaks
                // the descriptor. Confirm intent / consider closing it too.
                {
                    LOCK(cs_setservAddNodeAddresses);
                    if (!setservAddNodeAddresses.count(addr))
                        closesocket(hSocket);
                }
            }
            else if (CNode::IsBanned(addr))
            {
                printf("connection from %s dropped (banned)\n", addr.ToString().c_str());
                closesocket(hSocket);
            }
            else
            {
                printf("accepted connection %s\n", addr.ToString().c_str());
                CNode* pnode = new CNode(hSocket, addr, "", true);
                pnode->AddRef();
                {
                    LOCK(cs_vNodes);
                    vNodes.push_back(pnode);
                }
            }
        }

        //
        // Service each socket
        //
        vector<CNode*> vNodesCopy;
        {
            LOCK(cs_vNodes);
            vNodesCopy = vNodes;
            BOOST_FOREACH(CNode* pnode, vNodesCopy)
                pnode->AddRef();
        }
        BOOST_FOREACH(CNode* pnode, vNodesCopy)
        {
            boost::this_thread::interruption_point();

            //
            // Receive
            //
            if (pnode->hSocket == INVALID_SOCKET)
                continue;
            if (FD_ISSET(pnode->hSocket, &fdsetRecv) || FD_ISSET(pnode->hSocket, &fdsetError))
            {
                TRY_LOCK(pnode->cs_vRecvMsg, lockRecv);
                if (lockRecv)
                {
                    {
                        // typical socket buffer is 8K-64K
                        char pchBuf[0x10000];
                        int nBytes = recv(pnode->hSocket, pchBuf, sizeof(pchBuf), MSG_DONTWAIT);
                        if (nBytes > 0)
                        {
                            if (!pnode->ReceiveMsgBytes(pchBuf, nBytes))
                                pnode->CloseSocketDisconnect();
                            pnode->nLastRecv = GetTime();
                            pnode->nRecvBytes += nBytes;
                        }
                        else if (nBytes == 0)
                        {
                            // socket closed gracefully
                            if (!pnode->fDisconnect)
                                printf("socket closed\n");
                            pnode->CloseSocketDisconnect();
                        }
                        else if (nBytes < 0)
                        {
                            // error
                            int nErr = WSAGetLastError();
                            if (nErr != WSAEWOULDBLOCK && nErr != WSAEMSGSIZE && nErr != WSAEINTR && nErr != WSAEINPROGRESS)
                            {
                                if (!pnode->fDisconnect)
                                    printf("socket recv error %d\n", nErr);
                                pnode->CloseSocketDisconnect();
                            }
                        }
                    }
                }
            }

            //
            // Send
            //
            if (pnode->hSocket == INVALID_SOCKET)
                continue;
            if (FD_ISSET(pnode->hSocket, &fdsetSend))
            {
                TRY_LOCK(pnode->cs_vSend, lockSend);
                if (lockSend)
                    SocketSendData(pnode);
            }

            //
            // Inactivity checking
            //
            if (pnode->vSendMsg.empty())
                pnode->nLastSendEmpty = GetTime();
            if (GetTime() - pnode->nTimeConnected > 60)
            {
                // disconnect peers that never exchanged a message in the
                // first minute, or have been silent/stalled for 90 minutes
                if (pnode->nLastRecv == 0 || pnode->nLastSend == 0)
                {
                    printf("socket no message in first 60 seconds, %d %d\n", pnode->nLastRecv != 0, pnode->nLastSend != 0);
                    pnode->fDisconnect = true;
                }
                else if (GetTime() - pnode->nLastSend > 90*60 && GetTime() - pnode->nLastSendEmpty > 90*60)
                {
                    printf("socket not sending\n");
                    pnode->fDisconnect = true;
                }
                else if (GetTime() - pnode->nLastRecv > 90*60)
                {
                    printf("socket inactivity timeout\n");
                    pnode->fDisconnect = true;
                }
            }
        }
        {
            LOCK(cs_vNodes);
            BOOST_FOREACH(CNode* pnode, vNodesCopy)
                pnode->Release();
        }

        MilliSleep(10);
    }
}
#ifdef USE_UPNP
void ThreadMapPort()
{
std::string port = strprintf("%u", GetListenPort());
const char * multicastif = 0;
const char * minissdpdpath = 0;
struct UPNPDev * devlist = 0;
char lanaddr[64];
#ifndef UPNPDISCOVER_SUCCESS
/* miniupnpc 1.5 */
devlist = upnpDiscover(2000, multicastif, minissdpdpath, 0);
#else
/* miniupnpc 1.6 */
int error = 0;
devlist = upnpDiscover(2000, multicastif, minissdpdpath, 0, 0, &error);
#endif
struct UPNPUrls urls;
struct IGDdatas data;
int r;
r = UPNP_GetValidIGD(devlist, &urls, &data, lanaddr, sizeof(lanaddr));
if (r == 1)
{
if (fDiscover) {
char externalIPAddress[40];
r = UPNP_GetExternalIPAddress(urls.controlURL, data.first.servicetype, externalIPAddress);
if(r != UPNPCOMMAND_SUCCESS)
printf("UPnP: GetExternalIPAddress() returned %d\n", r);
else
{
if(externalIPAddress[0])
{
printf("UPnP: ExternalIPAddress = %s\n", externalIPAddress);
AddLocal(CNetAddr(externalIPAddress), LOCAL_UPNP);
}
else
printf("UPnP: GetExternalIPAddress failed.\n");
}
}
string strDesc = "CowCoin " + FormatFullVersion();
try {
loop {
#ifndef UPNPDISCOVER_SUCCESS
/* miniupnpc 1.5 */
r = UPNP_AddPortMapping(urls.controlURL, data.first.servicetype,
port.c_str(), port.c_str(), lanaddr, strDesc.c_str(), "TCP", 0);
#else
/* miniupnpc 1.6 */
r = UPNP_AddPortMapping(urls.controlURL, data.first.servicetype,
port.c_str(), port.c_str(), lanaddr, strDesc.c_str(), "TCP", 0, "0");
#endif
if(r!=UPNPCOMMAND_SUCCESS)
printf("AddPortMapping(%s, %s, %s) failed with code %d (%s)\n",
port.c_str(), port.c_str(), lanaddr, r, strupnperror(r));
else
printf("UPnP Port Mapping successful.\n");;
MilliSleep(20*60*1000); // Refresh every 20 minutes
}
}
catch (boost::thread_interrupted)
{
r = UPNP_DeletePortMapping(urls.controlURL, data.first.servicetype, port.c_str(), "TCP", 0);
printf("UPNP_DeletePortMapping() returned : %d\n", r);
freeUPNPDevlist(devlist); devlist = 0;
FreeUPNPUrls(&urls);
throw;
}
} else {
printf("No valid UPnP IGDs found\n");
freeUPNPDevlist(devlist); devlist = 0;
if (r != 0)
FreeUPNPUrls(&urls);
}
}
// Start or stop the UPnP mapping thread. Starting while a thread already
// exists interrupts and joins the old one first, so at most one mapping
// thread is ever alive.
void MapPort(bool fUseUPnP)
{
    static boost::thread* upnp_thread = NULL;

    if (fUseUPnP)
    {
        if (upnp_thread) {
            upnp_thread->interrupt();
            upnp_thread->join();
            delete upnp_thread;
        }
        upnp_thread = new boost::thread(boost::bind(&TraceThread<boost::function<void()> >, "upnp", &ThreadMapPort));
    }
    else if (upnp_thread) {
        upnp_thread->interrupt();
        upnp_thread->join();
        delete upnp_thread;
        upnp_thread = NULL;
    }
}
#else
// UPnP support compiled out: MapPort is a no-op stub with the same signature.
void MapPort(bool)
{
    // Intentionally left blank.
}
#endif
// DNS seeds
// Each pair gives a source name and a seed name.
// The first name is used as information source for addrman.
// The second name should resolve to a list of seed addresses.
// Both arrays are NULL-terminated (sentinel {NULL, NULL}).
static const char *strMainNetDNSSeed[][2] = {
    {"cryptolife.net","wallet.cryptolife.net"},
    {"cryptolife.net","explore.cryptolife.net"},
    {"cryptolife.net","seed1.cryptolife.net"},
    {"cryptolife.net","seed2.cryptolife.net"},
    {NULL, NULL}
};

// No DNS seeds are configured for testnet.
static const char *strTestNetDNSSeed[][2] = {
    {NULL, NULL}
};
// Bootstrap the address manager from the configured DNS seeds.
// With a name proxy, each seed host is queued as a one-shot connection
// instead of being resolved locally; otherwise every resolved IP is added
// to addrman with a randomized 3-7 day old "last seen" timestamp so seed
// addresses don't outrank organically-learned peers.
void ThreadDNSAddressSeed()
{
    static const char *(*strDNSSeed)[2] = fTestNet ? strTestNetDNSSeed : strMainNetDNSSeed;

    int found = 0;

    printf("Loading addresses from DNS seeds (could take a while)\n");

    for (unsigned int seed_idx = 0; strDNSSeed[seed_idx][0] != NULL; seed_idx++) {
        if (HaveNameProxy()) {
            AddOneShot(strDNSSeed[seed_idx][1]);
        } else {
            vector<CNetAddr> vaddr;
            vector<CAddress> vAdd;
            if (LookupHost(strDNSSeed[seed_idx][1], vaddr))
            {
                BOOST_FOREACH(CNetAddr& ip, vaddr)
                {
                    int nOneDay = 24*3600;
                    CAddress addr = CAddress(CService(ip, GetDefaultPort()));
                    addr.nTime = GetTime() - 3*nOneDay - GetRand(4*nOneDay); // use a random age between 3 and 7 days old
                    vAdd.push_back(addr);
                    found++;
                }
            }
            addrman.Add(vAdd, CNetAddr(strDNSSeed[seed_idx][0], true));
        }
    }

    printf("%d addresses found from DNS seeds\n", found);
}
// Hardcoded fallback seed IPs (one IPv4 address packed per uint32), used by
// ThreadOpenConnections when addrman is still empty after startup.
unsigned int pnSeed[] =
{
    0x119caa6b
    // 0x92B9B572, 0xA2F3716E, 0x5F551D90
};
// Persist the address manager to peers.dat and log how long the flush took.
void DumpAddresses()
{
    int64 nStart = GetTimeMillis();

    CAddrDB adb;
    adb.Write(addrman);

    // Cast size() explicitly so the argument always matches the %d
    // conversion regardless of the return type of CAddrMan::size().
    printf("Flushed %d addresses to peers.dat %"PRI64d"ms\n",
           (int)addrman.size(), GetTimeMillis() - nStart);
}
// Pop one destination off the one-shot queue and try to connect to it.
// Uses a non-blocking semaphore grant so the outbound connection limit is
// respected; on connection failure the destination is re-queued.
void static ProcessOneShot()
{
    string strDest;
    {
        LOCK(cs_vOneShots);
        if (vOneShots.empty())
            return;
        strDest = vOneShots.front();
        vOneShots.pop_front();
    }
    CAddress addr;
    CSemaphoreGrant grant(*semOutbound, true);
    if (grant) {
        if (!OpenNetworkConnection(addr, &grant, strDest.c_str(), true))
            AddOneShot(strDest);
    }
}
// Outbound connection maintenance thread.
// With -connect, loops forever connecting only to the listed addresses.
// Otherwise: seeds addrman from pnSeed if it is still empty after a minute,
// then repeatedly picks an addrman candidate (one peer per /16 network
// group, preferring default ports and not-recently-tried addresses) and
// opens a connection, gated by the outbound semaphore.
void ThreadOpenConnections()
{
    // Connect to specific addresses
    if (mapArgs.count("-connect") && mapMultiArgs["-connect"].size() > 0)
    {
        for (int64 nLoop = 0;; nLoop++)
        {
            ProcessOneShot();
            BOOST_FOREACH(string strAddr, mapMultiArgs["-connect"])
            {
                CAddress addr;
                OpenNetworkConnection(addr, NULL, strAddr.c_str());
                // back off progressively on later passes (up to 5s per address)
                for (int i = 0; i < 10 && i < nLoop; i++)
                {
                    MilliSleep(500);
                }
            }
            MilliSleep(500);
        }
    }

    // Initiate network connections
    int64 nStart = GetTime();
    loop
    {
        ProcessOneShot();

        MilliSleep(500);

        CSemaphoreGrant grant(*semOutbound);
        boost::this_thread::interruption_point();

        // Add seed nodes if IRC isn't working
        if (addrman.size()==0 && (GetTime() - nStart > 60) && !fTestNet)
        {
            std::vector<CAddress> vAdd;
            for (unsigned int i = 0; i < ARRAYLEN(pnSeed); i++)
            {
                // It'll only connect to one or two seed nodes because once it connects,
                // it'll get a pile of addresses with newer timestamps.
                // Seed nodes are given a random 'last seen time' of between one and two
                // weeks ago.
                const int64 nOneWeek = 7*24*60*60;
                struct in_addr ip;
                memcpy(&ip, &pnSeed[i], sizeof(ip));
                CAddress addr(CService(ip, GetDefaultPort()));
                addr.nTime = GetTime()-GetRand(nOneWeek)-nOneWeek;
                vAdd.push_back(addr);
            }
            addrman.Add(vAdd, CNetAddr("127.0.0.1"));
        }

        //
        // Choose an address to connect to based on most recently seen
        //
        CAddress addrConnect;

        // Only connect out to one peer per network group (/16 for IPv4).
        // Do this here so we don't have to critsect vNodes inside mapAddresses critsect.
        int nOutbound = 0;
        set<vector<unsigned char> > setConnected;
        {
            LOCK(cs_vNodes);
            BOOST_FOREACH(CNode* pnode, vNodes) {
                if (!pnode->fInbound) {
                    setConnected.insert(pnode->addr.GetGroup());
                    nOutbound++;
                }
            }
        }

        int64 nANow = GetAdjustedTime();

        int nTries = 0;
        loop
        {
            // use an nUnkBias between 10 (no outgoing connections) and 90 (8 outgoing connections)
            CAddress addr = addrman.Select(10 + min(nOutbound,8)*10);

            // if we selected an invalid address, restart
            if (!addr.IsValid() || setConnected.count(addr.GetGroup()) || IsLocal(addr))
                break;

            // If we didn't find an appropriate destination after trying 100 addresses fetched from addrman,
            // stop this loop, and let the outer loop run again (which sleeps, adds seed nodes, recalculates
            // already-connected network ranges, ...) before trying new addrman addresses.
            nTries++;
            if (nTries > 100)
                break;

            if (IsLimited(addr))
                continue;

            // only consider very recently tried nodes after 30 failed attempts
            if (nANow - addr.nLastTry < 600 && nTries < 30)
                continue;

            // do not allow non-default ports, unless after 50 invalid addresses selected already
            if (addr.GetPort() != GetDefaultPort() && nTries < 50)
                continue;

            addrConnect = addr;
            break;
        }

        if (addrConnect.IsValid())
            OpenNetworkConnection(addrConnect, &grant);
    }
}
// Maintain connections to -addnode peers. With a name proxy, addresses are
// handed to the connection code unresolved every 2 minutes; otherwise each
// addnode entry is resolved (possibly to several IPs), entries we are
// already connected to are dropped, and one IP per remaining entry is
// tried per pass (rotating through the IPs via the pass counter i).
void ThreadOpenAddedConnections()
{
    {
        LOCK(cs_vAddedNodes);
        vAddedNodes = mapMultiArgs["-addnode"];
    }

    if (HaveNameProxy()) {
        while(true) {
            list<string> lAddresses(0);
            {
                LOCK(cs_vAddedNodes);
                BOOST_FOREACH(string& strAddNode, vAddedNodes)
                    lAddresses.push_back(strAddNode);
            }
            BOOST_FOREACH(string& strAddNode, lAddresses) {
                CAddress addr;
                CSemaphoreGrant grant(*semOutbound);
                OpenNetworkConnection(addr, &grant, strAddNode.c_str());
                MilliSleep(500);
            }
            MilliSleep(120000); // Retry every 2 minutes
        }
    }

    for (unsigned int i = 0; true; i++)
    {
        list<string> lAddresses(0);
        {
            LOCK(cs_vAddedNodes);
            BOOST_FOREACH(string& strAddNode, vAddedNodes)
                lAddresses.push_back(strAddNode);
        }

        list<vector<CService> > lservAddressesToAdd(0);
        BOOST_FOREACH(string& strAddNode, lAddresses)
        {
            vector<CService> vservNode(0);
            if(Lookup(strAddNode.c_str(), vservNode, GetDefaultPort(), fNameLookup, 0))
            {
                lservAddressesToAdd.push_back(vservNode);
                {
                    LOCK(cs_setservAddNodeAddresses);
                    BOOST_FOREACH(CService& serv, vservNode)
                        setservAddNodeAddresses.insert(serv);
                }
            }
        }
        // Attempt to connect to each IP for each addnode entry until at least one is successful per addnode entry
        // (keeping in mind that addnode entries can have many IPs if fNameLookup)
        {
            LOCK(cs_vNodes);
            BOOST_FOREACH(CNode* pnode, vNodes)
                for (list<vector<CService> >::iterator it = lservAddressesToAdd.begin(); it != lservAddressesToAdd.end(); it++)
                    BOOST_FOREACH(CService& addrNode, *(it))
                        if (pnode->addr == addrNode)
                        {
                            // NOTE(review): if the erased element is the list's
                            // first, erase() returns begin() and the following
                            // it-- decrements past begin(), which is undefined
                            // behaviour for std::list iterators even though it
                            // tends to work in practice — confirm/restructure.
                            it = lservAddressesToAdd.erase(it);
                            it--;
                            break;
                        }
        }
        BOOST_FOREACH(vector<CService>& vserv, lservAddressesToAdd)
        {
            CSemaphoreGrant grant(*semOutbound);
            OpenNetworkConnection(CAddress(vserv[i % vserv.size()]), &grant);
            MilliSleep(500);
        }
        MilliSleep(120000); // Retry every 2 minutes
    }
}
// if successful, this moves the passed grant to the constructed node
// Open an outbound connection to addrConnect (or the hostname strDest when
// given). Returns false without connecting when the target is local,
// banned, or already connected. On success the node is flagged as a
// network node (and one-shot, when requested).
bool OpenNetworkConnection(const CAddress& addrConnect, CSemaphoreGrant *grantOutbound, const char *strDest, bool fOneShot)
{
    //
    // Initiate outbound network connection
    //
    boost::this_thread::interruption_point();
    if (!strDest)
        if (IsLocal(addrConnect) ||
            FindNode((CNetAddr)addrConnect) || CNode::IsBanned(addrConnect) ||
            FindNode(addrConnect.ToStringIPPort().c_str()))
            return false;
    if (strDest && FindNode(strDest))
        return false;

    CNode* pnode = ConnectNode(addrConnect, strDest);
    boost::this_thread::interruption_point();

    if (!pnode)
        return false;
    if (grantOutbound)
        grantOutbound->MoveTo(pnode->grantOutbound);
    pnode->fNetworkNode = true;
    if (fOneShot)
        pnode->fOneShot = true;

    return true;
}
// Sync-candidate ranking: prefer the peer we received from most recently.
// Negating the last-receive timestamp makes "most recent" the maximum,
// so callers can simply pick the highest score.
double static NodeSyncScore(const CNode *pnode) {
    const double dScore = -(double)pnode->nLastRecv;
    return dScore;
}
// Pick the best peer to download blocks from and flag it as the sync node.
// Candidates must be full nodes, successfully connected, not one-shot or
// disconnecting, near our best height, and of a protocol version known to
// serve blocks; ties are broken by NodeSyncScore (most recently heard-from).
void static StartSync(const vector<CNode*> &vNodes) {
    CNode *pnodeNewSync = NULL;
    double dBestScore = 0;

    // fImporting and fReindex are accessed out of cs_main here, but only
    // as an optimization - they are checked again in SendMessages.
    if (fImporting || fReindex)
        return;

    // Iterate over all nodes
    BOOST_FOREACH(CNode* pnode, vNodes) {
        // check preconditions for allowing a sync
        if (!pnode->fClient && !pnode->fOneShot &&
            !pnode->fDisconnect && pnode->fSuccessfullyConnected &&
            (pnode->nStartingHeight > (nBestHeight - 144)) &&
            (pnode->nVersion < NOBLKS_VERSION_START || pnode->nVersion >= NOBLKS_VERSION_END)) {
            // if ok, compare node's score with the best so far
            double dScore = NodeSyncScore(pnode);
            if (pnodeNewSync == NULL || dScore > dBestScore) {
                pnodeNewSync = pnode;
                dBestScore = dScore;
            }
        }
    }
    // if a new sync candidate was found, start sync!
    if (pnodeNewSync) {
        pnodeNewSync->fStartSync = true;
        pnodeSync = pnodeNewSync;
    }
}
// Message-processing thread: for every node, process queued inbound
// messages and generate outbound ones. Also (re)selects a sync node when
// the current one disappears, and picks a random "trickle" node per pass
// for staggered inventory relay. Sleeps briefly unless a node still has a
// complete message waiting and room in its send buffer.
void ThreadMessageHandler()
{
    SetThreadPriority(THREAD_PRIORITY_BELOW_NORMAL);
    while (true)
    {
        bool fHaveSyncNode = false;

        vector<CNode*> vNodesCopy;
        {
            LOCK(cs_vNodes);
            vNodesCopy = vNodes;
            BOOST_FOREACH(CNode* pnode, vNodesCopy) {
                pnode->AddRef();
                if (pnode == pnodeSync)
                    fHaveSyncNode = true;
            }
        }

        if (!fHaveSyncNode)
            StartSync(vNodesCopy);

        // Poll the connected nodes for messages
        CNode* pnodeTrickle = NULL;
        if (!vNodesCopy.empty())
            pnodeTrickle = vNodesCopy[GetRand(vNodesCopy.size())];

        bool fSleep = true;

        BOOST_FOREACH(CNode* pnode, vNodesCopy)
        {
            if (pnode->fDisconnect)
                continue;

            // Receive messages
            {
                TRY_LOCK(pnode->cs_vRecvMsg, lockRecv);
                if (lockRecv)
                {
                    if (!ProcessMessages(pnode))
                        pnode->CloseSocketDisconnect();

                    if (pnode->nSendSize < SendBufferSize())
                    {
                        // more work immediately available: skip the sleep
                        if (!pnode->vRecvGetData.empty() || (!pnode->vRecvMsg.empty() && pnode->vRecvMsg[0].complete()))
                        {
                            fSleep = false;
                        }
                    }
                }
            }
            boost::this_thread::interruption_point();

            // Send messages
            {
                TRY_LOCK(pnode->cs_vSend, lockSend);
                if (lockSend)
                    SendMessages(pnode, pnode == pnodeTrickle);
            }
            boost::this_thread::interruption_point();
        }

        {
            LOCK(cs_vNodes);
            BOOST_FOREACH(CNode* pnode, vNodesCopy)
                pnode->Release();
        }

        if (fSleep)
            MilliSleep(100);
    }
}
// Create, configure, bind, and listen on a TCP socket for addrBind.
// On success the socket is appended to vhListenSocket and (for routable
// addresses with -discover) advertised as a local address; on failure an
// explanatory message is placed in strError and false is returned.
bool BindListenPort(const CService &addrBind, string& strError)
{
    strError = "";
    int nOne = 1;

    // Create socket for listening for incoming connections
#ifdef USE_IPV6
    struct sockaddr_storage sockaddr;
#else
    struct sockaddr sockaddr;
#endif
    socklen_t len = sizeof(sockaddr);
    if (!addrBind.GetSockAddr((struct sockaddr*)&sockaddr, &len))
    {
        strError = strprintf("Error: bind address family for %s not supported", addrBind.ToString().c_str());
        printf("%s\n", strError.c_str());
        return false;
    }

    SOCKET hListenSocket = socket(((struct sockaddr*)&sockaddr)->sa_family, SOCK_STREAM, IPPROTO_TCP);
    if (hListenSocket == INVALID_SOCKET)
    {
        strError = strprintf("Error: Couldn't open socket for incoming connections (socket returned error %d)", WSAGetLastError());
        printf("%s\n", strError.c_str());
        return false;
    }

#ifdef SO_NOSIGPIPE
    // Different way of disabling SIGPIPE on BSD
    setsockopt(hListenSocket, SOL_SOCKET, SO_NOSIGPIPE, (void*)&nOne, sizeof(int));
#endif

#ifndef WIN32
    // Allow binding if the port is still in TIME_WAIT state after
    // the program was closed and restarted.  Not an issue on windows.
    setsockopt(hListenSocket, SOL_SOCKET, SO_REUSEADDR, (void*)&nOne, sizeof(int));
#endif

#ifdef WIN32
    // Set to non-blocking, incoming connections will also inherit this
    if (ioctlsocket(hListenSocket, FIONBIO, (u_long*)&nOne) == SOCKET_ERROR)
#else
    if (fcntl(hListenSocket, F_SETFL, O_NONBLOCK) == SOCKET_ERROR)
#endif
    {
        strError = strprintf("Error: Couldn't set properties on socket for incoming connections (error %d)", WSAGetLastError());
        printf("%s\n", strError.c_str());
        return false;
    }

#ifdef USE_IPV6
    // some systems don't have IPV6_V6ONLY but are always v6only; others do have the option
    // and enable it by default or not. Try to enable it, if possible.
    if (addrBind.IsIPv6()) {
#ifdef IPV6_V6ONLY
#ifdef WIN32
        setsockopt(hListenSocket, IPPROTO_IPV6, IPV6_V6ONLY, (const char*)&nOne, sizeof(int));
#else
        setsockopt(hListenSocket, IPPROTO_IPV6, IPV6_V6ONLY, (void*)&nOne, sizeof(int));
#endif
#endif
#ifdef WIN32
        int nProtLevel = 10 /* PROTECTION_LEVEL_UNRESTRICTED */;
        int nParameterId = 23 /* IPV6_PROTECTION_LEVEl */;
        // this call is allowed to fail
        setsockopt(hListenSocket, IPPROTO_IPV6, nParameterId, (const char*)&nProtLevel, sizeof(int));
#endif
    }
#endif

    if (::bind(hListenSocket, (struct sockaddr*)&sockaddr, len) == SOCKET_ERROR)
    {
        int nErr = WSAGetLastError();
        if (nErr == WSAEADDRINUSE)
            strError = strprintf(_("Unable to bind to %s on this computer. CowCoin is probably already running."), addrBind.ToString().c_str());
        else
            strError = strprintf(_("Unable to bind to %s on this computer (bind returned error %d, %s)"), addrBind.ToString().c_str(), nErr, strerror(nErr));
        printf("%s\n", strError.c_str());
        return false;
    }
    printf("Bound to %s\n", addrBind.ToString().c_str());

    // Listen for incoming connections
    if (listen(hListenSocket, SOMAXCONN) == SOCKET_ERROR)
    {
        strError = strprintf("Error: Listening for incoming connections failed (listen returned error %d)", WSAGetLastError());
        printf("%s\n", strError.c_str());
        return false;
    }

    vhListenSocket.push_back(hListenSocket);

    if (addrBind.IsRoutable() && fDiscover)
        AddLocal(addrBind, LOCAL_BIND);

    return true;
}
// Discover our own addresses: local interface IPs (hostname lookup on
// Windows, getifaddrs elsewhere, skipping loopback interfaces) are added
// as LOCAL_IF, and an external-IP discovery thread is started unless IPv4
// is disabled via -onlynet. No-op when -discover is off.
void static Discover()
{
    if (!fDiscover)
        return;

#ifdef WIN32
    // Get local host IP
    char pszHostName[1000] = "";
    if (gethostname(pszHostName, sizeof(pszHostName)) != SOCKET_ERROR)
    {
        vector<CNetAddr> vaddr;
        if (LookupHost(pszHostName, vaddr))
        {
            BOOST_FOREACH (const CNetAddr &addr, vaddr)
            {
                AddLocal(addr, LOCAL_IF);
            }
        }
    }
#else
    // Get local host ip
    struct ifaddrs* myaddrs;
    if (getifaddrs(&myaddrs) == 0)
    {
        for (struct ifaddrs* ifa = myaddrs; ifa != NULL; ifa = ifa->ifa_next)
        {
            if (ifa->ifa_addr == NULL) continue;
            if ((ifa->ifa_flags & IFF_UP) == 0) continue;
            if (strcmp(ifa->ifa_name, "lo") == 0) continue;
            if (strcmp(ifa->ifa_name, "lo0") == 0) continue;
            if (ifa->ifa_addr->sa_family == AF_INET)
            {
                struct sockaddr_in* s4 = (struct sockaddr_in*)(ifa->ifa_addr);
                CNetAddr addr(s4->sin_addr);
                if (AddLocal(addr, LOCAL_IF))
                    printf("IPv4 %s: %s\n", ifa->ifa_name, addr.ToString().c_str());
            }
#ifdef USE_IPV6
            else if (ifa->ifa_addr->sa_family == AF_INET6)
            {
                struct sockaddr_in6* s6 = (struct sockaddr_in6*)(ifa->ifa_addr);
                CNetAddr addr(s6->sin6_addr);
                if (AddLocal(addr, LOCAL_IF))
                    printf("IPv6 %s: %s\n", ifa->ifa_name, addr.ToString().c_str());
            }
#endif
        }
        freeifaddrs(myaddrs);
    }
#endif

    // Don't use external IPv4 discovery, when -onlynet="IPv6"
    if (!IsLimited(NET_IPV4))
        NewThread(ThreadGetMyExternalIP, NULL);
}
// Initialize networking and launch all network threads: DNS seeding
// (unless disabled), UPnP mapping (when compiled in), IRC seeding, the
// socket handler, addnode/outbound connection threads, the message
// handler, and the periodic address dumper. Also creates the outbound
// semaphore and the local placeholder node on first call.
void StartNode(boost::thread_group& threadGroup)
{
    if (semOutbound == NULL) {
        // initialize semaphore
        int nMaxOutbound = min(MAX_OUTBOUND_CONNECTIONS, nMaxConnections);
        semOutbound = new CSemaphore(nMaxOutbound);
    }

    if (pnodeLocalHost == NULL)
        pnodeLocalHost = new CNode(INVALID_SOCKET, CAddress(CService("127.0.0.1", 0), nLocalServices));

    Discover();

    //
    // Start threads
    //

    if (!GetBoolArg("-dnsseed", true))
        printf("DNS seeding disabled\n");
    else
        threadGroup.create_thread(boost::bind(&TraceThread<boost::function<void()> >, "dnsseed", &ThreadDNSAddressSeed));

#ifdef USE_UPNP
    // Map ports with UPnP
    MapPort(GetBoolArg("-upnp", USE_UPNP));
#endif

    // Get addresses from IRC and advertise ours
    threadGroup.create_thread(boost::bind(&TraceThread<void (*)()>, "irc", &ThreadIRCSeed));

    // Send and receive from sockets, accept connections
    threadGroup.create_thread(boost::bind(&TraceThread<void (*)()>, "net", &ThreadSocketHandler));

    // Initiate outbound connections from -addnode
    threadGroup.create_thread(boost::bind(&TraceThread<void (*)()>, "addcon", &ThreadOpenAddedConnections));

    // Initiate outbound connections
    threadGroup.create_thread(boost::bind(&TraceThread<void (*)()>, "opencon", &ThreadOpenConnections));

    // Process messages
    threadGroup.create_thread(boost::bind(&TraceThread<void (*)()>, "msghand", &ThreadMessageHandler));

    // Dump network addresses
    threadGroup.create_thread(boost::bind(&LoopForever<void (*)()>, "dumpaddr", &DumpAddresses, DUMP_ADDRESSES_INTERVAL * 1000));
}
// Shut networking down: stop mining, remove any UPnP mapping, release
// every outbound-semaphore slot so blocked connection threads can exit,
// then flush addresses to disk. Always returns true.
bool StopNode()
{
    printf("StopNode()\n");
    GenerateBitcoins(false, NULL);
    MapPort(false);
    nTransactionsUpdated++;
    if (semOutbound)
        for (int i=0; i<MAX_OUTBOUND_CONNECTIONS; i++)
            semOutbound->post();
    MilliSleep(50);
    DumpAddresses();

    return true;
}
// Static-destruction cleanup helper: a single global instance whose
// destructor runs at program exit to close all sockets, free node objects
// and other networking globals, and shut down Winsock on Windows.
class CNetCleanup
{
public:
    CNetCleanup()
    {
    }
    ~CNetCleanup()
    {
        // Close sockets
        BOOST_FOREACH(CNode* pnode, vNodes)
            if (pnode->hSocket != INVALID_SOCKET)
                closesocket(pnode->hSocket);
        BOOST_FOREACH(SOCKET hListenSocket, vhListenSocket)
            if (hListenSocket != INVALID_SOCKET)
                if (closesocket(hListenSocket) == SOCKET_ERROR)
                    printf("closesocket(hListenSocket) failed with error %d\n", WSAGetLastError());

        // clean up some globals (to help leak detection)
        BOOST_FOREACH(CNode *pnode, vNodes)
            delete pnode;
        BOOST_FOREACH(CNode *pnode, vNodesDisconnected)
            delete pnode;
        vNodes.clear();
        vNodesDisconnected.clear();
        delete semOutbound;
        semOutbound = NULL;
        delete pnodeLocalHost;
        pnodeLocalHost = NULL;

#ifdef WIN32
        // Shutdown Windows Sockets
        WSACleanup();
#endif
    }
}
instance_of_cnetcleanup;
void RelayTransaction(const CTransaction& tx, const uint256& hash)
{
CDataStream ss(SER_NETWORK, PROTOCOL_VERSION);
ss.reserve(10000);
ss << tx;
RelayTransaction(tx, hash, ss);
}
// Relay a transaction to connected peers: cache its serialized form in
// mapRelay (with a 15-minute expiry, evicting stale entries first), then
// push the inv to every relaying peer, honoring each peer's bloom filter
// when one is set.
void RelayTransaction(const CTransaction& tx, const uint256& hash, const CDataStream& ss)
{
    CInv inv(MSG_TX, hash);
    {
        LOCK(cs_mapRelay);
        // Expire old relay messages
        while (!vRelayExpiration.empty() && vRelayExpiration.front().first < GetTime())
        {
            mapRelay.erase(vRelayExpiration.front().second);
            vRelayExpiration.pop_front();
        }

        // Save original serialized message so newer versions are preserved
        mapRelay.insert(std::make_pair(inv, ss));
        vRelayExpiration.push_back(std::make_pair(GetTime() + 15 * 60, inv));
    }

    LOCK(cs_vNodes);
    BOOST_FOREACH(CNode* pnode, vNodes)
    {
        if(!pnode->fRelayTxes)
            continue;
        LOCK(pnode->cs_filter);
        if (pnode->pfilter)
        {
            // only announce if the transaction matches the peer's filter
            if (pnode->pfilter->IsRelevantAndUpdate(tx, hash))
                pnode->PushInventory(inv);
        } else
            pnode->PushInventory(inv);
    }
}
| justinetim/coins | src/net.cpp | C++ | mit | 57,393 |
/*
Copyright (c) 2011, Chris Umbel
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
// Minimal example of the `natural` NLP package's naive-Bayes classifier:
// train on four labelled sentences, then classify two new ones.
var natural = require('natural'),
    classifier = new natural.BayesClassifier();

// Training set: each entry pairs a label with an example text.
// NOTE(review): this passes an array straight to train(); newer `natural`
// releases use addDocument()/train() instead — confirm the installed
// version supports this form.
classifier.train([
    {classification: 'software', text: "my unit-tests failed."},
    {classification: 'software', text: "tried the program, but it was buggy."},
    {classification: 'hardware', text: "the drive has a 2TB capacity."},
    {classification: 'hardware', text: "i need a new power supply."}
]);

// Expected to print 'software' then 'hardware' given the training data above.
console.log(classifier.classify('did the tests pass?'));
console.log(classifier.classify('did you buy a new drive?'));
| tj/natural | examples/classification/basic.js | JavaScript | mit | 1,580 |
# -*- coding: utf-8 -*-
"""
This file is part of pyCMBS.
(c) 2012- Alexander Loew
For COPYING and LICENSE details, please refer to the LICENSE file
"""
from cdo import Cdo
from pycmbs.data import Data
import tempfile as tempfile
import copy
import glob
import os
import sys
import numpy as np
from pycmbs.benchmarking import preprocessor
from pycmbs.benchmarking.utils import get_T63_landseamask, get_temporary_directory
from pycmbs.benchmarking.models.model_basic import *
class JSBACH_BOT(Model):
    def __init__(self, filename, dic_variables, experiment, name='', shift_lon=False, **kwargs):
        """
        Set up a JSBACH BOT-file model wrapper.

        Parameters
        ----------
        filename : str
            passed through to the Model base class (presumably the model
            data file or directory root — confirm against Model.__init__)
        dic_variables : dict
            variable configuration forwarded to the Model base class
        experiment : str
            experiment identifier, used to build data filenames and the
            unique name
        name : str
            human-readable model name
        shift_lon : bool
            whether longitudes should be shifted when reading data
        """
        super(JSBACH_BOT, self).__init__(filename, dic_variables, name=name, **kwargs)

        self.experiment = experiment
        self.shift_lon = shift_lon
        self.type = 'JSBACH_BOT'

        self._unique_name = self._get_unique_name()
def _get_unique_name(self):
"""
get unique name from model and experiment
@return: string with unique combination of models and experiment
"""
return self.name.replace(' ', '') + '-' + self.experiment.replace(' ', '')
    def get_albedo_data(self, interval='season'):
        """
        get albedo data for JSBACH

        Reads the seasonal-mean albedo field (GRIB code var176) from a
        precomputed yseasmean file for this experiment, masked to T63 land
        points.

        returns Data object
        """
        if interval != 'season':
            raise ValueError('Other temporal sampling than SEASON not supported yet for JSBACH BOT files, sorry')

        v = 'var176'

        # NOTE(review): assumes self.data_dir is set by the Model base class
        # and ends with a path separator — confirm.
        filename = self.data_dir + 'data/model1/' + self.experiment + '_echam6_BOT_mm_1979-2006_albedo_yseasmean.nc'
        ls_mask = get_T63_landseamask(self.shift_lon)

        albedo = Data(filename, v, read=True,
                      label='MPI-ESM albedo ' + self.experiment, unit='-', lat_name='lat', lon_name='lon',
                      shift_lon=self.shift_lon,
                      mask=ls_mask.data.data)

        return albedo
    def get_tree_fraction(self, interval='season'):
        """
        Read the tree (forest/shrub) cover fraction for the year 2001 from a
        hard-coded example file, masked to T63 land points.

        todo implement this for data from a real run !!!
        """
        if interval != 'season':
            raise ValueError('Other temporal sampling than SEASON not supported yet for JSBACH BOT files, sorry')

        ls_mask = get_T63_landseamask(self.shift_lon)

        # NOTE(review): hard-coded absolute user path — replace once real
        # run output is wired up (see todo above).
        filename = '/home/m300028/shared/dev/svn/trstools-0.0.1/lib/python/pyCMBS/framework/external/vegetation_benchmarking/VEGETATION_COVER_BENCHMARKING/example/historical_r1i1p1-LR_1850-2005_forest_shrub.nc'
        v = 'var12'
        # NOTE(review): `pl` is not imported in this module directly —
        # presumably pylab via the wildcard import from model_basic; confirm.
        tree = Data(filename, v, read=True,
                    label='MPI-ESM tree fraction ' + self.experiment, unit='-', lat_name='lat', lon_name='lon',
                    shift_lon=self.shift_lon,
                    mask=ls_mask.data.data, start_time=pl.num2date(pl.datestr2num('2001-01-01')), stop_time=pl.num2date(pl.datestr2num('2001-12-31')))

        return tree
def get_grass_fraction(self, interval='season'):
"""
todo implement this for data from a real run !!!
"""
if interval != 'season':
raise ValueError('Other temporal sampling than SEASON not supported yet for JSBACH BOT files, sorry')
ls_mask = get_T63_landseamask(self.shift_lon)
filename = '/home/m300028/shared/dev/svn/trstools-0.0.1/lib/python/pyCMBS/framework/external/vegetation_benchmarking/VEGETATION_COVER_BENCHMARKING/example/historical_r1i1p1-LR_1850-2005_grass_crop_pasture_2001.nc'
v = 'var12'
grass = Data(filename, v, read=True,
label='MPI-ESM tree fraction ' + self.experiment, unit='-', lat_name='lat', lon_name='lon',
#shift_lon=shift_lon,
mask=ls_mask.data.data, start_time=pl.num2date(pl.datestr2num('2001-01-01')), stop_time=pl.num2date(pl.datestr2num('2001-12-31')), squeeze=True)
return grass
def get_surface_shortwave_radiation_down(self, interval='season'):
"""
get surface shortwave incoming radiation data for JSBACH
returns Data object
"""
if interval != 'season':
raise ValueError('Other temporal sampling than SEASON not supported yet for JSBACH BOT files, sorry')
v = 'var176'
y1 = '1979-01-01'
y2 = '2006-12-31'
rawfilename = self.data_dir + 'data/model/' + self.experiment + '_echam6_BOT_mm_1979-2006_srads.nc'
if not os.path.exists(rawfilename):
return None
#--- read data
cdo = pyCDO(rawfilename, y1, y2)
if interval == 'season':
seasfile = cdo.seasmean()
del cdo
print 'seasfile: ', seasfile
cdo = pyCDO(seasfile, y1, y2)
filename = cdo.yseasmean()
else:
raise ValueError('Invalid interval option %s ' % interval)
#--- read land-sea mask
ls_mask = get_T63_landseamask(self.shift_lon)
#--- read SIS data
sis = Data(filename, v, read=True,
label='MPI-ESM SIS ' + self.experiment, unit='-', lat_name='lat', lon_name='lon',
#shift_lon=shift_lon,
mask=ls_mask.data.data)
return sis
def get_rainfall_data(self, interval='season'):
"""
get rainfall data for JSBACH
returns Data object
"""
if interval == 'season':
pass
else:
raise ValueError('Invalid value for interval: %s' % interval)
#/// PREPROCESSING: seasonal means ///
s_start_time = str(self.start_time)[0:10]
s_stop_time = str(self.stop_time)[0:10]
filename1 = self.data_dir + self.experiment + '_echam6_BOT_mm_1980_sel.nc'
tmp = pyCDO(filename1, s_start_time, s_stop_time).seldate()
tmp1 = pyCDO(tmp, s_start_time, s_stop_time).seasmean()
filename = pyCDO(tmp1, s_start_time, s_stop_time).yseasmean()
#/// READ DATA ///
#1) land / sea mask
ls_mask = get_T63_landseamask(self.shift_lon)
#2) precipitation data
try:
v = 'var4'
rain = Data(filename, v, read=True, scale_factor=86400.,
label='MPI-ESM ' + self.experiment, unit='mm/day', lat_name='lat', lon_name='lon',
shift_lon=self.shift_lon,
mask=ls_mask.data.data)
except:
v = 'var142'
rain = Data(filename, v, read=True, scale_factor=86400.,
label='MPI-ESM ' + self.experiment, unit='mm/day', lat_name='lat', lon_name='lon',
shift_lon=self.shift_lon,
mask=ls_mask.data.data)
return rain
class JSBACH_RAW2(Model):
    """
    Class for RAW JSBACH model output
    works on the real raw output
    """
    #def __init__(self, filename, dic_variables, experiment, name='', shift_lon=False, model_dict=None, input_format='grb', raw_outdata='outdata/jsbach/', **kwargs):
    def __init__(self, filename, dic_variables, experiment, name='', shift_lon=False, input_format='grb', raw_outdata='outdata/jsbach/', **kwargs):
        """
        The assignment of certain variables to different input streams is done in the routine
        get_jsbach_data_generic()
        Parameters
        ----------
        input_format : str
            specifies file format of input data
            ['nc','grb']
        """
        super(JSBACH_RAW2, self).__init__(filename, dic_variables, name=name, **kwargs)
        self.experiment = experiment
        self.shift_lon = shift_lon
        #self.get_data()
        self.type = 'JSBACH_RAW2'
        self.input_format = input_format
        assert self.input_format in ['nc', 'grb']
        self.raw_outdata = raw_outdata
        self._unique_name = self._get_unique_name()
        # do preprocessing of streams (only needed once!) ---
        # self.files maps stream key ('jsbach', 'veg', ...) -> merged NetCDF file
        self.files = {}
        self._preproc_streams()
        #~ self.model_dict = copy.deepcopy(model_dict)
        self.model = 'JSBACH'
    # --- glob patterns for the individual raw output streams -----------------
    def _get_filenames_jsbach_stream(self):
        # monthly mean files of the main jsbach stream
        return self.data_dir + self.raw_outdata + self.experiment + '_jsbach_main_mm_*.' + self.input_format
    def _get_filenames_veg_stream(self):
        # monthly mean files of the vegetation stream
        return self.data_dir + self.raw_outdata + self.experiment + '_jsbach_veg_mm_*.' + self.input_format
    def _get_filenames_land_stream(self):
        # monthly mean files of the land stream
        return self.data_dir + self.raw_outdata + self.experiment + '_jsbach_land_mm_*.' + self.input_format
    def _get_filenames_surf_stream(self):
        # monthly mean files of the surface stream
        return self.data_dir + self.raw_outdata + self.experiment + '_jsbach_surf_mm_*.' + self.input_format
    def _get_filenames_albedo_VIS(self):
        # preprocessed VIS albedo files
        return self.data_dir + self.raw_outdata + self.experiment + '_jsbach_mm_*_VIS_albedo.' + self.input_format
    def _get_filenames_albedo_NIR(self):
        # preprocessed NIR albedo files
        return self.data_dir + self.raw_outdata + self.experiment + '_jsbach_mm_*_NIR_albedo.' + self.input_format
    def _get_filenames_echam_BOT(self):
        # ECHAM6 BOT stream (note the fixed '.sz' suffix and the '../echam6/' path)
        return self.data_dir + self.raw_outdata + '../echam6/' + self.experiment + '_echam6_BOT_mm_*.sz'
    def _preproc_streams(self):
        """
        It is assumed that the standard JSBACH postprocessing scripts have been applied.
        Thus monthly mean data is available for each stream and code tables still need to be applied.
        This routine does the following:
        1) merge all times from individual (monthly mean) output files
        2) assign codetables to work with proper variable names
        3) aggregate data from tiles to gridbox values
        """
        print 'Preprocessing JSBACH raw data streams (may take a while) ...'
        cdo = Cdo()
        # jsbach stream
        print '   JSBACH stream ...'
        outfile = get_temporary_directory() + self.experiment + '_jsbach_mm_full.nc'
        if os.path.exists(outfile):
            pass
        else:
            codetable = self.data_dir + 'log/' + self.experiment + '_jsbach.codes'
            # NOTE(review): tempfile.mktemp() is deprecated/race-prone;
            # tempfile.mkstemp() would be safer (applies to all streams below).
            tmp = tempfile.mktemp(suffix='.nc', prefix=self.experiment + '_jsbach_', dir=get_temporary_directory())  # temporary file
            #~ print self.data_dir
            #~ print self.raw_outdata
            #~ print 'Files: ', self._get_filenames_jsbach_stream()
            #~ stop
            if len(glob.glob(self._get_filenames_jsbach_stream())) > 0:  # check if input files existing at all
                print 'Mering the following files:', self._get_filenames_jsbach_stream()
                cdo.mergetime(options='-f nc', output=tmp, input=self._get_filenames_jsbach_stream())
                if os.path.exists(codetable):
                    cdo.monmean(options='-f nc', output=outfile, input='-setpartab,' + codetable + ' ' + tmp)  # monmean needed here, as otherwise interface does not work
                else:
                    cdo.monmean(options='-f nc', output=outfile, input=tmp)  # monmean needed here, as otherwise interface does not work
                print 'Outfile: ', outfile
                #~ os.remove(tmp)
                # NOTE(review): unlike the other streams below, the temporary
                # file is NOT removed here (os.remove is commented out).
                print 'Temporary name: ', tmp
        self.files.update({'jsbach': outfile})
        # veg stream
        print '   VEG stream ...'
        outfile = get_temporary_directory() + self.experiment + '_jsbach_veg_mm_full.nc'
        if os.path.exists(outfile):
            pass
        else:
            codetable = self.data_dir + 'log/' + self.experiment + '_jsbach_veg.codes'
            tmp = tempfile.mktemp(suffix='.nc', prefix=self.experiment + '_jsbach_veg_', dir=get_temporary_directory())  # temporary file
            if len(glob.glob(self._get_filenames_veg_stream())) > 0:  # check if input files existing at all
                cdo.mergetime(options='-f nc', output=tmp, input=self._get_filenames_veg_stream())
                if os.path.exists(codetable):
                    cdo.monmean(options='-f nc', output=outfile, input='-setpartab,' + codetable + ' ' + tmp)  # monmean needed here, as otherwise interface does not work
                else:
                    cdo.monmean(options='-f nc', output=outfile, input=tmp)  # monmean needed here, as otherwise interface does not work
                os.remove(tmp)
        self.files.update({'veg': outfile})
        # veg land
        print '   LAND stream ...'
        outfile = get_temporary_directory() + self.experiment + '_jsbach_land_mm_full.nc'
        if os.path.exists(outfile):
            pass
        else:
            codetable = self.data_dir + 'log/' + self.experiment + '_jsbach_land.codes'
            tmp = tempfile.mktemp(suffix='.nc', prefix=self.experiment + '_jsbach_land_', dir=get_temporary_directory())  # temporary file
            if len(glob.glob(self._get_filenames_land_stream())) > 0:  # check if input files existing at all
                cdo.mergetime(options='-f nc', output=tmp, input=self._get_filenames_land_stream())
                if os.path.exists(codetable):
                    cdo.monmean(options='-f nc', output=outfile, input='-setpartab,' + codetable + ' ' + tmp)  # monmean needed here, as otherwise interface does not work
                else:
                    cdo.monmean(options='-f nc', output=outfile, input=tmp)  # monmean needed here, as otherwise interface does not work
                os.remove(tmp)
        self.files.update({'land': outfile})
        # surf stream
        print '   SURF stream ...'
        outfile = get_temporary_directory() + self.experiment + '_jsbach_surf_mm_full.nc'
        if os.path.exists(outfile):
            pass
        else:
            codetable = self.data_dir + 'log/' + self.experiment + '_jsbach_surf.codes'
            tmp = tempfile.mktemp(suffix='.nc', prefix=self.experiment + '_jsbach_surf_', dir=get_temporary_directory())  # temporary file
            if len(glob.glob(self._get_filenames_surf_stream())) > 0:  # check if input files existing at all
                print glob.glob(self._get_filenames_surf_stream())
                cdo.mergetime(options='-f nc', output=tmp, input=self._get_filenames_surf_stream())
                if os.path.exists(codetable):
                    cdo.monmean(options='-f nc', output=outfile, input='-setpartab,' + codetable + ' ' + tmp)  # monmean needed here, as otherwise interface does not work
                else:
                    cdo.monmean(options='-f nc', output=outfile, input=tmp)  # monmean needed here, as otherwise interface does not work
                os.remove(tmp)
        self.files.update({'surf': outfile})
        # ECHAM BOT stream
        print '   BOT stream ...'
        outfile = get_temporary_directory() + self.experiment + '_echam6_echam_mm_full.nc'
        if os.path.exists(outfile):
            pass
        else:
            codetable = self.data_dir + 'log/' + self.experiment + '_echam6_echam.codes'
            tmp = tempfile.mktemp(suffix='.nc', prefix=self.experiment + '_echam6_echam_', dir=get_temporary_directory())  # temporary file
            if len(glob.glob(self._get_filenames_echam_BOT())) > 0:  # check if input files existing at all
                cdo.mergetime(options='-f nc', output=tmp, input=self._get_filenames_echam_BOT())
                if os.path.exists(codetable):
                    cdo.monmean(options='-f nc', output=outfile, input='-setpartab,' + codetable + ' ' + tmp)  # monmean needed here, as otherwise interface does not work
                else:
                    cdo.monmean(options='-f nc', output=outfile, input=tmp)  # monmean needed here, as otherwise interface does not work
                os.remove(tmp)
        self.files.update({'echam': outfile})
        # ALBEDO file
        # albedo files as preprocessed by a script of Thomas
        print '   ALBEDO VIS stream ...'
        outfile = get_temporary_directory() + self.experiment + '_jsbach_VIS_albedo_mm_full.nc'
        if os.path.exists(outfile):
            pass
        else:
            if len(glob.glob(self._get_filenames_albedo_VIS())) > 0:  # check if input files existing at all
                cdo.mergetime(options='-f nc', output=outfile, input=self._get_filenames_albedo_VIS())
        self.files.update({'albedo_vis': outfile})
        print '   ALBEDO NIR stream ...'
        outfile = get_temporary_directory() + self.experiment + '_jsbach_NIR_albedo_mm_full.nc'
        if os.path.exists(outfile):
            pass
        else:
            if len(glob.glob(self._get_filenames_albedo_NIR())) > 0:  # check if input files existing at all
                cdo.mergetime(options='-f nc', output=outfile, input=self._get_filenames_albedo_NIR())
        self.files.update({'albedo_nir': outfile})
    def _get_unique_name(self):
        """
        get unique name from model and experiment
        @return: string with unique combination of models and experiment
        """
        return self.name.replace(' ', '') + '-' + self.experiment.replace(' ', '')
    def get_albedo_data(self, interval='season'):
        """
        calculate albedo as ratio of upward and downwelling fluxes
        first the monthly mean fluxes are used to calculate the albedo,
        This routine uses the definitions of the routines how to
        read upward and downward fluxes
        """
        if self.start_time is None:
            raise ValueError('Start time needs to be specified')
        if self.stop_time is None:
            raise ValueError('Stop time needs to be specified')
        #~ tmpdict = copy.deepcopy(kwargs)
        #~ print self.dic_vars
        # dic_vars maps variable keys to strings containing a method call
        # expression -- presumably of the form 'get_xxx(interval=interval)';
        # TODO confirm against the configuration that fills dic_vars.
        routine_up = self.dic_vars['surface_upward_flux']
        routine_down = self.dic_vars['sis']
        #sw_down = self.get_surface_shortwave_radiation_down(interval=interval, **kwargs)
        # NOTE(review): exec of configuration strings is fragile; a
        # getattr-based dispatch would be safer.
        cmd = 'sw_down = self.' + routine_down
        exec(cmd)
        #sw_up = self.get_surface_shortwave_radiation_up(interval=interval, **kwargs)
        cmd = 'sw_up = self.' + routine_up
        exec(cmd)
        # climatological mean
        alb = sw_up[0].div(sw_down[0])
        alb.label = self.experiment + ' albedo'
        alb.unit = '-'
        # original data
        alb_org = sw_up[1][2].div(sw_down[1][2])
        alb_org.label = self.experiment + ' albedo'
        alb_org.unit = '-'
        retval = (alb_org.time, alb_org.fldmean(), alb_org)
        return alb, retval
    def get_albedo_data_vis(self, interval='season', **kwargs):
        """
        This routine retrieves the JSBACH albedo information for VIS
        it requires a preprocessing with a script that aggregates from tile
        to box values!
        Parameters
        ----------
        interval : str
            ['season','monthly']
        """
        #~ tmpdict = copy.deepcopy(self.model_dict['albedo_vis'])
        return self.get_jsbach_data_generic(interval=interval, **kwargs)
    def get_albedo_data_nir(self, interval='season', **kwargs):
        """
        This routine retrieves the JSBACH albedo information for VIS
        it requires a preprocessing with a script that aggregates from tile
        to box values!
        Parameters
        ----------
        interval : str
            ['season','monthly']
        """
        #~ tmpdict = copy.deepcopy(self.model_dict['albedo_nir'])
        return self.get_jsbach_data_generic(interval=interval, **kwargs)
    # --- thin wrappers: all of these delegate to get_jsbach_data_generic() ---
    def get_surface_shortwave_radiation_up(self, interval='season', **kwargs):
        return self.get_jsbach_data_generic(interval=interval, **kwargs)
    def get_surface_shortwave_radiation_down(self, interval='season', **kwargs):
        return self.get_jsbach_data_generic(interval=interval, **kwargs)
    def get_rainfall_data(self, interval='season', **kwargs):
        return self.get_jsbach_data_generic(interval=interval, **kwargs)
    def get_temperature_2m(self, interval='season', **kwargs):
        return self.get_jsbach_data_generic(interval=interval, **kwargs)
    def get_jsbach_data_generic(self, interval='season', **kwargs):
        """
        unique parameters are:
            filename - file basename
            variable - name of the variable as the short_name in the netcdf file
        kwargs is a dictionary with keys for each model. Then a dictionary with properties follows
        """
        if not self.type in kwargs.keys():
            print 'WARNING: it is not possible to get data using generic function, as method missing: ', self.type, kwargs.keys()
            return None
        print self.type
        print kwargs
        locdict = kwargs[self.type]
        # read settings and details from the keyword arguments
        # no defaults; everything should be explicitely specified in either the config file or the dictionaries
        varname = locdict.pop('variable')
        units = locdict.pop('unit', 'Unit not specified')
        lat_name = locdict.pop('lat_name', 'lat')
        lon_name = locdict.pop('lon_name', 'lon')
        #model_suffix = locdict.pop('model_suffix')
        #model_prefix = locdict.pop('model_prefix')
        file_format = locdict.pop('file_format')
        scf = locdict.pop('scale_factor')
        valid_mask = locdict.pop('valid_mask')
        custom_path = locdict.pop('custom_path', None)
        thelevel = locdict.pop('level', None)
        target_grid = self._actplot_options['targetgrid']
        interpolation = self._actplot_options['interpolation']
        if self.type != 'JSBACH_RAW2':
            print self.type
            raise ValueError('Invalid data format here!')
        # define from which stream of JSBACH data needs to be taken for specific variables
        if varname in ['swdown_acc', 'swdown_reflect_acc']:
            filename1 = self.files['jsbach']
        elif varname in ['precip_acc']:
            filename1 = self.files['land']
        elif varname in ['temp2']:
            filename1 = self.files['echam']
        elif varname in ['var14']:  # albedo vis
            filename1 = self.files['albedo_vis']
        elif varname in ['var15']:  # albedo NIR
            filename1 = self.files['albedo_nir']
        else:
            print varname
            raise ValueError('Unknown variable type for JSBACH_RAW2 processing!')
        force_calc = False
        if self.start_time is None:
            raise ValueError('Start time needs to be specified')
        if self.stop_time is None:
            raise ValueError('Stop time needs to be specified')
        #/// PREPROCESSING ///
        cdo = Cdo()
        s_start_time = str(self.start_time)[0:10]
        s_stop_time = str(self.stop_time)[0:10]
        #1) select timeperiod and generate monthly mean file
        if target_grid == 't63grid':
            gridtok = 'T63'
        else:
            gridtok = 'SPECIAL_GRID'
        file_monthly = filename1[:-3] + '_' + s_start_time + '_' + s_stop_time + '_' + gridtok + '_monmean.nc'  # target filename
        file_monthly = get_temporary_directory() + os.path.basename(file_monthly)
        sys.stdout.write('\n *** Model file monthly: %s\n' % file_monthly)
        if not os.path.exists(filename1):
            print 'WARNING: File not existing: ' + filename1
            return None
        cdo.monmean(options='-f nc', output=file_monthly, input='-' + interpolation + ',' + target_grid + ' -seldate,' + s_start_time + ',' + s_stop_time + ' ' + filename1, force=force_calc)
        sys.stdout.write('\n *** Reading model data... \n')
        sys.stdout.write('     Interval: ' + interval + '\n')
        #2) calculate monthly or seasonal climatology
        if interval == 'monthly':
            mdata_clim_file = file_monthly[:-3] + '_ymonmean.nc'
            mdata_sum_file = file_monthly[:-3] + '_ymonsum.nc'
            mdata_N_file = file_monthly[:-3] + '_ymonN.nc'
            mdata_clim_std_file = file_monthly[:-3] + '_ymonstd.nc'
            cdo.ymonmean(options='-f nc -b 32', output=mdata_clim_file, input=file_monthly, force=force_calc)
            cdo.ymonsum(options='-f nc -b 32', output=mdata_sum_file, input=file_monthly, force=force_calc)
            cdo.ymonstd(options='-f nc -b 32', output=mdata_clim_std_file, input=file_monthly, force=force_calc)
            cdo.div(options='-f nc', output=mdata_N_file, input=mdata_sum_file + ' ' + mdata_clim_file, force=force_calc)  # number of samples
        elif interval == 'season':
            mdata_clim_file = file_monthly[:-3] + '_yseasmean.nc'
            mdata_sum_file = file_monthly[:-3] + '_yseassum.nc'
            mdata_N_file = file_monthly[:-3] + '_yseasN.nc'
            mdata_clim_std_file = file_monthly[:-3] + '_yseasstd.nc'
            cdo.yseasmean(options='-f nc -b 32', output=mdata_clim_file, input=file_monthly, force=force_calc)
            cdo.yseassum(options='-f nc -b 32', output=mdata_sum_file, input=file_monthly, force=force_calc)
            cdo.yseasstd(options='-f nc -b 32', output=mdata_clim_std_file, input=file_monthly, force=force_calc)
            cdo.div(options='-f nc -b 32', output=mdata_N_file, input=mdata_sum_file + ' ' + mdata_clim_file, force=force_calc)  # number of samples
        else:
            raise ValueError('Unknown temporal interval. Can not perform preprocessing! ')
        if not os.path.exists(mdata_clim_file):
            # NOTE(review): bare None here, while the success path returns a
            # (mdata, retval) tuple -- callers that unpack two values will fail.
            return None
        #3) read data
        if interval == 'monthly':
            thetime_cylce = 12
        elif interval == 'season':
            thetime_cylce = 4
        else:
            print interval
            raise ValueError('Unsupported interval!')
        mdata = Data(mdata_clim_file, varname, read=True, label=self.model, unit=units, lat_name=lat_name, lon_name=lon_name, shift_lon=False, scale_factor=scf, level=thelevel, time_cycle=thetime_cylce)
        mdata_std = Data(mdata_clim_std_file, varname, read=True, label=self.model + ' std', unit='-', lat_name=lat_name, lon_name=lon_name, shift_lon=False, level=thelevel, time_cycle=thetime_cylce)
        mdata.std = mdata_std.data.copy()
        del mdata_std
        mdata_N = Data(mdata_N_file, varname, read=True, label=self.model + ' std', unit='-', lat_name=lat_name, lon_name=lon_name, shift_lon=False, scale_factor=scf, level=thelevel)
        mdata.n = mdata_N.data.copy()
        del mdata_N
        # ensure that climatology always starts with January, therefore set date and then sort
        mdata.adjust_time(year=1700, day=15)  # set arbitrary time for climatology
        mdata.timsort()
        #4) read monthly data
        mdata_all = Data(file_monthly, varname, read=True, label=self.model, unit=units, lat_name=lat_name, lon_name=lon_name, shift_lon=False, time_cycle=12, scale_factor=scf, level=thelevel)
        mdata_all.adjust_time(day=15)
        if target_grid == 't63grid':
            mdata._apply_mask(get_T63_landseamask(False, area=valid_mask))
            mdata_all._apply_mask(get_T63_landseamask(False, area=valid_mask))
        else:
            tmpmsk = get_generic_landseamask(False, area=valid_mask, target_grid=target_grid)
            mdata._apply_mask(tmpmsk)
            mdata_all._apply_mask(tmpmsk)
            del tmpmsk
        mdata_mean = mdata_all.fldmean()
        # return data as a tuple list
        retval = (mdata_all.time, mdata_mean, mdata_all)
        del mdata_all
        return mdata, retval
class JSBACH_SPECIAL(JSBACH_RAW2):
    """
    special class for more flexible reading of JSBACH input data
    it allows to specify the input format and the directory of the input data
    in case that you use a different setup, it is probably easiest to
    just copy this class and make the required adaptations.
    """
    def __init__(self, filename, dic_variables, experiment, name='', shift_lon=False, model_dict=None, input_format='nc', raw_outdata='', **kwargs):
        # NOTE(review): JSBACH_RAW2.__init__ no longer declares a model_dict
        # parameter (it was removed from that signature), so model_dict is
        # forwarded via **kwargs down to Model.__init__ -- confirm that Model
        # tolerates/ignores it, otherwise this call raises a TypeError.
        super(JSBACH_SPECIAL, self).__init__(filename, dic_variables, experiment, name=name, shift_lon=shift_lon, model_dict=model_dict, input_format=input_format, raw_outdata=raw_outdata, **kwargs)
class xxxxxxxxJSBACH_RAW(Model):
    """
    Class for RAW JSBACH model output
    works on manually preprocessed already concatenated data
    NOTE(review): this class has been deliberately disabled: it was renamed
    with the 'xxxxxxxx' prefix and the constructor executes the undefined
    name 'stop', so any instantiation fails immediately.
    """
    def __init__(self, filename, dic_variables, experiment, name='', shift_lon=False, intervals='monthly', **kwargs):
        # NOTE(review): super() still references the old class name JSBACH_RAW,
        # which is no longer defined -- this raises a NameError before the
        # 'stop' line below is even reached (consistent with the deprecation).
        super(JSBACH_RAW, self).__init__(filename, dic_variables, name=name, intervals=intervals, **kwargs)
        print('WARNING: This model class should be depreciated as it contained a lot of hardcoded dependencies and is only intermediate')
        #TODO: depreciate this class
        # 'stop' is an undefined name on purpose: raises NameError to halt use
        stop
        self.experiment = experiment
        self.shift_lon = shift_lon
        self.type = 'JSBACH_RAW'
        self._unique_name = self._get_unique_name()
    def _get_unique_name(self):
        """
        get unique name from model and experiment
        """
        return self.name.replace(' ', '') + '-' + self.experiment.replace(' ', '')
    def get_temperature_2m(self, interval='monthly', **kwargs):
        """
        get surface temperature (2m) from JSBACH model results
        Parameters
        ----------
        interval : str
            specifies the aggregation interval. Possible options: ['season','monthly']
        """
        locdict = kwargs[self.type]
        y1 = '1980-01-01'  # TODO move this to the JSON dictionary or some parameter file
        y2 = '2010-12-31'
        variable = 'temp2'
        rawfile = self.data_dir + self.experiment + '_echam6_echam_' + variable + '_ALL.nc'
        files = glob.glob(rawfile)
        # exactly one concatenated input file is expected
        if len(files) != 1:
            print 'Inputfiles: ', files
            raise ValueError('Something went wrong: Invalid number of input files!')
        else:
            rawfile = files[0]
        mdata, retval = self._do_preprocessing(rawfile, variable, y1, y2, interval=interval, valid_mask=locdict['valid_mask'])
        return mdata, retval
    def get_albedo_data(self, interval='monthly', **kwargs):
        """
        calculate albedo as ratio of upward and downwelling fluxes
        first the monthly mean fluxes are used to calculate the albedo,
        """
        # read land-sea mask
        ls_mask = get_T63_landseamask(self.shift_lon)  # TODO make this more flexible
        if self.start_time is None:
            raise ValueError('Start time needs to be specified')
        if self.stop_time is None:
            raise ValueError('Stop time needs to be specified')
        Fd = self.get_surface_shortwave_radiation_down(**kwargs)
        Fu = self.get_surface_shortwave_radiation_up(**kwargs)
        if Fu is None:
            print 'File not existing for UPWARD flux!: ', self.name
            return None
        else:
            Fu_i = Fu[0]
        if Fu_i is None:
            return None
        if Fd is None:
            print 'File not existing for DOWNWARD flux!: ', self.name
            return None
        else:
            Fd_i = Fd[0]
        if Fd_i is None:
            return None
        lab = Fu_i.label
        # albedo for chosen interval as caluclated as ratio of means of fluxes in that interval (e.g. season, months)
        Fu_i.div(Fd_i, copy=False)
        del Fd_i  # Fu contains now the albedo
        Fu_i._apply_mask(ls_mask.data)
        #albedo for monthly data (needed for global mean plots )
        Fu_m = Fu[1][2]
        del Fu
        Fd_m = Fd[1][2]
        del Fd
        Fu_m.div(Fd_m, copy=False)
        del Fd_m
        Fu_m._apply_mask(ls_mask.data)
        # physically valid albedo range
        Fu_m._set_valid_range(0., 1.)
        Fu_m.label = lab + ' albedo'
        Fu_i.label = lab + ' albedo'
        Fu_m.unit = '-'
        Fu_i.unit = '-'
        # center dates of months
        Fu_m.adjust_time(day=15)
        Fu_i.adjust_time(day=15)
        # return data as a tuple list
        retval = (Fu_m.time, Fu_m.fldmean(), Fu_m)
        return Fu_i, retval
    #-----------------------------------------------------------------------
    def _do_preprocessing(self, rawfile, varname, s_start_time, s_stop_time, interval='monthly', force_calc=False, valid_mask='global', target_grid='t63grid'):
        """
        perform preprocessing
        * selection of variable
        * temporal subsetting
        """
        cdo = Cdo()
        if not os.path.exists(rawfile):
            print('File not existing! %s ' % rawfile)
            return None, None
        # calculate monthly means
        file_monthly = get_temporary_directory() + os.sep + os.path.basename(rawfile[:-3]) + '_' + varname + '_' + s_start_time + '_' + s_stop_time + '_mm.nc'
        if (force_calc) or (not os.path.exists(file_monthly)):
            cdo.monmean(options='-f nc', output=file_monthly, input='-seldate,' + s_start_time + ',' + s_stop_time + ' ' + '-selvar,' + varname + ' ' + rawfile, force=force_calc)
        else:
            pass
        if not os.path.exists(file_monthly):
            raise ValueError('Monthly preprocessing did not work! %s ' % file_monthly)
        # calculate monthly or seasonal climatology
        if interval == 'monthly':
            mdata_clim_file = file_monthly[:-3] + '_ymonmean.nc'
            mdata_sum_file = file_monthly[:-3] + '_ymonsum.nc'
            mdata_N_file = file_monthly[:-3] + '_ymonN.nc'
            mdata_clim_std_file = file_monthly[:-3] + '_ymonstd.nc'
            cdo.ymonmean(options='-f nc -b 32', output=mdata_clim_file, input=file_monthly, force=force_calc)
            cdo.ymonsum(options='-f nc -b 32', output=mdata_sum_file, input=file_monthly, force=force_calc)
            cdo.ymonstd(options='-f nc -b 32', output=mdata_clim_std_file, input=file_monthly, force=force_calc)
            cdo.div(options='-f nc', output=mdata_N_file, input=mdata_sum_file + ' ' + mdata_clim_file, force=force_calc)  # number of samples
        elif interval == 'season':
            mdata_clim_file = file_monthly[:-3] + '_yseasmean.nc'
            mdata_sum_file = file_monthly[:-3] + '_yseassum.nc'
            mdata_N_file = file_monthly[:-3] + '_yseasN.nc'
            mdata_clim_std_file = file_monthly[:-3] + '_yseasstd.nc'
            cdo.yseasmean(options='-f nc -b 32', output=mdata_clim_file, input=file_monthly, force=force_calc)
            cdo.yseassum(options='-f nc -b 32', output=mdata_sum_file, input=file_monthly, force=force_calc)
            cdo.yseasstd(options='-f nc -b 32', output=mdata_clim_std_file, input=file_monthly, force=force_calc)
            cdo.div(options='-f nc -b 32', output=mdata_N_file, input=mdata_sum_file + ' ' + mdata_clim_file, force=force_calc)  # number of samples
        else:
            raise ValueError('Unknown temporal interval. Can not perform preprocessing!')
        if not os.path.exists(mdata_clim_file):
            # NOTE(review): returns a single None here although the early exit
            # above returns (None, None) and callers unpack two values.
            return None
        # read data
        if interval == 'monthly':
            thetime_cylce = 12
        elif interval == 'season':
            thetime_cylce = 4
        else:
            print interval
            raise ValueError('Unsupported interval!')
        mdata = Data(mdata_clim_file, varname, read=True, label=self.name, shift_lon=False, time_cycle=thetime_cylce, lat_name='lat', lon_name='lon')
        mdata_std = Data(mdata_clim_std_file, varname, read=True, label=self.name + ' std', unit='-', shift_lon=False, time_cycle=thetime_cylce, lat_name='lat', lon_name='lon')
        mdata.std = mdata_std.data.copy()
        del mdata_std
        mdata_N = Data(mdata_N_file, varname, read=True, label=self.name + ' std', shift_lon=False, lat_name='lat', lon_name='lon')
        mdata.n = mdata_N.data.copy()
        del mdata_N
        # ensure that climatology always starts with January, therefore set date and then sort
        mdata.adjust_time(year=1700, day=15)  # set arbitrary time for climatology
        mdata.timsort()
        #4) read monthly data
        mdata_all = Data(file_monthly, varname, read=True, label=self.name, shift_lon=False, time_cycle=12, lat_name='lat', lon_name='lon')
        mdata_all.adjust_time(day=15)
        #mask_antarctica masks everything below 60 degree S.
        #here we only mask Antarctica, if only LAND points shall be used
        if valid_mask == 'land':
            mask_antarctica = True
        elif valid_mask == 'ocean':
            mask_antarctica = False
        else:
            mask_antarctica = False
        if target_grid == 't63grid':
            mdata._apply_mask(get_T63_landseamask(False, area=valid_mask, mask_antarctica=mask_antarctica))
            mdata_all._apply_mask(get_T63_landseamask(False, area=valid_mask, mask_antarctica=mask_antarctica))
        else:
            tmpmsk = get_generic_landseamask(False, area=valid_mask, target_grid=target_grid, mask_antarctica=mask_antarctica)
            mdata._apply_mask(tmpmsk)
            mdata_all._apply_mask(tmpmsk)
            del tmpmsk
        mdata_mean = mdata_all.fldmean()
        # return data as a tuple list
        retval = (mdata_all.time, mdata_mean, mdata_all)
        del mdata_all
        return mdata, retval
    def get_surface_shortwave_radiation_down(self, interval='monthly', **kwargs):
        """
        get surface shortwave incoming radiation data for JSBACH
        Parameters
        ----------
        interval : str
            specifies the aggregation interval. Possible options: ['season','monthly']
        """
        locdict = kwargs[self.type]
        y1 = '1980-01-01'  # TODO move this to the JSON dictionary or some parameter file
        y2 = '2010-12-31'
        rawfile = self.data_dir + self.experiment + '_jsbach_' + y1[0: 4] + '_' + y2[0: 4] + '.nc'
        mdata, retval = self._do_preprocessing(rawfile, 'swdown_acc', y1, y2, interval=interval, valid_mask=locdict['valid_mask'])
        return mdata, retval
    #-----------------------------------------------------------------------
    def get_surface_shortwave_radiation_up(self, interval='monthly', **kwargs):
        """
        get surface shortwave upward radiation data for JSBACH
        Parameters
        ----------
        interval : str
            specifies the aggregation interval. Possible options: ['season','monthly']
        """
        locdict = kwargs[self.type]
        y1 = '1980-01-01'  # TODO: move this to the JSON dictionary or some parameter file
        y2 = '2010-12-31'
        rawfile = self.data_dir + self.experiment + '_jsbach_' + y1[0: 4] + '_' + y2[0: 4] + '.nc'
        mdata, retval = self._do_preprocessing(rawfile, 'swdown_reflect_acc', y1, y2, interval=interval, valid_mask=locdict['valid_mask'])
        return mdata, retval
    #-----------------------------------------------------------------------
    def get_model_data_generic(self, interval='monthly', **kwargs):
        """
        This is only a wrapper to redirect to individual functions
        for the JSBACH_RAW class
        Currently only the usage for rainfall is supported!
        """
        # HACK: only a wrapper, should be depreciated
        raise ValueError('Rainfall analysis not working yet!')
        # unreachable: the raise above always fires
        self.get_rainfall_data(interval=interval, **kwargs)
    def get_rainfall_data(self, interval='monthly', **kwargs):
        """
        get surface rainfall data for JSBACH
        uses already preprocessed data where the convective and
        advective rainfall has been merged
        Parameters
        ----------
        interval : str
            specifies the aggregation interval. Possible options: ['season','monthly']
        """
        locdict = kwargs[self.type]
        y1 = '1980-01-01'  # TODO : move this to the JSON dictionary or some parameter file
        y2 = '2010-12-31'
        variable = 'aprc'
        rawfile = self.data_dir + self.experiment + '_echam6_echam_*_precipitation.nc'
        files = glob.glob(rawfile)
        # exactly one merged precipitation file is expected
        if len(files) != 1:
            print 'Inputfiles: ', files
            raise ValueError('Something went wrong: Invalid number of input files!')
        else:
            rawfile = files[0]
        mdata, retval = self._do_preprocessing(rawfile, variable, y1, y2, interval=interval, valid_mask=locdict['valid_mask'])
        return mdata, retval
    #-----------------------------------------------------------------------
    def get_gpp_data(self, interval='season'):
        """
        get surface GPP data for JSBACH
        todo temporal aggregation of data --> or leave it to the user!
        """
        cdo = Cdo()
        v = 'var167'
        y1 = str(self.start_time)[0:10]
        y2 = str(self.stop_time)[0:10]
        rawfilename = self.data_dir + 'data/model/' + self.experiment + '_' + y1[0:4] + '-' + y2[0:4] + '.nc'
        # number of timesteps already present in the raw file
        times_in_file = int(''.join(cdo.ntime(input=rawfilename)))
        # aggregate only if the file does not already contain the climatology
        if interval == 'season':
            if times_in_file != 4:
                tmp_file = get_temporary_directory() + os.path.basename(rawfilename)
                cdo.yseasmean(options='-f nc -b 32 -r ', input='-selvar,' + v + ' ' + rawfilename, output=tmp_file[:-3] + '_yseasmean.nc')
                rawfilename = tmp_file[:-3] + '_yseasmean.nc'
        if interval == 'monthly':
            if times_in_file != 12:
                tmp_file = get_temporary_directory() + os.path.basename(rawfilename)
                cdo.ymonmean(options='-f nc -b 32 -r ', input='-selvar,' + v + ' ' + rawfilename, output=tmp_file[:-3] + '_ymonmean.nc')
                rawfilename = tmp_file[:-3] + '_ymonmean.nc'
        if not os.path.exists(rawfilename):
            return None
        filename = rawfilename
        #--- read land-sea mask
        ls_mask = get_T63_landseamask(self.shift_lon)
        #--- read SW up data
        gpp = Data4D(filename, v, read=True,
                     label=self.experiment + ' ' + v, unit='gC m-2 a-1', lat_name='lat', lon_name='lon',
                     shift_lon=self.shift_lon,
                     mask=ls_mask.data.data, scale_factor=3600. * 24. * 30. / 0.083
                     )
        # collapse the 4D (tile) data to gridbox sums
        return gpp.sum_data4D()
    #-----------------------------------------------------------------------
| pygeo/pycmbs | pycmbs/benchmarking/models/mpi_esm.py | Python | mit | 41,720 |
var helpers = require('./');
/** Boids is a lot like Castle in the way it treats elements. But it is more
predictable, and retains attribute information.
1. The root node is collapsed, but its name is preserved in document['*']
2. Attributes are stored in a Object<String -> String> mapping, called "$".
* XML elements that are named '$' will conflict with the way boids represents
attributes.
3. in between elements is discarded.
4. Namespacing is taken literally.
5. Relative order between siblings of different names (or text nodes) is
disregarded, though order between siblings of the same name will be
preserved.
*/
function convertNode(node) {
  /** Recursively convert one XML element into a plain JS object.

  Attributes go into the '$' mapping, child elements are grouped by full
  name into arrays, and all text content is concatenated into '_'. */
  var result = {$: {}};
  // collect every attribute under '$', keyed by its full (namespaced) name
  node.attrs().forEach(function(attribute) {
    result.$[helpers.fullName.apply(attribute)] = attribute.value();
  });
  var textParts = [];
  node.childNodes().forEach(function(child) {
    if (child.type() == 'element') {
      var name = helpers.fullName.apply(child);
      if (result[name] === undefined) {
        result[name] = [];
      }
      result[name].push(convertNode(child));
    } else {
      // non-element nodes contribute only their text
      textParts.push(child.text());
    }
  });
  // join the text pieces, trim, and store the parsed value under '_'
  result._ = helpers.parseString(textParts.join('').trim());
  return result;
}
function boids(xml, opts) {
  // Parse the document, convert from the root element down, and record the
  // collapsed root's name under the special '*' key.
  var rootElement = helpers.parseXML(xml);
  var result = convertNode(rootElement);
  result['*'] = rootElement.name();
  return result;
}
module.exports = boids;
| chbrown/xmlconv | lib/boids.js | JavaScript | mit | 1,571 |
let express = require('express');
let bodyParser = require('body-parser');
let path = require('path');
module.exports = function() {
let app = express();
app.use(bodyParser.urlencoded({
extended: true
}));
app.use(bodyParser.json());
app.use('/gallery', express.static('public'));
app.get('/gallery', function(req,res) {
res.sendFile(path.join(__dirname + '/../index.html'));
});
return app;
};
| owlsketch/the-gallery | config/express.js | JavaScript | mit | 413 |
import { module, test } from 'qunit';

// Unit-test module for the fd-common-primitives utility.
module('Unit | Utility | fd common primitives');

// Replace this with your real tests.
// Placeholder smoke test: always passes so the module registers in CI.
test('it works', function(assert) {
  assert.ok(true);
});
| Flexberry/ember-flexberry-designer | tests/unit/utils/fd-common-primitives-test.js | JavaScript | mit | 186 |
using System.Reflection;

// Assembly identity and version metadata for the CSharp GeckoBoard Push package.
[assembly: AssemblyVersion("3.1.1")]
[assembly: AssemblyFileVersion("3.1.1")]
[assembly: AssemblyProduct("CSharp GeckoBoard Push")]
[assembly: AssemblyCompany("Mattias Nordqvist")]
[assembly: AssemblyCopyright("Copyright © Mattias Nordqvist")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
using Avalonia.Controls.Primitives;
using Avalonia.Input;
using Avalonia.Platform;
using Avalonia.UnitTests;
using Moq;
using Xunit;
namespace Avalonia.Controls.UnitTests
{
/// <summary>
/// Tests for <c>GridSplitter</c>: orientation detection from its grid
/// placement, and drag-resizing clamped to row/column min/max constraints.
/// </summary>
public class GridSplitterTests
{
    public GridSplitterTests()
    {
        // GridSplitter requests a resize cursor; bind a mocked cursor factory
        // so the tests can run without a real windowing platform.
        var cursorFactoryImpl = new Mock<IStandardCursorFactory>();
        AvaloniaLocator.CurrentMutable.Bind<IStandardCursorFactory>().ToConstant(cursorFactoryImpl.Object);
    }

    [Fact]
    public void Detects_Horizontal_Orientation()
    {
        // Splitter occupies its own Auto row -> should get the :horizontal class.
        var grid = new Grid()
        {
            RowDefinitions = new RowDefinitions("*,Auto,*"),
            ColumnDefinitions = new ColumnDefinitions("*,*"),
            Children = new Controls()
            {
                new Border { [Grid.RowProperty] = 0 },
                new GridSplitter { [Grid.RowProperty] = 1, Name = "splitter" },
                new Border { [Grid.RowProperty] = 2 }
            }
        };
        var root = new TestRoot { Child = grid };
        root.Measure(new Size(100, 300));
        root.Arrange(new Rect(0, 0, 100, 300));
        Assert.Contains(grid.FindControl<GridSplitter>("splitter").Classes, ":horizontal".Equals);
    }

    [Fact]
    public void Detects_Vertical_Orientation()
    {
        // Splitter occupies its own Auto column -> should get the :vertical class.
        var grid = new Grid()
        {
            ColumnDefinitions = new ColumnDefinitions("*,Auto,*"),
            RowDefinitions = new RowDefinitions("*,*"),
            Children = new Controls()
            {
                new Border { [Grid.ColumnProperty] = 0 },
                new GridSplitter { [Grid.ColumnProperty] = 1, Name = "splitter" },
                new Border { [Grid.ColumnProperty] = 2 },
            }
        };
        var root = new TestRoot { Child = grid };
        root.Measure(new Size(100, 300));
        root.Arrange(new Rect(0, 0, 100, 300));
        Assert.Contains(grid.FindControl<GridSplitter>("splitter").Classes, ":vertical".Equals);
    }

    [Fact]
    public void Detects_With_Both_Auto()
    {
        // All rows and columns Auto: placement in a column still yields :vertical.
        var grid = new Grid()
        {
            ColumnDefinitions = new ColumnDefinitions("Auto,Auto,Auto"),
            RowDefinitions = new RowDefinitions("Auto,Auto"),
            Children = new Controls()
            {
                new Border { [Grid.ColumnProperty] = 0 },
                new GridSplitter { [Grid.ColumnProperty] = 1, Name = "splitter" },
                new Border { [Grid.ColumnProperty] = 2 },
            }
        };
        var root = new TestRoot { Child = grid };
        root.Measure(new Size(100, 300));
        root.Arrange(new Rect(0, 0, 100, 300));
        Assert.Contains(grid.FindControl<GridSplitter>("splitter").Classes, ":vertical".Equals);
    }

    [Fact]
    public void Horizontal_Stays_Within_Constraints()
    {
        var control1 = new Border { [Grid.RowProperty] = 0 };
        var splitter = new GridSplitter
        {
            [Grid.RowProperty] = 1,
        };
        var control2 = new Border { [Grid.RowProperty] = 2 };
        var rowDefinitions = new RowDefinitions()
        {
            new RowDefinition(1, GridUnitType.Star) { MinHeight = 70, MaxHeight = 110 },
            new RowDefinition(GridLength.Auto),
            new RowDefinition(1, GridUnitType.Star) { MinHeight = 10, MaxHeight = 140 },
        };
        var grid = new Grid()
        {
            RowDefinitions = rowDefinitions,
            Children = new Controls()
            {
                control1, splitter, control2
            }
        };
        var root = new TestRoot { Child = grid };
        root.Measure(new Size(100, 200));
        root.Arrange(new Rect(0, 0, 100, 200));
        // Drag far up: first row is clamped to its MinHeight (70 of 200 total).
        splitter.RaiseEvent(new VectorEventArgs
        {
            RoutedEvent = Thumb.DragDeltaEvent,
            Vector = new Vector(0, -100)
        });
        Assert.Equal(rowDefinitions[0].Height, new GridLength(70, GridUnitType.Star));
        Assert.Equal(rowDefinitions[2].Height, new GridLength(130, GridUnitType.Star));
        // Drag far down: first row is clamped to its MaxHeight (110).
        splitter.RaiseEvent(new VectorEventArgs
        {
            RoutedEvent = Thumb.DragDeltaEvent,
            Vector = new Vector(0, 100)
        });
        Assert.Equal(rowDefinitions[0].Height, new GridLength(110, GridUnitType.Star));
        Assert.Equal(rowDefinitions[2].Height, new GridLength(90, GridUnitType.Star));
    }

    [Fact]
    public void In_First_Position_Doesnt_Throw_Exception()
    {
        // Splitter in column 0 has no preceding definition to resize;
        // dragging must be a no-op rather than an exception.
        var grid = new Grid()
        {
            ColumnDefinitions = new ColumnDefinitions("Auto,*,*"),
            RowDefinitions = new RowDefinitions("*,*"),
            Children = new Controls()
            {
                new GridSplitter { [Grid.ColumnProperty] = 0, Name = "splitter" },
                new Border { [Grid.ColumnProperty] = 1 },
                new Border { [Grid.ColumnProperty] = 2 },
            }
        };
        var root = new TestRoot { Child = grid };
        root.Measure(new Size(100, 300));
        root.Arrange(new Rect(0, 0, 100, 300));
        var splitter = grid.FindControl<GridSplitter>("splitter");
        splitter.RaiseEvent(new VectorEventArgs
        {
            RoutedEvent = Thumb.DragDeltaEvent,
            Vector = new Vector(100, 1000)
        });
    }

    [Fact]
    public void Vertical_Stays_Within_Constraints()
    {
        var control1 = new Border { [Grid.ColumnProperty] = 0 };
        var splitter = new GridSplitter
        {
            [Grid.ColumnProperty] = 1,
        };
        var control2 = new Border { [Grid.ColumnProperty] = 2 };
        var columnDefinitions = new ColumnDefinitions()
        {
            new ColumnDefinition(1, GridUnitType.Star) { MinWidth = 10, MaxWidth = 190 },
            new ColumnDefinition(GridLength.Auto),
            new ColumnDefinition(1, GridUnitType.Star) { MinWidth = 80, MaxWidth = 120 },
        };
        var grid = new Grid()
        {
            ColumnDefinitions = columnDefinitions,
            Children = new Controls()
            {
                control1, splitter, control2
            }
        };
        var root = new TestRoot { Child = grid };
        root.Measure(new Size(200, 100));
        root.Arrange(new Rect(0, 0, 200, 100));
        // Drag far left: second column's MinWidth (80) limits the move.
        splitter.RaiseEvent(new VectorEventArgs
        {
            RoutedEvent = Thumb.DragDeltaEvent,
            Vector = new Vector(-100, 0)
        });
        Assert.Equal(columnDefinitions[0].Width, new GridLength(80, GridUnitType.Star));
        Assert.Equal(columnDefinitions[2].Width, new GridLength(120, GridUnitType.Star));
        // Drag far right: second column's MaxWidth (120) limits the move.
        splitter.RaiseEvent(new VectorEventArgs
        {
            RoutedEvent = Thumb.DragDeltaEvent,
            Vector = new Vector(100, 0)
        });
        Assert.Equal(columnDefinitions[0].Width, new GridLength(120, GridUnitType.Star));
        Assert.Equal(columnDefinitions[2].Width, new GridLength(80, GridUnitType.Star));
    }
}
} | jazzay/Perspex | tests/Avalonia.Controls.UnitTests/GridSplitterTests.cs | C# | mit | 9,106 |
"use strict";
var __extends = (this && this.__extends) || function (d, b) {
for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p];
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var __metadata = (this && this.__metadata) || function (k, v) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
var React = require('react');
var _1 = require("../../../");
var bemBlock = require('bem-cn');
var size = require('lodash/size');
var toArray = require('lodash/toArray');
var map = require('lodash/map');
// NOTE(review): compiled output (TypeScript -> ES5 emit); prefer editing the
// original TypeScript source rather than this file.
// FilterGroupItem renders one removable filter value inside a FilterGroup.
var FilterGroupItem = (function (_super) {
    __extends(FilterGroupItem, _super);
    function FilterGroupItem(props) {
        _super.call(this, props);
        // Bind once so the handler keeps `this` when passed to FastClick.
        this.removeFilter = this.removeFilter.bind(this);
    }
    // Invoke the removeFilter callback from props (if provided) with this
    // item's filter.
    FilterGroupItem.prototype.removeFilter = function () {
        var _a = this.props, removeFilter = _a.removeFilter, filter = _a.filter;
        if (removeFilter) {
            removeFilter(filter);
        }
    };
    FilterGroupItem.prototype.render = function () {
        var _a = this.props, bemBlocks = _a.bemBlocks, label = _a.label, itemKey = _a.itemKey;
        return (React.createElement(_1.FastClick, {handler: this.removeFilter}, React.createElement("div", {className: bemBlocks.items("value"), "data-key": itemKey}, label)));
    };
    FilterGroupItem = __decorate([
        _1.PureRender,
        __metadata('design:paramtypes', [Object])
    ], FilterGroupItem);
    return FilterGroupItem;
}(React.Component));
exports.FilterGroupItem = FilterGroupItem;
// NOTE(review): compiled output (TypeScript -> ES5 emit); prefer editing the
// original TypeScript source rather than this file.
// FilterGroup renders a titled list of FilterGroupItem entries plus an
// optional "remove all" action.
var FilterGroup = (function (_super) {
    __extends(FilterGroup, _super);
    function FilterGroup(props) {
        _super.call(this, props);
        // Bind once so the handler keeps `this` when passed to FastClick.
        this.removeFilters = this.removeFilters.bind(this);
    }
    // Invoke the removeFilters callback from props (if provided) with the
    // whole filter list.
    FilterGroup.prototype.removeFilters = function () {
        var _a = this.props, removeFilters = _a.removeFilters, filters = _a.filters;
        if (removeFilters) {
            removeFilters(filters);
        }
    };
    FilterGroup.prototype.render = function () {
        var _this = this;
        var _a = this.props, mod = _a.mod, className = _a.className, title = _a.title, filters = _a.filters, removeFilters = _a.removeFilters, removeFilter = _a.removeFilter;
        // BEM class builders for the container and its items list.
        var bemBlocks = {
            container: bemBlock(mod),
            items: bemBlock(mod + "-items")
        };
        return (React.createElement("div", {key: title, className: bemBlocks.container().mix(className)}, React.createElement("div", {className: bemBlocks.items()}, React.createElement("div", {className: bemBlocks.items("title")}, title), React.createElement("div", {className: bemBlocks.items("list")}, map(filters, function (filter) { return _this.renderFilter(filter, bemBlocks); }))), this.renderRemove(bemBlocks)));
    };
    FilterGroup.prototype.renderFilter = function (filter, bemBlocks) {
        var _a = this.props, translate = _a.translate, removeFilter = _a.removeFilter;
        return (React.createElement(FilterGroupItem, {key: filter.value, itemKey: filter.value, bemBlocks: bemBlocks, filter: filter, label: translate(filter.value), removeFilter: removeFilter}));
    };
    // Render the "X" remove-all control, or nothing when no callback is set.
    FilterGroup.prototype.renderRemove = function (bemBlocks) {
        if (!this.props.removeFilters)
            return null;
        return (React.createElement(_1.FastClick, {handler: this.removeFilters}, React.createElement("div", {className: bemBlocks.container("remove-action"), onClick: this.removeFilters}, "X")));
    };
    FilterGroup.defaultProps = {
        mod: "sk-filter-group",
        translate: function (str) { return str; }
    };
    return FilterGroup;
}(React.Component));
exports.FilterGroup = FilterGroup;
//# sourceMappingURL=FilterGroup.js.map | viktorkh/elastickit_express | node_modules/searchkit/lib/src/components/ui/filter-group/FilterGroup.js | JavaScript | mit | 4,385 |
"use strict";
module ServiceRegister
{
    /**
     * HTTP client for the service-register API: CRUD-style operations on an
     * organization's services, all returning AngularJS promises.
     */
    export class ServiceService
    {
        // AngularJS DI tokens; order must match the constructor parameters.
        public static $inject = ["$http", "apiBaseUrl"];

        constructor(private $http: angular.IHttpService, private apiBaseUrl: string)
        {
        }

        // POST a new service; resolves with the created service's id.
        public addService(organizationId: string, service: Service): angular.IPromise<string>
        {
            return this.$http.post(this.apiBaseUrl + "serviceregister/organizations/" + organizationId + "/services", service)
                .then((response: angular.IHttpPromiseCallbackArg<string>): string =>
                {
                    return response.data;
                });
        }

        // GET all services of the organization, mapped to list items.
        public getServices(organizationId: string): angular.IPromise<Array<ServiceListItem>>
        {
            return this.$http.get(this.apiBaseUrl + "serviceregister/organizations/" + organizationId + "/services")
                .then((response: angular.IHttpPromiseCallbackArg<any>): Array<ServiceListItem> =>
                {
                    return ServiceListItemMapper.map(response.data);
                });
        }

        // GET a single service by id, mapped to the domain model.
        public getService(organizationId: string, serviceId: string): angular.IPromise<Service>
        {
            return this.$http.get(this.apiBaseUrl + "serviceregister/organizations/" + organizationId + "/services/" + serviceId)
                .then((response: angular.IHttpPromiseCallbackArg<any>): Service =>
                {
                    return ServiceMapper.map(response.data);
                });
        }

        // PUT the basic-information section of an existing service.
        public setServiceBasicInformation(organizationId: string, service: Service): angular.IPromise<void>
        {
            return this.$http.put(this.apiBaseUrl + "serviceregister/organizations/" + organizationId + "/services/" + service.id + "/basicinformation", service)
                .then((): void =>
                {
                });
        }

        // PUT the classification section, flattening ontology terms to ids.
        public setServiceClassification(organizationId: string, service: Service): angular.IPromise<void>
        {
            return this.$http.put(this.apiBaseUrl + "serviceregister/organizations/" + organizationId + "/services/" + service.id + "/classification",
                new ServiceClassificationCommand(service.serviceClasses, service.ontologyTerms.map((term: Class) => term.id), service.targetGroups, service.lifeEvents, service.keywords))
                .then((): void =>
                {
                });
        }
    }
}
package org.archeo4j.core.analyzer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import javassist.ByteArrayClassPath;
import javassist.CannotCompileException;
import javassist.ClassPool;
import javassist.CtClass;
import javassist.CtMethod;
import javassist.NotFoundException;
import javassist.bytecode.AnnotationsAttribute;
import javassist.bytecode.annotation.Annotation;
import javassist.expr.ExprEditor;
import javassist.expr.MethodCall;
import org.archeo4j.core.model.AnalyzedAnnotation;
import org.archeo4j.core.model.AnalyzedClass;
import org.archeo4j.core.model.AnalyzedMethod;
/**
 * Analyzes compiled class bytecode (via Javassist) and builds an
 * {@link AnalyzedClass} model: annotations, interfaces, declared methods and
 * the method calls each of them makes. Classes and callees are filtered by
 * the configured {@link AnalyzisConfig#classFilter()}.
 */
public class ClassAnalyzer {
    private AnalyzisConfig analyzisConfig;

    public ClassAnalyzer(AnalyzisConfig analyzisConfig) {
        this.analyzisConfig = analyzisConfig;
    }

    /**
     * Analyzes the given class bytes; returns null when the class name is
     * rejected by the configured filter. A failure while analyzing methods is
     * logged and yields a partially-filled result rather than an exception.
     */
    public AnalyzedClass analyzeCallsForClass(String className, byte[] classBytes, String location) {
        if (!analyzisConfig.classFilter().test(className))
            return null;
        AnalyzedClass analyzedClass = new AnalyzedClass(className);
        ClassPool cp = new ClassPool();
        CtClass ctClass = parseClassByteCode(className, classBytes, cp);
        analyzeClassAnnotations(analyzedClass, ctClass);
        analyzeInterfaces(analyzedClass, ctClass);
        try {
            CtMethod[] methods = ctClass.getDeclaredMethods();
            for (CtMethod ctMethod : methods) {
                AnalyzedMethod method = analyzeMethodCalls(ctMethod);
                analyzeMethodAnnotations(method, ctMethod);
                analyzedClass.addAnalyzedMethod(method);
            }
        } catch (RuntimeException e) {
            // Best-effort: keep what was gathered so far for this class.
            System.out.println("WARN !! failed to analyze " + className + " " + e.getMessage());
        }
        return analyzedClass;
    }

    // Records implemented interface names and the superclass name.
    private void analyzeInterfaces(AnalyzedClass analyzedClass, CtClass ctClass) {
        analyzedClass.setInterfaceNames(Arrays.asList(ctClass.getClassFile2().getInterfaces()));
        analyzedClass.setSuperClassName(ctClass.getClassFile2().getSuperclass());
    }

    // Collects class-level annotations from the class-file attributes.
    private void analyzeClassAnnotations(AnalyzedClass analyzedClass, CtClass ctClass) {
        List<AnalyzedAnnotation> annotations = new ArrayList<>();
        for (Object o : ctClass.getClassFile2().getAttributes()) {
            if (o instanceof AnnotationsAttribute) {
                AnnotationsAttribute attribute = (AnnotationsAttribute) o;
                for (Annotation analyzedAnnotation : attribute.getAnnotations()) {
                    annotations.add(new AnalyzedAnnotation(analyzedAnnotation.toString()));
                }
            }
        }
        analyzedClass.setAnnotations(annotations);
    }

    // Collects method-level annotations from the method-info attributes.
    private void analyzeMethodAnnotations(AnalyzedMethod method, CtMethod ctMethod) {
        List<AnalyzedAnnotation> annotations = new ArrayList<>();
        for (Object o : ctMethod.getMethodInfo().getAttributes()) {
            if (o instanceof AnnotationsAttribute) {
                AnnotationsAttribute attribute = (AnnotationsAttribute) o;
                annotations.add(new AnalyzedAnnotation(attribute.toString()));
            }
        }
        method.setAnnotations(annotations);
    }

    // Loads the class into the pool from raw bytes; wraps NotFoundException.
    private CtClass parseClassByteCode(String className, byte[] classBytes, ClassPool cp) {
        cp.appendClassPath(new ByteArrayClassPath(className, classBytes));
        CtClass ctClass;
        try {
            ctClass = cp.get(className);
        } catch (NotFoundException e1) {
            throw new RuntimeException(e1);
        }
        return ctClass;
    }

    // Instruments the method body to record every call whose target class
    // passes the filter, then wraps the method itself as an AnalyzedMethod.
    private AnalyzedMethod analyzeMethodCalls(CtMethod ctMethod) {
        final List<AnalyzedMethod> methodsCalled = new ArrayList<AnalyzedMethod>();
        try {
            ctMethod.instrument(new ExprEditor() {
                @Override
                public void edit(MethodCall m) throws CannotCompileException {
                    if (analyzisConfig.classFilter().test(m.getClassName())) {
                        methodsCalled.add(asAnalyzedMethod(m));
                    }
                }
            });
        } catch (CannotCompileException e) {
            throw new RuntimeException(e);
        }
        AnalyzedMethod method = asAnalyzedMethod(ctMethod);
        method.setCalledMethods(methodsCalled);
        return method;
    }

    // Derives the parameter list by stripping class and method names from the
    // long name, e.g. "com.Foo.bar(int)" -> "(int)".
    private static AnalyzedMethod asAnalyzedMethod(CtMethod ctMethod) {
        String params =
            (ctMethod.getLongName().replace(ctMethod.getDeclaringClass().getName(), "").replace("."
                + ctMethod.getName(), ""));
        return new AnalyzedMethod(ctMethod.getDeclaringClass().getName(), ctMethod.getName(),
            ctMethod.getGenericSignature() != null ? ctMethod.getGenericSignature()
                : ctMethod.getSignature(), params, ctMethod.getModifiers());
    }

    // Lightweight wrapper for a call site (no parameter/modifier details).
    private static AnalyzedMethod asAnalyzedMethod(MethodCall m) {
        return new AnalyzedMethod(m.getClassName(), m.getMethodName(), m.getSignature());
    }
}
| mestachs/archeo4j | archeo4j-core/src/main/java/org/archeo4j/core/analyzer/ClassAnalyzer.java | Java | mit | 4,707 |
<?php
/**
 * Gii code template: generates the "create" view for a bootstrap CRUD.
 *
 * The following variables are available in this template:
 * - $this: the BootstrapCode object
 */
?>
<?php echo "<?php\n"; ?>
/* @var $this <?php echo $this->getControllerClass(); ?> */
/* @var $model <?php echo $this->getModelClass(); ?> */
<?php echo "?>\n"; ?>
<?php
echo "<?php\n";
// Breadcrumbs: plural model label -> index action, then the Create page.
$label = $this->pluralize($this->class2name($this->modelClass));
echo "\$this->breadcrumbs=array(
	'$label'=>array('index'),
	'Create',
);\n";
?>
$this->menu=array(
	array('icon' => 'glyphicon glyphicon-home','label'=>'Manage <?php echo $this->modelClass; ?>', 'url'=>array('admin')),
);
?>
<?php echo "<?php echo BSHtml::pageHeader('Create','$this->modelClass') ?>\n"; ?>
<?php echo "<?php \$this->renderPartial('_form', array('model'=>\$model)); ?>"; ?>
package com.example.devipriya.popularmovies.application;
import android.app.Application;
import android.text.TextUtils;
import com.android.volley.Request;
import com.android.volley.RequestQueue;
import com.android.volley.toolbox.Volley;
/**
* Created by Devipriya on 25-Oct-15.
*/
/**
 * Application singleton that owns a shared Volley request queue and offers
 * helpers to enqueue and cancel requests application-wide.
 */
public class AppController extends Application {

    // Default tag used when a request is enqueued without an explicit tag.
    private static final String TAG = AppController.class.getSimpleName();

    // Lazily created, shared Volley queue for the whole app.
    private RequestQueue mRequestQueue;

    // NOTE(review): assigned on the main thread in onCreate(); getInstance()
    // is synchronized but the field is not volatile — confirm visibility if
    // it is ever read from a background thread.
    private static AppController mInstance;

    @Override
    public void onCreate() {
        super.onCreate();
        mInstance = this;
    }

    public static synchronized AppController getInstance() {
        return mInstance;
    }

    // Creates the queue on first use against the application context.
    private RequestQueue getRequestQueue() {
        if (mRequestQueue == null) {
            mRequestQueue = Volley.newRequestQueue(getApplicationContext());
        }
        return mRequestQueue;
    }

    /** Enqueues a request, tagging it (falls back to TAG when tag is empty). */
    public <T> void addToRequestQueue(Request<T> req, String tag) {
        req.setTag(TextUtils.isEmpty(tag) ? TAG : tag);
        getRequestQueue().add(req);
    }

    /** Enqueues a request under the default tag. */
    public <T> void addToRequestQueue(Request<T> req) {
        req.setTag(TAG);
        getRequestQueue().add(req);
    }

    /** Cancels all pending requests carrying the given tag (if queue exists). */
    public void cancelPendingRequests(Object tag) {
        if (mRequestQueue != null) {
            mRequestQueue.cancelAll(tag);
        }
    }
}
| DevipriyaSarkar/Popular-Movies | app/src/main/java/com/example/devipriya/popularmovies/application/AppController.java | Java | mit | 1,341 |
<?php
/**
 * Edit page for programming-dictionary versions (tb_bb): lists existing
 * versions in a <select> and posts the chosen id plus a new name to
 * savebb.php with act=edit.
 */
require_once('../conn/conn.php');
?>
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>编辑编程词典版本</title>
<style>
*{-webkit-transition: all 0.3s;
-moz-transition: all 0.3s;
-ms-transition: all 0.3s;
-o-transition: all 0.3s;
transition: all 0.3s;
margin:0;padding:0;}
body,html{width:100%;height:100%;}
body{display:table;background:-webkit-linear-gradient(top,#369,#365,#360);}
.formWrap{display:table-cell;vertical-align:middle;}
form{width:300px;margin:0 auto;}
.gradientshadow{
-webkit-border-radius:5px;
-moz-border-radius:5px;
border-radius:5px;
border:1px solid #ccc;
outline:none;
}
form select{width:100%;height:35px;}
form select:focus{box-shadow:0 0 10px #FFF;border-color:#FFF;}
form input{width:100%;
height:30px;
text-indent:8px;
}
form input:focus{box-shadow:0 0 10px #FFF;border-color:#FFF;}
form button{width:100px;height:30px;background:#fff;}
form button:hover{box-shadow:0 0 10px #FFF;border-color:#FFF;}
form>p{margin-bottom:10px;}
</style>
</head>
<body>
<div class="formWrap">
<form method="post" action="savebb.php">
<input type="hidden" name="act" value="edit"/>
<?php /* Load all dictionary versions; show a hint when none exist. */
$res = $conn->query("SELECT * FROM tb_bb");if($res->num_rows == 0){ ?>
<p>没有任何词典版本,您可以<a href="index.php?this=添加编程词典版本">添加词典版本</a></p>
<?php }else{ ?>
<p><select class="gradientshadow" name="bbid">
<?php while($row = $res->fetch_assoc()){ ?>
<option value="<?php echo $row['id']; ?>"><?php echo $row['bbname']; ?></option>
<?php }} ?>
</select></p>
<p><input type="text" class="a gradientshadow" name="modifybb" placeholder="输入你要修改的版本名称"/></p>
<p><button type="submit" class="gradientshadow">确认修改</button> </p>
</form>
</div>
<script>
// Pre-fill the text input with the currently selected version's name and
// keep it in sync when the selection changes.
var oSelect = document.getElementsByTagName('select')[0];
document.getElementsByClassName('a')[0].value = oSelect.options[oSelect.selectedIndex].innerText;
oSelect.onchange = function(){
    document.getElementsByClassName('a')[0].value = this.options[this.selectedIndex].innerText;
}
</script>
</body>
</html>
| Hackandquan/wwqbbs | admin/editbb.php | PHP | mit | 2,564 |
class NilClass
def to_bool
false
end
end
| ministrycentered/maybe_so | lib/maybe_so/core_ext/nil_class.rb | Ruby | mit | 49 |
/**古代战争
* 作者:YYC
* 日期:2014-02-12
* 电子邮箱:395976266@qq.com
* QQ: 395976266
* 博客:http://www.cnblogs.com/chaogex/
*/
var PlantsSprite = YYC.Class(TerrainSprite, {
Protected: {
},
Public: {
name: "plants"
}
}); | yyc-git/AncientWar | src/sprite/terrain/PlantsSprite.js | JavaScript | mit | 271 |
require 'telegraph/validations/page'
require 'telegraph/validations/account' | deff7/ruby_telegraph_api | lib/telegraph/validations.rb | Ruby | mit | 76 |
<!DOCTYPE html>
<html>
<?php include 'head.php'; ?>
<body>
<?php include 'header.php'; ?>
<div class="content">
<div class="valign topic-heading">
<div class="width">
<h1>News</h1>
<h2>Posts made on OrgSync</h2>
</div>
</div>
<div class="width page-body">
<div class="module" id="module_news">
<ul>
<li id="post_220571" class="post_entry">
<p class="news_info">
<span class="new-tag">NEW</span>
<strong>05-04-2017</strong>: <a href="#" onclick="$j('#news_post_body_220571').toggle(); return false;">Test News Post</a>
</p>
<div class="news_post_body" id="news_post_body_220571" style="display: block;">
<p></p><p>This is a test news post for the website we are building. You may ignore this message, and it will be deleted soon.</p>
<p></p>
</div>
</li>
</ul>
</div>
</div>
</div>
<?php include 'footer.php'; ?>
</body>
<script type="text/javascript">
// Replace each post's "MM-DD-YYYY" label with a calendar-style badge
// (month name over day-of-month) followed by the original link.
function showDates() {
  var posts = document.querySelectorAll('.post_entry');
  posts.forEach(function(e) {
    // Parse the date from the post's <strong>MM-DD-YYYY</strong> label.
    var date = new Date(e.querySelector('.news_info strong').innerHTML);
    var months = [ "January", "February", "March", "April", "May", "June",
    "July", "August", "September", "October", "November", "December" ];
    // Create element with date of post
    var dateday = document.createElement('div');
    dateday.className = 'date-day valign';
    var month = document.createElement('h4');
    // getMonth() already returns a number; no parseInt needed.
    month.innerHTML = months[date.getMonth()];
    var day = document.createElement('h5');
    // FIX: getDate() is the day of the month; getDay() is the weekday (0-6)
    // and rendered the wrong number in the badge.
    day.innerHTML = date.getDate();
    var insideDiv = document.createElement('div');
    insideDiv.appendChild(month);
    insideDiv.appendChild(day);
    dateday.appendChild(insideDiv);
    // Get anchor link
    var link = e.querySelector('.news_info a');
    // Remove all content from the news info
    var info = e.querySelector('.news_info');
    info.innerHTML = "";
    // Re-append everything
    info.appendChild(dateday);
    info.appendChild(link);
  })
}
showDates();
</script>
</html>
| NUVR/OrgSync-Website | news.php | PHP | mit | 2,591 |
<?php
/**
* Part of ww4 project.
*
* @copyright Copyright (C) 2020 __ORGANIZATION__.
* @license __LICENSE__
*/
declare(strict_types=1);
namespace Windwalker\Query\Clause;
use Windwalker\Database\Schema\Ddl\Constraint;
use Windwalker\Query\Query;
use Windwalker\Utilities\Classes\FlowControlTrait;
/**
 * ALTER statement builder: collects sub-clauses (ADD INDEX, ADD CONSTRAINT,
 * MODIFY COLUMN, ...) against a target (TABLE/DATABASE/SCHEMA) and renders
 * them as a single comma-separated ALTER statement.
 */
class AlterClause implements ClauseInterface
{
    use FlowControlTrait;

    /** Root clause whose name holds "ALTER <target> <name>" and whose
     *  elements are the accumulated sub-clauses. */
    protected Clause $clause;

    /**
     * @var Query
     */
    protected Query $query;

    /**
     * Alter constructor.
     *
     * @param Query $query
     */
    public function __construct(Query $query)
    {
        $this->query = $query;
        $this->clause = $query->clause('ALTER', [], ",\n");
    }

    /** Sets the ALTER target, e.g. target('TABLE', '`foo`'). */
    public function target(string $target, string $targetName): static
    {
        $this->clause->setName(
            $this->query->format("ALTER %r %n\n", $target, $targetName)
        );

        return $this;
    }

    /** Shortcut for targeting a table (name is quoted). */
    public function table(string $table): static
    {
        return $this->target('TABLE', $this->query->quoteName($table));
    }

    /** Shortcut for targeting a database (name is quoted). */
    public function database(string $database): static
    {
        return $this->target('DATABASE', $this->query->quoteName($database));
    }

    /** Shortcut for targeting a schema (name is quoted). */
    public function schema(string $database): static
    {
        return $this->target('SCHEMA', $this->query->quoteName($database));
    }

    /** Appends and returns a new sub-clause of this ALTER statement. */
    public function subClause(string $name, array $elements = []): Clause
    {
        $this->clause->append(
            $clause = $this->query->clause($name, $elements)
        );

        return $clause;
    }

    /**
     * addIndex
     *
     * @param  string    $name
     * @param  string[]  $columns
     *
     * @return Clause
     */
    public function addIndex(string $name, array $columns = []): Clause
    {
        return $this->subClause('ADD INDEX')
            ->append($this->query->quoteName($name))
            ->append(
                $this->query->clause(
                    '()',
                    $columns,
                    ','
                )
            );
    }

    /** ADD CONSTRAINT of the given type; $name may be null for anonymous. */
    public function addConstraint(?string $name, string $type, array $columns = []): Clause
    {
        return $this->subClause('ADD CONSTRAINT')
            ->append($name ? $this->query->quoteName($name) : '')
            ->append(
                $this->query->clause(
                    $type . ' ()',
                    $columns,
                    ','
                )
            );
    }

    /** ADD CONSTRAINT ... PRIMARY KEY (columns). */
    public function addPrimaryKey(?string $name, array $columns): Clause
    {
        return $this->addConstraint($name, Constraint::TYPE_PRIMARY_KEY, $columns);
    }

    /** ADD CONSTRAINT ... UNIQUE (columns). */
    public function addUniqueKey(string $name, array $columns): Clause
    {
        return $this->addConstraint($name, Constraint::TYPE_UNIQUE, $columns);
    }

    /** ADD CONSTRAINT ... FOREIGN KEY with REFERENCES and optional actions. */
    public function addForeignKey(
        string $name,
        array $columns,
        array $refColumns,
        ?string $onUpdate,
        ?string $onDelete
    ): Clause {
        $clause = $this->addConstraint($name, Constraint::TYPE_FOREIGN_KEY, $columns)
            ->append(
                $this->query->clause(
                    'REFERENCES ()',
                    $refColumns,
                    ",\n"
                )
            );

        if ($onUpdate) {
            $clause->append(['ON UPDATE', $onUpdate]);
        }

        if ($onDelete) {
            $clause->append(['ON DELETE', $onDelete]);
        }

        return $clause;
    }

    /** MODIFY COLUMN <name> <expression> sub-clause. */
    public function modifyColumn(string $columnName, string $expression): Clause
    {
        return $this->subClause(
            '',
            [
                'MODIFY COLUMN',
                $this->query->quoteName($columnName),
                $expression
            ]
        );
    }

    /** Appends raw element(s) to the root ALTER clause. */
    public function append($elements): static
    {
        $this->clause->append($elements);

        return $this;
    }

    /**
     * __toString
     *
     * @return  string
     */
    public function __toString(): string
    {
        return (string) $this->clause;
    }

    /**
     * @return Clause
     */
    public function getClause(): Clause
    {
        return $this->clause;
    }
}
| ventoviro/ww4 | packages/query/src/Clause/AlterClause.php | PHP | mit | 4,186 |
(function (root, undefined) {
"use strict";
| maniartech/framework-factory | src/(open).js | JavaScript | mit | 44 |
#include "addtorrentform.h"
#include "ui_addtorrentform.h"
#include "torrentcontentfiltermodel.h"
#include "torrentcontentmodel.h"
#include "proplistdelegate.h"
#include "utilities/utils.h"
#include <QDateTime>
#include <QFileDialog>
#include <QDebug>
#include "settings_declaration.h"
#include "libtorrent/torrent_handle.hpp"
#include "libtorrent/torrent_status.hpp"
// Construct the dialog for an already-added torrent: file priorities and
// save path are read from and written back to the live torrent_handle.
AddTorrentForm::AddTorrentForm(const libtorrent::torrent_handle& handle, QWidget* parent) :
    QDialog(parent),
    ui(new Ui::AddTorrentForm),
    m_contentModel(nullptr),
    m_torrentInfo(nullptr),
    m_torrentAddParams(nullptr),
    m_torrentHandle(handle)
{
    ui->setupUi(this);
    setWindowFlags(Qt::Dialog | Qt::WindowTitleHint);
    // Initialize using torrent handle
    initialize();
}
// Construct the dialog for a torrent not yet added to the session: choices
// are written into the given add_torrent_params (m_torrentHandle stays invalid).
AddTorrentForm::AddTorrentForm(libtorrent::add_torrent_params* info, QWidget* parent) :
    QDialog(parent),
    ui(new Ui::AddTorrentForm),
    m_contentModel(nullptr),
    m_torrentInfo(nullptr),
    m_torrentAddParams(info)
{
    ui->setupUi(this);
    // Initialize using torrent add params
    setWindowFlags(Qt::Dialog | Qt::WindowTitleHint);
    initialize();
}
AddTorrentForm::~AddTorrentForm()
{
    // Child widgets/models are parented to this dialog; only the generated
    // UI wrapper is owned directly.
    delete ui;
}
// Shared setup for both constructors: resolves torrent_info and save path
// from whichever source is present (handle vs. add params), builds the file
// tree model, wires UI signals, and fills the metadata labels.
void AddTorrentForm::initialize()
{
    ui->savePathLineEdit->setText("");
    // using torrent info from handle
    if (m_torrentHandle.is_valid())
    {
        m_torrentInfo = &m_torrentHandle.get_torrent_info();
        setSavePath(QString::fromStdString(m_torrentHandle.save_path()));
    }
    else if (m_torrentAddParams)
    {
        m_torrentInfo = &(*m_torrentAddParams->ti);
        setSavePath(QString::fromStdString(m_torrentAddParams->save_path));
    }
    Q_ASSERT(m_torrentInfo->is_valid());

    m_contentModel = new TorrentContentFilterModel(this);
    m_contentModel->model()->setSavePath(savePath());
    ui->treeTorrentContent->setModel(m_contentModel);

    // Setting up other saving path stuffs
    VERIFY(connect(ui->savePathButton, SIGNAL(clicked()), SLOT(browseSavePath())));
    VERIFY(connect(ui->savePathLineEdit, SIGNAL(textEdited(QString)), SLOT(savePathEdited(QString))));
    VERIFY(connect(ui->selectNoneButton, SIGNAL(clicked()), SLOT(selectNone())));
    VERIFY(connect(ui->selectAllButton, SIGNAL(clicked()), SLOT(selectAll())));

    // Single click starts editing the priority cell via the delegate.
    auto* contentDelegate = new PropListDelegate(this);
    ui->treeTorrentContent->setItemDelegate(contentDelegate);
    VERIFY(connect(ui->treeTorrentContent, SIGNAL(clicked(const QModelIndex&)),
                   ui->treeTorrentContent, SLOT(edit(const QModelIndex&))));

    // List files in torrent
    m_contentModel->model()->setupModelData(*m_torrentInfo, libtorrent::torrent_status());
    ui->lblName->setText(QString::fromStdString(m_torrentInfo->name()));
    QString comment = QString::fromStdString(m_torrentInfo->comment());
    ui->lblComment->setText(comment.replace('\n', ' '));
    if (auto dt = m_torrentInfo->creation_date())
    {
        ui->lblDate->setText(QDateTime::fromTime_t(*dt).toString("dd/MM/yyyy hh:mm"));
    }
    updateDiskSpaceLabel();
    // Keep the size label and accept-button state in sync with file selection.
    VERIFY(connect(m_contentModel->model(), SIGNAL(filteredFilesChanged()), SLOT(updateDiskSpaceLabel())));
    VERIFY(connect(m_contentModel->model(), SIGNAL(filteredFilesChanged()), SLOT(checkAcceptAvailable())));

    // Expand root folder
    ui->treeTorrentContent->setExpanded(m_contentModel->index(0, 0), true);
    ui->treeTorrentContent->header()->setSectionResizeMode(0, QHeaderView::Stretch);

    // Seed priorities (and progress, when live) from the data source.
    if (m_torrentHandle.is_valid())
    {
        m_contentModel->model()->updateFilesPriorities(m_torrentHandle.file_priorities());
        std::vector<boost::int64_t> fp;
        m_torrentHandle.file_progress(fp);
        m_contentModel->model()->updateFilesProgress(fp);
    }
    else if (m_torrentAddParams)
    {
        m_contentModel->model()->updateFilesPriorities(m_torrentAddParams->file_priorities);
    }

    // hide columns
    ui->treeTorrentContent->hideColumn(TorrentContentModelItem::COL_STATUS);
    ui->treeTorrentContent->hideColumn(TorrentContentModelItem::COL_PROGRESS);
    ui->treeTorrentContent->hideColumn(TorrentContentModelItem::COL_PRIO);
    ui->treeTorrentContent->header()->setSortIndicator(TorrentContentModelItem::COL_NAME, Qt::AscendingOrder);
}
void AddTorrentForm::updateDiskSpaceLabel()
{
qulonglong torrent_size = 0;
if (m_contentModel)
{
std::vector<int> priorities;
m_contentModel->model()->getFilesPriorities(priorities);
Q_ASSERT(priorities.size() == (uint) m_torrentInfo->num_files());
for (uint i = 0; i < priorities.size(); ++i)
{
if (priorities[i] > 0)
{
torrent_size += m_torrentInfo->file_at(i).size;
}
}
}
else
{
torrent_size = m_torrentInfo->total_size();
}
QString size_string = utilities::SizeToString(torrent_size);
ui->lblSize->setText(size_string);
}
// Returns the per-file download priorities currently chosen in the content tree.
// NOTE(review): assumes m_contentModel is non-null; callers appear to invoke
// this only after the model is created — confirm before reusing elsewhere.
std::vector<int> AddTorrentForm::filesPriorities() const
{
    std::vector<int> priorities;
    m_contentModel->model()->getFilesPriorities(priorities);
    return priorities;
}
// Applies the user's choices on dialog confirmation: pushes the chosen file
// priorities and save path either to the live torrent handle (torrent already
// added) or to the pending add-torrent parameters, optionally suppresses this
// dialog for the future, then closes via QDialog::accept().
void AddTorrentForm::accept()
{
    if (m_torrentHandle.is_valid())
    {
        std::vector<int> priorities;
        m_contentModel->model()->getFilesPriorities(priorities);
        m_torrentHandle.prioritize_files(priorities);
        m_torrentHandle.move_storage(savePath().toUtf8().constData());
    }
    else if (m_torrentAddParams)
    {
        // Torrent not added yet: record the choices in the add parameters.
        m_contentModel->model()->getFilesPriorities(m_torrentAddParams->file_priorities);
        m_torrentAddParams->save_path = savePath().toUtf8().constData();
    }
    if (ui->checkBoxDontShowAgain->isChecked())
    {
        QSettings().setValue(app_settings::ShowAddTorrentDialog, false);
    }
    QDialog::accept();
}
// Opens a directory picker and, if the user chose a folder, makes it the
// torrent's save path. The dialog now starts in the currently selected save
// path instead of a hard-coded "/home", which did not exist on non-Unix
// platforms (QFileDialog falls back gracefully when the path is empty).
void AddTorrentForm::browseSavePath()
{
    QString dir = QFileDialog::getExistingDirectory(
        this, tr("Download torrent to..."),
        savePath(),
        QFileDialog::ShowDirsOnly
        | QFileDialog::DontResolveSymlinks);
    if (dir.isEmpty())
    {
        return;
    }
    setSavePath(dir);
}
// Returns the directory the torrent's payload will be saved to.
QString AddTorrentForm::savePath() const
{
    return m_savePath;
}
// Stores the new save path and propagates it to the line edit and content
// model. The line edit is only touched when out of sync, so programmatic
// updates do not disturb text the user is currently editing.
void AddTorrentForm::setSavePath(const QString& savePath)
{
    m_savePath = savePath;
    const bool lineEditOutOfSync = (ui->savePathLineEdit->text() != m_savePath);
    if (lineEditOutOfSync)
    {
        ui->savePathLineEdit->setText(m_savePath);
    }
    if (m_contentModel != nullptr)
    {
        m_contentModel->model()->setSavePath(m_savePath);
    }
}
// Brings the dialog to the foreground before entering the modal event loop.
// On Windows this works around SetForegroundWindow's focus-stealing
// restrictions by temporarily attaching our input thread to the current
// foreground window's thread.
int AddTorrentForm::exec()
{
#ifdef Q_OS_WIN
    // Retrieve your application's window Id
    WId mwWinId = winId();
    // Restore your application, should it be minimized
    if (IsIconic((HWND)mwWinId))
    {
        SendMessage((HWND)mwWinId, WM_SYSCOMMAND, SC_RESTORE, 0);
    }
    // Bring your application to the foreground
    DWORD foregroundThreadPId = GetWindowThreadProcessId(GetForegroundWindow(), NULL);
    DWORD mwThreadPId = GetWindowThreadProcessId((HWND)mwWinId, NULL);
    if (foregroundThreadPId != mwThreadPId)
    {
        // Your application's thread process Id is not that of the foreground window, so
        // attach the foreground thread to your application's, set your application to the
        // foreground, and detach the foreground thread from your application's
        AttachThreadInput(foregroundThreadPId, mwThreadPId, true);
        SetForegroundWindow((HWND)mwWinId);
        AttachThreadInput(foregroundThreadPId, mwThreadPId, false);
    }
    else
    {
        // Your application's thread process Id is that of the foreground window, so
        // just set your application to the foreground
        SetForegroundWindow((HWND)mwWinId);
    }
#else
    activateWindow();
#endif
    return QDialog::exec();
}
// Enables the OK button only while at least one file remains selected for
// download — accepting a torrent with every file filtered would be pointless.
void AddTorrentForm::checkAcceptAvailable()
{
    if (QPushButton* okBtn = ui->buttonBox_2->button(QDialogButtonBox::Ok))
    {
        okBtn->setEnabled(!m_contentModel->model()->allFiltered());
    }
}
// Slot: marks every file in the content tree for download.
void AddTorrentForm::selectAll()
{
    m_contentModel->selectAll();
}
// Slot: deselects every file in the content tree.
void AddTorrentForm::selectNone()
{
    m_contentModel->selectNone();
}
// Slot for manual edits of the save-path line edit: the typed text is only
// accepted once it names a directory that actually exists.
void AddTorrentForm::savePathEdited(const QString& sPath)
{
    const bool pathExists = QDir(sPath).exists();
    if (pathExists)
    {
        setSavePath(sPath);
    }
}
| aliakseis/LIII | src/gui/addtorrentform.cpp | C++ | mit | 8,240 |
<?php
namespace Chipin\DBBundle\Entity;
use Doctrine\ORM\Mapping as ORM;
/**
 * Frontuser
 *
 * Doctrine entity for a front-office user account: identity and credential
 * fields plus lifecycle timestamps (created/edited/registered/activated/
 * deleted) and a many-to-one link to the user's status record.
 *
 * @ORM\Table(name="frontUser")
 * @ORM\Entity
 * @ORM\HasLifecycleCallbacks()
 */
class Frontuser
{
    /**
     * @var string
     *
     * @ORM\Column(name="id", type="string", length=255, nullable=false)
     * @ORM\Id
     * @ORM\GeneratedValue(strategy="NICE16")
     */
    private $id;
    /**
     * @var string
     *
     * @ORM\Column(name="email", type="string", length=255, nullable=false)
     */
    private $email;
    /**
     * @var string
     *
     * @ORM\Column(name="forename", type="string", length=255, nullable=true)
     */
    private $forename;
    /**
     * @var string
     *
     * @ORM\Column(name="surname", type="string", length=255, nullable=true)
     */
    private $surname;
    /**
     * @var string
     *
     * @ORM\Column(name="nickname", type="string", length=255, nullable=true)
     */
    private $nickname;
    /**
     * @var string
     *
     * @ORM\Column(name="password", type="string", length=32, nullable=true)
     */
    private $password;
    /**
     * @var \DateTime
     *
     * @ORM\Column(name="dateCreated", type="datetime", nullable=false)
     */
    private $datecreated;
    /**
     * @var \DateTime
     *
     * @ORM\Column(name="dateEdited", type="datetime", nullable=false)
     */
    private $dateedited;
    /**
     * @var \DateTime
     *
     * @ORM\Column(name="dateRegistered", type="datetime", nullable=true)
     */
    private $dateregistered;
    /**
     * @var \DateTime
     *
     * @ORM\Column(name="dateActivate", type="datetime", nullable=true)
     */
    private $dateactivate;
    /**
     * @var \DateTime
     *
     * @ORM\Column(name="dateDeleted", type="datetime", nullable=true)
     */
    private $datedeleted;
    /**
     * @var \Usersstatus
     *
     * @ORM\ManyToOne(targetEntity="Usersstatus")
     * @ORM\JoinColumns({
     *   @ORM\JoinColumn(name="usersStatusID", referencedColumnName="id")
     * })
     */
    private $usersstatusid;
    /**
     * Doctrine lifecycle callback: stamps the creation and last-edit
     * timestamps just before the entity is first persisted.
     *
     * @ORM\PrePersist
     */
    public function setInitData() {
        $currentDate = new \DateTime();
        $this->setDatecreated($currentDate);
        $this->setDateedited($currentDate);
    }
    /**
     * Get id
     *
     * @return string
     */
    public function getId()
    {
        return $this->id;
    }
    /**
     * Set email
     *
     * @param string $email
     * @return Frontuser
     */
    public function setEmail($email)
    {
        $this->email = $email;
        return $this;
    }
    /**
     * Get email
     *
     * @return string
     */
    public function getEmail()
    {
        return $this->email;
    }
    /**
     * Set forename
     *
     * @param string $forename
     * @return Frontuser
     */
    public function setForename($forename)
    {
        $this->forename = $forename;
        return $this;
    }
    /**
     * Get forename
     *
     * @return string
     */
    public function getForename()
    {
        return $this->forename;
    }
    /**
     * Set surname
     *
     * @param string $surname
     * @return Frontuser
     */
    public function setSurname($surname)
    {
        $this->surname = $surname;
        return $this;
    }
    /**
     * Get surname
     *
     * @return string
     */
    public function getSurname()
    {
        return $this->surname;
    }
    /**
     * Set nickname
     *
     * @param string $nickname
     * @return Frontuser
     */
    public function setNickname($nickname)
    {
        $this->nickname = $nickname;
        return $this;
    }
    /**
     * Get nickname
     *
     * @return string
     */
    public function getNickname()
    {
        return $this->nickname;
    }
    /**
     * Set password
     *
     * @param string $password
     * @return Frontuser
     */
    public function setPassword($password)
    {
        $this->password = $password;
        return $this;
    }
    /**
     * Get password
     *
     * @return string
     */
    public function getPassword()
    {
        return $this->password;
    }
    /**
     * Set datecreated
     *
     * @param \DateTime $datecreated
     * @return Frontuser
     */
    public function setDatecreated($datecreated)
    {
        $this->datecreated = $datecreated;
        return $this;
    }
    /**
     * Get datecreated
     *
     * @return \DateTime
     */
    public function getDatecreated()
    {
        return $this->datecreated;
    }
    /**
     * Set dateedited
     *
     * @param \DateTime $dateedited
     * @return Frontuser
     */
    public function setDateedited($dateedited)
    {
        $this->dateedited = $dateedited;
        return $this;
    }
    /**
     * Get dateedited
     *
     * @return \DateTime
     */
    public function getDateedited()
    {
        return $this->dateedited;
    }
    /**
     * Set dateregistered
     *
     * @param \DateTime $dateregistered
     * @return Frontuser
     */
    public function setDateregistered($dateregistered)
    {
        $this->dateregistered = $dateregistered;
        return $this;
    }
    /**
     * Get dateregistered
     *
     * @return \DateTime
     */
    public function getDateregistered()
    {
        return $this->dateregistered;
    }
    /**
     * Set dateactivate
     *
     * @param \DateTime $dateactivate
     * @return Frontuser
     */
    public function setDateactivate($dateactivate)
    {
        $this->dateactivate = $dateactivate;
        return $this;
    }
    /**
     * Get dateactivate
     *
     * @return \DateTime
     */
    public function getDateactivate()
    {
        return $this->dateactivate;
    }
    /**
     * Set datedeleted
     *
     * @param \DateTime $datedeleted
     * @return Frontuser
     */
    public function setDatedeleted($datedeleted)
    {
        $this->datedeleted = $datedeleted;
        return $this;
    }
    /**
     * Get datedeleted
     *
     * @return \DateTime
     */
    public function getDatedeleted()
    {
        return $this->datedeleted;
    }
    /**
     * Set usersstatusid
     *
     * @param \Chipin\DBBundle\Entity\Usersstatus $usersstatusid
     * @return Frontuser
     */
    public function setUsersstatusid(\Chipin\DBBundle\Entity\Usersstatus $usersstatusid = null)
    {
        $this->usersstatusid = $usersstatusid;
        return $this;
    }
    /**
     * Get usersstatusid
     *
     * @return \Chipin\DBBundle\Entity\Usersstatus
     */
    public function getUsersstatusid()
    {
        return $this->usersstatusid;
    }
}
# Database bootstrap: point ActiveRecord at the project's SQLite database.
require 'active_record'

# Required by newer i18n versions; keeps locale lookups strict.
I18n.enforce_available_locales = true

db_path = "#{File.dirname(__FILE__)}/../db/price_wish.sqlite3"
ActiveRecord::Base.establish_connection(adapter: 'sqlite3', database: db_path)
| sf-fiery-skippers-2015/price-wish-project | source/price-wish/db/config.rb | Ruby | mit | 193 |
/*
* ============================================================================
* Filename: server.hxx
* Description: Server side implementation for flihabi network
* Version: 1.0
* Created: 04/20/2015 06:21:00 PM
* Revision: none
* Compiler: gcc
* Author: Rafael Gozlan
* Organization: Flihabi
* ============================================================================
*/
#define _POSIX_SOURCE
#include <sys/types.h>
#include <sys/socket.h>
#include <netdb.h>
#include <string.h>
#include <thread>
#include <iostream>
#include "blocking_queue.hh"
#include "utils.hh"
#include "server.hh"
#include "broadcaster.hh"
#include "listener.hh"
#include "network.hh"
// TODO: More free
// PUBLIC METHODS
// Constructs the server and immediately spawns two detached background
// threads: one broadcasting the server's presence on the network
// (broadcastLoop) and one accepting incoming slave connections (handler).
Server::Server() : results_(), busy_(), todo_()
{
    // Launch the server broadcaster and connection handler
    std::thread broadcaster(broadcastLoop);
    broadcaster.detach();
    std::thread handle(Server::handler, this);
    handle.detach();
}
// Fetches (and consumes) the result stored in slot i. Returns NULL when the
// result has not arrived yet; once taken, the slot is released for reuse.
Result *Server::getResult(int i)
{
    Result *result = results_[i];
    if (result == NULL)
        return NULL;
    results_[i] = NULL;
    busy_[i] = false; /* the slot can now be handed out again */
    return result;
}
// Queues a bytecode job for execution by a slave and returns the slot index
// at which its Result will eventually be published (see getResult()).
int Server::execBytecode(std::string bytecode)
{
    const int slot = getResultEmplacement();
    TodoItem *item = new TodoItem();
    item->id = slot;
    item->bytecode = bytecode;
    todo_.push(item);
    return slot;
}
// PRIVATE METHODS
// Publishes the result produced for the job that owns slot i.
void Server::setResult(int i, Result *r)
{
    results_[i] = r;
}
// Finds a free result slot, marks it busy and returns its index; grows the
// slot vectors when every existing slot is taken.
int Server::getResultEmplacement()
{
    for (size_t i = 0; i < busy_.size(); i++)
    {
        if (!busy_[i])
        {
            busy_[i] = true;
            return i;
        }
    }
    /* No free slot: append one, already marked busy since it is handed out.
       (Previously pushed `false`, which let a later call reuse the same
       slot while this job was still in flight.) */
    busy_.push_back(true);
    results_.push_back(NULL);
    return busy_.size() - 1;
}
/* Server slaves handling */
// Accept loop: binds a TCP listening socket on CONNECTION_PORT, waits for
// slaves to connect, and for every peer whose first message matches the
// CONNECTION_MSG handshake spawns a detached clientThread to feed it work.
// Runs forever; returns early only on unrecoverable setup errors.
void Server::handler(Server *server)
{
    int sockfd, new_fd; // listen on sock_fd, new connection on new_fd
    struct addrinfo hints, *servinfo, *p;
    struct sockaddr_storage their_addr; // connector's address information
    socklen_t sin_size;
    int yes=1;
    char s[INET6_ADDRSTRLEN];
    int rv;
    memset(&hints, 0, sizeof hints);
    hints.ai_family = AF_UNSPEC;
    hints.ai_socktype = SOCK_STREAM;
    hints.ai_flags = AI_PASSIVE; // use my IP
    if ((rv = getaddrinfo(NULL, std::to_string(CONNECTION_PORT).c_str(),
                          &hints, &servinfo)) != 0)
    {
        fprintf(stderr, "Handler: getaddrinfo: %s\n", gai_strerror(rv));
        return;
    }
    // loop through all the results and bind to the first we can
    for(p = servinfo; p != NULL; p = p->ai_next) {
        if ((sockfd = socket(p->ai_family, p->ai_socktype,
                             p->ai_protocol)) == -1) {
            perror("Handler: socket");
            continue;
        }
        // Allow quick restarts without waiting out TIME_WAIT.
        if (setsockopt(sockfd, SOL_SOCKET, SO_REUSEADDR, &yes,
                       sizeof(int)) == -1) {
            perror("Handler: setsockopt");
            exit(1);
        }
        if (bind(sockfd, p->ai_addr, p->ai_addrlen) == -1) {
            close(sockfd);
            perror("Handler: bind");
            continue;
        }
        break;
    }
    if (p == NULL) {
        fprintf(stderr, "Handler: failed to bind\n");
        return;
    }
    freeaddrinfo(servinfo); // all done with this structure
    if (listen(sockfd, 10) == -1)
    {
        perror("listen");
        exit(1);
    }
    printf("Handler: waiting for connections...\n");
    while (true) /* I love stuff like this */
    {
        sin_size = sizeof their_addr;
        new_fd = accept(sockfd, (struct sockaddr *)&their_addr, &sin_size);
        if (new_fd == -1) {
            perror("Handler: accept");
            continue;
        }
        inet_ntop(their_addr.ss_family,
                  Utils::get_in_addr((struct sockaddr *)&their_addr),
                  s, sizeof s);
        printf("Handler: got connection from %s\n", s);
        int numbytes;
        char buf[100];
        // Receiving connection msg
        if ((numbytes = recv(new_fd, buf, 100-1, 0)) == -1)
        {
            perror("Server: failed to recv the connection msg");
            exit(1);
        }
        buf[numbytes] = '\0';
        printf("Server: received '%s'\n",buf);
        // Only peers that open with the expected handshake get a worker.
        if (std::string(buf) == CONNECTION_MSG)
        {
            std::thread client(Server::clientThread, server, new_fd);
            client.detach();
        }
    }
}
// Per-slave worker loop: greets the slave, then repeatedly pops a TodoItem
// from the shared queue, ships its bytecode, and reads back a
// length-prefixed result blob which is published into the owning Server's
// result slot. On send/receive failures the in-flight task is pushed back
// onto the queue so another slave can pick it up.
void Server::clientThread(Server *s, int sockfd)
{
    std::cout << "Client thread: sending Hello!" << std::endl;
    // Sending ACK
    if (send(sockfd, CONNECTION_MSG, strlen(CONNECTION_MSG), 0) == -1)
        perror("Client thread: failed sending Hello!");
    while (true) /* client loop */
    {
        TodoItem *t = NULL;
        // Busy-waits until a task becomes available.
        while ((t = s->todo_.pop()) == NULL); // Try to get bytecode to exec
        std::cout << "Server thread opening task:\n";
        /*
        const char *buffer = t->bytecode.c_str();
        for (unsigned i = 0; i < t->bytecode.size(); i++)
        {
            if (buffer[i] <= '~' && buffer[i] >= ' ')
                printf("%c", buffer[i]);
            else
                printf("\\%02X", buffer[i]);
        }
        */
        std::cout << "\n==\n";
        // Sending Bytecode
        if (Utils::sendBytecode(sockfd, t->bytecode, t->bytecode.size()) == (uint64_t) -1)
        {
            perror("Client thread: failed sending bytecode");
            s->todo_.push(t);
            break;
        }
        ssize_t nbytes;
        uint64_t len;
        // Length prefix of the result blob the slave is about to send.
        if ((len = Utils::recvBytecodeLen(sockfd)) == (uint64_t) -1)
        {
            perror("Client thread: fail to get bytecode len");
            s->todo_.push(t);
            break;
        }
        uint64_t len_aux = len;
        char *buf = (char*) malloc(len);
        char *aux = buf;
        // Read exactly `len` bytes of result, in chunks of at most 4096.
        while (len > 0)
        {
            if (len < 4096)
                nbytes = recv(sockfd, aux, len, 0);
            else
                nbytes = recv(sockfd, aux, 4096, 0);
            if (nbytes == -1)
            {
                perror("Client thread: failed receiving bytecode");
                s->todo_.push(t);
                break;
            }
            if (nbytes == 0)
            {
                // recv() returning 0 means the peer closed the connection.
                std::cout << "Client thread: Connection seems to be reset."
                          << std::endl;
                s->todo_.push(t);
                close(sockfd);
                break;
            }
            len -= nbytes;
            aux += nbytes;
        }
        // NOTE(review): if len was 0 the loop never runs and nbytes is read
        // uninitialized below; `buf` also leaks on these early exits. Confirm
        // whether zero-length results can occur before relying on this path.
        if (nbytes == -1)
        {
            return;
        }
        if (nbytes == 0)
        {
            break;
        }
        // Setting result
        /* TODO: Test if r is persistant */
        std::cout << "Server got returned bytecode\n";
        Result *r = new Result();
        r->value = std::string(buf, len_aux);
        s->setResult(t->id, r);
        free(buf);
    }
}
| FLIHABI/network | src/server.cc | C++ | mit | 6,991 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace DMT.Common.Rest
{
/// <summary>
/// Normalizes request URLs for route matching: strips the query string and
/// any trailing slashes.
/// </summary>
class UrlSanitizer
{
    public string Sanitize(string url)
    {
        int queryStart = url.IndexOf('?');
        string withoutQuery = queryStart < 0 ? url : url.Substring(0, queryStart);
        return withoutQuery.TrimEnd('/');
    }
}
}
| tmichel/thesis | Common/DMT.Common/Rest/UrlSanitizer.cs | C# | mit | 430 |
package seedu.task.model.task;
import seedu.task.commons.exceptions.IllegalValueException;
/**
 * Represents a Task's priority level in the task manager.
 * Guarantees: immutable; is valid as declared in {@link #isValidPriorityLevel(String)}.
 * An empty (or all-whitespace) string is accepted and means "no priority set".
 */
public class PriorityLevel {
    public static final String MESSAGE_PRIORITY_LEVEL_CONSTRAINTS = "Priority Levels should be indicated by an integer,"
            + " ranging from 1 to 4; with 1 being the highest priority and 4 being the lowest priority.";
    public static final String PRIORITY_LEVEL_VALIDATION_REGEX = "[1-4]";
    public final String value;
    /**
     * Validates the given priority level.
     *
     * @throws IllegalValueException if the given priority level string is invalid.
     */
    public PriorityLevel(String priority) throws IllegalValueException {
        String trimmedPriority = priority.trim();
        // Empty input denotes "no priority" and bypasses the range check.
        if (!trimmedPriority.isEmpty() && !isValidPriorityLevel(trimmedPriority)) {
            throw new IllegalValueException(MESSAGE_PRIORITY_LEVEL_CONSTRAINTS);
        }
        this.value = trimmedPriority;
    }
    /**
     * Returns true if the given string is a valid priority level (1-4).
     */
    public static boolean isValidPriorityLevel(String test) {
        return test.matches(PRIORITY_LEVEL_VALIDATION_REGEX);
    }
    @Override
    public String toString() {
        return value;
    }
    @Override
    public boolean equals(Object other) {
        if (other == this) { // short circuit if same object
            return true;
        }
        if (!(other instanceof PriorityLevel)) { // instanceof handles nulls
            return false;
        }
        return this.value.equals(((PriorityLevel) other).value); // state check
    }
    @Override
    public int hashCode() {
        return value.hashCode();
    }
}
| CS2103JAN2017-W09-B4/main | src/main/java/seedu/task/model/task/PriorityLevel.java | Java | mit | 1,885 |
/*
* MIT License
*
* Copyright (c) 2018 Asynchronous Game Query Library
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NON INFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.ibasco.agql.protocols.valve.dota2.webapi.pojos;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import java.util.ArrayList;
import java.util.List;
/**
 * Response container for Dota 2's GetMatchHistory web API call: one page of
 * match summaries plus paging bookkeeping (number returned in this page,
 * total available, and how many results remain after this page).
 */
public class Dota2MatchHistory {
    @SerializedName("status")
    @Expose
    private int status;
    @SerializedName("num_results")
    @Expose
    private int numResults;
    @SerializedName("total_results")
    @Expose
    private int totalResults;
    @SerializedName("results_remaining")
    @Expose
    private int resultsRemaining;
    @SerializedName("matches")
    @Expose
    private List<Dota2MatchHistoryInfo> matches = new ArrayList<>();
    /**
     * @return The status
     */
    public int getStatus() {
        return status;
    }
    /**
     * @param status
     *         The status
     */
    public void setStatus(int status) {
        this.status = status;
    }
    /**
     * @return The numResults
     */
    public int getNumResults() {
        return numResults;
    }
    /**
     * @param numResults
     *         The num_results
     */
    public void setNumResults(int numResults) {
        this.numResults = numResults;
    }
    /**
     * @return The totalResults
     */
    public int getTotalResults() {
        return totalResults;
    }
    /**
     * @param totalResults
     *         The total_results
     */
    public void setTotalResults(int totalResults) {
        this.totalResults = totalResults;
    }
    /**
     * @return The resultsRemaining
     */
    public int getResultsRemaining() {
        return resultsRemaining;
    }
    /**
     * @param resultsRemaining
     *         The results_remaining
     */
    public void setResultsRemaining(int resultsRemaining) {
        this.resultsRemaining = resultsRemaining;
    }
    /**
     * @return The matches
     */
    public List<Dota2MatchHistoryInfo> getMatches() {
        return matches;
    }
    /**
     * @param matches
     *         The matches
     */
    public void setMatches(List<Dota2MatchHistoryInfo> matches) {
        this.matches = matches;
    }
    @Override
    public String toString() {
        return ToStringBuilder.reflectionToString(this, ToStringStyle.NO_CLASS_NAME_STYLE);
    }
}
| ribasco/async-gamequery-lib | protocols/valve/dota2/webapi/src/main/java/com/ibasco/agql/protocols/valve/dota2/webapi/pojos/Dota2MatchHistory.java | Java | mit | 3,519 |
'use strict';
var _ = require('underscore'),
through2 = require('through2'),
fs = require('fs'),
hdiff = require('hdiff'),
spawn = require('child_process').spawn,
which = require('which'),
gutil = require('gulp-util'),
path = require('path');
// Gulp plugin entry point: a through-stream that watches package.json files
// passing through the pipeline, diffs each file against the version seen on
// the previous pass, and shells out to npm to install/update/remove the
// packages whose entries changed. The file itself is passed through intact.
module.exports = function () {
  var diffMapper = jsonDiffMapper();
  return through2(
    {objectMode: true},
    function (file, enc, cb) {
      log('Checking ' + file.path + 'for updates');
      // diff -> npm commands -> drop empties -> pin cwd to the file's dir -> run
      execCommands(_.map(_.filter(mapDiffToCommands(diffMapper(file)), filterEmptyCommands), commandToCurrentCwdMapper(path.dirname(file.path))), cb);
      this.push(file);
    }
  )
}
// Thin pass-through to gulp-util's logger so call sites stay terse.
function log() {
  var args = Array.prototype.slice.call(arguments);
  return gutil.log.apply(gutil, args);
}
// Predicate: keep only command tuples that actually have content.
function filterEmptyCommands(cmd) {
  return cmd.length > 0;
}
// Sequentially executes the queued [bin, args, opts] tuples: resolves each
// binary on PATH via `which`, spawns it with inherited stdio, and recurses
// onto the remainder once the child closes. Calls cb() when the queue is
// drained, or cb(err) when a binary cannot be found.
function execCommands(cmds, cb) {
  if (!cmds.length) return cb();
  var cmd = cmds.shift();
  which(cmd[0], function (err, cmdpath) {
    if (err) {
      log("Couldn't find the path to " + cmd[0]);
      return cb(err);
    }
    log(cmd[0] + ' ' + cmd[1].join(' '));
    cmd[0] = cmdpath; // replace the bare name with the resolved absolute path
    cmd[2].stdio = 'inherit';
    cmd = spawn.apply(spawn, cmd);
    cmd.on('close', function onClose() { return execCommands(cmds, cb); })
  })
}
// Returns a stateful mapper: given a vinyl file, it synchronously reads and
// parses the file's JSON, caches it per path, and returns an hdiff between
// the previously cached copy and the fresh one. The first sighting of a
// path yields {$new: <json>} so the whole file is treated as newly added.
function jsonDiffMapper() {
  var cache = {};
  return function (file) {
    var oldJson = cache[file.path];
    var newJson = JSON.parse(fs.readFileSync(file.path, {encoding: 'utf8'}));
    cache[file.path] = newJson;
    if (!oldJson)
      return {'$new': newJson}
    return hdiff(oldJson, newJson, {unchanged: false});
  }
}
// Returns a function that stamps the given working directory onto a command
// tuple's options slot ([bin, args, opts]), creating the slot if missing.
// Falsy commands are passed over (returns undefined).
function commandToCurrentCwdMapper(cwd) {
  return function (cmd) {
    if (!cmd) return;
    var opts = cmd[2] || {};
    opts.cwd = cwd;
    cmd[2] = opts;
    return cmd;
  }
}
// Converts an hdiff result into the list of npm commands needed to bring
// node_modules in sync: a plain `npm install` for brand-new files, plus
// install/remove/update commands derived from dependency and devDependency
// changes. devDependency commands are additionally tagged with --save-dev.
function mapDiffToCommands(diff) {
  if (!diff) return [];
  var mappers = [mapInstallToCmd, mapRemoveToCmd, mapUpdateToCmd];
  return _.union(
    mapNewToCmds(diff),
    mapDependenciesToCmds(diff.dependencies, mappers),
    _.map(mapDependenciesToCmds(diff.devDependencies, mappers), mapDevCmds)
  );
}
// A brand-new package.json (no cached copy) triggers a plain `npm install`.
function mapNewToCmds(diff) {
  return diff['$new'] ? [createCommand('npm', ['install'])] : [];
}
// Runs every mapper over the dependency diff map and unions the resulting
// command lists. Returns [] when the diff has no dependency section.
function mapDependenciesToCmds(deps, mappers) {
  if (!_.isObject(deps)) return [];
  var cmds = [];
  _.each(mappers,function(mapper) {
    cmds.push(_mapDependenciesToCmds(deps, mapper));
  })
  return _.union.apply(_, cmds);
}
// Applies one mapper to each {pkg: diff} entry, collecting only entries for
// which the mapper produced a truthy command. NOTE: mappers may return empty
// arrays, which are truthy and get collected here; those placeholders are
// filtered out downstream (filterEmptyCommands / mapDevCmds length guard).
function _mapDependenciesToCmds(deps, mapper) {
  if (!_.isObject(deps)) return [];
  var cmds = [];
  _.each(deps,function(diff, pkg) {
    var cmd = mapper(diff, pkg);
    if (cmd)
      cmds.push(cmd);
  })
  return cmds;
}
// A dependency present before but deleted now maps to `npm remove <pkg>`.
// Returns undefined for anything that is not a removal.
function mapRemoveToCmd(diff, pkg) {
  if (isRemove(diff)) {
    return createCommand('npm', ['remove', pkg]);
  }
}
// A newly added dependency maps to `npm install <pkg>@<version>`.
// Returns undefined (not []) for non-installs, matching mapRemoveToCmd, so
// _mapDependenciesToCmds does not accumulate truthy empty placeholders.
function mapInstallToCmd(diff, pkg) {
  if (!isInstall(diff)) return;
  return createCommand('npm', ['install', resolveFullPackageName(diff, pkg)]);
}
// A dependency whose version changed maps to `npm install <pkg>@<version>`.
// Returns undefined (not []) for non-updates, matching mapRemoveToCmd, so
// _mapDependenciesToCmds does not accumulate truthy empty placeholders.
function mapUpdateToCmd(diff, pkg) {
  if (!isUpdate(diff)) return;
  return createCommand('npm', ['install', resolveFullPackageName(diff, pkg)]);
}
// Tags a devDependency command with --save-dev; empty commands pass through.
function mapDevCmds(cmd) {
  if (!cmd.length) return cmd;
  cmd[1].push('--save-dev');
  return cmd;
}
// Diff-shape predicates: hdiff marks changes with $add / $del keys.
function isUpdate(diff) {
  return diff['$add'] && diff['$del'];   // value replaced
}
function isInstall(diff) {
  return diff['$add'] && !diff['$del'];  // value newly added
}
function isRemove(diff) {
  return !diff['$add'] && diff['$del'];  // value deleted
}
// Builds the npm install target: "pkg@version" when a version was added,
// plain "pkg" otherwise.
function resolveFullPackageName(diff, pkg) {
  var version = diff['$add'];
  return version ? pkg + '@' + version : pkg;
}
// Normalizes a command into the [binary, argv, options] tuple used throughout.
function createCommand(cmd, args, opts) {
  if (!opts) {
    opts = {};
  }
  return [cmd, args, opts];
}
| tounano/gulp-update | index.js | JavaScript | mit | 3,562 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace PiCandy.Logging
{
/// <summary>
/// Level-gated logging abstraction. Callers can consult the Is*Enabled
/// property before building an expensive message for that level.
/// </summary>
public interface ILog
{
    /// <summary>Whether error-level logging is enabled.</summary>
    bool IsErrorEnabled { get; }
    /// <summary>Logs an error message together with the causing exception.</summary>
    void Error(object message, Exception exception);
    /// <summary>Whether warn-level logging is enabled.</summary>
    bool IsWarnEnabled { get; }
    /// <summary>Logs a warning message.</summary>
    void Warn(object message);
    /// <summary>Logs a composite-format warning message.</summary>
    void WarnFormat(string message, params object[] args);
    /// <summary>Whether info-level logging is enabled.</summary>
    bool IsInfoEnabled { get; }
    /// <summary>Logs an informational message.</summary>
    void Info(object message);
    /// <summary>Logs a composite-format informational message.</summary>
    void InfoFormat(string message, params object[] args);
    /// <summary>Whether debug-level logging is enabled.</summary>
    bool IsDebugEnabled { get; }
    /// <summary>Logs a debug message.</summary>
    void Debug(object message);
    /// <summary>Logs a composite-format debug message.</summary>
    void DebugFormat(string message, params object[] args);
    /// <summary>Whether verbose-level logging is enabled.</summary>
    bool IsVerboseEnabled { get; }
    /// <summary>Logs a verbose message.</summary>
    void Verbose(object message);
    /// <summary>Logs a composite-format verbose message.</summary>
    void VerboseFormat(string message, params object[] args);
}
}
| piers7/PiCandy | src/PiCandy.Core/Logging/ILog.cs | C# | mit | 834 |
package ru.mirea.oop.practice.set;
import java.util.Iterator;
import java.util.NoSuchElementException;
@SuppressWarnings("unchecked")
public final class SetImpl<E> implements ISet<E> {
    // Number of elements currently stored.
    private int size;
    // Fixed-size bucket array; collisions chain via doubly linked Nodes.
    private final Node<E>[] table;

    public SetImpl(int capacity) {
        table = (Node<E>[]) new Node[capacity];
    }

    public SetImpl() {
        this(256);
    }

    /**
     * Null-safe hash: null elements hash to 0. (The previous version called
     * element.hashCode() unconditionally and threw NullPointerException for
     * null, even though indexOf() pretended to support it.)
     */
    private static int hash(Object element) {
        return element == null ? 0 : element.hashCode();
    }

    /** Null-safe equality used for all element comparisons. */
    private static boolean eq(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    /** Bucket index for the element (masking, as before; uniform for power-of-two capacities). */
    private int indexOf(E element) {
        return (table.length - 1) & hash(element);
    }

    @Override
    public int size() {
        return size;
    }

    @Override
    public boolean isEmpty() {
        return size == 0;
    }

    /**
     * Returns true when the element is present. Fixes the original
     * traversal, which broke out of the loop before comparing the last node
     * of a chain and therefore missed elements stored at the chain tail.
     */
    @Override
    public boolean contains(E element) {
        int hash = hash(element);
        for (Node<E> node = table[indexOf(element)]; node != null; node = node.next) {
            if (node.hash == hash && eq(element, node.item)) {
                return true;
            }
        }
        return false;
    }

    /** Adds the element unless an equal one is already present. */
    @Override
    public void put(E element) {
        int index = indexOf(element);
        int hash = hash(element);
        for (Node<E> node = table[index]; node != null; node = node.next) {
            if (node.hash == hash && eq(element, node.item)) {
                return; // a set stores each element at most once
            }
        }
        // Prepend a new node to the bucket's chain.
        Node<E> head = table[index];
        Node<E> added = new Node<>(null, element, head);
        if (head != null) {
            head.prev = added;
        }
        table[index] = added;
        size++;
    }

    /**
     * Removes the element if present. The original implementation scanned
     * with a cursor it never advanced for comparison (it re-checked the head
     * on every iteration), so removing non-head chain entries silently
     * failed; this version walks the chain and unlinks the matching node.
     */
    @Override
    public void remove(E element) {
        int index = indexOf(element);
        int hash = hash(element);
        for (Node<E> node = table[index]; node != null; node = node.next) {
            if (node.hash != hash || !eq(element, node.item)) {
                continue;
            }
            if (node.prev == null) {
                table[index] = node.next;
            } else {
                node.prev.next = node.next;
            }
            if (node.next != null) {
                node.next.prev = node.prev;
            }
            --size;
            return;
        }
    }

    @Override
    public void clear() {
        for (int i = 0; i < table.length; ++i) {
            table[i] = null;
        }
        size = 0;
    }

    @Override
    public Iterator<E> iterator() {
        return new TableIterator();
    }

    /** Chain node; prev links make O(1) unlinking possible in remove(). */
    private static final class Node<E> {
        final E item;
        final int hash;
        Node<E> next;
        Node<E> prev;

        private Node(Node<E> prev, E element, Node<E> next) {
            this.item = element;
            this.next = next;
            this.prev = prev;
            this.hash = hash(element);
        }
    }

    @Override
    public boolean equals(Object obj) {
        if (obj instanceof ISet) {
            // NOTE(review): set equality was never implemented upstream;
            // behavior preserved until ISet's contract is pinned down.
            throw new RuntimeException("Not implement yet");
        }
        return super.equals(obj);
    }

    /**
     * Iterates all buckets in index order, walking each chain front to back.
     * Replaces the original two-level iterator, whose bucket-advance logic
     * could revisit or skip buckets.
     */
    private final class TableIterator implements Iterator<E> {
        private int bucket;
        private Node<E> next;

        private TableIterator() {
            advanceBucket();
        }

        /** Moves `next` to the head of the following non-empty bucket if needed. */
        private void advanceBucket() {
            while (next == null && bucket < table.length) {
                next = table[bucket++];
            }
        }

        @Override
        public boolean hasNext() {
            return next != null;
        }

        @Override
        public E next() {
            if (next == null) {
                throw new NoSuchElementException();
            }
            Node<E> returned = next;
            next = returned.next;
            advanceBucket();
            return returned.item;
        }
    }
}
| miroha/2015.2 | Examples/structs/src/main/java/ru/mirea/oop/practice/set/SetImpl.java | Java | mit | 5,357 |
/**
* @file expectation.cpp
* @author Chris Vig (chris@invictus.so)
* @date 2016/12/30
*/
/* -- Includes -- */
#include <sstream>
#include <string>
#include <spookshow/spookshow.hpp>
/* -- Namespaces -- */
using namespace spookshow;
/* -- Procedures -- */
// Creates a named expectation requiring `required_count` fulfillments. If an
// expectation_order is active on this thread, the expectation also enqueues
// itself so fulfillment ordering can be enforced.
expectation::expectation(const std::string& name, int required_count)
  : m_name(name),
    m_required_count(required_count),
    m_order(expectation_order::current_order()),
    m_count(0)
{
  if (m_order)
    m_order->enqueue_expectation(this);
}
// On destruction, an expectation that was never fully satisfied reports a
// failure describing the expected vs. actual call counts.
expectation::~expectation()
{
  if (is_fulfilled())
    return;
  std::ostringstream message;
  message << "Unfulfilled expectation! [" << name()
          << "] Expected " << m_required_count << " call" << (m_required_count == 1 ? "" : "s")
          << ", received " << m_count << " call" << (m_count == 1 ? "" : "s") << ".";
  internal::handle_failure(message.str());
}
// Records one fulfillment. Ordering (when active) is checked only on the
// first call: the expectation must be at the front of the order queue, in
// which case it is dequeued; otherwise a failure is reported.
void expectation::fulfill()
{
  const bool first_call = (m_count == 0);
  if (m_order && first_call)
  {
    if (!m_order->is_expectation_next(this))
    {
      std::ostringstream message;
      message << "Expectation fulfilled out of order! [" << name() << "]";
      internal::handle_failure(message.str());
    }
    else
      m_order->dequeue_expectation();
  }
  ++m_count;
}
| xchrishawk/spookshow | src/expectation.cpp | C++ | mit | 1,235 |
// tslint:disable:no-console
import chalk from "chalk";
import { EventEmitter } from "events";
import { assert } from "node-opcua-assert";
import { display_trace_from_this_projet_only, hexDump, make_debugLog } from "node-opcua-debug";
import { analyseExtensionObject } from "node-opcua-packet-analyzer";
import { GetEndpointsResponse } from "node-opcua-service-endpoints";
import { CloseSecureChannelResponse, OpenSecureChannelResponse } from "node-opcua-service-secure-channel";
import { ActivateSessionResponse, CreateSessionResponse } from "node-opcua-service-session";
import { AcknowledgeMessage } from "node-opcua-transport";
import { DirectTransport } from "node-opcua-transport/dist/test_helpers";
import * as _ from "underscore";
const debugLog = make_debugLog(__filename);
// Canned protocol messages replayed by MockServerTransport in tests.
// Handshake acknowledge returned for the client's HEL message.
export const fakeAcknowledgeMessage = new AcknowledgeMessage({
    maxChunkCount: 600000,
    maxMessageSize: 100000,
    protocolVersion: 0,
    receiveBufferSize: 8192,
    sendBufferSize: 8192,
});
// Empty CloseSecureChannel reply.
export const fakeCloseSecureChannelResponse = new CloseSecureChannelResponse({});
// OpenSecureChannel reply with a fixed channel id/token and a dummy nonce.
export const fakeOpenSecureChannelResponse = new OpenSecureChannelResponse({
    serverProtocolVersion: 0,
    securityToken: {
        channelId: 23,
        createdAt: new Date(), // now
        revisedLifetime: 30000,
        tokenId: 1,
    },
    serverNonce: Buffer.from("qwerty")
});
// GetEndpoints reply advertising a single fake endpoint URL.
export const fakeGetEndpointsResponse = new GetEndpointsResponse({
    endpoints: [
        {
            endpointUrl: "fake://localhost:2033/SomeAddress"
        }
    ]
});
// Empty session-service replies.
export const fakeCreateSessionResponse = new CreateSessionResponse({});
export const fakeActivateSessionResponse = new ActivateSessionResponse({});
/**
 * In-memory server-transport test double: replays a scripted sequence of
 * replies over a DirectTransport. Each packet received from the client
 * consumes the next entry of `expectedReplies`; an entry may be a Buffer, an
 * array of Buffers (all written in order), or a function returning either
 * (a falsy return swallows the packet). When the script is exhausted the
 * instance dumps diagnostics and emits "done".
 */
export class MockServerTransport extends EventEmitter {
    private _replies: any;               // scripted replies, consumed by index
    private _mockTransport: DirectTransport;
    private _counter: number;            // number of client packets handled so far
    constructor(expectedReplies: any) {
        super();
        this._replies = expectedReplies;
        this._counter = 0;
        this._mockTransport = new DirectTransport();
        this._mockTransport.initialize(() => {
            console.log("initialized");
        });
        this._mockTransport.server.on("data", (data: Buffer) => {
            // Take the next scripted reply for this incoming packet.
            let reply = this._replies[this._counter];
            this._counter++;
            if (reply) {
                if (_.isFunction(reply)) {
                    reply = reply.call(this);
                    // A falsy return from the function means: no response.
                    if (!reply) {
                        return;
                    }
                }
                debugLog("\nFAKE SERVER RECEIVED");
                debugLog(hexDump(data));
                // Normalize to an array of buffers before writing back.
                let replies = [];
                if (reply instanceof Buffer) {
                    replies.push(reply);
                } else {
                    replies = reply;
                }
                assert(replies.length >= 1, " expecting at least one reply " + JSON.stringify(reply));
                replies.forEach((reply1: any) => {
                    debugLog("\nFAKE SERVER SEND");
                    debugLog(chalk.red(hexDump(reply1)));
                    this._mockTransport.server.write(reply1);
                });
            } else {
                // Script exhausted: dump the unexpected packet and finish.
                const msg = " MockServerTransport has no more packets to send to client to" +
                    " emulate server responses.... ";
                console.log(chalk.red.bold(msg));
                console.log(chalk.blue.bold(hexDump(data)));
                display_trace_from_this_projet_only();
                analyseExtensionObject(data, 0, 0, {});
                this.emit("done");
            }
        });
    }
}
| sebgod/node-opcua | packages/node-opcua-secure-channel/test_helpers/mock/mock_transport.ts | TypeScript | mit | 3,686 |
import { FieldErrorProcessor } from "./processors/field-error-processor";
import { RuleResolver } from "./rulesets/rule-resolver";
import { ValidationGroupBuilder } from "./builders/validation-group-builder";
import { ruleRegistry } from "./rule-registry-setup";
import { RulesetBuilder } from "./builders/ruleset-builder";
import { DefaultLocaleHandler } from "./localization/default-locale-handler";
import { locale as defaultLocale } from "./locales/en-us";
import { Ruleset } from "./rulesets/ruleset";
// Default locale bootstrap: register and activate en-us so error messages
// resolve out of the box without any consumer configuration.
const defaultLocaleCode = "en-us";
const defaultLocaleHandler = new DefaultLocaleHandler();
defaultLocaleHandler.registerLocale(defaultLocaleCode, defaultLocale);
defaultLocaleHandler.useLocale(defaultLocaleCode);

// Module-wide singletons shared by every validation group created below.
const fieldErrorProcessor = new FieldErrorProcessor(ruleRegistry, defaultLocaleHandler);
const ruleResolver = new RuleResolver();
/**
 * Starts building a ruleset based upon the given target.
 *
 * @param basedUpon the model/class the ruleset describes
 * @param withRuleVerification when true, the builder checks rule names
 *        against the shared rule registry as they are added
 * @returns the builder chain started with `create(basedUpon)`
 */
export function createRuleset(basedUpon, withRuleVerification = false) {
    let builder;
    if (withRuleVerification) {
        builder = new RulesetBuilder(ruleRegistry);
    } else {
        builder = new RulesetBuilder();
    }
    return builder.create(basedUpon);
}
/**
 * Produces a new ruleset combining two existing ones.
 * On key collisions the entries of `rulesetB` win, exactly as with
 * `Object.assign`. Neither input ruleset is modified.
 *
 * @returns a freshly constructed, merged Ruleset
 */
export function mergeRulesets(rulesetA, rulesetB) {
    const merged = new Ruleset();
    ["rules", "compositeRules", "propertyDisplayNames"].forEach(section => {
        merged[section] = Object.assign({}, rulesetA[section], rulesetB[section]);
    });
    return merged;
}
/**
 * Builds a fresh validation group wired to the module-level error
 * processor, rule resolver and locale handler.
 */
export function createGroup() {
    const builder = new ValidationGroupBuilder(fieldErrorProcessor, ruleResolver, defaultLocaleHandler);
    return builder.create();
}

// The module-wide locale handler, exposed so consumers can manage locales.
export const localeHandler = defaultLocaleHandler;

/**
 * Delegates to the locale handler's `supplementLocaleFrom` for the given
 * locale code and resource.
 */
export function supplementLocale(localeCode, localeResource) {
    defaultLocaleHandler.supplementLocaleFrom(localeCode, localeResource);
}
| grofit/treacherous | dist/es2015/exposer.js | JavaScript | mit | 1,813 |
package repositories.Control;
import repositories.model.Goods;
import repositories.util.DB;
import repositories.util.ScannerChoice;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
/**
* Created by bonismo
* 15/7/18 下午4:04
* <p>
* 数据库 Goods 表 数据操作
*/
public final class GoodsControl {

    // Shared JDBC handles. Each public method acquires its own connection via
    // DB.getConnection() and releases everything through DB.close(...).
    private Connection connection = null;
    private PreparedStatement statement = null;
    ResultSet resultSet = null;

    /**
     * Inserts one product row into the Goods table.
     *
     * @param goods product instance supplying name, price and quantity
     * @return true when at least one row was inserted
     */
    public boolean addGoods(Goods goods) {
        boolean flag = false;
        String sqlAdd = "INSERT INTO db_shopping_management.goods VALUES (NULL , ?, ?, ?);";
        connection = DB.getConnection();
        statement = null;
        try {
            if (connection != null) {
                statement = connection.prepareStatement(sqlAdd);
            } else {
                // No connection available; nothing has been opened yet.
                return false;
            }
            statement.setString(1, goods.getGoods_name());
            statement.setDouble(2, goods.getGoods_price());
            statement.setInt(3, goods.getGoods_num());
            int rs = statement.executeUpdate();
            if (rs > 0) {
                flag = true;
            }
        } catch (SQLException e) {
            e.printStackTrace();
        } finally {
            DB.close(null, statement, connection);
        }
        return flag;
    }

    /**
     * Updates a single column of a product row, selected by {@code key}.
     *
     * @param key   1 = update name, 2 = update price, 3 = update quantity
     * @param goods carries the row id and the new value
     * @return true when at least one row was updated
     */
    public boolean updateGoods(int key, Goods goods) {
        boolean flag = false;
        connection = DB.getConnection();
        switch (key) {
            case 1: // key = 1: update the product name
                String updateName = "UPDATE db_shopping_management.goods SET goods_name = ? WHERE id = ?";
                try {
                    if (connection != null) {
                        statement = connection.prepareStatement(updateName);
                    } else {
                        return false;
                    }
                    statement.setString(1, goods.getGoods_name());
                    statement.setInt(2, goods.getId());
                    int rs = statement.executeUpdate();
                    if (rs > 0) {
                        flag = true;
                    }
                } catch (SQLException e) {
                    e.printStackTrace();
                } finally {
                    DB.close(null, statement, connection);
                }
                break;
            case 2: // key = 2: update the product price
                String updatePrice = "UPDATE db_shopping_management.goods SET goods_price = ? WHERE id = ?";
                try {
                    if (connection != null) {
                        statement = connection.prepareStatement(updatePrice);
                    } else {
                        return false;
                    }
                    statement.setDouble(1, goods.getGoods_price());
                    statement.setInt(2, goods.getId());
                    int rs = statement.executeUpdate();
                    if (rs > 0) {
                        flag = true;
                    }
                } catch (SQLException e) {
                    e.printStackTrace();
                } finally {
                    DB.close(null, statement, connection);
                }
                break;
            case 3: // key = 3: update the product quantity
                String updateNum = "UPDATE db_shopping_management.goods SET goods_num = ? WHERE id = ?";
                try {
                    // FIX: guard against a null connection, consistent with
                    // cases 1 and 2 (previously this branch could throw NPE).
                    if (connection != null) {
                        statement = connection.prepareStatement(updateNum);
                    } else {
                        return false;
                    }
                    statement.setInt(1, goods.getGoods_num());
                    statement.setInt(2, goods.getId());
                    int rs = statement.executeUpdate();
                    if (rs > 0) {
                        flag = true;
                    }
                } catch (SQLException e) {
                    e.printStackTrace();
                } finally {
                    DB.close(null, statement, connection);
                }
                break;
            default:
                break;
        }
        return flag;
    }

    /**
     * Deletes one product row by its primary key.
     *
     * @param id primary key of the Goods row
     * @return true when at least one row was deleted
     */
    public boolean deleteGoods(int id) {
        boolean flag = false;
        connection = DB.getConnection();
        String deleteId = "DELETE FROM db_shopping_management.goods WHERE id = ?";
        try {
            // FIX: null-check the connection like the other methods instead of
            // risking a NullPointerException when the DB is unreachable.
            if (connection == null) {
                return false;
            }
            statement = connection.prepareStatement(deleteId);
            statement.setInt(1, id);
            int rs = statement.executeUpdate();
            if (rs > 0) {
                flag = true;
            }
        } catch (SQLException e) {
            e.printStackTrace();
        } finally {
            DB.close(null, statement, connection);
        }
        return flag;
    }

    /**
     * Runs a SELECT over the Goods table and appends one {@link Goods}
     * instance per row to the supplied list. Acquires and releases its own
     * connection.
     *
     * @param goodsArrayList destination container for the query results
     * @param query          the SQL SELECT statement to execute
     */
    public static void queryAll(ArrayList<Goods> goodsArrayList, String query) {
        Connection connection = DB.getConnection();
        PreparedStatement statement = null;
        ResultSet resultSet = null;
        try {
            statement = connection.prepareStatement(query);
            resultSet = statement.executeQuery();
            while (resultSet.next()) {
                int id = resultSet.getInt("id");
                String goods_name = resultSet.getString("goods_name");
                Double goods_price = resultSet.getDouble("goods_price");
                int goods_num = resultSet.getInt("goods_num");
                // Build the model from the row and collect it.
                Goods goods = new Goods(id, goods_name, goods_price, goods_num);
                goodsArrayList.add(goods);
            }
        } catch (SQLException e) {
            e.printStackTrace();
        } finally {
            DB.close(resultSet, statement, connection);
        }
    }

    /**
     * Queries products by the given option.
     *
     * @param key 1 = order by quantity, 2 = order by price,
     *            3 = fuzzy search by name (name read interactively)
     * @return the matching products (possibly empty)
     */
    public ArrayList<Goods> queryGoods(int key) {
        ArrayList<Goods> goodsArrayList = new ArrayList<>();
        // FIX: do not acquire a connection up front — cases 1 and 2 delegate to
        // queryAll(), which opens and closes its own connection; the previous
        // method-level connection was never closed for those cases.
        switch (key) {
            case 1: // order by quantity, ascending
                String selNum = "SELECT * FROM db_shopping_management.goods ORDER BY goods_num";
                queryAll(goodsArrayList, selNum);
                break;
            case 2: // order by price, ascending
                String selPrice = "SELECT * FROM db_shopping_management.goods ORDER BY goods_price";
                queryAll(goodsArrayList, selPrice);
                break;
            case 3: // fuzzy search by product name
                String nameGet = ScannerChoice.scannerInfoString();
                nameGet = "%" + nameGet + "%";
                String sqlGoodsName = "SELECT * FROM db_shopping_management.goods WHERE goods.goods_name LIKE ?";
                connection = DB.getConnection();
                try {
                    if (connection == null) {
                        break;
                    }
                    statement = connection.prepareStatement(sqlGoodsName);
                    statement.setString(1, nameGet);
                    // FIX: the statement was prepared but never executed, so a
                    // name search always returned an empty list and leaked the
                    // connection and statement.
                    resultSet = statement.executeQuery();
                    while (resultSet.next()) {
                        goodsArrayList.add(new Goods(
                                resultSet.getInt("id"),
                                resultSet.getString("goods_name"),
                                resultSet.getDouble("goods_price"),
                                resultSet.getInt("goods_num")));
                    }
                } catch (SQLException e) {
                    e.printStackTrace();
                } finally {
                    DB.close(resultSet, statement, connection);
                }
                break;
            default:
                break;
        }
        return goodsArrayList;
    }

    /**
     * Fetches every row of the Goods table.
     *
     * @return all products (possibly empty)
     */
    public ArrayList<Goods> displayGoods() {
        ArrayList<Goods> goodsArrayList = new ArrayList<>();
        String query = "SELECT * FROM db_shopping_management.goods";
        queryAll(goodsArrayList, query);
        return goodsArrayList;
    }
}
| StayHungryStayFoolish/stayhungrystayfoolish.github.com | Shoping_Management_System/src/main/java/repositories/Control/GoodsControl.java | Java | mit | 8,118 |
using System;
using System.Reflection;
using NetOffice;
using NetOffice.Attributes;
namespace AccessApi.Utils
{
    #pragma warning disable

    /// <summary>
    /// Necessary factory info, used from NetOffice.Core while Initialize()
    /// </summary>
    public class ProjectInfo : IFactoryInfo
    {
        #region Fields

        private string _assemblyName;
        private string _assemblyNamespace = "NetOffice.AccessApi";
        private Guid[] _componentGuids = new Guid[] { new Guid("4AFFC9A0-5F99-101B-AF4E-00AA003F0F07") };
        private Assembly _hostAssembly;
        private NetOfficeAssemblyAttribute _hostAssemblyAttribute;
        private Type[] _typeCache;
        private string[] _dependentAssemblies;

        #endregion

        #region Ctor

        /// <summary>
        /// Creates an instance and caches the hosting assembly, its
        /// NetOffice assembly attribute and its simple name.
        /// </summary>
        public ProjectInfo()
        {
            _hostAssembly = typeof(ProjectInfo).Assembly;
            object[] attributes = _hostAssembly.GetCustomAttributes(typeof(NetOfficeAssemblyAttribute), true);
            _hostAssemblyAttribute = attributes[0] as NetOfficeAssemblyAttribute;
            _assemblyName = _hostAssembly.GetName().Name;
        }

        #endregion

        #region IFactoryInfo

        public string AssemblyName
        {
            get { return _assemblyName; }
        }

        public string AssemblyNamespace
        {
            get { return _assemblyNamespace; }
        }

        public Guid[] ComponentGuid
        {
            get { return _componentGuids; }
        }

        public Assembly Assembly
        {
            get { return _hostAssembly; }
        }

        public NetOfficeAssemblyAttribute AssemblyAttribute
        {
            get { return _hostAssemblyAttribute; }
        }

        public string[] Dependencies
        {
            get
            {
                // Built lazily on first access; the list never changes afterwards.
                if (null == _dependentAssemblies)
                    _dependentAssemblies = new string[] { "OfficeApi.dll", "DAOApi.dll", "VBIDEApi.dll", "ADODBApi.dll", "OWC10Api.dll" };
                return _dependentAssemblies;
            }
        }

        public bool IsDuck
        {
            get { return false; }
        }

        public bool Contains(Type type)
        {
            foreach (Type candidate in ExportedTypes())
            {
                if (candidate == type)
                    return true;
            }
            return false;
        }

        public bool Contains(string className)
        {
            // Suffix match (case-insensitive), so nested/prefixed type names
            // still resolve by their short class name.
            foreach (Type candidate in ExportedTypes())
            {
                if (candidate.Name.EndsWith(className, StringComparison.InvariantCultureIgnoreCase))
                    return true;
            }
            return false;
        }

        /// <summary>
        /// Returns the lazily cached exported types of this assembly.
        /// </summary>
        private Type[] ExportedTypes()
        {
            if (null == _typeCache)
                _typeCache = Assembly.GetExportedTypes();
            return _typeCache;
        }

        #endregion
    }

    #pragma warning restore
}
| NetOfficeFw/NetOffice | Source/Access/Utils/ProjectInfo.cs | C# | mit | 2,992 |
<?php
/**
* Atom Protocol support
*
* @author Михаил Красильников <m.krasilnikov@yandex.ru>
* @license MIT
*/
namespace Mekras\Atom\Element\Meta;
use Mekras\Atom\Element\Element;
use Mekras\Atom\Element\Subtitle;
use Mekras\Atom\Node;
use Mekras\Atom\NodeInterfaceTrait;
/**
* Element has a title.
*
* @since 1.0
*
* @link https://tools.ietf.org/html/rfc4287#section-4.2.12
*/
trait HasSubtitle
{
    use NodeInterfaceTrait;

    /**
     * Return the subtitle of this element, or NULL when absent.
     *
     * @return Subtitle|null
     *
     * @since 1.0
     */
    public function getSubtitle()
    {
        return $this->getCachedProperty(
            'subtitle',
            function () {
                // "atom:subtitle" is not REQUIRED, so a missing node is no error.
                $node = $this->query('atom:subtitle', Node::SINGLE);
                if (!$node) {
                    return null;
                }

                /** @var Element $this */
                return $this->getExtensions()->parseElement($this, $node);
            }
        );
    }

    /**
     * Add subtitle.
     *
     * @param string $value Subtitle content.
     * @param string $type  Content type ("text" by default).
     *
     * @return Subtitle The newly created child element.
     *
     * @since 1.0
     */
    public function addSubtitle($value, $type = 'text')
    {
        /** @var Subtitle $subtitle */
        $subtitle = $this->addChild('atom:subtitle', 'subtitle');
        $subtitle->setContent($value, $type);

        return $subtitle;
    }
}
| mekras/atom | src/Element/Meta/HasSubtitle.php | PHP | mit | 1,379 |
# Types, Variables — the same local can be rebound to values of any type.
value = "foo value"
value = 321
value = 321.12

# Arrays
value = ["new", "foo", "value"]

# Hashes
value = {foo: "foo-val", baz: "baz-val"}

# If/Else (statement-modifier form)
puts "hi" if true

# Loops (for..in instead of each)
for word in ["hi", "there"]
  puts word
end

# functions
def baz
  puts "baz called"
end

# class
class Bar
end
/**
* @file A set of global functions available to all components.
* @author Rowina Sanela
*/
(bbn => {
  "use strict";

  // The mixin below depends on the bbn-vue helpers; fail loudly if absent.
  if ( !bbn.vue ){
    throw new Error("Impossible to find the library bbn-vue")
  }

  Vue.mixin({
    computed: {
      /**
       * Return the object of the currentPopup.
       * The result is cached on the non-reactive `_currentPopup` property of
       * this component (and looked up through the ancestor chain up to $root
       * on first access).
       * @computed currentPopup
       * @return {Object}
       */
      currentPopup(){
        if ( !this._currentPopup ){
          let e = bbn.vue._retrievePopup(this);
          if ( e ){
            this._currentPopup = e;
          }
          else{
            // Walk up the component tree until a popup (or $root) is found.
            let vm = this;
            while (vm = vm.$parent) {
              if ( vm._currentPopup ){
                this._currentPopup = vm._currentPopup;
                break;
              }
              else if ( vm ){
                e = bbn.vue._retrievePopup(vm);
                if ( e ){
                  this._currentPopup = e;
                  break;
                }
              }
              if (vm === this.$root) {
                break;
              }
            }
          }
        }
        if ( this._currentPopup ){
          return this._currentPopup;
        }
        return null;
      }
    },
    methods: {
      /**
       * Return the function bbn._ for the strings' translation.
       * @method _
       * @return {Function}
       */
      _: bbn._,
      /**
       * Returns the given ref (will return $refs[name] or $refs[name][0])
       * @method getRef
       * @param {String} name
       * @fires bbn.vue.getRef
       * @return {Function}
       */
      getRef(name){
        return bbn.vue.getRef(this, name);
      },
      /**
       * Checks if the component corresponds to the selector
       * @method is
       * @fires bbn.vue.is
       * @param {String} selector
       * @return {Function}
       */
      is(selector){
        return bbn.vue.is(this, selector);
      },
      /**
       * Returns the closest component matching the given selector
       * @method closest
       * @param {String} selector
       * @param {Boolean} checkEle
       * @return {Function}
       */
      closest(selector, checkEle){
        return bbn.vue.closest(this, selector, checkEle);
      },
      /**
       * Returns an array of parent components until $root
       * @method ancestors
       * @param {String} selector
       * @param {Boolean} checkEle
       * @return {Function}
       */
      ancestors(selector, checkEle){
        return bbn.vue.ancestors(this, selector, checkEle);
      },
      /**
       * Fires the function bbn.vue.getChildByKey.
       * @method getChildByKey
       * @param {String} key
       * @param {String} selector
       * @todo Remove for Vue3
       * @return {Function}
       */
      getChildByKey(key, selector){
        return bbn.vue.getChildByKey(this, key, selector);
      },
      /**
       * Fires the function bbn.vue.findByKey.
       * @method findByKey
       * @param {String} key
       * @param {String} selector
       * @param {Array} ar
       * @todo Remove for Vue3
       * @return {Function}
       */
      findByKey(key, selector, ar){
        return bbn.vue.findByKey(this, key, selector, ar);
      },
      /**
       * Fires the function bbn.vue.findAllByKey.
       * @method findAllByKey
       * @param {String} key
       * @param {String} selector
       * @todo Remove for Vue3
       * @return {Function}
       */
      findAllByKey(key, selector){
        return bbn.vue.findAllByKey(this, key, selector);
      },
      /**
       * Fires the function bbn.vue.find.
       * @method find
       * @param {String} selector
       * @param {Number} index
       * @todo Remove for Vue3
       * @return {Function}
       */
      find(selector, index){
        return bbn.vue.find(this, selector, index);
      },
      /**
       * Fires the function bbn.vue.findAll.
       * @method findAll
       * @param {String} selector
       * @param {Boolean} only_children
       * @todo Remove for Vue3
       * @return {Function}
       */
      findAll(selector, only_children){
        return bbn.vue.findAll(this, selector, only_children);
      },
      /**
       * Extends an object with Vue.$set
       * Note: the named parameters only document the expected call shape;
       * the implementation forwards ALL arguments to bbn.vue.extend with
       * this component prepended.
       * @method extend
       * @param {Boolean} deep If true, performs a deep merge
       * @param {Object} src The object to be extended
       * @param {Object} obj1 The source object(s) merged into src
       * @return {Object}
       */
      extend(deep, src, obj1){
        let args = [this];
        for ( let i = 0; i < arguments.length; i++ ){
          args.push(arguments[i]);
        }
        return bbn.vue.extend(...args);
      },
      /**
       * Fires the function bbn.vue.getComponents.
       * @method getComponents
       * @param {Array} ar
       * @param {Boolean} only_children
       * @todo Remove for Vue3
       * @return {Function}
       */
      getComponents(ar, only_children){
        return bbn.vue.getComponents(this, ar, only_children);
      },
      /**
       * Returns the closest popup object; when called with arguments it
       * opens that popup instead, tagging an object first argument with
       * `opener = this`.
       * @method getPopup
       * @return {Object}
       */
      getPopup(){
        let popup = bbn.vue.getPopup(this);
        if (arguments.length && popup) {
          let cfg = arguments[0];
          let args = [];
          if (bbn.fn.isObject(cfg)) {
            cfg.opener = this;
          }
          args.push(cfg);
          for (let i = 1; i < arguments.length; i++) {
            args.push(arguments[i]);
          }
          return popup.open.apply(popup, args);
        }
        return popup;
      },
      /**
       * Opens a confirmation from the closest popup.
       * A non-object first argument gets this component appended as the
       * last argument (used as opener by the popup).
       * @method confirm
       */
      confirm(){
        let popup = this.getPopup();
        if (arguments.length && popup) {
          let cfg = arguments[0];
          let args = [];
          if (bbn.fn.isObject(cfg)) {
            cfg.opener = this;
          }
          args.push(cfg);
          for (let i = 1; i < arguments.length; i++) {
            args.push(arguments[i]);
          }
          if (!bbn.fn.isObject(cfg)) {
            args.push(this);
          }
          return popup.confirm.apply(popup, args)
        }
      },
      /**
       * Opens an alert from the closest popup.
       * Same argument handling as confirm().
       * @method alert
       */
      alert(){
        let popup = this.getPopup();
        if (arguments.length && popup) {
          let cfg = arguments[0];
          let args = [];
          if (bbn.fn.isObject(cfg)) {
            cfg.opener = this;
          }
          args.push(cfg);
          for (let i = 1; i < arguments.length; i++) {
            args.push(arguments[i]);
          }
          if (!bbn.fn.isObject(cfg)) {
            args.push(this);
          }
          return popup.alert.apply(popup, args)
        }
      },
      /**
       * Executes bbn.fn.post
       * (Note: forwards the raw `arguments` object, unlike postOut which
       * spreads its arguments.)
       * @method post
       * @see {@link https://bbn.io/bbn-js/doc/ajax/post|bbn.fn.post} documentation
       * @todo Stupid idea, it should be removed.
       * @return {Promise}
       */
      post(){
        return bbn.vue.post(this, arguments);
      },
      /**
       * Executes bbn.fn.postOut
       * @method postOut
       * @see {@link https://bbn.io/bbn-js/doc/ajax/postOut|bbn.fn.postOut} documentation
       * @todo Stupid idea, it should be removed.
       * @return {*} whatever bbn.vue.postOut returns
       */
      postOut(){
        return bbn.vue.postOut(this, ...arguments);
      },
      /**
       * Returns a component name based on this component and the given
       * arguments (all arguments are forwarded to bbn.vue.getComponentName).
       * @method getComponentName
       * @memberof bbn.vue
       */
      getComponentName(){
        return bbn.vue.getComponentName(this, ...arguments);
      },
    }
  });
})(window.bbn); | nabab/bbn-vue | src/mixins.js | JavaScript | mit | 7,844 |
package com.optimaize.webcrawlerverifier.bots;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableSet;
import org.jetbrains.annotations.NotNull;
import java.util.Collections;
import java.util.Set;
/**
* Resources:
* http://en.wikipedia.org/wiki/Googlebot
* https://support.google.com/webmasters/answer/80553
*/
public class GooglebotData implements CrawlerData {

    /** Matches any user agent string containing the "Googlebot" token. */
    private static final Predicate<String> PREDICATE = new Predicate<String>() {
        @Override
        public boolean apply(String userAgent) {
            return userAgent.contains("Googlebot");
        }
    };

    /** Hostnames used for reverse-DNS verification of genuine Googlebot traffic. */
    private static final ImmutableSet<String> HOSTNAMES = ImmutableSet.of("googlebot.com");

    private static final GooglebotData INSTANCE = new GooglebotData();

    private GooglebotData() {
        // Singleton: obtain via getInstance().
    }

    /**
     * @return the shared singleton instance
     */
    public static GooglebotData getInstance() {
        return INSTANCE;
    }

    @NotNull
    @Override
    public String getIdentifier() {
        return "GOOGLEBOT";
    }

    @NotNull
    @Override
    public Predicate<String> getUserAgentChecker() {
        return PREDICATE;
    }

    @NotNull
    @Override
    public Set<String> getIps() {
        // No IP whitelist is provided; verification relies on HOSTNAMES instead.
        return Collections.emptySet();
    }

    @NotNull
    @Override
    public Set<String> getHostnames() {
        return HOSTNAMES;
    }
}
| optimaize/webcrawler-verifier | src/main/java/com/optimaize/webcrawlerverifier/bots/GooglebotData.java | Java | mit | 1,358 |
# encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Batch::Mgmt::V2019_08_01
#
# ApplicationOperations
#
class ApplicationOperations
include MsRestAzure
#
# Creates and initializes a new instance of the ApplicationOperations class.
# @param client service class for accessing basic functionality.
#
def initialize(client)
  # All HTTP traffic goes through this shared BatchManagementClient.
  @client = client
end
# @return [BatchManagementClient] reference to the BatchManagementClient
#   this operations group was created with.
attr_reader :client
#
# Adds an application to the specified Batch account.
#
# @param resource_group_name [String] The name of the resource group that
# contains the Batch account.
# @param account_name [String] The name of the Batch account.
# @param application_name [String] The name of the application. This must be
# unique within the account.
# @param parameters [Application] The parameters for the request.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Application] operation results.
#
# Adds an application to the specified Batch account and blocks until the
# operation completes, returning the deserialized body (or nil).
def create(resource_group_name, account_name, application_name, parameters:nil, custom_headers:nil)
  result = create_async(resource_group_name, account_name, application_name, parameters:parameters, custom_headers:custom_headers).value!
  result.nil? ? nil : result.body
end
#
# Adds an application to the specified Batch account.
#
# @param resource_group_name [String] The name of the resource group that
# contains the Batch account.
# @param account_name [String] The name of the Batch account.
# @param application_name [String] The name of the application. This must be
# unique within the account.
# @param parameters [Application] The parameters for the request.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
# Same as #create but returns the full AzureOperationResponse
# (status, headers and body) instead of only the body.
def create_with_http_info(resource_group_name, account_name, application_name, parameters:nil, custom_headers:nil)
  promise = create_async(resource_group_name, account_name, application_name, parameters:parameters, custom_headers:custom_headers)
  promise.value!
end
#
# Adds an application to the specified Batch account.
#
# @param resource_group_name [String] The name of the resource group that
# contains the Batch account.
# @param account_name [String] The name of the Batch account.
# @param application_name [String] The name of the application. This must be
# unique within the account.
# @param parameters [Application] The parameters for the request.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def create_async(resource_group_name, account_name, application_name, parameters:nil, custom_headers:nil)
  # Client-side validation mirroring the service's naming constraints, so
  # obviously invalid requests fail fast without a round trip.
  fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
  fail ArgumentError, 'account_name is nil' if account_name.nil?
  fail ArgumentError, "'account_name' should satisfy the constraint - 'MaxLength': '24'" if !account_name.nil? && account_name.length > 24
  fail ArgumentError, "'account_name' should satisfy the constraint - 'MinLength': '3'" if !account_name.nil? && account_name.length < 3
  fail ArgumentError, "'account_name' should satisfy the constraint - 'Pattern': '^[-\w\._]+$'" if !account_name.nil? && account_name.match(Regexp.new('^^[-\w\._]+$$')).nil?
  fail ArgumentError, 'application_name is nil' if application_name.nil?
  fail ArgumentError, "'application_name' should satisfy the constraint - 'MaxLength': '64'" if !application_name.nil? && application_name.length > 64
  fail ArgumentError, "'application_name' should satisfy the constraint - 'MinLength': '1'" if !application_name.nil? && application_name.length < 1
  fail ArgumentError, "'application_name' should satisfy the constraint - 'Pattern': '^[a-zA-Z0-9_-]+$'" if !application_name.nil? && application_name.match(Regexp.new('^^[a-zA-Z0-9_-]+$$')).nil?
  fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
  fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?


  request_headers = {}
  request_headers['Content-Type'] = 'application/json; charset=utf-8'

  # Set Headers
  request_headers['x-ms-client-request-id'] = SecureRandom.uuid
  request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?

  # Serialize Request
  request_mapper = Azure::Batch::Mgmt::V2019_08_01::Models::Application.mapper()
  request_content = @client.serialize(request_mapper, parameters)
  request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil

  path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Batch/batchAccounts/{accountName}/applications/{applicationName}'

  request_url = @base_url || @client.base_url

  options = {
      middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
      path_params: {'resourceGroupName' => resource_group_name,'accountName' => account_name,'applicationName' => application_name,'subscriptionId' => @client.subscription_id},
      query_params: {'api-version' => @client.api_version},
      body: request_content,
      headers: request_headers.merge(custom_headers || {}),
      base_url: request_url
  }
  promise = @client.make_request_async(:put, path_template, options)

  promise = promise.then do |result|
    http_response = result.response
    status_code = http_response.status
    response_content = http_response.body
    # Only 200 counts as success for this PUT; anything else raises.
    unless status_code == 200
      error_model = JSON.load(response_content)
      fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
    end

    # Propagate the tracking ids returned by the service onto the result.
    result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
    result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
    result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
    # Deserialize Response
    if status_code == 200
      begin
        parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
        result_mapper = Azure::Batch::Mgmt::V2019_08_01::Models::Application.mapper()
        result.body = @client.deserialize(result_mapper, parsed_response)
      rescue Exception => e
        fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
      end
    end

    result
  end

  promise.execute
end
#
# Deletes an application.
#
# @param resource_group_name [String] The name of the resource group that
# contains the Batch account.
# @param account_name [String] The name of the Batch account.
# @param application_name [String] The name of the application. This must be
# unique within the account.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
#
# Deletes an application, blocking until the service call settles.
# This wrapper intentionally returns nil.
def delete(resource_group_name, account_name, application_name, custom_headers:nil)
  delete_async(resource_group_name, account_name, application_name, custom_headers:custom_headers).value!
  nil
end
#
# Deletes an application.
#
# @param resource_group_name [String] The name of the resource group that
# contains the Batch account.
# @param account_name [String] The name of the Batch account.
# @param application_name [String] The name of the application. This must be
# unique within the account.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
# Same as #delete but returns the full AzureOperationResponse
# (status, headers and body).
def delete_with_http_info(resource_group_name, account_name, application_name, custom_headers:nil)
  promise = delete_async(resource_group_name, account_name, application_name, custom_headers:custom_headers)
  promise.value!
end
#
# Deletes an application.
#
# @param resource_group_name [String] The name of the resource group that
# contains the Batch account.
# @param account_name [String] The name of the Batch account.
# @param application_name [String] The name of the application. This must be
# unique within the account.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def delete_async(resource_group_name, account_name, application_name, custom_headers:nil)
  # Client-side validation mirroring the service's naming constraints.
  fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
  fail ArgumentError, 'account_name is nil' if account_name.nil?
  fail ArgumentError, "'account_name' should satisfy the constraint - 'MaxLength': '24'" if !account_name.nil? && account_name.length > 24
  fail ArgumentError, "'account_name' should satisfy the constraint - 'MinLength': '3'" if !account_name.nil? && account_name.length < 3
  fail ArgumentError, "'account_name' should satisfy the constraint - 'Pattern': '^[-\w\._]+$'" if !account_name.nil? && account_name.match(Regexp.new('^^[-\w\._]+$$')).nil?
  fail ArgumentError, 'application_name is nil' if application_name.nil?
  fail ArgumentError, "'application_name' should satisfy the constraint - 'MaxLength': '64'" if !application_name.nil? && application_name.length > 64
  fail ArgumentError, "'application_name' should satisfy the constraint - 'MinLength': '1'" if !application_name.nil? && application_name.length < 1
  fail ArgumentError, "'application_name' should satisfy the constraint - 'Pattern': '^[a-zA-Z0-9_-]+$'" if !application_name.nil? && application_name.match(Regexp.new('^^[a-zA-Z0-9_-]+$$')).nil?
  fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
  fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?


  request_headers = {}
  request_headers['Content-Type'] = 'application/json; charset=utf-8'

  # Set Headers
  request_headers['x-ms-client-request-id'] = SecureRandom.uuid
  request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
  path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Batch/batchAccounts/{accountName}/applications/{applicationName}'

  request_url = @base_url || @client.base_url

  options = {
      middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
      path_params: {'resourceGroupName' => resource_group_name,'accountName' => account_name,'applicationName' => application_name,'subscriptionId' => @client.subscription_id},
      query_params: {'api-version' => @client.api_version},
      headers: request_headers.merge(custom_headers || {}),
      base_url: request_url
  }
  promise = @client.make_request_async(:delete, path_template, options)

  promise = promise.then do |result|
    http_response = result.response
    status_code = http_response.status
    response_content = http_response.body
    # DELETE succeeds with 200 (deleted) or 204 (no content); no body to parse.
    unless status_code == 200 || status_code == 204
      error_model = JSON.load(response_content)
      fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
    end

    # Propagate the tracking ids returned by the service onto the result.
    result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
    result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
    result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?

    result
  end

  promise.execute
end
#
# Gets information about the specified application.
#
# @param resource_group_name [String] The name of the resource group that
# contains the Batch account.
# @param account_name [String] The name of the Batch account.
# @param application_name [String] The name of the application. This must be
# unique within the account.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Application] operation results.
#
# Gets information about the specified application, blocking until the
# call settles and returning the deserialized body (or nil).
def get(resource_group_name, account_name, application_name, custom_headers:nil)
  result = get_async(resource_group_name, account_name, application_name, custom_headers:custom_headers).value!
  result.nil? ? nil : result.body
end
#
# Gets information about the specified application.
#
# @param resource_group_name [String] The name of the resource group that
# contains the Batch account.
# @param account_name [String] The name of the Batch account.
# @param application_name [String] The name of the application. This must be
# unique within the account.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
# Same as #get but returns the full AzureOperationResponse
# (status, headers and body).
def get_with_http_info(resource_group_name, account_name, application_name, custom_headers:nil)
  promise = get_async(resource_group_name, account_name, application_name, custom_headers:custom_headers)
  promise.value!
end
#
# Gets information about the specified application.
#
# @param resource_group_name [String] The name of the resource group that
# contains the Batch account.
# @param account_name [String] The name of the Batch account.
# @param application_name [String] The name of the application. This must be
# unique within the account.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def get_async(resource_group_name, account_name, application_name, custom_headers:nil)
  # Client-side validation of required arguments and service constraints.
  fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
  fail ArgumentError, 'account_name is nil' if account_name.nil?
  fail ArgumentError, "'account_name' should satisfy the constraint - 'MaxLength': '24'" if !account_name.nil? && account_name.length > 24
  fail ArgumentError, "'account_name' should satisfy the constraint - 'MinLength': '3'" if !account_name.nil? && account_name.length < 3
  # NOTE(review): the doubled anchors ('^^...$$') are code-generator artifacts; harmless in Ruby regexes.
  fail ArgumentError, "'account_name' should satisfy the constraint - 'Pattern': '^[-\w\._]+$'" if !account_name.nil? && account_name.match(Regexp.new('^^[-\w\._]+$$')).nil?
  fail ArgumentError, 'application_name is nil' if application_name.nil?
  fail ArgumentError, "'application_name' should satisfy the constraint - 'MaxLength': '64'" if !application_name.nil? && application_name.length > 64
  fail ArgumentError, "'application_name' should satisfy the constraint - 'MinLength': '1'" if !application_name.nil? && application_name.length < 1
  fail ArgumentError, "'application_name' should satisfy the constraint - 'Pattern': '^[a-zA-Z0-9_-]+$'" if !application_name.nil? && application_name.match(Regexp.new('^^[a-zA-Z0-9_-]+$$')).nil?
  fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
  fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?

  # Standard ARM headers; each request gets a fresh client request id for tracing.
  request_headers = {}
  request_headers['Content-Type'] = 'application/json; charset=utf-8'

  # Set Headers
  request_headers['x-ms-client-request-id'] = SecureRandom.uuid
  request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
  path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Batch/batchAccounts/{accountName}/applications/{applicationName}'

  request_url = @base_url || @client.base_url

  options = {
      middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
      path_params: {'resourceGroupName' => resource_group_name,'accountName' => account_name,'applicationName' => application_name,'subscriptionId' => @client.subscription_id},
      query_params: {'api-version' => @client.api_version},
      headers: request_headers.merge(custom_headers || {}),
      base_url: request_url
  }
  promise = @client.make_request_async(:get, path_template, options)

  promise = promise.then do |result|
    http_response = result.response
    status_code = http_response.status
    response_content = http_response.body
    # Anything other than 200 is surfaced as an AzureOperationError.
    unless status_code == 200
      error_model = JSON.load(response_content)
      fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
    end

    # Propagate the tracing ids returned by the service onto the result.
    result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
    result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
    result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
    # Deserialize Response
    if status_code == 200
      begin
        parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
        result_mapper = Azure::Batch::Mgmt::V2019_08_01::Models::Application.mapper()
        result.body = @client.deserialize(result_mapper, parsed_response)
      rescue Exception => e
        fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
      end
    end

    result
  end

  promise.execute
end
#
# Updates settings for the specified application.
#
# @param resource_group_name [String] The name of the resource group that
# contains the Batch account.
# @param account_name [String] The name of the Batch account.
# @param application_name [String] The name of the application. This must be
# unique within the account.
# @param parameters [Application] The parameters for the request.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Application] operation results, or nil if there was no response.
#
def update(resource_group_name, account_name, application_name, parameters, custom_headers:nil)
  response = update_async(resource_group_name, account_name, application_name, parameters, custom_headers:custom_headers).value!
  response.nil? ? nil : response.body
end
#
# Updates settings for the specified application.
#
# @param resource_group_name [String] The name of the resource group that
# contains the Batch account.
# @param account_name [String] The name of the Batch account.
# @param application_name [String] The name of the application. This must be
# unique within the account.
# @param parameters [Application] The parameters for the request.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def update_with_http_info(resource_group_name, account_name, application_name, parameters, custom_headers:nil)
  promise = update_async(resource_group_name, account_name, application_name, parameters, custom_headers:custom_headers)
  promise.value!
end
#
# Updates settings for the specified application.
#
# @param resource_group_name [String] The name of the resource group that
# contains the Batch account.
# @param account_name [String] The name of the Batch account.
# @param application_name [String] The name of the application. This must be
# unique within the account.
# @param parameters [Application] The parameters for the request.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def update_async(resource_group_name, account_name, application_name, parameters, custom_headers:nil)
  # Client-side validation of required arguments and service constraints.
  fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
  fail ArgumentError, 'account_name is nil' if account_name.nil?
  fail ArgumentError, "'account_name' should satisfy the constraint - 'MaxLength': '24'" if !account_name.nil? && account_name.length > 24
  fail ArgumentError, "'account_name' should satisfy the constraint - 'MinLength': '3'" if !account_name.nil? && account_name.length < 3
  # NOTE(review): the doubled anchors ('^^...$$') are code-generator artifacts; harmless in Ruby regexes.
  fail ArgumentError, "'account_name' should satisfy the constraint - 'Pattern': '^[-\w\._]+$'" if !account_name.nil? && account_name.match(Regexp.new('^^[-\w\._]+$$')).nil?
  fail ArgumentError, 'application_name is nil' if application_name.nil?
  fail ArgumentError, "'application_name' should satisfy the constraint - 'MaxLength': '64'" if !application_name.nil? && application_name.length > 64
  fail ArgumentError, "'application_name' should satisfy the constraint - 'MinLength': '1'" if !application_name.nil? && application_name.length < 1
  fail ArgumentError, "'application_name' should satisfy the constraint - 'Pattern': '^[a-zA-Z0-9_-]+$'" if !application_name.nil? && application_name.match(Regexp.new('^^[a-zA-Z0-9_-]+$$')).nil?
  fail ArgumentError, 'parameters is nil' if parameters.nil?
  fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
  fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?

  # Standard ARM headers; each request gets a fresh client request id for tracing.
  request_headers = {}
  request_headers['Content-Type'] = 'application/json; charset=utf-8'

  # Set Headers
  request_headers['x-ms-client-request-id'] = SecureRandom.uuid
  request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?

  # Serialize Request
  request_mapper = Azure::Batch::Mgmt::V2019_08_01::Models::Application.mapper()
  request_content = @client.serialize(request_mapper,  parameters)
  request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil

  path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Batch/batchAccounts/{accountName}/applications/{applicationName}'

  request_url = @base_url || @client.base_url

  options = {
      middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
      path_params: {'resourceGroupName' => resource_group_name,'accountName' => account_name,'applicationName' => application_name,'subscriptionId' => @client.subscription_id},
      query_params: {'api-version' => @client.api_version},
      body: request_content,
      headers: request_headers.merge(custom_headers || {}),
      base_url: request_url
  }
  # PATCH: partial update of the application's settings.
  promise = @client.make_request_async(:patch, path_template, options)

  promise = promise.then do |result|
    http_response = result.response
    status_code = http_response.status
    response_content = http_response.body
    # Anything other than 200 is surfaced as an AzureOperationError.
    unless status_code == 200
      error_model = JSON.load(response_content)
      fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
    end

    # Propagate the tracing ids returned by the service onto the result.
    result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
    result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
    result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
    # Deserialize Response
    if status_code == 200
      begin
        parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
        result_mapper = Azure::Batch::Mgmt::V2019_08_01::Models::Application.mapper()
        result.body = @client.deserialize(result_mapper, parsed_response)
      rescue Exception => e
        fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
      end
    end

    result
  end

  promise.execute
end
#
# Lists all of the applications in the specified account.
#
# @param resource_group_name [String] The name of the resource group that
# contains the Batch account.
# @param account_name [String] The name of the Batch account.
# @param maxresults [Integer] The maximum number of items to return in the
# response.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Array<Application>] all applications, gathered across every page.
#
def list(resource_group_name, account_name, maxresults:nil, custom_headers:nil)
  # Fetch the first page lazily, then drain the remaining pages.
  list_as_lazy(resource_group_name, account_name, maxresults:maxresults, custom_headers:custom_headers).get_all_items
end
#
# Lists all of the applications in the specified account.
#
# @param resource_group_name [String] The name of the resource group that
# contains the Batch account.
# @param account_name [String] The name of the Batch account.
# @param maxresults [Integer] The maximum number of items to return in the
# response.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_with_http_info(resource_group_name, account_name, maxresults:nil, custom_headers:nil)
  promise = list_async(resource_group_name, account_name, maxresults:maxresults, custom_headers:custom_headers)
  promise.value!
end
#
# Lists all of the applications in the specified account.
#
# @param resource_group_name [String] The name of the resource group that
# contains the Batch account.
# @param account_name [String] The name of the Batch account.
# @param maxresults [Integer] The maximum number of items to return in the
# response.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_async(resource_group_name, account_name, maxresults:nil, custom_headers:nil)
  # Client-side validation of required arguments and service constraints.
  fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
  fail ArgumentError, 'account_name is nil' if account_name.nil?
  fail ArgumentError, "'account_name' should satisfy the constraint - 'MaxLength': '24'" if !account_name.nil? && account_name.length > 24
  fail ArgumentError, "'account_name' should satisfy the constraint - 'MinLength': '3'" if !account_name.nil? && account_name.length < 3
  # NOTE(review): the doubled anchors ('^^...$$') are code-generator artifacts; harmless in Ruby regexes.
  fail ArgumentError, "'account_name' should satisfy the constraint - 'Pattern': '^[-\w\._]+$'" if !account_name.nil? && account_name.match(Regexp.new('^^[-\w\._]+$$')).nil?
  fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
  fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?

  # Standard ARM headers; each request gets a fresh client request id for tracing.
  request_headers = {}
  request_headers['Content-Type'] = 'application/json; charset=utf-8'

  # Set Headers
  request_headers['x-ms-client-request-id'] = SecureRandom.uuid
  request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
  path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Batch/batchAccounts/{accountName}/applications'

  request_url = @base_url || @client.base_url

  options = {
      middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
      path_params: {'resourceGroupName' => resource_group_name,'accountName' => account_name,'subscriptionId' => @client.subscription_id},
      query_params: {'maxresults' => maxresults,'api-version' => @client.api_version},
      headers: request_headers.merge(custom_headers || {}),
      base_url: request_url
  }
  promise = @client.make_request_async(:get, path_template, options)

  promise = promise.then do |result|
    http_response = result.response
    status_code = http_response.status
    response_content = http_response.body
    # Anything other than 200 is surfaced as an AzureOperationError.
    unless status_code == 200
      error_model = JSON.load(response_content)
      fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
    end

    # Propagate the tracing ids returned by the service onto the result.
    result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
    result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
    result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
    # Deserialize Response
    if status_code == 200
      begin
        parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
        result_mapper = Azure::Batch::Mgmt::V2019_08_01::Models::ListApplicationsResult.mapper()
        result.body = @client.deserialize(result_mapper, parsed_response)
      rescue Exception => e
        fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
      end
    end

    result
  end

  promise.execute
end
#
# Lists all of the applications in the specified account.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ListApplicationsResult] operation results, or nil if there was no
# response.
#
def list_next(next_page_link, custom_headers:nil)
  response = list_next_async(next_page_link, custom_headers:custom_headers).value!
  response.nil? ? nil : response.body
end
#
# Lists all of the applications in the specified account.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_next_with_http_info(next_page_link, custom_headers:nil)
  promise = list_next_async(next_page_link, custom_headers:custom_headers)
  promise.value!
end
#
# Lists all of the applications in the specified account.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_next_async(next_page_link, custom_headers:nil)
  fail ArgumentError, 'next_page_link is nil' if next_page_link.nil?

  # Standard ARM headers; each request gets a fresh client request id for tracing.
  request_headers = {}
  request_headers['Content-Type'] = 'application/json; charset=utf-8'

  # Set Headers
  request_headers['x-ms-client-request-id'] = SecureRandom.uuid
  request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
  # The next-page link is already a fully-formed URL path: substitute it
  # verbatim (skip_encoding) into the template.
  path_template = '{nextLink}'

  request_url = @base_url || @client.base_url

  options = {
      middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
      skip_encoding_path_params: {'nextLink' => next_page_link},
      headers: request_headers.merge(custom_headers || {}),
      base_url: request_url
  }
  promise = @client.make_request_async(:get, path_template, options)

  promise = promise.then do |result|
    http_response = result.response
    status_code = http_response.status
    response_content = http_response.body
    # Anything other than 200 is surfaced as an AzureOperationError.
    unless status_code == 200
      error_model = JSON.load(response_content)
      fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
    end

    # Propagate the tracing ids returned by the service onto the result.
    result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
    result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
    result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
    # Deserialize Response
    if status_code == 200
      begin
        parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
        result_mapper = Azure::Batch::Mgmt::V2019_08_01::Models::ListApplicationsResult.mapper()
        result.body = @client.deserialize(result_mapper, parsed_response)
      rescue Exception => e
        fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
      end
    end

    result
  end

  promise.execute
end
#
# Lists all of the applications in the specified account.
#
# @param resource_group_name [String] The name of the resource group that
# contains the Batch account.
# @param account_name [String] The name of the Batch account.
# @param maxresults [Integer] The maximum number of items to return in the
# response.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ListApplicationsResult] which provide lazy access to pages of the
# response.
#
def list_as_lazy(resource_group_name, account_name, maxresults:nil, custom_headers:nil)
  response = list_async(resource_group_name, account_name, maxresults:maxresults, custom_headers:custom_headers).value!
  unless response.nil?
    page = response.body
    # Wire up the pager so iterating past the first page transparently
    # fetches subsequent pages via their NextLink.
    page.next_method = Proc.new do |next_page_link|
      list_next_async(next_page_link, custom_headers:custom_headers)
    end
    page
  end
end
end
end
| Azure/azure-sdk-for-ruby | management/azure_mgmt_batch/lib/2019-08-01/generated/azure_mgmt_batch/application_operations.rb | Ruby | mit | 35,396 |
#include <stdio.h>
#include <sstream>
#include <bitset>
#include "CVar.h"
#include "../dbnet/db_net.hpp"
/**
 * Constructs a variable bound to a DB-Net register.
 * @param iWid  register/word id used for DB-Net reads
 * @param iType value type (TYPE_INT / TYPE_LONG / TYPE_FLOAT)
 * @param sRem  human-readable remark describing the variable
 */
CVar::CVar(int iWid, int iType, std::string sRem) : m_iWid(iWid), m_iType(iType), m_sRem(sRem)
{
	m_bSync = false;
	// NOTE(review): m_iAttempt was previously never initialized here, yet
	// Sync() increments it and Write() reports it — reading an
	// uninitialized member is undefined behavior unless the header
	// initializes it (not visible from this file). Zero it explicitly;
	// this is safe even if the header also initializes it.
	m_iAttempt = 0;
}
// Appends a new bit alias (bit index + remark) to this variable's alias list.
// Ownership of the heap-allocated CAlias passes to CBase's item list.
void CVar::AddAlias(int iBit, std::string sRem)
{
	CAlias* pNewAlias = new CAlias(iBit, sRem);
	CBase::AddItem(pNewAlias);
}
// Returns the most recently added alias, as stored by CBase.
CAlias* CVar::GetLastAlias()
{
	return CBase::GetLastItem();
}
/*
SAlias* CVar::GetAliasByBit(int iBit)
{
for (CAlias* pAlias = m_pAliases; pAlias; pAlias = pAlias->m_pNext)
{
if (pAlias->m_iBit == iBit)
{
return pAlias;
}
}
return NULL;
}
*/
// Reads the current register value from the DB-Net bus into m_Data.
// Every call counts as one attempt; on success the sync timestamp is
// recorded and m_bSync is set. Returns true on a successful read.
bool CVar::Sync()
{
	++m_iAttempt;
	if (DBnet_ReadReg(m_iWid, m_iType, m_Data) == SUCCESS)
	{
		time(&m_SyncTime);
		m_bSync = true;
		return true;
	}
	else
	{
		m_bSync = false;
		return false;
	}
}
// Renders the raw register buffer as text:
//   TYPE_INT   -> 8-bit binary string
//   TYPE_LONG  -> 16-bit binary string
//   TYPE_FLOAT -> decimal via "%f"
//   otherwise  -> empty string
std::string CVar::GetData()
{
	switch (m_iType)
	{
	case TYPE_INT:
		{
			std::bitset<8> bset(*(int *)m_Data);
			return bset.to_string();
		}
	case TYPE_LONG:
		{
			// NOTE(review): the buffer is read as int even for TYPE_LONG;
			// presumably correct on this platform's sizes — confirm.
			std::bitset<16> bset(*(int *)m_Data);
			return bset.to_string();
		}
	case TYPE_FLOAT:
		{
			char sRet[256];
			sprintf(sRet, "%f", *(float *)m_Data);
			return sRet;
		}
	default:
		return "";
	}
}
// Interprets the raw register buffer as a float. Only meaningful when
// m_iType == TYPE_FLOAT; no type check is performed.
float CVar::GetDataF()
{
	return *(float *)m_Data;
}
// Parses GetData()'s binary-string rendering back into a long (base 2).
// NOTE(review): only meaningful for TYPE_INT/TYPE_LONG — for TYPE_FLOAT or
// unknown types std::stol throws (input is "%f" text or empty).
long CVar::GetDataL()
{
	std::string sData = GetData();
	int long lData = std::stol(sData,nullptr,2);
	return lData;
}
// Produces a one-or-more-line human-readable report for this variable:
// wid, value, sync timestamp, attempt count and remark; for integer types
// each registered bit alias is listed with its ON/OFF state. If the
// variable never synced, an "unsynchronized" line is emitted instead.
std::string CVar::Write()
{
	std::stringstream s;
	if (m_bSync)
	{
		s << "wid: " << GetWid() << "\t hodnota: " << GetData() << "\t " << GetTime() << "\t" << GetAttemptCount() << "\t;" << GetRem() << std::endl;
		if (m_iType == TYPE_INT || m_iType == TYPE_LONG)
		{
			// Walk the alias list inherited from CBase (m_pFirst).
			for (CAlias* pAlias = m_pFirst; pAlias; pAlias = pAlias->m_pNext)
			{
				if (pAlias)
				{
					// Re-read the value as a 16-bit set and print the aliased bit.
					std::bitset<16> bset(*(long *)m_Data);
					s << "    bit: " << pAlias->m_iBit << "\t hodnota: " << (bset[pAlias->m_iBit] ? "ON" : "OFF") << "\t\t\t\t\t\t;" << pAlias->m_sRem << std::endl;
				}
			}
		}
	}
	else
	{
		s << "wid: " << GetWid() << "\t nesynchronizováno!!!\t\t\t\t" << GetAttemptCount() << "\t;" << GetRem() << std::endl;
	}
	return s.str();
}
// Formats the last sync timestamp as "YYYY-M-D H:MM:SS" (month, day and
// hour unpadded; minutes and seconds zero-padded).
// NOTE(review): localtime() returns a shared static buffer and is not
// thread-safe; fine for single-threaded use.
std::string CVar::GetTime()
{
	struct tm* timeinfo = localtime(&m_SyncTime);
	std::stringstream strs;
	strs << 1900 + timeinfo->tm_year << "-" << 1 + timeinfo->tm_mon << "-" << timeinfo->tm_mday << " ";
	strs << timeinfo->tm_hour << ":";
	timeinfo->tm_min < 10 ? (strs << "0" << timeinfo->tm_min << ":") : (strs << timeinfo->tm_min << ":");
	timeinfo->tm_sec < 10 ? (strs << "0" << timeinfo->tm_sec) : (strs << timeinfo->tm_sec);
	return strs.str();
}
| milosbem/myAmit | src/CVar.cpp | C++ | mit | 2,579 |
/*
 * Copyright (c) 2012-2014 André Bargull
 * Alle Rechte vorbehalten / All Rights Reserved.  Use is subject to license terms.
 *
 * <https://github.com/anba/es6draft>
 */

const {
  assertSame
} = Assert;

// 22.2.1.2 %TypedArray%: Different [[Prototype]] for copied ArrayBuffer depending on element types
// https://bugs.ecmascript.org/show_bug.cgi?id=2175

class MyArrayBuffer extends ArrayBuffer {}

let source = new Int8Array(new MyArrayBuffer(10));

// Copying into the SAME element type must preserve the subclassed
// prototype of the source buffer.
let copySameType = new Int8Array(source);
assertSame(copySameType.buffer.constructor, MyArrayBuffer);
assertSame(Object.getPrototypeOf(copySameType.buffer), MyArrayBuffer.prototype);

// ...and so must copying into a DIFFERENT element type.
let copyDifferentType = new Uint8Array(source);
assertSame(copyDifferentType.buffer.constructor, MyArrayBuffer);
assertSame(Object.getPrototypeOf(copyDifferentType.buffer), MyArrayBuffer.prototype);
| rwaldron/es6draft | src/test/scripts/suite/regress/bug2175.js | JavaScript | mit | 841 |
/* * @felipe de jesus | iti_fjpp@hotmail.com */
// Shows a "please wait" modal (bootbox) with a spinner, used before
// firing an AJAX request. Relies on the global bootbox and jQuery.
var msj_beforeSend=function (){
	bootbox.dialog({
		title: 'Vive Pueblos Mágicos',
		message: '<center><i class="fa fa-spinner fa-spin fa-3x msjOk"></i><br>Espere por favor.</center>'
		,onEscape : function() {}
	});
	//fa fa-times
	// Post-render styling of the freshly created modal.
	$('body .modal-body').addClass('text_25');
	$('.modal-title').css({
		'color'			: 'white',
		'text-align'	: 'left'
	});
	$('.close').css({
		'color'		: 'white',
		'font-size'	: 'x-large'
	});
};
// Basic e-mail shape check: word local part (dots/dashes allowed between
// word runs), "@", domain, and at least one 2-4 letter TLD segment.
var isValidEmail = function (mail) {
	var EMAIL_PATTERN = /^\w+([\.-]?\w+)*@\w+([\.-]?\w+)*(\.\w{2,4})+$/;
	return EMAIL_PATTERN.test(mail);
};
// Opens the map picker page in a centered 800x600 popup window.
// NOTE(review): coordx/coordy are created as implicit globals (no var),
// and base_url is expected to be defined globally — confirm.
function ventana(){
	coordx= screen.width ? (screen.width-200)/2 : 0;
	coordy= screen.height ? (screen.height-150)/2 : 0;
	window.open(base_url+ 'config/mapa','miventana','width=800,height=600,top=30,right='+coordx+',left='+coordy);
} ;
// Shows a dismissible error toast via the global Messenger library.
var msj_error_noti = function (msj) {
	var opts = {
		message: msj,
		type: 'error',
		showCloseButton: true
	};
	Messenger().post(opts);
};
// Shows a generic "server request failed" error toast via Messenger.
var msj_error_serve = function () {
	var opts = {
		message: 'Error al procesar la petición al servidor',
		type: 'error',
		showCloseButton: true
	};
	Messenger().post(opts);
};
// Shows a dismissible success/notification toast via Messenger.
var msj_success_noti = function (msj) {
	var opts = {
		message: msj,
		showCloseButton: true
	};
	Messenger().post(opts);
};
// Shows a success modal (bootbox) with an optional title and message.
// If redirec is given, navigates there when the user clicks "Aceptar".
var msj_ok=function (titulo,msj,redirec){
	bootbox.dialog({
		title: 'Vive Pueblos Mágicos | '+(titulo == undefined ? '' : titulo),
		message: '<center><i class="fa fa-check fa-4x msjOk"></i><br>'+(msj == undefined ? 'Acción realizado correctamente' : msj)+'</center>',
		closeButton: true,
		buttons: {
			success: {
				label		: 'Aceptar',
				className	: 'green-msj btn btn-primary btn-cons',
				callback	: function(result) {
					// Optional redirect after confirmation.
					if(redirec!=undefined){
						location.replace(redirec);
					}
				}
			}
		},onEscape : function() {}
	});
	//fa fa-times
	// Post-render styling of the freshly created modal.
	$('body .modal-body').addClass('text_25');
	$('.modal-title').css({
		'color'			: 'white',
		'text-align'	: 'left'
	});
	$('.close').css({
		'color'		: 'white',
		'font-size'	: 'x-large'
	});
}
// Shows an error modal (bootbox) with an optional title and message.
// Unlike msj_ok, confirming never redirects.
var msj_error=function (titulo,msj){
	bootbox.dialog({
		title: 'Vive Pueblos Mágicos | '+(titulo == undefined ? '' : titulo),
		message: '<center><i class="fa fa-times fa-4x msjOk"></i><br>'+(msj == undefined ? 'Error al realizar la acción' : msj)+'</center>',
		closeButton: true,
		buttons: {
			success: {
				label		: 'Aceptar',
				className	: 'green-msj btn btn-primary btn-cons',
				callback	: function(result) {
					//location.replace(base_url)
				}
			}
		},onEscape : function() {}
	});
	//fa
	// Post-render styling of the freshly created modal.
	$('body .modal-body').addClass('text_25');
	$('.modal-title').css({
		'color'			: 'white',
		'text-align'	: 'left'
	});
	$('.close').css({
		'color'		: 'white',
		'font-size'	: 'x-large'
	});
}
// Sets the document title, falling back to the site name when omitted.
var title = function (titulo) {
	if (titulo == undefined) {
		titulo = 'Vive Pueblos Mágicos';
	}
	document.title = titulo;
};
// Returns today's date as a human-readable Spanish string,
// e.g. "Lunes 1 de Enero de 2018".
var get_fecha = function () {
	var meses = ['Enero', 'Febrero', 'Marzo', 'Abril', 'Mayo', 'Junio', 'Julio', 'Agosto', 'Septiembre', 'Octubre', 'Noviembre', 'Diciembre'];
	var diasSemana = ['Domingo', 'Lunes', 'Martes', 'Miércoles', 'Jueves', 'Viernes', 'Sábado'];
	var hoy = new Date();
	return diasSemana[hoy.getDay()] + ' ' + hoy.getDate() + ' de ' + meses[hoy.getMonth()] + ' de ' + hoy.getFullYear();
};
// Returns today's date as "yyyy/mm/dd" with zero-padded month and day.
var fecha_yyyy_mm_dd = function () {
	var pad = function (n) { return n < 10 ? '0' + n : n; };
	var hoy = new Date();
	return hoy.getFullYear() + '/' + pad(hoy.getMonth() + 1) + '/' + pad(hoy.getDate());
};
// Returns today's date as "dd/mm/yyyy" with zero-padded day and month.
var fecha_dd_mm_yyyy = function () {
	var pad = function (n) { return n < 10 ? '0' + n : n; };
	var hoy = new Date();
	return pad(hoy.getDate()) + '/' + pad(hoy.getMonth() + 1) + '/' + hoy.getFullYear();
};
| bienTICS/LuchaVSCancerInfantil | assets/js/mensajes.js | JavaScript | mit | 4,861 |
<?php

namespace FunQ\Bundle\FunQBundle\Entity;

use Doctrine\ORM\EntityRepository;

/**
 * UserRepository
 *
 * This class was generated by the Doctrine ORM. Add your own custom
 * repository methods below.
 */
class UserRepository extends EntityRepository
{
    // Intentionally empty: all default finders are inherited from
    // Doctrine's EntityRepository.
}
| jflash49/capstone-funq | src/FunQ/Bundle/FunQBundle/Entity/UserRepository.php | PHP | mit | 267 |
<?php
// Handles the "Use Any Font" API-key form: validates a submitted key
// against the remote conversion service and stores/removes it via the
// WordPress options API.
if (isset($_POST['ucf_api_key_submit'])){
	// Ask the remote service whether the submitted key is valid.
	// NOTE(review): $_POST['uaf_api_key'] is used unsanitized and the form
	// has no nonce check — consider sanitize_text_field()/urlencode() plus
	// check_admin_referer() here.
	$api_key_return = wp_remote_get('http://dnesscarkey.com/font-convertor/api/validate_key.php?license_key='.$_POST['uaf_api_key']);
	if ( is_wp_error( $api_key_return ) ) {
		$error_message = $api_key_return->get_error_message();
		$api_message = "Something went wrong: $error_message";
	} else {
		$api_key_return = json_decode($api_key_return['body']);
		// Persist the key only when the service confirms it.
		if ($api_key_return->status == 'success'){
			update_option('uaf_api_key', $_POST['uaf_api_key']);
		}
		$api_message = $api_key_return->msg;
	}
}
// Key removal request.
if (isset($_POST['ucf_api_key_remove'])){
	delete_option('uaf_api_key');
	$api_message = 'Your Activation key has been removed';
}
// Currently stored key (false/empty when not set) drives the form below.
$uaf_api_key = get_option('uaf_api_key');
?>
<?php if (!empty($api_message)):?>
<div class="updated" id="message"><p><?php echo $api_message ?></p></div>
<?php endif; ?>
<div class="wrap">
<h2>Use Any Font</h2>
<table width="100%">
<tr>
<td valign="top">
<table class="wp-list-table widefat fixed bookmarks">
<thead>
<tr>
<th>API KEY</th>
</tr>
</thead>
<tbody>
<tr>
<td>
<form action="admin.php?page=uaf_settings_page" method="post" >
API KEY :
<?php if (empty($uaf_api_key)): ?>
<input name="uaf_api_key" type="text" style="width:350px; margin-left:50px;" />
<input type="submit" name="ucf_api_key_submit" class="button-primary" value="Verify" style="padding:2px;" />
<br/> <br/>
Please keep the API key to start using this plugin. Offer your contribution (Free to $100) and get the API key from <a href="http://dnesscarkey.com/font-convertor/api/" target="_blank">here</a>.<br/>
<?php else: ?>
<span class="active_key"><?php echo $uaf_api_key; ?> - Active</span> <input type="submit" name="ucf_api_key_remove" class="button-primary" value="Remove Key" style="padding:2px; margin-left:20px;" onclick="if(!confirm('Are you sure ?')){return false;}" />
<?php endif;?>
</form>
<br/>
<strong>Note</strong> : API key is need to connect to our server for font conversion. Our server converts your fonts to required types and sends it back.
<br/><br/>
</td>
</tr>
</tbody>
</table>
<br/> | creative2020/kelley | wp-content/plugins/use-any-font/includes/uaf_header.php | PHP | mit | 2,767 |
// @flow
import { Parser, DomHandler } from 'htmlparser2';
const textToDOM = (html: string): any => {
const handler = new DomHandler();
const parser = new Parser(handler);
parser.write(html);
parser.done();
return handler.dom;
};
export default textToDOM;
| RafalFilipek/jsxfromhtml | src/textToDOM.js | JavaScript | mit | 269 |
require 'boker_tov_bot/message_handlers/base_handler'

module BokerTovBot
  module MessageHandlers
    # Replies with a random canned phrase whenever a message mentions
    # the name "ירון" (Yaron).
    class YaronHandler < BaseHandler
      def initialize(options = {})
        # Matches any message containing the name, multi-line and
        # case-insensitively.
        @regex = /.*ירון.*/mi
        @responses = ["טחח", "יא ז׳וז׳ו טחח", "פףף", "מה נז׳מע", "חתולה"]
        super(options)
      end

      # True when the (lowercased) message mentions the name.
      def match?(message)
        @regex.match(message.downcase) ? true : false
      end

      # Picks a random canned reply; reply_with_probability (inherited
      # from BaseHandler) decides whether a reply is sent at all.
      def response(message)
        reply_with_probability do
          [:text, @responses.sample]
        end
      end
    end
  end
end
| amitizle/boker-tov-bot | lib/boker_tov_bot/message_handlers/yaron_handler.rb | Ruby | mit | 572 |
<?php

/*
 * This file is part of the Sociable package.
 *
 * Copyright 2013 by Sébastien Pujadas
 *
 * For the full copyright and licence information, please view the LICENCE
 * file that was distributed with this source code.
 */

namespace Sociable\Utility;

/**
 * Validates Skype names: a leading letter followed by letters, digits,
 * underscores, hyphens, commas and periods, 5-31 characters in total.
 */
abstract class SkypeValidator {
    const SKYPE_MIN_LENGTH = 5;
    const SKYPE_MAX_LENGTH = 31;

    const EXCEPTION_INVALID_SKYPE_NAME = 'invalid skype name';

    /**
     * Validates a candidate Skype name.
     *
     * @param string $name the name to validate
     * @throws SkypeException if the name does not match the allowed pattern
     */
    public static function validateName($name) {
        // Length (5-31) and string-ness are enforced here first.
        StringValidator::validate($name, array(
            'min_length' => self::SKYPE_MIN_LENGTH,
            'max_length' => self::SKYPE_MAX_LENGTH
            )
        );

        // https://support.skype.com/en/faq/FA94/what-is-a-skype-name
        // Bug fix: the quantifier was previously {5,31}, which together with
        // the mandatory leading letter required 6-32 characters and so
        // rejected every 5-character name that StringValidator had just
        // accepted. {4,30} makes the regex agree with the declared
        // SKYPE_MIN_LENGTH/SKYPE_MAX_LENGTH range of 5-31 total characters.
        if (!preg_match('/^[a-zA-Z][a-zA-Z0-9_\-\,\.]{4,30}$/i', $name)) {
            throw new SkypeException(self::EXCEPTION_INVALID_SKYPE_NAME);
        }
    }
}
| spujadas/sociable | lib/Sociable/Utility/SkypeValidator.php | PHP | mit | 906 |
package me.Aron.Heinecke.VocableTrainer.lib;

import java.awt.Font;

import javax.swing.JButton;

/**
 * JButton variant that applies a caller-supplied font at construction time.
 * @author aron
 */
public class CButton extends JButton {
	private static final long serialVersionUID = 7603166773217229165L;

	/**
	 * Creates a button with the given label and font.
	 * @param text button label
	 * @param font font to apply to the label
	 */
	public CButton(String text, Font font) {
		super(text);
		setFont(font);
	}
}
| 0xpr03/VocableTrainer | src/me/Aron/Heinecke/VocableTrainer/lib/CButton.java | Java | mit | 353 |
from threading import Lock
import requests
from api.decorator import critical_section
from api.importer import AdditionalDataImporter
from api.importer import AdditionalDataImporterError
from api.importer.wiktionary import dyn_backend
rmi_lock = Lock()
class DictionaryImporter(AdditionalDataImporter):
    """Additional-data importer that pre-loads the word-id cache from the
    dynamic backend before importing."""

    def populate_cache(self, language):
        # Fetch every word row for the given language in one request
        # (PostgREST-style "eq." filter) and memoize (word, language) -> id.
        rq_params = {
            'language': 'eq.' + language
        }
        response = requests.get(dyn_backend.backend + '/word', rq_params)
        query = response.json()
        for json in query:
            # NOTE(review): loop variable shadows the stdlib "json" name;
            # harmless here, but worth renaming.
            self.word_id_cache[(json['word'], json['language'])] = json['id']
class TenyMalagasyImporter(DictionaryImporter):
    """Importer for tenymalagasy dictionary definitions."""
    # Backend key under which the definitions are stored.
    data_type = 'tenymalagasy/definition'
class RakibolanaMalagasyImporter(DictionaryImporter):
    """Importer for Rakibolana Malagasy entries.

    ``write_tif``/``write_raw`` temporarily retarget ``data_type`` so the
    shared ``write_additional_data`` machinery stores derived/raw variants;
    both are serialized on ``rmi_lock`` and treat importer errors as
    best-effort (ignored).
    """
    # Backend key under which plain definitions are stored.
    data_type = 'rakibolana/definition'

    @critical_section(rmi_lock)
    def write_tif(self, title, language, additional_data):
        """Store a derived ("tif") entry; importer errors are ignored."""
        temp = self.data_type
        self.data_type = 'rakibolana/derived'
        try:
            self.write_additional_data(title, language, additional_data)
        except AdditionalDataImporterError:
            # Best-effort: expected importer failures are deliberately ignored.
            pass
        finally:
            # Bug fix: previously data_type was only restored on the normal
            # path or on AdditionalDataImporterError — any other exception
            # left the instance stuck on 'rakibolana/derived'.
            self.data_type = temp

    @critical_section(rmi_lock)
    def write_raw(self, title, language, additional_data):
        """Store a raw entry; importer errors are ignored."""
        temp = self.data_type
        self.data_type = 'rakibolana/raw'
        try:
            self.write_additional_data(title, language, additional_data)
        except AdditionalDataImporterError:
            # Best-effort: expected importer failures are deliberately ignored.
            pass
        finally:
            # Same restoration guarantee as write_tif.
            self.data_type = temp

    def get_data(self, template_title: str, wikipage: str, language: str):
        # NOTE(review): intentionally a stub? Always returns None — confirm
        # whether subclasses/callers expect an implementation here.
        pass
| radomd92/botjagwar | api/importer/rakibolanamalagasy.py | Python | mit | 1,609 |
namespace FluentACS.Specs.Rules
{
    using FluentACS.Specs.Rules.Chaining;

    /// <summary>
    /// Fluent spec for selecting the output claim type of a rule.
    /// </summary>
    public class OutputClaimTypeSpec
    {
        private readonly RuleSpec owner;

        public OutputClaimTypeSpec(RuleSpec owner)
        {
            this.owner = owner;
        }

        /// <summary>Passes the first input claim type through unchanged.</summary>
        public IAfterThenOutputClaimTypeRuleSpec ShouldPassthroughFirstInputClaimType()
        {
            owner.OutputClaimType(BaseSpec.Passthrough);
            return owner;
        }

        /// <summary>Sets an explicit output claim type.</summary>
        public IAfterThenOutputClaimTypeRuleSpec ShouldBe(string outputClaimType)
        {
            owner.OutputClaimType(outputClaimType);
            return owner;
        }
    }
}
// Modules.
const gulp = require('gulp');
const config = require('../../config');
const path = require('path');

// Copies built CSS assets from the development tree into the package output.
gulp.task('package:css', () => {
  'use strict';
  const source = path.join(config.development.paths.css, '**/*');
  return gulp.src([source]).pipe(gulp.dest(config.package.paths.css));
});
| mpolizzotti/solitude | tasks/package/css.js | JavaScript | mit | 394 |
module EM
class LineProcessor < EM::P::LineAndTextProtocol
attr_accessor :on_line
def receive_line(line)
if on_line
EM.next_tick do
on_line.call line
end
end
end
end
end | minefold/em-process-buffer | lib/eventmachine/line_processor.rb | Ruby | mit | 224 |
import math
class VirtualScreen:  # this screen is normal to the Leap's Z axis
    """A virtual rectangular screen hovering in front of a Leap Motion.

    All coordinates are in millimetres, expressed in the Leap's axes.
    The screen is split into a 2x3 grid of zones (2 rows, 3 columns),
    numbered 1..6 left-to-right, top-to-bottom.
    """

    def __init__(self, Xoffset=0, Yoffset=50, Zoffset=-50, Zlimit=220, length=350, height=300):  # in mm
        # (Xoffset, Yoffset, Zoffset): position of the middle of the screen's
        # bottom edge relative to the Leap's centre.
        self.Xoffset = Xoffset
        self.Yoffset = Yoffset
        self.Zoffset = Zoffset
        self.Zlimit = Zlimit  # depth of the active zone
        self.length = length
        self.height = height
        self.UpperLeftCorner = [Xoffset - length / float(2), Yoffset + height]
        self.Center = [self.Xoffset, self.Yoffset + 0.5 * self.height, Zoffset + 0.5 * Zlimit]
        # Upper-left corner of each of the 6 zones (2 rows x 3 columns).
        self.zoneUpperLeftCornerArray = []
        self.zoneHeight = height / float(2)
        self.zoneLength = length / float(3)
        for i in range(0, 2):
            for j in range(0, 3):
                self.zoneUpperLeftCornerArray.append(
                    [self.UpperLeftCorner[0] + self.zoneLength * j,
                     self.UpperLeftCorner[1] - self.zoneHeight * i])

    def distanceFromScreen(self, position):
        """Euclidean distance from `position` to the screen's active box (0 inside)."""
        dX = max(max(position[0] - (self.Xoffset + self.length / float(2)), 0),
                 max(self.Xoffset - self.length / float(2) - position[0], 0))
        dY = max(max(position[1] - (self.Yoffset + self.height), 0),
                 max(self.Yoffset - position[1], 0))
        dZ = max(max(self.Zoffset - position[2], 0),
                 max(position[2] - (self.Zlimit + self.Zoffset), 0))
        return math.sqrt(dX ** 2 + dY ** 2 + dZ ** 2)

    def isFacingTheScreen(self, position):
        """True if a 3D point [x, y, z] (mm, Leap axes) lies inside the active box."""
        isXvalid = (position[0] <= self.Xoffset + self.length / float(2)) and (position[0] >= self.Xoffset - self.length / float(2))
        isYvalid = (position[1] <= self.Yoffset + self.height) and (position[1] >= self.Yoffset)
        isZvalid = (position[2] >= self.Zoffset) and (position[2] <= self.Zlimit + self.Zoffset)
        return isXvalid and isYvalid and isZvalid

    def getScreenZonePointedAt(self, position, direction):
        """Zone (1..6) hit by the ray from `position` along `direction`, or -1."""
        if not self.isFacingTheScreen(position):
            return -1
        # BUGFIX: a direction parallel to the screen plane (direction[2] == 0)
        # used to raise ZeroDivisionError; it cannot intersect the screen.
        if direction[2] == 0:
            return -1
        # Intersect the ray with the screen plane z == Zoffset.
        lambdaIntersection = (self.Zoffset - position[2]) / direction[2]
        xIntersection = position[0] + lambdaIntersection * direction[0]
        yIntersection = position[1] + lambdaIntersection * direction[1]
        return self.getScreenZoneFromPointOnScreen([xIntersection, yIntersection])

    def getScreenZoneFromPointOnScreen(self, onScreenPosition):
        """Zone (1..6) containing the 2D on-screen point, or -1 if outside."""
        for index, corner in enumerate(self.zoneUpperLeftCornerArray):
            if (onScreenPosition[0] >= corner[0] and onScreenPosition[0] < corner[0] + self.zoneLength
                    and onScreenPosition[1] <= corner[1] and onScreenPosition[1] >= corner[1] - self.zoneHeight):
                return index + 1
        return -1
| IIazertyuiopII/PDS_sonification | python/VirtualScreen.py | Python | mit | 2,759 |
# Devise unlocks controller, rendered with the standalone authentication layout.
class UnlocksController < Devise::UnlocksController
  layout 'authentication'
end
| flashlightdb/flashlightdb | app/controllers/unlocks_controller.rb | Ruby | mit | 82 |
const escapeStringRegexp = require('escape-string-regexp');
const query = require('../query/query');
const Sort = require('../helpers/sort');
const StringScore = require('../helpers/string-score');
/**
 * Maps each matched document to a copy carrying a `score` describing how
 * well its name matches the search string, sorted by score (descending).
 */
const scoreMatches = (matches, name) => {
  const withScores = matches.map(match => Object.assign(
    {},
    match,
    {
      score: StringScore(match.name, name),
    }
  ));
  return Sort.arrayOfObjectByKeyNumber(withScores, 'score', 'desc');
};

/**
 * Prefix-searches `collection` by name. Always resolves (never rejects) with
 * a `{ status, clientResponse }` envelope; query failures resolve with
 * status 500. `transform` post-processes the score-sorted matches.
 */
const searchCollection = (collection, name, label, transform) => (
  new Promise((resolve) => {
    if (!name) {
      resolve({
        status: 200,
        clientResponse: {
          status: 200,
          data: [],
          message: `${label} string searched`,
        },
      });
      return;
    }
    const escapedString = escapeStringRegexp(name);
    const re = new RegExp(`^${escapedString}`, 'i');
    query.get(collection, { name: { $regex: re } }, { })
      .then((matches) => {
        resolve({
          status: 200,
          clientResponse: {
            status: 200,
            data: transform(scoreMatches(matches, name)),
            message: `${label} string searched`,
          },
        });
      })
      .catch((error) => {
        resolve({
          status: 500,
          clientResponse: {
            status: 500,
            message: `There was an error querying the text string ${name}: ${error}`,
          },
        });
      })
    ;
  })
);

const DataSource = {
  // Cell-line search. Some cell lines share a name, so the _id is appended
  // to each name to disambiguate entries in the result list.
  // (Also fixes the original copy-paste bug where an empty cell search
  // reported "Species string searched".)
  cells: name => searchCollection('cells', name, 'Cell', sorted => (
    sorted.map(cell => Object.assign(
      {},
      cell,
      {
        name: `${cell.name}; ${cell._id}`,
      }
    ))
  )),
  // Species search; score-sorted matches are returned as-is.
  species: name => searchCollection('species', name, 'Species', sorted => sorted),
};
module.exports = DataSource;
| knightjdr/screenhits | api/app/modules/data-source/data-source.js | JavaScript | mit | 3,535 |
using CinchORM.AccessLayer;
using System;
using System.Collections.Generic;
using System.Data;
using System.Data.SqlClient;
using System.Linq;
using System.Reflection;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
namespace CinchORM
{
public class CinchMapping
{
public List<string> Columns { get; set; }
public string ColumnsString
{
get
{
if(this.Columns == null || Columns.Count == 0)
throw new ApplicationException(String.Format("Could not build ColumnsString because the objects ValuesQueryParams is null or empty"), new NullReferenceException());
return string.Join(",", this.Columns);
}
}
public string QueryString { get; set; }
public List<string> ValuesQueryParams { get; set; }
public string InsertValuesQueryParamsString
{
get
{
if(this.ValuesQueryParams == null || this.ValuesQueryParams.Count == 0)
throw new ApplicationException(String.Format("Could not build ValuesQueryParamsString because the objects ValuesQueryParams is null or empty"), new NullReferenceException());
return string.Join(",", this.ValuesQueryParams);
}
}
public string UpdateValuesQueryParamsString
{
get
{
if (this.ValuesQueryParams == null || this.ValuesQueryParams.Count == 0 || this.Columns == null || Columns.Count == 0)
throw new ApplicationException(String.Format("Could not build ValuesQueryParamsString because the objects ValuesQueryParams is null or empty"), new NullReferenceException());
string updateValuesQueryParamsString = null;
for (int i = 0; i < this.Columns.Count; i++)
{
string col = this.Columns[i];
string val = this.ValuesQueryParams[i];
updateValuesQueryParamsString = String.Format("{0}{1} = {2},", updateValuesQueryParamsString, col, val);
}
if (updateValuesQueryParamsString.Substring(updateValuesQueryParamsString.Length - 1, 1) == ",")
return updateValuesQueryParamsString.Substring(0, updateValuesQueryParamsString.Length - 1);
return updateValuesQueryParamsString;
}
}
public List<SqlParameter> SqlParams { get; set; }
}
public static class Mapper
{
internal static Dictionary<string, CinchMapping> _cinchMappingCache = new Dictionary<string, CinchMapping>();
public static CinchMapping MapProperties<T>(T obj, List<string> cols = null) where T : ModelBase
{
string cacheKey = String.Format("{0}_{1}", obj.ObjName, String.Join("-", cols.ToArray()));
CinchMapping cinchMapping = null;
if(!_cinchMappingCache.ContainsKey(cacheKey))
{
PropertyInfo[] props = obj.GetType().GetProperties();
List<string> columns = new List<string>();
List<string> valuesQueryParams = new List<string>();
List<SqlParameter> sqlParams = new List<SqlParameter>();
int i = 1;
foreach (PropertyInfo prop in props)
{
if (prop.Attributes.GetAttributeFrom<CinchIgnoreAttribute>(prop) != null ||
prop.DeclaringType == typeof(ModelBase) ||
(cols != null && cols.Contains(prop.Name)))
continue;
#if NET40
object value = prop.GetValue(obj, null);
#else
object value = prop.GetValue(obj);
#endif
if (value == null)
continue;
Type t = prop.PropertyType;
string placeholder = String.Format("val{0}", i);
columns.Add(String.Format("{0}{1}{2}", SpecialCharacters.ColumnBegin, prop.Name, SpecialCharacters.ColumnEnd));
valuesQueryParams.Add(String.Format("{0}{1}", SpecialCharacters.ParamPrefix, placeholder));
sqlParams.AddParameter(placeholder, Conversion.GetSqlDbType(t), value);
i++;
}
cinchMapping = new CinchMapping()
{
Columns = columns,
ValuesQueryParams = valuesQueryParams,
SqlParams = sqlParams
};
_cinchMappingCache.Add(cacheKey, cinchMapping);
}
else
{
cinchMapping = _cinchMappingCache[cacheKey];
}
return cinchMapping;
}
public static CinchMapping MapQuery<T>(T obj, string query, object[] param) where T : ModelBase
{
string cacheKey = String.Format("{0}_{1}_{2}", obj.ObjName, query, String.Join("-", param));
CinchMapping cinchMapping = null;
if(!_cinchMappingCache.ContainsKey(cacheKey))
{
cinchMapping = new CinchMapping() { QueryString = query };
if (param != null && param.Count() > 0)
{
//where clause has params, but no param values were passed in.
if (!String.IsNullOrWhiteSpace(query) && query.IndexOf('@') > -1 && param.Count() <= 0)
throw new ApplicationException(String.Format("Could not execute Find for {0} because the parameters array is empty", obj.ObjName), new NullReferenceException());
//param counts don't match
else if (query.Count(c => c == '@') != param.Count())
throw new ApplicationException(String.Format("Could not execute Find for {0} because the number of parameters in the where clause and parameters array do not match", obj.ObjName), new NullReferenceException());
List<SqlParameter> sqlParams = BuildParamsFromString(query, param);
cinchMapping.SqlParams = sqlParams;
}
_cinchMappingCache.Add(cacheKey, cinchMapping);
}
else
{
cinchMapping = _cinchMappingCache[cacheKey];
}
return cinchMapping;
}
public static CinchMapping MapQuery(string query, object[] param)
{
string cacheKey = String.Format("{0}_{1}", query, String.Join("-", param));
CinchMapping cinchMapping = null;
if (!_cinchMappingCache.ContainsKey(cacheKey))
{
cinchMapping = new CinchMapping() { QueryString = query };
if (param != null && param.Count() > 0)
{
//where clause has params, but no param values were passed in.
if (!String.IsNullOrWhiteSpace(query) && query.IndexOf('@') > -1 && param.Count() <= 0)
throw new ApplicationException(String.Format("Could not execute Find for \"{0}\" because the parameters array is empty", query), new NullReferenceException());
//param counts don't match
else if (query.Count(c => c == '@') != param.Count())
throw new ApplicationException(String.Format("Could not execute Find for \"{0}\" because the number of parameters in the where clause and parameters array do not match", query), new NullReferenceException());
List<SqlParameter> sqlParams = BuildParamsFromString(query, param);
cinchMapping.SqlParams = sqlParams;
}
_cinchMappingCache.Add(cacheKey, cinchMapping);
}
else
{
cinchMapping = _cinchMappingCache[cacheKey];
}
return cinchMapping;
}
private static List<SqlParameter> BuildParamsFromString(string query, object[] param)
{
Regex regex = new Regex("@[A-Za-z0-9]+");
MatchCollection matches = regex.Matches(query);
//param matches don't match param array
if (matches.Count != param.Count())
throw new ApplicationException(String.Format("Could not build Params because the number of parameters in the where clause and parameters array do not match"), new NullReferenceException());
List<SqlParameter> sqlParams = new List<SqlParameter>();
for (int i = 0; i < matches.Count; i++)
{
Match match = matches[i];
object value = param[i];
//add sql param
sqlParams.AddParameter(match.Value, Conversion.GetSqlDbType(value.GetType()), value);
}
return sqlParams;
}
}
}
| pimbrouwers/cinch | Cinch/Mapping/Mapper.cs | C# | mit | 9,003 |
<?php
namespace rkgrep\Locales\Contracts;
interface Driver
{

    /**
     * Retrieve a locale by unique identifier.
     *
     * @param mixed $code
     * @return \rkgrep\Locales\Contracts\Locale|null
     */
    public function retrieveByCode($code);

    /**
     * Retrieve list of locale codes or names keyed by codes.
     *
     * @param string|null $name
     * @return mixed
     */
    public function getList($name = null);
} | rkgrep/laravel-locales | src/Contracts/Driver.php | PHP | mit | 428 |
/// <reference path="../../all.d.ts" />
module OptionsApp {
    // Registers the options application module: analytics configuration,
    // language setup, page tracking, the options controller, a styled
    // checkbox directive and a thousands-suffix number filter.
    angular.module('OptionsApp', ['HeaderApp', 'gettext', 'angular-google-analytics'])
        .config(Helpers.setAnaliticSetting)
        .run(Helpers.setCurrentLanguage)
        .run(Helpers.trackPage)
        .controller('OptionsCtrl', OptionsCtrl)
        // Renders a checkbox plus Font Awesome stacked icons bound to `param`.
        .directive('checkboxOption', () => {
            return {
                template: `<input ng-model="param" type="checkbox"/>
                        <span class="fa-stack">
                            <i class="fa fa-square fa-stack-2x"></i>
                            <i ng-if="param" class="fa fa-check fa-stack-1x fa-inverse"></i>
                        </span>`,
                scope: {
                    param: '=?'
                }
            }
        })
        // Formats large numbers with K/M/G/... suffixes, e.g. 1500 -> "1.5K"
        // (values below 1000 pass through; non-numbers yield null).
        .filter('kFilter', function () {
            return function (input: any, decimals: any) {
                const suffixes = ['K', 'M', 'G', 'T', 'P', 'E'];
                if(isNaN(input)) return null;
                if(input < 1000) return input;
                const exp = Math.floor(Math.log(input) / Math.log(1000));
                return (input / Math.pow(1000, exp)).toFixed(decimals) + suffixes[exp - 1];
            };
        });
}
| cawa-93/vknotice | src/OptionsApp/js/OptionsApp.ts | TypeScript | mit | 983 |
// Backbone router scaffold; `appname` and `name` are interpolated by the
// grunt generator when this template is rendered.
<%= grunt.util._.camelize(appname) %>.Routers.<%= _.classify(name) %>Router = Backbone.Router.extend({
  routes: {
    "login" : "login"
  },
  // Called once when the router is instantiated.
  initialize : function(){
    var self = this;
  },
  // Handler for the #login route.
  login: function(){
    var self = this;
  }
});
| posabsolute/backbone_generate | templates/router.js | JavaScript | mit | 235 |
class Ranking { // star-rating widget controller for .curiosity-ranking lists
    constructor() {
        // Reset the shared `ranking` state to its defaults.
        ranking.colorActive = "#2262ae"; // colour of active (filled) stars
        ranking.colorUnActive = "rgba(46,46,46,0.75)"; // colour of inactive stars
        ranking.stars = 0; // whole-star count of the current rating
        ranking.averageStars = 0; // raw average rating
    }

    // Re-renders every ranking element using `color` for the active stars.
    show(color) {
        ranking.colorActive = color;
        ranking.colorUnActive = "rgba(46,46,46,0.75)";
        ranking.stars = 0;
        ranking.averageStars = 0;
        $.each($(".curiosity-ranking"), function (indx, element) {
            element.style = "display:block"; // make the ranking visible
            ranking.averageStars = element.getAttribute("data-stars");
            ranking.stars = (Math.floor(ranking.averageStars));
            $(element).find(".star-text").text(ranking.averageStars);
            ranking.runRankingSetColors($(element), ranking.stars);
        });
    }

    init() {
        this.setDatasToRankings();
    }

    // Reads each element's data-stars attribute, paints its stars and wires
    // up the hover handlers.
    setDatasToRankings() {
        var these = this;
        $.each($(".curiosity-ranking"), function (indx, element) {
            element.style = "display:block";
            ranking.averageStars = element.getAttribute("data-stars");
            ranking.stars = (Math.floor(ranking.averageStars));
            $(element).find(".star-text").text(ranking.averageStars);
            $.each($(element).children(), function (index, star) {
                star.addEventListener("mouseover", these.hoverStar, false);
                star.addEventListener("mouseout", these.mouseOut, false);
                if (index <= ranking.stars) {
                    star.style = "color:" + ranking.colorActive + ";";
                }
            });
        });
    }

    // Updates one ranking element (given any of its star items) with a new average.
    uploadStars(itemRanking, averageStars) {
        // BUGFIX: the original tested `averageStars > 5 && averageStars < 0`,
        // which can never be true, so out-of-range values were accepted.
        if (averageStars > 5 || averageStars < 0) {
            console.error("El segundo parametros recivido tiene que ser menor que 5 y mayor que 0");
        } else {
            var parent = itemRanking.parentNode; // the ranking element (ul)
            var stars = Math.floor(averageStars);
            parent.setAttribute("data-stars", averageStars);
            $(parent).find(".star-text").text(averageStars);
            ranking.runRankingSetColors($(parent), stars);
        }
    }

    // mouseover: preview the rating up to the hovered star.
    hoverStar(event) {
        var starIndex = $(event.target).index();
        ranking.runRankingSetColors($(event.target.parentNode), starIndex);
    }

    // mouseout: restore the stored rating from data-stars.
    mouseOut(event) {
        var limitStars = Math.floor(event.target.parentNode.getAttribute("data-stars"));
        ranking.runRankingSetColors($(event.target.parentNode), limitStars);
    }

    // Sets the active-star colour after validating a hex/rgb colour string.
    setColorActive(color) {
        if (/^#[0-9][a-fA-F]{6}$/.test(color) || /^rgb\([0-9]{1,3}\,[0-9]{1,3}\,[0-9]{1,3}\)$/.test(color)) {
            ranking.colorActive = color;
        } else {
            console.error("El parametro de la funcion setBackgroundColor debe ser hexadecimal o rgb");
        }
    }

    // Sets the inactive-star colour after validating a hex/rgb colour string.
    setColorUnActive(color) {
        if (/^#[0-9][a-fA-F]{6}$/.test(color) || /^rgb\([0-9]{1,3}\,[0-9]{1,3}\,[0-9]{1,3}\)$/.test(color)) {
            ranking.colorUnActive = color;
        } else {
            console.error("El parametro de la funcion setBackgroundColor debe ser hexadecimal o rgb");
        }
    }

    // Attaches a click handler to every star item.
    setEventClick(clickfunction) {
        if ($.isFunction(clickfunction)) {
            $(".curiosity-ranking>li.item-ranking").click(clickfunction);
        } else {
            console.error("El parametro recibido tiene que ser una función");
        }
    }
};
// Shared mutable state and DOM helper used by the Ranking class.
var ranking = {
    colorActive: "",
    colorUnActive: "",
    stars: 0,
    averageStars: 0,
    // Paints the stars of `rankingElement`: indices up to `limitStars` get
    // the active colour, the rest the inactive colour.
    runRankingSetColors: function (rankingElement, limitStars) {
        $.each(rankingElement.children(), function (index, star) {
            star.style = index <= limitStars
                ? "color:" + ranking.colorActive + ";"
                : "color:" + ranking.colorUnActive + ";";
        });
    }
};
//end of document
| Curiosity-Education/curiosity-v.1.0 | public/packages/assets/js/ranking-curiosity.js | JavaScript | mit | 4,228 |
<?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
class CreateSurveyQuestionsTable extends Migration
{
    /**
     * Run the migrations.
     *
     * Creates the survey_questions table: one row per question attached
     * to a survey.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('survey_questions', function (Blueprint $table) {
            $table->increments('id');
            $table->integer("survey_id"); // owning survey
            $table->string("question"); // question text
            $table->string("data"); // presumably serialized options/metadata -- TODO confirm format
            $table->integer("order"); // display position within the survey
            $table->timestamps();
        });
    }

    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::dropIfExists('survey_questions');
    }
}
| VATUSA/api | database/migrations/2018_02_28_173734_create_survey_questions_table.php | PHP | mit | 774 |
<?php
namespace App\Extension\JWT\Providers;
use Illuminate\Support\ServiceProvider as ProviderContract;
use Illuminate\Database\Eloquent\Model;
use Illuminate\Support\Facades\Auth;
use App\Extension\JWT\Auth\Driver;
/**
 * Registers and bootstraps the "jwt" authentication driver.
 *
 * @author absolux
 */
class ServiceProvider extends ProviderContract {

  /**
   * Bootstrap JWT auth driver
   */
  public function boot() {
    Auth::extend('jwt', function($app) {
      return new Driver($this->createUserProvider($app));
    });
  }

  /**
   * create a user provider for auth driver
   *
   * @param mixed $app service container, used to resolve the hasher
   * @return UserProvider
   */
  protected function createUserProvider($app) {
    $repository = new TokenProvider();
    return new UserProvider($app['hash'], $repository, $this->createModel());
  }

  /**
   * Instantiate the model class configured under 'auth.model'.
   *
   * @return Model
   */
  protected function createModel() {
    $class = '\\'.ltrim(config('auth.model'), '\\');
    return new $class();
  }

  /**
   * Merge the package's default config into the 'jwt' config namespace.
   */
  public function register() {
    $config_path = __DIR__ . '/../config.php';
    $this->mergeConfigFrom($config_path, 'jwt');
  }
} | absolux/Collabor8-php-api | app/Extension/JWT/Providers/ServiceProvider.php | PHP | mit | 1,175 |
from rewpapi.common.http import Request
from rewpapi.listings.listing import ListingResidential
class RemoteListingImages(Request):
    """Read access to one listing's images via the remote API."""

    def __init__(self, base_site, auth, listing_type, listing_uuid):
        super(RemoteListingImages, self).__init__(auth)
        self._base_site = base_site
        self._auth = auth
        self._listing_type = listing_type
        self._listing_uuid = listing_uuid
        # REST endpoint for this listing's image collection.
        self._endpoint = base_site + "/api/listings/%s/%s/images/" % (
            listing_type, listing_uuid)

    def get_all(self):
        """
        Returns a list of Listing images
        """
        remote_listing_images = self.execute()
        listing_images = []
        if remote_listing_images:
            for a in remote_listing_images:
                # Wrap each returned image dict in a ListingImages object,
                # copying every field onto it and recording names in FIELDS.
                new_listing_images = ListingImages(self._base_site, self._auth,
                        self._listing_type, self._listing_uuid)
                new_listing_images.FIELDS = []
                for k, v in a.items():
                    setattr(new_listing_images, k, v)
                    new_listing_images.FIELDS.append(k)
                listing_images.append(new_listing_images)
            return listing_images
        # No images (or empty response) -> None rather than an empty list.
        return None

    def get(self, uuid):
        """
        Returns a single ListingImage instance, matching uuid.
        Raises a DoesNotExist exception if the object does not exist.
        """
        # NOTE(review): placeholder implementation -- ignores `uuid` and
        # returns a stubbed ListingResidential; confirm before relying on it.
        b = ListingResidential()
        b.branch_name = "Foo"
        return b
class ListingImages(RemoteListingImages):
    """
    A ListingImages object represents a Listing's images. Once instantiated,
    you can:

    - Change its values and send an update()
    - Create it if it doesn't exist
    """

    def set_fields(self, images):
        """Store the image objects to be sent by update()."""
        self.images = images

    def update(self):
        """
        Update this listing's images.
        """
        self._endpoint = self._base_site + "/api/listings/%s/%s/images/" % (
            self._listing_type, self._listing_uuid)
        # Serialize each image object into the dict shape the API expects.
        payload = [
            {'image': img.image, 'caption': img.caption, 'sha1': img.sha1}
            for img in self.images
        ]
        self.execute("PUT", payload)
| propdata/rewp-api | rewpapi/listings/images.py | Python | mit | 2,288 |
<?php
namespace Behat\BehatBundle\Console\Processor;
use Symfony\Component\DependencyInjection\ContainerInterface,
Symfony\Component\Console\Input\InputInterface,
Symfony\Component\Console\Output\OutputInterface;
use Behat\Behat\Console\Processor\ContextProcessor as BaseProcessor;
/*
* This file is part of the Behat\BehatBundle.
* (c) Konstantin Kudryashov <ever.zet@gmail.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
/**
* Context processor.
*
* @author Konstantin Kudryashov <ever.zet@gmail.com>
*/
class ContextProcessor extends BaseProcessor
{
    /**
     * {@inheritdoc}
     */
    public function process(ContainerInterface $container, InputInterface $input, OutputInterface $output)
    {
        // ignore context initialization if no features argument provided
        if (!$input->getArgument('features')) {
            return;
        }

        $container->get('behat.runner')->setMainContextClass(
            $this->getContextClass($container, $input)
        );
    }

    /**
     * {@inheritdoc}
     *
     * Resolves the bundle owning the features path (either the
     * "@BundleName/..." shorthand or by locating the real path inside a
     * registered bundle) and returns that bundle's context class.
     */
    protected function getContextClass(ContainerInterface $container, InputInterface $input)
    {
        // Strip a trailing ":<line>" scenario filter from the features path.
        $featuresPath = preg_replace('/\:\d+$/', '', $input->getArgument('features'));

        if (preg_match('/^\@([^\/\\\\]+)(.*)$/', $featuresPath, $matches)) {
            $bundleNamespace = $container->get('kernel')->getBundle($matches[1])->getNamespace();
        } else {
            foreach ($container->get('kernel')->getBundles() as $bundle) {
                if (false !== strpos(realpath($featuresPath), realpath($bundle->getPath()))) {
                    $bundleNamespace = $bundle->getNamespace();
                    break;
                }
            }
        }

        // NOTE(review): if the path matches no registered bundle,
        // $bundleNamespace is undefined here -- confirm intended behavior.
        return $container->get('behat.runner')->getContextClassForBundle($bundleNamespace);
    }
}
| Symfomany/Julovic | vendor/bundle/Behat/BehatBundle/Console/Processor/ContextProcessor.php | PHP | mit | 1,925 |
using GW2PAO.Modules.Cycles.ViewModels;
using System.Collections.ObjectModel;
namespace GW2PAO.Modules.Cycles.Interfaces
{
    /// <summary>
    /// Controller exposing world cycle data and notifications, with a
    /// configurable refresh interval and start/stop/shutdown lifecycle.
    /// </summary>
    public interface ICyclesController
    {
        /// <summary>
        /// The collection of World Cycles
        /// </summary>
        ObservableCollection<CycleViewModel> Cycles { get; }

        /// <summary>
        /// The collection of events for event notifications
        /// </summary>
        ObservableCollection<CycleViewModel> CycleNotifications { get; }

        /// <summary>
        /// The interval by which to refresh events (in ms)
        /// </summary>
        int CycleRefreshInterval { get; set; }

        /// <summary>
        /// The event tracker user data
        /// </summary>
        CyclesUserData UserData { get; }

        /// <summary>
        /// Starts the controller
        /// </summary>
        void Start();

        /// <summary>
        /// Stops the controller
        /// </summary>
        void Stop();

        /// <summary>
        /// Forces a shutdown of the controller, including all running timers/threads
        /// </summary>
        void Shutdown();
    }
} | kirkerafael/gw2pao | GW2PAO/Modules/Cycles/Interfaces/ICyclesController.cs | C# | mit | 970 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
# NOTE: autogenerated by AutoRest -- edits here will be lost on regeneration.
class CalculateExchangeOperations:
    """CalculateExchangeOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.reservations.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    async def _post_initial(
        self,
        body: "_models.CalculateExchangeRequest",
        **kwargs: Any
    ) -> Optional["_models.CalculateExchangeOperationResultResponse"]:
        # Internal: issues the initial POST of the long-running operation and
        # deserializes the immediate (200) or accepted (202) response.
        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.CalculateExchangeOperationResultResponse"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-10-01-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._post_initial.metadata['url']  # type: ignore

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'CalculateExchangeRequest')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.Error, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        response_headers = {}
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('CalculateExchangeOperationResultResponse', pipeline_response)

        # 202: calculation accepted; surface the async-operation tracking headers.
        if response.status_code == 202:
            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
            response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))

        if cls:
            return cls(pipeline_response, deserialized, response_headers)

        return deserialized
    _post_initial.metadata = {'url': '/providers/Microsoft.Capacity/calculateExchange'}  # type: ignore

    async def begin_post(
        self,
        body: "_models.CalculateExchangeRequest",
        **kwargs: Any
    ) -> AsyncLROPoller["_models.CalculateExchangeOperationResultResponse"]:
        """Calculates the refund amounts and price of the new purchases.

        Calculates price for exchanging ``Reservations`` if there are no policy errors.

        :param body: Request containing purchases and refunds that need to be executed.
        :type body: ~azure.mgmt.reservations.models.CalculateExchangeRequest
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either CalculateExchangeOperationResultResponse or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.reservations.models.CalculateExchangeOperationResultResponse]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.CalculateExchangeOperationResultResponse"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only issue the initial request when not resuming from a saved token.
        if cont_token is None:
            raw_result = await self._post_initial(
                body=body,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            deserialized = self._deserialize('CalculateExchangeOperationResultResponse', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_post.metadata = {'url': '/providers/Microsoft.Capacity/calculateExchange'}  # type: ignore
| Azure/azure-sdk-for-python | sdk/reservations/azure-mgmt-reservations/azure/mgmt/reservations/aio/operations/_calculate_exchange_operations.py | Python | mit | 7,926 |
#include <cstdio>
#include <vector>
using namespace std;
int k;
int s[13], result[6];
// Recursively enumerate every 6-number combination of s[start..k-1]
// (lottery picks). Chosen values accumulate in result[0..depth-1] and each
// completed combination is printed space-separated on its own line.
void dfs(int start, int depth) {
	// Base case: six numbers have been chosen -- print them.
	if (depth == 6) {
		for (int idx = 0; idx < 6; idx++)
			printf("%d ", result[idx]);
		printf("\n");
		return;
	}
	// Try each remaining candidate (in input order) as the next pick.
	for (int idx = start; idx < k; idx++) {
		result[depth] = s[idx];
		dfs(idx + 1, depth + 1);
	}
}
// Reads test cases until the sentinel 0 (or end of input). Each case is a
// count k followed by k candidate numbers; all 6-number combinations are
// printed via dfs(), with a blank line after each case.
int main() {
	// Compare scanf's result against 1 explicitly: scanf returns EOF (-1)
	// at end of input, which is truthy, so the original `scanf(...) && k`
	// would spin forever on input that lacks the trailing 0 sentinel
	// (k keeps its stale non-zero value).
	while (scanf("%d", &k) == 1 && k) {
		for (int i = 0; i < k; i++) {
			scanf("%d", &s[i]);
		}
		dfs(0, 0);
		printf("\n");
	}
}
| KimBoWoon/ACM-ICPC | ACM-ICPC/6603.cpp | C++ | mit | 633 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using OlympicGames.Core.Commands.Abstracts;
using OlympicGames.Core.Contracts;
using OlympicGames.Core.Factories;
using OlympicGames.Olympics.Contracts;
using OlympicGames.Utils;
namespace OlympicGames.Core.Commands
{
public class CreateSprinterCommand : Command, ICommand
{
private IDictionary<string, double> records = new Dictionary<string, double>();
private string command;
public CreateSprinterCommand(IList<string> commandParameters)
: base(commandParameters)
{
commandParameters.ValidateIfNull();
this.command = string.Join(" ", CommandParameters);
}
public override string Execute()
{
var commandTokens = command
.Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries)
.ToArray();
var firstName = commandTokens[0];
firstName.ValidateMinAndMaxLength(2, 20);
var lastName = commandTokens[1];
lastName.ValidateMinAndMaxLength(2, 20);
var country = commandTokens[2];
country.ValidateMinAndMaxLength(3, 25);
this.records = new Dictionary<string, double>();
foreach (var kvp in commandTokens.Skip(3).ToArray())
{
var kvpTokens = kvp.Split('/').ToArray();
records.Add(kvpTokens[0], double.Parse(kvpTokens[1]));
}
Committee.Olympians.Add(OlympicsFactory.Instance.CreateSprinter(firstName, lastName, country, records));
var commandOutput = new StringBuilder();
commandOutput.AppendLine("Created Sprinter");
commandOutput.AppendLine($"SPRINTER: {firstName} {lastName} from {country}");
commandOutput.AppendLine($"PERSONAL RECORDS:");
foreach (var kvp in records)
{
commandOutput.AppendLine($"{kvp.Key}m: {kvp.Value}s");
}
return commandOutput.ToString();
}
}
}
| Xadera/Telerik-Academy-Alpha | Module 1/OOP Exam/OlympicGamesSkeleton/OlympicGames/Core/Commands/CreateSprinterCommand.cs | C# | mit | 2,099 |
// Copyright (c) 2009-2010 Satoshi Nakamoto
// Copyright (c) 2009-2012 The Bitcoin developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "init.h"
#include "bitcoinrpc.h"
#include <boost/algorithm/string/predicate.hpp>
// Block until a shutdown has been requested, polling every 200ms, then
// interrupt all worker threads in the supplied group (if any).
void DetectShutdownThread(boost::thread_group* threadGroup)
{
    // Tell the main threads to shutdown.
    for (;;)
    {
        if (ShutdownRequested())
            break;
        MilliSleep(200);
    }
    if (threadGroup)
        threadGroup->interrupt_all();
}
//////////////////////////////////////////////////////////////////////////////
//
// Start
//
// Full daemon start-up: parses the command line and configuration file,
// prints help on request, dispatches command-line RPC invocations,
// daemonizes on non-Windows platforms when -daemon is set, and finally runs
// AppInit2() with a background shutdown-watcher thread.
// Returns true on successful initialization (or, when daemonizing, in the
// parent after a successful fork); false on any failure.
bool AppInit(int argc, char* argv[])
{
    boost::thread_group threadGroup;
    boost::thread* detectShutdownThread = NULL;
    bool fRet = false;
    try
    {
        //
        // Parameters
        //
        // If Qt is used, parameters/skidoo.conf are parsed in qt/bitcoin.cpp's main()
        ParseParameters(argc, argv);
        if (!boost::filesystem::is_directory(GetDataDir(false)))
        {
            fprintf(stderr, "Error: Specified directory does not exist\n");
            Shutdown();
            // NOTE(review): execution falls through after Shutdown() here
            // instead of returning false -- upstream Bitcoin returns at this
            // point; confirm the fall-through is intentional.
        }
        ReadConfigFile(mapArgs, mapMultiArgs);
        if (mapArgs.count("-?") || mapArgs.count("--help"))
        {
            // First part of help message is specific to skidood / RPC client
            std::string strUsage = _("Skidoo version") + " " + FormatFullVersion() + "\n\n" +
                _("Usage:") + "\n" +
                  "  skidood [options]                     " + "\n" +
                  "  skidood [options] <command> [params]  " + _("Send command to -server or skidood") + "\n" +
                  "  skidood [options] help                " + _("List commands") + "\n" +
                  "  skidood [options] help <command>      " + _("Get help for a command") + "\n";
            strUsage += "\n" + HelpMessage();
            fprintf(stdout, "%s", strUsage.c_str());
            return false;
        }
        // Command-line RPC: any non-switch argument (that is not a
        // "skidoo:" URI) means we act as an RPC client, not a server.
        for (int i = 1; i < argc; i++)
            if (!IsSwitchChar(argv[i][0]) && !boost::algorithm::istarts_with(argv[i], "skidoo:"))
                fCommandLine = true;
        if (fCommandLine)
        {
            if (!SelectParamsFromCommandLine()) {
                fprintf(stderr, "Error: invalid combination of -regtest and -testnet.\n");
                return false;
            }
            // Forward the command to the running daemon and exit with its status.
            int ret = CommandLineRPC(argc, argv);
            exit(ret);
        }
#if !defined(WIN32)
        fDaemon = GetBoolArg("-daemon", false);
        if (fDaemon)
        {
            // Daemonize
            pid_t pid = fork();
            if (pid < 0)
            {
                fprintf(stderr, "Error: fork() returned %d errno %d\n", pid, errno);
                return false;
            }
            if (pid > 0) // Parent process, pid is child process id
            {
                CreatePidFile(GetPidFile(), pid);
                return true;
            }
            // Child process falls through to rest of initialization
            pid_t sid = setsid();
            if (sid < 0)
                fprintf(stderr, "Error: setsid() returned %d errno %d\n", sid, errno);
        }
#endif
        // Start the shutdown watcher before the (potentially long) init work.
        detectShutdownThread = new boost::thread(boost::bind(&DetectShutdownThread, &threadGroup));
        fRet = AppInit2(threadGroup);
    }
    catch (std::exception& e) {
        PrintExceptionContinue(&e, "AppInit()");
    } catch (...) {
        PrintExceptionContinue(NULL, "AppInit()");
    }
    // Initialization failed: stop the watcher and all worker threads.
    if (!fRet) {
        if (detectShutdownThread)
            detectShutdownThread->interrupt();
        threadGroup.interrupt_all();
    }
    if (detectShutdownThread)
    {
        detectShutdownThread->join();
        delete detectShutdownThread;
        detectShutdownThread = NULL;
    }
    Shutdown();
    return fRet;
}
extern void noui_connect();
// Headless daemon entry point: wires up the no-op UI handlers, runs
// AppInit, and maps the result to a process exit status.
int main(int argc, char* argv[])
{
    fHaveGUI = false;

    // Connect skidood signal handlers
    noui_connect();

    bool fRet = AppInit(argc, argv);

    // A successfully daemonized parent exits 0 immediately.
    if (fRet && fDaemon)
        return 0;

    return fRet ? 0 : 1;
}
| EwigeBlumenkraft/skidoo | src/bitcoind.cpp | C++ | mit | 4,231 |
using System;
namespace BinarySearchTree
{
class Program
{
static void Main(string[] args)
{
Tree testTree = new Tree();
Node root = null;
root = testTree.Add(root, 2);
testTree.Add(root, 1);
testTree.Add(root, 3);
Console.Read();
}
}
}
| Trey64/data-structures3 | BinarySearchTree/Program.cs | C# | mit | 353 |
package de.fred4jupiter.fredbet.repository;
import de.fred4jupiter.fredbet.domain.AppUser;
import de.fred4jupiter.fredbet.domain.ImageGroup;
import de.fred4jupiter.fredbet.domain.ImageMetaData;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
/**
 * Spring Data JPA repository for {@link ImageMetaData} entities, with
 * JPQL queries distinguishing profile-image groups from regular galleries.
 */
public interface ImageMetaDataRepository extends JpaRepository<ImageMetaData, Long> {
    /** Metadata of the given user's profile image (group flagged as profile), or null if none. */
    @Query("select a from ImageMetaData a where a.owner.username = :username and a.imageGroup.userProfileImageGroup = true")
    ImageMetaData findImageMetaDataOfUserProfileImage(@Param("username") String username);
    /** Bulk-deletes all metadata rows owned by the given user id. */
    @Query("delete from ImageMetaData a where a.owner.id = :userId")
    @Modifying
    @Transactional
    void deleteMetaDataByOwner(@Param("userId") Long userId);
    /** Metadata row with the given image key (derived query), or null if none. */
    ImageMetaData findByImageKey(String imageKey);
    /** All metadata belonging to profile-image groups. */
    @Query("select a from ImageMetaData a where a.imageGroup.userProfileImageGroup = true")
    List<ImageMetaData> loadImageMetaDataOfUserProfileImageSet();
    /** All metadata excluding profile images. */
    @Query("select a from ImageMetaData a where a.imageGroup.userProfileImageGroup = false")
    List<ImageMetaData> findImageMetaDataWithoutProfileImages();
    /** Non-profile image metadata owned by the given username. */
    @Query("select a from ImageMetaData a where a.owner.username = :username and a.imageGroup.userProfileImageGroup = false")
    List<ImageMetaData> findImageMetaDataForUser(@Param("username") String username);
    /** Metadata for the given owner within the given image group (derived query), or null if none. */
    ImageMetaData findByOwnerAndImageGroup(AppUser owner, ImageGroup imageGroup);
}
| fred4jupiter/fredbet | src/main/java/de/fred4jupiter/fredbet/repository/ImageMetaDataRepository.java | Java | mit | 1,690 |
<?php
/* (c) Anton Medvedev <anton@medv.io>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Deployer\Task;
use Deployer\Configuration\Configuration;
use Deployer\Exception\ConfigurationException;
use Deployer\Exception\Exception;
use Deployer\Host\Host;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Output\OutputInterface;
class Context
{
    /**
     * @var Host
     */
    private $host;

    /**
     * @var InputInterface
     */
    private $input;

    /**
     * @var OutputInterface
     */
    private $output;

    /**
     * Stack of active contexts; the last entry is the current one.
     *
     * @var Context[]
     */
    private static $contexts = [];

    /**
     * @param Host $host
     * @param InputInterface $input
     * @param OutputInterface $output
     */
    public function __construct($host, InputInterface $input = null, OutputInterface $output = null)
    {
        $this->host = $host;
        $this->input = $input;
        $this->output = $output;
    }

    /**
     * Pushes a context onto the stack, making it the current one.
     *
     * @param Context $context
     */
    public static function push(Context $context)
    {
        self::$contexts[] = $context;
    }

    /**
     * @return bool true when at least one context is active
     */
    public static function has()
    {
        return !empty(self::$contexts);
    }

    /**
     * Returns the current (innermost) context.
     *
     * @return Context
     * @throws Exception when no context is active
     */
    public static function get()
    {
        if (empty(self::$contexts)) {
            throw new Exception('Context was required, but there\'s nothing there.');
        }
        return end(self::$contexts);
    }

    /**
     * Removes and returns the current context.
     *
     * @return Context
     */
    public static function pop()
    {
        return array_pop(self::$contexts);
    }

    /**
     * Throws a Exception when not called within a task-context and therefore no Context is available.
     *
     * This method provides a useful error to the end-user to make him/her aware
     * to use a function in the required task-context.
     *
     * Fixed: this previously called self::get(), which itself throws a generic
     * Exception on an empty stack, making the ConfigurationException below
     * unreachable. Checking self::has() restores the intended behavior.
     *
     * @param string $callerName
     * @throws ConfigurationException
     */
    public static function required($callerName)
    {
        if (!self::has()) {
            throw new ConfigurationException("'$callerName' can only be used within a task.");
        }
    }

    /**
     * @return Configuration
     */
    public function getConfig()
    {
        return $this->host->getConfig();
    }

    /**
     * @return InputInterface
     */
    public function getInput()
    {
        return $this->input;
    }

    /**
     * @return OutputInterface
     */
    public function getOutput()
    {
        return $this->output;
    }

    /**
     * @return Host
     */
    public function getHost()
    {
        return $this->host;
    }
}
| mbrodala/deployer | src/Task/Context.php | PHP | mit | 2,754 |
/**
* Copyright (C) 2010-14 diirt developers. See COPYRIGHT.TXT
* All rights reserved. Use is subject to license terms. See LICENSE.TXT
*/
package org.diirt.datasource.timecache;
import java.time.Instant;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import org.diirt.datasource.timecache.source.DataSource;
import org.diirt.datasource.timecache.util.CacheHelper;
import org.diirt.datasource.timecache.util.IntervalsList;
import org.diirt.util.time.TimeInterval;
/**
* Retrieves chunks from the specified {@link DataSource}, channel name and
* {@link TimeInterval}. Polls chunks from the source until the
* {@link Timestamp} of the last received {@link Data} is superior to the end of
* the defined {@link TimeInterval}.
* @author Fred Arnaud (Sopra Group) - ITER
*/
public class DataRequestThread extends Thread {
	/** Source of unique, monotonically increasing request IDs. */
	private static AtomicInteger idCounter = new AtomicInteger(0);
	/** Unique ID of this request; also the sole basis of equals/hashCode. */
	private final Integer requestID;
	private final String channelName;
	private final DataSource source;
	/** Normalized time interval this request must cover. */
	private TimeInterval interval;
	/** End timestamp of the most recently received chunk; starts at the interval's start. */
	private Instant lastReceived;
	// NOTE(review): plain ArrayList -- add/removeListener are not synchronized
	// against the running thread's notifications; confirm listeners are only
	// mutated before start() or after completion.
	private List<DataRequestListener> listeners;
	/**
	 * @param channelName name of the channel to poll; must be non-empty
	 * @param source the data source chunks are fetched from
	 * @param interval the time interval to cover
	 * @throws Exception if any argument is null or the channel name is empty
	 */
	public DataRequestThread(String channelName, DataSource source,
			TimeInterval interval) throws Exception {
		if (channelName == null || channelName.isEmpty() || source == null
				|| interval == null)
			throw new Exception("null or empty argument not allowed");
		this.requestID = idCounter.getAndIncrement();
		this.listeners = new ArrayList<DataRequestListener>();
		this.channelName = channelName;
		this.source = source;
		// arrange() normalizes the interval -- presumably ordering its
		// bounds; confirm in CacheHelper.
		this.interval = CacheHelper.arrange(interval);
		this.lastReceived = this.interval.getStart();
	}
	/** {@inheritDoc} */
	@Override
	public void run() {
		// Without a start point there is nothing to poll; report completion.
		if (interval.getStart() == null) {
			notifyComplete();
			return;
		}
		DataChunk currentChunk = source.getData(channelName, interval.getStart());
		boolean process = true;
		while (process) {
			// Stop when the source is exhausted or the chunk no longer
			// overlaps the requested interval.
			if (currentChunk == null || currentChunk.isEmpty()
					|| !CacheHelper.intersects(interval, currentChunk.getInterval())) {
				process = false;
				break;
			} else {
				lastReceived = currentChunk.getInterval().getEnd();
				notifyNewData(currentChunk);
				// A non-full chunk means the source has no more samples; a
				// chunk ending outside the interval means we are done.
				if (!currentChunk.isFull() || !interval.contains(lastReceived)) {
					process = false;
					break;
				}
			}
			// Fetch the next chunk starting just after the last received end.
			currentChunk = source.getData(channelName, lastReceived.plus(IntervalsList.minDuration));
		}
		notifyComplete();
	}
	// Notify the listeners that a new chunk is available
	private void notifyNewData(DataChunk chunk) {
		for (DataRequestListener l : listeners)
			l.newData(chunk, this);
	}
	// Notify the listeners that the thread has finished requesting samples
	private void notifyComplete() {
		for (DataRequestListener l : listeners)
			l.intervalComplete(this);
	}
	/** Add a {@link DataRequestListener}; null is ignored. */
	public void addListener(DataRequestListener l) {
		if (l != null)
			listeners.add(l);
	}
	/** Remove a {@link DataRequestListener}; null is ignored. */
	public void removeListener(DataRequestListener l) {
		if (l != null)
			listeners.remove(l);
	}
	public TimeInterval getInterval() {
		return interval;
	}
	public void setInterval(TimeInterval interval) {
		this.interval = interval;
	}
	public String getChannelName() {
		return channelName;
	}
	public DataSource getSource() {
		return source;
	}
	/** End timestamp of the last chunk received so far. */
	public Instant getLastReceived() {
		return lastReceived;
	}
	public Integer getRequestID() {
		return requestID;
	}
	/** Hash code derived solely from {@link #requestID}. */
	@Override
	public int hashCode() {
		final int prime = 31;
		int result = 1;
		result = prime * result
				+ ((requestID == null) ? 0 : requestID.hashCode());
		return result;
	}
	/** Equality is based solely on {@link #requestID}. */
	@Override
	public boolean equals(Object obj) {
		if (this == obj)
			return true;
		if (obj == null)
			return false;
		if (getClass() != obj.getClass())
			return false;
		DataRequestThread other = (DataRequestThread) obj;
		if (requestID == null) {
			if (other.requestID != null)
				return false;
		} else if (!requestID.equals(other.requestID))
			return false;
		return true;
	}
}
| ControlSystemStudio/diirt | pvmanager/datasource-timecache/src/main/java/org/diirt/datasource/timecache/DataRequestThread.java | Java | mit | 4,664 |
'use strict';
const ServiceEmitter = require('./lib/ServiceEmitter');
const ServiceProvider = require('./lib/ServiceProvider');
module.exports = { ServiceEmitter, ServiceProvider };
| protocoolmx/spokesman | index.js | JavaScript | mit | 184 |
<?php
namespace Swift;
/**
 * Thin static wrapper around PHP's native session handling
 * ($_SESSION superglobal and the session_* functions).
 */
class Session
{
    /**
     * Returns the session value stored under $Key.
     *
     * NOTE(review): no existence check is performed -- reading a missing key
     * raises a PHP notice. Callers should guard with Session::Exists().
     */
    public static function Get(string $Key) : string
    {
        return $_SESSION[$Key];
    }
    /** Stores $Value in the session under $Key. */
    public static function Set(string $Key, string $Value)
    {
        $_SESSION[$Key] = $Value;
    }
    /** Serializes the current session data into a string (session_encode). */
    public static function Encode() : string
    {
        return session_encode();
    }
    /** Restores session data from an encoded string; true on success. */
    public static function Decode(string $EncodedSession) : bool
    {
        return session_decode($EncodedSession);
    }
    /** Whether a value exists under $Key in the current session. */
    public static function Exists(string $Key) : bool
    {
        return isset($_SESSION[$Key]);
    }
    /** Returns the current session id. */
    public static function GetId() : string
    {
        return session_id();
    }
    /** Sets the session id; must be called before Start(). */
    public static function SetId(string $Id)
    {
        session_id($Id);
    }
    /** Starts (or resumes) the session; true on success. */
    public static function Start($Options = array()) : bool
    {
        return session_start($Options);
    }
}
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2008-2011, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate;
import java.io.Serializable;
import java.sql.Connection;
import java.util.Map;
import java.util.Set;
import javax.naming.Referenceable;
import org.hibernate.engine.spi.FilterDefinition;
import org.hibernate.metadata.ClassMetadata;
import org.hibernate.metadata.CollectionMetadata;
import org.hibernate.proxy.EntityNotFoundDelegate;
import org.hibernate.stat.Statistics;
/**
* The main contract here is the creation of {@link Session} instances. Usually
* an application has a single {@link SessionFactory} instance and threads
* servicing client requests obtain {@link Session} instances from this factory.
* <p/>
* The internal state of a {@link SessionFactory} is immutable. Once it is created
* this internal state is set. This internal state includes all of the metadata
* about Object/Relational Mapping.
* <p/>
* Implementors <strong>must</strong> be threadsafe.
*
* @see org.hibernate.cfg.Configuration
*
* @author Gavin King
* @author Steve Ebersole
*/
public interface SessionFactory extends Referenceable, Serializable {
	/**
	 * Options determined when this SessionFactory was built, exposed to callers.
	 */
	public interface SessionFactoryOptions {
		/** The interceptor configured for this factory. */
		Interceptor getInterceptor();
		/** The delegate consulted when a requested entity cannot be found. */
		EntityNotFoundDelegate getEntityNotFoundDelegate();
	}
	/**
	 * Retrieve the options used to build this factory.
	 *
	 * @return The session factory options.
	 */
	public SessionFactoryOptions getSessionFactoryOptions();
	/**
	 * Obtain a {@link Session} builder.
	 *
	 * @return The session builder
	 */
	public SessionBuilder withOptions();
	/**
	 * Open a {@link Session}.
	 * <p/>
	 * JDBC {@link Connection connection(s)} will be obtained from the
	 * configured {@link org.hibernate.service.jdbc.connections.spi.ConnectionProvider} as needed
	 * to perform requested work.
	 *
	 * @return The created session.
	 *
	 * @throws HibernateException Indicates a problem opening the session; pretty rare here.
	 */
	public Session openSession() throws HibernateException;
	/**
	 * Obtains the current session.  The definition of what exactly "current"
	 * means controlled by the {@link org.hibernate.context.spi.CurrentSessionContext} impl configured
	 * for use.
	 * <p/>
	 * Note that for backwards compatibility, if a {@link org.hibernate.context.spi.CurrentSessionContext}
	 * is not configured but JTA is configured this will default to the {@link org.hibernate.context.internal.JTASessionContext}
	 * impl.
	 *
	 * @return The current session.
	 *
	 * @throws HibernateException Indicates an issue locating a suitable current session.
	 */
	public Session getCurrentSession() throws HibernateException;
	/**
	 * Obtain a {@link StatelessSession} builder.
	 *
	 * @return The stateless session builder
	 */
	public StatelessSessionBuilder withStatelessOptions();
	/**
	 * Open a new stateless session.
	 *
	 * @return The created stateless session.
	 */
	public StatelessSession openStatelessSession();
	/**
	 * Open a new stateless session, utilizing the specified JDBC
	 * {@link Connection}.
	 *
	 * @param connection Connection provided by the application.
	 *
	 * @return The created stateless session.
	 */
	public StatelessSession openStatelessSession(Connection connection);
	/**
	 * Retrieve the {@link ClassMetadata} associated with the given entity class.
	 *
	 * @param entityClass The entity class
	 *
	 * @return The metadata associated with the given entity; may be null if no such
	 * entity was mapped.
	 *
	 * @throws HibernateException Generally null is returned instead of throwing.
	 */
	public ClassMetadata getClassMetadata(Class entityClass);
	/**
	 * Retrieve the {@link ClassMetadata} associated with the given entity class.
	 *
	 * @param entityName The entity class
	 *
	 * @return The metadata associated with the given entity; may be null if no such
	 * entity was mapped.
	 *
	 * @throws HibernateException Generally null is returned instead of throwing.
	 * @since 3.0
	 */
	public ClassMetadata getClassMetadata(String entityName);
	/**
	 * Get the {@link CollectionMetadata} associated with the named collection role.
	 *
	 * @param roleName The collection role (in form [owning-entity-name].[collection-property-name]).
	 *
	 * @return The metadata associated with the given collection; may be null if no such
	 * collection was mapped.
	 *
	 * @throws HibernateException Generally null is returned instead of throwing.
	 */
	public CollectionMetadata getCollectionMetadata(String roleName);
	/**
	 * Retrieve the {@link ClassMetadata} for all mapped entities.
	 *
	 * @return A map containing all {@link ClassMetadata} keyed by the
	 * corresponding {@link String} entity-name.
	 *
	 * @throws HibernateException Generally empty map is returned instead of throwing.
	 *
	 * @since 3.0 changed key from {@link Class} to {@link String}.
	 */
	public Map<String,ClassMetadata> getAllClassMetadata();
	/**
	 * Get the {@link CollectionMetadata} for all mapped collections
	 *
	 * @return a map from <tt>String</tt> to <tt>CollectionMetadata</tt>
	 *
	 * @throws HibernateException Generally empty map is returned instead of throwing.
	 */
	public Map getAllCollectionMetadata();
	/**
	 * Retrieve the statistics for this factory.
	 *
	 * @return The statistics.
	 */
	public Statistics getStatistics();
	/**
	 * Destroy this <tt>SessionFactory</tt> and release all resources (caches,
	 * connection pools, etc).
	 * <p/>
	 * It is the responsibility of the application to ensure that there are no
	 * open {@link Session sessions} before calling this method as the impact
	 * on those {@link Session sessions} is indeterminate.
	 * <p/>
	 * No-ops if already {@link #isClosed closed}.
	 *
	 * @throws HibernateException Indicates an issue closing the factory.
	 */
	public void close() throws HibernateException;
	/**
	 * Is this factory already closed?
	 *
	 * @return True if this factory is already closed; false otherwise.
	 */
	public boolean isClosed();
	/**
	 * Obtain direct access to the underlying cache regions.
	 *
	 * @return The direct cache access API.
	 */
	public Cache getCache();
	/**
	 * Evict all entries from the second-level cache. This method occurs outside
	 * of any transaction; it performs an immediate "hard" remove, so does not respect
	 * any transaction isolation semantics of the usage strategy. Use with care.
	 *
	 * @param persistentClass The entity class for which to evict data.
	 *
	 * @throws HibernateException Generally will mean that either that
	 * 'persistentClass' did not name a mapped entity or a problem
	 * communicating with underlying cache impl.
	 *
	 * @deprecated Use {@link Cache#evictEntityRegion(Class)} accessed through
	 * {@link #getCache()} instead.
	 */
	@Deprecated
	public void evict(Class persistentClass) throws HibernateException;
	/**
	 * Evict an entry from the second-level  cache. This method occurs outside
	 * of any transaction; it performs an immediate "hard" remove, so does not respect
	 * any transaction isolation semantics of the usage strategy. Use with care.
	 *
	 * @param persistentClass The entity class for which to evict data.
	 * @param id The entity id
	 *
	 * @throws HibernateException Generally will mean that either that
	 * 'persistentClass' did not name a mapped entity or a problem
	 * communicating with underlying cache impl.
	 *
	 * @deprecated Use {@link Cache#evictEntity(Class,Serializable)} accessed through
	 * {@link #getCache()} instead.
	 */
	@Deprecated
	public void evict(Class persistentClass, Serializable id) throws HibernateException;
	/**
	 * Evict all entries from the second-level cache. This method occurs outside
	 * of any transaction; it performs an immediate "hard" remove, so does not respect
	 * any transaction isolation semantics of the usage strategy. Use with care.
	 *
	 * @param entityName The entity name for which to evict data.
	 *
	 * @throws HibernateException Generally will mean that either that
	 * 'persistentClass' did not name a mapped entity or a problem
	 * communicating with underlying cache impl.
	 *
	 * @deprecated Use {@link Cache#evictEntityRegion(String)} accessed through
	 * {@link #getCache()} instead.
	 */
	@Deprecated
	public void evictEntity(String entityName) throws HibernateException;
	/**
	 * Evict an entry from the second-level  cache. This method occurs outside
	 * of any transaction; it performs an immediate "hard" remove, so does not respect
	 * any transaction isolation semantics of the usage strategy. Use with care.
	 *
	 * @param entityName The entity name for which to evict data.
	 * @param id The entity id
	 *
	 * @throws HibernateException Generally will mean that either that
	 * 'persistentClass' did not name a mapped entity or a problem
	 * communicating with underlying cache impl.
	 *
	 * @deprecated Use {@link Cache#evictEntity(String,Serializable)} accessed through
	 * {@link #getCache()} instead.
	 */
	@Deprecated
	public void evictEntity(String entityName, Serializable id) throws HibernateException;
	/**
	 * Evict all entries from the second-level cache. This method occurs outside
	 * of any transaction; it performs an immediate "hard" remove, so does not respect
	 * any transaction isolation semantics of the usage strategy. Use with care.
	 *
	 * @param roleName The name of the collection role whose regions should be evicted
	 *
	 * @throws HibernateException Generally will mean that either that
	 * 'roleName' did not name a mapped collection or a problem
	 * communicating with underlying cache impl.
	 *
	 * @deprecated Use {@link Cache#evictCollectionRegion(String)} accessed through
	 * {@link #getCache()} instead.
	 */
	@Deprecated
	public void evictCollection(String roleName) throws HibernateException;
	/**
	 * Evict an entry from the second-level cache. This method occurs outside
	 * of any transaction; it performs an immediate "hard" remove, so does not respect
	 * any transaction isolation semantics of the usage strategy. Use with care.
	 *
	 * @param roleName The name of the collection role
	 * @param id The id of the collection owner
	 *
	 * @throws HibernateException Generally will mean that either that
	 * 'roleName' did not name a mapped collection or a problem
	 * communicating with underlying cache impl.
	 *
	 * @deprecated Use {@link Cache#evictCollection(String,Serializable)} accessed through
	 * {@link #getCache()} instead.
	 */
	@Deprecated
	public void evictCollection(String roleName, Serializable id) throws HibernateException;
	/**
	 * Evict any query result sets cached in the named query cache region.
	 *
	 * @param cacheRegion The named query cache region from which to evict.
	 *
	 * @throws HibernateException Since a not-found 'cacheRegion' simply no-ops,
	 * this should indicate a problem communicating with underlying cache impl.
	 *
	 * @deprecated Use {@link Cache#evictQueryRegion(String)} accessed through
	 * {@link #getCache()} instead.
	 */
	@Deprecated
	public void evictQueries(String cacheRegion) throws HibernateException;
	/**
	 * Evict any query result sets cached in the default query cache region.
	 *
	 * @throws HibernateException Indicate a problem communicating with
	 * underlying cache impl.
	 *
	 * @deprecated Use {@link Cache#evictQueryRegions} accessed through
	 * {@link #getCache()} instead.
	 */
	@Deprecated
	public void evictQueries() throws HibernateException;
	/**
	 * Obtain a set of the names of all filters defined on this SessionFactory.
	 *
	 * @return The set of filter names.
	 */
	public Set getDefinedFilterNames();
	/**
	 * Obtain the definition of a filter by name.
	 *
	 * @param filterName The name of the filter for which to obtain the definition.
	 * @return The filter definition.
	 * @throws HibernateException If no filter defined with the given name.
	 */
	public FilterDefinition getFilterDefinition(String filterName) throws HibernateException;
	/**
	 * Determine if this session factory contains a fetch profile definition
	 * registered under the given name.
	 *
	 * @param name The name to check
	 * @return True if there is such a fetch profile; false otherwise.
	 */
	public boolean containsFetchProfileDefinition(String name);
	/**
	 * Retrieve this factory's {@link TypeHelper}
	 *
	 * @return The factory's {@link TypeHelper}
	 */
	public TypeHelper getTypeHelper();
}
| HerrB92/obp | OpenBeaconPackage/libraries/hibernate-release-4.2.7.SP1/project/hibernate-core/src/main/java/org/hibernate/SessionFactory.java | Java | mit | 13,171 |
/*
* Copyright © 2015 www.pipseq.org
* @author rspates
*/
package org.pipseq.rdf.jena.aggregate;
import java.util.ArrayList;
import java.util.List;
import com.hp.hpl.jena.sparql.engine.binding.Binding;
import com.hp.hpl.jena.sparql.expr.Expr;
import com.hp.hpl.jena.sparql.expr.ExprEvalException;
import com.hp.hpl.jena.sparql.expr.ExprList;
import com.hp.hpl.jena.sparql.expr.NodeValue;
import com.hp.hpl.jena.sparql.expr.aggregate.Accumulator;
import com.hp.hpl.jena.sparql.expr.aggregate.AccumulatorFactory;
import com.hp.hpl.jena.sparql.expr.aggregate.AggCustom;
import com.hp.hpl.jena.sparql.expr.aggregate.AggregateRegistry;
import com.hp.hpl.jena.sparql.function.FunctionEnv;
import com.hp.hpl.jena.sparql.graph.NodeConst;
/**
* allSameList
* For use as a filter function within a SPARQL query.
* The allSameList function accepts any number of parameters.
* If all the parameters are the same it returns true else false.
*/
/**
 * Custom SPARQL aggregate: allSameList.
 * Accumulates the values of every expression in the aggregate's expression
 * list across a group; {@link #getValue()} yields TRUE when all accumulated
 * values are equal, FALSE otherwise (including when nothing was accumulated).
 */
public class allSameList implements Accumulator {
	// Execution of a custom aggregate is with accumulators. One accumulator is
	// created by the factory for each group in a query execution.
	static AccumulatorFactory myAccumulatorFactory = new AccumulatorFactory() {
		@Override
		public Accumulator createAccumulator(AggCustom agg) { return new allSameList(agg) ; }
	} ;
	/** URI under which this aggregate is registered. */
	public static String aggUri = "java:org.pipseq.rdf.jena.aggregate.allSameList" ;
	/**
	 * Registers this aggregate with the ARQ {@link AggregateRegistry};
	 * nodeFalse is the value reported for groups with no rows.
	 */
	public static void register(){
		AggregateRegistry.register(aggUri, myAccumulatorFactory, NodeConst.nodeFalse);
	}
	private AggCustom agg ;
	/**
	 * Instantiates a new all same list accumulator.
	 *
	 * @param agg the aggregate expression being evaluated
	 */
	allSameList(AggCustom agg) { this.agg = agg ; }
	/** Values accumulated so far for the current group. */
	List<NodeValue> lnv = new ArrayList<NodeValue>();
	/* (non-Javadoc)
	 * @see com.hp.hpl.jena.sparql.expr.aggregate.Accumulator#accumulate(com.hp.hpl.jena.sparql.engine.binding.Binding, com.hp.hpl.jena.sparql.function.FunctionEnv)
	 */
	@Override
	public void accumulate(Binding binding, FunctionEnv functionEnv) {
		ExprList exprList = agg.getExprList() ;
		for(Expr expr: exprList) {
			try {
				NodeValue nv = expr.eval(binding, functionEnv) ;
				// Evaluation succeeded.
				lnv.add(nv);
			} catch (ExprEvalException ex) {}
			// Evaluation failures are deliberately skipped (best effort).
		}
	}
	/* (non-Javadoc)
	 * @see com.hp.hpl.jena.sparql.expr.aggregate.Accumulator#getValue()
	 */
	@Override
	public NodeValue getValue() {
		// Nothing accumulated: not "all same". The previous check was
		// `lnv.size() < 0`, which can never be true, so an empty accumulator
		// incorrectly fell through the loop and returned TRUE.
		if (lnv.isEmpty())
			return NodeValue.FALSE;
		for (int i = 1; i < lnv.size(); i++){
			if (!lnv.get(0).equals(lnv.get(i)))
				return NodeValue.FALSE;
		}
		return NodeValue.TRUE;
	}
}
| pipseq/semantic | src/main/org/pipseq/rdf/jena/aggregate/allSameList.java | Java | mit | 2,906 |
module Msplex
  module Resource
    # Service models one generated micro-service: its name and the REST
    # actions it exposes. It renders every file (app.rb, config.ru,
    # Dockerfile, Gemfile, ...) needed to build and run the service
    # container, plus its docker-compose fragment.
    class Service
      # Catalogue of supported action names. Each entry maps an action to
      # the HTTP verb used for its endpoint (:type) and whether the endpoint
      # reads request parameters (:params).
      DEFINED_ACTION = {
        list: {
          type: :get,
          params: false,
        },
        create: {
          type: :post,
          params: true,
        },
        get: {
          type: :get,
          params: true,
        },
        update: {
          type: :patch,
          params: true,
        },
        delete: {
          type: :delete,
          params: true,
        },
      }

      attr_reader :name, :actions

      # Builds a Service from a YAML schema file. The schema is expected to
      # contain :name and :actions keys (keys are symbolized via Utils).
      def self.read_schema(path)
        schema = Utils.symbolize_keys(YAML.load_file(path))
        self.new(schema[:name], schema[:actions])
      end

      # name    - service name; used for the image build path and VIRTUAL_HOST
      # actions - array of action hashes (each with :type and :table)
      def initialize(name, actions)
        @name = name
        @actions = actions
      end

      # Returns the docker-compose service fragment for this service.
      # Links the database container when a database is given (see #links).
      def compose(database)
        {
          build: "services/#{@name}",
          environment: [
            "RACK_ENV=production",
            "VIRTUAL_HOST=#{@name}"
          ],
          links: links(database),
        }
      end

      # Memoized name used for this service inside docker-compose.
      def compose_service_name
        @compose_service_name ||= "#{@name}_service"
      end

      # Renders app.rb: a Sinatra application with helper methods for
      # cross-service HTTP calls and one endpoint per configured action.
      # NOTE: heredoc content is emitted verbatim into the generated file.
      def app_rb(database)
        <<-APPRB
#{database.definitions.join("\n")}
class App < Sinatra::Base
configure do
register Sinatra::ActiveRecordExtension
use Rack::Session::Cookie, expire_after: 3600, secret: "salt"
end
helpers do
def csrf_meta_tag
Rack::Csrf.csrf_metatag(env)
end
def param_str(parameters)
parameters.map { |key, value| key.to_s + "=" + CGI.escape(value.to_s) }.join("&")
end
def http_get(endpoint, parameters = {})
uri = URI.parse(parameters.length > 0 ? endpoint + "?" + param_str(parameters) : endpoint)
JSON.parse(Net::HTTP.get_response(uri).body, symbolize_names: true)
rescue
{ error: true }
end
def http_post(endpoint, parameters)
uri = URI.parse(endpoint)
JSON.parse(Net::HTTP.post_form(uri, parameters).body, symbolize_names: true)
rescue
{ error: true }
end
def endpoint_of(service, action)
"http://" << service << "/" << action
end
end
#{Utils.indent(endpoint(database), 2)}
end
        APPRB
      end

      # Renders the rackup file that boots the generated Sinatra app.
      def config_ru
        <<-CONFIGRU
require "rubygems"
require "bundler"
Bundler.require
require "./app.rb"
run App
        CONFIGRU
      end

      # Renders the service's Dockerfile (Ruby base image, bundled deps,
      # entrypoint runs DB setup before the server starts).
      def dockerfile
        <<-DOCKERFILE
FROM #{image}
MAINTAINER Your Name <you@example.com>
ENV RACK_ENV production
WORKDIR /usr/src/app
COPY Gemfile /usr/src/app/
COPY Gemfile.lock /usr/src/app/
RUN bundle install -j4 --without development test --deployment
COPY . /usr/src/app
EXPOSE 80
ENTRYPOINT ["./entrypoint.sh"]
CMD ["bundle", "exec", "rackup", "-p", "80"]
        DOCKERFILE
      end

      # Renders entrypoint.sh: prepares the database, then execs the CMD.
      def entrypoint_sh
        <<-ENTRYPOINT
#!/bin/bash
bundle exec rake db:create
bundle exec rake db:migrate
exec $@
        ENTRYPOINT
      end

      # Renders the Gemfile; the database adapter gem is appended when a
      # database is configured (see #db_gem). Blank-only lines are stripped.
      def gemfile(database)
        <<-GEMFILE.gsub(/^$/, "")
source "https://rubygems.org"
gem "activesupport", require: "active_support/all"
gem "activerecord", "~> 4.2.5"
gem "sinatra", "~> 1.4.6", require: "sinatra/base"
gem "sinatra-activerecord", require: "sinatra/activerecord"
gem "slim", "~> 3.0.6"
gem "rack_csrf", require: "rack/csrf"
gem "rake"
gem "thin", "~> 1.6.4"
#{db_gem(database)}
group :development do
gem "sinatra-reloader", require: "sinatra/reloader"
end
        GEMFILE
      end

      # Renders a pinned Gemfile.lock so container builds are reproducible.
      # NOTE: the lock is static; keep it in sync with #gemfile.
      def gemfile_lock(database)
        <<-GEMFILE_LOCK
GEM
remote: https://rubygems.org/
specs:
activemodel (4.2.5)
activesupport (= 4.2.5)
builder (~> 3.1)
activerecord (4.2.5)
activemodel (= 4.2.5)
activesupport (= 4.2.5)
arel (~> 6.0)
activesupport (4.2.5)
i18n (~> 0.7)
json (~> 1.7, >= 1.7.7)
minitest (~> 5.1)
thread_safe (~> 0.3, >= 0.3.4)
tzinfo (~> 1.1)
arel (6.0.3)
backports (3.6.7)
builder (3.2.2)
daemons (1.2.3)
eventmachine (1.0.9.1)
i18n (0.7.0)
json (1.8.3)
minitest (5.8.3)
multi_json (1.11.2)
pg (0.18.3)
rack (1.6.4)
rack-protection (1.5.3)
rack
rack-test (0.6.3)
rack (>= 1.0)
rack_csrf (2.5.0)
rack (>= 1.1.0)
rake (10.5.0)
sinatra (1.4.6)
rack (~> 1.4)
rack-protection (~> 1.4)
tilt (>= 1.3, < 3)
sinatra-activerecord (2.0.9)
activerecord (>= 3.2)
sinatra (~> 1.0)
sinatra-contrib (1.4.6)
backports (>= 2.0)
multi_json
rack-protection
rack-test
sinatra (~> 1.4.0)
tilt (>= 1.3, < 3)
sinatra-reloader (1.0)
sinatra-contrib
slim (3.0.6)
temple (~> 0.7.3)
tilt (>= 1.3.3, < 2.1)
temple (0.7.6)
thin (1.6.4)
daemons (~> 1.0, >= 1.0.9)
eventmachine (~> 1.0, >= 1.0.4)
rack (~> 1.0)
thread_safe (0.3.5)
tilt (2.0.2)
tzinfo (1.2.2)
thread_safe (~> 0.1)
PLATFORMS
ruby
DEPENDENCIES
activerecord (~> 4.2.5)
activesupport
pg (= 0.18.3)
rack_csrf
rake
sinatra (~> 1.4.6)
sinatra-activerecord
sinatra-reloader
slim (~> 3.0.6)
thin (~> 1.6.4)
BUNDLED WITH
1.11.2
        GEMFILE_LOCK
      end

      # Base Docker image for generated services.
      def image
        "ruby:2.3.0"
      end

      # Renders the Rakefile wiring sinatra-activerecord's db:* tasks.
      def rakefile
        <<-RAKEFILE
require "sinatra"
require "sinatra/activerecord"
require "sinatra/activerecord/rake"
namespace :db do
task :load_config do
require "./app"
end
end
        RAKEFILE
      end

      private

      # Gem line for the configured database adapter, or "" when no
      # database is configured.
      def db_gem(database)
        database ? "gem #{database.gem[:gem].inspect}, #{database.gem[:version].inspect}" : ""
      end

      # Renders one Sinatra route per action. Unknown action types render
      # an empty string (TODO below). Assumes `database` is non-nil when any
      # defined action is present — verify against callers.
      def endpoint(database)
        actions.map do |action|
          if DEFINED_ACTION.keys.include?(action[:type].to_sym)
            defined = DEFINED_ACTION[action[:type].to_sym]
            db_action = defined[:params] ?
              database.send(action[:type], action[:table], {}) : database.send(action[:type], action[:table])
            <<-ENDPOINT
#{defined[:type]} "/#{action[:table]}" do
content_type :json
result = {}
#{defined[:params] ? Utils.indent(database.params(action[:table]), 2) : ""}
#{Utils.indent(db_action, 2)}
result.to_json
end
            ENDPOINT
          else
            # TODO
            ""
          end
        end.join("\n")
      end

      # docker-compose links entry for the database container, if any.
      def links(database)
        database ? ["#{database.compose_service_name}:db"] : []
      end
    end
  end
end
| dtan4/msplex | lib/msplex/resource/service.rb | Ruby | mit | 6,248 |
<?php namespace Bkoetsier\Theme\Facade;
use Illuminate\Support\Facades\Facade;
class Theme extends Facade
{
	/**
	 * Get the registered name of the component.
	 *
	 * Resolves this facade to the 'theme.manager' binding in the IoC
	 * container.
	 *
	 * @return string
	 */
	protected static function getFacadeAccessor()
	{
		return 'theme.manager';
	}
} | bastiankoetsier/theme | src/Facade/Theme.php | PHP | mit | 286 |
package com.github.wesleyegberto.jcachetests.cachestatistics;
import java.lang.management.ManagementFactory;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import javax.annotation.PostConstruct;
import javax.cache.Cache;
import javax.ejb.Singleton;
import javax.inject.Inject;
import javax.management.AttributeNotFoundException;
import javax.management.InstanceNotFoundException;
import javax.management.MBeanException;
import javax.management.MBeanServer;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;
import javax.management.ReflectionException;
import javax.validation.Valid;
import javax.validation.constraints.NotNull;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import com.github.wesleyegberto.jcachetests.cdi.qualifiers.CustomCache;
import com.github.wesleyegberto.jcachetests.entity.Person;
/**
 * JAX-RS resource backed by a JCache {@code Cache}, exposing simple CRUD on
 * {@link Person} entries plus the cache's JSR-107 statistics (hits/misses)
 * read via the platform MBean server.
 */
@Path("statistics")
@Singleton
public class StatisticsResource {
	/** Source of auto-incremented ids; shared across all requests. */
	private static final AtomicInteger lastId = new AtomicInteger(0);

	@Inject
	@CustomCache
	Cache<Integer, Person> peopleCache;

	/** JMX name of the cache's statistics MBean, resolved in init(). */
	private ObjectName objectName;
	private MBeanServer mBeanServer;

	@PostConstruct
	public void init() {
		mBeanServer = ManagementFactory.getPlatformMBeanServer();
		try {
			// JSR-107 registers the statistics MBean under this well-known
			// pattern; CacheManager URI and cache name must be quoted.
			String name = "javax.cache:type=CacheStatistics,CacheManager=\""
					+ (peopleCache.getCacheManager().getURI().toString())
					+ "\",Cache=\"" + peopleCache.getName() + "\"";
			objectName = new ObjectName(name);
		} catch(MalformedObjectNameException e) {
			e.printStackTrace();
		}
	}

	/** Stores a new person under the next generated id. */
	@POST
	@Consumes({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
	public void create(@Valid @NotNull Person person) {
		int newId = lastId.incrementAndGet();
		person.setId(newId);
		peopleCache.put(newId, person);
	}

	/**
	 * Returns the person with the given id, or 404 when absent.
	 * Fix: the path regex was {@code \d} (a single digit), which made ids
	 * greater than 9 unreachable even though create() assigns them;
	 * {@code \d+} accepts any id while remaining backward compatible.
	 */
	@GET
	@Path("{id: \\d+}")
	@Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
	public Response getById(@PathParam("id") int id) {
		Person person = peopleCache.get(id);
		if(person == null)
			return Response.status(Status.NOT_FOUND).build();
		return Response.ok(person).build();
	}

	/** Removes the person with the given id (no-op when absent). */
	@DELETE
	@Path("{id: \\d+}")
	public void delete(@PathParam("id") int id) {
		peopleCache.remove(id);
	}

	/** Returns the cache's hit count read from the statistics MBean. */
	@GET
	@Path("/hits")
	public Response getCacheHits() {
		return readStatistic("CacheHits");
	}

	/** Returns the cache's miss count read from the statistics MBean. */
	@GET
	@Path("/misses")
	public Response getCacheMisses() {
		return readStatistic("CacheMisses");
	}

	/**
	 * Reads a single attribute from the statistics MBean; 500 with the
	 * exception text on any JMX failure. Shared by hits/misses endpoints.
	 */
	private Response readStatistic(String attributeName) {
		try {
			Object value = mBeanServer.getAttribute(objectName, attributeName);
			return Response.ok(value).build();
		} catch(AttributeNotFoundException | InstanceNotFoundException | MBeanException | ReflectionException e) {
			return Response.serverError().entity(e.toString()).build();
		}
	}
}
| wesleyegberto/javaee_projects | jcache-tests/src/main/java/com/github/wesleyegberto/jcachetests/cachestatistics/StatisticsResource.java | Java | mit | 3,184 |
/******************************************************************************
Developed and Copyright (c) by
Erik Unger
Contact: erik@erikunger.com
******************************************************************************/
#include "BaseLib/DataBases/PersistentTable.h"
#include "BaseLib/ErrorHandling.h"
namespace BaseLib {
namespace DataBases {

using namespace BaseLib::ErrorHandling;
using namespace BaseLib::Math;

BL_DEFINE_REFLECTION_CLASS(PersistentTable, AbstractTable)
{
}

// Snapshots the full contents of sourceTable into memory so the result can
// be traversed repeatedly and in both directions (unlike a streaming
// result set). Column objects are typed from the source's metadata.
PersistentTable::PersistentTable(AbstractTable* sourceTable)
	: AbstractTable()
	, rowCount(0)
	, currentRow(-1)
	, columns()
	, firstRowFound(false)
{
	if (checkArgumentNotNull(NAME_VALUE(sourceTable)) == true)
	{
		firstRowFound = sourceTable->moveToNextRow(); // We need this step for SQLite, because else we won't get column information
		initColumns(sourceTable);
		initRows(sourceTable);
	}
}

// Creates one typed column container per source column. The concrete column
// class is chosen from the lower-cased base type plus the reported byte
// size; unknown combinations fall back to NullColumn (and assert).
void PersistentTable::initColumns(AbstractTable* sourceTable)
{
	BL_ASSERT(sourceTable != NULL);
	const int columnCount = sourceTable->getColumnCount();
	columns.setCount(columnCount);
	for (int i = 0; i < columnCount; ++i)
	{
		String name = sourceTable->getColumnName(i);
		String baseType = sourceTable->getColumnBaseType(i);
		String exactType = sourceTable->getColumnExactType(i);
		String type = baseType.lowerCase();
		if (type == "int" || type == "integer")
		{
			switch (sourceTable->getSizeInBytes(i))
			{
				case 1:
					columns[i] = new IntColumn<int8>(name, baseType, exactType);
					break;
				case 2:
					columns[i] = new IntColumn<int16>(name, baseType, exactType);
					break;
				case 3: // 3-byte ints are widened to 32 bit
				case 4:
					columns[i] = new IntColumn<int32>(name, baseType, exactType);
					break;
				case 8:
					columns[i] = new IntColumn<int64>(name, baseType, exactType);
					break;
				default:
					BL_ASSERT_NEVER_REACHED
					columns[i] = new NullColumn(name, baseType, exactType);
					break;
			}
		}
		else if (type == "bool" || type == "boolean")
		{
			columns[i] = new BoolColumn(name, baseType, exactType);
		}
		else if (type == "float" || type == "double" || type == "real")
		{
			switch (sourceTable->getSizeInBytes(i))
			{
				case 4:
					columns[i] = new FloatColumn<float32>(name, baseType, exactType);
					break;
				case 8:
					columns[i] = new FloatColumn<float64>(name, baseType, exactType);
					break;
				default:
					BL_ASSERT_NEVER_REACHED
					columns[i] = new NullColumn(name, baseType, exactType);
					break;
			}
		}
		else if (type == "text" || type == "string" || type == "char" || type == "character")
		{
			columns[i] = new TextColumn(name, baseType, exactType);
		}
		else if (type == "blob" || type == "binary")
		{
			columns[i] = new BlobColumn(name, baseType, exactType);
		}
		else if (type == "null" || type == "void")
		{
			columns[i] = new NullColumn(name, baseType, exactType);
		}
		else
		{
			BL_ASSERT_NEVER_REACHED
			columns[i] = new NullColumn(name, baseType, exactType);
		}
	}
}

// Copies every row of the source into the typed columns. The first row was
// already fetched by the constructor (firstRowFound), so rows are consumed
// in a do/while loop; rowCount ends up as the total number of copied rows.
void PersistentTable::initRows(AbstractTable* sourceTable)
{
	BL_ASSERT(sourceTable != NULL);
	const int columnCount = columns.getCount();
	if (columnCount > 0 && firstRowFound == true)
	{
		do
		{
			++rowCount;
			for (int i = 0; i < columnCount; ++i)
			{
				columns[i]->copyValueFrom(sourceTable, i);
			}
		}
		while (sourceTable->moveToNextRow() == true);
	}
}

// Frees the owned column containers.
PersistentTable::~PersistentTable()
{
	int columnCount = columns.getCount();
	for (int i = 0; i < columnCount; ++i)
	{
		delete columns[i];
	}
}

// Always true: this table owns an in-memory copy of its data.
bool PersistentTable::isPersistent() const
{
	return true;
}

// Advances the cursor; returns false when already on the last row.
// The cursor starts at -1, so the first call moves to row 0.
bool PersistentTable::moveToNextRow()
{
	if (currentRow < rowCount-1)
	{
		++currentRow;
		return true;
	}
	else
	{
		return false;
	}
}

// Moves the cursor back one row; returns false when already on row 0
// (or before the first row).
bool PersistentTable::moveToPreviousRow()
{
	if (currentRow > 0)
	{
		--currentRow;
		return true;
	}
	else
	{
		return false;
	}
}

// Zero-based cursor position (-1 before the first moveToNextRow()).
int PersistentTable::getCurrentRow() const
{
	return currentRow;
}

// Sets the cursor to an absolute row; out-of-range values are rejected
// (argument check reports the error) and leave the cursor unchanged.
void PersistentTable::setCurrentRow(int newCurrentRow)
{
	if (checkArgumentRange(NAME_VALUE(newCurrentRow), 0, getRowCount()-1) == true)
	{
		currentRow = newCurrentRow;
	}
}

int PersistentTable::getRowCount() const
{
	return rowCount;
}

int PersistentTable::getColumnCount() const
{
	return columns.getCount();
}

// The accessors below delegate to the typed column at columnIndex for the
// current row; an invalid index reports an error and yields a fallback
// value (empty string, NULL, false, -1 or NAN respectively).
String PersistentTable::getColumnName(int columnIndex) const
{
	if (checkArgumentRange(NAME_VALUE(columnIndex), 0, getColumnCount()-1) == true)
	{
		return columns[columnIndex]->name;
	}
	else
	{
		return Empty();
	}
}

String PersistentTable::getColumnBaseType(int columnIndex) const
{
	if (checkArgumentRange(NAME_VALUE(columnIndex), 0, getColumnCount()-1) == true)
	{
		return columns[columnIndex]->baseType;
	}
	else
	{
		return Empty();
	}
}

String PersistentTable::getColumnExactType(int columnIndex) const
{
	if (checkArgumentRange(NAME_VALUE(columnIndex), 0, getColumnCount()-1) == true)
	{
		return columns[columnIndex]->exactType;
	}
	else
	{
		return Empty();
	}
}

int PersistentTable::getSizeInBytes(int columnIndex) const
{
	if (checkArgumentRange(NAME_VALUE(columnIndex), 0, getColumnCount()-1) == true)
	{
		return columns[columnIndex]->getSizeInBytes(currentRow);
	}
	else
	{
		return 0;
	}
}

const void* PersistentTable::getBinary(int columnIndex) const
{
	if (checkArgumentRange(NAME_VALUE(columnIndex), 0, getColumnCount()-1) == true)
	{
		return columns[columnIndex]->getBinary(currentRow);
	}
	else
	{
		return NULL;
	}
}

String PersistentTable::getString(int columnIndex) const
{
	if (checkArgumentRange(NAME_VALUE(columnIndex), 0, getColumnCount()-1) == true)
	{
		return columns[columnIndex]->getString(currentRow);
	}
	else
	{
		return Empty();
	}
}

bool PersistentTable::getBool(int columnIndex) const
{
	if (checkArgumentRange(NAME_VALUE(columnIndex), 0, getColumnCount()-1) == true)
	{
		return columns[columnIndex]->getBool(currentRow);
	}
	else
	{
		return false;
	}
}

int32 PersistentTable::getInt(int columnIndex) const
{
	if (checkArgumentRange(NAME_VALUE(columnIndex), 0, getColumnCount()-1) == true)
	{
		return columns[columnIndex]->getInt(currentRow);
	}
	else
	{
		return -1;
	}
}

int64 PersistentTable::getInt64(int columnIndex) const
{
	if (checkArgumentRange(NAME_VALUE(columnIndex), 0, getColumnCount()-1) == true)
	{
		return columns[columnIndex]->getInt64(currentRow);
	}
	else
	{
		return -1;
	}
}

float PersistentTable::getFloat(int columnIndex) const
{
	if (checkArgumentRange(NAME_VALUE(columnIndex), 0, getColumnCount()-1) == true)
	{
		return columns[columnIndex]->getFloat(currentRow);
	}
	else
	{
		return NAN;
	}
}

double PersistentTable::getDouble(int columnIndex) const
{
	if (checkArgumentRange(NAME_VALUE(columnIndex), 0, getColumnCount()-1) == true)
	{
		return columns[columnIndex]->getDouble(currentRow);
	}
	else
	{
		return NAN;
	}
}

} // namespace DataBases
} // namespace BaseLib | ungerik/BaseLib | source/DataBases/PersistentTable.cpp | C++ | mit | 7,356 |
import { Pipe, PipeTransform } from '@angular/core';
import * as _ from 'lodash';
@Pipe({name: 'myFilterBy'})
export class FilterByPipe implements PipeTransform {
  /**
   * Returns the elements of `array` whose `title` property equals `args`.
   * When `args` is falsy (empty string, null, undefined) the input array is
   * returned unchanged, matching the previous behavior.
   *
   * Uses native Array.prototype.filter instead of lodash's
   * `_.filter(array, {title: args})`; for a string `title` the lodash
   * matches-shorthand reduces to plain equality on that property.
   */
  public transform(array: any[], args: string): any[] {
    if (!args) {
      return array;
    }
    return array.filter(item => item != null && item.title === args);
  }
}
| Kseniya-Smirnova/study_app | src/app/core/pipes/filter-by.pipe.ts | TypeScript | mit | 301 |
<?php
/*
* The background core options for the Shoestrap theme
*/
if ( !function_exists( 'shoestrap_module_background_options' ) ) :
/**
 * Registers the "Background" section with the Redux options panel.
 *
 * Scans lib/modules/background/patterns for .png/.jpg pattern images, then
 * builds the section's field definitions (colors, image upload, positioning
 * and pattern picker) and appends the section to $sections.
 *
 * @param array $sections Existing Redux sections.
 * @return array Sections with the Background section appended.
 */
function shoestrap_module_background_options( $sections ) {
	global $redux;

	//Background Patterns Reader
	$bg_pattern_images_path = get_template_directory() . '/lib/modules/background/patterns';
	$bg_pattern_images_url  = get_template_directory_uri() . '/lib/modules/background/patterns/';
	$bg_pattern_images      = array();

	// Collect every .png/.jpg file in the patterns directory as a full URL.
	if ( is_dir( $bg_pattern_images_path ) ) {
		if ( $bg_pattern_images_dir = opendir( $bg_pattern_images_path ) ) {
			$bg_pattern_images = array();
			while ( ( $bg_pattern_images_file = readdir( $bg_pattern_images_dir ) ) !== false ) {
				if( stristr( $bg_pattern_images_file, '.png' ) !== false || stristr( $bg_pattern_images_file, '.jpg' ) !== false )
					array_push( $bg_pattern_images, $bg_pattern_images_url . $bg_pattern_images_file );
			}
		}
	}

	// Blog Options
	$section = array(
		'title' => __( 'Background', 'shoestrap' ),
		'icon'  => 'el-icon-photo icon-large',
	);

	// Page-wide background color.
	$fields[] = array(
		'title'     => __( 'General Background Color', 'shoestrap' ),
		'desc'      => __( 'Select a background color for your site. Default: #ffffff.', 'shoestrap' ),
		'id'        => 'html_color_bg',
		'default'   => '#ffffff',
		'customizer'=> array(),
		'transparent'=> false,
		'type'      => 'color',
	);

	// Content-area background color (compiled into the stylesheet).
	$fields[] = array(
		'title'     => __( 'Content Background Color', 'shoestrap' ),
		'desc'      => __( 'Select a background color for your site\'s content area. Default: #ffffff.', 'shoestrap' ),
		'id'        => 'color_body_bg',
		'default'   => '#ffffff',
		'compiler'  => true,
		'customizer'=> array(),
		'transparent'=> false,
		'type'      => 'color',
	);

	// 0-100 opacity so images/patterns can show through the content area.
	$fields[] = array(
		'title'     => __( 'Content Background Color Opacity', 'shoestrap' ),
		'desc'      => __( 'Select the opacity of your background color for the main content area so that background images and patterns will show through. Default: 100 (fully opaque)', 'shoestrap' ),
		'id'        => 'color_body_bg_opacity',
		'default'   => 100,
		'min'       => 0,
		'step'      => 1,
		'max'       => 100,
		'type'      => 'slider',
	);

	// Informational panel introducing the image/pattern fields below.
	$fields[] = array(
		'title'     => 'Background Images',
		'id'        => 'help4',
		'desc'      => __( 'If you want a background image, you can select one here.
You can either upload a custom image, or use one of our pre-defined image patterns.
If you both upload a custom image and select a pattern, your custom image will override the selected pattern.
Please note that the image only applies to the area on the right and left of the main content area,
to ensure better content readability. You can also set the background position to be fixed or scroll!', 'shoestrap' ),
		'type'      => 'info'
	);

	// Master toggle: custom image (overrides any selected pattern).
	$fields[] = array(
		'title'     => __( 'Use a Background Image', 'shoestrap' ),
		'desc'      => __( 'Enable this option to upload a custom background image for your site. This will override any patterns you may have selected. Default: OFF.', 'shoestrap' ),
		'id'        => 'background_image_toggle',
		'default'   => 0,
		'type'      => 'switch'
	);

	$fields[] = array(
		'title'     => __( 'Upload a Custom Background Image', 'shoestrap' ),
		'desc'      => __( 'Upload a Custom Background image using the media uploader, or define the URL directly.', 'shoestrap' ),
		'id'        => 'background_image',
		'required'  => array('background_image_toggle','=',array('1')),
		'default'   => '',
		'type'      => 'media',
		'customizer'=> array(),
	);

	// fixed vs. scroll attachment for the image.
	$fields[] = array(
		'title'     => __( 'Background position', 'shoestrap' ),
		'desc'      => __( 'Changes how the background image or pattern is displayed from scroll to fixed position. Default: Fixed.', 'shoestrap' ),
		'id'        => 'background_fixed_toggle',
		'default'   => 1,
		'on'        => __( 'Fixed', 'shoestrap' ),
		'off'       => __( 'Scroll', 'shoestrap' ),
		'type'      => 'switch',
		'required'  => array('background_image_toggle','=',array('1')),
	);

	// Full-width stretch vs. the custom repeat/align fields below.
	$fields[] = array(
		'title'     => __( 'Background Image Positioning', 'shoestrap' ),
		'desc'      => __( 'Allows the user to modify how the background displays. By default it is full width and stretched to fill the page. Default: Full Width.', 'shoestrap' ),
		'id'        => 'background_image_position_toggle',
		'default'   => 0,
		'required'  => array('background_image_toggle','=',array('1')),
		'on'        => __( 'Custom', 'shoestrap' ),
		'off'       => __( 'Full Width', 'shoestrap' ),
		'type'      => 'switch'
	);

	$fields[] = array(
		'title'     => __( 'Background Repeat', 'shoestrap' ),
		'desc'      => __( 'Select how (or if) the selected background should be tiled. Default: Tile', 'shoestrap' ),
		'id'        => 'background_repeat',
		'required'  => array('background_image_position_toggle','=',array('1')),
		'default'   => 'repeat',
		'type'      => 'select',
		'options'   => array(
			'no-repeat' => __( 'No Repeat', 'shoestrap' ),
			'repeat'    => __( 'Tile', 'shoestrap' ),
			'repeat-x'  => __( 'Tile Horizontally', 'shoestrap' ),
			'repeat-y'  => __( 'Tile Vertically', 'shoestrap' ),
		),
	);

	$fields[] = array(
		'title'     => __( 'Background Alignment', 'shoestrap' ),
		'desc'      => __( 'Select how the selected background should be horizontally aligned. Default: Left', 'shoestrap' ),
		'id'        => 'background_position_x',
		'required'  => array('background_image_position_toggle','=',array('1')),
		'default'   => 'repeat',
		'type'      => 'select',
		'options'   => array(
			'left'   => __( 'Left', 'shoestrap' ),
			'right'  => __( 'Right', 'shoestrap' ),
			'center' => __( 'Center', 'shoestrap' ),
		),
	);

	// Pattern picker (used when no custom image overrides it).
	$fields[] = array(
		'title'     => __( 'Use a Background Pattern', 'shoestrap' ),
		'desc'      => __( 'Select one of the already existing Background Patterns. Default: OFF.', 'shoestrap' ),
		'id'        => 'background_pattern_toggle',
		'default'   => 0,
		'type'      => 'switch'
	);

	$fields[] = array(
		'title'     => __( 'Choose a Background Pattern', 'shoestrap' ),
		'desc'      => __( 'Select a background pattern.', 'shoestrap' ),
		'id'        => 'background_pattern',
		'required'  => array('background_pattern_toggle','=',array('1')),
		'default'   => '',
		'tiles'     => true,
		'type'      => 'image_select',
		'options'   => $bg_pattern_images,
	);

	$section['fields'] = $fields;

	// Allow child themes/plugins to alter the section before registration.
	$section = apply_filters( 'shoestrap_module_background_options_modifier', $section );
	$sections[] = $section;

	return $sections;
}
endif;
// Hook the section into the Redux options and pull in the module's logic.
add_filter( 'redux/options/'.REDUX_OPT_NAME.'/sections', 'shoestrap_module_background_options', 60 );
include_once( dirname(__FILE__) . '/functions.background.php' );
include_once( dirname(__FILE__) . '/variables.php' ); | jloosli/shoestrap-3 | lib/modules/background/module.php | PHP | mit | 6,804 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import root
import j
| tonghuashuai/OnlyBoard | controller/_url.py | Python | mit | 68 |
// Copyright 2012 The Gorilla Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package mux
import (
"errors"
"fmt"
"net/http"
"path"
"regexp"
"github.com/Ghostofpq/bigzelda/Godeps/_workspace/src/github.com/gorilla/context"
)
// NewRouter returns a new router instance with an empty named-route table
// and context clearing enabled (KeepContext false).
func NewRouter() *Router {
	r := &Router{
		namedRoutes: make(map[string]*Route),
		KeepContext: false,
	}
	return r
}
// Router registers routes to be matched and dispatches a handler.
//
// It implements the http.Handler interface, so it can be registered to serve
// requests:
//
//     var router = mux.NewRouter()
//
//     func main() {
//         http.Handle("/", router)
//     }
//
// Or, for Google App Engine, register it in a init() function:
//
//     func init() {
//         http.Handle("/", router)
//     }
//
// This will send all incoming requests to the router.
type Router struct {
	// Configurable Handler to be used when no route matches.
	NotFoundHandler http.Handler
	// Parent route, if this is a subrouter.
	parent parentRoute
	// Routes to be matched, in order.
	routes []*Route
	// Routes by name for URL building.
	namedRoutes map[string]*Route
	// See Router.StrictSlash(). This defines the flag for new routes.
	strictSlash bool
	// If true, do not clear the request context after handling the request
	// (the caller becomes responsible for calling context.Clear).
	KeepContext bool
}
// Match matches registered routes against the request, filling match with
// the first route that accepts it. Routes are tried in registration order.
func (r *Router) Match(req *http.Request, match *RouteMatch) bool {
	for _, candidate := range r.routes {
		if candidate.Match(req, match) {
			return true
		}
	}
	return false
}
// ServeHTTP dispatches the handler registered in the matched route.
//
// When there is a match, the route variables can be retrieved calling
// mux.Vars(request).
func (r *Router) ServeHTTP(w http.ResponseWriter, req *http.Request) {
	// Redirect non-canonical paths, rebuilding the URL through url.String()
	// so the query string and fragment survive the redirect
	// (see http://code.google.com/p/go/issues/detail?id=5252).
	if cleaned := cleanPath(req.URL.Path); cleaned != req.URL.Path {
		u := *req.URL
		u.Path = cleaned
		w.Header().Set("Location", u.String())
		w.WriteHeader(http.StatusMovedPermanently)
		return
	}

	var match RouteMatch
	var handler http.Handler
	if r.Match(req, &match) {
		handler = match.Handler
		setVars(req, match.Vars)
		setCurrentRoute(req, match.Route)
	}
	if handler == nil {
		// Fall back to the router's NotFoundHandler, then the stdlib's.
		if handler = r.NotFoundHandler; handler == nil {
			handler = http.NotFoundHandler()
		}
	}
	if !r.KeepContext {
		defer context.Clear(req)
	}
	handler.ServeHTTP(w, req)
}
// Get returns a route registered with the given name.
func (r *Router) Get(name string) *Route {
	return r.getNamedRoutes()[name]
}

// GetRoute returns a route registered with the given name. This method
// was renamed to Get() and remains here for backwards compatibility.
//
// Deprecated: use Get instead.
func (r *Router) GetRoute(name string) *Route {
	return r.getNamedRoutes()[name]
}

// StrictSlash defines the trailing slash behavior for new routes. The initial
// value is false.
//
// When true, if the route path is "/path/", accessing "/path" will redirect
// to the former and vice versa. In other words, your application will always
// see the path as specified in the route.
//
// When false, if the route path is "/path", accessing "/path/" will not match
// this route and vice versa.
//
// Special case: when a route sets a path prefix using the PathPrefix() method,
// strict slash is ignored for that route because the redirect behavior can't
// be determined from a prefix alone. However, any subrouters created from that
// route inherit the original StrictSlash setting.
func (r *Router) StrictSlash(value bool) *Router {
	r.strictSlash = value
	return r
}
// ----------------------------------------------------------------------------
// parentRoute
// ----------------------------------------------------------------------------

// getNamedRoutes returns the map where named routes are registered.
// The map is shared with the root router: subrouters delegate upward and
// lazily create a map only when they have no parent.
func (r *Router) getNamedRoutes() map[string]*Route {
	if r.namedRoutes == nil {
		if r.parent != nil {
			r.namedRoutes = r.parent.getNamedRoutes()
		} else {
			r.namedRoutes = make(map[string]*Route)
		}
	}
	return r.namedRoutes
}

// getRegexpGroup returns regexp definitions from the parent route, if any.
func (r *Router) getRegexpGroup() *routeRegexpGroup {
	if r.parent != nil {
		return r.parent.getRegexpGroup()
	}
	return nil
}

// buildVars lets each ancestor route apply its BuildVarsFunc to m, starting
// from the root of the router tree.
func (r *Router) buildVars(m map[string]string) map[string]string {
	if r.parent != nil {
		m = r.parent.buildVars(m)
	}
	return m
}
// ----------------------------------------------------------------------------
// Route factories
// ----------------------------------------------------------------------------

// NewRoute registers an empty route.
func (r *Router) NewRoute() *Route {
	route := &Route{parent: r, strictSlash: r.strictSlash}
	r.routes = append(r.routes, route)
	return route
}

// Handle registers a new route with a matcher for the URL path.
// See Route.Path() and Route.Handler().
func (r *Router) Handle(path string, handler http.Handler) *Route {
	return r.NewRoute().Path(path).Handler(handler)
}

// HandleFunc registers a new route with a matcher for the URL path.
// See Route.Path() and Route.HandlerFunc().
func (r *Router) HandleFunc(path string, f func(http.ResponseWriter,
	*http.Request)) *Route {
	return r.NewRoute().Path(path).HandlerFunc(f)
}

// Headers registers a new route with a matcher for request header values.
// See Route.Headers().
func (r *Router) Headers(pairs ...string) *Route {
	return r.NewRoute().Headers(pairs...)
}

// Host registers a new route with a matcher for the URL host.
// See Route.Host().
func (r *Router) Host(tpl string) *Route {
	return r.NewRoute().Host(tpl)
}

// MatcherFunc registers a new route with a custom matcher function.
// See Route.MatcherFunc().
func (r *Router) MatcherFunc(f MatcherFunc) *Route {
	return r.NewRoute().MatcherFunc(f)
}

// Methods registers a new route with a matcher for HTTP methods.
// See Route.Methods().
func (r *Router) Methods(methods ...string) *Route {
	return r.NewRoute().Methods(methods...)
}

// Path registers a new route with a matcher for the URL path.
// See Route.Path().
func (r *Router) Path(tpl string) *Route {
	return r.NewRoute().Path(tpl)
}

// PathPrefix registers a new route with a matcher for the URL path prefix.
// See Route.PathPrefix().
func (r *Router) PathPrefix(tpl string) *Route {
	return r.NewRoute().PathPrefix(tpl)
}

// Queries registers a new route with a matcher for URL query values.
// See Route.Queries().
func (r *Router) Queries(pairs ...string) *Route {
	return r.NewRoute().Queries(pairs...)
}

// Schemes registers a new route with a matcher for URL schemes.
// See Route.Schemes().
func (r *Router) Schemes(schemes ...string) *Route {
	return r.NewRoute().Schemes(schemes...)
}

// BuildVarsFunc registers a new route with a custom function for modifying
// route variables before building a URL.
func (r *Router) BuildVarsFunc(f BuildVarsFunc) *Route {
	return r.NewRoute().BuildVarsFunc(f)
}
// Walk walks the router and all its sub-routers, calling walkFn for each route
// in the tree. The routes are walked in the order they were added. Sub-routers
// are explored depth-first.
func (r *Router) Walk(walkFn WalkFunc) error {
	return r.walk(walkFn, []*Route{})
}

// SkipRouter is used as a return value from WalkFuncs to indicate that the
// router that walk is about to descend down to should be skipped.
var SkipRouter = errors.New("skip this router")

// WalkFunc is the type of the function called for each route visited by Walk.
// At every invocation, it is given the current route, and the current router,
// and a list of ancestor routes that lead to the current route.
type WalkFunc func(route *Route, router *Router, ancestors []*Route) error
// walk is the recursive worker behind Walk. It visits every route with a
// path template, descends into subrouters registered as matchers or as the
// route's handler, and tracks the ancestor chain for nested routers.
//
// Fix: walkFn errors other than SkipRouter were previously discarded; they
// must abort the walk and propagate to the caller, matching Walk's
// documented error contract.
func (r *Router) walk(walkFn WalkFunc, ancestors []*Route) error {
	for _, t := range r.routes {
		// Skip routes without a concrete path template (e.g. bare matchers).
		if t.regexp == nil || t.regexp.path == nil || t.regexp.path.template == "" {
			continue
		}

		err := walkFn(t, r, ancestors)
		if err == SkipRouter {
			continue
		}
		if err != nil {
			return err
		}
		for _, sr := range t.matchers {
			if h, ok := sr.(*Router); ok {
				err := h.walk(walkFn, ancestors)
				if err != nil {
					return err
				}
			}
		}
		if h, ok := t.handler.(*Router); ok {
			// The subrouter is mounted on t: push it onto the ancestor
			// chain for the recursive walk, then pop it again.
			ancestors = append(ancestors, t)
			err := h.walk(walkFn, ancestors)
			if err != nil {
				return err
			}
			ancestors = ancestors[:len(ancestors)-1]
		}
	}
	return nil
}
// ----------------------------------------------------------------------------
// Context
// ----------------------------------------------------------------------------

// RouteMatch stores information about a matched route.
type RouteMatch struct {
	Route   *Route
	Handler http.Handler
	Vars    map[string]string
}

// contextKey keys the values stored in the (gorilla/context) request
// context: route variables and the matched route.
type contextKey int

const (
	varsKey contextKey = iota
	routeKey
)

// Vars returns the route variables for the current request, if any.
func Vars(r *http.Request) map[string]string {
	if rv := context.Get(r, varsKey); rv != nil {
		return rv.(map[string]string)
	}
	return nil
}

// CurrentRoute returns the matched route for the current request, if any.
// This only works when called inside the handler of the matched route
// because the matched route is stored in the request context which is cleared
// after the handler returns, unless the KeepContext option is set on the
// Router.
func CurrentRoute(r *http.Request) *Route {
	if rv := context.Get(r, routeKey); rv != nil {
		return rv.(*Route)
	}
	return nil
}

// setVars stores the matched route variables in the request context.
func setVars(r *http.Request, val interface{}) {
	context.Set(r, varsKey, val)
}

// setCurrentRoute stores the matched route in the request context.
func setCurrentRoute(r *http.Request, val interface{}) {
	context.Set(r, routeKey, val)
}
// ----------------------------------------------------------------------------
// Helpers
// ----------------------------------------------------------------------------
// cleanPath returns the canonical path for p, eliminating . and .. elements.
// Borrowed from the net/http package.
func cleanPath(p string) string {
	if p == "" {
		return "/"
	}
	normalized := p
	if normalized[0] != '/' {
		normalized = "/" + normalized
	}
	cleaned := path.Clean(normalized)
	// path.Clean strips a trailing slash (except on the root); restore it
	// so "/a/b/" stays "/a/b/".
	if normalized[len(normalized)-1] == '/' && cleaned != "/" {
		cleaned += "/"
	}
	return cleaned
}
// uniqueVars returns an error if two slices contain duplicated strings.
func uniqueVars(s1, s2 []string) error {
	for _, a := range s1 {
		for _, b := range s2 {
			if a != b {
				continue
			}
			return fmt.Errorf("mux: duplicated route variable %q", b)
		}
	}
	return nil
}
// checkPairs returns the count of strings passed in, and an error if
// the count is not an even number.
func checkPairs(pairs ...string) (int, error) {
	length := len(pairs)
	if length%2 == 0 {
		return length, nil
	}
	return length, fmt.Errorf(
		"mux: number of parameters must be multiple of 2, got %v", pairs)
}
// mapFromPairsToString converts variadic string parameters to a
// string to string map. Returns an error for an odd number of arguments.
func mapFromPairsToString(pairs ...string) (map[string]string, error) {
	length, err := checkPairs(pairs...)
	if err != nil {
		return nil, err
	}
	result := make(map[string]string, length/2)
	for i := 1; i < length; i += 2 {
		result[pairs[i-1]] = pairs[i]
	}
	return result, nil
}
// mapFromPairsToRegex converts variadic string paramers to a
// string to regex map. Fails on an odd argument count or an invalid
// regular expression.
func mapFromPairsToRegex(pairs ...string) (map[string]*regexp.Regexp, error) {
	length, err := checkPairs(pairs...)
	if err != nil {
		return nil, err
	}
	result := make(map[string]*regexp.Regexp, length/2)
	for i := 1; i < length; i += 2 {
		compiled, compileErr := regexp.Compile(pairs[i])
		if compileErr != nil {
			return nil, compileErr
		}
		result[pairs[i-1]] = compiled
	}
	return result, nil
}
// matchInArray returns true if the given string value is in the array.
func matchInArray(arr []string, value string) bool {
	for i := range arr {
		if arr[i] == value {
			return true
		}
	}
	return false
}
// matchMapWithString returns true if the given key/value pairs exist in a
// given map. An empty expected value only requires the key to be present;
// a non-empty value must additionally appear among the key's values.
func matchMapWithString(toCheck map[string]string, toMatch map[string][]string, canonicalKey bool) bool {
	for key, want := range toCheck {
		if canonicalKey {
			key = http.CanonicalHeaderKey(key)
		}
		values := toMatch[key]
		if values == nil {
			return false
		}
		if want == "" {
			// Key presence is enough when no value was specified.
			continue
		}
		found := false
		for _, candidate := range values {
			if candidate == want {
				found = true
				break
			}
		}
		if !found {
			return false
		}
	}
	return true
}
// matchMapWithRegex returns true if the given key/value pairs exist in a given map compiled against
// the given regex
func matchMapWithRegex(toCheck map[string]*regexp.Regexp, toMatch map[string][]string, canonicalKey bool) bool {
	for key, re := range toCheck {
		if canonicalKey {
			key = http.CanonicalHeaderKey(key)
		}
		values := toMatch[key]
		if values == nil {
			// Key absent (or explicitly nil-valued) in toMatch.
			return false
		}
		if re == nil {
			// A nil regexp only requires the key to exist.
			continue
		}
		matched := false
		for _, v := range values {
			if re.MatchString(v) {
				matched = true
				break
			}
		}
		if !matched {
			return false
		}
	}
	return true
}
| Ghostofpq/bigzelda | Godeps/_workspace/src/github.com/gorilla/mux/mux.go | GO | mit | 13,445 |
module LiveEditor
module API
class Site
  # Fetches the record representing the current site.
  #
  # Options:
  #
  # - `include` - Name of relationship to include with the request. Pass an
  #   array to include multiple.
  def self.current(options = {})
    includes = LiveEditor::API::include_query_string_for(options[:include])
    path = includes.present? ? "/site?#{includes}" : '/site'
    LiveEditor::API::client.get(path)
  end

  # Updates the record representing the current site.
  #
  # Optional attributes:
  #
  # - `title` - Updated title of site.
  # - `subdomain_slug` - Updated subdomain slug for site.
  # - `theme_id` - Updated published theme version for site.
  def self.update(attributes = {})
    data = { type: 'sites', attributes: {} }

    # Plain attributes are sent as dasherized JSON API attribute keys.
    attributes.except(:theme_id).each do |key, value|
      data[:attributes][key.to_s.dasherize] = value
    end

    # The theme travels as a JSON API relationship rather than an attribute.
    if attributes[:theme_id].present?
      data[:relationships] = {
        'theme' => {
          data: { type: 'themes', id: attributes[:theme_id] }
        }
      }
    end

    LiveEditor::API::client.patch('/site', payload: { data: data })
  end
end
end
end
| liveeditor/liveeditor_cli | lib/live_editor/api/site.rb | Ruby | mit | 1,438 |
using System;
using System.Collections;
using System.Collections.Generic;
namespace SkidiKit.Core
{
/// <summary>
/// A fast, array-backed, unordered collection. Removal swaps the last element
/// into the vacated slot, so element order is not stable across removals.
/// </summary>
public class Bag<T> : ICollection<T>
{
    private T[] _items;
    // NOTE(review): set by Seal() but never consulted; sealing currently only
    // trims capacity and does not prevent further growth — confirm intent.
    private bool _isSealed = false;

    /// <summary>Creates an empty bag with the given initial capacity.</summary>
    public Bag(int capacity = 16)
    {
        _items = new T[capacity];
        Count = 0;
    }

    /// <summary>Creates a bag containing a shallow copy of the source's items.</summary>
    public Bag(Bag<T> source): this(source.Count)
    {
        foreach (var item in source)
            Add(item);
    }

    /// <summary>Trims the backing array to the current count and marks the bag sealed.</summary>
    public void Seal()
    {
        Grow(Count);
        _isSealed = true;
    }

    /// <summary>Current size of the backing array (always >= Count).</summary>
    public int Capacity => _items.Length;

    public IEnumerator<T> GetEnumerator()
    {
        return new BagEnumerator<T>(this);
    }

    /// <summary>Appends an item, growing the backing array when it is full.</summary>
    public void Add(T item)
    {
        if (Count == _items.Length)
            Grow();
        _items[Count] = item;
        ++Count;
    }

    /// <summary>Appends every item of the given bag to this one.</summary>
    public void Add(Bag<T> range)
    {
        for (int i = 0, count = range.Count; count > i; i++)
            Add(range[i]);
    }

    /// <summary>Removes all items, clearing references so the GC can reclaim them.</summary>
    public void Clear()
    {
        for (int i = Count - 1; i >= 0; i--)
            _items[i] = default(T);
        Count = 0;
    }

    /// <summary>Number of items currently stored.</summary>
    public int Count { get; private set; }

    public bool IsReadOnly => false;

    /// <summary>
    /// Returns true if the given item is in the bag.
    /// Safe for null items of reference types.
    /// </summary>
    public bool Contains(T item)
    {
        return IndexOf(item) != -1;
    }

    /// <summary>
    /// Returns the index of the item (searching from the end), or -1 if absent.
    /// Uses EqualityComparer so a null item does not throw NullReferenceException.
    /// </summary>
    public int IndexOf(T item)
    {
        var comparer = EqualityComparer<T>.Default;
        for (int i = Count - 1; i >= 0; --i)
            if (comparer.Equals(item, _items[i]))
                return i;
        return -1;
    }

    /// <summary>Removes the first match (searching from the end); returns false if not found.</summary>
    public bool Remove(T item)
    {
        var idx = IndexOf(item);
        if (idx == -1)
            return false;
        Remove(idx);
        return true;
    }

    /// <summary>
    /// Removes and returns the item at the given index by swapping the last
    /// item into its slot. O(1), but does not preserve element order.
    /// </summary>
    public T Remove(int index)
    {
        var result = _items[index];
        --Count;
        _items[index] = _items[Count];
        _items[Count] = default(T);
        return result;
    }

    /// <summary>Invokes all of the given actions, in order, on every item.</summary>
    public void Apply(params Action<T>[] actions)
    {
        if (actions.Length == 0 || Count == 0)
            return;
        Action<T> compiledAction = null;
        foreach (var action in actions)
            compiledAction += action;
        // Every supplied delegate may have been null; nothing to run then.
        if (compiledAction == null) return;
        foreach (var item in this)
            compiledAction(item);
    }

    /// <summary>
    /// Indexed access. Setting past the current capacity grows the backing
    /// array to twice the index and extends Count to cover the new slot.
    /// </summary>
    public T this[int index]
    {
        get { return _items[index]; }
        set
        {
            if (index >= _items.Length)
            {
                Grow(index*2);
                Count = index + 1;
            }
            else if (index >= Count)
                Count = index + 1;
            _items[index] = value;
        }
    }

    // Grows by roughly 1.5x (+1 so a zero-capacity bag still grows).
    private void Grow()
    {
        Grow((int) (_items.Length*1.5) + 1);
    }

    // Reallocates the backing array to the new capacity, preserving items.
    private void Grow(int newCapacity)
    {
        T[] oldItems = _items;
        _items = new T[newCapacity];
        Array.Copy(oldItems, 0, _items, 0, Count);
    }

    IEnumerator IEnumerable.GetEnumerator()
    {
        return GetEnumerator();
    }

    /// <summary>Copies the stored items into the given array at the given offset.</summary>
    public void CopyTo(T[] array, int arrayIndex)
    {
        Array.Copy(_items, 0, array, arrayIndex, Count);
    }

    /// <summary>Returns a new array containing exactly the stored items.</summary>
    public T[] ToArray()
    {
        var result = new T[Count];
        CopyTo(result, 0);
        return result;
    }
}
} | nikita-sky/SkidiKit | SkidiKit/Core/Bag.cs | C# | mit | 3,676 |
#!../../../.env/bin/python
# Python 2 sanity-check script: confirms numpy's column-wise argmax/amax
# agree with hand-computed expectations for a small fixed matrix.
import os
import numpy as np
import time

# 3x3 test matrix. Column maxima sit at rows 0, 1 and 0 with values 1, 2, 3.
a = np.array([
    [1,0,3],
    [0,2,1],
    [0.1,0,0],
    ])
print a

# Spot-check element addressing: a[row][col] is row-major, so a[1][2] == 1.
row = 1
col = 2
print a[row][col]
assert a[row][col] == 1

# Hand-computed per-column results: argmax row index and max value per column.
expected_max_rows = [0, 1, 0]
expected_max_values = [1, 2, 3]
print 'expected_max_rows:', expected_max_rows
print 'expected_max_values:', expected_max_values

# axis=0 reduces over rows, yielding one entry per column; time the argmax.
t0 = time.time()
actual_max_rows = list(np.argmax(a, axis=0))
td = time.time() - t0
actual_max_values = list(np.amax(a, axis=0))
print 'td:', round(td, 4)
print 'actual_max_rows:', actual_max_rows
print 'actual_max_values:', actual_max_values

# Fail loudly if numpy disagrees with the expectations above.
assert actual_max_rows == expected_max_rows
assert actual_max_values == expected_max_values
| chrisspen/homebot | src/test/max_column/test_max_column.py | Python | mit | 690 |
<div id="content">
    <div id="text" style="text-align: center;">
        <h3>Download PWS Vaksinator</h3>
    </div><br><br><br>
    <div id="container" style="text-align: center;">
        <form action="<?php /* Posts to the vaksinator report download action. */ echo site_url()."laporan/downloadpwsvaksinator"?>" method="post">
            <div id="option" class="form">
                <select name="kecamatan" style="width:120px;" class="form-control-static">
                    <option value="janapria">Janapria</option>
                    <option value="sengkol">Sengkol</option>
                </select>
                <select name="year" style="width:120px;" class="form-control-static">
                    <?php
                    // Year dropdown: every year from the current year back to
                    // 2015, newest first.
                    $thn = date("Y");
                    while($thn >= 2015){ ?>
                        <option><?=$thn?></option>
                    <?php $thn--;} ?>
                </select>
                <select name="month" style="width:120px;" class="form-control-static">
                    <option value="januari">Januari</option>
                    <option value="februari">Februari</option>
                    <option value="maret">Maret</option>
                    <option value="april">April</option>
                    <option value="mei">Mei</option>
                    <option value="juni">Juni</option>
                    <option value="juli">Juli</option>
                    <option value="agustus">Agustus</option>
                    <option value="september">September</option>
                    <option value="oktober">Oktober</option>
                    <option value="november">November</option>
                    <option value="desember">Desember</option>
                </select>
                <select name="form" style="width:120px;" class="form-control-static">
                    <option value="bulanan">Laporan Bulanan Hasil Imunisasi Rutin Bayi Desa</option>
                    <option value="analisa">Tabel Analisa Imunisasi Bulanan Pemantauan Wilayah Setempat</option>
                    <option value="uci">Tabel Pematauan Desa Menuju Uci Di Puskesmas</option>
                    <option value="tt">Tabel Rekapitulasi Imunisasi Tt Ibu Hamil Dan Wus Di Puskesmas</option>
                </select>
            </div>
            <br/>
            <br/>
            <?php /* One-time status/error message set by the download controller. */ echo $this->session->flashdata("file");?>
            <br/>
            <div id="sadasd">
                <button class="btn btn-success" type="submit">DOWNLOAD</button>
            </div>
        </form>
    </div>
</div>