answer
stringlengths
15
1.25M
var chalk = require('chalk'); var argv = require('minimist')(process.argv.slice(2)); console.log(argv); //var messages = require(/*'file.json'*/); //var messages = [ // { type: 'error', message: 'fail fail fail!'}, // { type: 'info', message: 'here is a message'} //messages.forEach(function(msg){
# NScD Oak Ridge National Laboratory, European Spallation Source # & Institut Laue - Langevin from __future__ import (absolute_import, division, print_function) import tempfile import unittest import os import json import mantid from mantid.py3compat import mock from sans.gui_logic.presenter.<API key> import <API key> from sans.test_helper.mock_objects import (<API key>, FakeState, <API key>) class <API key>(unittest.TestCase): def <API key>(self): parent_presenter = <API key>() presenter = <API key>(parent_presenter) state = presenter.get_state(3) self.assertTrue(isinstance(state, FakeState)) def <API key>(self): parent_presenter = <API key>() view = <API key>() presenter = <API key>(parent_presenter) presenter.set_view(view) self.assertTrue(view.set_tree.call_count == 1) presenter.on_row_changed() self.assertTrue(view.set_tree.call_count == 2) def <API key>(self): parent_presenter = <API key>() view = <API key>() presenter = <API key>(parent_presenter) presenter.set_view(view) self.assertTrue(view.update_rows.call_count == 1) presenter.on_update_rows() self.assertTrue(view.update_rows.call_count == 2) def <API key>(self): # Arrange parent_presenter = <API key>() view = <API key>() dummy_file_path = os.path.join(tempfile.gettempdir(), "<API key>.json") print(dummy_file_path) view.get_save_location = mock.MagicMock(return_value=dummy_file_path) presenter = <API key>(parent_presenter) presenter.set_view(view) # Act presenter.on_save_state() # Assert self.assertTrue(os.path.exists(dummy_file_path)) with open(dummy_file_path) as f: data = json.load(f) self.assertTrue(data == "dummy_state") if os.path.exists(dummy_file_path): os.remove(dummy_file_path) def <API key>(self): parent_presenter = <API key>() presenter = <API key>(parent_presenter) view = mock.MagicMock() view.get_current_row.result = 1 presenter.set_view(view) parent_presenter.get_state_for_row = mock.MagicMock() parent_presenter.get_state_for_row.side_effect = RuntimeError('Test Error') 
presenter.on_row_changed() parent_presenter.display_warning_box.<API key>('Warning', 'Unable to find files.', 'Test Error') if __name__ == '__main__': unittest.main()
const defaultState = { token: '', loggingIn: false, err: false, info: {} }; function user(state = defaultState, action) { switch(action.type) { case 'FETCH_USER': { return { state, loggingIn: true, err: false, }; } case 'RECEIVE_USER': { const { info, token } = action; return { state, loggingIn: false, err: false, token, info, }; } case 'LOGIN_FAILED': { return { state, loggingIn: false, err: action.err, }; } case 'ADD_FAVORITE': { return { state, info: { state.info, favorites: [...state.info.favorites, action.card] } }; } case 'REMOVE_FAVORITE': { return { state, info: { state.info, favorites: state.info.favorites.filter(card => card._id !== action.card._id) } }; } case 'UPDATE_USER': { return { state, info: action.user }; } case 'ADD_STRESS': { return { state, info: { state.info, stressTestResults: [ state.info.stressTestResults, action.result ] } }; } case 'ADD_AMISOS': { return { state, info: { state.info, amisosResults: [ state.info.amisosResults, action.result ] } }; } default: { return state; } } } export default user;
<form role="search" method="get" class="search-form" action="<?php echo esc_url( home_url( '/' ) ); ?>"> <label> <span class="screen-reader-text"><?php _e('Search for:', 'modelo'); ?></span> <input type="search" class="search-field" placeholder="Search" value="<?php echo get_search_query(); ?>" name="s" title="Search for:"> </label> <button type="submit" class="search-submit fa fa-search" value="<?php echo esc_attr_x( 'Search', 'Submit' ); ?>"></button> </form><!-- .search-form -->
#Generated by bots open source edi translator from UN-docs. from bots.botsconfig import * from edifact import syntax from recordsD96AUN import recorddefs structure = [ {ID: 'UNH', MIN: 1, MAX: 1, LEVEL: [ {ID: 'BGM', MIN: 1, MAX: 1}, {ID: 'DTM', MIN: 1, MAX: 35}, {ID: 'PAI', MIN: 0, MAX: 1}, {ID: 'ALI', MIN: 0, MAX: 5}, {ID: 'IMD', MIN: 0, MAX: 1}, {ID: 'FTX', MIN: 0, MAX: 10}, {ID: 'RFF', MIN: 0, MAX: 99, LEVEL: [ {ID: 'DTM', MIN: 0, MAX: 5}, ]}, {ID: 'NAD', MIN: 0, MAX: 99, LEVEL: [ {ID: 'LOC', MIN: 0, MAX: 25}, {ID: 'FII', MIN: 0, MAX: 5}, {ID: 'RFF', MIN: 0, MAX: 9999, LEVEL: [ {ID: 'DTM', MIN: 0, MAX: 5}, ]}, {ID: 'DOC', MIN: 0, MAX: 5, LEVEL: [ {ID: 'DTM', MIN: 0, MAX: 5}, ]}, {ID: 'CTA', MIN: 0, MAX: 5, LEVEL: [ {ID: 'COM', MIN: 0, MAX: 5}, ]}, ]}, {ID: 'TAX', MIN: 0, MAX: 5, LEVEL: [ {ID: 'MOA', MIN: 0, MAX: 1}, {ID: 'LOC', MIN: 0, MAX: 5}, ]}, {ID: 'CUX', MIN: 0, MAX: 5, LEVEL: [ {ID: 'DTM', MIN: 0, MAX: 5}, ]}, {ID: 'PAT', MIN: 0, MAX: 10, LEVEL: [ {ID: 'DTM', MIN: 0, MAX: 5}, {ID: 'PCD', MIN: 0, MAX: 1}, {ID: 'MOA', MIN: 0, MAX: 1}, {ID: 'PAI', MIN: 0, MAX: 1}, {ID: 'FII', MIN: 0, MAX: 1}, ]}, {ID: 'TDT', MIN: 0, MAX: 10, LEVEL: [ {ID: 'LOC', MIN: 0, MAX: 10, LEVEL: [ {ID: 'DTM', MIN: 0, MAX: 5}, ]}, {ID: 'RFF', MIN: 0, MAX: 9999, LEVEL: [ {ID: 'DTM', MIN: 0, MAX: 5}, ]}, ]}, {ID: 'TOD', MIN: 0, MAX: 5, LEVEL: [ {ID: 'LOC', MIN: 0, MAX: 2}, ]}, {ID: 'PAC', MIN: 0, MAX: 1000, LEVEL: [ {ID: 'MEA', MIN: 0, MAX: 5}, {ID: 'PCI', MIN: 0, MAX: 5, LEVEL: [ {ID: 'RFF', MIN: 0, MAX: 1}, {ID: 'DTM', MIN: 0, MAX: 5}, {ID: 'GIN', MIN: 0, MAX: 5}, ]}, ]}, {ID: 'ALC', MIN: 0, MAX: 9999, LEVEL: [ {ID: 'ALI', MIN: 0, MAX: 5}, {ID: 'RFF', MIN: 0, MAX: 5, LEVEL: [ {ID: 'DTM', MIN: 0, MAX: 5}, ]}, {ID: 'QTY', MIN: 0, MAX: 1, LEVEL: [ {ID: 'RNG', MIN: 0, MAX: 1}, ]}, {ID: 'PCD', MIN: 0, MAX: 1, LEVEL: [ {ID: 'RNG', MIN: 0, MAX: 1}, ]}, {ID: 'MOA', MIN: 0, MAX: 2, LEVEL: [ {ID: 'RNG', MIN: 0, MAX: 1}, ]}, {ID: 'RTE', MIN: 0, MAX: 1, LEVEL: [ {ID: 'RNG', MIN: 0, MAX: 1}, ]}, 
{ID: 'TAX', MIN: 0, MAX: 5, LEVEL: [ {ID: 'MOA', MIN: 0, MAX: 1}, ]}, ]}, {ID: 'RCS', MIN: 0, MAX: 100, LEVEL: [ {ID: 'RFF', MIN: 0, MAX: 5}, {ID: 'DTM', MIN: 0, MAX: 5}, {ID: 'FTX', MIN: 0, MAX: 5}, ]}, {ID: 'AJT', MIN: 0, MAX: 1, LEVEL: [ {ID: 'FTX', MIN: 0, MAX: 5}, ]}, {ID: 'INP', MIN: 0, MAX: 1, LEVEL: [ {ID: 'FTX', MIN: 0, MAX: 5}, ]}, {ID: 'LIN', MIN: 0, MAX: 9999999, LEVEL: [ {ID: 'PIA', MIN: 0, MAX: 25}, {ID: 'IMD', MIN: 0, MAX: 10}, {ID: 'MEA', MIN: 0, MAX: 5}, {ID: 'QTY', MIN: 0, MAX: 5}, {ID: 'PCD', MIN: 0, MAX: 1}, {ID: 'ALI', MIN: 0, MAX: 5}, {ID: 'DTM', MIN: 0, MAX: 35}, {ID: 'GIN', MIN: 0, MAX: 1000}, {ID: 'GIR', MIN: 0, MAX: 1000}, {ID: 'QVR', MIN: 0, MAX: 1}, {ID: 'EQD', MIN: 0, MAX: 1}, {ID: 'FTX', MIN: 0, MAX: 5}, {ID: 'MOA', MIN: 0, MAX: 5, LEVEL: [ {ID: 'CUX', MIN: 0, MAX: 1}, ]}, {ID: 'PAT', MIN: 0, MAX: 10, LEVEL: [ {ID: 'DTM', MIN: 0, MAX: 5}, {ID: 'PCD', MIN: 0, MAX: 1}, {ID: 'MOA', MIN: 0, MAX: 1}, ]}, {ID: 'PRI', MIN: 0, MAX: 25, LEVEL: [ {ID: 'APR', MIN: 0, MAX: 1}, {ID: 'RNG', MIN: 0, MAX: 1}, {ID: 'DTM', MIN: 0, MAX: 5}, ]}, {ID: 'RFF', MIN: 0, MAX: 10, LEVEL: [ {ID: 'DTM', MIN: 0, MAX: 5}, ]}, {ID: 'PAC', MIN: 0, MAX: 10, LEVEL: [ {ID: 'MEA', MIN: 0, MAX: 10}, {ID: 'PCI', MIN: 0, MAX: 10, LEVEL: [ {ID: 'RFF', MIN: 0, MAX: 1}, {ID: 'DTM', MIN: 0, MAX: 5}, {ID: 'GIN', MIN: 0, MAX: 10}, ]}, ]}, {ID: 'LOC', MIN: 0, MAX: 9999, LEVEL: [ {ID: 'QTY', MIN: 0, MAX: 100}, {ID: 'DTM', MIN: 0, MAX: 5}, ]}, {ID: 'TAX', MIN: 0, MAX: 99, LEVEL: [ {ID: 'MOA', MIN: 0, MAX: 1}, {ID: 'LOC', MIN: 0, MAX: 5}, ]}, {ID: 'NAD', MIN: 0, MAX: 20, LEVEL: [ {ID: 'LOC', MIN: 0, MAX: 5}, {ID: 'RFF', MIN: 0, MAX: 5, LEVEL: [ {ID: 'DTM', MIN: 0, MAX: 5}, ]}, {ID: 'DOC', MIN: 0, MAX: 5, LEVEL: [ {ID: 'DTM', MIN: 0, MAX: 5}, ]}, {ID: 'CTA', MIN: 0, MAX: 5, LEVEL: [ {ID: 'COM', MIN: 0, MAX: 5}, ]}, ]}, {ID: 'ALC', MIN: 0, MAX: 15, LEVEL: [ {ID: 'ALI', MIN: 0, MAX: 5}, {ID: 'DTM', MIN: 0, MAX: 5}, {ID: 'QTY', MIN: 0, MAX: 1, LEVEL: [ {ID: 'RNG', MIN: 0, MAX: 1}, ]}, 
{ID: 'PCD', MIN: 0, MAX: 1, LEVEL: [ {ID: 'RNG', MIN: 0, MAX: 1}, ]}, {ID: 'MOA', MIN: 0, MAX: 2, LEVEL: [ {ID: 'RNG', MIN: 0, MAX: 1}, ]}, {ID: 'RTE', MIN: 0, MAX: 1, LEVEL: [ {ID: 'RNG', MIN: 0, MAX: 1}, ]}, {ID: 'TAX', MIN: 0, MAX: 5, LEVEL: [ {ID: 'MOA', MIN: 0, MAX: 1}, ]}, ]}, {ID: 'TDT', MIN: 0, MAX: 10, LEVEL: [ {ID: 'LOC', MIN: 0, MAX: 10, LEVEL: [ {ID: 'DTM', MIN: 0, MAX: 5}, ]}, ]}, {ID: 'TOD', MIN: 0, MAX: 5, LEVEL: [ {ID: 'LOC', MIN: 0, MAX: 2}, ]}, {ID: 'RCS', MIN: 0, MAX: 100, LEVEL: [ {ID: 'RFF', MIN: 0, MAX: 5}, {ID: 'DTM', MIN: 0, MAX: 5}, {ID: 'FTX', MIN: 0, MAX: 5}, ]}, ]}, {ID: 'UNS', MIN: 1, MAX: 1}, {ID: 'CNT', MIN: 0, MAX: 10}, {ID: 'MOA', MIN: 1, MAX: 100, LEVEL: [ {ID: 'RFF', MIN: 0, MAX: 1, LEVEL: [ {ID: 'DTM', MIN: 0, MAX: 5}, ]}, ]}, {ID: 'TAX', MIN: 0, MAX: 10, LEVEL: [ {ID: 'MOA', MIN: 0, MAX: 2}, ]}, {ID: 'ALC', MIN: 0, MAX: 15, LEVEL: [ {ID: 'ALI', MIN: 0, MAX: 1}, {ID: 'MOA', MIN: 0, MAX: 2}, ]}, {ID: 'UNT', MIN: 1, MAX: 1}, ]}, ]
using CP77.CR2W.Reflection; using FastMember; using static CP77.CR2W.Types.Enums; namespace CP77.CR2W.Types { [REDMeta] public class <API key> : CVariable { [Ordinal(0)] [RED("face")] public <API key> Face { get; set; } [Ordinal(1)] [RED("tongue")] public <API key> Tongue { get; set; } [Ordinal(2)] [RED("eyes")] public <API key> Eyes { get; set; } public <API key>(CR2WFile cr2w, CVariable parent, string name) : base(cr2w, parent, name) { } } }
#ifndef NSLEEP_H_INCLUDED #define NSLEEP_H_INCLUDED void pm_sleep(unsigned int const milliseconds); #endif
# coding: utf-8 from pprint import pformat from six import iteritems import re class <API key>(object): """ NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ def __init__(self, x=None, y=None, z=None): """ <API key> - a model defined in Swagger :param dict swaggerTypes: The key is attribute name and the value is attribute type. :param dict attributeMap: The key is attribute name and the value is json key in definition. """ self.swagger_types = { 'x': 'float', 'y': 'float', 'z': 'float' } self.attribute_map = { 'x': 'x', 'y': 'y', 'z': 'z' } self._x = x self._y = y self._z = z @property def x(self): """ Gets the x of this <API key>. x number :return: The x of this <API key>. :rtype: float """ return self._x @x.setter def x(self, x): """ Sets the x of this <API key>. x number :param x: The x of this <API key>. :type: float """ if x is None: raise ValueError("Invalid value for `x`, must not be `None`") self._x = x @property def y(self): """ Gets the y of this <API key>. y number :return: The y of this <API key>. :rtype: float """ return self._y @y.setter def y(self, y): """ Sets the y of this <API key>. y number :param y: The y of this <API key>. :type: float """ if y is None: raise ValueError("Invalid value for `y`, must not be `None`") self._y = y @property def z(self): """ Gets the z of this <API key>. z number :return: The z of this <API key>. :rtype: float """ return self._z @z.setter def z(self, z): """ Sets the z of this <API key>. z number :param z: The z of this <API key>. 
:type: float """ if z is None: raise ValueError("Invalid value for `z`, must not be `None`") self._z = z def to_dict(self): """ Returns the model properties as a dict """ result = {} for attr, _ in iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """ Returns the string representation of the model """ return pformat(self.to_dict()) def __repr__(self): """ For `print` and `pprint` """ return self.to_str() def __eq__(self, other): """ Returns true if both objects are equal """ if not isinstance(other, <API key>): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """ Returns true if both objects are not equal """ return not self == other
#!/usr/bin/env python '''A relatively simple distributed network implementation, using async SGD.''' from fastnet import net, layer, data, parser, weights from fastnet.util import EZTimer from mpi4py import MPI import ctypes import cudaconv2 import numpy as np import os WORLD = MPI.COMM_WORLD cudaconv2.init(WORLD.Get_rank()) print 'CUDA', os.environ.get('MV2_USE_CUDA') MASTER = 0 WORKERS = range(1, WORLD.Get_size()) batch_size = 128 data_dir = '/ssd/nn-data/imagenet/' data_provider = 'imagenet' checkpoint_dir = './checkpoint' param_file = 'config/imagenet.cfg' train_range = range(101, 1301) test_range = range(1, 101) data_provider = 'imagenet' #train_range = range(1, 41) #test_range = range(41, 49) train_dp = data.get_by_name(data_provider)(data_dir,train_range) test_dp = data.get_by_name(data_provider)(data_dir, test_range) model = parser.parse_config_file(param_file) network = net.FastNet((3, 224, 224, 1)) network = parser.load_model(network, model) class Tags(object): GRAD_SEND = 100 WEIGHT_UPDATE = 200 def tobuffer(gpuarray): #print 'BUFFER: 0x%x' % gpuarray.ptr #print 'SIZE: %s, %s, %s' % (gpuarray.size, gpuarray.shape, gpuarray.dtype) dtype = np.dtype(gpuarray.dtype) buf = ctypes.pythonapi.<API key>(ctypes.c_long(gpuarray.ptr), gpuarray.size * dtype.itemsize) return ctypes.cast(buf, ctypes.py_object).value def wait_for_all(reqs): for r in reqs: r.Wait() class Worker(object): def __init__(self): self.id = WORLD.Get_rank() def train(self): batch = train_dp.get_next_batch(batch_size) data, labels = network.prepare_for_train(batch.data, batch.labels) prediction = network.fprop(data) cost, correct = network.get_cost(labels, prediction) network.bprop(labels) self.send_grads() self.recv_weights() print cost, correct def send_grads(self): _ = EZTimer('send grads') sends = [] for idx, w in enumerate(layer.WEIGHTS): sends.append(WORLD.Isend(tobuffer(w.grad), dest=MASTER, tag=Tags.GRAD_SEND + idx)) wait_for_all(sends) def recv_weights(self): _ = EZTimer('recv weights') 
for idx, w in enumerate(layer.WEIGHTS): WORLD.Recv(tobuffer(w.wt), source=MASTER, tag=Tags.WEIGHT_UPDATE + idx) def run(self): while 1: self.train() self.send_grads() self.recv_weights() class WorkerProxy(object): def __init__(self, idx, wts): self.idx = idx self.wts = wts self.recvs = [] def start_read(self): assert len(self.recvs) == 0 for idx, w in enumerate(self.wts): self.recvs.append(WORLD.Irecv(tobuffer(w.grad), source=self.idx, tag=Tags.GRAD_SEND + idx)) def send_weights(self, wts): _ = EZTimer('send weights') for idx, w in enumerate(wts): WORLD.Send(tobuffer(w.wt), dest=self.idx, tag=Tags.WEIGHT_UPDATE + idx) def test(self): return np.all([r.Test() for r in self.recvs]) def wait(self): [r.Wait() for r in self.recvs] self.recvs = [] def try_fetch(self): if len(self.recvs) == 0: self.start_read() if not self.test(): return False self.wait() self.start_read() return True class Master(object): def __init__(self): self._workers = {} self._master_wts = layer.WEIGHTS self._requests = [] for w in WORKERS: self._workers[w] = WorkerProxy(w, layer.WEIGHTS.clone()) def update(self, worker_wts): _ = EZTimer('update') for idx, worker_wt in enumerate(worker_wts): master_wt = self._master_wts[idx] weights.update(master_wt.wt, worker_wt.grad, master_wt.incr, master_wt.epsilon, master_wt.momentum, master_wt.decay, 128) def run(self): while 1: #print 'Fetching gradients...' for w in self._workers.values(): if w.try_fetch(): self.update(w.wts) w.send_weights(self._master_wts) #print 'Sending weight updates...' if __name__ == '__main__': if WORLD.Get_rank() == 0: master = Master() master.run() else: worker = Worker() worker.run()
<!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <title>Array Cardio</title> </head> <body> <p><em>Have a look at the JavaScript Console</em></p> <script src="app.js" type="text/javascript"></script> </body> </html>
<!DOCTYPE HTML PUBLIC "- <!--NewPage <HTML> <HEAD> <!-- Generated by javadoc (build 1.6.0_45) on Sun Jun 09 12:16:28 GMT+05:30 2013 --> <META http-equiv="Content-Type" content="text/html; charset=utf-8"> <TITLE> org.apache.solr.analysis (Solr 4.3.1 API) </TITLE> <META NAME="date" CONTENT="2013-06-09"> <LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../stylesheet.css" TITLE="Style"> <SCRIPT type="text/javascript"> function windowTitle() { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="org.apache.solr.analysis (Solr 4.3.1 API)"; } } </SCRIPT> <NOSCRIPT> </NOSCRIPT> </HEAD> <BODY BGCOLOR="white" onload="windowTitle();"> <HR> <A NAME="navbar_top"></A> <A HREF="#skip-navbar_top" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_top_firstrow"></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Package</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-use.html"><FONT CLASS="NavBarFont1"><B>Use</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> 
</EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;<A HREF="../../../../org/apache/solr/package-summary.html"><B>PREV PACKAGE</B></A>&nbsp; &nbsp;<A HREF="../../../../org/apache/solr/cloud/package-summary.html"><B>NEXT PACKAGE</B></A></FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../index.html?org/apache/solr/analysis/package-summary.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="package-summary.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <! if(window==top) { document.writeln('<A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } </SCRIPT> <NOSCRIPT> <A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_top"></A> <HR> <H2> Package org.apache.solr.analysis </H2> Factories for Mock analysis classes that can be used in test configurations. <P> <B>See:</B> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;<A HREF="#package_description"><B>Description</B></A> <P> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor"> <TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2"> <B>Class Summary</B></FONT></TH> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD WIDTH="15%"><B><A HREF="../../../../org/apache/solr/analysis/<API key>.html" title="class in org.apache.solr.analysis"><API key></A></B></TD> <TD>Factory for <CODE>MockCharFilter</CODE> for testing purposes.</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD WIDTH="15%"><B><A HREF="../../../../org/apache/solr/analysis/<API key>.html" title="class in org.apache.solr.analysis"><API key></A></B></TD> <TD>Factory for <CODE>MockTokenFilter</CODE> for testing purposes.</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD WIDTH="15%"><B><A HREF="../../../../org/apache/solr/analysis/<API key>.html" title="class in 
org.apache.solr.analysis"><API key></A></B></TD> <TD>Factory for <CODE>MockTokenizer</CODE> for testing purposes.</TD> </TR> </TABLE> &nbsp; <P> <A NAME="package_description"></A><H2> Package org.apache.solr.analysis Description </H2> <P> Factories for Mock analysis classes that can be used in test configurations. <P> <P> <DL> </DL> <HR> <A NAME="navbar_bottom"></A> <A HREF="#skip-navbar_bottom" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="<API key>"></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Package</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-use.html"><FONT CLASS="NavBarFont1"><B>Use</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;<A HREF="../../../../org/apache/solr/package-summary.html"><B>PREV PACKAGE</B></A>&nbsp; &nbsp;<A HREF="../../../../org/apache/solr/cloud/package-summary.html"><B>NEXT PACKAGE</B></A></FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT 
SIZE="-2"> <A HREF="../../../../index.html?org/apache/solr/analysis/package-summary.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="package-summary.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <! if(window==top) { document.writeln('<A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } </SCRIPT> <NOSCRIPT> <A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_bottom"></A> <HR> <i>Copyright &copy; 2000-2013 Apache Software Foundation. All Rights Reserved.</i> <script src='../../../../prettify.js' type='text/javascript'></script> <script type='text/javascript'> (function(){ var oldonload = window.onload; if (typeof oldonload != 'function') { window.onload = prettyPrint; } else { window.onload = function() { oldonload(); prettyPrint(); } } })(); </script> </BODY> </HTML>
package wowhead_itemreader; public interface Sheme { String getQuery(WoWHeadData data); }
<?php // Moodle is free software: you can redistribute it and/or modify // (at your option) any later version. // Moodle is distributed in the hope that it will be useful, // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the defined('MOODLE_INTERNAL') or die; require_once("$CFG->dirroot/mod/dataform/entries_class.php"); class <API key> { public static function run($df) { global $DB; // Get gradebook users if (!$users = $df->get_gradebook_users()) { return; } // Construct entries data $data = (object) array('eids' => array()); $fieldid = $df::_USERID; $entryid = -1; foreach ($users as $userid => $unused) { $data->eids[$entryid] = $entryid; $data->{"field_{$fieldid}_{$entryid}"} = $userid; $entryid } // Add entries $em = new dataform_entries($df); $processed = $em->process_entries('update', $data->eids, $data, true); if (is_array($processed)) { list($strnotify, $processedeids) = $processed; if ($entriesprocessed = ($processedeids ? count($processedeids) : 0)) { return array('good', $strnotify); } } return array('bad', get_string('entriesupdated', 'dataform', get_string('no'))); } }
title: "आईपीएल: मुफ्त पासों से कानपूर के मैचों में घाटा" layout: item category: ["sports"] date: 2016-05-26T15:43:50.995Z image: <API key>.jpg <p>कानपुर। ग्रीन पार्क में यहां पहली बार हुए दो आईपीएल मैचों में कुल छह करोड़ 80 लाख रुपये के टिकट बिके थे। जबकि गुजरात लायंस ने टिकट बिक्री का लक्ष्य आठ करोड़ साठ लाख रुपये रखा गया था। इसका कारण जिला प्रशासन द्वारा आईपीएल मैचों के लिए प्रशासन से बहुत अधिक मात्रा में नि:शुल्क मैच पास लेना बताया जा रहा है। उत्तर प्रदेश क्रिकेट संघ के एक अधिकारी ने बताया कि 21 मई को गुजरात लायंस और मुंबई इंडियंस के बीच मुकाबले के सबसे अधिक टिकट करीब 18000 बिके थे। उसके विपरीत गुजरात लायंस और कोलकाता नायट राइडर्स के 19 मई को होने वाले मुकाबले में करीब 17200 टिकट बिके थे। टिकटों की बिक्री से गुजरात लायंस के मालिक को छह करोड़ 80 लाख रुपये मिले जबकि उनका टारगेट दोनों मैचों से आठ करोड़ 60 लाख टिकट बेंचने का था। अपने टारगेट से एक करोड़ 60 लाख रुपये के कम टिकट बिकने के बावजूद गुजरात लायंस ने अगले सीजन में ग्रीन पार्क में तीन मैच आईपीएल के कराने का आश्वासन दिया है कि क्योंकि यहां की जनता ने उन्हें बहुत प्यार और सम्मान दिया है। सूत्रों के मुताबिक किंग्स इलेवन पंजाब की मालकिन प्रीति जिंटा भी कानपुर में मैच कराने की इच्छुक है और उन्होंने अगले आईपीएल सत्र के लिए कुछ मैच कानपुर में करवाने की इच्छा जताई है। यूपीसीए के अधिकारियों के अनुसार टिकटो की बिक्री और बढ़ जाती लेकिन जिला प्रशासन के एडीएम स्तर के अधिकारियों ने अपने खास लोगों को मुफ्त मैच दिखाने के लिए भारी संख्या में पास ले लिए थे। इसके अलावा एक एडीएम स्तर के अधिकारियों ने तो अपने कर्मचारियों और अपने खास लोगों को मीडिया गैलरी में मैच शुरू होने से पहले ही मैच देखने के लिए बैठा दिया था जिसकी वजह से मीडिया को भारी परेशानी का सामना उठाना पड़ा था। मीडिया गैलरी गैर मीडिया लोगों के भर जाने के कारण कई मीडिया कर्मियों को स्टेडियम के अंदर प्रवेश भी नहीं मिला।</p>
package com.l2jglobal.gameserver.model.events.impl.character.npc; import com.l2jglobal.gameserver.model.actor.L2Npc; import com.l2jglobal.gameserver.model.events.EventType; import com.l2jglobal.gameserver.model.events.impl.IBaseEvent; /** * @author UnAfraid */ public class <API key> implements IBaseEvent { private final L2Npc _npc; public <API key>(L2Npc npc) { _npc = npc; } public L2Npc getNpc() { return _npc; } @Override public EventType getType() { return EventType.<API key>; } }
package mechanics class Item(val id: Int) { /** * If the item lies loose on the map, it has its own location, stored in this variable * wrapped in an Option. An item can also be located elsewhere, for example in * a player's inventory, in which case it lacks actual location and the variable * has the value None. * * NB! The current version of the game does not fully support items. * This is for future use. */ var location: Option[Position] = None /** * Places the item on the given coordinates on the map. */ def place(pos: Position) = { this.location = Some(pos) } }
package de.bund.bfr.knime.gis.views.graphvisualizer; import java.awt.BorderLayout; import java.awt.Dimension; import org.knime.core.node.BufferedDataTable; import org.knime.core.node.<API key>; import org.knime.core.node.NodeSettingsRO; import org.knime.core.node.NodeSettingsWO; import org.knime.core.node.<API key>; import org.knime.core.node.port.PortObject; import de.bund.bfr.knime.gis.views.<API key>; import de.bund.bfr.knime.gis.views.canvas.GraphCanvas; import de.bund.bfr.knime.gis.views.canvas.util.Naming; import de.bund.bfr.knime.ui.Dialogs; /** * <code>NodeDialog</code> for the "GraphVisualizer" Node. * * @author Christian Thoens */ public class <API key> extends <API key> { private GraphCanvas canvas; private BufferedDataTable nodeTable; private BufferedDataTable edgeTable; private <API key> set; /** * New pane for configuring the GraphVisualizer node. */ protected <API key>() { set = new <API key>(); } @Override protected void loadSettingsFrom(NodeSettingsRO settings, PortObject[] input) throws <API key> { nodeTable = (BufferedDataTable) input[0]; edgeTable = (BufferedDataTable) input[1]; set.getGraphSettings().loadSettings(settings); updateCanvas(false); resized = false; } @Override protected void saveSettingsTo(NodeSettingsWO settings) throws <API key> { set.getGraphSettings().setFromCanvas(canvas, resized); set.getGraphSettings().saveSettings(settings); } @Override protected void inputButtonPressed() { <API key> dialog = new <API key>(inputButton, nodeTable.getSpec(), edgeTable.getSpec(), set); dialog.setVisible(true); if (dialog.isApproved()) { set.getGraphSettings().setFromCanvas(canvas, resized); updateCanvas(true); } } private void updateCanvas(boolean showWarning) { if (canvas != null) { panel.remove(canvas); } <API key> creator = new <API key>(nodeTable, edgeTable, set); try { canvas = creator.createGraphCanvas(); } catch (<API key> e) { canvas = new GraphCanvas(true, Naming.DEFAULT_NAMING); canvas.setCanvasSize(new Dimension(400, 600)); if 
(showWarning) { Dialogs.showErrorMessage(panel, e.getMessage()); } } panel.add(canvas, BorderLayout.CENTER); panel.revalidate(); } }
#include "stdafx.h" #include <string> #include <vector> #include <map> using namespace std; typedef map<const wstring, const wstring> CMapLang; int wmain(int argc, WCHAR* argv[]) { vector<WCHAR> wcAbb(4); vector<WCHAR> wcName(20); CMapLang mapLang; for (INT i = 1; i < 1000; i++) { GetLocaleInfo(MAKELCID(i, 0), <API key>, &wcAbb.front(), (int)wcAbb.size()); GetLocaleInfo(MAKELCID(i, 0), LOCALE_SENGCOUNTRY, &wcName.front(), (int)wcName.size()); mapLang.insert(make_pair(&wcAbb.front(), &wcName.front())); } for (CMapLang::iterator iterLang = mapLang.begin(); iterLang != mapLang.end(); iterLang++) { printf("%S %S\n", iterLang->first.c_str(), iterLang->second.c_str()); } return 0; }
package de.xonical.mvplayer; import java.io.File; import java.io.FilenameFilter; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.ListIterator; import org.pmw.tinylog.Logger; //import org.apache.commons.lang.StringUtils; import de.xonical.mvplayer.presentation.Directory; public class RootDirectory { private File directory; private boolean isValidDirectory; private boolean isValidPath; private boolean isPathADirectory; private boolean isNotEmptyDirectory; public RootDirectory(Path rootPath) { checkIsValidPath(rootPath); directory = rootPath.toFile(); // All the requirements are given, therefore it's a valid directory if (!isValidPath && !isPathADirectory && !isNotEmptyDirectory) { this.isValidDirectory = true; } else { this.isValidDirectory = false; } } /** * First check if it exists, then if it's a directory and finally if it's * not empty. 
The order and is important to avoid an Exception * * @param rootPath */ private void checkIsValidPath(Path rootPath) { if (Files.exists(rootPath)) { isValidPath = true; } else { isValidPath = false; Logger.warn("Path not exists"); return; } if (rootPath.toFile().isDirectory()) { isPathADirectory = true; } else { isPathADirectory = false; Logger.warn("Path is not a directory"); return; } if (rootPath.toFile().listFiles().length > 0) { isNotEmptyDirectory = true; } else { isNotEmptyDirectory = false; Logger.warn("Directory is empty"); return; } } public boolean isValidDirectory() { Logger.debug("Root Directory is valid: " + isValidDirectory); return this.isValidDirectory; } /** * @return */ public List<File> <API key>() { File[] listSubDirectories = directory.listFiles((File f) -> { return f.isDirectory(); }); return Arrays.asList(listSubDirectories); } private List<File> <API key>() { // File[] listSubDirectories = directory.listFiles((File f) -> { // return f.isFile(); // return Arrays.asList(listSubDirectories); // WWW // FilenameFilter filter = (f, s) -> f.isDirectory(); // File[] listSubDirectories = directory.listFiles(filter); // return Arrays.asList(listSubDirectories); return null; } /** * @param firstSubDirectory * @return */ public List<File> <API key>(File firstSubDirectory) { File[] <API key> = firstSubDirectory .listFiles((File f) -> { return f.isFile(); }); return Arrays.asList(<API key>); } public static void main(String[] args) { Path root = Paths.get("z:/foo/bar/foobar/dummyPath"); Path root2 = Paths.get("g:/MediathekView"); RootDirectory rootDirectory = new RootDirectory(root2); System.out.println(rootDirectory.isValidPath); } }
package cuina.world;

import java.io.Serializable;

/**
 * A "motor" that drives some behavior in the Cuina world system. Extends
 * {@link Serializable}, so every implementation must be serializable.
 */
public interface CuinaMotor extends Serializable
{
	/**
	 * Key for this extension point. NOTE(review): presumably used to look up
	 * motor implementations in an extension registry -- confirm against the
	 * code that reads this constant.
	 */
	public static final String EXTENSION_KEY = "motor";

	/**
	 * Advances this motor by one step.
	 * NOTE(review): call frequency/tick source is not visible here -- confirm.
	 */
	public void update();
}
/**
 * A red button with an 'X' that, when clicked, will remove an atom (with a bond type) or a lone pair from the main molecule
 *
 * @author Jonathan Olson <jonathan.olson@colorado.edu>
 */

import { Shape } from '../../../kite/js/imports.js';
import merge from '../../../phet-core/js/merge.js';
import { Path } from '../../../scenery/js/imports.js';
import <API key> from '../../../sun/js/buttons/<API key>.js';
import <API key> from '../common/view/<API key>.js';
import moleculeShapes from '../moleculeShapes.js';

// Width/height (in view coordinates) of the 'X' icon drawn on the button.
const CROSS_SIZE = 10;

// Two diagonal strokes forming the 'X' icon. Module-level singleton, so the
// same Path node is used as content for every button instance.
const crossNode = new Path( new Shape().moveTo( 0, 0 ).lineTo( CROSS_SIZE, CROSS_SIZE ).moveTo( 0, CROSS_SIZE ).lineTo( CROSS_SIZE, 0 ), {
  stroke: '#fff',
  lineWidth: 3
} );

class <API key> extends <API key> {
  /**
   * @param {Object} [options] - merged over the content/margin defaults and
   *                             forwarded to the push-button supertype
   */
  constructor( options ) {
    super( merge( {
      content: crossNode,
      xMargin: 5,
      yMargin: 5
    }, options ) );

    // Keep the button's base color in sync with the active color profile.
    // NOTE(review): this link is never unlinked, so instances appear to be
    // permanent -- confirm intended lifetime.
    <API key>.<API key>.link( color => {
      this.baseColor = color;
    } );
  }
}

moleculeShapes.register( '<API key>', <API key> );
export default <API key>;
## Picocaine server API introduction

- There are some rules you need to understand before developing.

1. Every result is a JSON string, and it always contains a parameter named 'error'. If that parameter's value is 0 (type integer), the request succeeded; any other value means it failed. On success, you can find the data you want in the 'data' parameter, which is an object. On failure, there will be a parameter named 'msg' (type string) telling you what happened.
2. All of the data you send to the server must also be a JSON string for POST-method APIs, and you should store it in a parameter named 'data'. If you want to send a photo to the server, you need two parameters: one is your 'data', and the other is your photo (type file); the photo's parameter name is 'photo'.

- The APIs are as follows

| Name | URL | Method | Request Data | Response Data | Error Examples | Note |
| --- | --- | --- | --- | --- | --- | --- |
| Upload Photo to server | /uploadPhoto | POST | data:{"tags":["", ""], "passCode":"lovecfc"} photo:(a photo) | {"error":0,"data":{"picurl":"<API key>.png"}} | {"error":1002,"msg":"Photo already exists."}, {"error":1000 ,"msg":"Upload photo faild"} | |
| Get photo | /getPhoto | GET | picurl:xx.jpg, quality:small | A pic | {"error":1003,"msg":"Photo not exists."} | The quality can be one of: 'small', 'source' |
| Search Photo By Tag | /searchPhoto | POST | {"tag":"haha", "passCode":"lovecfc", "page":0, "pageSize":10} | {"error":0,"data":[{"picurl":"<API key>.png","tags":["hahaha"]}]} | | The pageSize must be lower than 100. The 'page' parameter selects the current page, starting at 0. |
| Get hot tags | /getHotTags | POST | {"pageSize":100, "passCode":"lovecfc"} | {"error":0,"data":[{"tag":"Test"},{"tag":"Test1"},{"tag":"haha"},{"tag":"Test123"}]} | | The 'pageSize' must be lower than 100. |
package de.metas.procurement.webui.model;

import javax.persistence.Entity;
import javax.persistence.Table;
import javax.persistence.UniqueConstraint;

import com.google.gwt.thirdparty.guava.common.base.Objects.ToStringHelper;

/**
 * Translation record for a {@code Product}: holds the product name for one
 * language. The {@code product_trl_uq} unique constraint enforces at most one
 * row per (record_id, language) pair.
 */
@Entity
@Table(name = "product_trl"
		, uniqueConstraints = @UniqueConstraint(name = "product_trl_uq", columnNames = { "record_id", "language" })
)
@SuppressWarnings("serial")
public class ProductTrl extends <API key><Product>
{
	// Translated product name for this record's language.
	private String name;

	@Override
	protected void toString(final ToStringHelper toStringHelper)
	{
		// Include the base class fields, then our own.
		super.toString(toStringHelper);
		toStringHelper
				.add("name", name);
	}

	public String getName()
	{
		return name;
	}

	public void setName(final String name)
	{
		this.name = name;
	}
}
__author__ = 'joseph'

import statistics

import numpy as np


class AccelData(object):
    """Container for a block of static accelerometer samples.

    The sample matrix is indexed as ``Accel[axis, sample]``, i.e. shape
    (3, N) with rows x/y/z -- TODO confirm against callers.
    """

    def __init__(self, Accel):
        # Static accelerometer data, shape (3, N) -- see class docstring.
        self.Accel = Accel

    def applyCalib(self, params, Accel):
        """Apply an accelerometer calibration to ``Accel`` and return the result.

        ``params`` is a mapping with scale factors ('kx', 'ky', 'kz'),
        misalignment angles ('ax', 'ay', 'az') and biases ('bx', 'by', 'bz').

        NOTE: the bias subtraction is performed in place, so the caller's
        array is mutated; the misalignment/scaling product is returned as a
        new array.
        """
        ax = params['ax']
        ay = params['ay']
        az = params['az']
        scaling_Matrix = np.diag([params['kx'], params['ky'], params['kz']])
        misalignment_Matrix = np.array([[1.0, -ax, ay],
                                        [0, 1.0, -az],
                                        [0, 0, 1.0]])
        adjustment_matrix = np.dot(misalignment_Matrix, scaling_Matrix)
        bx = params['bx']
        by = params['by']
        bz = params['bz']
        # Remove per-axis biases (in place), then correct misalignment/scale.
        Accel[0, :] = Accel[0, :] - bx
        Accel[1, :] = Accel[1, :] - by
        Accel[2, :] = Accel[2, :] - bz
        Accel = np.dot(adjustment_matrix, Accel)
        return Accel


class GyroData(object):
    """Container for gyroscope sample blocks plus the per-axis gyro biases."""

    def __init__(self, Gyro, bx, by, bz):
        self.bx = bx  # x-axis bias
        self.by = by  # y-axis bias
        self.bz = bz  # z-axis bias
        self.Gyro = Gyro

    def applyCalib(self, params, Gyro):
        """Apply a gyroscope calibration (scale + misalignment + bias) to ``Gyro``.

        ``params`` carries scale factors ('sx', 'sy', 'sz') and the six
        misalignment terms ('gamma_*'). NOTE: like AccelData.applyCalib, the
        bias subtraction mutates the caller's array in place.
        """
        scaling_Matrix = np.diag([params['sx'], params['sy'], params['sz']])
        misalignment_Matrix = np.array(
            [[1, params['gamma_yz'], params['gamma_zy']],
             [params['gamma_xz'], 1, params['gamma_zx']],
             [params['gamma_xy'], params['gamma_yx'], 1]])
        adjustment_matrix = np.dot(misalignment_Matrix, scaling_Matrix)
        Gyro[0, :] = Gyro[0, :] - self.bx
        Gyro[1, :] = Gyro[1, :] - self.by
        Gyro[2, :] = Gyro[2, :] - self.bz
        Gyro = np.dot(adjustment_matrix, Gyro)
        return Gyro


class RollingStatistic(object):
    """Rolling mean/variance over a fixed-size window.

    The first ``window_size`` samples preload the window; once it is full the
    mean and variance are maintained incrementally as the window slides.
    """

    def __init__(self, window_size):
        self.N = window_size
        self.window = window_size * [0]
        self.average = 0
        self.variance = 0
        self.stddev = 0
        self.index = 0

    def update(self, new):
        """Push ``new`` into the window and update the running statistics."""
        # Preload phase: fill the window before any rolling updates.
        if self.index < self.N:
            self.window[self.index] = new
            self.index += 1
            # Seed the statistics exactly once, when the window first fills.
            # Structured this way so the rolling branch below is reachable for
            # every subsequent sample and no sample is silently dropped.
            if self.index == self.N:
                self.average = statistics.mean(self.window)
                self.variance = statistics.variance(self.window)
            return

        # Rolling phase: slide the window by one sample.
        old = self.window.pop(0)
        self.window.append(new)
        oldavg = self.average
        newavg = oldavg + (new - old) / self.N
        self.average = newavg
        if self.N > 1:
            # Incremental sample-variance update for a sliding window.
            self.variance += (new - old) * (new - newavg + old - oldavg) / (self.N - 1)

    def getVar(self):
        """Return the current variance (0 while fewer than two samples seen)."""
        if self.index < 2:
            # BUGFIX: the original checked only index == 1, so an empty window
            # fell through to statistics.variance([]) and raised
            # StatisticsError.
            return 0
        elif self.index < self.N:
            return statistics.variance(self.window[0:self.index])
        return self.variance

    def reset(self):
        # Restart the preload phase; stale window contents are overwritten as
        # new samples arrive, and getVar() only reads the refilled prefix.
        self.index = 0


def find_static_intervals(Data_in, Time, options, var_mult):
    """Locate static (low-motion) intervals in an IMU recording.

    NOTE(review): the original function and several local names were redacted
    in the source; the names used here were reconstructed from the algorithm
    and must be confirmed against callers.

    Parameters
    ----------
    Data_in : (N, 3) array of accelerometer samples (one row per sample).
    Time : sequence of N inter-sample time deltas.
    options : pair (initial_static_period, minimum_static_window), in the
        same time units as ``Time``.
    var_mult : multiplier applied to the static variance threshold.

    Returns
    -------
    (interval_starts, interval_ends, visualize, index) where the first two are
    parallel lists of sample indices, ``visualize`` is a step trace marking
    the detected intervals, and ``index`` is the number of samples in the
    initial static period.
    """
    total_samples = len(Time)
    Initial_Static = options[0]
    index = 0
    static_timer = 0
    static_window = options[1]
    running_var_x = RollingStatistic(25)
    running_var_y = RollingStatistic(25)
    running_var_z = RollingStatistic(25)

    # Find the total number of entries in the initial wait period.
    while static_timer <= Initial_Static:
        static_timer = static_timer + Time[index]
        index = index + 1

    # Per-axis variance over the initial static period -> static threshold.
    Static_var_X = statistics.variance(Data_in[0:index, 0])
    Static_var_Y = statistics.variance(Data_in[0:index, 1])
    Static_var_Z = statistics.variance(Data_in[0:index, 2])
    Static_Th = Static_var_X ** 2 + Static_var_Y ** 2 + Static_var_Z ** 2

    static_timer = 0
    interval_start = 1
    interval_end = interval_start + 1
    interval_starts = []
    interval_ends = []
    num_static = 0
    Max = -999999
    Min = 999999

    # Loop through the dataset and map the static intervals.
    for i in range(0, total_samples):
        static_timer = static_timer + Time[i]
        running_var_x.update(Data_in[i, 0])
        running_var_y.update(Data_in[i, 1])
        running_var_z.update(Data_in[i, 2])
        m = max([Data_in[i, 0], Data_in[i, 1], Data_in[i, 2]])
        mn = min([Data_in[i, 0], Data_in[i, 1], Data_in[i, 2]])
        # Track extrema for the visualization trace built below.
        if m > Max:
            Max = m
        if mn < Min:
            Min = mn

        # Squared norm of the current rolling per-axis variances.
        current_norm = (running_var_x.getVar() ** 2 +
                        running_var_y.getVar() ** 2 +
                        running_var_z.getVar() ** 2)
        if current_norm > Static_Th * var_mult:
            # Motion detected: close out the current candidate interval if it
            # lasted at least the minimum static window.
            if static_timer >= static_window:
                num_static += 1
                interval_end = i - 1  # skip the sample that crossed the threshold
                interval_starts.append(interval_start)
                interval_ends.append(interval_end)
                # Reset running variances for the next interval.
                running_var_x.reset()
                running_var_y.reset()
                running_var_z.reset()
            # Start a fresh candidate interval at the current sample.
            interval_start = i
            interval_end = interval_start
            static_timer = 0

    # Build a step trace marking the detected static intervals.
    visualize = total_samples * [28000]
    for i in range(0, num_static):
        length = interval_ends[i] - interval_starts[i] + 1
        visualize[interval_starts[i]:(interval_ends[i] + 1)] = [.6 * Max] * length
    return interval_starts, interval_ends, visualize, index


def accel_resid(params, accel_staticx, accel_staticy, accel_staticz):
    """Accelerometer calibration residuals.

    For each static sample, returns the squared local-gravity magnitude minus
    the squared magnitude of the calibrated sample; zero when the calibration
    is perfect.
    """
    scaling_Matrix = np.diag([params['kx'], params['ky'], params['kz']])
    misalignment_Matrix = np.array([[1, -params['ax'], params['ay']],
                                    [0, 1, -params['az']],
                                    [0, 0, 1]])
    adjustment_matrix = np.dot(misalignment_Matrix, scaling_Matrix)
    local_gravity = 9.81744
    bx = params['bx']
    by = params['by']
    bz = params['bz']
    # Apply bias, misalignment and scaling to the static samples.
    accel_static = np.zeros((3, len(accel_staticx)))
    accel_static[0, :] = accel_staticx - bx
    accel_static[1, :] = accel_staticy - by
    accel_static[2, :] = accel_staticz - bz
    accel_static = np.dot(adjustment_matrix, accel_static)
    residual = len(accel_staticx) * [0.0]
    for i in range(0, len(accel_staticx)):
        residual[i] = (local_gravity ** 2 -
                       (accel_static[0, i] ** 2 +
                        accel_static[1, i] ** 2 +
                        accel_static[2, i] ** 2))
    return residual


def gyro_resid(params, GyroData, AccelData, Time):
    """Gyroscope calibration residuals.

    For each motion interval, integrates the calibrated gyro samples into a
    rotation (RK4 over the quaternion), applies it to the gravity vector
    observed before the motion, and compares against the gravity vector
    observed after it.

    NOTE: the ``GyroData``/``AccelData`` parameters shadow the classes of the
    same names; they are expected to be *instances* of those classes.
    """
    index = 0
    interval_count = len(GyroData.Gyro)
    resid = interval_count * [0.0]
    for Gyro in GyroData.Gyro:
        # Apply calibration of the gyroscope.
        dt = Time[index]
        G = np.array(Gyro)
        G_calib = GyroData.applyCalib(params, G.transpose())
        R = quaternion_RK4(G_calib, dt)
        # Gravity as observed before (a) and after (Ua) the motion interval.
        a = AccelData.Accel[:, index]
        Ua = AccelData.Accel[:, index + 1]
        # Predicted gravity after applying the integrated rotation.
        Ug = np.dot(R, a)
        diff = Ua - Ug
        # Store the squared magnitude of the mismatch.
        resid[index] = diff[0] ** 2 + diff[1] ** 2 + diff[2] ** 2
        index += 1
    return resid


# TODO: Move to misc. kinematics
def quaternion_RK4(gyro, dt):
    """Integrate angular rates into a rotation matrix.

    Runs 4th-order Runge-Kutta integration of the quaternion kinematics
    q' = 1/2 * S(omega) * q over the (3, N) gyro sample block, then converts
    the resulting unit quaternion to a rotation matrix.

    Note: the returned matrix is the transpose of the conventional form (as
    in the original implementation) -- callers rely on this orientation.
    """
    num_samples = gyro.shape[1]
    q_k = np.array([1, 0, 0, 0])
    # RK4 loop over successive sample pairs.
    for i in range(0, (num_samples - 1)):
        q1 = q_k
        S1 = gyro_cross4(gyro[:, i])
        k_1 = 1.0 / 2.0 * np.dot(S1, q1)
        q2 = q_k + dt * 1.0 / 2.0 * k_1
        half_gyro_left = 1.0 / 2.0 * (gyro[:, i] + gyro[:, i + 1])
        S_half = gyro_cross4(half_gyro_left)
        k_2 = 1.0 / 2.0 * np.dot(S_half, q2)
        q3 = q_k + dt * 1.0 / 2.0 * k_2
        k_3 = 1.0 / 2.0 * np.dot(S_half, q3)
        q4 = q_k + dt * k_3
        S_2 = gyro_cross4(gyro[:, i + 1])
        k_4 = 1.0 / 2.0 * np.dot(S_2, q4)
        q_k = q_k + dt * (1.0 / 6.0 * k_1 + 1.0 / 3.0 * k_2 +
                          1.0 / 3.0 * k_3 + 1.0 / 6.0 * k_4)
        # Renormalize to keep q_k a unit quaternion.
        q_k = q_k * 1.0 / np.linalg.norm(q_k)

    # Convert quaternion to rotation matrix.  TODO: MOVE TO MISC KIN
    r11 = q_k[0] ** 2 + q_k[1] ** 2 - q_k[2] ** 2 - q_k[3] ** 2
    r12 = 2.0 * (q_k[1] * q_k[2] - q_k[0] * q_k[3])
    r13 = 2.0 * (q_k[1] * q_k[3] + q_k[0] * q_k[2])
    r21 = 2.0 * (q_k[1] * q_k[2] + q_k[0] * q_k[3])
    r22 = q_k[0] ** 2 - q_k[1] ** 2 + q_k[2] ** 2 - q_k[3] ** 2
    r23 = 2.0 * (q_k[2] * q_k[3] - q_k[0] * q_k[1])
    r31 = 2.0 * (q_k[1] * q_k[3] - q_k[0] * q_k[2])
    r32 = 2.0 * (q_k[2] * q_k[3] + q_k[0] * q_k[1])
    r33 = q_k[0] ** 2 - q_k[1] ** 2 - q_k[2] ** 2 + q_k[3] ** 2

    # Note that R is actually the transpose of what it would normally be.
    R = np.array([[r11, r21, r31],
                  [r12, r22, r32],
                  [r13, r23, r33]])
    return R


def gyro_cross4(gyro):
    """Return the 4x4 skew matrix S(omega) used in quaternion kinematics."""
    gx = gyro[0]
    gy = gyro[1]
    gz = gyro[2]
    Sx = np.array([[0, -gx, -gy, -gz],
                   [gx, 0, gz, -gy],
                   [gy, -gz, 0, gx],
                   [gz, gy, -gx, 0]])
    return Sx
<!DOCTYPE HTML PUBLIC "- <html> <head> <meta http-equiv="content-type" content="text/html; charset=UTF-8"> <title>Sets the mode of iteration</title> </head> <body><div class="manualnavbar" style="text-align: center;"> <div class="prev" style="text-align: left; float: left;"><a href="splstack.construct.html">SplStack::__construct</a></div> <div class="next" style="text-align: right; float: right;"><a href="class.splqueue.html">SplQueue</a></div> <div class="up"><a href="class.splstack.html">SplStack</a></div> <div class="home"><a href="index.html">PHP Manual</a></div> </div><hr /><div id="splstack.setiteratormode" class="refentry"> <div class="refnamediv"> <h1 class="refname">SplStack::setIteratorMode</h1> <p class="verinfo">(PHP 5 &gt;= 5.3.0)</p><p class="refpurpose"><span class="refname">SplStack::setIteratorMode</span> &mdash; <span class="dc-title">Sets the mode of iteration</span></p> </div> <div class="refsect1 description" id="refsect1-splstack.<API key>"> <h3 class="title">Description</h3> <div class="methodsynopsis dc-description"> <span class="type"><span class="type void">void</span></span> <span class="methodname"><strong>SplStack::setIteratorMode</strong></span> ( <span class="methodparam"><span class="type">int</span> <code class="parameter">$mode</code></span> )</div> </div> <div class="refsect1 parameters" id="refsect1-splstack.<API key>"> <h3 class="title">Parameters</h3> <p class="para"> <dl> <dt> <span class="term"><em><code class="parameter">mode</code></em></span> <dd> <p class="para"> There is only one iteration parameter you can modify. 
</p> <ul class="itemizedlist"> <li class="listitem"> <span class="simpara">The behavior of the iterator (either one or the other):</span> <ul class="itemizedlist"> <li class="listitem"><span class="simpara">SplDoublyLinkedList::IT_MODE_DELETE (Elements are deleted by the iterator)</span></li> <li class="listitem"><span class="simpara">SplDoublyLinkedList::IT_MODE_KEEP (Elements are traversed by the iterator)</span></li> </ul> </li> </ul> <p class="para"> The default mode is 0x2 : SplDoublyLinkedList::IT_MODE_LIFO | SplDoublyLinkedList::IT_MODE_KEEP </p> <div class="warning"><strong class="warning">Warning</strong> <p class="para"> The direction of iteration can no longer be changed for SplStacks. Trying to do so will result in a <a href="class.runtimeexception.html" class="classname">RuntimeException</a> being thrown. </p> </div> </dd> </dt> </dl> </p> </div> <div class="refsect1 returnvalues" id="refsect1-splstack.<API key>"> <h3 class="title">Return Values</h3> <p class="para"> No value is returned. </p> </div> </div><hr /><div class="manualnavbar" style="text-align: center;"> <div class="prev" style="text-align: left; float: left;"><a href="splstack.construct.html">SplStack::__construct</a></div> <div class="next" style="text-align: right; float: right;"><a href="class.splqueue.html">SplQueue</a></div> <div class="up"><a href="class.splstack.html">SplStack</a></div> <div class="home"><a href="index.html">PHP Manual</a></div> </div></body></html>
package me.markeh.factionsframework.layer.layer_1_6;

import java.util.HashMap;
import java.util.Set;
import java.util.TreeSet;

import org.bukkit.Chunk;
import org.bukkit.World;

import com.massivecraft.factions.Board;
import com.massivecraft.factions.FLocation;
import com.massivecraft.factions.struct.Relation;

import me.markeh.factionsframework.entities.Faction;
import me.markeh.factionsframework.entities.Factions;
import me.markeh.factionsframework.enums.Rel;

/**
 * Factions 1.6.x implementation of the framework {@link Factions} accessor.
 * Wraps the underlying MassiveCraft Factions singletons and caches one
 * wrapper object per faction id.
 */
public class Factions_1_6 extends Factions {
	// FIELDS

	// Cache of wrapper objects, keyed by faction id.
	private HashMap<String, Faction> factionsMap = new HashMap<String, Faction>();

	// Lazily resolved ids of the three special factions.
	private String noneId = null;
	private String warzoneId = null;
	private String safezoneId = null;

	// METHODS

	/**
	 * Fetch (and cache) the wrapper for the given faction id.
	 *
	 * @return the wrapper, or null if the underlying faction no longer exists
	 */
	@Override
	public Faction get(String id) {
		if ( ! this.factionsMap.containsKey(id)) {
			Faction_1_6 faction = new Faction_1_6(id);

			this.factionsMap.put(id, faction);
		}

		// Drop stale wrappers whose underlying faction was disbanded.
		if ( ! this.factionsMap.get(id).isValid()) {
			this.factionsMap.remove(id);
			return null;
		}

		return this.factionsMap.get(id);
	}

	/**
	 * Look up a faction by its tag/name.
	 *
	 * @return the wrapper, or null if no faction has this tag
	 */
	@Override
	public Faction getUsingName(String name, String universe) {
		// BUGFIX: getByTag returns null for an unknown tag; the original
		// dereferenced the result unconditionally and threw a
		// NullPointerException (sibling getAt already null-checks its lookup).
		com.massivecraft.factions.Faction match = com.massivecraft.factions.Factions.getInstance().getByTag(name);
		if (match == null) return null;

		return this.get(match.getId());
	}

	/** @return the faction claiming the given chunk, or null if unclaimed. */
	@Override
	public Faction getAt(Chunk chunk) {
		FLocation flocation = new FLocation(chunk.getWorld().getName(), chunk.getX(), chunk.getZ());

		String id = Board.getInstance().getIdAt(flocation);

		if (id == null) return null;

		return get(id);
	}

	@Override
	public Faction getFactionNone(World world) {
		if (this.noneId == null) this.noneId = com.massivecraft.factions.Factions.getInstance().getWilderness().getId();

		return this.get(this.noneId);
	}

	@Override
	public Faction getFactionWarZone(World world) {
		if (this.warzoneId == null) this.warzoneId = com.massivecraft.factions.Factions.getInstance().getWarZone().getId();

		return this.get(this.warzoneId);
	}

	@Override
	public Faction getFactionSafeZone(World world) {
		if (this.safezoneId == null) this.safezoneId = com.massivecraft.factions.Factions.getInstance().getSafeZone().getId();

		return this.get(this.safezoneId);
	}

	@Override
	public Set<Faction> getAllFactions() {
		// NOTE(review): TreeSet requires Faction to implement Comparable;
		// kept as in the original -- confirm Faction's natural ordering.
		Set<Faction> factions = new TreeSet<Faction>();

		for (com.massivecraft.factions.Faction faction : com.massivecraft.factions.Factions.getInstance().getAllFactions()) {
			factions.add(Factions.getById(faction.getId()));
		}

		return factions;
	}

	// UTILS

	/**
	 * Convert the Factions 1.6 relation enum to our framework {@link Rel}.
	 *
	 * @return the matching Rel, or null for any relation not mapped here
	 */
	public static Rel convertRelationship(Relation relation) {
		if (relation == Relation.ALLY) return Rel.ALLY;
		if (relation == Relation.ENEMY) return Rel.ENEMY;
		if (relation == Relation.MEMBER) return Rel.MEMBER;
		if (relation == Relation.TRUCE) return Rel.TRUCE;

		return null;
	}
}
#ifndef <API key> #define <API key> #include <boost/thread.hpp> #include <ossie/Resource_impl.h> #include <bulkio/bulkio.h> #include <boost/date_time/posix_time/posix_time.hpp> #include "<API key>.h" #include <sstream> #define NOOP 0 #define FINISH -1 #define NORMAL 1 class divide_ff_2i_base; template < typename TargetClass > class ProcessThread { public: ProcessThread(TargetClass *_target, float _delay) : target(_target) { _mythread = 0; _thread_running = false; _udelay = (__useconds_t)(_delay * 1000000); }; // kick off the thread void start() { if (_mythread == 0) { _thread_running = true; _mythread = new boost::thread(&ProcessThread::run, this); } }; // manage calls to target's service function void run() { int state = NORMAL; while (_thread_running and (state != FINISH)) { state = target->serviceFunction(); if (state == NOOP) { boost::this_thread::sleep( boost::posix_time::microseconds( _udelay ) ); } else { boost::this_thread::yield(); } } }; // stop thread and wait for termination bool release(unsigned long secs = 0, unsigned long usecs = 0) { _thread_running = false; if (_mythread) { if ((secs == 0) and (usecs == 0)){ _mythread->join(); } else { boost::system_time waitime= boost::get_system_time() + boost::posix_time::seconds(secs) + boost::posix_time::microseconds(usecs) ; if (!_mythread->timed_join(waitime)) { return 0; } } delete _mythread; _mythread = 0; } return 1; }; virtual ~ProcessThread(){ if (_mythread != 0) { release(0); _mythread = 0; } }; void updateDelay(float _delay) { _udelay = (__useconds_t)(_delay * 1000000); }; void stop() { _thread_running = false; if ( _mythread ) _mythread->interrupt(); }; bool threadRunning() { return _thread_running; }; private: boost::thread *_mythread; bool _thread_running; TargetClass *target; __useconds_t _udelay; boost::condition_variable _end_of_run; boost::mutex _eor_mutex; }; class divide_ff_2i_base : public GnuHawkBlock { public: divide_ff_2i_base(const char *uuid, const char *label); void start() throw 
(CF::Resource::StartError, CORBA::SystemException); void stop() throw (CF::Resource::StopError, CORBA::SystemException); CORBA::Object_ptr getPort(const char* _id) throw (CF::PortSupplier::UnknownPort, CORBA::SystemException); void releaseObject() throw (CF::LifeCycle::ReleaseError, CORBA::SystemException); void initialize() throw (CF::LifeCycle::InitializeError, CORBA::SystemException); ~divide_ff_2i_base(); void loadProperties(); protected: ProcessThread<divide_ff_2i_base> *serviceThread; boost::mutex serviceThreadLock; // Member variables exposed as properties CORBA::ULong vlen; // Ports bulkio::InFloatPort *float_in_0; bulkio::InFloatPort *float_in_1; bulkio::OutFloatPort *float_out; std::vector< std::string > inputPortOrder; std::vector< std::string > outputPortOrder; private: void construct(); protected: static const int RealMode=0; static const int ComplexMode=1; std::vector<std::vector<int> > io_mapping; typedef boost::posix_time::ptime TimeMark; typedef boost::posix_time::time_duration TimeDuration; typedef BULKIO::PrecisionUTCTime TimeStamp; // Enable or disable to adjusting of timestamp based on output rate inline void maintainTimeStamp( bool onoff=false ) { _maintainTimeStamp = onoff; }; // Enable or disable throttling of processing inline void setThrottle( bool onoff=false ) { _throttle = onoff; }; // getTargetDuration // Target duration defines the expected time the service function requires // to produce/consume elements. For source patterns, the data output rate // will be used to defined the target duration. 
For sink patterns, the // input rate of elements is used to define the target duration virtual TimeDuration getTargetDuration(); // calcThrottle // Calculate the duration about that we should sleep based on processing time // based on value from getTargetDuration() minus processing time ( end time // minus start time) // If the value is a positive duration then the boost::this_thread::sleep // method is called with 1/4 of the calculated duration. virtual TimeDuration calcThrottle( TimeMark &stime, TimeMark &etime ); // createBlock // Create the actual GNU Radio Block to that will perform the work method. The resulting // block object is assigned to gr_sptr // Add property change callbacks for getter/setter methods virtual void createBlock() = 0; // setupIOMappings // Sets up mappings for input and output ports and GnuRadio Stream indexes // A Gnu Radio input or output streams will be created for each defined RedHawk port. // The streams will be ordered 0..N-1 as they are defined in inputPortOrder and outputPortOrder // lists created during Component initialization. // For Gnu Radio blocks that define -1 for maximum number of input streams. The number of // input streams created will be restricted to the number of RedHawk ports. // RESOLVE - need to base mapping for -1 condition on "connections" and not streams // RESOLVE - need to add parameters to define expected modes for input ports.. i.e. real or complex and // not have to wait for SRI. virtual void setupIOMappings(); // getNOutputStreams // Called by setupIOMappings when the number of Gnu Radio output streams == -1 (variable ) and number of // Redhawk ports == 1. // @return uint32_t : Number of output streams to build virtual uint32_t getNOutputStreams(); // createOutputSRI // Called by setupIOMappings when an output mapping is defined. For each output mapping // defined, a call to createOutputSRI will be issued with the associated output index. 
// This default SRI and StreamID will be saved to the mapping and pushed down stream via pushSRI. // The subclass is responsible for overriding behavior of this method. The index provide matches // the stream index number that will be use by the Gnu Radio Block object // @param idx : output stream index number to associate the returned SRI object with // @return sri : default SRI object passed down stream over a RedHawk port virtual BULKIO::StreamSRI createOutputSRI( int32_t oidx, int32_t &in_idx); virtual BULKIO::StreamSRI createOutputSRI( int32_t oidx); // adjustOutputRate // Called by seOutputStreamSRI method when pushing SRI down stream to adjust the // the xdelta and/or ydelta values accordingly. The provided method will perform the following: // gr_blocks, gr_sync_block - no modifications are performed // gr_sync_decimator - sri.xdelta * gr_sptr->decimation() // <API key> - sri.xdelta / gr_sptr->interpolate() virtual void adjustOutputRate(BULKIO::StreamSRI &sri ); // callback when a new Stream ID is detected on the port so we can add to istream/ostream mapping list void <API key>( BULKIO::StreamSRI &sri ); // callback when a new Stream ID is detected on the port so we can add to istream/ostream mapping list void <API key>( BULKIO::StreamSRI &sri ); void <API key>(); // setOutputSteamSRI // Set the SRI context for an output stream from a Gnu Radio Block, when a pushPacket call occurs. Whenever the SRI is established // for an output stream it is sent down stream to the next component. virtual void setOutputStreamSRI( int streamIdx, BULKIO::StreamSRI &in_sri, bool sendSRI=true, bool setStreamID=true ) { for (int i = 0; i < (int)io_mapping[streamIdx].size(); i++){ int o_idx = io_mapping[streamIdx][i]; _ostreams[o_idx]->adjustSRI(in_sri, o_idx, setStreamID ); if ( sendSRI ) _ostreams[o_idx]->pushSRI(); } } // setOutputSteamSRI // Use the same SRI context for all output streams from a Gnu Radio Block, when a pushPacket call occurs. 
Whenever the SRI is established // for an output stream it is sent down stream to the next component. virtual void setOutputStreamSRI( BULKIO::StreamSRI &in_sri , bool sendSRI = true, bool setStreamID = true ) { OStreamList::iterator ostream=_ostreams.begin(); for( int o_idx=0; ostream != _ostreams.end(); o_idx++, ostream++ ) { (*ostream)->adjustSRI(in_sri, o_idx, setStreamID ); if ( sendSRI ) (*ostream)->pushSRI(); } } // gr_istream - Mapping of Provides Ports to Gnu Radio Stream indexes // Gnu Radio Block input stream definition: // Input = 1 .. N then each Provides Port type of X is mapped to a stream index 0..N-1 // This assumes the component will only have 1 input port type. (i.e float ports) // Input = -1 and single Provides Port interface then each unique stream definition will map to a stream index 0..N // Input = -1 and N Provides Port interface then each port will map to a stream index 0..N-1 // The mapping items are stored in a vector and maintain by setIOMappings and notifySRI methods, and // the service function when "end of stream" happens. 
struct gr_istream_base { GNU_RADIO_BLOCK_PTR grb; // shared pointer to our gr_block int _idx; // index of stream in gr_block std::string streamID; // redhawk stream id int _spe; // scalars per element int _vlen; // vector length in items, the gr_block process data int _hlen; // history length in items, the gr_blocks expects bool _eos; // if EOS was received from port bool _sri; // that we received an SRI call // Functions for child to implement virtual int SizeOfElement( int mode) = 0; virtual uint64_t nelems () = 0; virtual int read( int64_t ritems=-1 ) = 0; virtual bool overrun() = 0; virtual bool sriChanged() = 0; virtual void *read_pointer( int32_t items ) = 0; virtual void consume( int32_t n_items ) = 0; virtual void consume_elements( int32_t inNelems ) = 0; virtual void close() = 0; virtual void resizeData(int newSize) = 0; virtual void * getPort() = 0; virtual std::string getPktStreamId() = 0; virtual BULKIO::StreamSRI& getPktSri() = 0; virtual bool pktNull() = 0; virtual TimeStamp getPktTimeStamp() = 0; gr_istream_base( GNU_RADIO_BLOCK_PTR in_grb, int idx, int mode, std::string &sid ) : grb(in_grb), _idx(idx), streamID(sid), _spe(1), _vlen(1), _hlen(1), _eos(false), _sri(true) { }; gr_istream_base( GNU_RADIO_BLOCK_PTR in_grb, int idx, std::string &sid ) : grb(in_grb), _idx(idx), streamID(sid), _spe(1), _vlen(1), _hlen(1), _eos(false), _sri(false) { }; // translate scalars per element for incoming data // mode == 0 : real, mode == 1 : complex static inline int ScalarsPerElement( int mode ) { int spe=1; if ( mode == 1 ) spe=2; return spe; }; // translate scalars per element for incoming data // mode == 0 : real, mode == 1 : complex static inline int ScalarsPerElement( BULKIO::StreamSRI &sri ) { return ScalarsPerElement( sri.mode ); }; // return scalars per element inline int spe () { return _spe; } // set scalars per element inline int spe( int mode ) { _check( mode ); return _spe; } // return state if SRI was set inline bool sri() { return _sri; } inline 
bool sri( bool newSri ) { _sri = newSri; return _sri; } // return if End of Stream was seen inline bool eos() { return _eos; } inline bool eos( bool newEos ) { _eos = newEos; return _eos; } inline int vlen () { return _vlen; } void _check( int inMode , bool force=false) { // calc old history value int32_t old_hlen = (_hlen-1) * (_vlen*_spe); int32_t spe=ScalarsPerElement(inMode); int32_t nvlen=_vlen; bool newVlen=false; bool newSpe=false; try { if ( grb && grb->input_signature() ) nvlen = grb->input_signature()->sizeof_stream_item(_idx) / SizeOfElement(inMode); } catch(...) { LOG_TRACE( divide_ff_2i_base, "UNABLE TO SET VLEN, BAD INDEX:" << _idx ); } if ( nvlen != _vlen && nvlen >= 1 ) { _vlen=nvlen; newVlen=true; } if ( spe != _spe ) { _spe = spe; newSpe = true; } if ( force || newSpe || newVlen ) { // seed history for buffer with empty items int32_t new_hlen = ( grb->history()-1)* ( _vlen * _spe ); if ( (old_hlen != new_hlen) && ( new_hlen > -1 ) ) { _hlen = grb->history(); resizeData( new_hlen ); } } } // reset our association to a GR Block void associate( GNU_RADIO_BLOCK_PTR newBlock ) { grb = newBlock; if ( grb ) _check( _spe, true ); } inline uint64_t nitems () { uint64_t tmp = nelems(); if ( _vlen > 0 ) tmp /= _vlen; return tmp; } uint64_t itemsToScalars( uint64_t N ) { return N*_vlen*_spe; }; }; template < typename IN_PORT_TYPE > struct gr_istream : gr_istream_base { IN_PORT_TYPE *port; // RH port object std::vector< typename IN_PORT_TYPE::NativeType > _data; // buffered data from port typename IN_PORT_TYPE::dataTransfer *pkt; // pointer to last packet read from port gr_istream( IN_PORT_TYPE *in_port, GNU_RADIO_BLOCK_PTR in_grb, int idx, int mode, std::string &sid ) : gr_istream_base(in_grb, idx, mode, sid), port(in_port), _data(0), pkt(NULL) { _spe = ScalarsPerElement(mode); _check(mode, true); }; gr_istream( IN_PORT_TYPE *in_port, GNU_RADIO_BLOCK_PTR in_grb, int idx, std::string &sid ) : gr_istream_base(in_grb, idx, sid), port(in_port), _data(0), 
pkt(NULL) { int mode=0; _spe = ScalarsPerElement(mode); _check(mode, true); }; // Return the size of an element (sample) in bytes inline int SizeOfElement(int mode ) { return sizeof( typename IN_PORT_TYPE::NativeType)*ScalarsPerElement( mode); }; // Return the size of an element (sample) in bytes static inline int SizeOfElement( BULKIO::StreamSRI &sri ) { return sizeof( typename IN_PORT_TYPE::NativeType)*ScalarsPerElement(sri); } inline uint64_t nelems () { uint64_t tmp = _data.size(); if ( _spe > 0 ) tmp /= _spe; return tmp; } // RESOLVE: need to allow for requests of certain size, and blocking and timeouts int read( int64_t ritems=-1 ) { int retval = -1; typename IN_PORT_TYPE::dataTransfer *tpkt; if ( port && _sri ) { tpkt = port->getPacket( -1, streamID ); if ( tpkt == NULL ) { if ( port != NULL && port->blocked() ) retval = 0; } else { _data.insert( _data.end(), tpkt->dataBuffer.begin(), tpkt->dataBuffer.end() ); if ( tpkt->sriChanged ) { spe(tpkt->SRI.mode); } // resolve need to keep time stamp accurate for first sample of data.... 
we could loose this if we // end having residual data left in the buffer when output_multiple and vlen are used // by the gr_block - read and consume_elements need refactoring _eos = tpkt->EOS; if ( pkt != NULL ) delete pkt; pkt = tpkt; retval=nitems(); } } return retval; } inline bool overrun() { return ( pkt && pkt->inputQueueFlushed); } inline bool sriChanged() { return ( pkt && pkt->sriChanged ); } inline std::string getPktStreamId() { return pkt->streamID; } inline BULKIO::StreamSRI& getPktSri() { return pkt->SRI; } inline bool pktNull() { return pkt == NULL; } inline TimeStamp getPktTimeStamp() { return pkt->T; } void *read_pointer( int32_t items ) { uint32_t idx = itemsToScalars(items); if ( idx < _data.size() ) return (void*) &_data[ idx ]; else return (void*) &_data[0]; } // compress data buffer for requested number of items void consume( int32_t n_items ) { if ( n_items > 0 ) { consume_elements( n_items*_vlen ); } } // compress data buffer for requested number of items void consume_elements( int32_t inNelems ) { int d_idx = inNelems*_spe; int n = std::distance( _data.begin() + d_idx, _data.end() ); if ( d_idx > 0 && n >= 0 ) { std::copy( _data.begin() + d_idx, _data.end(), _data.begin() ); _data.resize(n); } } // perform clean up of stream state and mapping void close() { _data.clear(); _vlen = 1; _hlen=1; _eos = false; _sri = false; if ( pkt ) { delete pkt; pkt=NULL; } } void resizeData(int new_hlen) { _data.resize( new_hlen ); } void * getPort(){ return (void*) port; } }; // gr_ostream // Provides a mapping of output ports to a Gnu Radio Block's output stream. These items // are stored in a vector for managing output from the Gnu Radio Block and pushing // the data down stream to the next component over the port object. 
// Items in the vector are maintain by setIOMappings, notifySRI and the // the service function when "end of stream" happens struct gr_ostream_base { GNU_RADIO_BLOCK_PTR grb; // shared pointer ot GR_BLOCK int _idx; // output index (loose association) std::string streamID; // Stream Id to send down stream BULKIO::StreamSRI sri; // SRI to send down stream bool _m_tstamp; // set to true if we are maintaining outgoing time stamp BULKIO::PrecisionUTCTime tstamp; // time stamp to use for pushPacket calls bool _eos; // if EOS was sent uint64_t _nelems; // number of elements in that have been pushed down stream int _vlen; // vector length in items, to allocate output buffer for GR_BLOCK // Functions for child to implement virtual int SizeOfElement( int mode) = 0; virtual void pushSRI() = 0; virtual void pushSRI( BULKIO::StreamSRI &inSri ) = 0; virtual uint64_t nelems() = 0; virtual void resize( int32_t n_items ) = 0; virtual void *write_pointer() = 0; virtual int write( int32_t n_items, bool eos, TimeStamp &ts, bool adjust_ts=false ) = 0; virtual int write( int32_t n_items, bool eos, bool adjust_ts ) = 0; virtual int write( int32_t n_items, bool eos ) = 0; virtual void close() = 0; gr_ostream_base( GNU_RADIO_BLOCK_PTR ingrb, int idx, int mode, std::string &in_sid ) : grb(ingrb), _idx(idx), streamID(in_sid), _m_tstamp(false), _eos(false), _nelems(0), _vlen(1) { sri.hversion = 1; sri.xstart = 0.0; sri.xdelta = 1; sri.xunits = BULKIO::UNITS_TIME; sri.subsize = 0; sri.ystart = 0.0; sri.ydelta = 0.0; sri.yunits = BULKIO::UNITS_NONE; sri.mode = mode; sri.streamID = streamID.c_str(); // RESOLVE sri.blocking=0; to block or not tstamp.tcmode = BULKIO::TCM_CPU; tstamp.tcstatus = (short)1; tstamp.toff = 0.0; setTimeStamp(); } // translate scalars per element for incoming data // mode == 0 : real, mode == 1 : complex static inline int ScalarsPerElement( int mode ) { int spe=1; if ( mode == 1 ) spe=2; return spe; }; // translate scalars per element for incoming data // mode == 0 : 
real, mode == 1 : complex static inline int ScalarsPerElement( BULKIO::StreamSRI &sri ) { return ScalarsPerElement( sri.mode ); }; // Establish and SRI context for this output stream void setSRI( BULKIO::StreamSRI &inSri, int idx ) { sri=inSri; streamID = sri.streamID; // check if history, spe and vlen need to be adjusted _check(idx); }; // Only adjust stream id and output rate for SRI object void adjustSRI( BULKIO::StreamSRI &inSri, int idx, bool setStreamID=true ) { if ( setStreamID ) { streamID = inSri.streamID; sri.streamID = inSri.streamID; } double ret=inSri.xdelta; if ( grb ) ret = ret *grb->relative_rate(); sri.xdelta = ret; _check(idx); }; // Set our stream ID ... void setStreamID( std::string &sid ) { streamID=sid; }; // Return the number of scalars per element (sample) that we use inline int spe() { return ScalarsPerElement(sri.mode); } // return the state if EOS was pushed down stream inline bool eos () { return _eos; } // return the vector length to process data by the GR_BLOCK inline int vlen() { return _vlen; } inline bool eos ( bool inEos ) { _eos=inEos; return _eos; } void _check( int idx ) { if ( grb ) { int nvlen=1; try { if ( grb && grb->output_signature() ) nvlen = grb->output_signature()->sizeof_stream_item(idx) / SizeOfElement(sri.mode); if ( nvlen != _vlen && nvlen >= 1 ) _vlen=nvlen; } catch(...) 
{ LOG_TRACE( divide_ff_2i_base, "UNABLE TO SET VLEN, BAD INDEX:" << _idx ); } } } // establish and assocation with a new GR_BLOCK void associate( GNU_RADIO_BLOCK_PTR newblock ) { grb = newblock; _check( _idx ); } // return the number of items in the output buffer inline uint64_t nitems () { uint64_t tmp=nelems(); if ( _vlen > 0 ) tmp /= _vlen; return tmp; } // return the number of scalars used for N number of items inline uint64_t itemsToScalars( uint64_t N ) { return N*_vlen*spe(); }; // return the number of output elements sent down stream inline uint64_t oelems() { return _nelems; }; // return the number of output items sent down stream inline uint64_t oitems() { uint64_t tmp = _nelems; if ( _vlen > 0 ) tmp /= _vlen; return tmp; }; // Turn time stamp calculations on or off void setAutoAdjustTime( bool onoff ) { _m_tstamp = onoff; }; // sets time stamp value to be time of day void setTimeStamp( ) { struct timeval tmp_time; struct timezone tmp_tz; gettimeofday(&tmp_time, &tmp_tz); tstamp.twsec = tmp_time.tv_sec; tstamp.tfsec = tmp_time.tv_usec / 1e6; }; // set time stamp value for the stream to a specific value, turns on // stream's monitoring of time stamp void setTimeStamp( TimeStamp &inTimeStamp, bool adjust_ts=true ) { _m_tstamp = adjust_ts; tstamp = inTimeStamp; }; void forwardTimeStamp( int32_t noutput_items, TimeStamp &ts ) { double twsec = ts.twsec; double tfsec = ts.tfsec; double sdelta=sri.xdelta; sdelta = sdelta * noutput_items*_vlen; double new_time = (twsec+tfsec)+sdelta; ts.tfsec = std::modf( new_time, &ts.twsec ); }; void forwardTimeStamp( int32_t noutput_items ) { double twsec = tstamp.twsec; double tfsec = tstamp.tfsec; double sdelta=sri.xdelta; sdelta = sdelta * noutput_items*_vlen; double new_time = (twsec+tfsec)+sdelta; tstamp.tfsec = std::modf( new_time, &tstamp.twsec ); }; }; template < typename OUT_PORT_TYPE > struct gr_ostream : gr_ostream_base { OUT_PORT_TYPE *port; // handle to Port object std::vector< typename OUT_PORT_TYPE::NativeType > 
_data; // output buffer used by GR_Block gr_ostream( OUT_PORT_TYPE *out_port, GNU_RADIO_BLOCK_PTR ingrb, int idx, int mode, std::string &in_sid ) : gr_ostream_base(ingrb, idx, mode, in_sid), port(out_port),_data(0) { }; // Return the size of an element (sample) in bytes inline int SizeOfElement(int mode ) { return sizeof( typename OUT_PORT_TYPE::NativeType)*ScalarsPerElement( mode); }; // Return the size of an element (sample) in bytes static inline int SizeOfElement( BULKIO::StreamSRI &sri ) { return sizeof( typename OUT_PORT_TYPE::NativeType)*ScalarsPerElement(sri); }; // push our SRI object down stream void pushSRI() { if ( port ) port->pushSRI( sri ); }; // push incoming SRI object down stream, do not save this object void pushSRI( BULKIO::StreamSRI &inSri ) { if ( port ) port->pushSRI( inSri ); }; // return the number of elements (samples) in the output buffer inline uint64_t nelems() { uint64_t tmp = _data.size(); if ( spe() > 0 ) tmp /= spe(); return tmp; }; // resize the output buffer to N number of items void resize( int32_t n_items ) { if ( _data.size() != (size_t)(n_items*spe()*_vlen) ) { _data.resize( n_items*spe()*_vlen ); } } void *write_pointer(){ // push ostream's buffer address onto list of output buffers return (void*) &(_data[0]); } // write data to output ports using the provided time stamp and adjust the time // accordingly using the xdelta value of the SRI and the number of items int write( int32_t n_items, bool eos, TimeStamp &ts, bool adjust_ts=false ) { resize( n_items ); if ( port ) port->pushPacket( _data, ts, eos, streamID ); if ( adjust_ts ) forwardTimeStamp( n_items, ts ); _eos = eos; _nelems += (n_items*_vlen); return n_items; }; // write data to the output port using the map object's timestamp // if the adjust_ts value equals true. 
otherwise use time of // day for the time stamp int write( int32_t n_items, bool eos, bool adjust_ts ) { if ( !adjust_ts ) setTimeStamp(); resize( n_items ); if ( port ) port->pushPacket( _data, tstamp, eos, streamID ); if ( adjust_ts ) forwardTimeStamp( n_items ); _eos = eos; _nelems += (n_items*_vlen); return n_items; }; // write data to the output port using the map object's timestamp and // adjust the time stamp if the maps's m_tstamp value == true int write( int32_t n_items, bool eos ) { if ( !_m_tstamp ) setTimeStamp(); resize( n_items ); if ( port ) port->pushPacket( _data, tstamp, eos, streamID ); if ( _m_tstamp ) forwardTimeStamp( n_items ); _eos = eos; _nelems += n_items*_vlen; return n_items; }; // perform clean up on the stream state and map void close() { _data.clear(); _vlen=1; _eos = false; _m_tstamp=false; }; }; typedef <API key> GR_IN_BUFFERS; typedef gr_vector_void_star GR_OUT_BUFFERS; typedef gr_vector_int GR_BUFFER_LENGTHS; int <API key>( std::vector< gr_istream_base * > &istreams ); int _forecastAndProcess( bool &eos, std::vector< gr_istream_base * > &istreams ); int <API key>( std::vector< gr_ostream_base * > &ostreams ); int <API key>( std::vector< gr_istream_base * > &istreams, std::vector< gr_ostream_base * > &ostreams ); int _forecastAndProcess( bool &eos, std::vector< gr_istream_base * > &istreams, std::vector< gr_ostream_base * > &ostreams ); typedef std::deque< std::pair< void*, BULKIO::StreamSRI > > SRIQueue; typedef std::vector< gr_istream_base * > IStreamList; typedef std::vector< gr_ostream_base * > OStreamList; // cache variables to transferring data to/from a GNU Radio Block std::vector<bool> _input_ready; GR_BUFFER_LENGTHS <API key>; GR_BUFFER_LENGTHS _ninput_items; GR_IN_BUFFERS _input_items; GR_OUT_BUFFERS _output_items; int32_t noutput_items; boost::mutex _sriMutex; SRIQueue _sriQueue; // mapping of RH ports to GNU Radio streams IStreamList _istreams; OStreamList _ostreams; bool sentEOS; ENABLE_LOGGING; protected: bool 
_maintainTimeStamp; bool _throttle; TimeMark p_start_time; TimeMark p_end_time; public: int serviceFunction() { int retval = NOOP; retval = <API key>( _istreams, _ostreams ); p_end_time = boost::posix_time::microsec_clock::local_time(); if ( retval == NORMAL && _throttle ) { TimeDuration delta = calcThrottle( p_start_time, p_end_time ); if ( delta.is_not_a_date_time() == false && delta.is_negative() == false ) { LOG_TRACE( divide_ff_2i_base, " SLEEP ...." << delta ); boost::this_thread::sleep( delta ); } else { LOG_TRACE( divide_ff_2i_base, " NO SLEEPING...." ); } } p_start_time = p_end_time; LOG_TRACE( divide_ff_2i_base, " serviceFunction: retval:" << retval); return retval; }; }; #endif
'use strict'; var redis = require('redis'); module.exports = function (config) { var client = redis.createClient( config.redis.port, config.redis.host ); if (config.redis.password) { client.auth(config.redis.password, function (err) { if (err) { console.log(err); console.log('Can\'t connect to redis'); } }); } return client; };
package com.picosms.hermash.ifaces;

/**
 * Gateway abstraction for an SMS provider.
 *
 * Implementations wrap a concrete provider API; every call may fail
 * with a provider-specific exception, surfaced here as {@code Exception}.
 */
public interface IGate {

    /**
     * Queries the provider for the current account balance.
     *
     * @return the provider's balance response as raw text
     * @throws Exception on any provider/transport failure
     */
    String getBalance() throws Exception;

    /**
     * Sends a single text message.
     *
     * @param number destination number
     * @param text   message body
     * @return the provider's response as raw text
     * @throws Exception on any provider/transport failure
     */
    String sendMessage(String number, String text) throws Exception;

    /**
     * Sends a batch of messages; {@code number[i]} receives {@code text[i]}.
     *
     * @param number destination numbers
     * @param text   message bodies, parallel to {@code number}
     * @return the provider's response as raw text
     * @throws Exception on any provider/transport failure
     */
    String sendMessageBatch(String[] number, String[] text) throws Exception;
}
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN"> <html><head><meta http-equiv="Content-Type" content="text/html;charset=iso-8859-1"> <title>Member List</title> <link href="doxygen.css" rel="stylesheet" type="text/css"> </head><body> <!-- Generated by Doxygen 1.2.14 --> <center> <a class="qindex" href="index.html">Main Page</a> &nbsp; <a class="qindex" href="classes.html">Alphabetical List</a> &nbsp; <a class="qindex" href="annotated.html">Compound List</a> &nbsp; <a class="qindex" href="files.html">File List</a> &nbsp; <a class="qindex" href="functions.html">Compound Members</a> &nbsp; <a class="qindex" href="globals.html">File Members</a> &nbsp; <a class="qindex" href="pages.html">Related Pages</a> &nbsp; </center> <hr><h1>moldendata Member List</h1>This is the complete list of members for <a class="el" href="structmoldendata.html">moldendata</a>, including all inherited members.<table> <tr bgcolor="#f0f0f0"><td><a class="el" href="structmoldendata.html#m4">atomlist</a></td><td><a class="el" href="structmoldendata.html">moldendata</a></td><td></td></tr> <tr bgcolor="#f0f0f0"><td><a class="el" href="structmoldendata.html#m0">file</a></td><td><a class="el" href="structmoldendata.html">moldendata</a></td><td></td></tr> <tr bgcolor="#f0f0f0"><td><a class="el" href="structmoldendata.html#m3">file_name</a></td><td><a class="el" href="structmoldendata.html">moldendata</a></td><td></td></tr> <tr bgcolor="#f0f0f0"><td><a class="el" href="structmoldendata.html#m2">numatoms</a></td><td><a class="el" href="structmoldendata.html">moldendata</a></td><td></td></tr> <tr bgcolor="#f0f0f0"><td><a class="el" href="structmoldendata.html#m1">trajectory</a></td><td><a class="el" href="structmoldendata.html">moldendata</a></td><td></td></tr> </table><hr><address><small>Generated on Wed Mar 22 13:15:36 2006 for VMD Plugins (current) by <a href="http: <img src="doxygen.gif" alt="doxygen" align="middle" border=0 width=110 height=53></a>1.2.14 written by <a 
href="mailto:dimitri@stack.nl">Dimitri van Heesch</a>, &copy;&nbsp;1997-2002</small></address> </body> </html>
#include <fstream> #include <ctime> #include <cstring> #include "Log.h" std::ofstream gLogStream; std::ostream& _LogError(const char * pFile, int pLine) { return std::cerr << clock() * 1000 / CLOCKS_PER_SEC << " - (" << ((strrchr(pFile, '/') ? strrchr(pFile, '/') : (strrchr(pFile, '\\') ? strrchr(pFile, '\\') : pFile - 1)) + 1) << ":" << pLine << "): [Error] "; } std::ostream& _LogDebug(const char * pFile, int pLine) { return std::clog << clock() * 1000 / CLOCKS_PER_SEC << " - (" << ((strrchr(pFile, '/') ? strrchr(pFile, '/') : (strrchr(pFile, '\\') ? strrchr(pFile, '\\') : pFile - 1)) + 1) << ":" << pLine << "): [Debug] "; } std::ostream& _Log(const char * pFile, int pLine) { return std::cout << clock() * 1000 / CLOCKS_PER_SEC << " - (" << ((strrchr(pFile, '/') ? strrchr(pFile, '/') : (strrchr(pFile, '\\') ? strrchr(pFile, '\\') : pFile - 1)) + 1) << ":" << pLine << "): "; } void InitLogging() { #if <API key> LogDebug << "Logging to console window." << std::endl; #else // Set up log. gLogStream.open("log.txt", std::ios_base::out | std::ios_base::trunc); if (gLogStream) { std::cout.rdbuf(gLogStream.rdbuf()); std::clog.rdbuf(gLogStream.rdbuf()); std::cerr.rdbuf(gLogStream.rdbuf()); } #endif }
package com.redhat.lightblue.query; import com.redhat.lightblue.util.JsonUtils; import com.redhat.lightblue.util.Path; import org.junit.Assert; import org.junit.Test; public class <API key> { final String p_field_inclusion = "{\"field\":\"field.x\", \"include\": true}"; final String <API key> = "{\"field\":\"field.x\",\"include\": true, \"recursive\": true}"; final String p_field_exclusion= "{\"field\":\"field.y.x\",\"include\": false}"; final String <API key> = "{\"field\":\"field.y.x\",\"include\": false, \"recursive\": true}"; final String p_array_match = "{\"field\":\"field.x\",\"include\":true,\"match\":{\"field\":\"field.z\",\"op\":\"$eq\",\"rvalue\":1},\"project\":{\"field\":\"member\"}}"; final String p_array_range = "{\"field\":\"field.x\",\"include\":true, \"range\":[1,4],\"project\":{\"field\":\"member\"}}"; final String p_array_match_p = "{\"field\":\"field.x\",\"include\":true,\"match\":{\"field\":\"field.z\",\"op\":\"$eq\",\"rvalue\":1},\"projection\":{\"field\":\"member\"}}"; final String p_array_range_p = "{\"field\":\"field.x\",\"include\":true, \"range\":[1,4],\"projection\":{\"field\":\"member\"}}"; final String p_list = "[{\"field\":\"field.x\", \"include\": true},{\"field\":\"field.x.z\",\"include\": true, \"recursive\": true},{\"field\":\"field.y\",\"include\": false, \"recursive\": true}]"; @Test public void field_inclusion() throws Exception { Projection p = Projection.fromJson(JsonUtils.json(p_field_inclusion)); Assert.assertEquals(Projection.Inclusion.explicit_inclusion,p.getFieldInclusion(new Path("field.x"))); Assert.assertEquals(Projection.Inclusion.explicit_inclusion,p.getFieldInclusion(new Path("field"))); Assert.assertEquals(Projection.Inclusion.undecided,p.getFieldInclusion(new Path("field.x.y"))); } @Test public void <API key>() throws Exception { Projection p = Projection.fromJson(JsonUtils.json(<API key>)); Assert.assertEquals(Projection.Inclusion.explicit_inclusion,p.getFieldInclusion(new Path("field.x"))); 
Assert.assertEquals(Projection.Inclusion.explicit_inclusion,p.getFieldInclusion(new Path("field"))); Assert.assertEquals(Projection.Inclusion.implicit_inclusion,p.getFieldInclusion(new Path("field.x.y"))); Assert.assertEquals(Projection.Inclusion.undecided,p.getFieldInclusion(new Path("field.z"))); } @Test public void field_exclusion() throws Exception { Projection p = Projection.fromJson(JsonUtils.json(p_field_exclusion)); Assert.assertEquals(Projection.Inclusion.explicit_exclusion,p.getFieldInclusion(new Path("field.y.x"))); Assert.assertEquals(Projection.Inclusion.undecided,p.getFieldInclusion(new Path("field.y.x.z"))); Assert.assertEquals(Projection.Inclusion.undecided,p.getFieldInclusion(new Path("field"))); Assert.assertEquals(Projection.Inclusion.undecided,p.getFieldInclusion(new Path("field.y"))); } @Test public void <API key>() throws Exception { Projection p = Projection.fromJson(JsonUtils.json(<API key>)); Assert.assertEquals(Projection.Inclusion.explicit_exclusion,p.getFieldInclusion(new Path("field.y.x"))); Assert.assertEquals(Projection.Inclusion.implicit_exclusion,p.getFieldInclusion(new Path("field.y.x.z"))); Assert.assertEquals(Projection.Inclusion.undecided,p.getFieldInclusion(new Path("field"))); Assert.assertEquals(Projection.Inclusion.undecided,p.getFieldInclusion(new Path("field.y"))); } @Test public void array_match() throws Exception { Projection p = Projection.fromJson(JsonUtils.json(p_array_match)); Assert.assertEquals(Projection.Inclusion.explicit_inclusion,p.getFieldInclusion(new Path("field.x"))); Assert.assertEquals(Projection.Inclusion.explicit_inclusion,p.getFieldInclusion(new Path("field"))); Assert.assertEquals(Projection.Inclusion.explicit_inclusion,p.getFieldInclusion(new Path("field.x.*.member"))); Assert.assertEquals(Projection.Inclusion.undecided,p.getFieldInclusion(new Path("field.a"))); Assert.assertEquals(Projection.Inclusion.undecided,p.getFieldInclusion(new Path("field.x.*.field.z"))); Assert.assertTrue(p.<API key>(new 
Path("field.x"))); Assert.assertTrue(p.<API key>(new Path("field.x.field"))); Assert.assertTrue(p.<API key>(new Path("field.x.*.field.z"))); Assert.assertFalse(p.<API key>(new Path("field.a"))); } @Test public void array_range() throws Exception { Projection p = Projection.fromJson(JsonUtils.json(p_array_range)); Assert.assertEquals(Projection.Inclusion.explicit_inclusion,p.getFieldInclusion(new Path("field.x"))); Assert.assertEquals(Projection.Inclusion.explicit_inclusion,p.getFieldInclusion(new Path("field"))); Assert.assertEquals(Projection.Inclusion.explicit_inclusion,p.getFieldInclusion(new Path("field.x.*.member"))); Assert.assertEquals(Projection.Inclusion.undecided,p.getFieldInclusion(new Path("field.a"))); Assert.assertEquals(Projection.Inclusion.undecided,p.getFieldInclusion(new Path("field.x.*.b"))); Assert.assertTrue(p.<API key>(new Path("field"))); Assert.assertTrue(p.<API key>(new Path("field.x"))); Assert.assertTrue(p.<API key>(new Path("field.x.*.member"))); Assert.assertFalse(p.<API key>(new Path("field.a"))); } @Test public void array_match_p() throws Exception { Projection p = Projection.fromJson(JsonUtils.json(p_array_match_p)); Assert.assertEquals(Projection.Inclusion.explicit_inclusion,p.getFieldInclusion(new Path("field.x"))); Assert.assertEquals(Projection.Inclusion.explicit_inclusion,p.getFieldInclusion(new Path("field"))); Assert.assertEquals(Projection.Inclusion.explicit_inclusion,p.getFieldInclusion(new Path("field.x.*.member"))); Assert.assertEquals(Projection.Inclusion.undecided,p.getFieldInclusion(new Path("field.a"))); Assert.assertEquals(Projection.Inclusion.undecided,p.getFieldInclusion(new Path("field.x.*.field.z"))); Assert.assertTrue(p.<API key>(new Path("field.x"))); Assert.assertTrue(p.<API key>(new Path("field.x.field"))); Assert.assertTrue(p.<API key>(new Path("field.x.*.field.z"))); Assert.assertFalse(p.<API key>(new Path("field.a"))); } @Test public void array_range_p() throws Exception { Projection p = 
Projection.fromJson(JsonUtils.json(p_array_range_p)); Assert.assertEquals(Projection.Inclusion.explicit_inclusion,p.getFieldInclusion(new Path("field.x"))); Assert.assertEquals(Projection.Inclusion.explicit_inclusion,p.getFieldInclusion(new Path("field"))); Assert.assertEquals(Projection.Inclusion.explicit_inclusion,p.getFieldInclusion(new Path("field.x.*.member"))); Assert.assertEquals(Projection.Inclusion.undecided,p.getFieldInclusion(new Path("field.a"))); Assert.assertEquals(Projection.Inclusion.undecided,p.getFieldInclusion(new Path("field.x.*.b"))); Assert.assertTrue(p.<API key>(new Path("field"))); Assert.assertTrue(p.<API key>(new Path("field.x"))); Assert.assertTrue(p.<API key>(new Path("field.x.*.member"))); Assert.assertFalse(p.<API key>(new Path("field.a"))); } @Test public void projection_list() throws Exception { Projection p = Projection.fromJson(JsonUtils.json(p_list)); Assert.assertTrue(p instanceof ProjectionList); ProjectionList pl = (ProjectionList)p; //"[{\"field\":\"field.x\", \"include\": true},{\"field\":\"field.x.z\",\"include\": true, \"recursive\": true},{\"field\":\"field.y\",\"include\": false, \"recursive\": true}]"; Assert.assertEquals(Projection.Inclusion.explicit_inclusion,p.getFieldInclusion(new Path("field.x"))); Assert.assertEquals(Projection.Inclusion.explicit_inclusion,p.getFieldInclusion(new Path("field"))); Assert.assertEquals(Projection.Inclusion.explicit_inclusion,p.getFieldInclusion(new Path("field.x.z"))); Assert.assertEquals(Projection.Inclusion.implicit_inclusion,p.getFieldInclusion(new Path("field.x.z.b"))); Assert.assertEquals(Projection.Inclusion.explicit_exclusion,p.getFieldInclusion(new Path("field.y"))); Assert.assertEquals(Projection.Inclusion.implicit_exclusion,p.getFieldInclusion(new Path("field.y.q"))); Assert.assertEquals(Projection.Inclusion.undecided,p.getFieldInclusion(new Path("field.t"))); Assert.assertTrue(p.<API key>(new Path("field"))); Assert.assertTrue(p.<API key>(new Path("field.x"))); 
Assert.assertTrue(p.<API key>(new Path("field.x.z"))); Assert.assertTrue(p.<API key>(new Path("field.x.z.b"))); Assert.assertFalse(p.<API key>(new Path("field.y"))); Assert.assertFalse(p.<API key>(new Path("field.y.q"))); Assert.assertFalse(p.<API key>(new Path("field.t"))); } }
var NAVTREEINDEX0 = { "annotated.html":[1,0], "<API key>.html":[1,0,0,0,0], "<API key>.html#<API key>":[1,0,0,0,0,0], "<API key>.html#<API key>":[1,0,0,0,0,1], "<API key>.html":[1,0,0,0,1], "<API key>.html#<API key>":[1,0,0,0,1,1], "<API key>.html#<API key>":[1,0,0,0,1,0], "<API key>.html":[1,0,0,0,2], "<API key>.html#<API key>":[1,0,0,0,2,1], "<API key>.html#<API key>":[1,0,0,0,2,4], "<API key>.html#<API key>":[1,0,0,0,2,2], "<API key>.html#<API key>":[1,0,0,0,2,3], "<API key>.html#<API key>":[1,0,0,0,2,0], "<API key>.html":[1,0,0,1,0], "<API key>.html#<API key>":[1,0,0,1,0,3], "<API key>.html#<API key>":[1,0,0,1,0,0], "<API key>.html#<API key>":[1,0,0,1,0,4], "<API key>.html#<API key>":[1,0,0,1,0,1], "<API key>.html#<API key>":[1,0,0,1,0,2], "<API key>.html":[1,0,0,1,1], "<API key>.html#<API key>":[1,0,0,1,1,0], "<API key>.html#<API key>":[1,0,0,1,1,1], "<API key>.html":[1,0,0,1,2], "<API key>.html#<API key>":[1,0,0,1,2,0], "<API key>.html#<API key>":[1,0,0,1,2,1], "<API key>.html":[1,0,0,1,3], "<API key>.html#<API key>":[1,0,0,1,3,7], "<API key>.html#<API key>":[1,0,0,1,3,4], "<API key>.html#<API key>":[1,0,0,1,3,3], "<API key>.html#<API key>":[1,0,0,1,3,1], "<API key>.html#<API key>":[1,0,0,1,3,5], "<API key>.html#<API key>":[1,0,0,1,3,2], "<API key>.html#<API key>":[1,0,0,1,3,6], "<API key>.html#<API key>":[1,0,0,1,3,0], "<API key>.html":[1,0,0,1,4], "<API key>.html#<API key>":[1,0,0,1,4,8], "<API key>.html#<API key>":[1,0,0,1,4,5], "<API key>.html#<API key>":[1,0,0,1,4,3], "<API key>.html#<API key>":[1,0,0,1,4,2], "<API key>.html#<API key>":[1,0,0,1,4,4], "<API key>.html#<API key>":[1,0,0,1,4,0], "<API key>.html#<API key>":[1,0,0,1,4,1], "<API key>.html#<API key>":[1,0,0,1,4,7], "<API key>.html#<API key>":[1,0,0,1,4,6], "<API key>.html":[1,0,0,1,5], "<API key>.html#<API key>":[1,0,0,1,5,3], "<API key>.html#<API key>":[1,0,0,1,5,0], "<API key>.html#<API key>":[1,0,0,1,5,2], "<API key>.html#<API key>":[1,0,0,1,5,6], "<API key>.html#<API key>":[1,0,0,1,5,1], 
"<API key>.html#<API key>":[1,0,0,1,5,7], "<API key>.html#<API key>":[1,0,0,1,5,5], "<API key>.html#<API key>":[1,0,0,1,5,4], "<API key>.html":[1,0,0,1,6], "<API key>.html#<API key>":[1,0,0,1,6,0], "<API key>.html#<API key>":[1,0,0,1,6,5], "<API key>.html#<API key>":[1,0,0,1,6,2], "<API key>.html#<API key>":[1,0,0,1,6,1], "<API key>.html#<API key>":[1,0,0,1,6,3], "<API key>.html#<API key>":[1,0,0,1,6,4], "<API key>.html#<API key>":[1,0,0,1,6,7], "<API key>.html#<API key>":[1,0,0,1,6,8], "<API key>.html#<API key>":[1,0,0,1,6,6], "<API key>.html":[1,0,0,1,7], "<API key>.html#<API key>":[1,0,0,1,7,7], "<API key>.html#<API key>":[1,0,0,1,7,6], "<API key>.html#<API key>":[1,0,0,1,7,0], "<API key>.html#<API key>":[1,0,0,1,7,8], "<API key>.html#<API key>":[1,0,0,1,7,5], "<API key>.html#<API key>":[1,0,0,1,7,2], "<API key>.html#<API key>":[1,0,0,1,7,4], "<API key>.html#<API key>":[1,0,0,1,7,3], "<API key>.html#<API key>":[1,0,0,1,7,1], "<API key>.html":[1,0,0,1,8], "<API key>.html#<API key>":[1,0,0,1,8,3], "<API key>.html#<API key>":[1,0,0,1,8,6], "<API key>.html#<API key>":[1,0,0,1,8,0], "<API key>.html#<API key>":[1,0,0,1,8,4], "<API key>.html#<API key>":[1,0,0,1,8,2], "<API key>.html#<API key>":[1,0,0,1,8,1], "<API key>.html#<API key>":[1,0,0,1,8,5], "<API key>.html#<API key>":[1,0,0,1,8,7], "<API key>.html":[1,0,0,1,9], "<API key>.html#<API key>":[1,0,0,1,9,7], "<API key>.html#<API key>":[1,0,0,1,9,13], "<API key>.html#<API key>":[1,0,0,1,9,1], "<API key>.html#<API key>":[1,0,0,1,9,8], "<API key>.html#<API key>":[1,0,0,1,9,14], "<API key>.html#<API key>":[1,0,0,1,9,17], "<API key>.html#<API key>":[1,0,0,1,9,0], "<API key>.html#<API key>":[1,0,0,1,9,2], "<API key>.html#<API key>":[1,0,0,1,9,4], "<API key>.html#<API key>":[1,0,0,1,9,10], "<API key>.html#<API key>":[1,0,0,1,9,11], "<API key>.html#<API key>":[1,0,0,1,9,6], "<API key>.html#<API key>":[1,0,0,1,9,15], "<API key>.html#<API key>":[1,0,0,1,9,12], "<API key>.html#<API key>":[1,0,0,1,9,3], "<API key>.html#<API 
key>":[1,0,0,1,9,5], "<API key>.html#<API key>":[1,0,0,1,9,16], "<API key>.html#<API key>":[1,0,0,1,9,9], "<API key>.html":[1,0,0,2,0,0], "<API key>.html#<API key>":[1,0,0,2,0,0,5], "<API key>.html#<API key>":[1,0,0,2,0,0,3], "<API key>.html#<API key>":[1,0,0,2,0,0,6], "<API key>.html#<API key>":[1,0,0,2,0,0,2], "<API key>.html#<API key>":[1,0,0,2,0,0,4], "<API key>.html#<API key>":[1,0,0,2,0,0,0], "<API key>.html#<API key>":[1,0,0,2,0,0,1], "<API key>.html":[1,0,0,2,0,1], "<API key>.html#<API key>":[1,0,0,2,0,1,0], "<API key>.html#<API key>":[1,0,0,2,0,1,1], "<API key>.html":[1,0,0,2,0,2], "<API key>.html#<API key>":[1,0,0,2,0,2,5], "<API key>.html#<API key>":[1,0,0,2,0,2,7], "<API key>.html#<API key>":[1,0,0,2,0,2,4], "<API key>.html#<API key>":[1,0,0,2,0,2,6], "<API key>.html#<API key>":[1,0,0,2,0,2,2], "<API key>.html#<API key>":[1,0,0,2,0,2,1], "<API key>.html#<API key>":[1,0,0,2,0,2,3], "<API key>.html#<API key>":[1,0,0,2,0,2,0], "<API key>.html":[1,0,0,2,0,3], "<API key>.html#<API key>":[1,0,0,2,0,3,1], "<API key>.html#<API key>":[1,0,0,2,0,3,0], "<API key>.html#<API key>":[1,0,0,2,0,3,2], "<API key>.html":[1,0,0,2,0,4], "<API key>.html#<API key>":[1,0,0,2,0,4,1], "<API key>.html#<API key>":[1,0,0,2,0,4,0], "<API key>.html#<API key>":[1,0,0,2,0,4,3], "<API key>.html#<API key>":[1,0,0,2,0,4,2], "<API key>.html#<API key>":[1,0,0,2,0,4,4], "<API key>.html":[1,0,0,2,0,5], "<API key>.html#<API key>":[1,0,0,2,0,5,3], "<API key>.html#<API key>":[1,0,0,2,0,5,2], "<API key>.html#<API key>":[1,0,0,2,0,5,0], "<API key>.html#<API key>":[1,0,0,2,0,5,1], "<API key>.html#<API key>":[1,0,0,2,0,5,4], "<API key>.html":[1,0,0,2,0,6], "<API key>.html#<API key>":[1,0,0,2,0,6,3], "<API key>.html#<API key>":[1,0,0,2,0,6,0], "<API key>.html#<API key>":[1,0,0,2,0,6,1], "<API key>.html#<API key>":[1,0,0,2,0,6,2], "<API key>.html":[1,0,0,2,0,7], "<API key>.html#<API key>":[1,0,0,2,0,7,5], "<API key>.html#<API key>":[1,0,0,2,0,7,1], "<API key>.html#<API key>":[1,0,0,2,0,7,0], "<API 
key>.html#<API key>":[1,0,0,2,0,7,2], "<API key>.html#<API key>":[1,0,0,2,0,7,4], "<API key>.html#<API key>":[1,0,0,2,0,7,3], "<API key>.html":[1,0,0,2,0,8], "<API key>.html#<API key>":[1,0,0,2,0,8,4], "<API key>.html#<API key>":[1,0,0,2,0,8,2], "<API key>.html#<API key>":[1,0,0,2,0,8,7], "<API key>.html#<API key>":[1,0,0,2,0,8,5], "<API key>.html#<API key>":[1,0,0,2,0,8,3], "<API key>.html#<API key>":[1,0,0,2,0,8,1], "<API key>.html#<API key>":[1,0,0,2,0,8,0], "<API key>.html#<API key>":[1,0,0,2,0,8,6], "<API key>.html":[1,0,0,3,0], "<API key>.html#<API key>":[1,0,0,3,0,1], "<API key>.html#<API key>":[1,0,0,3,0,0], "<API key>.html":[1,0,0,3,1], "<API key>.html#<API key>":[1,0,0,3,1,0], "<API key>.html#<API key>":[1,0,0,3,1,1], "<API key>.html":[1,0,0,3,2], "<API key>.html#<API key>":[1,0,0,3,2,2], "<API key>.html#<API key>":[1,0,0,3,2,1], "<API key>.html#<API key>":[1,0,0,3,2,3], "<API key>.html#<API key>":[1,0,0,3,2,0], "<API key>.html#<API key>":[1,0,0,3,2,5], "<API key>.html#<API key>":[1,0,0,3,2,6], "<API key>.html#<API key>":[1,0,0,3,2,4], "<API key>.html#<API key>":[1,0,0,3,2,7], "<API key>.html":[1,0,0,3,3], "<API key>.html#<API key>":[1,0,0,3,3,4], "<API key>.html#<API key>":[1,0,0,3,3,2], "<API key>.html#<API key>":[1,0,0,3,3,3], "<API key>.html#<API key>":[1,0,0,3,3,5], "<API key>.html#<API key>":[1,0,0,3,3,6], "<API key>.html#<API key>":[1,0,0,3,3,8], "<API key>.html#<API key>":[1,0,0,3,3,7], "<API key>.html#<API key>":[1,0,0,3,3,1], "<API key>.html#<API key>":[1,0,0,3,3,0], "<API key>.html":[1,0,0,3,4], "<API key>.html#<API key>":[1,0,0,3,4,3], "<API key>.html#<API key>":[1,0,0,3,4,4], "<API key>.html#<API key>":[1,0,0,3,4,1], "<API key>.html#<API key>":[1,0,0,3,4,5], "<API key>.html#<API key>":[1,0,0,3,4,0], "<API key>.html#<API key>":[1,0,0,3,4,7], "<API key>.html#<API key>":[1,0,0,3,4,6], "<API key>.html#<API key>":[1,0,0,3,4,2], "<API key>.html":[1,0,0,3,5], "<API key>.html#<API key>":[1,0,0,3,5,5], "<API key>.html#<API key>":[1,0,0,3,5,3], "<API 
key>.html#<API key>":[1,0,0,3,5,4], "<API key>.html#<API key>":[1,0,0,3,5,6], "<API key>.html#<API key>":[1,0,0,3,5,0], "<API key>.html#<API key>":[1,0,0,3,5,2], "<API key>.html#<API key>":[1,0,0,3,5,7], "<API key>.html#<API key>":[1,0,0,3,5,1], "<API key>.html#<API key>":[1,0,0,3,5,8], "<API key>.html":[1,0,0,3,6], "<API key>.html#<API key>":[1,0,0,3,6,2], "<API key>.html#<API key>":[1,0,0,3,6,6], "<API key>.html#<API key>":[1,0,0,3,6,3], "<API key>.html#<API key>":[1,0,0,3,6,0], "<API key>.html#<API key>":[1,0,0,3,6,7], "<API key>.html#<API key>":[1,0,0,3,6,8], "<API key>.html#<API key>":[1,0,0,3,6,1], "<API key>.html#<API key>":[1,0,0,3,6,4], "<API key>.html#<API key>":[1,0,0,3,6,5], "<API key>.html":[1,0,0,3,7], "<API key>.html#<API key>":[1,0,0,3,7,4], "<API key>.html#<API key>":[1,0,0,3,7,0], "<API key>.html#<API key>":[1,0,0,3,7,2], "<API key>.html#<API key>":[1,0,0,3,7,6], "<API key>.html#<API key>":[1,0,0,3,7,5], "<API key>.html#<API key>":[1,0,0,3,7,3], "<API key>.html#<API key>":[1,0,0,3,7,7], "<API key>.html#<API key>":[1,0,0,3,7,1], "<API key>.html":[1,0,0,3,8], "<API key>.html#<API key>":[1,0,0,3,8,16], "<API key>.html#<API key>":[1,0,0,3,8,4], "<API key>.html#<API key>":[1,0,0,3,8,5], "<API key>.html#<API key>":[1,0,0,3,8,10], "<API key>.html#<API key>":[1,0,0,3,8,15], "<API key>.html#<API key>":[1,0,0,3,8,7], "<API key>.html#<API key>":[1,0,0,3,8,13], "<API key>.html#<API key>":[1,0,0,3,8,11], "<API key>.html#<API key>":[1,0,0,3,8,9], "<API key>.html#<API key>":[1,0,0,3,8,1], "<API key>.html#<API key>":[1,0,0,3,8,3], "<API key>.html#<API key>":[1,0,0,3,8,17], "<API key>.html#<API key>":[1,0,0,3,8,14], "<API key>.html#<API key>":[1,0,0,3,8,6], "<API key>.html#<API key>":[1,0,0,3,8,12], "<API key>.html#<API key>":[1,0,0,3,8,2], "<API key>.html#<API key>":[1,0,0,3,8,8], "<API key>.html#<API key>":[1,0,0,3,8,0], "<API key>.html":[1,0,1,0,0], "<API key>.html#<API key>":[1,0,1,0,0,3], "<API key>.html#<API key>":[1,0,1,0,0,0], "<API key>.html#<API 
key>":[1,0,1,0,0,2], "<API key>.html#<API key>":[1,0,1,0,0,1], "<API key>.html":[1,0,1,0,1], "<API key>.html#<API key>":[1,0,1,0,1,1], "<API key>.html#<API key>":[1,0,1,0,1,0], "<API key>.html":[1,0,1,0,2], "<API key>.html#<API key>":[1,0,1,0,2,2] };
package org.vrodic.hshopper;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;

import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.os.Environment;
import android.util.Log;

/**
 * Dumps an entire SQLite database to a simple XML file on external storage.
 *
 * Output shape:
 *   &lt;database name='...'&gt;&lt;table name='...'&gt;&lt;row&gt;&lt;col name='...'&gt;value&lt;/col&gt;...&lt;/row&gt;...&lt;/table&gt;...&lt;/database&gt;
 * The android_metadata table, the sqlite_sequence table and unique-index
 * tables (names starting with "uidx") are skipped.
 */
public class DataXmlExporter {

    // Export files are written under this subdirectory of external storage.
    private static final String DATASUBDIRECTORY = HShopperDbAdapter.TAG;

    private SQLiteDatabase db;
    private XmlBuilder xmlBuilder;

    public DataXmlExporter(SQLiteDatabase db) {
        this.db = db;
    }

    /**
     * Exports every user table of the database to "exportFileNamePrefix.xml".
     *
     * @param dbName name recorded in the XML database element
     * @param exportFileNamePrefix base name (without extension) of the output file
     * @throws IOException if the output directory or file cannot be written
     */
    public void export(String dbName, String exportFileNamePrefix) throws IOException {
        Log.i(HShopperDbAdapter.TAG, "exporting database - " + dbName
                + " exportFileNamePrefix=" + exportFileNamePrefix);

        this.xmlBuilder = new XmlBuilder();
        this.xmlBuilder.start(dbName);

        // Enumerate all tables via the sqlite_master catalog.
        String sql = "select * from sqlite_master";
        Cursor c = this.db.rawQuery(sql, new String[0]);
        Log.d(HShopperDbAdapter.TAG, "select * from sqlite_master, cur size " + c.getCount());
        try {
            if (c.moveToFirst()) {
                do {
                    String tableName = c.getString(c.getColumnIndex("name"));
                    Log.d(HShopperDbAdapter.TAG, "table name " + tableName);
                    // skip metadata, sequence, and uidx (unique indexes)
                    if (!tableName.equals("android_metadata")
                            && !tableName.equals("sqlite_sequence")
                            && !tableName.startsWith("uidx")) {
                        this.exportTable(tableName);
                    }
                } while (c.moveToNext());
            }
        } finally {
            // BUG FIX: this cursor was previously never closed (resource leak).
            c.close();
        }

        String xmlString = this.xmlBuilder.end();
        this.writeToFile(xmlString, exportFileNamePrefix + ".xml");
        Log.i(HShopperDbAdapter.TAG, "exporting database complete");
    }

    /** Appends one table element with one row per record of {@code tableName}. */
    private void exportTable(final String tableName) throws IOException {
        Log.d(HShopperDbAdapter.TAG, "exporting table - " + tableName);
        this.xmlBuilder.openTable(tableName);
        String sql = "select * from " + tableName;
        Cursor c = this.db.rawQuery(sql, new String[0]);
        try {
            if (c.moveToFirst()) {
                int cols = c.getColumnCount();
                do {
                    this.xmlBuilder.openRow();
                    for (int i = 0; i < cols; i++) {
                        this.xmlBuilder.addColumn(c.getColumnName(i), c.getString(i));
                    }
                    this.xmlBuilder.closeRow();
                } while (c.moveToNext());
            }
        } finally {
            c.close();
        }
        this.xmlBuilder.closeTable();
    }

    /** Writes {@code xmlString} to DATASUBDIRECTORY/{@code exportFileName} on external storage. */
    private void writeToFile(String xmlString, String exportFileName) throws IOException {
        File dir = new File(Environment.getExternalStorageDirectory(), DATASUBDIRECTORY);
        if (!dir.exists() && !dir.mkdirs()) {
            // BUG FIX: a failed mkdirs() was previously ignored and surfaced
            // later as an obscure FileNotFoundException.
            throw new IOException("Unable to create directory " + dir);
        }
        File file = new File(dir, exportFileName);
        file.createNewFile();
        ByteBuffer buff = ByteBuffer.wrap(xmlString.getBytes());
        FileChannel channel = new FileOutputStream(file).getChannel();
        try {
            channel.write(buff);
        } finally {
            channel.close(); // also closes the underlying FileOutputStream
        }
    }

    /** Accumulates the XML document in a StringBuilder. */
    class XmlBuilder {
        private static final String OPEN_XML_STANZA = "<?xml version=\"1.0\" encoding=\"utf-8\"?>";
        private static final String CLOSE_WITH_TICK = "'>";
        private static final String DB_OPEN = "<database name='";
        private static final String DB_CLOSE = "</database>";
        private static final String TABLE_OPEN = "<table name='";
        private static final String TABLE_CLOSE = "</table>";
        private static final String ROW_OPEN = "<row>";
        private static final String ROW_CLOSE = "</row>";
        private static final String COL_OPEN = "<col name='";
        private static final String COL_CLOSE = "</col>";

        private final StringBuilder sb;

        public XmlBuilder() throws IOException {
            this.sb = new StringBuilder();
        }

        /** Begins the document with the XML declaration and database element. */
        void start(String dbName) {
            this.sb.append(OPEN_XML_STANZA);
            this.sb.append(DB_OPEN + escape(dbName) + CLOSE_WITH_TICK);
        }

        /** Closes the database element and returns the whole document. */
        String end() throws IOException {
            this.sb.append(DB_CLOSE);
            return this.sb.toString();
        }

        void openTable(String tableName) {
            this.sb.append(TABLE_OPEN + escape(tableName) + CLOSE_WITH_TICK);
        }

        void closeTable() {
            this.sb.append(TABLE_CLOSE);
        }

        void openRow() {
            this.sb.append(ROW_OPEN);
        }

        void closeRow() {
            this.sb.append(ROW_CLOSE);
        }

        /**
         * Appends one column element.
         *
         * BUG FIX: name and value are now XML-escaped; previously any value
         * containing &amp;, &lt;, &gt; or ' produced malformed XML. A null
         * value is rendered as the text "null" (matching the old string
         * concatenation behavior).
         */
        void addColumn(final String name, final String val) throws IOException {
            this.sb.append(COL_OPEN + escape(name) + CLOSE_WITH_TICK
                    + escape(String.valueOf(val)) + COL_CLOSE);
        }

        /** Minimal XML escaping for text content and single-quoted attribute values. */
        private String escape(String s) {
            return s.replace("&", "&amp;").replace("<", "&lt;")
                    .replace(">", "&gt;").replace("'", "&apos;");
        }
    }
}
import os

import rospkg
import lxml.etree as ET

from openag_brain import params
from openag.models import SoftwareModule, SoftwareModuleType
from openag.db_names import SOFTWARE_MODULE, SOFTWARE_MODULE_TYPE

# Mapping from Python type names to roslaunch-acceptable ones.
PARAM_TYPE_MAPPING = {'float': 'double'}


def create_node(parent, pkg, type, name, args=None):
    """
    Creates an xml node for the launch file that represents a ROS node.

    `parent` is the parent xml node. `pkg` is the ROS package of the node.
    `type` is the name of the executable for the node. `name` is the name of
    the ROS node. `args` is an optional command-line argument string.
    Returns the created element.
    """
    e = ET.SubElement(parent, 'node')
    e.attrib['pkg'] = pkg
    e.attrib['type'] = type
    e.attrib['name'] = name
    if args:
        e.attrib['args'] = args
    return e


def create_param(parent, name, value, type):
    """
    Creates an xml node for the launch file that represents a ROS parameter.

    `parent` is the parent xml node. `name` is the name of the parameter to
    set. `value` is the value of the parameter. `type` is the type of the
    parameter (e.g. int, float); Python type names are translated to
    roslaunch ones via PARAM_TYPE_MAPPING. Returns the created element.
    """
    e = ET.SubElement(parent, 'param')
    e.attrib['name'] = name
    e.attrib['value'] = value
    e.attrib['type'] = PARAM_TYPE_MAPPING.get(type, type)
    return e


def create_group(parent, ns):
    """
    Creates an xml node for the launch file that represents a ROS group.

    `parent` is the parent xml node. `ns` is the namespace of the group.
    Returns the created element.
    """
    e = ET.SubElement(parent, 'group')
    e.attrib['ns'] = ns
    return e


def create_remap(parent, from_val, to_val):
    """
    Creates an xml node for the launch file that represents a name remapping.

    `parent` is the parent xml node. `from_val` is the name that is to be
    remapped. `to_val` is the target name. Returns the created element
    (previously nothing was returned, unlike the sibling helpers).
    """
    e = ET.SubElement(parent, 'remap')
    e.attrib['from'] = from_val
    e.attrib['to'] = to_val
    return e


def create_arg(parent, name, default=None, value=None):
    """
    Creates an xml node for the launch file that represents a command line
    argument.

    `parent` is the parent xml node. `name` is the name of the argument.
    `default` is the default value of the argument. `value` is the value of
    the argument. At most one of `default` and `value` can be provided.
    Returns the created element.
    """
    e = ET.SubElement(parent, 'arg')
    e.attrib['name'] = name
    # BUG FIX: the previous truthiness check (`if default and value`) failed
    # to reject the invalid combination whenever either argument was a falsy
    # value such as 0 or ''.
    if default is not None and value is not None:
        raise ValueError(
            "Argument cannot have both a default value and a value"
        )
    if default is not None:
        e.attrib['default'] = str(default)
    if value is not None:
        e.attrib['value'] = str(value)
    return e


def update_launch(server):
    """
    Write a roslaunch file to `modules.launch` based on the software module
    configuration read from the `couchdb.Server` instance `server`.
    """
    # Form a launch file from the parameter configuration
    root = ET.Element('launch')
    groups = {None: root}
    module_db = server[SOFTWARE_MODULE]
    module_types_db = server[SOFTWARE_MODULE_TYPE]
    modules = {
        module_id: SoftwareModule(module_db[module_id]) for module_id in
        module_db if not module_id.startswith('_')
    }
    for module_id, module in modules.items():
        print('Processing module "{}" from server'.format(module_id))
        # Nodes are grouped by namespace, falling back to the environment id;
        # modules with neither go directly under the <launch> root.
        mod_ns = module.get("namespace", module.get("environment", None))
        if mod_ns not in groups:
            group = create_group(root, mod_ns)
            groups[mod_ns] = group
        else:
            group = groups[mod_ns]
        if module["type"] in module_types_db:
            module_type = SoftwareModuleType(module_types_db[module["type"]])
        else:
            # BUG FIX: message previously misspelled "nonexistent" and was
            # missing the closing quote around the type name.
            raise RuntimeError(
                'Module "{}" references nonexistent module type "{}"'.format(
                    module_id, module["type"]
                )
            )
        args = module.get("arguments", [])
        # NOTE(review): arguments are joined with ", "; roslaunch normally
        # separates args with spaces -- confirm consumers expect commas.
        args_str = ", ".join(args)
        node = create_node(
            group, module_type["package"], module_type["executable"],
            module_id, args_str
        )
        for param_name, param_info in module_type["parameters"].items():
            param_value = module.get("parameters", {}).get(
                param_name, param_info.get("default", None)
            )
            param_type = param_info["type"]
            if param_value is None:
                if param_info.get("required", False):
                    raise RuntimeError(
                        'Parameter "{param}" is not defined for software '
                        'module "{mod_id}"'.format(
                            param=param_name, mod_id=module_id
                        )
                    )
            else:
                # Booleans must be lowercased for roslaunch ("true"/"false").
                param_value = str(param_value) \
                    if not isinstance(param_value, bool) else \
                    str(param_value).lower()
                param_type = str(param_type)
                # Only emit a <param> when a value exists; optional
                # parameters without a value or default are skipped.
                create_param(node, param_name, param_value, param_type)
        for k, v in module.get("mappings", {}).items():
            create_remap(node, k, v)
    doc = ET.ElementTree(root)

    # Figure out where to write the launch file
    rospack = rospkg.RosPack()
    pkg_path = rospack.get_path('openag_brain')
    launch_path = os.path.join(pkg_path, 'modules.launch')
    doc.write(launch_path, pretty_print=True)
/* Foma: a finite-state toolkit and library. */ /* This file is part of foma. */ /* Foma is free software: you can redistribute it and/or modify */ /* published by the Free Software Foundation. */ /* Foma is distributed in the hope that it will be useful, */ /* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the */ #include <stdlib.h> #include <assert.h> #include <limits.h> #include <stdint.h> #include "foma.h" static struct fsm *fsm_minimize_brz(struct fsm *net); static struct fsm *fsm_minimize_hop(struct fsm *net); static struct fsm *rebuild_machine(struct fsm *net); static int *single_sigma_array, *double_sigma_array, *memo_table, *temp_move, *temp_group, maxsigma, epsilon_symbol, num_states, num_symbols, num_finals, mainloop, total_states; static Boolean *finals; struct statesym { int target; unsigned short int symbol; struct state_list *states; struct statesym *next; }; struct state_list { int state; struct state_list *next; }; struct p { struct e *first_e; struct e *last_e; struct p *current_split; struct p *next; struct agenda *agenda; int count; int t_count; int inv_count; int inv_t_count; }; struct e { struct p *group; struct e *left; struct e *right; int inv_count; }; struct agenda { struct p *p; struct agenda *next; Boolean index; }; // HFST MODIFICATIONS: struct trans_list -> struct trans_list_struct // struct trans_array -> struct trans_list_array // because some compilers complain about struct and variable having the same name static struct trans_list_struct { int inout; int source; } *trans_list; static struct trans_array_struct { struct trans_list_struct *transitions; unsigned int size; unsigned int tail; } *trans_array; static struct p *P, *Phead, *Pnext, *current_w; static struct e *E; static struct agenda *Agenda_head, *Agenda_top, *Agenda_next, *Agenda; static INLINE int refine_states(int sym); static void init_PE(); static void agenda_add(struct p *pptr, int start); static void sigma_to_pairs(struct fsm *net); /* static void <API 
key>(int symbol, int *symbol_in, int *symbol_out); */ static INLINE int <API key>(int in, int out); static void generate_inverse(struct fsm *net); struct fsm *fsm_minimize(struct fsm *net) { extern int g_minimal; extern int g_minimize_hopcroft; if (net == NULL) { return NULL; } /* The network needs to be deterministic and trim before we minimize */ if (net->is_deterministic != YES) net = fsm_determinize(net); if (net->is_pruned != YES) net = fsm_coaccessible(net); if (net->is_minimized != YES && g_minimal == 1) { if (g_minimize_hopcroft != 0) { net = fsm_minimize_hop(net); } else net = fsm_minimize_brz(net); fsm_update_flags(net,YES,YES,YES,YES,UNK,UNK); } return(net); } static struct fsm *fsm_minimize_brz(struct fsm *net) { return(fsm_determinize(fsm_reverse(fsm_determinize(fsm_reverse(net))))); } static struct fsm *fsm_minimize_hop(struct fsm *net) { struct e *temp_E; struct trans_array_struct *tptr; struct trans_list_struct *transitions; int i,j,minsym,next_minsym,current_i, stateno, thissize, source; unsigned int tail; fsm_count(net); if (net->finalcount == 0) { fsm_destroy(net); return(fsm_empty_set()); } num_states = net->statecount; P = NULL; /* 1. generate the inverse lookup table 2. generate P and E (partitions, states linked list) 3. Init Agenda = {Q, Q-F} 4. 
Split until Agenda is empty */ sigma_to_pairs(net); init_PE(); if (total_states == num_states) { goto bail; } generate_inverse(net); Agenda_head->index = 0; if (Agenda_head->next != NULL) Agenda_head->next->index = 0; for (Agenda = Agenda_head; Agenda != NULL; ) { /* Remove current_w from agenda */ current_w = Agenda->p; current_i = Agenda->index; Agenda->p->agenda = NULL; Agenda = Agenda->next; /* Store current group state number in tmp_group */ /* And figure out minsym */ /* If index is 0 we start splitting from the first symbol */ /* Otherwise we split from where we left off last time */ thissize = 0; minsym = INT_MAX; for (temp_E = current_w->first_e; temp_E != NULL; temp_E = temp_E->right) { stateno = temp_E - E; *(temp_group+thissize) = stateno; thissize++; tptr = trans_array+stateno; /* Clear tails if symloop should start from 0 */ if (current_i == 0) tptr->tail = 0; tail = tptr->tail; transitions = (tptr->transitions)+tail; if (tail < tptr->size && transitions->inout < minsym) { minsym = transitions->inout; } } for (next_minsym = INT_MAX; minsym != INT_MAX ; minsym = next_minsym, next_minsym = INT_MAX) { /* Add states to temp_move */ for (i = 0, j = 0; i < thissize; i++) { tptr = trans_array+*(temp_group+i); tail = tptr->tail; transitions = (tptr->transitions)+tail; while (tail < tptr->size && transitions->inout == minsym) { source = transitions->source; if (*(memo_table+(source)) != mainloop) { *(memo_table+(source)) = mainloop; *(temp_move+j) = source; j++; } tail++; transitions++; } tptr->tail = tail; if (tail < tptr->size && transitions->inout < next_minsym) { next_minsym = transitions->inout; } } if (j == 0) { continue; } mainloop++; if (refine_states(j) == 1) { break; /* break loop if we split current_w */ } } if (total_states == num_states) { break; } } net = rebuild_machine(net); xxfree(trans_array); xxfree(trans_list); bail: xxfree(Agenda_top); xxfree(memo_table); xxfree(temp_move); xxfree(temp_group); xxfree(finals); xxfree(E); xxfree(Phead); 
xxfree(single_sigma_array); xxfree(double_sigma_array); return(net); } static struct fsm *rebuild_machine(struct fsm *net) { int i,j, group_num, source, target, new_linecount = 0, arccount = 0; struct fsm_state *fsm; struct p *myp; struct e *thise; if (net->statecount == total_states) { return(net); } fsm = net->states; /* We need to make sure state 0 is first in its group */ /* to get the proper numbering of states */ if (E->group->first_e != E) { E->group->first_e = E; } /* Recycling t_count for group numbering use here */ group_num = 1; myp = P; while (myp != NULL) { myp->count = 0; myp = myp->next; } for (i=0; (fsm+i)->state_no != -1; i++) { thise = E+((fsm+i)->state_no); if (thise->group->first_e == thise) { new_linecount++; if ((fsm+i)->start_state == 1) { thise->group->t_count = 0; thise->group->count = 1; } else if (thise->group->count == 0) { thise->group->t_count = group_num++; thise->group->count = 1; } } } for (i=0, j=0; (fsm+i)->state_no != -1; i++) { thise = E+((fsm+i)->state_no); if (thise->group->first_e == thise) { source = thise->group->t_count; target = ((fsm+i)->target == -1) ? -1 : (E+((fsm+i)->target))->group->t_count; add_fsm_arc(fsm, j, source, (fsm+i)->in, (fsm+i)->out, target, finals[(fsm+i)->state_no], (fsm+i)->start_state); arccount = ((fsm+i)->target == -1) ? arccount : arccount+1; j++; } } add_fsm_arc(fsm, j, -1, -1, -1, -1, -1, -1); fsm = xxrealloc(fsm,sizeof(struct fsm_state)*(new_linecount+1)); net->states = fsm; net->linecount = j+1; net->arccount = arccount; net->statecount = total_states; return(net); } static INLINE int refine_states(int invstates) { int i, selfsplit; struct e *thise; struct p *tP, *newP = NULL; /* 1. add inverse(P,a) to table of inverses, disallowing duplicates 2. first pass on S, touch each state once, increasing P->t_count 3. 
for each P where counter != count, split and add to agenda */ /* Inverse to table of inverses */ selfsplit = 0; /* touch and increase P->counter */ for (i=0; i < invstates; i++) { ((E+(*(temp_move+i)))->group)->t_count++; ((E+(*(temp_move+i)))->group)->inv_t_count += ((E+(*(temp_move+i)))->inv_count); assert((E+(*(temp_move+i)))->group->t_count <= (E+(*(temp_move+i)))->group->count); } /* Split (this is the tricky part) */ for (i=0; i < invstates; i++) { thise = E+*(temp_move+i); tP = thise->group; /* Do we need to split? if we've touched as many states as there are in the partition we don't split */ if (tP->t_count == tP->count) { tP->t_count = 0; tP->inv_t_count = 0; continue; } if ((tP->t_count != tP->count) && (tP->count > 1) && (tP->t_count > 0)) { /* Check if we already split this */ newP = tP->current_split; if (newP == NULL) { /* printf("tP [%i] newP [%i]\n",tP->inv_count,tP->inv_t_count); */ /* Create new group newP */ total_states++; if (total_states == num_states) return(1); /* Abort now, machine is already minimal */ tP->current_split = Pnext++; newP = tP->current_split; newP->first_e = newP->last_e = thise; newP->count = 0; newP->inv_count = tP->inv_t_count; newP->inv_t_count = 0; newP->t_count = 0; newP->current_split = NULL; newP->agenda = NULL; /* Add to agenda */ /* If the current block (tP) is on the agenda, we add both back */ /* to the agenda */ /* In practice we need only add newP since tP stays where it is */ /* However, we mark the larger one as not starting the symloop */ /* from zero */ if (tP->agenda != NULL) { /* Is tP smaller */ if (tP->inv_count < tP->inv_t_count) { agenda_add(newP, 1); tP->agenda->index = 0; } else { agenda_add(newP, 0); } /* In the event that we're splitting the partition we're currently */ /* splitting with, we can simply add both new partitions to the agenda */ /* and break out of the entire sym loop after we're */ /* done with the current sym and move on with the agenda */ /* We process the larger one for all 
symbols */ /* and the smaller one for only the ones remaining in this symloop */ } else if (tP == current_w) { agenda_add(((tP->inv_count < tP->inv_t_count) ? tP : newP),0); agenda_add(((tP->inv_count >= tP->inv_t_count) ? tP : newP),1); selfsplit = 1; } else { /* If the block is not on the agenda, we add */ /* the smaller of tP, newP and start the symloop from 0 */ agenda_add((tP->inv_count < tP->inv_t_count ? tP : newP),0); } /* Add to middle of P-chain */ newP->next = P->next; P->next = newP; } thise->group = newP; newP->count++; /* need to make tP->last_e point to the last untouched e */ if (thise == tP->last_e) tP->last_e = thise->left; if (thise == tP->first_e) tP->first_e = thise->right; /* Adjust links */ if (thise->left != NULL) thise->left->right = thise->right; if (thise->right != NULL) thise->right->left = thise->left; if (newP->last_e != thise) { newP->last_e->right = thise; thise->left = newP->last_e; newP->last_e = thise; } thise->right = NULL; if (newP->first_e == thise) thise->left = NULL; /* Are we done for this block? 
Adjust counters */ if (newP->count == tP->t_count) { tP->count = tP->count - newP->count; tP->inv_count = tP->inv_count - tP->inv_t_count; tP->current_split = NULL; tP->t_count = 0; tP->inv_t_count = 0; } } } /* We return 1 if we just split the partition we were working with */ return (selfsplit); } static void agenda_add(struct p *pptr, int start) { /* Use FILO strategy here */ struct agenda *ag; //ag = xxmalloc(sizeof(struct agenda)); ag = Agenda_next++; if (Agenda != NULL) ag->next = Agenda; else ag->next = NULL; ag->p = pptr; ag->index = start; Agenda = ag; pptr->agenda = ag; } static void init_PE() { /* Create two members of P (nonfinals,finals) and put both of them on the agenda */ int i; struct e *last_f, *last_nonf; struct p *nonFP, *FP; struct agenda *ag; mainloop = 1; memo_table = xxcalloc(num_states,sizeof(int)); temp_move = xxcalloc(num_states,sizeof(int)); temp_group = xxcalloc(num_states,sizeof(int)); Phead = P = Pnext = xxcalloc(num_states+1, sizeof(struct p)); nonFP = Pnext++; FP = Pnext++; nonFP->next = FP; nonFP->count = <API key>; FP->next = NULL; FP->count = num_finals; FP->t_count = 0; nonFP->t_count = 0; FP->current_split = NULL; nonFP->current_split = NULL; FP->inv_count = nonFP->inv_count = FP->inv_t_count = nonFP->inv_t_count = 0; /* How many groups can we put on the agenda? 
*/ Agenda_top = Agenda_next = xxcalloc(num_states*2, sizeof(struct agenda)); Agenda_head = NULL; P = NULL; total_states = 0; if (num_finals > 0) { ag = Agenda_next++; FP->agenda = ag; P = FP; P->next = NULL; ag->p = FP; Agenda_head = ag; ag->next = NULL; total_states++; } if (num_states - num_finals > 0) { ag = Agenda_next++; nonFP->agenda = ag; ag->p = nonFP; ag->next = NULL; total_states++; if (Agenda_head != NULL) { Agenda_head->next = ag; P->next = nonFP; P->next->next = NULL; } else { P = nonFP; P->next = NULL; Agenda_head = ag; } } /* Initialize doubly linked list E */ E = xxcalloc(num_states,sizeof(struct e)); last_f = NULL; last_nonf = NULL; for (i=0; i < num_states; i++) { if (finals[i]) { (E+i)->group = FP; (E+i)->left = last_f; if (i > 0 && last_f != NULL) last_f->right = (E+i); if (last_f == NULL) FP->first_e = (E+i); last_f = (E+i); FP->last_e = (E+i); } else { (E+i)->group = nonFP; (E+i)->left = last_nonf; if (i > 0 && last_nonf != NULL) last_nonf->right = (E+i); if (last_nonf == NULL) nonFP->first_e = (E+i); last_nonf = (E+i); nonFP->last_e = (E+i); } (E+i)->inv_count = 0; } if (last_f != NULL) last_f->right = NULL; if (last_nonf != NULL) last_nonf->right = NULL; } static int trans_sort_cmp(const void *a, const void *b) { return (((const struct trans_list_struct *)a)->inout - ((const struct trans_list_struct *)b)->inout); } static void generate_inverse(struct fsm *net) { struct fsm_state *fsm; struct trans_array_struct *tptr; struct trans_list_struct *listptr; int i, source, target, offsetcount, symbol, size; fsm = net->states; trans_array = xxcalloc(net->statecount, sizeof(struct trans_array_struct)); trans_list = xxcalloc(net->arccount, sizeof(struct trans_list_struct)); /* Figure out the number of transitions each one has */ for (i=0; (fsm+i)->state_no != -1; i++) { if ((fsm+i)->target == -1) { continue; } target = (fsm+i)->target; (E+target)->inv_count++; (E+target)->group->inv_count++; (trans_array+target)->size++; } offsetcount = 0; for (i=0; i 
< net->statecount; i++) { (trans_array+i)->transitions = trans_list + offsetcount; offsetcount += (trans_array+i)->size; } for (i=0; (fsm+i)->state_no != -1; i++) { if ((fsm+i)->target == -1) { continue; } symbol = <API key>((fsm+i)->in,(fsm+i)->out); source = (fsm+i)->state_no; target = (fsm+i)->target; tptr = trans_array + target; ((tptr->transitions)+(tptr->tail))->inout = symbol; ((tptr->transitions)+(tptr->tail))->source = source; tptr->tail++; } /* Sort arcs */ for (i=0; i < net->statecount; i++) { listptr = (trans_array+i)->transitions; size = (trans_array+i)->size; if (size > 1) qsort(listptr, size, sizeof(struct trans_list_struct), trans_sort_cmp); } } static void sigma_to_pairs(struct fsm *net) { int i, j, x, y, z, next_x = 0; struct fsm_state *fsm; fsm = net->states; epsilon_symbol = -1; maxsigma = sigma_max(net->sigma); maxsigma++; single_sigma_array = xxmalloc(2*maxsigma*maxsigma*sizeof(int)); double_sigma_array = xxmalloc(maxsigma*maxsigma*sizeof(int)); for (i=0; i < maxsigma; i++) { for (j=0; j< maxsigma; j++) { *(double_sigma_array+maxsigma*i+j) = -1; } } /* f(x) -> y,z sigma pair */ /* f(y,z) -> x simple entry */ /* if exists f(n) <-> EPSILON, EPSILON, save n */ /* symbol(x) x>=1 */ /* Forward mapping: */ /* *(double_sigma_array+maxsigma*in+out) */ /* Backmapping: */ /* *(single_sigma_array+(symbol*2) = in(symbol) */ /* *(single_sigma_array+(symbol*2+1) = out(symbol) */ /* Table for checking whether a state is final */ finals = xxcalloc(num_states, sizeof(Boolean)); x = 0; num_finals = 0; net->arity = 1; for (i=0; (fsm+i)->state_no != -1; i++) { if ((fsm+i)->final_state == 1 && finals[(fsm+i)->state_no] != 1) { num_finals++; finals[(fsm+i)->state_no] = 1; } y = (fsm+i)->in; z = (fsm+i)->out; if (y != z || y == UNKNOWN || z == UNKNOWN) net->arity = 2; if ((y == -1) || (z == -1)) continue; if (*(double_sigma_array+maxsigma*y+z) == -1) { *(double_sigma_array+maxsigma*y+z) = x; *(single_sigma_array+next_x) = y; next_x++; *(single_sigma_array+next_x) = 
z; next_x++; if (y == EPSILON && z == EPSILON) { epsilon_symbol = x; } x++; } } num_symbols = x; } static INLINE int <API key>(int in, int out) { return(*(double_sigma_array+maxsigma*in+out)); }
<?php

/**
 * Classes for error logging.
 *
 * @subpackage ErrorLog
 * @author Selihov Sergei Stanislavovich <red331@mail.ru>
 */

/**
 * Abstract base class for all error-log implementations.
 *
 * Concrete subclasses decide where and how errors are recorded; this class
 * only fixes the contract they must implement.
 *
 * NOTE(review): GetInstance() is declared abstract as an *instance* method;
 * if it is meant to return a singleton, subclasses presumably implement it
 * statically -- confirm against the concrete implementations.
 *
 * @subpackage plcAbstractErrorLog
 * @author Selihov Sergei Stanislavovich <red331@mail.ru>
 */
abstract class plcAbstractErrorLog
    {
    /**
     * Records a single error in the log.
     *
     * @param mixed $usrError the error to log
     */
    abstract public function LogError($usrError);

    /**
     * Returns an instance of the concrete log class.
     */
    abstract public function GetInstance();

    /**
     * Sets the directory the log is written to.
     *
     * @param mixed $usrDir directory for the log storage
     */
    abstract public function SetDir($usrDir);
    }

?>
package sky.pvprank; import org.bukkit.ChatColor; import org.bukkit.command.Command; import org.bukkit.command.CommandExecutor; import org.bukkit.command.CommandSender; import org.bukkit.entity.Player; public class RankCommand implements CommandExecutor { private DatabaseHandler databaseHandler; private Ranks ranks; public RankCommand(DatabaseHandler databaseHandler, Ranks ranks) { this.databaseHandler = databaseHandler; this.ranks = ranks; } @Override public boolean onCommand(CommandSender sender, Command cmd, String arg, String[] args) { Player player = null; if (sender instanceof Player) { player = (Player) sender; } if (cmd.getName().equalsIgnoreCase("rank")) { this.HandleRankCmd(player); } if (args.length > 0) { player.sendMessage(ChatColor.RED + ""); } return true; } private void HandleRankCmd(Player player) { this.databaseHandler.LoadPlayerFame(player.getName()); this.databaseHandler.LoadConfig(); int fame = this.databaseHandler.PlayerFame(); String rank = this.ranks.GetRank(fame); int rankup = this.ranks.FameToRankUp(); String tag = this.databaseHandler.getTag(); if (rank == "") { player.sendMessage("§2========"); player.sendMessage("§2: "); } else { player.sendMessage("§2========"); player.sendMessage("§2: " + rank); } player.sendMessage(ChatColor.GREEN + tag + ": " + fame); if (rankup == 999999) { player.sendMessage("§2."); } else { player.sendMessage("§2: " + rankup); } } }
require 'data_mapper'
require 'daitss/archive'
require 'daitss/model/package'

module Daitss

  # A named batch of packages (many-to-many through BatchAssignment).
  class Batch
    include DataMapper::Resource

    property :id, String, :key => true

    has n, :batch_assignments
    has n, :packages, :through => :batch_assignments

    # helper methods - calculating the size and number of files is a best
    # guess based on available package information since batches can contain
    # packages that errored out, rejected or are simply missing information.
    # ex. daitss1 sips vs daitss2 sips

    # Total size of all package SIPs in bytes; packages without a recorded
    # size contribute 0.
    def size_in_bytes
      size = 0
      self.packages.each do |p|
        size += p.sip.size_in_bytes || 0
      end
      size
    end

    # Total number of datafiles across all packages, preferring the actual
    # count and falling back to the submitted count.
    def num_datafiles
      num = 0
      self.packages.each do |p|
        # BUG FIX: previously raised TypeError (nil can't be coerced into
        # Fixnum) when a package had neither count recorded; such packages
        # now contribute 0, consistent with size_in_bytes.
        num += p.sip.number_of_datafiles || p.sip.submitted_datafiles || 0
      end
      num
    end
  end

  # Join model linking a Batch to a Package.
  class BatchAssignment
    include DataMapper::Resource

    belongs_to :batch, :key => true
    belongs_to :package, :key => true
  end

end
'use strict'; var requireBookmarks = require('sdk/places/bookmarks'); var requireChrome = require('chrome'); var requireHeritage = require('sdk/core/heritage'); var requirePagemod = require('sdk/page-mod'); var requirePanel = require('sdk/panel'); var requirePreferences = require('sdk/preferences/service'); var requireSelf = require('sdk/self'); var requireTabs = require('sdk/tabs'); var requireToggle = require('sdk/ui/button/toggle'); var requireXpcom = require('sdk/platform/xpcom'); requireChrome.Cu.import("resource:///modules/NewTabURL.jsm"); requireChrome.Cu.import('resource://gre/modules/NetUtil.jsm'); requireChrome.Cu.import('resource://gre/modules/PlacesUtils.jsm'); requireChrome.Cu.import('resource://gre/modules/Services.jsm'); var Controller = { init: function() { }, dispel: function() { }, bind: function(bindHandle) { }, onBeginUpdateBatch: function() { }, onEndUpdateBatch: function() { }, onItemAdded: function() { { requirePreferences.set('extensions.BookRect.Controller.longTimestamp', String(new Date().getTime())); } }, onItemChanged: function() { { requirePreferences.set('extensions.BookRect.Controller.longTimestamp', String(new Date().getTime())); } }, onItemMoved: function() { { requirePreferences.set('extensions.BookRect.Controller.longTimestamp', String(new Date().getTime())); } }, onItemRemoved: function() { { requirePreferences.set('extensions.BookRect.Controller.longTimestamp', String(new Date().getTime())); } }, onItemVisited: function() { } }; Controller.init(); var Browser = { init: function() { }, dispel: function() { }, bind: function(bindHandle) { bindHandle.port.on('browserNewtab', function(objectArguments) { Browser.newtab.call(bindHandle, objectArguments, function(objectArguments) { bindHandle.port.emit('browserNewtab', objectArguments); }); }); bindHandle.port.on('browserNavigate', function(objectArguments) { Browser.navigate.call(bindHandle, objectArguments, function(objectArguments) { bindHandle.port.emit('browserNavigate', 
objectArguments); }); }); }, newtab: function(objectArguments, functionCallback) { { if (objectArguments.strOverride === '') { NewTabURL.reset(); } else if (objectArguments.strOverride !== '') { NewTabURL.override(objectArguments.strOverride); } } functionCallback({}); }, navigate: function(objectArguments, functionCallback) { { if (this.hide !== undefined) { this.hide(); } } { if (objectArguments.strOpen === 'openOverwrite') { requireTabs.activeTab.url = objectArguments.strLink; } else if (objectArguments.strOpen === 'openTab') { requireTabs.open(objectArguments.strLink); } else if (objectArguments.strOpen === 'openWindow') { } } functionCallback({}); } }; Browser.init(); var Bookmarks = { init: function() { }, dispel: function() { }, bind: function(bindHandle) { bindHandle.port.on('bookmarksPeek', function(objectArguments) { Bookmarks.peek.call(bindHandle, objectArguments, function(objectArguments) { bindHandle.port.emit('bookmarksPeek', objectArguments); }); }); bindHandle.port.on('bookmarksList', function(objectArguments) { Bookmarks.list.call(bindHandle, objectArguments, function(objectArguments) { bindHandle.port.emit('bookmarksList', objectArguments); }); }); bindHandle.port.on('bookmarksFavicon', function(objectArguments) { Bookmarks.favicon.call(bindHandle, objectArguments, function(objectArguments) { bindHandle.port.emit('bookmarksFavicon', objectArguments); }); }); bindHandle.port.on('bookmarksSearch', function(objectArguments) { Bookmarks.search.call(bindHandle, objectArguments, function(objectArguments) { bindHandle.port.emit('bookmarksSearch', objectArguments); }); }); }, peek: function(objectArguments, functionCallback) { var Lookup_resultHandle = []; var functionLookup = function() { { for (var intFor1 = 0; intFor1 < objectArguments.intIdent.length; intFor1 += 1) { var intIdent = objectArguments.intIdent[intFor1]; { try { PlacesUtils.bookmarks.getItemType(intIdent) } catch (e) { continue; } } { if (PlacesUtils.bookmarks.getItemType(intIdent) === 
PlacesUtils.bookmarks.TYPE_FOLDER) { Lookup_resultHandle.push({ 'intIdent': intIdent, 'strType': 'typeFolder', 'strImage': 'chrome://bookrect/content/images/folder.png', 'strTitle': PlacesUtils.bookmarks.getItemTitle(intIdent), 'strLink': '' }); } else if (PlacesUtils.bookmarks.getItemType(intIdent) === PlacesUtils.bookmarks.TYPE_BOOKMARK) { Lookup_resultHandle.push({ 'intIdent': intIdent, 'strType': 'typeBookmark', 'strImage': 'chrome://bookrect/content/images/bookmark.png', 'strTitle': PlacesUtils.bookmarks.getItemTitle(intIdent) || PlacesUtils.bookmarks.getBookmarkURI(intIdent).spec, 'strLink': PlacesUtils.bookmarks.getBookmarkURI(intIdent).spec }); } else if (PlacesUtils.bookmarks.getItemType(intIdent) === PlacesUtils.bookmarks.TYPE_SEPARATOR) { Lookup_resultHandle.push({ 'intIdent': intIdent, 'strType': 'typeSeparator', 'strImage': '', 'strTitle': '', 'strLink': '' }); } } } } functionCallback({ 'strCallback': objectArguments.strCallback, 'resultHandle': Lookup_resultHandle }); }; functionLookup(); }, list: function(objectArguments, functionCallback) { var Lookup_resultHandle = []; var functionLookup = function() { { if (objectArguments.intIdent === 0) { { Lookup_resultHandle.push({ 'intIdent': PlacesUtils.toolbarFolderId, 'longTimestamp': 0, 'intParent': 0, 'strType': 'typeFolder', 'strImage': 'chrome://bookrect/content/images/folder.png', 'strTitle': PlacesUtils.bookmarks.getItemTitle(PlacesUtils.toolbarFolderId), 'strLink': '', 'strTags': '', 'intAccesscount': 0 }); } { Lookup_resultHandle.push({ 'intIdent': PlacesUtils.<API key>, 'longTimestamp': 0, 'intParent': 0, 'strType': 'typeFolder', 'strImage': 'chrome://bookrect/content/images/folder.png', 'strTitle': PlacesUtils.bookmarks.getItemTitle(PlacesUtils.<API key>), 'strLink': '', 'strTags': '', 'intAccesscount': 0 }); } { Lookup_resultHandle.push({ 'intIdent': PlacesUtils.<API key>, 'longTimestamp': 0, 'intParent': 0, 'strType': 'typeFolder', 'strImage': 'chrome://bookrect/content/images/folder.png', 
'strTitle': PlacesUtils.bookmarks.getItemTitle(PlacesUtils.<API key>), 'strLink': '', 'strTags': '', 'intAccesscount': 0 }); } } else if (objectArguments.intIdent !== 0) { { var objectFolder = PlacesUtils.getFolderContents(objectArguments.intIdent); for (var intFor1 = 0; intFor1 < objectFolder.root.childCount; intFor1 += 1) { var objectNode = objectFolder.root.getChild(intFor1); { if (PlacesUtils.nodeIsFolder(objectNode) === true) { Lookup_resultHandle.push({ 'intIdent': objectNode.itemId, 'longTimestamp': objectNode.lastModified, 'intParent': objectArguments.intIdent, 'strType': 'typeFolder', 'strImage': 'chrome://bookrect/content/images/folder.png', 'strTitle': objectNode.title, 'strLink': '', 'strTags': '', 'intAccesscount': 0 }); } else if (PlacesUtils.nodeIsBookmark(objectNode) === true) { Lookup_resultHandle.push({ 'intIdent': objectNode.itemId, 'longTimestamp': objectNode.lastModified, 'intParent': objectArguments.intIdent, 'strType': 'typeBookmark', 'strImage': 'chrome://bookrect/content/images/bookmark.png', 'strTitle': objectNode.title || objectNode.uri, 'strLink': objectNode.uri, 'strTags': objectNode.tags, 'intAccesscount': objectNode.accessCount }); } else if (PlacesUtils.nodeIsSeparator(objectNode) === true) { Lookup_resultHandle.push({ 'intIdent': objectNode.itemId, 'longTimestamp': objectNode.lastModified, 'intParent': objectArguments.intIdent, 'strType': 'typeSeparator', 'strImage': '', 'strTitle': '', 'strLink': '', 'strTags': '', 'intAccesscount': 0 }); } } { if (PlacesUtils.nodeIsBookmark(objectNode) === true) { if (objectNode.icon !== null) { if (objectNode.icon.indexOf('moz-anno:favicon:') !== -1) { Lookup_resultHandle[Lookup_resultHandle.length - 1].strImage = objectNode.icon; } } } } } } } } functionCallback({ 'strCallback': objectArguments.strCallback, 'resultHandle': Lookup_resultHandle }); }; functionLookup(); }, favicon: function(objectArguments, functionCallback) { var Lookup_resultHandle = []; var functionLookup = function() { 
PlacesUtils.favicons.<API key>(NetUtil.newURI(objectArguments.strLink), function(objectLink) { if (objectLink !== null) { functionCallback({ 'strCallback': objectArguments.strCallback, 'strFavicon': 'moz-anno:favicon:' + objectLink.spec }); } }); }; functionLookup(); }, search: function(objectArguments, functionCallback) { var Lookup_resultHandle = []; var functionLookup = function() { requireBookmarks.search({ 'query': objectArguments.strSearch }, { 'count': 32, 'sort': 'title', 'descending': false }).on('end', function(resultHandle) { { for (var intFor1 = 0; intFor1 < resultHandle.length; intFor1 += 1) { Lookup_resultHandle.push({ 'intIdent': resultHandle[intFor1].id, 'strType': 'typeBookmark', 'strImage': 'chrome://bookrect/content/images/bookmark.png', 'strTitle': resultHandle[intFor1].title, 'strLink': resultHandle[intFor1].url }); } } functionCallback({ 'strCallback': objectArguments.strCallback, 'resultHandle': Lookup_resultHandle }); }); }; functionLookup(); } }; Bookmarks.init(); exports.main = function(optionsHandle) { { if (optionsHandle.loadReason === 'install') { requirePreferences.set('extensions.BookRect.Advanced.boolAutostart', true); } else if (optionsHandle.loadReason === 'enable') { requirePreferences.set('extensions.BookRect.Advanced.boolAutostart', true); } if (requirePreferences.get('extensions.BookRect.Advanced.boolAutostart', true) === true) { NewTabURL.override('about:bookrect'); } else if (requirePreferences.get('extensions.BookRect.Advanced.boolAutostart', true) === true) { NewTabURL.reset(); } } { PlacesUtils.bookmarks.addObserver(Controller, false); } { if (optionsHandle.loadReason === 'install') { var objectFirst = JSON.parse(requirePreferences.get('extensions.BookRect.Layout.strFirst', '[]')); var objectSecond = JSON.parse(requirePreferences.get('extensions.BookRect.Layout.strSecond', '[]')); var objectThird = JSON.parse(requirePreferences.get('extensions.BookRect.Layout.strThird', '[]')); if (objectFirst.length === 0) { if 
(objectSecond.length === 0) { if (objectThird.length === 0) { objectFirst.push(PlacesUtils.toolbarFolderId); objectFirst.push(PlacesUtils.<API key>); objectFirst.push(PlacesUtils.<API key>); } } } requirePreferences.set('extensions.BookRect.Layout.strFirst', JSON.stringify(objectFirst)); requirePreferences.set('extensions.BookRect.Layout.strSecond', JSON.stringify(objectSecond)); requirePreferences.set('extensions.BookRect.Layout.strThird', JSON.stringify(objectThird)); } } { requireXpcom.Factory({ 'contract': '@mozilla.org/network/protocol/about;1?what=bookrect', 'Component': requireHeritage.Class({ 'extends': requireXpcom.Unknown, 'interfaces': [ 'nsIAboutModule' ], 'newChannel': function(uriHandle) { var channelHandle = Services.io.newChannel('chrome://bookrect/content/index.html', null, null); { channelHandle.originalURI = uriHandle; } return channelHandle; }, 'getURIFlags': function(uriHandle) { return requireChrome.Ci.nsIAboutModule.ALLOW_SCRIPT; } }) }); } { requirePagemod.PageMod({ 'include': [ 'about:bookrect', 'about:bookrect#*', 'chrome://bookrect/content/index.html', 'chrome://bookrect/content/index.html#*' ], 'contentScriptFile': [ requireSelf.data.url('./index.js') ], '<API key>': { 'strType': 'typePagemod', 'intBookmarks': [ PlacesUtils.toolbarFolderId, PlacesUtils.<API key>, PlacesUtils.<API key> ] }, 'onAttach': function(workerHandle) { { Controller.bind(workerHandle); Browser.bind(workerHandle); Bookmarks.bind(workerHandle); } } }); } { var toolbarbuttonHandle = requireToggle.ToggleButton({ 'id': 'idToolbarbutton', 'label': 'BookRect', 'icon': 'chrome://bookrect/content/images/icon.png' }); { toolbarbuttonHandle.on('click', function(stateHandle) { if (stateHandle.checked === true) { toolbarpanelHandle.show({ 'position': toolbarbuttonHandle }); } }); toolbarbuttonHandle.on('click', function(stateHandle) { if (stateHandle.checked === false) { toolbarpanelHandle.hide(); } }); } var toolbarpanelHandle = requirePanel.Panel({ 'width': 640, 'height': 
480, 'contentURL': 'chrome://bookrect/content/index.html', 'contentScriptFile': [ requireSelf.data.url('./index.js') ], '<API key>': { 'strType': 'typePanel', 'intBookmarks': [ PlacesUtils.toolbarFolderId, PlacesUtils.<API key>, PlacesUtils.<API key> ] } }); { toolbarpanelHandle.on('show', function() { toolbarbuttonHandle.state('window', { 'checked': true }); toolbarpanelHandle.port.emit('panelShow', {}); }); toolbarpanelHandle.on('hide', function() { toolbarbuttonHandle.state('window', { 'checked': false }); toolbarpanelHandle.port.emit('panelHide', {}); }); } { Controller.bind(toolbarpanelHandle); Browser.bind(toolbarpanelHandle); Bookmarks.bind(toolbarpanelHandle); } } }; exports.onUnload = function(optionsHandle) { { NewTabURL.reset(); } { PlacesUtils.bookmarks.removeObserver(Controller); } };
#!/usr/bin/env python
# coding: utf-8
"""Set the mode of the first Arlo base station from the command line.

Credentials come from -u/-p, or fall back to a YAML secrets file (-c)
with ``arlo_username`` / ``arlo_password`` keys.
"""

import argparse
import sys

import yaml

from pyarlo import PyArlo


def parse_args(argv=None):
    """Parse command-line arguments.

    :param argv: optional list of argument strings; defaults to
        ``sys.argv[1:]`` (backward compatible with the no-argument form).
    :return: ``argparse.Namespace`` with username, password, config and MODE.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--username', '-u')
    parser.add_argument('--password', '-p')
    # NOTE(review): argparse applies FileType to a *string* default at parse
    # time, so a missing /config/secrets.yaml aborts even when -u/-p are
    # supplied. Kept for backward compatibility -- confirm before changing.
    parser.add_argument('--config', '-c', type=argparse.FileType('r'),
                        default='/config/secrets.yaml')
    parser.add_argument('MODE')
    return parser.parse_args(argv)


def arlo_mode(username, password, mode):
    """Switch the first Arlo base station to *mode*.

    :param username: Arlo account username.
    :param password: Arlo account password.
    :param mode: mode name; must be one of the base station's
        ``available_modes``.
    :raises RuntimeError: if *mode* is not offered by the base station.
    """
    arlo = PyArlo(username, password)
    base = arlo.base_stations[0]
    available_modes = base.available_modes
    if mode not in available_modes:
        raise RuntimeError("No such mode: {}\n"
                           "Available Modes: {}".format(
                               mode, available_modes))
    base.mode = mode


if __name__ == '__main__':
    ARGS = parse_args()
    USERNAME = ARGS.username
    PASSWORD = ARGS.password
    if not PASSWORD:
        # safe_load instead of yaml.load: the secrets file must never be
        # able to construct arbitrary Python objects.
        CONFIG = yaml.safe_load(ARGS.config)
        USERNAME = CONFIG.get('arlo_username')
        PASSWORD = CONFIG.get('arlo_password')
    if not PASSWORD:
        print("Credentials are required, set them either via -u "
              "and -p or a secrets file (-c)", file=sys.stderr)
        sys.exit(2)
    else:
        arlo_mode(USERNAME, PASSWORD, ARGS.MODE)
// <auto-generated> // This code was generated by a tool. // Changes to this file may cause incorrect behavior and will be lost if // the code is regenerated. // </auto-generated> namespace CustomControls.Properties { using System; <summary> A strongly-typed resource class, for looking up localized strings, etc. </summary> // This class was auto-generated by the <API key> // class via a tool like ResGen or Visual Studio. // To add or remove a member, edit your .ResX file then rerun ResGen // with the /str option, or rebuild your VS project. [global::System.CodeDom.Compiler.<API key>("System.Resources.Tools.<API key>", "2.0.0.0")] [global::System.Diagnostics.<API key>()] [global::System.Runtime.CompilerServices.<API key>()] internal class Resources { private static global::System.Resources.ResourceManager resourceMan; private static global::System.Globalization.CultureInfo resourceCulture; [global::System.Diagnostics.CodeAnalysis.<API key>("Microsoft.Performance", "CA1811:<API key>")] internal Resources() { } <summary> Returns the cached ResourceManager instance used by this class. </summary> [global::System.ComponentModel.<API key>(global::System.ComponentModel.<API key>.Advanced)] internal static global::System.Resources.ResourceManager ResourceManager { get { if (object.ReferenceEquals(resourceMan, null)) { global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("CustomControls.Properties.Resources", typeof(Resources).Assembly); resourceMan = temp; } return resourceMan; } } <summary> Overrides the current thread's CurrentUICulture property for all resource lookups using this strongly typed resource class. 
</summary> [global::System.ComponentModel.<API key>(global::System.ComponentModel.<API key>.Advanced)] internal static global::System.Globalization.CultureInfo Culture { get { return resourceCulture; } set { resourceCulture = value; } } internal static System.Drawing.Bitmap StromohabLogosmall { get { object obj = ResourceManager.GetObject("StromohabLogosmall", resourceCulture); return ((System.Drawing.Bitmap)(obj)); } } } }
/* classes: h_files */ #ifndef SCM_LOAD_H #define SCM_LOAD_H #include "libguile/__scm.h" SCM_API SCM scm_parse_path (SCM path, SCM tail); SCM_API SCM <API key> (SCM path, SCM base); SCM_API SCM scm_primitive_load (SCM filename); SCM_API SCM <API key> (const char *filename); SCM_API SCM <API key> (void); SCM_API SCM scm_sys_library_dir (void); SCM_API SCM scm_sys_site_dir (void); SCM_API SCM <API key> (void); SCM_API SCM <API key> (void); SCM_API SCM scm_search_path (SCM path, SCM filename, SCM rest); SCM_API SCM <API key> (SCM filename); SCM_API SCM <API key> (SCM <API key>); SCM_API SCM <API key> (const char *filename); SCM_INTERNAL SCM <API key> (void); SCM_INTERNAL void scm_init_load_path (void); SCM_INTERNAL void scm_init_load (void); SCM_INTERNAL void <API key> (void); SCM_INTERNAL void <API key> (void); #endif /* SCM_LOAD_H */ /* Local Variables: c-file-style: "gnu" End: */
# Contributing

When contributing to this repository, please first discuss the change you wish to make via issue, email, or any other method with the owners of this repository before making a change.

Please note we have a code of conduct; please follow it in all your interactions with the project.

## Pull Request Process

1. Ensure any install or build dependencies are removed before the end of the build layer when doing a build.
2. Update the README.md with details of changes to the interface; this includes new environment variables, exposed ports, useful file locations and container parameters.
3. Increase the version numbers in any example files and the README.md to the new version that this Pull Request would represent. The versioning scheme we use is [SemVer](http://semver.org/).
4. You may merge the Pull Request once you have the sign-off of two other developers, or if you do not have permission to do that, you may request the second reviewer to merge it for you.
<API key>::Application.routes.draw do root "home#index" resources :rooms # The priority is based upon order of creation: first created -> highest priority. # See how all your routes lay out with "rake routes". # You can have the root of your site routed with "root" # root 'welcome#index' # Example of regular route: # get 'products/:id' => 'catalog#view' # Example of named route that can be invoked with purchase_url(id: product.id) # get 'products/:id/purchase' => 'catalog#purchase', as: :purchase # Example resource route (maps HTTP verbs to controller actions automatically): # resources :products # Example resource route with options: # resources :products do # member do # get 'short' # post 'toggle' # end # collection do # get 'sold' # end # end # Example resource route with sub-resources: # resources :products do # resources :comments, :sales # resource :seller # end # Example resource route with more complex sub-resources: # resources :products do # resources :comments # resources :sales do # get 'recent', on: :collection # end # end # Example resource route with concerns: # concern :toggleable do # post 'toggle' # end # resources :posts, concerns: :toggleable # resources :photos, concerns: :toggleable # Example resource route within a namespace: # namespace :admin do
package org.jpedal.function; /** * Class to handle Type 3 shading (Stitching) in PDF */ public class PDFStitching extends PDFGenericFunction implements PDFFunction { /** * composed of other Functions */ private PDFFunction[] functions; private float[] bounds; private int n; public PDFStitching(PDFFunction[] functions, float[] encode, float[] bounds, float[] domain, float[] range) { //setup global values needed super(domain, range); if (bounds!= null) this.bounds=bounds; if (encode!= null) this.encode=encode; if(functions!=null) this.functions=functions; n = encode.length/2; } /** * Calculate shading for current location * * @param values input values for shading calculation * @return float[] containing the color values for shading at this point */ public float[] compute(float[] values){ //take raw input number float x=min(max(values[0],domain[0*2]),domain[0*2+1]); //see if value lies outside a boundary int subi=bounds.length-1; for(; subi>=0; subi if(x >= bounds[subi]) break; subi++; //if it does, truncate it float[] subinput = new float[1]; float xmin=domain[0],xmax=domain[1]; if(subi>0) xmin=(bounds[subi-1]); if(subi<bounds.length) xmax=(bounds[subi]); float ymin=encode[subi*2]; float ymax=encode[subi*2+1]; subinput[0] = interpolate(x, xmin, xmax, ymin, ymax); float[] output = functions[subi].computeStitch(subinput); float[] result=new float[output.length]; if (range!=null){ for(int i=0; i!=range.length/2;i++) result[i]= min(max(output[i], range[0 * 2]), range[0 * 2 + 1]); }else{ for(int i=0; i!=output.length;i++) result[i]= output[i]; } return result; } /** * Calculate shading for current location (Only used by Stitching) * * @param values : input values for shading calculation * @return float[] containing the color values for shading at this point */ public float[] computeStitch(float[] values){ //take raw input number float x=min(max(values[0],domain[0*2]),domain[0*2+1]); //see if value lies outside a boundary int subi=bounds.length-1; for (; subi>=0; subi if 
(x >= bounds[subi]) break; } subi++; //if it does, truncate it float[] subinput = new float[1]; float xmin=domain[0],xmax=domain[1]; if(subi>0) xmin=(bounds[subi-1]); if(subi<bounds.length) xmax=(bounds[subi]); float ymin=encode[subi*2]; float ymax=encode[subi*2+1]; subinput[0] = interpolate(x, xmin, xmax, ymin, ymax); float[] output = functions[subi].compute(subinput); float[] result=new float[output.length]; for(int i=0; i!=range.length/2;i++){ if (range!=null) result[i]=min(max(output[i],range[0*2]),range[0*2+1]); else result[i]=output[i]; } return result; } }
package net.foxopen.fox.thread.persistence; import com.google.common.collect.HashBasedTable; import com.google.common.collect.Table; import net.foxopen.fox.database.UCon; import net.foxopen.fox.ex.ExInternal; import net.foxopen.fox.thread.RequestContext; import net.foxopen.fox.thread.persistence.SharedDOMManager.SharedDOMType; import net.foxopen.fox.track.Track; import net.foxopen.fox.track.TrackTimer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.EnumSet; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.WeakHashMap; /** * PersistenceContext which serialises thread data to relational database tables. */ public class <API key> implements PersistenceContext { private static class PersistenceEntry { Persistable mPersistable; PersistenceMethod mPersistenceMethod; PersistenceEntry(Persistable pPersistable, PersistenceMethod pMethod) { mPersistable = pPersistable; mPersistenceMethod = pMethod; } } //Priorities are lowest to highest so comparisons put higher priorities first in the list (i.e. 
lower value = higher priority) private static final Table<PersistableType, PersistenceMethod, Integer> <API key> = HashBasedTable.create(); static { //Thread create depends on user thread session being created first (relational integrity constraint) <API key>.put(PersistableType.USER_THREAD_SESSION, PersistenceMethod.CREATE, 10); //Thread delete nullifies any other actions so do it first <API key>.put(PersistableType.THREAD, PersistenceMethod.DELETE, 20); //Module call stack depends on thread being serialised first <API key>.put(PersistableType.THREAD, PersistenceMethod.CREATE, 30); //Causes module inserts/deletes <API key>.put(PersistableType.MODULE_CALL_STACK, PersistenceMethod.UPDATE, 40); //Delete state calls before inserting them <API key>.put(PersistableType.STATE_CALL_STACK, PersistenceMethod.DELETE, 50); //Causes state inserts/deletes <API key>.put(PersistableType.STATE_CALL_STACK, PersistenceMethod.UPDATE, 60); //All other operations (updates etc) are not dependent on others } private static int methodPriority(PersistenceMethod pPersistenceMethod){ switch(pPersistenceMethod) { case DELETE: return 3; case CREATE: return 2; case UPDATE: return 1; default: return 0; } } private static class <API key> implements Comparator<PersistenceEntry> { /** * Orders PersistenceEntries based on the priorities defined in the <API key> static map. Entries with * a priority defined are considered to be higher than those without one. If both have a priority defined, the highest * wins. Creates and deletes are higher priority than updates. 
* @param pO1 Object 1 * @param pO2 Object 2 * @return see {@link Comparator} */ @Override public int compare(PersistenceEntry pO1, PersistenceEntry pO2) { Integer lPriority1 = <API key>.get(pO1.mPersistable.getPersistableType(), pO1.mPersistenceMethod); Integer lPriority2 = <API key>.get(pO2.mPersistable.getPersistableType(), pO2.mPersistenceMethod); if(lPriority1 != null && lPriority2 == null) { return -1; } else if (lPriority1 == null && lPriority2 != null) { return 1; } else if(lPriority1 == null && lPriority2 == null) { //Priority not defined for either; use method to decide (delete > create > update) if(methodPriority(pO1.mPersistenceMethod) > methodPriority(pO2.mPersistenceMethod)){ return -1; } else if (methodPriority(pO1.mPersistenceMethod) < methodPriority(pO2.mPersistenceMethod)){ return 1; } else { return 0; } } else { //Compare the 2 defined priorities return lPriority1 - lPriority2; } } } private final List<PersistenceEntry> mRequirePersisting = new ArrayList<>(); private final Set<<API key>> <API key> = Collections.newSetFromMap(new WeakHashMap<>()); /** Facets which have been marked for persistence in the current cycle. */ private final Set<PersistenceFacet> mMarkedFacets = EnumSet.noneOf(PersistenceFacet.class); private final String mThreadId; private Serialiser mSerialiser; private Deserialiser mDeserialiser = null; public <API key>(String pThreadId) { mThreadId = pThreadId; } @Override public void <API key>(RequestContext pRequestContext) { for(<API key> lPersistable : <API key>) { lPersistable.<API key>(); } } @Override public void <API key>(<API key> pPersistable) { <API key>.add(pPersistable); } @Override public void requiresPersisting(Persistable pPersistable, PersistenceMethod pMethod, PersistenceFacet... 
pFacetsToMark) { mRequirePersisting.add(new PersistenceEntry(pPersistable, pMethod)); mMarkedFacets.addAll(Arrays.asList(pFacetsToMark)); } @Override public void endPersistenceCycle(RequestContext pRequestContext) { UCon lUCon = pRequestContext.getContextUCon().getUCon("Thread Serialise"); try { mSerialiser = new DatabaseSerialiser(this, lUCon); //Sort the pending map so important operations happen first to avoid violating DB constraints Collections.sort(mRequirePersisting, new <API key>()); Track.pushInfo("ThreadSerialise", "Serialising thread to database", TrackTimer.THREAD_SERIALISE); try { Set<Persistable> lAlreadyPersisted = new HashSet<>(); for(PersistenceEntry lEntry : mRequirePersisting) { Persistable lPersistable = lEntry.mPersistable; //Skip persistables if they are already persisted if(!lAlreadyPersisted.contains(lEntry.mPersistable)) { Collection<PersistenceResult> lImplicated; switch(lEntry.mPersistenceMethod) { case CREATE: lImplicated = lPersistable.create(this); break; case UPDATE: lImplicated = lPersistable.update(this); break; case DELETE: lImplicated = lPersistable.delete(this); break; default: throw new ExInternal("Unknown persistence method " + lEntry.mPersistenceMethod.toString()); //Shuts up compiler } //Record any implicated persistables as done so they are not serialised twice //(including what was just persisted) lAlreadyPersisted.add(lPersistable); for(PersistenceResult lPersistenceResult : lImplicated) { lAlreadyPersisted.add(lPersistenceResult.getPersistable()); //Also remove any deletes from the listening list - might not be in here, but just in case if(lPersistenceResult.getMethod() == PersistenceMethod.DELETE) { <API key>.remove(lPersistenceResult.getPersistable()); } } } } //Clear for next persistence cycle mRequirePersisting.clear(); mMarkedFacets.clear(); } finally { Track.pop("ThreadSerialise", TrackTimer.THREAD_SERIALISE); } } finally { pRequestContext.getContextUCon().returnUCon(lUCon, "Thread Serialise"); } } @Override public 
String getThreadId() { return mThreadId; } @Override public Serialiser getSerialiser() { return mSerialiser; } @Override public Deserialiser setupDeserialiser(RequestContext pRequestContext){ mDeserialiser = new <API key>(this, pRequestContext.getContextUCon()); return mDeserialiser; } @Override public Deserialiser getDeserialiser() { if(mDeserialiser == null){ throw new <API key>("Call setupDeserialiser first"); } return mDeserialiser; } @Override public SharedDOMManager getSharedDOMManager(SharedDOMType pDOMType, String pDOMId) { return <API key>.<API key>(pDOMType, pDOMId); } @Override public boolean isFacetMarked(PersistenceFacet pFacet) { return mMarkedFacets.contains(pFacet); } }
<!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <title>@title</title> <link href="//cdn.bootcss.com/bootstrap/3.3.7/css/bootstrap.min.css" rel="stylesheet"> <link href="/static/trumbowyg/trumbowyg.min.css" rel="stylesheet"> <link href="/static/trumbowyg/colors.min.css" rel="stylesheet"> <link href="/static/common.css?@g.start_time" rel="stylesheet"> <link href="/static/blog.css?@g.start_time" rel="stylesheet"> </head> <body> @g.navbar <div class="container col-md-8 col-md-offset-2"> <form method="post"> <div class="form-group"> <div><input class="form-control" name="title" value="@blog.title" autofocus required></div> <div> <textarea class="form-control editor" name="content" required>@blog.content</textarea> </div> </div> <div class="form-group" style="text-align:center;"> <div class="btn-group"> <button class="btn btn-success"> </button> #if @modify != 0: <a href="/blog?id=@blog.id" class="btn btn-warning"> </a> #end </div> </div> </form> </div> </body> <script src="//cdn.bootcss.com/jquery/3.2.0/jquery.min.js"></script> <script src="//cdn.bootcss.com/bootstrap/3.3.7/js/bootstrap.min.js"></script> <script src="/static/trumbowyg/trumbowyg.min.js"></script> <script src="/static/trumbowyg/colors.min.js"></script> <script src="/static/trumbowyg/zh_cn.min.js"></script> <script> $(function(){ $('.editor').trumbowyg({ lang:'zh_cn', btns: [ ['viewHTML'], 'btnGrp-design', 'btnGrp-justify', 'btnGrp-lists', 'formatting', ['superscript', 'subscript'], ['foreColor', 'backColor'], ['link'], ['insertImage'], ['horizontalRule'], ['removeformat'], 'fullscreen', ], }); setTimeout(function(){$('[autofocus]').focus();}, 1000); }); </script> </html>
#!/usr/bin/env python
# This file is part of GNU Radio
# GNU Radio is free software; you can redistribute it and/or modify
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
# GNU Radio is distributed in the hope that it will be useful,
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# along with GNU Radio; see the file COPYING.  If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.

from gnuradio import gr, gr_unittest
import vocoder_swig as vocoder
import blocks_swig as blocks


class test_ulaw_vocoder (gr_unittest.TestCase):

    def setUp (self):
        # Fresh flow graph for each test case.
        self.tb = gr.top_block()

    def tearDown (self):
        # Drop the flow graph so resources are released between tests.
        self.tb = None

    def test001_module_load (self):
        # Sample values for which a u-law encode followed by a decode
        # reproduces the input exactly.
        expected = (8, 24, 40, 56, 72, 88, 104, 120, 132, 148, 164, 180,
                    196, 212, 228, 244, 260, 276, 292, 308, 324, 340)

        source = blocks.vector_source_s(expected)
        encoder = vocoder.ulaw_encode_sb()
        decoder = vocoder.ulaw_decode_bs()
        sink = blocks.vector_sink_s()

        # source -> encode -> decode -> sink: the round trip must be lossless.
        self.tb.connect(source, encoder, decoder, sink)
        self.tb.run()

        self.assertEqual(expected, sink.data())


if __name__ == '__main__':
    gr_unittest.run(test_ulaw_vocoder, "test_ulaw_vocoder.xml")
<?php

namespace _2BitRealms\command;

use _2BitRealms\_2BitFacs;
use pocketmine\command\CommandSender;

/**
 * Base class for a sub-command of the plugin's command handler.
 *
 * Concrete sub-commands describe themselves (name, usage, description,
 * aliases), declare who may run them via canUse(), and implement execute().
 */
abstract class SubCommand{

    /** @var _2BitFacs owning plugin instance, exposed via getPlugin() */
    private $plugin;

    /**
     * @param _2BitFacs $plugin owning plugin, stored for use by subclasses
     */
    public function __construct(_2BitFacs $plugin){
        $this->plugin = $plugin;
    }

    /**
     * Accessor for the owning plugin (final: subclasses may not override).
     *
     * @return _2BitFacs
     */
    public final function getPlugin(){
        return $this->plugin;
    }

    /**
     * Whether the given sender is allowed to run this sub-command.
     *
     * @param CommandSender $sender
     * @return bool
     */
    public abstract function canUse(CommandSender $sender);

    /**
     * Usage string shown when the sub-command is invoked incorrectly.
     *
     * @return string
     */
    public abstract function getUsage();

    /**
     * Primary name used to invoke this sub-command.
     *
     * @return string
     */
    public abstract function getName();

    /**
     * Short human-readable description for help listings.
     *
     * @return string
     */
    public abstract function getDescription();

    /**
     * Alternative names that also invoke this sub-command.
     *
     * @return string[]
     */
    public abstract function getAliases();

    /**
     * Run the sub-command.
     *
     * @param CommandSender $sender
     * @param string[] $args arguments after the sub-command name
     * @return bool
     */
    public abstract function execute(CommandSender $sender, array $args);
}
#if !defined(_SPANDSP_DTMF_H_) #define _SPANDSP_DTMF_H_ /*! \page dtmf_rx_page DTMF receiver \section dtmf_rx_page_sec_1 What does it do? The DTMF receiver detects the standard DTMF digits. It is compliant with ITU-T Q.23, ITU-T Q.24, and the local DTMF specifications of most administrations. Its passes the test suites. It also scores *very* well on the standard talk-off tests. The current design uses floating point extensively. It is not tolerant of DC. It is expected that a DC restore stage will be placed before the DTMF detector. Unless the dial tone filter is switched on, the detector has poor tolerance of dial tone. Whether this matter depends on your application. If you are using the detector in an IVR application you will need proper echo cancellation to get good performance in the presence of speech prompts, so dial tone will not exist. If you do need good dial tone tolerance, a dial tone filter can be enabled in the detector. The DTMF receiver's design assumes the channel is free of any DC component. \section dtmf_rx_page_sec_2 How does it work? Like most other DSP based DTMF detector's, this one uses the Goertzel algorithm to look for the DTMF tones. What makes each detector design different is just how that algorithm is used. Basic DTMF specs: - Minimum tone on = 40ms - Minimum tone off = 50ms - Maximum digit rate = 10 per second - Normal twist <= 8dB accepted - Reverse twist <= 4dB accepted - S/N >= 15dB will detect OK - Attenuation <= 26dB will detect OK - Frequency tolerance +- 1.5% will detect, +-3.5% will reject TODO: */ /*! \page dtmf_tx_page DTMF tone generation \section dtmf_tx_page_sec_1 What does it do? The DTMF tone generation module provides for the generation of the repertoire of 16 DTMF dual tones. \section dtmf_tx_page_sec_2 How does it work? */ #define MAX_DTMF_DIGITS 128 typedef void (*<API key>)(void *user_data, const char *digits, int len); /*! DTMF generator state descriptor. 
This defines the state of a single working instance of a DTMF generator. */ typedef struct dtmf_tx_state_s dtmf_tx_state_t; /*! DTMF digit detector descriptor. */ typedef struct dtmf_rx_state_s dtmf_rx_state_t; #if defined(__cplusplus) extern "C" { #endif /*! \brief Generate a buffer of DTMF tones. \param s The DTMF generator context. \param amp The buffer for the generated signal. \param max_samples The required number of generated samples. \return The number of samples actually generated. This may be less than max_samples if the input buffer empties. */ SPAN_DECLARE(int) dtmf_tx(dtmf_tx_state_t *s, int16_t amp[], int max_samples); /*! \brief Put a string of digits in a DTMF generator's input buffer. \param s The DTMF generator context. \param digits The string of digits to be added. \param len The length of the string of digits. If negative, the string is assumed to be a NULL terminated string. \return The number of digits actually added. This may be less than the length of the digit string, if the buffer fills up. */ SPAN_DECLARE(int) dtmf_tx_put(dtmf_tx_state_t *s, const char *digits, int len); /*! \brief Change the transmit level for a DTMF tone generator context. \param s The DTMF generator context. \param level The level of the low tone, in dBm0. \param twist The twist, in dB. */ SPAN_DECLARE(void) dtmf_tx_set_level(dtmf_tx_state_t *s, int level, int twist); /*! \brief Change the transmit on and off time for a DTMF tone generator context. \param s The DTMF generator context. \param on-time The on time, in ms. \param off_time The off time, in ms. */ SPAN_DECLARE(void) dtmf_tx_set_timing(dtmf_tx_state_t *s, int on_time, int off_time); /*! \brief Initialise a DTMF tone generator context. \param s The DTMF generator context. \return A pointer to the DTMF generator context. */ SPAN_DECLARE(dtmf_tx_state_t *) dtmf_tx_init(dtmf_tx_state_t *s); /*! \brief Release a DTMF tone generator context. \param s The DTMF tone generator context. \return 0 for OK, else -1. 
*/ SPAN_DECLARE(int) dtmf_tx_release(dtmf_tx_state_t *s); /*! \brief Free a DTMF tone generator context. \param s The DTMF tone generator context. \return 0 for OK, else -1. */ SPAN_DECLARE(int) dtmf_tx_free(dtmf_tx_state_t *s); /*! Set a optional realtime callback for a DTMF receiver context. This function is called immediately a confirmed state change occurs in the received DTMF. It is called with the ASCII value for a DTMF tone pair, or zero to indicate no tone is being received. \brief Set a realtime callback for a DTMF receiver context. \param s The DTMF receiver context. \param callback Callback routine used to report the start and end of digits. \param user_data An opaque pointer which is associated with the context, and supplied in callbacks. */ SPAN_DECLARE(void) <API key>(dtmf_rx_state_t *s, tone_report_func_t callback, void *user_data); /*! \brief Adjust a DTMF receiver context. \param s The DTMF receiver context. \param filter_dialtone TRUE to enable filtering of dialtone, FALSE to disable, < 0 to leave unchanged. \param twist Acceptable twist, in dB. < 0 to leave unchanged. \param reverse_twist Acceptable reverse twist, in dB. < 0 to leave unchanged. \param threshold The minimum acceptable tone level for detection, in dBm0. <= -99 to leave unchanged. */ SPAN_DECLARE(void) dtmf_rx_parms(dtmf_rx_state_t *s, int filter_dialtone, int twist, int reverse_twist, int threshold); /*! Process a block of received DTMF audio samples. \brief Process a block of received DTMF audio samples. \param s The DTMF receiver context. \param amp The audio sample buffer. \param samples The number of samples in the buffer. \return The number of samples unprocessed. */ SPAN_DECLARE(int) dtmf_rx(dtmf_rx_state_t *s, const int16_t amp[], int samples); /*! Get the status of DTMF detection during processing of the last audio chunk. \brief Get the status of DTMF detection during processing of the last audio chunk. \param s The DTMF receiver context. \return The current digit status. 
Either 'x' for a "maybe" condition, or the digit being detected. */ SPAN_DECLARE(int) dtmf_rx_status(dtmf_rx_state_t *s); /*! \brief Get a string of digits from a DTMF receiver's output buffer. \param s The DTMF receiver context. \param digits The buffer for the received digits. \param max The maximum number of digits to be returned, \return The number of digits actually returned. */ SPAN_DECLARE(size_t) dtmf_rx_get(dtmf_rx_state_t *s, char *digits, int max); /*! \brief Initialise a DTMF receiver context. \param s The DTMF receiver context. \param callback An optional callback routine, used to report received digits. If no callback routine is set, digits may be collected, using the dtmf_rx_get() function. \param user_data An opaque pointer which is associated with the context, and supplied in callbacks. \return A pointer to the DTMF receiver context. */ SPAN_DECLARE(dtmf_rx_state_t *) dtmf_rx_init(dtmf_rx_state_t *s, <API key> callback, void *user_data); /*! \brief Release a DTMF receiver context. \param s The DTMF receiver context. \return 0 for OK, else -1. */ SPAN_DECLARE(int) dtmf_rx_release(dtmf_rx_state_t *s); /*! \brief Free a DTMF receiver context. \param s The DTMF receiver context. \return 0 for OK, else -1. */ SPAN_DECLARE(int) dtmf_rx_free(dtmf_rx_state_t *s); #if defined(__cplusplus) } #endif #endif
presenter('form-layout',function () { var This=this; This.assign('selectConf',This.model(':selectConf')); this.assign('getForm',function (formAPI) { return function () { console.log(formAPI,formAPI.getData()); } }); this.assign('formConf',this.model('@form:formConf')); this.assign('validForm',function (formAPI) { return function () { console.log(formAPI,formAPI.valid()); } }); this.assign('window',window) this.layout('HOME@layout:default').display(); })
You are given coins of different denominations and a total amount of money. Write a function to compute the number of combinations that make up that amount. You may assume that you have infinite number of each kind of coin. Note: You can assume that 1. 0 <= amount <= 5000 1. 1 <= coin <= 5000 1. the number of coins is less than 500 1. the answer is guaranteed to fit into signed 32-bit integer Example 1: Input: amount = 5, coins = [1, 2, 5] Output: 4 Explanation: there are four ways to make up the amount: 5=5 5=2+2+1 5=2+1+1+1 5=1+1+1+1+1 Example 2: Input: amount = 3, coins = [2] Output: 0 Explanation: the amount of 3 cannot be made up just with coins of 2. Example 3: Input: amount = 10, coins = [10] Output: 1
package com.shatteredpixel.<API key>.actors.buffs; import com.shatteredpixel.<API key>.Dungeon; import com.shatteredpixel.<API key>.messages.Messages; import com.shatteredpixel.<API key>.scenes.GameScene; import com.shatteredpixel.<API key>.ui.BuffIndicator; import com.watabou.noosa.Image; public class MindVision extends FlavourBuff { public static final float DURATION = 20f; public int distance = 2; { type = buffType.POSITIVE; announced = true; } @Override public int icon() { return BuffIndicator.MIND_VISION; } @Override public void tintIcon(Image icon) { greyIcon(icon, 5f, cooldown()); } @Override public String toString() { return Messages.get(this, "name"); } @Override public void detach() { super.detach(); Dungeon.observe(); GameScene.updateFog(); } @Override public String desc() { return Messages.get(this, "desc", dispTurns()); } }
#include <vgStableHeaders.h> #include <vgMod/<API key>.h> #include <vgMod/vgFileDefinitionVG.h> #include <vgMod/<API key>.h> #include <vgMod/vgModUtility.h> #include <vgKernel/vgkStreamWriter.h> #include <vgSound/vgSoundManager.h> #include <vgCam/vgcaCamManager.h> #include <vgMovingManager.h> #include <vgAutoBuild/vgvAutoBuildManager.h> #include <vgCam/vggCamManagerSaver.h> //#include <vgMod/vggCamManagerSaver.h> //#include <vgMod/<API key>.h> //#include <vgSound/<API key>.h> #include <vgKernel/vgkPluginManager.h> #include <vgKernel/vgkCoordSystem.h> #include <vgSound/vgSoundDefinition.h> #include <vgAutoBuild/vgAutoBuildSaver.h> namespace vgMod{ FileWriterVGUpdate::FileWriterVGUpdate() { } FileWriterVGUpdate::~FileWriterVGUpdate() { } bool FileWriterVGUpdate::<API key>() { vgKernel::StreamWriter* pcfile = m_pfile.getPointer(); CFile* pfile = ((vgKernel::CFileStreamWriter*)pcfile)->getCFilePointer(); #if 0 String otherModulesSubDir = "ProAttachdata"; String attachDataPath = vgKernel::StringUtility::<API key>(m_strFilePath); attachDataPath += otherModulesSubDir ; attachDataPath += "\\"; vgKernel::SystemUtility::createDirectory(attachDataPath.c_str()); #endif String attachDataPath = m_strFilePath; vgMod::ModEffectConvertor::setProDataPath(attachDataPath); bool bDirect = vgMod::ModUtility::getFlagReadDirectly(); vgKernel::Plugin *plugin = vgKernel::PluginManager::getSingleton().getPluginRef( VGSOUND_DLL_NAME ); if (plugin != NULL) { <API key>( plugin, SoundManagerSaver, writeSoundMgrToFile, pfn1 ); assert( pfn1 != NULL ); (*pfn1)( m_strFilePath, attachDataPath, bDirect); } CamManagerSaver::writeCamMgrToFile( m_strFilePath , attachDataPath, bDirect); vgCore::MovingManager::getSingleton().writeToFile(m_strFilePath, attachDataPath); vgAutoBuild::AutoBuildSaver::<API key>(m_strFilePath, attachDataPath); return true; } bool FileWriterVGUpdate::<API key>() { vgKernel::StreamWriter* pcfile = m_pfile.getPointer(); CFile* pfile = 
((vgKernel::CFileStreamWriter*)pcfile)->getCFilePointer(); #if 0 String attachDataPath = vgKernel::StringUtility::<API key>(m_strFilePath); attachDataPath += otherModulesSubDir ; attachDataPath += "\\"; vgKernel::SystemUtility::createDirectory(attachDataPath.c_str()); #endif String attachDataPath = m_strFilePath; vgMod::ModEffectConvertor::setProDataPath(attachDataPath); bool bDirect = vgMod::ModUtility::getFlagReadDirectly(); <API key>( VGSOUND_DLL_NAME, SoundManagerSaver, writeSoundMgrToFile)( m_strFilePath , attachDataPath, bDirect) //vgSound::SoundManagerSaver::writeSoundMgrToFile( m_strFilePath , attachDataPath , true); CamManagerSaver::writeCamMgrToFile( m_strFilePath , attachDataPath, bDirect); vgCore::MovingManager::getSingleton().writeToFile(m_strFilePath, attachDataPath); // vgAutoBuild::AutoBuildSaver::<API key>(m_strFilePath, attachDataPath); return true; } }//namespace vgMod
# -*- coding: utf-8 -*- # * Authors: # * TJEBBES Gaston <g.t@majerti.fr> # * Arezki Feth <f.a@majerti.fr>; # * Miotte Julien <j.m@majerti.fr>; from autonomie.utils.files import ( encode_path, decode_path, issubdir, filesizeformat, ) def test_encode_decode(): st = u"$deù % ù$ùdeù % - /// // \ \dekodok %spkoij idje ' kopk \"" encoded = encode_path(st) assert decode_path(encoded) == st def test_issubdir(): assert(issubdir("/root/foo", "/root/foo/bar")) assert(not issubdir("/root/foo", "/root/bar")) assert(not issubdir("/root/foo", "/root/../../foo/bar")) def test_filesizeformat(): assert(filesizeformat(1024, 0) == "1ko") assert(filesizeformat(1024, 1) == "1.0ko") assert(filesizeformat(1024*1024, 0) == "1Mo") assert(filesizeformat(1024*1024, 1) == "1.0Mo")
using CP77.CR2W.Reflection; using FastMember; using static CP77.CR2W.Types.Enums; namespace CP77.CR2W.Types { [REDMeta] public class <API key> : CVariable { [Ordinal(0)] [RED("weaponSlot")] public TweakDBID WeaponSlot { get; set; } [Ordinal(1)] [RED("shots", 8)] public CStatic<<API key>> Shots { get; set; } [Ordinal(2)] [RED("latestShotId")] public CUInt32 LatestShotId { get; set; } [Ordinal(3)] [RED("continuousAttack")] public <API key> ContinuousAttack { get; set; } public <API key>(CR2WFile cr2w, CVariable parent, string name) : base(cr2w, parent, name) { } } }
<?php defined('KOOWA') or die; $server_limit = <API key>::<API key>(); ?> <?= helper('translator.script', array('strings' => array( // file size "tb", "mb", "kb", "gb", "b", "N/A", // file status 'done', 'failed', 'delete', 'uploading', // koowa.uploader.overwritable.js 'A file with the same name already exists. Click OK to overwrite and Cancel to create a new version.', 'Following files already exist. Would you like to overwrite them? {names}', // errors "Init error.", "HTTP Error.", "Duplicate file error.", "File size error.", "File: %s", "File: %s, size: %d, max file size: %d", "%s already present in the queue.", "Upload element accepts only %d file(s) at a time. Extra files were stripped.", "Image format either wrong or not supported.", "File count error.", "Runtime ran out of available memory.", "Upload URL might be wrong or doesn't exist.", "File extension error." ))); ?> <?= helper('behavior.bootstrap', array( 'javascript' => true, 'css' => false )); ?> <?= helper('behavior.koowa'); ?> <?= helper('behavior.jquery'); ?> <ktml:style src="assets://files/css/uploader.css" /> <ktml:script src="assets://files/js/uploader<?= !empty($debug) ? 
'' : '.min' ?>.js" /> <script> kQuery.koowa.uploader.server_limit = <?= json_encode($server_limit) ?>; </script> <div class="<API key>"> <!-- Uploader content box --> <script data-inline type="text/html" class="<API key>" data-name="content-box"> <div class="<API key>"> <div class="<API key>"> <div class="k-upload__buttons"> <button type="button" class="k-upload__button js-choose-files" data-caption-update="<?= escape(translate('Update')) ?>"> <?= translate('Upload') ?> </button> <button type="button" class="k-upload__button <API key> js-start-upload"> <?= translate('Start') ?> </button> <button class="k-upload__button js-stop-upload disabled"> <?= translate('Stop') ?> </button> <span class="k-upload__divider">/</span> </div> <div class="k-upload__content"> <div class="js-content"></div> </div> </div> </div> </script> <!-- Error box --> <script data-inline type="text/html" class="<API key>" data-name="error-box"> <div class="<API key>"> <div class="k-upload__message <API key>"> <div class="<API key>"> <div class="<API key> js-message-body"></div> <div class="<API key>"><button type="button" class="k-upload__button js-close-error"><?= translate('OK') ?></button></div> </div> </div> </div> </script> <!-- Extra info --> <script data-inline type="text/html" class="<API key>" data-name="info-box"> <div class="k-upload__body-info"> <div class="k-upload__info"> <div class="<API key>"> <div class="<API key>"> <table> <thead> <tr> <th class="<API key>" width="1%" style="width: 1%;"><a href="#" class="<API key> js-clear-queue"><?= translate('Clear queue') ?></a></th width="1"> <th width="1%" style="width: 1%;"></th> <th width="1%" style="width: 1%;"><?= translate('Size') ?></th> <th width="99%" style="width: 99%;"><?= translate('Title') ?></th> </tr> </thead> <tbody class="<API key>"></tbody> </table> </div> <div class="<API key>"> </div> </div> </div> </div> </script> <!-- Progress bar --> <script data-inline type="text/html" class="<API key>" data-name="progress-bar"> <div 
class="k-upload__progress progress progress-striped"> <div class="<API key> bar" style="width: 0"></div> </div> </script> <script data-inline type="text/html" class="<API key>" data-name="single-file"> <div class="k-upload__files"> <div class="k-upload__file-list"> <div id="{{=it.id}}" class="js-uploader-file"> <span class="<API key>">{{=it.name}}</span> {{?it.size}} , <span class="<API key>">{{=it.size}}</span> {{?}} </div> </div> </div> <div class="<API key>"> <?= translate('Drop another file to update') ?> </div> </script> <script data-inline type="text/html" class="<API key>" data-name="multiple-files"> <tr id="{{=it.id}}" class="js-uploader-file"> <td class="<API key>"> <span class="<API key> js-file-status is-in-queue"><?= translate('in queue') ?></span> </td> <td> <a class="<API key> js-remove-file">x</a> </td> <td> <span class="<API key>">{{=it.size}}</span> </td> <td class="k-upload__overflow"> <span class="<API key>">{{=it.name}}</span> </td> </tr> </script> <script data-inline type="text/html" class="<API key>" data-name="upload-pending"> <div> <?= translate('{total} files in the queue', array( 'total' => '{{=it.total}}' )) ?> <a class="<API key> js-open-info"> <span class="<API key>"><?= translate('show queue') ?></span> <span class="<API key>"><?= translate('hide queue') ?></span> </a> </div> <div class="<API key>"> <?= translate('Drop files to add to the queue') ?> </div> </script> <script data-inline type="text/html" class="<API key>" data-name="uploading"> <div> <?= translate('Uploading {total} files, {remaining} to go', array( 'total' => '{{=it.total}}', 'remaining' => '{{=it.remaining}}' ), '</span>') ?> {{? 
it.failed > 0 }} - <?= translate('{failed} errors', array( 'failed' => '{{=it.failed}}' )) ?> {{?}} <a class="<API key> js-open-info"> <span class="<API key>"><?= translate('show queue') ?></span> <span class="<API key>"><?= translate('hide queue') ?></span> </a> </div> </script> <script data-inline type="text/html" class="<API key>" data-name="upload-finished"> <div> <?= translate('Uploaded {total} files', array( 'total' => '{{=it.total}}' )) ?> {{? it.failed > 0 }} - <?= translate('{failed} errors', array( 'failed' => '{{=it.failed}}' )) ?> {{?}} <a class="<API key> js-open-info"> <span class="<API key>"><?= translate('show queue') ?></span> <span class="<API key>"><?= translate('hide queue') ?></span> </a> </div> <div class="<API key>"> <?= translate('Drop more files here') ?> </div> </script> <script data-inline type="text/html" class="<API key>" data-name="empty-single"> <div class="<API key>"> <?= translate('Drop a file here') ?> </div> </script> <script data-inline type="text/html" class="<API key>" data-name="empty-multiple"> <div class="<API key>"> <?= translate('Drop files here') ?> </div> <div class="<API key>"> <?= translate('Select files to upload') ?> </div> </script> <script data-inline type="text/html" class="<API key>" data-name="drop-message-single"> <div class="<API key>"> <span><?= translate('Drop a file here') ?></span> </div> </script> <script data-inline type="text/html" class="<API key>" data-name="<API key>"> <div class="<API key>"> <span><?= translate('Drop files here') ?></span> </div> </script> </div>
<!DOCTYPE HTML PUBLIC "- <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (1.8.0_152) on Fri Jun 28 21:50:19 MSK 2019 --> <title>Uses of Package bot.penguee.exception</title> <meta name="date" content="2019-06-28"> <link rel="stylesheet" type="text/css" href="../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../script.js"></script> </head> <body> <script type="text/javascript"><! try { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Package bot.penguee.exception"; } } catch(err) { } </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <div class="topNav"><a name="navbar.top"> </a> <div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.top.firstrow"> </a> <ul class="navList" title="Navigation"> <li><a href="../../../overview-summary.html">Overview</a></li> <li><a href="package-summary.html">Package</a></li> <li>Class</li> <li class="navBarCell1Rev">Use</li> <li><a href="package-tree.html">Tree</a></li> <li><a href="../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../index-files/index-1.html">Index</a></li> <li><a href="../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../index.html?bot/penguee/exception/package-use.html" target="_top">Frames</a></li> <li><a href="package-use.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="<API key>"> <li><a href="../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><! 
allClassesLink = document.getElementById("<API key>"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } </script> </div> <a name="skip.navbar.top"> </a></div> <div class="header"> <h1 title="Uses of Package bot.penguee.exception" class="title">Uses of Package<br>bot.penguee.exception</h1> </div> <div class="contentContainer"> <ul class="blockList"> <li class="blockList"> <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation"> <caption><span>Packages that use <a href="../../../bot/penguee/exception/package-summary.html">bot.penguee.exception</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Package</th> <th class="colLast" scope="col">Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><a href="#bot.penguee">bot.penguee</a></td> <td class="colLast">&nbsp;</td> </tr> </tbody> </table> </li> <li class="blockList"><a name="bot.penguee"> </a> <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing classes, and an explanation"> <caption><span>Classes in <a href="../../../bot/penguee/exception/package-summary.html">bot.penguee.exception</a> used by <a href="../../../bot/penguee/package-summary.html">bot.penguee</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colOne" scope="col">Class and Description</th> </tr> <tbody> <tr class="altColor"> <td class="colOne"><a href="../../../bot/penguee/exception/class-use/<API key>.html#bot.penguee"><API key></a>&nbsp;</td> </tr> <tr class="rowColor"> <td class="colOne"><a href="../../../bot/penguee/exception/class-use/<API key>.html#bot.penguee"><API key></a>&nbsp;</td> </tr> </tbody> </table> </li> </ul> </div> <div class="bottomNav"><a name="navbar.bottom"> </a> <div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div> <a 
name="navbar.bottom.firstrow"> </a> <ul class="navList" title="Navigation"> <li><a href="../../../overview-summary.html">Overview</a></li> <li><a href="package-summary.html">Package</a></li> <li>Class</li> <li class="navBarCell1Rev">Use</li> <li><a href="package-tree.html">Tree</a></li> <li><a href="../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../index-files/index-1.html">Index</a></li> <li><a href="../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../index.html?bot/penguee/exception/package-use.html" target="_top">Frames</a></li> <li><a href="package-use.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="<API key>"> <li><a href="../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><! allClassesLink = document.getElementById("<API key>"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } </script> </div> <a name="skip.navbar.bottom"> </a></div> </body> </html>
// This program is free software: you can redistribute it and/or modify // (at your option) any later version. // This program is distributed in the hope that it will be useful, // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #include "H_MEM_SCHED_FIFO.h" Define_Module(H_MEM_SCHED_FIFO); H_MEM_SCHED_FIFO::~H_MEM_SCHED_FIFO() { } void H_MEM_SCHED_FIFO::initialize(){ <API key>::initialize(); memoryCells.clear(); totalMemory_Blocks = <API key>((int)memorySize_MB * 1024); // memory in KB <API key> = totalMemory_Blocks ; memoryCell* cell = new memoryCell(); cell->uId = 0; cell->pId = 0; cell->vmGate = nodeGate; cell->vmTotalBlocks_KB = totalMemory_Blocks; cell->remainingBlocks_KB = totalMemory_Blocks; memoryCells.push_back(cell); } void H_MEM_SCHED_FIFO::finish(){ <API key>::finish(); } void H_MEM_SCHED_FIFO::schedulingMemory(icancloud_Message *msg){ memoryCell* cell; bool found = false; int operation = msg->getOperation(); int requiredBlocks; <API key>* sm_mem; int <API key> = 0; // The operation come from the physical resources if (msg->arrivedOn("fromNodeMemoryO") || msg->arrivedOn("fromNodeMemoryI")) sendMemoryMessage(msg); // The operation is a remote operation. 
So it will go to the OS else if (msg->getRemoteOperation()){ sendRequestMessage(msg, toVMMemoryI->getGate(nodeGate)); } else if (operation == SM_MEM_ALLOCATE) { sm_mem = dynamic_cast <<API key>*> (msg); // Memory account requiredBlocks = <API key>(sm_mem->getMemSize()); // Get the memory cell for (int i = 0; (i < (int)memoryCells.size()) && (!found); i++){ cell = (*(memoryCells.begin() + i)); if ((cell->uId == msg->getUid()) && (cell->pId == msg->getPid())){ found = true; <API key> = cell->remainingBlocks_KB - requiredBlocks; if (cell->remainingBlocks_KB < 0) cell->remainingBlocks_KB = 0; } } if (!found) throw cRuntimeError ("H_MEM_SCHED_FIFO::schedulingMemory->the user:%i with vm id:%i not found..\n",msg->getUid(), msg->getPid()); if (<API key> <= 0){ // Not enough memory showDebugMessage ("Not enough memory!. Free memory blocks: %d - Requested blocks: %d", cell->remainingBlocks_KB, requiredBlocks); // Cast! sm_mem->setResult (<API key>); // Response message sm_mem->setIsResponse(true); // Send message back! sendResponseMessage (sm_mem); }else if (<API key> >= 0){ // Decrement the memory in the Hypervisor cell->remainingBlocks_KB = <API key>; sendMemoryMessage(sm_mem); } else { // The size is 0! 
sendRequestMessage(msg, toVMMemoryO->getGate(msg->getArrivalGate()->getIndex())); } } else if (operation == SM_MEM_RELEASE) { sm_mem = dynamic_cast <<API key>*> (msg); requiredBlocks = <API key>(sm_mem->getMemSize()); for (int i = 0; (i < (int)memoryCells.size()) && (!found); i++){ cell = (*(memoryCells.begin() + i)); if ((cell->uId == msg->getUid()) && (cell->pId) == (cell->pId)){ found = true; cell->remainingBlocks_KB += requiredBlocks; if (cell->remainingBlocks_KB > cell->vmTotalBlocks_KB) cell->remainingBlocks_KB = cell->vmTotalBlocks_KB; sendMemoryMessage(sm_mem); } } if (!found) throw cRuntimeError ("H_MEM_SCHED_FIFO::schedulingMemory->the user%i with vm id:%i not found..\n",msg->getUid(), msg->getPid()); } else{ //Get the gate and the index of the arrival msg sendMemoryMessage(msg); } } int H_MEM_SCHED_FIFO::setVM (cGate* oGateI, cGate* oGateO, cGate* iGateI, cGate* iGateO, int uId, int pId, int requestedMemory_KB){ int gateidx = <API key>::setVM(oGateI, oGateO, iGateI, iGateO, requestedMemory_KB, uId, pId); memoryCell* cell = new memoryCell(); cell->uId = uId; cell->pId = pId; cell->vmGate = gateidx; cell->vmTotalBlocks_KB = <API key>(requestedMemory_KB); cell->remainingBlocks_KB = cell->vmTotalBlocks_KB; memoryCells.push_back(cell); <API key> -= cell->vmTotalBlocks_KB; return 0; } void H_MEM_SCHED_FIFO::freeVM(int uId, int pId){ memoryCell* cell; bool found = false; for (int i = 0; (i < (int)memoryCells.size()) && (!found); i++){ cell = (*(memoryCells.begin() + i)); if ((cell->uId == uId) && ((cell->pId) == pId)){ found = true; <API key> += (cell->vmTotalBlocks_KB); memoryCells.erase(memoryCells.begin() + i); } } <API key>::freeVM(uId, pId); } int H_MEM_SCHED_FIFO::getVMGateIdx(int uId, int pId){ memoryCell* cell; bool found = false; int gateIdx = -1; for (int i = 0; (i < (int)memoryCells.size()) && (!found); i++){ cell = (*(memoryCells.begin() + i)); if ((cell->uId == uId) && ((cell->pId) == pId)){ found = true; gateIdx = cell->vmGate; } } return gateIdx; 
} void H_MEM_SCHED_FIFO::printCells(string methodName){ memoryCell* cell; for (int i = 0; (i < (int)memoryCells.size()); i++){ cell = (*(memoryCells.begin() + i)); printf("H_MEM_SCHED_FIFO::printCells [%s] -->cell[%i] - uId-%i pId-%i gate-%i TotalBlocks-%i remainingBlocks-%i\n",methodName.c_str(), i, cell->uId, cell->pId, cell->vmGate, cell->vmTotalBlocks_KB, cell->remainingBlocks_KB); } } double H_MEM_SCHED_FIFO::<API key>(int uId, int pId){ memoryCell* cell; bool found = false; int size = -1; for (int i = 0; (i < (int)memoryCells.size()) && (!found); i++){ cell = (*(memoryCells.begin() + i)); if ((cell->uId == uId) && ((cell->pId) == pId)){ found = true; size = (((cell->vmTotalBlocks_KB - cell->remainingBlocks_KB) * blockSize_KB) / 1024); } } return size; }
#ifdef HAVE_CONFIG_H #include "config.h" #endif #include <stdlib.h> #include <string.h> #include "../weechat.h" #include "../wee-hook.h" #include "../wee-infolist.h" #include "../wee-log.h" #include "../wee-string.h" #include "../../gui/gui-color.h" #include "../../gui/gui-line.h" /* * Hooks a message printed by WeeChat. * * Returns pointer to new hook, NULL if error. */ struct t_hook * hook_print (struct t_weechat_plugin *plugin, struct t_gui_buffer *buffer, const char *tags, const char *message, int strip_colors, <API key> *callback, const void *callback_pointer, void *callback_data) { struct t_hook *new_hook; struct t_hook_print *new_hook_print; if (!callback) return NULL; new_hook = malloc (sizeof (*new_hook)); if (!new_hook) return NULL; new_hook_print = malloc (sizeof (*new_hook_print)); if (!new_hook_print) { free (new_hook); return NULL; } hook_init_data (new_hook, plugin, HOOK_TYPE_PRINT, <API key>, callback_pointer, callback_data); new_hook->hook_data = new_hook_print; new_hook_print->callback = callback; new_hook_print->buffer = buffer; new_hook_print->tags_array = string_split_tags (tags, &new_hook_print->tags_count); new_hook_print->message = (message) ? strdup (message) : NULL; new_hook_print->strip_colors = strip_colors; hook_add_to_list (new_hook); return new_hook; } /* * Executes a print hook. */ void hook_print_exec (struct t_gui_buffer *buffer, struct t_gui_line *line) { struct t_hook *ptr_hook, *next_hook; char *prefix_no_color, *message_no_color; if (!weechat_hooks[HOOK_TYPE_PRINT]) return; if (!line->data->message || !line->data->message[0]) return; prefix_no_color = (line->data->prefix) ? 
gui_color_decode (line->data->prefix, NULL) : NULL; message_no_color = gui_color_decode (line->data->message, NULL); if (!message_no_color) { if (prefix_no_color) free (prefix_no_color); return; } hook_exec_start (); ptr_hook = weechat_hooks[HOOK_TYPE_PRINT]; while (ptr_hook) { next_hook = ptr_hook->next_hook; if (!ptr_hook->deleted && !ptr_hook->running && (!HOOK_PRINT(ptr_hook, buffer) || (buffer == HOOK_PRINT(ptr_hook, buffer))) && (!HOOK_PRINT(ptr_hook, message) || !HOOK_PRINT(ptr_hook, message)[0] || string_strcasestr (prefix_no_color, HOOK_PRINT(ptr_hook, message)) || string_strcasestr (message_no_color, HOOK_PRINT(ptr_hook, message))) && (!HOOK_PRINT(ptr_hook, tags_array) || gui_line_match_tags (line->data, HOOK_PRINT(ptr_hook, tags_count), HOOK_PRINT(ptr_hook, tags_array)))) { /* run callback */ ptr_hook->running = 1; (void) (HOOK_PRINT(ptr_hook, callback)) (ptr_hook->callback_pointer, ptr_hook->callback_data, buffer, line->data->date, line->data->tags_count, (const char **)line->data->tags_array, (int)line->data->displayed, (int)line->data->highlight, (HOOK_PRINT(ptr_hook, strip_colors)) ? prefix_no_color : line->data->prefix, (HOOK_PRINT(ptr_hook, strip_colors)) ? message_no_color : line->data->message); ptr_hook->running = 0; } ptr_hook = next_hook; } if (prefix_no_color) free (prefix_no_color); if (message_no_color) free (message_no_color); hook_exec_end (); } /* * Frees data in a print hook. */ void <API key> (struct t_hook *hook) { if (!hook || !hook->hook_data) return; if (HOOK_PRINT(hook, tags_array)) { <API key> (HOOK_PRINT(hook, tags_array)); HOOK_PRINT(hook, tags_array) = NULL; } if (HOOK_PRINT(hook, message)) { free (HOOK_PRINT(hook, message)); HOOK_PRINT(hook, message) = NULL; } free (hook->hook_data); hook->hook_data = NULL; } /* * Adds print hook data in the infolist item. 
* * Returns: * 1: OK * 0: error */ int <API key> (struct t_infolist_item *item, struct t_hook *hook) { if (!item || !hook || !hook->hook_data) return 0; if (!<API key> (item, "callback", HOOK_PRINT(hook, callback))) return 0; if (!<API key> (item, "buffer", HOOK_PRINT(hook, buffer))) return 0; if (!<API key> (item, "tags_count", HOOK_PRINT(hook, tags_count))) return 0; if (!<API key> (item, "tags_array", HOOK_PRINT(hook, tags_array))) return 0; if (!<API key> (item, "message", HOOK_PRINT(hook, message))) return 0; if (!<API key> (item, "strip_colors", HOOK_PRINT(hook, strip_colors))) return 0; return 1; } /* * Prints print hook data in WeeChat log file (usually for crash dump). */ void <API key> (struct t_hook *hook) { int i, j; if (!hook || !hook->hook_data) return; log_printf (" print data:"); log_printf (" callback. . . . . . . : 0x%lx", HOOK_PRINT(hook, callback)); log_printf (" buffer. . . . . . . . : 0x%lx", HOOK_PRINT(hook, buffer)); log_printf (" tags_count. . . . . . : %d", HOOK_PRINT(hook, tags_count)); log_printf (" tags_array. . . . . . : 0x%lx", HOOK_PRINT(hook, tags_array)); if (HOOK_PRINT(hook, tags_array)) { for (i = 0; i < HOOK_PRINT(hook, tags_count); i++) { for (j = 0; HOOK_PRINT(hook, tags_array)[i][j]; j++) { log_printf (" tags_array[%03d][%03d]: '%s'", i, j, HOOK_PRINT(hook, tags_array)[i][j]); } } } log_printf (" message . . . . . . . : '%s'", HOOK_PRINT(hook, message)); log_printf (" strip_colors. . . . . : %d", HOOK_PRINT(hook, strip_colors)); }
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace Gap.Network.Helper
{
    /// <summary>
    /// Extension helpers for converting raw byte buffers to text.
    /// </summary>
    public static class ByteHelper
    {
        /// <summary>Decodes the entire buffer as UTF-8 text.</summary>
        /// <param name="buffer">Bytes to decode.</param>
        /// <returns>The decoded string.</returns>
        public static string AsString(this byte[] buffer) => Encoding.UTF8.GetString(buffer);

        /// <summary>
        /// Decodes the first <paramref name="legth"/> bytes of the buffer as UTF-8 text.
        /// </summary>
        /// <param name="buffer">Bytes to decode.</param>
        /// <param name="legth">Number of bytes to decode, starting at offset 0.
        /// (Name kept as-is for source compatibility; "legth" is a historical typo
        /// for "length".)</param>
        /// <returns>The decoded string.</returns>
        public static string AsString(this byte[] buffer, int legth) => Encoding.UTF8.GetString(buffer, 0, legth);
    }
}
using System;
using System.Diagnostics;
using System.Runtime.CompilerServices;

namespace Bss.Core.Utils
{
    /// <summary>
    /// Helpers for ad-hoc wall-clock timing of code paths during development.
    /// Output goes to the attached debug listeners.
    /// </summary>
    public static class BenchmarkUtils
    {
        /// <summary>
        /// Runs <paramref name="action"/> once and writes its elapsed wall-clock
        /// time to <see cref="Debug"/> output.
        /// </summary>
        /// <param name="action">The code to measure. Must not be null.</param>
        /// <param name="messageConsole">Label for the measurement; defaults to
        /// the caller's member name via <see cref="CallerMemberNameAttribute"/>.</param>
        /// <exception cref="ArgumentNullException">
        /// Thrown when <paramref name="action"/> is null.</exception>
        public static void Benchmark(Action action, [CallerMemberName] string messageConsole = "")
        {
            if (action == null)
                throw new ArgumentNullException(nameof(action));

            Debug.WriteLine(messageConsole);
            var timer = Stopwatch.StartNew();
            try
            {
                action();
            }
            finally
            {
                // Stop and report even when the action throws, so a partial
                // timing is still visible while debugging the failure.
                timer.Stop();
                Debug.WriteLine("Time elapsed: {0} {1}", timer.Elapsed.ToString("G"), messageConsole);
            }
        }
    }
}
#include <stdlib.h> #include <stdio.h> #include <string.h> #include "../dogechat-plugin.h" #include "irc.h" #include "irc-upgrade.h" #include "irc-buffer.h" #include "irc-channel.h" #include "irc-config.h" #include "irc-input.h" #include "irc-nick.h" #include "irc-notify.h" #include "irc-raw.h" #include "irc-redirect.h" #include "irc-server.h" struct t_irc_server *<API key> = NULL; struct t_irc_channel *<API key> = NULL; /* * Saves servers/channels/nicks info to irc upgrade file. * * Returns: * 1: OK * 0: error */ int <API key> (struct t_upgrade_file *upgrade_file) { struct t_infolist *infolist; struct t_irc_server *ptr_server; struct t_irc_channel *ptr_channel; struct t_irc_nick *ptr_nick; struct t_irc_redirect *ptr_redirect; struct <API key> *<API key>; struct t_irc_notify *ptr_notify; struct t_irc_raw_message *ptr_raw_message; int rc; for (ptr_server = irc_servers; ptr_server; ptr_server = ptr_server->next_server) { /* save server */ infolist = <API key> (); if (!infolist) return 0; if (!<API key> (infolist, ptr_server)) { <API key> (infolist); return 0; } rc = <API key> (upgrade_file, <API key>, infolist); <API key> (infolist); if (!rc) return 0; /* save server channels and nicks */ for (ptr_channel = ptr_server->channels; ptr_channel; ptr_channel = ptr_channel->next_channel) { /* save channel */ infolist = <API key> (); if (!infolist) return 0; if (!<API key> (infolist, ptr_channel)) { <API key> (infolist); return 0; } rc = <API key> (upgrade_file, <API key>, infolist); <API key> (infolist); if (!rc) return 0; for (ptr_nick = ptr_channel->nicks; ptr_nick; ptr_nick = ptr_nick->next_nick) { /* save nick */ infolist = <API key> (); if (!infolist) return 0; if (!<API key> (infolist, ptr_nick)) { <API key> (infolist); return 0; } rc = <API key> (upgrade_file, <API key>, infolist); <API key> (infolist); if (!rc) return 0; } } /* save server redirects */ for (ptr_redirect = ptr_server->redirects; ptr_redirect; ptr_redirect = ptr_redirect->next_redirect) { infolist = 
<API key> (); if (!infolist) return 0; if (!<API key> (infolist, ptr_redirect)) { <API key> (infolist); return 0; } rc = <API key> (upgrade_file, <API key>, infolist); <API key> (infolist); if (!rc) return 0; } /* save server notify list */ for (ptr_notify = ptr_server->notify_list; ptr_notify; ptr_notify = ptr_notify->next_notify) { infolist = <API key> (); if (!infolist) return 0; if (!<API key> (infolist, ptr_notify)) { <API key> (infolist); return 0; } rc = <API key> (upgrade_file, <API key>, infolist); <API key> (infolist); if (!rc) return 0; } } /* save raw messages */ for (ptr_raw_message = irc_raw_messages; ptr_raw_message; ptr_raw_message = ptr_raw_message->next_message) { infolist = <API key> (); if (!infolist) return 0; if (!<API key> (infolist, ptr_raw_message)) { <API key> (infolist); return 0; } rc = <API key> (upgrade_file, <API key>, infolist); <API key> (infolist); if (!rc) return 0; } /* save redirect patterns */ for (<API key> = <API key>; <API key>; <API key> = <API key>->next_redirect) { if (<API key>->temp_pattern) { infolist = <API key> (); if (!infolist) return 0; if (!<API key> (infolist, <API key>)) { <API key> (infolist); return 0; } rc = <API key> (upgrade_file, <API key>, infolist); <API key> (infolist); if (!rc) return 0; } } return 1; } /* * Saves irc upgrade file. 
*
 * Returns:
 *   1: OK
 *   0: error
 */

int
irc_upgrade_save ()
{
    int rc;
    struct t_upgrade_file *upgrade_file;

    /* second argument 1: create the upgrade file for writing */
    upgrade_file = <API key> (<API key>, 1);
    if (!upgrade_file)
        return 0;

    rc = <API key> (upgrade_file);

    <API key> (upgrade_file);

    return rc;
}

/*
 * Restores the IRC callbacks and local variables on buffers that belong to
 * this plugin (run after an upgrade, since callbacks are not serialized).
 */

void
<API key> ()
{
    struct t_infolist *infolist;
    struct t_gui_buffer *ptr_buffer;
    struct t_irc_server *ptr_server;
    const char *type;

    infolist = <API key> ("buffer", NULL, NULL);
    if (infolist)
    {
        while (<API key> (infolist))
        {
            /* only touch buffers owned by the IRC plugin */
            if (<API key> (infolist, "plugin") == dogechat_irc_plugin)
            {
                ptr_buffer = <API key> (infolist, "pointer");
                <API key> (ptr_buffer, "close_callback", &irc_buffer_close_cb);
                <API key> (ptr_buffer, "input_callback", &irc_input_data_cb);
                type = <API key> (ptr_buffer, "localvar_type");
                if (type && (strcmp (type, "channel") == 0))
                {
                    /* re-attach the channel buffer to its server */
                    ptr_server = irc_server_search (
                        <API key> (ptr_buffer, "localvar_server"));
                    <API key> (ptr_buffer, "nickcmp_callback", &<API key>);
                    if (ptr_server)
                    {
                        <API key> (ptr_buffer, "<API key>", ptr_server);
                    }
                }
                if (strcmp (<API key> (infolist, "name"), IRC_RAW_BUFFER_NAME) == 0)
                {
                    irc_raw_buffer = ptr_buffer;
                }
            }
        }
        <API key> (infolist);
    }
}

/*
 * Reads callback for irc upgrade.
*/ int irc_upgrade_read_cb (void *data, struct t_upgrade_file *upgrade_file, int object_id, struct t_infolist *infolist) { int flags, sock, size, i, index, nicks_count, num_items; long number; time_t join_time; char *buf, option_name[64], **nicks, *nick_join, *pos, *error; char **items; const char *buffer_name, *str, *nick; struct t_irc_nick *ptr_nick; struct t_irc_redirect *ptr_redirect; struct t_irc_notify *ptr_notify; struct t_gui_buffer *ptr_buffer; /* make C compiler happy */ (void) data; (void) upgrade_file; <API key> (infolist); while (<API key> (infolist)) { switch (object_id) { case <API key>: <API key> = irc_server_search (<API key> (infolist, "name")); if (<API key>) { <API key>->temp_server = <API key> (infolist, "temp_server"); <API key>->buffer = NULL; buffer_name = <API key> (infolist, "buffer_name"); if (buffer_name && buffer_name[0]) { ptr_buffer = <API key> (IRC_PLUGIN_NAME, buffer_name); if (ptr_buffer) <API key>->buffer = ptr_buffer; } <API key>-><API key> = <API key> (infolist, "<API key>"); str = <API key> (infolist, "current_address"); if (str) { <API key>->current_address = strdup (str); <API key>->current_port = <API key> (infolist, "current_port"); } else { if (<API key>-><API key> < <API key>->addresses_count) { <API key>->current_address = strdup (<API key>->addresses_array[<API key>-><API key>]); <API key>->current_port = <API key>->ports_array[<API key>-><API key>]; } } str = <API key> (infolist, "current_ip"); if (str) <API key>->current_ip = strdup (str); sock = <API key> (infolist, "sock"); if (sock >= 0) { <API key>->sock = sock; <API key>->hook_fd = dogechat_hook_fd (<API key>->sock, 1, 0, 0, &irc_server_recv_cb, <API key>); } <API key>->is_connected = <API key> (infolist, "is_connected"); <API key>->ssl_connected = <API key> (infolist, "ssl_connected"); <API key>->disconnected = <API key> (infolist, "disconnected"); str = <API key> (infolist, "<API key>"); if (str) <API key>-><API key> = strdup (str); str = <API key> (infolist, 
"nick"); if (str) irc_server_set_nick (<API key>, str); str = <API key> (infolist, "nick_modes"); if (str) <API key>->nick_modes = strdup (str); <API key>->cap_away_notify = <API key> (infolist, "cap_away_notify"); <API key>->cap_account_notify = <API key> (infolist, "cap_account_notify"); <API key>->cap_extended_join = <API key> (infolist, "cap_extended_join"); str = <API key> (infolist, "isupport"); if (str) <API key>->isupport = strdup (str); /* * "prefix" is not any more in this infolist (since * DogeChat 0.3.4), but we read it to keep compatibility * with old DogeChat versions, on /upgrade) */ str = <API key> (infolist, "prefix"); if (str) <API key> (<API key>, str); /* "prefix_modes" is new in DogeChat 0.3.4 */ str = <API key> (infolist, "prefix_modes"); if (str) { if (<API key>->prefix_modes) free (<API key>->prefix_modes); <API key>->prefix_modes = strdup (str); } /* "prefix_chars" is new in DogeChat 0.3.4 */ str = <API key> (infolist, "prefix_chars"); if (str) { if (<API key>->prefix_chars) free (<API key>->prefix_chars); <API key>->prefix_chars = strdup (str); } <API key>->nick_max_length = <API key> (infolist, "nick_max_length"); <API key>->casemapping = <API key> (infolist, "casemapping"); str = <API key> (infolist, "chantypes"); if (str) <API key>->chantypes = strdup (str); str = <API key> (infolist, "chanmodes"); if (str) <API key>->chanmodes = strdup (str); else { str = <API key> (<API key>, "CHANMODES"); if (str) <API key>->chanmodes = strdup (str); } /* "monitor" is new in DogeChat 0.4.3 */ if (<API key> (infolist, "monitor")) { <API key>->monitor = <API key> (infolist, "monitor"); } else { /* DogeChat <= 0.4.2 */ str = <API key> (<API key>, "MONITOR"); if (str) { error = NULL; number = strtol (str, &error, 10); if (error && !error[0]) <API key>->monitor = (int)number; } } <API key>->reconnect_delay = <API key> (infolist, "reconnect_delay"); <API key>->reconnect_start = <API key> (infolist, "reconnect_start"); <API key>->command_time = <API key> 
(infolist, "command_time"); <API key>->reconnect_join = <API key> (infolist, "reconnect_join"); <API key>->disable_autojoin = <API key> (infolist, "disable_autojoin"); <API key>->is_away = <API key> (infolist, "is_away"); str = <API key> (infolist, "away_message"); if (str) <API key>->away_message = strdup (str); <API key>->away_time = <API key> (infolist, "away_time"); <API key>->lag = <API key> (infolist, "lag"); <API key>->lag_displayed = <API key> (infolist, "lag_displayed"); buf = <API key> (infolist, "lag_check_time", &size); if (buf) memcpy (&(<API key>->lag_check_time), buf, size); <API key>->lag_next_check = <API key> (infolist, "lag_next_check"); <API key>->lag_last_refresh = <API key> (infolist, "lag_last_refresh"); <API key>->last_user_message = <API key> (infolist, "last_user_message"); <API key>->last_away_check = <API key> (infolist, "last_away_check"); <API key>->last_data_purge = <API key> (infolist, "last_data_purge"); } break; case <API key>: if (<API key>) { <API key> = irc_channel_new (<API key>, <API key> (infolist, "type"), <API key> (infolist, "name"), 0, 0); if (<API key>) { str = <API key> (infolist, "topic"); if (str) <API key> (<API key>, str); str = <API key> (infolist, "modes"); if (str) <API key>->modes = strdup (str); <API key>->limit = <API key> (infolist, "limit"); str = <API key> (infolist, "key"); if (str) <API key>->key = strdup (str); str = <API key> (infolist, "join_msg_received"); if (str) { items = <API key> (str, ",", 0, 0, &num_items); if (items) { for (i = 0; i < num_items; i++) { <API key> (<API key>->join_msg_received, items[i], "1"); } <API key> (items); } } <API key>->checking_whox = <API key> (infolist, "checking_whox"); str = <API key> (infolist, "away_message"); if (str) <API key>->away_message = strdup (str); <API key>->has_quit_server = <API key> (infolist, "has_quit_server"); <API key>->cycle = <API key> (infolist, "cycle"); <API key>->part = <API key> (infolist, "part"); <API key>-><API key> = <API key> 
(infolist, "<API key>"); for (i = 0; i < 2; i++) { index = 0; while (1) { snprintf (option_name, sizeof (option_name), "nick_speaking%d_%05d", i, index); nick = <API key> (infolist, option_name); if (!nick) break; <API key> (<API key>, nick, i); index++; } } index = 0; while (1) { snprintf (option_name, sizeof (option_name), "<API key>%05d", index); nick = <API key> (infolist, option_name); if (!nick) break; snprintf (option_name, sizeof (option_name), "<API key>%05d", index); <API key> (<API key>, <API key>, nick, <API key> (infolist, option_name)); index++; } str = <API key> (infolist, "join_smart_filtered"); if (str) { nicks = <API key> (str, ",", 0, 0, &nicks_count); if (nicks) { for (i = 0; i < nicks_count; i++) { pos = strchr (nicks[i], ':'); if (pos) { nick_join = dogechat_strndup (nicks[i], pos - nicks[i]); if (nick_join) { error = NULL; number = strtol (pos + 1, &error, 10); if (error && !error[0]) { join_time = (time_t)number; <API key> (<API key>, nick_join, join_time); } free (nick_join); } } } <API key> (nicks); } } } } break; case <API key>: if (<API key> && <API key>) { ptr_nick = irc_nick_new (<API key>, <API key>, <API key> (infolist, "name"), <API key> (infolist, "host"), <API key> (infolist, "prefixes"), <API key> (infolist, "away"), <API key> (infolist, "account"), <API key> (infolist, "realname")); if (ptr_nick) { /* * "flags" is not any more in this infolist (since * DogeChat 0.3.4), but we read it to keep compatibility * with old DogeChat versions, on /upgrade) * We try to restore prefixes with old flags, but * this is approximation, it's not sure we will * restore good prefixes here (a /names on channel * will fix problem if prefixes are wrong). 
* Flags were defined in irc-nick.h: * #define IRC_NICK_CHANOWNER 1 * #define IRC_NICK_CHANADMIN 2 * #define IRC_NICK_CHANADMIN2 4 * #define IRC_NICK_OP 8 * #define IRC_NICK_HALFOP 16 * #define IRC_NICK_VOICE 32 * #define IRC_NICK_AWAY 64 * #define IRC_NICK_CHANUSER 128 */ flags = <API key> (infolist, "flags"); if (flags > 0) { /* channel owner */ if (flags & 1) { irc_nick_set_mode (<API key>, <API key>, ptr_nick, 1, 'q'); } /* channel admin */ if ((flags & 2) || (flags & 4)) { irc_nick_set_mode (<API key>, <API key>, ptr_nick, 1, 'a'); } if (flags & 8) { irc_nick_set_mode (<API key>, <API key>, ptr_nick, 1, 'o'); } /* half-op */ if (flags & 16) { irc_nick_set_mode (<API key>, <API key>, ptr_nick, 1, 'h'); } /* voice */ if (flags & 32) { irc_nick_set_mode (<API key>, <API key>, ptr_nick, 1, 'v'); } /* away */ if (flags & 64) { irc_nick_set_away (<API key>, <API key>, ptr_nick, 1); } /* channel user */ if (flags & 128) { irc_nick_set_mode (<API key>, <API key>, ptr_nick, 1, 'u'); } } } } break; case <API key>: if (<API key>) { ptr_redirect = <API key> ( <API key>, <API key> (infolist, "pattern"), <API key> (infolist, "signal"), <API key> (infolist, "count"), <API key> (infolist, "string"), <API key> (infolist, "timeout"), <API key> (infolist, "cmd_start"), <API key> (infolist, "cmd_stop"), <API key> (infolist, "cmd_extra"), <API key> (infolist, "cmd_filter")); if (ptr_redirect) { ptr_redirect->current_count = <API key> (infolist, "current_count"); str = <API key> (infolist, "command"); if (str) ptr_redirect->command = strdup (str); ptr_redirect->assigned_to_command = <API key> (infolist, "assigned_to_command"); ptr_redirect->start_time = <API key> (infolist, "start_time"); ptr_redirect->cmd_start_received = <API key> (infolist, "cmd_start_received"); ptr_redirect->cmd_stop_received = <API key> (infolist, "cmd_stop_received"); str = <API key> (infolist, "output"); if (str) ptr_redirect->output = strdup (str); ptr_redirect->output_size = <API key> (infolist, 
"output_size"); } } break; case <API key>: <API key> ( <API key> (infolist, "name"), <API key> (infolist, "temp_pattern"), <API key> (infolist, "timeout"), <API key> (infolist, "cmd_start"), <API key> (infolist, "cmd_stop"), <API key> (infolist, "cmd_extra")); break; case <API key>: if (<API key>) { ptr_notify = irc_notify_search (<API key>, <API key> (infolist, "nick")); if (ptr_notify) { ptr_notify->is_on_server = <API key> (infolist, "is_on_server"); str = <API key> (infolist, "away_message"); if (str) ptr_notify->away_message = strdup (str); } } break; case <API key>: <API key> (<API key> (infolist, "date"), <API key> (infolist, "prefix"), <API key> (infolist, "message")); break; } } return DOGECHAT_RC_OK; } /* * Loads irc upgrade file. * * Returns: * 1: OK * 0: error */ int irc_upgrade_load () { int rc; struct t_upgrade_file *upgrade_file; <API key> (); upgrade_file = <API key> (<API key>, 0); if (!upgrade_file) return 0; rc = <API key> (upgrade_file, &irc_upgrade_read_cb, NULL); <API key> (upgrade_file); return rc; }
#ifndef MIAMITEMMODEL_H
#define MIAMITEMMODEL_H

#include <QStandardItemModel>
#include <<API key>>

#include <model/genericdao.h>

#include "separatoritem.h"
#include "miamlibrary_global.hpp"

/**
 * \brief Abstract item model for library trees, grouping top-level items
 * under alphabetical separator items.
 *
 * Concrete subclasses define how an item maps to a letter (currentLetter())
 * and which proxy sits on top of this model (proxy()).
 */
class MIAMLIBRARY_LIBRARY MiamItemModel : public QStandardItemModel
{
    Q_OBJECT

protected:
    /** This hash is a cache, used to insert nodes in this tree at the right location. */
    QHash<uint, QStandardItem*> _hash;

    /** Separator items that group the top-level items (items without parent), keyed by letter. */
    QHash<QString, SeparatorItem*> _letters;

    /** Maps a separator (e.g. letter "L") to every top-level item filed under it. */
    QMultiHash<SeparatorItem*, QModelIndex> _topLevelItems;

    /** Track items keyed by string — presumably a track URI or title; TODO confirm in subclasses. */
    QHash<QString, QStandardItem*> _tracks;

public:
    explicit MiamItemModel(QObject *parent = nullptr);

    virtual ~MiamItemModel();

    /** Letter under which the item at \p index is (or would be) grouped. */
    virtual QChar currentLetter(const QModelIndex &index) const = 0;

    /** Returns the separator item for \p letter, or nullptr if none was created yet. */
    inline QStandardItem* letterItem(const QString &letter) const { return _letters.value(letter); }

    /** Proxy model associated with this model (concrete type redacted in this view). */
    virtual <API key>* proxy() const = 0;

protected:
    /** Inserts the separator item corresponding to \p node into the model. */
    SeparatorItem *insertSeparator(const QStandardItem *node);

    /** Recursively remove node and its parent if the latter has no more children. */
    void removeNode(const QModelIndex &node);

public slots:
    /** Inserts a node built from the DAO; default implementation is a no-op. */
    virtual void insertNode(GenericDAO *) {}

    /** Updates an existing node from the DAO. */
    virtual void updateNode(GenericDAO *node);
};

#endif // MIAMITEMMODEL_H
// Library for I/O of genetic data #include <stdio.h> #include <string.h> #include <math.h> #include "marker.h" #include "personbits.h" #include "util.h" // initialize static members int PersonBits::_numDuos = 0; int PersonBits::_numTrioKids = 0; dynarray<PersonBits *> PersonBits::_allIndivs; Hashtable<char *, PersonBits *> PersonBits::_idToPerson(2003, stringHash, stringcmp); PersonBits::PersonBits(char *id, char sex, int popIndex, uint32_t sampNum, short familyIdLength) : SuperPerson(id, sex, popIndex, familyIdLength) { if (!_ignore) { // Will update _trioDuoType and _tdData later as we read in relationships _trioDuoType = UNRELATED; _tdData = NULL; int numHapChunks = Marker::getNumHapChunks(); assert(numHapChunks > 0); _homozyLoci = new chunk[numHapChunks]; _knownHap = new chunk[numHapChunks]; _missingLoci = new chunk[numHapChunks]; // _expDiffBit = 0; // initialize these arrays: for(int i = 0; i < numHapChunks; i++) { _homozyLoci[i] = _knownHap[i] = _missingLoci[i] = 0; } _resolvedHaplotype[0] = _resolvedHaplotype[1] = NULL; _sampledHaplotypes = NULL; if (_idToPerson.lookup(_id)) { fprintf(stderr, "\nERROR: multiple individuals with id %s!\n", _id); exit(3); } _idToPerson.add(_id, this); } } PersonBits::~PersonBits() { if (!_ignore) { delete [] _homozyLoci; delete [] _knownHap; delete [] _missingLoci; assert(_tdData == NULL || getTrioDuoType() == TRIO_CHILD); // if (_tdData != NULL) { // delete [] _tdData->_tdKnownLoci; // if (_tdData->_childIsHet != NULL) // delete [] _tdData->_childIsHet; // delete _tdData; assert(_resolvedHaplotype[0] == NULL); assert(_sampledHaplotypes == NULL); } } // Deletes genotype data for <this>. Used for trio children after their data // were already used to infer the known phase of their parents void PersonBits::empty() { delete [] _homozyLoci; delete [] _knownHap; delete [] _missingLoci; _homozyLoci = _knownHap = _missingLoci = NULL; } // Given the parents of <this>, sets the relationships, and infers the trio or // duo phase. 
// See comment above <API key>() regarding <numMendelError> and // <numMendelCounted> void PersonBits::setParents(char *familyid, PersonBits *parents[2], int numParents, bool &warningPrinted, FILE *log, int *numMendelError, int *numMendelCounted) { if (numParents == 2) { setTrioDuoType(TRIO_CHILD); _numTrioKids++; } else { setTrioDuoType(DUO_CHILD); _numDuos++; } for(int p = 0; p < 2; p++) { if (parents[p] == NULL) continue; // ensure this parent isn't part of another trio/duo relationship: if (parents[p]->getTrioDuoType() != UNRELATED) { if (!warningPrinted) { printf("\n"); if (log != NULL) fprintf(log, "\n"); warningPrinted = true; } fprintf(stderr, "ERROR: parent %s is a member of another trio or duo\n", parents[p]->getId()); exit(1); } if (numParents == 1) parents[p]->setTrioDuoType(PARENT_0); else { if (p == 0) parents[p]->setTrioDuoType(PARENT_0); else parents[p]->setTrioDuoType(PARENT_1); } } if (numParents == 1 && parents[0] == NULL) { parents[0] = parents[1]; parents[1] = NULL; assert(parents[0] != NULL); } <API key>(this, parents, numMendelError, numMendelCounted); if (numMendelError != NULL || numMendelCounted != NULL) { assert(numMendelError != NULL && numMendelCounted != NULL); // Don't want errors for the purpose of counting non-Mendelian errors when // other relationships for the same person (e.g., parents of several // children, etc.) come along this->resetTrioDuoType(); for(int p = 0; p < 2; p++) { if (parents[p] != NULL) parents[p]->resetTrioDuoType(); } } } // Infers trio and duo haplotypes for <child> and its parent(s): // Note that no duos reach here at present since we warn about and ignore // them in the code that parses the .fam file // If non-NULL, <numMendelError> is set to be the count of the number of // Mendelian errors, and <numMendelCounted> is the number of relationships with // non-missing data that were examined for Mendelian errors. 
These values are // assumed to point to arrays with size of the number of markers; counts are // cumulative across calls to <API key>(), so this method does // not reset these values, but only increments them. void PersonBits::<API key>(PersonBits *child, PersonBits *parents[2], int *numMendelError, int *numMendelCounted) { int numMarkers = Marker::getNumMarkers(); int chromIdx = Marker::getMarker(/*marker=*/ 0)->getChromIdx(); bool isTrio = parents[1] != NULL; if (isTrio) { parents[0]->allocTrioDuoData(isTrio, Marker::getNumHapChunks(), parents[1]); parents[1]->_tdData = parents[0]->_tdData; // Note: cheap hack -- having this point to the parent, a PersonBits object, // rather than a TrioDuoData object. Doing this so that we don't need an // extra field in all individuals child->_tdData = (TrioDuoData *) parents[0]; } else { // a duo parents[0]->allocTrioDuoData(isTrio, Marker::getNumHapChunks(), child); child->_tdData = parents[0]->_tdData; assert(child->getTrioDuoType() == DUO_CHILD); } // because we haven't changed known loci at all due to trios and duos, // the known loci are the homozygous loci. 
chunk childHomozyLoci = child->getHomozyLoci(/*curHapChunk=*/ 0); chunk childHomozyGenos = child->getKnownHaplotype(/*curHapChunk=*/ 0); chunk childMissingLoci = child->getMissingLoci(/*curHapChunk=*/ 0); chunk parentsHomozyLoci[2], parentsHomozyGenos[2], parentsMissingLoci[2]; for(int p = 0; p < 2; p++) { if (parents[p] == NULL) { // all missing data for any NULL parent: parentsHomozyLoci[p] = 0; parentsHomozyGenos[p] = 0; parentsMissingLoci[p] = ALL_CHUNK_BITS_SET; continue; } parentsHomozyLoci[p] = parents[p]->getHomozyLoci(/*curHapChunk=*/ 0); parentsHomozyGenos[p] = parents[p]->getKnownHaplotype(/*curHapChunk=*/ 0); parentsMissingLoci[p] = parents[p]->getMissingLoci(/*curHapChunk=*/ 0); } if (isTrio) parents[0]->_tdData->_childIsHet[0] = ~(child->getHomozyLoci(/*curHapChunk=*/ 0) | child->getMissingLoci(/*curHapChunk=*/ 0)); // curHapChunk: which haplotype chunk are we on? (BITS_PER_CHUNK bit chunks) // curChunkIdx: which bit/locus within the chunk are we on? uint32_t curHapChunk = 0, curChunkIdx = 0; for(int m = 0; m < numMarkers; m++, curChunkIdx++) { if (Marker::getLastMarkerNum(chromIdx) == m - 1) { chromIdx = Marker::getMarker(m)->getChromIdx(); if (curChunkIdx < BITS_PER_CHUNK) { // clear out the final bits in _childIsHet that aren't defined because // the chromosome ends: // The following will clear bits (curChunkIdx..BITS_PER_CHUNK-1); since // curChunkIdx is one more than the last bit of the previous chrom, this // is exactly what we want. int numClearBits = BITS_PER_CHUNK - curChunkIdx; chunk clearBits = setLastNumBits(numClearBits); if (isTrio) parents[0]->_tdData->_childIsHet[curHapChunk] &= ~clearBits; } // Now on next chromosome; update chunk indices if (curChunkIdx != 0) { // markers from prev chrom on current chunk? 
curHapChunk++; // markers for current chrom are on next chunk number curChunkIdx = 0; } } else if (curChunkIdx == BITS_PER_CHUNK) { curHapChunk++; curChunkIdx = 0; childHomozyLoci = child->getHomozyLoci(curHapChunk); childHomozyGenos = child->getKnownHaplotype(curHapChunk); childMissingLoci = child->getMissingLoci(curHapChunk); for(int p = 0; p < 2; p++) { if (parents[p] == NULL) continue; parentsHomozyLoci[p] = parents[p]->getHomozyLoci(curHapChunk); parentsHomozyGenos[p] = parents[p]->getKnownHaplotype(curHapChunk); parentsMissingLoci[p] = parents[p]->getMissingLoci(curHapChunk); } if (isTrio) parents[0]->_tdData->_childIsHet[curHapChunk] = ~(child->getHomozyLoci(curHapChunk) | child->getMissingLoci(curHapChunk)); } int childIsHomozy = (childHomozyLoci >> curChunkIdx) & 1; int childHomozyAllele = (childHomozyGenos >> curChunkIdx) & 1; int childMissing = (childMissingLoci >> curChunkIdx) & 1; int parentsHomozy[2], parentsHomozyAllele[2], parentsMissing[2]; for(int p = 0; p < 2; p++) { parentsHomozy[p] = (parentsHomozyLoci[p] >> curChunkIdx) & 1; parentsHomozyAllele[p] = (parentsHomozyGenos[p] >> curChunkIdx) & 1; parentsMissing[p] = (parentsMissingLoci[p] >> curChunkIdx) & 1; } // check for Mendelian errors if (!childMissing && (!parentsMissing[0] || !parentsMissing[1]) && numMendelCounted != NULL) // for rate calculation: should only count observations where child and // at least one parent is non-missing numMendelCounted[m]++; // Can only find Mendelian errors if child is non-missing and at least one // parent is homozygous: if (!childMissing && (parentsHomozy[0] || parentsHomozy[1])) { if (parentsHomozy[0] && parentsHomozy[1]) { int childGeno = parentsHomozyAllele[0] + parentsHomozyAllele[1]; if (childIsHomozy) { if (childGeno == 1) { // Mendelian error! setTrioDuoMissing(child, parents, curHapChunk, curChunkIdx); if (numMendelError != NULL) numMendelError[m]++; continue; } else if (childGeno / 2 != childHomozyAllele) { // Mendelian error! 
setTrioDuoMissing(child, parents, curHapChunk, curChunkIdx); if (numMendelError != NULL) numMendelError[m]++; continue; } } else if (childGeno != 1) { // Mendelian error! setTrioDuoMissing(child, parents, curHapChunk, curChunkIdx); if (numMendelError != NULL) numMendelError[m]++; continue; } } else { // only one homozygous parent; can only get Mendelian error if child is // homozygous if (childIsHomozy) { int parHomozyAllele; if (parentsHomozy[0]) { parHomozyAllele = parentsHomozyAllele[0]; } else { assert(parentsHomozy[1]); parHomozyAllele = parentsHomozyAllele[1]; } if (parHomozyAllele != childHomozyAllele) { // Mendelian error! setTrioDuoMissing(child, parents, curHapChunk, curChunkIdx); if (numMendelError != NULL) numMendelError[m]++; continue; } } } } // No Mendelian error: infer phase if (parentsHomozy[0] && parentsHomozy[1]) { // both parents homozygous continue; // phase of both parents is known } // one or both parents' phase unknown if (childIsHomozy) { // child homozygous: don't need parents to infer: for(int p = 0; p < 2; p++) { if (parents[p] == NULL) continue; // assign phase parents[p]->_knownHap[curHapChunk] |= ((chunk) childHomozyAllele) << curChunkIdx; } parents[0]->_tdData->_knownLoci[curHapChunk] |= 1ul << curChunkIdx; continue; } // child is heterozygous or missing; try to infer phase: if ((!parentsHomozy[0] || parentsMissing[0]) && (!parentsHomozy[1] || parentsMissing[1])) { // both parents either heterozygous or missing => ambiguous: skip continue; } if (!isTrio) { // Duo! // At this point, we know the child is heterozygous or missing, but at // least one parent is homozygous. Since we only have data for one // parent, that parent must be homozygous, and that resolves the // transmitted haplotype for the child assert(parentsHomozy[0]); child->_knownHap[curHapChunk] |= ((chunk) parentsHomozyAllele[0]) << curChunkIdx; child->_tdData->_knownLoci[curHapChunk] |= 1ul << curChunkIdx; continue; } // Only trios from here down! 
if (childMissing) { // Note: could impute the child's genotype if both parents are // homozygous, but we aren't phasing the (trio) child anyway, so can just // move on. The parents don't need to be phased at homozygous sites. continue; } // child is heterozygous with one parent homozygous int homozyPar = 0; for(; homozyPar < 2; homozyPar++) { if (parentsHomozy[homozyPar]) break; } assert(homozyPar < 2); int otherParent = homozyPar ^ 1; assert(!parentsHomozy[otherParent]); // ensure other parent not homozygous // child is heterozygous, so its genotype is 1 and it received whatever // allele from the heterozygous parent that the homozygous parent didn't // transmit: int otherParTransAllele = 1 - parentsHomozyAllele[homozyPar]; parents[otherParent]->_knownHap[curHapChunk] |= ((chunk) otherParTransAllele) << curChunkIdx; parents[otherParent]->_tdData->_knownLoci[curHapChunk] |= 1ul <<curChunkIdx; } if (curChunkIdx < BITS_PER_CHUNK) { // identical to code above // clear out the final bits in _childIsHet that aren't defined because // the chromosome ends: // The following will clear bits (curChunkIdx..BITS_PER_CHUNK-1); since // curChunkIdx is one more than the last bit of the previous chrom, this // is exactly what we want. int numClearBits = BITS_PER_CHUNK - curChunkIdx; chunk clearBits = setLastNumBits(numClearBits); if (isTrio) parents[0]->_tdData->_childIsHet[curHapChunk] &= ~clearBits; } } // For sites in trios and duos that are Mendelian errors, sets the locus to // missing in all individuals void PersonBits::setTrioDuoMissing(PersonBits *child, PersonBits *parents[2], int chunkNum, int chunkIdx) { child->setMissing(chunkNum, chunkIdx); parents[0]->setMissing(chunkNum, chunkIdx); if (parents[1] != NULL) parents[1]->setMissing(chunkNum, chunkIdx); } // To be run prior to removeIgnoreIndivs() in PersonIO::readData(): // After parsing and performing trio-based phasing in // setParents(), trio children are no longer needed and // their memory can be reclaimed. 
Method either sets these children's // population to Ignore (so the Person object will be removed), or clears // their genotype data so they take up minimal space. The latter occurs // if we're going to later print their inferred haplotypes void PersonBits::cleanUpPostParse(bool keepTrioKids) { if (!keepTrioKids) { // no kids after this and removeIgnoreIndivs() run _numTrioKids = 0; } int length = _allIndivs.length(); for(int p = 0; p < length; p++) { PersonBits *cur = _allIndivs[p]; if (cur->_ignore) continue; if (cur->getTrioDuoType() == TRIO_CHILD) { if (keepTrioKids) // need not retain genotype data for trio child, even though we are // keeping the child's entry for later printing cur->empty(); else cur->setIgnore(); } } } // Prints parent's haplotypes for trios/duos. For each parent, the first // haplotype printed is the transmitted haplotype and the second is // untransmitted. At present, this method only gets called before population // phasing takes place, so the triple het sites are untrustworthy and are // set to missing in <API key>(). void PersonBits::<API key>(FILE *out) { int numMarkers = Marker::getNumMarkers(); int numIndivs = _allIndivs.length(); int chromIdx = Marker::getMarker(/*marker=*/ 0)->getChromIdx(); // curHapChunk: which haplotype chunk are we on? (BITS_PER_CHUNK bit chunks) // curChunkIdx: which bit/locus within the chunk are we on? for(int m = 0, curHapChunk = 0, curChunkIdx = 0; m < numMarkers; m++, curChunkIdx++) { if (Marker::getLastMarkerNum(chromIdx) == m - 1) { // Now on next chromosome; update chunk indices if (curChunkIdx != 0) { // markers from prev chrom on current chunk? 
curHapChunk++; // markers for current chrom are on next chunk number curChunkIdx = 0; } chromIdx = Marker::getMarker(m)->getChromIdx(); } if (curChunkIdx == BITS_PER_CHUNK) { curHapChunk++; curChunkIdx = 0; } for(int i = 0; i < numIndivs; i++) { PersonBits *thePerson = _allIndivs[i]; // only print phase for the parents of trios/duos: if (thePerson->getTrioDuoType() == UNRELATED) continue; chunk haplotype; chunk knownLoci; // minimally the person's haplotype will contain the alleles at // homozygous sites (this matters at sites where the child is // missing data -- there _trioDuoHaplotype->defined won't be set to 1): haplotype = thePerson->_knownHap[curHapChunk]; knownLoci = thePerson->_homozyLoci[curHapChunk] | (thePerson->_tdData->_knownLoci[curHapChunk] & (~thePerson->getMissingLoci(curHapChunk))); if (((~knownLoci) >> curChunkIdx) & 1) { // site is missing/ambiguous: fprintf(out, "99"); continue; } bool subtract = false; for(int h = 0; h < 2; h++) { if (subtract) { haplotype ^= (~thePerson->_homozyLoci[curHapChunk]); } int hapAllele = (haplotype >> curChunkIdx) & 1; fprintf(out, "%d", hapAllele); subtract = true; } } fprintf(out, "\n"); } } // Allocates space for sampled haplotypes and initializes them using random // values for heterozygous sites void PersonBits::initRandSampledHaps() { int numSamples = _allIndivs.length(); int numHapChunks = Marker::getNumHapChunks(); std::<API key><int> randGeno(0,1); // uniform random genotype for(int id = 0; id < numSamples; id++) { PersonBits *thePerson = _allIndivs[id]; // To ensure that the transmitted haplotypes match between the parent // and duo child (necessary to avoid a case in which the randomly // initialized haplotypes are infeasible according to the relationship // and genotypes), and the case where the transmitted haplotypes of two // trio parents do not match the trio child, we initialize parent-offspring // duos and the two trio parents together. 
PersonBits *otherPerson = NULL; TDTYPE otherType = UNRELATED; if (thePerson->getTrioDuoType() == PARENT_0) { otherPerson = thePerson->_tdData->_otherParentOrChild; otherType = otherPerson->getTrioDuoType(); } else if (thePerson->getTrioDuoType() != UNRELATED) // skip DUO_CHILD and PARENT_1 indivs, they are initialized with PARENT_0 continue; thePerson->_sampledHaplotypes = new chunk*[numHapChunks]; if (otherPerson != NULL) otherPerson->_sampledHaplotypes = new chunk*[numHapChunks]; for(int curHapChunk = 0; curHapChunk < numHapChunks; curHapChunk++) { thePerson->_sampledHaplotypes[curHapChunk] = new chunk[<API key>*2]; if (otherPerson != NULL) otherPerson->_sampledHaplotypes[curHapChunk] = new chunk[<API key>*2]; for(int s = 0; s < <API key>; s++) { thePerson->_sampledHaplotypes[curHapChunk][s*2+0] = thePerson->_knownHap[curHapChunk]; chunk knownSites = thePerson->getKnownLoci(curHapChunk); chunk missingSites = thePerson->getMissingLoci(curHapChunk); for(uint32_t curChunkIdx = 0; curChunkIdx < BITS_PER_CHUNK; curChunkIdx++) { int isKnown = (knownSites >> curChunkIdx) & 1; int isMiss = (missingSites >> curChunkIdx) & 1; int randGenoVal = randGeno(RandGen::v); // only randomize if the haplotype value is not known and the // genotype is not missing: // Note: we won't bother randomizing missing data sites since the // first window size has only 4 markers, so we will seed all possible // values for missing data sites (since 4 < <API key>) chunk genoVal = (1 - isKnown) & (1 - isMiss) & randGenoVal; thePerson->_sampledHaplotypes[curHapChunk][s*2+0] |= (genoVal << curChunkIdx); } // invert for haplotype 1: chunk homozySites = thePerson->getHomozyLoci(curHapChunk); chunk hetSites = ~(homozySites | missingSites); thePerson->_sampledHaplotypes[curHapChunk][s*2+1] = thePerson->_sampledHaplotypes[curHapChunk][s*2+0] ^ hetSites; if (otherPerson != NULL) { // Set haplotype 0 (different for duo children and other trio parents: if (otherType == DUO_CHILD) { // haplotype 0 identical 
to parent: otherPerson->_sampledHaplotypes[curHapChunk][s*2+0] = thePerson->_sampledHaplotypes[curHapChunk][s*2+0]; } else { assert(otherType == PARENT_1); chunk thePersonTrans = thePerson->_sampledHaplotypes[curHapChunk][s*2+0]; chunk trioChildHet = thePerson->getTrioChildHet(curHapChunk); otherPerson->_sampledHaplotypes[curHapChunk][s*2+0] = otherPerson->_knownHap[curHapChunk] | ((~thePersonTrans) & trioChildHet); chunk knownSites = otherPerson->getKnownLoci(curHapChunk) | trioChildHet; chunk missingSites = thePerson->getMissingLoci(curHapChunk); for(uint32_t curChunkIdx = 0; curChunkIdx < BITS_PER_CHUNK; curChunkIdx++) { int isKnown = (knownSites >> curChunkIdx) & 1; int isMiss = (missingSites >> curChunkIdx) & 1; // TODO: may be slow to do this; probably should branch on missing int randGenoVal = randGeno(RandGen::v); chunk genoVal = (1 - isKnown) & (1 - isMiss) & randGenoVal; otherPerson->_sampledHaplotypes[curHapChunk][s*2+0] |= (genoVal << curChunkIdx); } } // haplotype 1 inverted at heterozygous sites: chunk otherHomozySites = otherPerson->getHomozyLoci(curHapChunk); chunk otherMissingSites = otherPerson->getMissingLoci(curHapChunk); chunk otherHetSites = ~(otherHomozySites | otherMissingSites); otherPerson->_sampledHaplotypes[curHapChunk][s*2+1] = otherPerson->_sampledHaplotypes[curHapChunk][s*2+0] ^ otherHetSites; } } } } } void PersonBits::setGenotype(int hapChunkNum, int chunkIdx, int chromIdx, int chrMarkerIdx, int geno[2]) { if (_ignore) return; // no need/nowhere to store genotypes if (geno[0] < 0) { // missing data _missingLoci[hapChunkNum] += 1ul << chunkIdx; return; } if ((geno[0] == 0 && geno[1] == 1) || (geno[0] == 1 && geno[1] == 0)) return; // heterozygote => do nothing assert(geno[0] <= 1 && geno[1] <= 1); // have homozygote: need to set the corresponding bit: chunk homozyVal = 1ul << chunkIdx; // want lowest order bit of genotype (0 or 1) to be set at chunkIdx in the // chunk: chunk genoVal = ((unsigned long) geno[0]) << chunkIdx; 
_homozyLoci[hapChunkNum] += homozyVal; _knownHap[hapChunkNum] += genoVal; } // Sets <this>'s genotype to missing at <hapChunkNum>, <chunkIdx> void PersonBits::setMissing(int hapChunkNum, int chunkIdx) { chunk bitToSetMiss = 1ul << chunkIdx; // make sure the locus is not set to be homozygous and that the homozygous // genotype value is 0: _homozyLoci[hapChunkNum] &= ~bitToSetMiss; _knownHap[hapChunkNum] &= ~bitToSetMiss; // set missing: _missingLoci[hapChunkNum] |= bitToSetMiss; } // Sets all heterozygous sites to missing. Used for males on the X chromosome // where heterozygous sites are errors. // If non-NULL, sets *numHets and *numCalls to be the number of heterozygous // sites and the number of called (non-missing) sites for <this>. Those // counts are *before* setting heterozygous sites to missing. // Code assumes that the entire dataset is the X chromosome void PersonBits::setXHetToMissing(FILE *log, int *numHets, int *numCalls) { // assert(Marker::getMarker(0)->getChrom() == CHR_X); // assert(Marker::getNumChromMarkers(CHR_X) == Marker::getNumMarkers()); assert(getSex() == 'M'); if (numHets != NULL) { assert(numCalls != NULL); *numHets = 0; *numCalls = 0; } int numMissing = 0; for(int curChunk = 0; curChunk < Marker::getNumHapChunks(); curChunk++) { chunk homozyBits = getHomozyLoci(curChunk); chunk missingBits = getMissingLoci(curChunk); numMissing += countBitsSet(missingBits); chunk hets = (~homozyBits) & (~missingBits); if (curChunk+1 == Marker::getNumHapChunks()) { // Don't count as heterozygous those sites that appear beyond int numValid = Marker::getNumMarkers() % BITS_PER_CHUNK; if (numValid > 0) hets &= setBitsToIdx( numValid - 1); } if (numHets != NULL) *numHets += countBitsSet(hets); assert((_missingLoci[curChunk] & hets) == 0ul); _missingLoci[curChunk] |= hets; } if (numCalls != NULL) { *numCalls = Marker::getNumMarkers() - numMissing; } } // prints the portion of the haplotype for <this> that is defined by its // homozygous loci, with ? 
for heterozygous loci
// (see comment above) — prints only the homozygous-known portion of the chunk.
void PersonBits::printChunkHap(FILE *out, int chunkNum) {
  printHap(out, _knownHap[chunkNum], _homozyLoci[chunkNum]);
}

// Sets all sampled haplotypes to 0 so that we can call orSampledHaplotypes()
// to set newly sampled haplotypes
void PersonBits::<API key>() {
  int numHapChunks = Marker::getNumHapChunks();
  // Zero both homologs (h = 0,1) of every sample's haplotype in every chunk.
  for(int c = 0; c < numHapChunks; c++) {
    for(int s = 0; s < <API key>; s++) {
      for(int h = 0; h < 2; h++) {
	_sampledHaplotypes[c][s*2+h] = 0;
      }
    }
  }
}

// ORs <haplotype> into sampled haplotype <homolog> (0 or 1) of sample
// <sampNum> at chunk <chunkNum>. Assumes initRandSampledHaps() (or an
// equivalent allocator) has already been called.
void PersonBits::orSampledHaplotype(int sampNum, int homolog, int chunkNum,
				    chunk haplotype) {
  _sampledHaplotypes[chunkNum][sampNum*2+homolog] |= haplotype;
}

// Allocates space in which to store the final (Viterbi decoded) haplotypes for
// this individual
void PersonBits::initFinalHaplotype() {
  // don't need the genotype data anymore, so steal their memory:
  // NOTE(review): after this call _knownHap/_homozyLoci are NULL, so any
  // later genotype accessor reading them would crash — assumes callers are
  // done with genotype data; confirm call ordering.
  _resolvedHaplotype[0] = _knownHap;
  _resolvedHaplotype[1] = _homozyLoci;
  int numHapChunks = Marker::getNumHapChunks();
  // Clear the stolen buffers so they can be filled via orFinalHaplotype().
  for(int h = 0; h < 2; h++) {
    for(int c = 0; c < numHapChunks; c++) {
      _resolvedHaplotype[h][c] = 0;
    }
  }
  _knownHap = _homozyLoci = NULL;
}

// ORs <haplotype> into final (Viterbi decoded) haplotype <homolog> at chunk
// <chunkNum>. Assumes initFinalHaplotype() has been called.
void PersonBits::orFinalHaplotype(int homolog, int chunkNum, chunk haplotype) {
  _resolvedHaplotype[homolog][chunkNum] |= haplotype;
}
#!/usr/bin/python
############################################################
# Generates commands for the muscle alignment program
#
# Reads every FASTA file in the input directory (-i), writes one MUSCLE
# command per file into a job script, and writes a SLURM submit script
# for that job file.
############################################################

import sys, os, core, argparse

############################################################
# Options

parser = argparse.ArgumentParser(description="MUSCLE command generator");
parser.add_argument("-i", dest="input", help="Directory of input FASTA files.", default=False);
parser.add_argument("-o", dest="output", help="Desired output directory for aligned files. Job name (-n) will be appended to output directory name.", default=False);
parser.add_argument("-n", dest="name", help="A short name for all files associated with this job.", default=False);
parser.add_argument("-p", dest="path", help="The path to MUSCLE. Default: muscle", default="muscle");
parser.add_argument("--overwrite", dest="overwrite", help="If the output directory already exists and you wish to overwrite it, set this option.", action="store_true", default=False);
parser.add_argument("--outname", dest="outname", help="Use the end of the output directory path as the job name.", action="store_true", default=False);
# IO options

parser.add_argument("-part", dest="part", help="SLURM partition option.", default=False);
parser.add_argument("-tasks", dest="tasks", help="SLURM --ntasks option.", type=int, default=1);
parser.add_argument("-cpus", dest="cpus", help="SLURM --cpus-per-task option.", type=int, default=1);
parser.add_argument("-mem", dest="mem", help="SLURM --mem option.", type=int, default=0);
# SLURM options

args = parser.parse_args();

# IO option error checking
if not args.input or not os.path.isdir(args.input):
    sys.exit(" * Error 1: An input directory must be defined with -i.");
args.input = os.path.abspath(args.input);

if not args.name:
    # No job name given -> generate a random one so output files are unique.
    name = core.getRandStr();
else:
    name = args.name;

if not args.output:
    sys.exit(" * Error 2: An output directory must be defined with -o.");
args.output = os.path.abspath(args.output);
# if args.outname:
#     name = os.path.basename(args.output);
# else:
#     args.output = args.output + "-" + name + "/";

if os.path.isdir(args.output) and not args.overwrite:
    sys.exit(" * Error 3: Output directory (-o) already exists! Explicity specify --overwrite to overwrite it.");

# SLURM option error checking
if not args.part:
    sys.exit(" * Error 4: -part must be defined as a valid node partition on your clutser.");
if args.tasks < 1:
    sys.exit(" * Error 5: -tasks must be a positive integer.");
# BUG FIX: the two checks below previously re-tested args.tasks, so invalid
# -cpus and -mem values were never caught. -mem allows 0 (its default).
if args.cpus < 1:
    sys.exit(" * Error 6: -cpus must be a positive integer.");
if args.mem < 0:
    sys.exit(" * Error 7: -mem must be a positive integer.");

############################################################
# Job vars

pad = 26
cwd = os.getcwd();

# Job files
output_file = os.path.join(cwd, "jobs", "muscle_cmds_" + name + ".sh");
submit_file = os.path.join(cwd, "submit", "muscle_submit_" + name + ".sh");
logdir = os.path.join(args.output, "logs");

############################################################
# Reporting run-time info for records.

with open(output_file, "w") as outfile:
    core.runTime("#!/bin/bash\n# MUSCLE command generator", outfile);
    core.PWS("# IO OPTIONS", outfile);
    core.PWS(core.spacedOut("# Input directory:", pad) + args.input, outfile);
    if args.outname:
        core.PWS(core.spacedOut("# --outname:", pad) + "Using end of output directory path as job name.", outfile);
    if not args.name:
        core.PWS("# -n not specified --> Generating random string for job name", outfile);
    core.PWS(core.spacedOut("# Job name:", pad) + name, outfile);
    core.PWS(core.spacedOut("# Output directory:", pad) + args.output, outfile);
    if args.overwrite:
        core.PWS(core.spacedOut("# --overwrite set:", pad) + "Overwriting previous files in output directory.", outfile);
    if not os.path.isdir(args.output):
        core.PWS("# Creating output directory.", outfile);
        os.system("mkdir " + args.output);
    core.PWS(core.spacedOut("# Logfile directory:", pad) + logdir, outfile);
    if not os.path.isdir(logdir):
        core.PWS("# Creating logfile directory.", outfile);
        os.system("mkdir " + logdir);
    core.PWS(core.spacedOut("# Job file:", pad) + output_file, outfile);
    # NOTE(review): the two separator lines below were garbled in this copy of
    # the file; reconstructed as plain divider comments.
    core.PWS("# ----------", outfile);
    core.PWS("# SLURM OPTIONS", outfile);
    core.PWS(core.spacedOut("# Submit file:", pad) + submit_file, outfile);
    core.PWS(core.spacedOut("# SLURM partition:", pad) + args.part, outfile);
    core.PWS(core.spacedOut("# SLURM ntasks:", pad) + str(args.tasks), outfile);
    core.PWS(core.spacedOut("# SLURM cpus-per-task:", pad) + str(args.cpus), outfile);
    core.PWS(core.spacedOut("# SLURM mem:", pad) + str(args.mem), outfile);
    core.PWS("# ----------", outfile);
    core.PWS("# BEGIN CMDS", outfile);

    ##########
    # Generating the commands in the job file.
    for f in os.listdir(args.input):
        base_input = os.path.splitext(f)[0];
        cur_infile = os.path.join(args.input, f);
        cur_outfile = os.path.join(args.output, base_input + "-muscle.fa");
        cur_logfile = os.path.join(logdir, base_input + "-muscle.log");
        muscle_cmd = args.path + " -in '" + cur_infile + "' -out '" + cur_outfile +"' > " + cur_logfile + " 2>&1";
        outfile.write(muscle_cmd + "\n");

##########
# Generating the submit script.
# NOTE(review): 'submit' is an sbatch template string that must be defined
# elsewhere (it is not defined in this chunk) -- confirm before running.
with open(submit_file, "w") as sfile:
    sfile.write(submit.format(name=name, partition=args.part, tasks=args.tasks,
                              cpus=args.cpus, mem=args.mem, output_file=output_file));
from __future__ import absolute_import import ast import re import operator as op import pyparsing from ..exceptions import <API key> from . import <API key> _OP_MAP = { ast.Add: op.add, ast.Sub: op.sub, ast.Mult: op.mul, ast.Div: op.truediv, ast.Invert: op.neg, } class Calc(ast.NodeVisitor): def visit_BinOp(self, node): return _OP_MAP[type(node.op)](self.visit(node.left), self.visit(node.right)) def visit_Num(self, node): return node.n def visit_Expr(self, node): return self.visit(node.value) @classmethod def doMath(cls, expression): tree = ast.parse(expression) calc = cls() return calc.visit(tree.body[0]) class Parentheses(object): def fix(self, s): res = [] self.visited = set([s]) self.dfs(s, self.invalid(s), res) return res def dfs(self, s, n, res): if n == 0: res.append(s) return for i in range(len(s)): if s[i] in ['(', ')']: s_new = s[:i] + s[i + 1:] if s_new not in self.visited and self.invalid(s_new) < n: self.visited.add(s_new) self.dfs(s_new, self.invalid(s_new), res) def invalid(self, s): plus = minus = 0 memo = {"(": 1, ")": -1} for c in s: plus += memo.get(c, 0) minus += 1 if plus < 0 else 0 plus = max(0, plus) return plus + minus class <API key>(<API key>): def __init__(self): super(<API key>, self).__init__('native') def eval(self, body, domain): operators = { '+': op.add, '-': op.sub, '*': op.mul, '/': op.truediv } def flatten(lists): return sum(map(flatten, lists), []) if isinstance(lists, list) else [lists] def jsfuckToNumber(jsFuck): # "Clean Up" JSFuck jsFuck = jsFuck.replace('!+[]', '1').replace('!![]', '1').replace('[]', '0') jsFuck = jsFuck.lstrip('+').replace('(+', '(').replace(' ', '') jsFuck = Parentheses().fix(jsFuck)[0] # Hackery Parser for Math stack = [] bstack = [] for i in flatten(pyparsing.nestedExpr().parseString(jsFuck).asList()): if i == '+': stack.append(bstack) bstack = [] continue bstack.append(i) stack.append(bstack) return int(''.join([str(Calc.doMath(''.join(i))) for i in stack])) def divisorMath(payload, needle, domain): 
jsfuckMath = payload.split('/') if needle in jsfuckMath[1]: expression = re.findall(r"^(.*?)(.)\(function", jsfuckMath[1])[0] expression_value = operators[expression[1]]( float(jsfuckToNumber(expression[0])), float(ord(domain[jsfuckToNumber(jsfuckMath[1][ jsfuckMath[1].find('"("+p+")")}') + len('"("+p+")")}'):-2 ])])) ) else: expression_value = jsfuckToNumber(jsfuckMath[1]) expression_value = jsfuckToNumber(jsfuckMath[0]) / float(expression_value) return expression_value def challengeSolve(body, domain): jschl_answer = 0 try: jsfuckChallenge = re.search( r"setTimeout\(function\(\){\s+var.*?f,\s*(?P<variable>\w+).*?:(?P<init>\S+)};" r".*?\('challenge-form'\);.*?;(?P<challenge>.*?a\.value)\s*=\s*\S+\.toFixed\(10\);", body, re.DOTALL | re.MULTILINE ).groupdict() except AttributeError: raise <API key>('There was an issue extracting "jsfuckChallenge" from the Cloudflare challenge.') kJSFUCK = re.search(r'(;|)\s*k.=(?P<kJSFUCK>\S+);', jsfuckChallenge['challenge'], re.S | re.M) if kJSFUCK: try: kJSFUCK = jsfuckToNumber(kJSFUCK.group('kJSFUCK')) except IndexError: raise <API key>('There was an issue extracting "kJSFUCK" from the Cloudflare challenge.') try: kID = re.search(r"\s*k\s*=\s*'(?P<kID>\S+)';", body).group('kID') except IndexError: raise <API key>('There was an issue extracting "kID" from the Cloudflare challenge.') try: r = re.compile(r'<div id="{}(?P<id>\d+)">\s*(?P<jsfuck>[^<>]*)</div>'.format(kID)) kValues = {} for m in r.finditer(body): kValues[int(m.group('id'))] = m.group('jsfuck') jsfuckChallenge['k'] = kValues[kJSFUCK] except (AttributeError, IndexError): raise <API key>('There was an issue extracting "kValues" from the Cloudflare challenge.') jsfuckChallenge['challenge'] = re.finditer( r'{}.*?([+\-*/])=(.*?);(?=a\.value|{})'.format( jsfuckChallenge['variable'], jsfuckChallenge['variable'] ), jsfuckChallenge['challenge'] ) if '/' in jsfuckChallenge['init']: val = jsfuckChallenge['init'].split('/') jschl_answer = jsfuckToNumber(val[0]) / 
float(jsfuckToNumber(val[1])) else: jschl_answer = jsfuckToNumber(jsfuckChallenge['init']) for expressionMatch in jsfuckChallenge['challenge']: oper, expression = expressionMatch.groups() if '/' in expression: expression_value = divisorMath(expression, 'function(p)', domain) else: if 'Element' in expression: expression_value = divisorMath(jsfuckChallenge['k'], '"("+p+")")}', domain) else: expression_value = jsfuckToNumber(expression) jschl_answer = operators[oper](jschl_answer, expression_value) # if not jsfuckChallenge['k'] and '+ t.length' in body: # jschl_answer += len(domain) return '{0:.10f}'.format(jschl_answer) return challengeSolve(body, domain) <API key>()
#include "../common.h"

// Regular-expression matcher supporting '.' (any single char), '*'
// (zero or more of the preceding element) and '\' escapes. Returns true iff
// pattern <p> matches the ENTIRE string <s>. Works recursively: each call
// consumes one pattern element (plus optional trailing '*') against s[0].
class Solution {
public:
    bool isMatch(string s, string p) {
        const static char ESCAPE_CHRACTER = '\\';
        const static char ANY_COUNT = '*';
        const static char ANY_VALUE = '.';
        const static char INVALID_SUBJECT = '\0';
        // check if pattern is empty: an empty pattern matches only an empty string
        if (p.empty())
            return s.empty();
        // invalid character '\0' stands for empty subject
        char subject = s.empty()? INVALID_SUBJECT: s[0];
        int pLen = p.length();
        bool bHasEscape = false;       // true while the previous char was an unconsumed '\'
        bool bMatched = false;         // set when some pattern element matches s[0]
        int regexHeadLength;           // 1-based length of the pattern prefix consumed so far
        bool bAnyValueEnabled = true;  // false when '.' is escaped (must match literally)
        char match = '\0';             // current pattern element being compared
        // Scan pattern elements until one matches s[0] or the pattern is exhausted.
        for (regexHeadLength = 1; regexHeadLength <= pLen; regexHeadLength++) {
            bAnyValueEnabled = true;
            match = p[regexHeadLength - 1];
            if (match == ESCAPE_CHRACTER) {
                // Toggle: "\\" means a literal backslash; a lone '\' defers to next char.
                bHasEscape = !bHasEscape;
                if (bHasEscape)
                    continue;
            }
            // NOTE(review): bHasEscape is never reset to false after the
            // escaped element is processed below; if the loop iterates again
            // (non-matching escaped char followed by '*'), the next pattern
            // char is also treated as escaped — confirm whether intended.
            if (bHasEscape) {
                // last character is escape and current character is not,
                // current character can only be dot or star
                switch (match) {
                    case ANY_VALUE:
                        bAnyValueEnabled = false;  // "\." matches a literal dot only
                        break;
                    case ANY_COUNT:
                        // do nothing: "\*" matches a literal star
                        break;
                    default:
                        return false; // syntax error
                }
            }
            else if (ANY_COUNT == match) // syntax error: '*' with nothing to repeat
                return false;
            // matched, break the loop
            if (subject != INVALID_SUBJECT &&
                (match == subject || (bAnyValueEnabled && ANY_VALUE == match))) {
                bMatched = true;
                break;
            }
            // unmatch: only recoverable if the element is followed by '*'
            // (zero occurrences); otherwise the whole match fails.
            // detect whether next character is star
            if (regexHeadLength >= pLen || ANY_COUNT != p[regexHeadLength++])
                return false;
        }
        // Pattern exhausted without a match: succeeds only for empty subject
        // (every element was skipped via '*').
        if (!bMatched)
            return s.empty();
        // matched, detect whether next character is star
        if (regexHeadLength < pLen && ANY_COUNT == p[regexHeadLength]) {
            string subPattern = p.substr(regexHeadLength + 1, string::npos);
            // zero occurrences of the starred element
            if (isMatch(s, subPattern))
                return true;
            int sLen = s.length();
            // greed or not: try consuming 1..k occurrences while chars keep matching
            int i;
            for (i = 1; i <= sLen; i++) {
                bool bGreedMatch = (match == s[i - 1] ||
                                    (bAnyValueEnabled && ANY_VALUE == match));
                if (!bGreedMatch)
                    break;
                if (isMatch(s.substr(i, string::npos), subPattern))
                    return true;
            }
            // starred element consumed the whole string; rest of pattern must
            // match the empty string
            if (i == sLen + 1 && isMatch("", subPattern))
                return true;
        }
        // Single (non-starred) element matched s[0]: recurse on the tails.
        string subSubject = s.empty()? s: s.substr(1, string::npos);
        return isMatch(subSubject, p.substr(regexHeadLength, string::npos));
    }
};
package net.usikkert.kouinject.testbeans.scanned.factory;

import javax.inject.Named;

import net.usikkert.kouinject.annotation.Component;
import net.usikkert.kouinject.annotation.Produces;
import net.usikkert.kouinject.testbeans.scanned.qualifier.OrangeBean;

/**
 * Factory bean that produces a new {@link OrangeBean} with the "orange"
 * qualifier, for testing qualifiers and injection with interface of bean
 * from factory.
 *
 * @author Christian Ihle
 */
@Component
public class OrangeFactoryBean {

    /**
     * Creates an {@link OrangeBean}, flagging it as factory-created so tests
     * can distinguish it from a bean the container instantiated directly.
     *
     * @return A new, factory-created OrangeBean.
     */
    @Produces
    @Named("orange")
    public OrangeBean createOrangeBean() {
        final OrangeBean orangeBean = new OrangeBean();
        orangeBean.setCreatedByFactory(true);
        return orangeBean;
    }
}
@import url("basic.css"); body { font-family: Arial, sans-serif; font-size: 100%; background-color: #111; color: #555; margin: 0; padding: 0; } div.documentwrapper { float: left; width: 100%; } div.bodywrapper { margin: 0 0 0 230px; } hr { border: 1px solid #B1B4B6; } div.document { background-color: #eee; } div.body { background-color: #ffffff; color: #3E4349; padding: 0 30px 30px 30px; font-size: 0.9em; } div.footer { color: #555; width: 100%; padding: 13px 0; text-align: center; font-size: 75%; } div.footer a { color: #444; text-decoration: underline; } div.related { background-color: #6BA81E; line-height: 32px; color: #fff; text-shadow: 0px 1px 0 #444; font-size: 0.9em; } div.related a { color: #E2F3CC; } div.sphinxsidebar { font-size: 0.75em; line-height: 1.5em; } div.<API key>{ padding: 20px 0; } div.sphinxsidebar h3, div.sphinxsidebar h4 { font-family: Arial, sans-serif; color: #222; font-size: 1.2em; font-weight: normal; margin: 0; padding: 5px 10px; background-color: #ddd; text-shadow: 1px 1px 0 white } div.sphinxsidebar h4{ font-size: 1.1em; } div.sphinxsidebar h3 a { color: #444; } div.sphinxsidebar p { color: #888; padding: 5px 20px; } div.sphinxsidebar p.topless { } div.sphinxsidebar ul { margin: 10px 20px; padding: 0; color: #000; } div.sphinxsidebar a { color: #444; } div.sphinxsidebar input { border: 1px solid #ccc; font-family: sans-serif; font-size: 1em; } div.sphinxsidebar input[type=text]{ margin-left: 20px; } a { color: #005B81; text-decoration: none; } a:hover { color: #E32E00; text-decoration: underline; } div.body h1, div.body h2, div.body h3, div.body h4, div.body h5, div.body h6 { font-family: Arial, sans-serif; background-color: #BED4EB; font-weight: normal; color: #212224; margin: 30px 0px 10px 0px; padding: 5px 0 5px 10px; text-shadow: 0px 1px 0 white } div.body h1 { border-top: 20px solid white; margin-top: 0; font-size: 200%; } div.body h2 { font-size: 150%; background-color: #C8D5E3; } div.body h3 { font-size: 120%; background-color: 
#D8DEE3; } div.body h4 { font-size: 110%; background-color: #D8DEE3; } div.body h5 { font-size: 100%; background-color: #D8DEE3; } div.body h6 { font-size: 100%; background-color: #D8DEE3; } a.headerlink { color: #c60f0f; font-size: 0.8em; padding: 0 4px 0 4px; text-decoration: none; } a.headerlink:hover { background-color: #c60f0f; color: white; } div.body p, div.body dd, div.body li { line-height: 1.5em; } div.admonition p.admonition-title + p { display: inline; } div.highlight{ background-color: white; } div.note { background-color: #eee; border: 1px solid #ccc; } div.seealso { background-color: #ffc; border: 1px solid #ff6; } div.topic { background-color: #eee; } div.warning { background-color: #ffe4e4; border: 1px solid #f66; } p.admonition-title { display: inline; } p.admonition-title:after { content: ":"; } pre { padding: 10px; background-color: White; color: #222; line-height: 1.2em; border: 1px solid #C6C9CB; font-size: 1.1em; margin: 1.5em 0 1.5em 0; -webkit-box-shadow: 1px 1px 1px #d8d8d8; -moz-box-shadow: 1px 1px 1px #d8d8d8; } tt { background-color: #ecf0f3; color: #222; /* padding: 1px 2px; */ font-size: 1.1em; font-family: monospace; } .viewcode-back { font-family: Arial, sans-serif; } div.viewcode-block:target { background-color: #f4debf; border-top: 1px solid #ac9; border-bottom: 1px solid #ac9; } /* Experimental additions by Abhijit Mahabal */ div.links-block ul li { display: inline; list-style-type: none; line-height: 2.5em; padding-right: 20px; float: left; width: 47%; } div.links-block a { font-size: 2em; }
#include "config.h" #include "conky.h" #include "common.h" #include "logging.h" #include "text_object.h" #include "timed-thread.h" #include <errno.h> #include <stdio.h> #include <string.h> #include <unistd.h> #include <limits.h> #include <mutex> #include <netinet/in.h> #include <netdb.h> #include <sys/socket.h> #include <sys/stat.h> #include <sys/time.h> #include <sys/param.h> #include <dirent.h> #include <errno.h> #include <termios.h> /* MAX() is defined by a header included from conky.h * maybe once this is not true anymore, so have an alternative * waiting to drop in. * * #define MAX(a, b) ((a > b) ? a : b) */ #define POP3_TYPE 1 #define IMAP_TYPE 2 #define MAXFOLDERSIZE 128 struct mail_s { // for imap and pop3 unsigned long unseen; unsigned long messages; unsigned long used; unsigned long quota; unsigned long port; unsigned int retries; float interval; double last_update; char host[128]; char user[128]; char pass[128]; char command[1024]; timed_thread_ptr p_timed_thread; char secure; char folder[MAXFOLDERSIZE]; mail_s() : unseen(0), messages(0), used(0), quota(0), port(0), retries(0), interval(0), last_update(0), secure(0) { host[0] = 0; user[0] = 0; pass[0] = 0; command[0] = 0; memset(folder, 0, MAXFOLDERSIZE); /* to satisfy valgrind */ } }; struct local_mail_s { char *mbox; int mail_count; int new_mail_count; int seen_mail_count; int unseen_mail_count; int flagged_mail_count; int <API key>; int <API key>; int <API key>; int replied_mail_count; int <API key>; int draft_mail_count; int trashed_mail_count; float interval; time_t last_mtime; double last_update; }; char *current_mail_spool; static struct mail_s *global_mail; static int global_mail_use = 0; static void update_mail_count(struct local_mail_s *mail) { struct stat st; if (mail == NULL) { return; } /* TODO: use that fine file modification notify on Linux 2.4 */ /* don't check mail so often (9.5s is minimum interval) */ if (current_update_time - mail->last_update < 9.5) { return; } else { 
mail->last_update = current_update_time; } if (stat(mail->mbox, &st)) { static int rep = 0; if (!rep) { NORM_ERR("can't stat %s: %s", mail->mbox, strerror(errno)); rep = 1; } return; } #if HAVE_DIRENT_H /* maildir format */ if (S_ISDIR(st.st_mode)) { DIR *dir; char *dirname; struct dirent *dirent; char *mailflags; mail->mail_count = mail->new_mail_count = 0; mail->seen_mail_count = mail->unseen_mail_count = 0; mail->flagged_mail_count = mail-><API key> = 0; mail-><API key> = mail-><API key> = 0; mail->replied_mail_count = mail-><API key> = 0; mail->draft_mail_count = mail->trashed_mail_count = 0; dirname = (char *) malloc(sizeof(char) * (strlen(mail->mbox) + 5)); if (!dirname) { NORM_ERR("malloc"); return; } strcpy(dirname, mail->mbox); strcat(dirname, "/"); /* checking the cur subdirectory */ strcat(dirname, "cur"); dir = opendir(dirname); if (!dir) { NORM_ERR("cannot open directory"); free(dirname); return; } dirent = readdir(dir); while (dirent) { /* . and .. are skipped */ if (dirent->d_name[0] != '.') { mail->mail_count++; mailflags = (char *) malloc(sizeof(char) * strlen(strrchr(dirent->d_name, ','))); if (!mailflags) { NORM_ERR("malloc"); free(dirname); return; } strcpy(mailflags, strrchr(dirent->d_name, ',')); if (!strchr(mailflags, 'T')) { /* The message is not in the trash */ if (strchr(mailflags, 'S')) { /*The message has been seen */ mail->seen_mail_count++; } else { mail->unseen_mail_count++; } if (strchr(mailflags, 'F')) { /*The message was flagged */ mail->flagged_mail_count++; } else { mail-><API key>++; } if (strchr(mailflags, 'P')) { /*The message was forwarded */ mail-><API key>++; } else { mail-><API key>++; } if (strchr(mailflags, 'R')) { /*The message was replied */ mail->replied_mail_count++; } else { mail-><API key>++; } if (strchr(mailflags, 'D')) { /*The message is a draft */ mail->draft_mail_count++; } } else { mail->trashed_mail_count++; } free(mailflags); } dirent = readdir(dir); } closedir(dir); dirname[strlen(dirname) - 3] = '\0'; 
strcat(dirname, "new"); dir = opendir(dirname); if (!dir) { NORM_ERR("cannot open directory"); free(dirname); return; } dirent = readdir(dir); while (dirent) { /* . and .. are skipped */ if (dirent->d_name[0] != '.') { mail->new_mail_count++; mail->mail_count++; mail->unseen_mail_count++; /* new messages cannot have been seen */ } dirent = readdir(dir); } closedir(dir); free(dirname); return; } #endif /* mbox format */ if (st.st_mtime != mail->last_mtime) { /* yippee, modification time has changed, let's read mail count! */ static int rep; FILE *fp; int reading_status = 0; /* could lock here but I don't think it's really worth it because * this isn't going to write mail spool */ mail->new_mail_count = mail->mail_count = 0; /* these flags are not supported for mbox */ mail->seen_mail_count = mail->unseen_mail_count = -1; mail->flagged_mail_count = mail-><API key> = -1; mail-><API key> = mail-><API key> = -1; mail->replied_mail_count = mail-><API key> = -1; mail->draft_mail_count = mail->trashed_mail_count = -1; fp = open_file(mail->mbox, &rep); if (!fp) { return; } /* NOTE: adds mail as new if there isn't Status-field at all */ while (!feof(fp)) { char buf[128]; int was_new = 0; if (fgets(buf, 128, fp) == NULL) { break; } if (strncmp(buf, "From ", 5) == 0) { /* ignore MAILER-DAEMON */ if (strncmp(buf + 5, "MAILER-DAEMON ", 14) != 0) { mail->mail_count++; was_new = 0; if (reading_status == 1) { mail->new_mail_count++; } else { reading_status = 1; } } } else { if (reading_status == 1 && strncmp(buf, "X-Mozilla-Status:", 17) == 0) { int xms = strtol(buf + 17, NULL, 16); /* check that mail isn't marked for deletion */ if (xms & 0x0008) { mail->trashed_mail_count++; reading_status = 0; /* Don't check whether the trashed email is unread */ continue; } /* check that mail isn't already read */ if (!(xms & 0x0001)) { mail->new_mail_count++; was_new = 1; } /* check for an additional X-Status header */ reading_status = 2; continue; } if (reading_status == 1 && strncmp(buf, 
"Status:", 7) == 0) { /* check that mail isn't already read */ if (strchr(buf + 7, 'R') == NULL) { mail->new_mail_count++; was_new = 1; } reading_status = 2; continue; } if (reading_status >= 1 && strncmp(buf, "X-Status:", 9) == 0) { /* check that mail isn't marked for deletion */ if (strchr(buf + 9, 'D') != NULL) { mail->trashed_mail_count++; /* If the mail was previously detected as new, subtract it from the new mail count */ if (was_new) mail->new_mail_count } reading_status = 0; continue; } } /* skip until \n */ while (strchr(buf, '\n') == NULL && !feof(fp)) { if (!fgets(buf, 128, fp)) break; } } fclose(fp); if (reading_status) { mail->new_mail_count++; } mail->last_mtime = st.st_mtime; } } void <API key>(struct text_object *obj, const char *arg) { float n1; char mbox[256], dst[256]; struct local_mail_s *locmail; if (!arg) { n1 = 9.5; /* Kapil: Changed from MAIL_FILE to current_mail_spool since the latter is a copy of the former if undefined but the latter should take precedence if defined */ strncpy(mbox, current_mail_spool, sizeof(mbox)); } else { if (sscanf(arg, "%s %f", mbox, &n1) != 2) { n1 = 9.5; strncpy(mbox, arg, sizeof(mbox)); } } variable_substitute(mbox, dst, sizeof(dst)); locmail = (struct local_mail_s*)malloc(sizeof(struct local_mail_s)); memset(locmail, 0, sizeof(struct local_mail_s)); locmail->mbox = strndup(dst, text_buffer_size); locmail->interval = n1; obj->data.opaque = locmail; } #define <API key>(x) \ void print_##x##mails(struct text_object *obj, char *p, int p_max_size) \ { \ struct local_mail_s *locmail = (struct local_mail_s *)obj->data.opaque; \ if (!locmail) \ return; \ update_mail_count(locmail); \ snprintf(p, p_max_size, "%d", locmail->x##mail_count); \ } <API key>() <API key>(new_) <API key>(seen_) <API key>(unseen_) <API key>(flagged_) <API key>(unflagged_) <API key>(forwarded_) <API key>(unforwarded_) <API key>(replied_) <API key>(unreplied_) <API key>(draft_) <API key>(trashed_) void free_local_mails(struct text_object *obj) { 
struct local_mail_s *locmail = (struct local_mail_s *)obj->data.opaque; if (!locmail) return; free_and_zero(locmail->mbox); free_and_zero(obj->data.opaque); } #define MAXDATASIZE 1000 struct mail_s *parse_mail_args(char type, const char *arg) { struct mail_s *mail; char *tmp; mail = new mail_s; if (sscanf(arg, "%128s %128s %128s", mail->host, mail->user, mail->pass) != 3) { if (type == POP3_TYPE) { NORM_ERR("Scanning POP3 args failed"); } else if (type == IMAP_TYPE) { NORM_ERR("Scanning IMAP args failed"); } delete mail; return 0; } // see if password needs prompting if (mail->pass[0] == '*' && mail->pass[1] == '\0') { int fp = fileno(stdin); struct termios term; tcgetattr(fp, &term); term.c_lflag &= ~ECHO; tcsetattr(fp, TCSANOW, &term); printf("Enter mailbox password (%s@%s): ", mail->user, mail->host); if (scanf("%128s", mail->pass)) mail->pass[0] = 0; printf("\n"); term.c_lflag |= ECHO; tcsetattr(fp, TCSANOW, &term); } // now we check for optional args tmp = (char*)strstr(arg, "-r "); if (tmp) { tmp += 3; sscanf(tmp, "%u", &mail->retries); } else { mail->retries = 5; // 5 retries after failure } tmp = (char*)strstr(arg, "-i "); if (tmp) { tmp += 3; sscanf(tmp, "%f", &mail->interval); } else { mail->interval = 300; // 5 minutes } tmp = (char*)strstr(arg, "-p "); if (tmp) { tmp += 3; sscanf(tmp, "%lu", &mail->port); } else { if (type == POP3_TYPE) { mail->port = 110; // default pop3 port } else if (type == IMAP_TYPE) { mail->port = 143; // default imap port } } if (type == IMAP_TYPE) { tmp = (char*)strstr(arg, "-f "); if (tmp) { int len = MAXFOLDERSIZE-1; tmp += 3; if (tmp[0] == '\'') { len = (char*)strstr(tmp + 1, "'") - tmp - 1; if (len > MAXFOLDERSIZE-1) { len = MAXFOLDERSIZE-1; } tmp++; } strncpy(mail->folder, tmp, len); } else { strncpy(mail->folder, "INBOX", MAXFOLDERSIZE-1); // default imap inbox } } tmp = (char*)strstr(arg, "-e "); if (tmp) { int len = 1024; tmp += 3; if (tmp[0] == '\'') { len = (char*)strstr(tmp + 1, "'") - tmp - 1; if (len > 1024) { len 
= 1024; } } strncpy(mail->command, tmp + 1, len); } else { mail->command[0] = '\0'; } return mail; } void <API key>(struct text_object *obj, const char *arg) { static int rep = 0; if (!arg) { if (!global_mail && !rep) { // something is wrong, warn once then stop NORM_ERR("There's a problem with your mail settings. " "Check that the global mail settings are properly defined" " (line %li).", obj->line); rep = 1; return; } obj->data.opaque = global_mail; global_mail_use++; return; } // proccss obj->data.opaque = parse_mail_args(IMAP_TYPE, arg); } void <API key>(struct text_object *obj, const char *arg) { static int rep = 0; if (!arg) { if (!global_mail && !rep) { // something is wrong, warn once then stop NORM_ERR("There's a problem with your mail settings. " "Check that the global mail settings are properly defined" " (line %li).", obj->line); rep = 1; return; } obj->data.opaque = global_mail; global_mail_use++; return; } // proccss obj->data.opaque = parse_mail_args(POP3_TYPE, arg); } void <API key>(const char *value) { global_mail = parse_mail_args(IMAP_TYPE, value); } void <API key>(const char *value) { global_mail = parse_mail_args(POP3_TYPE, value); } void free_mail_obj(struct text_object *obj) { if (!obj->data.opaque) return; if (obj->data.opaque == global_mail) { if (--global_mail_use == 0) { delete global_mail; global_mail = 0; } } else { struct mail_s *mail = (struct mail_s*)obj->data.opaque; delete mail; obj->data.opaque = 0; } } int imap_command(int sockfd, const char *command, char *response, const char *verify) { struct timeval fetchtimeout; fd_set fdset; int res, numbytes = 0; if (send(sockfd, command, strlen(command), 0) == -1) { perror("send"); return -1; } fetchtimeout.tv_sec = 60; // 60 second timeout i guess fetchtimeout.tv_usec = 0; FD_ZERO(&fdset); FD_SET(sockfd, &fdset); res = select(sockfd + 1, &fdset, NULL, NULL, &fetchtimeout); if (res > 0) { if ((numbytes = recv(sockfd, response, MAXDATASIZE - 1, 0)) == -1) { perror("recv"); return -1; } } 
DBGP2("imap_command() command: %s", command); DBGP2("imap_command() received: %s", response); response[numbytes] = '\0'; if (strstr(response, verify) == NULL) { return -1; } return 0; } int imap_check_status(char *recvbuf, struct mail_s *mail, thread_handle &handle) { char *reply; reply = (char*)strstr(recvbuf, " (MESSAGES "); if (!reply || strlen(reply) < 2) { return -1; } reply += 2; *strchr(reply, ')') = '\0'; if (reply == NULL) { NORM_ERR("Error parsing IMAP response: %s", recvbuf); return -1; } else { std::lock_guard<std::mutex> lock(handle.mutex()); sscanf(reply, "MESSAGES %lu UNSEEN %lu", &mail->messages, &mail->unseen); } return 0; } void imap_unseen_command(struct mail_s *mail, unsigned long old_unseen, unsigned long old_messages) { if (strlen(mail->command) > 1 && (mail->unseen > old_unseen || (mail->messages > old_messages && mail->unseen > 0))) { // new mail goodie if (system(mail->command) == -1) { perror("system()"); } } } static void ensure_mail_thread(struct mail_s *mail, const std::function<void(thread_handle &, struct mail_s *mail)> &func, const char *text) { if (mail->p_timed_thread) return; mail->p_timed_thread = timed_thread::create(std::bind(func, std::placeholders::_1, mail), std::chrono::microseconds(long(mail->interval * 1000000))); if (!mail->p_timed_thread) { NORM_ERR("Error creating %s timed thread", text); } } static void imap_thread(thread_handle &handle, struct mail_s *mail) { int sockfd, numbytes; char recvbuf[MAXDATASIZE]; char sendbuf[MAXDATASIZE]; unsigned int fail = 0; unsigned long old_unseen = ULONG_MAX; unsigned long old_messages = ULONG_MAX; struct stat stat_buf; struct hostent *he_res = 0; struct sockaddr_in their_addr; // connector's address information int has_idle = 0; int threadfd = handle.readfd(); char resolved_host = 0; while (fail < mail->retries) { struct timeval fetchtimeout; int res; fd_set fdset; if (!resolved_host) { #ifdef <API key> int he_errno; struct hostent he; char hostbuff[2048]; if 
(gethostbyname_r(mail->host, &he, hostbuff, sizeof(hostbuff), &he_res, &he_errno)) { // get the host info NORM_ERR("IMAP gethostbyname_r: %s", hstrerror(h_errno)); fail++; break; } #else /* <API key> */ if ((he_res = gethostbyname(mail->host)) == NULL) { // get the host info herror("gethostbyname"); fail++; break; } #endif /* <API key> */ resolved_host = 1; } if (fail > 0) { NORM_ERR("Trying IMAP connection again for %s@%s (try %u/%u)", mail->user, mail->host, fail + 1, mail->retries); } do { if ((sockfd = socket(PF_INET, SOCK_STREAM, 0)) == -1) { perror("socket"); fail++; break; } // host byte order their_addr.sin_family = AF_INET; // short, network byte order their_addr.sin_port = htons(mail->port); their_addr.sin_addr = *((struct in_addr *) he_res->h_addr); // zero the rest of the struct memset(&(their_addr.sin_zero), '\0', 8); if (connect(sockfd, (struct sockaddr *) &their_addr, sizeof(struct sockaddr)) == -1) { perror("connect"); fail++; break; } fetchtimeout.tv_sec = 60; // 60 second timeout i guess fetchtimeout.tv_usec = 0; FD_ZERO(&fdset); FD_SET(sockfd, &fdset); res = select(sockfd + 1, &fdset, NULL, NULL, &fetchtimeout); if (res > 0) { if ((numbytes = recv(sockfd, recvbuf, MAXDATASIZE - 1, 0)) == -1) { perror("recv"); fail++; break; } } else { NORM_ERR("IMAP connection failed: timeout"); fail++; break; } recvbuf[numbytes] = '\0'; DBGP2("imap_thread() received: %s", recvbuf); if (strstr(recvbuf, "* OK") != recvbuf) { NORM_ERR("IMAP connection failed, probably not an IMAP server"); fail++; break; } strncpy(sendbuf, "abc CAPABILITY\r\n", MAXDATASIZE); if (imap_command(sockfd, sendbuf, recvbuf, "abc OK")) { fail++; break; } if (strstr(recvbuf, " IDLE ") != NULL) { has_idle = 1; } strncpy(sendbuf, "a1 login ", MAXDATASIZE); strncat(sendbuf, mail->user, MAXDATASIZE - strlen(sendbuf) - 1); strncat(sendbuf, " ", MAXDATASIZE - strlen(sendbuf) - 1); strncat(sendbuf, mail->pass, MAXDATASIZE - strlen(sendbuf) - 1); strncat(sendbuf, "\r\n", MAXDATASIZE - 
strlen(sendbuf) - 1); if (imap_command(sockfd, sendbuf, recvbuf, "a1 OK")) { fail++; break; } strncpy(sendbuf, "a2 STATUS \"", MAXDATASIZE); strncat(sendbuf, mail->folder, MAXDATASIZE - strlen(sendbuf) - 1); strncat(sendbuf, "\" (MESSAGES UNSEEN)\r\n", MAXDATASIZE - strlen(sendbuf) - 1); if (imap_command(sockfd, sendbuf, recvbuf, "a2 OK")) { fail++; break; } if (imap_check_status(recvbuf, mail, handle)) { fail++; break; } imap_unseen_command(mail, old_unseen, old_messages); fail = 0; old_unseen = mail->unseen; old_messages = mail->messages; if (has_idle) { strncpy(sendbuf, "a4 SELECT \"", MAXDATASIZE); strncat(sendbuf, mail->folder, MAXDATASIZE - strlen(sendbuf) - 1); strncat(sendbuf, "\"\r\n", MAXDATASIZE - strlen(sendbuf) - 1); if (imap_command(sockfd, sendbuf, recvbuf, "a4 OK")) { fail++; break; } strncpy(sendbuf, "a5 IDLE\r\n", MAXDATASIZE); if (imap_command(sockfd, sendbuf, recvbuf, "+ idling")) { fail++; break; } recvbuf[0] = '\0'; while (1) { /* * RFC 2177 says we have to re-idle every 29 minutes. * We'll do it every 20 minutes to be safe. 
*/ fetchtimeout.tv_sec = 1200; fetchtimeout.tv_usec = 0; DBGP2("idling..."); FD_ZERO(&fdset); FD_SET(sockfd, &fdset); FD_SET(threadfd, &fdset); res = select(MAX(sockfd + 1, threadfd + 1), &fdset, NULL, NULL, &fetchtimeout); if (handle.test(1) || (res == -1 && errno == EINTR) || FD_ISSET(threadfd, &fdset)) { if ((fstat(sockfd, &stat_buf) == 0) && S_ISSOCK(stat_buf.st_mode)) { /* if a valid socket, close it */ close(sockfd); } return; } else if (res > 0) { if ((numbytes = recv(sockfd, recvbuf, MAXDATASIZE - 1, 0)) == -1) { perror("recv idling"); fail++; break; } } else { fail++; break; } recvbuf[numbytes] = '\0'; DBGP2("imap_thread() received: %s", recvbuf); if (strlen(recvbuf) > 2) { unsigned long messages, recent = 0; char *buf = recvbuf; char force_check = 0; buf = (char*)strstr(buf, "EXISTS"); while (buf && strlen(buf) > 1 && strstr(buf + 1, "EXISTS")) { buf = (char*)strstr(buf + 1, "EXISTS"); } if (buf) { // back up until we reach '*' while (buf >= recvbuf && buf[0] != '*') { buf } if (sscanf(buf, "* %lu EXISTS\r\n", &messages) == 1) { std::lock_guard<std::mutex> lock(handle.mutex()); if (mail->messages != messages) { force_check = 1; mail->messages = messages; } } } buf = recvbuf; buf = (char*)strstr(buf, "RECENT"); while (buf && strlen(buf) > 1 && strstr(buf + 1, "RECENT")) { buf = (char*)strstr(buf + 1, "RECENT"); } if (buf) { // back up until we reach '*' while (buf >= recvbuf && buf[0] != '*') { buf } if (sscanf(buf, "* %lu RECENT\r\n", &recent) != 1) { recent = 0; } } /* * check if we got a FETCH from server, recent was * something other than 0, or we had a timeout */ buf = recvbuf; if (recent > 0 || (buf && strstr(buf, " FETCH ")) || fetchtimeout.tv_sec == 0 || force_check) { // re-check messages and unseen if (imap_command(sockfd, "DONE\r\n", recvbuf, "a5 OK")) { fail++; break; } strncpy(sendbuf, "a2 STATUS \"", MAXDATASIZE); strncat(sendbuf, mail->folder, MAXDATASIZE - strlen(sendbuf) - 1); strncat(sendbuf, "\" (MESSAGES UNSEEN)\r\n", MAXDATASIZE - 
strlen(sendbuf) - 1); if (imap_command(sockfd, sendbuf, recvbuf, "a2 OK")) { fail++; break; } if (imap_check_status(recvbuf, mail, handle)) { fail++; break; } strncpy(sendbuf, "a5 IDLE\r\n", MAXDATASIZE); if (imap_command(sockfd, sendbuf, recvbuf, "+ idling")) { fail++; break; } } /* * check if we got a BYE from server */ buf = recvbuf; if (buf && strstr(buf, "* BYE")) { // need to re-connect break; } } else { fail++; break; } imap_unseen_command(mail, old_unseen, old_messages); fail = 0; old_unseen = mail->unseen; old_messages = mail->messages; } if (fail) break; } else { strncpy(sendbuf, "a3 logout\r\n", MAXDATASIZE); if (send(sockfd, sendbuf, strlen(sendbuf), 0) == -1) { perror("send a3"); fail++; break; } fetchtimeout.tv_sec = 60; // 60 second timeout i guess fetchtimeout.tv_usec = 0; FD_ZERO(&fdset); FD_SET(sockfd, &fdset); res = select(sockfd + 1, &fdset, NULL, NULL, &fetchtimeout); if (res > 0) { if ((numbytes = recv(sockfd, recvbuf, MAXDATASIZE - 1, 0)) == -1) { perror("recv a3"); fail++; break; } } recvbuf[numbytes] = '\0'; DBGP2("imap_thread() received: %s", recvbuf); if (strstr(recvbuf, "a3 OK") == NULL) { NORM_ERR("IMAP logout failed: %s", recvbuf); fail++; break; } } } while (0); if ((fstat(sockfd, &stat_buf) == 0) && S_ISSOCK(stat_buf.st_mode)) { /* if a valid socket, close it */ close(sockfd); } if (handle.test(0)) { return; } } mail->unseen = 0; mail->messages = 0; } void print_imap_unseen(struct text_object *obj, char *p, int p_max_size) { struct mail_s *mail = (struct mail_s*)obj->data.opaque; if (!mail) return; ensure_mail_thread(mail, std::bind(imap_thread, std::placeholders::_1, std::placeholders::_2), "imap"); if (mail && mail->p_timed_thread) { std::lock_guard<std::mutex> lock(mail->p_timed_thread->mutex()); snprintf(p, p_max_size, "%lu", mail->unseen); } } void print_imap_messages(struct text_object *obj, char *p, int p_max_size) { struct mail_s *mail = (struct mail_s*)obj->data.opaque; if (!mail) return; ensure_mail_thread(mail, 
std::bind(imap_thread, std::placeholders::_1, std::placeholders::_2), "imap"); if (mail && mail->p_timed_thread) { std::lock_guard<std::mutex> lock(mail->p_timed_thread->mutex()); snprintf(p, p_max_size, "%lu", mail->messages); } } int pop3_command(int sockfd, const char *command, char *response, const char *verify) { struct timeval fetchtimeout; fd_set fdset; int res, numbytes = 0; if (send(sockfd, command, strlen(command), 0) == -1) { perror("send"); return -1; } fetchtimeout.tv_sec = 60; // 60 second timeout i guess fetchtimeout.tv_usec = 0; FD_ZERO(&fdset); FD_SET(sockfd, &fdset); res = select(sockfd + 1, &fdset, NULL, NULL, &fetchtimeout); if (res > 0) { if ((numbytes = recv(sockfd, response, MAXDATASIZE - 1, 0)) == -1) { perror("recv"); return -1; } } DBGP2("pop3_command() received: %s", response); response[numbytes] = '\0'; if (strstr(response, verify) == NULL) { return -1; } return 0; } static void pop3_thread(thread_handle &handle, struct mail_s *mail) { int sockfd, numbytes; char recvbuf[MAXDATASIZE]; char sendbuf[MAXDATASIZE]; char *reply; unsigned int fail = 0; unsigned long old_unseen = ULONG_MAX; struct stat stat_buf; struct hostent *he_res = 0; struct sockaddr_in their_addr; // connector's address information char resolved_host = 0; while (fail < mail->retries) { struct timeval fetchtimeout; int res; fd_set fdset; if (!resolved_host) { #ifdef <API key> int he_errno; struct hostent he; char hostbuff[2048]; if (gethostbyname_r(mail->host, &he, hostbuff, sizeof(hostbuff), &he_res, &he_errno)) { // get the host info NORM_ERR("POP3 gethostbyname_r: %s", hstrerror(h_errno)); fail++; break; } #else /* <API key> */ if ((he_res = gethostbyname(mail->host)) == NULL) { // get the host info herror("gethostbyname"); fail++; break; } #endif /* <API key> */ resolved_host = 1; } if (fail > 0) { NORM_ERR("Trying POP3 connection again for %s@%s (try %u/%u)", mail->user, mail->host, fail + 1, mail->retries); } do { if ((sockfd = socket(PF_INET, SOCK_STREAM, 0)) == -1) 
{ perror("socket"); fail++; break; } // host byte order their_addr.sin_family = AF_INET; // short, network byte order their_addr.sin_port = htons(mail->port); their_addr.sin_addr = *((struct in_addr *) he_res->h_addr); // zero the rest of the struct memset(&(their_addr.sin_zero), '\0', 8); if (connect(sockfd, (struct sockaddr *) &their_addr, sizeof(struct sockaddr)) == -1) { perror("connect"); fail++; break; } fetchtimeout.tv_sec = 60; // 60 second timeout i guess fetchtimeout.tv_usec = 0; FD_ZERO(&fdset); FD_SET(sockfd, &fdset); res = select(sockfd + 1, &fdset, NULL, NULL, &fetchtimeout); if (res > 0) { if ((numbytes = recv(sockfd, recvbuf, MAXDATASIZE - 1, 0)) == -1) { perror("recv"); fail++; break; } } else { NORM_ERR("POP3 connection failed: timeout\n"); fail++; break; } DBGP2("pop3_thread received: %s", recvbuf); recvbuf[numbytes] = '\0'; if (strstr(recvbuf, "+OK ") != recvbuf) { NORM_ERR("POP3 connection failed, probably not a POP3 server"); fail++; break; } strncpy(sendbuf, "USER ", MAXDATASIZE); strncat(sendbuf, mail->user, MAXDATASIZE - strlen(sendbuf) - 1); strncat(sendbuf, "\r\n", MAXDATASIZE - strlen(sendbuf) - 1); if (pop3_command(sockfd, sendbuf, recvbuf, "+OK ")) { fail++; break; } strncpy(sendbuf, "PASS ", MAXDATASIZE); strncat(sendbuf, mail->pass, MAXDATASIZE - strlen(sendbuf) - 1); strncat(sendbuf, "\r\n", MAXDATASIZE - strlen(sendbuf) - 1); if (pop3_command(sockfd, sendbuf, recvbuf, "+OK ")) { NORM_ERR("POP3 server login failed: %s", recvbuf); fail++; break; } strncpy(sendbuf, "STAT\r\n", MAXDATASIZE); if (pop3_command(sockfd, sendbuf, recvbuf, "+OK ")) { perror("send STAT"); fail++; break; } // now we get the data reply = recvbuf + 4; if (reply == NULL) { NORM_ERR("Error parsing POP3 response: %s", recvbuf); fail++; break; } else { std::lock_guard<std::mutex> lock(handle.mutex()); sscanf(reply, "%lu %lu", &mail->unseen, &mail->used); } strncpy(sendbuf, "QUIT\r\n", MAXDATASIZE); if (pop3_command(sockfd, sendbuf, recvbuf, "+OK")) { NORM_ERR("POP3 
logout failed: %s", recvbuf); fail++; break; } if (strlen(mail->command) > 1 && mail->unseen > old_unseen) { // new mail goodie if (system(mail->command) == -1) { perror("system()"); } } fail = 0; old_unseen = mail->unseen; } while (0); if ((fstat(sockfd, &stat_buf) == 0) && S_ISSOCK(stat_buf.st_mode)) { /* if a valid socket, close it */ close(sockfd); } if (handle.test(0)) { return; } } mail->unseen = 0; mail->used = 0; } void print_pop3_unseen(struct text_object *obj, char *p, int p_max_size) { struct mail_s *mail = (struct mail_s *)obj->data.opaque; if (!mail) return; ensure_mail_thread(mail, std::bind(pop3_thread, std::placeholders::_1, std::placeholders::_2), "pop3"); if (mail && mail->p_timed_thread) { std::lock_guard<std::mutex> lock(mail->p_timed_thread->mutex()); snprintf(p, p_max_size, "%lu", mail->unseen); } } void print_pop3_used(struct text_object *obj, char *p, int p_max_size) { struct mail_s *mail = (struct mail_s *)obj->data.opaque; if (!mail) return; ensure_mail_thread(mail, std::bind(pop3_thread, std::placeholders::_1, std::placeholders::_2), "pop3"); if (mail && mail->p_timed_thread) { std::lock_guard<std::mutex> lock(mail->p_timed_thread->mutex()); snprintf(p, p_max_size, "%.1f", mail->used / 1024.0 / 1024.0); } }
/* -*- c-basic-offset: 4 indent-tabs-mode: nil -*- vi:set ts=8 sts=4 sw=4: */

#ifndef <API key>
#define <API key>

#include "<API key>.h"
#include "base/Thread.h"

#include <set>

class WavFileReader;
class ProgressReporter;

/**
 * Audio file reader that decodes a WAV source either all at once on
 * construction (with progress reporting) or incrementally in a
 * background decode thread.
 *
 * NOTE(review): the class and base-class names were redacted in this
 * source; the notes below are based only on the members visible here.
 */
class <API key> : public <API key>
{
    Q_OBJECT

public:
    enum ResampleMode {
        ResampleAtOnce,   // resample the file on construction, with progress dialog
        ResampleThreaded  // resample in a background thread after construction
    };

    <API key>(FileSource source,
              ResampleMode resampleMode,
              CacheMode cacheMode,
              size_t targetRate = 0,
              ProgressReporter *reporter = 0);
    virtual ~<API key>();

    // Last error message; empty if no error has occurred.
    virtual QString getError() const { return m_error; }

    // Location string of the underlying file source.
    virtual QString getLocation() const { return m_source.getLocation(); }

    static void <API key>(std::set<QString> &extensions);
    static bool supportsExtension(QString ext);
    static bool supportsContentType(QString type);
    static bool supports(FileSource &source);

    // Decode progress value as tracked in m_completion.
    virtual int getDecodeCompletion() const { return m_completion; }

    // True while the background decode thread exists and is still running.
    virtual bool isUpdating() const {
        return m_decodeThread && m_decodeThread->isRunning();
    }

public slots:
    // Slot invoked when decoding is cancelled (e.g. via the progress reporter).
    void cancelled();

protected:
    FileSource m_source;         // the source being decoded
    QString m_path;              // local path of the source file
    QString m_error;             // last error message, if any
    bool m_cancelled;            // set when cancelled() fires
    size_t m_processed;          // amount processed so far — units not visible here, TODO confirm
    int m_completion;            // value returned by getDecodeCompletion()

    WavFileReader *m_original;   // reader for the original (undecoded) file
    ProgressReporter *m_reporter;

    void addBlock(const SampleBlock &frames);

    // Background worker that performs the threaded decode; calls back into
    // the owning reader through m_reader.
    class DecodeThread : public Thread
    {
    public:
        DecodeThread(<API key> *reader) : m_reader(reader) { }
        virtual void run();

    protected:
        <API key> *m_reader;
    };

    DecodeThread *m_decodeThread;
};

#endif
import com.teamdev.jxbrowser.chromium.Browser;
import com.teamdev.jxbrowser.chromium.BrowserContext;
import com.teamdev.jxbrowser.chromium.<API key>;

import java.io.File;

/**
 * By default all Browser instances with same BrowserContext share
 * cookies and cache data. This sample demonstrates how to create two isolated
 * Browser instances that don't share cookies and cache data.
 *
 * <p>Isolation is achieved by giving each BrowserContext its own user data
 * directory, so each underlying Chromium profile keeps separate storage.</p>
 *
 * <p>NOTE(review): several identifiers were redacted in this source; the
 * redacted parameter type is presumably {@code BrowserContextParams} — confirm
 * against the JxBrowser API.</p>
 */
public class <API key> {
    public static void main(String[] args) {
        // This Browser instance will store cookies and user data files in "user-data-dir-one" dir.
        // getAbsolutePath() resolves the directory relative to the current working directory.
        String <API key> = new File("user-data-dir-one").getAbsolutePath();
        Browser browserOne = new Browser(
                new BrowserContext(new <API key>(<API key>)));

        // This Browser instance will store cookies and user data files in "user-data-dir-two" dir.
        String <API key> = new File("user-data-dir-two").getAbsolutePath();
        Browser browserTwo = new Browser(
                new BrowserContext(new <API key>(<API key>)));

        // The browserOne and browserTwo will not see the cookies and cache data files of each other.
    }
}
package com.jeffbrower.tuple;

/**
 * An immutable tuple of seven elements.
 *
 * <p>Implements {@code Tuple<T1, T7>}, exposing the first and last elements
 * through {@link #first()} and {@link #last()}. All elements are public final
 * fields, so instances cannot be modified after construction.</p>
 */
public class Tuple7<T1, T2, T3, T4, T5, T6, T7> implements Tuple<T1, T7> {
    // The seven tuple elements, in positional order.
    public final T1 _1;
    public final T2 _2;
    public final T3 _3;
    public final T4 _4;
    public final T5 _5;
    public final T6 _6;
    public final T7 _7;

    /**
     * Creates a tuple holding the seven given values.
     * Null elements are permitted; no validation is performed.
     */
    public Tuple7(T1 _1, T2 _2, T3 _3, T4 _4, T5 _5, T6 _6, T7 _7) {
        this._1 = _1;
        this._2 = _2;
        this._3 = _3;
        this._4 = _4;
        this._5 = _5;
        this._6 = _6;
        this._7 = _7;
    }

    /** @return the first element of this tuple */
    @Override
    public final T1 first() {
        return _1;
    }

    /** @return the last (seventh) element of this tuple */
    @Override
    public final T7 last() {
        return _7;
    }
}
;
; Minicom-like program for MikeOS
;
; With this program and an appropriate serial (null modem) cable, you
; can log in to Linux machines. You will need your Linux box to establish
; a serial terminal in /etc/inittab, with something like this:
;
;	T0:2345:respawn:/sbin/getty/ -L ttyS0 9600 vt100
;
; Connect the serial cable to your MikeOS machine and Linux machine.
; Start MikeOS and run SERIAL.BIN, then start your Linux box with the
; above serial terminal entry. On the MikeOS machine, you will see a
; familiar Linux login prompt -- you're ready to go! Note that very few
; VT100 terminal features are implemented at present, so complicated
; programs (eg Emacs) will not display correctly.
;
; Press the F8 key to exit.


	BITS 16
	%INCLUDE "mikedev.inc"
	ORG 32768


start:
	; Ask the user before touching the serial hardware, since probing
	; a missing port can hang the machine (see warning strings below).
	mov ax, warnmsg_1
	mov bx, warnmsg_2
	mov cx, 0
	mov dx, 1
	call os_dialog_box

	cmp ax, 0
	je .proceed
	call os_clear_screen
	ret

.proceed:
	call os_clear_screen

	mov ax, 0			; 9600 baud mode
	call <API key>

	mov si, start_msg
	call os_print_string


main_loop:
	mov dx, 0			; Set port to COM1
	mov ax, 0
	mov ah, 03h			; Check COM1 status
	int 14h

	; BIOS INT 14h/AH=03h returns line status in AX; bit 8 indicates
	; "data ready". BT copies that bit into the carry flag.
	bt ax, 8			; Data received?
	jc received_byte

	mov ax, 0			; If not, have we something to send?
	call os_check_for_key

	cmp ax, 4200h			; F8 key pressed?
	je finish			; Quit if so

	cmp al, 0			; If no other key pressed, go back
	je main_loop

	call os_send_via_serial		; Otherwise send it
	jmp main_loop


received_byte:				; Print data received
	call os_get_via_serial

	cmp al, 1Bh			; 'Esc' character received?
	je esc_received

	mov ah, 0Eh			; Otherwise print char
	int 10h
	jmp main_loop


finish:
	mov si, finish_msg
	call os_print_string
	call os_wait_for_key
	ret


; Minimal VT100 escape-sequence parser: only ESC [ H, ESC [ J and
; ESC [ K are handled; anything else is echoed for debugging.
esc_received:
	call os_get_via_serial		; Get next character...

	cmp al, '['			; Is it a screen control code?
	jne main_loop

	mov bl, al			; Store for now

	call os_get_via_serial		; If control code, parse it

	cmp al, 'H'
	je near move_to_home
	cmp al, 'J'
	je near erase_to_bottom
	cmp al, 'K'
	je near <API key>

					; If it wasn't a control char that
					; we can parse yet, print the whole
					; thing for debugging purposes
	mov cl, al			; Store second char
	mov al, bl			; Get first
	mov ah, 0Eh			; Print them
	int 10h
	mov al, cl
	int 10h

	jmp main_loop


move_to_home:
	mov dx, 0
	call os_move_cursor
	jmp main_loop


erase_to_bottom:
	call os_get_cursor_pos
	push dx				; Store where we are

	call erase_sub			; First clear the rest of this line

	inc dh				; Move to start of next line
	mov dl, 0
	call os_move_cursor

	; INT 10h/AH=0Ah writes AL to the screen CX times without moving
	; the cursor, so each row is blanked with one call.
	mov ah, 0Ah			; Get ready to print 80 spaces
	mov al, ' '
	mov bx, 0
	mov cx, 80

.more:
	int 10h

	inc dh				; Next line...
	call os_move_cursor

	cmp dh, 25			; Reached bottom of screen?
	jne .more

	pop dx				; Put cursor back to where we started
	call os_move_cursor

	jmp main_loop


<API key>:
	call erase_sub
	jmp main_loop


; Blank from the current cursor column to the end of the line,
; leaving the cursor where it started.
erase_sub:
	call os_get_cursor_pos
	push dx				; Store where we are

	mov ah, 80			; Calculate how many spaces
	sub ah, dl			; we need to print...
	mov cx, 0			; And drop into CL
	mov cl, ah

	mov ah, 0Ah			; Print spaces CL number of times
	mov al, ' '
	mov bx, 0
	int 10h

	pop dx
	call os_move_cursor

	ret


	warnmsg_1 db 'Serial terminal program -- may lock up', 0
	warnmsg_2 db 'if you have no serial ports! Proceed?', 0

	start_msg db 'MikeOS minicom -- Press F8 to quit', 13, 10, 'Connecting via serial at 9600 baud...', 13, 10, 13, 10, 0
	finish_msg db 13, 10, 13, 10, 'Exiting MikeOS minicom; press a key to return to MikeOS', 13, 10, 0

;
/* * commandCoolDown.js * * Manage cooldowns for commands * * To use the cooldown in other scipts use the $.coolDown API */ (function() { var defaultCooldownTime = $.getSetIniDbNumber('cooldownSettings', 'defaultCooldownTime', 5), modCooldown = $.getSetIniDbBoolean('cooldownSettings', 'modCooldown', false), defaultCooldowns = {}, cooldowns = {}; $.raffleCommand = null; /* * @class Cooldown * * @param {String} command * @param {Number} seconds * @param {Boolean} isGlobal */ function Cooldown(command, seconds, isGlobal) { this.isGlobal = isGlobal; this.command = command; this.seconds = seconds; this.cooldowns = []; this.time = 0; } /* * @function loadCooldowns */ function loadCooldowns() { var commands = $.inidb.GetKeyList('cooldown', ''), json, i; for (i in commands) { json = JSON.parse($.inidb.get('cooldown', commands[i])); cooldowns[commands[i]] = new Cooldown(json.command, json.seconds, json.isGlobal.toString().equals('true')); } } /* * @function canIgnore * * @param {String} username * @param {Boolean} isMod * @return {Boolean} */ function canIgnore(username, isMod) { return (!modCooldown && isMod) || $.isAdmin(username); } /* * @function isSpecial * * @param {String} command * @return {Boolean} */ function isSpecial(command) { return command == 'bet' || command == 'tickets' || command == 'bid' || command == 'adventure' || command == $.raffleCommand; } /* * @function get * * @export $.coolDown * @param {String} command * @param {String} username * @param {Boolean} isMod * @return {Number} */ function get(command, username, isMod) { var cooldown = cooldowns[command]; if (isSpecial(command)) { if (command == 'adventure' && defaultCooldowns[command] !== undefined && defaultCooldowns[command] > $.systemTime()) { return defaultCooldowns[command]; } else { return 0; } } else { if (cooldown !== undefined) { if (cooldown.isGlobal) { if (cooldown.time > $.systemTime()) { return (canIgnore(username, isMod) ? 
0 : cooldown.time); } else { return set(command, true, cooldown.seconds, isMod); } } else { if (cooldown.cooldowns[username] !== undefined && cooldown.cooldowns[username] > $.systemTime()) { return (canIgnore(username, isMod) ? 0 : cooldown.cooldowns[username]); } else { return set(command, true, cooldown.seconds, isMod, username); } } } else { if (defaultCooldowns[command] !== undefined && defaultCooldowns[command] > $.systemTime()) { return (canIgnore(username, isMod) ? 0 : defaultCooldowns[command]); } else { return set(command, false, defaultCooldownTime, isMod); } } } } /* * @function getSecs * * @export $.coolDown * @param {String} command * @param {String} username * @return {Number} */ function getSecs(username, command) { var cooldown = cooldowns[command]; if (cooldown !== undefined) { if (cooldown.isGlobal) { if (cooldown.time > $.systemTime()) { return (cooldown.time - $.systemTime() > 1000 ? Math.floor(((cooldown.time - $.systemTime()) / 1000)) : 1); } else { return set(command, true, cooldown.seconds, isMod); } } else { if (cooldown.cooldowns[username] !== undefined && cooldown.cooldowns[username] > $.systemTime()) { return (cooldown.cooldowns[username] - $.systemTime() > 1000 ? Math.floor(((cooldown.cooldowns[username] - $.systemTime()) / 1000)) : 1); } } } else { if (defaultCooldowns[command] !== undefined && defaultCooldowns[command] > $.systemTime()) { return (defaultCooldowns[command] - $.systemTime() > 1000 ? 
Math.floor(((defaultCooldowns[command] - $.systemTime()) / 1000)) : 1); } else { return set(command, false, defaultCooldownTime, isMod); } } return 0; } /* * @function set * * @export $.coolDown * @param {String} command * @param {Boolean} hasCooldown * @param {Number} seconds * @param {Boolean} isMod * @param {String} username * @return {Number} */ function set(command, hasCooldown, seconds, isMod, username) { seconds = ((parseInt(seconds) * 1e3) + $.systemTime()); if (hasCooldown) { if (username === undefined) { cooldowns[command].time = seconds; } else { cooldowns[command].cooldowns[username] = seconds; } } else { defaultCooldowns[command] = seconds; } return 0; } /* * @function add * * @export $.coolDown * @param {String} command * @param {Number} seconds * @param {Boolean} isGlobal */ function add(command, seconds, isGlobal) { if (cooldowns[command] === undefined) { cooldowns[command] = new Cooldown(command, seconds, isGlobal); $.inidb.set('cooldown', command, JSON.stringify({command: String(command), seconds: String(seconds), isGlobal: String(isGlobal)})); } else { cooldowns[command].isGlobal = isGlobal; cooldowns[command].seconds = seconds; $.inidb.set('cooldown', command, JSON.stringify({command: String(command), seconds: String(seconds), isGlobal: String(isGlobal)})); } } /* * @function remove * * @export $.coolDown * @param {String} command */ function remove(command) { $.inidb.del('cooldown', command); if (cooldowns[command] !== undefined) { delete cooldowns[command]; } } /* * @function clear * * @export $.coolDown * @param {String} command */ function clear(command) { if (cooldowns[command] !== undefined) { cooldowns[command].time = 0; } } /* * @event command */ $.bind('command', function(event) { var sender = event.getSender(), command = event.getCommand(), args = event.getArgs(), action = args[0], subAction = args[1], actionArgs = args[2]; /* * @commandpath coolcom [command] [seconds] [type (global / user)] - Sets a cooldown for a command, default is 
global. Using -1 for the seconds removes the cooldown. */ if (command.equalsIgnoreCase('coolcom')) { if (action === undefined || isNaN(parseInt(subAction))) { $.say($.whisperPrefix(sender) + $.lang.get('cooldown.coolcom.usage')); return; } actionArgs = (actionArgs !== undefined && actionArgs == 'user' ? false : true); action = action.replace('!', '').toLowerCase(); subAction = parseInt(subAction); if (subAction > -1) { $.say($.whisperPrefix(sender) + $.lang.get('cooldown.coolcom.set', action, subAction)); add(action, subAction, actionArgs); } else { $.say($.whisperPrefix(sender) + $.lang.get('cooldown.coolcom.remove', action)); remove(action); } clear(command); return; } if (command.equalsIgnoreCase('cooldown')) { if (action === undefined) { $.say($.whisperPrefix(sender) + $.lang.get('cooldown.cooldown.usage')); return; } /* * @commandpath cooldown togglemoderators - Toggles if moderators ignore command cooldowns. */ if (action.equalsIgnoreCase('togglemoderators')) { modCooldown = !modCooldown; $.setIniDbBoolean('cooldownSettings', 'modCooldown', modCooldown); $.say($.whisperPrefix(sender) + $.lang.get('cooldown.set.togglemodcooldown', (modCooldown ? $.lang.get('common.enabled') : $.lang.get('common.disabled')))); return; } /* * @commandpath cooldown setdefault [seconds] - Sets a default global cooldown for commands without a cooldown. 
*/ if (action.equalsIgnoreCase('setdefault')) { if (isNaN(parseInt(subAction))) { $.say($.whisperPrefix(sender) + $.lang.get('cooldown.default.usage')); return; } defaultCooldownTime = parseInt(subAction); $.setIniDbNumber('cooldownSettings', 'defaultCooldownTime', defaultCooldownTime); $.say($.whisperPrefix(sender) + $.lang.get('cooldown.default.set', defaultCooldownTime)); } } }); /* * @event initReady */ $.bind('initReady', function() { $.registerChatCommand('./core/commandCoolDown.js', 'coolcom', 1); $.registerChatCommand('./core/commandCoolDown.js', 'cooldown', 1); $.<API key>('cooldown', 'togglemoderators', 1); $.<API key>('cooldown', 'setdefault', 1); loadCooldowns(); }); /* * @event <API key> */ $.bind('<API key>', function(event) { if (event.getScript().equalsIgnoreCase('./core/commandCoolDown.js')) { if (event.getArgs()[0] == 'add') { add(event.getArgs()[1], event.getArgs()[2], event.getArgs()[3].equals('true')); } else { remove(event.getArgs()[1]); } } }); /* Export to the $. API */ $.coolDown = { remove: remove, clear: clear, get: get, set: set, add: add, getSecs: getSecs }; })();
#ifndef TOOLSETTINGSDOCK_H
#define TOOLSETTINGSDOCK_H

#include "tools/tool.h"
#include "tools/toolproperties.h"

#include <QDockWidget>

class QStackedWidget;
class Color_Dialog;

namespace tools {
	class ToolSettings;
	class AnnotationSettings;
	class ColorPickerSettings;
	class <API key>;
	class FillSettings;
	class SelectionSettings;
}

namespace paintcore {
	class Brush;
}

namespace widgets {
	class ToolSlotButton;
}

namespace docks {

/**
 * @brief Tool settings window
 * A dock widget that displays settings for the currently selected tool.
 */
class ToolSettings : public QDockWidget {
Q_OBJECT
public:
	//! Number of quick tool change slots
	static const int QUICK_SLOTS = 5;

	ToolSettings(QWidget *parent=0);
	// Non-copyable: this dock owns its settings pages and color dialogs.
	ToolSettings(const ToolSettings& ts) = delete;
	ToolSettings& operator=(const ToolSettings& ts) = delete;
	~ToolSettings();

	//! Get a brush with the current settings
	paintcore::Brush getBrush() const;

	//! Get the annotation settings page
	tools::AnnotationSettings *<API key>() { return _textsettings; }

	//! Get the color picker page
	tools::ColorPickerSettings *<API key>() { return _pickersettings; }

	//! Get the laser pointer settings page
	tools::<API key> *<API key>() { return _lasersettings; }

	//! Get the (rectangular) selection settings page
	tools::SelectionSettings *<API key>() { return _selectionsettings; }

	//! Get the second selection settings page
	// NOTE(review): returns the other SelectionSettings member (name redacted);
	// presumably the free-form/polygon selection page — confirm against the .cpp.
	tools::SelectionSettings *<API key>() { return <API key>; }

	//! Get flood fill settings page
	tools::FillSettings *getFillSettings() { return _fillsettings; }

	//! Get the current foreground color
	QColor foregroundColor() const;

	//! Get the current background color
	QColor backgroundColor() const;

	//! Get the currently selected tool slot
	int currentToolSlot() const;

	//! Get the currently selected tool
	tools::Tool::Type currentTool() const;

	//! Load tool related settings
	void readSettings();

	//! Save tool related settings
	void saveSettings();

public slots:
	//! Set the tool for which settings are shown
	void setTool(tools::Tool::Type tool);

	//! Quick adjust current tool
	void quickAdjustCurrent1(qreal adjustment);

	//! Select the tool previously set with setTool
	void setPreviousTool();

	//! Set the currently active quick tool slot
	void setToolSlot(int i);

	//! Select the tool slot previously set with setToolSlot
	void setPreviousToolSlot();

	//! Set foreground color
	void setForegroundColor(const QColor& color);

	//! Pop up a dialog for changing the foreground color
	void <API key>();

	//! Pop up a dialog for changing the foreground color
	// NOTE(review): name redacted; given the _fgdialog/_bgdialog pair below, this
	// slot is probably the background-color dialog — confirm against the .cpp.
	void <API key>();

	//! Set background color
	void setBackgroundColor(const QColor& color);

	//! Swap current foreground and background colors
	void <API key>();

	//! Switch tool when eraser is brought near the tablet
	void eraserNear(bool near);

	/**
	 * @brief Change the eraser override tool if it matches the given
	 *
	 * This is used to prevent a disabled tool from being reselected
	 * after the tablet eraser is lifted.
	 *
	 * The chain of events is:
	 * 1. User selects a tool that can be disabled (annotation or laser pointer)
	 * 2. User places the tablet eraser near the tablet surface, triggering eraser override tool
	 * 3. User disables annotations/lasers
	 * 4. User lifts the eraser, causing the previous tool (which is now disabled) to be reselected.
	 *
	 * @param tool the tool to disable
	 */
	void <API key>(tools::Tool::Type tool);

	//! Query current tool's subpixel mode and emit subpixelModeChanged
	void updateSubpixelMode();

signals:
	//! This signal is emitted when the current tool changes its size
	void sizeChanged(int size);

	//! This signal is emitted when tool subpixel drawing mode is changed
	void subpixelModeChanged(bool subpixel);

	//! Current foreground color selection changed
	void <API key>(const QColor &color);

	//! Current background color selection changed
	void <API key>(const QColor &color);

	//! Currently active tool was changed
	void toolChanged(tools::Tool::Type tool);

private:
	tools::ToolSettings *getToolSettingsPage(tools::Tool::Type tool);
	void updateToolSlot(int i, bool typeChanged);
	void selectTool(tools::Tool::Type tool);
	void selectToolSlot(int i);
	void saveCurrentTool();

	// Per-tool settings pages, shown one at a time in the _widgets stack.
	tools::ToolSettings *_pensettings;
	tools::ToolSettings *_brushsettings;
	tools::ToolSettings *_smudgesettings;
	tools::ToolSettings *_erasersettings;
	tools::ColorPickerSettings *_pickersettings;
	tools::ToolSettings *_linesettings;
	tools::ToolSettings *_rectsettings;
	tools::ToolSettings *_ellipsesettings;
	tools::FillSettings *_fillsettings;
	tools::AnnotationSettings *_textsettings;
	tools::SelectionSettings *_selectionsettings;
	tools::SelectionSettings *<API key>;
	tools::<API key> *_lasersettings;

	// Currently shown settings page.
	tools::ToolSettings *_currenttool;
	QStackedWidget *_widgets;

	// Quick tool-change slot buttons and the active slot index.
	widgets::ToolSlotButton *_quickslot[QUICK_SLOTS];
	int _currentQuickslot;

	// Tool to restore when the tablet eraser is lifted (see eraserNear);
	// _eraserActive tracks whether the override is currently engaged.
	int _eraserOverride;
	bool _eraserActive;

	// Saved per-slot tool properties (persisted via read/saveSettings).
	QList<tools::ToolsetProperties> _toolprops;

	// Previous tool/slot for setPreviousTool()/setPreviousToolSlot().
	tools::Tool::Type _previousTool;
	int _previousToolSlot;

	QColor _foreground;
	QColor _background;
	Color_Dialog *_fgdialog, *_bgdialog;
};

}

#endif
namespace WorldServer.Game.ObjectDefines
{
    /// <summary>
    /// Plain data holder describing one entry on a character's action bar.
    /// </summary>
    public class ActionButton
    {
        /// <summary>Action identifier bound to the button
        /// (presumably a spell/macro/item id — confirm against the packet handler).</summary>
        public uint Action;

        /// <summary>Action-bar slot index the button occupies.</summary>
        public byte SlotId;

        /// <summary>Specialization group the button belongs to
        /// (assumption based on the name — verify against callers).</summary>
        public byte SpecGroup;
    }
}
#!/bin/bash

# This file is part of the Pi Entertainment System (PES).
#
# PES provides an interactive GUI for games console emulators
# and is designed to work on the Raspberry Pi.
#
# PES is free software: you can redistribute it and/or modify
# (at your option) any later version.
#
# PES is distributed in the hope that it will be useful,
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the

# Installs the "rasum" tool: clones the repository into $buildDir,
# builds it, and installs the binary into $pesDir/bin.
# Relies on helper functions (header/run/rmSourceDir/checkDir/checkFile)
# and the $buildDir/$pesDir variables provided by common/functions.sh.

# Resolve the shared helper library relative to this script's location.
# $() instead of legacy backticks; expansions quoted to survive paths with spaces.
functions=$(realpath "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../common/functions.sh")
source "$functions" || exit 1

header "Installing rasum"

run cd "$buildDir"
rmSourceDir rasum
run git clone https://github.com/<API key>/rasum
checkDir rasum
# Use the run wrapper here too (matches "run cd $buildDir" above) so a
# failed cd aborts instead of building in the wrong directory.
run cd rasum
run git submodule init
run git submodule update
run make
run checkFile rasum
run sudo install -D -m 0755 -v rasum "$pesDir/bin/rasum"
from test_basics import setUp, app


def test_visit_login():
    """The login page serves a page containing the login form."""
    response = app.get('/mod/login')
    assert "<form name=\"login\"" in response


def test_login():
    """Submitting valid admin credentials results in a logged-in session."""
    response = app.get('/mod/login')
    form = response.form
    form['username'] = 'admin'
    form['password'] = 'adminadmin1'
    result = form.submit()
    assert "You are now logged in" in result
#include "kicadmodule2svg.h" #include "../utils/textutils.h" #include "../debugdialog.h" #include "../viewlayer.h" #include "../fsvgrenderer.h" #include <QFile> #include <QFileInfo> #include <QTextStream> #include <QObject> #include <QDomDocument> #include <QDomElement> #include <QDateTime> #include <qmath.h> #include <limits> #define KicadSilkscreenTop 21 #define <API key> 20 // TODO: // non-centered drill holes? // trapezoidal pads (shape delta may or may not be a separate issue)? // non-copper holes? // find true bounding box of arcs instead of using the whole circle double checkStrokeWidth(double w) { if (w >= 0) return w; DebugDialog::debug("stroke width < 0"); return 0; } KicadModule2Svg::KicadModule2Svg() : Kicad2Svg() { } QStringList KicadModule2Svg::listModules(const QString & filename) { QStringList modules; QFile file(filename); if (!file.open(QFile::ReadOnly)) return modules; QTextStream textStream(&file); bool gotIndex = false; while (true) { QString line = textStream.readLine(); if (line.isNull()) break; if (line.compare("$INDEX") == 0) { gotIndex = true; break; } } if (!gotIndex) return modules; while (true) { QString line = textStream.readLine(); if (line.isNull()) break; if (line.compare("$EndINDEX") == 0) { return modules; } modules.append(line); } modules.clear(); return modules; } QString KicadModule2Svg::convert(const QString & filename, const QString & moduleName, bool allowPadsAndPins) { <API key> = 0; initLimits(); QFile file(filename); if (!file.open(QFile::ReadOnly)) { throw QObject::tr("unable to open %1").arg(filename); } QString text; QTextStream textStream(&file); QString metadata = makeMetadata(filename, "module", moduleName); bool gotModule = false; while (true) { QString line = textStream.readLine(); if (line.isNull()) { break; } if (line.contains("$MODULE") && line.contains(moduleName, Qt::CaseInsensitive)) { gotModule = true; break; } } if (!gotModule) { throw QObject::tr("footprint %1 not found in 
%2").arg(moduleName).arg(filename); } bool gotT0; QString line; while (true) { line = textStream.readLine(); if (line.isNull()) { throw QObject::tr("unexpected end of file in footprint %1 in file %2").arg(moduleName).arg(filename); } if (line.startsWith("T0") || line.startsWith("DS") || line.startsWith("DA") || line.startsWith("DC")) { gotT0 = true; break; } else if (line.startsWith("Cd")) { metadata += m_comment.arg(TextUtils::<API key>(TextUtils::escapeAnd(line.remove(0,3)))); } else if (line.startsWith("Kw")) { QStringList keywords = line.split(" "); for (int i = 1; i < keywords.count(); i++) { metadata += m_attribute.arg("keyword").arg(TextUtils::<API key>(TextUtils::escapeAnd(keywords[i]))); } } } metadata += endMetadata(); if (!gotT0) { throw QObject::tr("unexpected format (1) in %1 from %2").arg(moduleName).arg(filename); } while (line.startsWith("T")) { line = textStream.readLine(); if (line.isNull()) { throw QObject::tr("unexpected end of file in footprint %1 in file %2").arg(moduleName).arg(filename); } } bool done = false; QString copper0; QString copper1; QString silkscreen0; QString silkscreen1; while (true) { if (line.startsWith("$PAD")) break; if (line.startsWith("$EndMODULE")) { done = true; break; } int layer = 0; QString svgElement; if (line.startsWith("DS")) { layer = drawDSegment(line, svgElement); } else if (line.startsWith("DA")) { layer = drawDArc(line, svgElement); } else if (line.startsWith("DC")) { layer = drawDCircle(line, svgElement); } switch (layer) { case KicadSilkscreenTop: silkscreen1 += svgElement; break; case <API key>: silkscreen0 += svgElement; break; default: break; } line = textStream.readLine(); if (line.isNull()) { throw QObject::tr("unexpected end of file in footprint %1 in file %2").arg(moduleName).arg(filename); } } if (!done) { QList<int> numbers; for (int i = 0; i < 512; i++) { numbers << i; } int pads = 0; int pins = 0; while (!done) { try { QString pad; PadLayer padLayer = convertPad(textStream, pad, numbers); switch 
(padLayer) { case ToCopper0: copper0 += pad; pins++; break; case ToCopper1: copper1 += pad; pads++; break; default: break; } } catch (const QString & msg) { DebugDialog::debug(QString("kicad pad %1 conversion failed in %2: %3").arg(moduleName).arg(filename).arg(msg)); } while (true) { line = textStream.readLine(); if (line.isNull()) { throw QObject::tr("unexpected end of file in footprint %1 in file %2").arg(moduleName).arg(filename); } if (line.contains("$SHAPE3D")) { done = true; break; } if (line.contains("$EndMODULE")) { done = true; break; } if (line.contains("$PAD")) { break; } } } if (!allowPadsAndPins && pins > 0 && pads > 0) { throw QObject::tr("Sorry, Fritzing can't yet handle both pins and pads together (in %1 in %2)").arg(moduleName).arg(filename); } } if (!copper0.isEmpty()) { copper0 = offsetMin("\n<g id='copper0'><g id='copper1'>" + copper0 + "</g></g>\n"); } if (!copper1.isEmpty()) { copper1 = offsetMin("\n<g id='copper1'>" + copper1 + "</g>\n"); } if (!silkscreen1.isEmpty()) { silkscreen1 = offsetMin("\n<g id='silkscreen'>" + silkscreen1 + "</g>\n"); } if (!silkscreen0.isEmpty()) { silkscreen0 = offsetMin("\n<g id='silkscreen0'>" + silkscreen0 + "</g>\n"); } QString svg = TextUtils::makeSVGHeader(10000, 10000, m_maxX - m_minX, m_maxY - m_minY) + m_title + m_description + metadata + copper0 + copper1 + silkscreen0 + silkscreen1 + "</svg>"; return svg; } int KicadModule2Svg::drawDCircle(const QString & ds, QString & circle) { // DC Xcentre Ycentre Xend Yend width layer QStringList params = ds.split(" "); if (params.count() < 7) return -1; int cx = params.at(1).toInt(); int cy = params.at(2).toInt(); int x2 = params.at(3).toInt(); int y2 = params.at(4).toInt(); double radius = qSqrt((cx - x2) * (cx - x2) + (cy - y2) * (cy - y2)); int w = params.at(5).toInt(); double halfWidth = w / 2.0; checkXLimit(cx + radius + halfWidth); checkXLimit(cx - radius - halfWidth); checkYLimit(cy + radius + halfWidth); checkYLimit(cy - radius - halfWidth); int layer = 
params.at(6).toInt(); circle = QString("<circle cx='%1' cy='%2' r='%3' stroke-width='%4' stroke='white' fill='none' />") .arg(cx) .arg(cy) .arg(radius) .arg(checkStrokeWidth(w)); return layer; } int KicadModule2Svg::drawDSegment(const QString & ds, QString & line) { // DS Xstart Ystart Xend Yend Width Layer QStringList params = ds.split(" "); if (params.count() < 7) return -1; int x1 = params.at(1).toInt(); int y1 = params.at(2).toInt(); int x2 = params.at(3).toInt(); int y2 = params.at(4).toInt(); checkXLimit(x1); checkXLimit(x2); checkYLimit(y1); checkYLimit(y2); int layer = params.at(6).toInt(); line = QString("<line x1='%1' y1='%2' x2='%3' y2='%4' stroke-width='%5' stroke='white' fill='none' />") .arg(x1) .arg(y1) .arg(x2) .arg(y2) .arg(checkStrokeWidth(params.at(5).toDouble())); return layer; } int KicadModule2Svg::drawDArc(const QString & ds, QString & arc) { //DA x0 y0 x1 y1 angle width layer QStringList params = ds.split(" "); if (params.count() < 8) return -1; int cx = params.at(1).toInt(); int cy = params.at(2).toInt(); int x2 = params.at(3).toInt(); int y2 = params.at(4).toInt(); int width = params.at(6).toInt(); double diffAngle = (params.at(5).toInt() % 3600) / 10.0; double radius = qSqrt((cx - x2) * (cx - x2) + (cy - y2) * (cy - y2)); double endAngle = asin((y2 - cy) / radius); if (x2 < cx) { endAngle += M_PI; } double startAngle = endAngle + (diffAngle * M_PI / 180.0); double x1 = (radius * cos(startAngle)) + cx; double y1 = (radius * sin(startAngle)) + cy; // TODO: figure out bounding box for circular arc and set min and max accordingly /* You have radius R, start angle S, end angle T, and I'll assume that the arc is swept counterclockwise from S to T. start.x = R * cos(S) start.y = R * sin(S) end.x = R * cos(T) end.y = R * sin(T) Determine the axis crossings by analyzing the start and end angles. For discussion sake, I'll describe angles using degrees. Provide a function, wrap(angle), that returns an angle in the range [0 to 360). 
cross0 = wrap(S) > wrap(T) cross90 = wrap(S-90) > wrap(T-90) cross180 = wrap(S-180) > wrap(T-180) cross270 = wrap(S-270) > wrap(T-270) Now the axis aligned bounding box is defined by: right = cross0 ? +R : max(start.x, end.x) top = cross90 ? +R : max(start.y, end.y) left = cross180 ? -R : min(start.x, end.x) bottom = cross270 ? -R : min(start.y, end.y) */ checkXLimit(cx + radius); checkXLimit(cx - radius); checkYLimit(cy + radius); checkYLimit(cy - radius); int layer = params.at(7).toInt(); arc = QString("<path stroke-width='%1' stroke='white' d='M%2,%3a%4,%5 0 %6,%7 %8,%9' fill='none' />") .arg(checkStrokeWidth(width / 2.0)) .arg(x1) .arg(y1) .arg(radius) .arg(radius) .arg(qAbs(diffAngle) >= 180 ? 1 : 0) .arg(diffAngle > 0 ? 0 : 1) .arg(x2 - x1) .arg(y2 - y1); return layer; } KicadModule2Svg::PadLayer KicadModule2Svg::convertPad(QTextStream & stream, QString & pad, QList<int> & numbers) { PadLayer padLayer = UnableToTranslate; QStringList padStrings; while (true) { QString line = stream.readLine(); if (line.isNull()) { throw QObject::tr("unexpected end of file"); } if (line.contains("$EndPAD")) { break; } padStrings.append(line); } QString shape; QString drill; QString attributes; QString position; foreach (QString string, padStrings) { if (string.startsWith("Sh")) { shape = string; } else if (string.startsWith("Po")) { position = string; } else if (string.startsWith("At")) { attributes = string; } else if (string.startsWith("Dr")) { drill = string; } } if (drill.isEmpty()) { throw QObject::tr("pad missing drill"); } if (attributes.isEmpty()) { throw QObject::tr("pad missing attributes"); } if (position.isEmpty()) { throw QObject::tr("pad missing position"); } if (shape.isEmpty()) { throw QObject::tr("pad missing shape"); } QStringList positionStrings = position.split(" "); if (positionStrings.count() < 3) { throw QObject::tr("position missing params"); } int posX = positionStrings.at(1).toInt(); int posY = positionStrings.at(2).toInt(); QStringList drillStrings = 
drill.split(" "); if (drillStrings.count() < 4) { throw QObject::tr("drill missing params"); } int drillX = drillStrings.at(1).toInt(); int drillXOffset = drillStrings.at(2).toInt(); int drillYOffset = drillStrings.at(3).toInt(); int drillY = drillX; if (drillXOffset != 0 || drillYOffset != 0) { throw QObject::tr("drill offset not implemented"); } if (drillStrings.count() > 4) { if (drillStrings.at(4) == "O") { if (drillStrings.count() < 7) { throw QObject::tr("drill missing ellipse params"); } drillY = drillStrings.at(6).toInt(); } } QStringList attributeStrings = attributes.split(" "); if (attributeStrings.count() < 4) { throw QObject::tr("attributes missing params"); } bool ok; int layerMask = attributeStrings.at(3).toInt(&ok, 16); if (!ok) { throw QObject::tr("bad layer mask parameter"); } QString padType = attributeStrings.at(1); if (padType == "MECA") { // seems to be the same thing padType = "STD"; } if (padType == "STD") { padLayer = ToCopper0; } else if (padType == "SMD") { padLayer = ToCopper1; } else if (padType == "CONN") { if (layerMask & 1) { padLayer = ToCopper0; } else { padLayer = ToCopper1; } } else if (padType == "HOLE") { padLayer = ToCopper0; } else { throw QObject::tr("Sorry, can't handle pad type %1").arg(padType); } QStringList shapeStrings = shape.split(" "); if (shapeStrings.count() < 8) { throw QObject::tr("pad shape missing params"); } QString padName = unquote(shapeStrings.at(1)); int padNumber = padName.toInt(&ok) - 1; if (!ok) { padNumber = padName.isEmpty() ? 
-1 : numbers.takeFirst(); //DebugDialog::debug(QString("name:%1 padnumber %2").arg(padName).arg(padNumber)); } else { numbers.removeOne(padNumber); } QString shapeIdentifier = shapeStrings.at(2); int xSize = shapeStrings.at(3).toInt(); int ySize = shapeStrings.at(4).toInt(); if (ySize <= 0) { DebugDialog::debug(QString("ySize is zero %1").arg(padName)); ySize = xSize; } if (xSize <= 0) { throw QObject::tr("pad shape size is invalid"); } int xDelta = shapeStrings.at(5).toInt(); int yDelta = shapeStrings.at(6).toInt(); int orientation = shapeStrings.at(7).toInt(); if (shapeIdentifier == "T") { throw QObject::tr("trapezoidal pads not implemented"); // eventually polygon? } if (xDelta != 0 || yDelta != 0) { // note: so far, all cases of non-zero delta go with shape "T" throw QObject::tr("shape delta not implemented"); } if (padType == "HOLE") { if (shapeIdentifier != "C") { throw QObject::tr("non-circular holes not implemented"); } if (drillX == xSize) { throw QObject::tr("non-copper holes not implemented"); } } if (shapeIdentifier == "C") { checkLimits(posX, xSize, posY, ySize); pad += drawCPad(posX, posY, xSize, ySize, drillX, drillY, padName, padNumber, padType, padLayer); } else if (shapeIdentifier == "R") { checkLimits(posX, xSize, posY, ySize); pad += drawRPad(posX, posY, xSize, ySize, drillX, drillY, padName, padNumber, padType, padLayer); } else if (shapeIdentifier == "O") { checkLimits(posX, xSize, posY, ySize); QString id = getID(padNumber, padLayer); pad += QString("<g %1 connectorname='%2'>") .arg(id).arg(padName) + drawOblong(posX, posY, xSize, ySize, drillX, drillY, padType, padLayer) + "</g>"; } else { throw QObject::tr("unable to handle pad shape %1").arg(shapeIdentifier); } if (orientation != 0) { if (orientation < 0) { orientation = (orientation % 3600) + 3600; } orientation = 3600 - (orientation % 3600); QTransform t = QTransform().translate(-posX, -posY) * QTransform().rotate(orientation / 10.0) * QTransform().translate(posX, posY); pad = 
TextUtils::svgTransform(pad, t, true, QString("_x='%1' _y='%2' _r='%3'").arg(posX).arg(posY).arg(orientation / 10.0)); } return padLayer; } QString KicadModule2Svg::drawVerticalOblong(int posX, int posY, double xSize, double ySize, int drillX, int drillY, const QString & padType, KicadModule2Svg::PadLayer padLayer) { QString color = getColor(padLayer); double rad = xSize / 4.0; QString bot; if (drillX == drillY) { bot = QString("<path d='M%1,%2a%3,%3 0 0 1 %4,0' fill='%5' stroke-width='0' />") .arg(posX - rad - rad) .arg(posY - (ySize / 2.0) + (xSize / 2.0)) .arg(rad * 2) .arg(rad * 4) .arg(color); bot += QString("<path d='M%1,%2a%3,%3 0 1 1 %4,0' fill='%5' stroke-width='0' />") .arg(posX + rad + rad) .arg(posY + (ySize / 2.0) - (xSize / 2.0)) .arg(rad * 2) .arg(-rad * 4) .arg(color); } else { double w = (ySize - drillY) / 2.0; double newrad = rad - w / 4; bot = QString("<g id='oblong' stroke-width='%1'>").arg(checkStrokeWidth(drillX)); bot += QString("<path d='M%1,%2a%3,%3 0 0 1 %4,0' fill='none' stroke='%5' stroke-width='%6' />") .arg(posX - rad - rad + (w / 2)) .arg(posY - (ySize / 2.0) + (xSize / 2.0)) .arg(newrad * 2) .arg(newrad * 4) .arg(color) .arg(checkStrokeWidth(w)); bot += QString("<path d='M%1,%2a%3,%3 0 1 1 %4,0' fill='none' stroke='%5' stroke-width='%6' />") .arg(posX + rad + rad - (w / 2)) .arg(posY + (ySize / 2.0) - (xSize / 2.0)) .arg(newrad * 2) .arg(-newrad * 4) .arg(color) .arg(checkStrokeWidth(w)); bot += QString("<line fill='none' stroke-width='0' x1='%1' y1='%2' x2='%3' y2='%4' />") .arg(posX).arg(posY - (ySize / 2.0) + (xSize / 2.0)).arg(posX).arg(posY + (ySize / 2.0) - (xSize / 2.0)); bot += "</g>"; } QString middle; if (padType == "SMD") { middle = QString("<rect x='%1' y='%2' width='%3' height='%4' stroke-width='0' fill='%5' />") .arg(posX - (xSize / 2.0)) .arg(posY - (ySize / 2.0) + (xSize / 2.0)) .arg(xSize) .arg(ySize - xSize) .arg(color); } else { if (drillX == drillY) { middle = QString("<circle fill='none' cx='%1' cy='%2' r='%3' 
stroke-width='%4' stroke='%5' />") .arg(posX) .arg(posY) .arg((qMin(xSize, ySize) / 2.0) - (drillX / 4.0)) .arg(checkStrokeWidth(drillX / 2.0)) .arg(color); } middle += QString("<line x1='%1' y1='%2' x2='%1' y2='%3' fill='none' stroke-width='%4' stroke='%5' />") .arg(posX - (xSize / 2.0) + (drillX / 4.0)) .arg(posY - (ySize / 2.0) + (xSize / 2.0)) .arg(posY + (ySize / 2.0) - (xSize / 2.0)) .arg(checkStrokeWidth(drillX / 2.0)) .arg(color); middle += QString("<line x1='%1' y1='%2' x2='%1' y2='%3' fill='none' stroke-width='%4' stroke='%5' />") .arg(posX + (xSize / 2.0) - (drillX / 4.0)) .arg(posY - (ySize / 2.0) + (xSize / 2.0)) .arg(posY + (ySize / 2.0) - (xSize / 2.0)) .arg(checkStrokeWidth(drillX / 2.0)) .arg(color); } return middle + bot; } QString KicadModule2Svg::<API key>(int posX, int posY, double xSize, double ySize, int drillX, int drillY, const QString & padType, KicadModule2Svg::PadLayer padLayer) { QString color = getColor(padLayer); double rad = ySize / 4.0; QString bot; if (drillX == drillY) { bot = QString("<path d='M%1,%2a%3,%3 0 0 0 0,%4' fill='%5' stroke-width='0' />") .arg(posX - (xSize / 2.0) + (ySize / 2.0)) .arg(posY - rad - rad) .arg(rad * 2) .arg(rad * 4) .arg(color); bot += QString("<path d='M%1,%2a%3,%3 0 1 0 0,%4' fill='%5' stroke-width='0' />") .arg(posX + (xSize / 2.0) - (ySize / 2.0)) .arg(posY + rad + rad) .arg(rad * 2) .arg(-rad * 4) .arg(color); } else { double w = (xSize - drillX) / 2.0; double newrad = rad - w / 4; bot = QString("<g id='oblong' stroke-width='%1'>").arg(checkStrokeWidth(drillY)); bot += QString("<path d='M%1,%2a%3,%3 0 0 0 0,%4' fill='none' stroke='%5' stroke-width='%6' />") .arg(posX - (xSize / 2.0) + (ySize / 2.0)) .arg(posY - rad - rad + (w / 2)) .arg(newrad * 2) .arg(newrad * 4) .arg(color) .arg(checkStrokeWidth(w)); bot += QString("<path d='M%1,%2a%3,%3 0 1 0 0,%4' fill='none' stroke='%5' stroke-width='%6' />") .arg(posX + (xSize / 2.0) - (ySize / 2.0)) .arg(posY + rad + rad - (w / 2)) .arg(newrad * 2) 
.arg(-newrad * 4) .arg(color) .arg(checkStrokeWidth(w)); bot += QString("<line fill='none' stroke-width='0' x1='%1' y1='%2' x2='%3' y2='%4' />") .arg(posX - (xSize / 2.0) + (ySize / 2.0)).arg(posY).arg(posX + (xSize / 2.0) - (ySize / 2.0)).arg(posY); bot += "</g>"; } QString middle; bool gotID = false; if (padType == "SMD") { middle = QString("<rect x='%1' y='%2' width='%3' height='%4' stroke-width='0' fill='%5' />") .arg(posX - (xSize / 2.0) + (ySize / 2.0)) .arg(posY - (ySize / 2.0)) .arg(xSize - ySize) .arg(ySize) .arg(color); } else { if (drillX == drillY) { gotID = true; middle = QString("<circle fill='none' cx='%1' cy='%2' r='%3' stroke-width='%4' stroke='%5' />") .arg(posX) .arg(posY) .arg((qMin(xSize, ySize) / 2.0) - (drillY / 4.0)) .arg(checkStrokeWidth(drillY / 2.0)) .arg(color); } middle += QString("<line x1='%1' y1='%2' x2='%3' y2='%2' fill='none' stroke-width='%4' stroke='%5' />") .arg(posX - (xSize / 2.0) + (ySize / 2.0)) .arg(posY - (ySize / 2.0) + (drillY / 4.0)) .arg(posX + (xSize / 2.0) - (ySize / 2.0)) .arg(checkStrokeWidth(drillY / 2.0)) .arg(color); middle += QString("<line x1='%1' y1='%2' x2='%3' y2='%2' fill='none' stroke-width='%4' stroke='%5' />") .arg(posX - (xSize / 2.0) + (ySize / 2.0)) .arg(posY + (ySize / 2.0) - (drillY / 4.0)) .arg(posX + (xSize / 2.0) - (ySize / 2.0)) .arg(checkStrokeWidth(drillY / 2.0)) .arg(color); } return middle + bot; } void KicadModule2Svg::checkLimits(int posX, int xSize, int posY, int ySize) { checkXLimit(posX - (xSize / 2.0)); checkXLimit(posX + (xSize / 2.0)); checkYLimit(posY - (ySize / 2.0)); checkYLimit(posY + (ySize / 2.0)); } QString KicadModule2Svg::drawCPad(int posX, int posY, int xSize, int ySize, int drillX, int drillY, const QString & padName, int padNumber, const QString & padType, KicadModule2Svg::PadLayer padLayer) { QString color = getColor(padLayer); QString id = getID(padNumber, padLayer); Q_UNUSED(ySize); if (padType == "SMD") { return QString("<circle cx='%1' cy='%2' r='%3' %4 fill='%5' 
stroke-width='0' connectorname='%6'/>") .arg(posX) .arg(posY) .arg(xSize / 2.0) .arg(id) .arg(color) .arg(padName); } if (drillX == drillY) { double w = (xSize - drillX) / 2.0; QString pad = QString("<g %1 connectorname='%2'>").arg(id).arg(padName); pad += QString("<circle cx='%1' cy='%2' r='%3' stroke-width='%4' stroke='%5' fill='none' />") .arg(posX) .arg(posY) .arg((drillX / 2.0) + (w / 2)) .arg(checkStrokeWidth(w)) .arg(color); if (drillX > 500) { pad += QString("<circle cx='%1' cy='%2' r='%3' stroke-width='0' fill='black' drill='0' />") .arg(posX) .arg(posY) .arg(drillX / 2.0); } pad += "</g>"; return pad; } QString pad = QString("<g %1>").arg(id); double w = (xSize - qMax(drillX, drillY)) / 2.0; pad += QString("<circle cx='%1' cy='%2' r='%3' stroke-width='%4' stroke='%5' fill='none' drill='0' />") .arg(posX) .arg(posY) .arg((xSize / 2.0) - (w / 2)) .arg(checkStrokeWidth(w)) .arg(color); pad += drawOblong(posX, posY, drillX + w, drillY + w, drillX, drillY, "", padLayer); // now fill the gaps between the oblong and the circle if (drillX >= drillY) { double angle = asin(((drillY + w) / 2) / (ySize / 2.0)); double opp = (ySize / 2.0) * cos(angle); pad += QString("<polygon stroke-width='0' fill='%1' points='%2,%3,%4,%5,%6,%7' />") .arg(color) .arg(posX) .arg(posY - (ySize / 2.0)) .arg(posX - opp) .arg(posY - (drillY / 2.0)) .arg(posX + opp) .arg(posY - (drillY / 2.0)); pad += QString("<polygon stroke-width='0' fill='%1' points='%2,%3,%4,%5,%6,%7' />") .arg(color) .arg(posX) .arg(posY + (ySize / 2.0)) .arg(posX - opp) .arg(posY + (drillY / 2.0)) .arg(posX + opp) .arg(posY + (drillY / 2.0)); } else { double angle = acos(((drillX + w) / 2) / (xSize / 2.0)); double adj = (xSize / 2.0) * sin(angle); pad += QString("<polygon stroke-width='0' fill='%1' points='%2,%3,%4,%5,%6,%7' />") .arg(color) .arg(posX - (xSize / 2.0)) .arg(posY) .arg(posX - (drillX / 2.0)) .arg(posY - adj) .arg(posX - (drillX / 2.0)) .arg(posY + adj); pad += QString("<polygon stroke-width='0' 
fill='%1' points='%2,%3,%4,%5,%6,%7' />") .arg(color) .arg(posX + (xSize / 2.0)) .arg(posY) .arg(posX + (drillX / 2.0)) .arg(posY - adj) .arg(posX + (drillX / 2.0)) .arg(posY + adj); } pad += "</g>"; return pad; } QString KicadModule2Svg::drawRPad(int posX, int posY, int xSize, int ySize, int drillX, int drillY, const QString & padName, int padNumber, const QString & padType, KicadModule2Svg::PadLayer padLayer) { QString color = getColor(padLayer); QString id = getID(padNumber, padLayer); if (padType == "SMD") { return QString("<rect x='%1' y='%2' width='%3' height='%4' %5 stroke-width='0' fill='%6' connectorname='%7'/>") .arg(posX - (xSize / 2.0)) .arg(posY - (ySize / 2.0)) .arg(xSize) .arg(ySize) .arg(id) .arg(color) .arg(padName); } QString pad = QString("<g %1 connectorname='%2'>").arg(id).arg(padName); if (drillX == drillY) { double w = (qMin(xSize, ySize) - drillX) / 2.0; pad += QString("<circle fill='none' cx='%1' cy='%2' r='%3' stroke-width='%4' stroke='%5' />") .arg(posX) .arg(posY) .arg((w / 2) + (drillX / 2.0)) .arg(checkStrokeWidth(w)) .arg(color); } else { double w = (drillX >= drillY) ? 
(xSize - drillX) / 2.0 : (ySize - drillY) / 2.0 ; pad += QString("<circle fill='none' cx='%1' cy='%2' r='%3' stroke-width='%4' stroke='%5' />") .arg(posX) .arg(posY) .arg((w / 2) + (qMax(drillX, drillY) / 2.0)) .arg(checkStrokeWidth(w)) .arg(color); pad += drawOblong(posX, posY, drillX + w, drillY + w, drillX, drillY, "", padLayer); } // draw 4 lines otherwise there may be gaps if one pair of sides is much longer than the other pair of sides double w = (ySize - drillY) / 2.0; double tlx = posX - xSize / 2.0; double tly = posY - ySize / 2.0; pad += QString("<line x1='%1' y1='%2' x2='%3' y2='%2' fill='none' stroke-width='%4' stroke='%5' />") .arg(tlx) .arg(tly + w / 2) .arg(tlx + xSize) .arg(checkStrokeWidth(w)) .arg(color); pad += QString("<line x1='%1' y1='%2' x2='%3' y2='%2' fill='none' stroke-width='%4' stroke='%5' />") .arg(tlx) .arg(tly + ySize - w / 2) .arg(tlx + xSize) .arg(checkStrokeWidth(w)) .arg(color); w = (xSize - drillX) / 2.0; pad += QString("<line x1='%1' y1='%2' x2='%1' y2='%3' fill='none' stroke-width='%4' stroke='%5' />") .arg(tlx + w / 2) .arg(tly) .arg(tly + ySize) .arg(checkStrokeWidth(w)) .arg(color); pad += QString("<line x1='%1' y1='%2' x2='%1' y2='%3' fill='none' stroke-width='%4' stroke='%5' />") .arg(tlx + xSize - w / 2) .arg(tly) .arg(tly + ySize) .arg(checkStrokeWidth(w)) .arg(color); pad += "</g>"; return pad; } QString KicadModule2Svg::drawOblong(int posX, int posY, double xSize, double ySize, int drillX, int drillY, const QString & padType, KicadModule2Svg::PadLayer padLayer) { if (xSize <= ySize) { return drawVerticalOblong(posX, posY, xSize, ySize, drillX, drillY, padType, padLayer); } else { return <API key>(posX, posY, xSize, ySize, drillX, drillY, padType, padLayer); } } QString KicadModule2Svg::getID(int padNumber, KicadModule2Svg::PadLayer padLayer) { if (padNumber < 0) { return QString("id='%1%2'").arg(FSvgRenderer::NonConnectorName).arg(<API key>++); } return QString("id='connector%1%2'").arg(padNumber).arg((padLayer == 
ToCopper1) ? "pad" : "pin"); } QString KicadModule2Svg::getColor(KicadModule2Svg::PadLayer padLayer) { switch (padLayer) { case ToCopper0: return ViewLayer::Copper0Color; break; case ToCopper1: return ViewLayer::Copper1Color; break; default: DebugDialog::debug("kicad getcolor with unknown layer"); return "#FF0000"; } }
package org.openmuc.framework.datalogger.ascii.test; import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.List; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Test; import org.openmuc.framework.data.Record; import org.openmuc.framework.data.ValueType; import org.openmuc.framework.datalogger.ascii.LogFileReader; public class <API key> { private String fileDate; String dateFormat = "yyyyMMdd HH:mm:ss"; private static final int loggingInterval = 1000; static int loggingTimeOffset = 0; private static final String Channel0Name = "power"; LogChannelTestImpl channelTestImpl = new LogChannelTestImpl(Channel0Name, "", "Comment", "W", ValueType.DOUBLE, 0.0, 0.0, false, 1000, 0, "", loggingInterval, loggingTimeOffset, false, false); @Test public void <API key>() { System.out.println("### Begin test <API key>"); fileDate = "20131201"; long t1 = TestUtils.stringToDate(dateFormat, fileDate + " 12:00:00").getTimeInMillis(); long t2 = TestUtils.stringToDate(dateFormat, fileDate + " 12:00:10").getTimeInMillis(); LogFileReader fr = new LogFileReader(TestUtils.TESTFOLDERPATH, channelTestImpl); List<Record> records = fr.getValues(t1, t2).get(channelTestImpl.getId()); long expectedRecords = 0; System.out.print(Thread.currentThread().getStackTrace()[1].getMethodName()); System.out.println(" records = " + records.size() + " (" + expectedRecords + " expected)"); if (records.size() == expectedRecords) { assertTrue(true); } else { assertTrue(false); } } @AfterAll public static void tearDown() { System.out.println("tearing down"); TestUtils.deleteTestFolder(); } // @Ignore // @Test // public void <API key>() { // System.out.println("### Begin test <API key>"); // fileDate = "20131202"; // String ext = ".dat"; // long startTimestampFile = TestUtils.stringToDate(dateFormat, fileDate + " 12:00:00").getTime(); // long endTimestampFile = TestUtils.stringToDate(dateFormat, fileDate + " 12:00:30").getTime(); // String[] channelIds = new String[] { 
"power", "energy" }; // String filename = TestUtils.TESTFOLDER + "/" + fileDate + "_" + loggingInterval + ext; // <API key>(filename, channelIds, startTimestampFile, endTimestampFile, loggingInterval); // long t1 = TestUtils.stringToDate(dateFormat, fileDate + " 12:00:00").getTime(); // long t2 = TestUtils.stringToDate(dateFormat, fileDate + " 12:00:10").getTime(); // LogFileReader fr = new LogFileReader(TestUtils.TESTFOLDERPATH, channelTestImpl); // List<Record> records = fr.getValues(t1, t2); // long expectedRecords = 0; // System.out.print(Thread.currentThread().getStackTrace()[1].getMethodName()); // System.out.println(" records = " + records.size() + " (" + expectedRecords + " expected)"); // if (records.size() == expectedRecords) { // assertTrue(true); // else { // assertTrue(false); // @Ignore // @Test // public void <API key>() { // System.out.println("### Begin test <API key>"); // fileDate = "20131202"; // String ext = ".dat"; // long startTimestampFile = TestUtils.stringToDate(dateFormat, fileDate + " 12:00:00").getTime(); // long endTimestampFile = TestUtils.stringToDate(dateFormat, fileDate + " 12:00:30").getTime(); // String[] channelIds = new String[] { "energy" }; // String filename = TestUtils.TESTFOLDER + "/" + fileDate + "_" + loggingInterval + ext; // createLogFile(filename, channelIds, startTimestampFile, endTimestampFile, loggingInterval); // long t1 = TestUtils.stringToDate(dateFormat, fileDate + " 12:00:00").getTime(); // long t2 = TestUtils.stringToDate(dateFormat, fileDate + " 12:00:10").getTime(); // LogFileReader fr = new LogFileReader(TestUtils.TESTFOLDERPATH, channelTestImpl); // List<Record> records = fr.getValues(t1, t2); // long expectedRecords = 0; // System.out.print(Thread.currentThread().getStackTrace()[1].getMethodName()); // System.out.println(" records = " + records.size() + " (" + expectedRecords + " expected)"); // if (records.size() == expectedRecords) { // assertTrue(true); // else { // assertTrue(false); }
@extends('layouts.app') @section('page_title') Forgot Password @endsection @section('content') <div class="col-xs-8 col-sm-6 col-md-4 col-lg-4 col-xs-offset-2 col-sm-offset-3 col-md-offset-4 col-lg-offset-4"> @include('common.validationErrors') <p class="text-center">Enter the email address used to create your account and instructions for resetting your password will be sent to that address.</p> <form method="POST" action=""> {!! csrf_field() !!} <div class="form-group"> <label for="emailInput">Email</label> <input type="email" class="form-control" id="emailInput" name="email" value="{{ old('email') }}" placeholder="Email"> </div> <div class="col-sm-8 col-sm-offset-2 text-center"> <button type="submit" class="btn btn-default">Submit</button> </div> </form> </div> @endsection
#pragma once #include "../../faworld/world.h" #include <cstdint> #include <fa_nuklear.h> #include <optional> #include <vector> namespace FAWorld { class Player; class PlayerInput; } namespace Engine { class <API key> { public: virtual ~<API key>() = default; virtual std::optional<std::vector<FAWorld::PlayerInput>> getAndClearInputs(FAWorld::Tick tick) = 0; virtual void update() = 0; virtual void verify(FAWorld::Tick tick) = 0; virtual bool isServer() const = 0; virtual bool isMultiplayer() const = 0; virtual bool isPlayerRegistered(uint32_t peerId) const = 0; virtual void registerNewPlayer(FAWorld::Player* player, uint32_t peerId) = 0; virtual void doMultiplayerGui(nk_context*){}; enum { RELIABLE_CHANNEL_ID = 10, <API key>, <API key>, CHANNEL_ID_END }; enum class MessageType : uint8_t { // server-to-client MapToClient, InputsToClient, VerifyToClient, // client-to-server <API key>, <API key> }; }; }
<style>
    .thead_white table thead { background-color: #FFFFFF; }
</style>
<?php

use yii\helpers\Html;
use yii\grid\GridView;
use yii\helpers\ArrayHelper;
use kartik\select2\Select2;
use vendor\meta_grid\helper\RBACHelper;
use yii\helpers\Url;

/* @var $this yii\web\View */
/* @var $searchModel app\models\ProjectSearch */
/* @var $dataProvider yii\data\ActiveDataProvider */

// Page title and breadcrumb trail (both translated).
$this->title = Yii::t('app', 'Projects');
$this->params['breadcrumbs'][] = Yii::t('app', $this->title);
?>
<div class="project-index">
    <h1><?= Html::encode($this->title) ?></h1>
    <?php // echo $this->render('_search', ['model' => $searchModel]); ?>
    <?php
    // This is not the Yii2 way, ... @ToDo
    // Render the advanced search form only when ?searchShow=1 is in the URL;
    // otherwise show a link that reloads the page with that flag set.
    if (isset($_GET["searchShow"])) {
        echo $this->render('_search', ['model' =>$searchModel]);
    } else {
        echo "<a class='btn btn-default' href='index.php?r=".$_GET["r"]."&searchShow=1'>".Yii::t('app', 'Advanced Search')."</a></br></br>";
    }
    ?>
    <p>
        <?= Yii::$app->user->identity->isAdmin || Yii::$app->User->can('create-project') ? Html::a( Yii::t('app', 'Create {modelClass}', ['modelClass' => Yii::t('app', 'Project'),]), ['create'], ['class' => 'btn btn-success']) : "" ?>
    </p>
    <?php
    $session = Yii::$app->session;
    // Inform user about set perspective_filter
    if (array_key_exists("fk_object_type_id", $searchModel->attributes) === true
            && (isset($searchModel->find()->select(['fk_object_type_id'])->one()->fk_object_type_id) === true)) {
        $fk_object_type_id=$searchModel->find()->select(['fk_object_type_id'])->one()->fk_object_type_id;
        if ($session->hasFlash('<API key>' . $fk_object_type_id)) {
            echo yii\bootstrap\Alert::widget([
                'options' => [
                    'class' => 'alert-info',
                ],
                'body' => $session->getFlash('<API key>' . $fk_object_type_id),
            ]);
        }
    }
    // Show an error alert if a previous delete attempt failed.
    if ($session->hasFlash('deleteError')) {
        echo yii\bootstrap\Alert::widget([
            'options' => [
                'class' => 'alert alert-danger alert-dismissable',
            ],
            'body' => $session->getFlash('deleteError'),
        ]);
    }
    // Remember this URL so row actions can navigate back here afterwards.
    Url::remember();
    ?>
    <?= GridView::widget([
        'tableOptions' => ['id' => 'grid-view-project', 'class' => 'table table-striped table-bordered'],
        'dataProvider' => $dataProvider,
        // Pager rendered with glyphicon arrows instead of text labels.
        'pager' => [
            'firstPageLabel' => '<span class="glyphicon <API key>"></span><span class="glyphicon <API key>"></span>',
            'lastPageLabel' => '<span class="glyphicon <API key>"></span><span class="glyphicon <API key>"></span>',
            'prevPageLabel' => '<span class="glyphicon <API key>"></span>',
            'nextPageLabel' => '<span class="glyphicon <API key>"></span>',
            'maxButtonCount' => 15,
        ],
        'layout' => "{pager}\n{summary}{items}\n{pager}",
        // Double-clicking a row opens its detail view.
        'rowOptions' => function ($model, $key, $index, $grid) {
            $controller = Yii::$app->controller->id;
            return [
                'ondblclick' => 'location.href="' . Yii::$app->urlManager->createUrl([$controller . '/view','id'=>$key]) . '"',
            ];
        },
        'options' => [
            'class' => 'thead_white',
        ],
        'filterModel' => $searchModel,
        'columns' => [
            // Action buttons; the template is filtered through RBAC so users
            // only see the actions they are allowed to perform.
            ['class' => 'yii\grid\ActionColumn',
                'contentOptions'=>[ 'style'=>'white-space: nowrap;'] ,
                'template' => RBACHelper::<API key>('{view} {update} {delete} {documentation}'),
                'buttons' => [
                    'documentation' => function ($url, $model) {
                        return Html::a('<span class="glyphicon glyphicon-list-alt"></span>', $url, [
                            'title' => Yii::t('app', 'Documentation'),
                        ]);
                    }
                ],
                // Build per-action URLs; 'documentation' uses a hand-built
                // query string, the rest go through the URL manager.
                'urlCreator' => function ($action, $model, $key, $index) {
                    $controller = Yii::$app->controller->id;
                    if ($action === 'view') {
                        return Yii::$app->urlManager->createUrl([$controller . '/' . $action ,'id'=>$key]);
                    }
                    if ($action === 'update') {
                        return Yii::$app->urlManager->createUrl([$controller . '/' . $action ,'id'=>$key]);
                    }
                    if ($action === 'delete') {
                        return Yii::$app->urlManager->createUrl([$controller . '/' . $action ,'id'=>$key]);
                    }
                    if ($action === 'documentation') {
                        $url = "?r=documentation/createdocumentation&project_id=" . $key;
                        return $url;
                    }
                }
            ],
            ['class' => 'yii\grid\SerialColumn'],
            [
                'label' => Yii::t('app', 'Client'),
                // With the advanced search visible, append the raw id in
                // brackets after the client name.
                'value' => function($model) {
                    return $model->fk_client_id == "" ? $model->fk_client_id : (isset($_GET["searchShow"]) ? $model->fkClient->name . ' [' . $model->fk_client_id . ']' : $model->fkClient->name);
                },
                'filter' => Select2::widget([
                    'model' => $searchModel,
                    'attribute' => 'fk_client_id',
                    'data' => ArrayHelper::map(app\models\Client::find()->asArray()->all(), 'id', 'name'),
                    'options' => ['placeholder' => Yii::t('app', 'Select ...'), 'id' =>'select2_fkClient', 'multiple' => true],
                    'pluginOptions' => [
                        'allowClear' => true
                    ],
                ]),
            ],
            'name:ntext',
            'description:html',
            [
                'label' => Yii::t('app', 'Object Persistence Method'),
                'value' => function($model) {
                    return $model-><API key> == "" ? $model-><API key> : (isset($_GET["searchShow"]) ? $model-><API key>->name . ' [' . $model-><API key> . ']' : $model-><API key>->name);
                },
                'filter' => Select2::widget([
                    'model' => $searchModel,
                    'attribute' => '<API key>',
                    'data' => ArrayHelper::map(app\models\<API key>::find()->asArray()->all(), 'id', 'name'),
                    'options' => ['placeholder' => Yii::t('app', 'Select ...'), 'id' =>'<API key>', 'multiple' => true],
                    'pluginOptions' => [
                        'allowClear' => true
                    ],
                ]),
            ],
            [
                'label' => Yii::t('app', 'Datamanagement Process'),
                'value' => function($model) {
                    return $model-><API key> == "" ? $model-><API key> : (isset($_GET["searchShow"]) ? $model-><API key>->name . ' [' . $model-><API key> . ']' : $model-><API key>->name);
                },
                'filter' => Select2::widget([
                    'model' => $searchModel,
                    'attribute' => '<API key>',
                    'data' => ArrayHelper::map(app\models\<API key>::find()->asArray()->all(), 'id', 'name'),
                    'options' => ['placeholder' => Yii::t('app', 'Select ...'), 'id' =>'<API key>', 'multiple' => true],
                    'pluginOptions' => [
                        'allowClear' => true
                    ],
                ]),
            ],
        ],
    ]); ?>
    <?php
    // Optionally keep the table header visible while scrolling.
    if (\vendor\meta_grid\helper\Utils::get_app_config("<API key>") == 1) {
        \bluezed\floatThead\FloatThead::widget( [ 'tableId' => 'grid-view-project', 'options' => [ 'top'=>'50' ] ] );
    }
    ?>
</div>
#!/usr/bin/env node const getUnixTimeNow = () => Math.round((new Date()).getTime() / 1000) const displayTime = (secs) => { const mins = Math.floor(secs / 60) const remainingSecs = (secs % 60) return `${mins.toString().padStart(2, '0')}:${remainingSecs.toString().padStart(2, '0')}` } async function main () { const start = getUnixTimeNow() const mockContext = require('../mock-context') const v2 = require('./v2') await v2.process(mockContext.log) const end = getUnixTimeNow() const timeTakenSeconds = (end - start) console.log('Manual run completed in ', displayTime(timeTakenSeconds)) } main() .then(() => { console.log('all done') process.exit(0) // prevent the open DB connections from failing to terminate }) .catch(error => { console.error(error) process.exit(1) })
#ifndef SERVER_UTILS_H #define SERVER_UTILS_H #include <QString> #include <QHostAddress> #include "httprequest.h" namespace SJ { class Utils { public: static QString substring(const QString & str, int beginIndex, int endIndex = -1); static QString version() { return "0.3"; } static QHostAddress createAddress(QString interface); static QString <API key>(HttpRequest::RequestMethod method); }; } //namespace SJ #endif // SERVER_UTILS_H
#ifndef <API key> #define <API key> #include <string> #include <memory> #include <chrono> #include "changelog_entry.hpp" #include "semantic_version.hpp" namespace dbmig { Represents the changelog of a given database installation class changelog { public: changelog(const std::string &conn_str, const std::string &changeset); ~changelog(); Is a changelog table installed on the database? const bool installed() const; Get the currently-installed version of the database. const semver version() const; Get the most recent <API key> version of the database. const semver previous_version() const; Get the version that the database could be rolled back to const semver rollback_version() const; Get a list of steps to take to roll back to a given version const rollback_step_list rollback_steps(const semver &ver) const; Get a list of the last batch of contiguous changelog entries const <API key> contiguous_history(bool exclude_rolled_back) const; Force the changelog to a certain version. void override_version(const semver &ver); private: struct impl; std::unique_ptr<impl> pimpl_; }; } #endif // <API key>
function LineGraph(argsMap) { /* public methods */ var self = this; /** * This allows appending new data points to the end of the lines and sliding them within the time window: * - x-axis will slide to new range * - new data will be added to the end of the lines * - equivalent number of data points will be removed from beginning of lines * - lines will be transitioned through horizontoal slide to show progression over time */ this.slideData = function(newData) { // validate data var tempData = processDataMap(newData); debug("Existing startTime: " + data.startTime + " endTime: " + data.endTime); debug("New startTime: " + tempData.startTime + " endTime: " + tempData.endTime); // validate step is the same on each if(tempData.step != newData.step) { throw new Error("The step size on appended data must be the same as the existing data => " + data.step + " != " + tempData.step); } if(tempData.values[0].length == 0) { throw new Error("There is no data to append."); } var numSteps = tempData.values[0].length; console.log("slide => add num new values: " + numSteps); console.log(tempData.values[0]) tempData.values.forEach(function(dataArrays, i) { var <API key> = data.values[i]; dataArrays.forEach(function(v) { console.log("slide => add new value: " + v); // push each new value onto the existing data array <API key>.push(v); // shift the front value off to compensate for what we just added <API key>.shift(); }) }) // shift domain by number of data elements we just added // == numElements * step data.startTime = new Date(data.startTime.getTime() + (data.step * numSteps)); data.endTime = tempData.endTime; debug("Updated startTime: " + data.startTime + " endTime: " + data.endTime); // redraw each of the lines // Transitions are turned off on this since the small steps we're taking // don't actually look good when animated and it uses unnecessary CPU // The quick-steps look cleaner, and keep the axis/line in-sync instead of jittering redrawAxes(false); redrawLines(false); // 
slide the lines left graph.selectAll("g .lines path") .attr("transform", "translate(-" + x(numSteps*data.step) + ")"); handleDataUpdate(); // fire an event that data was updated $(container).trigger('LineGraph:dataModification') } /** * This does a full refresh of the data: * - x-axis will slide to new range * - lines will change in place */ this.updateData = function(newData) { // data is being replaced, not appended so we re-assign 'data' data = processDataMap(newData); // and then we rebind data.values to the lines graph.selectAll("g .lines path").data(data.values) // redraw (with transition) redrawAxes(true); // transition is 'false' for lines because the transition is really weird when the data significantly changes // such as going from 700 points to 150 to 400 // and because of that we rebind the data anyways which doesn't work with transitions very well at all redrawLines(false); handleDataUpdate(); // fire an event that data was updated $(container).trigger('LineGraph:dataModification') } this.switchToPowerScale = function() { yScale = 'pow'; redrawAxes(true); redrawLines(true); // fire an event that config was changed $(container).trigger('LineGraph:configModification') } this.switchToLogScale = function() { yScale = 'log'; redrawAxes(true); redrawLines(true); // fire an event that config was changed $(container).trigger('LineGraph:configModification') } this.switchToLinearScale = function() { yScale = 'linear'; redrawAxes(true); redrawLines(true); // fire an event that config was changed $(container).trigger('LineGraph:configModification') } /** * Return the current scale value: pow, log or linear */ this.getScale = function() { return yScale; } /* private variables */ // the div we insert the graph into var containerId; var container; // functions we use to display and interact with the graphs and lines var graph, x, yLeft, yRight, xAxis, yAxisLeft, yAxisRight, <API key>, linesGroup, linesGroupText, lines, lineFunction, <API key> = -1; var yScale = 
'linear'; // can be pow, log, linear var scales = [['linear','Linear'], ['pow','Power'], ['log','Log']]; var hoverContainer, hoverLine, hoverLineXOffset, hoverLineYOffset, hoverLineGroup; var legendFontSize = 12; // we can resize dynamically to make fit so we remember it here // instance storage of data to be displayed var data; // define dimensions of graph var margin = [-1, -1, -1, -1]; // margins (top, right, bottom, left) var w, h; // width & height var transitionDuration = 300; var formatNumber = d3.format(",.0f") // for formatting integers var <API key> = function(d) { return formatNumber(d) }; // used to track if the user is interacting via mouse/finger instead of trying to determine // by analyzing various element class names to see if they are visible or not var <API key> = false; var <API key> = -1; /* initialization and validation */ var _init = function() { // required variables that we'll throw an error on if we don't find containerId = getRequiredVar(argsMap, 'containerId'); container = document.querySelector('#' + containerId); // margins with defaults (do this before processDataMap since it can modify the margins) margin[0] = getOptionalVar(argsMap, 'marginTop', 20) // marginTop allows fitting the actions, date and top of axis labels margin[1] = getOptionalVar(argsMap, 'marginRight', 20) margin[2] = getOptionalVar(argsMap, 'marginBottom', 35) // marginBottom allows fitting the legend along the bottom margin[3] = getOptionalVar(argsMap, 'marginLeft', 90) // marginLeft allows fitting the axis labels // assign instance vars from dataMap data = processDataMap(getRequiredVar(argsMap, 'data')); /* set the default scale */ yScale = data.scale; // do this after processing margins and executing processDataMap above initDimensions(); createGraph() //debug("Initialization successful for container: " + containerId) // window resize listener // de-dupe logic from http://stackoverflow.com/questions/667426/<API key>/668185#668185 var TO = false; 
$(window).resize(function(){ if(TO !== false) clearTimeout(TO); TO = setTimeout(<API key>, 200); // time in miliseconds }); } /* private methods */ /* * Return a validated data map * * Expects a map like this: * {"start": 1335035400000, "end": 1335294600000, "step": 300000, "values": [[28,22,45,65,34], [45,23,23,45,65]]} */ var processDataMap = function(dataMap) { // assign data values to plot over time var dataValues = getRequiredVar(dataMap, 'values', "The data object must contain a 'values' value with a data array.") var startTime = new Date(getRequiredVar(dataMap, 'start', "The data object must contain a 'start' value with the start time in milliseconds since epoch.")) var endTime = new Date(getRequiredVar(dataMap, 'end', "The data object must contain an 'end' value with the end time in milliseconds since epoch.")) var step = getRequiredVar(dataMap, 'step', "The data object must contain a 'step' value with the time in milliseconds between each data value.") var names = getRequiredVar(dataMap, 'names', "The data object must contain a 'names' array with the same length as 'values' with a name for each data value array.") var displayNames = getOptionalVar(dataMap, 'displayNames', names); var <API key> = getOptionalVar(dataMap, '<API key>', 6); var <API key> = getOptionalVar(dataMap, '<API key>', 6); var axis = getOptionalVar(dataMap, 'axis', []); // default axis values if(axis.length == 0) { displayNames.forEach(function (v, i) { // set the default to left axis axis[i] = "left"; }) } else { var hasRightAxis = false; axis.forEach(function(v) { if(v == 'right') { hasRightAxis = true; } }) if(hasRightAxis) { // add space to right margin margin[1] = margin[1] + 50; } } var colors = getOptionalVar(dataMap, 'colors', []); // default colors values if(colors.length == 0) { displayNames.forEach(function (v, i) { // set the default colors[i] = "black"; }) } var maxValues = []; var rounding = getOptionalVar(dataMap, 'rounding', []); // default rounding values 
if(rounding.length == 0) { displayNames.forEach(function (v, i) { // set the default to 0 decimals rounding[i] = 0; }) } /* copy the dataValues array, do NOT assign the reference otherwise we modify the original source when we shift/push data */ var newDataValues = []; dataValues.forEach(function (v, i) { newDataValues[i] = v.slice(0); maxValues[i] = d3.max(newDataValues[i]) }) return { "values" : newDataValues, "startTime" : startTime, "endTime" : endTime, "step" : step, "names" : names, "displayNames": displayNames, "axis" : axis, "colors": colors, "scale" : getOptionalVar(dataMap, 'scale', yScale), "maxValues" : maxValues, "rounding" : rounding, "<API key>": <API key>, "<API key>": <API key> } } var redrawAxes = function(withTransition) { initY(); initX(); if(withTransition) { // slide x-axis to updated location graph.selectAll("g .x.axis").transition() .duration(transitionDuration) .ease("linear") .call(xAxis) // slide y-axis to updated location graph.selectAll("g .y.axis.left").transition() .duration(transitionDuration) .ease("linear") .call(yAxisLeft) if(yAxisRight != undefined) { // slide y-axis to updated location graph.selectAll("g .y.axis.right").transition() .duration(transitionDuration) .ease("linear") .call(yAxisRight) } } else { // slide x-axis to updated location graph.selectAll("g .x.axis") .call(xAxis) // slide y-axis to updated location graph.selectAll("g .y.axis.left") .call(yAxisLeft) if(yAxisRight != undefined) { // slide y-axis to updated location graph.selectAll("g .y.axis.right") .call(yAxisRight) } } } var redrawLines = function(withTransition) { /** * This is a hack to deal with the left/right axis. * * See createGraph for a larger comment explaining this. * * Yes, it's ugly. If you can suggest a better solution please do. 
*/ <API key> =-1; // redraw lines if(withTransition) { graph.selectAll("g .lines path") .transition() .duration(transitionDuration) .ease("linear") .attr("d", lineFunction) .attr("transform", null); } else { graph.selectAll("g .lines path") .attr("d", lineFunction) .attr("transform", null); } } /* * Allow re-initializing the y function at any time. * - it will properly determine what scale is being used based on last user choice (via public switchScale methods) */ var initY = function() { initYleft(); initYright(); } var initYleft = function() { var maxYscaleLeft = calculateMaxY(data, 'left') //debug("initY => maxYscale: " + maxYscaleLeft); var numAxisLabels = 6; if(yScale == 'pow') { yLeft = d3.scale.pow().exponent(0.3).domain([0, maxYscaleLeft]).range([h, 0]).nice(); numAxisLabels = data.<API key>; } else if(yScale == 'log') { // we can't have 0 so will represent 0 with a very small number // 0.1 works to represent 0, 0.01 breaks the tickFormatter yLeft = d3.scale.log().domain([0.1, maxYscaleLeft]).range([h, 0]).nice(); } else if(yScale == 'linear') { yLeft = d3.scale.linear().domain([0, maxYscaleLeft]).range([h, 0]).nice(); numAxisLabels = data.<API key>; } yAxisLeft = d3.svg.axis().scale(yLeft).ticks(numAxisLabels, <API key>).orient("left"); } var initYright = function() { var maxYscaleRight = calculateMaxY(data, 'right') // only create the right axis if it has values if(maxYscaleRight != undefined) { //debug("initY => maxYscale: " + maxYscaleRight); var numAxisLabels = 6; if(yScale == 'pow') { yRight = d3.scale.pow().exponent(0.3).domain([0, maxYscaleRight]).range([h, 0]).nice(); numAxisLabels = data.<API key>; } else if(yScale == 'log') { // we can't have 0 so will represent 0 with a very small number // 0.1 works to represent 0, 0.01 breaks the tickFormatter yRight = d3.scale.log().domain([0.1, maxYscaleRight]).range([h, 0]).nice(); } else if(yScale == 'linear') { yRight = d3.scale.linear().domain([0, maxYscaleRight]).range([h, 0]).nice(); numAxisLabels = 
data.<API key>; } yAxisRight = d3.svg.axis().scale(yRight).ticks(numAxisLabels, <API key>).orient("right"); } } /* * Whenever we add/update data we want to re-calculate if the max Y scale has changed */ var calculateMaxY = function(data, whichAxis) { // Y scale will fit values from 0-10 within pixels h-0 (Note the inverted domain for the y-scale: bigger is up!) // we get the max of the max of values for the given index since we expect an array of arrays // we can shortcut to using data.maxValues since we've already calculated the max of each series in processDataMap var maxValuesForAxis = []; data.maxValues.forEach(function(v, i) { if(data.axis[i] == whichAxis) { maxValuesForAxis.push(v); } }) // we now have the max values for the axis we're interested in so get the max of them return d3.max(maxValuesForAxis); } /* * Allow re-initializing the x function at any time. */ var initX = function() { // X scale starts at epoch time 1335035400000, ends at 1335294600000 with 300s increments x = d3.time.scale().domain([data.startTime, data.endTime]).range([0, w]); // create yAxis (with ticks) xAxis = d3.svg.axis().scale(x).tickSize(-h).tickSubdivide(1); // without ticks //xAxis = d3.svg.axis().scale(x); } /** * Creates the SVG elements and displays the line graph. * * Expects to be called once during instance initialization. */ var createGraph = function() { // Add an SVG element with the desired dimensions and margin. graph = d3.select("#" + containerId).append("svg:svg") .attr("class", "line-graph") .attr("width", w + margin[1] + margin[3]) .attr("height", h + margin[0] + margin[2]) .append("svg:g") .attr("transform", "translate(" + margin[3] + "," + margin[0] + ")"); initX() // Add the x-axis. 
graph.append("svg:g") .attr("class", "x axis") .attr("transform", "translate(0," + h + ")") .call(xAxis); // y is all done in initY because we need to re-assign vars quite often to change scales initY(); // Add the y-axis to the left graph.append("svg:g") .attr("class", "y axis left") .attr("transform", "translate(-10,0)") .call(yAxisLeft); if(yAxisRight != undefined) { // Add the y-axis to the right if we need one graph.append("svg:g") .attr("class", "y axis right") .attr("transform", "translate(" + (w+10) + ",0)") .call(yAxisRight); } // create line function used to plot our data lineFunction = d3.svg.line() // assign the X function to plot our line as we wish .x(function(d,i) { /* * Our x value is defined by time and since our data doesn't have per-metric timestamps * we calculate time as (startTime + the step between metrics * the index) * * We also reach out to the persisted 'data' object for time * since the 'd' passed in here is one of the children, not the parent object */ var _x = x(data.startTime.getTime() + (data.step*i)); // verbose logging to show what's actually being done //debug("Line X => index: " + i + " scale: " + _x) // return the X coordinate where we want to plot this datapoint return _x; }) .y(function(d, i) { if(yScale == 'log' && d < 0.1) { // log scale can't have 0s, so we set it to the smallest value we set on y d = 0.1; } /** * This is a hack that relies on: * a) the single-threaded nature of javascript that this will not be interleaved * b) that lineFunction will always be passed the data[] for all lines in the same way each time * * We then use an external variable to track each time we move from one series to the next * so that we can have its seriesIndex to access information in the data[] object, particularly * so we can determine what axis this data is supposed to be on. * * I didn't want to split the line function into left and right lineFunctions as that would really * complicate the data binding. * * Also ... 
I can't figure out nested functions to keep it scoped so I had to put <API key> * as a variable in the same scope as lineFunction. Ugly. And worse ... reset it in redrawAxes. * * Anyone reading this who knows a better solution please let me know. */ if(i == 0) { <API key>++; } var axis = data.axis[<API key>]; var _y; if(axis == 'right') { _y = yRight(d); } else { _y = yLeft(d); } // verbose logging to show what's actually being done //debug("Line Y => data: " + d + " scale: " + _y) // return the Y coordinate where we want to plot this datapoint return _y; }) .defined(function(d) { // handle missing data gracefully return d >= 0; }); // append a group to contain all lines lines = graph.append("svg:g") .attr("class", "lines") .selectAll("path") .data(data.values); // bind the array of arrays // persist this reference so we don't do the selector every mouse event hoverContainer = container.querySelector('g .lines'); $(container).mouseleave(function(event) { handleMouseOutGraph(event); }) $(container).mousemove(function(event) { <API key>(event); }) // add a line group for each array of values (it will iterate the array of arrays bound to the data function above) linesGroup = lines.enter().append("g") .attr("class", function(d, i) { return "line_group series_" + i; }); // add path (the actual line) to line group linesGroup.append("path") .attr("class", function(d, i) { //debug("Appending line [" + containerId + "]: " + i) return "line series_" + i; }) .attr("fill", "none") .attr("stroke", function(d, i) { return data.colors[i]; }) .attr("d", lineFunction) // use the 'lineFunction' to create the data points in the correct x,y axis .on('mouseover', function(d, i) { handleMouseOverLine(d, i); }); // add line label to line group linesGroupText = linesGroup.append("svg:text"); linesGroupText.attr("class", function(d, i) { //debug("Appending line [" + containerId + "]: " + i) return "line_label series_" + i; }) .text(function(d, i) { return ""; }); // add a 'hover' line that 
we'll show as a user moves their mouse (or finger) // so we can use it to show detailed values of each line hoverLineGroup = graph.append("svg:g") .attr("class", "hover-line"); // add the line to the group hoverLine = hoverLineGroup .append("svg:line") .attr("x1", 10).attr("x2", 10) // vertical line so same value on each .attr("y1", 0).attr("y2", h); // top to bottom // hide it by default hoverLine.classed("hide", true); createScaleButtons(); createDateLabel(); createLegend(); <API key>(); } /** * Create a legend that displays the name of each line with appropriate color coding * and allows for showing the current value when doing a mouseOver */ var createLegend = function() { // append a group to contain all lines var legendLabelGroup = graph.append("svg:g") .attr("class", "legend-group") .selectAll("g") .data(data.displayNames) .enter().append("g") .attr("class", "legend-labels"); legendLabelGroup.append("svg:text") .attr("class", "legend name") .text(function(d, i) { return d; }) .attr("font-size", legendFontSize) .attr("fill", function(d, i) { // return the color for this row return data.colors[i]; }) .attr("y", function(d, i) { return h+28; }) // put in placeholders with 0 width that we'll populate and resize dynamically legendLabelGroup.append("svg:text") .attr("class", "legend value") .attr("font-size", legendFontSize) .attr("fill", function(d, i) { return data.colors[i]; }) .attr("y", function(d, i) { return h+28; }) // x values are not defined here since those get dynamically calculated when data is set in <API key>() } var <API key> = function(animate) { var legendText = graph.selectAll('g.legend-group text'); if(animate) { legendText.transition() .duration(transitionDuration) .ease("linear") .attr("y", function(d, i) { return h+28; }); } else { legendText.attr("y", function(d, i) { return h+28; }); } } /** * Create scale buttons for switching the y-axis */ var createScaleButtons = function() { var cumulativeWidth = 0; // append a group to contain all 
lines var buttonGroup = graph.append("svg:g") .attr("class", "scale-button-group") .selectAll("g") .data(scales) .enter().append("g") .attr("class", "scale-buttons") .append("svg:text") .attr("class", "scale-button") .text(function(d, i) { return d[1]; }) .attr("font-size", "12") // this must be before "x" which dynamically determines width .attr("fill", function(d) { if(d[0] == yScale) { return "black"; } else { return "blue"; } }) .classed("selected", function(d) { if(d[0] == yScale) { return true; } else { return false; } }) .attr("x", function(d, i) { // return it at the width of previous labels (where the last one ends) var returnX = cumulativeWidth; // increment cumulative to include this one cumulativeWidth += this.<API key>()+5; return returnX; }) .attr("y", -4) .on('click', function(d, i) { <API key>(this, d, i); }); } var <API key> = function(button, buttonData, index) { if(index == 0) { self.switchToLinearScale(); } else if(index == 1) { self.switchToPowerScale(); } else if(index == 2) { self.switchToLogScale(); } // change text decoration graph.selectAll('.scale-button') .attr("fill", function(d) { if(d[0] == yScale) { return "black"; } else { return "blue"; } }) .classed("selected", function(d) { if(d[0] == yScale) { return true; } else { return false; } }) } /** * Create a data label */ var createDateLabel = function() { var date = new Date(); // placeholder just so we can calculate a valid width // create the date label to the left of the scaleButtons group var buttonGroup = graph.append("svg:g") .attr("class", "date-label-group") .append("svg:text") .attr("class", "date-label") .attr("text-anchor", "end") // set at end so we can position at far right edge and add text from right to left .attr("font-size", "10") .attr("y", -4) .attr("x", w) .text(date.toDateString() + " " + date.toLocaleTimeString()) } /** * Called when a user mouses over a line. 
*/ var handleMouseOverLine = function(lineData, index) { //debug("MouseOver line [" + containerId + "] => " + index) // user is interacting <API key> = true; } /** * Called when a user mouses over the graph. */ var <API key> = function(event) { var mouseX = event.<API key>; var mouseY = event.<API key>; //debug("MouseOver graph [" + containerId + "] => x: " + mouseX + " y: " + mouseY + " height: " + h + " event.clientY: " + event.clientY + " offsetY: " + event.offsetY + " pageY: " + event.pageY + " hoverLineYOffset: " + hoverLineYOffset) if(mouseX >= 0 && mouseX <= w && mouseY >= 0 && mouseY <= h) { // show the hover line hoverLine.classed("hide", false); // set position of hoverLine hoverLine.attr("x1", mouseX).attr("x2", mouseX) <API key>(mouseX) // user is interacting <API key> = true; <API key> = mouseX; } else { // proactively act as if we've left the area since we're out of the bounds we want handleMouseOutGraph(event) } } var handleMouseOutGraph = function(event) { // hide the hover-line hoverLine.classed("hide", true); <API key>(); //debug("MouseOut graph [" + containerId + "] => " + mouseX + ", " + mouseY) // user is no longer interacting <API key> = false; <API key> = -1; } /* // if we need to support older browsers without pageX/pageY we can use this var <API key> = function(e, element) { var posx = 0; var posy = 0; if (!e) var e = window.event; if (e.pageX || e.pageY) { posx = e.pageX; posy = e.pageY; } else if (e.clientX || e.clientY) { posx = e.clientX + document.body.scrollLeft + document.documentElement.scrollLeft; posy = e.clientY + document.body.scrollTop + document.documentElement.scrollTop; } return {x: posx, y: posy}; } */ /* * Handler for when data is updated. 
*/ var handleDataUpdate = function() { if(<API key>) { // user is interacting, so let's update values to wherever the mouse/finger is on the updated data if(<API key> > -1) { <API key>(<API key>) } } else { // the user is not interacting with the graph, so we'll update the labels to the latest <API key>(); } } /** * Display the data values at position X in the legend value labels. */ var <API key> = function(xPosition, withTransition) { var animate = false; if(withTransition != undefined) { if(withTransition) { animate = true; } } var dateToShow; var labelValueWidths = []; graph.selectAll("text.legend.value") .text(function(d, i) { var valuesForX = <API key>(xPosition, i); dateToShow = valuesForX.date; return valuesForX.value; }) .attr("x", function(d, i) { labelValueWidths[i] = this.<API key>(); }) // position label names var cumulativeWidth = 0; var labelNameEnd = []; graph.selectAll("text.legend.name") .attr("x", function(d, i) { // return it at the width of previous labels (where the last one ends) var returnX = cumulativeWidth; // increment cumulative to include this one + the value label at this index cumulativeWidth += this.<API key>()+4+labelValueWidths[i]+8; // store where this ends labelNameEnd[i] = returnX + this.<API key>()+5; return returnX; }) // remove last bit of padding from cumulativeWidth cumulativeWidth = cumulativeWidth - 8; if(cumulativeWidth > w) { // decrease font-size to make fit legendFontSize = legendFontSize-1; //debug("making legend fit by decreasing font size to: " + legendFontSize) graph.selectAll("text.legend.name") .attr("font-size", legendFontSize); graph.selectAll("text.legend.value") .attr("font-size", legendFontSize); // recursively call until we get ourselves fitting <API key>(xPosition); return; } // position label values graph.selectAll("text.legend.value") .attr("x", function(d, i) { return labelNameEnd[i]; }) // show the date graph.select('text.date-label').text(dateToShow.toDateString() + " " + 
dateToShow.toLocaleTimeString()) // move the group of labels to the right side if(animate) { graph.selectAll("g.legend-group g") .transition() .duration(transitionDuration) .ease("linear") .attr("transform", "translate(" + (w-cumulativeWidth) +",0)") } else { graph.selectAll("g.legend-group g") .attr("transform", "translate(" + (w-cumulativeWidth) +",0)") } } /** * Set the value labels to whatever the latest data point is. */ var <API key> = function(withTransition) { <API key>(w, withTransition); } /** * Convert back from an X position on the graph to a data value from the given array (one of the lines) * Return {value: value, date, date} */ var <API key> = function(xPosition, dataSeriesIndex) { var d = data.values[dataSeriesIndex] // get the date on x-axis for the current location var xValue = x.invert(xPosition); // Calculate the value from this date by determining the 'index' // within the data array that applies to this value var index = (xValue.getTime() - data.startTime) / data.step; if(index >= d.length) { index = d.length-1; } // The date we're given is interpolated so we have to round off to get the nearest // index in the data array for the xValue we're given. // Once we have the index, we then retrieve the data from the d[] array index = Math.round(index); // bucketDate is the date rounded to the correct 'step' instead of interpolated var bucketDate = new Date(data.startTime.getTime() + data.step * (index+1)); // index+1 as it is 0 based but we need 1-based for this math var v = d[index]; var roundToNumDecimals = data.rounding[dataSeriesIndex]; return {value: roundNumber(v, roundToNumDecimals), date: bucketDate}; } /** * Called when the window is resized to redraw graph accordingly. 
*/ var <API key> = function() { //debug("Window Resize Event [" + containerId + "] => resizing graph") initDimensions(); initX(); // reset width/height of SVG d3.select("#" + containerId + " svg") .attr("width", w + margin[1] + margin[3]) .attr("height", h + margin[0] + margin[2]); // reset transform of x axis graph.selectAll("g .x.axis") .attr("transform", "translate(0," + h + ")"); if(yAxisRight != undefined) { // Reset the y-axisRight transform if it exists graph.selectAll("g .y.axis.right") .attr("transform", "translate(" + (w+10) + ",0)"); } // reset legendFontSize on window resize so it has a chance to re-calculate to a bigger size if it can now fit legendFontSize = 12; //debug("making legend fit by decreasing font size to: " + legendFontSize) graph.selectAll("text.legend.name") .attr("font-size", legendFontSize); graph.selectAll("text.legend.value") .attr("font-size", legendFontSize); // move date label graph.select('text.date-label') .transition() .duration(transitionDuration) .ease("linear") .attr("x", w) // redraw the graph with new dimensions redrawAxes(true); redrawLines(true); // reposition legend if necessary <API key>(true); // force legend to redraw values <API key>(true); } /** * Set height/width dimensions based on container. 
 */
var initDimensions = function() {
	// automatically size to the container using JQuery to get width/height
	w = $("#" + containerId).width() - margin[1] - margin[3]; // width
	h = $("#" + containerId).height() - margin[0] - margin[2]; // height

	// make sure to use offset() and not position() as we want it relative to the document, not its parent
	hoverLineXOffset = margin[3]+$(container).offset().left;
	hoverLineYOffset = margin[0]+$(container).offset().top;
}

/**
 * Return the value from argsMap for key or throw error if no value found.
 * @param argsMap object holding the configuration values
 * @param key     property name to look up
 * @param message optional custom error message used instead of the default
 */
var getRequiredVar = function(argsMap, key, message) {
	// NOTE(review): truthiness check — a present-but-falsy value (0, '', false)
	// is treated as missing and throws; confirm callers never pass such values
	if(!argsMap[key]) {
		if(!message) {
			throw new Error(key + " is required")
		} else {
			throw new Error(message)
		}
	} else {
		return argsMap[key]
	}
}

/**
 * Return the value from argsMap for key or defaultValue if no value found.
 * @param argsMap      object holding the configuration values
 * @param key          property name to look up
 * @param defaultValue value returned when the key is absent (or falsy — see note)
 */
var getOptionalVar = function(argsMap, key, defaultValue) {
	// NOTE(review): truthiness check — a present-but-falsy value (0, '', false)
	// also falls back to defaultValue; confirm that is the intended behavior
	if(!argsMap[key]) {
		return defaultValue
	} else {
		return argsMap[key]
	}
}

// log an error-level message to the console
var error = function(message) {
	console.log("ERROR: " + message)
}

// log a debug-level message to the console
var debug = function(message) {
	console.log("DEBUG: " + message)
}

/* round a number to X digits: num => the number to round, dec => the number of decimals */
/* private */ function roundNumber(num, dec) {
	// round numerically to 'dec' decimal places first
	var result = Math.round(num*Math.pow(10,dec))/Math.pow(10,dec);
	var resultAsString = result.toString();
	if(dec > 0) {
		// ensure a decimal point exists before padding
		if(resultAsString.indexOf('.') == -1) {
			resultAsString = resultAsString + '.';
		}
		// make sure we have a decimal and pad with 0s to match the number we were asked for
		var indexOfDecimal = resultAsString.indexOf('.');
		while(resultAsString.length <= (indexOfDecimal+dec)) {
			resultAsString = resultAsString + '0';
		}
	}
	// returns a STRING (not a number) so trailing zeros are preserved for display
	return resultAsString;
};

/* execute init now that everything is defined */
_init();
};
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Globalization;
using System.Linq;
using System.Web;

namespace GitHub.ViewModels
{
    /// <summary>
    /// Validation attribute that accepts a value only when it parses as a
    /// "d MMM yyyy" date (current culture) that lies strictly in the future.
    /// </summary>
    public class FutureDate : ValidationAttribute
    {
        /// <summary>
        /// Returns true when <paramref name="value"/> is a well-formed
        /// "d MMM yyyy" date later than the current moment; false otherwise.
        /// </summary>
        public override bool IsValid(object value)
        {
            DateTime parsed;
            var text = Convert.ToString(value);

            // Reject anything that does not match the exact expected format.
            if (!DateTime.TryParseExact(text, "d MMM yyyy",
                                        CultureInfo.CurrentCulture,
                                        DateTimeStyles.None, out parsed))
            {
                return false;
            }

            // The parsed date carries no time component (midnight), so a date
            // equal to "today" compares as not in the future — same behavior
            // as the original strict > comparison.
            return parsed > DateTime.Now;
        }
    }
}
# Build the SWIG "class" C++ example for CHICKEN Scheme.
# All real work is delegated to the shared $(TOP)/Makefile via recursive make.

TOP        = ../..
SWIG       = $(TOP)/../preinst-swig
INTERFACE  = example.i
SRCS       =
CXXSRCS    = example.cxx
TARGET     = class
INCLUDE    =
SWIGOPT    =
CFLAGS     =
VARIANT    =

# uncomment the following lines to build a static exe (only pick one of the CHICKEN_MAIN lines)
#CHICKEN_MAIN = test-lowlevel-class.scm
#CHICKEN_MAIN = test-tinyclos-class.scm
#VARIANT = _static

# Default goal: build both the low-level module and the TinyCLOS proxy module.
all:: $(TARGET) $(TARGET)_proxy

# Low-level wrapper: recursive make into the shared example Makefile.
$(TARGET): $(INTERFACE) $(SRCS)
	$(MAKE) -f $(TOP)/Makefile \
	SRCS='$(SRCS)' CXXSRCS='$(CXXSRCS)' CHICKEN_MAIN='$(CHICKEN_MAIN)' \
	INCLUDE='$(INCLUDE)' SWIGOPT='$(SWIGOPT)' TARGET='$(TARGET)' \
	SWIG='$(SWIG)' INTERFACE='$(INTERFACE)' CHICKENOPTS='$(CHICKENOPTS)' chicken$(VARIANT)_cpp

# TinyCLOS proxy wrapper: same build, with SWIG's -proxy option added.
$(TARGET)_proxy: $(INTERFACE) $(SRCS)
	$(MAKE) -f $(TOP)/Makefile \
	SRCS='$(SRCS)' CXXSRCS='$(CXXSRCS)' CHICKEN_MAIN='$(CHICKEN_MAIN)' \
	INCLUDE='$(INCLUDE)' SWIGOPT='$(SWIGOPT) -proxy' TARGET='$(TARGET)_proxy' \
	SWIG='$(SWIG)' INTERFACE='$(INTERFACE)' CHICKENOPTS='$(CHICKENOPTS)' chicken$(VARIANT)_cpp

# Remove generated wrapper code and built artifacts.
clean::
	$(MAKE) -f $(TOP)/Makefile chicken_clean
	rm -f example.scm
	rm -f $(TARGET)

# Run both test drivers against the freshly built modules.
check::
	env LD_LIBRARY_PATH=.:$$LD_LIBRARY_PATH csi test-lowlevel-class.scm
	env LD_LIBRARY_PATH=.:$$LD_LIBRARY_PATH csi test-tinyclos-class.scm
#include <sys/select.h> #include <arpa/inet.h> #include <arpa/nameser.h> #include <ares.h> #include <talloc.h> #include <tevent.h> #include <errno.h> #include <netdb.h> #include <stddef.h> #include <string.h> #include <unistd.h> #include "config.h" #include "resolv/async_resolv.h" #include "util/dlinklist.h" #include "util/util.h" #ifndef HAVE_ARES_DATA #define <API key>(abuf, alen, srv_out) \ <API key>(abuf, alen, srv_out) #define <API key>(abuf, alen, txt_out) \ <API key>(abuf, alen, txt_out) #define ares_free_data(dataptr) \ _ares_free_data(dataptr) #define ares_malloc_data(data) \ _ares_malloc_data(data) #endif /* HAVE_ARES_DATA */ #ifndef <API key> #define ares_addrttl addrttl #endif #ifndef <API key> #define ares_addr6ttl addr6ttl #endif #define DNS__16BIT(p) (((p)[0] << 8) | (p)[1]) #define DNS_HEADER_ANCOUNT(h) DNS__16BIT((h) + 6) #define RESOLV_TIMEOUTMS 5000 enum host_database default_host_dbs[] = { DB_FILES, DB_DNS, DB_SENTINEL }; struct fd_watch { struct fd_watch *prev; struct fd_watch *next; int fd; struct resolv_ctx *ctx; struct tevent_fd *fde; }; struct resolv_ctx { struct tevent_context *ev_ctx; ares_channel channel; /* List of file descriptors that are watched by tevent. */ struct fd_watch *fds; /* Time in milliseconds before canceling a DNS request */ int timeout; /* The timeout watcher periodically calls ares_process_fd() to check * if our pending requests didn't timeout. 
*/ int pending_requests; struct tevent_timer *timeout_watcher; }; struct request_watch { struct tevent_req *req; struct resolv_request *rr; }; struct resolv_request { struct resolv_ctx *ctx; struct request_watch *rwatch; struct tevent_timer *request_timeout; }; static int return_code(int ares_code) { switch (ares_code) { case ARES_SUCCESS: return EOK; case ARES_ENOMEM: return ENOMEM; case ARES_EFILE: default: return EIO; } } const char * resolv_strerror(int ares_code) { return ares_strerror(ares_code); } static int fd_watch_destructor(struct fd_watch *f) { DLIST_REMOVE(f->ctx->fds, f); f->fd = -1; return 0; } static void fd_input_available(struct tevent_context *ev, struct tevent_fd *fde, uint16_t flags, void *data) { struct fd_watch *watch = talloc_get_type(data, struct fd_watch); if (watch->ctx->channel == NULL) { DEBUG(1, ("Invalid ares channel - this is likely a bug\n")); return; } if (flags & TEVENT_FD_READ) { ares_process_fd(watch->ctx->channel, watch->fd, ARES_SOCKET_BAD); } if (flags & TEVENT_FD_WRITE) { ares_process_fd(watch->ctx->channel, ARES_SOCKET_BAD, watch->fd); } } static void check_fd_timeouts(struct tevent_context *ev, struct tevent_timer *te, struct timeval current_time, void *private_data); static void add_timeout_timer(struct tevent_context *ev, struct resolv_ctx *ctx) { struct timeval tv = { 0, 0 }; struct timeval *tvp; if (ctx->timeout_watcher) { return; } tvp = ares_timeout(ctx->channel, NULL, &tv); if (tvp == NULL) { tvp = &tv; } /* Enforce a minimum of 1 second. 
*/ if (tvp->tv_sec < 1) { tv = <API key>(1, 0); } else { tv = <API key>(tvp->tv_sec, tvp->tv_usec); } ctx->timeout_watcher = tevent_add_timer(ev, ctx, tv, check_fd_timeouts, ctx); if (ctx->timeout_watcher == NULL) { DEBUG(1, ("Out of memory\n")); } } static void check_fd_timeouts(struct tevent_context *ev, struct tevent_timer *te, struct timeval current_time, void *private_data) { struct resolv_ctx *ctx = talloc_get_type(private_data, struct resolv_ctx); DEBUG(9, ("Checking for DNS timeouts\n")); /* NULLify the timeout_watcher so we don't * free it in the _done() function if it * gets called. Now that we're already in * the handler, tevent will take care of * freeing it when it returns. */ ctx->timeout_watcher = NULL; ares_process_fd(ctx->channel, ARES_SOCKET_BAD, ARES_SOCKET_BAD); if (ctx->pending_requests > 0) { add_timeout_timer(ev, ctx); } } static void <API key>(struct tevent_context *ev, struct tevent_timer *te, struct timeval tv, void *pvt) { struct resolv_request *rreq; DEBUG(<API key>, ("The resolve request timed out\n")); rreq = talloc_get_type(pvt, struct resolv_request); if (rreq->rwatch == NULL) { DEBUG(SSSDBG_CRIT_FAILURE, ("The request already completed\n")); return; } tevent_req_error(rreq->rwatch->req, ETIMEDOUT); rreq->rwatch = NULL; } static int <API key>(struct request_watch *rwatch) { DEBUG(SSSDBG_TRACE_FUNC, ("Deleting request watch\n")); if (rwatch->rr) rwatch->rr->rwatch = NULL; return 0; } static struct resolv_request * <API key>(struct tevent_context *ev, struct resolv_ctx *ctx, struct tevent_req *req) { struct resolv_request *rreq; struct timeval tv; DEBUG(<API key>, ("Scheduling a timeout of %d seconds\n", ctx->timeout)); tv = <API key>(ctx->timeout, 0); /* Intentionally allocating on ctx, because the request might go away * before c-ares returns */ rreq = talloc(ctx, struct resolv_request); if (!rreq) { talloc_zfree(req); return NULL; } rreq->ctx = ctx; rreq->request_timeout = tevent_add_timer(ev, rreq, tv, <API key>, rreq); if 
(rreq->request_timeout == NULL) { talloc_free(rreq); return NULL; } /* The watch will go away when the request finishes */ rreq->rwatch = talloc(req, struct request_watch); if (!rreq->rwatch) { talloc_zfree(req); return NULL; } rreq->rwatch->req = req; rreq->rwatch->rr = rreq; <API key>(rreq->rwatch, <API key>); return rreq; } static struct resolv_request * <API key>(struct tevent_context *ev, struct resolv_ctx *ctx, struct tevent_req *req) { struct resolv_request *rreq; rreq = <API key>(ev, ctx, req); if (!rreq) return NULL; ctx->pending_requests++; DEBUG(<API key>, ("Scheduling DNS timeout watcher\n")); add_timeout_timer(ev, ctx); return rreq; } static void <API key>(struct resolv_ctx *ctx, struct resolv_request *rreq) { /* Unlink the watch if the request is still active */ if (rreq->rwatch) { rreq->rwatch->rr = NULL; } talloc_free(rreq); /* Cancels the tevent timeout as well */ if (ctx->pending_requests <= 0) { DEBUG(1, ("Pending DNS requests mismatch\n")); return; } ctx->pending_requests if (ctx->pending_requests == 0) { DEBUG(9, ("Unscheduling DNS timeout watcher\n")); talloc_zfree(ctx->timeout_watcher); } } static void fd_event_add(struct resolv_ctx *ctx, int s, int flags); static void fd_event_close(struct resolv_ctx *ctx, int s); /* * When ares is ready to read or write to a file descriptor, it will * call this callback. If both read and write are 0, it means that ares * will soon close the socket. We are mainly using this function to register * new file descriptors with tevent. */ static void fd_event(void *data, int s, int fd_read, int fd_write) { struct resolv_ctx *ctx = talloc_get_type(data, struct resolv_ctx); struct fd_watch *watch; int flags; /* The socket is about to get closed. */ if (fd_read == 0 && fd_write == 0) { fd_event_close(ctx, s); return; } flags = fd_read ? TEVENT_FD_READ : 0; flags |= fd_write ? TEVENT_FD_WRITE : 0; /* Are we already watching this file descriptor? 
*/ watch = ctx->fds; while (watch) { if (watch->fd == s) { tevent_fd_set_flags(watch->fde, flags); return; } watch = watch->next; } fd_event_add(ctx, s, flags); } static void fd_event_add(struct resolv_ctx *ctx, int s, int flags) { struct fd_watch *watch; /* The file descriptor is new, register it with tevent. */ watch = talloc(ctx, struct fd_watch); if (watch == NULL) { DEBUG(1, ("Out of memory allocating fd_watch structure\n")); return; } <API key>(watch, fd_watch_destructor); watch->fd = s; watch->ctx = ctx; watch->fde = tevent_add_fd(ctx->ev_ctx, watch, s, flags, fd_input_available, watch); if (watch->fde == NULL) { DEBUG(1, ("tevent_add_fd() failed\n")); talloc_free(watch); return; } DLIST_ADD(ctx->fds, watch); } static void fd_event_close(struct resolv_ctx *ctx, int s) { struct fd_watch *watch; /* Remove the socket from list */ watch = ctx->fds; while (watch) { if (watch->fd == s) { talloc_free(watch); return; } watch = watch->next; } } static int <API key>(struct resolv_ctx *ctx) { ares_channel channel; if (ctx->channel == NULL) { DEBUG(1, ("Ares channel already destroyed?\n")); return -1; } /* Set ctx->channel to NULL first, so that callbacks that get * ARES_EDESTRUCTION won't retry. */ channel = ctx->channel; ctx->channel = NULL; ares_destroy(channel); return 0; } static int <API key>(struct resolv_ctx *ctx) { int ret; ares_channel new_channel; ares_channel old_channel; struct ares_options options; DEBUG(4, ("Initializing new c-ares channel\n")); /* FIXME: the options would contain * the nameservers to contact, the domains * to search... 
=> get from confdb */ options.sock_state_cb = fd_event; options.sock_state_cb_data = ctx; options.timeout = RESOLV_TIMEOUTMS; /* Only affects ares_gethostbyname */ options.lookups = discard_const("f"); options.tries = 1; ret = ares_init_options(&new_channel, &options, <API key> | ARES_OPT_TIMEOUTMS | ARES_OPT_LOOKUPS | ARES_OPT_TRIES); if (ret != ARES_SUCCESS) { DEBUG(1, ("Failed to initialize ares channel: %s\n", resolv_strerror(ret))); return return_code(ret); } old_channel = ctx->channel; ctx->channel = new_channel; if (old_channel != NULL) { DEBUG(4, ("Destroying the old c-ares channel\n")); ares_destroy(old_channel); } return EOK; } int resolv_init(TALLOC_CTX *mem_ctx, struct tevent_context *ev_ctx, int timeout, struct resolv_ctx **ctxp) { int ret; struct resolv_ctx *ctx; if (timeout < 1) { DEBUG(<API key>, ("The timeout is too short, DNS operations are going to fail. " "This is a bug outside unit tests\n")); } ctx = talloc_zero(mem_ctx, struct resolv_ctx); if (ctx == NULL) return ENOMEM; ctx->ev_ctx = ev_ctx; ctx->timeout = timeout; ret = <API key>(ctx); if (ret != EOK) { goto done; } <API key>(ctx, <API key>); *ctxp = ctx; return EOK; done: talloc_free(ctx); return ret; } void <API key>(struct resolv_ctx *ctx) { <API key>(ctx); } static errno_t resolv_copy_in_addr(TALLOC_CTX *mem_ctx, struct resolv_addr *ret, struct ares_addrttl *attl) { ret->ipaddr = talloc_array(mem_ctx, uint8_t, sizeof(struct in_addr)); if (!ret->ipaddr) return ENOMEM; memcpy(ret->ipaddr, &attl->ipaddr, sizeof(struct in_addr)); ret->ttl = attl->ttl; return EOK; } static errno_t <API key>(TALLOC_CTX *mem_ctx, struct resolv_addr *ret, struct ares_addr6ttl *a6ttl) { ret->ipaddr = talloc_array(mem_ctx, uint8_t, sizeof(struct in6_addr)); if (!ret->ipaddr) return ENOMEM; memcpy(ret->ipaddr, &a6ttl->ip6addr, sizeof(struct in6_addr)); ret->ttl = a6ttl->ttl; return EOK; } static struct resolv_hostent * <API key>(TALLOC_CTX *mem_ctx, struct hostent *src) { struct resolv_hostent *ret; int len; int 
i; ret = talloc_zero(mem_ctx, struct resolv_hostent); if (ret == NULL) { return NULL; } if (src->h_name != NULL) { ret->name = talloc_strdup(ret, src->h_name); if (ret->name == NULL) { goto fail; } } if (src->h_aliases != NULL) { for (len = 0; src->h_aliases[len] != NULL; len++); ret->aliases = talloc_array(ret, char *, len + 1); if (ret->aliases == NULL) { goto fail; } for (i = 0; i < len; i++) { ret->aliases[i] = talloc_strdup(ret->aliases, src->h_aliases[i]); if (ret->aliases[i] == NULL) { goto fail; } } ret->aliases[len] = NULL; } ret->family = src->h_addrtype; return ret; fail: talloc_free(ret); return NULL; } struct resolv_hostent * resolv_copy_hostent(TALLOC_CTX *mem_ctx, struct hostent *src) { struct resolv_hostent *ret; int len; int i; ret = <API key>(mem_ctx, src); if (ret == NULL) { return NULL; } if (src->h_addr_list != NULL) { for (len = 0; src->h_addr_list[len] != NULL; len++); ret->addr_list = talloc_array(ret, struct resolv_addr *, len + 1); if (ret->addr_list == NULL) { goto fail; } for (i = 0; i < len; i++) { ret->addr_list[i] = talloc_zero(ret->addr_list, struct resolv_addr); if (ret->addr_list[i] == NULL) { goto fail; } ret->addr_list[i]->ipaddr = talloc_memdup(ret->addr_list[i], src->h_addr_list[i], src->h_length); if (ret->addr_list[i]->ipaddr == NULL) { goto fail; } ret->addr_list[i]->ttl = RESOLV_DEFAULT_TTL; } ret->addr_list[len] = NULL; } return ret; fail: talloc_free(ret); return NULL; } struct resolv_hostent * <API key>(TALLOC_CTX *mem_ctx, struct hostent *src, int family, void *ares_ttl_data, int num_ares_ttl_data) { struct resolv_hostent *ret; errno_t cret; int i; ret = <API key>(mem_ctx, src); if (ret == NULL) { return NULL; } if (num_ares_ttl_data > 0) { ret->addr_list = talloc_array(ret, struct resolv_addr *, num_ares_ttl_data + 1); if (ret->addr_list == NULL) { goto fail; } for (i = 0; i < num_ares_ttl_data; i++) { ret->addr_list[i] = talloc_zero(ret->addr_list, struct resolv_addr); if (ret->addr_list[i] == NULL) { goto fail; } 
switch (family) { case AF_INET: cret = resolv_copy_in_addr(ret->addr_list, ret->addr_list[i], &((struct ares_addrttl *) ares_ttl_data)[i]); break; case AF_INET6: cret = <API key>(ret->addr_list, ret->addr_list[i], &((struct ares_addr6ttl *) ares_ttl_data)[i]); break; default: DEBUG(SSSDBG_CRIT_FAILURE, ("Unknown address family %d\n", family)); goto fail; } if (cret != EOK) { DEBUG(1, ("Could not copy address\n")); goto fail; } } ret->addr_list[num_ares_ttl_data] = NULL; } ret->family = family; return ret; fail: talloc_free(ret); return NULL; } struct <API key> { struct resolv_ctx *resolv_ctx; /* Part of the query. */ const char *name; int family; /* query result */ struct resolv_hostent *rhostent; /* returned by ares. */ int status; }; /* Fake up an async interface even though files would * always be blocking */ static struct tevent_req * <API key>(TALLOC_CTX *mem_ctx, struct tevent_context *ev, struct resolv_ctx *ctx, const char *name, int family) { struct tevent_req *req; struct <API key> *state; struct hostent *hostent = NULL; req = tevent_req_create(mem_ctx, &state, struct <API key>); if (req == NULL) { tevent_req_error(req, ENOMEM); goto done; } state->resolv_ctx = ctx; state->name = name; state->rhostent = NULL; state->family = family; DEBUG(4, ("Trying to resolve %s record of '%s' in files\n", state->family == AF_INET ? 
"A" : "AAAA", state->name)); state->status = <API key>(state->resolv_ctx->channel, state->name, state->family, &hostent); if (state->status == ARES_SUCCESS) { state->rhostent = resolv_copy_hostent(state, hostent); if (state->rhostent == NULL) { tevent_req_error(req, ENOMEM); goto done; } } else if (state->status == ARES_ENOTFOUND || state->status == ARES_ENODATA) { /* Just say we didn't find anything and let the caller decide * about retrying */ tevent_req_error(req, ENOENT); goto done; } else { tevent_req_error(req, return_code(state->status)); goto done; } tevent_req_done(req); done: if (hostent) ares_free_hostent(hostent); tevent_req_post(req, ev); return req; } static errno_t <API key>(struct tevent_req *req, TALLOC_CTX *mem_ctx, int *status, struct resolv_hostent **rhostent) { struct <API key> *state = tevent_req_data(req, struct <API key>); /* Fill in even in case of error as status contains the * c-ares return code */ if (status) { *status = state->status; } if (rhostent) { *rhostent = talloc_steal(mem_ctx, state->rhostent); } <API key>(req); return EOK; } struct <API key> { struct resolv_ctx *resolv_ctx; struct tevent_context *ev; /* Part of the query. */ const char *name; int family; /* query result */ struct resolv_hostent *rhostent; /* These are returned by ares. 
*/ int status; int timeouts; int retrying; }; static void <API key>(struct tevent_req *subreq); static void <API key>(struct tevent_req *req, struct <API key> *state); static void <API key>(void *arg, int status, int timeouts, unsigned char *abuf, int alen); static int <API key>(struct <API key> *state, int status, int timeouts, unsigned char *abuf, int alen); static struct tevent_req * <API key>(TALLOC_CTX *mem_ctx, struct tevent_context *ev, struct resolv_ctx *ctx, const char *name, int family) { struct tevent_req *req, *subreq; struct <API key> *state; struct timeval tv = { 0, 0 }; if (ctx->channel == NULL) { DEBUG(1, ("Invalid ares channel - this is likely a bug\n")); return NULL; } req = tevent_req_create(mem_ctx, &state, struct <API key>); if (req == NULL) { return NULL; } state->resolv_ctx = ctx; state->ev = ev; state->name = name; state->rhostent = NULL; state->status = 0; state->timeouts = 0; state->retrying = 0; state->family = family; /* We need to have a wrapper around ares async calls, because * they can in some cases call it's callback immediately. * This would not let our caller to set a callback for req. */ subreq = tevent_wakeup_send(req, ev, tv); if (subreq == NULL) { DEBUG(1, ("Failed to add critical timer to run next operation!\n")); talloc_zfree(req); return NULL; } <API key>(subreq, <API key>, req); return req; } static void <API key>(struct tevent_req *subreq) { struct tevent_req *req = <API key>(subreq, struct tevent_req); struct <API key> *state = tevent_req_data(req, struct <API key>); if (!tevent_wakeup_recv(subreq)) { tevent_req_error(req, EIO); return; } talloc_zfree(subreq); if (state->resolv_ctx->channel == NULL) { DEBUG(1, ("Invalid ares channel - this is likely a bug\n")); tevent_req_error(req, EIO); return; } <API key>(req, state); } static void <API key>(struct tevent_req *req, struct <API key> *state) { struct resolv_request *rreq; DEBUG(4, ("Trying to resolve %s record of '%s' in DNS\n", state->family == AF_INET ? 
"A" : "AAAA", state->name)); rreq = <API key>(state->ev, state->resolv_ctx, req); if (!rreq) { tevent_req_error(req, ENOMEM); return; } ares_search(state->resolv_ctx->channel, state->name, ns_c_in, (state->family == AF_INET) ? ns_t_a : ns_t_aaaa, <API key>, rreq); } static void <API key>(void *arg, int status, int timeouts, unsigned char *abuf, int alen) { errno_t ret; struct <API key> *state; struct resolv_request *rreq = talloc_get_type(arg, struct resolv_request); struct tevent_req *req; if (rreq->rwatch == NULL) { /* The tevent request was cancelled while the ares call was still in * progress so nobody cares about the result now. Quit. */ <API key>(rreq->ctx, rreq); return; } req = rreq->rwatch->req; <API key>(rreq->ctx, rreq); state = tevent_req_data(req, struct <API key>); state->status = status; state->timeouts = timeouts; /* If resolv.conf changed during processing of a request we might * destroy the old channel before the request has a chance to finish. * We must resend the request in this case */ if (state->retrying == 0 && status == ARES_EDESTRUCTION && state->resolv_ctx->channel != NULL) { state->retrying = 1; <API key>(req, state); return; } if (status == ARES_ENOTFOUND || status == ARES_ENODATA) { /* Just say we didn't find anything and let the caller decide * about retrying */ tevent_req_error(req, ENOENT); return; } if (status != ARES_SUCCESS) { /* Any other error indicates a server error, * so don't bother trying again */ tevent_req_error(req, return_code(status)); return; } ret = <API key>(state, status, timeouts, abuf, alen); if (ret != EOK) { tevent_req_error(req, ret); return; } tevent_req_done(req); } static int <API key>(struct <API key> *state, int status, int timeouts, unsigned char *abuf, int alen) { TALLOC_CTX *tmp_ctx; struct hostent *hostent; int naddrttls; errno_t ret; void *addr; tmp_ctx = talloc_new(NULL); if (!tmp_ctx) return ENOMEM; naddrttls = DNS_HEADER_ANCOUNT(abuf); switch (state->family) { case AF_INET: DEBUG(7, ("Parsing an A 
reply\n")); addr = talloc_array(state, struct ares_addrttl, naddrttls); if (!addr) { ret = ENOMEM; goto fail; } status = ares_parse_a_reply(abuf, alen, &hostent, (struct ares_addrttl *) addr, &naddrttls); break; case AF_INET6: DEBUG(7, ("Parsing an AAAA reply\n")); addr = talloc_array(state, struct ares_addr6ttl, naddrttls); if (!addr) { ret = ENOMEM; goto fail; } status = <API key>(abuf, alen, &hostent, (struct ares_addr6ttl *) addr, &naddrttls); break; default: DEBUG(1, ("Unknown family %d\n", state->family)); ret = EAFNOSUPPORT; goto fail; } if (hostent != NULL) { state->rhostent = <API key>(state, hostent, state->family, addr, naddrttls); ares_free_hostent(hostent); if (state->rhostent == NULL) { ret = ENOMEM; goto fail; } /* The address list is NULL. This is probably a bug in * c-ares, but we need to handle it gracefully. */ if (state->rhostent->addr_list == NULL) { talloc_zfree(state->rhostent); return ENOENT; } } talloc_free(tmp_ctx); return return_code(status); fail: talloc_free(tmp_ctx); return ret; } static int <API key>(struct tevent_req *req, TALLOC_CTX *mem_ctx, int *status, int *timeouts, struct resolv_hostent **rhostent) { struct <API key> *state = tevent_req_data(req, struct <API key>); /* Fill in even in case of error as status contains the * c-ares return code */ if (status) { *status = state->status; } if (timeouts) { *timeouts = state->timeouts; } <API key>(req); if (rhostent) { *rhostent = talloc_steal(mem_ctx, state->rhostent); } return EOK; } struct gethostbyname_state { struct resolv_ctx *resolv_ctx; struct tevent_context *ev; /* Part of the query. */ const char *name; int family; /* In which order to use IPv4, or v6 */ enum restrict_family family_order; /* Known hosts databases and index to the current one */ enum host_database *db; int dbi; /* These are returned by ares. The hostent struct will be freed * when the user callback returns. 
*/ struct resolv_hostent *rhostent; int status; int timeouts; int retrying; }; static errno_t <API key>(TALLOC_CTX *mem_ctx, const char *address, struct resolv_hostent **_rhostent); static inline int <API key>(enum restrict_family family_order); static bool resolv_is_address(const char *name); static errno_t <API key>(struct tevent_req *req); struct tevent_req * <API key>(TALLOC_CTX *mem_ctx, struct tevent_context *ev, struct resolv_ctx *ctx, const char *name, enum restrict_family family_order, enum host_database *db) { struct tevent_req *req; struct gethostbyname_state *state; errno_t ret; if (ctx->channel == NULL) { DEBUG(1, ("Invalid ares channel - this is likely a bug\n")); return NULL; } req = tevent_req_create(mem_ctx, &state, struct gethostbyname_state); if (req == NULL) { return NULL; } state->resolv_ctx = ctx; state->ev = ev; state->name = talloc_strdup(state, name); if (state->name == NULL) { DEBUG(SSSDBG_CRIT_FAILURE, ("talloc_strdup() failed\n")); goto fail; } state->rhostent = NULL; state->status = 0; state->timeouts = 0; state->retrying = 0; state->family_order = family_order; state->family = <API key>(state->family_order); state->db = db; state->dbi = 0; /* Do not attempt to resolve IP addresses */ if (resolv_is_address(state->name)) { ret = <API key>(state, state->name, &state->rhostent); if (ret != EOK) { DEBUG(1, ("Canot create a fake hostent structure\n")); goto fail; } tevent_req_done(req); tevent_req_post(req, ev); return req; } ret = <API key>(req); if (ret != EOK) { DEBUG(1, ("Cannot start the resolving\n")); goto fail; } return req; fail: talloc_zfree(req); return NULL; } static bool resolv_is_address(const char *name) { struct addrinfo hints; struct addrinfo *res = NULL; int ret; memset((void *) &hints, 0, sizeof(struct addrinfo)); hints.ai_family = AF_UNSPEC; hints.ai_flags = AI_NUMERICHOST; /* No network lookups */ ret = getaddrinfo(name, NULL, &hints, &res); freeaddrinfo(res); if (ret != 0) { if (ret == -2) { DEBUG(9, ("[%s] does not 
look like an IP address\n", name)); } else { DEBUG(2, ("getaddrinfo failed [%d]: %s\n", ret, gai_strerror(ret))); } } return ret == 0; } static errno_t <API key>(TALLOC_CTX *mem_ctx, const char *address, struct resolv_hostent **_rhostent) { struct resolv_hostent *rhostent; TALLOC_CTX *tmp_ctx; errno_t ret; int family; tmp_ctx = talloc_new(NULL); if (!tmp_ctx) return ENOMEM; rhostent = talloc_zero(tmp_ctx, struct resolv_hostent); if (!rhostent) { ret = ENOMEM; goto done; } rhostent->name = talloc_strdup(rhostent, address); rhostent->addr_list = talloc_array(rhostent, struct resolv_addr *, 2); if (!rhostent->name || !rhostent->addr_list) { ret = ENOMEM; goto done; } rhostent->addr_list[0] = talloc_zero(rhostent->addr_list, struct resolv_addr); if (!rhostent->addr_list[0]) { ret = ENOMEM; goto done; } rhostent->addr_list[0]->ipaddr = talloc_array(rhostent->addr_list[0], uint8_t, sizeof(struct in6_addr)); if (!rhostent->addr_list[0]->ipaddr) { ret = ENOMEM; goto done; } family = AF_INET; ret = inet_pton(family, address, rhostent->addr_list[0]->ipaddr); if (ret != 1) { family = AF_INET6; ret = inet_pton(family, address, rhostent->addr_list[0]->ipaddr); if (ret != 1) { DEBUG(1, ("Could not parse address as neither v4 nor v6\n")); ret = EINVAL; goto done; } } rhostent->addr_list[0]->ttl = RESOLV_DEFAULT_TTL; rhostent->addr_list[1] = NULL; rhostent->family = family; rhostent->aliases = NULL; *_rhostent = talloc_move(mem_ctx, &rhostent); ret = EOK; done: talloc_free(tmp_ctx); return ret; } static inline int <API key>(enum restrict_family family_order) { switch(family_order) { case IPV4_ONLY: case IPV4_FIRST: return AF_INET; case IPV6_ONLY: case IPV6_FIRST: return AF_INET6; } DEBUG(1, ("Unknown address family order %d\n", family_order)); return -1; } static int <API key>(struct gethostbyname_state *state) { if (state->family_order == IPV4_FIRST && state->family == AF_INET) { state->family = AF_INET6; return EOK; } else if (state->family_order == IPV6_FIRST && state->family 
== AF_INET6) { state->family = AF_INET; return EOK; } else { /* No more address families for this DB, check if * there is another DB to try */ DEBUG(5, ("No more address families to retry\n")); state->dbi++; if (state->db[state->dbi] != DB_SENTINEL) { state->family = <API key>( state->family_order); return EOK; } } DEBUG(4, ("No more hosts databases to retry\n")); return ENOENT; } static void <API key>(struct tevent_req *subreq); static errno_t <API key>(struct tevent_req *req) { struct gethostbyname_state *state = tevent_req_data(req, struct gethostbyname_state); struct tevent_req *subreq; switch(state->db[state->dbi]) { case DB_FILES: DEBUG(8, ("Querying files\n")); subreq = <API key>(state, state->ev, state->resolv_ctx, state->name, state->family); break; case DB_DNS: DEBUG(8, ("Querying DNS\n")); subreq = <API key>(state, state->ev, state->resolv_ctx, state->name, state->family); break; default: DEBUG(1, ("Invalid hosts database\n")); return EINVAL; } if (subreq == NULL) { return ENOMEM; } <API key>(subreq, <API key>, req); return EOK; } static void <API key>(struct tevent_req *subreq) { struct tevent_req *req = <API key>(subreq, struct tevent_req); struct gethostbyname_state *state = tevent_req_data(req, struct gethostbyname_state); errno_t ret; switch(state->db[state->dbi]) { case DB_FILES: ret = <API key>(subreq, state, &state->status, &state->rhostent); /* files is synchronous, there can be no timeouts */ state->timeouts = 0; break; case DB_DNS: ret = <API key>(subreq, state, &state->status, &state->timeouts, &state->rhostent); break; default: DEBUG(1, ("Invalid hosts database\n")); tevent_req_error(req, EINVAL); return; } talloc_zfree(subreq); if (ret == ENOENT) { ret = <API key>(state); if (ret == EOK) { ret = <API key>(req); if (ret != EOK) { tevent_req_error(req, ret); } return; } /* No more databases and/or address families */ tevent_req_error(req, ENOENT); return; } else if (ret == ETIMEDOUT) { /* In case we killed the request before c-ares answered 
*/ state->status = ARES_ETIMEOUT; } if (ret != EOK) { DEBUG(2, ("querying hosts database failed [%d]: %s\n", ret, strerror(ret))); tevent_req_error(req, ret); return; } tevent_req_done(req); } int <API key>(struct tevent_req *req, TALLOC_CTX *mem_ctx, int *status, int *timeouts, struct resolv_hostent **rhostent) { struct gethostbyname_state *state = tevent_req_data(req, struct gethostbyname_state); /* Fill in even in case of error as status contains the * c-ares return code */ if (status) { *status = state->status; } if (timeouts) { *timeouts = state->timeouts; } if (rhostent) { *rhostent = talloc_steal(mem_ctx, state->rhostent); } <API key>(req); return EOK; } char * <API key>(TALLOC_CTX *mem_ctx, struct resolv_hostent *hostent, unsigned int addrindex) { char *address; if (!hostent) return NULL; address = talloc_zero_size(mem_ctx, 128); if (address == NULL) { DEBUG(1, ("talloc_zero failed.\n")); return NULL; } errno = 0; if (inet_ntop(hostent->family, hostent->addr_list[addrindex]->ipaddr, address, 128) == NULL) { DEBUG(1, ("inet_ntop failed [%d][%s].\n", errno, strerror(errno))); talloc_free(address); return NULL; } return address; } char * <API key>(TALLOC_CTX *mem_ctx, int family, uint8_t *address) { char *straddr; if (family == AF_INET6) { int i; char hexbyte[3]; straddr = talloc_strdup(mem_ctx, "\0"); if (!straddr) { return NULL; } for (i = 15; i >= 0; i snprintf(hexbyte, 3, "%02x", address[i]); straddr = <API key>(straddr, "%c.%c.", hexbyte[1], hexbyte[0]); } straddr = <API key>(straddr, "ip6.arpa."); } else if (family == AF_INET) { straddr = talloc_asprintf(mem_ctx, "%u.%u.%u.%u.in-addr.arpa.", (address[3]), (address[2]), (address[1]), (address[0])); } else { DEBUG(SSSDBG_CRIT_FAILURE, ("Unknown address family\n")); return NULL; } return straddr; } struct sockaddr_storage * <API key>(TALLOC_CTX *mem_ctx, struct resolv_hostent *hostent, int port, int addrindex) { struct sockaddr_storage *sockaddr; if (!hostent) return NULL; sockaddr = talloc_zero(mem_ctx, 
struct sockaddr_storage); if (sockaddr == NULL) { DEBUG(1, ("talloc_zero failed.\n")); return NULL; } switch(hostent->family) { case AF_INET: sockaddr->ss_family = AF_INET; memcpy(&((struct sockaddr_in *) sockaddr)->sin_addr, hostent->addr_list[addrindex]->ipaddr, sizeof(struct in_addr)); ((struct sockaddr_in *) sockaddr)->sin_port = (in_port_t) htons(port); break; case AF_INET6: sockaddr->ss_family = AF_INET6; memcpy(&((struct sockaddr_in6 *) sockaddr)->sin6_addr, hostent->addr_list[addrindex]->ipaddr, sizeof(struct in6_addr)); ((struct sockaddr_in6 *) sockaddr)->sin6_port = (in_port_t) htons(port); break; default: DEBUG(SSSDBG_CRIT_FAILURE, ("Unknown address family %d\n", hostent->family)); return NULL; } return sockaddr; } /* * A simple helper function that will take an array of struct ares_srv_reply that * was allocated by malloc() in c-ares and copies it using talloc. The old one * is freed and the talloc one is put into 'reply_list' instead. */ static int <API key>(TALLOC_CTX *mem_ctx, struct ares_srv_reply **reply_list) { struct ares_srv_reply *ptr = NULL; struct ares_srv_reply *new_list = NULL; struct ares_srv_reply *old_list = *reply_list; /* Nothing to do, but not an error */ if (!old_list) { return EOK; } /* Copy the linked list */ while (old_list) { /* Special case for the first node */ if (!new_list) { new_list = talloc_zero(mem_ctx, struct ares_srv_reply); if (new_list == NULL) { ares_free_data(*reply_list); return ENOMEM; } ptr = new_list; } else { ptr->next = talloc_zero(new_list, struct ares_srv_reply); if (ptr->next == NULL) { ares_free_data(*reply_list); talloc_free(new_list); return ENOMEM; } ptr = ptr->next; } ptr->weight = old_list->weight; ptr->priority = old_list->priority; ptr->port = old_list->port; ptr->host = talloc_strdup(ptr, old_list->host); if (ptr->host == NULL) { ares_free_data(*reply_list); talloc_free(new_list); return ENOMEM; } old_list = old_list->next; } /* Free the old one (uses malloc). 
*/ ares_free_data(*reply_list); /* And now put our own new_list in place. */ *reply_list = new_list; return EOK; } struct getsrv_state { struct tevent_context *ev; struct resolv_ctx *resolv_ctx; /* the SRV query - for example _ldap._tcp.example.com */ const char *query; /* parsed data returned by ares */ struct ares_srv_reply *reply_list; int status; int timeouts; int retrying; }; static void ares_getsrv_wakeup(struct tevent_req *subreq); static void resolv_getsrv_query(struct tevent_req *req, struct getsrv_state *state); struct tevent_req * resolv_getsrv_send(TALLOC_CTX *mem_ctx, struct tevent_context *ev, struct resolv_ctx *ctx, const char *query) { struct tevent_req *req, *subreq; struct getsrv_state *state; struct timeval tv = { 0, 0 }; DEBUG(4, ("Trying to resolve SRV record of '%s'\n", query)); if (ctx->channel == NULL) { DEBUG(1, ("Invalid ares channel - this is likely a bug\n")); return NULL; } req = tevent_req_create(mem_ctx, &state, struct getsrv_state); if (req == NULL) return NULL; state->resolv_ctx = ctx; state->query = query; state->reply_list = NULL; state->status = 0; state->timeouts = 0; state->retrying = 0; state->ev = ev; subreq = tevent_wakeup_send(req, ev, tv); if (subreq == NULL) { DEBUG(1, ("Failed to add critical timer to run next operation!\n")); talloc_zfree(req); return NULL; } <API key>(subreq, ares_getsrv_wakeup, req); return req; } static void resolv_getsrv_done(void *arg, int status, int timeouts, unsigned char *abuf, int alen) { struct resolv_request *rreq = talloc_get_type(arg, struct resolv_request); struct tevent_req *req; struct getsrv_state *state; int ret; struct ares_srv_reply *reply_list; if (rreq->rwatch == NULL) { /* The tevent request was cancelled while the ares call was still in * progress so nobody cares about the result now. Quit. 
*/ <API key>(rreq->ctx, rreq); return; } req = rreq->rwatch->req; <API key>(rreq->ctx, rreq); state = tevent_req_data(req, struct getsrv_state); if (state->retrying == 0 && status == ARES_EDESTRUCTION && state->resolv_ctx->channel != NULL) { state->retrying = 1; resolv_getsrv_query(req, state); return; } state->status = status; state->timeouts = timeouts; if (status != ARES_SUCCESS) { ret = return_code(status); goto fail; } ret = <API key>(abuf, alen, &reply_list); if (ret != ARES_SUCCESS) { DEBUG(2, ("SRV record parsing failed: %d: %s\n", ret, ares_strerror(ret))); ret = return_code(ret); goto fail; } ret = <API key>(req, &reply_list); if (ret != EOK) { goto fail; } state->reply_list = reply_list; tevent_req_done(req); return; fail: state->reply_list = NULL; tevent_req_error(req, ret); } int resolv_getsrv_recv(TALLOC_CTX *mem_ctx, struct tevent_req *req, int *status, int *timeouts, struct ares_srv_reply **reply_list) { struct getsrv_state *state = tevent_req_data(req, struct getsrv_state); if (status) *status = state->status; if (timeouts) *timeouts = state->timeouts; if (reply_list) *reply_list = talloc_steal(mem_ctx, state->reply_list); <API key>(req); return EOK; } static void ares_getsrv_wakeup(struct tevent_req *subreq) { struct tevent_req *req = <API key>(subreq, struct tevent_req); struct getsrv_state *state = tevent_req_data(req, struct getsrv_state); if (!tevent_wakeup_recv(subreq)) { return; } talloc_zfree(subreq); if (state->resolv_ctx->channel == NULL) { DEBUG(1, ("Invalid ares channel - this is likely a bug\n")); tevent_req_error(req, EIO); return; } return resolv_getsrv_query(req, state); } static void resolv_getsrv_query(struct tevent_req *req, struct getsrv_state *state) { struct resolv_request *rreq; rreq = <API key>(state->ev, state->resolv_ctx, req); if (!rreq) { tevent_req_error(req, ENOMEM); return; } ares_query(state->resolv_ctx->channel, state->query, ns_c_in, ns_t_srv, resolv_getsrv_done, rreq); } /* TXT parsing is not used anywhere in the 
code yet, so we disable it * for now */ #ifdef BUILD_TXT /* * A simple helper function that will take an array of struct txt_reply that * was allocated by malloc() in c-ares and copies it using talloc. The old one * is freed and the talloc one is put into 'reply_list' instead. */ static int <API key>(TALLOC_CTX *mem_ctx, struct ares_txt_reply **reply_list) { struct ares_txt_reply *ptr = NULL; struct ares_txt_reply *new_list = NULL; struct ares_txt_reply *old_list = *reply_list; /* Nothing to do, but not an error */ if (!old_list) { return EOK; } /* Copy the linked list */ while (old_list) { /* Special case for the first node */ if (!new_list) { new_list = talloc_zero(mem_ctx, struct ares_txt_reply); if (new_list == NULL) { ares_free_data(*reply_list); talloc_free(new_list); return ENOMEM; } ptr = new_list; } else { ptr->next = talloc_zero(new_list, struct ares_txt_reply); if (ptr->next == NULL) { ares_free_data(*reply_list); talloc_free(new_list); return ENOMEM; } ptr = ptr->next; } ptr->length = old_list->length; ptr->txt = talloc_memdup(ptr, old_list->txt, old_list->length); if (ptr->txt == NULL) { ares_free_data(*reply_list); talloc_free(new_list); return ENOMEM; } old_list = old_list->next; } ares_free_data(*reply_list); /* And now put our own new_list in place. 
*/ *reply_list = new_list; return EOK; } struct gettxt_state { struct tevent_context *ev; struct resolv_ctx *resolv_ctx; /* the TXT query */ const char *query; /* parsed data returned by ares */ struct ares_txt_reply *reply_list; int status; int timeouts; int retrying; }; static void ares_gettxt_wakeup(struct tevent_req *subreq); static void resolv_gettxt_query(struct tevent_req *req, struct gettxt_state *state); struct tevent_req * resolv_gettxt_send(TALLOC_CTX *mem_ctx, struct tevent_context *ev, struct resolv_ctx *ctx, const char *query) { struct tevent_req *req, *subreq; struct gettxt_state *state; struct timeval tv = { 0, 0 }; DEBUG(4, ("Trying to resolve TXT record of '%s'\n", query)); if (ctx->channel == NULL) { DEBUG(1, ("Invalid ares channel - this is likely a bug\n")); return NULL; } req = tevent_req_create(mem_ctx, &state, struct gettxt_state); if (req == NULL) return NULL; state->resolv_ctx = ctx; state->query = query; state->reply_list = NULL; state->status = 0; state->timeouts = 0; state->retrying = 0; state->ev = ev; subreq = tevent_wakeup_send(req, ev, tv); if (subreq == NULL) { DEBUG(1, ("Failed to add critical timer to run next operation!\n")); talloc_zfree(req); return NULL; } <API key>(subreq, ares_gettxt_wakeup, req); return req; } static void resolv_gettxt_done(void *arg, int status, int timeouts, unsigned char *abuf, int alen) { struct resolv_request *rreq = talloc_get_type(arg, struct resolv_request); struct tevent_req *req; struct gettxt_state *state; int ret; struct ares_txt_reply *reply_list; if (rreq->rwatch == NULL) { /* The tevent request was cancelled while the ares call was still in * progress so nobody cares about the result now. Quit. 
*/ <API key>(rreq->ctx, rreq); return; } req = rreq->rwatch->req; <API key>(rreq->ctx, rreq); state = tevent_req_data(req, struct gettxt_state); if (state->retrying == 0 && status == ARES_EDESTRUCTION && state->resolv_ctx->channel != NULL) { state->retrying = 1; ares_query(state->resolv_ctx->channel, state->query, ns_c_in, ns_t_txt, resolv_gettxt_done, req); return; } state->status = status; state->timeouts = timeouts; if (status != ARES_SUCCESS) { ret = return_code(status); goto fail; } ret = <API key>(abuf, alen, &reply_list); if (status != ARES_SUCCESS) { DEBUG(2, ("TXT record parsing failed: %d: %s\n", ret, ares_strerror(ret))); ret = return_code(ret); goto fail; } ret = <API key>(req, &reply_list); if (ret != EOK) { goto fail; } state->reply_list = reply_list; tevent_req_done(req); return; fail: state->reply_list = NULL; tevent_req_error(req, ret); } int resolv_gettxt_recv(TALLOC_CTX *mem_ctx, struct tevent_req *req, int *status, int *timeouts, struct ares_txt_reply **reply_list) { struct gettxt_state *state = tevent_req_data(req, struct gettxt_state); if (status) *status = state->status; if (timeouts) *timeouts = state->timeouts; if (reply_list) *reply_list = talloc_steal(mem_ctx, state->reply_list); <API key>(req); return EOK; } static void ares_gettxt_wakeup(struct tevent_req *subreq) { struct tevent_req *req = <API key>(subreq, struct tevent_req); struct gettxt_state *state = tevent_req_data(req, struct gettxt_state); if (!tevent_wakeup_recv(subreq)) { return; } talloc_zfree(subreq); if (state->resolv_ctx->channel == NULL) { DEBUG(1, ("Invalid ares channel - this is likely a bug\n")); tevent_req_error(req, EIO); return; } return resolv_gettxt_query(req, state); } static void resolv_gettxt_query(struct tevent_req *req, struct gettxt_state *state) { struct resolv_request *rreq; rreq = <API key>(state->ev, state->resolv_ctx, req); if (!rreq) { tevent_req_error(req, ENOMEM); return; } ares_query(state->resolv_ctx->channel, state->query, ns_c_in, ns_t_txt, 
resolv_gettxt_done, rreq); } #endif static struct ares_srv_reply *split_reply_list(struct ares_srv_reply *list) { struct ares_srv_reply *single_step, *double_step, *prev; if (!list) { return NULL; } prev = list; single_step = list->next; double_step = single_step->next; while (double_step && double_step->next) { prev = single_step; single_step = single_step->next; double_step = double_step->next->next; } prev->next = NULL; return single_step; } static struct ares_srv_reply *merge_reply_list(struct ares_srv_reply *left, struct ares_srv_reply *right) { struct ares_srv_reply *l, *r; struct ares_srv_reply *res, *res_start; if (!left) return right; if (!right) return left; if (left->priority < right->priority) { res_start = left; l = left->next; r = right; } else { res_start = right; l = left; r = right->next; } res = res_start; while(l && r) { if (l->priority < r->priority) { res->next = l; res = l; l = l->next; } else { res->next = r; res = r; r = r->next; } } res->next = l ? l : r; return res_start; } /** * sort linked list of struct ares_srv_reply by priority using merge sort. * * Merge sort is ideal for sorting linked lists as there is no problem * with absence of random access into the list. 
The complexity is O(n log n) * * For reference, see Robert Sedgewick's "Algorithms in C", Addison-Wesley, * ISBN 0-201-51425 */ static struct ares_srv_reply *reply_priority_sort(struct ares_srv_reply *list) { struct ares_srv_reply *half; if (!list || !list->next) return list; half = split_reply_list(list); list = merge_reply_list(reply_priority_sort(list), reply_priority_sort(half)); return list; } static int <API key>(TALLOC_CTX *mem_ctx, int len, struct ares_srv_reply **start, struct ares_srv_reply **end) { int i; int total, selected; int *totals; struct ares_srv_reply *r, *prev, *tmp; struct ares_srv_reply *new_start = NULL; struct ares_srv_reply *new_end = NULL; if (len <= 1) { return EOK; } totals = talloc_array(mem_ctx, int, len); if (!totals) { return ENOMEM; } srand(time(NULL) * getpid()); /* promote all servers with weight==0 to the top */ r = *(start); prev = NULL; while (r != NULL) { if (r->weight == 0) { /* remove from the old list */ if (prev) { prev->next = r->next; } else { *start = r->next; } /* add to the head of the new list */ tmp = r; r = r->next; tmp->next = *start; *start = tmp; } else { prev = r; r = r->next; } } *end = prev ? prev : *start; while (*start != NULL) { /* Commpute the sum of the weights of those RRs, and with each RR * associate the running sum in the selected order. */ total = 0; memset(totals, -1, sizeof(int) * len); for (i = 0, r = *start; r != NULL; r=r->next, ++i) { totals[i] = r->weight + total; total = totals[i]; } /* choose a uniform random number between 0 and the sum computed * (inclusive), and select the RR whose running sum value is the * first in the selected order which is greater than or equal to * the random number selected. 
*/ selected = (int)((total + 1) * (rand()/(RAND_MAX + 1.0))); for (i = 0, r = *start, prev = NULL; r != NULL; r=r->next, ++i) { if (totals[i] >= selected) break; prev = r; } if (r == NULL || totals[i] == -1) { DEBUG(1, ("Bug: did not select any server!\n")); return EIO; } /* remove r from the old list */ if (prev) { prev->next = r->next; } else { *start = r->next; } /* add r to the end of the new list */ if (!new_start) { new_start = r; new_end = r; } else { new_end->next = r; new_end = r; } } new_end->next = NULL; /* return the rearranged list */ *start = new_start; *end = new_end; talloc_free(totals); return EOK; } int <API key>(TALLOC_CTX *mem_ctx, struct ares_srv_reply **reply) { int ret; struct ares_srv_reply *pri_start, *pri_end, *next, *prev_end; int len; /* RFC 2782 says: If there is precisely one SRV RR, and its Target is "." * (the root domain), abort. */ if (*reply && !(*reply)->next && strcmp((*reply)->host, ".") == 0) { DEBUG(1, ("DNS returned only the root domain, aborting\n")); return EIO; } /* sort the list by priority */ *reply = reply_priority_sort(*reply); pri_start = *reply; prev_end = NULL; while (pri_start) { pri_end = pri_start; /* Find nodes with the same priority */ len = 1; while (pri_end->next && pri_end->priority == pri_end->next->priority) { pri_end = pri_end->next; len++; } /* rearrange each priority level according to the weight field */ next = pri_end->next; pri_end->next = NULL; ret = <API key>(mem_ctx, len, &pri_start, &pri_end); if (ret) { DEBUG(1, ("Error rearranging priority level [%d]: %s\n", ret, strerror(ret))); return ret; } /* Hook the level back into the list */ if (prev_end) { prev_end->next = pri_start; } else { *reply = pri_start; } pri_end->next = next; /* Move on to the next level */ prev_end = pri_end; pri_start = next; } return EOK; }
// Purpose: #ifndef DECALS_H #define DECALS_H #ifdef _WIN32 #pragma once #endif #define CHAR_TEX_CONCRETE 'C' #define CHAR_TEX_METAL 'M' #define CHAR_TEX_DIRT 'D' #define CHAR_TEX_VENT 'V' #define CHAR_TEX_GRATE 'G' #define CHAR_TEX_TILE 'T' #define CHAR_TEX_SLOSH 'S' #define CHAR_TEX_WOOD 'W' #define CHAR_TEX_COMPUTER 'P' #define CHAR_TEX_GLASS 'Y' #define CHAR_TEX_FLESH 'F' #define <API key> 'B' #define CHAR_TEX_CLIP 'I' #define CHAR_TEX_ANTLION 'A' #define CHAR_TEX_ALIENFLESH 'H' #define CHAR_TEX_FOLIAGE 'O' class IDecalEmitterSystem { public: virtual int <API key>( char const *decalname ) = 0; virtual char const *<API key>( char const *decalName, unsigned char gamematerial ) = 0; }; extern IDecalEmitterSystem *decalsystem; #endif // DECALS_H
package com.joslittho.popmov.adapter.trailer;

import android.content.Context;
import android.databinding.DataBindingUtil;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.ViewGroup;

import com.joslittho.popmov.R;
import com.joslittho.popmov.data.model.trailers.Result;
import com.joslittho.popmov.databinding.TrailerItemBinding;

import java.util.List;

/**
 * Adapter to populate the trailers {@link RecyclerView}.
 *
 * Each row is a data-bound {@code trailer_item} layout showing one
 * trailer's title.  The handler passed in at construction time receives
 * click events from the view holders.
 */
// begin class TrailerAdapter
public class TrailerAdapter extends RecyclerView.Adapter< TrailerViewHolder > {

    /* VARIABLES */

    /* Contexts */

    private Context mContext; // used to inflate row layouts

    /* Lists */

    private List< Result > mTrailers; // backing data; may be null (see getItemCount)

    /* <API key> */

    // Click handler the view holders notify.  NOTE(review): the type and
    // field name are redacted in this copy — confirm in the original.
    public <API key> <API key>;

    /* CONSTRUCTOR */

    // begin default constructor
    /**
     * @param context  context for layout inflation
     * @param trailers trailers to display; may be null
     * @param handler  receiver of trailer click events
     */
    public TrailerAdapter( Context context, List< Result > trailers,
                           <API key> handler ) {

        // 0. initialize members

        mContext = context;
        mTrailers = trailers;
        <API key> = handler;

    } // end default constructor

    /* METHODS */

    /* Overrides */

    @Override
    // begin onCreateViewHolder
    public TrailerViewHolder onCreateViewHolder( ViewGroup parent, int viewType ) {

        // 0. inflate the correct layout via data binding
        TrailerItemBinding binding = DataBindingUtil.inflate(
                LayoutInflater.from( mContext ), R.layout.trailer_item, parent,
                false );

        // last. return the inflated layout in a view holder
        return new TrailerViewHolder( binding, this );

    } // end onCreateViewHolder

    @Override
    // begin onBindViewHolder
    public void onBindViewHolder( TrailerViewHolder holder, int position ) {

        // 0. read the trailer title from the trailers list
        String currentTrailerTitle = mTrailers.get( position ).getName();

        // 1. display the trailer title
        // NOTE(review): the view holder field name is redacted in this copy
        holder.<API key>.setText( currentTrailerTitle );

    } // end onBindViewHolder

    @Override
    // getItemCount
    public int getItemCount() {
        // null guard so RecyclerView sees an empty adapter before data arrives
        return mTrailers != null ? mTrailers.size() : 0;
    }

    /* Other Methods */

} // end class TrailerAdapter
<?php /** * Admin Functionality * - this file is included only if is_admin() * * @package McBoots */ add_filter( 'admin_init', function () { // block access to /wp-admin if current user can't edit if ( !current_user_can( 'edit_posts' ) && !( defined( 'DOING_AJAX' ) && DOING_AJAX ) ) { wp_redirect( '/' ); exit; } });
package ase2016.introclass.median; public class <API key> { public <API key>() { } public int median( int a, int b, int c ) { int small, big, median; if (a > b) { //mutGenLimit 1 small = b; //mutGenLimit 1 big = a; //mutGenLimit 1 } else { big = b; //mutGenLimit 1 small = b; //mutGenLimit 1 } if (c > big) { //mutGenLimit 1 median = big; //mutGenLimit 1 } else if (c < small) { //mutGenLimit 1 median = small; //mutGenLimit 1 } else { median = c; //mutGenLimit 1 } return median; } }
module Murlsh class UrlResultSet def initialize(query, page, per_page, filters={}) @query, @page, @per_page, @filters = query, page, per_page, filters @order = 'time DESC' end def search_conditions @search_conditions ||= SearchConditions.new(query).conditions end def conditions @conditions ||= if filters[:content_type] result = if filters[:content_type].is_a?(String) ['content_type = ?', filters[:content_type]] else ['content_type IN (?)', filters[:content_type]] end unless search_conditions.empty? result[0] << " AND (#{search_conditions[0]})" result.push(*search_conditions[1..-1]) end result else search_conditions end end def total_entries @total_entries ||= Url.count(:conditions => conditions) end def total_pages @total_pages ||= [(total_entries / per_page.to_f).ceil, 1].max end def offset; @offset ||= (page - 1) * per_page; end def results Url.all(:conditions => conditions, :order => order, :limit => per_page, :offset => offset) end def prev_page unless <API key>?(:@prev_page) @prev_page = page - 1 if (2..total_pages) === page end @prev_page end def next_page unless <API key>?(:@next_page) @next_page = page + 1 if page < total_pages end @next_page end attr_reader :query attr_reader :page attr_reader :per_page attr_reader :filters attr_reader :order end end
package com.zhilianxinke.schoolinhand.modules.autoupdate; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import java.io.InputStream; import java.util.HashMap; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.<API key>; public class ParseXmlService { public HashMap<String, String> parseXml(InputStream inStream) throws Exception { HashMap<String, String> hashMap = new HashMap<String, String>(); <API key> factory = <API key>.newInstance(); DocumentBuilder builder = factory.newDocumentBuilder(); Document document = builder.parse(inStream); //XML Element root = document.getDocumentElement(); NodeList childNodes = root.getChildNodes(); for (int j = 0; j < childNodes.getLength(); j++) { Node childNode = (Node) childNodes.item(j); if (childNode.getNodeType() == Node.ELEMENT_NODE) { Element childElement = (Element) childNode; if ("version".equals(childElement.getNodeName())) { hashMap.put("version",childElement.getFirstChild().getNodeValue()); } else if (("name".equals(childElement.getNodeName()))) { hashMap.put("name",childElement.getFirstChild().getNodeValue()); } else if (("url".equals(childElement.getNodeName()))) { hashMap.put("url",childElement.getFirstChild().getNodeValue()); } } } return hashMap; } }
package ua.sgkhmja.wboard.repository; import ua.sgkhmja.wboard.domain.<API key>; import org.springframework.data.jpa.repository.JpaRepository; import java.util.List; import java.util.Set; /** * Spring Data JPA repository for the Social User Connection entity. */ public interface <API key> extends JpaRepository<<API key>, Long> { List<<API key>> <API key>(String providerId, String providerUserId); List<<API key>> <API key>(String providerId, Set<String> providerUserIds); List<<API key>> <API key>(String userId); List<<API key>> <API key>(String userId, String providerId); List<<API key>> <API key>(String userId, String providerId, List<String> provideUserId); <API key> <API key>(String userId, String providerId, String providerUserId); void <API key>(String userId, String providerId); void <API key>(String userId, String providerId, String providerUserId); }