text stringlengths 1 1.05M |
|---|
def add(nums):
    """Combine a sequence of decimal digits into the integer they spell.

    e.g. ``add([1, 2, 3])`` -> ``123``.  An empty sequence yields ``0``.

    :param nums: digits (most significant first)
    :return: the integer formed by concatenating the digits
    """
    result = 0
    # Horner's rule: shift the accumulated value one decimal place per
    # digit, so no separate power-of-ten counter is needed.
    for digit in nums:
        result = result * 10 + digit
    return result
import datetime
from decimal import Decimal
from inspect import signature
from typing import (Callable, Dict, Iterable, Iterator, # noqa: W0611
Optional, Set)
import dateutil
import holidays
class Rule():
    """
    Defines a situation in which an employee might receive extra pay, e.g.
    "on 24th of december, pay 50% more".

    :param slug: a machine and human-ish readable name for this rule (see below).
    :param description: a human readable short-form description
    :param impl: actual matching function, accepting one, two or three parameters.
    :param multiply: height of the bonus, as a factor. Supplying `0.25` results in a pay increase of 25%.
    :param add: height of the bonus, as an absolute currency value. Either ``multiply`` or ``add`` must be given, but not both.

    The actual logic of a rule is passed in via the ``impl`` parameter. This
    function must accept 1-3 arguments: ``minute``, ``start`` and ``holidays``.
    Refer to :meth:`match` for the meaning of those parameters.
    """
    def __init__(self, slug: str, description: str, impl: Callable[..., bool], *, multiply: Optional[Decimal] = None, add: Optional[Decimal] = None, tests=None) -> None:
        self._slug = slug
        self._description = description
        self._impl = impl
        self._multiply = multiply
        self._add = add
        # BUGFIX: the old default (tests=[]) was a shared mutable default
        # argument, so all instances created without explicit tests aliased
        # the very same list. Use None as sentinel, fresh list per instance.
        self._tests = tests if tests is not None else []
        if multiply is not None and add is None:
            assert isinstance(multiply, Decimal)
            assert multiply > 0
        elif add is not None and multiply is None:
            assert isinstance(add, Decimal)
            assert add > 0
        else:
            assert False, "provide either multiply or add, but not both."
        assert len(slug) > 0
        assert len(description) > 0
        assert 1 <= len(self._impl_parameters) <= 3

    @property
    def _impl_parameters(self):
        # Parameters accepted by the matching function; the arity decides
        # how match() invokes it.
        return signature(self._impl).parameters

    @property
    def _bonus(self):
        # ("multiply"|"add", amount) pair. __init__ guarantees exactly one
        # of the two is set and that it is > 0, so truthiness is safe here.
        if self._multiply:
            return ('multiply', self._multiply)
        if self._add:
            return ('add', self._add)
        assert False

    def _parse_test_time(self, tt):
        # Parse a (possibly partial) time string used by self-tests, filling
        # missing components from a fixed reference date.
        default = datetime.datetime(2018, 1, 10)
        return dateutil.parser.parse(tt, default=default) if tt else default

    def _examples(self):
        # Yield [minute, start, expected] triples from self._tests entries,
        # each of the form ("start~minute", expected).
        for t in self._tests:
            start, minute = t[0].split('~')
            minute = self._parse_test_time(minute)
            start = self._parse_test_time(start)
            if minute.time() < start.time():
                # minute is "before" start on the clock: shift crossed midnight
                minute = minute + datetime.timedelta(days=1)
            yield [
                minute,
                start,
                t[1],
            ]

    def match(self, minute: datetime.datetime, start: datetime.datetime, holidays: holidays.HolidayBase) -> bool:
        """
        For matching, a shift must be split into its individual minutes. Each of
        these minutes is then passed into this method. Additionally the very first
        minute is provided, to enable rules like (worked after midnight, but
        started before).

        >>> from decimal import Decimal
        >>> import datetime as DT
        >>> from libestg3b.rule import Rule
        >>> m = Rule("NIGHT", "Nachtarbeit", lambda m, f: m.hour >= 20, multiply=Decimal(2))

        # Shift started at 2018-02-02 21:00 and this is the first minute: match!
        >>> m.match(DT.datetime(2018, 2, 2, 21), DT.datetime(2018, 2, 2, 21), None)
        True

        # Shift started at 2018-02-02 20:00 and 21:00 is checked: match!
        >>> m.match(DT.datetime(2018, 2, 2, 21), DT.datetime(2018, 2, 2, 20), None)
        True

        # Shift started at 2018-02-02 18:00 and 19:00 is checked: no match
        >>> m.match(DT.datetime(2018, 2, 2, 19), DT.datetime(2018, 2, 2, 18), None)
        False

        # Shift started at 2018-02-02 23:00 and 01:00 on the following day is checked
        # even though the start of this shift is within the timeframe "after 21:00",
        # the checked minute is not, so we don't match.
        >>> m.match(DT.datetime(2018, 2, 3, 1), DT.datetime(2018, 2, 2, 23), None)
        False

        :param minute: current minute to be matched
        :param start: very first minute in this shift
        :param holidays: holidays in the currently active country (see `python-holidays <https://github.com/dr-prodigy/python-holidays>`_)
        """
        narg = len(self._impl_parameters)
        if narg == 1:
            r = self._impl(minute)
        elif narg == 2:
            r = self._impl(minute, start)
        elif narg == 3:
            r = self._impl(minute, start, holidays)
        else:
            # __init__ asserts 1 <= narg <= 3, but asserts are stripped
            # under "python -O"; fail loudly instead of UnboundLocalError.
            raise TypeError(f'impl must accept 1-3 parameters, not {narg}')
        assert isinstance(r, bool)
        return r

    def __repr__(self):
        return f'<Rule: {self._slug} {self._description}>'

    def __hash__(self):
        return hash(self._slug)

    def __eq__(self, other):
        # BUGFIX: comparing against a non-Rule used to raise AttributeError;
        # NotImplemented lets Python fall back to its default handling.
        if not isinstance(other, Rule):
            return NotImplemented
        return self._slug == other._slug

    def __gt__(self, other):
        if self._bonus[0] != other._bonus[0]:
            raise Exception("cannot compare multiply to add rules")
        return self._bonus > other._bonus

    def __lt__(self, other):
        if self._bonus[0] != other._bonus[0]:
            raise Exception("cannot compare multiply to add rules")
        return self._bonus < other._bonus
class DayRule(Rule):
    """
    Match, if the given minute is within the given day. This can be useful to
    increase pay on days, which are not official holidays, but still get a
    special treatment in the law (for example: 31th of December in Germany).

    >>> from decimal import Decimal
    >>> import datetime as DT
    >>> from libestg3b.rule import DayRule
    >>> m = DayRule("Helloween", 10, 31, multiply=Decimal("2"))
    >>> m
    <Rule: Helloween YYYY-10-31>
    >>> m.match(DT.datetime(2018, 10, 31, 13), DT.datetime(2018, 10, 31, 12), None)
    True
    >>> m.match(DT.datetime(2018, 10, 30, 13), DT.datetime(2018, 10, 30, 12), None)
    False

    :param slug: machine-readable name of this rule, see :class:`Rule`
    :param month: only match, if shift is within this month, counted from 1 = January
    :param day: only match, if shift is on this day, counted from 1

    Additionally all keyword arguments defined for :class:`Rule` can be used.
    """
    def __init__(self, slug: str, month: int, day: int, **kwargs) -> None:
        # A named single-argument predicate (arity 1, just like the
        # original lambda) closing over the configured month/day.
        def _is_on_day(m):
            return m.month == month and m.day == day

        super().__init__(
            slug,
            f'YYYY-{month:02d}-{day:02d}',
            _is_on_day,
            **kwargs,
        )
class DayTimeRule(Rule):
    """
    Like :class:`DayRule`, but additionally require the shift to be after a certain time.

    >>> from decimal import Decimal
    >>> import datetime as DT
    >>> from libestg3b.rule import DayTimeRule
    >>> m = DayTimeRule("NEWYEARSEVE", 12, 31, 14, multiply=Decimal("1"))
    >>> m
    <Rule: NEWYEARSEVE YYYY-12-31 14:00+>
    >>> m.match(DT.datetime(2018, 12, 31, 13), DT.datetime(2018,12, 31, 13), None)
    False
    >>> m.match(DT.datetime(2018, 12, 31, 14), DT.datetime(2018,12, 31, 14), None)
    True

    :param slug: machine-readable name of this rule, see :class:`Rule`
    :param month: only match, if shift is within this month, counted from 1 = January
    :param day: only match, if shift is on this day, counted from 1
    :param hour: only match, if shift is after or in this hour. Supplying ``14`` results in ``14:00`` to ``24:00`` to be matched.

    Additionally all keyword arguments defined for :class:`Rule` can be used.
    """
    def __init__(self, slug: str, month: int, day: int, hour: int, **kwargs) -> None:
        # Named single-argument predicate (same arity as the original
        # lambda): correct day AND at-or-after the given hour.
        def _is_on_day_after(m):
            return m.month == month and m.day == day and m.hour >= hour

        super().__init__(
            slug,
            f'YYYY-{month:02d}-{day:02d} {hour:02d}:00+',
            _is_on_day_after,
            **kwargs,
        )
class RuleGroup():
    """
    A collection of similar :class:`Rule` instances. When the group is
    evaluated, only the highest matching rule is returned.

    :param slug: a machine and human-ish readable name for this rule, must not change.
    :param description: a short, human-readable text, explaining why the given rules are grouped together.
    :param rules: the initial set of rules.
    """
    def __init__(self, slug: str, description: str, rules: Iterable[Rule]) -> None:
        self._slug = slug
        self._description = description
        # slug -> Rule mapping; all insertion goes through append(), which
        # enforces uniqueness and bonus-type consistency.
        self._rules = {}  # type: Dict[str, Rule]
        self.extend(rules)

    def append(self, rule: Rule, replace: bool = False) -> None:
        """
        Add a single rule to this group.

        :param rule: rule to add; it must not yet exist in the group.
        :param replace: if rule duplicates an existing one, overwrite it.
        """
        if not isinstance(rule, Rule):
            raise Exception('Rules must be derived from libestg3b.Rule')
        if rule._slug in self._rules and not replace:
            raise Exception(f'Slug {rule._slug} is already in this group')
        if self._rules:
            # All rules in one group must award the same kind of bonus
            # ("multiply" vs "add"), otherwise max() in match() would try
            # to compare incomparable rules (Rule.__gt__ raises).
            my_type = next(iter(self._rules.values()))._bonus[0]
            if my_type != rule._bonus[0]:
                raise Exception(f'cannot add a {rule._bonus[0]} rule to a group containing {my_type} rules.')
        self._rules[rule._slug] = rule

    def match(self, minute: datetime.datetime, start: datetime.datetime, holidays: holidays.HolidayBase) -> Optional[Rule]:
        """
        Evaluate this group. The given shift is tested using each of the stored
        rules. The rule with the highest bonus is then returned. If not a
        single one matches, ``None`` is returned.

        This method is normally used by :class:`libestg3b.EStG3b`, but you can
        use it to implement more complex scenarios yourself.

        :param minute: minute to evaluate (see :class:`libestgb3.EStG3b`)
        :param start: the first minute in this shift (see :class:`libestgb3.EStG3b`)
        """
        try:
            # max() relies on Rule.__gt__/__lt__, i.e. on the bonus height.
            return max(filter(lambda rule: rule.match(minute, start, holidays), self))
        except ValueError:  # no match found
            return None

    def extend(self, rules: Iterable[Rule], replace: bool = False) -> None:
        """
        Add the given rules to this group.

        :param rules:
        :param replace: if one of the given rule duplicates an existing one, overwrite it instead of raising an exception.
        """
        for m in rules:
            self.append(m, replace)

    def __contains__(self, item) -> bool:
        # Accept either a Rule instance or a bare slug string.
        if isinstance(item, Rule):
            return item._slug in self._rules
        else:
            return item in self._rules

    def __iter__(self) -> Iterator[Rule]:
        return self._rules.values().__iter__()
|
<reponame>daniraja/dd-wc
import { Component, h, State, Element, Prop, Listen } from '@stencil/core';
import { StockService } from '../../services/stock.service';
const stockService = new StockService();
@Component({
  tag: 'wc-stock-price',
  styleUrl: './stock-price.css',
  shadow: true,
})
export class StockPrice {
  @Element() el: HTMLElement;

  // Direct reference to the symbol <input>, wired via the ref callback in render().
  symbol: HTMLInputElement;

  @State() rate: number = 0;
  @State() userInput = '';
  @State() validInput = false;
  @State() showError = false;
  @State() loading = false;

  // Alert props are reflected + mutable so the embedded <wc-alert> can be
  // driven both from this component and from the outside.
  @Prop({ reflect: true, mutable: true }) alertShow: any;
  @Prop({ reflect: true, mutable: true }) alertMsg: any;
  @Prop({ reflect: true, mutable: true }) alertHeading: any;
  @Prop({ reflect: true, mutable: true }) alertType: any;

  // React to symbol selections bubbled up from sibling components.
  @Listen('wcSelectedSymbol', { target: 'body' })
  onSelectedSymbol(e: CustomEvent) {
    if (e.detail) {
      this.userInput = e.detail;
      this.fetchPrice(e.detail);
    }
  }

  hostData() {
    return { class: this.showError ? 'error' : '' };
  }

  onUserInput(event: Event) {
    this.userInput = (event.target as HTMLInputElement).value;
    // Only allow fetching once the input is non-blank.
    this.validInput = this.userInput.trim() !== '';
  }

  // Populate the alert props; the <wc-alert> child renders them.
  showAlert(type: any, message: string, heading: string) {
    this.alertShow = true;
    this.alertHeading = heading;
    this.alertMsg = message;
    this.alertType = type;
  }

  onFetch(event: Event) {
    event.preventDefault();
    const symbol = this.symbol.value;
    this.fetchPrice(symbol);
  }

  fetchPrice(symbol: string) {
    this.validInput = true;
    this.showError = false;
    this.loading = true;
    stockService
      .getStockPrice(symbol)
      .then(res => {
        return res.json();
      })
      .then(parsedRes => {
        // A missing quote (unknown symbol) lands in .catch below, either via
        // this throw or via the TypeError from indexing an undefined object.
        if (!parsedRes['Global Quote']['05. price']) {
          throw 'Please enter a valid symbol!';
        }
        this.rate = +parsedRes['Global Quote']['05. price'];
      })
      .catch(err => {
        this.showError = true;
        this.showAlert('error', err, 'Oops');
        console.error(err);
      })
      .finally(() => {
        this.loading = false;
      });
  }

  render() {
    // BUGFIX: the original array had a stray double comma after the <h3>,
    // inserting an elision (undefined child) into the rendered output.
    return [
      <h3>Stock price checker <wc-tool-tip>Please enter a stock symbol and click fetch.</wc-tool-tip></h3>,
      <wc-alert show={this.alertShow} heading={this.alertHeading} message={this.alertMsg} type={this.alertType}></wc-alert>,
      <form onSubmit={this.onFetch.bind(this)}>
        <input autocomplete="off" id="stock-symbol" ref={el => (this.symbol = el)} value={this.userInput} onInput={this.onUserInput.bind(this)} />
        <button type="submit" disabled={!this.validInput || this.loading}>
          Fetch
        </button>
      </form>,
      <div>
        <h4 class="bold uppercase text-purple-800">
          {this.userInput && this.rate ? this.userInput : 'Price'}: $ {this.rate}
        </h4>
      </div>,
    ];
  }
}
|
<gh_stars>10-100
package es.upm.etsisi.cf4j.recommender.matrixFactorization;
import es.upm.etsisi.cf4j.data.DataModel;
import es.upm.etsisi.cf4j.data.MockDataSet;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
 * Regression test for the HPF recommender: trains on a mock dataset with a
 * fixed seed and pins the resulting prediction and accessor values.
 */
class HPFTest {
  // Fixed RNG seed so the fit (and the hard-coded assertion below) is deterministic.
  private static final int seed = 69;
  private static final int numFactors = 2;
  private static final int numIters = 1;
  private static final int testUserId = 1;
  private static final int testItemId = 1;
  // Shared across tests; built once in initAll().
  private static DataModel datamodel;

  @BeforeAll
  static void initAll() {
    datamodel = new DataModel(new MockDataSet());
  }

  @Test
  void hpfTest() {
    HPF hpf = new HPF(datamodel, numFactors, numIters, seed);
    hpf.fit();
    // NOTE(review): exact double equality; only valid while seeding keeps
    // the factorization fully deterministic.
    assertEquals(0.06484894290927823, hpf.predict(testUserId, testItemId));
    // The vector form predict(user)[item] must agree with the scalar form.
    assertEquals(
        hpf.predict(testUserId, testItemId),
        hpf.predict(datamodel.getTestUser(testUserId))[testItemId]);
    assertEquals(numFactors, hpf.getNumFactors());
    assertEquals(numIters, hpf.getNumIters());
  }
}
|
# For every transcript file under wav/, overwrite its contents with the
# word-segmented ("fenci") line from fenci.txt whose utterance id equals the
# file's current contents.  fenci.txt lines look like "<utt-id>:<segmented text>".
#
# Fixes over the original:
#  - quote the -name pattern (an unquoted *.wav.txt is glob-expanded by the
#    shell if a matching file exists in the current directory)
#  - read find's output line-by-line instead of word-splitting it with `for`
#  - quote all variable expansions and use `read -r`
#  - redirect fenci.txt directly instead of a useless `cat |` pipeline
find wav/ -name '*.wav.txt' | while read -r file
do
    cmd=$(cat "$file")
    while read -r line
    do
        utt=$(echo "$line" | awk -F':' '{print $1}')
        fenci=$(echo "$line" | awk -F':' '{print $2}')
        if [ "${cmd}" = "${utt}" ]; then
            echo "$fenci" > "$file"
        fi
    done < fenci.txt
done
|
<reponame>bianapis/sd-market-data-switch-operation-v2.0
package org.bian.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import javax.validation.Valid;
/**
* BQFeedUploadRetrieveOutputModelFeedUploadInstanceAnalysis
*/
// Plain swagger-style DTO: four nullable fields with bean getters/setters,
// serialized via the Jackson annotations imported above.
public class BQFeedUploadRetrieveOutputModelFeedUploadInstanceAnalysis {
  private Object feedUploadInstanceAnalysisRecord = null;
  private String feedUploadInstanceAnalysisReportType = null;
  private String feedUploadInstanceAnalysisParameters = null;
  private Object feedUploadInstanceAnalysisReport = null;

  /**
   * `status: Not Mapped` core-data-type-reference: BIAN::DataTypesLibrary::CoreDataTypes::UNCEFACT::Binary general-info: The inputs and results of the instance analysis that can be on-going, periodic and actual and projected
   * @return feedUploadInstanceAnalysisRecord
   **/
  public Object getFeedUploadInstanceAnalysisRecord() {
    return feedUploadInstanceAnalysisRecord;
  }

  public void setFeedUploadInstanceAnalysisRecord(Object feedUploadInstanceAnalysisRecord) {
    this.feedUploadInstanceAnalysisRecord = feedUploadInstanceAnalysisRecord;
  }

  /**
   * `status: Not Mapped` core-data-type-reference: BIAN::DataTypesLibrary::CoreDataTypes::UNCEFACT::Code general-info: The type of external performance analysis report available
   * @return feedUploadInstanceAnalysisReportType
   **/
  public String getFeedUploadInstanceAnalysisReportType() {
    return feedUploadInstanceAnalysisReportType;
  }

  public void setFeedUploadInstanceAnalysisReportType(String feedUploadInstanceAnalysisReportType) {
    this.feedUploadInstanceAnalysisReportType = feedUploadInstanceAnalysisReportType;
  }

  /**
   * `status: Not Mapped` core-data-type-reference: BIAN::DataTypesLibrary::CoreDataTypes::UNCEFACT::Text general-info: The selection parameters for the analysis (e.g. period, algorithm type)
   * @return feedUploadInstanceAnalysisParameters
   **/
  public String getFeedUploadInstanceAnalysisParameters() {
    return feedUploadInstanceAnalysisParameters;
  }

  public void setFeedUploadInstanceAnalysisParameters(String feedUploadInstanceAnalysisParameters) {
    this.feedUploadInstanceAnalysisParameters = feedUploadInstanceAnalysisParameters;
  }

  /**
   * `status: Not Mapped` core-data-type-reference: BIAN::DataTypesLibrary::CoreDataTypes::UNCEFACT::Binary general-info: The external analysis report in any suitable form including selection filters where appropriate
   * @return feedUploadInstanceAnalysisReport
   **/
  public Object getFeedUploadInstanceAnalysisReport() {
    return feedUploadInstanceAnalysisReport;
  }

  public void setFeedUploadInstanceAnalysisReport(Object feedUploadInstanceAnalysisReport) {
    this.feedUploadInstanceAnalysisReport = feedUploadInstanceAnalysisReport;
  }
}
|
<reponame>ysulyma/rp-paint<filename>src/settings/Sheets.tsx<gh_stars>1-10
import * as React from "react";
import {useCallback, useContext, useEffect, useMemo, useRef, useState} from "react";
import {Utils} from "ractive-player";
const {range} = Utils.misc,
{onClick} = Utils.mobile;
import {Sheets as Icon} from "../images";
import {PaintContext} from "../Canvas";
interface Props {
listen: (fn: (e: KeyboardEvent) => void) => void;
}
// Sheet-switcher tool: Alt+ArrowDown / Alt+ArrowUp move between sheets
// (snapshotting the canvas of the sheet being left), and the toolbar button
// toggles a thumbnail dialog of the captured sheets.
export default function Sheets(props: Props) {
  // Paint context consumer: holds sheet state and the drawing layers.
  const {consumer} = useContext(PaintContext);
  // Whether the thumbnail dialog is visible.
  const [open, setOpen] = useState(false);
  // Data-URL snapshots of each sheet's canvas, indexed by sheet number.
  const snapshots = useRef<string[]>([]);

  useEffect(() => {
    // Registered once on mount ([] deps); `consumer` is captured by the
    // closure, so this relies on the context object staying stable.
    props.listen(e => {
      if (!e.altKey) return;
      if (e.key === "ArrowDown") {
        // snapshot the current sheet before moving to the next one
        snapshots.current[consumer.state.activeSheet] = consumer.layers.stable.toDataURL();
        const action = {
          type: "change-sheet" as const,
          sheet: consumer.state.activeSheet + 1
        };
        consumer.record(action);
        consumer.repaint(true);
      } else if (e.key === "ArrowUp") {
        // already on the first sheet: nothing to go back to
        if (consumer.state.activeSheet === 0)
          return;
        snapshots.current[consumer.state.activeSheet] = consumer.layers.stable.toDataURL();
        const action = {
          type: "change-sheet" as const,
          sheet: consumer.state.activeSheet - 1
        };
        consumer.record(action);
        consumer.repaint(true);
      }
    });
  }, []);

  const classNames = ["rp-paint-tool"];

  // Toggle the dialog; when opening, refresh the active sheet's snapshot
  // so its thumbnail is up to date.
  const openDialog = useMemo(() => onClick(() => {
    setOpen(prev => {
      if (!prev)
        snapshots.current[consumer.state.activeSheet] = consumer.layers.stable.toDataURL();
      return !prev;
    });
  }), []);

  return (
    <>
      <aside className="rp-sheets-dialog" style={{display: open ? "block" : "none"}}>
        <ol>
          {range(consumer.state.numSheets).map(i => (
            <li className={i === consumer.state.activeSheet ? "selected" : ""} key={i}>
              <img src={snapshots.current[i]}/>
            </li>
          ))}
        </ol>
      </aside>
      <button className={classNames.join(" ")} {...openDialog}>
        <Icon/>
      </button>
    </>
  );
}
|
<gh_stars>0
//
// Filter.hpp
// GameBT
//
// Created by <NAME> on 15/1/2018.
// Copyright © 2018 <NAME>. All rights reserved.
//
// Filter is a Sequence with a single condition and a single action.
//
#ifndef Filter_h
#define Filter_h
#include "Sequence.hpp"
namespace BT
{
class Filter : public Sequence
{
public:
  virtual ~Filter() { }
  // Prepend: conditions always sit in front of any actions in m_Children,
  // so the Sequence evaluates (and can fail on) them first.
  inline void addCondition(Behavior* condition) { m_Children.insert(m_Children.begin(), condition); }
  // Append: actions run only after all conditions have passed.
  inline void addAction(Behavior* action) { m_Children.push_back(action); }
};
}
#endif /* Filter_h */
|
<filename>qd/cae/dyna_cpp/dyna/binout/lsda/lsda.c
/*
Copyright (C) 2002
by Livermore Software Technology Corp. (LSTC)
All rights reserved
NOTE: things to consider working on/fixing some day. In the open/openmany
routines, I'm not sure what will happen if the file name passed in has
a %XXX on the end already. In the many case, should also watch for duplicate
names, or overlapping sets (ie, "file file%002 file" should collapse to just
"file"). Probably shouldn't try to expand "file%002" into a list, as it
would either give "file%002%XXX" or maybe "file%003". But should 002 imply 003?
All I'm sure about at the moment is that things should work correctly if you
pass in a series of distinct base file names, and you want to open EVERYTHING
*/
/* alpha does not seem to have the int64_t type */
#if defined ALPHA || defined NEC
#define int64_t long long
#endif
#define __BUILD_LSDA__
#include <ctype.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/types.h>
#if !defined _WIN32 && !defined WIN64 && !defined MPPWIN
#include <dirent.h>
#include <unistd.h>
#define DIR_SEP '/'
#else
#include <direct.h>
#include <windows.h>
#define DIR_SEP '\\'
#define _errno win_errno
#define int64_t __int64
#endif
#include "lsda.h"
#ifdef _WIN32
#include <windows.h>
#ifndef MAXPATH
#define MAXPATH 2048
#endif
struct dirent
{
char d_name[MAXPATH];
};
typedef struct
{
WIN32_FIND_DATA wfd;
HANDLE hfind, hfind0;
struct dirent de;
char dn_ptr[MAXPATH], filter[MAXPATH];
} DIR;
#undef MAXPATH
EXTERN DIR*
opendir(char* spec, char* filter);
EXTERN struct dirent*
readdir(DIR* pdir);
EXTERN void
rewinddir(DIR* pdir);
EXTERN void
closedir(DIR* pdir);
EXTERN int
truncate(char* fname, size_t length);
#endif
#include "lsda_internal.h"
#define Offset LSDA_Offset
#define Length LSDA_Length
#define Command LSDA_Command
#define TypeID LSDA_TypeID
typedef unsigned char octet;
#ifdef VISIBLE
#define STATIC
#else
#define STATIC static
#endif
static char*
fullfilename(IFile* daf);
STATIC char*
finddirmatch(char* name, DIR* dp);
STATIC int
write_initialize(LSDAFile* daf, char* salt);
STATIC int
read_initialize(LSDAFile* daf, int keepst);
STATIC int
rw_initialize(LSDAFile* daf);
STATIC int
closeout_var(LSDAFile* daf);
STATIC int
lsda_writesymboltable(LSDAFile* daf);
STATIC int
lsda_writesymbol(char* ppath, char* curpath, LSDATable* symbol, LSDAFile* daf);
STATIC LSDATable*
lsda_readsymbol(LSDAFile* daf);
STATIC int
lsda_readsymboltable(LSDAFile* daf);
STATIC void
lsda_createbasictypes(LSDAFile* daf);
STATIC void
CreateTypeAlias(LSDAFile* daf, char* alias, char* oldtype);
STATIC Length
ReadLength(LSDAFile* daf);
STATIC Offset
ReadOffset(LSDAFile* daf);
STATIC Command
ReadCommand(LSDAFile* daf);
STATIC TypeID
ReadTypeID(LSDAFile* daf);
STATIC int
ReadSalt(LSDAFile* daf);
STATIC Length
ReadData(unsigned char* data, size_t size, size_t count, LSDAFile* daf);
STATIC int
WriteSalt(LSDAFile* daf);
STATIC Length
WriteData(octet* data, size_t size, size_t count, LSDAFile* daf, int flush);
STATIC void*
ReadTrans(LSDAFile* daf, int FileLength, _CF Convert);
STATIC char*
findpath(char* from, char* to);
STATIC int
lsda_writecd(int handle, char* path);
extern _CF
GetConversionFunction(IFile* ifile, LSDAType* typein, LSDAType* typeout);
STATIC void
PruneSymbols(LSDAFile* daf, LSDATable* symbol);
static size_t
SymbolSizes(LSDAFile* daf, LSDATable* symbol);
static int
alloc_more_daf(int count);
static int
lsda_open2(char* filen, int mode, int handle_in, char* key, char* salt);
static char*
link_path(char* path, char* link);
static int num_daf = 0;
static LSDAFile* da_store = NULL;
static int _errno = ERR_NONE;
static int report_level = 0;
static char _scbuf[1024];
static int little_i = 1;
#define little_endian (*(char*)(&little_i))
#ifndef HAVE_AES
/*
  Dummy routines so things will link OK when the library is built
  without AES support.  They intentionally do nothing.
*/
/* No-op stand-in: the real version fills *s with random salt. */
void
get_salt(void* s)
{}
/* No-op stand-in for the AES key-schedule setup. */
void
aes_enc_key(char* inkey, int len, aes_ctx* ctx)
{}
/* No-op stand-in for single-block AES encryption. */
void
aes_enc_blk(char* inblk, char* outblk, aes_ctx* ctx)
{}
#endif
#if defined _WIN32 || defined MPPWIN
/*
Start off with a few UNIX functions Win32 doesn't have....
*/
// int fsync(int fd) {}
/*
  Win32 emulation of opendir(3), extended with a `filter` pattern:
  chdir into `spec` and start a FindFirstFile scan over "filter*".
  If nothing matches the filter, fall back to scanning "*" (everything).
  Returns NULL only if even the wildcard scan finds nothing.
  NOTE(review): this chdir()s into `spec` as a side effect.
*/
DIR*
opendir(char* spec, char* filter)
{
  DIR* pdir;
  char myfilter[MAX_PATH];
  char* ptr;
  pdir = (DIR*)malloc(sizeof(DIR));
  memset(pdir, 0, sizeof(DIR));
  _chdir(spec);
  strcpy(myfilter, filter);
  /* make sure the pattern ends in '*' so it works as a prefix match */
  ptr = strrchr(myfilter, '*');
  if (!ptr)
    strcat(myfilter, "*");
  pdir->hfind0 = pdir->hfind = FindFirstFile(myfilter, &pdir->wfd);
  if (pdir->hfind0 != (void*)0xFFFFFFFF) { /* i.e. not INVALID_HANDLE_VALUE */
    strcpy(pdir->filter, myfilter);
    strcpy(pdir->dn_ptr, spec);
  } else {
    /* filter matched nothing: retry with a bare "*" wildcard */
    pdir->hfind0 = pdir->hfind = FindFirstFile("*", &pdir->wfd);
    if (pdir->hfind0 != (void*)0xFFFFFFFF) {
      strcpy(pdir->dn_ptr, spec);
      strcpy(pdir->filter, "*");
    } else {
      free(pdir);
      return NULL;
    }
  }
  return pdir;
}
/* Win32 emulation of rewinddir(3): drop the original search handle and
   re-run the saved wildcard query so readdir() restarts from the top. */
void
rewinddir(DIR* pdir)
{
  FindClose(pdir->hfind0);
  pdir->hfind0 = pdir->hfind = FindFirstFile(pdir->filter, &pdir->wfd);
}
/* Win32 emulation of readdir(3): return the entry fetched by the previous
   Find call and pre-fetch the next one.  hfind==NULL marks exhaustion. */
struct dirent*
readdir(DIR* pdir)
{
  if (pdir->hfind) {
    strcpy(pdir->de.d_name, pdir->wfd.cFileName);
    if (!FindNextFile(pdir->hfind, &pdir->wfd))
      pdir->hfind = NULL; /* no entries remain after this one */
    return &pdir->de;
  }
  return NULL;
}
/* Win32 emulation of closedir(3): release the search handle and the DIR. */
void
closedir(DIR* pdir)
{
  FindClose(pdir->hfind0);
  free(pdir);
}
/*
  Win32 replacement for POSIX truncate(): shorten `fname` to `length`
  bytes by copying its head to a temporary file and copying that back.
  Returns 0 on success, -1 on any failure.
*/
int
truncate(char* fname, size_t length)
{
  char* tmpfile;
  char buf[1024];
  FILE *fout, *fin;
  size_t rdsize;
  size_t cursize = 0;
  int done = 0;
  tmpfile = tempnam(NULL, "LSDA");
  if (tmpfile == NULL)
    return -1;
  fout = fopen(tmpfile, "wb");
  fin = fopen(fname, "rb");
  if (fout == NULL || fin == NULL) {
    /* BUGFIX: don't leak the stream that did open successfully */
    if (fout)
      fclose(fout);
    if (fin)
      fclose(fin);
    free(tmpfile);
    return -1;
  }
  /* Copy at most `length` bytes, in buffer-sized chunks. */
  while (feof(fin) == 0 && !done) {
    rdsize = sizeof(buf);
    if (cursize + rdsize >= length) {
      /* BUGFIX: the old final-chunk size (cursize - length + 1024) could
         exceed sizeof(buf) and overflow the stack buffer in fread(). */
      rdsize = length - cursize;
      done = 1;
    }
    if (rdsize > 0) {
      rdsize = fread(buf, sizeof(char), rdsize, fin);
      fwrite(buf, sizeof(char), rdsize, fout);
      cursize += rdsize;
    }
  }
  fclose(fout);
  fclose(fin);
  /* Copy the (now truncated) contents back over the original file. */
  fout = fopen(fname, "wb");
  fin = fopen(tmpfile, "rb");
  if (fout == NULL || fin == NULL) {
    if (fout)
      fclose(fout);
    if (fin)
      fclose(fin);
    remove(tmpfile);
    free(tmpfile);
    return -1;
  }
  while (feof(fin) == 0) {
    /* BUGFIX: always request a full buffer; the old code reused fread()'s
       shrinking return value as the next request size. */
    rdsize = fread(buf, sizeof(char), sizeof(buf), fin);
    fwrite(buf, sizeof(char), rdsize, fout);
  }
  fclose(fin);
  fclose(fout);
  /* BUGFIX: delete the scratch file and free tempnam()'s buffer */
  remove(tmpfile);
  free(tmpfile);
  return 0;
}
#endif
static char*
link_path(char* path, /* Full path of link in data file */
char* link) /* value of the link, ie where it points */
{
/* Return the full path to the thing the link points to.
This is just simple string manipulation */
int lp = strlen(path);
int ll = strlen(link);
int i, ncomp, nkeep, comp[256];
char *ret, *cp, *cp2;
if (link[0] == '/') { /* simple, link is absolute */
ret = strdup(link);
goto cleanup;
}
ret = (char*)malloc(lp + ll + 2);
strcpy(ret, path);
/* path should include the name of the link itself, so remove
everything after the last / */
for (i = lp; i > 0 && ret[i] != '/'; i--)
ret[i] = 0;
/* append link contents */
strcat(ret, link);
cleanup: /* clean up the resulting string */
/*
Replace each / in ret with a NULL, saving
poitners to each component in the path.
*/
ncomp = 0;
for (i = 0; ret[i]; i++)
if (ret[i] == '/') {
comp[ncomp++] = i + 1;
ret[i] = 0;
}
/* Collapse the list of components, as needed,
removing instances of '.' and '..' and the like.
*/
nkeep = 0;
for (i = 0; i < ncomp; i++) {
cp = ret + comp[i];
if (*cp == 0)
continue; /* empty component: must have been // in path */
if (strcmp(cp, ".") == 0)
continue; /* remove '.' */
if (strcmp(cp, "..") == 0) {
if (nkeep > 0)
nkeep--;
continue;
}
comp[nkeep++] = comp[i];
}
/* rebuild path in place. */
cp = ret;
for (i = 0; i < nkeep; i++) {
cp2 = ret + comp[i];
*cp++ = '/';
while (*cp2) {
*cp++ = *cp2++;
}
}
if (cp == ret)
*cp++ = '/';
*cp = 0;
return ret;
}
/* Allocate a fresh, zero-initialized member-file descriptor. */
static IFile*
newIFile()
{
  /* calloc() zero-fills, matching the previous malloc()+memset() pair. */
  return (IFile*)calloc(1, sizeof(IFile));
}
/* Expose the address of the module-wide error code so callers can read
   and clear the most recent error. */
int*
_lsda_errno()
{
  return &_errno;
}
int
lsda_fopen_aes(char* filen,
int filenum,
Offset offset,
int mode,
int want,
char* key);
/* Reopen an unencrypted LSDA file at (filenum, offset); the library
   picks the next free handle. */
int
lsda_reopen(char* filen, int filenum, Offset offset, int mode)
{
  /* want = -1 -> "any free handle"; key = NULL -> no encryption. */
  return lsda_fopen_aes(filen, filenum, offset, mode, -1, NULL);
}
/* Reopen an AES-encrypted LSDA file at (filenum, offset); the library
   picks the next free handle. */
int
lsda_reopen_aes(char* filen, int filenum, Offset offset, int mode, char* key)
{
  /* want = -1 -> "any free handle"; key selects the AES passphrase. */
  return lsda_fopen_aes(filen, filenum, offset, mode, -1, key);
}
/* Open an unencrypted LSDA file at (filenum, offset), requesting handle
   `want` (or -1 for "any free handle"). */
int
lsda_fopen(char* filen, int filenum, Offset offset, int mode, int want)
{
  return lsda_fopen_aes(filen, filenum, offset, mode, want, NULL);
}
/*
  Open an LSDA file (optionally AES-encrypted with `key`) positioned at
  member file `filenum` and `offset`.

  want < 0 : take the first free handle (growing the table as needed);
  want >= 0: claim that specific handle, failing if it is in use.

  WRITE-type modes first truncate the file series at (filenum, offset)
  and then reopen in APPEND (or WRITEONLY if the series becomes empty).
  Returns the result of lsda_open2(), or a negative value on failure.
*/
int
lsda_fopen_aes(char* filen,
               int filenum,
               Offset offset,
               int mode,
               int want,
               char* key)
{
  int i, j;
  char lfilen[MAXPATH];
  _errno = ERR_NONE; /* reset error */
  /*
    If the user specified a particular handle, get it if it is available
    else take next available handle.
  */
  if (want < 0) {
    for (i = 0; i < num_daf; i++) {
      if (da_store[i].free)
        break;
    }
    if (i == num_daf && alloc_more_daf(10) < 0)
      return -1;
  } else {
    if (want >= num_daf) {
      /* grow the handle table far enough to contain `want` */
      if (alloc_more_daf(want + 10 - num_daf) < 0)
        return -1;
    } else {
      if (da_store[want].free == 0)
        return -1; /* requested handle already in use */
    }
    i = want;
  }
  /*
    don't truncate READ type files when reopening them
  */
  if (mode == LSDA_WRITEREAD)
    mode = LSDA_READWRITE;
  if (mode == LSDA_WRITEONLY || mode == LSDA_APPEND) {
    j = lsda_truncate_aes(filen, filenum, offset, key);
    if (j != LSDA_SUCCESS)
      return j;
    /* Reset open mode to APPEND if the file length > 0, else WRITEONLY */
    if (filenum > 0 || offset > 0)
      mode = LSDA_APPEND;
    else
      mode = LSDA_WRITEONLY;
  }
  strcpy(lfilen, filen);
  if (filenum > 0) {
    char ext[8];
    /* continuation members carry a "%NNN" suffix on the base name */
    sprintf(ext, "%%%3.3d", filenum);
    strcat(lfilen, ext);
  }
  return lsda_open2(lfilen, mode, i, key, NULL);
}
/* Truncate an unencrypted LSDA file series at (filenum, offset). */
int
lsda_truncate(char* filen, int filenum, Offset offset)
{
  return lsda_truncate_aes(filen, filenum, offset, NULL);
}
/*
  Truncate an LSDA file "series" (base file plus its %NNN continuations)
  so that member `filenum` ends at `offset`, and delete every member
  with a higher number.  `offset` must point just past an
  end-of-symbol-table record; that record's "next symbol table" offset
  is zeroed so the shortened file remains a valid, self-contained
  database.  filenum==0 && offset==0 requests a whole-series wipe.
  `key` (may be NULL) enables AES handling of the records being
  patched.  Returns LSDA_SUCCESS, or -1 with _errno set.
*/
int
lsda_truncate_aes(char* filen, int filenum, Offset offset, char* key)
{
  DIR* dp;
  int i, j, len;
  LSDAFile *daf, dafd;
  char* name;
  unsigned char header[16];
  char basename[64], *cp;
  unsigned char buf[64], *bp;
  int lastnum;
  char tname[8];
  Command cmd;
  LSDAType *type1, *type2;
  Offset loff;
  _CF Convert;
  _errno = ERR_NONE; /* reset error */
  /* Work on a throwaway stack LSDAFile; no user handle is consumed. */
  daf = &dafd;
  InitLSDAFile(daf);
  lsda_createbasictypes(daf);
  daf->maxsize = DEF_MAX_SIZE;
  daf->num_list = 1;
  daf->ifile = (IFile**)malloc(sizeof(IFile*));
  daf->ifile[0] = daf->ifr = daf->ifw = newIFile();
  daf->encrypted = 0;
#ifdef HAVE_AES
  if (key && *key) {
    /* Expand the passphrase to exactly 16 bytes, cycling it if shorter. */
    unsigned char* cp = key;
    unsigned char lkey[16];
    int i;
    for (i = 0; i < 16; i++) {
      lkey[i] = *cp++;
      if (*cp == 0)
        cp = key;
    }
    daf->encrypted = 1;
    aes_enc_key(lkey, 16, daf->ctx);
  }
#endif
  /* Split `filen` into dirname + filename (dropping any trailing slash). */
  len = strlen(filen);
  if (filen[len - 1] == DIR_SEP)
    filen[--len] = 0;
  for (j = len - 1; j > 0; j--)
    if (filen[j] == DIR_SEP) {
      daf->ifw->dirname = (char*)malloc(j + 1);
      memcpy(daf->ifw->dirname, filen, j);
      daf->ifw->dirname[j] = (char)0;
      daf->ifw->filename = (char*)malloc(len - j + 8);
      strcpy(daf->ifw->filename, filen + j + 1);
      break;
    }
  if (j == 0) {
    /* no separator found: file lives in the current directory */
    daf->ifw->dirname = (char*)malloc(2);
    strcpy(daf->ifw->dirname, ".");
    daf->ifw->filename = (char*)malloc(len + 1 + 8);
    strcpy(daf->ifw->filename, filen);
  }
  if (filenum == 0 && offset == 0)
    filenum = -1; /* whole-series wipe: no member survives the cleanup below */
  if (filenum > 0) {
    char ext[8];
    sprintf(ext, "%%%3.3d", filenum); /* continuations are "name%NNN" */
    strcat(daf->ifw->filename, ext);
  }
  /*
    Make sure the indicated file exists and we can access it, if the offset > 0.
    If the offset==0, just create it.
  */
  if (filenum >= 0 && offset > 0) {
    daf->fpr = daf->fpw = fopen(fullfilename(daf->ifw), "r+b");
    if (daf->fpw == NULL) { /* indicated file does not exist */
      _errno = ERR_NOFILE;
      if (report_level > 0)
        fprintf(stderr, "lsda_truncate: file does not exist\n");
      goto cleanup;
    }
    /* Read the file header; header[1..3] hold the on-disk byte sizes of
       the length/offset/command fields used by this particular file. */
    if (daf->encrypted)
      ReadSalt(daf);
    ReadData(header, 1, 8, daf);
    /*
      Check to make sure offset given is a valid end of symbol table location
    */
    loff = offset - header[1] - header[2] - header[3];
    if (daf->encrypted)
      loff -= 16; /* encrypted records are preceded by a 16-byte salt block */
    if (fseek(daf->fpw, loff, SEEK_SET) != 0) {
      _errno = ERR_FSEEK;
      if (report_level > 0)
        fprintf(stderr, "lsda_truncate: fseek failed\n");
      goto cleanup;
    }
    /* To properly read the "command" field is a bit of a pain. First, find the
     * proper conversion function, then feed it to the ReadTrans routine, along
     * with the in file size of the field */
    sprintf(tname, "I*%d", header[3]);
    type1 = daf->FindType(daf, tname);
    sprintf(tname, "I*%d", (int)sizeof(Command));
    type2 = daf->FindType(daf, tname);
    daf->ifw->bigendian = header[5];
    daf->ifw->ConvertCommand = GetConversionFunction(daf->ifw, type1, type2);
    /*
      For encrypted files, we have to start our read at the beginning
      of the record, to keep in sync. Also, we want to save all the
      raw (untranslated) data so we can rewrite this record down below.
    */
    if (daf->encrypted)
      ReadSalt(daf);
    ReadData(buf, header[1], 1, daf);
    bp = buf + header[1];
    ReadData(bp, header[3], 1, daf);
    Convert = GetConversionFunction(daf->ifr, type1, type2);
    if (Convert)
      Convert(bp, &cmd, 1);
    else
      memcpy(&cmd, bp, header[3]);
    if (_errno != ERR_NONE) {
      _errno = ERR_READ;
      if (report_level > 0)
        fprintf(stderr, "lsda_truncate: fread failed to read 1 byte\n");
      goto cleanup;
    }
    if (cmd != LSDA_ENDSYMBOLTABLE) {
      _errno = ERR_NOENDSYMBOLTABLE;
      if (report_level > 0)
        fprintf(stderr, "lsda_truncate: end of symbol table not found\n");
      goto cleanup;
    }
    /*
      Set the "next symbol table" offset to 0
    */
    daf->ifw->stoffset = offset - header[2];
    daf->ifw->ateof = 0;
    if (daf->encrypted) {
      /* Must rewrite the entire record so the cipher stream stays valid. */
      if (fseek(daf->fpw, loff, SEEK_SET) != 0) {
        _errno = ERR_FSEEK;
        if (report_level > 0)
          fprintf(stderr,
                  "lsda_truncate: fseek to %ld failed\n",
                  (long)daf->ifw->stoffset);
        goto cleanup;
      }
      /* Rewrite whole record, but with 0s for the offset part */
      memset(bp + header[3], 0, header[2]);
      if (WriteSalt(daf) || WriteData(buf, header[1], 1, daf, 1) != 1 ||
          WriteData(bp, header[3], 1, daf, 1) != 1 ||
          WriteData(bp + header[3], header[2], 1, daf, 1) != 1) {
        _errno = ERR_WRITE;
        if (report_level > 0)
          fprintf(stderr, "lsda_truncate: failed to rewrite record\n");
        goto cleanup;
      }
    } else {
      if (fseek(daf->fpw, daf->ifw->stoffset, SEEK_SET) != 0) {
        _errno = ERR_FSEEK;
        if (report_level > 0)
          fprintf(stderr,
                  "lsda_truncate: fseek to %ld failed\n",
                  (long)daf->ifw->stoffset);
        goto cleanup;
      }
      /* have to write the correct number of words -- fortunately since we
       * are writing 0 there is no endianness problem... */
      memset(buf, 0, 32);
      if (fwrite(buf, header[2], 1, daf->fpw) != 1) {
        _errno = ERR_WRITE;
        if (report_level > 0)
          fprintf(stderr, "lsda_truncate: fwrite failed\n");
        goto cleanup;
      }
    }
    /*
      Truncate file.
    */
    fclose(daf->fpw);
#if !defined _WIN32
    truncate(fullfilename(daf->ifw), offset);
#endif
    daf->fpr = daf->fpw = NULL;
  }
  /*
    remove all files in this series with numbers > filenum
  */
  lastnum = -1;
  /* Strip any trailing "%NNN" off the member name to get the base name. */
  strcpy(basename, daf->ifw->filename);
  for (i = 0, j = strlen(basename) - 1; j > 0; j--)
    if (isdigit(basename[j]))
      i = j;
    else
      break;
  if (i && j && basename[j] == '%')
    basename[j] = 0;
#if defined _WIN32 || defined MPPWIN
  dp = opendir(daf->ifw->dirname, basename);
#else
  dp = opendir(daf->ifw->dirname);
#endif
  if (!dp) {
    _errno = ERR_OPENDIR;
    if (report_level > 0)
      fprintf(stderr,
              "lsda_truncate: error opening directory %s\n",
              daf->ifw->dirname);
    goto cleanup;
  }
  /* NOTE: the assignment inside the loop condition is intentional. */
  while (name = finddirmatch(basename, dp)) {
    len = strlen(name);
    for (i = 0, j = strlen(name) - 1; j > 0; j--)
      if (isdigit(name[j]))
        i = j;
      else
        break;
    if (name[j] != '%')
      continue; /* not a "%NNN" continuation of this series */
    if (i > 0) {
      j = atoi(name + i);
      if (j > lastnum)
        lastnum = j;
      if (j > filenum && filenum >= 0) {
        sprintf(
          _scbuf, "%s%c%s%%%3.3d", daf->ifw->dirname, DIR_SEP, basename, j);
        remove(_scbuf);
      }
    }
  }
  closedir(dp);
  if (daf->fpw)
    fclose(daf->fpw);
  daf->fpr = daf->fpw = NULL;
  daf->FreeTable(daf, daf->top);
  daf->FreeTypes(daf);
  return LSDA_SUCCESS;
cleanup:
  /* error path: close any open stream and free the scratch structures */
  if (daf->fpw)
    fclose(daf->fpw);
  daf->fpr = daf->fpw = NULL;
  daf->FreeTable(daf, daf->top);
  daf->FreeTypes(daf);
  return -1;
}
/* Forward declaration: open several files under one handle, with an
   optional AES key.  Defined below; declared here so lsda_open_many
   can delegate to it. */
int
lsda_open_many_aes(char** filen, int num, char* key);
int
lsda_open_many(char** filen, int num)
{
  /* Convenience wrapper: identical to lsda_open_many_aes with
     encryption disabled (NULL key). */
  char* no_key = NULL;
  return lsda_open_many_aes(filen, num, no_key);
}
int
lsda_open_many_aes(char** filen, int num, char* key)
{
  /*
    Open "num" LSDA files (paths in filen) read-only under a single
    handle, optionally decrypting with AES key "key".  Returns the new
    handle on success, -1 on failure (with _errno set).  On failure all
    IFile structures allocated here are freed and the handle slot is
    marked free again.
  */
  int i, j, k, len, handle;
  LSDAFile* daf;
  /*
  Get next available handle
  */
  _errno = ERR_NONE; /* reset error */
  for (i = 0; i < num_daf; i++) {
    if (da_store[i].free)
      break;
  }
  /* no free slot: grow the global store by 10 entries */
  if (i == num_daf && alloc_more_daf(10) < 0)
    return -1;
  handle = i;
  daf = da_store + i;
  InitLSDAFile(daf);
  daf->num_list = num;
  daf->maxsize = DEF_MAX_SIZE;
  /* If we are worried about the user passing in duplicate file names,
   * could go through the list here and wipe out any that are dups */
  daf->ifile = (IFile**)malloc(num * sizeof(IFile*));
  for (k = 0; k < num; k++) {
    len = strlen(filen[k]);
    daf->ifile[k] = daf->ifr = newIFile();
    /* strip a trailing directory separator, if any */
    if (filen[k][len - 1] == DIR_SEP)
      filen[k][--len] = 0;
    /* split the path into dirname + filename at the last DIR_SEP;
       filename gets 8 extra bytes for a later "%NNN" family suffix */
    for (j = len - 1; j > 0; j--)
      if (filen[k][j] == DIR_SEP) {
        daf->ifr->dirname = (char*)malloc(j + 1);
        memcpy(daf->ifr->dirname, filen[k], j);
        daf->ifr->dirname[j] = (char)0;
        daf->ifr->filename = (char*)malloc(len - j + 8);
        strcpy(daf->ifr->filename, filen[k] + j + 1);
        break;
      }
    if (j == 0) {
      /* no separator found: file is in the current directory */
      daf->ifr->dirname = (char*)malloc(2);
      strcpy(daf->ifr->dirname, ".");
      daf->ifr->filename = (char*)malloc(len + 1 + 8);
      strcpy(daf->ifr->filename, filen[k]);
    }
  }
  daf->openmode = LSDA_READONLY;
  daf->encrypted = 0;
#ifdef HAVE_AES
  if (key && *key) {
    unsigned char* cp = key;
    unsigned char lkey[16];
    int i;
    /* build the 16-byte AES key by cycling through the user string */
    for (i = 0; i < 16; i++) {
      lkey[i] = *cp++;
      if (*cp == 0)
        cp = key;
    }
    daf->encrypted = 1;
    aes_enc_key(lkey, 16, daf->ctx);
  }
#endif
  /*
  Open and initialize the file(s)
  */
  if (read_initialize(daf, 1) >= 0)
    return handle;
  /*
  Some kind of error occurred -- free what we allocated and
  get out.
  */
  if (daf->ifile) {
    for (k = 0; k < num; k++) {
      if (daf->ifile[k]) {
        if (daf->ifile[k]->dirname)
          free(daf->ifile[k]->dirname);
        if (daf->ifile[k]->filename)
          free(daf->ifile[k]->filename);
        free(daf->ifile[k]);
      }
    }
    free(daf->ifile);
    daf->ifile = NULL;
  }
  daf->free = 1;
  return -1;
}
STATIC int
lsda_checkforsymboltable(LSDAFile* daf)
{
  /*
    Sanity-check the symbol table whose file offset is the next value
    in the input stream.  Returns:
       1  - a plausible symbol table was found (BEGIN and END records
            both present where expected)
       0  - stored offset is 0, i.e. end of the symbol table chain
      -1  - read/seek error or malformed table
  */
  Command cmd;
  Offset table_pos, end_pos;
  Length stlen;
  /*
  Check to see if this looks like a symbol table....
  */
  table_pos = ReadOffset(daf);
  if (table_pos == 0)
    return 0; /* end of file -- no more symbol tables */
  if (_errno != ERR_NONE)
    return -1;
  if (fseek(daf->fpr, table_pos, SEEK_SET) < 0)
    return -1;
  if (daf->encrypted && ReadSalt(daf))
    return -1;
  /* a table must open with a record length + BEGINSYMBOLTABLE command */
  stlen = ReadLength(daf);
  if (_errno != ERR_NONE)
    return -1;
  cmd = ReadCommand(daf);
  if (_errno != ERR_NONE)
    return -1;
  if (cmd != LSDA_BEGINSYMBOLTABLE)
    return -1;
  /* compute where the closing ENDSYMBOLTABLE record should begin:
     table length minus the size of the END record's own header fields */
  end_pos = stlen + table_pos -
            (daf->ifr->FileLengthSize + daf->ifr->FileCommandSize +
             daf->ifr->FileOffsetSize);
  /*
  The above is OK for encrypted or not -- in encrypted case, "table_pos"
  points to the SALT at the start of the table, which is not included
  in the table length. But that is OK, because we want to back up over
  the salt when we read the ENDSYMBOLTABLE record...
  if(daf->encrypted) end_pos -= 16;
  */
  if (fseek(daf->fpr, end_pos, SEEK_SET) < 0)
    return -1;
  /* see if end of symbol table is where it should be */
  if (daf->encrypted && ReadSalt(daf))
    return -1;
  stlen = ReadLength(daf);
  if (_errno != ERR_NONE)
    return -1;
  cmd = ReadCommand(daf);
  if (_errno != ERR_NONE)
    return -1;
  if (cmd != LSDA_ENDSYMBOLTABLE)
    return -1;
  /* Leave offset word for next call */
  return 1;
}
/* Forward declaration: validity test for a (possibly encrypted) LSDA
   file.  Defined below; declared here so lsda_test can delegate. */
int
lsda_test_aes(char* filen, char* key);
int
lsda_test(char* filen)
{
  /* Unencrypted variant: delegate to lsda_test_aes with no key. */
  int result = lsda_test_aes(filen, NULL);
  return result;
}
int
lsda_test_aes(char* filen, char* key)
{
  /* check to see if this might be a legit LSDA file */
  /*
    Returns 1 if the file opens, has a sane 8-byte header, and up to
    five chained symbol tables check out; 0 otherwise.  Works on a
    stack-local LSDAFile so no global handle slot is consumed.
  */
  unsigned char header[8];
  char tname[8];
  Command cmd;
  LSDAType *type1, *type2;
  int first = 1, len;
  int i, j, retval = 0;
  LSDAFile daf0, *daf;
  daf = &daf0;
  _errno = ERR_NONE;
  InitLSDAFile(daf);
  len = strlen(filen);
  daf->ifr = newIFile();
  /* strip a trailing directory separator, then split into dir + file */
  if (filen[len - 1] == DIR_SEP)
    filen[--len] = 0;
  daf->ifr->dirname = (char*)malloc(len + 10);
  daf->ifr->filename = (char*)malloc(len + 10);
  for (j = len - 1; j > 0; j--)
    if (filen[j] == DIR_SEP) {
      strcpy(daf->ifr->dirname, filen);
      daf->ifr->dirname[j] = 0;
      strcpy(daf->ifr->filename, filen + j + 1);
      break;
    }
  if (j == 0) {
    strcpy(daf->ifr->dirname, ".");
    strcpy(daf->ifr->filename, filen);
  }
  daf->openmode = LSDA_READONLY;
  daf->encrypted = 0;
#ifdef HAVE_AES
  if (key && *key) {
    unsigned char* cp = key;
    unsigned char lkey[16];
    int i;
    /* build the 16-byte AES key by cycling through the user string */
    for (i = 0; i < 16; i++) {
      if (*cp == 0)
        cp = key;
      lkey[i] = *cp++;
    }
    daf->encrypted = 1;
    aes_enc_key(lkey, 16, daf->ctx);
  }
#endif
  /*
  Try opening and reading this file.
  */
  if ((daf->fpr = fopen(filen, "rb")) == NULL)
    return 0; /* failed */
  if (daf->encrypted)
    ReadSalt(daf);
  if (ReadData(header, 1, 8, daf) < 8) { /* fail */
    fclose(daf->fpr);
    goto done0;
  }
  /* header bytes 1..6: field sizes, byte order and FP format of the
     writing machine -- each size must be in [1,8] to be plausible */
  daf->ifr->FileLengthSize = header[1];
  daf->ifr->FileOffsetSize = header[2];
  daf->ifr->FileCommandSize = header[3];
  daf->ifr->FileTypeIDSize = header[4];
  daf->ifr->bigendian = header[5];
  daf->ifr->fp_format = header[6];
  if (daf->ifr->FileLengthSize < 1 || daf->ifr->FileLengthSize > 8 ||
      daf->ifr->FileOffsetSize < 1 || daf->ifr->FileOffsetSize > 8 ||
      daf->ifr->FileCommandSize < 1 || daf->ifr->FileCommandSize > 8 ||
      daf->ifr->FileTypeIDSize < 1 || daf->ifr->FileTypeIDSize > 8 ||
      daf->ifr->bigendian < 0 || daf->ifr->bigendian > 1) {
    fclose(daf->fpr);
    goto done0;
  }
  lsda_createbasictypes(daf);
  /*
  Set conversion functions for length, offset, etc
  */
  /* each conversion maps the on-disk integer width to this machine's */
  sprintf(tname, "I*%d", daf->ifr->FileLengthSize);
  type1 = daf->FindType(daf, tname);
  sprintf(tname, "I*%d", (int)sizeof(Length));
  type2 = daf->FindType(daf, tname);
  daf->ifr->ConvertLength = GetConversionFunction(daf->ifr, type1, type2);
  sprintf(tname, "I*%d", daf->ifr->FileOffsetSize);
  type1 = daf->FindType(daf, tname);
  sprintf(tname, "I*%d", (int)sizeof(Offset));
  type2 = daf->FindType(daf, tname);
  daf->ifr->ConvertOffset = GetConversionFunction(daf->ifr, type1, type2);
  sprintf(tname, "I*%d", daf->ifr->FileCommandSize);
  type1 = daf->FindType(daf, tname);
  sprintf(tname, "I*%d", (int)sizeof(Command));
  type2 = daf->FindType(daf, tname);
  daf->ifr->ConvertCommand = GetConversionFunction(daf->ifr, type1, type2);
  sprintf(tname, "I*%d", daf->ifr->FileTypeIDSize);
  type1 = daf->FindType(daf, tname);
  sprintf(tname, "I*%d", (int)sizeof(TypeID));
  type2 = daf->FindType(daf, tname);
  daf->ifr->ConvertTypeID = GetConversionFunction(daf->ifr, type1, type2);
  /*
  OK, now check for what look like reasonable symbol table(s).
  */
  if (daf->encrypted) {
    fseek(daf->fpr, 16 + header[0], SEEK_SET); /* skip initial salt */
    ReadSalt(daf);
  } else {
    fseek(daf->fpr, header[0], SEEK_SET);
  }
  ReadLength(daf);
  cmd = ReadCommand(daf);
  if (_errno == ERR_READ || (cmd != LSDA_SYMBOLTABLEOFFSET &&
                             cmd != LSDA_ENDSYMBOLTABLE)) { /* error */
    _errno = ERR_NONE;                                      /* reset read error */
    goto done1;
  }
  daf->ifr->stoffset = ftell(daf->fpr);
  /* follow at most 5 chained symbol tables; reaching end-of-chain
     early (j==0) counts as success */
  for (i = 0; i < 5; i++) {
    j = lsda_checkforsymboltable(daf);
    if (j == -1)
      break; /* bad file */
    if (j == 0)
      i = 5; /* end of file -- count as OK */
  }
  retval = i < 5 ? 0 : 1;
done1:
  fclose(daf->fpr);
  daf->fpr = NULL;
  daf->FreeTypes(daf);
done0:
  free(daf->ifr->dirname);
  free(daf->ifr->filename);
  free(daf->ifr);
  return retval;
}
int
lsda_open(char* filen, int mode)
{
  /* Plain open: fresh handle, no encryption key, no caller salt. */
  return lsda_open2(filen, mode, /* handle_in */ -1, /* key */ NULL,
                    /* salt */ NULL);
}
int
lsda_open_aes(char* filen, int mode, char* key)
{
  /* Encrypted open: fresh handle, caller-supplied AES key, no salt. */
  char* no_salt = NULL;
  return lsda_open2(filen, mode, -1, key, no_salt);
}
int
lsda_open_salt(char* filen, int mode, char* key, char* salt)
{
  /* Encrypted open with a caller-supplied salt, fresh handle. */
  int new_handle = lsda_open2(filen, mode, -1, key, salt);
  return new_handle;
}
static int
lsda_open2(char* filen, int mode, int handle_in, char* key, char* salt)
{
  /*
    Core open routine behind lsda_open/lsda_open_aes/lsda_open_salt.
    Allocates (or reuses, if handle_in >= 0) a handle slot, splits the
    path, sets up optional AES encryption, then initializes according
    to "mode".  Returns the handle on success, -1 on failure (with the
    slot freed again).
  */
  int i, j, len, handle;
  LSDAFile* daf;
  char* cp;
  DIR* dp;
  _errno = ERR_NONE; /* reset error */
  /*
  Get next available handle
  */
  if (handle_in < 0) {
    for (i = 0; i < num_daf; i++) {
      if (da_store[i].free)
        break;
    }
    if (i == num_daf && alloc_more_daf(10) < 0) {
      _errno = ERR_MALLOC;
      fprintf(stderr, "lsda_open: memory allocation error");
      return -1;
    }
    handle = i;
  } else {
    i = handle = handle_in; /* guaranteed to be ok.... */
  }
  daf = da_store + i;
  InitLSDAFile(daf);
  daf->num_list = 1;
  daf->maxsize = DEF_MAX_SIZE;
  daf->ifile = (IFile**)malloc(sizeof(IFile*));
  daf->ifile[0] = newIFile();
  daf->ifr = daf->ifw = daf->ifile[0];
  len = strlen(filen);
  /* strip a trailing directory separator, then split dir + file;
     filename gets 8 extra bytes for a later "%NNN" family suffix */
  if (filen[len - 1] == DIR_SEP)
    filen[--len] = 0;
  for (j = len - 1; j > 0; j--)
    if (filen[j] == DIR_SEP) {
      daf->ifr->dirname = (char*)malloc(j + 1);
      memcpy(daf->ifr->dirname, filen, j);
      daf->ifr->dirname[j] = (char)0;
      daf->ifr->filename = (char*)malloc(len - j + 8);
      strcpy(daf->ifr->filename, filen + j + 1);
      break;
    }
  if (j == 0) {
    daf->ifr->dirname = (char*)malloc(2);
    strcpy(daf->ifr->dirname, ".");
    daf->ifr->filename = (char*)malloc(len + 1 + 8);
    strcpy(daf->ifr->filename, filen);
  }
  daf->openmode = mode;
  daf->encrypted = 0;
#ifdef HAVE_AES
  if (key && *key) {
    unsigned char* cp = key;
    unsigned char lkey[16];
    int i;
    /* build the 16-byte AES key by cycling through the user string */
    for (i = 0; i < 16; i++) {
      if (*cp == 0)
        cp = key;
      lkey[i] = *cp++;
    }
    daf->encrypted = 1;
    aes_enc_key(lkey, 16, daf->ctx);
  }
#endif
  /*
  Open and initialize the file
  */
  /* NOTE: an unrecognized mode falls through the switch to cleanup */
  switch (mode) {
    case LSDA_READONLY: /* open existing file and preserve data */
      if (read_initialize(daf, 1) < 0)
        goto cleanup;
      return handle;
    case LSDA_READWRITE: /* open existing file and preserve data */
      if (read_initialize(daf, 1) < 0)
        goto cleanup;
      rw_initialize(daf);
      return handle;
    case LSDA_APPEND: /* open existing file for writing */
      if (read_initialize(daf, 0) > 0) {
        rw_initialize(daf);
        daf->openmode = LSDA_WRITEONLY;
        return handle;
      }
      /*
        file not found, so create WRITEONLY by
        falling through to next case.
        Reset openmode and the file handles (which were set to NULL
        by read_initialize)
      */
      daf->openmode = mode = LSDA_WRITEONLY;
      daf->ifr = daf->ifw = daf->ifile[0];
      /* FALLTHROUGH */
    case LSDA_WRITEONLY: /* create file */
    case LSDA_WRITEREAD: /* create file */
#if defined _WIN32 || defined MPPWIN
      if ((dp = opendir(daf->ifw->dirname, daf->ifw->filename)) == NULL) {
#else
      if ((dp = opendir(daf->ifw->dirname)) == NULL) {
#endif
        _errno = ERR_OPENDIR;
        if (report_level > 0)
          fprintf(stderr,
                  "lsda_open: Cannot open directory %s\nCheck permissions\n",
                  daf->ifr->dirname);
        goto cleanup;
      }
      /* wipe any existing family members of the same base name */
      while (cp = finddirmatch(daf->ifw->filename, dp)) {
        remove(cp);
      }
      closedir(dp);
      if ((daf->fpw = fopen(filen, "w+b")) == NULL) {
        _errno = ERR_OPENFILE;
        if (report_level > 0)
          fprintf(stderr,
                  "lsda_open: Cannot open file %s\nCheck permissions\n",
                  filen);
        goto cleanup;
      }
      if (write_initialize(daf, salt) < 0)
        goto cleanup;
      return handle;
  }
cleanup:
  if (daf->ifile) {
    if (daf->ifr) {
      if (daf->ifr->dirname)
        free(daf->ifr->dirname);
      if (daf->ifr->filename)
        free(daf->ifr->filename);
      free(daf->ifr);
    }
    free(daf->ifile);
    daf->ifile = NULL;
  }
  daf->free = 1;
  return -1;
}
static int
alloc_more_daf(int count)
{
int i;
if (da_store)
da_store =
(LSDAFile*)realloc(da_store, (num_daf + count) * sizeof(LSDAFile));
else
da_store = (LSDAFile*)malloc(count * sizeof(LSDAFile));
if (!da_store) {
_errno = ERR_MALLOC;
if (report_level > 0)
fprintf(stderr, "alloc_more_daf: malloc of %d failed\n", count);
return -1;
}
for (i = num_daf; i < num_daf + count; i++)
da_store[i].free = 1;
num_daf += count;
return 1;
}
STATIC int
write_initialize(LSDAFile* daf, char* salt)
{
  /*
    Write the 8-byte file header and an empty SYMBOLTABLEOFFSET record
    to a freshly created file, then establish "/" as the current
    directory.  "salt" (optional) is a caller-supplied 16-byte salt for
    encrypted files; otherwise a new salt is generated.  Returns 1 on
    success, -1 on any write failure.
  */
  unsigned char header[16];
  int handle = daf - da_store;
  Length rlen;
  Command cmd;
  Offset offset;
  header[0] = 8; /* number of bytes in header, this included */
  header[1] = sizeof(Length); /* size of int used in data record lengths */
  header[2] = sizeof(Offset); /* size of int used in data file offsets */
  header[3] = sizeof(Command); /* size of int used in data file commands */
  header[4] = sizeof(TypeID); /* size of int used in data file typeids */
  header[5] = little_endian; /* 0 for bigendian, 1 for little_endian */
  header[6] = FP_FORMAT; /* 0 = IEEE */
  header[7] = 0;
  fseek(daf->fpw, 0, SEEK_SET);
  if (daf->encrypted) {
    if (salt) { /* use user supplied salt and skip the encryption step */
      memcpy(daf->salt, salt, 16);
      if (fwrite(daf->salt, 1, 16, daf->fpw) != 16)
        goto write_error;
    } else {
      get_salt(daf->salt); /* first time, need to initialize salt */
      if (WriteSalt(daf))
        goto write_error;
    }
  }
  if (WriteData(header, 1, 8, daf, 1) < 8)
    goto write_error;
  /*
  Create empty space for symbol table pointer
  */
  rlen = sizeof(Length) + sizeof(Command) + sizeof(Offset);
  cmd = LSDA_SYMBOLTABLEOFFSET;
  if (daf->encrypted && WriteSalt(daf))
    goto write_error;
  if (WriteData((octet*)&rlen, sizeof(Length), 1, daf, 1) < 1)
    goto write_error;
  if (WriteData((octet*)&cmd, sizeof(Command), 1, daf, 1) < 1)
    goto write_error;
  offset = 0;
  /* remember where the (currently zero) symbol table offset lives so
     it can be patched later */
  daf->ifw->stoffset = ftell(daf->fpw);
  daf->ifw->ateof = 0;
  if (WriteData((octet*)&offset, sizeof(Offset), 1, daf, 1) < 1)
    goto write_error;
  if (lsda_cd(handle, "/") < 0) {
    fclose(daf->fpw);
    daf->fpw = NULL;
    daf->ifw = NULL;
    return -1;
  }
  if (lsda_writecd(handle, "/") < 0) {
    fclose(daf->fpw);
    daf->fpw = NULL;
    daf->ifw = NULL;
    return -1;
  }
  strcpy(daf->lastpath, "/");
  /* mirror the header fields into the in-memory IFile description */
  daf->ifw->FileLengthSize = header[1];
  daf->ifw->FileOffsetSize = header[2];
  daf->ifw->FileCommandSize = header[3];
  daf->ifw->FileTypeIDSize = header[4];
  daf->ifw->bigendian = header[5];
  daf->ifw->fp_format = header[6];
  lsda_createbasictypes(daf);
  return 1;
write_error:
  _errno = ERR_WRITE;
  if (report_level > 0) {
    fprintf(stderr,
            "write_initialize: Write error on file %s\n",
            fullfilename(daf->ifw));
  }
  if (daf->fpw)
    fclose(daf->fpw);
  daf->fpw = NULL;
  daf->ifw = NULL;
  return -1;
}
STATIC void
lsda_createbasictypes(LSDAFile* daf)
{
  /*
    Register the intrinsic LSDA types (sized integer, unsigned and real
    kinds plus LINK) and the machine-dependent aliases onto them.
    Idempotent: does nothing if types have already been created.
  */
  static const struct {
    const char* name;
    int bytes;
  } intrinsics[] = {
    { "I*1", 1 }, { "I*2", 2 }, { "I*4", 4 }, { "I*8", 8 },
    { "U*1", 1 }, { "U*2", 2 }, { "U*4", 4 }, { "U*8", 8 },
    { "R*4", 4 }, { "R*8", 8 }, { "LINK", 1 },
  };
  static const struct {
    const char* alias;
    char kind; /* 'I', 'U' or 'R' */
    int bytes;
  } aliases[] = {
    { "int", 'I', (int)sizeof(int) },
    { "short", 'I', (int)sizeof(short) },
    { "long", 'I', (int)sizeof(long) },
    { "uint", 'U', (int)sizeof(unsigned int) },
    { "ushort", 'U', (int)sizeof(unsigned short) },
    { "ulong", 'U', (int)sizeof(unsigned long) },
    { "float", 'R', (int)sizeof(float) },
    { "double", 'R', (int)sizeof(double) },
    { "integer", 'I', (int)sizeof(FortranInteger) },
    { "real", 'R', (int)sizeof(FortranReal) },
    { "double precision", 'R', (int)sizeof(FortranDouble) },
  };
  LSDAType* type;
  char tname[32];
  size_t k;
  if (daf->ntypes > 0)
    return; /* have already been in here... */
  for (k = 0; k < sizeof(intrinsics) / sizeof(intrinsics[0]); k++) {
    type = daf->CreateType(daf, (char*)intrinsics[k].name);
    type->length_on_disk = type->length = intrinsics[k].bytes;
  }
  /* aliases map C/Fortran names onto the sized intrinsics above */
  for (k = 0; k < sizeof(aliases) / sizeof(aliases[0]); k++) {
    sprintf(tname, "%c*%d", aliases[k].kind, aliases[k].bytes);
    CreateTypeAlias(daf, (char*)aliases[k].alias, tname);
  }
}
STATIC void
CreateTypeAlias(LSDAFile* daf, char* alias, char* oldtype)
{
  /* Register "alias" as a new type whose meaning is deferred to the
     existing type named "oldtype".  Creation happens before lookup,
     matching the original registration order. */
  LSDAType* fresh = daf->CreateType(daf, alias);
  fresh->alias = daf->FindType(daf, oldtype);
}
STATIC int
read_initialize(LSDAFile* daf, int keepst)
{
  /*
  Read in the existing symbol table (or reconstruct it as needed....someday)
  If keepst==0, don't actually keep ST data. But we DO read it to make
  sure the file is valid.
  */
  /*
    For each IFile in daf->ifile, scan its directory for family members
    (filename plus optional %digits suffix), validate each one's header
    and symbol table, and keep the good ones.  Returns 1 if at least
    one file validated, -1 otherwise (with _errno set).
  */
  unsigned char header[8];
  char tname[8];
  Command cmd;
  LSDAType *type1, *type2;
  char *name, fullname[1024];
  char base_directory[2048];
  IFile* ifile;
  int namelen;
  int retval = -1;
  int i, j, is_newfile;
  DIR* dp;
  int org_num_list = daf->num_list;
  // Extract directory name - needed for a hands on bugfix
#ifdef _WIN32
  /* On Windows the filename may itself contain separators; find the
     last one so fullname below is built from the real directory. */
  int last_separator = -1;
  int ii;
  for (ii = 0; ii < strlen(daf->ifile[0]->filename); ++ii) {
    if ((daf->ifile[0]->filename[ii] == '/') ||
        (daf->ifile[0]->filename[ii] == '\\')) {
      last_separator = ii;
    }
  }
  if (last_separator >= 0) {
    strncpy(base_directory, daf->ifile[0]->filename, last_separator);
    base_directory[last_separator] = '\0';
  } else {
    sprintf(base_directory, ".");
  }
#endif
  for (i = 0; i == 0 || i < org_num_list; i++) {
    daf->ifr = daf->ifile[i];
    namelen = strlen(daf->ifile[i]->filename);
#if defined _WIN32 || defined MPPWIN
    dp = opendir(daf->ifile[i]->dirname, daf->ifile[i]->filename);
#else
    dp = opendir(daf->ifile[i]->dirname);
#endif
    if (dp == NULL) {
      _errno = ERR_OPENDIR;
      if (report_level > 0)
        fprintf(
          stderr,
          "read_initialize: Cannot open directory %s\nCheck permissions\n",
          daf->ifile[i]->dirname);
      return -1;
    }
    /*
    Try opening and reading all files of the form filename[%digits]
    where the [%digits] are optional. As far as I can see, I
    don't really care what order we open them in. But remember which one
    is the highest numbered -- that is the one we will do any writing to.
    */
    while (daf->ifile[i] &&
           (name = finddirmatch(daf->ifile[i]->filename, dp))) {
      if (strlen(name) == namelen) { /* opened base file */
        ifile = daf->ifile[i];
        is_newfile = 0;
      } else {
        /* a %NNN family member: gets its own IFile entry */
        ifile = newIFile();
        ifile->dirname = (char*)malloc(strlen(daf->ifile[i]->dirname) + 1);
        ifile->filename = (char*)malloc(strlen(name) + 1);
        strcpy(ifile->dirname, daf->ifile[i]->dirname);
        strcpy(ifile->filename, name);
        is_newfile = 1;
      }
      // sprintf(fullname,"%s%c%s",ifile->dirname,DIR_SEP,ifile->filename); //
      // ?!?!?!
#ifdef _WIN32
      sprintf(fullname, "%s%c%s", base_directory, DIR_SEP, ifile->filename);
#else
      sprintf(fullname, "%s%c%s", ifile->dirname, DIR_SEP, ifile->filename);
#endif
      if ((daf->fpr = fopen(fullname, "rb")) == NULL) { /* skip this file */
        free(ifile->dirname);
        free(ifile->filename);
        free(ifile);
        if (!is_newfile)
          daf->ifile[i] = NULL;
        continue;
      }
      if (daf->encrypted)
        ReadSalt(daf);
      if (ReadData(header, 1, 8, daf) < 8) { /* skip this file */
        fclose(daf->fpr);
        free(ifile->dirname);
        free(ifile->filename);
        free(ifile);
        if (!is_newfile)
          daf->ifile[i] = NULL;
        daf->fpr = NULL;
        continue;
      }
      /* header bytes 1..6 describe the writing machine's field sizes,
         byte order and FP format */
      ifile->FileLengthSize = header[1];
      ifile->FileOffsetSize = header[2];
      ifile->FileCommandSize = header[3];
      ifile->FileTypeIDSize = header[4];
      ifile->bigendian = header[5];
      ifile->fp_format = header[6];
      lsda_createbasictypes(daf);
      /*
      Set conversion functions for length, offset, etc
      */
      sprintf(tname, "I*%d", ifile->FileLengthSize);
      type1 = daf->FindType(daf, tname);
      sprintf(tname, "I*%d", (int)sizeof(Length));
      type2 = daf->FindType(daf, tname);
      ifile->ConvertLength = GetConversionFunction(ifile, type1, type2);
      sprintf(tname, "I*%d", ifile->FileOffsetSize);
      type1 = daf->FindType(daf, tname);
      sprintf(tname, "I*%d", (int)sizeof(Offset));
      type2 = daf->FindType(daf, tname);
      ifile->ConvertOffset = GetConversionFunction(ifile, type1, type2);
      sprintf(tname, "I*%d", ifile->FileCommandSize);
      type1 = daf->FindType(daf, tname);
      sprintf(tname, "I*%d", (int)sizeof(Command));
      type2 = daf->FindType(daf, tname);
      ifile->ConvertCommand = GetConversionFunction(ifile, type1, type2);
      sprintf(tname, "I*%d", ifile->FileTypeIDSize);
      type1 = daf->FindType(daf, tname);
      sprintf(tname, "I*%d", (int)sizeof(TypeID));
      type2 = daf->FindType(daf, tname);
      ifile->ConvertTypeID = GetConversionFunction(ifile, type1, type2);
      /*
      Read in symbol table
      Should put reconstruction code in here eventually...
      */
      if (daf->encrypted) {
        fseek(daf->fpr, 16 + header[0], SEEK_SET); /* skip initial salt */
        ReadSalt(daf);
      } else {
        fseek(daf->fpr, header[0], SEEK_SET);
      }
      daf->ifr = ifile; /* so ReadTrans routines will work */
      ReadLength(daf);
      cmd = ReadCommand(daf);
      if (_errno == ERR_READ ||
          (cmd != LSDA_SYMBOLTABLEOFFSET &&
           cmd != LSDA_ENDSYMBOLTABLE)) { /* skip this file for now */
        _errno = ERR_NONE;                /* reset read error */
        if (report_level > 0) {
          fprintf(stderr, "Error reading symbol table in file %s\n", name);
          fprintf(stderr, "     Skipping this file\n");
        }
        free(ifile->dirname);
        free(ifile->filename);
        free(ifile);
        if (!is_newfile)
          daf->ifile[i] = NULL;
        fclose(daf->fpr);
        daf->fpr = NULL;
        continue;
      }
      daf->ifr->stoffset = ftell(daf->fpr);
      if (lsda_readsymboltable(daf) == 1) { /* OK, keep this one */
        if (is_newfile) {
          daf->ifile =
            (IFile**)realloc(daf->ifile, (daf->num_list + 1) * sizeof(IFile*));
          daf->ifile[daf->num_list++] = ifile;
        }
        retval = 1;
        if (!keepst)
          PruneSymbols(daf, daf->top);
      } else {
        free(ifile->dirname);
        free(ifile->filename);
        free(ifile);
        if (!is_newfile)
          daf->ifile[i] = NULL;
      }
      fclose(daf->fpr);
      daf->fpr = NULL;
    }
    closedir(dp);
  }
  daf->ifw = NULL;
  daf->fpw = NULL;
  daf->ifr = NULL;
  daf->fpr = NULL;
  daf->stpendlen = 0;
  daf->cwd = daf->top;
  /*
   * In case we had problems opening one or more of the files, reduce the ifile
   * list here
   */
  for (i = j = 0; i < daf->num_list; i++) {
    if (daf->ifile[i] != NULL)
      daf->ifile[j++] = daf->ifile[i];
  }
  daf->num_list = j;
  if (retval < 0)
    _errno = ERR_OPENFILE;
  return retval;
}
STATIC int
rw_initialize(LSDAFile* daf)
{
  /*
  Routine to do the "write" part of the initialization of a READWRITE
  open call.  read_initialize has already been called.
  steps: put highest numbered file last in ifile list. Check to see
  if it is compatible. If so, use it, if not, open another one.
  */
  IFile* ifile;
  int i, largest, index, val;
  char* cp;
  /*
  Find the file with the largest extension number
  */
  largest = -1;
  index = -1;
  for (i = 0; i < daf->num_list; i++) {
    cp = strrchr(daf->ifile[i]->filename, '%'); /* find % */
    if (!cp) {
      val = 0;
    } else {
      val = atoi(cp + 1); /* convert following to number */
      for (cp++; *cp; cp++)
        if (!isdigit(*cp))
          val = 0; /* but only if all following are digits */
    }
    if (val > largest) {
      largest = val;
      index = i;
    }
  }
  /*
  If it is not last in the list, make it last in the list
  */
  if (index < daf->num_list - 1) {
    ifile = daf->ifile[index];
    daf->ifile[index] = daf->ifile[daf->num_list - 1];
    daf->ifile[daf->num_list - 1] = ifile;
  } else {
    ifile = daf->ifile[daf->num_list - 1];
  }
  /*
  Check to see if this file is compatible with the way I want to write
  it
  */
  daf->ifw = ifile;
  daf->ifw->ateof = 0;
  daf->npend = 0;
  daf->continued = 0;
  daf->stpendlen = 0;
  /* NOTE(review): despite its name, the "bigendian" field holds the
     value write_initialize puts in header[5] (the little_endian flag,
     1 = little endian), so comparing it to little_endian here is
     consistent with the writer -- confirm against the struct docs. */
  if (ifile->FileLengthSize == sizeof(Length) &&
      ifile->FileOffsetSize == sizeof(Offset) &&
      ifile->FileCommandSize == sizeof(Command) &&
      ifile->FileTypeIDSize == sizeof(TypeID) &&
      ifile->bigendian == little_endian &&
      ifile->fp_format == FP_FORMAT) { /* file is compatible, use it */
    daf->fpw = fopen(fullfilename(ifile), "r+b");
    if (daf->fpw == NULL) {
      daf->ifw = NULL;
      return -1;
    }
    return 0;
  }
  /* incompatible format: start a fresh family member instead */
  return lsda_nextfile(daf - da_store);
}
STATIC int
closeout_var(LSDAFile* daf)
{
  /*
    Finish a variable written with continuation calls: seek back to the
    record header and patch in the final record length (for encrypted
    files the whole header must be rewritten, since the salt changes).
    Also converts the byte length into an element count stored in
    daf->var->length.  Returns 1 on success, -1 on seek/write failure.
  */
  Length len = ftell(daf->fpw) - daf->var->offset;
  Command cmd;
  TypeID tid;
  char nlen;
  /*
  If encryption, have to rewrite the whole header, not just the
  length
  */
  daf->ifw->ateof = 0;
  if (fseek(daf->fpw, daf->var->offset, SEEK_SET) < 0) {
    _errno = ERR_FSEEK;
    if (report_level > 0) {
      fprintf(stderr,
              "closeout_var: seek error on file %s\n",
              fullfilename(daf->ifw));
    }
    return -1;
  }
  if (daf->encrypted) {
    cmd = LSDA_DATA;
    nlen = strlen(daf->var->name);
    len -= 16; /* don't include length of first salt in record length */
    tid = LSDAId(daf->var->type);
    if (WriteSalt(daf) || /* write new salt value */
        WriteData((octet*)&len, sizeof(Length), 1, daf, 1) != 1 ||
        WriteData((octet*)&cmd, sizeof(Command), 1, daf, 1) != 1 ||
        WriteData((octet*)&tid, sizeof(TypeID), 1, daf, 1) != 1 ||
        WriteData((octet*)&nlen, 1, 1, daf, 1) != 1 ||
        WriteData((octet*)daf->var->name, nlen, 1, daf, 1) != 1) {
      _errno = ERR_WRITE;
      if (report_level > 0) {
        fprintf(stderr,
                "closeout_var: write error on file %s\n",
                fullfilename(daf->ifw));
      }
      return -1;
    }
    len -= 16; /* don't include length of second salt in variable length */
  } else {
    if (WriteData((octet*)&len, sizeof(Length), 1, daf, 1) < 1) {
      _errno = ERR_WRITE;
      if (report_level > 0) {
        fprintf(stderr,
                "closeout_var: write error on file %s\n",
                fullfilename(daf->ifw));
      }
      return -1;
    }
  }
  daf->continued = 0;
  /* element count = (record bytes - header bytes) / element size */
  daf->var->length = (len - sizeof(Length) - sizeof(Command) - sizeof(TypeID) -
                      strlen(daf->var->name) - 1) /
                     LSDASizeOf(daf->var->type);
  return 1;
}
STATIC int
SwitchFamilyMember(LSDAFile* daf, LSDATable* var)
{
  /*
    Point the read side of "daf" at the family member holding "var",
    closing the previously open read stream first.  Returns 1 on
    success, -1 if the member's file cannot be opened.
  */
  /* This is only ever called while reading */
  if (daf->fpr && (daf->fpw != daf->fpr))
    fclose(daf->fpr);
  daf->ifr = var->ifile;
  if (daf->ifr == daf->ifw) {
    /* same member we are writing to: share its stream */
    daf->fpr = daf->fpw;
    return 1;
  }
  /*
  Opening file read/write: if the user calls lsda_rewrite, we could
  need to write to a pre-existing file.
  */
  daf->fpr = fopen(fullfilename(daf->ifr), "r+b");
  if (daf->fpr != NULL)
    return 1;
  _errno = ERR_OPENFILE;
  if (report_level > 0)
    fprintf(stderr,
            "lsda_SwitchFamilyMember: error opening %s",
            fullfilename(daf->ifr));
  return -1;
}
Length
lsda_write(int handle, int type_id, char* name, Length length, void* data)
{
  /*
    Write "length" elements of type "type_id" under variable "name".
    An empty name continues the previously started variable.  "name"
    may contain a directory part, in which case the CWD is changed for
    the write and restored afterwards.  Returns the number of elements
    written, or -1 on error (with _errno set).
  */
  LSDAFile* daf;
  int tsize;
  TypeID tid;
  Length rlen;
  Command cmd = LSDA_DATA;
  char nlen;
  LSDATable *var, *pvar;
  LSDAType* type;
  char prevpath[MAXPATH], cwd[MAXPATH];
  int j, retval;
  char lname[256], ldir[256];
  if (handle < 0 || handle >= num_daf) {
    _errno = ERR_NOFILE;
    if (report_level > 0)
      fprintf(stderr, "lsda_write: invalid handle %d", handle);
    return -1;
  }
  daf = da_store + handle;
  if (name[0] == 0) { /* continue writing previous variable */
    if (!daf->var) {
      _errno = ERR_NOCONT;
      if (report_level > 0)
        fprintf(
          stderr,
          "Empty variable name used while not currently writing a variable\n");
      return -1;
    }
    daf->continued = 1;
    tsize = LSDASizeOf(daf->var->type);
    retval = WriteData((octet*)data, tsize, length, daf, 0);
    if (retval < length)
      _errno = ERR_WRITE;
    daf->var->length += retval;
    return retval;
  }
  cwd[0] = 0;
  /*
  Writing new variable. If were not finished with the old one,
  close it out.
  */
  if (daf->npend)
    WriteData(NULL, 1, 0, daf, 1); /* flush pending bytes */
  if (daf->continued) {
    if (closeout_var(daf) < 0)
      return -1;
  }
  if (!daf->ifw->ateof) {
    fseek(daf->fpw, 0, SEEK_END);
    daf->ifw->ateof = 1;
  }
  /*
  If we are getting too big, wrap to a new file
  */
  if (ftell(daf->fpw) + daf->stpendlen > daf->maxsize)
    lsda_nextfile(handle);
  /*
  Check for directory portion in variable name
  */
  /* NOTE(review): nlen is a char holding strlen(name); names longer
     than fits in a char would overflow here -- the on-disk format
     stores the name length in 1 byte, so long names look unsupported
     anyway.  Confirm callers keep names short. */
  nlen = strlen(name);
  for (j = nlen - 1; j > 0; j--)
    if (name[j] == '/') {
      strcpy(ldir, name);
      ldir[j] = (char)0;
      strcpy(lname, name + j + 1);
      break;
    }
  if (j == 0) {
    strcpy(lname, name);
  } else {
    /* remember current directory so we can restore it on exit */
    strcpy(cwd, daf->GetCWD(daf));
    lsda_cd(handle, ldir);
  }
  /*
  Update CWD in file if necessary
  */
  if (daf->pathchanged) {
    strcpy(prevpath, daf->lastpath);
    strcpy(daf->lastpath, daf->GetCWD(daf));
    if (lsda_writecd(handle, findpath(prevpath, daf->lastpath)) < 0) {
      if (report_level > 0)
        fprintf(stderr, "lsda_write: updating CWD\n");
      if (cwd[0])
        lsda_cd(handle, cwd);
      return -1;
    }
  }
  if ((type = daf->FindTypeByID(daf, type_id)) == NULL) {
    _errno = ERR_DATATYPE;
    if (report_level > 0)
      fprintf(stderr, "lsda_write: unrecognized data type %d\n", type_id);
    if (cwd[0])
      lsda_cd(handle, cwd);
    return -1;
  }
  var = daf->CreateVar(daf, type, lname);
  var->offset = (Offset)ftell(daf->fpw);
  var->length = (Length)length;
  var->ifile = daf->ifw;
  /* mark variable and all its parents dirty, so they will get checked
     when it is time to dump this into the symbol table */
  for (pvar = var; pvar; pvar = pvar->parent)
    pvar->dirty = 1;
  nlen = (char)strlen(var->name);
  /* account for this variable's future symbol table entry size */
  daf->stpendlen += sizeof(Length) + sizeof(Command) + sizeof(TypeID) + nlen +
                    sizeof(Offset) + sizeof(Length);
  daf->var = var;
  tsize = LSDASizeOf(type);
  tid = LSDAId(type);
  rlen = sizeof(Length) + sizeof(Command) + sizeof(TypeID) + nlen + 1 +
         length * tsize;
  if (daf->encrypted) {
    rlen += 16; /* increase record length to account for salt before data */
    if (daf->encrypted && WriteSalt(daf))
      goto write_error;
  }
  if (WriteData((octet*)&rlen, sizeof(Length), 1, daf, 1) < 1)
    goto write_error;
  if (WriteData((octet*)&cmd, sizeof(Command), 1, daf, 1) < 1)
    goto write_error;
  if (WriteData((octet*)&tid, sizeof(TypeID), 1, daf, 1) < 1)
    goto write_error;
  /*
  The variable name is stored as a 1 char length and then a non-terminated
  string
  */
  if (WriteData((octet*)&nlen, 1, 1, daf, 1) < 1)
    goto write_error;
  if (WriteData((octet*)lname, 1, (int)nlen, daf, 1) < nlen)
    goto write_error;
  if (daf->encrypted && WriteSalt(daf))
    goto write_error;
  retval = WriteData((octet*)data, tsize, length, daf, 0);
  if (retval < length)
    _errno = ERR_WRITE;
  if (cwd[0])
    lsda_cd(handle, cwd);
  return retval;
write_error:
  _errno = ERR_WRITE;
  if (report_level > 0) {
    fprintf(
      stderr, "lsda_write: write error on file %s\n", fullfilename(daf->ifw));
  }
  if (cwd[0])
    lsda_cd(handle, cwd);
  return -1;
}
Length
lsda_rewrite(int handle,
int type_id,
char* name,
Length offset,
Length number,
void* data)
/*
Rewrite part of an existing variable. For now type_id is ignored -- it is
assumed the user knows what they are doing and is writing exactly the same
type of data as was in the file originally. Someday we should allow a
different type input, and convert to the proper output type on the fly.
*/
{
LSDAFile* daf;
LSDATable* var;
Offset foffset;
int j, retval;
int tsizedisk;
if (handle < 0 || handle >= num_daf) {
_errno = ERR_NOFILE;
if (report_level > 0)
fprintf(stderr, "lsda_rewrite: invalid handle %d", handle);
return -1;
}
daf = da_store + handle;
if (daf->openmode != LSDA_READWRITE && daf->openmode != LSDA_WRITEREAD) {
_errno = ERR_WRITE;
if (report_level > 0)
fprintf(
stderr,
"lsda_rewrite: File %s\n is must be opened READWRITE or WRITEREAD\n",
daf->ifw ? daf->ifw->filename : NULL);
return -1;
}
/*
Find the existing variable they want to rewrite.
*/
var = daf->FindVar(daf, name, 0, 1);
if (var == NULL) {
_errno = ERR_NOVAR;
if (report_level > 0)
fprintf(
stderr,
"lsda_rewrite: variable %s not found while writing file %s\n CWD=%s\n",
name,
daf->ifw ? daf->ifw->filename : NULL,
daf->GetCWD(daf));
return -1;
}
if (offset >= var->length)
return 0;
if (offset + number > var->length)
number = var->length - offset;
tsizedisk = LSDASizeOf(var->type);
if (daf->encrypted && (16 % tsizedisk)) {
/*
This cannot happen at the moment -- no data types are this large...
*/
fprintf(stderr,
"Error: rewriting varaible %s/%s in LSDA file %s:",
daf->GetCWD(daf),
name,
daf->ifr->filename);
fprintf(stderr,
" File is encrypted and variable size does not divide 16\n");
_errno = ERR_READ;
return 0;
}
/*
Writing a variable. If were not finished with the last one,
close it out.
*/
if (daf->npend)
WriteData(NULL, 1, 0, daf, 1);
if (daf->continued) {
if (closeout_var(daf) < 0)
return -1;
}
if (((daf->fpr == NULL) || (var->ifile != daf->ifr)) &&
SwitchFamilyMember(daf, var) < 0) {
if (report_level > 0)
fprintf(stderr, "lsda_rewrite: error\n");
return -1;
}
daf->ifr->ateof = 0;
if (daf->encrypted) { /* have encryption */
char salt[32], salt2[16];
char buf[16], tmpblk[16];
int i, enddata;
octet* dp = (octet*)data;
octet* dextra = 0;
int spill1, spill2, gap1, gap2, nwhole;
Offset voffset, start, finish;
retval = number; /* unless we hit an error... */
/*
Offset to start of actual data for this variable. Include
the 16 byte salt before and after the header
*/
voffset = 16 + var->offset + var->ifile->FileLengthSize +
var->ifile->FileCommandSize + var->ifile->FileTypeIDSize +
strlen(var->name) + 1 + 16;
/*
Determine # extra bytes at the beginning and end to get us to
16 byte boundaries, plus # of whole blocks between
*/
start = offset * tsizedisk;
finish = start + number * tsizedisk;
gap1 = start % 16; /* bytes between block boundary and start of data */
spill1 =
(16 - gap1) % 16; /* bytes between start of data and next boundary */
spill2 = finish % 16; /* bytes between block boundary and end of data */
gap2 = (16 - spill2) % 16; /* bytes between end of data and next boundar */
nwhole = ((finish - spill2) - (start + spill1)) /
16; /* # whole blocks our data spans */
enddata = (var->length - offset - number) *
tsizedisk; /* # bytes left to deal with at the end */
/*
If nwhole<1, check to see if we begin and end in a single block
Simplify by reading in and decrypting this block, modifying
it, and then pretending that that is what was passed in.
Also, read and keep the existing file data just before the
end of what we will be writing, so we can decrypt all following
data and re-encrypt it afterward.
*/
if (nwhole == -1 || (nwhole == 0 && (gap1 == 0 || gap2 == 0))) {
foffset = voffset + start - gap1 - 16;
fseek(daf->fpr, foffset, SEEK_SET);
fread(salt, 1, 32, daf->fpr);
aes_enc_blk(salt, buf, daf->ctx);
for (i = 0; i < 16; i++) {
salt2[i] = salt[i + 16]; /* save for dealing with dextra below */
tmpblk[i] = salt[i + 16] ^ buf[i]; /* get decrypted data from file */
}
for (i = gap1; i < (16 - gap2);
i++) /* fill in values user wants to change */
tmpblk[i] = *dp++;
start -= gap1; /* fake data so this tmpblk gets written out */
gap1 = spill1 = 0;
finish += gap2;
if (finish >
var->length * tsizedisk) { /* var ends before next boundary */
finish = var->length * tsizedisk;
spill2 = finish % 16;
gap2 = (16 - spill2) % 16;
enddata = 0;
nwhole = 0;
} else {
enddata -= gap2;
gap2 = spill2 = 0;
nwhole = 1;
}
dp = tmpblk;
} else if (enddata) { /* read in salt for decrypting dextra below */
foffset = voffset + finish - spill2 - 16;
fseek(daf->fpr, foffset, SEEK_SET);
fread(salt2, 1, 16, daf->fpr);
}
/*
Read salt and deal with any extra bytes at the beginning
to get things going
*/
if (gap1 == 0) {
foffset = voffset + start - 16;
fseek(daf->fpr, foffset, SEEK_SET);
fread(salt, 1, 16, daf->fpr);
fseek(
daf->fpr, foffset + 16, SEEK_SET); /* switching from read to write */
} else {
foffset = voffset + start - 16 - gap1;
fseek(daf->fpr, foffset, SEEK_SET);
fread(salt, 1, 16 + gap1, daf->fpr);
aes_enc_blk(salt, buf, daf->ctx);
for (i = 0; i < gap1; i++)
salt[i] = salt[16 + i];
for (; i < 16; i++)
salt[i] = (*dp++) ^ buf[i];
foffset = voffset + start;
fseek(daf->fpr, foffset, SEEK_SET);
fwrite(salt + gap1, 1, spill1, daf->fpr);
}
for (j = 0; j < nwhole; j++) { /* deal with any whole blocks */
aes_enc_blk(salt, buf, daf->ctx);
for (i = 0; i < 16; i++)
salt[i] = (*dp++) ^ buf[i];
fwrite(salt, 1, 16, daf->fpr);
}
if (enddata) { /* there is data to be re-encrypted */
int n = (enddata + spill2) / 16 + 1;
/*
Don't want to process this in a rolling fashion because that would
require a lot of fseek calls -- much better to just read in all
the rest of this data.
Don't worry if the variable does not end on a 16 byte boundary --
and garbage at the end (including read failure due to EOF) will
end up not actually being used.
*/
dextra = (octet*)malloc(16 * n);
foffset = voffset + finish - spill2;
fseek(daf->fpr, foffset, SEEK_SET);
fread(dextra, 1, 16 * n, daf->fpr);
for (j = 0; j < n; j++) { /* read data and decrypt it */
aes_enc_blk(salt2, buf, daf->ctx);
for (i = 0; i < 16; i++) {
salt2[i] = dextra[16 * j + i];
dextra[16 * j + i] = salt2[i] ^ buf[i];
}
}
for (j = 0; j < spill2;
j++) /* overwrite head of decrypted buffer with new data */
dextra[j] = *dp++;
dp = dextra;
enddata += spill2;
spill2 = enddata % 16;
nwhole = enddata / 16;
fseek(daf->fpr, foffset, SEEK_SET);
} else {
enddata = spill2;
nwhole = 0;
}
for (j = 0; j < nwhole; j++) { /* encrypt and write out full blocks */
aes_enc_blk(salt, buf, daf->ctx);
for (i = 0; i < 16; i++)
salt[i] = (*dp++) ^ buf[i];
fwrite(salt, 1, 16, daf->fpr);
}
if (spill2 > 0) { /* deal with the end bit if there is one */
aes_enc_blk(salt, buf, daf->ctx);
for (i = 0; i < spill2; i++)
salt[i] = (*dp++) ^ buf[i];
fwrite(salt, 1, spill2, daf->fpr);
}
if (dextra)
free(dextra); /* free buffer if we used it */
} else {
/*
If not using encryption, things are easy: just find the correct place
in the file and write the new data. Write to the ifr file because
that is the one SwitchFamilyMember sets: we are not normally expected to
ever WRITE to any file other than the highest numbered one.
*/
foffset = var->offset + var->ifile->FileLengthSize +
var->ifile->FileCommandSize + var->ifile->FileTypeIDSize +
strlen(var->name) + 1 + offset * tsizedisk;
fseek(daf->fpr, foffset, SEEK_SET);
retval = fwrite((octet*)data, tsizedisk, number, daf->fpr);
if (retval < number)
_errno = ERR_WRITE;
}
return retval;
}
int
lsda_cd(int handle, char* path)
{
  LSDAFile* daf;
  int create;

  /* Validate the handle before touching the file table. */
  if (handle < 0 || handle >= num_daf) {
    _errno = ERR_NOFILE;
    if (report_level > 0)
      fprintf(stderr, "lsda_cd: invalid handle %d", handle);
    return -1;
  }
  daf = da_store + handle;
  /* Only writable files may create missing directories along the way. */
  create = (daf->openmode == LSDA_READONLY) ? 0 : 1;
  if (daf->ChangeDir(daf, path, create) != NULL) {
    daf->pathchanged = 1;
    return 1;
  }
  /* ChangeDir failed: report and bail out. */
  _errno = ERR_CD;
  if (report_level > 0) {
    if (daf->num_list > 0)
      fprintf(stderr,
              "lsda_cd: Cannot cd to %s in file %s. Most likely a component "
              "of\nthe path is not a directory\n",
              path,
              daf->ifile[0]->filename);
    else
      fprintf(stderr, "lsda_cd: Cannot cd to %s.\n", path);
  }
  return -1;
}
STATIC int
lsda_writecd(int handle, char* path)
/*
  Append a CD record for "path" to the output file.  A NULL path simply
  clears the pending-cd flag.  Returns 1 on success, -1 on error.
*/
{
  LSDAFile* daf = da_store + handle;
  Length reclen;
  int plen;
  Command op = LSDA_CD;

  if (path == NULL) {
    daf->pathchanged = 0;
    return 1;
  }
  /* Finish any partially written variable first. */
  if (daf->npend)
    WriteData(NULL, 1, 0, daf, 1);
  if (daf->continued && closeout_var(daf) < 0) {
    if (report_level > 0)
      fprintf(stderr, "lsda_writecd: error closing out variable\n");
    return -1;
  }
  /* New records always go at the end of the file. */
  if (!daf->ifw->ateof) {
    fseek(daf->fpw, 0, SEEK_END);
    daf->ifw->ateof = 1;
  }
  plen = strlen(path);
  reclen = sizeof(Length) + sizeof(Command) + plen;
  if (daf->encrypted && WriteSalt(daf))
    goto write_error;
  if (WriteData((octet*)&reclen, sizeof(Length), 1, daf, 1) < 1)
    goto write_error;
  if (WriteData((octet*)&op, sizeof(Command), 1, daf, 1) < 1)
    goto write_error;
  if (WriteData((octet*)path, 1, plen, daf, 1) < plen)
    goto write_error;
  daf->pathchanged = 0;
  daf->stpendlen += reclen;
  return 1;

write_error:
  _errno = ERR_WRITE;
  if (report_level > 0)
    fprintf(
      stderr, "lsda_writecd: write error on file %s\n", fullfilename(daf->ifw));
  return -1;
}
Length
lsda_fsize(int handle)
{
LSDAFile* daf;
FILE* fp;
if (handle < 0 || handle >= num_daf) {
_errno = ERR_NOFILE;
if (report_level > 0)
fprintf(stderr, "lsda_fsize: invalid handle %d", handle);
return 0;
}
_errno = ERR_NONE;
daf = da_store + handle;
fp = daf->fpw;
if (fp == NULL)
fp = daf->fpr;
if (!fp)
return 0; /* no file currently opened */
fseek(fp, 0, SEEK_END);
return ((Length)ftell(fp) + daf->stpendlen);
}
int
lsda_filenum(int handle)
/*
  This is only used to find the "end of file" so to speak -- the last
  file in the series that we are writing to. It doesn't really make
  sense for files that have been opened READONLY
*/
{
  LSDAFile* daf;
  char* suffix;
  int value;

  if (handle < 0 || handle >= num_daf) {
    _errno = ERR_NOFILE;
    if (report_level > 0)
      fprintf(stderr, "lsda_filenum: invalid handle %d", handle);
    return -1;
  }
  daf = da_store + handle;
  if (daf->openmode == LSDA_READONLY || daf->ifw == NULL)
    return 0;
  suffix = strrchr(daf->ifw->filename, '%'); /* locate the % marker */
  if (suffix == NULL)
    return 0;
  value = atoi(suffix + 1); /* convert following to number */
  /* The suffix only counts if everything after the % is a digit. */
  for (suffix++; *suffix; suffix++)
    if (!isdigit(*suffix))
      value = 0;
  return value;
}
int
lsda_nextfile(int handle)
/*
  This also is only for writing -- to open the next file for writing.
  Flushes pending data and the symbol table, closes the current member,
  and opens "name%NNN" with NNN incremented.  Returns the new sequence
  number, or -1 on error.
*/
{
  LSDAFile* daf;
  IFile* ifile = NULL;
  int cur;
  char *cp, pwd[MAXPATH];
  if (handle < 0 || handle >= num_daf) {
    _errno = ERR_NOFILE;
    if (report_level > 0)
      fprintf(stderr, "lsda_nextfile: invalid handle %d", handle);
    return -1;
  }
  daf = da_store + handle;
  if (daf->openmode == LSDA_READONLY)
    return 0;
  /* Finish any partially written variable and flush the symbol table. */
  if (daf->npend)
    WriteData(NULL, 1, 0, daf, 1);
  if (daf->continued) {
    if (closeout_var(daf) < 0)
      goto cleanup;
  }
  if (daf->stpendlen && lsda_writesymboltable(daf) < 0)
    goto cleanup;
  if (daf->fpw && (daf->fpw != daf->fpr))
    fclose(daf->fpw);
  daf->fpw = NULL;
  strcpy(pwd, daf->GetCWD(daf));
  daf->cwd = daf->top;
  cp = strrchr(daf->ifw->filename, '%'); /* find % */
  if (!cp) {
    cur = 0;
  } else {
    cur = atoi(cp + 1); /* convert following to number */
    for (cp++; *cp; cp++)
      if (!isdigit(*cp))
        cur = 0; /* but only if all following are digits */
  }
  /* Get new ifile to store stuff in */
  ifile = newIFile();
  ifile->dirname = (char*)malloc(strlen(daf->ifw->dirname) + 1);
  strcpy(ifile->dirname, daf->ifw->dirname);
  ifile->filename = (char*)malloc(strlen(daf->ifw->filename) + 5);
  strcpy(ifile->filename, daf->ifw->filename);
  /* Build new file name. */
  if (cur == 0) {
    strcat(ifile->filename, "%001");
  } else {
    cp = strrchr(ifile->filename, '%');
    sprintf(cp + 1, "%3.3d", cur + 1);
  }
  /*
    Store new Ifile in list
  */
  daf->ifile =
    (IFile**)realloc(daf->ifile, (daf->num_list + 1) * sizeof(IFile*));
  daf->ifile[daf->num_list++] = ifile;
  daf->ifw = ifile;
  if ((daf->fpw = fopen(fullfilename(ifile), "w+b")) != NULL) {
    if (write_initialize(daf, NULL) < 0)
      goto cleanup;
  } else {
    _errno = ERR_OPENFILE;
    if (report_level > 0)
      fprintf(
        stderr, "lsda_nextfile: error opening file %s", fullfilename(daf->ifw));
    goto cleanup;
  }
  /*
    Go back to the same directory we were in
  */
  lsda_cd(handle, pwd);
  return cur + 1;
cleanup:
  if (report_level > 0)
    fprintf(stderr,
            "lsda_nextfile: error processing file %s\n",
            fullfilename(daf->ifw));
  if (daf->fpw && (daf->fpw != daf->fpr))
    fclose(daf->fpw);
  daf->fpw = NULL;
  /*
    BUG FIX: only free the IFile allocated by THIS call.  The original
    freed daf->ifw unconditionally, but on the early failure paths ifw
    is still the previous family member, which remains referenced from
    daf->ifile[] -- freeing it left a dangling pointer there and caused
    a double free in lsda_close.
  */
  if (ifile) {
    daf->ifile[--daf->num_list] = NULL;
    if (ifile->filename)
      free(ifile->filename);
    if (ifile->dirname)
      free(ifile->dirname);
    free(ifile);
  }
  daf->ifw = NULL;
  return -1;
}
int
lsda_setmaxsize(int handle, Offset size)
/*
  Sets the handle's idea of the maximum allowable file size, capped at
  DEF_MAX_SIZE.  If the file already exceeds a lowered limit, writing
  rolls over to the next family member immediately.
*/
{
  LSDAFile* daf;
  Offset previous;

  if (handle < 0 || handle >= num_daf) {
    _errno = ERR_NOFILE;
    if (report_level > 0)
      fprintf(stderr, "lsda_setmaxsize: invalid handle %d", handle);
    return -1;
  }
  daf = da_store + handle;
  previous = daf->maxsize;
  daf->maxsize = (size > DEF_MAX_SIZE) ? DEF_MAX_SIZE : size;
  if (daf->fpw && daf->maxsize < previous) {
    /*
      If we are getting too big, wrap to a new file
    */
    if (!daf->ifw->ateof) {
      fseek(daf->fpw, 0, SEEK_END);
      daf->ifw->ateof = 1;
    }
    if (ftell(daf->fpw) + daf->stpendlen > daf->maxsize)
      lsda_nextfile(handle);
  }
  return 1;
}
int
lsda_close(int handle)
/*
  Flush all pending data, write the final symbol table, close the file
  streams and release all in-memory state for this handle.  Returns 1
  on success, -1 on error.
*/
{
  LSDAFile* daf;
  /*
    BUG FIX: validate the handle BEFORE forming the table pointer.  The
    original computed da_store + handle first and its cleanup path then
    dereferenced daf->ifr / daf->ifw, which is an out-of-bounds access
    for an invalid handle.  We cannot report a filename here.
  */
  if (handle < 0 || handle >= num_daf) {
    _errno = ERR_CLOSE;
    return -1;
  }
  daf = da_store + handle;
  /* Finish any partially written variable and flush the symbol table. */
  if (daf->npend)
    WriteData(NULL, 1, 0, daf, 1);
  if (daf->continued && closeout_var(daf) < 0)
    goto cleanup;
  if (daf->stpendlen && lsda_writesymboltable(daf) < 0)
    goto cleanup;
  /* Close the stream(s); read and write may share one FILE*. */
  if (daf->fpw == daf->fpr) {
    if (daf->fpw)
      fclose(daf->fpw);
  } else {
    if (daf->fpr)
      fclose(daf->fpr);
    if (daf->fpw)
      fclose(daf->fpw);
  }
  daf->ifr = NULL;
  daf->fpr = NULL;
  daf->ifw = NULL;
  daf->fpw = NULL;
  daf->FreeTable(daf, daf->top);
  daf->FreeTypes(daf);
  if (daf->num_list) {
    int i;
    for (i = 0; i < daf->num_list; i++) {
      if (daf->ifile[i]) {
        if (daf->ifile[i]->dirname)
          free(daf->ifile[i]->dirname);
        if (daf->ifile[i]->filename)
          free(daf->ifile[i]->filename);
        free(daf->ifile[i]);
      }
    }
    free(daf->ifile);
  }
  daf->free = 1; /* mark the slot reusable */
  return 1;
cleanup:
  if (report_level > 0) {
    if (daf->ifr)
      fprintf(
        stderr, "lsda_close: error closing file %s\n", fullfilename(daf->ifr));
    else if (daf->ifw)
      fprintf(
        stderr, "lsda_close: error closing file %s\n", fullfilename(daf->ifw));
  }
  _errno = ERR_CLOSE;
  return -1;
}
int
lsda_flush(int handle)
/*
  Flush pending variable data and the symbol table to disk, and roll
  over to the next family member if the file has exceeded its maximum
  size.  Returns LSDA_SUCCESS on success, -1 on error.
*/
{
  LSDAFile* daf;
  /*
    BUG FIX: validate the handle BEFORE forming the table pointer; the
    original computed da_store + handle first, which is undefined for
    an out-of-range handle.
  */
  if (handle < 0 || handle >= num_daf) {
    _errno = ERR_NOFILE;
    if (report_level > 0)
      fprintf(stderr, "lsda_flush: invalid handle %d", handle);
    return -1;
  }
  daf = da_store + handle;
  if (daf->openmode == LSDA_READONLY)
    return LSDA_SUCCESS; /* nothing to flush */
  if (daf->npend)
    WriteData(NULL, 1, 0, daf, 1);
  if (daf->continued) {
    if (closeout_var(daf) < 0)
      goto cleanup;
  }
  if (daf->stpendlen && lsda_writesymboltable(daf) < 0)
    goto cleanup;
  if (daf->fpw) {
    fflush(daf->fpw);
    /*
      If we are getting too big, wrap to a new file
    */
    if (!daf->ifw->ateof) {
      fseek(daf->fpw, 0, SEEK_END);
      daf->ifw->ateof = 1;
    }
    if (ftell(daf->fpw) + daf->stpendlen > daf->maxsize)
      lsda_nextfile(handle);
  }
  return LSDA_SUCCESS;
cleanup:
  if (report_level > 0) {
    fprintf(
      stderr, "lsda_flush: error processing file %s\n", fullfilename(daf->ifw));
  }
  return -1;
}
int
lsda_sync(int handle)
/*
  Push buffered output for this handle to the operating system.
  Returns LSDA_SUCCESS, or -1 for an invalid handle.
*/
{
  LSDAFile* daf;
  /* BUG FIX: validate the handle before forming the table pointer. */
  if (handle < 0 || handle >= num_daf) {
    _errno = ERR_NOFILE;
    if (report_level > 0)
      fprintf(stderr, "lsda_sync: invalid handle %d", handle);
    return -1;
  }
  daf = da_store + handle;
  if (daf->openmode != LSDA_READONLY && daf->fpw)
    /*
      BUG FIX: the original called fflush(fileno(daf->fpw)), but fflush
      takes a FILE*, not an int file descriptor.  A true sync-to-disk
      would be fsync(fileno(daf->fpw)) where available.
    */
    fflush(daf->fpw);
  return LSDA_SUCCESS;
}
LSDATable*
LSDAresolve_link(LSDAFile* daf, LSDATable* varin)
/*
  Follow a chain of LSDA_LINK variables starting at varin until a
  non-link variable is reached.  Returns that variable, or NULL on
  error (with _errno set).
*/
{
  Offset foffset;
  char data[1024];
  LSDATable* var = varin;
  int k;
  char *pname, *newpname;
  pname = strdup(daf->GetPath(daf, var)); /* absolute path to var */
  /*
    If file is write/read mode, flush stuff out to the file before we do a seek.
  */
  if (daf->npend)
    WriteData(NULL, 1, 0, daf, 1);
  if (daf->continued) {
    if (closeout_var(daf) < 0) {
      free(pname); /* BUG FIX: pname was leaked on this error path */
      return NULL;
    }
  }
  /*
    Loop reading link info and following it.
  */
  while (1) {
    if (!var->type || var->type->id != LSDA_LINK) {
      free(pname); /* BUG FIX: pname was leaked on the success path */
      return var;
    }
    /*
      BUG FIX: guard the fixed-size buffer.  The ReadData below and the
      NUL termination at data[var->length] would overflow "data" for
      link contents of sizeof(data) bytes or more.
    */
    if (var->length >= (Length)sizeof(data)) {
      _errno = ERR_READ;
      if (report_level > 0)
        fprintf(
          stderr, "LSDAresolve_link: link %s too long to resolve\n", pname);
      free(pname);
      return NULL;
    }
    if (((daf->fpr == NULL) || (var->ifile != daf->ifr)) &&
        SwitchFamilyMember(daf, var) < 0) {
      if (report_level > 0)
        fprintf(stderr, "LSDAresolve_link: error\n");
      free(pname);
      return NULL;
    }
    daf->ifr->ateof = 0;
    foffset = var->offset + var->ifile->FileLengthSize +
              var->ifile->FileCommandSize + var->ifile->FileTypeIDSize +
              strlen(var->name) + 1;
    /*
      If we have encryption, we have to read the salt from the 16
      bytes before the start of the data.
    */
    if (daf->encrypted)
      foffset += 16;
    fseek(daf->fpr, foffset, SEEK_SET);
    if (daf->encrypted)
      ReadSalt(daf);
    k = ReadData((unsigned char*)data, 1, var->length, daf);
    if (k != var->length) {
      if (report_level > 0)
        fprintf(
          stderr,
          "LSDAresolve_link: error reading variable %s while reading file %s\n",
          pname,
          daf->ifr ? daf->ifr->filename : NULL);
      free(pname);
      return NULL;
    }
    /* need some kind of infinite loop protection. Simplest would be
       to compare # times through here with the total size of the ST */
    data[var->length] = 0; /* file data is not NULL terminated */
    newpname = link_path(pname, data);
    free(pname);
    pname = newpname;
    /* don't let FindVar follow links, since that would call this routine and
       could lead to infinite recursion.
       But DO let it create directories, since we might be referencing dirs
       that were flushed to disk and no longer in the ST */
    var = daf->FindVar(daf, pname, 1, 0);
    if (var == NULL) {
      _errno = ERR_NOVAR;
      if (report_level > 0)
        fprintf(
          stderr,
          "LSDAresolve_link: variable %s not found while reading file %s\n",
          pname,
          daf->ifr ? daf->ifr->filename : NULL);
      free(pname);
      return NULL;
    }
  }
}
static Length
lsda_realread(int handle,
              int type_id,
              char* name,
              Length offset,
              Length number,
              void* data,
              int follow)
/*
  Common implementation for lsda_read/lsda_lread: read "number" items
  of variable "name" starting at item "offset" into "data", converting
  from the on-disk type to type_id.  "follow" controls whether symbolic
  links are resolved.  Returns the number of items read, or -1 on error.
*/
{
  LSDAFile* daf;
  LSDAType* type;
  _CF Convert;
  int tsize;
  Offset foffset;
  Length ret;
  char buf[BUFSIZE], *cp;
  LSDATable* var;
  int tsizedisk;
  int k, kk, perbuf;
  Offset doff;
  if (handle < 0 || handle >= num_daf) {
    _errno = ERR_NOFILE;
    if (report_level > 0)
      fprintf(stderr, "lsda_read: invalid handle %d", handle);
    return -1;
  }
  if (number == 0)
    return 0;
  daf = da_store + handle;
  type = daf->FindTypeByID(daf, type_id);
  if (type == NULL) {
    _errno = ERR_DATATYPE;
    if (report_level > 0) {
      fprintf(stderr, "lsda_read: unrecognized data type %d", type_id);
      fprintf(stderr,
              " while reading file %s\n",
              daf->ifr ? daf->ifr->filename : NULL);
    }
    return -1;
  }
  /*
    If file is write/read mode, flush stuff out to the file before we do a seek.
  */
  if (daf->npend)
    WriteData(NULL, 1, 0, daf, 1);
  if (daf->continued) {
    if (closeout_var(daf) < 0)
      return -1;
  }
  /*
    get var info from symbol table: size of each item, starting offset
  */
  var = daf->FindVar(daf, name, 0, follow);
  if (var == NULL) {
    _errno = ERR_NOVAR;
    if (report_level > 0)
      fprintf(
        stderr,
        "lsda_read: variable %s not found while reading file %s\n CWD=%s\n",
        name,
        daf->ifr ? daf->ifr->filename : NULL,
        daf->GetCWD(daf));
    return -1;
  }
  /* Clamp the request to the extent of the variable. */
  if (offset >= var->length)
    return 0;
  if (offset + number > var->length)
    number = var->length - offset;
  tsize = LSDASizeOf(type);
  tsizedisk = LSDASizeOf(var->type);
  if (daf->encrypted && (16 % tsizedisk)) {
    /*
      This cannot happen at the moment -- no data types are this large...
    */
    fprintf(stderr,
            "Error: reading variable %s/%s from LSDA file %s:",
            daf->GetCWD(daf),
            name,
            daf->ifr->filename);
    fprintf(stderr,
            " File is encrypted and variable size does not divide 16\n");
    _errno = ERR_READ;
    return 0;
  }
  Convert = GetConversionFunction(var->ifile, var->type, type);
  if (((daf->fpr == NULL) || (var->ifile != daf->ifr)) &&
      SwitchFamilyMember(daf, var) < 0) {
    if (report_level > 0)
      fprintf(stderr, "lsda_read: error\n");
    return -1;
  }
  daf->ifr->ateof = 0;
  cp = buf;
  /*
    If we have encryption, we can't just jump into the middle of
    the variable -- we have to read on 16 byte boundaries, and read
    the 16 bytes before the start of the data we want.
  */
  if (daf->encrypted) {
    int firstpad;      /* # bytes to throw out from first read */
    int64_t numwhole;  /* # whole reads to do */
    int lastreadlen;   /* # bytes to read for last block */
    int lastuse;       /* # bytes to use from last block */
    Offset fstart, fend, otmp;
    fstart = offset * tsizedisk;        /* Where data starts */
    fend = fstart + number * tsizedisk; /* Where data ends */
    firstpad = fstart % 16;             /* # extra bytes to read at start */
    otmp =
      ((fend + 15) / 16) * 16; /* round end of data up to 16 byte boundary */
    if (otmp > var->length * tsizedisk) { /* but not past end of record */
      lastreadlen = 16 - (otmp - var->length * tsizedisk);
    } else {
      lastreadlen = 16;
    }
    lastuse = 16 - (otmp - fend);
    if (lastuse == 16)
      lastuse = 0;
    numwhole = number * tsizedisk;
    if (firstpad)
      numwhole -= (16 - firstpad);
    if (lastuse)
      numwhole -= lastuse;
    numwhole /= 16;
    /*
      Add length of other stuff written at file location "var->offset"
      The 16 byte synchronization starts AFTER this stuff, so just
      add this in without any 16 byte boundary considerations. Also
      note that this offset calculation does NOT include the 16 byte
      salt written at the head of the record, which is fine because
      we would just have to subtract it off since we want to back up
      16 bytes in the file anyway. But it does include the 16 byte salt
      written just before the actual data.
    */
    foffset = fstart - firstpad; /* Round down to 16 byte boundary */
    foffset += var->ifile->FileLengthSize + var->ifile->FileCommandSize +
               var->ifile->FileTypeIDSize + strlen(var->name) + 1 + 16;
    fseek(daf->fpr, var->offset + foffset, SEEK_SET);
    ReadSalt(daf);
    /*
      Read first block
    */
    doff = 0;
    if (firstpad) {
      if (numwhole < 0) /* truncate data at both ends... */
        kk = (ReadData((unsigned char*)cp, 1, lastreadlen, daf) - firstpad) /
             tsizedisk;
      else
        kk = (ReadData((unsigned char*)cp, 1, 16, daf) - firstpad) / tsizedisk;
      if (Convert) {
        Convert(cp + firstpad, (char*)data, kk);
      } else {
        memcpy((char*)data, cp + firstpad, kk * tsize);
      }
      doff = kk;
    }
    if (Convert) {
      int64_t i;
      perbuf = BUFSIZE / 16; /* 16 byte blocks per buffer */
      k = perbuf;
      for (i = 0; i < numwhole; i += perbuf) {
        if (i + k > numwhole)
          k = numwhole - i;
        kk = ReadData((unsigned char*)cp, 1, 16 * k, daf) / tsizedisk;
        Convert(cp, ((char*)data + doff * tsize), kk);
        doff += kk;
      }
    } else if (numwhole > 0) {
      doff += ReadData(
        (unsigned char*)data + doff * tsize, tsize, numwhole * 16 / tsize, daf);
    }
    if (lastuse && numwhole >= 0) {
      kk = ReadData((unsigned char*)cp, 1, lastreadlen, daf);
      if (kk > lastuse)
        kk = lastuse;
      kk /= tsizedisk;
      if (Convert) {
        Convert(cp, (char*)data + doff * tsize, kk);
      } else {
        memcpy((char*)data + doff * tsize, cp, kk * tsize);
      }
      doff += kk;
    }
    ret = doff;
  } else {
    foffset = var->offset + var->ifile->FileLengthSize +
              var->ifile->FileCommandSize + var->ifile->FileTypeIDSize +
              strlen(var->name) + 1 + offset * tsizedisk;
    /*
      Will this seek be a performance hit if we are in fact already at
      the correct file location? I should hope not. If this turns out
      to be the case, then maybe a call to ftell should be done first
      in case the caller is reading through some data in chunks
    */
    fseek(daf->fpr, foffset, SEEK_SET);
    if (Convert) {
      /*
        Read, in chunks, as many items as we can that will fit into
        our buffer, then convert them into the user's space
      */
      perbuf = BUFSIZE / tsizedisk;
      if (perbuf < 1) { /* Yoikes! Big data item! */
        cp = (char*)malloc(tsizedisk);
        if (!cp) {
          /*
            BUG FIX: the original "if (!cp) fprintf(...); exit(0);" had no
            braces, so exit(0) ran unconditionally whenever a single data
            item exceeded BUFSIZE.  Only abort when malloc actually fails.
          */
          fprintf(stderr, "lsda_read: Malloc failed!\n");
          exit(0);
        }
        perbuf = 1;
      }
      k = perbuf;
      ret = 0;
      for (doff = 0; doff < number; doff += perbuf) {
        if (doff + k > number)
          k = number - doff;
        kk = fread(cp, tsizedisk, k, daf->fpr);
        Convert(cp, ((char*)data) + doff * tsize, kk);
        ret = ret + kk;
        if (kk < k)
          break;
      }
      if (cp != buf)
        free(cp);
    } else {
      ret = ReadData((unsigned char*)data, tsize, number, daf);
    }
  }
  if (ret < number) {
    _errno = ERR_READ;
    if (report_level > 0) {
      fprintf(
        stderr, "lsda_read: error reading file %s\n", fullfilename(daf->ifr));
    }
  }
  return ret;
}
Length
lsda_lread(int handle, int type_id, char* name, Length offset, Length number,
           void* data)
{
  /* Like lsda_read, but does NOT resolve symbolic links (follow = 0). */
  return lsda_realread(handle, type_id, name, offset, number, data, 0);
}
Length
lsda_read(int handle, int type_id, char* name, Length offset, Length number,
          void* data)
{
  /* Read with symbolic link resolution enabled (follow = 1). */
  return lsda_realread(handle, type_id, name, offset, number, data, 1);
}
STATIC int
lsda_writesymboltable(LSDAFile* daf)
/*
  Flush the in-memory symbol table to the output file as one
  BEGINSYMBOLTABLE .. ENDSYMBOLTABLE record, then patch the previous
  on-disk ENDSYMBOLTABLE record so its "next" offset points at this new
  block (the table is a chain of blocks).  Returns 1 on success, -1 on
  error with _errno set.
*/
{
  Command cmd;
  Length rlen;
  Offset table_pos, cur_pos, offset_pos;
  char path1[MAXPATH], path2[MAXPATH];
  /*
    If were not finished with the previous variable close it out.
  */
  if (daf->npend)
    WriteData(NULL, 1, 0, daf, 1);
  if (daf->continued) {
    if (closeout_var(daf) < 0)
      goto cleanup1;
  }
  /* The symbol table block is appended at the end of the file. */
  if (!daf->ifw->ateof) {
    fseek(daf->fpw, 0, SEEK_END);
    daf->ifw->ateof = 1;
  }
  table_pos = ftell(daf->fpw);
  /* Record length is not known yet: write 0 now and patch it below. */
  rlen = 0;
  if (daf->encrypted && WriteSalt(daf))
    goto cleanup;
  if (WriteData((octet*)&rlen, sizeof(Length), 1, daf, 1) < 1)
    goto cleanup;
  cmd = LSDA_BEGINSYMBOLTABLE;
  if (WriteData((octet*)&cmd, sizeof(Command), 1, daf, 1) < 1)
    goto cleanup;
  if (daf->encrypted && WriteSalt(daf))
    goto cleanup; /* 1 salt for all symbols */
  /* Recursively write every dirty symbol under the root. */
  path1[0] = path2[0] = 0;
  if (lsda_writesymbol(path1, path2, daf->top, daf) < 0)
    goto cleanup1;
  /*
    Write end of symbol table record
  */
  rlen = sizeof(Length) + sizeof(Command) + sizeof(Offset);
  if (daf->encrypted && WriteSalt(daf))
    goto cleanup;
  if (WriteData((octet*)&rlen, sizeof(Length), 1, daf, 1) < 1)
    goto cleanup;
  cmd = LSDA_ENDSYMBOLTABLE;
  if (WriteData((octet*)&cmd, sizeof(Command), 1, daf, 1) < 1)
    goto cleanup;
  /* Remember where the "next block" offset lives so the NEXT flush can
     patch it. */
  offset_pos = ftell(daf->fpw);
  cur_pos = 0; /* offset to next piece of table -- 0=> no next piece */
  if (WriteData((octet*)&cur_pos, sizeof(Offset), 1, daf, 1) < 1)
    goto cleanup;
  /*
    Update length of this symbol table block
  */
  cur_pos = ftell(daf->fpw);
  rlen = cur_pos - table_pos;
  daf->ifw->ateof = 0;
  fseek(daf->fpw, table_pos, SEEK_SET);
  if (daf->encrypted) { /* have to rewrite the whole record */
    if (WriteSalt(daf))
      goto cleanup;
    rlen -= 16; /* "table_pos" included leading SALT, which isn't properly
                   inside of the record, so don't count it in the length */
    if (WriteData((octet*)&rlen, sizeof(Length), 1, daf, 1) < 1)
      goto cleanup;
    cmd = LSDA_BEGINSYMBOLTABLE;
    if (WriteData((octet*)&cmd, sizeof(Command), 1, daf, 1) < 1)
      goto cleanup;
  } else {
    if (WriteData((octet*)&rlen, sizeof(Length), 1, daf, 1) < 1)
      goto cleanup;
  }
  /*
    Update the file offset that points to this chunk of the symbol table
  */
  if (daf->encrypted) { /* have to rewrite the whole record */
    fseek(daf->fpw,
          daf->ifw->stoffset - sizeof(Length) - sizeof(Command) - 16,
          SEEK_SET);
    if (WriteSalt(daf))
      goto cleanup;
    rlen = sizeof(Length) + sizeof(Command) + sizeof(Offset);
    if (WriteData((octet*)&rlen, sizeof(Length), 1, daf, 1) < 1)
      goto cleanup;
    cmd = LSDA_ENDSYMBOLTABLE;
    if (WriteData((octet*)&cmd, sizeof(Command), 1, daf, 1) < 1)
      goto cleanup;
    if (WriteData((octet*)&table_pos, sizeof(Offset), 1, daf, 1) < 1)
      goto cleanup;
  } else {
    fseek(daf->fpw, daf->ifw->stoffset, SEEK_SET);
    if (WriteData((octet*)&table_pos, sizeof(Offset), 1, daf, 1) < 1)
      goto cleanup;
  }
  daf->ifw->stoffset = offset_pos;
  daf->stpendlen = 0; /* nothing pending anymore */
  /*
    If file is not being read, delete unneeded symbols
  */
  if (daf->openmode == LSDA_WRITEONLY)
    PruneSymbols(daf, daf->top);
  return 1;
cleanup1:
  if (report_level > 0)
    fprintf(stderr,
            "lsda_writesymboltable: error processing file %s\n",
            fullfilename(daf->ifw));
  return -1;
cleanup:
  _errno = ERR_WRITE;
  if (report_level > 0) {
    fprintf(stderr,
            "lsda_writesymboltable: write error on file %s\n",
            fullfilename(daf->ifw));
  }
  return -1;
}
STATIC void
PruneSymbols(LSDAFile* daf, LSDATable* symbol)
/*
  Recursively discard symbol table entries that no longer need to live
  in memory: clean (already written) variables, and directories left
  empty after their children are pruned.  The root and the current
  directory are always retained.
*/
{
  if (symbol->type) { /* a variable */
    if (!symbol->dirty)
      daf->FreeTable(daf, symbol);
    return;
  }
  /* A directory: prune the children first. */
  if (symbol->children) {
    int nkids = BT_numentries(symbol->children);
    if (nkids) {
      LSDATable** kids = (LSDATable**)BT_list(symbol->children);
      int i;
      for (i = 0; i < nkids; i++)
        PruneSymbols(daf, kids[i]);
      free(kids);
    }
  }
  /* Drop the directory itself if it ended up empty. */
  if ((!symbol->children || BT_numentries(symbol->children) == 0) &&
      symbol != daf->top && symbol != daf->cwd)
    daf->FreeTable(daf, symbol);
}
size_t
lsda_totalmemory(int handle)
{
LSDAFile* daf = da_store + handle;
if (!daf->top)
return (size_t)0;
return SymbolSizes(daf, daf->top);
}
STATIC size_t
SymbolSizes(LSDAFile* daf, LSDATable* symbol)
/*
  Recursively sum the on-disk byte counts (length * item size) of all
  variables below "symbol".
*/
{
  size_t total = 0;
  if (symbol->type) /* a variable */
    return symbol->length * LSDASizeOf(symbol->type);
  if (symbol->children) {
    int walk = 0;
    LSDATable* kid;
    while ((kid = (LSDATable*)BT_enumerate(symbol->children, &walk)) != NULL)
      total += SymbolSizes(daf, kid);
  }
  return total;
}
STATIC int
lsda_readsymboltable(LSDAFile* daf)
/*
  Read the chained symbol table blocks from the input file into memory.
  Each block starts with BEGINSYMBOLTABLE and ends with an
  ENDSYMBOLTABLE record whose final Offset points at the next block
  (0 terminates the chain).  Returns 1 on success, -1 on error.
*/
{
  Command cmd;
  Offset table_pos, offset_pos, end_pos;
  Length rlen;
  /*
    Read symbol table from file.
  */
  if (fseek(daf->fpr, daf->ifr->stoffset, SEEK_SET) < 0) {
    _errno = ERR_FSEEK;
    goto cleanup;
  }
  /*
    BUG FIX: if the first "next block" offset is 0 the while loop never
    executes, and offset_pos was used uninitialized in the assignment
    after the loop.  Start it at the current stoffset (the position of
    the pointer we just read), which makes the assignment a no-op.
  */
  offset_pos = daf->ifr->stoffset;
  table_pos = ReadOffset(daf);
  if (_errno != ERR_NONE)
    goto cleanup;
  while (table_pos) {
    if (fseek(daf->fpr, table_pos, SEEK_SET) < 0) {
      _errno = ERR_FSEEK;
      goto cleanup;
    }
    if (daf->encrypted && ReadSalt(daf))
      goto cleanup;
    rlen = ReadLength(daf);
    if (_errno != ERR_NONE)
      goto cleanup;
    /* Symbols end where the trailing ENDSYMBOLTABLE record begins. */
    end_pos =
      table_pos + rlen - sizeof(Length) - sizeof(Command) - sizeof(Offset);
    cmd = ReadCommand(daf);
    if (_errno != ERR_NONE)
      goto cleanup;
    if (cmd != LSDA_BEGINSYMBOLTABLE) {
      _errno = ERR_NOBEGINSYMBOLTABLE;
      goto cleanup;
    }
    if (daf->encrypted && ReadSalt(daf))
      goto cleanup; /* 1 salt for all symbols */
    while (ftell(daf->fpr) < end_pos && lsda_readsymbol(daf))
      ;
    /*
      Check for end of symbol table record
    */
    if (daf->encrypted && ReadSalt(daf))
      goto cleanup;
    ReadLength(daf);
    if (_errno != ERR_NONE)
      goto cleanup;
    cmd = ReadCommand(daf);
    if (_errno != ERR_NONE)
      goto cleanup;
    if (cmd != LSDA_ENDSYMBOLTABLE) {
      _errno = ERR_NOENDSYMBOLTABLE;
      goto cleanup;
    }
    offset_pos = ftell(daf->fpr);
    table_pos = ReadOffset(daf);
    if (_errno != ERR_NONE)
      goto cleanup;
  }
  /* Remember where the last "next block" pointer lives. */
  daf->ifr->stoffset = offset_pos;
  daf->ifr->ateof = 0;
  return 1;
cleanup:
  if (report_level > 0)
    fprintf(stderr,
            "lsda_readsymboltable: error %d on file %s at byte %ld\n",
            _errno,
            fullfilename(daf->ifr),
            (long)ftell(daf->fpr));
  return -1;
}
STATIC int
lsda_writesymbol(char* ppath, char* curpath, LSDATable* symbol, LSDAFile* daf)
/*
  Recursively write the dirty part of the symbol table rooted at
  "symbol".  "ppath" is the directory path most recently written to the
  file and "curpath" is the directory currently being walked; a CD
  record is emitted whenever the two differ.  Both buffers are mutated
  in place as the recursion descends and ascends.  Returns 1 when
  something was written, 0 when the subtree was clean, -1 on a write
  error.
*/
{
  Command cmd;
  Length rlen;
  int nlen;
  char* pp;
  int pplen;
  int i, keeplen, cont;
  int retval = 0;
  LSDATable* child;
  if (!symbol->dirty)
    return 0;
  if (symbol->type) { /* dirty variable */
    nlen = strlen(symbol->name);
    if (strcmp(ppath, curpath)) { /* have to write a directory entry */
      pp = findpath(ppath, curpath);
      pplen = strlen(pp);
      rlen = pplen + sizeof(Length) + sizeof(Command);
      if (WriteData((octet*)&rlen, sizeof(Length), 1, daf, 1) < 1)
        goto cleanup;
      cmd = LSDA_CD;
      if (WriteData((octet*)&cmd, sizeof(Command), 1, daf, 1) < 1)
        goto cleanup;
      if (WriteData((octet*)pp, 1, pplen, daf, 1) < pplen)
        goto cleanup;
      strcpy(ppath, curpath);
    }
    /* Variable record: length, command, name, type id, offset, length. */
    rlen = sizeof(Length) + sizeof(Command) + sizeof(TypeID) + nlen +
           sizeof(Offset) + sizeof(Length);
    if (WriteData((octet*)&rlen, sizeof(Length), 1, daf, 1) < 1)
      goto cleanup;
    cmd = LSDA_VARIABLE;
    if (WriteData((octet*)&cmd, sizeof(Command), 1, daf, 1) < 1)
      goto cleanup;
    if (WriteData((octet*)symbol->name, 1, nlen, daf, 1) < nlen)
      goto cleanup;
    if (WriteData((octet*)&symbol->type->id, sizeof(TypeID), 1, daf, 1) < 1)
      goto cleanup;
    if (WriteData((octet*)&symbol->offset, sizeof(Offset), 1, daf, 1) < 1)
      goto cleanup;
    if (WriteData((octet*)&symbol->length, sizeof(Length), 1, daf, 1) < 1)
      goto cleanup;
    retval = 1;
  } else if (symbol->children) { /* do subdir */
    /* Extend curpath with this directory's name for the recursion. */
    keeplen = strlen(curpath);
    if (keeplen == 0)
      strcpy(curpath, "/");
    else if (keeplen == 1)
      sprintf(curpath + keeplen, "%s", symbol->name);
    else
      sprintf(curpath + keeplen, "/%s", symbol->name);
    /*
      Go through two times: First write any simple variables,
      then do subdirectories
    */
    for (cont = 0;;) {
      child = (LSDATable*)BT_enumerate(symbol->children, &cont);
      if (!child)
        break;
      if (child->type) {
        i = lsda_writesymbol(ppath, curpath, child, daf);
        if (i < 0) {
          if (report_level > 0)
            fprintf(stderr, "lsda_writesymbol: error\n");
          return -1;
        }
      }
    }
    for (cont = 0;;) {
      child = (LSDATable*)BT_enumerate(symbol->children, &cont);
      if (!child)
        break;
      if (!child->type) {
        i = lsda_writesymbol(ppath, curpath, child, daf);
        if (i < 0) {
          if (report_level > 0)
            fprintf(stderr, "lsda_writesymbol: error\n");
          return -1;
        }
      }
    }
    retval = 1;
    /* Restore curpath before returning to the parent. */
    curpath[keeplen] = 0;
  }
  symbol->dirty = 0;
  return (retval);
cleanup:
  _errno = ERR_WRITE;
  if (report_level > 0) {
    fprintf(stderr,
            "lsda_writesymbol: write error on file %s",
            fullfilename(daf->ifw));
  }
  return -1;
}
STATIC LSDATable*
lsda_readsymbol(LSDAFile* daf)
/*
  Read the next record of the symbol table from the input stream and
  enter it into the in-memory table.  Handles VARIABLE, CD and NULL
  records.  Any other command rewinds the stream to the start of the
  record and returns NULL (end of symbol data); NULL is also returned
  on error, with _errno set.
*/
{
  Command cmd;
  Length rlen;
  int nlen;
  char name[256];
  TypeID type_id;
  LSDATable* symbol;
  LSDAType* type;
top:
  rlen = ReadLength(daf);
  if (_errno == ERR_READ)
    goto cleanup;
  cmd = ReadCommand(daf);
  if (_errno == ERR_READ)
    goto cleanup;
  if (cmd == LSDA_VARIABLE) {
    /* Name length = record length minus all the fixed-size fields. */
    nlen = rlen - 2 * daf->ifr->FileLengthSize - daf->ifr->FileCommandSize -
           daf->ifr->FileTypeIDSize - daf->ifr->FileOffsetSize;
    if (ReadData((unsigned char*)name, 1, nlen, daf) < nlen) {
      _errno = ERR_READ;
      goto cleanup;
    }
    name[nlen] = 0;
    type_id = ReadTypeID(daf);
    if (_errno == ERR_READ)
      goto cleanup;
    type = daf->FindTypeByID(daf, type_id);
    if (type == NULL) {
      _errno = ERR_NOTYPEID;
      if (report_level > 0) {
        fprintf(stderr,
                "lsda_readsymbol: No corresponding id for %d in file %s\n",
                (int)type_id,
                fullfilename(daf->ifr));
      }
      return NULL;
    }
    symbol = daf->CreateVar(daf, type, name);
    symbol->offset = ReadOffset(daf);
    if (_errno == ERR_READ)
      goto cleanup;
    symbol->length = ReadLength(daf);
    if (_errno == ERR_READ)
      goto cleanup;
    symbol->ifile = daf->ifr;
  } else if (cmd == LSDA_CD) {
    nlen = rlen - daf->ifr->FileLengthSize - daf->ifr->FileCommandSize;
    if (ReadData((unsigned char*)name, 1, nlen, daf) < nlen) {
      _errno = ERR_READ;
      goto cleanup;
    }
    name[nlen] = 0;
    /* Change directory, creating missing components as needed. */
    if (daf->ChangeDir(daf, name, 1) == NULL) {
      _errno = ERR_CD;
      if (report_level > 0) {
        fprintf(stderr,
                "lsda_readsymbol: Cannot cd to %s in file %s\n",
                name,
                fullfilename(daf->ifr));
        fprintf(stderr,
                "Most likely a component of\nthe path is not a directory\n");
      }
      return NULL;
    }
    symbol = daf->cwd;
  } else if (cmd == LSDA_NULL) { /* ignore NULL commands */
    nlen = rlen - daf->ifr->FileLengthSize - daf->ifr->FileCommandSize;
    fseek(daf->fpr, nlen, SEEK_CUR);
    goto top;
  } else {
    /* Unknown command: back up to the start of the record and stop. */
    fseek(daf->fpr,
          -daf->ifr->FileLengthSize - daf->ifr->FileCommandSize,
          SEEK_CUR);
    return NULL;
  }
  return symbol;
cleanup:
  if (report_level > 0) {
    fprintf(
      stderr, "lsda_readsymbol: read error on file %s", fullfilename(daf->ifr));
  }
  return NULL;
}
#ifdef DUMP_DEBUG
/* Debug helper: dump the whole symbol table of the file open on "handle". */
lsda_dumpst(int handle)
{
  LSDAFile* daf = da_store + handle; /* debug-only: handle is not validated */
  dumpit("/", daf->top);
}
/* Debug helper: print one symbol and, for directories, recurse into children. */
dumpit(char* cwd, LSDATable* symbol)
{
  char dir[1024];
  int cont; /* BT_enumerate cursor */
  LSDATable* child;
  if (symbol->type) {
    /* a variable: show where its data lives */
    printf("Var %s%s, type = %s, file %s, offset = %d, length = %d\n",
           cwd,
           symbol->name,
           symbol->type->name,
           symbol->ifile->filename,
           symbol->offset,
           symbol->length);
  } else {
    /* a directory: build its full path (root is special-cased), then recurse */
    if (strcmp(cwd, "/") == 0 && strcmp(symbol->name, "/") == 0) {
      strcpy(dir, "/");
    } else {
      sprintf(dir, "%s%s/", cwd, symbol->name);
    }
    printf("Dir %s\n", dir);
    if (symbol->children) {
      for (cont = 0;;) {
        child = (LSDATable*)BT_enumerate(symbol->children, &cont);
        if (!child)
          break;
        dumpit(dir, child);
      }
    }
  }
}
#endif
/*
 * The four helpers below each read one fixed-size header field from the
 * input file, applying the file's byte-order conversion callback, and
 * return it as the native in-memory type.
 */
STATIC Length
ReadLength(LSDAFile* daf)
{
  void* raw = ReadTrans(daf, daf->ifr->FileLengthSize, daf->ifr->ConvertLength);
  return *(Length*)raw;
}
STATIC Offset
ReadOffset(LSDAFile* daf)
{
  void* raw = ReadTrans(daf, daf->ifr->FileOffsetSize, daf->ifr->ConvertOffset);
  return *(Offset*)raw;
}
STATIC Command
ReadCommand(LSDAFile* daf)
{
  void* raw =
    ReadTrans(daf, daf->ifr->FileCommandSize, daf->ifr->ConvertCommand);
  return *(Command*)raw;
}
STATIC TypeID
ReadTypeID(LSDAFile* daf)
{
  void* raw = ReadTrans(daf, daf->ifr->FileTypeIDSize, daf->ifr->ConvertTypeID);
  return *(TypeID*)raw;
}
STATIC int
ReadSalt(LSDAFile* daf)
{
  /*
   * Pull the 16-byte CFB salt that precedes each encrypted record.
   * Returns 0 on success, 1 when the full block could not be read.
   */
  size_t got = fread(daf->salt, 1, 16, daf->fpr);
  return (got == 16) ? 0 : 1;
}
/*
 * Read size*count bytes into "data", decrypting in AES-CFB mode when the
 * file is encrypted.  Returns the number of complete items delivered
 * (which can be short on EOF/error).
 */
STATIC Length
ReadData(octet* data, size_t size, size_t count, LSDAFile* daf)
{
  size_t bytes = size * count;
  octet buf[16]; /* keystream block = AES(previous ciphertext block) */
  Length doff;   /* bytes delivered so far */
  int i;
  if (daf->encrypted) {
    /*
    Decrypt data using CFB mode
    */
    doff = 0;
    while (bytes >= 16) {
      aes_enc_blk((char*)daf->salt, (char*)buf, daf->ctx);
      i = fread(daf->salt, 1, 16, daf->fpr); /* next ciphertext becomes the new salt */
      if (i < 16)
        return (doff / size);
      for (i = 0; i < 16; i++)
        data[i] = daf->salt[i] ^ buf[i]; /* plaintext = ciphertext XOR keystream */
      doff += 16;
      data += 16;
      bytes -= 16;
    }
    if (bytes > 0) {
      /* partial block: shift the kept ciphertext left and splice in the new bytes */
      aes_enc_blk((char*)daf->salt, (char*)buf, daf->ctx);
      for (i = bytes; i < 16; i++)
        daf->salt[i - bytes] = daf->salt[i];
      doff += fread(daf->salt + 16 - bytes, 1, bytes, daf->fpr);
      for (i = 0; i < bytes; i++)
        data[i] = daf->salt[16 - bytes + i] ^ buf[i];
    }
    return (doff / size);
  } else {
    return fread(data, size, count, daf->fpr);
  }
}
STATIC int
WriteSalt(LSDAFile* daf)
{
  /*
   * Encrypt the salt in place so it differs from whatever was used for the
   * last encryption, then store it in the output file.  This is equivalent
   * to CFB-encrypting 16 zero bytes; the salt written at the start of each
   * record is what lets the reader resynchronize decryption.
   * Returns 0 on success, 1 on a short write.
   */
  aes_enc_blk((char*)daf->salt, (char*)daf->salt, daf->ctx);
  return (fwrite(daf->salt, 1, 16, daf->fpw) == 16) ? 0 : 1;
}
/*
 * Write size*count bytes, encrypting in AES-CFB mode when the file is
 * encrypted.  Sub-block data is buffered in daf->pending until a full
 * 16-byte block accumulates (or "flush" forces it out).  Returns the
 * number of complete items written.
 */
STATIC Length
WriteData(octet* data, size_t size, size_t count, LSDAFile* daf, int flush)
{
  size_t bytes = size * count;
  octet buf[16]; /* keystream block = AES(current salt) */
  int i, j, k;   /* NOTE: j is unused here */
  Length doff;   /* bytes consumed from "data" so far */
  if (daf->encrypted) {
    /*
    Encrypt data using CFB mode
    Copy data into pending buffer until buffer is full or we
    run out of data...
    */
    for (doff = 0; doff < bytes && daf->npend < 16; doff++)
      daf->pending[daf->npend++] = *data++;
    bytes -= doff;
    /*
    If pending buffer is full, write it out
    */
    if (daf->npend == 16) {
      aes_enc_blk((char*)daf->salt, (char*)buf, daf->ctx);
      for (i = 0; i < 16; i++)
        daf->salt[i] = daf->pending[i] ^ buf[i]; /* ciphertext becomes the new salt */
      k = fwrite(daf->salt, 1, 16, daf->fpw);
      if (k < 16)
        return ((doff + k - 16) / size);
      daf->npend = 0;
    }
    /*
    If there is more data to write, write as many 16 byte
    chunks as we can
    */
    while (bytes >= 16) {
      aes_enc_blk((char*)daf->salt, (char*)buf, daf->ctx);
      for (i = 0; i < 16; i++)
        daf->salt[i] = data[i] ^ buf[i];
      k = fwrite(daf->salt, 1, 16, daf->fpw);
      if (k < 16)
        return ((doff + k) / size);
      doff += 16;
      data += 16;
      bytes -= 16;
    }
    /*
    If there is data left, put it in the pending buffer.
    If there is data, the pending buffer must be empty, because
    we would have flushed it above
    */
    if (bytes > 0) {
      for (i = 0; i < bytes; i++)
        daf->pending[daf->npend++] = *data++;
      doff += bytes;
    }
    /*
    Finally, flush the pending buffer if requested
    */
    if (flush && daf->npend) {
      aes_enc_blk((char*)daf->salt, (char*)buf, daf->ctx);
      for (i = 0; i < daf->npend; i++)
        buf[i] ^= daf->pending[i];
      k = fwrite(buf, 1, daf->npend, daf->fpw);
      /* shift the salt and append the new ciphertext so decryption stays in sync */
      for (i = daf->npend; i < 16; i++)
        daf->salt[i - daf->npend] = daf->salt[i];
      for (i = 0; i < daf->npend; i++)
        daf->salt[16 - daf->npend + i] = buf[i];
      k = daf->npend - k; /* # bytes short on write */
      daf->npend = 0;
      if (k)
        return (count - (k + size - 1) / size);
    }
    return (doff / size);
  } else {
    return fwrite(data, size, count, daf->fpw);
  }
}
/*
 * Read "FileLength" raw bytes of a header field and optionally run the
 * byte-order conversion callback over them.  Returns a pointer to a
 * static buffer holding the (possibly converted) value; on read failure
 * the buffer is zeroed and _errno is set to ERR_READ.
 *
 * NOTE(review): the failure test is `< 1`, so a short-but-nonzero read
 * of fewer than FileLength bytes would go undetected — confirm whether
 * ReadData can return a partial byte count here.
 */
STATIC void*
ReadTrans(LSDAFile* daf, int FileLength, _CF Convert)
{
  static char buf[16], buf2[16]; /* static: callers read through the returned pointer */
  if (ReadData((unsigned char*)buf, 1, FileLength, daf) < 1) {
    memset(buf, 0, 16);
    _errno = ERR_READ;
    if (report_level > 0) {
      fprintf(stderr,
              "ReadTrans: error reading %d bytes from file %s\n",
              FileLength,
              fullfilename(daf->ifr));
    }
    return (void*)buf;
  }
  if (Convert) {
    Convert(buf, buf2, 1);
    return (void*)buf2;
  } else {
    return (void*)buf;
  }
}
/*
 * Compute the shortest way to express path "to" starting at path "from".
 * Returns:
 *   - NULL when the two paths are identical
 *   - a pointer into "to" when to is a subdirectory of from
 *   - a static buffer with a "../../xxx" relative path when that is shorter
 *   - "to" itself otherwise (including when from is empty or the root)
 *
 * NOTE(review): the relative form is built in a static 256-byte buffer
 * with no bounds check, and the result may alias "to" or the static
 * buffer — confirm callers copy it before the next call.
 */
STATIC char*
findpath(char* from, char* to)
{
  int i, j, k;
  int lastdir; /* index of the last '/' inside the common prefix */
  int lento = strlen(to);
  int lenrel; /* length the "../.." relative form would have */
  static char relpath[256];
  lastdir = 0;
  if (from[0] == 0)
    return to; /* we HAVE no previous path.... */
  if (from[1] == 0)
    return to; /* "from" is root dir */
  for (i = 0; from[i] && from[i] == to[i]; i++) /* find common part of path */
    if (from[i] == '/')
      lastdir = i;
  if (from[i] == '/' && !to[i])
    lastdir = i;
  if (!from[i] && !to[i])
    return NULL; /* identical */
  if (!from[i] && to[i] == '/')
    return to + i + 1; /* to is a subdir of from */
  /*
  Count number of ".." we'd need to do a relative path
  */
  for (j = lastdir, k = 0; from[j]; j++)
    if (from[j] == '/')
      k++;
  lenrel = 3 * k + (lento - lastdir);
  if (lenrel < lento) {
    for (i = 0; i < 3 * k; i += 3) {
      relpath[i] = '.';
      relpath[i + 1] = '.';
      relpath[i + 2] = '/';
    }
    if (lento == lastdir) /* to is a subdir of from */
      relpath[i - 1] = 0; /* strip off last / */
    else
      sprintf(relpath + i, "%s", to + lastdir + 1);
    return relpath;
  } else
    return to;
}
/*
STATIC char *finddirmatch(char *name,DIR *dp)
{
struct dirent *file;
int i,nlen;
char *cp;
again:
while(file = readdir(dp)) {
// return this file if it is of the form "name%XXXX"
return file->d_name;
nlen = strlen(name);
cp = file->d_name;
for(i=0; i<nlen; i++,cp++)
if(*cp != name[i]) goto again;
if(*cp == 0) return name;
if(*cp != '%') goto again;
for(cp++; *cp; cp++)
if(!isdigit(*cp)) goto again;
return file->d_name;
}
return NULL;
}
*/
/*
 * Scan the directory stream for the next entry that is either exactly
 * "name" or of the form "name%NNNN" (an all-digit suffix after '%').
 * Returns "name" for an exact match, the entry's d_name for a suffixed
 * match, or NULL when the stream is exhausted.  On Windows the pattern
 * check is skipped and every entry is accepted.
 */
STATIC char*
finddirmatch(char* name, DIR* dp)
{
  struct dirent* file;
  int i, nlen;
  char* cp;
again:
  /* parenthesized assignment: we really do assign-and-test here */
  while ((file = readdir(dp)) != NULL) {
    // return this file if it is of the form "name%XXXX"
#ifdef _WIN32
    return file->d_name;
#endif
    nlen = strlen(name);
    cp = file->d_name;
    for (i = 0; i < nlen; i++, cp++)
      if (*cp != name[i])
        goto again;
    if (*cp == 0)
      return name;
    if (*cp != '%')
      goto again;
    for (cp++; *cp; cp++)
      /* cast: passing a negative char to isdigit is undefined behavior */
      if (!isdigit((unsigned char)*cp))
        goto again;
    return file->d_name;
  }
  return NULL;
}
LSDADir*
lsda_opendir(int handle, char* path)
{
LSDAFile* daf;
LSDATable* var;
LSDADir* dir;
if (handle < 0 || handle >= num_daf) {
_errno = ERR_NOFILE;
if (report_level > 0)
fprintf(stderr, "lsda_opendir: invalid handle %d", handle);
return NULL;
}
daf = da_store + handle;
var = daf->FindVar(daf, path, 0, 1);
if (var == NULL || var->type) {
_errno = ERR_OPNDIR;
if (report_level > 0)
fprintf(stderr,
"lsda_opendir: cannot find directory %s in file %s%c%s",
path,
daf->ifr->dirname,
DIR_SEP,
daf->ifr->filename);
return NULL;
}
dir = (LSDADir*)malloc(sizeof(LSDADir));
dir->btree = var->children;
dir->cont = 0;
dir->daf = (void*)daf;
return dir;
}
/*
 * Fetch the next entry of an open directory.  On success "name" gets the
 * entry name; variables report their type id and element count, child
 * directories report type id 0 and their child count.  When the listing
 * is exhausted (or "dir" was never opened / is empty) name is set to ""
 * and the numeric outputs to -1.
 */
void
lsda_readdir(LSDADir* dir,
             char* name,
             int* type_id,
             Length* length,
             int* filenum)
{
  LSDATable* entry = NULL;
  if (dir && dir->btree)
    entry = (LSDATable*)BT_enumerate(dir->btree, &dir->cont);
  if (entry == NULL) {
    /* forgot to call opendir, empty directory, or enumeration done */
    name[0] = 0;
    *type_id = -1;
    *length = *filenum = -1;
    return;
  }
  strcpy(name, entry->name);
  if (entry->type) { /* a variable */
    *type_id = LSDAId(entry->type);
    *length = entry->length;
    *filenum = 0; /* obsolete */
  } else { /* a subdirectory */
    *type_id = 0;
    *length = entry->children ? BT_numentries(entry->children) : 0;
    *filenum = -1;
  }
}
/* Release a directory handle obtained from lsda_opendir. */
void
lsda_closedir(LSDADir* dir)
{
  /* free(NULL) is defined as a no-op, so no explicit guard is needed */
  free(dir);
}
/*
 * Shared implementation behind lsda_query/lsda_lquery: report the type
 * id and length of "name".  For a variable *type_id is its LSDA type id
 * and *length its element count; for a directory *type_id is 0 and
 * *length the number of children; on any failure *type_id is -1 and
 * *length 0.  "follow" is passed straight to FindVar — presumably it
 * controls link resolution (lsda_query passes 1, lsda_lquery 0); confirm
 * against FindVar.
 */
void
lsda_realquery(int handle, char* name, int* type_id, Length* length, int follow)
{
  LSDAFile* daf;
  LSDATable* var;
  if (handle < 0 || handle >= num_daf) {
    _errno = ERR_NOFILE;
    *type_id = -1;
    *length = 0;
    if (report_level > 0)
      fprintf(stderr, "lsda_query: invalid handle %d", handle);
    return;
  }
  daf = da_store + handle;
  var = daf->FindVar(daf, name, 0, follow);
  if (var) {
    if (var->type) {
      *type_id = LSDAId(var->type);
      *length = var->length;
    } else {
      /* a directory: length reports the child count */
      *type_id = 0;
      if (var->children)
        *length = BT_numentries(var->children);
      else
        *length = 0;
    }
  } else {
    *type_id = -1;
    *length = 0;
  }
}
/* Query type and length of "name", following links (follow=1). */
void
lsda_query(int handle, char* name, int* type_id, Length* length)
{
  lsda_realquery(handle, name, type_id, length, 1);
}
/* Query type and length of "name" itself, without following links (follow=0). */
void
lsda_lquery(int handle, char* name, int* type_id, Length* length)
{
  lsda_realquery(handle, name, type_id, length, 0);
}
/*
 * Like lsda_query, but additionally reports *filenum: 0 for a variable
 * (the value is obsolete), -1 for a directory or on failure.
 */
void
lsda_queryvar(int handle,
              char* name,
              int* type_id,
              Length* length,
              int* filenum)
{
  LSDAFile* daf;
  LSDATable* var;
  if (handle < 0 || handle >= num_daf) {
    _errno = ERR_NOFILE;
    *type_id = -1;
    *length = 0;
    *filenum = -1;
    if (report_level > 0)
      fprintf(stderr, "lsda_queryvar: invalid file handle %d", handle);
    return;
  }
  daf = da_store + handle;
  var = daf->FindVar(daf, name, 0, 1);
  if (var) {
    if (var->type) {
      *type_id = LSDAId(var->type);
      *length = var->length;
      *filenum = 0; /* obsolete */
      /*
      *offset = var->offset +daf->FileLengthSize+daf->FileCommandSize+
      daf->FileTypeIDSize + strlen(var->name)+1;
      */
    } else {
      /* a directory: length reports the child count */
      *type_id = 0;
      if (var->children)
        *length = BT_numentries(var->children);
      else
        *length = 0;
      *filenum = -1;
    }
  } else {
    *type_id = -1;
    *length = 0;
    *filenum = -1;
  }
}
#define MAX_BUFSIZE 10485760 /* 10 MB max buffer size */
/*
 * Recursively copy the contents of dir1 in file h1 into dir2 in file h2.
 * Returns 0 on success, 1 on error.  A single read/write buffer (and the
 * recursion depth) is kept in static variables, so this function is not
 * reentrant; the buffer grows up to MAX_BUFSIZE and is freed when the
 * outermost invocation finishes.
 * NOTE(review): both malloc results are used unchecked — confirm whether
 * allocation failure is considered possible here.
 */
int
lsda_copydir(int h1, char* dir1, int h2, char* dir2)
{
  /* Recursively copy the contents of dir1 in file h1 into
     dir2 in file h2
  */
  char name[MAXPATH], name2[MAXPATH];
  int type, fno, bper;
  Length btot, dsize;
  Length length, offset;
  unsigned char* data;
  LSDADir* dir;
  static int bufsize = 0, depth = 0;
  static unsigned char* buf = NULL;
  int rc;
  depth++;          /* keep track of recursion depth */
  if (buf == NULL) { /* get read/write buffer */
    bufsize = 65536;
    buf = (unsigned char*)malloc(bufsize);
  }
  rc = 1; /* default return code is "error" */
  if (lsda_cd(h1, dir1) < 0 || lsda_cd(h2, dir2) < 0)
    goto done;
  dir = lsda_opendir(h1, ".");
  if (dir == NULL)
    goto done;
  do {
    lsda_readdir(dir, name, &type, &length, &fno); /* get next entry */
    if (type == 0) {
      if (lsda_copydir(h1, name, h2, name))
        goto done;       /* subdir -- recur */
      lsda_cd(h1, ".."); /* return to my directory */
      lsda_cd(h2, "..");
    } else if (type > 0) { /* variable */
      bper = lsda_util_id2size(type);
      btot = bper * length; /* total # bytes */
      if (btot > bufsize && bufsize < MAX_BUFSIZE) { /* is buffer big enough? */
        bufsize = btot < MAX_BUFSIZE ? btot : MAX_BUFSIZE;
        free(buf);
        buf = (unsigned char*)malloc(bufsize);
      }
      dsize = bufsize / bper; /* entries per buffer */
      offset = 0;
      strcpy(name2, name);
      do { /* read/write in buffer size chunks */
        if (dsize > length - offset)
          dsize = length - offset;
        lsda_read(h1, type, name, offset, dsize, buf);
        lsda_write(h2, type, name2, dsize, buf);
        offset = offset + dsize;
        name2[0] = 0; /* reset name for "continuation" */
      } while (offset < length);
    }
  } while (type >= 0); /* type -1 marks end of the listing */
  lsda_closedir(dir);
  rc = 0; /* Success */
done:
  depth--;
  if (depth == 0) { /* if done, free buffer */
    free(buf);
    buf = NULL;
    bufsize = 0;
    lsda_flush(h2); /* flush receiving file */
  }
  return rc;
}
/*
 * Return the current working directory of the file open on "handle".
 * On a bad handle, sets _errno and returns an empty static buffer.
 */
char*
lsda_getpwd(int handle)
{
  LSDAFile* daf;
  if (handle >= 0 && handle < num_daf) {
    daf = da_store + handle;
    return daf->GetCWD(daf);
  }
  /* invalid handle: report and hand back an empty shared buffer */
  _errno = ERR_NOFILE;
  _scbuf[0] = 0;
  if (report_level > 0)
    fprintf(stderr, "lsda_getpwd: invalid handle %d", handle);
  return _scbuf;
}
/* Set the library-wide verbosity for error reporting (0 = silent). */
void
lsda_setreportlevel(int level)
{
  report_level = level;
}
/*
 * Return the first in-use handle strictly greater than "handle"
 * (pass a negative value to start from the beginning), or -1 when
 * there are no more open handles.
 */
int
lsda_nextopen(int handle)
{
  int next;
  if (handle < 0)
    handle = -1;
  for (next = handle + 1; next < num_daf; next++) {
    if (da_store[next].free == 0)
      return next;
  }
  return -1;
}
/*
 * Format "dirname<SEP>filename" for an IFile into the shared static
 * buffer _scbuf and return it (used for error messages).
 * NOTE(review): sprintf into _scbuf is unbounded — confirm _scbuf is
 * large enough for the longest possible path.
 */
static char*
fullfilename(IFile* ifp)
{
  sprintf(_scbuf, "%s%c%s", ifp->dirname, DIR_SEP, ifp->filename);
  return _scbuf;
}
/*
 * Print "string : <description of the current _errno>" to stderr and
 * reset _errno to ERR_NONE, mirroring perror(3).
 * NOTE(review): several messages spell "existant"; left untouched in
 * case tooling matches on the exact text.
 */
void
lsda_perror(char* string)
{
  fprintf(stderr, "%s : ", string);
  switch (_errno) {
    case ERR_NONE: /* no error */
      fprintf(stderr, "No error\n");
      break;
    case ERR_MALLOC: /* malloc failed */
      fprintf(stderr, "Malloc failed\n");
      break;
    case ERR_NOFILE: /* non-existent file */
      fprintf(stderr, "Attempt to reopen non-existant file\n");
      break;
    case ERR_FSEEK: /* fseek failed */
      fprintf(stderr, "Fseek failed\n");
      break;
    case ERR_READ: /* read error on file */
      fprintf(stderr, "Read error\n");
      break;
    case ERR_WRITE: /* write error on file */
      fprintf(stderr, "Write error\n");
      break;
    case ERR_NOENDSYMBOLTABLE: /* append, but end of symbol table not found */
      fprintf(stderr, "Attempt to truncate file at invalid location\n");
      break;
    case ERR_OPENDIR: /* error opening directory for file */
      fprintf(stderr, "Error opening directory for file operation\n");
      break;
    case ERR_OPENFILE: /* error opening file */
      fprintf(stderr, "Error opening file\n");
      break;
    case ERR_NOCONT: /* empty name to write when not continuing */
      fprintf(stderr, "Write with empty variable name when\n");
      fprintf(stderr, "last file operation was not a write\n");
      break;
    case ERR_DATATYPE: /* write with unknown data type */
      fprintf(stderr, "Write attempt with unknown variable type\n");
      break;
    case ERR_NOTYPEID: /* read unknown type id from file */
      fprintf(stderr, "Read unknown type id from file\n");
      break;
    case ERR_CD: /* illegal cd attempt in file */
      fprintf(stderr, "Illegal directory change\n");
      /* fixed: the two lines used to concatenate as "pathalready exists
         as a\n variable", mangling the message */
      fprintf(stderr, "Most likely a component in the specified path\n");
      fprintf(stderr, "already exists as a variable\n");
      break;
    case ERR_CLOSE: /* error on close ?? */
      fprintf(stderr, "Error closing file\n");
      break;
    case ERR_NOVAR: /* read on non-existant variable */
      fprintf(stderr, "Attempt to read on non-existant variable\n");
      break;
    case ERR_NOBEGINSYMBOLTABLE: /* missing Begin Symbol Table */
      fprintf(stderr, "Error: missing BEGINSYMBOLTABLE\n");
      break;
    case ERR_OPNDIR: /* open directory in file for query */
      fprintf(stderr, "Error opening directory for query\n");
      break;
    default:
      fprintf(stderr, "Unknown error %d\n", _errno);
      break;
  }
  _errno = ERR_NONE;
}
/*
 * Return the full path (dir + separator + filename) of the file open on
 * "handle", with any trailing "%NNN" multi-file suffix stripped.  The
 * result lives in the shared static buffer _scbuf, so copy it before the
 * next library call.  Returns an empty string on a bad or fileless handle.
 */
char*
lsda_getname(int handle)
{
  LSDAFile* daf;
  char *cp, *c;
  if (handle < 0 || handle >= num_daf) {
    _errno = ERR_NOFILE;
    _scbuf[0] = 0;
    if (report_level > 0)
      fprintf(stderr, "lsda_getname: invalid handle %d", handle);
    return _scbuf;
  }
  daf = da_store + handle;
  if (!daf->ifr && daf->ifile)
    daf->ifr = daf->ifile[0]; /* fall back to the first member file */
  if (!daf->ifr) {
    _scbuf[0] = 0;
    return _scbuf;
  }
  sprintf(_scbuf, "%s%c%s", daf->ifr->dirname, DIR_SEP, daf->ifr->filename);
  cp = strrchr(_scbuf, '%'); /* strip out the part after % */
  if (cp) {
    /* only strip when everything after '%' is digits */
    for (c = cp + 1; *c; c++)
      if (!isdigit(*c))
        cp = NULL;
  }
  if (cp)
    *cp = 0;
  return _scbuf;
}
/*
 * Like lsda_getname, but returns only the filename (no directory part),
 * again with any all-digit "%NNN" suffix stripped and stored in the
 * shared static buffer _scbuf.
 */
char*
lsda_getbasename(int handle)
{
  LSDAFile* daf;
  char *cp, *c;
  if (handle < 0 || handle >= num_daf) {
    _errno = ERR_NOFILE;
    _scbuf[0] = 0;
    if (report_level > 0)
      fprintf(stderr, "lsda_getbasename: invalid handle %d", handle);
    return _scbuf;
  }
  daf = da_store + handle;
  if (!daf->ifr && daf->ifile)
    daf->ifr = daf->ifile[0]; /* fall back to the first member file */
  if (!daf->ifr) {
    _scbuf[0] = 0;
    return _scbuf;
  }
  strcpy(_scbuf, daf->ifr->filename);
  cp = strrchr(_scbuf, '%'); /* strip out the part after % */
  if (cp) {
    /* only strip when everything after '%' is digits */
    for (c = cp + 1; *c; c++)
      if (!isdigit(*c))
        cp = NULL;
  }
  if (cp)
    *cp = 0;
  return _scbuf;
}
/* Return the open mode of the file on "handle", or -1 for a bad handle. */
int
lsda_getmode(int handle)
{
  if (handle >= 0 && handle < num_daf)
    return da_store[handle].openmode;
  _errno = ERR_NOFILE;
  if (report_level > 0)
    fprintf(stderr, "lsda_getmode: invalid handle %d", handle);
  return -1;
}
/*
 * Return the handle of an already-open file whose stored filename matches
 * "filen" (a trailing "%..." multi-file suffix at position len is cut off
 * before comparing), or -1 when no open file matches.
 */
int
lsda_gethandle(char* filen)
{
  int i, len;
  LSDAFile* daf;
  /*
  Scan open handles and check for one matching the given name
  */
  _errno = ERR_NONE; /* reset error */
  len = strlen(filen);
  for (i = 0; i < num_daf; i++) {
    if (!da_store[i].free) {
      daf = da_store + i;
      if (!daf->ifr && daf->ifile)
        daf->ifr = daf->ifile[0]; /* fall back to the first member file */
      if (!daf->ifr)
        continue;
      strcpy(_scbuf, daf->ifr->filename);
      if (_scbuf[len] == '%') /* ignore the "%NNN" member suffix */
        _scbuf[len] = 0;
      if (strcmp(_scbuf, filen) == 0)
        return i;
    }
  }
  return -1;
}
/*
 * Count the entries of directory "dirname" in file "fhandle" by
 * incrementing *ndir once per entry (the caller must initialize *ndir;
 * its final value is also returned).  Returns -1 when the directory
 * cannot be entered or opened.
 * NOTE(review): "len" is declared size_t but lsda_readdir takes a
 * Length* — confirm the two types coincide on all supported platforms.
 */
int
lsda_util_countdir(int fhandle, char* dirname, int* ndir)
{
  LSDADir* dir;
  char childdirname[80];
  int tid, fno;
  size_t len;
  if (lsda_cd(fhandle, dirname) < 0)
    return -1;
  dir = lsda_opendir(fhandle, ".");
  if (dir == NULL)
    return -1;
  do {
    lsda_readdir(dir, childdirname, &tid, &len, &fno);
    if (childdirname[0]) /* an empty name marks the end of the listing */
      (*ndir)++;
  } while (childdirname[0]);
  lsda_closedir(dir);
  return *ndir;
}
/*
 * Map a concrete LSDA type id onto its broad numeric kind: LSDA_INT for
 * signed integers, LSDA_UINT for unsigned integers, LSDA_FLOAT for
 * floating point.  Returns 0 for ids with no numeric kind.
 */
int
lsda_util_id2kind(int type_id)
{
  switch (type_id) {
    case LSDA_I1:
    case LSDA_I2:
    case LSDA_I4:
    case LSDA_I8:
    case LSDA_INT:
    case LSDA_SHORT:
    case LSDA_LONG:
    case LSDA_INTEGER:
      return LSDA_INT;
    case LSDA_U1:
    case LSDA_U2:
    case LSDA_U4:
    case LSDA_U8:
    case LSDA_UINT:
    case LSDA_USHORT:
    case LSDA_ULONG:
      return LSDA_UINT;
    case LSDA_R4:
    case LSDA_R8:
    case LSDA_FLOAT:
    case LSDA_DOUBLE:
    case LSDA_REAL:
    case LSDA_DP:
      return LSDA_FLOAT;
  }
  return 0;
}
/*
 * Return the size in bytes of one element of the given LSDA type id,
 * or 0 for an unknown id.  Fixed-width ids have hard-coded sizes; the
 * machine-dependent ids use sizeof the corresponding native type.
 */
int
lsda_util_id2size(int type_id)
{
  switch (type_id) {
    /* most common ones first */
    case LSDA_I4:
      return 4;
    case LSDA_R4:
      return 4;
    case LSDA_I8:
      return 8;
    case LSDA_R8:
      return 8;
    case LSDA_I1:
      return 1;
    case LSDA_I2:
      return 2;
    case LSDA_U4:
      return 4;
    case LSDA_U8:
      return 8;
    case LSDA_U1:
      return 1;
    case LSDA_U2:
      return 2;
    case LSDA_LINK:
      return 1;
    /* I don't think these ever show up in a file in practice */
    case LSDA_SHORT:
      return sizeof(short);
    case LSDA_INT:
      return sizeof(int);
    case LSDA_INTEGER:
      return sizeof(FortranInteger);
    case LSDA_LONG:
      return sizeof(long);
    case LSDA_USHORT:
      return sizeof(unsigned short);
    case LSDA_UINT:
      return sizeof(unsigned int);
    case LSDA_ULONG:
      return sizeof(unsigned long);
    case LSDA_FLOAT:
      return sizeof(float);
    case LSDA_REAL:
      return sizeof(FortranReal);
    case LSDA_DOUBLE:
      return sizeof(double);
    case LSDA_DP:
      return sizeof(FortranDouble);
  }
  return 0;
}
/*
 * Map an 8-byte ("double") type id onto its 4-byte ("single")
 * counterpart; ids without such a counterpart are returned unchanged.
 */
int
lsda_util_db2sg(int type_id)
{
  switch (type_id) {
    case LSDA_DOUBLE:
      return LSDA_FLOAT;
    case LSDA_DP:
      return LSDA_REAL;
    case LSDA_R8:
      return LSDA_R4;
    case LSDA_U8:
      return LSDA_U4;
    case LSDA_ULONG:
      return LSDA_UINT;
    case LSDA_I8:
      return LSDA_I4;
    case LSDA_LONG:
      return LSDA_INT;
  }
  return type_id;
}
extern void
free_all_tables(void);
extern void
free_all_types(void);
#ifndef NO_FORTRAN
extern void
free_all_fdirs(void);
#endif
/*
 * Close every open LSDA file and release all library-global state,
 * returning the library to its pristine, never-used condition.
 */
void
free_all_lsda(void)
{
  int i;
  /*
  First close all open files
  */
  for (i = 0; i < num_daf; i++) {
    if (!da_store[i].free)
      lsda_close(i);
  }
  /*
  Now free everything
  */
  if (da_store)
    free(da_store);
  free_all_tables();
  free_all_types();
#ifndef NO_FORTRAN
  free_all_fdirs();
#endif
  /* reset the globals so the library can be re-initialized */
  da_store = NULL;
  num_daf = 0;
  _errno = ERR_NONE;
  report_level = 0;
}
// qd additions
/*
 * Return a newly malloc'ed array of *length malloc'ed C strings holding
 * the names of the children of directory "name" (the caller owns and
 * must free both levels).  Returns NULL — with *length set to 0 — on a
 * bad handle, a missing path, a path that is a variable rather than a
 * directory, an empty directory, or allocation failure.
 */
char**
lsda_get_children_names(int handle, char* name, int follow, Length* length)
{
  LSDAFile* daf;
  LSDATable* t;
  LSDATable* child;
  char** ret;
  int count = 0;  /* BT_enumerate cursor */
  int iChild = 0; /* next free slot in ret */
  int i;
  *length = 0;
  /* validate the file handle */
  if (handle < 0 || handle >= num_daf) {
    _errno = ERR_NOFILE;
    /* fixed: the message used to claim it came from "lsda_query" */
    if (report_level > 0)
      fprintf(stderr, "lsda_get_children_names: invalid handle %d", handle);
    return 0;
  }
  daf = da_store + handle;
  /* find the symbol for the requested path */
  t = daf->FindVar(daf, name, 0, follow);
  if (t == NULL) {
    fprintf(stderr, "lsda_get_children_names: path %s does not exist.", name);
    return 0;
  }
  if (t->type != 0) /* a variable, not a directory: no children */
    return 0;
  if (t->children == NULL) /* empty directory: nothing to enumerate */
    return 0;
  *length = BT_numentries(t->children);
  if ((*length) < 1)
    return 0;
  ret = (char**)malloc((*length) * sizeof(char*));
  if (ret == NULL) {
    _errno = ERR_MALLOC;
    *length = 0;
    return 0;
  }
  while (1) {
    child = (LSDATable*)BT_enumerate(t->children, &count);
    if (!child)
      break;
    /* +1 for the terminating NUL: the original under-allocated by one
       byte, so every strcpy below overflowed its buffer */
    char* entry = (char*)malloc(strlen(child->name) + 1);
    if (entry == NULL) {
      for (i = 0; i < iChild; i++)
        free(ret[i]);
      free(ret);
      _errno = ERR_MALLOC;
      *length = 0;
      return 0;
    }
    strcpy(entry, child->name);
    ret[iChild++] = entry;
  }
  return ret;
}
#
load '../../.bats/common.bats.bash'
#
# Print the absolute path of the named fixtures directory.
# Uses an explicit format string: passing variable data as the printf
# FORMAT would misbehave if the path ever contained a '%'.
get_fixtures_root() {
  printf '%s/fixtures/%s' "${BATS_TEST_DIRNAME}" "$1"
}
#
# Verify that get_fixtures_root resolves a fixture name to a path
# below the test directory's fixtures/ folder.
@test 'get_fixtures_root() <name>: should return the path to the <name> fixtures' {
  run get_fixtures_root "kitchen"
  assert_output "${BATS_TEST_DIRNAME}/fixtures/kitchen"
  assert_success
}
# Copy the "kitchen" fixture into a fresh temp directory and verify the
# copy landed.  (Fixes: "fiture" typo in the description, unquoted cp
# arguments, and the assertion checking the SOURCE fixture instead of
# the copy — the original test passed even if cp did nothing.)
@test 'copy a fixture to a temp folder' {
  teardown() {
    temp_del "$TEST_TEMP_DIR"
  }
  TEST_TEMP_DIR="$(temp_make)"
  TEST_FIXTURE_ROOT="$(get_fixtures_root "kitchen")"
  cp -r "${TEST_FIXTURE_ROOT}" "${TEST_TEMP_DIR}"
  assert_file_exist "${TEST_TEMP_DIR}/kitchen/fork"
}
|
export const CREATE_ROOM = 'createRoom'
export const DELETE_ROOM = 'deleteRoom'
export const FETCH_ROOMS = 'fetchRooms'
/* for users */
export const USER_CREATE = 'createUser';
export const USER_GET = 'getUser';
/* for posts */
export const CREATE_POST = 'createPost'
export const FETCH_POSTS = 'fetchPosts' |
<reponame>temojudo/stanford-cs106a-solutions
/*
* File: PythagoreanTheorem.java
* Name:
* Section Leader:
* -----------------------------
* This file is the starter file for the PythagoreanTheorem problem.
*/
import acm.program.*;
public class PythagoreanTheorem extends ConsoleProgram {
    /**
     * Prompt for the two legs a and b and print the hypotenuse
     * c = sqrt(a*a + b*b).  Non-positive inputs are rejected.
     */
    public void run() {
        println("Enter values to compute Pythagorean theorem.");
        int a = readInt("a: ");
        int b = readInt("b: ");
        if (a <= 0 || b <= 0) {
            // fixed user-facing typo: "Egdes" -> "Edges"
            println("Edges of triangle must be positive number");
        } else {
            // a*a + b*b is int arithmetic; fine for console-sized inputs
            double d = a * a + b * b;
            double c = Math.sqrt(d);
            println("C = " + c);
        }
    }
}
|
<filename>src/org/livepeer/LivepeerWowza/LivepeerAPIException.java
package org.livepeer.LivepeerWowza;
/** Unchecked exception signalling a failure reported by the Livepeer API. */
public class LivepeerAPIException extends RuntimeException {
  public LivepeerAPIException(String message) {
    super(message);
  }

  /**
   * Backward-compatible addition: carry the underlying cause so API
   * failures triggered by lower-level errors keep their stack trace.
   */
  public LivepeerAPIException(String message, Throwable cause) {
    super(message, cause);
  }
}
|
package proxy.gumballmonitor;
/** Demo driver: exercise a GumballMachine and report via its monitor proxy. */
public class GumballMachineTestDrive {
    public static void main(String[] args) {
        int count = 10;
        String gumballName = "Machineo";
        GumballMachine gumballMachine = new GumballMachine(gumballName, count);
        GumballMonitor monitor = new GumballMonitor(gumballMachine);

        // Initial report, then five rounds of "buy two gumballs, report" —
        // the loop replaces the original five copy-pasted blocks and
        // performs exactly the same call sequence.
        monitor.report();
        for (int round = 0; round < 5; round++) {
            gumballMachine.insertQuarter();
            gumballMachine.turnCrank();
            gumballMachine.insertQuarter();
            gumballMachine.turnCrank();
            monitor.report();
        }
    }
}
|
#!/bin/bash
# Publish fluent-bit packages for one release version: copy RPMs into the
# YUM repos, switch the aptly-published APT repos to a new snapshot, sign
# the YUM metadata, and copy Windows artefacts and JSON schemas.
set -eux

SOURCE_DIR=${SOURCE_DIR:-$HOME/apt}
APTLY_CONFIG=${APTLY_CONFIG:-/etc/aptly.conf}

# ${1:-} so "set -u" does not abort before we can print the usage text.
if [ -z "${1:-}" ]; then
    echo "Usage: ./publish_all new_version"
    echo "                        | "
    echo "                      ----- "
    echo "                      1.9.1 "
    echo
    exit 1
fi

VERSION="$1"
# Release series directory, e.g. 1.9.1 -> 1.9.  The original used
# ${MAJOR_VERSION:-$VERSION##\.*}, where the ## is literal text inside the
# default word, yielding garbage like "1.9.1##\.*".
MAJOR_VERSION=${MAJOR_VERSION:-${VERSION%.*}}

if [[ ! -d "$SOURCE_DIR" ]]; then
    echo "Missing source directory: $SOURCE_DIR"
    # was missing: without sources there is nothing to publish
    exit 1
fi

# Copy the versioned RPMs for both architectures and rebuild repo metadata.
# $1: subdirectory of SOURCE_DIR, $2: target repository base directory
publish_rpm() {
    local arch
    for arch in aarch64 x86_64; do
        find "$SOURCE_DIR/$1/" -iname "*-bit-$VERSION-*${arch}*.rpm" -exec cp -fv {} "$2/${arch}" \;
        createrepo -dvp "$2/${arch}"
    done
}

# Add the versioned debs to an aptly repo, snapshot it, and switch the
# published repo to the new snapshot; on failure drop the snapshot so a
# later retry can recreate it.
# $1: subdir under SOURCE_DIR, $2: repo/snapshot id, $3: distribution,
# $4: aptly publish endpoint
publish_deb() {
    local subdir="$1" id="$2" dist="$3" endpoint="$4"
    local repo="flb-$id" snapshot="fluent-bit-$id-$VERSION"
    find "$SOURCE_DIR/$subdir/" -iname "*-bit_$VERSION*.deb" -exec aptly -config="$APTLY_CONFIG" repo add "$repo" {} \;
    aptly -config="$APTLY_CONFIG" snapshot create "$snapshot" from repo "$repo"
    if ! aptly -config="$APTLY_CONFIG" publish switch -gpg-key="releases@fluentbit.io" "$dist" "$endpoint" "$snapshot"; then
        # Cleanup snapshot in case we want to retry later
        aptly -config="$APTLY_CONFIG" snapshot drop "$snapshot"
        exit 1
    fi
}

echo "Publishing AmazonLinux 2"
publish_rpm "amazonlinux" "/var/www/apt.fluentbit.io/amazonlinux/2"

echo "Publishing Centos 7"
publish_rpm "centos/7" "/var/www/apt.fluentbit.io/centos/7"

echo "Publishing Centos 8"
publish_rpm "centos/8" "/var/www/apt.fluentbit.io/centos/8"

echo "Publishing Debian 10 Buster"
publish_deb "debian/buster" "debian-buster" "buster" "filesystem:debian/buster:"

# Bullseye endpoints carry an extra path component — note the trailing name.
echo "Publishing Debian 11 Bullseye"
publish_deb "debian/bullseye" "debian-bullseye" "bullseye" "filesystem:debian/bullseye:bullseye"

echo "Publishing Raspbian 10 Buster"
publish_deb "raspbian/buster" "raspbian-buster" "buster" "filesystem:raspbian/buster:"

echo "Publishing Raspbian 11 Bullseye"
publish_deb "raspbian/bullseye" "raspbian-bullseye" "bullseye" "filesystem:raspbian/bullseye:bullseye"

echo "Publishing Ubuntu 16.04 Xenial"
publish_deb "ubuntu/xenial" "ubuntu-xenial" "xenial" "filesystem:ubuntu/xenial:"

echo "Publishing Ubuntu 18.04 Bionic"
publish_deb "ubuntu/bionic" "ubuntu-bionic" "bionic" "filesystem:ubuntu/bionic:"

echo "Publishing Ubuntu 20.04 Focal"
publish_deb "ubuntu/focal" "ubuntu-focal" "focal" "filesystem:ubuntu/focal:"

# Sign YUM repo meta-data
find "/var/www/apt.fluentbit.io" -name repomd.xml -exec gpg --detach-sign --armor --yes -u "releases@fluentbit.io" {} \;

# Windows - we do want word splitting and ensure some files exist
if compgen -G "$SOURCE_DIR/windows/*$VERSION*" > /dev/null; then
    echo "Copying Windows artefacts"
    # shellcheck disable=SC2086
    cp -vf "$SOURCE_DIR"/windows/*$VERSION* /var/www/releases.fluentbit.io/releases/"$MAJOR_VERSION"/
else
    echo "Missing Windows builds"
fi

# Handle the JSON schema by copying in the new versions (if they exist) and then updating the symlinks that point at the latest.
if compgen -G "$SOURCE_DIR/fluent-bit-schema*.json" > /dev/null; then
    echo "Updating JSON schema"
    cp -vf "$SOURCE_DIR"/fluent-bit-schema*.json /var/www/releases.fluentbit.io/releases/"$MAJOR_VERSION/"
    # Simpler than 'ln --relative --target-directory=/var/www/releases.fluentbit.io/releases/"$MAJOR_VERSION"'
    pushd /var/www/releases.fluentbit.io/releases/"$MAJOR_VERSION"
    ln -sf "fluent-bit-schema-$VERSION.json" fluent-bit-schema.json
    ln -sf "fluent-bit-schema-pretty-$VERSION.json" fluent-bit-schema-pretty.json
    popd
else
    echo "Missing JSON schema"
fi
import { Link } from "gatsby"
import React from "react"
import SEO from "../components/seo"
import FullLogo from "../img/Full_name_logo-removebg-preview.svg"
import NotFound from "../img/undraw_page_not_found_su7k.svg"
/**
 * 404 page shown for unmatched routes.  All visible copy is in Italian
 * (the site's language): apology text, a hint to use the contact form,
 * and a button back to the home page.
 */
export default function Error(){
    // Full-height column: illustration, headline, two paragraphs,
    // home-page button, site logo.
    return(
        <div>
            <SEO title="Error!" description="Errore nel caricamento della pagina. Forse hai digitato male il link o qualcosa è andato storto..."></SEO>
            <div className="flex flex-col place-items-center gap-6 p-10 h-screen">
                <NotFound className="w-3/12 h-full"></NotFound>
                <h1 className="text-5xl font-bold text-giallo">Ops...</h1>
                <p className="text-xl text-gray-600 text-center font-semibold xl:w-3/5 mx-auto w-11/12">
                    Pare che la pagina che hai cercato non esista...
                </p>
                <p className="text-xl text-gray-600 text-center xl:w-3/5 mx-auto w-11/12">
                    Se hai riscontrato qualche problema, per favore, contattami con il form dedicato disponibile sul sito
                </p>
                <div className="bg-gray-700 hover:bg-giallo text-2xl m-5 p-5 rounded-xl transition duration-500 ease-in-out transform hover:-translate-y-1 hover:scale-110">
                    <Link to="/" className="h-full w-full text-gray-200 hover:text-gray-700">Torna al sito</Link>
                </div>
                <FullLogo className="w-2/3 h-full "></FullLogo>
            </div>
        </div>
    )
}
|
# toggle macOS function-key behaviour via AppleScript
alias toggleFn='osascript ~/Dropbox/Cooper/toggle-f-keys.scpt'
#shortcut to convertify directory
alias ccc='cd ~/sites/cfy'
#shortcut to sites
alias sites='cd ~/sites'
#google search shorthand (wraps the "google" command)
alias ggl='google'
#xattr remove quarantine (Required to run unsigned apps on MacOS 10.12)
alias xletrun='xattr -d -r com.apple.quarantine'
|
<gh_stars>0
#include <ncadtoollib/NGProperty.h>
#include <ncadtoollib/NGStream.h>
#include <stdexcept>
#include <string>
#include <sstream>
namespace ncadtoollib {
// Base-class fallback: a generic NGProperty carries no printable value.
std::string NGProperty::valueAsString() const {
    return {};
}
// Base-class fallback emits the literal token "1" — presumably a harmless
// placeholder for Verilog output; NOTE(review): confirm against callers.
std::string NGProperty::valueAsVerilogFriendlyToken() const {
    return "1";
}
// Integer properties render as plain decimal text for both purposes.
std::string NGPropertyInteger::valueAsString() const {
    return std::to_string(propertyValue);
}
std::string NGPropertyInteger::valueAsVerilogFriendlyToken() const {
    return std::to_string(propertyValue);
}
// Float properties likewise use std::to_string's default "%f" formatting.
std::string NGPropertyFloat::valueAsString() const {
    return std::to_string(propertyValue);
}
std::string NGPropertyFloat::valueAsVerilogFriendlyToken() const {
    return std::to_string(propertyValue);
}
// Raw (unquoted, unescaped) string value.
std::string NGPropertyString::valueAsString() const {
    return propertyValue.string();
}
// Returns the string value quoted and escaped as a Verilog string literal:
// printable ASCII is copied through, backslash and double-quote are
// backslash-escaped, and everything else becomes a 3-digit octal escape.
std::string NGPropertyString::valueAsVerilogFriendlyToken() const {
    std::stringstream quotedValue;
    quotedValue << "\"";
    for (char character : propertyValue.string()) {
        if (character < 32 || character > 127) {
            // Non-printable (and, on signed-char platforms, any byte >= 0x80,
            // which tests negative and so lands here too): emit "\ooo".
            quotedValue << "\\";
            quotedValue.width(3);
            quotedValue.fill('0');
            quotedValue << std::oct;
            // BUG FIX: inserting a plain `char` ignores the oct/width/fill
            // formatting state and writes the raw byte. Cast through
            // unsigned char -> unsigned int so the value is formatted as a
            // zero-padded 3-digit octal number.
            quotedValue << static_cast<unsigned int>(static_cast<unsigned char>(character));
        }
        else if (character == '\\' || character == '\"') {
            quotedValue << "\\";
            quotedValue << character;
        }
        else {
            quotedValue << character;
        }
    }
    quotedValue << "\"";
    return quotedValue.str();
}
// Debug-style dump of the three (still unidentified) timing fields.
std::string NGPropertyTiming::valueAsString() const {
    std::stringstream ss;
    ss << "NGPropertyTiming<" << unknown1 << ", " << unknown2 << ", " << unknown3 << ">";
    return ss.str();
}
// Debug-style dump of the six (still unidentified) pin-reference fields.
std::string NGPropertyPinref::valueAsString() const {
    std::stringstream ss;
    ss << "NGPropertyPinref<" << unknown1 << ", " << unknown2 << ", " << unknown3 << ", " << unknown4 << ", " << unknown5 << ", " << unknown6 << ">";
    return ss.str();
}
// Debug-style dump; only unknown2 is serialized for parameters (see the
// matching stream operators below).
std::string NGPropertyParameter::valueAsString() const {
    std::stringstream ss;
    ss << "NGPropertyParameter<" << unknown2 << ">";
    return ss.str();
}
// Debug-style dump of the timing string payload.
std::string NGPropertyTimingString::valueAsString() const {
    std::stringstream ss;
    ss << "NGPropertyTimingString<" << propertyValue.string() << ">";
    return ss.str();
}
// (De)serialization operators. Each derived type first streams its
// NGProperty base (the property name) and then its own payload, in a fixed
// field order that must match on read and write.
NGStream& operator <<(NGStream& stream, const NGProperty& obj) {
    return stream << obj.propertyName;
}
NGStream& operator >>(NGStream& stream, NGProperty& obj) {
    return stream >> obj.propertyName;
}
NGStream& operator <<(NGStream& stream, const NGPropertyInteger& obj) {
    return stream << static_cast<const NGProperty&>(obj) << obj.propertyValue;
}
NGStream& operator >>(NGStream& stream, NGPropertyInteger& obj) {
    return stream >> static_cast<NGProperty&>(obj) >> obj.propertyValue;
}
NGStream& operator <<(NGStream& stream, const NGPropertyFloat& obj) {
    return stream << static_cast<const NGProperty&>(obj) << obj.propertyValue;
}
NGStream& operator >>(NGStream& stream, NGPropertyFloat& obj) {
    return stream >> static_cast<NGProperty&>(obj) >> obj.propertyValue;
}
NGStream& operator <<(NGStream& stream, const NGPropertyString& obj) {
    return stream << static_cast<const NGProperty&>(obj) << obj.propertyValue;
}
NGStream& operator >>(NGStream& stream, NGPropertyString& obj) {
    return stream >> static_cast<NGProperty&>(obj) >> obj.propertyValue;
}
// Object-valued properties are not supported yet; both directions throw.
// (Parameters intentionally unused.)
NGStream& operator <<(NGStream& stream, const NGPropertyObject& obj) {
    throw std::logic_error("NGPropertyObject is not implemented");
}
NGStream& operator >>(NGStream& stream, NGPropertyObject& obj) {
    throw std::logic_error("NGPropertyObject is not implemented");
}
NGStream& operator <<(NGStream& stream, const NGPropertyTiming& obj) {
    return stream << static_cast<const NGProperty&>(obj) << obj.unknown1 << obj.unknown2 << obj.unknown3;
}
NGStream& operator >>(NGStream& stream, NGPropertyTiming& obj) {
    return stream >> static_cast<NGProperty&>(obj) >> obj.unknown1 >> obj.unknown2 >> obj.unknown3;
}
NGStream& operator <<(NGStream& stream, const NGPropertyPinref& obj) {
    return stream
        << static_cast<const NGProperty&>(obj)
        << obj.unknown1
        << obj.unknown2
        << obj.unknown3
        << obj.unknown4
        << obj.unknown5
        << obj.unknown6;
}
NGStream& operator >>(NGStream& stream, NGPropertyPinref& obj) {
    return stream
        >> static_cast<NGProperty&>(obj)
        >> obj.unknown1
        >> obj.unknown2
        >> obj.unknown3
        >> obj.unknown4
        >> obj.unknown5
        >> obj.unknown6;
}
// Parameters only persist unknown2 — presumably the other fields are derived
// or unused; NOTE(review): confirm against the on-disk format.
NGStream& operator <<(NGStream& stream, const NGPropertyParameter& obj) {
    return stream << static_cast<const NGProperty&>(obj) << obj.unknown2;
}
NGStream& operator >>(NGStream& stream, NGPropertyParameter& obj) {
    return stream >> static_cast<NGProperty&>(obj) >> obj.unknown2;
}
NGStream& operator <<(NGStream& stream, const NGPropertyTimingString& obj) {
    return stream << static_cast<const NGProperty&>(obj) << obj.propertyValue;
}
// Deserializes a timing-string property: base (name) first, then the payload.
NGStream& operator >>(NGStream& stream, NGPropertyTimingString& obj) {
    // BUG FIX: the original used `<<` with a const cast inside a `>>`
    // (extraction) operator, so reading a NGPropertyTimingString wrote the
    // name instead of reading it. Now mirrors every sibling `>>` overload.
    return stream >> static_cast<NGProperty&>(obj) >> obj.propertyValue;
}
} |
<reponame>AngelPASTORROJAS/L3S6-GL-webmagic-PASTOR_ROJAS-BENAOUD<gh_stars>1000+
// webmagic page-processing script: scrapes basic stats from a GitHub repo page.
var name=xpath("//h1[@class='entry-title public']/strong/a/text()")
var readme=xpath("//div[@id='readme']/tidyText()")
var star=xpath("//ul[@class='pagehead-actions']/li[1]//a[@class='social-count js-social-count']/text()")
var fork=xpath("//ul[@class='pagehead-actions']/li[2]//a[@class='social-count']/text()")
var url=page.getUrl().toString()
// Only repo pages (which have a title) are reported.
// NOTE(review): `fork` is extracted but never printed — possibly an oversight.
if (name!=null){
println(name)
println(readme)
println(star)
println(url)
}
// Follow links to other repos and user profiles.
urls("(https://github\\.com/\\w+/\\w+)")
urls("(https://github\\.com/\\w+)")
/// <summary>
/// Builds reCAPTCHA v2 verification requests from the raw response token and
/// client IP, delegating final construction to the configured settings.
/// </summary>
public class ReCaptchaV2
{
    private ReCaptchaSettings _settings;

    public ReCaptchaV2(ReCaptchaSettings settings)
    {
        this._settings = settings;
    }

    /// <summary>
    /// Assembles a <see cref="ReCaptchaV2Request"/> carrying the secret key,
    /// the "g-recaptcha-response" form token and the caller's REMOTE_ADDR,
    /// then hands it to the settings object for finalization.
    /// </summary>
    /// <param name="secretKey">Server-side reCAPTCHA secret.</param>
    /// <param name="responseKey">The g-recaptcha-response token from the client.</param>
    /// <param name="remoteIp">Client IP address.</param>
    public ReCaptchaV2Request GetReCaptchaV2Request(string secretKey, string responseKey, string remoteIp)
    {
        // Mimic the classic ASP.NET request shape: form values + server variables.
        var form = new NameValueCollection() { { "g-recaptcha-response", responseKey } };
        var serverVariables = new NameValueCollection() { { "REMOTE_ADDR", remoteIp } };
        // Assuming ReCaptchaV2Request class exists with appropriate properties
        var reCaptchaRequest = new ReCaptchaV2Request
        {
            SecretKey = secretKey,
            ResponseKey = responseKey,
            RemoteIp = remoteIp,
            Form = form,
            ServerVariables = serverVariables
        };
        // Assuming CreateReCaptchaV2Request method exists in ReCaptchaSettings class
        return this._settings.CreateReCaptchaV2Request(reCaptchaRequest);
    }
}
/// <summary>
/// Settings holder for reCAPTCHA v2 validation; disposable so future
/// implementations can release acquired resources.
/// </summary>
public class ReCaptchaSettings : IDisposable
{
    // Other members and methods

    /// <summary>
    /// Finalizes a reCAPTCHA v2 request built by <see cref="ReCaptchaV2"/>.
    /// </summary>
    /// <param name="request">The pre-populated request.</param>
    /// <returns>The request, ready for verification.</returns>
    public ReCaptchaV2Request CreateReCaptchaV2Request(ReCaptchaV2Request request)
    {
        // BUG FIX: the original body was empty, which does not compile for a
        // non-void method. Until real processing is implemented, pass the
        // request through unchanged. TODO: add verification-specific setup.
        return request;
    }

    public void Dispose()
    {
        // Implementation for cleanup
    }
}
/// <summary>
/// Plain data carrier for a reCAPTCHA v2 verification request: the server
/// secret, the client response token, the client IP, and the request's form
/// values and server variables as name/value collections.
/// </summary>
public class ReCaptchaV2Request
{
    public string SecretKey { get; set; }
    public string ResponseKey { get; set; }
    public string RemoteIp { get; set; }
    public NameValueCollection Form { get; set; }
    public NameValueCollection ServerVariables { get; set; }
}
import pino, { Logger } from "pino";
import { EventEmitter } from "events";
import { JsonRpcProvider } from "@walletconnect/jsonrpc-provider";
import {
formatJsonRpcResult,
IJsonRpcProvider,
isJsonRpcRequest,
JsonRpcPayload,
JsonRpcRequest,
} from "@walletconnect/jsonrpc-utils";
import WsConnection from "@walletconnect/jsonrpc-ws-connection";
import {
generateChildLogger,
getDefaultLoggerOptions,
getLoggerContext,
} from "@walletconnect/logger";
import { RelayJsonRpc } from "@walletconnect/relay-api";
import { toMiliseconds } from "@walletconnect/time";
import {
ICore,
IMessageTracker,
IPublisher,
IRelayer,
ISubscriber,
RelayerOptions,
RelayerTypes,
} from "@walletconnect/types";
import { formatRelayRpcUrl, ERROR } from "@walletconnect/utils";
import {
RELAYER_CONTEXT,
RELAYER_DEFAULT_LOGGER,
RELAYER_DEFAULT_RELAY_URL,
RELAYER_EVENTS,
RELAYER_PROVIDER_EVENTS,
RELAYER_RECONNECT_TIMEOUT,
RELAYER_SUBSCRIBER_SUFFIX,
} from "../constants";
import { MessageTracker } from "./messages";
import { Publisher } from "./publisher";
import { Subscriber } from "./subscriber";
/**
 * Relayer connects the client to a WalletConnect relay server over a JSON-RPC
 * provider (websocket by default), delegating message bookkeeping,
 * subscriptions and publishing to dedicated sub-components.
 */
export class Relayer extends IRelayer {
  public readonly protocol = "irn";
  public readonly version = 1;
  public core: ICore;
  public logger: Logger;
  public events = new EventEmitter();
  public provider: IJsonRpcProvider;
  public messages: IMessageTracker;
  public subscriber: ISubscriber;
  public publisher: IPublisher;
  public name = RELAYER_CONTEXT;
  // Guards the public API; flipped once init() completes.
  private initialized = false;

  constructor(opts: RelayerOptions) {
    super(opts);
    this.core = opts.core;
    // opts.logger is either an existing Logger (reused as a child logger) or
    // a level string for a fresh pino instance.
    this.logger =
      typeof opts.logger !== "undefined" && typeof opts.logger !== "string"
        ? generateChildLogger(opts.logger, this.name)
        : pino(getDefaultLoggerOptions({ level: opts.logger || RELAYER_DEFAULT_LOGGER }));
    const rpcUrl =
      opts.rpcUrl ||
      formatRelayRpcUrl(this.protocol, this.version, RELAYER_DEFAULT_RELAY_URL, opts.projectId);
    // A caller-supplied provider wins over the default websocket provider.
    this.provider =
      typeof opts.relayProvider !== "string" && typeof opts.relayProvider !== "undefined"
        ? opts.relayProvider
        : new JsonRpcProvider(new WsConnection(rpcUrl));
    this.messages = new MessageTracker(this.logger, opts.core);
    this.subscriber = new Subscriber(this, this.logger);
    this.publisher = new Publisher(this, this.logger);
  }

  /** Connects the provider and restores persisted state; call before any other method. */
  public async init() {
    // NOTE(review): this logs before initialization actually completes.
    this.logger.trace(`Initialized`);
    await Promise.all([this.messages.init(), this.provider.connect(), this.subscriber.init()]);
    this.registerEventListeners();
    this.initialized = true;
  }

  get context() {
    return getLoggerContext(this.logger);
  }

  get connected() {
    return this.provider.connection.connected;
  }

  get connecting() {
    return this.provider.connection.connecting;
  }

  /** Publishes a message to a topic and records it in the message tracker. */
  public async publish(topic: string, message: string, opts?: RelayerTypes.PublishOptions) {
    this.isInitialized();
    await this.publisher.publish(topic, message, opts);
    await this.recordMessageEvent({ topic, message });
  }

  /** Subscribes to a topic; resolves with the subscription id. */
  public async subscribe(topic: string, opts?: RelayerTypes.SubscribeOptions) {
    this.isInitialized();
    const id = await this.subscriber.subscribe(topic, opts);
    return id;
  }

  public async unsubscribe(topic: string, opts?: RelayerTypes.UnsubscribeOptions) {
    this.isInitialized();
    await this.subscriber.unsubscribe(topic, opts);
  }

  // Thin pass-throughs to the internal EventEmitter.
  public on(event: string, listener: any) {
    this.events.on(event, listener);
  }

  public once(event: string, listener: any) {
    this.events.once(event, listener);
  }

  public off(event: string, listener: any) {
    this.events.off(event, listener);
  }

  public removeListener(event: string, listener: any) {
    this.events.removeListener(event, listener);
  }

  // ---------- Private ----------------------------------------------- //

  private async recordMessageEvent(messageEvent: RelayerTypes.MessageEvent) {
    const { topic, message } = messageEvent;
    await this.messages.set(topic, message);
  }

  // A message is ignored when we are not subscribed to its topic or we have
  // already seen it (de-duplication via the message tracker).
  private shouldIgnoreMessageEvent(messageEvent: RelayerTypes.MessageEvent) {
    const { topic, message } = messageEvent;
    if (!this.subscriber.topics.includes(topic)) return true;
    const exists = this.messages.has(topic, message);
    return exists;
  }

  // Handles raw payloads from the provider: only subscription-notification
  // requests are processed; they are acknowledged and re-emitted locally.
  private async onProviderPayload(payload: JsonRpcPayload) {
    this.logger.debug(`Incoming Relay Payload`);
    this.logger.trace({ type: "payload", direction: "incoming", payload });
    if (isJsonRpcRequest(payload)) {
      if (!payload.method.endsWith(RELAYER_SUBSCRIBER_SUFFIX)) return;
      const event = (payload as JsonRpcRequest<RelayJsonRpc.SubscriptionParams>).params;
      const { topic, message } = event.data;
      const messageEvent = { topic, message } as RelayerTypes.MessageEvent;
      this.logger.debug(`Emitting Relayer Payload`);
      this.logger.trace({ type: "event", event: event.id, ...messageEvent });
      // Emitted keyed by subscription id so per-subscription listeners fire.
      this.events.emit(event.id, messageEvent);
      await this.acknowledgePayload(payload);
      await this.onMessageEvent(messageEvent);
    }
  }

  private async onMessageEvent(messageEvent: RelayerTypes.MessageEvent) {
    if (this.shouldIgnoreMessageEvent(messageEvent)) return;
    this.events.emit(RELAYER_EVENTS.message, messageEvent);
    await this.recordMessageEvent(messageEvent);
  }

  // JSON-RPC result `true` acknowledges receipt to the relay server.
  private async acknowledgePayload(payload: JsonRpcPayload) {
    const response = formatJsonRpcResult(payload.id, true);
    await this.provider.connection.send(response);
  }

  private registerEventListeners() {
    this.provider.on(RELAYER_PROVIDER_EVENTS.payload, (payload: JsonRpcPayload) =>
      this.onProviderPayload(payload),
    );
    this.provider.on(RELAYER_PROVIDER_EVENTS.connect, () => {
      this.events.emit(RELAYER_EVENTS.connect);
    });
    this.provider.on(RELAYER_PROVIDER_EVENTS.disconnect, () => {
      this.events.emit(RELAYER_EVENTS.disconnect);
      // Attempt reconnection after one second.
      setTimeout(() => {
        this.provider.connect();
      }, toMiliseconds(RELAYER_RECONNECT_TIMEOUT));
    });
    this.provider.on(RELAYER_PROVIDER_EVENTS.error, (err: unknown) =>
      this.events.emit(RELAYER_EVENTS.error, err),
    );
  }

  private isInitialized() {
    if (!this.initialized) {
      throw new Error(ERROR.NOT_INITIALIZED.stringify(this.name));
    }
  }
}
|
#!/bin/sh
## By Hebert F. Barros 2021
## Interactively configures the global git user.name and user.email.
echo "What name do you want to use in GIT user.name?"
echo "For example, mine will be \"John Doe\""
# -r: keep backslashes in the typed value literal instead of treating them
# as escapes.
read -r git_config_user_name
git config --global user.name "$git_config_user_name"
clear
echo "What email do you want to use in GIT user.email?"
echo "For example, mine will be \"doe.john@gmail.com\""
read -r git_config_user_email
# BUG FIX: quote the expansion — an unquoted value would be word-split and
# glob-expanded by the shell before reaching git.
git config --global user.email "$git_config_user_email"
clear
|
class Model:
    """Hold a dataset (features and targets) plus the result of an
    arbitrary transformation applied to the features.

    The trailing-underscore attribute names mirror the scikit-learn /
    DESlib "DSEL" convention: ``DSEL_data_`` is the feature data and
    ``DSEL_target_`` the labels.
    """

    def __init__(self, X, y):
        # Inputs are stored as-is; no copying or validation is performed.
        self.DSEL_data_ = X
        self.DSEL_target_ = y
        # Populated by process_data(); None until then.
        self.processed_data_ = None

    def process_data(self, operation):
        """Apply *operation* (a callable taking the feature data) to the
        stored features, cache the result on ``processed_data_`` and return
        it.

        Returning the result is backward compatible: the previous version
        returned ``None``, which no caller could have used.
        """
        self.processed_data_ = operation(self.DSEL_data_)
        return self.processed_data_
# Example usage: build a Model and double every feature value.
X = [1, 2, 3, 4, 5]
y = [0, 1, 0, 1, 0]
model = Model(X, y)


def custom_operation(data):
    """Double each element of *data* (x + x == x * 2 for numbers)."""
    return [value + value for value in data]


# Run the transformation; the doubled features land on processed_data_.
model.process_data(custom_operation)
print(model.processed_data_)  # -> [2, 4, 6, 8, 10]
#!/bin/bash
#
# The MIT License
#
# Copyright (c) 1997-2021 The University of Utah
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
# Resolve the sibling build script next to this one. $() plus quoting keeps
# paths containing spaces intact (the original backtick/unquoted form would
# word-split).
SCRIPT="$(dirname "$0")"/AMRFDView-bld.sh
"${SCRIPT}" -p HeatProblem -v CC -s P5 -c FCBilinear
|
<reponame>dragonli-people/dragonli-account-change-service
/**
*
*/
package org.dragonli.service.modules.accountchangeservice;
import java.util.HashSet;
import java.util.Set;
/**
* @author mac
*
*/
/**
 * Shared mutable globals for the account-change service.
 * (Original comments translated from Chinese.)
 */
public class AccountChangeVars {
    // Must be populated during initialization (original: "需要初始化").
    public final static Set<Integer> groups = new HashSet<>();// must be initialized
    // Must be injected at startup (original: "需要注入").
    public static int groupCount ;// must be injected
    public static boolean pauseBefore = false;
}
|
require 'chronic'
module Ginatra
class Chart
class << self
def rc_commits params = {}
round_chart Ginatra::Stat.commits(params).inject({}) { |result, repo|
repo_id = repo[0]
commits = repo[1]
result[repo_id] = {'value' => commits.size}
result
}
end
def rc_lines params = {}
round_chart Ginatra::Stat.lines(params).inject({}) { |result, line_data|
repo_id = line_data[0]
result[repo_id] = {'value' => line_data[1]}
result
}
end
def rc_hours params = {}
res = Ginatra::Activity.hours(params).inject([]) { |output, repo|
repo_id = repo[0]
color = Ginatra::Helper.get_repo(repo_id).color
output << {
'value' => total_hours(repo[1]),
'label' => repo_id,
'color' => color,
'highlight' => color
}
output
}
return res
end
def rc_sprint_commits params = {}
dates = Ginatra::Config.sprint_dates
params[:from] = dates[0]
params[:til] = dates[-1] + 24 * 3600 - 1
rc_commits params
end
def rc_sprint_lines params = {}
dates = Ginatra::Config.sprint_dates
params[:from] = dates[0]
params[:til] = dates[-1] + 24 * 3600 - 1
rc_lines params
end
def rc_sprint_hours params = {}
dates = Ginatra::Config.sprint_dates
params[:from] = dates[0]
params[:til] = dates[-1] + 24 * 3600 - 1
rc_hours params
end
def lc_commits params = {}
line_chart lc_data params, 'commits'
end
def lc_sprint_commits params = {}
dates = Ginatra::Config.sprint_dates
params[:from] = dates[0]
params[:til] = dates[-1] + 24 * 3600 - 1
line_chart lc_data(params, 'commits')
end
def lc_lines params = {}
line_chart lc_data params, 'lines'
end
def lc_sprint_lines params = {}
dates = Ginatra::Config.sprint_dates
dates << dates.last + 24 * 3600 - 1
params[:time_stamps] = dates
line_chart lc_data params, 'lines'
end
def lc_hours params = {}
line_chart lc_data params, 'hours'
end
def lc_sprint_hours params = {}
dates = Ginatra::Config.sprint_dates
params[:from] = dates[0]
params[:til] = dates[-1] + 24 * 3600 - 1
line_chart lc_data params, 'hours'
end
def lc_lines_commits params = {}
line_chart lc_combine_data lc_lines_data(params), lc_commits_data(params)
end
def lc_hours_commits params = {}
line_chart lc_combine_data lc_hours_data(params), lc_commits_data(params)
end
def lc_sprint_hours_commits params = {}
dates = Ginatra::Config.sprint_dates
params[:from] = dates[0]
params[:til] = dates[-1] + 24 * 3600 - 1
lc_hours_commits params
end
def timeline_commits params = {}
# default to 1 week from now
params = timeline_prepare_params(params)
time_stamp_str = params[:labels]
time_stamps = params[:time_stamps]
params.reject! { |k| [:time_stamps, :labels].include? k }
commits = Ginatra::Stat.commits params
color = params[:in].nil? ? nil : Ginatra::Helper.get_repo(params[:in]).color
init_data = {'labels' => [], 'datasets' => [{'label' => 'Commits', 'data' => [], 'color' => color}]}
count = 0
line_chart time_stamps[0..-2].inject(init_data) { |output, time_stamp|
from = time_stamp
til = time_stamps[count + 1]
commits_count = 0
commits.each do |repo_id, repo_commits|
unless repo_commits.nil? || repo_commits.empty?
repo_commits.each_with_index do |commit, i|
commit_date = Time.parse commit.flatten[1]['date']
commits_count += 1 if from <= commit_date && commit_date < til
end
end
end
output['datasets'][0]['data'] << commits_count
count += 1
output
}.merge({'labels' => time_stamp_str[0..-2]})
end
def timeline_hours params = {}
params = timeline_prepare_params(params)
time_stamp_str = params[:labels]
time_stamps = params[:time_stamps]
params.reject! { |k| [:time_stamps, :labels].include? k }
commits = Ginatra::Stat.commits params
init_data = {'labels' => [], 'datasets' => [{'label' => 'Hours', 'data' => []}]}
count = 0
line_chart time_stamps[0..-2].inject(init_data) { |output, time_stamp|
from = time_stamp
til = time_stamps[count + 1]
hours = 0
commits.each do |repo_id, repo_commits|
unless repo_commits.nil? || repo_commits.empty?
hours += Ginatra::Activity.compute_hours repo_commits.select { |commit|
commit_date = Time.parse commit.flatten[1]['date']
from <= commit_date && commit_date < til
}
end
end
output['datasets'][0]['data'] << hours
count += 1
output
}.merge({'labels' => time_stamp_str[0..-2]})
end
def timeline_sprint_commits params = {}
timeline_sprint params, 'commits'
end
def timeline_sprint_hours params = {}
timeline_sprint params, 'hours'
end
def timeline_sprint_hours_commits params = {}
spr_commits = timeline_sprint_commits params
spr_hours = timeline_sprint_hours params
spr_commits['datasets'] += spr_hours['datasets']
spr_commits
end
def timeline_sprint params = {}, type = 'commits'
dates = Ginatra::Config.sprint_dates
dates << dates.last + 24 * 3600 - 1
params[:time_stamps] = dates
case type
when 'commits'
timeline_commits params
when 'hours'
timeline_hours params
end
end
private
def get_repo_color repo_id
Ginatra::Helper.get_repo(repo_id).color
end
def round_chart data = {}
data.inject([]) { |output, v|
repo_id = v[0]
color = get_repo_color repo_id
params = v[1]
params['label'] = v[0]
params['highlight'] ||= color
params['color'] ||= color
output << params
output
}
end
def line_chart data = {}
data['datasets'].each_with_index do |dataset, i|
color = dataset['color'].nil? ? '#97BBCD' : dataset['color']
data['datasets'][i].merge! ({
'fillColor' => rgba(color, 0.5),
'strokeColor' => rgba(color),
'pointColor' => rgba(color),
'pointStrokeColor' => '#ffffff',
'pointHighlightFill' => '#ffffff',
'pointHighlightStroke' => rgba(color)
})
end
data
end
def lc_data params = {}, data_type = 'commits'
case data_type
when 'commits'
lc_commits_data params
when 'lines'
lc_lines_data params
else
# type = 'hours'
lc_hours_data params
end
end
def lc_commits_data params = {}
init_data = {'labels' => [], 'datasets' => [{}]}
Ginatra::Stat.commits(params).inject(init_data) { |result, repo|
repo_id = repo[0]
commits = repo[1]
result['labels'] << repo_id
result['datasets'][0]['label'] ||= "Commits"
result['datasets'][0]['data'] ||= []
result['datasets'][0]['data'] << commits.size
result['datasets'][0]['color'] = params[:color]
result
}
end
def lc_lines_data params = {}
init_data = {'labels' => [], 'datasets' => [{}]}
Ginatra::Stat.lines(params).inject(init_data) { |result, repo|
repo_id = repo[0]
lines_count = repo[1]
result['labels'] << repo_id
result['datasets'][0]['label'] ||= "Lines"
result['datasets'][0]['data'] ||= []
result['datasets'][0]['data'] << lines_count
result['datasets'][0]['color'] = params[:color]
result
}
end
def lc_hours_data params = {}
init_data = {'labels' => [], 'datasets' => [{}]}
Ginatra::Activity.hours(params).inject(init_data) { |result, repo|
repo_id = repo[0]
result['labels'] << repo_id
result['datasets'][0]['label'] ||= "Hours"
result['datasets'][0]['data'] ||= []
result['datasets'][0]['data'] << total_hours(repo[1])
result['datasets'][0]['color'] = params[:color]
result
}
end
def lc_combine_data data1, data2
if data1['labels'] == data2['labels']
data1['datasets'][0]['color'] = '#ce0000'
data2['datasets'][0]['color'] = '#9ccf31'
{'labels' => data1['labels'],
'datasets' => data1['datasets'] + data2['datasets']}
else
false
end
end
def default_timeline_stamps
['8 days ago at 0:00', '7 days ago at 0:00', '6 days ago at 0:00',
'5 days ago at 0:00', '4 days ago at 0:00', '3 days ago at 0:00',
'yesterday at 0:00', 'today at 0:00', 'today at 23:59:59']
end
def timeline_prepare_params params = {}
# default to 1 week from now
params[:time_stamps] ||= default_timeline_stamps
params[:time_stamps].map! { |time_stamp|
if time_stamp.class.to_s == 'Time'
time_stamp
else
Chronic.parse time_stamp
end
}
params[:labels] ||= params[:time_stamps][0..-1].map { |time_stamp|
time_stamp.strftime("%a %d %b")
}
params[:from] = params[:time_stamps][0]
params[:til] = params[:time_stamps][-1]
params
end
def total_hours hours_data = {}
hours_data.inject(0.00) { |total, author|
total += author['hours']
total
}
end
def rgba hex, a = 1
hex += hex[1..-1] if hex.length == 4
hex.match(/#(..)(..)(..)/).to_a[1..-1].inject('rgba(') { |rgba, v|
rgba += "#{v.hex},"
rgba
} + "#{a})"
end
end
end
end
|
<filename>Practice/Intro-To-Java-8th-Ed-Daniel-Y.-Liang/Chapter-6/Chapter06P29/src/main/PickingFourCards.java
package main;
/**
* @author <NAME>
*
*/
/**
 * Repeatedly picks random cards (with replacement) from a 52-card deck,
 * adding a card's value to a running sum only when doing so keeps the sum at
 * or below 24, and reports how many picks were needed to reach exactly 24.
 */
public class PickingFourCards
{
    public static void main(String[] args)
    {
        final int NUMBER_OF_CARDS = 52;
        int picks = 0;
        int sum = 0;
        while (sum != 24)
        {
            // Random card number in [1, 52]. Every pick counts, even ones
            // whose value is rejected for overshooting 24.
            int card = (int) (Math.random() * NUMBER_OF_CARDS) + 1;
            int cardValue = cardValue(card);
            if ((sum + cardValue) <= 24)
            {
                sum += cardValue;
            }
            picks++;
        }
        System.out.println("Number of picks to get the sum of 24: " + picks);
    }

    /**
     * Returns the card value of the specified integer argument.
     * Cards 1-52 map cyclically to ranks 1-13 (Ace=1 ... King=13), one cycle
     * per suit.
     * <ul>
     * <li>
     * If the integer argument is not between 1 and 52 inclusive, the card value will default to 0.
     * </li>
     * </ul>
     *
     * @param card card number
     * @return the card value (1-13), or 0 for an out-of-range card number
     */
    public static int cardValue(int card)
    {
        if ((card < 1) || (card > 52))
        {
            return 0;
        }
        // The original 13-case switch mapped (card - 1) % 13 == k to k + 1;
        // plain modular arithmetic says the same thing in one line.
        return (card - 1) % 13 + 1;
    }
}
package org.museautomation.ui.valuesource.parser;
/**
* @author <NAME> (see LICENSE.txt for license details)
*/
/**
 * Markers pushed onto the value-source parser's stack to record which kind
 * of expression is currently being parsed.
 */
public enum ParseStackMarker
    {
    ElementExpression,
    ElementLookupExpression,
    ArgumentedExpression,
    ArrayExpression
    }
|
#!/usr/bin/env bash
# Builds the tota11y addon from a clean /tmp copy (for reproducible builds)
# and moves the produced zips/bundles back into the source tree.
echo "Copying to /tmp for reproducible builds"
rm -rf /tmp/totally-build
mkdir /tmp/totally-build
cp -r . /tmp/totally-build/
SOURCE_DIRECTORY=$(pwd)
# Abort if the cd fails — everything below assumes the /tmp copy is the cwd.
cd /tmp/totally-build/ || exit 1
echo "Running build"
npm ci
npm run build
# -f: the zips may not exist on a fresh checkout; don't print errors for that.
rm -f addon.zip
rm -f totally.zip
echo "Zipping addon"
cd addon/ || exit 1
zip -r addon.zip . -x .arcconfig .arclint bower.json .directory .eslintrc .git/\* .gitignore .gitlab-ci.yml node_modules/\* sync.sh .travis.yml public/\* screenshots/\*
cd ..
mv addon/addon.zip addon.zip
echo "Zipping source code"
zip -r totally.zip . -x .arcconfig .arclint bower.json .directory .eslintrc .git/\* .gitignore .gitlab-ci.yml node_modules/\* sync.sh .travis.yml public/\* screenshots/\*
echo "Returning to $SOURCE_DIRECTORY"
# Quote the path so directories containing spaces survive word splitting.
cd "$SOURCE_DIRECTORY" || exit 1
echo "Moving built artifacts into $SOURCE_DIRECTORY"
rm -f addon.zip
rm -f totally.zip
rm -f addon/build/tota11y.js
rm -f addon/build/sidebar.js
rm -f build/tota11y.js
rm -f build/tota11y.min.js
rm -f build/sidebar.js
rm -f build/sidebar.min.js
mv /tmp/totally-build/addon.zip addon.zip
mv /tmp/totally-build/totally.zip totally.zip
mv /tmp/totally-build/addon/build/tota11y.js addon/build/tota11y.js
mv /tmp/totally-build/addon/build/sidebar.js addon/build/sidebar.js
mv /tmp/totally-build/build/tota11y.js build/tota11y.js
mv /tmp/totally-build/build/tota11y.min.js build/tota11y.min.js
mv /tmp/totally-build/build/sidebar.js build/sidebar.js
mv /tmp/totally-build/build/sidebar.min.js build/sidebar.min.js
echo "Deleting /tmp copy"
rm -rf /tmp/totally-build
|
#!/bin/bash
set -e
# Mandatory arguments with no default values provided:
# PR_REPO_SLUG - the Github name of the repo to be merged into the origin/master
# PR_BRANCH - the branch to be merged, if set to "master" no merge will happen
# IPA_DEPLOY_LOCATION - the location understandable by the "scp" command
# executed at the end of the script to deploy the output .ipa file
# LIB_JITSI_MEET_PKG (optional) - the npm package for lib-jitsi-meet which will
# be put in place of the current version in the package.json file.
#
# Other than that the script requires the following env variables to be set:
#
# DEPLOY_SSH_CERT_URL - the SSH private key used by the 'scp' command to deploy
# the .ipa. It is expected to be encrypted with the $ENCRYPTION_PASSWORD.
# ENCRYPTION_PASSWORD - the password used to decrypt certificate/key files used
# in the script.
# IOS_TEAM_ID - the team ID inserted into build-ipa-.plist.template file in
# place of "YOUR_TEAM_ID".
# Print an error message and abort the script.
function echoAndExit1() {
    # Quote "$1": the unquoted form word-split and glob-expanded the message.
    echo "$1"
    exit 1
}
# Validate mandatory inputs. All expansions are quoted: with the original
# unquoted form, an unset variable made `[ -z ]` see no operand at all.
if [ -z "$PR_REPO_SLUG" ]; then
    echoAndExit1 "No PR_REPO_SLUG defined"
fi
if [ -z "$PR_BRANCH" ]; then
    echoAndExit1 "No PR_BRANCH defined"
fi
if [ -z "$IPA_DEPLOY_LOCATION" ]; then
    echoAndExit1 "No IPA_DEPLOY_LOCATION defined"
fi
echo "PR_REPO_SLUG=${PR_REPO_SLUG} PR_BRANCH=${PR_BRANCH}"
# do the merge and git log
if [ "$PR_BRANCH" != "master" ]; then
    echo "Will merge ${PR_REPO_SLUG}/${PR_BRANCH} into master"
    git config remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"
    git fetch origin master
    git checkout master
    git pull "https://github.com/${PR_REPO_SLUG}.git" "$PR_BRANCH" --no-edit
fi
# Link this lib-jitsi-meet checkout in jitsi-meet through the package.json
if [ ! -z "${LIB_JITSI_MEET_PKG}" ];
then
    echo "Adjusting lib-jitsi-meet package in package.json to ${LIB_JITSI_MEET_PKG}"
    # escape for the sed
    LIB_JITSI_MEET_PKG=$(echo "$LIB_JITSI_MEET_PKG" | sed -e 's/\\/\\\\/g; s/\//\\\//g; s/&/\\\&/g')
    sed -i.bak -e "s/\"lib-jitsi-meet.*/\"lib-jitsi-meet\"\: \"${LIB_JITSI_MEET_PKG}\",/g" package.json
    echo "Package.json lib-jitsi-meet line:"
    grep lib-jitsi-meet package.json
else
    echo "LIB_JITSI_MEET_PKG var not set - will not modify the package.json"
fi
git log -20 --graph --pretty=format':%C(yellow)%h%Cblue%d%Creset %s %C(white) %an, %ar%Creset'
# certificates: fetch and decrypt the SSH key used for deployment
CERT_DIR="ios/ci/certs"
mkdir -p $CERT_DIR
curl -L -o ${CERT_DIR}/id_rsa.enc ${DEPLOY_SSH_CERT_URL}
openssl aes-256-cbc -k "$ENCRYPTION_PASSWORD" -in ${CERT_DIR}/id_rsa.enc -d -a -out ${CERT_DIR}/id_rsa
chmod 0600 ${CERT_DIR}/id_rsa
ssh-add ${CERT_DIR}/id_rsa
npm install
# Ever since the Apple Watch app has been added the bitcode for WebRTC needs to be downloaded in order to build successfully
./node_modules/react-native-webrtc/tools/downloadBitcode.sh
cd ios
pod install --repo-update --no-ansi
cd ..
mkdir -p /tmp/jitsi-meet/
xcodebuild archive -quiet -workspace ios/jitsi-meet.xcworkspace -scheme jitsi-meet -configuration Release -archivePath /tmp/jitsi-meet/jitsi-meet.xcarchive
sed -e "s/YOUR_TEAM_ID/${IOS_TEAM_ID}/g" ios/ci/build-ipa.plist.template > ios/ci/build-ipa.plist
IPA_EXPORT_DIR=/tmp/jitsi-meet/jitsi-meet-ipa
xcodebuild -quiet -exportArchive -archivePath /tmp/jitsi-meet/jitsi-meet.xcarchive -exportPath $IPA_EXPORT_DIR -exportOptionsPlist ios/ci/build-ipa.plist
echo "Will try deploy the .ipa to: ${IPA_DEPLOY_LOCATION}"
if [ ! -z "${SCP_PROXY_HOST}" ];
then
    scp -o ProxyCommand="ssh -t -A -l %r ${SCP_PROXY_HOST} -o \"StrictHostKeyChecking no\" -o \"BatchMode yes\" -W %h:%p" -o StrictHostKeyChecking=no -o LogLevel=DEBUG "${IPA_EXPORT_DIR}/jitsi-meet.ipa" "${IPA_DEPLOY_LOCATION}"
else
    scp -o StrictHostKeyChecking=no -o LogLevel=DEBUG "${IPA_EXPORT_DIR}/jitsi-meet.ipa" "${IPA_DEPLOY_LOCATION}"
fi
# cleanup: remove build products and the decrypted key material
rm -r /tmp/jitsi-meet/
rm -r "$CERT_DIR"
|
import React from 'react';
import styled from 'styled-components';
import { Link } from 'gatsby';
import kebabCase from 'lodash/kebabCase';
// Pill-shaped link used for post tags. The CSS template literal is left
// byte-for-byte untouched; hover and focus-visible states share the same
// highlight colors.
export const TagButton = styled(Link)`
  cursor: pointer;
  display: inline-block;
  margin: 0 5px 10px 5px;
  background-color: var(--color-category-button);
  padding: var(--sizing-sm) var(--sizing-base);
  border-radius: var(--border-radius-base);
  font-size: 0.875rem;
  font-weight: var(--font-weight-semi-bold);

  :focus {
    outline: none;
  }

  &:hover {
    color: var(--color-white);
    background-color: var(--color-blue);
  }

  &:focus-visible {
    color: var(--color-white);
    background-color: var(--color-blue);
  }
`;
// Renders a row of tag buttons linking to /tag/<kebab-cased-tag>.
// `tags` may be undefined (optional chaining), in which case nothing renders
// inside the wrapper div.
export default function Tags({ tags }) {
  return (
    <div>
      {tags?.map((tag) => (
        <TagButton key={`tag-${tag}`} to={`/tag/${kebabCase(tag)}`}>
          {tag}
        </TagButton>
      ))}
    </div>
  );
}
|
#!/bin/bash
# Builds the justinram11/lambda run images for every runtime, then the
# corresponding build images.
RUNTIMES="provided go1.x nodejs4.3 nodejs6.10 nodejs8.10 nodejs10.x nodejs12.x python2.7 python3.6 python3.7 python3.8 ruby2.5 ruby2.7 java8 java11 dotnetcore2.0 dotnetcore2.1 dotnetcore3.1"
TOP_DIR="${PWD}/.."
for RUNTIME in $RUNTIMES; do
    echo "$RUNTIME"
    # BUG FIX: abort if the directory is missing — the original unchecked cd
    # would silently run docker build from whatever the previous cwd was.
    cd "${TOP_DIR}/${RUNTIME}/run" || exit 1
    [ -x ./update_libs.sh ] && ./update_libs.sh
    docker build --no-cache -t "justinram11/lambda:${RUNTIME}" .
done
for RUNTIME in $RUNTIMES; do
    echo "build-${RUNTIME}"
    cd "${TOP_DIR}/${RUNTIME}/build" || exit 1
    docker build --no-cache -t "justinram11/lambda:build-${RUNTIME}" .
done
|
package block
import (
"context"
"time"
"gx/ipfs/QmNf3wujpV2Y7Lnj2hy2UrmuX8bhMDStRHbnSLh7Ypf36h/go-hamt-ipld"
"gx/ipfs/QmRu7tiRnFk9mMPpVECQTBQJqXtmG132jJxA1w9A7TtpBz/go-ipfs-blockstore"
"github.com/filecoin-project/go-filecoin/actor/builtin"
"github.com/filecoin-project/go-filecoin/address"
"github.com/filecoin-project/go-filecoin/chain"
"github.com/filecoin-project/go-filecoin/consensus"
"github.com/filecoin-project/go-filecoin/core"
"github.com/filecoin-project/go-filecoin/mining"
"github.com/filecoin-project/go-filecoin/porcelain"
"github.com/filecoin-project/go-filecoin/sampling"
"github.com/filecoin-project/go-filecoin/state"
"github.com/filecoin-project/go-filecoin/types"
)
// API provides an interface to block protocols: adding new blocks, starting
// and stopping mining, and reading chain state. All collaborators are
// injected via New.
type API struct {
	addNewBlockFunc   func(context.Context, *types.Block) (err error)
	blockStore        blockstore.Blockstore
	cborStore         *hamt.CborIpldStore
	chainReader       chain.ReadStore
	consensusProtocol consensus.Protocol
	// blockTime and mineDelay configure mining cadence; mineDelay comes from
	// the blockMineDelay constructor argument.
	blockTime time.Duration
	mineDelay time.Duration
	msgPool   *core.MessagePool
	nodePorcelain *porcelain.API
	powerTable    consensus.PowerTableView
	// Callbacks into the node for controlling the mining loop.
	startMiningFunc func(context.Context) error
	stopMiningFunc  func(context.Context)
	syncer          chain.Syncer
	ticketSigner    consensus.TicketSigner
}
// New creates a new API instance with the provided deps.
//
// The parameters map one-to-one onto API's fields; nothing beyond the plain
// assignments below happens here.
func New(
	addNewBlockFunc func(context.Context, *types.Block) (err error),
	bstore blockstore.Blockstore,
	cborStore *hamt.CborIpldStore,
	chainReader chain.ReadStore,
	con consensus.Protocol,
	blockTime, blockMineDelay time.Duration,
	msgPool *core.MessagePool,
	nodePorc *porcelain.API,
	ptv consensus.PowerTableView,
	startMiningFunc func(context.Context) error,
	stopMiningfunc func(context.Context),
	syncer chain.Syncer,
	signer consensus.TicketSigner,
) API {
	api := API{
		// storage + chain access
		blockStore:  bstore,
		cborStore:   cborStore,
		chainReader: chainReader,
		syncer:      syncer,

		// consensus + mining parameters
		consensusProtocol: con,
		powerTable:        ptv,
		ticketSigner:      signer,
		blockTime:         blockTime,
		mineDelay:         blockMineDelay,

		// node callbacks + pools
		addNewBlockFunc: addNewBlockFunc,
		startMiningFunc: startMiningFunc,
		stopMiningFunc:  stopMiningfunc,
		msgPool:         msgPool,
		nodePorcelain:   nodePorc,
	}
	return api
}
// MiningOnce mines a single block in the given context, and returns the new block.
//
// It builds a one-shot mining worker from the API's stored dependencies,
// mines against the current chain head, and hands the resulting block to
// addNewBlockFunc before returning it.
func (a *API) MiningOnce(ctx context.Context) (*types.Block, error) {
	// Loads the state tree for the tipset identified by tsKey.
	getStateByKey := func(ctx context.Context, tsKey string) (state.Tree, error) {
		tsas, err := a.chainReader.GetTipSetAndState(ctx, tsKey)
		if err != nil {
			return nil, err
		}
		return state.LoadStateTree(ctx, a.cborStore, tsas.TipSetStateRoot, builtin.Actors)
	}
	getState := func(ctx context.Context, ts types.TipSet) (state.Tree, error) {
		return getStateByKey(ctx, ts.String())
	}
	// Chain weight of a tipset; a tipset without parents is weighed against
	// a nil parent state.
	getWeight := func(ctx context.Context, ts types.TipSet) (uint64, error) {
		parent, err := ts.Parents()
		if err != nil {
			return uint64(0), err
		}
		if parent.Len() == 0 {
			return a.consensusProtocol.Weight(ctx, ts, nil)
		}
		pSt, err := getStateByKey(ctx, parent.String())
		if err != nil {
			return uint64(0), err
		}
		return a.consensusProtocol.Weight(ctx, ts, pSt)
	}
	// Resolve the configured miner address plus its owner address and public
	// key, all needed by the mining worker.
	minerAddrIf, err := a.nodePorcelain.ConfigGet("mining.minerAddress")
	if err != nil {
		return nil, err
	}
	// NOTE(review): this type assertion panics if the config value is not an
	// address.Address — confirm ConfigGet guarantees the type.
	minerAddr := minerAddrIf.(address.Address)
	minerOwnerAddr, err := a.nodePorcelain.MinerGetOwnerAddress(ctx, minerAddr)
	if err != nil {
		return nil, err
	}
	minerPubKey, err := a.nodePorcelain.MinerGetKey(ctx, minerAddr)
	if err != nil {
		return nil, err
	}
	getAncestors := func(ctx context.Context, ts types.TipSet, newBlockHeight *types.BlockHeight) ([]types.TipSet, error) {
		return chain.GetRecentAncestors(ctx, ts, a.chainReader, newBlockHeight, consensus.AncestorRoundsNeeded, sampling.LookbackParameter)
	}
	worker := mining.NewDefaultWorker(
		a.msgPool, getState, getWeight, getAncestors, consensus.NewDefaultProcessor(),
		a.powerTable, a.blockStore, a.cborStore, minerAddr, minerOwnerAddr, minerPubKey,
		a.ticketSigner, a.blockTime)
	// Mine on the current head; MineOnce returns both a transport error and a
	// per-result error, checked separately below.
	ts := a.chainReader.Head()
	res, err := mining.MineOnce(ctx, worker, a.mineDelay, ts)
	if err != nil {
		return nil, err
	}
	if res.Err != nil {
		return nil, res.Err
	}
	// Hand the new block to the node's add-block callback; a failure here
	// aborts rather than returning an un-added block.
	if err := a.addNewBlockFunc(ctx, res.NewBlock); err != nil {
		return nil, err
	}
	return res.NewBlock, nil
}
// MiningStart calls the node's StartMining function.
//
// It simply delegates to the startMiningFunc supplied to New; any error is
// returned unchanged.
func (a *API) MiningStart(ctx context.Context) error {
	return a.startMiningFunc(ctx)
}
// MiningStop calls the node's StopMining function.
//
// It simply delegates to the stopMiningFunc supplied to New.
func (a *API) MiningStop(ctx context.Context) {
	a.stopMiningFunc(ctx)
}
|
<filename>src/test/java/com/webapp/service/database/dao/impl/BuildingDaoImplTest.java
package com.webapp.service.database.dao.impl;

import com.webapp.model.Building;
import com.webapp.service.database.dao.BuildingDao;

import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.*;
/**
* @author <NAME>
*/
class BuildingDaoImplTest {
private BuildingDao buildingDao;
private Connection connection = mock(Connection.class);
private PreparedStatement preparedStatement = mock(PreparedStatement.class);
private SQLException test_sql_exception;
private ResultSet rs = mock(ResultSet.class);
private ByteArrayOutputStream outContent;
private ByteArrayOutputStream errContent;
private PrintStream originalOut;
private PrintStream originalErr;
class TestableBuildingDaoImpl extends BuildingDaoImpl {
@Override
public Connection getConnection() {
return connection;
}
}
@BeforeEach
void init() {
this.buildingDao = new TestableBuildingDaoImpl();
this.test_sql_exception = new SQLException();
this.outContent = new ByteArrayOutputStream();
this.errContent = new ByteArrayOutputStream();
this.originalOut = System.out;
this.originalErr = System.err;
System.setOut(new PrintStream(outContent));
System.setErr(new PrintStream(errContent));
}
@AfterEach
void tear_down() throws IOException {
System.setErr(this.originalErr);
System.setOut(this.originalOut);
this.outContent.close();
this.errContent.close();
}
@Test
void test_throws_sql_exception_when_list_building() throws SQLException {
when(connection.prepareStatement(anyString())).thenThrow(test_sql_exception);
this.buildingDao.listBuilding(5);
assertTrue(errContent.toString().contains("java.sql.SQLException"));
}
@Test
void test_when_list_building() throws SQLException {
int id = 305;
String name = "PengGe";
String description = "PGNB";
String price = "999";
Building building = new Building();
building.setId(id);
building.setName(name);
building.setDescription(description);
building.setPrice(price);
List<Building> buildingList = new ArrayList<>();
buildingList.add(building);
when(connection.prepareStatement(anyString())).thenReturn(preparedStatement);
when(preparedStatement.executeQuery()).thenReturn(rs);
when(rs.next()).thenReturn(true, false); /* First call returns true, second call returns false */
when(rs.getInt("id")).thenReturn(id);
when(rs.getString("name")).thenReturn(name);
when(rs.getString("description")).thenReturn(description);
when(rs.getString("price")).thenReturn(price);
assertEquals(buildingList, this.buildingDao.listBuilding(5));
verify(preparedStatement).setInt(1, 5);
}
@Test
void test_throws_sql_exception_query_Building_By_Id() throws SQLException {
when(connection.prepareStatement(anyString())).thenThrow(test_sql_exception);
this.buildingDao.queryBuildingById(5);
assertTrue(errContent.toString().contains("java.sql.SQLException"));
}
@Test
void test_query_Building_By_Id() throws SQLException {
int id = 305;
String name = "PengGe";
String description = "PGNB";
String price = "999";
Building building = new Building();
building.setId(id);
building.setName(name);
building.setDescription(description);
building.setPrice(price);
when(connection.prepareStatement(anyString())).thenReturn(preparedStatement);
when(preparedStatement.executeQuery()).thenReturn(rs);
when(rs.next()).thenReturn(true, false); /* First call returns true, second call returns false */
when(rs.getInt("id")).thenReturn(id);
when(rs.getString("name")).thenReturn(name);
when(rs.getString("description")).thenReturn(description);
when(rs.getString("price")).thenReturn(price);
assertEquals(building, this.buildingDao.queryBuildingById(5));
verify(preparedStatement).setInt(1, 5);
}
@Test
void test_throws_sql_exception_when_add_Building() throws SQLException {
when(connection.prepareStatement(anyString())).thenThrow(test_sql_exception);
this.buildingDao.addBuilding(new Building());
assertTrue(errContent.toString().contains("java.sql.SQLException"));
}
@Test
void test_add_Building_When_Result_Is_True() throws SQLException {
int id = 305;
String name = "PengGe";
String description = "PGNB";
String price = "999";
Building building = new Building();
building.setId(id);
building.setName(name);
building.setDescription(description);
building.setPrice(price);
when(connection.prepareStatement(anyString())).thenReturn(preparedStatement);
when(preparedStatement.executeUpdate()).thenReturn(1);
boolean result = this.buildingDao.addBuilding(building);
assertEquals(result, true);
verify(preparedStatement).setString(1, name);
verify(preparedStatement).setString(2, description);
verify(preparedStatement).setString(3, price);
}
@Test
void test_add_Building_When_Result_Is_False() throws SQLException {
int id = 305;
String name = "PengGe";
String description = "PGNB";
String price = "999";
Building building = new Building();
building.setId(id);
building.setName(name);
building.setDescription(description);
building.setPrice(price);
when(connection.prepareStatement(anyString())).thenReturn(preparedStatement);
when(preparedStatement.executeUpdate()).thenReturn(0);
boolean result = this.buildingDao.addBuilding(building);
assertEquals(result, false);
verify(preparedStatement).setString(1, name);
verify(preparedStatement).setString(2, description);
verify(preparedStatement).setString(3, price);
}
@Test
void test_delete_Building_When_Result_Is_True() throws SQLException {
when(connection.prepareStatement(anyString())).thenReturn(preparedStatement);
when(preparedStatement.executeUpdate()).thenReturn(1);
boolean result = this.buildingDao.deleteBuilding(5);
assertEquals(result, true);
verify(preparedStatement).setInt(1, 5);
}
@Test
void test_delete_Building_When_Result_Is_False() throws SQLException {
when(connection.prepareStatement(anyString())).thenReturn(preparedStatement);
when(preparedStatement.executeUpdate()).thenReturn(0);
boolean result = this.buildingDao.deleteBuilding(5);
assertEquals(result, false);
verify(preparedStatement).setInt(1, 5);
}
@Test
void test_throws_sql_exception_when_deleteBuilding() throws SQLException {
when(connection.prepareStatement(anyString())).thenThrow(test_sql_exception);
this.buildingDao.deleteBuilding(5);
assertTrue(errContent.toString().contains("java.sql.SQLException"));
}
@Test
void test_throws_sql_exception_when_updateBuilding() throws SQLException {
when(connection.prepareStatement(anyString())).thenThrow(test_sql_exception);
this.buildingDao.updateBuilding(new Building());
assertTrue(errContent.toString().contains("java.sql.SQLException"));
}
@Test
void test_update_Building_When_Result_Is_True() throws SQLException {
int id = 305;
String name = "PengGe";
String description = "PGNB";
String price = "999";
Building building = new Building();
building.setId(id);
building.setName(name);
building.setDescription(description);
building.setPrice(price);
when(connection.prepareStatement(anyString())).thenReturn(preparedStatement);
when(preparedStatement.executeUpdate()).thenReturn(1);
boolean result = this.buildingDao.updateBuilding(building);
assertEquals(result, true);
verify(preparedStatement).setString(1, name);
verify(preparedStatement).setString(2, description);
verify(preparedStatement).setString(3, price);
verify(preparedStatement).setInt(4, id);
}
@Test
void test_update_Building_When_Result_Is_False() throws SQLException {
int id = 305;
String name = "PengGe";
String description = "PGNB";
String price = "999";
Building building = new Building();
building.setId(id);
building.setName(name);
building.setDescription(description);
building.setPrice(price);
when(connection.prepareStatement(anyString())).thenReturn(preparedStatement);
when(preparedStatement.executeUpdate()).thenReturn(0);
boolean result = this.buildingDao.updateBuilding(building);
assertEquals(result, false);
verify(preparedStatement).setString(1, name);
verify(preparedStatement).setString(2, description);
verify(preparedStatement).setString(3, price);
verify(preparedStatement).setInt(4, id);
}
} |
from tkinter import (
Tk,
Frame,
Label,
Button,
PhotoImage,
StringVar,
LEFT
)
from tkinter.ttk import (
Separator,
Treeview,
Style
)
from models.user_interface.log_into import LogInto
from tkinter.messagebox import showinfo
from models.effects.animations import Animations
from models.sqlite.validator import Validator
from models.user_interface.add_new_profile import NewProfile
from models.sqlite.database import DataBase
class WelcomeScreen:
    """Displays the program's initial (welcome) window."""

    def __init__(self) -> None:
        """Class constructor. Configures the main window (__main_window) and draws every widget on it."""
        self.__main_window: Tk = Tk()
        self.__main_window.title("SinglePass")
        self.__background_color = StringVar(value="#D7D7D7")
        self.__main_window.configure(background=self.__background_color.get())
        self.__main_window.iconbitmap("icons/key_icon_2.ico")
        self.__main_window.resizable(width=0, height=0)
        # Size the window to half the screen in each dimension and center it.
        self.__main_window.geometry(newGeometry="%dx%d+%d+%d" % (
            self.__main_window.winfo_screenwidth() / 2,
            self.__main_window.winfo_screenheight() / 2,
            self.__main_window.winfo_screenwidth() / 2 - (self.__main_window.winfo_screenwidth() / 2) / 2,
            self.__main_window.winfo_screenheight() / 2 - (self.__main_window.winfo_screenheight() / 2 + 100) / 2
        ))
        # X axis of the buttons placed in the second frame (bottom_frame):
        self.__X_axis = 100
        # Dynamic variables:
        self.__automatic_writing: StringVar = StringVar()
        # Images used throughout the program:
        self.__one_key: PhotoImage = PhotoImage(file="images/keys_02_72px.png")
        self.__new_profile: PhotoImage = PhotoImage(file="images/new_profile_02_32px.png")
        self.__remove_profile: PhotoImage = PhotoImage(file="images/remove_profile_02_32px.png")
        self.__exit: PhotoImage = PhotoImage(file="images/exit_01.png")
        self.__to_entry = PhotoImage(file="images/entry_01_32px.png")
        # Call every method responsible for drawing the required widgets:
        self.__make_frames()
        self.__make_separators()
        self.__make_labels()
        self.__make_treeview()
        self.__make_buttons()
        # Start the screen animations:
        self.__effects()

    def __make_frames(self) -> None:
        """Draws the frames."""
        self.__top_frame: Frame = Frame(master=self.__main_window)
        self.__top_frame.configure(relief="sunken", borderwidth=1, width=663, height=170)  # flat, groove, raised, ridge, solid, or sunken
        self.__top_frame.grid(row=0, column=0, padx=10, pady=10)
        self.__bottom_frame: Frame = Frame(master=self.__main_window)
        self.__bottom_frame.configure(relief="sunken", borderwidth=1, width=663, height=160)
        self.__bottom_frame.grid(row=1, column=0, padx=10, pady=5)

    def __make_labels(self) -> None:
        """Draws the labels."""
        Label(master=self.__top_frame, image=self.__one_key).place(x=280, y=20)
        Label(master=self.__top_frame, text="Seja bem-vindo ao SinglePass", font=("Corbel Bold", 15, "underline"), foreground="#DAA520").place(x=190, y=100)
        # The animated label's x position depends on which welcome text will be
        # typed (see __effects): the no-profile message is longer, so it starts
        # further left.
        if not Validator().check_username():
            self.__animated_label: Label = Label(master=self.__top_frame)
            self.__animated_label.configure(font=("Ink Free", 15), textvariable=self.__automatic_writing, foreground="#000080")
            self.__animated_label.place(x=6, y=130)
        else:
            self.__animated_label: Label = Label(master=self.__top_frame)
            self.__animated_label.configure(font=("Ink Free", 15), textvariable=self.__automatic_writing, foreground="#000080")
            self.__animated_label.place(x=25, y=130)
        self.__created_by: Label = Label(master=self.__main_window, text="Criado por: <NAME>", foreground="#898989", font=("Segoe UI Black", 10), background=self.__background_color.get())
        self.__created_by.place(x=7, y=356)
        # "Github" behaves like a hyperlink: hover changes the cursor, click redirects.
        self.__github: Label = Label(master=self.__main_window, text="Github", foreground="#898989", font=("Segoe UI Black", 10), background=self.__background_color.get())
        self.__github.bind("<Enter>", lambda event: Animations.change_cursor(self.__github, 1))
        self.__github.bind("<Leave>", lambda event: Animations.change_cursor(self.__github, 0))
        self.__github.bind("<Button-1>", Animations.redirect)
        self.__github.place(x=625, y=356)

    def __make_buttons(self) -> None:
        """Draws the buttons. 'Remover Perfil' and 'Entrar' are disabled while no profile exists."""
        Button(master=self.__bottom_frame, image=self.__new_profile, text="Criar Perfil", font=("Ink Free", 15, "bold"), compound=LEFT, width=170, cursor="hand1", command=self.__add_new_profile).place(x=self.__X_axis - 35, y=5)
        if Validator().check_username():
            Button(master=self.__bottom_frame, image=self.__remove_profile, text="Remover Perfil", font=("Ink Free", 15, "bold"), compound=LEFT, width=190, cursor="hand1", command=self.__delete_profile).place(x=self.__X_axis - 45, y=55)
        else:
            Button(master=self.__bottom_frame, image=self.__remove_profile, text="Remover Perfil", font=("Ink Free", 15, "bold"), compound=LEFT, width=190, state="disabled", cursor="hand1").place(x=self.__X_axis - 45, y=55)
        # NOTE(review): this Button is created but never placed/gridded, so it is
        # never shown — looks like dead code; confirm and remove.
        Button(master=self.__bottom_frame)
        if Validator().check_username():
            Button(master=self.__bottom_frame, text="Entrar", image=self.__to_entry, compound=LEFT, font=("Ink Free", 14, "bold"), cursor="hand1", command=self.__login_into).place(x=45, y=106)
        else:
            Button(master=self.__bottom_frame, text="Entrar", image=self.__to_entry, compound=LEFT, font=("Ink Free", 14, "bold"), cursor="hand1", state="disabled").place(x=45, y=106)
        Button(master=self.__bottom_frame, image=self.__exit, text="Sair", font=("Ink Free", 15, "bold"), compound=LEFT, width=100, command=lambda: exit(0), cursor="hand1").place(x=self.__X_axis + 55, y=105)

    def __make_separators(self) -> None:
        """Draws the separators."""
        Separator(self.__bottom_frame, orient="vertical").place(x=320, y=0, height=160)

    def __make_treeview(self) -> None:
        """Draws the information tree listing the registered profiles."""
        style: Style = Style()
        style.theme_use("clam")
        style.configure('Treeview.Heading', font=("Courier New", 13, "bold"), foreground="#363636")
        style.configure('Treeview', font=("Courier New", 14))
        self.__registered_users: Treeview = Treeview(master=self.__bottom_frame, columns=(1,), show="headings")
        self.__registered_users.heading("#1", text="Perfis Registrados")
        self.__registered_users.column("#1", anchor="center")
        # One row per record currently stored in the database.
        for record in DataBase().all_records():
            self.__registered_users.insert("", "end", values=[record])
        self.__registered_users.place(x=325, y=0, width=335, height=158)

    def __effects(self) -> None:
        """Starts every effect shown on the initial screen (the typewriter animation)."""
        if not Validator().check_username():
            text = "Antes de armazenar as suas senhas, crie um perfil para ter mais segurança"
            Animations(self.__main_window, self.__automatic_writing, self.__github, text).automatic_writing()
        else:
            text = "O gerenciador de senhas feito para quem quer segurança e simplicidade"
            Animations(self.__main_window, self.__automatic_writing, self.__github, text).automatic_writing()

    def __add_new_profile(self) -> None:
        """Opens the new-profile screen (the welcome window is destroyed first)."""
        self.__main_window.destroy()
        NewProfile().run()

    def __delete_profile(self) -> None:
        """Deletes a user from the 'users' table and from the information tree on the initial screen."""
        try:
            DataBase().delete_records(self.__registered_users.item(self.__registered_users.selection(), "values")[0].lower())
            self.__registered_users.delete(self.__registered_users.selection())
            # Redraw the buttons so their enabled/disabled state reflects the removal.
            self.__make_buttons()
        except IndexError:
            # No row was selected in the treeview.
            showinfo("Aviso", "Antes de remover um perfil, você deve selecioná-lo no campo 'perfis registrados'")

    def __login_into(self) -> None:
        """Opens the login screen for the selected profile."""
        try:
            # Accessing [0] raises IndexError when nothing is selected; this is
            # checked before the login screen is opened.
            # noinspection PyStatementEffect
            self.__registered_users.item(self.__registered_users.selection(), "values")[0]
            LogInto(selected_user=self.__registered_users.item(self.__registered_users.selection(), "values")[0], root=self.__main_window).execute()
        except IndexError:
            showinfo("Aviso", "Antes de entrar em um perfil, você deve selecioná-lo no campo 'perfis registrados'")

    def run(self) -> None:
        """Starts the main window's event loop."""
        self.__main_window.mainloop()
|
<filename>src/Domotica/Constants.hpp
#pragma once
#include <ESP8266WiFi.h>
namespace DomoticaInternals {
// Firmware / library version strings.
const char DOMOTICA_VERSION[] = "1.0.0";
const char DOMOTICA_ESP8266_VERSION[] = "1.0.0";

// Maximum backoff steps for Wi-Fi reconnect attempts — TODO confirm units.
const uint8_t WIFI_RECONNECT_MAX_BACKOFF = 6;

// Defaults used when the device has no stored configuration.
const char DEFAULT_WIFI_SSID[] = "Domotica";
const char DEFAULT_WIFI_PASSWORD[] = "<PASSWORD>";
const char DEFAULT_HTTP_HOST[] = "sksmarthome.local";
const char DEFAULT_HTTP_PATH[] = "/esp/config.json";
const IPAddress ACCESS_POINT_IP(192, 168, 123, 1);
const char DEFAULT_MQTT_HOST[] = "sksmarthome.local";
const uint16_t DEFAULT_MQTT_PORT = 1883;
const char DEFAULT_MQTT_BASE_TOPIC[] = "domotica/";

// Factory-reset trigger: DEFAULT_RESET_PIN held at DEFAULT_RESET_STATE for
// DEFAULT_RESET_TIME (presumably milliseconds — confirm against the reader).
const uint8_t DEFAULT_RESET_PIN = 0; // == D3 on nodeMCU
const uint8_t DEFAULT_RESET_STATE = LOW;
const uint16_t DEFAULT_RESET_TIME = 5 * 1000;

const char DEFAULT_BRAND[] = "Domotica";

// Periodic intervals (the *1000 factors suggest milliseconds — confirm).
const uint16_t CONFIG_SCAN_INTERVAL = 20 * 1000;
const uint32_t STATS_SEND_INTERVAL = 1 * 60 * 1000;
const uint16_t MQTT_RECONNECT_INITIAL_INTERVAL = 1000;
const uint8_t MQTT_RECONNECT_MAX_BACKOFF = 6;

// Status-LED blink delays (units defined by the LED driver — confirm).
const float LED_WIFI_DELAY = 1;
const float LED_MQTT_DELAY = 0.2;
// BUG FIX: a GPIO pin number is an integer, not a float; the original
// `const float LED_DEBUG_PIN` only worked via implicit conversion.
// Declared uint8_t like the other pin constants.
const uint8_t LED_DEBUG_PIN = LED_BUILTIN;

// Filesystem paths.
const char CONFIG_UI_BUNDLE_PATH[] = "/domotica/ui_bundle.gz";
// const char CONFIG_NEXT_BOOT_MODE_FILE_PATH[] = "/domotica/NEXTMODE";
const char CONFIG_FILE_PATH[] = "/domotica/config.json";
}
|
<reponame>phetsims/dot
// Copyright 2013-2021, University of Colorado Boulder
/**
* Utility functions for Dot, placed into the dot.X namespace.
*
* @author <NAME> <<EMAIL>>
*/
import Vector2 from './Vector2.js';
import dot from './dot.js';
// constants
const EPSILON = Number.MIN_VALUE; // smallest positive double; NOTE(review): far smaller than Number.EPSILON — confirm intended
const TWO_PI = 2 * Math.PI;

// "static" variables used in boxMullerTransform
let generate;
let z0;
let z1;
const Utils = {
/**
* Returns the original value if it is inclusively within the [max,min] range. If it's below the range, min is
* returned, and if it's above the range, max is returned.
* @public
*
* @param {number} value
* @param {number} min
* @param {number} max
* @returns {number}
*/
clamp( value, min, max ) {
if ( value < min ) {
return min;
}
else if ( value > max ) {
return max;
}
else {
return value;
}
},
/**
* Returns a number in the range $n\in[\mathrm{min},\mathrm{max})$ with the same equivalence class as the input
* value mod (max-min), i.e. for a value $m$, $m\equiv n\ (\mathrm{mod}\ \mathrm{max}-\mathrm{min})$.
* @public
*
* The 'down' indicates that if the value is equal to min or max, the max is returned.
*
* @param {number} value
* @param {number} min
* @param {number} max
* @returns {number}
*/
moduloBetweenDown( value, min, max ) {
assert && assert( max > min, 'max > min required for moduloBetween' );
const divisor = max - min;
// get a partial result of value-min between [0,divisor)
let partial = ( value - min ) % divisor;
if ( partial < 0 ) {
// since if value-min < 0, the remainder will give us a negative number
partial += divisor;
}
return partial + min; // add back in the minimum value
},
/**
 * Returns a number in the range $n\in(\mathrm{min},\mathrm{max}]$ with the same equivalence class as the input
 * value mod (max-min), i.e. for a value $m$, $m\equiv n\ (\mathrm{mod}\ \mathrm{max}-\mathrm{min})$.
 * @public
 *
 * The 'up' indicates the half-open range is closed at the top: if the value is
 * equal to min or max, the max is returned. (The original comment said "min",
 * which contradicts the computation below — e.g. up(0, 0, 5) yields 5.)
 *
 * @param {number} value
 * @param {number} min
 * @param {number} max
 * @returns {number}
 */
moduloBetweenUp( value, min, max ) {
  // Negating the value and the (swapped) bounds converts the [min,max)
  // behavior of moduloBetweenDown into the desired (min,max] behavior.
  return -Utils.moduloBetweenDown( -value, -max, -min );
},
/**
* Returns an array of integers from A to B (inclusive), e.g. rangeInclusive( 4, 7 ) maps to [ 4, 5, 6, 7 ].
* @public
*
* @param {number} a
* @param {number} b
* @returns {Array.<number>}
*/
rangeInclusive( a, b ) {
if ( b < a ) {
return [];
}
const result = new Array( b - a + 1 );
for ( let i = a; i <= b; i++ ) {
result[ i - a ] = i;
}
return result;
},
/**
 * Returns an array of integers from A to B (exclusive), e.g. rangeExclusive( 4, 7 ) maps to [ 5, 6 ].
 * @public
 *
 * @param {number} a
 * @param {number} b
 * @returns {Array.<number>}
 */
rangeExclusive( a, b ) {
  // Exclusive endpoints: shrink both ends by one and delegate to rangeInclusive.
  return Utils.rangeInclusive( a + 1, b - 1 );
},
/**
 * Converts degrees to radians.
 * @public
 *
 * @param {number} degrees
 * @returns {number}
 */
toRadians( degrees ) {
  return Math.PI * degrees / 180;
},
/**
 * Converts radians to degrees.
 * @public
 *
 * @param {number} radians
 * @returns {number}
 */
toDegrees( radians ) {
  return 180 * radians / Math.PI;
},
/**
* Workaround for broken modulo operator.
* E.g. on iOS9, 1e10 % 1e10 -> 2.65249474e-315
* See https://github.com/phetsims/dot/issues/75
* @param {number} a
* @param {number} b
* @returns {number}
*/
mod( a, b ) {
if ( a / b % 1 === 0 ) {
return 0; // a is a multiple of b
}
else {
return a % b;
}
},
/**
* Greatest Common Divisor, using https://en.wikipedia.org/wiki/Euclidean_algorithm. See
* https://en.wikipedia.org/wiki/Greatest_common_divisor
* @public
*
* @param {number} a
* @param {number} b
* @returns {number}
*/
gcd( a, b ) {
return Math.abs( b === 0 ? a : this.gcd( b, Utils.mod( a, b ) ) );
},
/**
 * Least Common Multiple, https://en.wikipedia.org/wiki/Least_common_multiple
 * @public
 *
 * @param {number} a
 * @param {number} b
 * @returns {number} lcm, an integer
 */
lcm( a, b ) {
  // |a*b| / gcd(a,b), rounded via Utils.roundSymmetric (defined elsewhere in
  // this file) so the documented integer result holds despite float division.
  return Utils.roundSymmetric( Math.abs( a * b ) / Utils.gcd( a, b ) );
},
/**
* Intersection point between the lines defined by the line segments p1-2 and p3-p4. If the
* lines are not properly defined, null is returned. If there are no intersections or infinitely many,
* e.g. parallel lines, null is returned.
* @public
*
* @param {Vector2} p1
* @param {Vector2} p2
* @param {Vector2} p3
* @param {Vector2} p4
* @returns {Vector2|null}
*/
lineLineIntersection( p1, p2, p3, p4 ) {
const epsilon = 1e-10;
// If the endpoints are the same, they don't properly define a line
if ( p1.equals( p2 ) || p3.equals( p4 ) ) {
return null;
}
// Taken from an answer in
// http://stackoverflow.com/questions/385305/efficient-maths-algorithm-to-calculate-intersections
const x12 = p1.x - p2.x;
const x34 = p3.x - p4.x;
const y12 = p1.y - p2.y;
const y34 = p3.y - p4.y;
const denom = x12 * y34 - y12 * x34;
// If the denominator is 0, lines are parallel or coincident
if ( Math.abs( denom ) < epsilon ) {
return null;
}
// define intersection using determinants, see https://en.wikipedia.org/wiki/Line%E2%80%93line_intersection
const a = p1.x * p2.y - p1.y * p2.x;
const b = p3.x * p4.y - p3.y * p4.x;
return new Vector2(
( a * x34 - x12 * b ) / denom,
( a * y34 - y12 * b ) / denom
);
},
/**
* Returns the center of a circle that will lie on 3 points (if it exists), otherwise null (if collinear).
* @public
*
* @param {Vector2} p1
* @param {Vector2} p2
* @param {Vector2} p3
* @returns {Vector2|null}
*/
circleCenterFromPoints( p1, p2, p3 ) {
// TODO: Can we make scratch vectors here, avoiding the circular reference?
// midpoints between p1-p2 and p2-p3
const p12 = new Vector2( ( p1.x + p2.x ) / 2, ( p1.y + p2.y ) / 2 );
const p23 = new Vector2( ( p2.x + p3.x ) / 2, ( p2.y + p3.y ) / 2 );
// perpendicular points from the minpoints
const p12x = new Vector2( p12.x + ( p2.y - p1.y ), p12.y - ( p2.x - p1.x ) );
const p23x = new Vector2( p23.x + ( p3.y - p2.y ), p23.y - ( p3.x - p2.x ) );
return Utils.lineLineIntersection( p12, p12x, p23, p23x );
},
/**
 * Returns whether the point p is inside the circle defined by the other three points (p1, p2, p3).
 * @public
 *
 * NOTE: p1,p2,p3 should be specified in a counterclockwise (mathematically) order, and thus should have a positive
 * signed area.
 *
 * See notes in https://en.wikipedia.org/wiki/Delaunay_triangulation.
 *
 * @param {Vector2} p1
 * @param {Vector2} p2
 * @param {Vector2} p3
 * @param {Vector2} p
 * @returns {boolean}
 */
pointInCircleFromPoints( p1, p2, p3, p ) {
  assert && assert( Utils.triangleAreaSigned( p1, p2, p3 ) > 0,
    'Defined points should be in a counterclockwise order' );

  // In-circle test: rows are (x_i - px, y_i - py, squared distance from p to p_i);
  // the sign of the 3x3 determinant decides containment.
  const m00 = p1.x - p.x;
  const m01 = p1.y - p.y;
  const m02 = ( p1.x - p.x ) * ( p1.x - p.x ) + ( p1.y - p.y ) * ( p1.y - p.y );
  const m10 = p2.x - p.x;
  const m11 = p2.y - p.y;
  const m12 = ( p2.x - p.x ) * ( p2.x - p.x ) + ( p2.y - p.y ) * ( p2.y - p.y );
  const m20 = p3.x - p.x;
  const m21 = p3.y - p.y;
  const m22 = ( p3.x - p.x ) * ( p3.x - p.x ) + ( p3.y - p.y ) * ( p3.y - p.y );

  // Full expansion of the determinant; positive means p lies strictly inside
  // the circumcircle (given the counterclockwise precondition above).
  const determinant = m00 * m11 * m22 + m01 * m12 * m20 + m02 * m10 * m21 - m02 * m11 * m20 - m01 * m10 * m22 - m00 * m12 * m21;
  return determinant > 0;
},
/**
 * Ray-sphere intersection, returning information about the closest intersection. Assumes the sphere is centered
 * at the origin (for ease of computation), transform the ray to compensate if needed.
 * @public
 *
 * If there is no intersection, null is returned. Otherwise an object will be returned like:
 * <pre class="brush: js">
 * {
 *   distance: {number}, // distance from the ray position to the intersection
 *   hitPoint: {Vector3}, // location of the intersection
 *   normal: {Vector3}, // the normal of the sphere's surface at the intersection
 *   fromOutside: {boolean}, // whether the ray intersected the sphere from outside the sphere first
 * }
 * </pre>
 *
 * @param {number} radius
 * @param {Ray3} ray
 * @param {number} epsilon
 * @returns {Object}
 */
// assumes a sphere with the specified radius, centered at the origin
sphereRayIntersection( radius, ray, epsilon ) {
  epsilon = epsilon === undefined ? 1e-5 : epsilon;

  // center is the origin for now, but leaving in computations so that we can change that in the future. optimize away if needed
  const center = new dot.Vector3( 0, 0, 0 );

  const rayDir = ray.direction;
  const pos = ray.position;
  const centerToRay = pos.minus( center );

  // basically, we can use the quadratic equation to solve for both possible hit points (both +- roots are the hit points)
  const tmp = rayDir.dot( centerToRay );
  const centerToRayDistSq = centerToRay.magnitudeSquared;
  const det = 4 * tmp * tmp - 4 * ( centerToRayDistSq - radius * radius );

  // NOTE(review): comparing the discriminant against epsilon (not 0) also
  // treats grazing/tangent rays as misses — confirm that is intended.
  if ( det < epsilon ) {
    // ray misses sphere entirely
    return null;
  }

  const base = rayDir.dot( center ) - rayDir.dot( pos );
  const sqt = Math.sqrt( det ) / 2;

  // the "first" entry point distance into the sphere. if we are inside the sphere, it is behind us
  const ta = base - sqt;

  // the "second" entry point distance
  const tb = base + sqt;

  if ( tb < epsilon ) {
    // sphere is behind ray, so don't return an intersection
    return null;
  }

  const hitPositionB = ray.pointAtDistance( tb );
  const normalB = hitPositionB.minus( center ).normalized();

  if ( ta < epsilon ) {
    // we are inside the sphere
    // in => out; the surface normal is negated to face the ray origin
    return {
      distance: tb,
      hitPoint: hitPositionB,
      normal: normalB.negated(),
      fromOutside: false
    };
  }
  else {
    // two possible hits
    const hitPositionA = ray.pointAtDistance( ta );
    const normalA = hitPositionA.minus( center ).normalized();

    // close hit, we have out => in
    return {
      distance: ta,
      hitPoint: hitPositionA,
      normal: normalA,
      fromOutside: true
    };
  }
},
/**
* Returns an array of the real roots of the quadratic equation $ax + b=0$, or null if every value is a solution.
* @public
*
* @param {number} a
* @param {number} b
* @returns {Array.<number>|null} - The real roots of the equation, or null if all values are roots. If the root has
* a multiplicity larger than 1, it will be repeated that many times.
*/
solveLinearRootsReal( a, b ) {
if ( a === 0 ) {
if ( b === 0 ) {
return null;
}
else {
return [];
}
}
else {
return [ -b / a ];
}
},
/**
* Returns an array of the real roots of the quadratic equation $ax^2 + bx + c=0$, or null if every value is a
* solution. If a is nonzero, there should be between 0 and 2 (inclusive) values returned.
* @public
*
* @param {number} a
* @param {number} b
* @param {number} c
* @returns {Array.<number>|null} - The real roots of the equation, or null if all values are roots. If the root has
* a multiplicity larger than 1, it will be repeated that many times.
*/
solveQuadraticRootsReal( a, b, c ) {
// Check for a degenerate case where we don't have a quadratic, or if the order of magnitude is such where the
// linear solution would be expected
const epsilon = 1E7;
if ( a === 0 || Math.abs( b / a ) > epsilon || Math.abs( c / a ) > epsilon ) {
return Utils.solveLinearRootsReal( b, c );
}
const discriminant = b * b - 4 * a * c;
if ( discriminant < 0 ) {
return [];
}
const sqrt = Math.sqrt( discriminant );
// TODO: how to handle if discriminant is 0? give unique root or double it?
// TODO: probably just use Complex for the future
return [
( -b - sqrt ) / ( 2 * a ),
( -b + sqrt ) / ( 2 * a )
];
},
/**
* Returns an array of the real roots of the cubic equation $ax^3 + bx^2 + cx + d=0$, or null if every value is a
* solution. If a is nonzero, there should be between 0 and 3 (inclusive) values returned.
* @public
*
* @param {number} a
* @param {number} b
* @param {number} c
* @param {number} d
* @param {number} [discriminantThreshold] - for determining whether we have a single real root
* @returns {Array.<number>|null} - The real roots of the equation, or null if all values are roots. If the root has
* a multiplicity larger than 1, it will be repeated that many times.
*/
solveCubicRootsReal( a, b, c, d, discriminantThreshold = 1e-7 ) {
  let roots;
  // TODO: a Complex type!
  // Check for a degenerate case where we don't have a cubic
  if ( a === 0 ) {
    roots = Utils.solveQuadraticRootsReal( b, c, d );
  }
  else {
    // We need to test whether a is several orders of magnitude less than b, c, d
    const epsilon = 1E7;
    // NOTE(review): the a === 0 test below is unreachable (already handled above).
    if ( a === 0 || Math.abs( b / a ) > epsilon || Math.abs( c / a ) > epsilon || Math.abs( d / a ) > epsilon ) {
      roots = Utils.solveQuadraticRootsReal( b, c, d );
    }
    else {
      // Constant term zero (or negligible next to the others): x = 0 is a root
      // and the remaining roots come from the quadratic ax^2 + bx + c.
      if ( d === 0 || Math.abs( a / d ) > epsilon || Math.abs( b / d ) > epsilon || Math.abs( c / d ) > epsilon ) {
        roots = [ 0 ].concat( Utils.solveQuadraticRootsReal( a, b, c ) );
      }
      else {
        // Normalize to a monic cubic x^3 + bx^2 + cx + d.
        b /= a;
        c /= a;
        d /= a;
        // Cardano's method: depressed-cubic quantities q, r and the discriminant.
        const q = ( 3.0 * c - ( b * b ) ) / 9;
        const r = ( -( 27 * d ) + b * ( 9 * c - 2 * ( b * b ) ) ) / 54;
        const discriminant = q * q * q + r * r;
        const b3 = b / 3;
        if ( discriminant > discriminantThreshold ) {
          // a single real root
          const dsqrt = Math.sqrt( discriminant );
          roots = [ Utils.cubeRoot( r + dsqrt ) + Utils.cubeRoot( r - dsqrt ) - b3 ];
        }
        else if ( discriminant > -discriminantThreshold ) { // would truly be discriminant==0, but floating-point error
          // contains a double root (but with three roots)
          const rsqrt = Utils.cubeRoot( r );
          const doubleRoot = -b3 - rsqrt;
          roots = [ -b3 + 2 * rsqrt, doubleRoot, doubleRoot ];
        }
        else {
          // all unique (three roots) -- trigonometric form of Cardano's method
          let qX = -q * q * q;
          qX = Math.acos( r / Math.sqrt( qX ) );
          const rr = 2 * Math.sqrt( -q );
          roots = [
            -b3 + rr * Math.cos( qX / 3 ),
            -b3 + rr * Math.cos( ( qX + 2 * Math.PI ) / 3 ),
            -b3 + rr * Math.cos( ( qX + 4 * Math.PI ) / 3 )
          ];
        }
      }
    }
  }
  assert && roots && roots.forEach( root => assert( isFinite( root ), 'All returned solveCubicRootsReal roots should be finite' ) );
  return roots;
},
/**
* Returns the unique real cube root of x, such that $y^3=x$.
* @public
*
* @param {number} x
* @returns {number}
*/
cubeRoot( x ) {
return x >= 0 ? Math.pow( x, 1 / 3 ) : -Math.pow( -x, 1 / 3 );
},
/**
* Defines and evaluates a linear mapping. The mapping is defined so that $f(a_1)=b_1$ and $f(a_2)=b_2$, and other
* values are interpolated along the linear equation. The returned value is $f(a_3)$.
* @public
*
* @param {number} a1
* @param {number} a2
* @param {number} b1
* @param {number} b2
* @param {number} a3
* @returns {number}
*/
linear( a1, a2, b1, b2, a3 ) {
assert && assert( typeof a3 === 'number', 'linear requires a number to evaluate' );
return ( b2 - b1 ) / ( a2 - a1 ) * ( a3 - a1 ) + b1;
},
/**
* Rounds using "Round half away from zero" algorithm. See dot#35.
* @public
*
* JavaScript's Math.round is not symmetric for positive and negative numbers, it uses IEEE 754 "Round half up".
* See https://en.wikipedia.org/wiki/Rounding#Round_half_up.
* For sims, we want to treat positive and negative values symmetrically, which is IEEE 754 "Round half away from zero",
* See https://en.wikipedia.org/wiki/Rounding#Round_half_away_from_zero
*
* Note that -0 is rounded to 0, since we typically do not want to display -0 in sims.
*
* @param {number} value
* @returns {number}
*/
roundSymmetric( value ) {
return ( ( value < 0 ) ? -1 : 1 ) * Math.round( Math.abs( value ) ); // eslint-disable-line bad-sim-text
},
/**
* A predictable implementation of toFixed.
* @public
*
* JavaScript's toFixed is notoriously buggy, behavior differs depending on browser,
* because the spec doesn't specify whether to round or floor.
* Rounding is symmetric for positive and negative values, see Utils.roundSymmetric.
*
* @param {number} value
* @param {number} decimalPlaces
* @returns {string}
*/
toFixed( value, decimalPlaces ) {
const multiplier = Math.pow( 10, decimalPlaces );
const newValue = Utils.roundSymmetric( value * multiplier ) / multiplier;
return newValue.toFixed( decimalPlaces ); // eslint-disable-line bad-sim-text
},
/**
* A predictable implementation of toFixed, where the result is returned as a number instead of a string.
* @public
*
* JavaScript's toFixed is notoriously buggy, behavior differs depending on browser,
* because the spec doesn't specify whether to round or floor.
* Rounding is symmetric for positive and negative values, see Utils.roundSymmetric.
*
* @param {number} value
* @param {number} decimalPlaces
* @returns {number}
*/
toFixedNumber( value, decimalPlaces ) {
return parseFloat( Utils.toFixed( value, decimalPlaces ) );
},
/**
* Returns true if two numbers are within epsilon of each other.
*
* @param {number} a
* @param {number} b
* @param {number} epsilon
* @returns {boolean}
*/
equalsEpsilon( a, b, epsilon ) {
return Math.abs( a - b ) <= epsilon;
},
/**
* Computes the intersection of the two line segments $(x_1,y_1)(x_2,y_2)$ and $(x_3,y_3)(x_4,y_4)$. If there is no
* intersection, null is returned.
* @public
*
* @param {number} x1
* @param {number} y1
* @param {number} x2
* @param {number} y2
* @param {number} x3
* @param {number} y3
* @param {number} x4
* @param {number} y4
* @returns {Vector2|null}
*/
lineSegmentIntersection( x1, y1, x2, y2, x3, y3, x4, y4 ) {
// @private
// Determines counterclockwiseness. Positive if counterclockwise, negative if clockwise, zero if straight line
// Point1(a,b), Point2(c,d), Point3(e,f)
// See http://jeffe.cs.illinois.edu/teaching/373/notes/x05-convexhull.pdf
// @returns {number}
const ccw = ( a, b, c, d, e, f ) => ( f - b ) * ( c - a ) - ( d - b ) * ( e - a );
// Check if intersection doesn't exist. See http://jeffe.cs.illinois.edu/teaching/373/notes/x06-sweepline.pdf
// If point1 and point2 are on opposite sides of line 3 4, exactly one of the two triples 1, 3, 4 and 2, 3, 4
// is in counterclockwise order.
if ( ccw( x1, y1, x3, y3, x4, y4 ) * ccw( x2, y2, x3, y3, x4, y4 ) > 0 ||
ccw( x3, y3, x1, y1, x2, y2 ) * ccw( x4, y4, x1, y1, x2, y2 ) > 0
) {
return null;
}
const denom = ( x1 - x2 ) * ( y3 - y4 ) - ( y1 - y2 ) * ( x3 - x4 );
// If denominator is 0, the lines are parallel or coincident
if ( Math.abs( denom ) < 1e-10 ) {
return null;
}
// Check if there is an exact endpoint overlap (and then return an exact answer).
if ( ( x1 === x3 && y1 === y3 ) || ( x1 === x4 && y1 === y4 ) ) {
return new Vector2( x1, y1 );
}
else if ( ( x2 === x3 && y2 === y3 ) || ( x2 === x4 && y2 === y4 ) ) {
return new Vector2( x2, y2 );
}
// Use determinants to calculate intersection, see https://en.wikipedia.org/wiki/Line%E2%80%93line_intersection
const intersectionX = ( ( x1 * y2 - y1 * x2 ) * ( x3 - x4 ) - ( x1 - x2 ) * ( x3 * y4 - y3 * x4 ) ) / denom;
const intersectionY = ( ( x1 * y2 - y1 * x2 ) * ( y3 - y4 ) - ( y1 - y2 ) * ( x3 * y4 - y3 * x4 ) ) / denom;
return new Vector2( intersectionX, intersectionY );
},
/**
* Squared distance from a point to a line segment.
* See http://stackoverflow.com/questions/849211/shortest-distance-between-a-point-and-a-line-segment
* @public
*
* @param {Vector2} point - The point
* @param {Vector2} a - Starting point of the line segment
* @param {Vector2} b - Ending point of the line segment
* @returns {number}
*/
distToSegmentSquared( point, a, b ) {
// the square of the distance between a and b,
const segmentSquaredLength = a.distanceSquared( b );
// if the segment length is zero, the a and b point are coincident. return the squared distance between a and point
if ( segmentSquaredLength === 0 ) { return point.distanceSquared( a ); }
// the t value parametrize the projection of the point onto the a b line
const t = ( ( point.x - a.x ) * ( b.x - a.x ) + ( point.y - a.y ) * ( b.y - a.y ) ) / segmentSquaredLength;
let distanceSquared;
if ( t < 0 ) {
// if t<0, the projection point is outside the ab line, beyond a
distanceSquared = point.distanceSquared( a );
}
else if ( t > 1 ) {
// if t>1, the projection past is outside the ab segment, beyond b,
distanceSquared = point.distanceSquared( b );
}
else {
// if 0<t<1, the projection point lies along the line joining a and b.
distanceSquared = point.distanceSquared( new Vector2( a.x + t * ( b.x - a.x ), a.y + t * ( b.y - a.y ) ) );
}
return distanceSquared;
},
/**
* Distance from a point to a line segment (not squared; see distToSegmentSquared).
* @public
*
* @param {Vector2} point - The point
* @param {Vector2} a - Starting point of the line segment
* @param {Vector2} b - Ending point of the line segment
* @returns {number}
*/
distToSegment( point, a, b ) {
return Math.sqrt( this.distToSegmentSquared( point, a, b ) );
},
/**
* Determines whether the three points are approximately collinear.
* @public
*
* @param {Vector2} a
* @param {Vector2} b
* @param {Vector2} c
* @param {number} epsilon
* @returns {boolean}
*/
arePointsCollinear( a, b, c, epsilon ) {
if ( epsilon === undefined ) {
epsilon = 0;
}
return Utils.triangleArea( a, b, c ) <= epsilon;
},
/**
* The area inside the triangle defined by the three vertices.
* @public
*
* @param {Vector2} a
* @param {Vector2} b
* @param {Vector2} c
* @returns {number}
*/
triangleArea( a, b, c ) {
return Math.abs( Utils.triangleAreaSigned( a, b, c ) );
},
/**
* The area inside the triangle defined by the three vertices, but with the sign determined by whether the vertices
* provided are clockwise or counter-clockwise.
* @public
*
* If the vertices are counterclockwise (in a right-handed coordinate system), then the signed area will be
* positive.
*
* @param {Vector2} a
* @param {Vector2} b
* @param {Vector2} c
* @returns {number}
*/
triangleAreaSigned( a, b, c ) {
return a.x * ( b.y - c.y ) + b.x * ( c.y - a.y ) + c.x * ( a.y - b.y );
},
/**
* Returns the centroid of the simple planar polygon using Green's Theorem P=-y/2, Q=x/2 (similar to how kite
* computes areas). See also https://en.wikipedia.org/wiki/Shoelace_formula.
* @public
*
* @param {Array.<Vector2>} vertices
* @returns {Vector2}
*/
centroidOfPolygon( vertices ) {
const centroid = new Vector2( 0, 0 );
let area = 0;
vertices.forEach( ( v0, i ) => {
const v1 = vertices[ ( i + 1 ) % vertices.length ];
const doubleShoelace = v0.x * v1.y - v1.x * v0.y;
area += doubleShoelace / 2;
// Compute the centroid of the flat intersection with https://en.wikipedia.org/wiki/Centroid#Of_a_polygon
centroid.addXY(
( v0.x + v1.x ) * doubleShoelace,
( v0.y + v1.y ) * doubleShoelace
);
} );
centroid.divideScalar( 6 * area );
return centroid;
},
/**
* Function that returns the hyperbolic cosine of a number
* @public
*
* @param {number} value
* @returns {number}
*/
cosh( value ) {
return ( Math.exp( value ) + Math.exp( -value ) ) / 2;
},
/**
* Function that returns the hyperbolic sine of a number
* @public
*
* @param {number} value
* @returns {number}
*/
sinh( value ) {
return ( Math.exp( value ) - Math.exp( -value ) ) / 2;
},
/**
* Log base-10, since it wasn't included in every supported browser.
* @public
*
* @param {number} val
* @returns {number}
*/
log10( val ) {
return Math.log( val ) / Math.LN10;
},
/**
* Generates a random Gaussian sample with the given mean and standard deviation.
* This method relies on the "static" variables generate, z0, and z1 defined above.
* Random.js is the primary client of this function, but it is defined here so it can be
* used other places more easily if need be.
* Code inspired by example here: https://en.wikipedia.org/wiki/Box%E2%80%93Muller_transform.
* @public
*
* @param {number} mu - The mean of the Gaussian
* @param {number} sigma - The standard deviation of the Gaussian
* @param {Random} random - the source of randomness
* @returns {number}
*/
boxMullerTransform( mu, sigma, random ) {
  // Box-Muller produces standard-normal samples in pairs; the module-level
  // `generate` flag toggles between computing a fresh pair and returning the
  // second sample (z1) cached by the previous call.
  generate = !generate;
  if ( !generate ) {
    // Return the cached second sample, scaled/shifted to N(mu, sigma^2).
    return z1 * sigma + mu;
  }
  let u1;
  let u2;
  // Reject u1 values at/below EPSILON so Math.log( u1 ) stays finite.
  do {
    u1 = random.nextDouble();
    u2 = random.nextDouble();
  }
  while ( u1 <= EPSILON );
  // Fresh standard-normal pair (cos/sin branches of the transform).
  z0 = Math.sqrt( -2.0 * Math.log( u1 ) ) * Math.cos( TWO_PI * u2 );
  z1 = Math.sqrt( -2.0 * Math.log( u1 ) ) * Math.sin( TWO_PI * u2 );
  return z0 * sigma + mu;
},
/**
* Determines the number of decimal places in a value.
* @public
*
* @param {number} value - a finite number, scientific notation is not supported for decimal numbers
* @returns {number}
*/
numberOfDecimalPlaces( value ) {
assert && assert( typeof value === 'number' && isFinite( value ), `value must be a finite number ${value}` );
if ( Math.floor( value ) === value ) {
return 0;
}
else {
const string = value.toString();
// Handle scientific notation
if ( string.includes( 'e' ) ) {
// e.g. '1e-21', '5.6e+34', etc.
const split = string.split( 'e' );
const mantissa = split[ 0 ]; // The left part, e.g. '1' or '5.6'
const exponent = parseInt( split[ 1 ], 10 ); // The right part, e.g. '-21' or '+34'
// How many decimal places are there in the left part
const mantissaDecimalPlaces = mantissa.includes( '.' ) ? mantissa.split( '.' )[ 1 ].length : 0;
// We adjust the number of decimal places by the exponent, e.g. '1.5e1' has zero decimal places, and
// '1.5e-2' has three.
return Math.max( mantissaDecimalPlaces - exponent, 0 );
}
else { // Handle decimal notation. Since we're not an integer, we should be guaranteed to have a decimal
return string.split( '.' )[ 1 ].length;
}
}
},
/**
* Rounds a value to a multiple of a specified interval.
* Examples:
* roundToInterval( 0.567, 0.01 ) -> 0.57
* roundToInterval( 0.567, 0.02 ) -> 0.56
* roundToInterval( 5.67, 0.5 ) -> 5.5
*
* @param {number} value
* @param {number} interval
* @returns {number}
*/
roundToInterval( value, interval ) {
return Utils.toFixedNumber( Utils.roundSymmetric( value / interval ) * interval,
Utils.numberOfDecimalPlaces( interval ) );
}
};
dot.register( 'Utils', Utils );
// Legacy convenience aliases: expose the most commonly used helpers directly
// on the dot namespace (for now) -- prefer Utils.* in new code.
dot.clamp = Utils.clamp;
dot.moduloBetweenDown = Utils.moduloBetweenDown;
dot.moduloBetweenUp = Utils.moduloBetweenUp;
dot.rangeInclusive = Utils.rangeInclusive;
dot.rangeExclusive = Utils.rangeExclusive;
dot.toRadians = Utils.toRadians;
dot.toDegrees = Utils.toDegrees;
dot.lineLineIntersection = Utils.lineLineIntersection;
dot.lineSegmentIntersection = Utils.lineSegmentIntersection;
dot.sphereRayIntersection = Utils.sphereRayIntersection;
dot.solveQuadraticRootsReal = Utils.solveQuadraticRootsReal;
dot.solveCubicRootsReal = Utils.solveCubicRootsReal;
dot.cubeRoot = Utils.cubeRoot;
dot.linear = Utils.linear;
dot.boxMullerTransform = Utils.boxMullerTransform;
export default Utils;
#!/bin/sh
#
# Install helper: byte-compiles the Python sources, then copies libraries,
# commands, documentation and (optionally) systemd unit files under $PREFIX.
#
# SETTINGS.BEGIN
#
# Set PREFIX if the environment did not offer it
PREFIX=${PREFIX-../PREFIX}
#
# SETTINGS.END
#
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# Work relative to this script's directory (quoted so paths with spaces survive)
#
cd "$(dirname "$0")"
# Test if there is a /lib/systemd/system directory
# (test the command's exit status directly instead of inspecting $?)
#
if stat /lib/systemd/system/ >/dev/null 2>&1
then
	HAVE_SYSTEMD=yes
else
	HAVE_SYSTEMD=no
fi
# Check settings
#
echo 'Installation settings:'
echo 'PREFIX="'"$PREFIX"'"'
case "$PREFIX" in
/*)
	;;
*)
	echo 'This prefix is relative to "'"$PWD"'"'
esac
echo
case "$HAVE_SYSTEMD" in
yes)
	echo 'I will install systemd files in /etc/systemd/system'
	;;
*)
	echo 'I will not install systemd files'
esac
echo
# printf is portable; "echo -n" behaviour is unspecified for plain /bin/sh
printf 'Is this okay? '
read -r OK
case $OK in
y*|Y*)
	;;
n*|N*)
	echo 'Please set $PREFIX or edit this script and try again'
	exit 1
	;;
*)
	echo 'Inconclusive answer. Exiting.'
	exit 1
esac
# Compile Python modules (which end in .py)
#
for src in lib/*.py ods-*/*.py
do
	base="${src%.py}"
	rm -f "$base.pyc" "$base.pyo"
	echo PYTHONPATH=lib python -m py_compile "$src"
	PYTHONPATH=lib python -m py_compile "$src"
done
# Create directories
#
mkdir -p "$PREFIX" "$PREFIX/bin" "$PREFIX/lib" "$PREFIX/doc"
# Install the rabbitdnssec.py library
#
echo cp -p lib/rabbitdnssec.py* "$PREFIX/lib"
cp -p lib/rabbitdnssec.py* "$PREFIX/lib"
# Install main-directory documentation
#
ls -1 -d README* readme* *.md *.MD *.txt *.TXT man doc/* 2>/dev/null | \
sort | uniq | \
while read -r f
do
	if [ -n "$f" ]
	then
		echo cp -pr "$f" "$PREFIX/doc"
		cp -pr "$f" "$PREFIX/doc"
	fi
done
# Install commands and libraries
#
for d in ods-*
do
	for f in "$d"/*
	do
		case "$f" in
		*.py|*.pyc|*.pyo)
			mkdir -p "$PREFIX/lib/$d"
			echo cp -p "$f" "$PREFIX/lib/$d"
			cp -p "$f" "$PREFIX/lib/$d"
			;;
		*.service|*.target)
			case "$HAVE_SYSTEMD" in
			yes)
				o=/lib/systemd/system/$(basename "$f")
				p=$(readlink -f "$PREFIX" | sed -e 's/[/]/\\\//g')
				echo sed -e "s/@PREFIX@/$p/g" \< "$f" \> "$o"
				sed -e "s/@PREFIX@/$p/g" < "$f" > "$o"
				;;
			*)
				echo "Skipping systemd file $f"
			esac
			;;
		README*|readme*|*.md|*.MD|*.txt|*.TXT)
			mkdir -p "$PREFIX/doc/$d"
			echo cp -p "$f" "$PREFIX/doc/$d"
			cp -p "$f" "$PREFIX/doc/$d"
			;;
		*)
			echo cp -p "$f" "$PREFIX/bin"
			cp -p "$f" "$PREFIX/bin"
		esac
	done
done
# Report done (and further instructions)
#
echo
echo 'Script installation done. You also need to setup RabbitMQ.'
echo
echo 'You can now setup accounts with configurations for the scripts.'
echo
echo 'Binaries need PATH="$PATH:'"$PREFIX"'/bin"'
echo 'Libraries need PYTHONPATH="$PYTHONPATH:'"$PREFIX"'/lib"'
echo
#!/bin/sh -e
# Ensure Composer is available (system package or a locally downloaded phar),
# then update the globally installed Composer packages.
SYSTEM=$(uname)
if [ "${SYSTEM}" = 'Darwin' ]; then
	COMPOSER_INSTALLED=false
else
	dpkg --list | grep --quiet 'ii  composer' && COMPOSER_INSTALLED=true || COMPOSER_INSTALLED=false
fi
if [ "${COMPOSER_INSTALLED}" = true ]; then
	COMPOSER='composer'
else
	# BUG FIX: the file-existence test must check the phar path itself, not the
	# whole "php /path/composer.phar" command string (which never names a file,
	# so the phar was re-downloaded on every run).
	COMPOSER_PHAR="${HOME}/src/php-tools/composer.phar"
	COMPOSER="php ${COMPOSER_PHAR}"
	if [ ! -f "${COMPOSER_PHAR}" ]; then
		mkdir -p "$(dirname "${COMPOSER_PHAR}")"
		wget --output-document "${COMPOSER_PHAR}" https://getcomposer.org/download/1.9.0/composer.phar
		chmod +x "${COMPOSER_PHAR}"
	fi
	${COMPOSER} selfupdate
fi
${COMPOSER} global update
<gh_stars>0
import React, {Component} from 'react';
import { connect } from 'react-redux';
import moment from 'moment';
import momentPropTypes from 'react-moment-proptypes';
import PropTypes from 'prop-types';
import { Row, Col, Container, Carousel, CarouselItem, CarouselControl, CarouselIndicators, CarouselCaption} from 'reactstrap';
import FlightForm from './FlightForm';
// import Carousel from './Carousel';
import { bindActionCreators } from 'redux';
import * as ItinerariesActionCreators from '../../actions/itineraries';
// Carousel slide content: caption headline, explanatory subcaption and image.
// BUG FIX: corrected user-visible typos ("directlu", "PAY ONE", "will after
// available seats", "sear offer").
const items = [
  {index: 0, caption: 'QUEUE TO GET LOWER PRICE', subcaption: 'With Flylist.ID we can either buy flight tickets directly or queue to get a lower price near departure time, if tickets are still available', src: '/images/queue002.jpeg'},
  {index: 1, caption: 'WATCH PROBABILITY STATUS', subcaption: 'While queueing, observe your chance to get the ticket. You might want to stop waiting and buy ticket with normal price if you have low probability', src: '/images/queue002.jpeg'},
  {index: 2, caption: 'PAY ONCE YOU GET SEAT OFFER', subcaption: 'Starting from 24-hour prior to departure, system will offer available seats to the people queueing. Once you receive seat offer, you have until one hour to pay before seat is offered to someone else', src: '/images/queue002.jpeg'}
]
class Home extends Component {
static propTypes = {
departure_date: momentPropTypes.momentObj,
arrival_date: momentPropTypes.momentObj,
origin: PropTypes.string,
destination: PropTypes.string,
originName: PropTypes.string,
destinationName: PropTypes.string,
originAirport: PropTypes.string,
destinationAirport: PropTypes.string,
adults: PropTypes.number,
children: PropTypes.number,
infants: PropTypes.number,
seat_class: PropTypes.string,
itineraries: PropTypes.array,
user: PropTypes.object,
openModal: PropTypes.func,
openRequesting: PropTypes.func,
token: PropTypes.string
}
constructor(props){
super(props);
this.state = {
requesting: false,
received: false,
seat_class: "Economy Class",
adults: 1,
}
this.state = { activeIndex: 0 };
this.next = this.next.bind(this);
this.previous = this.previous.bind(this);
this.goToIndex = this.goToIndex.bind(this);
this.onExiting = this.onExiting.bind(this);
this.onExited = this.onExited.bind(this);
}
// *** State Modifiers ***
onChangeItineraries = (itin) => {
this.setState({
itineraries: itin
});
}
onChangeRequesting = (val, params) => {
this.setState({
requesting: val
});
this.props.history.push({
pathname: '/searchresult',
state: {
origin: params.origin,
destination: params.destination,
departureDate: params.departureDate,
returnDate: params.returnDate,
seatclass: params.seatclass,
adults: params.adults,
children: params.children,
infants: params.infants,
search: true
}
});
window.scrollTo(0, 0);
}
onChangeReceived = (val) => {
this.setState({
received: val
});
}
onChangeSeatClass = (val) => {
this.setState({
seat_class: val
});
}
onChangeAdult = (val) => {
this.setState({
adults: val
});
}
onChangeChildren = (val) => {
this.setState({
children: val
});
}
onChangeInfant = (val) => {
this.setState({
infants: val
});
}
onExiting() {
this.animating = true;
}
onExited() {
this.animating = false;
}
next() {
if (this.animating) return;
const nextIndex = this.state.activeIndex === items.length - 1 ? 0 : this.state.activeIndex + 1;
this.setState({ activeIndex: nextIndex });
}
previous() {
if (this.animating) return;
const nextIndex = this.state.activeIndex === 0 ? items.length - 1 : this.state.activeIndex - 1;
this.setState({ activeIndex: nextIndex });
}
goToIndex(newIndex) {
if (this.animating) return;
this.setState({ activeIndex: newIndex });
}
// *** Render ***
render(){
const { activeIndex } = this.state;
const slides = items.map((item) => {
return (
<CarouselItem
onExiting={this.onExiting}
onExited={this.onExited}
key={item.src}
>
<img src={item.src} alt={item.altText} />
<CarouselCaption captionText={item.caption} captionHeader={item.caption} />
</CarouselItem>
);
});
return(
<Container>
<Row>
<Col md="6">
<Carousel
activeIndex={activeIndex}
next={this.next}
previous={this.previous}>
<CarouselIndicators items={items} activeIndex={activeIndex} onClickHandler={this.goToIndex} />
{slides}
<CarouselControl direction="prev" directionText="Previous" onClickHandler={this.previous} />
<CarouselControl direction="next" directionText="Next" onClickHandler={this.next} />
</Carousel>
</Col>
<Col md="6">
<FlightForm
onChangeItineraries={this.onChangeItineraries}
onChangeRequesting={this.onChangeRequesting}
onChangeReceived={this.onChangeReceived}
onChangeSeatClass={this.onChangeSeatClass}
onChangeAdult={this.onChangeAdult}
onChangeChildren={this.onChangeChildren}
onChangeInfant={this.onChangeInfant}
requesting={this.state.requesting}
received={this.state.received}
openModal={this.props.openModal}
openRequesting={this.props.openRequesting}
originName={this.props.originName}
/>
</Col>
</Row>
</Container>
);
}
};
// *** Redux State To Props ***
// Maps the relevant slices of the redux store onto Home's props.
const mapStateToProps = state => {
  const { form } = state;
  return {
    departure_date: form.departure_date,
    arrival_date: form.arrival_date,
    origin: form.origin,
    destination: form.destination,
    originName: form.originName,
    destinationName: form.destinationName,
    originAirport: form.originAirport,
    destinationAirport: form.destinationAirport,
    adults: form.adults,
    children: form.children,
    infants: form.infants,
    seat_class: form.seat_class,
    itineraries: state.itineraries,
    requesting: state.requesting
  };
};
export default connect(mapStateToProps)(Home);
|
var path = require('path');
var webpack = require('webpack');
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
var ManifestRevisionPlugin = require('manifest-revision-webpack-plugin');
//var BundleAnalyzerPlugin = require('webpack-bundle-analyzer').BundleAnalyzerPlugin;
// Production webpack build: bundles app/static/js/main.js into content-hashed
// assets under app/static/gen; ManifestRevisionPlugin records the hashed names
// in app/manifest.json so the backend can resolve asset URLs.
module.exports = {
mode: 'production',
entry: {
main: ['./app/static/js/main.js'],
},
output: {
path: path.resolve(__dirname, 'app/static/gen'),
// content-hashed filenames enable long-term browser caching
filename: '[name].[chunkhash].js',
chunkFilename: '[id].[chunkhash].js',
publicPath: '/static/gen/'
},
module: {
rules: [
// transpile first-party JS only (vendored trees are excluded)
{
test: /\.js$/,
exclude: /(node_modules|bower_components|ext)/,
use: {
loader: 'babel-loader',
}
},
// extract CSS into its own hashed file instead of inlining it in JS
{
test: /\.css$/,
use: [MiniCssExtractPlugin.loader, 'css-loader'],
},
// compile gettext .po/.pot/.mo catalogs into messageformat functions
{
test: [/\.pot?$/, /\.mo$/],
loader: require.resolve('messageformat-po-loader'),
options: {
biDiSupport: false,
defaultCharset: null,
defaultLocale: 'en',
forceContext: false,
pluralFunction: null,
verbose: false
}
},
// inline standalone SVGs (the icon sprite is handled separately)
{
test: /\.svg$/,
exclude: [/sprite\.svg/],
loader: 'svg-inline-loader'
},
// embed small fonts as data URIs, emit larger ones as files
{ test: /\.(woff|woff2|eot|ttf)$/, loader: 'url-loader?limit=100000' }
],
},
plugins: [
// new BundleAnalyzerPlugin(),
new MiniCssExtractPlugin({
filename: '[name].[chunkhash].css',
chunkFilename: '[id].[chunkhash].css'
}),
new ManifestRevisionPlugin('./app/manifest.json', {
rootAssetPath: './app/static/gen',
ignorePaths: ['/static']
})
],
// stub out Node's fs module for browser bundles (webpack 4 style)
node: {
fs: 'empty'
}
};
|
def process_card_data(cards):
    """Collect Markdown-formatted condition/ability lines from card effects.

    Args:
        cards: Iterable of card dicts, each with an ``'effects'`` list of
            dicts that may carry ``'cnds_iname'`` and/or ``'abil_iname'``.

    Returns:
        list of str: One formatted line per present field, preserving the
        card/effect order (conditions before abilities within each effect).
    """
    fields = (
        ('cnds_iname', '__**Condition(s):**__ '),
        ('abil_iname', '__**Vision Ability:**__ '),
    )
    formatted = []
    for card in cards:
        for effect in card['effects']:
            for key, label in fields:
                if key in effect:
                    formatted.append(label + effect[key])
    return formatted
<gh_stars>0
// Vue mixin that animates expand/collapse of per-demo source-code panels:
// it measures each panel's natural content height once on mount, then toggles
// the wrapping element's height between 0 and that measurement.
export default {
data() {
return {
// the wrapping ('code-content') element of each code area
codeParent:[],
// measured natural height of each code area's content
codeHeightArr:[],
// expanded/collapsed state of each code area
isShow:[],
}
},
methods:{
// Toggle area `index`: after the reactive flag flips, set the wrapper's
// height from the measured content height (+25px padding) or collapse to 0.
showCode(index) {
this.$set(this.isShow, index, !this.isShow[index])
this.$nextTick(() => {
if (this.isShow[index] === true) {
this.codeParent[index].style.height = +this.codeHeightArr[index] + 25 + 'px'
} else {
this.codeParent[index].style.height = 0
}
})
},
// Measure every code area's content height and start all panels collapsed.
getCodesHeight() {
const arr = document.getElementsByClassName('code-content-height')
this.codeParent = document.getElementsByClassName('code-content')
const arrLength = arr.length
for (let i = 0; i < arrLength; i++) {
this.codeHeightArr.push(arr[i].getBoundingClientRect().height)
this.isShow.push(false)
}
},
},
mounted() {
// Measure after the DOM has rendered so the heights are available on click.
this.$nextTick(() => {
this.getCodesHeight()
})
},
}
$(document).ready(function()
{
    // Initialize the client wizard form and the cascading location selects.
    loadWizard('formCliente', '/clientes/listar');
    select2Ubicacion('#cod_pais', '#cod_estado', '#cod_ciudad', pais, estado, ciudad);
    // BUG FIX: $( '#a', '#b', '#c' ) treats the 2nd/3rd arguments as a jQuery
    // context, so select2 was only applied to #cod_pais. A single
    // comma-separated selector string targets all three dropdowns.
    $('#cod_pais, #cod_estado, #cod_ciudad').select2();
    // Photo upload widget, seeded with any already-attached images.
    $('#foto').fileinput({
        initialPreview: array_rutas_adjuntos,
        initialPreviewAsData: true,
        initialPreviewFileType: 'image',
        allowedFileTypes: ["image", "video"]
    });
});
#!/bin/bash -e
# Bring up the ADaPT-ML dev stack, verify container health, then run the full
# test suite inside the service containers.
echo "=== STARTING ADaPT-ML AND RUNNING ALL TESTS ==="
docker-compose --env-file .env --profile dev up -d
while docker-compose ps | grep -i "starting"
do
    echo "Waiting for MLflow databases (5 sec)..."
    sleep 5
done
echo "Checking for exited or restarting containers..."
# BUG FIX: the captured grep output is text (or empty), so comparing it to the
# string "0" could never detect a failure. Treat any non-empty match as one.
# The `|| true` keeps `bash -e` from aborting when grep finds nothing.
exited=$(docker-compose ps | grep "Exit" || true)
restarting=$(docker-compose ps | grep "Restarting" || true)
if [ -n "$exited" ]
then
    echo "Check failed: some containers exited."
    exit 1
fi
if [ -n "$restarting" ]
then
    echo "Check failed: some containers restarting. Did the CrateDB bootstrap checks fail?"
    exit 1
fi
echo "Startup complete."
docker exec label-studio-dev python /test/ls-test.py
docker exec dp-mlflow sh -c ". ~/.bashrc && python /test/dp-test.py"
docker exec modelling-mlflow sh -c ". ~/.bashrc && python /test/ml-test.py"
# Temporary network so the modelling container can reach the deploy container.
docker network create test-deploy-network --subnet 192.168.2.0/24 --gateway 192.168.2.10
docker network connect --ip 192.168.2.4 test-deploy-network modelling-mlflow-deploy
docker network connect --ip 192.168.2.8 test-deploy-network modelling-mlflow
docker exec modelling-mlflow sh -c ". ~/.bashrc && python /test/deploy-test.py"
docker network disconnect test-deploy-network modelling-mlflow-deploy
docker network disconnect test-deploy-network modelling-mlflow
docker network rm test-deploy-network
echo "=== TESTING COMPLETE ==="
<reponame>Sherlock92/greentop
/**
* Copyright 2017 <NAME>. Distributed under the MIT license.
*/
#include "greentop/sport/MarketCatalogue.h"
namespace greentop {
namespace sport {
// Default constructor: members stay default-initialized / empty, so isValid()
// reports false until marketId and marketName are populated.
MarketCatalogue::MarketCatalogue() {
}
// Field-wise constructor for when the full catalogue entry is known up front.
MarketCatalogue::MarketCatalogue(const std::string& marketId,
const std::string& marketName,
const std::tm& marketStartTime,
const MarketDescription& description,
const Optional<double>& totalMatched,
const std::vector<RunnerCatalog>& runners,
const EventType& eventType,
const Competition& competition,
const Event& event) :
marketId(marketId),
marketName(marketName),
marketStartTime(marketStartTime),
description(description),
totalMatched(totalMatched),
runners(runners),
eventType(eventType),
competition(competition),
event(event) {
}
// Populates this catalogue from an API JSON object. Keys absent from the JSON
// leave the corresponding member untouched.
void MarketCatalogue::fromJson(const Json::Value& json) {
if (json.isMember("marketId")) {
marketId = json["marketId"].asString();
}
if (json.isMember("marketName")) {
marketName = json["marketName"].asString();
}
if (json.isMember("marketStartTime")) {
// Fixed ISO-8601 format with literal .000 milliseconds and Z suffix.
// NOTE(review): strptime is POSIX-only -- confirm the supported platforms.
strptime(json["marketStartTime"].asString().c_str(), "%Y-%m-%dT%H:%M:%S.000Z", &marketStartTime);
}
if (json.isMember("description")) {
description.fromJson(json["description"]);
}
if (json.isMember("totalMatched")) {
totalMatched = json["totalMatched"].asDouble();
}
if (json.isMember("runners")) {
// Appends to any existing runners rather than replacing them.
for (unsigned i = 0; i < json["runners"].size(); ++i) {
RunnerCatalog runner;
runner.fromJson(json["runners"][i]);
runners.push_back(runner);
}
}
if (json.isMember("eventType")) {
eventType.fromJson(json["eventType"]);
}
if (json.isMember("competition")) {
competition.fromJson(json["competition"]);
}
if (json.isMember("event")) {
event.fromJson(json["event"]);
}
}
// Serializes this catalogue back to JSON, emitting only fields that hold a
// meaningful value (empty strings, unset time, and invalid members are skipped).
Json::Value MarketCatalogue::toJson() const {
Json::Value json(Json::objectValue);
if (marketId != "") {
json["marketId"] = marketId;
}
if (marketName != "") {
json["marketName"] = marketName;
}
// tm_year > 0 is used as the "start time was set" sentinel (years since 1900).
if (marketStartTime.tm_year > 0) {
char buffer[25];
strftime(buffer, 25,"%Y-%m-%dT%H:%M:%S.000Z", &marketStartTime);
json["marketStartTime"] = std::string(buffer);
}
if (description.isValid()) {
json["description"] = description.toJson();
}
if (totalMatched.isValid()) {
json["totalMatched"] = totalMatched.toJson();
}
if (runners.size() > 0) {
for (unsigned i = 0; i < runners.size(); ++i) {
json["runners"].append(runners[i].toJson());
}
}
if (eventType.isValid()) {
json["eventType"] = eventType.toJson();
}
if (competition.isValid()) {
json["competition"] = competition.toJson();
}
if (event.isValid()) {
json["event"] = event.toJson();
}
return json;
}
// A catalogue entry is usable once both identifying fields are populated.
bool MarketCatalogue::isValid() const {
    if (marketId == "") {
        return false;
    }
    return marketName != "";
}
// ---- Trivial accessors and mutators (no validation performed) ----
const std::string& MarketCatalogue::getMarketId() const {
return marketId;
}
void MarketCatalogue::setMarketId(const std::string& marketId) {
this->marketId = marketId;
}
const std::string& MarketCatalogue::getMarketName() const {
return marketName;
}
void MarketCatalogue::setMarketName(const std::string& marketName) {
this->marketName = marketName;
}
const std::tm& MarketCatalogue::getMarketStartTime() const {
return marketStartTime;
}
void MarketCatalogue::setMarketStartTime(const std::tm& marketStartTime) {
this->marketStartTime = marketStartTime;
}
const MarketDescription& MarketCatalogue::getDescription() const {
return description;
}
void MarketCatalogue::setDescription(const MarketDescription& description) {
this->description = description;
}
const Optional<double>& MarketCatalogue::getTotalMatched() const {
return totalMatched;
}
void MarketCatalogue::setTotalMatched(const Optional<double>& totalMatched) {
this->totalMatched = totalMatched;
}
const std::vector<RunnerCatalog>& MarketCatalogue::getRunners() const {
return runners;
}
void MarketCatalogue::setRunners(const std::vector<RunnerCatalog>& runners) {
this->runners = runners;
}
const EventType& MarketCatalogue::getEventType() const {
return eventType;
}
void MarketCatalogue::setEventType(const EventType& eventType) {
this->eventType = eventType;
}
const Competition& MarketCatalogue::getCompetition() const {
return competition;
}
void MarketCatalogue::setCompetition(const Competition& competition) {
this->competition = competition;
}
const Event& MarketCatalogue::getEvent() const {
return event;
}
void MarketCatalogue::setEvent(const Event& event) {
this->event = event;
}
}
}
|
#!/bin/bash
# Out-of-source build helper: wipe the build directory (the current
# directory), then configure, compile and run the test binary.
# Abort on the first failing step instead of running a stale binary.
set -e
# Clear the current directory. Unlike `rm -r *`, `rm -f ./*` does not fail
# when the directory is already empty (unmatched glob) and never prompts.
rm -rf ./*
# take building and running steps
cmake ../src
make
./containers
<filename>pkg/loki/loki.go
/*
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package loki
import (
"context"
"strings"
"sync"
)
var (
	// availableSystems maps a system name to a factory that creates a fresh System.
	availableSystems = make(map[string]func() System)
	systemsMx        sync.Mutex
	// availableDestroyers maps a destroyer name to its registered implementation.
	availableDestroyers = make(map[string]Destroyer)
	destroyersMx        sync.Mutex
	// availableKillers maps a killer name to a constructor bound to a System.
	availableKillers = make(map[string]func(System) (Killer, error))
	killersMx        sync.Mutex
	// readyParsers maps a config key to a ReadyParser factory.
	readyParsers = make(map[string]func(*Config) ReadyParser)
	readyMx      sync.Mutex
)
// ID uniquely represents any resource or operation in a system.
type ID string

// System is the core interface which represents any execution environment such as Kubernetes, AWS etc.
// Plugin implementations need to implement this interface, which handles things such as parsing the
// system definition, loading desired state, performing validation etc.
type System interface {
	// Parse parses the definition of the system.
	Parse(map[string]interface{}) error
	// Load loads the state of the system as per its definition.
	Load(context.Context) error
	// Validate validates at any point of time whether the actual state of the system matches its
	// desired state as determined by ReadyCond.
	Validate(context.Context) (bool, error)
	// Identifiers returns Identifier values of all resources in the system.
	Identifiers() Identifiers
	// AsJSON returns the json representation of the state of the system. If `reload` is set to `true`,
	// the state of the system will be reloaded before preparing the json representation.
	AsJSON(ctx context.Context, reload bool) ([]byte, error)
}
// Destroyer parses a single section of destroy, whether it be exclusions or scenarios.
// Plugin implementations need to implement this interface.
type Destroyer interface {
	// ParseDestroySection parses any section under the destroy block.
	ParseDestroySection(map[string]interface{}) (Identifiers, error)
}

// DestroyerFunc is syntax sugar for the single-method Destroyer interface so that a simple
// function can implement the Destroyer interface.
type DestroyerFunc func(map[string]interface{}) (Identifiers, error)

// ParseDestroySection calls d(m).
func (d DestroyerFunc) ParseDestroySection(m map[string]interface{}) (Identifiers, error) {
	return d(m)
}
// Killer kills the given identifiers. The definition of "kill" depends on the system: for example,
// in kubernetes it could be deleting a resource, and for networks it could be creating a
// disconnection between systems.
type Killer interface {
	// Kill kills the given identifiers.
	Kill(context.Context, ...Identifier) error
}

// KillerFunc is syntax sugar for the single-method Killer interface so that a simple function can
// implement the Killer interface.
type KillerFunc func(context.Context, ...Identifier) error

// Kill calls k(ctx, i...).
func (k KillerFunc) Kill(ctx context.Context, i ...Identifier) error {
	return k(ctx, i...)
}
// ReadyCond defines the condition under which all the systems are considered to be in the
// desired state.
type ReadyCond interface {
	// Ready checks whether the system has reached its desired state.
	Ready(context.Context) (bool, error)
}

// ReadyFunc is syntax sugar for the single-method ReadyCond interface so that a simple function
// can implement the ReadyCond interface.
type ReadyFunc func(context.Context) (bool, error)

// Ready calls r(ctx).
func (r ReadyFunc) Ready(ctx context.Context) (bool, error) {
	return r(ctx)
}
// ReadyParser parses the ready section to create a ReadyCond.
type ReadyParser interface {
	// Parse parses the ready section and creates a ReadyCond.
	Parse(map[string]interface{}) (ReadyCond, error)
}

// ReadyParserFunc is syntax sugar for the single-method ReadyParser interface so that a simple
// function can implement the ReadyParser interface.
type ReadyParserFunc func(map[string]interface{}) (ReadyCond, error)

// Parse calls r(readyConf).
func (r ReadyParserFunc) Parse(readyConf map[string]interface{}) (ReadyCond, error) {
	return r(readyConf)
}
// Identifier uniquely identifies any resource or operation in a particular system.
type Identifier interface {
	// ID returns the unique identifier of the resource or operation in a particular system.
	ID() ID
}

// Identifiers is a group of Identifier values which can be used to create scenarios for creating
// chaos, excluding chaos scenarios, etc.
type Identifiers []Identifier
// String renders the identifiers as a multi-line list of the form
// "[\n{id1}\n{id2}\n]".
func (idents Identifiers) String() string {
	var b strings.Builder
	b.WriteString("[\n")
	for _, ident := range idents {
		b.WriteString("{" + string(ident.ID()) + "}\n")
	}
	b.WriteString("]")
	return b.String()
}
// RegisterSystem is used by plugins to register custom systems.
// The factory is invoked each time a fresh System instance is needed.
func RegisterSystem(name string, factory func() System) {
	systemsMx.Lock()
	defer systemsMx.Unlock()
	availableSystems[name] = factory
}

// RegisterDestroyer is used by plugins to register custom destroyers.
func RegisterDestroyer(name string, impl Destroyer) {
	destroyersMx.Lock()
	defer destroyersMx.Unlock()
	availableDestroyers[name] = impl
}

// RegisterKiller is used by plugins to register custom killers.
// The constructor receives the System the Killer will operate on.
func RegisterKiller(name string, newKiller func(System) (Killer, error)) {
	killersMx.Lock()
	defer killersMx.Unlock()
	availableKillers[name] = newKiller
}

// RegisterReadyParser registers ReadyParser-creating functions that can be
// referenced by key from the config file.
func RegisterReadyParser(key string, factory func(*Config) ReadyParser) {
	readyMx.Lock()
	defer readyMx.Unlock()
	readyParsers[key] = factory
}
|
///////////////////////////////////////////////////////////////////////////////
// Name: MadEdit/MadEditSearch.cpp
// Description: searching and replacing functions
// Author: <EMAIL>
// Licence: GPL
///////////////////////////////////////////////////////////////////////////////
#include "MadEdit.h"
#include "MadEncoding.h"
#include <iostream>
#include <string>
//#include <boost/xpressive/xpressive.hpp>
#include <boost/xpressive/xpressive_dynamic.hpp>
#include <boost/xpressive/traits/null_regex_traits.hpp>
#include <boost/xpressive/traits/cpp_regex_traits.hpp>
using namespace std;
using namespace boost::xpressive;
#ifdef _DEBUG
#include <crtdbg.h>
#define new new(_NORMAL_BLOCK ,__FILE__, __LINE__)
#endif
// Case-folding helper used by the case-insensitive searchers.
// Characters outside the BMP (or negative values) are returned unchanged,
// because towlower() only handles wchar_t-range code points.
template<typename char_type>
inline char_type xtolower(char_type ch)
{
    if(ch<0 || ch>0xFFFF) return ch;
    return towlower(wchar_t(ch));
}

template<>
inline wchar_t xtolower(wchar_t ch)
{
    return towlower(ch);
}

// Raw bytes are never case-folded: hex search is always exact.
template<>
inline wxByte xtolower(wxByte ch)
{
    return ch;
}
// Return true when the first `len` elements of s1 and s2 compare equal.
// A non-positive `len` is treated as the empty prefix and yields true.
template <typename char_type>
bool IsTheSame(const char_type *s1, const char_type *s2, int len)
{
    for(int i = 0; i < len; ++i)
    {
        if(s1[i] != s2[i]) return false;
    }
    return true;
}
// Sunday-style (Quick Search) bad-character shift table over raw byte
// values, used by the hex searcher. Rebuilding is skipped when the same
// pattern is supplied again.
class JumpTable_Hex
{
private:
    int m_Table[256];                     // shift distance per byte value
    std::basic_string<wxByte> m_Pattern;  // pattern the table was built for
public:
    // Build the shift table for `pat`/`len`; no-op if the pattern is unchanged.
    void Build(const wxByte* pat, size_t len)
    {
        if(m_Pattern.length()==len && IsTheSame(m_Pattern.c_str(), pat, (int)len)) return;
        m_Pattern.assign(pat, len);
        // Bytes absent from the pattern allow the maximal shift of len+1.
        for(size_t i=0; i<256; ++i) m_Table[i] = (int)len+1;
        // Bytes in the pattern shift by their distance from the pattern end.
        for(size_t i=0; i<len; ++i) m_Table[pat[i]] = (int)(len-i);
    }
    int GetValue(const wxByte ch) const
    {
        return m_Table[ch];
    }
};
// Hash map from UCS4 code point to shift distance, for characters above the BMP.
WX_DECLARE_HASH_MAP( unsigned int, int, wxIntegerHash, wxIntegerEqual, UCS4_Map );

// Shift table analogous to JumpTable_Hex, but over UCS4 characters:
// BMP code points use a flat 64K array, the rest fall back to a hash map.
class JumpTable_UCS4
{
private:
    int m_UCS2_Table[65536];  // shifts for code points <= 0xFFFF
    UCS4_Map m_Table;         // shifts for code points above the BMP
    ucs4string m_Pattern;     // pattern the tables were built for
    int m_Len_1;              // pattern length + 1 == default (maximal) shift
public:
    // Build the shift tables for `pat`/`len`; no-op if the pattern is unchanged.
    void Build(const ucs4_t* pat, size_t len)
    {
        if(m_Pattern.length()==len && IsTheSame(m_Pattern.c_str(), pat, (int)len)) return;
        m_Pattern.assign(pat, len);
        const int len1 = m_Len_1 = (int)len+1;
        int *ptab = m_UCS2_Table;
        for(size_t i=0;i<65536; ++i, ++ptab) *ptab = len1;
        m_Table.clear();
        const ucs4_t* p = pat;
        for(size_t i=0; i<len; ++i, ++p)
        {
            const unsigned int idx = (unsigned int)(*p);
            if(idx <= 0xFFFF)
            {
                m_UCS2_Table[idx] = (int)(len-i);
            }
            else
            {
                m_Table[idx] = (int)(len-i);
            }
        }
    }
    int GetValue(const ucs4_t ch) const
    {
        if(ch <= 0xFFFF && ch >= 0)
        {
            return m_UCS2_Table[(unsigned int)ch];
        }
        // Code points above the BMP: hash lookup, default shift when absent.
        UCS4_Map::const_iterator it = m_Table.find((unsigned int)ch);
        if(it==m_Table.end()) return m_Len_1;
        return it->second;
    }
};
template <typename char_type, typename CharIter, typename JumpTable>
bool Search(CharIter &begin, CharIter &end,
const char_type *pattern, size_t pat_len,
const JumpTable &jump_table, bool bCaseSensitive)
// if(bCaseSensitive==false) the content of 'pattern' must be lower case!!!
{
wxASSERT(pat_len != 0);
if(begin == end) return false;
register size_t idx=0;
register const char_type *p = pattern;
CharIter beginpos;
register char_type c1;
for(;;)
{
c1 = *begin;
if(bCaseSensitive==false)
{
c1=xtolower(c1);
}
if(c1 == *p)
{
if(idx==0)
{
beginpos = begin;
}
++idx;
++p;
if(idx==pat_len) // found
{
end = begin;
++end;
begin = beginpos;
return true;
}
// compare next char
if(++begin == end)
return false;
}
else // c1 != *p
{
// jump by the jump_table
CharIter it = begin;
int i = (int)(pat_len - idx);
do
{
if(++it == end)
return false;
}
while(--i > 0);
c1 = *it;
if(bCaseSensitive==false)
{
c1=xtolower(c1);
}
register int jv = jump_table.GetValue(c1);
if(jv >= (int)pat_len)
{
begin = it;
jv -= (int)pat_len;
idx = 0;
p = pattern;
}
else if(idx != 0)
{
begin = beginpos;
idx = 0;
p = pattern;
}
if(jv > 0)
{
do
{
if(++begin == end)
return false;
}
while(--jv > 0);
}
}
}
return false;
}
#ifdef __WXMSW__
// On MSW, boost.xpressive has no built-in support for ucs4_t: teach it the
// string type to use and how to materialize ucs4 string literals.
namespace boost { namespace xpressive { namespace detail
{

#if BOOST_VERSION >= 103500
template<>
struct string_type<ucs4_t> // defined in xpressive/detail/detail_fwd.hpp
{
    typedef ucs4string type;
};
#else
template<char Ch, wchar_t Wch>
struct char_literal<ucs4_t, Ch, Wch>
{
    BOOST_STATIC_CONSTANT(ucs4_t, value = Wch);
};
#ifndef BOOST_NO_INCLASS_MEMBER_INITIALIZATION
template<char Ch, wchar_t Wch>
ucs4_t const char_literal<ucs4_t, Ch, Wch>::value;
#endif
#endif

template<>
struct string_literal<ucs4_t>
{
    static ucs4string inter_str;

    // Widen a wchar_t literal into the shared ucs4 buffer.
    // NOTE(review): the result points into a single static buffer, so this
    // is not reentrant — relies on xpressive consuming each result before
    // requesting the next. Confirm before reuse.
    static ucs4_t const *pick(char const *, wchar_t const *cstr)
    {
        inter_str.clear();
        while(*cstr != 0)
        {
            inter_str.push_back(ucs4_t(*cstr));
            ++cstr;
        }
        return inter_str.c_str();
    }
    static ucs4_t pick(char, wchar_t ch)
    {
        return ch;
    }
};
ucs4string string_literal<ucs4_t>::inter_str;

}}}

// Minimal regex traits for ucs4_t on MSW, delegating classification to the
// narrow-char traits and restricting case folding to the BMP.
struct ucs4_regex_traits: public null_regex_traits<ucs4_t>
{
    typedef ucs4_t char_type;
    typedef detail::umaskex_t char_class_type;
    typedef ucs4string string_type;
    typedef std::locale locale_type;

    // Case folding only works inside the wchar_t range; other code points
    // are returned unchanged.
    template<typename char_type2>
    static char_type2 tolower(char_type2 ch)
    {
        if(ch<0 || ch>0xFFFF) return ch;
        return towlower(wchar_t(ch));
    }
    static wchar_t tolower(wchar_t ch)
    {
        return towlower(ch);
    }
    template<typename char_type2>
    static char_type2 toupper(char_type2 ch)
    {
        if(ch<0 || ch>0xFFFF) return ch;
        return towupper(wchar_t(ch));
    }
    static wchar_t toupper(wchar_t ch)
    {
        return towupper(ch);
    }
    static char_type widen(char ch)
    {
        return char_type(ch);
    }
    char_type translate_nocase(char_type ch) const
    {
        return this->tolower(ch);
    }
    // Case-insensitive range check: try the char itself and both foldings.
    bool in_range_nocase(char_type first, char_type last, char_type ch) const
    {
        return this->in_range(first, last, ch)
            || this->in_range(first, last, this->toupper(ch))
            || this->in_range(first, last, this->tolower(ch));
    }
    // Delegate class-name lookup ([:alpha:] etc.) to the char traits.
    template<typename FwdIter>
    static char_class_type lookup_classname(FwdIter begin, FwdIter end, bool icase)
    {
        cpp_regex_traits<char> cpptraits;
        char_class_type c=cpptraits.lookup_classname(begin, end, icase);
        return c;
    }
    static bool isctype(char_type ch, char_class_type mask)
    {
        cpp_regex_traits<char> cpptraits;
        int i=ch;
        // Code points outside Latin-1 are mapped to NUL, i.e. they match no
        // character class (original author's open question, hence the '?').
        if(i<0 || i>0xFF) ch=0x0;//?
        return cpptraits.isctype(char(ch), mask);
    }
    // Digit value of ch in the given radix, or -1 when not a digit.
    static int value(char_type ch, int radix)
    {
        switch(radix)
        {
        case 8:
            if(ch>='0' && ch<='7') return ch-'0';
            break;
        case 10:
            if(ch>='0' && ch<='9') return ch-'0';
            break;
        case 16:
            if(ch>='0' && ch<='9') return ch-'0';
            if(ch>='A' && ch<='F') return ch-'A'+10;
            if(ch>='a' && ch<='f') return ch-'a'+10;
            break;
        }
        return -1;
    }
};
#else // __WXGTK__
// On GTK wchar_t is already UCS4, so the stock cpp traits work directly.
typedef cpp_regex_traits<ucs4_t> ucs4_regex_traits;
#endif
// A reference-counted buffer of decoded unicode chars shared between iterators.
struct UCQueueSet
{
    MadUCQueue ucq;  // decoded (char, byte-length) pairs
    int lock;        // number of UCIterators currently referencing this queue
};

// Bidirectional iterator presenting the document as a sequence of ucs4_t
// characters, decoding lazily into shared UCQueueSet buffers. Used as the
// iterator type for both the plain-text searcher and boost.xpressive.
struct UCIterator // ucs4_t widechar iterator
{
    typedef std::bidirectional_iterator_tag iterator_category;
    typedef ucs4_t value_type;
    typedef wxFileOffset difference_type;
    typedef const value_type *pointer;
    typedef const value_type &reference;

    // Shared static search context; must be set via Init() before use.
    static MadLines *s_lines;
    static MadLines::NextUCharFuncPtr s_NextUChar;
    static wxFileOffset s_endpos;
    static list<UCQueueSet> s_ucqueues;

    static void Init(MadLines *lines, const wxFileOffset &endpos)
    {
        wxASSERT(endpos>=0 && endpos<=lines->GetSize());
        s_lines=lines;
        s_NextUChar=lines->NextUChar;
        s_endpos=endpos;
        s_ucqueues.clear();
    }

    typedef list<UCQueueSet>::iterator UCQIterator;

#define UCQ_MAXSIZE (10 * 1024)
#define BUF_MAXSIZE (5 * 1024)

    wxFileOffset pos;     // absolute byte offset in the document
    MadLineIterator lit;  // line containing the current position
    wxFileOffset linepos; // byte offset within that line
    UCQIterator ucqit;    // shared queue holding decoded chars
    int ucqidx;           // index into ucqit->ucq; -1 == detached

    UCIterator():ucqidx(-1) {}

    ~UCIterator()
    {
        // Release our reference; free the shared queue when unused.
        if(ucqidx>=0 && --ucqit->lock == 0)
        {
            s_ucqueues.erase(ucqit);
        }
    }

    //UCIterator(wxFileOffset pos0):ucqidx(-1), pos(pos0) {}

    UCIterator(const UCIterator &ucit):ucqidx(-1)
    {
        this->operator =( ucit );
    }

    UCIterator(wxFileOffset pos0, const MadLineIterator &lit0, wxFileOffset linepos0)
        :pos(pos0), lit(lit0), linepos(linepos0)
    {
        // Normalize an end-of-line position to the start of the next line.
        if(linepos == lit->m_Size && pos<s_lines->GetSize())
        {
            ++lit;
            linepos = 0;
        }

        // Allocate a fresh shared queue and take the first reference on it.
        s_ucqueues.push_back(UCQueueSet());
        ucqit=s_ucqueues.end();
        --ucqit;
        ucqit->lock=1; // lock this ucqueue
        ucqidx=0;

        if(pos<s_lines->GetSize())
        {
            // Pre-decode a buffer of characters starting at this position.
            MadUCQueue &ucqueue=ucqit->ucq;
            s_lines->InitNextUChar(lit, linepos);
            int i = BUF_MAXSIZE;
            if(pos>=s_endpos) i=10;
            while(--i>0 && (s_lines->*s_NextUChar)(ucqueue))
            {
                //ucqit->size += ucqueue.back().second;
            }
        }
    }

    UCIterator & operator=(const UCIterator & it)
    {
        // Drop the old reference (if any) before adopting the new queue.
        if(ucqidx>=0 && --ucqit->lock == 0)
        {
            wxASSERT(ucqit != it.ucqit);
            s_ucqueues.erase(ucqit);
        }
        pos=it.pos;
        lit=it.lit;
        linepos=it.linepos;
        ucqit=it.ucqit;
        ucqidx=it.ucqidx;
        if(ucqidx>=0) ucqit->lock++;
        return *this;
    }

    const value_type operator*() const
    {
        wxASSERT(ucqidx>=0 && ucqidx < int(ucqit->ucq.size()));
        return ucqit->ucq[ucqidx].first;
    }

    /***
    ucs4_t *operator->() const
    {
        return _ws_ + pos;
    }
    ***/

    // pre-increment operator: advance one decoded character, refilling or
    // rotating the shared queue when its buffered data is exhausted.
    UCIterator & operator++()
    {
        wxASSERT(ucqidx>=0 && ucqidx < int(ucqit->ucq.size()));

        MadUCQueue *ucqueue = &(ucqit->ucq);
        int len = (*ucqueue)[ucqidx].second;
        pos += len;
        linepos += len;
        if(linepos == lit->m_Size)
        {
            if(pos==s_endpos) return *this; // end
            ++lit;
            linepos = 0;
        }

        ++ucqidx;
        if(ucqidx == (int)(*ucqueue).size())
        {
            wxASSERT(pos <= s_endpos);
            // Queue grew too large: start a fresh one to bound memory.
            if(ucqidx>=UCQ_MAXSIZE)
            {
                if(--ucqit->lock == 0)
                {
                    s_ucqueues.erase(ucqit);
                }
                s_ucqueues.push_back(UCQueueSet());
                ucqit=s_ucqueues.end();
                --ucqit;
                ucqit->lock=1; // lock this ucqueue
                ucqidx=0;
                ucqueue = &(ucqit->ucq);
            }

            // Decode the next buffer of characters.
            s_lines->InitNextUChar(lit, linepos);
            int i = BUF_MAXSIZE;
            while(--i>0 && (s_lines->*s_NextUChar)(*ucqueue))
            {
                //ucqit->size += ucqueue->back().second;
            }
        }
        return *this;
    }

    /***
    // post-increment operator
    UCIterator operator++(int)
    {
        UCIterator tmp = *this;
        ++*this;
        return tmp;
    }
    ***/

    //***
    // pre-decrement operator
    UCIterator & operator--()
    {
        wxASSERT(pos>0 && ucqidx>=0 && ucqidx <= int(ucqit->ucq.size()));

        if(ucqidx==0) //rarely happen
        {
            // Stepping before the buffered data: start a new queue holding
            // just the previous character.
            if(--ucqit->lock == 0)
            {
                s_ucqueues.erase(ucqit);
            }
            s_ucqueues.push_back(UCQueueSet());
            ucqit=s_ucqueues.end();
            --ucqit;
            ucqit->lock=1; // lock this ucqueue
            ucqidx=0;

            MadUCPair ucp=s_lines->PreviousUChar(lit, linepos);
            wxASSERT(ucp.second!=0);
            pos-=ucp.second;
            ucqit->ucq.push_back(ucp);
            return *this;
        }

        --ucqidx;
        int len = ucqit->ucq[ucqidx].second;
        pos -= len;
        if(linepos == 0)
        {
            --lit;
            linepos = lit->m_Size;
        }
        linepos -= len;
        return *this;
    }
    //***/

    /***
    // post-decrement operator
    UCIterator operator--(int)
    {
        UCIterator tmp = *this;
        --*this;
        return tmp;
    }
    ***/

    // Any two positions at or past s_endpos compare equal, so the search
    // terminates cleanly at the range end.
    bool operator==(const UCIterator & it) const
    {
        if(pos == it.pos) return true;
        return (pos>=s_endpos && it.pos>=s_endpos);
    }
    bool operator!=(const UCIterator & it) const
    {
        return ! (this->operator==(it)) ;
    }
};

// Static search context shared by all UCIterators.
MadLines *UCIterator::s_lines=NULL;
MadLines::NextUCharFuncPtr UCIterator::s_NextUChar=NULL;
wxFileOffset UCIterator::s_endpos=0;
list<UCQueueSet> UCIterator::s_ucqueues;
// Search the document range [beginpos, endpos) for `text`, with optional
// regex, case-insensitive and whole-word matching. On a hit the range is
// narrowed to the match and SR_YES is returned; SR_NO means no match,
// SR_EXPR_ERROR an invalid regular expression (reported via a dialog).
// Fix: exceptions are now caught by const reference instead of by value,
// avoiding slicing and needless copies of the exception object.
MadSearchResult MadEdit::Search(/*IN_OUT*/MadCaretPos &beginpos, /*IN_OUT*/MadCaretPos &endpos,
        const wxString &text, bool bRegex, bool bCaseSensitive, bool bWholeWord)
{
    if(beginpos.pos>=endpos.pos || text.IsEmpty())
        return SR_NO;

    regex_constants::syntax_option_type opt = regex_constants::ECMAScript;
    if(bCaseSensitive == false)
    {
        opt = opt | regex_constants::icase;
    }

    // For case-insensitive plain-text search the pattern must be lowered
    // up front (::Search requires a lower-case pattern). The static buffer
    // keeps the lowered copy alive while puc points into it.
    const wxString *text_ptr = &text;
    if(!bCaseSensitive)
    {
        static wxString text_lower;
        text_lower = text.Lower();
        text_ptr = &text_lower;
    }

#ifdef __WXMSW__
    // On MSW wxString stores UTF-16; expand to UCS4 first.
    vector<ucs4_t> ucs;
    TranslateText(text_ptr->c_str(), text_ptr->Len(), &ucs, true);
    ucs4_t *puc=&ucs[0];
    size_t len=ucs.size();
    ucs4string exprstr(puc, puc+len);
#else
    const ucs4_t *puc=text_ptr->c_str();
    size_t len=text_ptr->Len();
    ucs4string exprstr(puc, puc+len);
#endif

    regex_compiler<UCIterator, ucs4_regex_traits > ucs4comp;
    basic_regex<UCIterator> expression;

    if(bRegex)
    {
        try
        {
            expression=ucs4comp.compile(exprstr, opt);
        }
        catch(const regex_error &)
        {
            wxMessageDialog dlg(this, wxString::Format(_("'%s' is not a valid regular expression."), text.c_str()),
                wxT("MadEdit"), wxOK|wxICON_ERROR );
            dlg.ShowModal();
            return SR_EXPR_ERROR;
        }
    }

    UCIterator::Init(m_Lines, endpos.pos);
    UCIterator start(beginpos.pos, beginpos.iter, beginpos.linepos);
    UCIterator end(endpos.pos, endpos.iter, endpos.linepos);
    UCIterator fbegin, fend;
    match_results < UCIterator > what;
    bool found;

    static JumpTable_UCS4 jtab;
    jtab.Build(puc, len);

    try
    {
        for(;;)
        {
            if(bRegex)
            {
                found=regex_search(start, end, what, expression);
            }
            else
            {
                fbegin=start;
                fend=end;
                found=::Search(fbegin, fend, puc, len, jtab, bCaseSensitive);
            }

            if(!found) break;

            if(bWholeWord) // check if is WholeWord
            {
                // A match counts as a whole word only when neither end
                // adjoins a non-delimiter, non-space character.
                UCIterator cpos1, cpos2;
                if(bRegex)
                {
                    cpos1=what[0].first;
                    cpos2=what[0].second;
                }
                else
                {
                    cpos1=fbegin;
                    cpos2=fend;
                }

                // check cpos2
                if(cpos2.linepos > cpos2.lit->m_RowIndices[0].m_Start // not at begin/end of line
                    && cpos2.linepos < (cpos2.lit->m_Size - cpos2.lit->m_NewLineSize))
                {
                    ucs4_t uc = *cpos2;
                    if(uc > 0x20 && !m_Syntax->IsDelimiter(uc) && uc != 0x3000)
                    {
                        // check prev-uchar of cpos2
                        --cpos2;
                        uc = *cpos2;
                        if(uc>0x20 && !m_Syntax->IsDelimiter(uc) && uc != 0x3000)
                        {
                            found = false;
                        }
                    }
                }

                // check cpos1
                if(found)
                {
                    wxFileOffset lpos = cpos1.lit->m_RowIndices[0].m_Start;
                    if(cpos1.linepos > lpos) // not at begin of line
                    {
                        ucs4_t uc = *cpos1;
                        if(uc > 0x20 && !m_Syntax->IsDelimiter(uc) && uc != 0x3000)
                        {
                            // check prev-uchar of cpos1
                            --cpos1;
                            uc = *cpos1;
                            if(uc > 0x20 && !m_Syntax->IsDelimiter(uc) && uc != 0x3000)
                            {
                                found = false;
                            }
                        }
                    }
                }
            }

            if(found)
            {
                // Narrow the in/out range to the match and report success.
                if(bRegex)
                {
                    beginpos.pos = what[0].first.pos;
                    beginpos.iter= what[0].first.lit;
                    beginpos.linepos=what[0].first.linepos;
                    endpos.pos = what[0].second.pos;
                    endpos.iter= what[0].second.lit;
                    endpos.linepos=what[0].second.linepos;
                }
                else
                {
                    beginpos.pos = fbegin.pos;
                    beginpos.iter= fbegin.lit;
                    beginpos.linepos=fbegin.linepos;
                    endpos.pos = fend.pos;
                    endpos.iter= fend.lit;
                    endpos.linepos=fend.linepos;
                }
                return SR_YES;
            }

            // not found, repeat...
            if(bRegex)
            {
                start = what[0].second;
            }
            else
            {
                start = fend;
            }
        }
    }
    catch(const regex_error &)
    {
        wxMessageDialog dlg(this, _("Catched a exception of 'regex_error'.\nMaybe the regular expression is invalid."),
            wxT("MadEdit"), wxOK|wxICON_ERROR );
        dlg.ShowModal();
        return SR_EXPR_ERROR;
    }

    return SR_NO;
}
// convert escape char to literal char
// Expands escape sequences in `str` (e.g. \n, \t, \x41) to their literal
// characters using xpressive's escape parser. Throws
// regex_error(error_escape) when the string ends with a lone backslash.
ucs4string ConvertEscape(const ucs4string &str)
{
    ucs4string out;
    detail::escape_value<ucs4_t, ucs4_regex_traits::char_class_type> esc;
    ucs4string::const_iterator begin=str.begin();
    ucs4string::const_iterator end=str.end();
    compiler_traits<ucs4_regex_traits> ucs4traits;
    while(begin!=end)
    {
        if(*begin=='\\')
        {
            if(++begin == end)
            {
                //out.push_back('\\'); // last char is '\'
                throw regex_error(regex_constants::error_escape);
            }
            else
            {
                // parse_escape consumes the escape sequence and advances begin.
                esc = detail::parse_escape(begin, end, ucs4traits);
                out += esc.ch_;
            }
        }
        else
        {
            out += *begin;
            ++begin;
        }
    }
    return out;
}
// Build the replacement text for the matched range [beginpos, endpos):
// for plain-text replace `fmt` is copied verbatim into `out`; for regex
// replace the matched text is run through regex_replace with `fmt` as the
// format string and escapes are expanded afterwards. Returns SR_YES on
// success, SR_NO for an empty expression, SR_EXPR_ERROR on bad input
// (reported via a dialog).
// Fix: exceptions are now caught by const reference instead of by value,
// avoiding slicing and needless copies of the exception object.
MadSearchResult MadEdit::Replace(ucs4string &out, const MadCaretPos &beginpos, const MadCaretPos &endpos,
        const wxString &expr, const wxString &fmt,
        bool bRegex, bool bCaseSensitive, bool bWholeWord)
{
    if(expr.IsEmpty()) return SR_NO;

    if(bRegex == false)
    {
        // fmt is the wanted string
        vector<ucs4_t> ucs;
        TranslateText(fmt.c_str(), fmt.Len(), &ucs, true);
        for(size_t i=0, size=ucs.size(); i<size; i++)
        {
            out += ucs[i] ;
        }
        return SR_YES;
    }

    regex_constants::syntax_option_type opt = regex_constants::ECMAScript;
    if(bCaseSensitive == false)
    {
        opt = opt | regex_constants::icase;
    }

#ifdef __WXMSW__
    // On MSW wxString stores UTF-16; expand to UCS4 first.
    vector<ucs4_t> ucs;
    TranslateText(expr.c_str(), expr.Len(), &ucs, true);
    ucs4_t *puc=&ucs[0];
    ucs4string exprstr(puc, puc+ucs.size());
#else
    const ucs4_t *puc=expr.c_str();
    ucs4string exprstr(puc, puc+expr.Len());
#endif

    typedef ucs4string::const_iterator ucs4iter;
    regex_compiler<ucs4iter, ucs4_regex_traits > ucs4comp;
    basic_regex<ucs4iter> expression;
    try
    {
        expression = ucs4comp.compile(exprstr, opt);
    }
    catch(const regex_error &)
    {
        wxMessageDialog dlg(this, wxString::Format(_("'%s' is not a valid regular expression."), expr.c_str()),
            wxT("MadEdit"), wxOK|wxICON_ERROR );
        dlg.ShowModal();
        return SR_EXPR_ERROR;
    }

#ifdef __WXMSW__
    ucs.clear();
    TranslateText(fmt.c_str(), fmt.Len(), &ucs, true);
    puc=&ucs[0];
    ucs4string fmtstr(puc, puc+ucs.size());
#else
    puc=fmt.c_str();
    ucs4string fmtstr(puc, puc+fmt.Len());
#endif

    // Collect the matched document range into a ucs4string.
    UCIterator begin(beginpos.pos, beginpos.iter, beginpos.linepos);
    UCIterator end(endpos.pos, endpos.iter, endpos.linepos);
    ucs4string str;
    //back_insert_iterator<ucs4string> oi(str);
    //std::copy(first, last, oi);
    while(begin!=end)
    {
        str += *begin;
        ++begin;
    }

    try
    {
        out=regex_replace(str, expression, fmtstr);
        out=ConvertEscape(out);
    }
    catch(const regex_error &)
    {
        wxMessageDialog dlg(this, wxString::Format(_("The format of '%s' is invalid."), fmt.c_str()),
            wxT("MadEdit"), wxOK|wxICON_ERROR );
        dlg.ShowModal();
        return SR_EXPR_ERROR;
    }

    return SR_YES;

    /***
    back_insert_iterator<ucs4string> oi(out);
    regex_replace(oi, first, last, expression, puc);
    ***/
}
// Hex Search
// Bidirectional iterator over the document's raw bytes, used by SearchHex.
// Unlike UCIterator it needs no decoding, so no shared queues are involved.
struct ByteIterator
{
    typedef std::bidirectional_iterator_tag iterator_category;
    typedef wxByte value_type;
    typedef wxFileOffset difference_type;
    typedef const value_type *pointer;
    typedef const value_type &reference;

    // Shared static search context; must be set via Init() before use.
    static MadLines *s_lines;
    static wxFileOffset s_endpos;

    static void Init(MadLines *lines, const wxFileOffset &endpos)
    {
        wxASSERT(endpos>=0 && endpos<=lines->GetSize());
        s_lines=lines;
        s_endpos=endpos;
    }

    wxFileOffset pos;     // absolute byte offset in the document
    MadLineIterator lit;  // line containing the current position
    wxFileOffset linepos; // byte offset within that line

    ByteIterator() {}
    ByteIterator(const ByteIterator &it)
    {
        this->operator =( it );
    }

    ByteIterator(wxFileOffset pos0, const MadLineIterator &lit0, wxFileOffset linepos0)
        :pos(pos0), lit(lit0), linepos(linepos0)
    {
        // Normalize an end-of-line position to the start of the next line.
        if(linepos == lit->m_Size && pos<s_lines->GetSize())
        {
            ++lit;
            linepos = 0;
        }
    }

    ByteIterator & operator=(const ByteIterator & it)
    {
        pos=it.pos;
        lit=it.lit;
        linepos=it.linepos;
        return *this;
    }

    const value_type operator*()
    {
        wxASSERT(linepos < lit->m_Size);
        return lit->Get(linepos);
    }

    /***
    ucs4_t *operator->() const
    {
        return _ws_ + pos;
    }
    ***/

    // pre-increment operator
    ByteIterator & operator++()
    {
        ++pos;
        ++linepos;
        if(linepos == lit->m_Size)
        {
            if(pos==s_endpos) return *this; // end
            ++lit;
            linepos = 0;
        }
        return *this;
    }

    /***
    // post-increment operator
    ByteIterator operator++(int)
    {
        ByteIterator tmp = *this;
        ++*this;
        return tmp;
    }
    ***/

    //***
    // pre-decrement operator
    ByteIterator & operator--()
    {
        wxASSERT(pos>0);
        --pos;
        if(linepos == 0)
        {
            --lit;
            linepos = lit->m_Size;
        }
        --linepos;
        return *this;
    }
    //***/

    /***
    // post-decrement operator
    ByteIterator operator--(int)
    {
        ByteIterator tmp = *this;
        --*this;
        return tmp;
    }
    ***/

    // Any two positions at or past s_endpos compare equal, so the search
    // terminates cleanly at the range end.
    bool operator==(const ByteIterator & it) const
    {
        if(pos == it.pos) return true;
        return (pos>=s_endpos && it.pos>=s_endpos);
    }
    bool operator!=(const ByteIterator & it) const
    {
        return ! (this->operator==(it)) ;
    }
};

// Static search context shared by all ByteIterators.
MadLines *ByteIterator::s_lines=NULL;
wxFileOffset ByteIterator::s_endpos=0;
// Search the byte range [beginpos, endpos) for the raw byte pattern `hex`
// of length `count`. On a hit the in/out range is narrowed to the match
// and SR_YES is returned; otherwise SR_NO. Hex search is always exact
// (case-sensitive), see the wxByte specialization of xtolower.
MadSearchResult MadEdit::SearchHex(/*IN_OUT*/MadCaretPos &beginpos, /*IN_OUT*/MadCaretPos &endpos,
        const wxByte *hex, size_t count)
{
    if(beginpos.pos>=endpos.pos || count==0)
        return SR_NO;

    ByteIterator::Init(m_Lines, endpos.pos);
    ByteIterator start(beginpos.pos, beginpos.iter, beginpos.linepos);
    ByteIterator end(endpos.pos, endpos.iter, endpos.linepos);

    // Reuse the shift table across calls when the pattern is unchanged.
    static JumpTable_Hex jtab;
    jtab.Build(hex, count);

    if(::Search(start, end, hex, count, jtab, true))
    {
        beginpos.pos = start.pos;
        beginpos.iter= start.lit;
        beginpos.linepos=start.linepos;
        endpos.pos = end.pos;
        endpos.iter= end.lit;
        endpos.linepos=end.linepos;
        return SR_YES;
    }

    return SR_NO;
}
|
<gh_stars>0
package pulse.problem.schemes.solvers;
import pulse.problem.statements.Problem;
/**
 * A solver interface which provides the capability to use the {@code solve}
 * method on a {@code Problem}. This interface is implemented by the subclasses
 * of {@code DifferenceScheme}.
 *
 * @param <T> an instance of Problem
 */
public interface Solver<T extends Problem> {

    /**
     * Calculates the solution of {@code problem} and stores it in the
     * respective {@code HeatingCurve}.
     *
     * @param problem - an accepted instance of {@code T}
     * @throws SolverException if the solution cannot be calculated
     */
    public void solve(T problem) throws SolverException;

}
|
<reponame>wfu8/lightwave
/*
* Copyright © 2012-2016 VMware, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an “AS IS” BASIS, without
* warranties or conditions of any kind, EITHER EXPRESS OR IMPLIED. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
'use strict';

// certUtil: shared helpers for parsing X.509 certificates and JWTs in the
// lightwave UI. Relies on the jsrsasign globals X509 and b64utos being loaded.
var module = angular.module('lightwave.ui.shared.utils');
module.factory('certUtil', certUtil);

certUtil.$inject = ['dateUtil', 'popupUtil'];

function certUtil(dateUtil, popupUtil) {

    var util = {};
    util.getCertificateDetails = getCertificateDetails;
    util.extractBase64Encoded = extractBase64Encoded;
    util.decodeJWT = decodeJWT;
    util.viewCertificate = viewCertificate;
    util.isValidBase64 = isValidBase64;
    return util;

    // True when the given timestamp lies in the past. (The parameter is
    // named `before` but callers pass the certificate's notAfter value.)
    function checkexpired(before){
        var beforeDate = new Date(before);
        var currentDate = new Date();
        return beforeDate.getTime() < currentDate.getTime();
    }

    // Parse a PEM certificate and return display-ready issuer/subject/
    // validity fields. On parse failure the fields stay empty strings.
    function getCertificateDetails(pem) {
        var issuer='', subject= '', after = '', expired = '', before = '';
        try {
            var c = new X509();
            c.readCertPEM(pem);
            issuer = c.getIssuerString();
            // jsrsasign renders domainComponent OIDs as 'undefined'.
            issuer = issuer.replace('undefined', 'DC');
            issuer = reverse(issuer, '/');
            subject = c.getSubjectString();
            subject = subject.replace('undefined', 'DC');
            subject = reverse(subject, '/');
            after = c.getNotAfter();
            expired = checkexpired(after);
            after = dateUtil.toDate(after);
            before = c.getNotBefore();
            before = dateUtil.toDate(before);
        }
        catch(e)
        {
            // Swallow parse errors; fall through with empty fields.
        }
        return {
            "issuer" : issuer,
            "subject" : subject ,
            "after" : after,
            "before" : before,
            "expired": expired
        };
    }

    // Reverse the delimiter-separated components of `text`, joining with '/'.
    // NOTE(review): the final substring trims TWO characters, which is only
    // correct for inputs with a leading delimiter (as getIssuerString
    // produces, e.g. "/CN=x/DC=y" -> empty first component -> trailing "//").
    // Verify before reusing with other inputs.
    function reverse(text, delimiter)
    {
        var splitText = text.split(delimiter);
        var reversedText = '';
        for(var i=0;i<splitText.length;i++){
            reversedText = reversedText + splitText[splitText.length-1-i] + '/';
        }
        if(splitText.length > 1){
            reversedText = reversedText.substring(0, reversedText.length - 2);
        }
        return reversedText;
    }

    // Strip PEM armor lines from a private key, leaving the base64 payload.
    function extractBase64Encoded(pKey){
        var beginRSA = "-----BEGIN RSA PRIVATE KEY-----";
        var endRSA = "-----END RSA PRIVATE KEY-----";
        var begin = "-----BEGIN PRIVATE KEY-----";
        var end = "-----END PRIVATE KEY-----";
        pKey = pKey.replace(begin, '');
        pKey = pKey.replace(end, '');
        pKey = pKey.replace(beginRSA, '');
        pKey = pKey.replace(endRSA, '');
        return pKey;
    }

    // Decode a JWT (without signature verification).
    // NOTE(review): components[1] is the JWT *payload* (claims), not the
    // header, yet the result labels it "header" — confirm which part the
    // callers actually expect before changing.
    function decodeJWT(jwt) {
        var sJWS = jwt;
        var components = sJWS.split(".");
        var uHeader = b64utos(components[1]).toString();
        var decodedJWT =
        {
            header: JSON.parse(uHeader)
            //claims: JSON.parse(uClaim)
        };
        return decodedJWT;
    }

    // Show certificate metadata for `encoded` in a popup dialog.
    function viewCertificate(scope, encoded, template, controller){
        scope.metadata = getCertificateDetails(encoded);
        popupUtil.open(scope, template, controller);
    }

    // True when `str` is well-formed base64 (correct alphabet and padding).
    function isValidBase64(str){
        var base64regex = /^([0-9a-zA-Z+/]{4})*(([0-9a-zA-Z+/]{2}==)|([0-9a-zA-Z+/]{3}=))?$/;
        return base64regex.test(str);
    }
}
#!/bin/bash
# Start the Puppeteer container in the background with the test code
# bind-mounted read-write and a custom seccomp profile for Chrome.
# docker requires an absolute path on the host side of a bind mount
# (a relative "./test-code" is rejected by older docker clients), so
# resolve both paths against the current working directory.
docker run -d -it -v "$PWD/test-code:/usr/src/app:rw" \
  --security-opt="seccomp=$PWD/chrome.json" \
  --user=pptruser \
  --name puppeteer \
  puppeteer-image
<filename>util/util_test.go
package util
import (
"fmt"
"path"
"runtime"
"testing"
)
// 获取当前平台 操作系统 win or linux
// TestAll smoke-tests the platform helpers; inspect the console output for
// anomalies (no assertions are made).
func TestAll(t *testing.T) {
	// Current operating system (win or linux).
	currentOS := Os()
	fmt.Println(currentOS)
	// Platform path separator.
	separator := DirsctorySeparator()
	fmt.Println(separator)
}
// 判断当前 文件/文件夹是否存在
// TestFileExists verifies FileExists/IsFile/IsDir against this test source
// file and its directory, both of which are guaranteed to exist at test time.
func TestFileExists(t *testing.T) {
	// Use this source file itself as a fixture that is known to exist.
	_, file, _, _ := runtime.Caller(0)
	if !FileExists(file) {
		t.Fatalf("FileExists(%q) = false, want true", file)
	}
	dir := path.Dir(file)
	if !FileExists(dir) {
		t.Fatalf("FileExists(%q) = false, want true", dir)
	}
	if !IsFile(file) {
		t.Fatalf("IsFile(%q) = false, want true", file)
	}
	if !IsDir(dir) {
		t.Fatalf("IsDir(%q) = false, want true", dir)
	}
}
// TestPathInfo checks that PathInfo splits a path into dirname, basename,
// extension and filename components (PHP pathinfo-style), table-driven.
func TestPathInfo(t *testing.T) {
	cases := []struct {
		path string
		want map[string]string
	}{
		{
			path: "/vagrant_data/tp6.0/config/app.php",
			want: map[string]string{
				"dirname":   "/vagrant_data/tp6.0/config",
				"basename":  "app.php",
				"extension": "php",
				"filename":  "app",
			},
		},
		{
			path: "aa/bb/cc.go",
			want: map[string]string{
				"dirname":   "aa/bb",
				"basename":  "cc.go",
				"extension": "go",
				"filename":  "cc",
			},
		},
	}
	for _, tc := range cases {
		got := PathInfo(tc.path)
		for key, want := range tc.want {
			if got[key] != want {
				t.Fatalf("PathInfo(%q)[%q] = %q, want %q", tc.path, key, got[key], want)
			}
		}
	}
}
|
// run via sbt:
// > sbt module2/run
// Entry point for the module2 sbt module; prints a greeting using the name
// provided by the sibling Hello object.
object Main extends App {
  val name = Hello.Name
  // NOTE(review): the interpolated string contains an escaped dollar
  // ("\$name"), so this prints the literal text "Hello $name" rather than
  // the value of `name`. This looks like a templating artifact — confirm
  // whether the escape is intentional before changing it.
  println(s"Hello \$name")
}
|
package it.isislab.swiftlang.abm.mason.zombie;
import java.awt.Color;
import javax.swing.JFrame;
import sim.display.Controller;
import sim.display.Display2D;
import sim.display.GUIState;
import sim.engine.SimState;
import sim.portrayal.SimplePortrayal2D;
import sim.portrayal.continuous.ContinuousPortrayal2D;
import sim.portrayal.simple.AdjustablePortrayal2D;
import sim.portrayal.simple.MovablePortrayal2D;
import sim.portrayal.simple.OrientedPortrayal2D;
import sim.portrayal.simple.TrailedPortrayal2D;
/**
 * GUI front-end for the JZombie simulation. Wires a MASON {@code Display2D}
 * to the underlying {@code JZombie} model and manages the display window's
 * lifecycle (start/load/quit).
 */
public class JZombieWithUI extends GUIState
{
    private static final long serialVersionUID = 1;

    public Display2D display;
    public JFrame displayFrame;

    // Portrayal that draws the particles stored in the model's continuous field.
    ContinuousPortrayal2D particlesPortrayal = new ContinuousPortrayal2D();

    public Object getSimulationInspectedObject() { return state; } // non-volatile

    public static void main(String[] args)
    {
        new JZombieWithUI().createController();
    }

    // Default constructor seeds the model with the current wall-clock time.
    public JZombieWithUI() { super(new JZombie(System.currentTimeMillis())); }

    public JZombieWithUI(SimState state) { super(state); }

    public static String getName() { return "JZombie"; }

    // We comment this out of the example, which will cause MASON to look
    // for a file called "index.html" in the same directory -- which we've
    // included for consistency with the other applications in the demo
    // apps directory.
    /*
    public static Object getInfoByClass(Class theClass)
    {
        return "<H2>Tutorial3</H2><p>An odd little particle-interaction example.";
    }
    */

    // Dispose of the window and drop references so they can be collected.
    public void quit()
    {
        super.quit();
        if (displayFrame!=null) displayFrame.dispose();
        displayFrame = null; // let gc
        display = null; // let gc
    }

    public void start()
    {
        super.start();
        // set up our portrayals
        setupPortrayals();
    }

    public void load(SimState state)
    {
        super.load(state);
        // we now have new grids. Set up the portrayals to reflect that
        setupPortrayals();
    }

    // This is called by start() and by load() because they both had this code
    // so I didn't have to type it twice :-)
    public void setupPortrayals()
    {
        particlesPortrayal.setField(((JZombie)state).particles);
        // reschedule the displayer
        display.reset();
        // redraw the display
        display.repaint();
    }

    public void init(Controller c)
    {
        super.init(c);
        // Make the Display2D. We'll have it display stuff later.
        display = new Display2D(600,600,this); // at 400x400, we've got 4x4 per array position
        displayFrame = display.createFrame();
        c.registerFrame(displayFrame); // register the frame so it appears in the "Display" list
        displayFrame.setVisible(true);
        // specify the backdrop color -- what gets painted behind the displays
        display.setBackdrop(Color.black);
        // attach the portrayals
        display.attach(particlesPortrayal,"Particles");
    }
}
|
import networkx as nx
import numpy as np
import sys
import math
from sklearn.cluster import SpectralClustering
#from sklearn.metrics import adjusted_rand_score
#from sklearn.metrics import pairwise_distances
# Range of K:
MIN_CLUSTERS = 2
MAX_CLUSTERS = 5
def main():
    """Read an edge-list graph and run spectral clustering for a range of k.

    Usage: script <edge_list_file> <summary_outfile> <cluster_file_prefix>

    The edge list's first line is treated as a header and skipped. For each
    k in [MIN_CLUSTERS, MAX_CLUSTERS] a per-node label file is written and a
    percentage-per-cluster summary line is appended to the summary file.
    """
    filename = sys.argv[1]
    outfilename = sys.argv[2]
    clusternames = sys.argv[3]
    print("reading the graph...")
    graph = nx.Graph()
    # 'with' guarantees the file is closed even if parsing raises.
    with open(filename, "r") as infile:
        for counter, line in enumerate(infile):
            if counter == 0:
                continue  # skip header line
            # Split once instead of twice as before.
            parts = line.split(None, 2)
            s = int(parts[0])
            t = int(parts[1])
            # add_edge creates missing nodes implicitly; on an undirected
            # nx.Graph the reverse add_edge(t, s) was a no-op and is dropped.
            graph.add_edge(s, t)
    print(graph.number_of_nodes())
    n = graph.number_of_nodes()
    node_list = list(graph.nodes())
    # Sparse adjacency matrix used as a precomputed affinity matrix.
    adj_matrix = nx.to_scipy_sparse_matrix(graph, nodelist=node_list, dtype=np.int8)
    graph.clear()  # release the graph; only the matrix is needed now
    with open(outfilename, "w") as outfile:
        for k in range(MIN_CLUSTERS, MAX_CLUSTERS + 1):
            outfile.write("for " + str(k) + " clusters:\n")
            print("spectral # of clusters: " + str(k))
            labels = SpectralClustering(
                affinity='precomputed', assign_labels="discretize",
                random_state=0, n_clusters=k).fit_predict(adj_matrix)
            # NOTE(review): label rows are indexed 0..n-1; assumes node ids
            # are a contiguous 0-based range -- confirm for the input data.
            with open(clusternames[:-4] + "_" + str(k) + ".txt", "w") as cfile:
                for i in range(n):
                    cfile.write(str(i) + "\t" + str(labels[i]) + "\n")
            # Histogram of cluster sizes, reported as integer percentages.
            buckets = [0] * k
            for label in labels:
                buckets[label] += 1
            for count in buckets:
                outfile.write("%" + str(int(round(count * 100 / n))) + " ")
            outfile.write("\n")
    return
if __name__ == "__main__":
main()
|
#!/bin/bash
# Forward an optional argument to add_more.py, then rebuild the site.
if [ -n "$1" ]; then
    ADD_NUM=$1
else
    ADD_NUM=""
fi
# Fix: ${ADD_NUM:+"$ADD_NUM"} passes the argument *quoted* when set (the
# previous unquoted $ADD_NUM word-split values containing spaces) and
# still passes no argument at all when it is empty.
./add_more.py ${ADD_NUM:+"$ADD_NUM"}
jekyll build -s . -d /var/html/blog
#jekyll build --lsi -s . -d /var/html/blog
|
#!/bin/bash
# pull all packages and rebuild each source distribution
for package in ipkiss klive
do
    # Fix: abort if the checkout is missing -- previously a failed cd left
    # git pull / setup.py running in whatever directory we were already in.
    cd ~/"$package" || { echo "error: ~/$package not found" >&2; exit 1; }
    git pull
    python setup.py sdist
done
|
<filename>src/templates/blogTag.js
import React from 'react';
import { Link, graphql } from 'gatsby';
import Layout from 'layout/layout';
import SEO from 'components/seo';
import { Main, Content } from 'pages/index';
import TagsCount from '../components/tagsCount';
const Tags = ({ pageContext, data }) => {
const { tag, tags } = pageContext;
const { edges, totalCount } = data.allMarkdownRemark;
const tagHeader = `${totalCount} post${
totalCount === 1 ? '' : 's'
} tagged with "${tag}"`;
return (
<Layout>
<SEO title="Tag" />
<Main>
<Content>
<TagsCount tags={tags}></TagsCount>
<h1>{tagHeader}</h1>
<ul>
{edges.map(({ node }) => {
const { title, date } = node.frontmatter;
const { slug } = node.fields;
return (
<li key={slug}>
<Link to={slug}>
{title} ({date})
</Link>
</li>
);
})}
</ul>
</Content>
</Main>
</Layout>
);
};
export default Tags;
// Build-time page query: up to 2000 markdown posts tagged with $tag,
// newest first; totalCount feeds the page heading.
export const query = graphql`
query ($tag: String) {
allMarkdownRemark(
limit: 2000
sort: { fields: [frontmatter___date], order: DESC }
filter: { frontmatter: { tags: { in: [$tag] } } }
) {
totalCount
edges {
node {
fields {
slug
}
frontmatter {
title
date(formatString: "MMMM DD, YYYY")
}
}
}
}
}
`;
|
<gh_stars>0
package com.twitter.inject.thrift.modules
import com.github.nscala_time.time
import com.google.inject.Provides
import com.twitter.finagle._
import com.twitter.finagle.factory.TimeoutFactory
import com.twitter.finagle.param.Stats
import com.twitter.finagle.service.TimeoutFilter
import com.twitter.finagle.stats.StatsReceiver
import com.twitter.finagle.thrift.ClientId
import com.twitter.inject.TwitterModule
import com.twitter.util.Duration
import javax.inject.Singleton
import scala.reflect.ClassTag
@deprecated("Use the com.twitter.inject.thrift.modules.FilteredThriftClientModule", "2016-06-23")
abstract class ThriftClientModule[T: ClassTag]
extends TwitterModule
with time.Implicits {
/**
* Name of client for use in metrics
*/
val label: String
/**
* Destination of client
*/
val dest: String
/**
* Enable thrift mux for this connection.
*
* Note: Both server and client must have mux enabled otherwise
* a nondescript ChannelClosedException will be seen.
*
* What is ThriftMux?
* http://twitter.github.io/finagle/guide/FAQ.html?highlight=thriftmux#what-is-thriftmux
*/
def mux: Boolean = true
// Per-request and connection-establishment timeouts; Duration.Top means
// "no timeout" unless overridden by a subclass.
def requestTimeout: Duration = Duration.Top
def connectTimeout: Duration = Duration.Top
/**
 * Builds the injectable Thrift client of type T for `dest`, labelled for
 * stats under "clnt". The mux / non-mux branches apply identical
 * timeout, stats and clientId configuration; only the protocol differs.
 */
@Singleton
@Provides
def providesClient(clientId: ClientId, statsReceiver: StatsReceiver): T = {
val labelAndDest = s"$label=$dest"
if (mux) {
ThriftMux.client.
configured(TimeoutFilter.Param(requestTimeout)).
configured(TimeoutFactory.Param(connectTimeout)).
configured(Stats(statsReceiver.scope("clnt"))).
withClientId(clientId).
newIface[T](labelAndDest)
}
else {
Thrift.client.
configured(TimeoutFilter.Param(requestTimeout)).
configured(TimeoutFactory.Param(connectTimeout)).
configured(Stats(statsReceiver.scope("clnt"))).
withClientId(clientId).
newIface[T](labelAndDest)
}
}
}
|
import React from 'react'
// Fix: the local import name was misspelled "PropTapes"; it worked only
// because the typo was repeated below, but it defeats grep and tooling.
import PropTypes from 'prop-types'
import cx from 'classnames'

// Shows errorMsg when present, otherwise hintMsg; the span is classed
// `error` / `hint` accordingly so styles can differ.
const InfoMsg = ({ hintMsg, errorMsg }) => (
  <span className={cx({ hint: !!hintMsg, error: !!errorMsg })}>
    {errorMsg || hintMsg}
  </span>
)

InfoMsg.propTypes = {
  hintMsg: PropTypes.string,
  errorMsg: PropTypes.string,
}

InfoMsg.defaultProps = {
  hintMsg: null,
  errorMsg: null,
}

export default InfoMsg
|
import { useCallback, useMemo, useState } from "react";
import { throttle } from "./throttle";
// A stored value plus a readiness flag; `ready` is false only for the
// shared PLACEHOLDER returned for keys that were never set.
export type Entry<T> = {
ready: boolean;
data: T;
};
export type EntryKey = string;
type EntryIndex<T> = Record<EntryKey, Entry<T>>;
// Prototype-less empty object so key lookups never hit Object.prototype.
const EMPTY = Object.create(null);
// Minimum milliseconds between flushes of buffered writes into state.
const FLUSH_THROTTLE = 150;
const PLACEHOLDER: Entry<null> = {
ready: false,
data: null,
};
// React hook: key/value store whose writes are batched and merged into
// state at most every FLUSH_THROTTLE ms, so bursts of set() calls cause
// a single re-render instead of one per write.
export function useEntries<T>() {
const [index, setIndex] = useState<EntryIndex<T>>(EMPTY);
const set = useMemo(() => {
// Pending writes accumulate here between flushes, then are merged
// into the index and the buffer is reset.
let buffer: EntryIndex<T> = {};
const flush = throttle(() => {
setIndex((entries) => {
return { ...entries, ...buffer };
});
buffer = {};
}, FLUSH_THROTTLE);
// NOTE(review): assumes ./throttle fires on the trailing edge so the
// last buffered write always lands -- confirm against its implementation.
return function set(id: EntryKey, data: T) {
buffer[id] = {
ready: true,
data,
};
flush();
};
}, []);
// Stable-ish getter; unknown keys yield the not-ready PLACEHOLDER.
const get = useCallback(
(id: EntryKey): Entry<T | null> => {
return index[id] || PLACEHOLDER;
},
[index],
);
return { get, set, index };
}
|
def CalculateSum(a, b):
    """Return the sum of all integers from ``a`` to ``b`` inclusive.

    Uses the closed-form arithmetic-series formula instead of materializing
    ``range(a, b + 1)``, so it runs in O(1) time and memory.

    :param a: first integer of the range (inclusive)
    :param b: last integer of the range (inclusive)
    :return: sum of a..b; 0 when b < a (matching ``sum(range(a, b + 1))``)
    """
    if b < a:
        # Empty range: sum(range(a, b + 1)) == 0.
        return 0
    return (a + b) * (b - a + 1) // 2
// Fixed sample data to sum.
int arr[] = {2, 4, 6, 8};
// Running total of the array elements.
int sum = 0;
// NOTE(review): the bound 4 is hard-coded; it matches the array length
// here (sizeof(arr)/sizeof(arr[0]) == 4) but must be kept in sync.
for (int i=0; i<4; i++)
{
sum += arr[i];
}
// Prints: "The sum of elements in the array is: 20"
std::cout << "The sum of elements in the array is: " << sum << std::endl;
# !/bin/python
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
"""State Machine Router module"""
import os.path
import botocore
import boto3
from state_machine.utils.logger import Logger
from state_machine.state_machine_handler import (
TransitGateway,
VPC,
DynamoDb,
ResourceAccessManager,
ApprovalNotification,
GeneralFunctions,
)
from state_machine.lib.exceptions import (
ResourceNotFoundException,
AttachmentCreationInProgressException,
AlreadyConfiguredException,
ResourceBusyException,
)
# initialise logger
LOG_LEVEL = os.environ.get("LOG_LEVEL", "INFO")
logger = Logger(loglevel=LOG_LEVEL)
ERROR_MESSAGE = "Function name does not match any function in the handler file."
ROUTER_FUNCTION_NAME = "Router Function Name: {}"
logger.debug("boto3 version:" + boto3.__version__)
logger.debug("botocore version:" + botocore.__version__)
def transit_gateway(event, function_name):
    """
    Method to handle event for executing transit gateway functions.

    Dispatches ``function_name`` onto a :class:`TransitGateway` instance
    through an explicit allow-list, replacing a 12-branch if/elif chain
    that repeated ``response = tgw.<name>()`` for every method.

    :param event: event payload forwarded to the TransitGateway handler
    :param function_name: name of the TransitGateway method to invoke
    :return: the method's response, or ``{"Message": ERROR_MESSAGE}`` for
        unknown function names
    """
    logger.info(ROUTER_FUNCTION_NAME.format(function_name))
    tgw = TransitGateway(event)
    # Explicit allow-list: never getattr() an arbitrary caller-supplied name.
    allowed = {
        "describe_transit_gateway_vpc_attachments",
        "tgw_attachment_crud_operations",
        "describe_transit_gateway_route_tables",
        "disassociate_transit_gateway_route_table",
        "associate_transit_gateway_route_table",
        "get_transit_gateway_attachment_propagations",
        "enable_transit_gateway_route_table_propagation",
        "disable_transit_gateway_route_table_propagation",
        "get_transit_gateway_vpc_attachment_state",
        "tag_transit_gateway_attachment",
        "subnet_deletion_event",
        "update_tags_if_failed",
    }
    if function_name not in allowed:
        logger.info(ERROR_MESSAGE)
        return {"Message": ERROR_MESSAGE}
    response = getattr(tgw, function_name)()
    logger.info(response)
    return response
def vpc(event, function_name):
    """Route ``function_name`` to the matching VPC handler method.

    :param event: event payload forwarded to the VPC handler
    :param function_name: handler method to invoke
    :return: the handler's response, or an error-message dict
    """
    logger.info(ROUTER_FUNCTION_NAME.format(function_name))
    # Renamed local (was `vpc`) so it no longer shadows this function.
    handler = VPC(event)
    dispatch = {
        "describe_resources": handler.describe_resources,
        "default_route_crud_operations": handler.default_route_crud_operations,
    }
    method = dispatch.get(function_name)
    if method is None:
        logger.info(ERROR_MESSAGE)
        return {"Message": ERROR_MESSAGE}
    response = method()
    logger.info(response)
    return response
def ddb(event, function_name):
    """Route ``function_name`` to the DynamoDb handler.

    Only ``"put_item"`` is supported; anything else yields the shared
    error-message dict.
    """
    logger.info(ROUTER_FUNCTION_NAME.format(function_name))
    dynamo = DynamoDb(event)
    # Guard clause: reject unknown function names up front.
    if function_name != "put_item":
        logger.info(ERROR_MESSAGE)
        return {"Message": ERROR_MESSAGE}
    response = dynamo.put_item()
    logger.info(response)
    return response
def ram(event, function_name):
    """Route ``function_name`` to the Resource Access Manager handler.

    Only ``"accept_resource_share_invitation"`` is supported.
    """
    logger.info(ROUTER_FUNCTION_NAME.format(function_name))
    manager = ResourceAccessManager(event)
    # Guard clause: reject unknown function names up front.
    if function_name != "accept_resource_share_invitation":
        logger.info(ERROR_MESSAGE)
        return {"Message": ERROR_MESSAGE}
    response = manager.accept_resource_share_invitation()
    logger.info(response)
    return response
def sns(event, function_name):
    """Route ``function_name`` to the SNS approval-notification handler.

    Only ``"notify"`` is supported.
    """
    logger.info(ROUTER_FUNCTION_NAME.format(function_name))
    # Renamed local (was `sns`) so it no longer shadows this function.
    notifier = ApprovalNotification(event)
    if function_name != "notify":
        logger.info(ERROR_MESSAGE)
        return {"Message": ERROR_MESSAGE}
    response = notifier.notify()
    logger.info(response)
    return response
def general_functions(event, function_name):
    """Route ``function_name`` to the GeneralFunctions handler.

    Supports ``"process_failure"`` and ``"log_in_cloudwatch"``.
    """
    logger.info(ROUTER_FUNCTION_NAME.format(function_name))
    handler = GeneralFunctions(event)
    dispatch = {
        "process_failure": handler.process_failure,
        "log_in_cloudwatch": handler.log_in_cloudwatch,
    }
    method = dispatch.get(function_name)
    if method is None:
        logger.info(ERROR_MESSAGE)
        return {"Message": ERROR_MESSAGE}
    response = method()
    logger.info(response)
    return response
def lambda_handler(event, context):
    """Lambda entry point: route a task to the matching handler class.

    Expects ``event["params"]`` to carry ``ClassName`` and ``FunctionName``;
    the nested ``event["event"]`` payload is what the handler classes see.

    :param event: lambda invocation payload
    :param context: lambda context object (unused)
    :return: the handler's response, or a ``{"Message": ...}`` error dict
    :raises: domain exceptions pass through untouched so Step Functions
        retry/catch rules can react to them; everything else is logged
        and re-raised
    """
    # Class-name -> router function; replaces the previous if/elif chain.
    routers = {
        "TransitGateway": transit_gateway,
        "VPC": vpc,
        "DynamoDb": ddb,
        "ResourceAccessManager": ram,
        "ApprovalNotification": sns,
        "GeneralFunctions": general_functions,
    }
    try:
        logger.info("Lambda Handler Event")
        logger.info(event)
        # Execute custom resource handlers
        class_name = event.get("params", {}).get("ClassName")
        function_name = event.get("params", {}).get("FunctionName")
        event = event.get("event", {})
        if class_name is None:
            message = "Class name not found in input."
            logger.info(message)
            return {"Message": message}
        router = routers.get(class_name)
        if router is None:
            logger.info(ERROR_MESSAGE)
            return {"Message": ERROR_MESSAGE}
        return router(event, function_name)
    except (
        ResourceNotFoundException,
        AttachmentCreationInProgressException,
        AlreadyConfiguredException,
        ResourceBusyException,
    ):
        # Fix: bare `raise` instead of `raise e` re-raises the active
        # exception with its original traceback intact.
        raise
    except Exception:
        # Fix: logger.exception already records the active exception and
        # stack trace; the second logger.exception(error) call duplicated it.
        logger.exception("Error while executing lambda handler")
        raise
|
<filename>test/memory_test.rb
require File.expand_path('../helper', __FILE__)
module Horcrux
# Unit tests for the Horcrux::Memory adapter: exercises the key/value
# contract (get/set/delete/fetch/key?) and the *_all batch operations.
class MemoryTest < Test::Unit::TestCase
def setup
# Each test gets a fresh, empty in-memory store.
@adapter = Memory.new({})
end
# set stores a value and returns true; get returns it back.
def test_reads_set_values
assert_nil @adapter.get('a')
assert_equal true, @adapter.set('a', '1')
assert_equal '1', @adapter.get('a')
end
# delete returns true when the key existed, false on a second delete.
def test_deletes_values
assert_equal true, @adapter.set('a', '1')
assert_equal '1', @adapter.get('a')
assert_equal true, @adapter.delete('a')
assert_equal false, @adapter.delete('a')
end
# fetch stores the block's value on a miss; later fetches keep the
# original value and ignore the new block.
def test_fetch_sets_fallback
assert_nil @adapter.get 'a'
assert_equal '1', @adapter.fetch('a') { '1' }
assert_equal '1', @adapter.get('a')
assert_equal '1', @adapter.fetch('a') { '2' }
end
def test_checks_for_existence_of_key
assert !@adapter.key?('a')
assert_equal true, @adapter.set('a', '1')
assert @adapter.key?('a')
end
# get_all preserves argument order and yields nil for missing keys.
def test_multi_get
@adapter.set 'a', '1'
@adapter.set 'c', '3'
assert_equal ['1', nil, '3'], @adapter.get_all('a', 'b', 'c')
end
def test_multi_set
assert_nil @adapter.get('a')
assert_nil @adapter.get('b')
@adapter.set_all 'a' => '1', 'b' => '2'
assert_equal '1', @adapter.get('a')
assert_equal '2', @adapter.get('b')
end
# delete_all reports one boolean per key, false for keys never set.
def test_multi_delete
@adapter.set_all 'a' => '1', 'b' => '2'
assert_equal '1', @adapter.get('a')
assert_equal '2', @adapter.get('b')
assert_equal [true, true, false], @adapter.delete_all('a', 'b', 'c')
assert_nil @adapter.get('a')
assert_nil @adapter.get('b')
end
end
end
|
#!/bin/bash -e
# Run the test suite for every tool listed in $TOOL inside the ctftools
# docker image, with per-tool wall-clock accounting.
# EXPECTFAIL=1 inverts the reported status (used for negative tests).
export EXPECTFAIL=${EXPECTFAIL:-0}

# flipstatus STATUS -- echo STATUS, inverted when EXPECTFAIL=1.
function flipstatus() {
    if [ "$EXPECTFAIL" -eq 0 ];
    then
        echo "$1"
    else
        case $1 in
        FAILED)
            echo SUCCEEDED
            ;;
        SUCCEEDED)
            echo FAILED
            ;;
        *)
            echo "$1"
            ;;
        esac
    fi
}

starttime=$SECONDS
failed=""
# Fix: compute totalcount up front. It was previously set only inside the
# failure branch, so the final "DONE: $totalcount tools ..." messages
# printed an empty value whenever every tool passed.
totalcount=$(echo "$TOOL" | wc -w)
for t in $TOOL;
do
    set +e
    toolstarttime=$SECONDS
    echo "[-] TOOL $t TEST STARTED: $((SECONDS - starttime)) seconds since start of script."
    if ! docker run -e EXPECTFAIL="$EXPECTFAIL" -e TOOL="$t" --rm ctftools bash -ic 'manage-tools -s -f -v test $TOOL';
    then
        failed="$failed$t "
        echo "[ACCOUNTING]=====[ $DISTRO $t $(flipstatus FAILED) $((SECONDS - toolstarttime)) ]"
    else
        echo "[ACCOUNTING]=====[ $DISTRO $t $(flipstatus SUCCEEDED) $((SECONDS - toolstarttime)) ]"
    fi
    echo "[-] TOOL $t TEST ENDED: $((SECONDS - toolstarttime)) seconds, $((SECONDS - starttime)) seconds since start of script."
    set -e
done
if [ "$failed" != "" ];
then
    echo "==================================================="
    failcount=$(echo "$failed" | wc -w)
    if [ "$EXPECTFAIL" -eq "1" ];
    then
        echo "ERROR: $failcount/$totalcount tools succeeded while they were expected to fail: $failed"
    else
        echo "ERROR: $failcount/$totalcount tools failed while they should have succeeded: $failed"
    fi
    echo "==================================================="
    exit 1
fi
if [ "$EXPECTFAIL" -eq "1" ];
then
    echo "DONE: $totalcount tools failed as expected."
else
    echo "DONE: $totalcount tools succeeded as expected."
fi
exit 0
|
<reponame>thr-consulting/thr-addons
import debug from 'debug';
import {Field} from 'formik';
import React, {useRef, useState} from 'react';
import {Form, Container, Button, ButtonGroup, Segment} from 'semantic-ui-react';
import {object, string} from 'yup';
import {TForm} from './TForm';
import type {TFormProps} from './types';
import {useTForm} from './useTForm';
const d = debug('thx.controls.form.TForm.tform.stories');
export default {title: 'Form / TForm'};
// Yup Validation
// Schema shared by every story below: `text` and `nested.text` are required;
// the two input fields are optional strings.
const formValidation = object().shape({
text: string().required('Text is required'),
nested: object().shape({
text: string().required('Nested text is required'),
}),
plainInput: string(),
customInput: string(),
});
// Shape of the form values used by all stories.
interface FormType {
text: string;
nested: {
text: string;
};
plainInput: string;
}
// Minimal shape of an apollo-client error as consumed by TForm's `error` prop.
interface ApolloError {
message?: string;
graphQLErrors?: {
message?: string;
}[];
}
// Index 0 = no error; 1 and 2 are sample errors selectable from the footer
// buttons (2 also carries nested graphQLErrors).
const sampleGraphqlError: (ApolloError | undefined)[] = [
undefined,
{message: 'Sample Graphql Error 1'},
{
message: 'Sample Graphql Error 2',
graphQLErrors: [{message: 'Graphql Sub Error 2-A'}, {message: 'Graphql Sub Error 2-B'}],
},
];
// Form body rendered inside <TForm>: wires TForm's handlers and state into
// semantic-ui fields for `text`, `nested.text` and `plainInput`.
function MyForm(props: TFormProps<FormType>) {
const {values, handleChange, handleSubmit, handleBlur, hasWarnings, fieldError, formError, submitDisabled, isSubmitting, renderWarnings} = props;
return (
<Form error={formError} warning={hasWarnings} onSubmit={handleSubmit}>
<Form.Field width={6} error={fieldError('text')}>
<label>Enter some text</label>
<input name="text" value={values.text} onChange={handleChange} onBlur={handleBlur} />
</Form.Field>
<Form.Field width={6} error={fieldError('nested')}>
<label>Enter some nested text</label>
{/* dotted name lets formik update the nested value */}
<input name="nested.text" value={values.nested.text} onChange={handleChange} onBlur={handleBlur} />
</Form.Field>
<Form.Field error={fieldError('plainInput')}>
<label>Plain input</label>
<input name="plainInput" value={values.plainInput} onChange={handleChange} onBlur={handleBlur} />
</Form.Field>
<Form.Button disabled={submitDisabled} type="submit" loading={isSubmitting}>
Submit
</Form.Button>
{renderWarnings()}
</Form>
);
}
// Story controls: buttons that pick which simulated GraphQL error the next
// submit produces, plus "Outside Submit" which triggers the form through
// the submit function captured via getSubmitFn.
function MyFootSegment({vars, setVars, setGraphqlError, formSubmitFn}: any) {
return (
<Segment>
<ButtonGroup vertical>
<Button
color={vars.graphqlError === 0 ? 'green' : 'blue'}
onClick={() => {
setVars({graphqlError: 0});
setGraphqlError(undefined);
}}
>
Clear GraphQL Error
</Button>
<Button
color={vars.graphqlError === 1 ? 'green' : 'blue'}
onClick={() => {
setVars({graphqlError: 1});
setGraphqlError(undefined);
}}
>
Simulate GraphQL Error 1
</Button>
<Button
color={vars.graphqlError === 2 ? 'green' : 'blue'}
onClick={() => {
setVars({graphqlError: 2});
setGraphqlError(undefined);
}}
>
Simulate GraphQL Error 2
</Button>
{/* Calls the submit fn captured from TForm/useTForm, if already set. */}
<Button color="orange" onClick={() => formSubmitFn.current && formSubmitFn.current()}>
Outside Submit
</Button>
</ButtonGroup>
</Segment>
);
}
// Story: TForm with a render-prop child. onSubmit resolves after 1s, or
// rejects and surfaces the sample GraphQL error chosen in the footer.
export const Main = () => {
const [vars, setVars] = useState({
graphqlError: 0,
});
const [graphqlError, setGraphqlError] = useState<ApolloError>();
// Holds the submit function TForm hands back, for the "Outside Submit" button.
const formSubmitFn = useRef<() => Promise<void>>();
return (
<Container>
<TForm<FormType>
initialValues={{text: '', nested: {text: ''}, plainInput: ''}}
validationSchema={formValidation}
error={graphqlError}
getSubmitFn={sub => (formSubmitFn.current = sub)}
onSubmit={data =>
new Promise((resolve, reject) => {
setTimeout(() => {
if (vars.graphqlError === 0) {
d('Success!', data);
resolve(data);
} else {
setGraphqlError(sampleGraphqlError[vars.graphqlError]);
reject();
}
}, 1000);
})
}
>
{props => <MyForm {...props} />}
</TForm>
<MyFootSegment vars={vars} formSubmitFn={formSubmitFn} setGraphqlError={setGraphqlError} setVars={setVars} />
</Container>
);
};
// Story: same form as Main, but driven by the useTForm hook instead of the
// <TForm> wrapper component.
export const UseHooks = () => {
const [vars, setVars] = useState({
graphqlError: 0,
});
const [graphqlError, setGraphqlError] = useState<ApolloError>();
const formSubmitFn = useRef<() => Promise<void>>();
const props = useTForm<FormType>({
initialValues: {
text: '',
nested: {
text: '',
},
plainInput: '',
},
validationSchema: formValidation,
error: graphqlError,
getSubmitFn: sub => (formSubmitFn.current = sub),
onSubmit: data =>
new Promise((resolve, reject) => {
setTimeout(() => {
if (vars.graphqlError === 0) {
d('Success!', data);
resolve(data);
} else {
setGraphqlError(sampleGraphqlError[vars.graphqlError]);
reject();
}
}, 1000);
}),
});
return (
<Container>
<MyForm {...props} />
<MyFootSegment vars={vars} formSubmitFn={formSubmitFn} setGraphqlError={setGraphqlError} setVars={setVars} />
</Container>
);
};
// Story: inline render-prop form using formik's <Field> shorthand for the
// single `text` field instead of the full MyForm component.
export const UsingField = () => {
const [vars, setVars] = useState({
graphqlError: 0,
});
const [graphqlError, setGraphqlError] = useState<ApolloError>();
const formSubmitFn = useRef<() => Promise<void>>();
return (
<Container>
<TForm<FormType>
initialValues={{text: '', nested: {text: ''}, plainInput: ''}}
validationSchema={formValidation}
error={graphqlError}
getSubmitFn={sub => (formSubmitFn.current = sub)}
onSubmit={data =>
new Promise((resolve, reject) => {
setTimeout(() => {
if (vars.graphqlError === 0) {
d('Success!', data);
resolve(data);
} else {
setGraphqlError(sampleGraphqlError[vars.graphqlError]);
reject();
}
}, 1000);
})
}
>
{props => {
const {handleSubmit, hasWarnings, formError, submitDisabled, isSubmitting, renderWarnings} = props;
return (
<Form error={formError} warning={hasWarnings} onSubmit={handleSubmit}>
<Field as={Form.Field} name="text" />
<Form.Button disabled={submitDisabled} type="submit" loading={isSubmitting}>
Submit
</Form.Button>
{renderWarnings()}
</Form>
);
}}
</TForm>
<MyFootSegment vars={vars} formSubmitFn={formSubmitFn} setGraphqlError={setGraphqlError} setVars={setVars} />
</Container>
);
};
|
#!/bin/bash
# Integration test for the teca_potential_intensity app: run it on the
# sample data, then diff the output against the reference dataset.
# Fix: `[[ $# < 3 ]]` compares lexicographically (e.g. "10" < "3" is true);
# use the arithmetic operator -lt.
if [[ $# -lt 3 ]]
then
    echo "usage: test_potential_intensity_app.sh [app prefix] " \
        "[data root] [num threads] [mpiexec] [num ranks]"
    # Fix: `exit -1` is out of range; exit statuses are 0-255.
    exit 1
fi
app_prefix=${1}
data_root=${2}
n_threads=${3}
# Optional MPI launcher. `launcher` is intentionally left unquoted below so
# it word-splits into "mpiexec -n N" when set and vanishes when unset.
if [[ $# -eq 5 ]]
then
    mpi_exec=${4}
    test_cores=${5}
    launcher="${mpi_exec} -n ${test_cores}"
fi
set -x
set -e
# run the app
${launcher} ${app_prefix}/teca_potential_intensity \
    --input_regex ${data_root}/tcpypi_sample_data_1980-01-31-10Z'\.nc$' \
    --psl_variable msl --sst_variable sst --air_temperature_variable t \
    --mixing_ratio_variable q --t_axis_variable month --z_axis_variable p \
    --output_file test_potential_intensity_app_%t%.nc \
    --file_layout number_of_steps --steps_per_file 12 --verbose 2
# run the diff
${app_prefix}/teca_cartesian_mesh_diff \
    --reference_dataset ${data_root}/test_potential_intensity_app'.*\.nc' \
    --ref_reader::t_axis_variable month \
    --test_dataset test_potential_intensity_app'.*\.nc' \
    --test_reader::t_axis_variable month \
    --arrays V_max P_min IFL T_o OTL --verbose
# clean up
#rm test_potential_intensity_app*.nc
|
package org.prebid.server.bidder.ix;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.iab.openrtb.request.BidRequest;
import com.iab.openrtb.request.Format;
import com.iab.openrtb.request.Imp;
import com.iab.openrtb.request.Publisher;
import com.iab.openrtb.request.Site;
import com.iab.openrtb.response.BidResponse;
import io.vertx.core.http.HttpMethod;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.prebid.server.auction.model.AdUnitBid;
import org.prebid.server.auction.model.AdapterRequest;
import org.prebid.server.auction.model.PreBidRequestContext;
import org.prebid.server.bidder.Adapter;
import org.prebid.server.bidder.OpenrtbAdapter;
import org.prebid.server.bidder.ix.proto.IxParams;
import org.prebid.server.bidder.model.AdapterHttpRequest;
import org.prebid.server.bidder.model.ExchangeCall;
import org.prebid.server.exception.PreBidException;
import org.prebid.server.json.JacksonMapper;
import org.prebid.server.proto.request.PreBidRequest;
import org.prebid.server.proto.response.Bid;
import org.prebid.server.proto.response.MediaType;
import org.prebid.server.util.HttpUtil;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* ix {@link Adapter} implementation.
*/
public class IxAdapter extends OpenrtbAdapter {
// ix bids on banner inventory only through this legacy adapter.
private static final Set<MediaType> ALLOWED_MEDIA_TYPES = Collections.singleton(MediaType.banner);
// maximum number of bid requests
private static final int REQUEST_LIMIT = 20;
private final String endpointUrl;
private final JacksonMapper mapper;
/**
 * @param cookieFamilyName cookie family used for user syncing
 * @param endpointUrl ix bid endpoint; validated eagerly so a bad config
 *        fails at startup rather than per-auction
 * @param mapper JSON mapper for params deserialization
 */
public IxAdapter(String cookieFamilyName, String endpointUrl, JacksonMapper mapper) {
super(cookieFamilyName);
this.endpointUrl = HttpUtil.validateUrl(Objects.requireNonNull(endpointUrl));
this.mapper = Objects.requireNonNull(mapper);
}
/**
 * Builds one POST per (ad unit, size) pair, capped at REQUEST_LIMIT.
 * Throws PreBidException when nothing bannerable remains.
 */
@Override
public List<AdapterHttpRequest<BidRequest>> makeHttpRequests(AdapterRequest adapterRequest,
PreBidRequestContext preBidRequestContext) {
validatePreBidRequest(preBidRequestContext.getPreBidRequest());
final List<AdUnitBid> adUnitBids = adapterRequest.getAdUnitBids();
validateAdUnitBidsMediaTypes(adUnitBids, ALLOWED_MEDIA_TYPES);
final List<BidRequest> requests = makeRequests(adUnitBids, preBidRequestContext);
if (CollectionUtils.isEmpty(requests)) {
throw new PreBidException("Invalid ad unit/imp");
}
return requests.stream()
.map(bidRequest -> AdapterHttpRequest.of(HttpMethod.POST, endpointUrl, bidRequest, HttpUtil.headers()))
.collect(Collectors.toList());
}
// This adapter only serves site (web) traffic.
private static void validatePreBidRequest(PreBidRequest preBidRequest) {
if (preBidRequest.getApp() != null) {
throw new PreBidException("ix doesn't support apps");
}
}
/**
 * Expands every banner ad unit into one request per size. The first size
 * of each slot goes into a prioritized list so that, after the
 * REQUEST_LIMIT cap, each slot gets at least one request before any slot
 * gets a second.
 */
private List<BidRequest> makeRequests(List<AdUnitBid> adUnitBids, PreBidRequestContext preBidRequestContext) {
final List<BidRequest> prioritizedRequests = new ArrayList<>();
final List<BidRequest> regularRequests = new ArrayList<>();
for (AdUnitBid adUnitBid : adUnitBids) {
final Set<MediaType> mediaTypes = allowedMediaTypes(adUnitBid, ALLOWED_MEDIA_TYPES);
if (!mediaTypes.contains(MediaType.banner)) {
continue;
}
final IxParams ixParams = parseAndValidateParams(adUnitBid);
boolean isFirstSize = true;
for (Format format : adUnitBid.getSizes()) {
final BidRequest bidRequest = createBidRequest(adUnitBid, ixParams, format, preBidRequestContext);
// prioritize slots over sizes
if (isFirstSize) {
prioritizedRequests.add(bidRequest);
isFirstSize = false;
} else {
regularRequests.add(bidRequest);
}
}
}
return Stream.concat(prioritizedRequests.stream(), regularRequests.stream())
// cap the number of requests to requestLimit
.limit(REQUEST_LIMIT)
.collect(Collectors.toList());
}
// Deserializes the bidder params and requires a non-blank siteId.
private IxParams parseAndValidateParams(AdUnitBid adUnitBid) {
final ObjectNode paramsNode = adUnitBid.getParams();
if (paramsNode == null) {
throw new PreBidException("ix params section is missing");
}
final IxParams params;
try {
params = mapper.mapper().convertValue(paramsNode, IxParams.class);
} catch (IllegalArgumentException e) {
// a weird way to pass parsing exception
throw new PreBidException(String.format("unmarshal params '%s' failed: %s", paramsNode,
e.getMessage()), e.getCause());
}
final String siteId = params.getSiteId();
if (StringUtils.isBlank(siteId)) {
throw new PreBidException("Missing siteId param");
}
return params;
}
// One OpenRTB request per (ad unit, size): the ad unit is copied with a
// single size so each request carries exactly one banner format.
private BidRequest createBidRequest(AdUnitBid adUnitBid, IxParams ixParams, Format size,
PreBidRequestContext preBidRequestContext) {
final Imp imp = makeImp(copyAdUnitBidWithSingleSize(adUnitBid, size), preBidRequestContext);
final PreBidRequest preBidRequest = preBidRequestContext.getPreBidRequest();
return BidRequest.builder()
.id(preBidRequest.getTid())
.at(1)
.tmax(preBidRequest.getTimeoutMillis())
.imp(Collections.singletonList(imp))
.site(makeSite(preBidRequestContext, ixParams.getSiteId()))
.device(deviceBuilder(preBidRequestContext).build())
.user(makeUser(preBidRequestContext))
.source(makeSource(preBidRequestContext))
.regs(preBidRequest.getRegs())
.build();
}
private static AdUnitBid copyAdUnitBidWithSingleSize(AdUnitBid adUnitBid, Format singleSize) {
return adUnitBid.toBuilder().sizes(Collections.singletonList(singleSize)).build();
}
// The ad unit code doubles as both imp id and tagid.
private static Imp makeImp(AdUnitBid adUnitBid, PreBidRequestContext preBidRequestContext) {
final String adUnitCode = adUnitBid.getAdUnitCode();
return Imp.builder()
.id(adUnitCode)
.instl(adUnitBid.getInstl())
.banner(bannerBuilder(adUnitBid).build())
.secure(preBidRequestContext.getSecure())
.tagid(adUnitCode)
.build();
}
// Site with the ix siteId as publisher id; null when no site context exists.
private static Site makeSite(PreBidRequestContext preBidRequestContext, String siteId) {
final Site.SiteBuilder siteBuilder = siteBuilder(preBidRequestContext);
return siteBuilder == null ? null : siteBuilder
.publisher(Publisher.builder().id(siteId).build())
.build();
}
/** Maps each OpenRTB response bid back to the ad unit it was bid on (by impid). */
@Override
public List<Bid.BidBuilder> extractBids(AdapterRequest adapterRequest,
ExchangeCall<BidRequest, BidResponse> exchangeCall) {
return responseBidStream(exchangeCall.getResponse())
.map(bid -> toBidBuilder(bid, adapterRequest))
.collect(Collectors.toList());
}
private static Bid.BidBuilder toBidBuilder(com.iab.openrtb.response.Bid bid, AdapterRequest adapterRequest) {
final AdUnitBid adUnitBid = lookupBid(adapterRequest.getAdUnitBids(), bid.getImpid());
return Bid.builder()
.bidder(adUnitBid.getBidderCode())
.bidId(adUnitBid.getBidId())
.code(bid.getImpid())
.price(bid.getPrice())
.adm(bid.getAdm())
.creativeId(bid.getCrid())
.width(bid.getW())
.height(bid.getH())
.dealId(bid.getDealid())
.mediaType(MediaType.banner);
}
}
|
<filename>SheetKit.podspec
# Podspec for SheetKit: card-styled action sheets and interactive bottom
# sheets for iOS 11+.
Pod::Spec.new do |s|
  s.name             = 'SheetKit'
  s.version          = '0.1.2'
  # Fix: the summary string previously contained an embedded newline and
  # trailing whitespace, which `pod spec lint` flags; keep it single-line.
  s.summary          = 'A lightweight, user-friendly Swift library to create adaptive card-styled UI for action sheet and interactive bottom sheets easily'
  s.description      = <<-DESC
SheetKit lets you create bottom sheets with just few lines of code. It also provides a modern looking action sheet for both iPad and iPhone. And yes it supports dark mode just like any other UIView in UIKit does.
SheetKit supports ActionItem types like button, title, cancelButton, destructiveButton and separator. You can also create a custom styled item for the sheet. The color, image, imageTintColor of button is all customizable. Button without image is also possible.
SheetKit also provides BottomSheets called as PreviewViewController in the kit. These components interactively grow on dragging, occupying the required space only on both brief and detail mode. The heights of each mode is completely customizable.
                       DESC
  s.homepage         = 'https://github.com/akaashdev/SheetKit'
  s.screenshots      = 'https://github.com/akaashdev/SheetKit/blob/master/Screenshots/screenshot-iphone-1.png?raw=true',
                       'https://github.com/akaashdev/SheetKit/blob/master/Screenshots/screenshot-iphone-2.png?raw=true',
                       'https://github.com/akaashdev/SheetKit/blob/master/Screenshots/screenshot-iphone-3.png?raw=true',
                       'https://github.com/akaashdev/SheetKit/blob/master/Screenshots/screenshot-iphone-4.png?raw=true',
                       'https://github.com/akaashdev/SheetKit/blob/master/Screenshots/screenshot-iphone-5.png?raw=true',
                       'https://github.com/akaashdev/SheetKit/blob/master/Screenshots/screenshot-iphone-6.png?raw=true',
                       'https://github.com/akaashdev/SheetKit/blob/master/Screenshots/screenshot-ipad-1.png?raw=true',
                       'https://github.com/akaashdev/SheetKit/blob/master/Screenshots/screenshot-ipad-2.png?raw=true'
  s.license          = { :type => 'MIT', :file => 'LICENSE' }
  s.author           = { '<NAME>' => '<EMAIL>' }
  s.source           = { :git => 'https://github.com/akaashdev/SheetKit.git', :tag => s.version.to_s }
  s.social_media_url = 'https://twitter.com/akaash_dev'
  s.ios.deployment_target = '11.0'
  s.source_files = 'SheetKit/Classes/**/*'
  s.swift_version = '5.0'
end
|
def count_license_headers(source_code: str) -> int:
    """Count license-marker lines inside C-style block comments.

    Scans ``source_code`` line by line and counts each line containing the
    phrase "distributed under the License" while inside a ``/* ... */``
    block comment.

    :param source_code: full text of the source file to scan
    :return: number of marker lines found inside block comments
    """
    count = 0
    in_comment = False
    for raw_line in source_code.split('\n'):
        line = raw_line.strip()
        if line.startswith("/*"):
            in_comment = True
        if in_comment and "distributed under the License" in line:
            count += 1
        # Fix: the original only recognized the terminator when the line
        # *ended* with "*/"; a line like "*/ code;" left the scanner stuck
        # in comment mode for the rest of the file, inflating the count.
        if in_comment and "*/" in line:
            in_comment = False
    return count
/*
* Copyright (c) Open Source Strategies, Inc.
*
* Opentaps is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Opentaps is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Opentaps. If not, see <http://www.gnu.org/licenses/>.
*/
package org.opentaps.domain.billing.invoice;
import java.math.BigDecimal;
import org.opentaps.foundation.service.ServiceException;
import org.opentaps.foundation.service.ServiceInterface;
/**
* POJO service which creates invoice item using the opentaps Service foundation
* class.
*/
/**
 * POJO service interface which creates/updates an invoice item using the
 * opentaps Service foundation class.
 *
 * Usage: set the input parameters via the setters, call
 * {@link #createInvoiceItem()} or {@link #updateInvoiceItem()}, then read the
 * generated sequence id from {@link #getInvoiceItemSeqId()}.
 */
public interface InvoiceItemServiceInterface extends ServiceInterface {
    /**
     * Gets the invoiceItemSeqId created by the service.
     * @return the invoiceItem Seq ID
     */
    public String getInvoiceItemSeqId();
    /**
     * Sets if the services should validate the accounting tags, defaults to <code>false</code>.
     * @param validateAccountingTags a <code>Boolean</code> value
     */
    public void setValidateAccountingTags(Boolean validateAccountingTags);

    // --- invoice item identification ---

    /**
     * Sets the required input parameter for service.
     * @param invoiceItemSeqId the invoiceItem seq Id
     */
    public void setInvoiceItemSeqId(String invoiceItemSeqId);
    /**
     * Sets the required input parameter for service.
     * @param invoiceId input parameter
     */
    public void setInvoiceId(String invoiceId);
    /**
     * Sets the required input parameter for service.
     * @param invoiceItemTypeId input parameter
     */
    public void setInvoiceItemTypeId(String invoiceItemTypeId);
    /**
     * Sets the required input parameter for service.
     * @param overrideGlAccountId input parameter
     */
    public void setOverrideGlAccountId(String overrideGlAccountId);

    // --- item content: product / inventory references ---

    /**
     * Sets the required input parameter for service.
     * @param inventoryItemId input parameter
     */
    public void setInventoryItemId(String inventoryItemId);
    /**
     * Sets the required input parameter for service.
     * @param productId input parameter
     */
    public void setProductId(String productId);
    /**
     * Sets the required input parameter for service.
     * @param productFeatureId input parameter
     */
    public void setProductFeatureId(String productFeatureId);
    /**
     * Sets the required input parameter for service.
     * @param parentInvoiceId input parameter
     */
    public void setParentInvoiceId(String parentInvoiceId);
    /**
     * Sets the required input parameter for service.
     * @param parentInvoiceItemSeqId input parameter
     */
    public void setParentInvoiceItemSeqId(String parentInvoiceItemSeqId);

    // --- quantities, amounts and tax parameters ---

    /**
     * Sets the required input parameter for service.
     * @param uomId input parameter
     */
    public void setUomId(String uomId);
    /**
     * Sets the required input parameter for service.
     * @param taxableFlag input parameter
     */
    public void setTaxableFlag(String taxableFlag);
    /**
     * Sets the required input parameter for service.
     * @param quantity input parameter
     */
    public void setQuantity(BigDecimal quantity);
    /**
     * Sets the required input parameter for service.
     * @param amount input parameter
     */
    public void setAmount(BigDecimal amount);
    /**
     * Sets the required input parameter for service.
     * @param description input parameter
     */
    public void setDescription(String description);
    /**
     * Sets the required input parameter for service.
     * @param taxAuthPartyId input parameter
     */
    public void setTaxAuthPartyId(String taxAuthPartyId);
    /**
     * Sets the required input parameter for service.
     * @param taxAuthGeoId input parameter
     */
    public void setTaxAuthGeoId(String taxAuthGeoId);
    /**
     * Sets the required input parameter for service.
     * @param taxAuthorityRateSeqId input parameter
     */
    public void setTaxAuthorityRateSeqId(String taxAuthorityRateSeqId);

    // --- accounting tags (see setValidateAccountingTags) ---

    /**
     * Sets the required input parameter for service.
     * @param acctgTagEnumId1 input parameter
     */
    public void setAcctgTagEnumId1(String acctgTagEnumId1);
    /**
     * Sets the required input parameter for service.
     * @param acctgTagEnumId2 input parameter
     */
    public void setAcctgTagEnumId2(String acctgTagEnumId2);
    /**
     * Sets the required input parameter for service.
     * @param acctgTagEnumId3 input parameter
     */
    public void setAcctgTagEnumId3(String acctgTagEnumId3);
    /**
     * Sets the required input parameter for service.
     * @param acctgTagEnumId4 input parameter
     */
    public void setAcctgTagEnumId4(String acctgTagEnumId4);
    /**
     * Sets the required input parameter for service.
     * @param acctgTagEnumId5 input parameter
     */
    public void setAcctgTagEnumId5(String acctgTagEnumId5);
    /**
     * Sets the required input parameter for service.
     * @param acctgTagEnumId6 input parameter
     */
    public void setAcctgTagEnumId6(String acctgTagEnumId6);
    /**
     * Sets the required input parameter for service.
     * @param acctgTagEnumId7 input parameter
     */
    public void setAcctgTagEnumId7(String acctgTagEnumId7);
    /**
     * Sets the required input parameter for service.
     * @param acctgTagEnumId8 input parameter
     */
    public void setAcctgTagEnumId8(String acctgTagEnumId8);
    /**
     * Sets the required input parameter for service.
     * @param acctgTagEnumId9 input parameter
     */
    public void setAcctgTagEnumId9(String acctgTagEnumId9);
    /**
     * Sets the required input parameter for service.
     * @param acctgTagEnumId10 input parameter
     */
    public void setAcctgTagEnumId10(String acctgTagEnumId10);

    // --- service entry points ---

    /**
     * Service to create InvoiceItem.
     * @throws ServiceException if an error occurs
     */
    public void createInvoiceItem() throws ServiceException;
    /**
     * Service to update InvoiceItem.
     * @throws ServiceException if an error occurs
     */
    public void updateInvoiceItem() throws ServiceException;
}
|
//
// PROJBaseTableViewController.h
// PROJProjectA
//
// Created by 1 on 10/1/20.
// Copyright © 2020 hausinTec. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "PROJBaseViewController.h"
#import "PROJTableViewModel.h"
#import "PROJBaseTableViewHeaderFooterView.h"
#import "PROJBaseTableViewCell.h"
NS_ASSUME_NONNULL_BEGIN
///--------------------------------------------
/// @name PROJBaseTableViewControllerDataSource
///--------------------------------------------
@protocol PROJBaseTableViewControllerDataSource <NSObject>
@required
- (NSArray<NSString *> *)proj_classesRegistionForCells;// Class names of every cell type the table view must register.
- (__kindof PROJTableViewModel *)proj_tableViewModelForTableViewController;// The view model backing the table view.
@optional
- (UITableViewStyle)proj_tableViewStyle;
- (NSArray<NSString *> *)proj_classesRegistionForSectionHeader;
- (NSArray<NSString *> *)proj_classesRegistionForSectionFooter;
- (CGFloat)proj_estimatedHeightForRowAtIndexPath:(NSIndexPath *)indexPath API_AVAILABLE(ios(7.0));
- (CGFloat)proj_estimatedHeightForHeaderInSection:(NSInteger)section API_AVAILABLE(ios(7.0));
- (CGFloat)proj_estimatedHeightForFooterInSection:(NSInteger)section API_AVAILABLE(ios(7.0));
@end
///--------------------------------------------
/// @name PROJBaseTableViewControllerDelegate
///--------------------------------------------
// NOTE(review): intentionally empty for now; presumably a placeholder for
// future delegate callbacks — confirm before removing.
@protocol PROJBaseTableViewControllerDelegate <NSObject>
@end
///--------------------------------------------
/// @name PROJBaseTableViewController
///--------------------------------------------
/// Base view controller that owns a UITableView and adopts both table view
/// protocols plus the data-source/delegate protocols declared above.
@interface PROJBaseTableViewController : PROJBaseViewController
<UITableViewDelegate, UITableViewDataSource, UITableViewDataSourcePrefetching,
PROJBaseTableViewControllerDataSource, PROJBaseTableViewControllerDelegate>
@property (nonatomic, strong) UITableView *tableView;
@end
NS_ASSUME_NONNULL_END
|
import React from 'react'
import axios from 'axios'
export default class MovieSearch extends React.Component {
state = {
searchTerm: '',
movies: []
}
handleChange = e => {
this.setState({ searchTerm: e.target.value })
}
handleSubmit = e => {
e.preventDefault();
const apiKey = tmdb_api_key;
const searchUrl = `https://api.themoviedb.org/3/search/movie?api_key=${apiKey}&query=${this.state.searchTerm}`;
axios
.get(searchUrl)
.then(response => {
this.setState({ movies: response.data.results });
})
.catch(error => {
console.log(error);
});
}
render() {
return (
<div>
<form onSubmit={this.handleSubmit}>
<input
type="text"
value={this.state.searchTerm}
onChange={this.handleChange}
/>
<button>Search</button>
</form>
<ul>
{this.state.movies.map(movie => (
<li>{movie.title} ({movie.release_date})</li>
))}
</ul>
</div>
);
}
} |
import ARadio from './ARadio.vue';
import { App } from 'vue';

// Attach a Vue plugin-style install hook so the component can be registered
// globally via `app.use(ARadio)`.
const install = (app: App): void => {
  app.component('ARadio', ARadio);
};
ARadio.install = install;

export default ARadio;
|
/*
* AtomicSWMRRegister.java
*
* Created on July 12, 2006, 9:18 PM
*
* From "The Art of Multiprocessor Programming",
* by <NAME> and <NAME>.
* Copyright 2006 Elsevier Inc. All rights reserved.
*/
package tamp.ch04.Register.register;
/**
* Atomic MRSW Register from Atomic MRSW Boolean Register
* Adapted from <NAME> and <NAME>, Constructing 1-writer multireader
* multivalued atomic variables from regular variables, JACM 42(1), 1995.
*
* @param T object type
* @author <NAME>
*/
public class AtomicSWMRRegister<T> {
    // Two copies of the value; individually only "regular" registers,
    // combined with `flag` they form the atomic construction.
    volatile T[] buffer; // regular
    // Raised by the writer while buffer[0] is being updated.
    volatile boolean flag; // atomic

    /**
     * Creates the register holding the given initial value in both slots.
     * @param value initial value
     */
    public AtomicSWMRRegister(T value) {
        // Unchecked generic-array creation is unavoidable here; safe because
        // the array never escapes this class.
        buffer = (T[]) new Object[2];
        buffer[0] = buffer[1] = value;
        flag = false;
    }

    /**
     * Single-writer update. The exact statement order (raise flag, write
     * slot 0, lower flag, write slot 1) IS the algorithm — do not reorder.
     */
    public void write(T value) {
        flag = true;
        buffer[0] = value;
        flag = false;
        buffer[1] = value;
    }

    /**
     * Multi-reader read. If a write is in progress (flag set), the value
     * read from slot 0 is returned; otherwise slot 1 — which the writer
     * only updates after lowering the flag — is used.
     * NOTE(review): this is the textbook construction referenced in the file
    * header; consult the book/paper before changing any line.
     */
    public T read() {
        T value = buffer[0];
        if (flag) {
            return value;
        } else {
            return buffer[1];
        }
    }
}
|
#!/bin/bash
# Print the same kaomoji chant 60 times (replaces 60 copy-pasted echo lines;
# output is byte-identical).
for _ in {1..60}; do
    echo "( ◜ᴗ◝)<ムギュルッパープエール"
done
|
const str = "Hi, You have received a message from 8503973342 Your Expedia verification code is: 854154"
// The original reverse→slice(0,6)→reverse dance just extracts the last six
// characters; slice(-6) does the same in one direct, allocation-free step.
console.log(str.slice(-6))
#!/usr/bin/env bash
# This script is executed inside the builder image.
# It runs the unit-test suite for the matrix target selected by ci/matrix.sh.
set -e
source ./ci/matrix.sh
# Allow matrix entries to opt out of the (slow) unit tests entirely.
if [ "$RUN_TESTS" != "true" ]; then
  echo "Skipping unit tests"
  exit 0
fi
# TODO this is not Travis agnostic
# Seed Boost.Test's randomized ordering from the build id so failures are
# reproducible per CI run.
export BOOST_TEST_RANDOM=1$TRAVIS_BUILD_ID
export LD_LIBRARY_PATH=$BUILD_DIR/depends/$HOST/lib
# Silence Wine's "fixme" chatter for Windows cross-builds.
export WINEDEBUG=fixme-all
export BOOST_TEST_LOG_LEVEL=test_suite
cd build-ci/gemcore-$BUILD_TARGET
if [ "$DIRECT_WINE_EXEC_TESTS" = "true" ]; then
  # Inside Docker, binfmt isn't working so we can't trust in make invoking windows binaries correctly
  wine ./src/test/test_gem.exe
else
  make $MAKEJOBS check VERBOSE=1
fi
|
// Numeric codes describing how a dialog/overlay was dismissed.
// NOTE(review): exact semantics of each code live at the call sites — verify there.
export const CloseStatus = {
    close: 0,
    confirm: 1,
    click: 2,
};
<gh_stars>0
package Server.Model;
import Server.Domain.User;
import Server.Domain.UsersRepository;
import org.json.JSONObject;
import org.riversun.okhttp3.OkHttp3CookieHelper;
import org.springframework.http.HttpStatus;
import org.springframework.web.server.ResponseStatusException;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.net.CookieManager;
import java.net.CookieStore;
import java.net.HttpCookie;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
/**
 * Resolves a {@link User} either from the local repository (cache hit) or, on
 * a miss, by calling the external OAuth service's getUserData endpoint and
 * persisting the result. Only fields covered by the access token's scopes are
 * populated; the rest stay null.
 */
public class GetUserData {
    public static User getUserData(String username, String accessToken, UsersRepository usersRepository, HttpServletResponse httpServletResponse) throws ResponseStatusException, IOException, InterruptedException, ParseException {
        // Linear scan of all users; fine for small tables.
        // NOTE(review): a repository findByUsername query would avoid loading everything.
        List<User> allUsersFromDataBase = (List<User>) usersRepository.findAll();
        User userFound = allUsersFromDataBase.stream()
                .filter(x -> username.equals(x.getUsername()))
                .findFirst()
                .orElse(null);
        // Scopes embedded in the token decide which profile fields we may read.
        List<String> scopesFromAccessToken = GetScopes.getScopes(accessToken);
        User user;
        if (userFound != null) {
            var dbResponse = usersRepository.findById(userFound.getId());
            if (dbResponse.isEmpty()) {
                throw new ResponseStatusException(HttpStatus.NOT_FOUND, "User not found in data base");
            }
            System.out.println("User exists in the database (no request to aouth service)");
            user = dbResponse.get();
        } else {
            System.out.println("User does not exist in the database, request to oauth service is executed");
            String url = "http://localhost:8081/api/getUserData?clientID=2&accessToken=" + accessToken;
            // The token is passed both as a query parameter and as the
            // "AccessToken2" cookie — the downstream service apparently reads the cookie.
            OkHttp3CookieHelper cookieHelper = new OkHttp3CookieHelper();
            cookieHelper.setCookie(url, "AccessToken2", accessToken);
            HttpClient client = HttpClient.newBuilder().cookieHandler(new CookieManager()).build();
            HttpRequest request = HttpRequest.newBuilder().uri(URI.create(url)).build();
            // Plant the cookie into the JDK client's cookie store so it is sent with the request.
            CookieStore cookieStore = ((CookieManager) (client.cookieHandler().get())).getCookieStore();
            HttpCookie accessToken2Cookie = new HttpCookie("AccessToken2", accessToken);
            accessToken2Cookie.setPath("/");
            cookieStore.add(URI.create(url), accessToken2Cookie);
            HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
            JSONObject jsonObject1 = new JSONObject(response.body());
            // Copy only the fields the token's scopes permit; others remain null.
            String user_email = null;
            String user_firstname = null;
            String user_username = null;
            String user_surname = null;
            Date user_birthdate = null;
            if (scopesFromAccessToken.contains("user_email")) {
                user_email = (String) jsonObject1.get("user_email");
            }
            if (scopesFromAccessToken.contains("user_firstname")) {
                user_firstname = (String) jsonObject1.get("user_firstname");
            }
            if (scopesFromAccessToken.contains("user_username")) {
                user_username = (String) jsonObject1.get("user_username");
            }
            if (scopesFromAccessToken.contains("user_surname")) {
                user_surname = (String) jsonObject1.get("user_surname");
            }
            if (scopesFromAccessToken.contains("user_birthdate")) {
                user_birthdate = new SimpleDateFormat("yyyy-MM-dd").parse(jsonObject1.get("user_birthdate").toString());
            }
            // Persist the newly fetched user so subsequent calls hit the database.
            user = new User();
            user.setEmail(user_email);
            user.setFirstName(user_firstname);
            user.setUsername(user_username);
            user.setSurname(user_surname);
            user.setBirthDate(user_birthdate);
            usersRepository.save(user);
        }
        return user;
    }
}
|
package result
import (
"testing"
"github.com/stretchr/testify/require"
)
func TestGetPeers(t *testing.T) {
	gp := NewGetPeers()

	// A freshly constructed result has no peers in any bucket.
	require.Len(t, gp.Unconnected, 0)
	require.Len(t, gp.Connected, 0)
	require.Len(t, gp.Bad, 0)

	// Populate each bucket and verify the counts reflect the additions.
	gp.AddUnconnected([]string{"1.1.1.1:53", "8.8.8.8:53", "9.9.9.9:53"})
	gp.AddConnected([]string{"192.168.0.1:10333"})
	gp.AddBad([]string{"127.0.0.1:20333"})
	require.Len(t, gp.Unconnected, 3)
	require.Len(t, gp.Connected, 1)
	require.Len(t, gp.Bad, 1)

	// "host:port" strings must be split into separate Address/Port fields.
	require.Equal(t, "192.168.0.1", gp.Connected[0].Address)
	require.Equal(t, "10333", gp.Connected[0].Port)
	require.Equal(t, "127.0.0.1", gp.Bad[0].Address)
	require.Equal(t, "20333", gp.Bad[0].Port)
}
|
package de.schub.docker_controller.Metadata.Collector;
import com.orbitz.consul.Consul;
import com.orbitz.consul.model.health.ServiceHealth;
import de.schub.docker_controller.Metadata.ConsulClientFactory;
import de.schub.docker_controller.Metadata.ContainerMetadata;
import de.schub.docker_controller.Metadata.Exception.MetadataCollectorException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import java.net.URI;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Find consul services with the name docker and use them directly as metadata source.
* This was intended to make running a separate docker-controller per node redundant.
*
* It works as intended but it doesn't handle dynamic cluster changes yet (@TODO).
* Also the automated cleanup of orphaned consul services doesn't work
*
* Config:
* URL:
* * docker+consul://HOST:PORT/SERVICE_NAME
*
*/
public class ConsulDockerMetadataCollectorProvider implements MetadataCollectorProvider
{
    private final ConsulClientFactory consulClientFactory;
    // Used to build one plain docker collector per consul node discovered.
    private final DockerMetadataCollectorProvider dockerMetadataCollectorProvider;

    @Inject
    public ConsulDockerMetadataCollectorProvider(
        ConsulClientFactory consulClientFactory,
        DockerMetadataCollectorProvider dockerMetadataCollectorProvider)
    {
        this.consulClientFactory = consulClientFactory;
        this.dockerMetadataCollectorProvider = dockerMetadataCollectorProvider;
    }

    /**
     * Builds a collector for the given endpoint. The URI path selects the
     * consul service name, defaulting to "docker" when absent.
     */
    @Override
    public MetadataCollector getCollector(URI endpoint)
    {
        String serviceName = endpoint.getPath();
        if (null == serviceName || serviceName.isEmpty()) {
            serviceName = "docker";
        }
        return new ConsulDockerMetadataCollector(consulClientFactory.get(endpoint), serviceName);
    }

    /** Handles only "consul+docker://..." endpoints. */
    @Override
    public boolean supports(URI endpoint)
    {
        return endpoint.getScheme().equals("consul+docker");
    }

    /**
     * Fans metadata queries out to one docker collector per healthy consul
     * node. Discovery happens lazily on first use and — per the class header
     * TODO — is never refreshed afterwards.
     */
    class ConsulDockerMetadataCollector implements MetadataCollector
    {
        Logger logger = LoggerFactory.getLogger(ConsulDockerMetadataCollector.class);
        private final Consul consulClient;
        private final String serviceName;
        // node name -> docker collector for that node
        private HashMap<String, MetadataCollector> dockerMetadataCollectors = new HashMap<>();
        // one-shot guard for connect(); NOTE(review): not thread-safe — confirm callers are single-threaded
        private boolean connected = false;

        public ConsulDockerMetadataCollector(Consul consulClient, String serviceName)
        {
            this.consulClient = consulClient;
            this.serviceName = serviceName;
        }

        /** Lazily discovers healthy docker services and builds per-node collectors. */
        private void connect()
        {
            if (connected) {
                return;
            }
            connected = true;
            // get all healthy docker services
            List<ServiceHealth> services = consulClient
                .healthClient()
                .getHealthyServiceInstances(serviceName)
                .getResponse();
            for (ServiceHealth service : services) {
                dockerMetadataCollectors.put(
                    service.getNode().getNode(),
                    dockerMetadataCollectorProvider.getCollector(
                        URI.create("docker://" + service.getNode().getAddress() + ":" + service.getService().getPort())
                    )
                );
            }
        }

        /**
         * Queries each node in turn until one knows the container; failing
         * nodes are logged and skipped. Returns null when no node has it.
         */
        @Override
        public ContainerMetadata get(String containerId) throws MetadataCollectorException
        {
            connect();
            for (Map.Entry<String, MetadataCollector> entry : dockerMetadataCollectors.entrySet()) {
                MetadataCollector collector = entry.getValue();
                try {
                    ContainerMetadata metadata = collector.get(containerId);
                    if (null != metadata) {
                        return metadata;
                    }
                } catch (MetadataCollectorException e) {
                    logger.error("failed to get metadata from node " + entry.getKey(), e);
                }
            }
            return null;
        }

        @Override
        public List<ContainerMetadata> getAll() throws MetadataCollectorException
        {
            return new ArrayList<>(getMap().values());
        }

        /** Merges the container maps of all nodes; per-node failures are logged, not fatal. */
        @Override
        public Map<String, ContainerMetadata> getMap() throws MetadataCollectorException
        {
            connect();
            Map<String, ContainerMetadata> containers = new HashMap<>();
            for (Map.Entry<String, MetadataCollector> entry : dockerMetadataCollectors.entrySet()) {
                MetadataCollector collector = entry.getValue();
                try {
                    containers.putAll(collector.getMap());
                } catch (MetadataCollectorException e) {
                    logger.error("failed to get metadata from node " + entry.getKey(), e);
                }
            }
            return containers;
        }
    }
}
|
#include <cut/2.6/cut.h>
#include "token.h"
#include "ops.h"
#include "grammar.h"
#include <gc.h>
#include <string.h>
#include <gc/cord.h>
/* Test-suite bring-up: initialize the Boehm GC once before the Token tests run. */
void __CUT__Token_createBringup(void)
{
    GC_INIT();
}
/*
 * Exercises Token_create for each literal token kind and checks that the
 * lexeme text is converted to the expected numeric value.
 */
void __CUT__Token_create( void )
{
    CORD data = CORD_from_char_star("100");
    Token *tk = Token_create(TK_INT, data, NULL);
    ASSERT(tk->id == TK_INT, "wrong id.");
    ASSERT(tk->value == 100, "wrong value.");

    tk = Token_create(TK_CHR, CORD_from_char_star("'A'"), NULL);
    ASSERT(tk->id == TK_CHR, "wrong id.");
    ASSERT(tk->value == 'A', "wrong value.");

    tk = Token_create(TK_FLOAT, CORD_from_char_star("1.1"), NULL);
    ASSERT(tk->id == TK_FLOAT, "wrong id.");
    /* float compare: just sanity-check the range rather than exact equality */
    ASSERT(tk->value <= 2.0f && tk->value >= 0.0f, "wrong value");

    tk = Token_create(TK_HEX, CORD_from_char_star("0xAE"), NULL);
    ASSERT(tk->id == TK_HEX, "wrong id.");
    ASSERT(tk->value == 0xAE, "wrong value");

    tk = Token_create(TK_REG, CORD_from_char_star("R0"), NULL);
    /* BUG FIX: this used `tk->id = TK_REG` (assignment), which overwrote the id
     * and made the assertion always pass. Use equality like the other cases. */
    ASSERT(tk->id == TK_REG, "wrong id.");
    ASSERT(tk->value == 0, "wrong value.");
}
|
<filename>src/main/scala/org/purevalue/arbitrage/adapter/coinbase/CoinbasePublicDataInquirer.scala<gh_stars>1-10
package org.purevalue.arbitrage.adapter.coinbase
import akka.actor.typed.scaladsl.{ActorContext, Behaviors}
import akka.actor.typed.{ActorRef, Behavior}
import org.purevalue.arbitrage.adapter.PublicDataInquirer
import org.purevalue.arbitrage.adapter.coinbase.CoinbasePublicDataInquirer.{CoinbaseBaseRestEndpoint, GetCoinbaseTradePairs}
import org.purevalue.arbitrage.traderoom.{Asset, TradePair}
import org.purevalue.arbitrage.util.HttpUtil
import org.purevalue.arbitrage.util.HttpUtil.httpGetJson
import org.purevalue.arbitrage.util.Util.stepSizeToFractionDigits
import org.purevalue.arbitrage.{ExchangeConfig, GlobalConfig}
import org.slf4j.LoggerFactory
import spray.json.{DefaultJsonProtocol, RootJsonFormat}
import scala.concurrent.Await
import scala.concurrent.duration.DurationInt
/** Coinbase product (trading pair) with the exchange-specific precision limits. */
private[coinbase] case class CoinbaseTradePair(id: String, // = product_id
                                               baseAsset: Asset,
                                               quoteAsset: Asset,
                                               baseIncrement: Double,
                                               quoteIncrement: Double,
                                               baseMinSize: Double) {
  def toTradePair: TradePair = TradePair(baseAsset, quoteAsset)
}

/** Raw JSON shape of GET /products entries as delivered by the coinbase REST API. */
private[coinbase] case class ProductJson(id: String,
                                         base_currency: String,
                                         quote_currency: String,
                                         base_increment: String,
                                         quote_increment: String,
                                         base_min_size: String,
                                         base_max_size: String,
                                         status: String, // "online"
                                         status_message: String,
                                         cancel_only: Boolean,
                                         limit_only: Boolean,
                                         post_only: Boolean,
                                         trading_disabled: Boolean) {
  // Numeric fields arrive as strings from the API and are parsed here.
  def toCoinbaseTradePair: CoinbaseTradePair = CoinbaseTradePair(
    id,
    Asset(base_currency),
    Asset(quote_currency),
    base_increment.toDouble,
    quote_increment.toDouble,
    base_min_size.toDouble
  )
}

/** Raw JSON shape of GET /currencies entries. */
private[coinbase] case class CurrencyJson(id: String,
                                          name: String,
                                          min_size: String)

/** spray-json formats for the coinbase REST payloads above. */
private[coinbase] object CoinbaseJsonProtocol extends DefaultJsonProtocol {
  implicit val productJson: RootJsonFormat[ProductJson] = jsonFormat13(ProductJson)
  implicit val currencyJson: RootJsonFormat[CurrencyJson] = jsonFormat3(CurrencyJson)
}
object CoinbasePublicDataInquirer {
  /** Typed-actor factory for the coinbase public-data inquirer behavior. */
  def apply(globalConfig: GlobalConfig,
            exchangeConfig: ExchangeConfig):
  Behavior[PublicDataInquirer.Command] =
    Behaviors.setup(context => new CoinbasePublicDataInquirer(context, globalConfig, exchangeConfig))

  val CoinbaseBaseRestEndpoint: String = "https://api.pro.coinbase.com" // "https://api-public.sandbox.pro.coinbase.com" //

  /** Coinbase-specific query: replies with the exchange's native trade-pair records. */
  case class GetCoinbaseTradePairs(replyTo: ActorRef[Set[CoinbaseTradePair]]) extends PublicDataInquirer.Command
}
/**
 * Actor serving coinbase public reference data (assets and trade pairs).
 * Everything is fetched once, synchronously, during actor setup via init();
 * message handling then answers purely from the cached fields.
 */
private[coinbase] class CoinbasePublicDataInquirer(context: ActorContext[PublicDataInquirer.Command],
                                                   globalConfig: GlobalConfig,
                                                   exchangeConfig: ExchangeConfig) extends PublicDataInquirer(context) {

  import PublicDataInquirer._

  private val log = LoggerFactory.getLogger(getClass)
  // Both caches are populated exactly once by init() before any message arrives.
  var tradePairs: Set[TradePair] = _
  var coinbaseTradePairs: Set[CoinbaseTradePair] = _

  import CoinbaseJsonProtocol._

  /** Fetches GET /currencies and registers every currency in the global Asset register. */
  def registerAssets(): Unit = {
    // Blocking Await is acceptable here: init() runs once during actor setup.
    Await.result(
      httpGetJson[Seq[CurrencyJson], String](s"$CoinbaseBaseRestEndpoint/currencies"),
      globalConfig.httpTimeout.plus(1.second)) match {
      case Left(currencies) =>
        // we don't have information about what is FIAT here, but Asset-register will correct that
        currencies.foreach { e =>
          Asset.register(e.id, Some(e.name), None, stepSizeToFractionDigits(e.min_size.toDouble), exchangeConfig.assetSourceWeight)
        }
      case Right(error) =>
        throw new RuntimeException(s"coinbase: GET /currencies failed: $error")
    }
    log.debug("assets registered")
  }

  /** Fetches GET /products, keeps only tradable pairs and fills both caches. */
  def pullTradePairs(): Unit = {
    coinbaseTradePairs =
      Await.result(
        HttpUtil.httpGetJson[Vector[ProductJson], String](
          s"$CoinbaseBaseRestEndpoint/products"
        ) map {
          case Left(products: Vector[ProductJson]) =>
            // Drop anything not fully tradable or containing a blocklisted asset.
            products
              .filter(e => e.status == "online" && !e.trading_disabled && !e.cancel_only && !e.post_only)
              .map(_.toCoinbaseTradePair)
              .filterNot(e => exchangeConfig.assetBlocklist.contains(e.baseAsset) || exchangeConfig.assetBlocklist.contains(e.quoteAsset))
          case Right(error) =>
            throw new RuntimeException(s"query products failed with: $error")
        },
        globalConfig.httpTimeout.plus(500.millis)
      ).toSet
    tradePairs = coinbaseTradePairs.map(_.toTradePair)
    log.debug("pulled trade pairs")
  }

  def init(): Unit = {
    log.debug("starting "+getClass.getSimpleName)
    try {
      registerAssets()
      pullTradePairs()
    } catch {
      case e:Throwable =>
        log.error("init failed", e)
        throw e
    }
  }

  // NOTE(review): the match is non-exhaustive — any PublicDataInquirer.Command
  // other than these two would throw a MatchError; confirm the protocol is closed.
  override def onMessage(message: Command): Behavior[Command] = {
    message match {
      // @formatter:off
      case GetAllTradePairs(replyTo)      => replyTo ! tradePairs
      case GetCoinbaseTradePairs(replyTo) => replyTo ! coinbaseTradePairs
      // @formatter:on
    }
    this
  }

  init()
}
// Unless otherwise specified, all timestamps from API are returned in ISO 8601 with microseconds
|
<gh_stars>0
import { linkedin } from "./linkedin";
// Smoke test: the stub implementation should simply return its module name.
describe("linkedin", () => {
  it("should work", () => {
    expect(linkedin()).toEqual("linkedin");
  });
});
|
#!/bin/bash
# Test runner: prompts for a SendGrid API key if one is not already exported,
# then runs the pytest suite (pep8 + coverage) and, optionally, radon.
if [[ -z ${SENDGRID_TEST_API_KEY+x} ]]; then
  echo "Provide an API key for testing: "
  read apikey
  export SENDGRID_TEST_API_KEY=$apikey
fi
py.test --pep8 --cov=sendgridmarketingapi --cov-report=term-missing -r a -v -s
# Cyclomatic-complexity report is opt-in via --with-radon / -r.
if [[ $1 == "--with-radon" || $1 == "-r" ]]; then
  printf "\n-- Up next: radon\n-- Press [ENTER] key to keep going...\n"
  read
  radon cc . -as --ignore=tests
fi
|
import React, { ReactElement } from "react"
import { Table } from "antd"
import {
Driver,
DriverStandingsList,
Team,
DriverStandingItem,
} from "../../types"
interface DriverStandingsTableProps {
    // Full season driver-standings list to render, one row per driver.
    data: DriverStandingsList
}

/**
 * Read-only antd table of the driver championship standings:
 * position, driver name/nationality, constructor(s), wins and points.
 */
function DriverStandingsTable({
    data,
}: DriverStandingsTableProps): ReactElement<DriverStandingsTableProps> {
    return (
        <Table
            bodyStyle={{
                overflowX: "scroll",
            }}
            showHeader
            pagination={false}
            size="small"
            rowKey={(item: DriverStandingItem) => item.Driver.driverId}
            columns={[
                {
                    title: "Position",
                    dataIndex: "position",
                },
                {
                    title: "Driver",
                    dataIndex: "Driver",
                    render: (driver: Driver) =>
                        `${driver.givenName} ${driver.familyName}`,
                },
                {
                    // Same dataIndex as "Driver" above, so an explicit key is
                    // needed to keep the column keys unique.
                    title: "Nationality",
                    dataIndex: "Driver",
                    render: (driver: Driver) => driver.nationality,
                    key: "Nationality",
                },
                {
                    // A driver can race for several constructors in one season.
                    title: "Team",
                    dataIndex: "Constructors",
                    render: (teams: Team[]) =>
                        teams.map((team: Team) => team.name).join(", "),
                },
                {
                    title: "Wins",
                    dataIndex: "wins",
                },
                {
                    title: "Points",
                    dataIndex: "points",
                },
            ]}
            dataSource={data}
        />
    )
}
// Memoized: standings data changes rarely, so skip re-renders on equal props.
export default React.memo(DriverStandingsTable)
|
<?xml version="1.0" encoding="UTF-8"?>
<fruits>
<fruit>
<name>Apple</name>
<color>Red</color>
</fruit>
</fruits> |
// Barrel file re-exporting the core layer's public API.
export * from './data-services';
export * from './use-cases/author';
export * from './use-cases/book';
export * from './use-cases/bookImages';
export * from './use-cases/category';
export * from './use-cases/language';
export * from './use-cases/personal-data';
export * from './use-cases/publisher';
export * from './use-cases/user';
export * from './use-cases/userSituation';
export * from './use-cases/request';
// FIX: './use-cases/book-categories' was exported twice; keep a single export.
export * from './use-cases/book-categories';
|
# Create a blank 64 MiB SD-card image on first run so QEMU has a backing store.
if [ ! -f "sd.bin" ]; then
    dd if=/dev/zero of=sd.bin bs=1024 count=65536
fi
# Boot rtthread.bin on an emulated dual-core Versatile Express A9 board,
# serial console on stdio, with the image above attached as the SD card.
qemu-system-arm -M vexpress-a9 -smp cpus=2 -kernel rtthread.bin -serial stdio -sd sd.bin
|
#!/usr/bin/env sh
# Builds the Pokemon-Showdown client assets and bundles them (plus local
# shims) into a single engine.js for the metagrok client.
. scripts/predef

# Sanity check that the generated bundle loads on the Python side.
test_compile_working() {
  ./rp metagrok/pkmn/engine/test_engine.py
}

main() {
  start_safe
  import_config
  load_nvm

  start_unsafe
  nvm install $node_version
  start_safe

  work_dir=$metagrok_client_root
  psc_dir="$showdown_root/$showdown_client_dir"

  if [ "$1" != "nobuild" ]; then
    # FIX: pushd/popd are bash-isms but this script runs under plain sh.
    # A subshell gives the same "change dir, then return" behavior portably.
    (
      cd "$psc_dir" &&
      npm install &&
      ./build indexes &&
      ./build learnsets
    )
  fi

  rm -rf "$work_dir"
  mkdir -p "$work_dir"

  # Concatenate shims + showdown data + battle engine into one bundle.
  cat \
    js/predef.js \
    js/lib/promise-done-polyfill.js \
    js/lib/cycle.js \
    "$psc_dir/data/Pokemon-Showdown/data/abilities.js" \
    "$psc_dir/data/Pokemon-Showdown/data/aliases.js" \
    "$psc_dir/data/Pokemon-Showdown/data/items.js" \
    "$psc_dir/data/Pokemon-Showdown/data/moves.js" \
    "$psc_dir/data/Pokemon-Showdown/data/pokedex.js" \
    "$psc_dir/js/battle-scene-stub.js" \
    "$psc_dir/js/battle-dex.js" \
    "$psc_dir/js/battle-dex-data.js" \
    "$psc_dir/js/battle-text-parser.js" \
    "$psc_dir/js/battle.js" \
    js/engine.js \
    > "$work_dir/engine.js"

  # This part is not necessary, nor will it work, unless you have the conda env installed
  # locally as opposed to just in the docker environment.
  # test_compile_working
}

# FIX: "$@" (not $*) preserves arguments that contain whitespace.
main "$@"
#!/bin/bash
# Build the PHP 5.6 CLI image. FIX: quote "$@" so extra docker-build flags
# (e.g. --build-arg "A=b c") are forwarded without word-splitting.
docker build "$@" -t bmichalski/php-cli:5.6 .
|
/**
 * Returns the maximum sum of any two distinct elements of `array`.
 * Single pass (O(n)) instead of the original O(n^2) double loop.
 * FIX: the original initialized the running max to 0, so all-negative
 * inputs wrongly returned 0; tracking the two largest values fixes that.
 * Arrays with fewer than two elements return 0, as before.
 */
const maxSumOfTwoInts = (array) => {
  if (array.length < 2) return 0;
  let largest = -Infinity;
  let secondLargest = -Infinity;
  for (const value of array) {
    if (value > largest) {
      secondLargest = largest;
      largest = value;
    } else if (value > secondLargest) {
      secondLargest = value;
    }
  }
  return largest + secondLargest;
}
const sampleNumbers = [5, 10, 15, -4, 12];
console.log(maxSumOfTwoInts(sampleNumbers)); // expected output: 27
#include <bits/stdc++.h>
#define endl '\n'
using namespace std;
int main() {
    ios::sync_with_stdio(false);
    cin.tie(0);
    // Per-character 10-bit patterns; column f is one of 10 positions.
    // NOTE(review): presumably note/valve fingerings — confirm against the
    // problem statement.
    map<char, string>p;
    p[' ']="0000000000";
    p['c']="0111001111";
    p['d']="0111001110";
    p['e']="0111001100";
    p['f']="0111001000";
    p['g']="0111000000";
    p['a']="0110000000";
    p['b']="0100000000";
    p['C']="0010000000";
    p['D']="1111001110";
    p['E']="1111001100";
    p['F']="1111001000";
    p['G']="1111000000";
    p['A']="1110000000";
    p['B']="1100000000";
    int t;
    cin>>t;
    cin.ignore();
    string s;
    while(t--){
        getline(cin, s);
        // Prepend the all-zero "rest" state so the very first character's
        // 0->1 transitions are counted too.
        s=" "+s;
        int cnt[10];
        memset(cnt, 0, sizeof(cnt));
        // Count rising edges (0 -> 1) per position between consecutive chars.
        // FIX: loop to the second-to-last character. The original iterated to
        // the last one and read s[q+1] == s[s.size()] (the terminating '\0'),
        // whose p['\0'] lookup default-constructs an empty string that is then
        // indexed out of range — undefined behavior. The '\0' row could never
        // contain '1', so the counted totals are unchanged.
        for(int q=0; q+1<(int)s.size(); q++){
            for(int f=0; f<10; f++){
                if(p[s[q]][f]=='0'&&p[s[q+1]][f]=='1'){
                    cnt[f]++;
                }
            }
        }
        cout<<cnt[0];
        for(int q=1; q<10; q++){
            cout<<" "<<cnt[q];
        }
        cout<<endl;
    }
}
|
<gh_stars>0
package com.leetcode;
import java.util.Arrays;
/**
 * LeetCode 576 "Out of Boundary Paths": count the paths that move a ball out
 * of an m x n grid within maxMove steps, starting from (startRow, startColumn),
 * modulo 1e9+7. Memoized DFS over (row, column, movesUsed).
 */
public class Solution_576 {
    int mod = 1000000007;

    public int findPaths(int m, int n, int maxMove, int startRow, int startColumn) {
        // cache[r][c][k] = escape paths from (r, c) with k moves already used;
        // -1 marks "not computed". FIX: the original allocated a full
        // m*n*maxMove block and then replaced every innermost row with a new
        // (maxMove + 1)-sized array — allocate the right shape once instead.
        int[][][] cache = new int[m][n][maxMove + 1];
        for (int[][] plane : cache) {
            for (int[] row : plane) {
                Arrays.fill(row, -1);
            }
        }
        return dfs(m, n, maxMove, startRow, startColumn, 0, cache);
    }

    /** Memoized DFS: number of ways out of the grid from (curM, curN) after moveCount moves. */
    private int dfs(int m, int n, int maxMove, int curM, int curN, int moveCount, int[][][] cache) {
        if (outBound(m, n, curM, curN)) return 1; // stepped outside: one valid path
        if (moveCount == maxMove) return 0;       // moves exhausted while still inside
        if (cache[curM][curN][moveCount] != -1) return cache[curM][curN][moveCount];
        int sub = 0;
        // Sum the four neighbors, keeping the running total reduced mod 1e9+7.
        sub = (dfs(m, n, maxMove, curM - 1, curN, moveCount + 1, cache) + sub) % mod;
        sub = (dfs(m, n, maxMove, curM, curN - 1, moveCount + 1, cache) + sub) % mod;
        sub = (dfs(m, n, maxMove, curM + 1, curN, moveCount + 1, cache) + sub) % mod;
        sub = (dfs(m, n, maxMove, curM, curN + 1, moveCount + 1, cache) + sub) % mod;
        cache[curM][curN][moveCount] = sub;
        return sub;
    }

    /** True when (curM, curN) lies outside the m x n grid. */
    private boolean outBound(int m, int n, int curM, int curN) {
        return !(curM >= 0 && curM < m && curN >= 0 && curN < n);
    }
}
|
#! /bin/sh
# Check that replaying a request via non-manager mode produces the
# same results.
good=/tmp/goodlookup.map
./start_manager -C $1/configRecord -R/tmp -L$good -lservice -lprocess
# FIX: initialize the failure flag. Previously `failed` was only set on
# failure, so the final `exit $failed` expanded to a bare `exit` on success
# and returned whatever status the last command happened to have.
failed=0
# test that recording works
$rgBinPath/plSend -L$good -Cset=context system.echo.0.raw "Test Data" >/dev/null
$rgBinPath/plPing -L$good -s100 -c1
$rgBinPath/plPing -L$good -s1000 -c1
$rgBinPath/plPing -L$good -s10000 -c1
$rgBinPath/plPing -L$good -s100000 -c1
$rgBinPath/plPing -L$good -s1000000 -c1
$rgBinPath/plPing -L$good -s10000000 -c1
./stop_manager
infile=`echo $rgData/record.in.*.00001`
outfile=`echo $rgData/record.out.*.00001`
if [ ! -r $infile -o ! -r $outfile ]; then
    echo $infile or $outfile not created
    exit 1
fi
# run echo manually to ensure the same output
for f in 00001 00002 00003 00004 00005 00006 00007
do
    in=`echo $rgData/record.in.*.$f`
    out=`echo $rgData/record.out.*.$f`
    # Replay the recorded input through the echo service directly (fd 3 in,
    # fd 4 out), then strip the fields that legitimately differ before diffing.
    $rgServicesPath/echo 3<$in 4>$rgData/manual.$f
    $rgBinPath/plNetStringGrep -v sa <$rgData/manual.$f >$rgData/manual.strip.$f
    $rgBinPath/plNetStringGrep -v as <$out >$rgData/out.strip.$f
    if [ ! -s $rgData/manual.strip.$f ]; then
        echo Failed: Zero length string: $rgData/manual.strip.$f
        failed=1
    fi
    cmp -s $rgData/manual.strip.$f $rgData/out.strip.$f
    if [ $? -ne 0 ]; then
        echo Failed: cmp -s $rgData/manual.strip.$f $rgData/out.strip.$f
        failed=1
    fi
done
exit $failed
|
<filename>src/service/Generator.ts<gh_stars>1-10
import { Service } from 'typedi';
import { Channel } from '../class/Channel';
import { IGeneratorResult } from '../interface/Generator';
import { Template } from '../class/Template';
import { Model } from '../class/Model';
import { EvaluationError, Generator } from '@hapify/generator';
import { NumberedError } from '@hapify/generator/dist/interfaces';
import { RichError } from '../class/RichError';
@Service()
export class GeneratorService {
/** Compile for a whole channel */
async runChannel(channel: Channel): Promise<IGeneratorResult[]> {
const models = await channel.modelsCollection.list();
return await Generator.run(channel.templates, models)
.then((results) => this.filterEmptyFiles(results))
.catch((e) => {
throw this.formatGeneratorError(e);
});
}
/**
* Compile a template to multiple files.
* One per model, if applicable.
*/
async runTemplate(template: Template): Promise<IGeneratorResult[]> {
const models = await template.channel().modelsCollection.list();
return await Generator.run([template], models)
.then((results) => this.filterEmptyFiles(results))
.catch((e) => {
throw this.formatGeneratorError(e);
});
}
/**
* Run generation process for one template/model
* @throws {Error} If the template needs a model and no model is passed
*/
async run(template: Template, model: Model | null): Promise<IGeneratorResult> {
if (template.needsModel() && !model) {
throw new Error('Model should be defined for this template');
}
const models = await template.channel().modelsCollection.list();
const result = await Generator.run([template], models, model ? [model.id] : null).catch((e) => {
throw this.formatGeneratorError(e);
});
return result[0];
}
/** Compute path from a string */
async pathPreview(path: string, model: Model | null = null): Promise<string> {
try {
return Generator.path(path, model ? model.name : null);
} catch (e) {
throw this.formatGeneratorError(e);
}
}
/** Convert generator errors to internal RichError */
private formatGeneratorError(error: NumberedError): RichError {
const richError = new RichError(error.message, {
code: error.code,
type: error.name,
columnNumber: (<EvaluationError>error).columnNumber,
lineNumber: (<EvaluationError>error).lineNumber,
details: (<EvaluationError>error).details,
});
if (error.stack) richError.stack = error.stack;
return richError;
}
private filterEmptyFiles(results: IGeneratorResult[]): IGeneratorResult[] {
return results.filter((result) => result.content.trim().length > 0);
}
}
|
<filename>opentaps/financials/src/com/opensourcestrategies/financials/payment/PaymentActions.java
/*
* Copyright (c) Open Source Strategies, Inc.
*
* Opentaps is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Opentaps is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Opentaps. If not, see <http://www.gnu.org/licenses/>.
*/
package com.opensourcestrategies.financials.payment;
import java.math.BigDecimal;
import java.sql.Timestamp;
import java.text.ParseException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import javax.servlet.http.HttpServletRequest;
import com.opensourcestrategies.financials.util.UtilFinancial;
import javolution.util.FastList;
import javolution.util.FastMap;
import org.ofbiz.base.util.Debug;
import org.ofbiz.base.util.GeneralException;
import org.ofbiz.base.util.UtilMisc;
import org.ofbiz.base.util.UtilValidate;
import org.ofbiz.entity.Delegator;
import org.ofbiz.entity.condition.EntityCondition;
import org.ofbiz.entity.condition.EntityFunction;
import org.ofbiz.entity.condition.EntityOperator;
import org.ofbiz.party.party.PartyHelper;
import org.opentaps.base.constants.PaymentTypeConstants;
import org.opentaps.base.constants.StatusItemConstants;
import org.opentaps.base.constants.StatusTypeConstants;
import org.opentaps.base.entities.PaymentAndPaymentApplication;
import org.opentaps.base.entities.PaymentMethod;
import org.opentaps.base.entities.PaymentMethodType;
import org.opentaps.base.entities.PaymentSum;
import org.opentaps.base.entities.PaymentType;
import org.opentaps.base.entities.StatusItem;
import org.opentaps.common.builder.EntityListBuilder;
import org.opentaps.common.builder.PageBuilder;
import org.opentaps.common.util.UtilAccountingTags;
import org.opentaps.common.util.UtilCommon;
import org.opentaps.common.util.UtilDate;
import org.opentaps.domain.DomainsDirectory;
import org.opentaps.domain.billing.BillingDomainInterface;
import org.opentaps.domain.billing.payment.Payment;
import org.opentaps.domain.billing.payment.PaymentRepositoryInterface;
import org.opentaps.domain.organization.Organization;
import org.opentaps.domain.organization.OrganizationRepositoryInterface;
import org.opentaps.foundation.action.ActionContext;
import org.opentaps.foundation.entity.Entity;
/**
 * PaymentActions - Java Actions for payments.
 *
 * Screen actions used by the financials payment screens; each method
 * populates the screen context map it receives.
 */
public final class PaymentActions {
// Log tag used for Debug calls from this class.
private static final String MODULE = PaymentActions.class.getName();
// Utility class: not meant to be instantiated.
private PaymentActions() { }
/**
 * Action for the find / list payments screen.
 * Reads the search parameters from the request, builds the matching entity
 * conditions, computes per-status payment sums, and installs a paginated
 * list builder into the screen context.
 * @param context the screen context
 * @throws GeneralException if an error occurs
 * @throws ParseException if an error occurs
 */
public static void findPayments(Map<String, Object> context) throws GeneralException, ParseException {
final ActionContext ac = new ActionContext(context);
final HttpServletRequest request = ac.getRequest();
final Delegator delegator = ac.getDelegator();
final Locale locale = ac.getLocale();
final TimeZone timeZone = ac.getTimeZone();
final String organizationPartyId = UtilCommon.getOrganizationPartyId(request);
// without an organization there is nothing to search against; bail out silently (the screen shows no data)
if (organizationPartyId == null) {
Debug.logError("No organizationPartyId set in the current request.", MODULE);
return;
}
ac.put("organizationPartyId", organizationPartyId);
DomainsDirectory dd = DomainsDirectory.getDomainsDirectory(ac);
BillingDomainInterface billingDomain = dd.getBillingDomain();
PaymentRepositoryInterface paymentRepository = billingDomain.getPaymentRepository();
OrganizationRepositoryInterface organizationRepository = dd.getOrganizationDomain().getOrganizationRepository();
Organization organization = organizationRepository.getOrganizationById(organizationPartyId);
// this gets overridden later according to the payment type
ac.put("decoratorLocation", "component://opentaps-common/widget/screens/common/CommonScreens.xml");
// set the disbursement flag which is used to set the partyIdFrom/To to that of the organization on the find payment form as a hidden field
// also set a default status id which is based on whether it's a disbursement (SENT) or not (RECEIVED), but this is overridden by parameters.statusId
ac.put("headerItem", "receivables");
// for accounting tag filtering
String tagsType = UtilAccountingTags.LOOKUP_RECEIPT_PAYMENT_TAG;
boolean findDisbursement = false;
String findPaymentTypeId = ac.getParameter("findPaymentTypeId");
// outgoing payments switch the screen to the payables decorator and tag set
if ("DISBURSEMENT".equals(findPaymentTypeId)) {
findDisbursement = true;
tagsType = UtilAccountingTags.LOOKUP_DISBURSEMENT_PAYMENT_TAG;
ac.put("headerItem", "payables");
}
ac.put("findDisbursement", findDisbursement);
// get the list of paymentMethods, PaymentMethodTypes, paymentTypes
List<String> supportedPaymentTypes = null;
if (findDisbursement) {
ac.put("decoratorLocation", "component://financials/widget/financials/screens/payables/PayablesScreens.xml");
ac.put("headerItem", "payables");
ac.put("paymentMethodList", organization.getRelated(PaymentMethod.class, UtilMisc.toList("paymentMethodTypeId")));
supportedPaymentTypes = Arrays.asList(PaymentTypeConstants.Disbursement.CUSTOMER_REFUND,
PaymentTypeConstants.Disbursement.VENDOR_PAYMENT,
PaymentTypeConstants.Disbursement.VENDOR_PREPAY,
PaymentTypeConstants.Disbursement.COMMISSION_PAYMENT,
PaymentTypeConstants.TaxPayment.SALES_TAX_PAYMENT,
PaymentTypeConstants.TaxPayment.INCOME_TAX_PAYMENT,
PaymentTypeConstants.TaxPayment.PAYROLL_TAX_PAYMENT);
} else {
ac.put("decoratorLocation", "component://financials/widget/financials/screens/receivables/ReceivablesScreens.xml");
ac.put("headerItem", "receivables");
ac.put("paymentMethodTypeList", UtilFinancial.getSimpleCustomerPaymentMethodTypes(delegator));
supportedPaymentTypes = Arrays.asList(PaymentTypeConstants.Receipt.INTEREST_RECEIPT,
PaymentTypeConstants.Receipt.VENDOR_CREDIT_RCPT,
PaymentTypeConstants.Receipt.CUSTOMER_PAYMENT,
PaymentTypeConstants.Receipt.CUSTOMER_DEPOSIT);
}
List<PaymentType> paymentTypeList = paymentRepository.findListCache(PaymentType.class, EntityCondition.makeCondition(PaymentType.Fields.paymentTypeId.name(), EntityOperator.IN, supportedPaymentTypes));
ac.put("paymentTypeList", paymentTypeList);
List<StatusItem> statusList = paymentRepository.findListCache(StatusItem.class, paymentRepository.map(StatusItem.Fields.statusTypeId, StatusTypeConstants.PMNT_STATUS), UtilMisc.toList(StatusItem.Fields.sequenceId.desc()));
ac.put("statusList", statusList);
// get the accounting tags for the select inputs
if (tagsType != null) {
ac.put("tagFilters", UtilAccountingTags.getAccountingTagFiltersForOrganization(organizationPartyId, tagsType, delegator, locale));
}
// possible fields we're searching by
String paymentId = ac.getParameter("paymentId");
String partyIdFrom = ac.getParameter("partyIdFrom");
String partyIdTo = ac.getParameter("partyIdTo");
String paymentTypeId = ac.getParameter("paymentTypeId");
String paymentMethodId = ac.getParameter("paymentMethodId");
String paymentMethodTypeId = ac.getParameter("paymentMethodTypeId");
String paymentRefNum = ac.getParameter("paymentRefNum");
String statusId = ac.getParameter("statusId");
Timestamp fromDate = UtilDate.toTimestamp(ac.getParameter("fromDate"), timeZone, locale);
Timestamp thruDate = UtilDate.toTimestamp(ac.getParameter("thruDate"), timeZone, locale);
String amountFrom = ac.getParameter("amountFrom");
String amountThru = ac.getParameter("amountThru");
String openAmountFrom = ac.getParameter("openAmountFrom");
String openAmountThru = ac.getParameter("openAmountThru");
// construct search conditions
List<EntityCondition> searchConditions = new FastList<EntityCondition>();
// always filter out payments that are BOTH of type "CUSTOMER_REFUND" and of method type "EXT_BILLACT"
// (the OR of the two NOT_EQUALs excludes only rows matching both values)
searchConditions.add(EntityCondition.makeCondition(EntityOperator.OR,
EntityCondition.makeCondition(Payment.Fields.paymentTypeId.name(), EntityOperator.NOT_EQUAL, "CUSTOMER_REFUND"),
EntityCondition.makeCondition(Payment.Fields.paymentMethodTypeId.name(), EntityOperator.NOT_EQUAL, "EXT_BILLACT")));
if (paymentId != null) {
searchConditions.add(EntityCondition.makeCondition(Payment.Fields.paymentId.name(), EntityOperator.EQUALS, paymentId));
}
// force one of the party to the organization according to the payment type
if (findDisbursement) {
// disbursements always originate from the organization
searchConditions.add(EntityCondition.makeCondition(Payment.Fields.partyIdFrom.name(), EntityOperator.EQUALS, organizationPartyId));
if (partyIdTo != null) {
searchConditions.add(EntityCondition.makeCondition(Payment.Fields.partyIdTo.name(), EntityOperator.EQUALS, partyIdTo));
}
} else {
// receipts are always addressed to the organization
searchConditions.add(EntityCondition.makeCondition(Payment.Fields.partyIdTo.name(), EntityOperator.EQUALS, organizationPartyId));
if (partyIdFrom != null) {
searchConditions.add(EntityCondition.makeCondition(Payment.Fields.partyIdFrom.name(), EntityOperator.EQUALS, partyIdFrom));
}
}
// restrict by the selected payment type, or by all supported types when none is selected
if (paymentTypeId != null) {
searchConditions.add(EntityCondition.makeCondition(Payment.Fields.paymentTypeId.name(), EntityOperator.EQUALS, paymentTypeId));
} else {
searchConditions.add(EntityCondition.makeCondition(Payment.Fields.paymentTypeId.name(), EntityOperator.IN, supportedPaymentTypes));
}
// disbursements are filtered by payment method, receipts by payment method type
if (findDisbursement) {
if (paymentMethodId != null) {
searchConditions.add(EntityCondition.makeCondition(Payment.Fields.paymentMethodId.name(), EntityOperator.EQUALS, paymentMethodId));
}
} else {
if (paymentMethodTypeId != null) {
searchConditions.add(EntityCondition.makeCondition(Payment.Fields.paymentMethodTypeId.name(), EntityOperator.EQUALS, paymentMethodTypeId));
}
}
if (paymentRefNum != null) {
// make sure the look up is case insensitive (prefix match on the reference number)
searchConditions.add(EntityCondition.makeCondition(EntityFunction.UPPER_FIELD(Payment.Fields.paymentRefNum.name()), EntityOperator.LIKE, EntityFunction.UPPER(paymentRefNum + "%")));
}
if (statusId != null) {
searchConditions.add(EntityCondition.makeCondition(Payment.Fields.statusId.name(), EntityOperator.EQUALS, statusId));
}
// inclusive effective-date range
if (fromDate != null) {
searchConditions.add(EntityCondition.makeCondition(Payment.Fields.effectiveDate.name(), EntityOperator.GREATER_THAN_EQUAL_TO, fromDate));
}
if (thruDate != null) {
searchConditions.add(EntityCondition.makeCondition(Payment.Fields.effectiveDate.name(), EntityOperator.LESS_THAN_EQUAL_TO, thruDate));
}
// inclusive amount / open-amount ranges
// NOTE(review): new BigDecimal(String) throws NumberFormatException on
// non-numeric input; presumably the form validates these -- confirm upstream.
if (amountFrom != null) {
searchConditions.add(EntityCondition.makeCondition(Payment.Fields.amount.name(), EntityOperator.GREATER_THAN_EQUAL_TO, new BigDecimal(amountFrom)));
}
if (amountThru != null) {
searchConditions.add(EntityCondition.makeCondition(Payment.Fields.amount.name(), EntityOperator.LESS_THAN_EQUAL_TO, new BigDecimal(amountThru)));
}
if (openAmountFrom != null) {
searchConditions.add(EntityCondition.makeCondition(Payment.Fields.openAmount.name(), EntityOperator.GREATER_THAN_EQUAL_TO, new BigDecimal(openAmountFrom)));
}
if (openAmountThru != null) {
searchConditions.add(EntityCondition.makeCondition(Payment.Fields.openAmount.name(), EntityOperator.LESS_THAN_EQUAL_TO, new BigDecimal(openAmountThru)));
}
// prepare the sum conditions, using the same conditions as for the search but tags must be handled separately
// (the copy is taken BEFORE tag conditions are appended to searchConditions below)
List<EntityCondition> sumConditions = new FastList<EntityCondition>();
sumConditions.addAll(searchConditions);
if (tagsType != null) {
// if the organization is using allocatePaymentTagsToApplications then the tags are on the payment applications,
// else they are on the Payments
if (organization.allocatePaymentTagsToApplications()) {
List<EntityCondition> tagConditions = UtilAccountingTags.buildTagConditions(organizationPartyId, tagsType, delegator, request, UtilAccountingTags.TAG_PARAM_PREFIX, "applAcctgTagEnumId");
if (UtilValidate.isNotEmpty(tagConditions)) {
searchConditions.addAll(tagConditions);
}
} else {
List<EntityCondition> tagConditions = UtilAccountingTags.buildTagConditions(organizationPartyId, tagsType, delegator, request);
if (UtilValidate.isNotEmpty(tagConditions)) {
searchConditions.addAll(tagConditions);
}
}
}
// for the tag condition we filter by the payment ID that matches the search filters
Set<String> payIds = Entity.getDistinctFieldValues(String.class, paymentRepository.findList(PaymentAndPaymentApplication.class, searchConditions), PaymentAndPaymentApplication.Fields.paymentId);
sumConditions.add(EntityCondition.makeCondition(PaymentSum.Fields.paymentId.name(), EntityOperator.IN, payIds));
// filter out voided and canceled payments (only when no explicit status filter is set)
if (statusId == null) {
sumConditions.add(EntityCondition.makeCondition(Payment.Fields.statusId.name(), EntityOperator.NOT_IN, Arrays.asList(StatusItemConstants.PmntStatus.PMNT_CANCELLED, StatusItemConstants.PmntStatus.PMNT_VOID)));
}
// per-status totals, ordered by the localized status description
List<PaymentSum> sums = paymentRepository.findList(PaymentSum.class, EntityCondition.makeCondition(sumConditions, EntityOperator.AND),
Arrays.asList(PaymentSum.Fields.statusId.name(),
PaymentSum.Fields.statusDescription.name(),
PaymentSum.Fields.totalAmount.name()),
Arrays.asList(PaymentSum.Fields.statusDescription.asc()));
ac.put("sumsPerStatus", sums);
// get the sums total
BigDecimal bigTotal = BigDecimal.ZERO;
for (PaymentSum sum : sums) {
BigDecimal am = sum.getTotalAmount();
// skip null totals so the accumulation never NPEs
if (am == null) {
continue;
}
bigTotal = bigTotal.add(am);
}
ac.put("overallSum", bigTotal);
// Pagination
// restrict the selected fields to those shared by Payment and PaymentAndPaymentApplication
Set<String> fieldsToSelect = new HashSet<String>(new Payment().getAllFieldsNames());
fieldsToSelect.retainAll(new PaymentAndPaymentApplication().getAllFieldsNames());
EntityListBuilder paymentListBuilder = new EntityListBuilder(paymentRepository, PaymentAndPaymentApplication.class, EntityCondition.makeCondition(searchConditions, EntityOperator.AND), fieldsToSelect, UtilMisc.toList(PaymentAndPaymentApplication.Fields.effectiveDate.desc()));
// decorate each page row with localized status / payment method descriptions and resolved party names
PageBuilder<PaymentAndPaymentApplication> pageBuilder = new PageBuilder<PaymentAndPaymentApplication>() {
public List<Map<String, Object>> build(List<PaymentAndPaymentApplication> page) throws Exception {
Delegator delegator = ac.getDelegator();
List<Map<String, Object>> newPage = FastList.newInstance();
for (PaymentAndPaymentApplication payment : page) {
Map<String, Object> newRow = FastMap.newInstance();
newRow.putAll(payment.toMap());
StatusItem status = payment.getStatusItem();
newRow.put("statusDescription", status.get(StatusItem.Fields.description.name(), locale));
PaymentMethodType meth = payment.getPaymentMethodType();
// payment method type may be absent (e.g. payments recorded without one)
if (meth != null) {
newRow.put("paymentMethodDescription", meth.get(PaymentMethodType.Fields.description.name(), locale));
}
newRow.put("partyNameFrom", PartyHelper.getPartyName(delegator, payment.getPartyIdFrom(), false));
newRow.put("partyNameTo", PartyHelper.getPartyName(delegator, payment.getPartyIdTo(), false));
newPage.add(newRow);
}
return newPage;
}
};
paymentListBuilder.setPageBuilder(pageBuilder);
ac.put("paymentListBuilder", paymentListBuilder);
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.