text
stringlengths 1
1.05M
|
|---|
#!/bin/sh
# Activate the conda environment used by the Bokeh server, then start a
# Redis instance (foreground/blocking) on port 7001 for it to connect to.
# NOTE(review): `source` is a bash-ism; under a strict POSIX /bin/sh this
# line may fail — confirm the target shell.
source activate bokeh_server
redis-server --port 7001
|
<filename>js/articles/table.controller.js
// Controller for the article table: loads the article list and the current
// user's votes from Firebase REST endpoints, and records one up/down vote
// per user per article.
angular
  .module('news')
  .controller('ArticleTableCtrl', ArticleTableCtrl);

function ArticleTableCtrl($http, BASE_URL) {
  var vm = this,
      fb = new Firebase(BASE_URL),
      user = fb.getAuth();

  // Apply a single 'up'/'down' vote for article `uuid`, but only if this
  // user has not voted on it before; persists both the updated article
  // counters and the user's vote record.
  vm.vote = function (direction, uuid) {
    var data = vm.articles[uuid],
        vote = {};
    vote[uuid] = direction;
    if (!vm.usersVotes[uuid]) {
      if (direction === 'up') {
        data.votes.up++;
      } else {
        data.votes.down++;
      }
      vm.usersVotes[uuid] = vote;
      $http.put('https://newsagg.firebaseio.com/articles/' + uuid + '.json', data);
      $http.patch('https://newsagg.firebaseio.com/votes/' + user.uid + '/.json', vote);
    }
  };

  // FIX: the legacy $http .success()/.error() helpers were deprecated in
  // AngularJS 1.4.4 and removed in 1.6 — use the standard .then() promise
  // API and read the payload from response.data instead.
  $http
    .get('https://newsagg.firebaseio.com/articles/.json')
    .then(function (response) {
      vm.articles = response.data;
    });
  $http
    .get('https://newsagg.firebaseio.com/votes/' + user.uid + '/.json')
    .then(function (response) {
      vm.usersVotes = response.data || {};
    });
}
|
/**
 * Validates an e-mail address against a simple regular expression.
 * <p>
 * FIX: guards against {@code null} — the original called
 * {@code email.matches(...)} directly and threw a NullPointerException.
 *
 * @param email candidate address; may be {@code null}
 * @return {@code true} if non-null and matching the pattern
 */
public static boolean isValidEmail(String email) {
    if (email == null) {
        return false; // matches() would NPE on null input
    }
    String regex = "^[\\w-_.+]*[\\w-_.]@([\\w]+\\.)+[\\w]+[\\w]$";
    return email.matches(regex);
}
|
##################################################################
## (c) Copyright 2015- by <NAME> ##
##################################################################
#====================================================================#
# qmcpack_method_analyzers.py #
# Analyzer classes at the level of QMC methods. Instances #
# contain all data outputted by VMC, Opt, DMC, etc. sub-runs #
# carried out by QMCPACK. #
# #
# Content summary: #
# MethodAnalyzer #
# Base class for specific method analyzers. #
# Derived classes are OptAnalyzer, VmcAnalyzer, DmcAnalyzer #
# #
#====================================================================#
import os
import re
from generic import obj
from hdfreader import HDFreader
from qmcpack_analyzer_base import Checks,QAanalyzer,QAdata,QAHDFdata
from qmcpack_property_analyzers import WavefunctionAnalyzer
from qmcpack_quantity_analyzers import HDFAnalyzer
from debug import *
class MethodAnalyzer(QAanalyzer):
    # Analyzer for a single QMC method sub-run (VMC/opt/DMC) within a series.
    # Locates the output files belonging to the series, then constructs the
    # appropriate quantity analyzers (scalar.dat, stat.h5, dmc.dat, traces).
    #
    # FIXES vs. original: identity comparisons with None (`is`/`is not`
    # instead of `==`/`!=`), `except Exception:` instead of a bare `except:`
    # (which also swallowed KeyboardInterrupt/SystemExit), and removal of an
    # unused local in check_traces.

    def __init__(self,series=None,calc=None,input=None,nindent=0):
        QAanalyzer.__init__(self,nindent=nindent)
        if series is not None and calc is not None and input is not None:
            self.init_sub_analyzers(series,calc,input)
        #end if
    #end def __init__

    def init_sub_analyzers(self,series,calc,input):
        # Scan the run directory for files matching this series' prefix and
        # build sub-analyzers for each requested data source found.
        request     = QAanalyzer.request
        run_info    = QAanalyzer.run_info
        source_path = run_info.source_path
        file_prefix = run_info.file_prefix+'.s'+str(series).zfill(3)
        method      = calc.method
        files       = obj()
        outfiles    = os.listdir(source_path)
        self.vlog('looking for file prefix: '+file_prefix,n=2)
        matched = False
        for file in outfiles:
            if file.startswith(file_prefix):
                local_match = True
                if file.endswith('scalar.dat'):
                    files.scalar = file
                elif file.endswith('stat.h5'):
                    files.stat = file
                elif file.endswith('storeConfig.h5'):
                    files.storeconfig = file
                elif file.endswith('opt.xml'):
                    files.opt = file
                elif file.endswith('dmc.dat') and method=='dmc':
                    files.dmc = file
                elif '.traces.' in file:
                    # trace files come in per-rank sets; collect them all
                    if 'traces' not in files:
                        files.traces = []
                    #end if
                    files.traces.append(file)
                else:
                    local_match = False
                #end if
                matched = matched or local_match
                self.vlog('match found: '+file,n=3)
            #end if
        #end for
        # a run is "complete" only if its mandatory outputs are present
        complete  = matched
        complete &= 'scalar' in files
        if 'linear' in method or method=='opt':
            complete &= 'opt' in files
        #end if
        # equilibration may be given globally (int) or per-series (dict/obj)
        equil = request.equilibration
        nblocks_exclude = -1
        if isinstance(equil,int):
            nblocks_exclude = equil
        elif isinstance(equil,(dict,obj)):
            if series in equil:
                nblocks_exclude = equil[series]
            #end if
        elif equil is not None:
            self.error('invalid input for equilibration which must be an int, dict, or obj\n you provided: {0}\n with type {1}'.format(equil,equil.__class__.__name__))
        #end if
        data_sources = request.data_sources & set(files.keys())
        method_info = obj(
            method          = method,
            series          = series,
            file_prefix     = file_prefix,
            files           = files,
            data_sources    = data_sources,
            method_input    = calc.copy(),
            nblocks_exclude = nblocks_exclude,
            complete        = complete,
            )
        self.info.transfer_from(method_info)
        self.vlog('requested sources = '+str(list(request.data_sources)),n=2)
        self.vlog('files available   = '+str(list(files.keys())),n=2)
        self.vlog('available sources = '+str(list(data_sources)),n=2)
        if not matched:
            msg = 'no data files found\n file prefix used for matching: {0}\n checked all files in directory: {1}'.format(file_prefix,source_path)
            #self.error(msg,trace=False)
            #self.warn(msg)
            return
        #end if
        self.set_global_info()
        try:
            analyzers = self.capabilities.analyzers
            if 'scalar' in data_sources:
                filepath = os.path.join(source_path,files.scalar)
                self.scalars = analyzers.scalars_dat(filepath,equilibration='LocalEnergy',nindent=self.subindent())
            #end if
            if 'stat' in data_sources:
                # determine scalars and analyzer quantities from the
                # estimators declared in the hamiltonian and the calc block
                analyzer_quantities = self.capabilities.analyzer_quantities
                analyzer_quants = obj()
                ignored_quantities = set()
                ham = input.get('hamiltonian')
                ham = ham.get_single('h0')
                ham_est  = ham.get('estimator')
                calc_est = calc.get('estimator')
                estimators = obj()
                if ham_est is not None:
                    estimators.transfer_from(ham_est)
                #end if
                if calc_est is not None:
                    estimators.transfer_from(calc_est)
                #end if
                for estname,est in estimators.items():
                    if est is None:
                        self.error('estimators have not been read properly by QmcpackInput',trace=False)
                    #end if
                    # an estimator may carry a type, a name, or both
                    has_type = 'type' in est
                    has_name = 'name' in est
                    if has_type and has_name:
                        type = est.type
                        name = est.name
                    elif has_name:
                        type = est.name
                        name = est.name
                    elif has_type:
                        type = est.type
                        name = est.type
                    else:
                        self.error('estimator '+estname+' has no type or name')
                    #end if
                    cname = self.condense_name(name)
                    ctype = self.condense_name(type)
                    if ctype=='density' and not has_name:
                        name = 'any'
                    #end if
                    if ctype in analyzer_quantities:
                        analyzer_quants[name] = self.condense_name(type)
                    #end if
                #end for
                not_scalars = set(analyzer_quants.keys())
                self.scalars_hdf = analyzers.scalars_hdf(not_scalars,nindent=self.subindent())
                analyzer_quantities = analyzer_quantities & request.quantities
                for name,type in analyzer_quants.items():
                    if type in analyzer_quantities:
                        if type in analyzers:
                            qqa = analyzers[type](name,nindent=self.subindent())
                            qqa.init_sub_analyzers()
                            self[name] = qqa
                        else:
                            ignored_quantities.add(name)
                        #end if
                    #end if
                #end for
                self.info.ignored_quantities = ignored_quantities
            #end if
            if 'dmc' in data_sources:
                filepath = os.path.join(source_path,files.dmc)
                self.dmc = analyzers.dmc_dat(filepath,nindent=self.subindent())
            #end if
            if 'traces' in data_sources and 'traces' in files:
                self.traces = analyzers.traces(source_path,files.traces,nindent=self.subindent())
            #end if
        except Exception:
            # best-effort: a failed sub-analyzer marks the run incomplete
            # rather than aborting the whole analysis
            self.info.complete = False
        #end try
        self.unset_global_info()
        return
    #end def init_sub_analyzers

    def load_data_local(self):
        # Read the stat.h5 file (if requested) and hand the HDF data to any
        # HDFAnalyzer sub-analyzers; sub-analyzers may request removal.
        source_path  = QAanalyzer.run_info.source_path
        data_sources = self.info.data_sources
        files        = self.info.files
        if 'stat' in data_sources:
            filepath = os.path.join(source_path,files.stat)
            hr = HDFreader(filepath)
            if not hr._success:
                # deliberate warn-and-continue: downstream transfer then
                # sees an empty/partial object
                self.warn(' hdf file seems to be corrupted, skipping contents:\n '+filepath)
            #end if
            hdf = hr.obj
            self.data = QAHDFdata()
            self.data.transfer_from(hdf)
        #end if
        remove = []
        for name,value in self.items():
            if isinstance(value,HDFAnalyzer):
                value.load_data_local(self.data)
                value.info.data_loaded = True
                if value.info.should_remove:
                    remove.append(name)
                #end if
            #end if
        #end for
        for name in remove:
            del self[name]
        #end for
    #end def load_data_local

    def set_global_info(self):
        QAanalyzer.method_info = self.info
    #end def set_global_info

    def unset_global_info(self):
        QAanalyzer.method_info = None
    #end def unset_global_info

    def check_traces(self,pad=None):
        # Validate trace data against scalar/dmc outputs.
        # Returns True/False when traces are present, None otherwise.
        verbose = pad is not None
        method = self.info.method
        series = self.info.series
        if verbose:
            desc = 'method {0} series {1}'.format(method,series)
        #end if
        if 'traces' in self:
            if verbose:
                self.log(pad+'Checking traces in '+desc)
            #end if
            scalars     = None
            scalars_hdf = None
            dmc         = None
            if 'scalars' in self and 'data' in self.scalars:
                scalars = self.scalars.data
            #end if
            if 'scalars_hdf' in self and 'data' in self.scalars_hdf:
                scalars_hdf = self.scalars_hdf.data
            #end if
            if 'dmc' in self and 'data' in self.dmc:
                dmc = self.dmc.data
            #end if
            checks = Checks('traces')
            checks.exclude(None)
            traces = self.traces
            traces.form_diagnostic_data()
            checks.psums = traces.check_particle_sums()
            if method=='dmc':
                checks.dmc = traces.check_dmc(dmc)
            else:
                svalid,shvalid = traces.check_scalars(scalars,scalars_hdf)
                checks.scalars     = svalid
                checks.scalars_hdf = shvalid
            #end if
            valid = checks.valid()
            if verbose:
                checks.write(pad+'  ')
            #end if
            return valid
        else:
            if verbose:
                self.log(pad+'No traces in '+desc)
            #end if
            return None
        #end if
    #end def check_traces
#end class MethodAnalyzer
class OptAnalyzer(MethodAnalyzer):
    # Method analyzer for optimization runs: in addition to the base
    # analyzers, parses the optimized wavefunction from opt.xml if present.
    def init_sub_analyzers(self,series,calc,input):
        MethodAnalyzer.init_sub_analyzers(self,series,calc,input)
        source_path = QAanalyzer.run_info.source_path
        files = self.info.files
        if 'opt' in files:
            opt_out_xml = os.path.join(source_path,files.opt)
            self.wavefunction = WavefunctionAnalyzer(opt_out_xml)
        #end if
    #end def init_sub_analyzers
#end class OptAnalyzer
class VmcAnalyzer(MethodAnalyzer):
    # VMC runs need no specialization beyond the base MethodAnalyzer.
    None
#end class VmcAnalyzer
class DmcAnalyzer(MethodAnalyzer):
    # DMC runs need no specialization beyond the base MethodAnalyzer.
    None
#end class DmcAnalyzer
|
#!/bin/bash
# Configure the p1p1 interface for high-throughput capture (jumbo frames,
# static ARP for the peer) and apply kernel network tuning.
ifconfig p1p1 192.168.1.2 mtu 9710
arp -s 192.168.1.1 00:25:90:eb:e0:9e
# FIX: the original set net.ipv4.neigh.default.gc_thresh1 three times
# (duplicate keys — the later writes just overwrite the first), which was
# almost certainly meant to be gc_thresh1/2/3. Values kept at the author's
# 8192; thresh2/thresh3 are conventionally set higher — tune as needed.
sysctl -w \
net.core.rmem_max=26214400 \
net.core.wmem_max=16777216 \
net.core.rmem_default=524288 \
net.core.wmem_default=524288 \
fs.file-max=100000 \
vm.swappiness=10 \
net.core.optmem_max=40960 \
net.core.netdev_max_backlog=50000 \
net.ipv4.udp_rmem_min=8192 \
net.ipv4.udp_wmem_min=8192 \
net.ipv4.conf.all.send_redirects=0 \
net.ipv4.conf.all.accept_redirects=0 \
net.ipv4.conf.all.accept_source_route=0 \
net.ipv4.conf.all.log_martians=1 \
net.ipv4.neigh.default.gc_thresh1=8192 \
net.ipv4.neigh.default.gc_thresh2=8192 \
net.ipv4.neigh.default.gc_thresh3=8192
|
// Generated by script, don't edit it please.
import createSvgIcon from '../../createSvgIcon';
import ArrowUp2Svg from '@rsuite/icon-font/lib/legacy/ArrowUp2';
// Legacy "arrow up 2" icon component wrapping the raw SVG glyph.
const ArrowUp2 = createSvgIcon({
  as: ArrowUp2Svg,
  ariaLabel: 'arrow up 2',
  category: 'legacy',
  displayName: 'ArrowUp2'
});
export default ArrowUp2;
|
#!/bin/sh
# Run a MaBoSS PROFILE drug simulation for the BRAF model, reading the
# n5000_t100 cell-line file and writing results tagged "n5000_t50".
# The commented loop below was used to sweep 100 replicate input files.
list1="Proliferation_b1,Proliferation_b2"
#Please first cd to the parent folder
python3 Scripts/Simulations/MaBoSS_PROFILE_drugs.py BRAF_Model -sy Mac -p 2 "Tests/BRAF_Model_CL_n5000_t100.txt" -o $list1 -s "n5000_t50" -rb "Results/Profiles/BRAF_Model_Test.csv" -rf 100
#for i in {1..100}
#do
# python3 Scripts/Simulations/MaBoSS_PROFILE_drugs.py BRAF_Model -sy Mac -p 1 "Tests/BRAF_Model_CL_n5000_t50_"$i".txt" -o $list1 -s "n5000_t50"
#done
|
#!/bin/bash
# Wait for dependencies to come up, then launch the Cantor initial service
# in the background (JMX exposed on localhost:9002, no auth/SSL) and, if
# that spawn succeeds, run jmxtrans in the foreground to ship JMX metrics.
sleep 60
(nohup java -Dhost.name=$HOSTNAME -Dcom.sun.management.jmxremote -Djava.rmi.server.hostname=localhost -Dcom.sun.management.jmxremote.port=9002 -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false -classpath ./lib/*:./conf/ io.cantor.service.InitialService &) && java -DhostName=$HOSTNAME -jar /usr/share/jmxtrans/lib/jmxtrans-all.jar -e -j /var/lib/jmxtrans -s 5 -c false
|
#include <vector>
#include <string>
#include <memory> // *_ptr family
#include <memory.h> // memcpy
#include <iostream>
// Simple rule-of-5 demo class owning a raw byte buffer.
//
// FIXES vs. original:
//  * 'new uint8_t(bs)' allocated ONE byte (value-initialized to bs) yet was
//    released with 'delete[]' — mismatched new/delete is undefined behavior,
//    and the memcpy of rhs.bs bytes overflowed the single byte. Now uses
//    'new uint8_t[bs]'.
//  * copy/move constructors left bs/bp uninitialized before delegating to
//    the assignment operators, which read both — now zero-initialized.
//  * the move assignment operator returned ObjRaw&& (an rvalue reference to
//    *this) — both assignment operators now conventionally return ObjRaw&.
class ObjRaw {
    size_t bs;    // buffer size in bytes
    uint8_t* bp;  // owned buffer of bs bytes (nullptr when bs == 0)
public:
    ObjRaw(int s) : bs(s), bp(new uint8_t[bs]) // CTOR
    {
        std::cout << "ObjRaw(" << bs << ")\n";
    }
    ~ObjRaw() // DTOR
    {
        std::cout << "~ObjRaw(), bs=" << bs << "\n";
        delete [] bp;
    }
    ObjRaw(const ObjRaw& rhs) : bs(0), bp(nullptr) // copy CTOR
    {
        std::cout << "copy ctor rhs.bs=" << rhs.bs << "\n";
        *this = rhs; // delegate to copy assignment
    }
    ObjRaw& operator= (const ObjRaw& rhs) // copy = operator
    {
        std::cout << "copy = operator bs,rhs.bs=" << bs << "," << rhs.bs << "\n";
        if(this!=&rhs) {
            bs = rhs.bs;
            delete [] bp;
            // allocate a full bs-byte array before copying rhs's contents
            bp = bs ? static_cast<uint8_t*>(memcpy(new uint8_t[bs], rhs.bp, rhs.bs)) : nullptr;
        }
        return *this;
    }
    ObjRaw(ObjRaw&& rhs) : bs(0), bp(nullptr) // move CTOR
    {
        std::cout << "move ctor rhs.bs=" << rhs.bs << "\n";
        *this = std::move(rhs); // delegate to move assignment
    }
    ObjRaw& operator= (ObjRaw&& rhs) // move = operator
    {
        std::cout << "move = operator bs,rhs.bs=" << bs << "," << rhs.bs << "\n";
        if(this!=&rhs) {
            delete [] bp;
            bs = rhs.bs;
            bp = rhs.bp;
            rhs.bs = 0;       // leave the source empty but destructible
            rhs.bp = nullptr;
        }
        return *this;
    }
    void print(const std::string& l) // print out buffer size
    {
        std::cout << l << ", bs=" << bs << "\n";
    }
};
void doWorkRawPtr ()
{
std::cout << "doWorkRawPtr start\n";
std::vector< std::shared_ptr<ObjRaw> > v;
std::shared_ptr<ObjRaw> p;
v.push_back( std::shared_ptr<ObjRaw> (new ObjRaw(1e7)) );
v.push_back( std::shared_ptr<ObjRaw> (new ObjRaw(2e7)) );
v.front() -> print("v.front");
v.back() -> print("v.back");
std::cout << "front use_count=" << v.front().use_count() << "\n";
std::cout << "back use_count=" << v.back ().use_count() << "\n";
for(int i = 0; i < 1000; i++)
v.push_back( v.front() );
for(int i = 0; i < 1000; i++)
v.push_back( v[1] );
std::cout << "v.size()=" << v.size() << "\n";
v.front() -> print("v.front");
v.back() -> print("v.back");
std::cout << "front use_count=" << v.front().use_count() << "\n";
std::cout << "back use_count=" << v.back ().use_count() << "\n";
// Same thing again, this time using a copy of the ptr
p = v.front();
p->print("v.front");
std::cout << "front use_count=" << v.front().use_count() << "\n";
p = v.back();
std::cout << "front use_count=" << v.front().use_count() << "\n";
std::cout << "back use_count=" << v.back ().use_count() << "\n";
p->print("v.back");
std::cout << "doWorkRawPtr finish\n";
}
// Simple rule-of-5 demo class owning its buffer through shared_ptr.
//
// FIXES vs. original:
//  * 'make_shared<uint8_t>(bs)' allocated a SINGLE byte holding the value
//    bs; the later memcpy of rhs.bs bytes was a heap overflow, and the
//    scalar deleter never matched an array. Now allocates 'new uint8_t[bs]'
//    with an array-aware deleter (std::default_delete<uint8_t[]>).
//  * copy/move constructors left bs uninitialized before the delegated
//    assignment printed it — now zero-initialized.
//  * the move assignment operator returned ObjSmart&& of *this — both
//    assignment operators now conventionally return ObjSmart&.
class ObjSmart {
    size_t bs;                    // buffer size in bytes
    std::shared_ptr<uint8_t> bp;  // owned bs-byte array (array deleter)
public:
    ObjSmart(int s) // CTOR
        : bs(s),
          bp( new uint8_t[bs], std::default_delete<uint8_t[]>() )
    {
        std::cout << "ObjSmart(" << bs << ")\n";
    }
    ~ObjSmart() // DTOR — shared_ptr releases the buffer
    {
        std::cout << "~ObjSmart(), bs=" << bs << "\n";
    }
    ObjSmart(const ObjSmart& rhs) : bs(0) // copy CTOR
    {
        std::cout << "copy ctor rhs.bs=" << rhs.bs << "\n";
        *this = rhs; // delegate to copy assignment
    }
    ObjSmart& operator= (const ObjSmart& rhs) // copy = operator
    {
        std::cout << "copy = operator bs,rhs.bs=" << bs << "," << rhs.bs << "\n";
        if(this!=&rhs) {
            bs = rhs.bs;
            if(bs) {
                // fresh array of bs bytes, then deep-copy rhs's contents
                bp = std::shared_ptr<uint8_t>( new uint8_t[bs], std::default_delete<uint8_t[]>() );
                memcpy( (void*) (bp.get()), (const void*) (rhs.bp.get()), rhs.bs);
            } else {
                bp.reset();
            }
        }
        return *this;
    }
    ObjSmart(ObjSmart&& rhs) : bs(0) // move CTOR
    {
        std::cout << "move ctor rhs.bs=" << rhs.bs << "\n";
        *this = std::move(rhs); // delegate to move assignment
    }
    ObjSmart& operator= (ObjSmart&& rhs) // move = operator
    {
        std::cout << "move = operator bs,rhs.bs=" << bs << "," << rhs.bs << "\n";
        if(this!=&rhs) {
            bs = rhs.bs;
            bp = std::move(rhs.bp); // steal the buffer
            rhs.bs = 0;
            rhs.bp.reset();         // ensure the source is empty
        }
        return *this;
    }
    void print(const std::string& l) // print out buffer size
    {
        std::cout << l << ", bs=" << bs << "\n";
    }
};
void doWorkSmartPtr ()
{
std::cout << "doWorkSmartPtr start\n";
std::vector< std::shared_ptr<ObjSmart> > v;
std::shared_ptr<ObjSmart> p;
v.push_back( std::shared_ptr<ObjSmart> (new ObjSmart(1e7)) );
v.push_back( std::shared_ptr<ObjSmart> (new ObjSmart(2e7)) );
v.front() -> print("v.front");
v.back() -> print("v.back");
std::cout << "front use_count=" << v.front().use_count() << "\n";
std::cout << "back use_count=" << v.back ().use_count() << "\n";
for(int i = 0; i < 1000; i++)
v.push_back( v.front() );
for(int i = 0; i < 1000; i++)
v.push_back( v[1] );
std::cout << "v.size()=" << v.size() << "\n";
v.front() -> print("v.front");
v.back() -> print("v.back");
std::cout << "front use_count=" << v.front().use_count() << "\n";
std::cout << "back use_count=" << v.back ().use_count() << "\n";
// Same thing again, this time using a copy of the ptr
p = v.front();
p->print("v.front");
std::cout << "front use_count=" << v.front().use_count() << "\n";
p = v.back();
std::cout << "front use_count=" << v.front().use_count() << "\n";
std::cout << "back use_count=" << v.back ().use_count() << "\n";
p->print("v.back");
std::cout << "doWorkSmartPtr finish\n";
}
// Demo driver: runs the raw-pointer-buffer and smart-pointer-buffer
// exercises in turn, with banners separating their trace output.
int main()
{
    std::cout << "main:\n";
    std::cout << "++++++++++++++++++++++++++++++++ calling doWorkRawPtr\n";
    doWorkRawPtr();
    std::cout << "++++++++++++++++++++++++++++++++ doWorkRawPtr returned\n";
    std::cout << "++++++++++++++++++++++++++++++++ calling doWorkSmartPtr\n";
    doWorkSmartPtr();
    std::cout << "++++++++++++++++++++++++++++++++ doWorkSmartPtr returned\n";
    return 0;
}
|
def find_four_letter_words(sentence):
    """Return the whitespace-delimited tokens of ``sentence`` that are
    exactly four characters long.

    Note: tokens are raw ``str.split`` pieces, so attached punctuation
    counts toward the length (e.g. ``"dog."`` qualifies).
    """
    return [token for token in sentence.split() if len(token) == 4]
# Demo: "dog." is included because punctuation counts toward token length.
four_letter_words = find_four_letter_words("The quick brown fox jumps over the lazy dog.")
print(four_letter_words)
|
/// <summary>
/// Creates one SourceItem per calendar day in [startDate, endDate]
/// (inclusive), each carrying a uniformly random quantity in
/// [minQuantity, maxQuantity] and the supplied creator/group metadata.
/// </summary>
/// <remarks>
/// NOTE(review): the creationDate parameter is accepted but never used —
/// confirm whether it should populate a field on SourceItem.
/// </remarks>
public IEnumerable<SourceItem> CreateItemsFromRange(Source source, DateTime startDate, DateTime endDate, int minQuantity, int maxQuantity, DateTime creationDate, string creatorName, SourceGroupEnum groupEnum)
{
    var generated = new List<SourceItem>();
    var rng = new Random();
    int totalDays = (int)(endDate - startDate).TotalDays + 1;
    for (int offset = 0; offset < totalDays; offset++)
    {
        // Random.Next's upper bound is exclusive, hence the +1.
        generated.Add(new SourceItem
        {
            SourceId = source.Id,
            DateCreated = startDate.AddDays(offset),
            Quantity = rng.Next(minQuantity, maxQuantity + 1),
            Creator = creatorName,
            GroupEnum = groupEnum
        });
    }
    return generated;
}
|
<filename>fetch.py
from yipy.api import Yipy
from math import ceil
import json
def fetch_movies():
    """Download the full movie catalog from the Yipy API, 50 titles per
    page, and write the results to data/movies.json; also collects the
    union of all genre tags into data/genres.json.
    """
    api = Yipy()
    # first request only asks for the total count so we can page correctly
    movie_list = api.list(limit=1)
    genres = set()
    page_count = ceil(movie_list["data"]["movie_count"]/50)
    # page_count = 2
    movies = []
    for page in range(1, 1+page_count):
        # lightweight progress indicator on a single line
        print(page,"/",page_count," "*10,end="\r")
        movie_list = api.list(limit=50, page=page)
        for m in movie_list["data"]["movies"]:
            movies.append(m)
            if("genres" in m):
                for g in m["genres"]:
                    genres.add(g)
            # print(m["imdb_code"])
    with open("data/movies.json","w") as f:
        json.dump(movies,f)
    with open("data/genres.json","w") as f:
        json.dump(list(genres),f)
# Commented-out post-processing: derives a slimmed id/genres-only dataset
# from a previously fetched movies.json.
# with open("movies.json","r") as f:
# movies = json.load(f)
# movies_short = []
# for i,m in enumerate(movies):
# print(i,"/",len(movies),end="\r")
# if("genres" in m):
# movies_short.append({"id":m["id"], "genres":m["genres"]})
# else:
# movies_short.append({"id":m["id"], "genres":[]})
# with open("movies_short.json","w") as f:
# json.dump(movies_short,f)
# Run the fetch when executed as a script (not on import).
if __name__ == '__main__':
    fetch_movies()
|
#include <iostream>
#include <algorithm>
#include <unordered_map>
using namespace std;

// Returns the value that occurs most often in arr[0..size).
// The array is sorted in place first, so ties are broken in favor of the
// smallest value (the strict '>' keeps the first maximum seen in sorted
// order). Note: despite its name, this returns the element, not a count.
int countOccurrences(int arr[], int size)
{
    sort(arr, arr + size);

    // tally how many times each value appears
    unordered_map<int, int> freq;
    for (int idx = 0; idx < size; ++idx)
        ++freq[arr[idx]];

    // scan in sorted order for the first value reaching the highest tally
    int best_value = 0;
    int best_count = 0;
    for (int idx = 0; idx < size; ++idx) {
        int current = freq[arr[idx]];
        if (current > best_count) {
            best_count = current;
            best_value = arr[idx];
        }
    }
    return best_value;
}
// Demo: prints the most frequent element of the sample array (2 here,
// which appears three times).
int main()
{
    int arr[] = { 3,1,3,1,2,2,2 };
    int size = sizeof(arr) / sizeof(arr[0]);
    cout << countOccurrences(arr, size);
    return 0;
}
|
import React from "react";
import { ScreenRecorder, IScreenRecorderOptions, safeCallback } from 'screen-recorder-base';
import bindkey from '@w-xuefeng/bindkey';
import Video from './Video';
/** Props accepted by ScreenRecorderComponent. */
export interface IScreenRecorderComponentProps {
  /** Keyboard shortcut that starts recording; ESC always stops. */
  shortKey?: string;
  /** When true, a draggable live preview is shown while recording. */
  preview?: boolean;
  /** Constraints forwarded to the underlying screen capture. */
  videoOptions?: MediaTrackConstraints;
  startBtnText?: React.ReactNode;
  startBtnStyle?: React.CSSProperties;
  endBtnText?: React.ReactNode;
  endBtnStyle?: React.CSSProperties;
  /** Custom renderer replacing the default start button. */
  startContent?: (startEvent: Function, endEvent: Function) => React.ReactNode,
  /** Custom renderer replacing the default end button. */
  endContent?: (endEvent: Function, startEvent: Function) => React.ReactNode,
  /** Custom renderer replacing the default <Video> preview. */
  previewContent?: (mediaStream: MediaStream) => React.ReactNode,
  onRecordingStart?: (mediaStream: MediaStream) => void
  /** Receives the recording as an object URL plus the raw Blob. */
  onRecordingEnd?: (blobUrl: string, fixedBlob: Blob) => void
  onRecordingUnsupport?: () => void
  onRecordingError?: (err: unknown) => void
};
/** Internal reducer state for the component. */
export interface IScreenRecorderComponentStates {
  error: boolean
  unsupported: boolean
  recording: boolean
  previewMediaStream: null | MediaStream
}
/** Default inline styling for the start/end buttons. */
const defaultBtnStyle: React.CSSProperties = {
  margin: '0 16px',
  cursor: 'pointer',
  boxShadow: 'none',
  fontSize: '14px',
  lineHeight: '20px',
  fontFamily: 'Microsoft YaHei, Helvetica Neue, Helvetica, Arial, sans-serif',
  height: '32px',
  display: 'inline-flex',
  alignItems: 'center',
  justifyContent: 'center',
  userSelect: 'none',
  border: '0 solid transparent',
  borderRadius: '2px',
  padding: '6px 12px',
  fontWeight: 600,
  outline: 'none',
  verticalAlign: 'middle',
  whiteSpace: 'nowrap',
  color: 'rgb(80, 86, 94)',
}
/** Default styling for the floating, draggable preview window. */
const defaultPreviewStyle: React.CSSProperties = {
  position: 'fixed',
  border: '1px solid #666',
  zIndex: 9999,
  cursor: 'move',
  top: 5,
  right: 5
}
/**
 * Merges the given styles with a conditional hide: when `c` is false the
 * result forces `display: 'none'`; when true the styles pass through.
 */
const rShow = (c: boolean, styles?: React.CSSProperties): React.CSSProperties => {
  const visibility: React.CSSProperties = c ? {} : { display: 'none' };
  return { ...styles, ...visibility };
};
const AorB = (c: boolean, A: React.ReactNode, B: React.ReactNode = null) => c ? A : B
/** Initial reducer state: idle, supported, no preview stream. */
const initState: IScreenRecorderComponentStates = {
  error: false,
  unsupported: false,
  recording: false,
  previewMediaStream: null,
}
/** Action handlers; 'setState' shallow-merges a partial state patch. */
const handleAction = {
  setState: (state: IScreenRecorderComponentStates, payload?: Partial<IScreenRecorderComponentStates>) => {
    return {
      ...state,
      ...payload
    }
  }
}
type TActionType = keyof typeof handleAction
// Actions may be a bare type string or a { type, payload } object.
type TAction = TActionType | { type: TActionType, payload: Partial<IScreenRecorderComponentStates> }
/** Dispatches to handleAction; falls back to the current state when the
 *  handler yields nothing (safeCallback swallows handler errors). */
const reducer = (state: IScreenRecorderComponentStates, action: TAction): IScreenRecorderComponentStates => {
  const actionType = typeof action === 'string' ? action : action.type
  const payload = typeof action === 'string' ? undefined : action.payload
  const res: IScreenRecorderComponentStates | undefined = safeCallback<any, IScreenRecorderComponentStates>(handleAction[actionType], state, payload)
  return res ?? state
}
/**
 * Screen-recording component built on screen-recorder-base.
 * Renders start/end buttons (or caller-supplied content), optionally a
 * draggable live <Video> preview while recording, and an optional keyboard
 * shortcut (ESC always stops when a shortcut is configured).
 */
const ScreenRecorderComponent: React.FC<IScreenRecorderComponentProps> = (props) => {
  const {
    shortKey,
    preview = false,
    videoOptions,
    startBtnText = '开始录屏',
    startBtnStyle,
    endBtnText = '结束录屏',
    endBtnStyle,
    startContent,
    endContent,
    previewContent,
    onRecordingStart,
    onRecordingEnd,
    onRecordingUnsupport,
    onRecordingError,
  } = props
  const [state, dispatch] = React.useReducer(reducer, initState)
  const previewDefaultWidth = 300;
  // Shallow-merge a partial state patch through the reducer.
  const setState = (payload: Partial<IScreenRecorderComponentStates>) => dispatch({
    type: 'setState',
    payload
  })
  const initPreview = (mediaStream: MediaStream) => setState({ previewMediaStream: mediaStream })
  // Recorder lifecycle callbacks: update component state, then forward to
  // the corresponding caller-supplied callback (safeCallback tolerates
  // missing/throwing handlers).
  const options: IScreenRecorderOptions = {
    onUnsupported: () => {
      setState({
        unsupported: true,
        recording: false,
        error: false
      })
      safeCallback(onRecordingUnsupport);
    },
    onRecordStart: (mediaStream: MediaStream) => {
      setState({
        recording: true,
        error: false
      })
      initPreview(mediaStream);
      safeCallback(onRecordingStart, mediaStream);
    },
    onError: (err) => {
      setState({
        error: true,
        recording: false
      })
      safeCallback(onRecordingError, err);
    },
    onRecordEnd: (blobUrl: string, fixedBlob: Blob) => {
      setState({
        recording: false,
        error: false
      })
      safeCallback(onRecordingEnd, blobUrl, fixedBlob);
    },
    timeSlice: 1000,
    videoOptions,
  };
  // Single recorder instance for the component's lifetime.
  const screenRecorder = React.useRef(ScreenRecorder.createSR(options))
  // Bind shortKey -> start and ESC -> end. ESC itself is not allowed as
  // the start key because it is reserved for stopping.
  const bindShortKey = () => {
    if (shortKey && shortKey.toUpperCase() !== 'ESC') {
      bindkey.add(shortKey, start, { target: globalThis });
      bindkey.add('ESC', end, { target: globalThis });
      console.info(`[BindKey] ${shortKey} to start`)
      console.info(`[BindKey] ESC to end`)
    }
  }
  // NOTE(review): the removal logs run even when no shortKey was ever
  // bound — confirm that is intended.
  const removeShortKey = () => {
    shortKey && (bindkey.remove(shortKey), bindkey.remove('ESC'))
    console.info(`[RemoveKey] ${shortKey}`)
    console.info(`[RemoveKey] ESC`)
  }
  // Re-attach the recorder's stream to the preview when `preview` toggles
  // mid-recording. NOTE(review): reads state.recording but only lists
  // [preview] as a dependency — confirm this is deliberate.
  React.useEffect(() => {
    state.recording &&
    screenRecorder.current &&
    screenRecorder.current.mediaStream &&
    initPreview(screenRecorder.current.mediaStream)
  }, [preview])
  // (Re)bind the keyboard shortcut whenever it changes; unbind on cleanup.
  React.useEffect(() => {
    bindShortKey()
    return () => {
      removeShortKey()
    }
  }, [shortKey])
  const start = () => {
    screenRecorder.current?.startRecording();
  };
  const end = () => {
    screenRecorder.current?.stopRecording();
  };
  return (
    <div>
      {/* start control: default button, or caller content while idle */}
      {
        AorB(
          !startContent && !state.recording,
          <button
            onClick={start}
            style={{ ...defaultBtnStyle, ...startBtnStyle }}
          >
            {startBtnText}
          </button>,
          AorB(
            !state.recording,
            safeCallback(startContent, start, end)
          )
        )
      }
      {/* end control: default button, or caller content while recording */}
      {
        AorB(
          !endContent && state.recording,
          <button
            onClick={end}
            style={{ ...defaultBtnStyle, ...endBtnStyle }}
          >
            {endBtnText}
          </button>,
          AorB(
            state.recording,
            safeCallback(endContent, end, start)
          )
        )
      }
      {/* live preview: default draggable <Video>, or caller content */}
      {
        AorB(
          preview && !previewContent,
          <Video
            draggable
            muted
            autoPlay
            width={previewDefaultWidth}
            srcObject={state.previewMediaStream}
            dragWrapStyle={rShow(state.recording, { ...defaultPreviewStyle })}
          ></Video>,
          AorB(
            !!(preview && state.recording && screenRecorder.current),
            screenRecorder.current
              ? safeCallback<[MediaStream], React.ReactNode>(previewContent, screenRecorder.current?.mediaStream!)
              : null
          )
        )
      }
    </div >
  );
}
export default ScreenRecorderComponent;
|
#!/bin/bash
APP=gmall
# 如果是输入的日期按照取输入日期;如果没输入日期取当前时间的前一天
if [ -n "$2" ] ;then
do_date=$2
else
do_date=`date -d "-1 day" +%F`
fi
dws_visitor_action_daycount="insert overwrite table ${APP}.dws_visitor_action_daycount partition(dt='$do_date')
select
t1.mid_id,
t1.brand,
t1.model,
t1.is_new,
t1.channel,
t1.os,
t1.area_code,
t1.version_code,
t1.visit_count,
t3.page_stats
from
(
select
mid_id,
brand,
model,
if(array_contains(collect_set(is_new),'0'),'0','1') is_new,--ods_page_log中,同一天内,同一设备的is_new字段,可能全部为1,可能全部为0,也可能部分为0,部分为1(卸载重装),故做该处理
collect_set(channel) channel,
collect_set(os) os,
collect_set(area_code) area_code,
collect_set(version_code) version_code,
sum(if(last_page_id is null,1,0)) visit_count
from ${APP}.dwd_page_log
where dt='$do_date'
and last_page_id is null
group by mid_id,model,brand
)t1
join
(
select
mid_id,
brand,
model,
collect_set(named_struct('page_id',page_id,'page_count',page_count,'during_time',during_time)) page_stats
from
(
select
mid_id,
brand,
model,
page_id,
count(*) page_count,
sum(during_time) during_time
from ${APP}.dwd_page_log
where dt='$do_date'
group by mid_id,model,brand,page_id
)t2
group by mid_id,model,brand
)t3
on t1.mid_id=t3.mid_id
and t1.brand=t3.brand
and t1.model=t3.model;"
dws_user_action_daycount="
with
tmp_login as
(
select
user_id,
count(*) login_count
from ${APP}.dwd_page_log
where dt='$do_date'
and user_id is not null
and last_page_id is null
group by user_id
),
tmp_cf as
(
select
user_id,
sum(if(action_id='cart_add',1,0)) cart_count,
sum(if(action_id='favor_add',1,0)) favor_count
from ${APP}.dwd_action_log
where dt='$do_date'
and user_id is not null
and action_id in ('cart_add','favor_add')
group by user_id
),
tmp_order as
(
select
user_id,
count(*) order_count,
sum(if(activity_reduce_amount>0,1,0)) order_activity_count,
sum(if(coupon_reduce_amount>0,1,0)) order_coupon_count,
sum(activity_reduce_amount) order_activity_reduce_amount,
sum(coupon_reduce_amount) order_coupon_reduce_amount,
sum(original_amount) order_original_amount,
sum(final_amount) order_final_amount
from ${APP}.dwd_order_info
where (dt='$do_date'
or dt='9999-99-99')
and date_format(create_time,'yyyy-MM-dd')='$do_date'
group by user_id
),
tmp_pay as
(
select
user_id,
count(*) payment_count,
sum(payment_amount) payment_amount
from ${APP}.dwd_payment_info
where dt='$do_date'
group by user_id
),
tmp_ri as
(
select
user_id,
count(*) refund_order_count,
sum(refund_num) refund_order_num,
sum(refund_amount) refund_order_amount
from ${APP}.dwd_order_refund_info
where dt='$do_date'
group by user_id
),
tmp_rp as
(
select
rp.user_id,
count(*) refund_payment_count,
sum(ri.refund_num) refund_payment_num,
sum(rp.refund_amount) refund_payment_amount
from
(
select
user_id,
order_id,
sku_id,
refund_amount
from ${APP}.dwd_refund_payment
where dt='$do_date'
)rp
left join
(
select
user_id,
order_id,
sku_id,
refund_num
from ${APP}.dwd_order_refund_info
where dt>=date_add('$do_date',-15)
)ri
on rp.order_id=ri.order_id
and rp.sku_id=rp.sku_id
group by rp.user_id
),
tmp_coupon as
(
select
user_id,
sum(if(date_format(get_time,'yyyy-MM-dd')='$do_date',1,0)) coupon_get_count,
sum(if(date_format(using_time,'yyyy-MM-dd')='$do_date',1,0)) coupon_using_count,
sum(if(date_format(used_time,'yyyy-MM-dd')='$do_date',1,0)) coupon_used_count
from ${APP}.dwd_coupon_use
where (dt='$do_date' or dt='9999-99-99')
and (date_format(get_time, 'yyyy-MM-dd') = '$do_date'
or date_format(using_time,'yyyy-MM-dd')='$do_date'
or date_format(used_time,'yyyy-MM-dd')='$do_date')
group by user_id
),
tmp_comment as
(
select
user_id,
sum(if(appraise='1201',1,0)) appraise_good_count,
sum(if(appraise='1202',1,0)) appraise_mid_count,
sum(if(appraise='1203',1,0)) appraise_bad_count,
sum(if(appraise='1204',1,0)) appraise_default_count
from ${APP}.dwd_comment_info
where dt='$do_date'
group by user_id
),
tmp_od as
(
select
user_id,
collect_set(named_struct('sku_id',sku_id,'sku_num',sku_num,'order_count',order_count,'activity_reduce_amount',activity_reduce_amount,'coupon_reduce_amount',coupon_reduce_amount,'original_amount',original_amount,'final_amount',final_amount)) order_detail_stats
from
(
select
user_id,
sku_id,
sum(sku_num) sku_num,
count(*) order_count,
cast(sum(split_activity_amount) as decimal(16,2)) activity_reduce_amount,
cast(sum(split_coupon_amount) as decimal(16,2)) coupon_reduce_amount,
cast(sum(original_amount) as decimal(16,2)) original_amount,
cast(sum(split_final_amount) as decimal(16,2)) final_amount
from ${APP}.dwd_order_detail
where dt='$do_date'
group by user_id,sku_id
)t1
group by user_id
)
insert overwrite table ${APP}.dws_user_action_daycount partition(dt='$do_date')
select
coalesce(tmp_login.user_id,tmp_cf.user_id,tmp_order.user_id,tmp_pay.user_id,tmp_ri.user_id,tmp_rp.user_id,tmp_comment.user_id,tmp_coupon.user_id,tmp_od.user_id),
nvl(login_count,0),
nvl(cart_count,0),
nvl(favor_count,0),
nvl(order_count,0),
nvl(order_activity_count,0),
nvl(order_activity_reduce_amount,0),
nvl(order_coupon_count,0),
nvl(order_coupon_reduce_amount,0),
nvl(order_original_amount,0),
nvl(order_final_amount,0),
nvl(payment_count,0),
nvl(payment_amount,0),
nvl(refund_order_count,0),
nvl(refund_order_num,0),
nvl(refund_order_amount,0),
nvl(refund_payment_count,0),
nvl(refund_payment_num,0),
nvl(refund_payment_amount,0),
nvl(coupon_get_count,0),
nvl(coupon_using_count,0),
nvl(coupon_used_count,0),
nvl(appraise_good_count,0),
nvl(appraise_mid_count,0),
nvl(appraise_bad_count,0),
nvl(appraise_default_count,0),
order_detail_stats
from tmp_login
full outer join tmp_cf on tmp_login.user_id=tmp_cf.user_id
full outer join tmp_order on coalesce(tmp_login.user_id,tmp_cf.user_id)=tmp_order.user_id
full outer join tmp_pay on coalesce(tmp_login.user_id,tmp_cf.user_id,tmp_order.user_id)=tmp_pay.user_id
full outer join tmp_ri on coalesce(tmp_login.user_id,tmp_cf.user_id,tmp_order.user_id,tmp_pay.user_id)=tmp_ri.user_id
full outer join tmp_rp on coalesce(tmp_login.user_id,tmp_cf.user_id,tmp_order.user_id,tmp_pay.user_id,tmp_ri.user_id)=tmp_rp.user_id
full outer join tmp_comment on coalesce(tmp_login.user_id,tmp_cf.user_id,tmp_order.user_id,tmp_pay.user_id,tmp_ri.user_id,tmp_rp.user_id)=tmp_comment.user_id
full outer join tmp_coupon on coalesce(tmp_login.user_id,tmp_cf.user_id,tmp_order.user_id,tmp_pay.user_id,tmp_ri.user_id,tmp_rp.user_id,tmp_comment.user_id)=tmp_coupon.user_id
full outer join tmp_od on coalesce(tmp_login.user_id,tmp_cf.user_id,tmp_order.user_id,tmp_pay.user_id,tmp_ri.user_id,tmp_rp.user_id,tmp_comment.user_id,tmp_coupon.user_id)=tmp_od.user_id;
"
# DWS layer, activity daily aggregation for $do_date, one row per
# (activity_rule_id, activity_id):
#   tmp_order — orders placed today that used an activity;
#   tmp_pay   — orders paid today (order rows may live in today's or
#               yesterday's partition, hence the two-day dt filter).
# The two CTEs are zero-padded, unioned and summed into the target partition.
dws_activity_info_daycount="
with
tmp_order as
(
select
activity_rule_id,
activity_id,
count(*) order_count,
sum(split_activity_amount) order_reduce_amount,
sum(original_amount) order_original_amount,
sum(split_final_amount) order_final_amount
from ${APP}.dwd_order_detail
where dt='$do_date'
and activity_id is not null
group by activity_rule_id,activity_id
),
tmp_pay as
(
select
activity_rule_id,
activity_id,
count(*) payment_count,
sum(split_activity_amount) payment_reduce_amount,
sum(split_final_amount) payment_amount
from ${APP}.dwd_order_detail
where (dt='$do_date'
or dt=date_add('$do_date',-1))
and activity_id is not null
and order_id in
(
select order_id from ${APP}.dwd_payment_info where dt='$do_date'
)
group by activity_rule_id,activity_id
)
insert overwrite table ${APP}.dws_activity_info_daycount partition(dt='$do_date')
select
activity_rule_id,
activity_id,
sum(order_count),
sum(order_reduce_amount),
sum(order_original_amount),
sum(order_final_amount),
sum(payment_count),
sum(payment_reduce_amount),
sum(payment_amount)
from
(
select
activity_rule_id,
activity_id,
order_count,
order_reduce_amount,
order_original_amount,
order_final_amount,
0 payment_count,
0 payment_reduce_amount,
0 payment_amount
from tmp_order
union all
select
activity_rule_id,
activity_id,
0 order_count,
0 order_reduce_amount,
0 order_original_amount,
0 order_final_amount,
payment_count,
payment_reduce_amount,
payment_amount
from tmp_pay
)t1
group by activity_rule_id,activity_id;"
# DWS layer, per-SKU daily action counts for $do_date. Seven CTEs gather:
#   tmp_order   — orders placed today (counts, amounts, activity/coupon usage);
#   tmp_pay     — orders paid today (order rows scanned over two partitions);
#   tmp_ri      — refund requests filed today;
#   tmp_rp      — refund payments today, joined back (15-day window) to the
#                 originating refund request for the refunded quantity;
#   tmp_cf      — cart-add / favor-add action log counts;
#   tmp_comment — appraise counts by rating code (1201 good .. 1204 default).
# All CTEs are zero-padded, unioned and summed into one row per sku_id.
dws_sku_action_daycount="
with
tmp_order as
(
select
sku_id,
count(*) order_count,
sum(sku_num) order_num,
sum(if(split_activity_amount>0,1,0)) order_activity_count,
sum(if(split_coupon_amount>0,1,0)) order_coupon_count,
sum(split_activity_amount) order_activity_reduce_amount,
sum(split_coupon_amount) order_coupon_reduce_amount,
sum(original_amount) order_original_amount,
sum(split_final_amount) order_final_amount
from ${APP}.dwd_order_detail
where dt='$do_date'
group by sku_id
),
tmp_pay as
(
select
sku_id,
count(*) payment_count,
sum(sku_num) payment_num,
sum(split_final_amount) payment_amount
from ${APP}.dwd_order_detail
where (dt='$do_date'
or dt=date_add('$do_date',-1))
and order_id in
(
select order_id from ${APP}.dwd_payment_info where dt='$do_date'
)
group by sku_id
),
tmp_ri as
(
select
sku_id,
count(*) refund_order_count,
sum(refund_num) refund_order_num,
sum(refund_amount) refund_order_amount
from ${APP}.dwd_order_refund_info
where dt='$do_date'
group by sku_id
),
tmp_rp as
(
select
rp.sku_id,
count(*) refund_payment_count,
sum(ri.refund_num) refund_payment_num,
sum(refund_amount) refund_payment_amount
from
(
select
order_id,
sku_id,
refund_amount
from ${APP}.dwd_refund_payment
where dt='$do_date'
)rp
left join
(
select
order_id,
sku_id,
refund_num
from ${APP}.dwd_order_refund_info
where dt>=date_add('$do_date',-15)
)ri
on rp.order_id=ri.order_id
and rp.sku_id=ri.sku_id
group by rp.sku_id
),
tmp_cf as
(
select
item sku_id,
sum(if(action_id='cart_add',1,0)) cart_count,
sum(if(action_id='favor_add',1,0)) favor_count
from ${APP}.dwd_action_log
where dt='$do_date'
and action_id in ('cart_add','favor_add')
group by item
),
tmp_comment as
(
select
sku_id,
sum(if(appraise='1201',1,0)) appraise_good_count,
sum(if(appraise='1202',1,0)) appraise_mid_count,
sum(if(appraise='1203',1,0)) appraise_bad_count,
sum(if(appraise='1204',1,0)) appraise_default_count
from ${APP}.dwd_comment_info
where dt='$do_date'
group by sku_id
)
insert overwrite table ${APP}.dws_sku_action_daycount partition(dt='$do_date')
select
sku_id,
sum(order_count),
sum(order_num),
sum(order_activity_count),
sum(order_coupon_count),
sum(order_activity_reduce_amount),
sum(order_coupon_reduce_amount),
sum(order_original_amount),
sum(order_final_amount),
sum(payment_count),
sum(payment_num),
sum(payment_amount),
sum(refund_order_count),
sum(refund_order_num),
sum(refund_order_amount),
sum(refund_payment_count),
sum(refund_payment_num),
sum(refund_payment_amount),
sum(cart_count),
sum(favor_count),
sum(appraise_good_count),
sum(appraise_mid_count),
sum(appraise_bad_count),
sum(appraise_default_count)
from
(
select
sku_id,
order_count,
order_num,
order_activity_count,
order_coupon_count,
order_activity_reduce_amount,
order_coupon_reduce_amount,
order_original_amount,
order_final_amount,
0 payment_count,
0 payment_num,
0 payment_amount,
0 refund_order_count,
0 refund_order_num,
0 refund_order_amount,
0 refund_payment_count,
0 refund_payment_num,
0 refund_payment_amount,
0 cart_count,
0 favor_count,
0 appraise_good_count,
0 appraise_mid_count,
0 appraise_bad_count,
0 appraise_default_count
from tmp_order
union all
select
sku_id,
0 order_count,
0 order_num,
0 order_activity_count,
0 order_coupon_count,
0 order_activity_reduce_amount,
0 order_coupon_reduce_amount,
0 order_original_amount,
0 order_final_amount,
payment_count,
payment_num,
payment_amount,
0 refund_order_count,
0 refund_order_num,
0 refund_order_amount,
0 refund_payment_count,
0 refund_payment_num,
0 refund_payment_amount,
0 cart_count,
0 favor_count,
0 appraise_good_count,
0 appraise_mid_count,
0 appraise_bad_count,
0 appraise_default_count
from tmp_pay
union all
select
sku_id,
0 order_count,
0 order_num,
0 order_activity_count,
0 order_coupon_count,
0 order_activity_reduce_amount,
0 order_coupon_reduce_amount,
0 order_original_amount,
0 order_final_amount,
0 payment_count,
0 payment_num,
0 payment_amount,
refund_order_count,
refund_order_num,
refund_order_amount,
0 refund_payment_count,
0 refund_payment_num,
0 refund_payment_amount,
0 cart_count,
0 favor_count,
0 appraise_good_count,
0 appraise_mid_count,
0 appraise_bad_count,
0 appraise_default_count
from tmp_ri
union all
select
sku_id,
0 order_count,
0 order_num,
0 order_activity_count,
0 order_coupon_count,
0 order_activity_reduce_amount,
0 order_coupon_reduce_amount,
0 order_original_amount,
0 order_final_amount,
0 payment_count,
0 payment_num,
0 payment_amount,
0 refund_order_count,
0 refund_order_num,
0 refund_order_amount,
refund_payment_count,
refund_payment_num,
refund_payment_amount,
0 cart_count,
0 favor_count,
0 appraise_good_count,
0 appraise_mid_count,
0 appraise_bad_count,
0 appraise_default_count
from tmp_rp
union all
select
sku_id,
0 order_count,
0 order_num,
0 order_activity_count,
0 order_coupon_count,
0 order_activity_reduce_amount,
0 order_coupon_reduce_amount,
0 order_original_amount,
0 order_final_amount,
0 payment_count,
0 payment_num,
0 payment_amount,
0 refund_order_count,
0 refund_order_num,
0 refund_order_amount,
0 refund_payment_count,
0 refund_payment_num,
0 refund_payment_amount,
cart_count,
favor_count,
0 appraise_good_count,
0 appraise_mid_count,
0 appraise_bad_count,
0 appraise_default_count
from tmp_cf
union all
select
sku_id,
0 order_count,
0 order_num,
0 order_activity_count,
0 order_coupon_count,
0 order_activity_reduce_amount,
0 order_coupon_reduce_amount,
0 order_original_amount,
0 order_final_amount,
0 payment_count,
0 payment_num,
0 payment_amount,
0 refund_order_count,
0 refund_order_num,
0 refund_order_amount,
0 refund_payment_count,
0 refund_payment_num,
0 refund_payment_amount,
0 cart_count,
0 favor_count,
appraise_good_count,
appraise_mid_count,
appraise_bad_count,
appraise_default_count
from tmp_comment
)t1
group by sku_id;"
# DWS layer, per-coupon daily stats for $do_date:
#   tmp_cu    — lifecycle counts (got / used-in-order / used-in-payment /
#               expired) derived from the timestamps in dwd_coupon_use; the
#               dt filter covers both the open ('9999-99-99') and the
#               closed-today partition;
#   tmp_order — amounts of today's orders that used a coupon;
#   tmp_pay   — amounts of orders paid today that used a coupon.
# The CTEs are zero-padded, unioned and summed into one row per coupon_id.
dws_coupon_info_daycount="
with
tmp_cu as
(
select
coupon_id,
sum(if(date_format(get_time,'yyyy-MM-dd')='$do_date',1,0)) get_count,
sum(if(date_format(using_time,'yyyy-MM-dd')='$do_date',1,0)) order_count,
sum(if(date_format(used_time,'yyyy-MM-dd')='$do_date',1,0)) payment_count,
sum(if(date_format(expire_time,'yyyy-MM-dd')='$do_date',1,0)) expire_count
from ${APP}.dwd_coupon_use
where dt='9999-99-99'
or dt='$do_date'
group by coupon_id
),
tmp_order as
(
select
coupon_id,
sum(split_coupon_amount) order_reduce_amount,
sum(original_amount) order_original_amount,
sum(split_final_amount) order_final_amount
from ${APP}.dwd_order_detail
where dt='$do_date'
and coupon_id is not null
group by coupon_id
),
tmp_pay as
(
select
coupon_id,
sum(split_coupon_amount) payment_reduce_amount,
sum(split_final_amount) payment_amount
from ${APP}.dwd_order_detail
where (dt='$do_date'
or dt=date_add('$do_date',-1))
and coupon_id is not null
and order_id in
(
select order_id from ${APP}.dwd_payment_info where dt='$do_date'
)
group by coupon_id
)
insert overwrite table ${APP}.dws_coupon_info_daycount partition(dt='$do_date')
select
coupon_id,
sum(get_count),
sum(order_count),
sum(order_reduce_amount),
sum(order_original_amount),
sum(order_final_amount),
sum(payment_count),
sum(payment_reduce_amount),
sum(payment_amount),
sum(expire_count)
from
(
select
coupon_id,
get_count,
order_count,
0 order_reduce_amount,
0 order_original_amount,
0 order_final_amount,
payment_count,
0 payment_reduce_amount,
0 payment_amount,
expire_count
from tmp_cu
union all
select
coupon_id,
0 get_count,
0 order_count,
order_reduce_amount,
order_original_amount,
order_final_amount,
0 payment_count,
0 payment_reduce_amount,
0 payment_amount,
0 expire_count
from tmp_order
union all
select
coupon_id,
0 get_count,
0 order_count,
0 order_reduce_amount,
0 order_original_amount,
0 order_final_amount,
0 payment_count,
payment_reduce_amount,
payment_amount,
0 expire_count
from tmp_pay
)t1
group by coupon_id;"
# DWS layer, per-province daily stats for $do_date: visits (tmp_vu),
# orders (tmp_order), payments (tmp_pay), refund requests (tmp_ro) and
# refund payments (tmp_rp), zero-padded, unioned and summed per province_id.
#
# FIX: tmp_order's WHERE previously read
#     dt='$do_date' or dt='9999-99-99' and date_format(create_time,...)='$do_date'
# In HiveQL AND binds tighter than OR, so that selected the WHOLE finished
# partition regardless of creation date. The partition disjunction is now
# parenthesized so only orders created on $do_date are counted.
dws_area_stats_daycount="
with
tmp_vu as
(
select
id province_id,
visit_count,
login_count,
visitor_count,
user_count
from
(
select
area_code,
count(*) visit_count,--访客访问次数
count(user_id) login_count,--用户访问次数,等价于sum(if(user_id is not null,1,0))
count(distinct(mid_id)) visitor_count,--访客人数
count(distinct(user_id)) user_count--用户人数
from ${APP}.dwd_page_log
where dt='$do_date'
and last_page_id is null
group by area_code
)tmp
left join ${APP}.dim_base_province area
on tmp.area_code=area.area_code
),
tmp_order as
(
select
province_id,
count(*) order_count,
sum(original_amount) order_original_amount,
sum(final_amount) order_final_amount
from ${APP}.dwd_order_info
where (dt='$do_date'
or dt='9999-99-99')
and date_format(create_time,'yyyy-MM-dd')='$do_date'
group by province_id
),
tmp_pay as
(
select
province_id,
count(*) payment_count,
sum(payment_amount) payment_amount
from ${APP}.dwd_payment_info
where dt='$do_date'
group by province_id
),
tmp_ro as
(
select
province_id,
count(*) refund_order_count,
sum(refund_amount) refund_order_amount
from ${APP}.dwd_order_refund_info
where dt='$do_date'
group by province_id
),
tmp_rp as
(
select
province_id,
count(*) refund_payment_count,
sum(refund_amount) refund_payment_amount
from ${APP}.dwd_refund_payment
where dt='$do_date'
group by province_id
)
insert overwrite table ${APP}.dws_area_stats_daycount partition(dt='$do_date')
select
province_id,
sum(visit_count),
sum(login_count),
sum(visitor_count),
sum(user_count),
sum(order_count),
sum(order_original_amount),
sum(order_final_amount),
sum(payment_count),
sum(payment_amount),
sum(refund_order_count),
sum(refund_order_amount),
sum(refund_payment_count),
sum(refund_payment_amount)
from
(
select
province_id,
visit_count,
login_count,
visitor_count,
user_count,
0 order_count,
0 order_original_amount,
0 order_final_amount,
0 payment_count,
0 payment_amount,
0 refund_order_count,
0 refund_order_amount,
0 refund_payment_count,
0 refund_payment_amount
from tmp_vu
union all
select
province_id,
0 visit_count,
0 login_count,
0 visitor_count,
0 user_count,
order_count,
order_original_amount,
order_final_amount,
0 payment_count,
0 payment_amount,
0 refund_order_count,
0 refund_order_amount,
0 refund_payment_count,
0 refund_payment_amount
from tmp_order
union all
select
province_id,
0 visit_count,
0 login_count,
0 visitor_count,
0 user_count,
0 order_count,
0 order_original_amount,
0 order_final_amount,
payment_count,
payment_amount,
0 refund_order_count,
0 refund_order_amount,
0 refund_payment_count,
0 refund_payment_amount
from tmp_pay
union all
select
province_id,
0 visit_count,
0 login_count,
0 visitor_count,
0 user_count,
0 order_count,
0 order_original_amount,
0 order_final_amount,
0 payment_count,
0 payment_amount,
refund_order_count,
refund_order_amount,
0 refund_payment_count,
0 refund_payment_amount
from tmp_ro
union all
select
province_id,
0 visit_count,
0 login_count,
0 visitor_count,
0 user_count,
0 order_count,
0 order_original_amount,
0 order_final_amount,
0 payment_count,
0 payment_amount,
0 refund_order_count,
0 refund_order_amount,
refund_payment_count,
refund_payment_amount
from tmp_rp
)t1
group by province_id;"
# Dispatch: run the DWS load whose table name is passed as $1,
# or every load in sequence when $1 is "all".
case $1 in
"dws_visitor_action_daycount" )
hive -e "$dws_visitor_action_daycount"
;;
"dws_user_action_daycount" )
hive -e "$dws_user_action_daycount"
;;
"dws_activity_info_daycount" )
hive -e "$dws_activity_info_daycount"
;;
"dws_area_stats_daycount" )
hive -e "$dws_area_stats_daycount"
;;
"dws_sku_action_daycount" )
hive -e "$dws_sku_action_daycount"
;;
"dws_coupon_info_daycount" )
hive -e "$dws_coupon_info_daycount"
;;
"all" )
hive -e "$dws_visitor_action_daycount$dws_user_action_daycount$dws_activity_info_daycount$dws_area_stats_daycount$dws_sku_action_daycount$dws_coupon_info_daycount"
;;
esac
|
package org.apache.commons.chain2.testutils;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import org.apache.commons.chain2.Catalog;
import org.apache.commons.chain2.Command;
import org.apache.commons.chain2.Context;
/**
 * Minimal in-memory {@link Catalog} implementation for tests: a plain
 * name-to-command map with no ordering guarantees.
 *
 * @version $Id$
 */
public class TestCatalog<K, V, C extends Context<K, V>> implements Catalog<K, V, C> {

    /** Registered commands keyed by name (parameterized; was a raw type). */
    Map<String, Command<K, V, C>> commands = new HashMap<String, Command<K, V, C>>();

    @Override
    public <CMD extends Command<K, V, C>> void addCommand(String name, CMD command) {
        commands.put(name, command);
    }

    /**
     * Looks up a command by name.
     *
     * @return the command registered under {@code name}, or {@code null} if absent
     */
    @SuppressWarnings("unchecked") // map is only populated via addCommand with Command<K, V, C>
    @Override
    public <CMD extends Command<K, V, C>> CMD getCommand(String name) {
        return (CMD) commands.get(name);
    }

    @Override
    public Iterator<String> getNames() {
        return commands.keySet().iterator();
    }
}
|
// j * j = i was pain
#include<bits/stdc++.h>
using namespace std;
#define PI acos(-1)
#define fi first
#define se second
#define pb push_back
#define sz(a) (int)(a).size()
#define all(c) (c).begin(), (c).end()
#define TIMESTAMP fprintf(stderr, "Execution time: %.3lf s.\n", 1.0*clock()/CLOCKS_PER_SEC)
typedef long long ll;
typedef long double ld;
typedef vector<int> vi;
typedef vector<ll> vll;
typedef pair <int, int> pii;
typedef vector <vi> vvi;
typedef vector <pii> vpii;
typedef vector<string> vs;
const int INF = 1e9;
const int MAXN = 28124;
const int MOD = 1e9 + 7;
bool ok[MAXN];
int n, m;
// Precompute, for every value below MAXN, whether it can be written as the
// sum of two abundant numbers (proper-divisor sum exceeds the number).
// Results go into the global ok[] table.
void precalc() {
    std::vector<int> abundant;
    for (int num = 1; num < MAXN; num++) {
        int properSum = -num;  // loop below adds every divisor incl. num itself
        for (int div = 1; div * div <= num; div++) {
            if (num % div == 0) {
                properSum += div + num / div;
                if (div * div == num) properSum -= div;  // square root counted once
            }
        }
        if (properSum > num) abundant.push_back(num);
    }
    // Mark every representable sum of two (not necessarily distinct) abundants.
    for (size_t i = 0; i < abundant.size(); i++) {
        for (size_t j = 0; j < abundant.size(); j++) {
            const int s = abundant[i] + abundant[j];
            if (s < MAXN) ok[s] = 1;
        }
    }
    // Sum of non-representable numbers (Project Euler 23 style); kept from the
    // original for debugging, normally left unprinted.
    int ans = 0;
    for (int num = 1; num < MAXN; num++) if (!ok[num]) ans += num;
    //cout << ans << endl;
}
// Answer one query: n is a sum of two abundant numbers iff ok[n] is set;
// every integer above 28123 is known to be representable.
void solve() {
    cin >> n;
    const bool representable = (n > 28123) || ok[n];
    cout << (representable ? "YES" : "NO") << endl;
}
// Entry point: untie C/C++ streams for fast I/O, build the lookup table once,
// then answer t independent queries.
int main() {
ios_base::sync_with_stdio(0);
cin.tie(0);
#ifdef LOCAL
freopen("xxx.in", "r", stdin);
freopen("xxx.out", "w", stdout);
#else
//freopen("xxx.in", "r", stdin);
//freopen("xxx.out", "w", stdout);
#endif
precalc();
int t;
cin >> t;
while(t--) solve();
return 0;
}
|
<filename>src/configuration.ts<gh_stars>1-10
import { JsonRpc } from "eosjs";
import fetch from 'node-fetch'
/**
* A configuration entry for an InfraBlockchain network
*
* @example ```js
* {
* networkId: '01',
* registryContract: 'infradidregi',
* rpcEndpoint: 'http://localhost:8888'
* }
* ```
*/
export interface NetworkConfiguration {
networkId: string
registryContract: string
rpcEndpoint: string
}
/** Options covering several InfraBlockchain networks at once. */
export interface MultiNetworkConfiguration {
// NOTE(review): optional, yet consumers below iterate it — confirm callers always supply it.
networks?: NetworkConfiguration[]
// Presumably disables revocation checking during resolution — confirm with consumers.
noRevocationCheck?: boolean
}
/** Public alias for the resolver's configuration input. */
export type ConfigurationOptions = MultiNetworkConfiguration
/** Runtime handle for one configured network: RPC client + registry contract name. */
export interface ConfiguredNetwork {
jsonRpc: JsonRpc,
registryContract: string
}
/** Map from networkId to its configured runtime handle. */
export type ConfiguredNetworks = Record<string, ConfiguredNetwork>
function configureNetwork(net: NetworkConfiguration) : ConfiguredNetwork {
const registryContract = net.registryContract
const jsonRpc = new JsonRpc(net.rpcEndpoint, { fetch } );
return { jsonRpc, registryContract }
}
/**
 * Builds a map keyed by networkId from the configured network list.
 *
 * Tolerates a missing `networks` array (the field is optional on
 * MultiNetworkConfiguration) by returning an empty map instead of
 * crashing on `conf.networks.length`. The former null-check on each
 * entry was dead code: configureNetwork never returns null.
 */
function configureNetworks(conf: MultiNetworkConfiguration): ConfiguredNetworks {
  const networks: ConfiguredNetworks = {}
  for (const net of conf.networks ?? []) {
    networks[net.networkId] = configureNetwork(net)
  }
  return networks
}
/**
 * Resolves the full network map for the resolver and validates that every
 * requested network produced a usable configuration.
 *
 * Guards the validation loop with `?? []`: `conf.networks` is optional and
 * the default argument `{}` previously made `for..of conf.networks` throw.
 *
 * @throws Error when a requested network is missing from the result, or
 *         when no network at all was configured.
 */
export function configureResolverWithNetworks(conf: ConfigurationOptions = {}): ConfiguredNetworks {
  const networks = {
    ...configureNetworks(conf),
  }
  for (const expectedNet of conf.networks ?? []) {
    if (!networks[expectedNet.networkId]) {
      throw new Error(
        `Chain network configuration for ${expectedNet.networkId} was attempted but no valid configuration was provided`
      )
    }
  }
  if (Object.keys(networks).length === 0) {
    throw new Error('InfraDIDResolver requires a provider configuration for at least one network')
  }
  return networks
}
|
package libs.trustconnector.scdp.smartcard.application.pboc;
import libs.trustconnector.scdp.smartcard.application.*;
import libs.trustconnector.scdp.smartcard.*;
import libs.trustconnector.scdp.smartcard.AID;
import libs.trustconnector.scdp.smartcard.SmartCardReader;
import libs.trustconnector.scdp.smartcard.application.Application;
/**
 * qPBOC (quick PBOC) smart-card application wrapper; currently adds no
 * behaviour beyond the generic {@link Application} base class.
 */
public class QPBOC extends Application
{
    /**
     * @param reader the smart-card reader used to communicate with the card
     * @param aid    application identifier used to select this applet
     */
    public QPBOC(final SmartCardReader reader, final AID aid) {
        super(reader, aid);
    }
}
|
#!/bin/sh
# Build the package and install it system-wide with the Python 2 interpreter.
# NOTE(review): requires sudo and a working `python2` on PATH.
python2 setup.py build
sudo python2 setup.py install
|
#!/bin/bash
# Fetch one document from each of two Elasticsearch clusters (term query on a
# configurable key) and write a side-by-side report to report.txt.
# Every input may be overridden through the environment; a default is used
# only when the variable is unset or empty. Example overrides:
#   ES1_URL="`cat ES_URL.prod.secret`"   IDX1='bitergia-git_symphonyoss_200604_enriched_200930'
#   ES2_URL="`cat ES_URL.test.secret`"   IDX2='sds-cncf-k8s-git'
#   KEY1=url_id  KEY2=url_id
#   ID1='finos/alloy/commit/f12d39aa02375258c444bd8815ba8bf621045615'
#   ID2='kubernetes-csi/external-attacher/commit/28c782912bf7418f67601bccdda559cc7e64e880'
[ -z "${ES1_URL}" ] && export ES1_URL="$(cat ES_URL.prod.secret)"
[ -z "${ES2_URL}" ] && export ES2_URL="$(cat ES_URL.test.secret)"
[ -z "${IDX1}" ] && export IDX1='bitergia-git_symphonyoss_200604_enriched_200930'
[ -z "${IDX2}" ] && export IDX2='sds-cncf-k8s-git'
[ -z "${KEY1}" ] && export KEY1='url_id'
[ -z "${KEY2}" ] && export KEY2='url_id'
[ -z "${ID1}" ] && export ID1='finos/alloy/commit/f12d39aa02375258c444bd8815ba8bf621045615'
[ -z "${ID2}" ] && export ID2='kubernetes-csi/external-attacher/commit/28c782912bf7418f67601bccdda559cc7e64e880'
# Pull both documents and pretty-print/sort the JSON for stable comparison.
curl -s -H 'Content-Type: application/json' "${ES1_URL}/${IDX1}/_search" -d "{\"query\":{\"term\":{\"${KEY1}\":\"${ID1}\"}}}" | jq -rS '.' > p2o.json
curl -s -H 'Content-Type: application/json' "${ES2_URL}/${IDX2}/_search" -d "{\"query\":{\"term\":{\"${KEY2}\":\"${ID2}\"}}}" | jq -rS '.' > dads.json
# Normalise line order and drop duplicate lines before building the report.
sort -r p2o.json | uniq > p2o.txt
sort -r dads.json | uniq > dads.txt
# Assemble the combined report in one redirected group.
{
echo "da-ds:"
echo '-------------------------------------------'
cat dads.txt
echo '-------------------------------------------'
echo "p2o:"
echo '-------------------------------------------'
cat p2o.txt
echo '-------------------------------------------'
} > report.txt
|
package io.cattle.platform.engine.model;
public interface Trigger {
String METADATA_SOURCE = "metadata";
void trigger(Long accountId, Long clusterId, Object resource, String source);
}
|
<gh_stars>10-100
const {
AoijsAPI,
DbdTsDb,
AoiMongoDb,
CustomDb,
Promisify,
} = require("../../../classes/Database.js");
/**
 * $leaderboard-style function: ranks every entry stored under a variable and
 * renders one page of formatted results.
 *
 * inside.splits: [variable, order=asc|desc, type=user|globalUser|server,
 *                 custom row format, list (page size), page, table]
 *
 * Fixes vs. the previous revision:
 *  - the ordering / pagination / return block sat INSIDE the sorted for-loop,
 *    so the function returned after the first entry; it now runs once, after
 *    the loop;
 *  - the {execute:} interpreter context referenced an undefined `guild`
 *    identifier; it now uses `d.guild`;
 *  - `customarr` assigned an undeclared `user` (implicit global).
 */
module.exports = async (d) => {
  const data = d.util.aoiFunc(d);
  if (data.err) return d.error(data.err);

  const [
    variable,
    order = "asc",
    type = "user",
    custom = "{top}. {name}: {value}",
    list = 10,
    page = 1,
    table = d.client.db.tables[0],
  ] = data.inside.splits;

  if (!d.client.variableManager.has(variable.addBrackets()))
    return d.aoiError.fnError(
      d,
      "custom",
      {},
      `Variable ${variable.addBrackets()} Not Found!`,
    );

  const idLength = type === "user" ? 2 : 1;
  let y = 0;
  let value;
  let content = [];
  const all = await d.client.db.all(table, variable.addBrackets(), idLength);

  // Sort descending by stored numeric value; the accessor depends on which
  // database backend is active.
  const sorted = all.sort((a, b) => {
    if (d.client.db instanceof AoijsAPI) {
      if (d.client.db.type === "aoi.db")
        return Number(b.value) - Number(a.value);
      else return Number(b.data.value) - Number(a.data.value);
    } else if (d.client.db instanceof DbdTsDb) {
      return (
        Number(b[variable.addBrackets()]) - Number(a[variable.addBrackets()])
      );
    } else if (d.client.db instanceof AoiMongoDb) {
      return Number(b.value) - Number(a.value);
    } else if (
      d.client.db instanceof CustomDb ||
      d.client.db instanceof Promisify
    ) {
      return (
        Number(
          b.value ||
            b[variable.addBrackets()] ||
            (typeof b.Data === "object" ? b.Data.value : b.Data),
        ) -
        Number(
          a.value ||
            a[variable.addBrackets()] ||
            (typeof a.Data === "object" ? a.Data.value : a.Data),
        )
      );
    }
  });

  for (const Data of sorted) {
    let user;
    // Extract the numeric value and resolve the owning user/guild for this row.
    if (d.client.db instanceof AoijsAPI) {
      if (d.client.db.type === "aoi.db") value = Number(Data.value);
      else value = Number(Data.data.value);
      user = await getdata(user, Data, 1);
    } else if (d.client.db instanceof DbdTsDb) {
      value = Number(Data[variable.addBrackets()]);
      user = await getdata(user, Data, 0);
    } else if (d.client.db instanceof AoiMongoDb) {
      value = Number(Data.value);
      user = await getdata(user, Data, 1);
    } else if (
      d.client.db instanceof CustomDb ||
      d.client.db instanceof Promisify
    ) {
      value = Number(
        Data.value ||
          Data[variable.addBrackets()] ||
          (typeof Data.Data === "object" ? Data.Data.value : Data.Data),
      );
      // Custom backends spell the record id in one of several ways.
      const rawKey = Data.key ?? Data.id ?? Data.ID ?? Data.Id;
      if (rawKey) {
        user = await customarr(rawKey.split("_"));
      } else {
        d.aoiError.fnError(
          d,
          "custom",
          {},
          "database Not Supported For LeaderBoard",
        );
        break;
      }
    }
    if (user) {
      user =
        typeof user === "object"
          ? type === "user"
            ? user.user
            : user
          : { id: user };
      y++;
      let text = custom
        .replaceAll(`{top}`, y)
        .replaceAll("{id}", user.id)
        .replaceAll("{tag}", user?.tag?.removeBrackets())
        .replaceAll(
          `{name}`,
          ["user", "globalUser"].includes(type)
            ? user.username?.removeBrackets()
            : user.name?.removeBrackets(),
        )
        .replaceAll(`{value}`, value);
      if (text.includes("{execute:")) {
        let ins = text.split("{execute:")[1].split("}")[0];
        const awaited = d.client.cmd.awaited.find((c) => c.name === ins);
        if (!awaited)
          return d.aoiError.fnError(
            d,
            "custom",
            { inside: data.inside },
            ` Invalid awaited command '${ins}' in`,
          );
        const CODE = await d.interpreter(
          d.client,
          {
            guild: d.guild, // was bare `guild` (undefined identifier)
            channel: d.message.channel,
            author: user,
          },
          d.args,
          awaited,
          undefined,
          true,
        );
        text = text.replace(`{execute:${ins}}`, CODE);
      }
      content.push(text);
    }
  }

  // Runs once, after every row has been processed (previously inside the loop).
  if (order === "desc") content = content.reverse();
  const px = page * list - list,
    py = page * list;
  data.result = content.slice(px, py).join("\n");
  return {
    code: d.util.setCode(data),
  };

  /** Resolve a user/guild from a split custom-backend key (arr[1] = id). */
  async function customarr(arr) {
    let user =
      (type === "globalUser"
        ? await d.util.getUser(d, arr[1])
        : type === "user"
          ? await d.util.getMember(d.guild, arr[1])
          : type === "server"
            ? await d.util.getGuild(d, arr[1])
            : arr[1]) ?? arr[1];
    return user;
  }

  /** Resolve a user/guild from a "_"-joined Data.key (key = index to use). */
  async function getdata(user, Data, key) {
    user =
      (type === "globalUser"
        ? await d.util.getUser(d, Data.key.split("_")[key])
        : type === "user"
          ? await d.util.getMember(d.guild, Data.key.split("_")[key])
          : type === "server"
            ? await d.util.getGuild(d, Data.key.split("_")[key])
            : Data.key.split("_")[key]) ?? Data.key.split("_")[key];
    return user;
  }
};
|
# CelebA images and attribute labels
# bash download_celeba.sh
# Downloads the CelebA archive from Dropbox, unpacks it under ./data/,
# then removes the archive.
URL=https://www.dropbox.com/s/d1kjpkqklf0uw77/celeba.zip?dl=0
ZIP_FILE=./data/celeba.zip
mkdir -p ./data/
# -N: only re-download when the remote copy is newer than the local file.
wget -N $URL -O $ZIP_FILE
unzip $ZIP_FILE -d ./data/
rm $ZIP_FILE
|
import React from 'react';
import Masonry from 'react-masonry-component';
import { Link } from 'react-router-dom';
// Options forwarded to react-masonry-component; disables the reflow animation.
const masonryOptions = {
  transitionDuration: 0,
};
type ProjectsProps = {
  // NOTE(review): IImage is not imported here — presumably an ambient/global
  // type declaration; confirm where it is declared.
  images: IImage[];
};
/**
 * Portfolio grid: renders one masonry card per image entry.
 *
 * Fixes: react-router's <Link> renders an <a> element itself, so the previous
 * <a> nested inside it produced invalid HTML (nested anchors — a leftover
 * Next.js pattern); the class now lives on the Link. The image alt text uses
 * the entry title instead of a fixed placeholder.
 */
export function Projects(props: ProjectsProps) {
  const images = props.images.map((image, idx) => {
    return (
      <div className="col-lg-4 col-sm-6 item" key={idx}>
        <div className="single-latest-projects">
          <img src={image.image} alt={image.title} />
          <div className="content">
            <span>{image.category}</span>
            <h3>{image.title}</h3>
          </div>
          <Link to="/project-details" className="link-btn"></Link>
        </div>
      </div>
    );
  });
  return (
    <div className="our-latest-projects ptb-80">
      <div className="container">
        <div className="section-title text-left">
          <h2>
            Our Latest <span>Projects</span>
          </h2>
          <p>
            Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do
            eiusmod tempor incididunt ut labore et dolore magna aliqua.
          </p>
        </div>
        <Masonry
          className={'row'} // default ''
          elementType={'div'} // default 'div'
          options={masonryOptions} // default {}
          disableImagesLoaded={false} // default false
          updateOnEachImageLoad={false} // default false and works only if disableImagesLoaded is false
        >
          {images}
        </Masonry>
      </div>
    </div>
  );
}
// Fallback portfolio entries rendered when no `images` prop is supplied.
Projects.defaultProps = {
  images: [
    {
      image: '/images/agency-portfolio-projects/ap-project1.jpg',
      category: 'Business',
      title: 'Business Stratagy',
    },
    {
      image: '/images/agency-portfolio-projects/ap-project2.jpg',
      category: 'Development',
      title: 'Web Development',
    },
    {
      image: '/images/agency-portfolio-projects/ap-project3.jpg',
      category: 'Marketing',
      title: 'Digital Marketing',
    },
    {
      image: '/images/agency-portfolio-projects/ap-project4.jpg',
      category: 'App',
      title: 'Mobile App Development',
    },
    {
      image: '/images/agency-portfolio-projects/ap-project6.jpg',
      category: 'Marketing',
      title: 'Email Marketing',
    },
    {
      image: '/images/agency-portfolio-projects/ap-project5.jpg',
      category: 'Development',
      title: 'E-commerce Development',
    },
    {
      image: '/images/agency-portfolio-projects/ap-project9.jpg',
      category: 'App',
      title: 'React App Development',
    },
    {
      image: '/images/agency-portfolio-projects/ap-project8.jpg',
      category: 'Writing',
      title: 'Content Writing',
    },
    {
      image: '/images/agency-portfolio-projects/ap-project7.jpg',
      category: 'IT',
      title: 'IT Consultancy',
    },
    {
      image: '/images/agency-portfolio-projects/ap-project11.jpg',
      category: 'Solutions',
      title: 'IT Solutions',
    },
    {
      image: '/images/agency-portfolio-projects/ap-project10.jpg',
      category: 'Marketing',
      title: 'Marketing & Reporting',
    },
  ],
};
export default Projects;
|
/**
 * Minimal credential payload pairing a token identifier with a password.
 */
export class Profile {
  // Presumably the JWT ID ("jti") claim of an associated token — confirm with callers.
  jti: string;
  // NOTE(review): held in plain text here; confirm it is hashed/encrypted upstream.
  password: string;
}
|
<gh_stars>0
#include <string.h>
#include <stdio.h>
#include <glib/gi18n.h>
#include <glib-object.h>
#include "pacat-control-object.h"
#include "pacat-control-stub.h"
/* Properties */
enum
{
PROP_0,
PROP_REC_ALLOWED
};
/* Signals emitted by PacatControl. */
enum
{
SIGNAL_REC_ALLOWED_CHANGED,
LAST_SIGNAL
};
/* Signal id table, populated in pacat_control_class_init(). */
static guint signals[LAST_SIGNAL] = { 0, };
G_DEFINE_TYPE(PacatControl, pacat_control, G_TYPE_OBJECT)
/* NOTE(review): macro appears unused in this file — the private area it
 * resolves is reserved in class_init; confirm before removing. */
#define PACAT_CONTROL_TYPE_GET_PRIVATE(o) (G_TYPE_INSTANCE_GET_PRIVATE ((o), PACAT_CONTROL_TYPE, userdata))
/* GObject finalizer: no instance-owned resources to free, just chain up. */
static void pacat_control_finalize (GObject *object)
{
    GObjectClass *parent_class = G_OBJECT_CLASS (pacat_control_parent_class);
    parent_class->finalize (object);
}
/* Instance initializer: no per-instance state is set up here; the back
 * pointer (->u) is assigned by the caller after construction (see dbus_init). */
static void pacat_control_init (PacatControl *obj __attribute__((__unused__)))
{
}
/* GObject property setter for "rec_allowed": updates the recording gate under
 * prop_mutex, corks/uncorks the PulseAudio capture stream to match, and emits
 * the "rec_allowed_changed" signal with the new value and the VM name. */
static void pacat_control_set_property (GObject *object,
guint prop_id,
const GValue *value,
GParamSpec *pspec)
{
PacatControl *p;
int rec_allowed;
p = PACAT_CONTROL (object);
switch (prop_id)
{
case PROP_REC_ALLOWED:
assert(p->u);
rec_allowed = g_value_get_boolean(value);
/* prop_mutex guards rec_allowed/rec_requested shared with the stream side */
g_mutex_lock(&p->u->prop_mutex);
p->u->rec_allowed = rec_allowed;
pacat_log("Setting audio-input to %s", p->u->rec_allowed ? "enabled" : "disabled");
if (p->u->rec_allowed && p->u->rec_requested) {
pacat_log("Recording start");
/* uncork (resume) the capture stream */
pa_stream_cork(p->u->rec_stream, 0, NULL, NULL);
} else if (!p->u->rec_allowed &&
(p->u->rec_requested || !pa_stream_is_corked(p->u->rec_stream))) {
pacat_log("Recording stop");
/* cork (pause) the capture stream */
pa_stream_cork(p->u->rec_stream, 1, NULL, NULL);
}
g_mutex_unlock(&p->u->prop_mutex);
/* notify about the change */
g_signal_emit(object, signals[SIGNAL_REC_ALLOWED_CHANGED], 0, rec_allowed, p->u->name);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
/* GObject property getter: exposes the current "rec_allowed" flag. */
static void pacat_control_get_property (GObject *object,
guint prop_id,
GValue *value,
GParamSpec *pspec)
{
    PacatControl *self = PACAT_CONTROL (object);

    switch (prop_id) {
    case PROP_REC_ALLOWED:
        assert (self->u);
        g_value_set_boolean (value, self->u->rec_allowed);
        break;
    default:
        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
        break;
    }
}
/* Class initializer: installs the D-Bus stub info, reserves space for the
 * userdata back pointer, wires property/finalize vtable entries, registers
 * the "rec_allowed" boolean property and the "rec_allowed_changed" signal
 * (carries the new boolean state and the VM name string). */
static void pacat_control_class_init (PacatControlClass *p_class)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (p_class);
dbus_g_object_type_install_info (PACAT_CONTROL_TYPE,
&dbus_glib_pacat_control_object_info);
G_GNUC_BEGIN_IGNORE_DEPRECATIONS
/* pointer to userdata struct */
g_type_class_add_private(p_class, sizeof(void*));
G_GNUC_END_IGNORE_DEPRECATIONS
gobject_class->finalize = pacat_control_finalize;
gobject_class->set_property = pacat_control_set_property;
gobject_class->get_property = pacat_control_get_property;
g_object_class_install_property(gobject_class, PROP_REC_ALLOWED,
g_param_spec_boolean("rec_allowed", "Allow use audio source", "Allow use audio source", FALSE, G_PARAM_READWRITE));
signals[SIGNAL_REC_ALLOWED_CHANGED] =
g_signal_new ("rec_allowed_changed",
G_OBJECT_CLASS_TYPE (p_class),
G_SIGNAL_RUN_LAST | G_SIGNAL_DETAILED,
0,
NULL, NULL,
g_cclosure_marshal_VOID__BOOLEAN,
G_TYPE_NONE, 2, G_TYPE_BOOLEAN, G_TYPE_STRING);
}
/* Connects to the system D-Bus, claims the per-VM well-known name
 * "org.vos.Audio.<name>", creates the PacatControl object and registers it at
 * /org/vos/audio. Returns 0 on success, -1 on failure (partially created
 * state is torn down on the failure path). */
int dbus_init(struct userdata *u) {
DBusGProxy *busProxy = NULL;
GError *error = NULL;
char obj_path[1024];
PacatControl *pc;
int result, ret;
#if !GLIB_CHECK_VERSION(2,35,0)
g_type_init ();
#endif
/* g_thread_init is deprecated but still required for older GLib. */
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wdeprecated-declarations"
g_thread_init (NULL);
#pragma GCC diagnostic pop
dbus_g_thread_init ();
error = NULL;
if (!(u->dbus = dbus_g_bus_get (DBUS_BUS_SYSTEM, &error))) {
goto fail;
}
busProxy = dbus_g_proxy_new_for_name(u->dbus,
DBUS_SERVICE_DBUS, DBUS_PATH_DBUS, DBUS_INTERFACE_DBUS);
if (busProxy == NULL) {
pacat_log("Failed to get a proxy for D-Bus");
goto fail;
}
/* Attempt to register the well-known name.
The RPC call requires two parameters:
- arg0: (D-Bus STRING): name to request
- arg1: (D-Bus UINT32): flags for registration.
(please see "org.freedesktop.DBus.RequestName" in
http://dbus.freedesktop.org/doc/dbus-specification.html)
Will return one uint32 giving the result of the RPC call.
We're interested in 1 (we're now the primary owner of the name)
or 4 (we were already the owner of the name)
The function will return FALSE if it sets the GError. */
if (snprintf(obj_path, sizeof(obj_path), "org.vos.Audio.%s", u->name) >= (int)sizeof(obj_path)) {
pacat_log("VM name too long");
goto fail;
}
if (!dbus_g_proxy_call(busProxy,
"RequestName",
&error,
G_TYPE_STRING, obj_path, /* name */
G_TYPE_UINT, 0, /* flags */
G_TYPE_INVALID, /* end of input args */
G_TYPE_UINT, &result, /* result */
G_TYPE_INVALID)) {
pacat_log("D-Bus.RequestName RPC failed: %s", error->message);
g_error_free (error);
goto fail;
}
/* Check the result code of the registration RPC. */
if (result != 1 && result != 4) {
pacat_log("Failed to get the primary well-known name.");
goto fail;
}
if (!(u->pacat_control = g_object_new (PACAT_CONTROL_TYPE, NULL))) {
pacat_log("failed to create pacat_control object");
goto fail;
}
/* Give the control object access to the shared state before export. */
pc = PACAT_CONTROL(u->pacat_control);
pc->u = u;
dbus_g_connection_register_g_object (u->dbus,
"/org/vos/audio", u->pacat_control);
ret = 0;
goto finish;
fail:
if (u->pacat_control) {
dbus_g_connection_unregister_g_object(u->dbus, u->pacat_control);
g_object_unref(u->pacat_control);
u->pacat_control = NULL;
}
if (u->dbus) {
dbus_g_connection_unref(u->dbus);
}
ret = -1;
finish:
if (busProxy)
g_object_unref (busProxy);
return ret;
}
|
# Fallback kubeconfig used when $KUBECONFIG is not already set.
KSET_DEFAULT_KUBECONFIG=$HOME/.kube/config
# Directory holding per-shell kubeconfig copies created by kon().
KSET_KUBECONFIG_DIR="$HOME/.kube/configs/"
# Tools whose bash completion is sourced below (when installed).
KSET_COMPLETION_TOOLS="kubectl oc velero tkn helm kn kustomize"
# Private per-shell kubeconfig path ($$ = this shell's PID).
KSET_KUBECONFIG=${KSET_KUBECONFIG_DIR}/config.$$
# Delete the per-shell kubeconfig copy created by kon().
kset_logout() {
    rm -f -- "$KSET_KUBECONFIG"
}
# Ensure the private kubeconfig is cleaned up when the shell exits.
trap kset_logout EXIT
# Switch this shell onto a private, flattened copy of the current kubeconfig.
# Fixes: path expansions are now quoted (paths with spaces no longer break),
# and the never-read local _KUBECONFIG was removed.
kon() {
    if [ "$KUBECONFIG" == "$KSET_KUBECONFIG" ]; then
        echo "Already on" >&2
    else
        # Remember the prior KUBECONFIG state exactly so koff() can restore it
        # (distinguishing "was unset" from "was set", even to an empty value).
        if [ -z ${KUBECONFIG+x} ]; then
            unset KSET_PREV_KUBECONFIG
        else
            KSET_PREV_KUBECONFIG=$KUBECONFIG
        fi
        mkdir -p "$KSET_KUBECONFIG_DIR"
        # Flatten the effective config into the private copy, readable only by us.
        ( umask 066 && kubectl config view --raw >"$KSET_KUBECONFIG" )
        export KUBECONFIG=$KSET_KUBECONFIG
    fi
    # Optional prompt integration.
    if hash kubeon 2>/dev/null; then
        kubeon
    fi
}
# Switch "off": restore the pre-kon KUBECONFIG (or unset it) and delete the
# per-shell kubeconfig copy. Quoting added so paths with spaces are safe.
koff() {
    if [ "$KUBECONFIG" != "$KSET_KUBECONFIG" ]; then
        echo "Already off" >&2
    else
        # Optional visual indicator if the kubeoff helper is installed.
        if hash kubeoff 2>/dev/null; then
            kubeoff
        fi
        # Restore the saved value, distinguishing "was unset" from "was empty".
        if [ -z "${KSET_PREV_KUBECONFIG+x}" ]; then
            unset KUBECONFIG
        else
            KUBECONFIG=$KSET_PREV_KUBECONFIG
            unset KSET_PREV_KUBECONFIG
        fi
        rm -f "$KSET_KUBECONFIG"
    fi
}
# Load bash completion for each installed tool.
# BUGFIX: `velero` was listed both explicitly in the for-list and inside
# KSET_COMPLETION_TOOLS, sourcing its completion twice; iterate the list once.
# this won't work in BASH version provided by macos
for cmd in $KSET_COMPLETION_TOOLS
do
    if hash "$cmd" 2>/dev/null
    then
        source <($cmd completion bash)
    fi
done
alias k=kubectl
complete -F __start_kubectl k
|
<reponame>aadorian/baseline
import { Commitment } from './commitment';
/**
 * A single node of a Merkle tree, exposed through the Commitment interface.
 * Immutable: both the hash and the leaf index are fixed at construction.
 */
export class MerkleTreeNode implements Commitment {
  // Parameter properties declare and assign the readonly fields in one step.
  constructor(readonly hash: string, readonly leafIndex: number) {}

  /** Position of this node's leaf within the tree. */
  location(): number {
    return this.leafIndex;
  }

  /** The node's hash value. */
  value(): string {
    return this.hash;
  }
}
|
# makefile
# Build pipeline for the CV: pandoc markdown -> LaTeX body -> PDF, plus a
# plain-text copy and an HTML fragment for the website.

# Drop the first 6 lines (front matter) of the pandoc source.
tail +7 cv/cv-pandoc.md > cv/cv-body.md
pandoc -S cv/cv-body.md -o cv/cv-body.tex
# Post-process the LaTeX: non-breaking spaces, degree abbreviations, spacing.
perl -p -i -e "s/~/\\\ /g" cv/cv-body.tex
perl -p -i -e "s/M.Sc./M.Sc.\\\/g" cv/cv-body.tex
perl -p -i -e "s/B.Sc./B.Sc.\\\/g" cv/cv-body.tex
perl -p -i -e "s/itemsep1pt/itemsep3pt/g" cv/cv-body.tex
pdflatex -output-directory=cv cv/EliasonCV.tex
# latexmk -outdir=cv cv/EliasonCV.tex
# Plain-text version: clean markdown body prefixed with a text header.
pandoc cv/cv-body.md -o cv/cv-body-clean.md
cat cv/cv-header.txt cv/cv-body-clean.md > cv/EliasonCV.txt
# Replace en-dashes with "--" in the text version.
perl -p -i -e "s/–/--/g" cv/EliasonCV.txt
# Clean up intermediates and LaTeX byproducts.
rm cv/cv-body.md
#rm cv-body.tex
rm cv/cv-body-clean.md
rm cv/*.log cv/*.out cv/*.aux cv/*.fdb_latexmk cv/*.fls
# and pre-process the HTML with pandoc
# because redcarpet markdown doesn't do definition lists
pandoc -S cv/cv-pandoc.md -o cv/cv-temp.html
cat cv/cv-pandoc-header.md cv/cv-temp.html > cv/cv.html
rm cv/cv-temp.html
# cp EliasonCV.pdf ~/Dropbox/public/EliasonCV.pdf
|
<reponame>Anth0nyWu/mmf<filename>mmf/datasets/builders/visual_genome/dataset.py
# Copyright (c) Facebook, Inc. and its affiliates.
import copy
import json
import torch
from mmf.common.sample import Sample, SampleList
from mmf.datasets.builders.vqa2 import VQA2Dataset
from mmf.datasets.databases.scene_graph_database import SceneGraphDatabase
from mmf.datasets.databases.region_description_database import RegionDescriptionDatabase
from mmf.datasets.databases.metadata_database import MetadataDatabase
from mmf.utils.general import get_absolute_path
from mmf.utils.configuration import get_mmf_env
_CONSTANTS = {"image_id_key": "image_id"}
class VisualGenomeDataset(VQA2Dataset):
    """Visual Genome VQA dataset.

    Extends ``VQA2Dataset`` with optional auxiliary annotations — scene
    graphs, objects, relationships and region descriptions — loaded from
    companion databases and attached to each sample.
    """

    def __init__(self, config, dataset_type, imdb_file_index, *args, **kwargs):
        super().__init__(
            config,
            dataset_type,
            imdb_file_index,
            dataset_name="visual_genome",
            *args,
            **kwargs
        )
        # Which auxiliary annotations should be attached to each sample.
        self._return_scene_graph = config.return_scene_graph
        self._return_objects = config.return_objects
        self._return_relationships = config.return_relationships
        self._return_region_descriptions = config.return_region_descriptions
        # When true, samples whose answer is out of vocabulary are skipped.
        self._no_unk = config.get("no_unk", False)
        self.scene_graph_db = None
        self.region_descriptions_db = None
        self.image_metadata_db = None
        self._max_feature = config.max_features
        build_scene_graph_db = (
            self._return_scene_graph
            or self._return_objects
            or self._return_relationships
        )
        if self._return_region_descriptions:
            print("use_region_descriptions_true")
            self.region_descriptions_db = self.build_region_descriptions_db()
            self.image_metadata_db = self.build_image_metadata_db()
        if build_scene_graph_db:
            # BUGFIX: a SceneGraphDatabase was previously constructed here
            # from config.scene_graph_files and then immediately overwritten
            # by build_scene_graph_db(); the redundant construction (and the
            # wasted database load) has been removed.
            print("use_scene_graph_true")
            self.scene_graph_db = self.build_scene_graph_db()

    def build_region_descriptions_db(self):
        """Load the region-description database for the current split."""
        region_descriptions_path = self._get_path_based_on_index(
            self.config, "region_descriptions", self._index
        )
        print("region_descriptions:", region_descriptions_path)
        return RegionDescriptionDatabase(
            self.config, region_descriptions_path, annotation_db=self.annotation_db
        )

    def build_image_metadata_db(self):
        """Load the per-image metadata (height/width/url) database."""
        metadatas_path = self._get_path_based_on_index(
            self.config, "metadatas", self._index
        )
        print("metadatas:", metadatas_path)
        return MetadataDatabase(
            self.config, metadatas_path, annotation_db=self.annotation_db
        )

    def build_scene_graph_db(self):
        """Load the scene-graph database for the current split."""
        scene_graph_files_path = self._get_path_based_on_index(
            self.config, "scene_graph_files", self._index
        )
        print("scene_graph_files_path:", scene_graph_files_path)
        return SceneGraphDatabase(
            self.config, scene_graph_files_path, annotation_db=self.annotation_db
        )

    def load_item(self, idx):
        """Load the idx-th QA sample, attaching the configured annotations.

        When ``no_unk`` is set, samples whose answer maps to UNK are skipped
        by advancing circularly to the next index.
        """
        sample_info = self.annotation_db[idx]
        sample_info = self._preprocess_answer(sample_info)
        sample_info["question_id"] = sample_info["id"]
        if self._check_unk(sample_info):
            return self.load_item((idx + 1) % len(self.annotation_db))
        current_sample = super().load_item(idx)  # encodes the question text
        current_sample = self._load_scene_graph(idx, current_sample)
        # Also runs the text processor over region-description phrases.
        current_sample = self._load_region_description(idx, current_sample)
        return current_sample

    def _get_image_id(self, idx):
        """Image id associated with the idx-th annotation entry."""
        return self.annotation_db[idx][_CONSTANTS["image_id_key"]]

    def _get_image_info(self, idx):
        """Region descriptions plus image metadata for the idx-th sample."""
        # Deep copy so that we can directly update the nested dicts.
        # NOTE(review): this reads region_descriptions_db and
        # image_metadata_db, which are only built when
        # return_region_descriptions is enabled — confirm scene-graph-only
        # configurations cannot reach this path.
        img_id = self._get_image_id(idx)
        image_info = copy.deepcopy(self.region_descriptions_db[img_id])
        image_info["height"] = self.image_metadata_db[img_id]["height"]
        image_info["width"] = self.image_metadata_db[img_id]["width"]
        image_info["url"] = self.image_metadata_db[img_id]["url"]
        return image_info

    def _preprocess_answer(self, sample_info):
        """Normalize the first ground-truth answer (strip punctuation/articles)."""
        sample_info["answers"] = [
            self.vg_answer_preprocessor(
                {"text": sample_info["answers"][0]},
                remove=["?", ",", ".", "a", "an", "the"],
            )["text"]
        ]
        return sample_info

    def _check_unk(self, sample_info):
        """Return True when no_unk is set and the answer is out of vocabulary."""
        if not self._no_unk:
            return False
        index = self.answer_processor.word2idx(sample_info["answers"][0])
        return index == self.answer_processor.answer_vocab.UNK_INDEX

    def _load_scene_graph(self, idx, sample):
        """Attach objects / relationships / scene-graph regions to sample."""
        if self.scene_graph_db is None:
            return sample
        objects, object_map = self._load_objects(idx)
        if self._return_objects:
            sample.objects = objects
        relationships, relationship_map = self._load_relationships(idx, object_map)
        if self._return_relationships:
            sample.relationships = relationships
        # BUGFIX: this used to call self._load_regions(idx, object_map,
        # relationship_map), but that three-argument definition was shadowed
        # by the later one-argument _load_regions, so the call raised
        # TypeError; the scene-graph variant now has its own name.
        regions, _ = self._load_scene_graph_regions(idx, object_map, relationship_map)
        if self._return_scene_graph:
            sample.scene_graph = regions
        return sample

    def _load_region_description(self, idx, sample):
        """Attach the (processed) region descriptions to the sample."""
        if self.region_descriptions_db is None:
            return sample
        regions, _ = self._load_regions(idx)
        sample.region_description = regions
        return sample

    def _load_objects(self, idx):
        """Load objects for the image, normalizing boxes to [0, 1].

        Returns (SampleList of objects, map from object_id to object).
        """
        image_info = self._get_image_info(idx)
        image_height = image_info["height"]
        image_width = image_info["width"]
        object_map = {}
        objects = []
        for obj in image_info["objects"]:
            obj["synsets"] = self.synset_processor({"tokens": obj["synsets"]})["text"]
            obj["names"] = self.name_processor({"tokens": obj["names"]})["text"]
            # Normalize box size/position by image dimensions; rename h/w.
            obj["height"] = obj["h"] / image_height
            obj.pop("h")
            obj["width"] = obj["w"] / image_width
            obj.pop("w")
            obj["y"] /= image_height
            obj["x"] /= image_width
            obj["attributes"] = self.attribute_processor({"tokens": obj["attributes"]})[
                "text"
            ]
            obj = Sample(obj)
            object_map[obj["object_id"]] = obj
            objects.append(obj)
        objects = SampleList(objects)
        return objects, object_map

    def _load_relationships(self, idx, object_map):
        """Load relationships, resolving object/subject ids via object_map."""
        # NOTE(review): the _return_* flags look boolean, so `is None` is
        # probably always False; preserved as-is to avoid a behavior change.
        if self._return_relationships is None and self._return_scene_graph is None:
            return None, None
        image_info = self._get_image_info(idx)
        relationship_map = {}
        relationships = []
        for relationship in image_info["relationships"]:
            relationship["synsets"] = self.synset_processor(
                {"tokens": relationship["synsets"]}
            )["text"]
            relationship["predicate"] = self.predicate_processor(
                {"tokens": relationship["predicate"]}
            )["text"]
            relationship["object"] = object_map[relationship["object_id"]]
            relationship["subject"] = object_map[relationship["subject_id"]]
            relationship = Sample(relationship)
            relationship_map[relationship["relationship_id"]] = relationship
            relationships.append(relationship)
        relationships = SampleList(relationships)
        return relationships, relationship_map

    def _load_scene_graph_regions(self, idx, object_map, relationship_map):
        """Load scene-graph regions with resolved objects and relationships.

        Renamed from a second ``_load_regions`` definition that was shadowed
        by the region-description loader below.
        """
        if self._return_scene_graph is None:
            return None, None
        image_info = self._get_image_info(idx)
        image_height = image_info["height"]
        image_width = image_info["width"]
        region_map = {}
        regions = []
        for region in image_info["regions"]:
            for synset in region["synsets"]:
                synset["entity_name"] = self.name_processor(
                    {"tokens": [synset["entity_name"]]}
                )["text"]
                synset["synset_name"] = self.synset_processor(
                    {"tokens": [synset["synset_name"]]}
                )["text"]
            # Normalize box geometry by image dimensions.
            region["height"] /= image_height
            region["width"] /= image_width
            region["y"] /= image_height
            region["x"] /= image_width
            relationships = []
            objects = []
            for relationship_idx in region["relationships"]:
                relationships.append(relationship_map[relationship_idx])
            for object_idx in region["objects"]:
                objects.append(object_map[object_idx])
            region["relationships"] = relationships
            region["objects"] = objects
            region["phrase"] = self.text_processor({"text": region["phrase"]})["text"]
            region = Sample(region)
            region_map[region["region_id"]] = region
            regions.append(region)
        regions = SampleList(regions)
        return regions, region_map

    def _load_regions(self, idx):
        """Load region descriptions with normalized boxes and encoded phrases.

        Returns (SampleList of regions, map from region_id to region).
        """
        if self._return_region_descriptions is None:
            return None, None
        image_info = self._get_image_info(idx)
        image_height = image_info["height"]
        image_width = image_info["width"]
        region_map = {}
        regions = []
        for region in image_info["regions"]:
            # Normalize box geometry by image dimensions.
            region["height"] /= image_height
            region["width"] /= image_width
            region["y"] /= image_height
            region["x"] /= image_width
            region["phrase"] = self.text_processor({"text": region["phrase"]})["text"]
            # The text processor returns a tensor; store it as a numpy array.
            region["phrase"] = region["phrase"].numpy()
            region = Sample(region)
            region_map[region["region_id"]] = region
            regions.append(region)
        regions = SampleList(regions)
        regions["image_id"] = torch.tensor(regions["image_id"][0], dtype=torch.int32)
        regions["image_url"] = image_info["url"]
        return regions, region_map
|
#!/bin/bash -eE
# Report vexxhost slave instances that have been locked and running for more
# than MAX_DURATION seconds.
set -o pipefail
[ "${DEBUG,,}" == "true" ] && set -x

my_file="$(readlink -e "$0")"
my_dir="$(dirname "$my_file")"
source "$my_dir/definitions"

# Instances locked for longer than this many seconds (3 days) are reported.
MAX_DURATION="259200"

# IDs of locked, active slave instances tagged for vexxhost.
LOCKED=$(nova list --tags-any "SLAVE=vexxhost" --status ACTIVE --field locked,name | grep 'True' | tr -d '|' | awk '{print $1}' || true)
[[ -z "$LOCKED" ]] && exit

C_DATE=$(date +%s)
for i in $LOCKED; do
    echo "$i"
    # Launch time of the instance, as epoch seconds.
    L_DATE=$(date --date "$(nova show "$i" | grep 'OS-SRV-USG:launched_at' | tr -d '|' | awk '{print $NF}')" +%s)
    DURATION=$(($C_DATE - $L_DATE))
    if [[ "$DURATION" -ge "$MAX_DURATION" ]]; then
        EXCEED+=("$i")
    fi
done
[[ "${#EXCEED[*]}" -eq "0" ]] && exit

for h in "${EXCEED[@]}"; do
    echo "${h} $(openstack server show "${h}" -f json | jq -r '.name')" >> vexxhost.report.txt
done

if [ -f vexxhost.report.txt ]; then
    # BUGFIX: `cat - file | tee file` truncates the report while it is still
    # being read; assemble the final report in a temp file and move it in
    # place instead.
    TMP_REPORT=$(mktemp)
    echo "VEXXHOST instances alive more than 3 days:" | cat - vexxhost.report.txt | tee "$TMP_REPORT"
    mv "$TMP_REPORT" vexxhost.report.txt
fi
|
#!/bin/bash -l
set -x
set -e
# BUGFIX: a bare `set -o` only prints the current option settings; the intent
# here was to make pipeline failures fatal.
set -o pipefail

source /etc/profile.d/modules.sh
module load mpi

# Wrap mpirun so callers don't repeat host bookkeeping: launches one rank per
# host listed in the comma-separated AZ_BATCH_HOST_LIST.
function mpirun() {
    IFS=',' read -ra HOSTS <<< "$AZ_BATCH_HOST_LIST"
    nodes=${#HOSTS[@]}
    /usr/lib64/openmpi/bin/mpirun \
        -np "$nodes" \
        --allow-run-as-root \
        --host "$AZ_BATCH_HOST_LIST" \
        "$@"
}

mpirun python36 /root/job.py
|
import { BindGroupInfo, WGSL } from "./WGSL";
import { ShaderMacroCollection } from "../shader";
import {
WGSLBeginPositionVert,
WGSLBlendShapeInput,
WGSLBlendShapeVert,
WGSLCommon,
WGSLCommonVert,
WGSLPositionVert,
WGSLSkinningVert,
WGSLUVShare,
WGSLUVVert
} from "./functors";
import { WGSLEncoder } from "./WGSLEncoder";
/**
 * Generates the WGSL source for the unlit material's vertex stage by chaining
 * functor fragments (common declarations, blend shapes, skinning, UV, final
 * position) through a source encoder.
 */
export class WGSLUnlitVertex extends WGSL {
  // Functor fragments; each contributes one piece of the generated shader.
  private _commonVert: WGSLCommonVert;
  private _blendShapeInput: WGSLBlendShapeInput;
  private _uvShare: WGSLUVShare;
  private _beginPositionVert: WGSLBeginPositionVert;
  private _blendShapeVert: WGSLBlendShapeVert;
  private _skinningVert: WGSLSkinningVert;
  private _uvVert: WGSLUVVert;
  private _positionVert: WGSLPositionVert;

  constructor() {
    super();
    // "VertexIn"/"VertexOut" are the struct names; "in"/"out" the variable
    // names used inside the generated entry point.
    this._commonVert = new WGSLCommonVert("VertexIn");
    this._blendShapeInput = new WGSLBlendShapeInput("VertexIn");
    this._uvShare = new WGSLUVShare("VertexOut");
    this._beginPositionVert = new WGSLBeginPositionVert("in", "out");
    this._blendShapeVert = new WGSLBlendShapeVert("in", "out");
    this._skinningVert = new WGSLSkinningVert("in", "out");
    this._uvVert = new WGSLUVVert("in", "out");
    this._positionVert = new WGSLPositionVert("in", "out");
  }

  /**
   * Builds the vertex-stage WGSL for the given macro set.
   * @param macros - enabled shader macros controlling which fragments emit code
   * @returns the generated source plus its bind-group layout information
   */
  compile(macros: ShaderMacroCollection): [string, BindGroupInfo] {
    this._source = "";
    this._bindGroupInfo.clear();
    // Counters assign struct-member locations for the input/output structs.
    const inputStructCounter = WGSLEncoder.startCounter();
    const outputStructCounter = WGSLEncoder.startCounter(0);
    {
      const encoder = this.createSourceEncoder(GPUShaderStage.VERTEX);
      this._commonVert.execute(encoder, macros);
      this._blendShapeInput.execute(encoder, macros, inputStructCounter);
      this._uvShare.execute(encoder, macros, outputStructCounter);
      encoder.addBuiltInoutType("VertexOut", "position", "position", "vec4<f32>");
      // Entry-point body: each functor appends its statements in order.
      encoder.addRenderEntry([["in", "VertexIn"]], ["out", "VertexOut"], () => {
        let source: string = "";
        source += this._beginPositionVert.execute(macros);
        source += this._blendShapeVert.execute(macros);
        source += this._skinningVert.execute(macros);
        source += this._uvVert.execute(macros);
        source += this._positionVert.execute(macros);
        return source;
      });
      encoder.flush();
    }
    WGSLEncoder.endCounter(inputStructCounter);
    WGSLEncoder.endCounter(outputStructCounter);
    return [this._source, this._bindGroupInfo];
  }
}
/**
 * Generates the WGSL source for the unlit material's fragment stage:
 * base color, optional base texture sampling, and optional alpha cutoff.
 */
export class WGSLUnlitFragment extends WGSL {
  private readonly _commonFrag: WGSLCommon;
  private readonly _uvShareFrag: WGSLUVShare;

  constructor() {
    super();
    this._commonFrag = new WGSLCommon();
    this._uvShareFrag = new WGSLUVShare("VertexOut");
  }

  /**
   * Builds the fragment-stage WGSL for the given macro set.
   * @param macros - enabled shader macros (base texture, alpha cutoff)
   * @returns the generated source plus its bind-group layout information
   */
  compile(macros: ShaderMacroCollection): [string, BindGroupInfo] {
    this._source = "";
    this._bindGroupInfo.clear();
    const fragmentInputCounter = WGSLEncoder.startCounter(0);
    {
      const enc = this.createSourceEncoder(GPUShaderStage.FRAGMENT);
      this._commonFrag.execute(enc, macros);
      this._uvShareFrag.execute(enc, macros, fragmentInputCounter);
      enc.addUniformBinding("u_baseColor", "vec4<f32>", 0);
      enc.addUniformBinding("u_alphaCutoff", "f32", 0);
      enc.addInoutType("Output", 0, "finalColor", "vec4<f32>");
      const hasBaseTexture = macros.isEnable("HAS_BASE_TEXTURE");
      if (hasBaseTexture) {
        enc.addSampledTextureBinding("u_baseTexture", "texture_2d<f32>", "u_baseSampler", "sampler");
      }
      // Entry-point body assembled from independent statement snippets.
      enc.addRenderEntry([["in", "VertexOut"]], ["out", "Output"], () => {
        const parts: string[] = ["var baseColor = u_baseColor;\n"];
        if (hasBaseTexture) {
          parts.push("var textureColor = textureSample(u_baseTexture, u_baseSampler, in.v_uv);\n");
          parts.push("baseColor = baseColor * textureColor;\n");
        }
        if (macros.isEnable("NEED_ALPHA_CUTOFF")) {
          parts.push("if( baseColor.a < u_alphaCutoff ) {\n");
          parts.push(" discard;\n");
          parts.push("}\n");
        }
        parts.push("out.finalColor = baseColor;\n");
        return parts.join("");
      });
      enc.flush();
    }
    WGSLEncoder.endCounter(fragmentInputCounter);
    return [this._source, this._bindGroupInfo];
  }
}
|
def create_linting_config(ctx, args):
    """Builds the lint command, its output, and its runfiles for a rule.

    Args:
        ctx: the Bazel rule context.
        args: extra command-line arguments passed to the linter.

    Returns:
        A struct with lint_command, output, and runfiles fields.
    """
    # Shell command: linter executable, extra args, then all source paths.
    lint_command = "%s %s %s" % (
        ctx.executable.linter.short_path,
        args,
        " ".join([source.short_path for source in ctx.files.srcs]),
    )
    return struct(
        lint_command = lint_command,
        # The rule's declared executable output.
        output = ctx.outputs.executable,
        # Linter binary plus sources must be present at run time.
        runfiles = ctx.runfiles(files = ctx.files.linter + ctx.files.srcs),
    )
|
import React, { Component } from 'react'
import PropTypes from 'prop-types'
import styled from 'styled-components'
import merge from 'lodash.merge'
// Sliding strip that holds all slides; positioned with translate3d (GPU
// compositing) and animated with an ease-out cubic-bezier transition.
const Wrapper = styled.div`
  align-items: center;
  display: flex;
  transform: translate3d(${p => p.translateOffset}px, 0, 0);
  transition: transform ${p => p.transitionDuration}ms cubic-bezier(0.215, 0.61, 0.355, 1);
  width: ${p => p.trackWidth}px
`
class Track extends Component {
render () {
if (!this.props.width) return null
const {
dragOffsetX,
isInfinite,
numberOfSlides,
shouldTransition,
slidesToShow,
width
} = this.props
const children = [ ...this.props.children ]
let currentSlide = this.props.currentSlide
if (isInfinite) {
children.push(this.props.children.slice(0, slidesToShow))
children.unshift(this.props.children.slice(numberOfSlides - slidesToShow))
currentSlide = this.props.currentSlide + slidesToShow
}
const transitionDuration = (shouldTransition) ? this.props.transitionDuration : 0
const slideWidth = width / slidesToShow
const trackWidth = slideWidth * children.length
const translateOffset = currentSlide * slideWidth * -1
const transform = (dragOffsetX)
? `translate3d(${translateOffset + dragOffsetX}px, 0, 0)`
: null
return (
<Wrapper
shouldTransition={shouldTransition}
style={{ transform }}
transitionDuration={transitionDuration}
translateOffset={translateOffset}
trackWidth={trackWidth}
>
{React.Children.map(children, (child, index) => {
return React.cloneElement(
child,
merge({}, child.props, {
style: {
width: slideWidth
}
})
)
})}
</Wrapper>
)
}
}
// Runtime prop validation for Track; all props are optional here — callers
// are expected to supply width/currentSlide once measurement completes.
Track.propTypes = {
  children: PropTypes.node,
  currentSlide: PropTypes.number,
  dragOffsetX: PropTypes.number,
  isInfinite: PropTypes.bool,
  numberOfSlides: PropTypes.number,
  shouldTransition: PropTypes.bool,
  slidesToShow: PropTypes.number,
  transitionDuration: PropTypes.number,
  width: PropTypes.number
}

export default Track
|
# for bash-git-prompt integration, shows current bucket if you're in one
function prompt_callback {
    # Use `command -v` (POSIX) instead of `which`, and two tests joined by
    # && instead of the obsolescent `[ ... -a ... ]` form.
    if command -v t >/dev/null 2>&1 && [ -n "$T_BUCKET_ID" ]; then
        echo -n " bucket:$(t title "$T_BUCKET_ID")"
    fi
}
|
<reponame>menghuanlunhui/springboot-master<filename>jframe-demos/base-test/src/main/java/com/jf/system/redisson/lock/DistributedLocker.java
package com.jf.system.redisson.lock;
import com.jf.exception.SysException;
/**
 * Created with IntelliJ IDEA.
 * Description: manager interface for acquiring distributed locks.
 * User: admin
 * Date: 2018-01-03
 * Time: 09:53
 */
public interface DistributedLocker {

    /**
     * Acquire the named lock, run the worker while holding it, and return
     * the worker's result.
     *
     * @param resourceName name of the lock
     * @param worker       callback executed once the lock has been acquired
     * @param <T>          type of the worker's result
     * @return the data produced by the worker's business logic
     */
    <T> T lock(String resourceName, AquiredLockWorker<T> worker) throws SysException, Exception;

    /**
     * Same as {@link #lock(String, AquiredLockWorker)} but with an explicit
     * lock hold time.
     *
     * @param lockTime maximum time to hold the lock
     *                 (time unit defined by the implementation — TODO confirm)
     */
    <T> T lock(String resourceName, AquiredLockWorker<T> worker, int lockTime) throws SysException, Exception;
}
|
package stats
import (
"fmt"
"net/http"
"path/filepath"
"testing"
"time"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/v14/internal/git/gittest"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper"
"gitlab.com/gitlab-org/gitaly/v14/internal/testhelper/testcfg"
)
// TestClone performs an analysis clone against a local test git server and
// checks the collected statistics: reference discovery (GET) and fetch-pack
// (POST) status, packet counts, payload sizes, and timing monotonicity.
func TestClone(t *testing.T) {
	cfg, _, repoPath := testcfg.BuildWithRepo(t)
	ctx, cancel := testhelper.Context()
	defer cancel()
	serverPort, stopGitServer := gittest.GitServer(t, cfg, repoPath, nil)
	defer func() {
		require.NoError(t, stopGitServer())
	}()
	clone, err := PerformHTTPClone(ctx, fmt.Sprintf("http://localhost:%d/%s", serverPort, filepath.Base(repoPath)), "", "", false)
	require.NoError(t, err, "perform analysis clone")
	// NOTE(review): despite the name, this is used as a lower bound on the
	// number of wanted refs, not requests.
	const expectedRequests = 90 // based on contents of _support/gitlab-test.git-packed-refs
	require.Greater(t, clone.FetchPack.RefsWanted(), expectedRequests, "number of wanted refs")
	require.Equal(t, 200, clone.ReferenceDiscovery.HTTPStatus(), "get status")
	require.Greater(t, clone.ReferenceDiscovery.Packets(), 0, "number of get packets")
	require.Greater(t, clone.ReferenceDiscovery.PayloadSize(), int64(0), "get payload size")
	require.Greater(t, len(clone.ReferenceDiscovery.Caps()), 10, "get capabilities")
	// Each timing milestone must be strictly later than the previous one.
	previousValue := time.Duration(0)
	for _, m := range []struct {
		desc  string
		value time.Duration
	}{
		{"time to receive response header", clone.ReferenceDiscovery.ResponseHeader()},
		{"time to first packet", clone.ReferenceDiscovery.FirstGitPacket()},
		{"time to receive response body", clone.ReferenceDiscovery.ResponseBody()},
	} {
		require.True(t, m.value > previousValue, "get: expect %s (%v) to be greater than previous value %v", m.desc, m.value, previousValue)
		previousValue = m.value
	}
	require.Equal(t, 200, clone.FetchPack.HTTPStatus(), "post status")
	require.Greater(t, clone.FetchPack.Packets(), 0, "number of post packets")
	require.Greater(t, clone.FetchPack.BandPackets("progress"), 0, "number of progress packets")
	require.Greater(t, clone.FetchPack.BandPackets("pack"), 0, "number of pack packets")
	require.Greater(t, clone.FetchPack.BandPayloadSize("progress"), int64(0), "progress payload bytes")
	require.Greater(t, clone.FetchPack.BandPayloadSize("pack"), int64(0), "pack payload bytes")
	// Same strict ordering check for the fetch-pack milestones.
	previousValue = time.Duration(0)
	for _, m := range []struct {
		desc  string
		value time.Duration
	}{
		{"time to receive response header", clone.FetchPack.ResponseHeader()},
		{"time to receive NAK", clone.FetchPack.NAK()},
		{"time to receive first progress message", clone.FetchPack.BandFirstPacket("progress")},
		{"time to receive first pack message", clone.FetchPack.BandFirstPacket("pack")},
		{"time to receive response body", clone.FetchPack.ResponseBody()},
	} {
		require.True(t, m.value > previousValue, "post: expect %s (%v) to be greater than previous value %v", m.desc, m.value, previousValue)
		previousValue = m.value
	}
}
// TestCloneWithAuth verifies that PerformHTTPClone forwards HTTP basic-auth
// credentials, using a server middleware that records and checks them.
func TestCloneWithAuth(t *testing.T) {
	cfg, _, repoPath := testcfg.BuildWithRepo(t)
	ctx, cancel := testhelper.Context()
	defer cancel()
	const (
		user     = "test-user"
		password = "<PASSWORD>"
	)
	authWasChecked := false
	serverPort, stopGitServer := gittest.GitServer(t, cfg, repoPath, func(w http.ResponseWriter, r *http.Request, next http.Handler) {
		// Middleware: assert the credentials before handing off the request.
		authWasChecked = true
		actualUser, actualPassword, ok := r.BasicAuth()
		require.True(t, ok, "request should have basic auth")
		require.Equal(t, user, actualUser)
		require.Equal(t, password, actualPassword)
		next.ServeHTTP(w, r)
	})
	defer func() {
		require.NoError(t, stopGitServer())
	}()
	_, err := PerformHTTPClone(
		ctx,
		fmt.Sprintf("http://localhost:%d/%s", serverPort, filepath.Base(repoPath)),
		user,
		password,
		false,
	)
	require.NoError(t, err, "perform analysis clone")
	require.True(t, authWasChecked, "authentication middleware should have gotten triggered")
}
// TestBandToHuman checks the mapping from sideband byte indices to their
// human-readable names, including the out-of-range failure cases.
func TestBandToHuman(t *testing.T) {
	cases := []struct {
		in      byte
		out     string
		fail    bool
	}{
		{in: 0, fail: true},
		{in: 1, out: "pack"},
		{in: 2, out: "progress"},
		{in: 3, out: "error"},
		{in: 4, fail: true},
	}
	for _, c := range cases {
		c := c
		name := fmt.Sprintf("band index %d", c.in)
		t.Run(name, func(t *testing.T) {
			got, err := bandToHuman(c.in)
			if c.fail {
				require.Error(t, err)
				return
			}
			require.NoError(t, err)
			require.Equal(t, c.out, got, "band name")
		})
	}
}
|
#!/usr/bin/env bash
# **install_pip.sh**
# install_pip.sh [--pip-version <version>] [--use-get-pip] [--force]
#
# Update pip and friends to a known common version
# Assumptions:
# - update pip to $INSTALL_PIP_VERSION
set -o errexit
set -o xtrace

# Keep track of the current directory
TOOLS_DIR=$(cd $(dirname "$0") && pwd)
TOP_DIR=`cd $TOOLS_DIR/..; pwd`

# Change dir to top of devstack
cd $TOP_DIR

# Import common functions
source $TOP_DIR/functions

FILES=$TOP_DIR/files
PIP_GET_PIP_URL=https://bootstrap.pypa.io/get-pip.py
LOCAL_PIP="$FILES/$(basename $PIP_GET_PIP_URL)"

GetDistro
echo "Distro: $DISTRO"

# Print the version of whichever pip binary is currently installed, if any.
function get_versions {
    PIP=$(which pip 2>/dev/null || which pip-python 2>/dev/null || true)
    if [[ -n $PIP ]]; then
        PIP_VERSION=$($PIP --version | awk '{ print $2}')
        echo "pip: $PIP_VERSION"
    else
        echo "pip: Not Installed"
    fi
}

# Download get-pip.py (cached under $FILES) and run it to install pip.
function install_get_pip {
    if [[ ! -r $LOCAL_PIP ]]; then
        curl -o $LOCAL_PIP $PIP_GET_PIP_URL || \
            die $LINENO "Download of get-pip.py failed"
    fi
    sudo -H -E python $LOCAL_PIP
}

# Point pip at an alternative index by writing ~/.pip/pip.conf, unless an
# index-url is already configured there.
function configure_pypi_alternative_url {
    PIP_ROOT_FOLDER="$HOME/.pip"
    PIP_CONFIG_FILE="$PIP_ROOT_FOLDER/pip.conf"
    if [[ ! -d $PIP_ROOT_FOLDER ]]; then
        echo "Creating $PIP_ROOT_FOLDER"
        mkdir $PIP_ROOT_FOLDER
    fi
    if [[ ! -f $PIP_CONFIG_FILE ]]; then
        echo "Creating $PIP_CONFIG_FILE"
        touch $PIP_CONFIG_FILE
    fi
    if ! ini_has_option "$PIP_CONFIG_FILE" "global" "index-url"; then
        #it means that the index-url does not exist
        iniset "$PIP_CONFIG_FILE" "global" "index-url" "$PYPI_OVERRIDE"
    fi
}

# Setuptools 8 implements PEP 440, and 8.0.4 adds a warning triggered any time
# pkg_resources inspects the list of installed Python packages if there are
# non-compliant version numbers in the egg-info (for example, from distro
# system packaged Python libraries). This is off by default after 8.2 but can
# be enabled by uncommenting the lines below.
#PYTHONWARNINGS=$PYTHONWARNINGS,always::RuntimeWarning:pkg_resources
#export PYTHONWARNINGS

# Show starting versions
get_versions

# Do pip

# Eradicate any and all system packages
uninstall_package python-pip

install_get_pip

if [[ -n $PYPI_ALTERNATIVE_URL ]]; then
    configure_pypi_alternative_url
fi

pip_install -U setuptools

get_versions
|
package com.github.thinkerou.karate.grpc;
import static io.grpc.MethodDescriptor.generateFullMethodName;
import static io.grpc.MethodDescriptor.newBuilder;
import java.util.logging.Logger;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.protobuf.Descriptors;
import com.google.protobuf.DynamicMessage;
import com.github.thinkerou.karate.protobuf.DynamicMessageMarshaller;
import io.grpc.CallOptions;
import io.grpc.ClientCall;
import io.grpc.ManagedChannel;
import io.grpc.MethodDescriptor;
import io.grpc.stub.ClientCalls;
import io.grpc.stub.StreamObserver;
/**
* DynamicClient
*
* A grpc client which operates on dynamic messages.
*
* @author thinkerou
*/
public final class DynamicClient {
private static final Logger logger = Logger.getLogger(DynamicClient.class.getName());
private final Descriptors.MethodDescriptor protoMethodDescriptor;
private final ManagedChannel channel;
/**
* Creates a client for the supplied method, talking to the supplied endpoint.
*/
public static DynamicClient create(Descriptors.MethodDescriptor protoMethod, ManagedChannel channel) {
return new DynamicClient(protoMethod, channel);
}
DynamicClient(Descriptors.MethodDescriptor protoMethodDescriptor, ManagedChannel channel) {
this.protoMethodDescriptor = protoMethodDescriptor;
this.channel = channel;
}
/**
* Makes an rpc to the remote endpoint and respects the supplied callback. Returns a
* future which terminates once the call has ended. For calls which are single-request,
* this throws IllegalArgumentException if the size of requests is not exactly 1.
*/
public ListenableFuture<Void> call(
ImmutableList<DynamicMessage> requests,
StreamObserver<DynamicMessage> responseObsever,
CallOptions callOptions) {
if (requests.isEmpty()) {
logger.warning("Can't make call without any requests");
return null;
}
long numRequests = requests.size();
MethodDescriptor.MethodType methodType = getMethodType();
switch (methodType) {
case UNARY:
if (numRequests != 1) {
logger.warning("Need exactly 1 request for unary call but got: " + numRequests);
}
return callUnary(requests.get(0), responseObsever, callOptions);
case SERVER_STREAMING:
if (numRequests != 1) {
logger.warning("Need exactly 1 request for server streaming call but got: " + numRequests);
}
return callServerStreaming(requests.get(0), responseObsever, callOptions);
case CLIENT_STREAMING:
logger.warning("Client stream call");
return callClientStreaming(requests, responseObsever, callOptions);
case BIDI_STREAMING:
return callBidiStreaming(requests, responseObsever, callOptions);
case UNKNOWN:
return null;
}
return null;
}
private ListenableFuture<Void> callBidiStreaming(
ImmutableList<DynamicMessage> requests,
StreamObserver<DynamicMessage> responseObserver,
CallOptions callOptions) {
DoneObserver<DynamicMessage> doneObserver = new DoneObserver<>();
StreamObserver<DynamicMessage> requestObserver = ClientCalls.asyncBidiStreamingCall(
createCall(callOptions),
ComponentObserver.of(responseObserver, doneObserver));
requests.forEach(requestObserver::onNext);
requestObserver.onCompleted();
return doneObserver.getCompletionFuture();
}
private ListenableFuture<Void> callClientStreaming(
ImmutableList<DynamicMessage> requests,
StreamObserver<DynamicMessage> responseObserver,
CallOptions callOptions) {
DoneObserver<DynamicMessage> doneObserver = new DoneObserver<>();
StreamObserver<DynamicMessage> requestObserver = ClientCalls.asyncClientStreamingCall(
createCall(callOptions),
ComponentObserver.of(responseObserver, doneObserver));
requests.forEach(requestObserver::onNext);
requestObserver.onCompleted();
return doneObserver.getCompletionFuture();
}
private ListenableFuture<Void> callServerStreaming(
DynamicMessage request,
StreamObserver<DynamicMessage> responseObserver,
CallOptions callOptions) {
DoneObserver<DynamicMessage> doneObserver = new DoneObserver<>();
ClientCalls.asyncServerStreamingCall(
createCall(callOptions),
request,
ComponentObserver.of(responseObserver, doneObserver));
return doneObserver.getCompletionFuture();
}
private ListenableFuture<Void> callUnary(
DynamicMessage request,
StreamObserver<DynamicMessage> responseObserver,
CallOptions callOptions) {
DoneObserver<DynamicMessage> doneObserver = new DoneObserver<>();
ClientCalls.asyncUnaryCall(
createCall(callOptions),
request,
ComponentObserver.of(responseObserver, doneObserver));
return doneObserver.getCompletionFuture();
}
private ClientCall<DynamicMessage, DynamicMessage> createCall(CallOptions callOptions) {
return channel.newCall(createGrpcMethodDescriptor(), callOptions);
}
private MethodDescriptor<DynamicMessage, DynamicMessage> createGrpcMethodDescriptor() {
MethodDescriptor.Builder builder = newBuilder();
builder.setType(getMethodType())
.setFullMethodName(getFullMethodName())
.setRequestMarshaller(new DynamicMessageMarshaller(protoMethodDescriptor.getInputType()))
.setResponseMarshaller(new DynamicMessageMarshaller(protoMethodDescriptor.getOutputType()));
return builder.build();
}
/** Returns the fully qualified "package.Service/Method" name of this RPC. */
private String getFullMethodName() {
  return generateFullMethodName(
      protoMethodDescriptor.getService().getFullName(), protoMethodDescriptor.getName());
}
/**
 * Returns the appropriate method type based on whether the client or server expect streams.
 */
private MethodDescriptor.MethodType getMethodType() {
  boolean clientStreaming = protoMethodDescriptor.toProto().getClientStreaming();
  boolean serverStreaming = protoMethodDescriptor.toProto().getServerStreaming();
  if (clientStreaming) {
    return serverStreaming
        ? MethodDescriptor.MethodType.BIDI_STREAMING
        : MethodDescriptor.MethodType.CLIENT_STREAMING;
  }
  return serverStreaming
      ? MethodDescriptor.MethodType.SERVER_STREAMING
      : MethodDescriptor.MethodType.UNARY;
}
}
|
<gh_stars>0
package utils;
import javafx.application.Platform;
import javafx.scene.image.ImageView;
import javafx.scene.layout.AnchorPane;
/**
 * Utility for moving an ImageView between two AnchorPanes. Both operations are
 * queued via Platform.runLater, so they are safe to invoke from non-FX threads.
 */
public class SwapAnchorPaneForSelectedNode {

    public synchronized static void swapToTopPane(AnchorPane topPane, AnchorPane bottomPane, ImageView image)
    {
        move(bottomPane, topPane, image);
    }

    public synchronized static void swapToBottomPane(AnchorPane bottomPane, AnchorPane topPane, ImageView image)
    {
        move(topPane, bottomPane, image);
    }

    /** Adds {@code node} to {@code destination}, then removes it from {@code source}, on the FX thread. */
    private static void move(AnchorPane source, AnchorPane destination, ImageView node)
    {
        Platform.runLater(() ->
        {
            destination.getChildren().add(node);
            source.getChildren().remove(node);
        });
    }
}
|
/**
 * @file libimobiledevice/restore.h
 * @brief Initiate restore process or reboot device.
 * @note This service is only available if the device is in restore mode.
 * \internal
 *
 * Copyright (c) 2010 <NAME>. All Rights Reserved.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#ifndef IRESTORE_H
#define IRESTORE_H
#ifdef __cplusplus
extern "C" {
#endif
#include <libimobiledevice/libimobiledevice.h>
/** @name Error Codes */
/*@{*/
#define RESTORE_E_SUCCESS 0
#define RESTORE_E_INVALID_ARG -1
#define RESTORE_E_INVALID_CONF -2
#define RESTORE_E_PLIST_ERROR -3
#define RESTORE_E_DICT_ERROR -4
#define RESTORE_E_NOT_ENOUGH_DATA -5
#define RESTORE_E_MUX_ERROR -6
#define RESTORE_E_START_RESTORE_FAILED -7
#define RESTORE_E_UNKNOWN_ERROR -256
/*@}*/
/** Represents an error code. */
typedef int16_t restored_error_t;
typedef struct restored_client_private restored_client_private;
typedef restored_client_private *restored_client_t; /**< The client handle. */
/* Interface */
/** Creates a restored client for the given device; label identifies the calling program. */
restored_error_t restored_client_new(idevice_t device, restored_client_t *client, const char *label);
/** Frees a restored client handle. */
restored_error_t restored_client_free(restored_client_t client);
/** Queries the service's type string and protocol version. */
restored_error_t restored_query_type(restored_client_t client, char **type, uint64_t *version);
/** Asks the service for the value associated with a key.
 * NOTE(review): the distinction from restored_get_value is not visible from this header. */
restored_error_t restored_query_value(restored_client_t client, const char *key, plist_t *value);
/** Retrieves the value associated with a key. */
restored_error_t restored_get_value(restored_client_t client, const char *key, plist_t *value) ;
/** Sends a plist to the service. */
restored_error_t restored_send(restored_client_t client, plist_t plist);
/** Receives a plist from the service; caller owns the returned plist. */
restored_error_t restored_receive(restored_client_t client, plist_t *plist);
/** Ends the session with the service. */
restored_error_t restored_goodbye(restored_client_t client);
/** Starts the restore process with the given options and protocol version. */
restored_error_t restored_start_restore(restored_client_t client, plist_t options, uint64_t version);
/** Requests a device reboot. */
restored_error_t restored_reboot(restored_client_t client);
/* Helper */
/** Sets the label used to identify the client to the service. */
void restored_client_set_label(restored_client_t client, const char *label);
#ifdef __cplusplus
}
#endif
#endif
|
<reponame>Exploder98/COMP.SE.200-2020-assignment<gh_stars>0
/**
 * Unit tests for the `at` function (indexing objects/arrays by path strings).
 *
 * @group unit
 */
import at from "../../src/at.js";
// NOTE: test descriptions are kept verbatim (Finnish) because they are runtime
// strings reported by the test runner.
describe("unit/at", () => {
  // Empty object and empty path -> a single undefined result.
  it("Tyhjä objekti ja polku", () => {
    expect(at({}, ""))
      .toStrictEqual([undefined]);
  });
  // Object indexing where the key exists.
  it("Indeksointi objektiin, avain löytyy", () => {
    expect(at({"a": 1}, "a"))
      .toStrictEqual([1]);
  });
  // Object indexing where the key is missing.
  it("Indeksoi objektiin, avainta ei löydy", () => {
    expect(at({"a": 1}, "b"))
      .toStrictEqual([undefined]);
  });
  // Array-index syntax against a plain object yields undefined.
  it("Indeksoi objektiin listan indeksointisyntaksilla", () => {
    expect(at({"a": 1}, "[0]"))
      .toStrictEqual([undefined]);
  });
  // Array indexing where the index exists.
  it("Indeksoi listaan, indeksi löytyy", () => {
    expect(at([1, 2, 3, 4, 5], "[4]"))
      .toStrictEqual([5]);
  });
  // Two array lookups; results preserve the order of the requested paths.
  it("Indeksoi 2 kertaa listaan, molemmat löytyvät", () => {
    expect(at([1, 2, 3, 4, 5], ["3", "1"]))
      .toStrictEqual([4, 2]);
  });
  // Mixed array-index and object-key lookups on an array.
  it("Indeksoi listaan listaindeksillä ja olioindeksillä, olioindeksiä ei löyty", () => {
    expect(at([1, 2, 3, 42, 5], ["[3]", "a"]))
      .toStrictEqual([42, undefined]);
  });
  // Two object lookups; order of results follows order of paths.
  it("Indeksoi objektiin 2 kertaa, molemmat löytyvät", () => {
    expect(at({"kaikkeen": 2, "vastaus": 4}, ["vastaus", "kaikkeen"]))
      .toStrictEqual([4, 2]);
  });
  // Object lookup plus an array-index path that cannot match an object.
  it("Indeksoi objektiin objekti-indeksillä ja listaindeksillä, listaindeksiä ei löydy", () => {
    expect(at({
      "kaikkeen": 7,
      "vastaus": 42
    }, ["vastaus", "[0]"]))
      .toStrictEqual([42, undefined]);
  });
});
|
<filename>scan/detect_swaps.js<gh_stars>0
const asyncAuto = require('async/auto');
const {getJsonFromCache} = require('./../cache');
const {getTransaction} = require('./../blocks');
const {returnResult} = require('./../async-util');
const {setJsonInCache} = require('./../cache');
const swapsFromInputs = require('./swaps_from_inputs');
const swapsFromOutputs = require('./swaps_from_outputs');
const {Transaction} = require('./../tokenslib');
// Swap detection results are cached for 10 minutes.
const cacheSwapsMs = 1000 * 60 * 10;
// Cache record type used for the swap-detection cache entries.
const type = 'detect_swaps';
/** Check a transaction to see if there are any associated swaps.
  {
    [block]: <Block Id Hex String>
    cache: <Cache Type String> 'dynamodb|memory|redis'
    id: <Transaction Id Hex String>
    network: <Network Name String>
  }
  @return via cbk
  {
    swaps: [{
      index: <Redeem Script Claim Key Index Number>
      [invoice]: <Funding Related BOLT 11 Invoice String>
      [outpoint]: <Resolution Spent Outpoint String>
      [output]: <Funding Output Script Hex String>
      [preimage]: <Claim Preimage Hex String>
      script: <Swap Redeem Script Hex String>
      [tokens]: <Token Count Number>
      type: <Transaction Type String> claim|funding|refund
      [vout]: <Funding Output Index Number>
    }]
  }
*/
module.exports = ({block, cache, id, network}, cbk) => {
  return asyncAuto({
    // Check arguments
    validate: cbk => {
      if (!cache) {
        return cbk([400, 'ExpectedCacheToCheck']);
      }
      if (!id) {
        return cbk([400, 'ExpectedTxIdToCheck']);
      }
      if (!network) {
        return cbk([400, 'ExpectedNetworkName']);
      }
      return cbk();
    },
    // Cache key: the transaction id itself.
    key: ['validate', ({}, cbk) => cbk(null, id)],
    // See if we already know swaps related to this transaction.
    // NOTE(review): the swap cache always uses the in-memory cache here and in
    // setCachedSwaps below, regardless of the `cache` argument — confirm this
    // is intentional (the `cache` argument is only used for tx lookups).
    getCachedSwaps: ['key', ({key}, cbk) => {
      return getJsonFromCache({cache: 'memory', key, type}, cbk);
    }],
    // Get the raw transaction to look for swaps
    getTransaction: ['getCachedSwaps', ({getCachedSwaps}, cbk) => {
      // Exit early when we already have swap details
      if (!!getCachedSwaps) {
        return cbk();
      }
      // This will get the transaction from the chain. Avoid caching mempool
      // transactions as they could be pretty numerous.
      return getTransaction({
        block,
        id,
        network,
        cache: !block ? null : cache,
      },
      cbk);
    }],
    // Parsed transaction: decompose the raw hex into id, inputs, and outputs.
    tx: ['getTransaction', ({getTransaction}, cbk) => {
      // Exit early when there's no transaction to lookup
      if (!getTransaction || !getTransaction.transaction) {
        return cbk();
      }
      const {transaction} = getTransaction;
      try {
        const tx = Transaction.fromHex(transaction);
        return cbk(null, {id: tx.getId(), inputs: tx.ins, outputs: tx.outs});
      } catch (err) {
        return cbk([400, 'ExpectedValidTransactionHex', err]);
      }
    }],
    // Determine if the inputs have swaps. (Claim or refund type)
    swapsFromInputs: ['tx', ({tx}, cbk) => {
      // Exit early when there's no transaction to lookup
      if (!tx) {
        return cbk();
      }
      const {id} = tx;
      const {inputs} = tx;
      return swapsFromInputs({cache, id, inputs, network}, cbk);
    }],
    // Determine if the outputs have swap output scripts (funding type)
    swapsFromOutputs: ['tx', ({tx}, cbk) => {
      // Exit early when there's no transaction to lookup
      if (!tx) {
        return cbk();
      }
      const {id} = tx;
      const {outputs} = tx;
      // Outputs with a falsy value are considered malformed/strange.
      const strangeOutputs = outputs.filter(({value}) => !value);
      // Ignore mempool transactions that have strange outputs
      if (!block && !!strangeOutputs.length) {
        return cbk();
      }
      return swapsFromOutputs({cache, id, network, outputs}, cbk);
    }],
    // Concat all detected swaps (funding swaps first, then resolutions).
    swaps: [
      'getCachedSwaps',
      'getTransaction',
      'swapsFromInputs',
      'swapsFromOutputs',
      ({
        getCachedSwaps,
        getTransaction,
        swapsFromInputs,
        swapsFromOutputs,
      },
      cbk) =>
    {
      // Exit early when the swaps results were cached
      if (!!getCachedSwaps) {
        return cbk(null, getCachedSwaps);
      }
      const fundingSwaps = !swapsFromOutputs ? [] : swapsFromOutputs.swaps;
      const resolutionSwaps = !swapsFromInputs ? [] : swapsFromInputs.swaps;
      return cbk(null, [].concat(fundingSwaps).concat(resolutionSwaps));
    }],
    // Set cached swap status
    setCachedSwaps: [
      'getCachedSwaps',
      'key',
      'swaps',
      ({getCachedSwaps, key, swaps}, cbk) =>
    {
      // Exit early without caching when the swaps are a cached result
      if (!!getCachedSwaps) {
        return cbk();
      }
      return setJsonInCache({
        key,
        type,
        cache: 'memory',
        ms: cacheSwapsMs,
        value: swaps,
      },
      cbk);
    }],
    // Final swaps result
    detectedSwaps: ['swaps', ({swaps}, cbk) => cbk(null, {swaps})],
  },
  returnResult({of: 'detectedSwaps'}, cbk));
};
|
<reponame>abin1525/rose-edg<gh_stars>1-10
#ifndef FIXUP_CXX_TO_USE_ALIASING_SYMBOLS_H
#define FIXUP_CXX_TO_USE_ALIASING_SYMBOLS_H
// DQ (4/14/2010):
/*! \brief Fixup C++ symbol tables to use aliasing symbols for using declarations.
    C++ using declarations and using directives should have an effect on the
    symbol table for each scope that will cause a new symbol to be added
    to support the visibility.  However, within this design the symbol
    should be an alias to the original symbol in the original scope.
    \implementation This analysis is required only for C++ support,
    and is similar to work that is currently supporting "use"
    declarations in Fortran 90.
 */
void fixupAstSymbolTablesToSupportAliasedSymbols (SgNode* node);
class FixupAstSymbolTablesToSupportAliasedSymbols : public AstSimpleProcessing
   {
     public:
       // DQ (7/23/2011): Link up namespaces so that we can build alias symbols from previously matching namespaces into subsequent matching namespaces.
       // Maps a namespace name to every definition of that namespace seen so far.
       std::map<SgName,std::vector<SgNamespaceDefinitionStatement*> > namespaceMap;

       // Traversal callback applied to each AST node.
       void visit ( SgNode* node );

       // DQ (7/12/2014): Added support to record the causalDeclaration (the declaration that is causing the SgAliasSymbol to be introduced).
       // This is important for the name qualification to be generated correctly before and after the causalDeclaration has been seen.
       // It is also helpful in knowing which SgAliasSymbols to remove from the AST when a SgUsingDirective or SgUsingDeclaration is
       // removed from the AST. This functionality has not yet been implemented.
       // This inserts an alias for each of the symbols in referencedScope into the symbol table of currentScope
       // static void injectSymbolsFromReferencedScopeIntoCurrentScope ( SgScopeStatement* referencedScope, SgScopeStatement* currentScope, SgAccessModifier::access_modifier_enum accessLevel);
       static void injectSymbolsFromReferencedScopeIntoCurrentScope ( SgScopeStatement* referencedScope, SgScopeStatement* currentScope, SgNode* causalNode, SgAccessModifier::access_modifier_enum accessLevel);

       // DQ (3/24/2016): Adding Robb's message mechanism (data member and function).
       static Sawyer::Message::Facility mlog;
       static void initDiagnostics();
   };
// endif for FIXUP_CXX_TO_USE_ALIASING_SYMBOLS_H
#endif
|
<filename>hummer-core/src/main/java/com/hummer/core/base/HMPerformancePlugin.java
package com.hummer.core.base;
/** Plugin hook for reporting performance statistics. */
public interface HMPerformancePlugin {
    /**
     * Outputs a performance statistic.
     *
     * @param key   performance metric type, one of TimeCostType.*
     * @param value metric value, in milliseconds
     */
    void printPerformance(String key, int value);
}
|
<filename>iot-suite-server-ability/src/main/java/com/tuya/iot/suite/ability/asset/model/AssetAuthBatchToUser.java
package com.tuya.iot.suite.ability.asset.model;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import java.io.Serializable;
import java.util.List;
/**
 * Request model for batch-authorizing assets to a user.
 *
 * @author mickey
 * @date 2021-06-02 19:44
 */
@Getter
@Setter
@ToString
public class AssetAuthBatchToUser implements Serializable {
    // User id the assets are being granted to.
    private String uid;
    // Asset ids. NOTE(review): appears to carry multiple ids in one string
    // (snake_case field names suggest they mirror an external API payload) — confirm.
    private String asset_ids;
    // Whether child assets are authorized as well.
    private Boolean authorized_children;
    public AssetAuthBatchToUser(String uid, String asset_ids, Boolean authorized_children) {
        this.uid = uid;
        this.asset_ids = asset_ids;
        this.authorized_children = authorized_children;
    }
}
|
package cyclops.reactor.container.transformer.seq.convertable;
import cyclops.container.foldable.AbstractConvertableSequenceTest;
import cyclops.container.immutable.impl.ConvertableSequence;
import cyclops.monads.AnyMs;
import cyclops.monads.Witness;
import cyclops.reactive.ReactiveSeq;
/**
 * Runs the generic ConvertableSequence contract tests against sequences built
 * from ReactiveSeq lifted into the reactiveSeq witness in ITERATIVE mode.
 */
public class StreamTSeqConvertableSequenceTest extends AbstractConvertableSequenceTest {
    @Override
    public <T> ConvertableSequence<T> of(T... elements) {
        // Lift the concrete stream into the monad transformer, then convert.
        return ReactiveSeq.of(elements) .to(AnyMs::<Witness.reactiveSeq,T>liftM)
            .apply(Witness.reactiveSeq.ITERATIVE).to();
    }
    @Override
    public <T> ConvertableSequence<T> empty() {
        return ReactiveSeq.<T>empty().to(AnyMs::<Witness.reactiveSeq,T>liftM)
            .apply(Witness.reactiveSeq.ITERATIVE).to();
    }
}
|
/// <summary>
/// Tracks the current position of a carousel with a fixed number of items.
/// Positions are 1-based and wrap back to the first item after the last.
/// </summary>
public class Carousel
{
    private int _itemCount;
    private int _position;

    public Carousel(int numberOfItems)
    {
        _itemCount = numberOfItems;
        _position = 1; // Start at the first item
    }

    /// <summary>Scrolls to the item at the current position, then advances (wrapping).</summary>
    public void ScrollToNext()
    {
        bool positionInRange = _position > 0 && _position <= _itemCount;
        if (!positionInRange)
        {
            return;
        }
        ScrollToPage(_position);
        // Advance one step; modulo arithmetic wraps the last item back to 1.
        _position = _position % _itemCount + 1;
    }

    private void ScrollToPage(int index)
    {
        bool indexIsValid = index > 0 && index <= _itemCount;
        if (indexIsValid)
        {
            // Implement scrolling to the specified page
            // This method is not fully defined in the provided code snippet
        }
    }
}
|
package net.toshirohex.lycurgus.interfaces;
import net.minecraft.entity.Entity;
import net.minecraft.entity.player.PlayerEntity;
import net.minecraft.item.ItemStack;
import net.minecraft.util.Hand;
import net.minecraft.util.TypedActionResult;
import net.minecraft.world.World;
/**
 * Item-like behavior contract; signatures mirror Minecraft's Item callbacks
 * (right-click use and per-tick inventory updates).
 */
public interface KingCotton {
    // Invoked when the holder uses (right-clicks with) the item stack.
    TypedActionResult<ItemStack> use(World world, PlayerEntity user, Hand hand, ItemStack itemStack);
    // Invoked every tick while the stack sits in an entity's inventory.
    void inventoryTick(ItemStack stack, World world, Entity entity, int slot, boolean selected);
}
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Book Search</title>
</head>
<body>
<h1>Book Search</h1>
<!-- Posts the entered title to the /search endpoint. -->
<form action="/search" method="POST">
<input type="text" name="title" placeholder="Enter book title..." aria-label="Book title">
<input type="submit" value="Search">
</form>
</body>
</html>
|
# -*- coding: utf-8 -*-
# MIT license
#
# Copyright (C) 2016 by XESS Corp.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import, division, print_function, unicode_literals
import json
import os
from builtins import open, super
from future import standard_library
from . import tools # Import EDA tool-specific stuff.
from .circuit import Circuit
from .common import *
from .defines import *
from .logger import erc_logger, get_script_name, logger
from .part_query import footprint_cache
from .pin import Pin
from .utilities import *
standard_library.install_aliases()
class SkidlCfg(dict):
    """Dictionary holding the SKiDL configuration, merged from .skidlcfg files."""

    CFG_FILE_NAME = ".skidlcfg"

    def __init__(self, *dirs):
        super().__init__()
        self.load(*dirs)

    @staticmethod
    def _cfg_path(dir):
        """Absolute, user-expanded path of the config file inside *dir*."""
        path = os.path.join(dir, SkidlCfg.CFG_FILE_NAME)
        return os.path.abspath(os.path.expanduser(path))

    def load(self, *dirs):
        """Load SKiDL configuration from JSON files in given dirs."""
        for dir in dirs:
            try:
                with open(self._cfg_path(dir)) as cfg_fp:
                    merge_dicts(self, json.load(cfg_fp))
            except (FileNotFoundError, IOError):
                # A missing or unreadable config file is silently skipped.
                pass

    def store(self, dir="."):
        """Store SKiDL configuration as JSON in directory as .skidlcfg file."""
        with open(self._cfg_path(dir), "w") as cfg_fp:
            json.dump(self, cfg_fp, indent=4)
def get_kicad_lib_tbl_dir():
    """Get the path to where the global fp-lib-table file is found."""
    candidate_dirs = (
        "$HOME/.config/kicad",
        "~/.config/kicad",
        "%APPDATA%/kicad",
        "$HOME/Library/Preferences/kicad",
        "~/Library/Preferences/kicad",
    )
    # Probe Linux, Windows, and macOS locations in order; first hit wins.
    for candidate in candidate_dirs:
        expanded = os.path.normpath(os.path.expanduser(os.path.expandvars(candidate)))
        if os.path.lexists(expanded):
            return expanded
    return ""
###############################################################################
# Globals that are used by everything else.
###############################################################################

# Get SKiDL configuration, merging /etc, home-dir, and current-dir files.
skidl_cfg = SkidlCfg("/etc", "~", ".")

# If no configuration files were found, set some default lib search paths.
if "lib_search_paths" not in skidl_cfg:
    skidl_cfg["lib_search_paths"] = {tool: ["."] for tool in ALL_TOOLS}

    # Add the location of the default KiCad part libraries.
    try:
        skidl_cfg["lib_search_paths"][KICAD].append(os.environ["KICAD_SYMBOL_DIR"])
    except KeyError:
        logger.warning(
            "KICAD_SYMBOL_DIR environment variable is missing, so the default KiCad symbol libraries won't be searched."
        )

    # Add the location of the default SKiDL part libraries (bundled "libs" dir).
    default_skidl_libs = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), "libs"
    )
    skidl_cfg["lib_search_paths"][SKIDL].append(default_skidl_libs)

# Shortcut to library search paths.
lib_search_paths = skidl_cfg["lib_search_paths"]

# If no configuration files were found, set some default footprint search paths.
if "footprint_search_paths" not in skidl_cfg:
    dir_ = get_kicad_lib_tbl_dir()
    skidl_cfg["footprint_search_paths"] = {tool: [dir_] for tool in ALL_TOOLS}

# Cause the footprint cache to be invalidated if the footprint search path changes.
def invalidate_footprint_cache(self, k, v):
    # Any assignment to the KICAD footprint search path clears cached footprints.
    footprint_cache.reset()

skidl_cfg["footprint_search_paths"] = TriggerDict(skidl_cfg["footprint_search_paths"])
skidl_cfg["footprint_search_paths"].trigger_funcs[KICAD] = invalidate_footprint_cache

# Shortcut to footprint search paths.
footprint_search_paths = skidl_cfg["footprint_search_paths"]
# Set default toolset being used with SKiDL.
def set_default_tool(tool):
    """Set the ECAD tool that will be used by default."""
    skidl_cfg["default_tool"] = tool

def get_default_tool():
    """Return the ECAD tool currently used by default."""
    return skidl_cfg["default_tool"]

# Fall back to KiCad when the config file did not specify a default tool.
if "default_tool" not in skidl_cfg:
    set_default_tool(KICAD)

# Make the various EDA tool library suffixes globally available.
lib_suffixes = tools.lib_suffixes

# Definitions for backup library of circuit parts, named after the running script.
BACKUP_LIB_NAME = get_script_name() + "_lib"
BACKUP_LIB_FILE_NAME = BACKUP_LIB_NAME + lib_suffixes[SKIDL]

# Boolean controls whether backup lib will be searched for missing parts.
QUERY_BACKUP_LIB = INITIAL_QUERY_BACKUP_LIB = True

def set_query_backup_lib(val):
    """Set the boolean that controls searching for the backup library."""
    global QUERY_BACKUP_LIB
    QUERY_BACKUP_LIB = val

def get_query_backup_lib():
    """Return whether the backup library will be searched for missing parts."""
    return QUERY_BACKUP_LIB
# Backup lib for storing parts in a Circuit. Loaded lazily by load_backup_lib().
backup_lib = None

def set_backup_lib(lib):
    """Set the backup library."""
    global backup_lib
    backup_lib = lib

def get_backup_lib():
    """Return the backup library (may be None if not yet loaded/set)."""
    return backup_lib
@norecurse
def load_backup_lib():
    """Load a backup library that stores the parts used in the circuit.

    Returns:
        The backup library object, or None if it could not be loaded.
    """
    global backup_lib

    # Don't keep reloading the backup library once it's loaded.
    if not backup_lib:
        try:
            # The backup library is a SKiDL lib stored as a Python module.
            # Use a context manager so the file handle is always closed
            # (the previous open(...).read() leaked the handle).
            # NOTE: exec() runs the library file as code, so the backup file
            # must be trusted — it is generated by SKiDL itself.
            with open(BACKUP_LIB_FILE_NAME) as backup_lib_fp:
                exec(backup_lib_fp.read())

            # Copy the backup library in the local storage to the global storage.
            backup_lib = locals()[BACKUP_LIB_NAME]

        except (FileNotFoundError, ImportError, NameError, IOError):
            # Missing or malformed backup library just means no backup parts.
            pass

    return backup_lib
# Create the default Circuit object that will be used unless another is explicitly created.
builtins.default_circuit = Circuit()

# NOCONNECT net for attaching pins that are intentionally left open.
builtins.NC = default_circuit.NC  # pylint: disable=undefined-variable

# Create calls to functions on whichever Circuit object is the current default.
ERC = default_circuit.ERC
erc_assert = default_circuit.add_erc_assertion
generate_netlist = default_circuit.generate_netlist
generate_xml = default_circuit.generate_xml
generate_schematic = default_circuit.generate_schematic
generate_svg = default_circuit.generate_svg
generate_graph = default_circuit.generate_graph
reset = default_circuit.reset
backup_parts = default_circuit.backup_parts

# Define a tag for nets that convey power (e.g., VCC or GND).
POWER = Pin.drives.POWER

def no_files(circuit=default_circuit):
    """Prevent creation of output files (netlists, ERC, logs) by this Circuit object."""
    circuit.no_files = True
    # Also silence the file outputs of the global loggers.
    erc_logger.stop_file_output()
    logger.stop_file_output()
|
package io.opensphere.core.cache.accessor;
import io.opensphere.core.cache.matcher.IntervalPropertyMatcher;
/**
 * An accessor for property values that have lower and upper bounds (intervals).
 *
 * @param <S> The type of object that provides the property values.
 * @param <T> The type of the property values.
 */
public interface IntervalPropertyAccessor<S, T> extends PropertyAccessor<S, T>
{
    /**
     * Create a property matcher that will match property values provided by
     * this accessor.
     *
     * @return A property matcher.
     */
    IntervalPropertyMatcher<?> createMatcher();

    /**
     * An extent that comprises all of the property values returned by this
     * accessor.
     *
     * @return The extent.
     */
    T getExtent();
}
|
<gh_stars>1-10
"use strict";
exports.__esModule = true;
var countSteps_1 = require("../math/countSteps");
var pad_1 = require("../number/pad");
var HOUR = 3600000;
var MINUTE = 60000;
var SECOND = 1000;
/**
 * Format a millisecond timestamp into a time string ("H:MM:SS", with the hour
 * segment omitted when the duration is under one hour).
 */
function toTimeString(ms) {
    var hours = ms < HOUR ? 0 : countSteps_1["default"](ms, HOUR);
    var minutes = ms < MINUTE ? 0 : countSteps_1["default"](ms, MINUTE, 60);
    var seconds = ms < SECOND ? 0 : countSteps_1["default"](ms, SECOND, 60);
    var segments = [];
    if (hours) {
        segments.push(String(hours));
    }
    segments.push(pad_1["default"](minutes, 2));
    segments.push(pad_1["default"](seconds, 2));
    return segments.join(':');
}
exports["default"] = toTimeString;
|
<gh_stars>0
import java.io.*;
import java.math.*;
import java.security.*;
import java.text.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.function.*;
import java.util.regex.*;
import java.util.stream.*;
import static java.util.stream.Collectors.joining;
import static java.util.stream.Collectors.toList;
class Result {
// Solution
public static int maximumToys(List<Integer> prices, int k) {
int sum = 0, count = 0;
quickSort(prices, 0, prices.size() - 1);
// In sorted array we just count the sum until it reaches k
for (Integer pr : prices) {
if (sum + pr < k) {
sum += pr;
count++;
System.out.println(sum + " " + count);
} else break;
}
return count;
}
// Quicksort
public static void quickSort(List<Integer> array, int begin, int end) {
if (end <= begin) return;
int pivot = partition(array, begin, end);
quickSort(array, begin, pivot -1);
quickSort(array, pivot + 1, end);
}
// Finding pivot
static int partition(List<Integer> array, int begin, int end) {
int pivot = end;
int counter = begin;
for (int i = begin; i < end; i++) {
if (array.get(i) < array.get(pivot)) {
int temp = array.get(counter);
array.set(counter, array.get(i));
array.set(i, temp);
counter++;
}
}
int temp = array.get(pivot);
array.set(pivot, array.get(counter));
array.set(counter, temp);
return counter;
}
}
/** HackerRank driver: reads n and k, then the price list, and writes the answer. */
public class Solution {
    public static void main(String[] args) throws IOException {
        BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(System.in));
        // The grader supplies the result file path in the OUTPUT_PATH env variable.
        BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(System.getenv("OUTPUT_PATH")));

        // First line: n (item count) and k (budget), trailing whitespace stripped.
        String[] firstMultipleInput = bufferedReader.readLine().replaceAll("\\s+$", "").split(" ");
        int n = Integer.parseInt(firstMultipleInput[0]);
        int k = Integer.parseInt(firstMultipleInput[1]);

        // Second line: the space-separated prices.
        List<Integer> prices = Stream.of(bufferedReader.readLine().replaceAll("\\s+$", "").split(" "))
            .map(Integer::parseInt)
            .collect(toList());

        int result = Result.maximumToys(prices, k);

        bufferedWriter.write(String.valueOf(result));
        bufferedWriter.newLine();

        bufferedReader.close();
        bufferedWriter.close();
    }
}
|
#!/usr/bin/env bash
# Resolve the directory containing this script so it can be run from anywhere.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# Run the project's vendored PHPUnit using the configuration in tests/.
php "$DIR/vendor/phpunit/phpunit/phpunit" -c "$DIR/tests"
|
# Generate a local public/private key pair
ssh-keygen -t rsa
cat ~/.ssh/id_rsa.pub
# Configuration on the remote host after logging in
# Edit the sshd_config configuration file
vi /etc/ssh/sshd_config
# Enable public-key SSH login by setting the following options:
# RSAAuthentication yes
# PubkeyAuthentication yes
# AuthorizedKeysFile .ssh/authorized_keys
# Append the local public key to authorized_keys
vi ~/.ssh/authorized_keys
# Restart the SSH service
systemctl restart sshd
# Log in to the remote host over SSH
ssh user_name@host_ip
ssh -i ~/.ssh/id_rsa user_name@host_ip
# Problem: a newly created non-root user is still prompted for a password
# even when logging in with the private key
# Show the last 20 lines of the auth log
tail /var/log/secure -n 20
# Log shows: "Authentication refused: bad ownership or modes for file"
# Fix by tightening permissions on the home dir, .ssh dir, and key file:
chmod g-w /home/allen
chmod 700 /home/allen/.ssh
chmod 600 /home/allen/.ssh/authorized_keys
|
#!/bin/bash
# Collect SERV CPI statistics per opcode from RISC-V compliance-test VCD
# traces, writing a table to ../serv-cpi.txt while echoing it to the terminal.
printf "SERV CPI on Compliance Tests\n\n" | tee ../serv-cpi.txt
echo " Opcode Avg. Min Max" | tee -a ../serv-cpi.txt

# Opcodes that each have a dedicated compliance-test trace.
single_ops="AUIPC LUI
ADD ADDI SUB AND ANDI OR ORI XOR XORI
ECALL
SLT SLTI SLTU SLTIU
SLL SLLI SRA SRAI SRL SRLI"
# ${op,,} lowercases the opcode for the wawk filter (requires bash 4+).
for op in $single_ops; do
    wawk inst.wawk traces/I-$op-01.elf.vcd ${op,,} | tee -a ../serv-cpi.txt
done

# Branch/jump opcodes are all covered by the MISALIGN_JMP trace.
branch_ops="JAL JALR BEQ BGE BGEU BLT BLTU BNE"
for op in $branch_ops; do
    wawk inst.wawk traces/I-MISALIGN_JMP-01.elf.vcd ${op,,} | tee -a ../serv-cpi.txt
done

# Load/store opcodes are all covered by the MISALIGN_LDST trace.
memory_ops="LB LBU LH LHU LW SB SH SW"
for op in $memory_ops; do
    wawk inst.wawk traces/I-MISALIGN_LDST-01.elf.vcd ${op,,} | tee -a ../serv-cpi.txt
done
|
CREATE TABLE [auth].[ApiClientRoles]
(
    [Id] INT NOT NULL identity(100000, 1),
    [ApiClientId] int not null,
    [RoleId] int not null,
    [CreatedByUserId] int null,
    [CreatedByClientId] int null,
    -- Fixed: "[CreatedDate] as getdate()" declared a computed column that is
    -- re-evaluated on every read, so it never recorded the creation time.
    -- A stored column with a DEFAULT constraint captures the insert timestamp,
    -- consistent with how [UpdatedDate] is a stored datetime column.
    [CreatedDate] datetime not null constraint DF_ApiClientRoles_CreatedDate default (getdate()),
    [UpdatedByUserId] int null,
    [UpdatedByClientId] int null,
    [UpdatedDate] datetime null,
    constraint PK_ApiClientRoles_ID primary key ([Id]),
    constraint FK_ApiClientRoles_ApiClientId foreign key ([ApiClientId]) references [auth].[ApiClients]([Id]),
    constraint FK_ApiClientRoles_RoleId foreign key ([RoleId]) references [auth].[Roles]([Id]),
    constraint FK_ApiClientRoles_CreatedByClientId foreign key ([CreatedByClientId]) references [auth].[ApiClients]([Id]),
    constraint FK_ApiClientRoles_CreatedByUserId foreign key ([CreatedByUserId]) references [auth].[ApiUsers]([Id]),
    constraint FK_ApiClientRoles_UpdatedByClientId foreign key ([UpdatedByClientId]) references [auth].[ApiClients]([Id]),
    constraint FK_ApiClientRoles_UpdatedByUserId foreign key ([UpdatedByUserId]) references [auth].[ApiUsers]([Id])
)
|
#!/bin/bash
PROGNAME=$(basename "$0")
# All uploads are expected under ./uploads relative to the invocation directory.
UPLOAD_DIR="$(pwd)/uploads"
# Create unique temp dir
UUID=$(uuidgen)
TEMP_DIR="$UPLOAD_DIR/tmp/$UUID"
# DATA_LOADER="$HOME/github/pathomics_featuredb/src/build/install/featuredb-loader/bin/featuredb-loader"
# TODO: data loader is now a docker container.
# DATA_LOADER="quip-loader"
function usage {
    # Display usage message on standard error
    echo "Usage: $PROGNAME zip tile mask executionId db subjectId caseId" 1>&2
}
function clean_up {
    # Perform program exit housekeeping. Optionally accepts an exit status.
    # Default to 1 so a signal-triggered call (trap passes no argument) exits
    # non-zero instead of failing on `exit ""`.
    rm -rf "$TEMP_DIR"
    echo "Cleaning up and exiting ${1:-1}"
    exit "${1:-1}"
}
function error_exit {
    # Display error message and exit.
    # ${1:-"Error"} falls back to a generic message when no detail is given.
    echo "${PROGNAME}: ${1:-"Error"}" 1>&2
    clean_up 1
}
function do_stuff {
    # Main pipeline: unzip an upload, run the feature-computation docker
    # container over tile+mask for two algorithms, attach results to the
    # manifest, and repackage the zip.
    # ERROR-CHECK EVERYTHING.
    # Args: zip tile mask executionId db subjectId caseId [json]
    zipFile=$1
    tileImg=$2
    maskImg=$3
    executionId=$4
    db=$5
    subjectId=$6
    caseId=$7
    json="$8"
    # Mongo connection comes from the environment.
    host=${MONHOST}
    port=${MONPORT}
    # Check db for caseId
    #return_str=$(mongo --eval "connect('$host:$port/$db').images.find({'case_id':'$caseId'})" | grep "case_id" | xargs)
    return_str=$(mongo $host:$port/$db --eval "db.images.find({'case_id':'$caseId'}).shellPrint()" | grep "case_id" | xargs)
    # If we didn't find it, figure out which database it's really in.
    if [ "$return_str" = "" ]; then
        databases=( "quip" "u24_brca" "u24_gbm" "u24_lgg" "u24_luad" "u24_paad")
        for dd in "${databases[@]}"
        do
            :
            # Skip the database we already checked.
            if [ "$db" = "$dd" ]; then
                continue
            fi
            # return_str=$(mongo --eval "connect('$host:$port/$dd').images.find({'case_id':'$caseId'})" | grep "case_id" | xargs)
            return_str=$(mongo $host:$port/$dd --eval "db.images.find({'case_id':'$caseId'}).shellPrint()" | grep "case_id" | xargs)
            if [ "$return_str" = "" ]; then
                echo "$caseId not in $dd"
            else
                echo "found $caseId in $dd"
                db=$dd
                break
            fi
        done
    fi;
    # Go to uploads dir
    cd $UPLOAD_DIR || error_exit "$LINENO: Could not change directory"
    # Create unique temp dir
    mkdir -p "$TEMP_DIR" || error_exit "$LINENO: Could not make directory"
    # Unzip file to temp dir
    unzip "$zipFile" -d "$TEMP_DIR" || error_exit "$LINENO: Could not unzip file"
    # The archive is expected to contain a directory named after the zip file.
    TEMP_SUB_DIR=$TEMP_DIR/${zipFile%.*}
    # Turn off stderr, since we are going to handle it.
    if ls $TEMP_SUB_DIR 2>/dev/null ; then
        echo "OK"
    else
        # Flat archive: create the expected subdirectory and move files into it.
        TEMP_SUB_DIR=$TEMP_DIR/${zipFile%.*}
        mkdir -p $TEMP_SUB_DIR
        mv $TEMP_DIR/*.* $TEMP_SUB_DIR
    fi
    # Get docker container id
    # TODO: User has to pass in what the container names are.
    # For both feature computation and data loader.
    # Use docker environment variable.
    # container="quip-jobs" # eventually this may be quip-jobs
    container="test_segmentation"
    containerId=$(docker inspect --format '{{ .Id }}' $container) || error_exit "$LINENO: Could not get docker container ID"
    # Copy mask and tile to docker container
    if [ ! -f "$TEMP_SUB_DIR/$maskImg" ]; then
        # A BandAid for SlicerPath.
        maskImg="WEB_gray-label.tif"
    fi
    (ls "$TEMP_SUB_DIR/$tileImg" && ls "$TEMP_SUB_DIR/$maskImg") || error_exit "$LINENO: I give up. manifest.json tells a lie."
    docker cp "$TEMP_SUB_DIR/$tileImg" "$containerId:/data/input/"
    # From manifest.json layers[0].file
    docker cp "$TEMP_SUB_DIR/$maskImg" "$containerId:/data/input/"
    # Run feature computation algorithm (first pass: algorithm "Y").
    alg="Y"
    # pgm="/tmp/build/computeFeatures"
    pgm="/tmp/pathomics_analysis/nucleusSegmentation/build/app/computeFeatures"
    tile="/data/input/$tileImg"
    mask="/data/input/$maskImg"
    outfile="output-$alg.csv"
    output="/data/output/"$outfile
    docker exec $container $pgm $tile $mask $alg $output
    # Copy results here
    docker cp "$containerId:$output" "$TEMP_SUB_DIR/$outfile"
    ls "$TEMP_SUB_DIR/$outfile" || error_exit "$LINENO: Output file not found"
    # Add data to manifest: inject an "outputY" entry before the closing brace.
    sed -i '$s/}/,\n"output'$alg'":"'$outfile'"}/' $TEMP_SUB_DIR/manifest.json
    # Load results to database
    # mongoimport --host $host --port $port --db u24_3dslicer --collection optimized --type csv --headerline --file "$TEMP_SUB_DIR/$outfile"
    # Use data loader
    ###$DATA_LOADER --dbhost $host --dbport $port --dbname $db --inptype csv --inpfile "$TEMP_SUB_DIR/$outfile" --eid $executionId --cid $caseId --eparms $json --sid $subjectId --studyid "u24_tcga_slicer_$alg" --fromdb
    #|| error_exit "$LINENO: Data loader failed" # Loader doesn't return non-zero :(
    # Run feature computation algorithm (second pass: algorithm "J").
    alg="J"
    outfile="output-$alg.csv"
    output="/data/output/"$outfile
    docker exec $container $pgm $tile $mask $alg $output
    # Copy results here
    docker cp "$containerId:$output" "$TEMP_SUB_DIR/$outfile"
    ls "$TEMP_SUB_DIR/$outfile" || error_exit "$LINENO: Output file not found"
    # Add data to manifest
    sed -i '$s/}/,\n"output'$alg'":"'$outfile'"}/' $TEMP_SUB_DIR/manifest.json
    # Load results to database
    ###$DATA_LOADER --dbhost $host --dbport $port --dbname $db --inptype csv --inpfile "$TEMP_SUB_DIR/$outfile" --eid $executionId --cid $caseId --eparms $json --sid $subjectId --studyid "u24_tcga_slicer_$alg" --fromdb
    #|| error_exit "$LINENO: Data loader failed" # Loader doesn't return non-zero :(
    # Repackage the zip file (now containing the output CSVs and updated manifest).
    cd $TEMP_DIR || error_exit "$LINENO: Could not change directory"
    zip -r $zipFile ${zipFile%.*} || error_exit "$LINENO: Could not repackage zip"
    mv $zipFile $UPLOAD_DIR || error_exit "$LINENO: Could not move zip"
    # Clean up temp dir
    clean_up 0
    exit 0
}
# Remove temp files if the script is interrupted.
trap clean_up SIGHUP SIGINT SIGTERM
# Require the seven mandatory arguments (the eighth, json, is optional).
if [ $# -lt "7" ]; then
    usage
    error_exit "$LINENO: zip tile mask executionId db subjectId caseId"
fi
if [ ! -f "$UPLOAD_DIR/$1" ]; then
    error_exit "$LINENO: file $1 cannot be read"
fi
# Only proceed when the upload really is a zip archive.
if file --mime-type "$UPLOAD_DIR/$1" | grep -q zip$; then
    do_stuff "$1" "$2" "$3" "$4" "$5" "$6" "$7" "$8"
else
    error_exit "$LINENO: $1 is not zipped"
fi
|
package com.example.wolweather;
/**
* Created by Administrator on 2017/7/16.
*/
/**
 * Placeholder model class; currently carries no fields or behavior.
 * NOTE(review): presumably intended to hold "basic" weather data for the
 * wolweather app — flesh out or remove.
 */
public class Basic {
}
|
# @param string $file
#
# Resolve the documentation command for a file based on its extension.
# Relies on the globals $docs_php and $docs_bash being set by the caller.
# BUG FIX: `cmd` was not declared local, so it leaked into the caller's
# scope and a stale value from a previous call was echoed for files with
# an unrecognized extension.
#
# @param string $file  path whose extension selects the command
function do_hook_file() {
    local file=$1
    local cmd=''
    if [[ ${file##*.} == 'php' ]]; then
        cmd="$docs_php"
    elif [[ ${file##*.} == 'sh' ]]; then
        cmd="$docs_bash"
    fi
    echo "$cmd" # or execute the command using "$cmd"
}
|
<gh_stars>0
package cn.stylefeng.roses.kernel.auth.api.loginuser.pojo;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * Request payload for fetching the currently logged-in user's information.
 *
 * @author fengshuonan
 * @date 2021/9/29 11:25
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
public class LoginUserRequest {

    /**
     * Authentication token of the currently logged-in user.
     */
    private String token;
}
|
package com.example.androidpractice;
import android.app.Application;
import android.content.Context;
import android.util.Log;
/**
* androidPractice 的 Application 入口。
* Created by yntense on 21/6/10.
*/
public class CPApplication extends Application {

    private static final String TAG = "CPApplication";

    // Application-wide context, published in onCreate().
    // NOTE(review): holding the *application* context statically is the
    // common pattern here; confirm no Activity context is ever assigned.
    private static Context context;

    /**
     * @return the application context captured in {@link #onCreate()};
     *         null if called before the Application has been created.
     */
    public static Context getContext() {
        // FIX: removed a stray trailing ';' after the method body
        // (a dangling empty declaration).
        return context;
    }

    @Override
    public void onCreate() {
        super.onCreate();
        context = getApplicationContext();
        Log.i(TAG, "onCreate:CPApplication ");
    }
}
/**
 * skill:
 * 1. typing "/**" then Ctrl+Enter generates a quick comment block
 * function:
 * 1. provides static access to the application context via getContext()
 *
 * notice:
 * 1. android:name=".CPApplication" must be added to the application tag in
 *    the manifest so this class is instantiated before MainActivity
 */
|
#!/bin/bash
# Copyright 2020 Langston Howley
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Make sure ~/.bash_profile exists before we append to it.
# BUG FIX: the original tested `-f` (file exists) before touching, so the
# file was only created when it already existed; the test must be negated.
if [ ! -f "$HOME/.bash_profile" ]; then
    touch "$HOME/.bash_profile"
fi

# Absolute directory containing this script, regardless of invocation cwd.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"

# Register this install location and source the custom commands on login.
echo -e "\nexport CUSTOM_COMMANDS_HOME=$DIR" >> "$HOME/.bash_profile"
echo -e "source $DIR/.custom_commands.sh" >> "$HOME/.bash_profile"
echo "CUSTOM_COMMANDS_HOME set to $DIR"

# Set up an isolated Python 3 environment with the required packages.
virtualenv "$DIR/env" -p python3
source "$DIR/env/bin/activate"
pip3 install -r "$DIR/python/requirements.txt"

# Seed the dotenv file the Python helpers read their API key from.
touch "$DIR/python/.env"
echo -e "WEATHER_API_KEY = " >> "$DIR/python/.env"
echo -e "\nDon't forget to put a Weather API Key in /python/.env\nMore info @ https://openweathermap.org/appid"

source "$HOME/.bash_profile"
cd "$DIR"
|
# Create the project structure
mkdir app
cd app
mkdir static templates

# Install all the dependencies
# NOTE(review): the `python manage.py db ...` commands below require
# Flask-Migrate (flask-migrate), which is never installed here — confirm
# and add it to this list.
pip install flask
pip install flask-sqlalchemy

# Create the application
touch app.py
# Create the base template
touch templates/base.html
# Create the model
touch models.py
# Create the view
touch views.py
# Create the routes
touch routes.py
# Create the static assets
touch static/style.css

# Create the database
# NOTE(review): manage.py is never created above, so these three commands
# will fail as written — verify the intended migration entry point.
python manage.py db init
python manage.py db migrate
python manage.py db upgrade

# Run the server
python app.py
|
<gh_stars>1-10
import React from 'react';
import PropsType from 'prop-types';
import { logout } from 'containers/App/actions';
import { connect } from 'react-redux';
import { createStructuredSelector } from 'reselect';
import { makeSelectCurrentUser } from 'containers/App/selectors';
function Settings({ onLogout }) {
const change = evt => {
if (evt.target.value === '5') {
onLogout();
}
};
return (
<select
onChange={$event => {
change($event);
}}
>
<option value="1">Settings</option>
<option value="2">Theme</option>
<option value="3">Profile</option>
<option value="4">General</option>
<option value="5">Signout</option>
</select>
);
}
Settings.propTypes = {
onLogout: PropsType.func,
};
const mapStateToProps = createStructuredSelector({
currentUser: makeSelectCurrentUser,
});
const mapDispatchToProps = {
onLogout: logout,
};
export default connect(
mapStateToProps,
mapDispatchToProps,
)(Settings);
|
<gh_stars>0
//
// author: Kang
// date: 2017-05-27
package qrcode
//(JIS 8-bit character set (Latin and Kana) in accordance with JIS X 0201
//var characters string = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ $%*+-./:"

// EightBitByteEncoder encodes data in QR "8-bit byte" mode,
// one code byte per input byte.
type EightBitByteEncoder struct {
}
// Encode packs content as 8-bit byte mode data for the smallest QR version
// that can hold it at the requested error-correction level. It returns the
// encoded bit stream, the chosen version, and an error when the content is
// too long for any version.
func (e EightBitByteEncoder) Encode(content string, ecl ErrorCorrectionLevel) (*BitList, *Version, error) {
	payload := []byte(content)

	version := getSmallestVersion(len(payload)*8, ecl, EightBitByte)
	if version == nil {
		return nil, nil, TooMuchCharactersError
	}

	// It's not correct to add the unicode bytes to the result directly but
	// most readers can't handle the required ECI header...
	bits := new(BitList)
	bits.AddBits(int(EightBitByte), 4)
	bits.AddBits(len(content), version.getNumberOfBitsInCharCountIndicator(EightBitByte))
	for i := 0; i < len(payload); i++ {
		bits.AddByte(payload[i])
	}

	addPaddingAndTerminator(bits, version)
	return bits, version, nil
}
|
"""ServiceNow dispatcher
envars:
- config
- SA_SN_API_HOST
- SA_SN_API_ENDPOINT
- SA_SN_FIELD_PREFIX
- auth1
- SA_SN_API_USER
- SA_SN_API_PASS
- auth2
- SA_SN_OAUTH_CLIENT_ID
- SA_SN_OAUTH_CLIENT_SECRET
- SA_SN_OAUTH_REFRESH_TOKEN
"""
from os import environ as env
import requests
from runners.helpers import log
from runners.helpers import vault
class Bearer(requests.auth.AuthBase):
    """Requests auth helper that attaches an OAuth bearer token."""

    def __init__(self, token):
        self.token = token

    def __call__(self, r):
        # requests invokes this hook on each prepared request
        r.headers["authorization"] = f"Bearer {self.token}"
        return r
def handle(alert, assignee='', payload=None):
    """File a ServiceNow incident for ``alert``.

    Credentials come either from basic auth (SA_SN_API_USER/PASS) or an
    OAuth refresh-token flow (SA_SN_OAUTH_*); secrets may be vault-encrypted.
    Returns the ``requests.Response`` on success (HTTP 201), ``None`` when
    the handler is skipped, and raises ``RuntimeError`` on OAuth/API errors.
    """
    host = env.get('SA_SN_API_HOST')
    if not host:
        log.info('skipping service-now handler, missing host')
        return

    username = vault.decrypt_if_encrypted(envar='SA_SN_API_USER')
    # BUG FIX: the module reference was corrupted ("<PASSWORD>..."); the
    # password must be decrypted through vault exactly like the username.
    password = vault.decrypt_if_encrypted(envar='SA_SN_API_PASS')
    client_id = env.get('SA_SN_OAUTH_CLIENT_ID')
    client_secret = vault.decrypt_if_encrypted(envar='SA_SN_OAUTH_CLIENT_SECRET')
    refresh_token = vault.decrypt_if_encrypted(envar='SA_SN_OAUTH_REFRESH_TOKEN')

    if client_id:
        # Exchange the long-lived refresh token for an access token.
        oauth_return_params = {
            'grant_type': 'refresh_token',
            'client_id': client_id,
            'client_secret': client_secret,
            'refresh_token': refresh_token,
        }
        oauthresp = requests.post(
            f'https://{host}/oauth_token.do',
            data=oauth_return_params,
        )
        result = oauthresp.json()
        access_token = result.get('access_token')
        if not access_token:
            log.info('skipping service-now handler, bad oauth')
            raise RuntimeError(result)
    else:
        access_token = None

    if not (username and password) and not access_token:
        log.info('skipping service-now handler, no authorization')
        return

    title = alert.get('TITLE', 'SnowAlert Generate Incident')
    description = alert.get('DESCRIPTION', '')
    endpoint = env.get('SA_SN_API_ENDPOINT', '/now/table/incident')
    api_url = f'https://{host}/api{endpoint}'
    fp = env.get('SA_SN_FIELD_PREFIX', '')

    response = requests.post(
        api_url,
        auth=Bearer(access_token) if access_token else (username, password),
        # BUG FIX: the default was a shared mutable dict ({}); None plus the
        # existing `payload or {...}` fallback preserves the old behavior.
        json=payload or {
            f'{fp}contact_type': 'Integration',
            f'{fp}impact': '2',
            f'{fp}urgency': '2',
            f'{fp}category': 'IT Security',
            f'{fp}subcategory': 'Remediation',
            f'{fp}assignment_group': 'Security Compliance',
            f'{fp}short_description': title,
            f'{fp}description': description,
            f'{fp}assigned_to': assignee,
        },
    )
    if response.status_code != 201:
        log.info(
            f'URL: {api_url}',
            f'Status Code: {response.status_code}',
            f'Response Length: {len(response.text)}',
            f'Response Headers: {response.headers}',
        )
        raise RuntimeError(response)

    return response
|
# Libraries
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from sklearn import datasets, svm, metrics
# Load the handwritten-digits dataset.
digits = datasets.load_digits()

# Split: first 1200 samples for training, the remainder for testing.
X_train, y_train = digits.data[0:1200], digits.target[0:1200]
X_test, y_test = digits.data[1200:1797], digits.target[1200:1797]

# Build and fit the support-vector classifier.
svc_model = svm.SVC(gamma=0.001, C=100)
svc_model.fit(X_train, y_train)

# Predict labels for the held-out samples.
y_pred = svc_model.predict(X_test)

# Report per-class precision/recall/F1 on the test set.
print("\nClassification report for classifier %s:\n %s\n"
      % (svc_model, metrics.classification_report(y_test, y_pred)))
|
<reponame>vagechirkov/mne-python
from itertools import product
import os
import os.path as op
import pytest
import numpy as np
from numpy.testing import assert_equal, assert_allclose, assert_array_equal
from mne.channels import make_standard_montage
from mne.datasets import testing
from mne.io import read_raw_fif, read_raw_kit, read_raw_bti, read_info
from mne.io.constants import FIFF
from mne import (read_forward_solution, write_forward_solution,
make_forward_solution, convert_forward_solution,
setup_volume_source_space, read_source_spaces, create_info,
make_sphere_model, pick_types_forward, pick_info, pick_types,
read_evokeds, read_cov, read_dipole,
get_volume_labels_from_aseg)
from mne.utils import requires_mne, requires_nibabel, run_subprocess
from mne.forward._make_forward import _create_meg_coils, make_forward_dipole
from mne.forward._compute_forward import _magnetic_dipole_field_vec
from mne.forward import Forward, _do_forward_solution
from mne.dipole import Dipole, fit_dipole
from mne.simulation import simulate_evoked
from mne.source_estimate import VolSourceEstimate
from mne.source_space import (write_source_spaces, _compare_source_spaces,
setup_source_space)
from mne.forward.tests.test_forward import assert_forward_allclose
# Locations within the mne-testing dataset (not downloaded here) and the
# small raw file that ships with the mne source tree.
data_path = testing.data_path(download=False)
fname_meeg = op.join(data_path, 'MEG', 'sample',
                     'sample_audvis_trunc-meg-eeg-oct-4-fwd.fif')
fname_raw = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data',
                    'test_raw.fif')
fname_evo = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-ave.fif')
fname_cov = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-cov.fif')
fname_dip = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc_set1.dip')
fname_trans = op.join(data_path, 'MEG', 'sample',
                      'sample_audvis_trunc-trans.fif')
# FreeSurfer-style subjects directory with source spaces and BEM solutions.
subjects_dir = os.path.join(data_path, 'subjects')
fname_src = op.join(subjects_dir, 'sample', 'bem', 'sample-oct-4-src.fif')
fname_bem = op.join(subjects_dir, 'sample', 'bem',
                    'sample-1280-1280-1280-bem-sol.fif')
fname_aseg = op.join(subjects_dir, 'sample', 'mri', 'aseg.mgz')
fname_bem_meg = op.join(subjects_dir, 'sample', 'bem',
                        'sample-1280-bem-sol.fif')
def _compare_forwards(fwd, fwd_py, n_sensors, n_src,
                      meg_rtol=1e-4, meg_atol=1e-9,
                      eeg_rtol=1e-3, eeg_atol=1e-3):
    """Test forwards.

    Compare a reference forward solution ``fwd`` against ``fwd_py`` across
    all four combinations of surface orientation and fixed orientation,
    checking source spaces, metadata, and the MEG/EEG gain matrices within
    the given tolerances.
    """
    # check source spaces
    assert_equal(len(fwd['src']), len(fwd_py['src']))
    _compare_source_spaces(fwd['src'], fwd_py['src'], mode='approx')
    for surf_ori, force_fixed in product([False, True], [False, True]):
        # use copy here to leave our originals unmodified
        fwd = convert_forward_solution(fwd, surf_ori, force_fixed, copy=True,
                                       use_cps=True)
        fwd_py = convert_forward_solution(fwd_py, surf_ori, force_fixed,
                                          copy=True, use_cps=True)
        # fixed orientation collapses the 3 orientations per source to 1
        check_src = n_src // 3 if force_fixed else n_src

        for key in ('nchan', 'source_rr', 'source_ori',
                    'surf_ori', 'coord_frame', 'nsource'):
            assert_allclose(fwd_py[key], fwd[key], rtol=1e-4, atol=1e-7,
                            err_msg=key)
        # In surf_ori=True only Z matters for source_nn
        if surf_ori and not force_fixed:
            ori_sl = slice(2, None, 3)
        else:
            ori_sl = slice(None)
        assert_allclose(fwd_py['source_nn'][ori_sl], fwd['source_nn'][ori_sl],
                        rtol=1e-4, atol=1e-6)
        assert_allclose(fwd_py['mri_head_t']['trans'],
                        fwd['mri_head_t']['trans'], rtol=1e-5, atol=1e-8)

        assert_equal(fwd_py['sol']['data'].shape, (n_sensors, check_src))
        assert_equal(len(fwd['sol']['row_names']), n_sensors)
        assert_equal(len(fwd_py['sol']['row_names']), n_sensors)

        # check MEG (the first 306 gain-matrix rows)
        assert_allclose(fwd['sol']['data'][:306, ori_sl],
                        fwd_py['sol']['data'][:306, ori_sl],
                        rtol=meg_rtol, atol=meg_atol,
                        err_msg='MEG mismatch')
        # check EEG (any remaining rows)
        if fwd['sol']['data'].shape[0] > 306:
            assert_allclose(fwd['sol']['data'][306:, ori_sl],
                            fwd_py['sol']['data'][306:, ori_sl],
                            rtol=eeg_rtol, atol=eeg_atol,
                            err_msg='EEG mismatch')
def test_magnetic_dipole():
    """Test basic magnetic dipole forward calculation."""
    # Use only the first 12 MEG channels to keep this fast.
    info = read_info(fname_raw)
    picks = pick_types(info, meg=True, eeg=False, exclude=[])
    info = pick_info(info, picks[:12])
    coils = _create_meg_coils(info['chs'], 'normal', None)
    # magnetic dipole far (meters!) from device origin
    r0 = np.array([0., 13., -6.])
    for ch, coil in zip(info['chs'], coils):
        rr = (ch['loc'][:3] + r0) / 2.  # get halfway closer
        far_fwd = _magnetic_dipole_field_vec(r0[np.newaxis, :], [coil])
        near_fwd = _magnetic_dipole_field_vec(rr[np.newaxis, :], [coil])
        # expected field growth when halving the distance depends on the
        # channel type (see suffix convention — TODO confirm naming)
        ratio = 8. if ch['ch_name'][-1] == '1' else 16.  # grad vs mag
        assert_allclose(np.median(near_fwd / far_fwd), ratio, atol=1e-1)
    # degenerate case: dipole placed on one of the coil integration points
    r0 = coils[0]['rmag'][[0]]
    with pytest.raises(RuntimeError, match='Coil too close'):
        _magnetic_dipole_field_vec(r0, coils[:1])
    with pytest.warns(RuntimeWarning, match='Coil too close'):
        fwd = _magnetic_dipole_field_vec(r0, coils[:1], too_close='warning')
    assert not np.isfinite(fwd).any()
    with np.errstate(invalid='ignore'):
        fwd = _magnetic_dipole_field_vec(r0, coils[:1], too_close='info')
    assert not np.isfinite(fwd).any()
@pytest.mark.slowtest  # slow-ish on Travis OSX
@pytest.mark.timeout(60)  # can take longer than 30 sec on Travis
@testing.requires_testing_data
@requires_mne
def test_make_forward_solution_kit(tmpdir):
    """Test making fwd using KIT, BTI, and CTF (compensated) files.

    Each system's python solution is compared against the MNE-C solution
    produced by _do_forward_solution.
    """
    # test data for each MEG system, shipped with the mne source tree
    kit_dir = op.join(op.dirname(__file__), '..', '..', 'io', 'kit',
                      'tests', 'data')
    sqd_path = op.join(kit_dir, 'test.sqd')
    mrk_path = op.join(kit_dir, 'test_mrk.sqd')
    elp_path = op.join(kit_dir, 'test_elp.txt')
    hsp_path = op.join(kit_dir, 'test_hsp.txt')
    trans_path = op.join(kit_dir, 'trans-sample.fif')
    fname_kit_raw = op.join(kit_dir, 'test_bin_raw.fif')
    bti_dir = op.join(op.dirname(__file__), '..', '..', 'io', 'bti',
                      'tests', 'data')
    bti_pdf = op.join(bti_dir, 'test_pdf_linux')
    bti_config = op.join(bti_dir, 'test_config_linux')
    bti_hs = op.join(bti_dir, 'test_hs_linux')
    fname_bti_raw = op.join(bti_dir, 'exported4D_linux_raw.fif')
    fname_ctf_raw = op.join(op.dirname(__file__), '..', '..', 'io', 'tests',
                            'data', 'test_ctf_comp_raw.fif')
    # first set up a small testing source space
    fname_src_small = tmpdir.join('sample-oct-2-src.fif')
    src = setup_source_space('sample', 'oct2', subjects_dir=subjects_dir,
                             add_dist=False)
    write_source_spaces(fname_src_small, src)  # to enable working with MNE-C
    n_src = 108  # this is the resulting # of verts in fwd
    # first use mne-C: convert file, make forward solution
    fwd = _do_forward_solution('sample', fname_kit_raw, src=fname_src_small,
                               bem=fname_bem_meg, mri=trans_path,
                               eeg=False, meg=True, subjects_dir=subjects_dir)
    assert (isinstance(fwd, Forward))
    # now let's use python with the same raw file (157 KIT sensors expected)
    fwd_py = make_forward_solution(fname_kit_raw, trans_path, src,
                                   fname_bem_meg, eeg=False, meg=True)
    _compare_forwards(fwd, fwd_py, 157, n_src)
    assert (isinstance(fwd_py, Forward))
    # now let's use mne-python all the way
    raw_py = read_raw_kit(sqd_path, mrk_path, elp_path, hsp_path)
    # without ignore_ref=True, this should throw an error:
    with pytest.raises(NotImplementedError, match='Cannot.*KIT reference'):
        make_forward_solution(raw_py.info, src=src, eeg=False, meg=True,
                              bem=fname_bem_meg, trans=trans_path)
    # check that asking for eeg channels (even if they don't exist) is handled
    meg_only_info = pick_info(raw_py.info, pick_types(raw_py.info, meg=True,
                                                      eeg=False))
    fwd_py = make_forward_solution(meg_only_info, src=src, meg=True, eeg=True,
                                   bem=fname_bem_meg, trans=trans_path,
                                   ignore_ref=True)
    _compare_forwards(fwd, fwd_py, 157, n_src,
                      meg_rtol=1e-3, meg_atol=1e-7)
    # BTI python end-to-end versus C (248 sensors expected)
    fwd = _do_forward_solution('sample', fname_bti_raw, src=fname_src_small,
                               bem=fname_bem_meg, mri=trans_path,
                               eeg=False, meg=True, subjects_dir=subjects_dir)
    raw_py = read_raw_bti(bti_pdf, bti_config, bti_hs, preload=False)
    fwd_py = make_forward_solution(raw_py.info, src=src, eeg=False, meg=True,
                                   bem=fname_bem_meg, trans=trans_path)
    _compare_forwards(fwd, fwd_py, 248, n_src)
    # now let's test CTF w/compensation (274 sensors expected)
    fwd_py = make_forward_solution(fname_ctf_raw, fname_trans, src,
                                   fname_bem_meg, eeg=False, meg=True)
    fwd = _do_forward_solution('sample', fname_ctf_raw, mri=fname_trans,
                               src=fname_src_small, bem=fname_bem_meg,
                               eeg=False, meg=True, subjects_dir=subjects_dir)
    _compare_forwards(fwd, fwd_py, 274, n_src)
    # CTF with compensation changed in python
    ctf_raw = read_raw_fif(fname_ctf_raw)
    ctf_raw.info['bads'] = ['MRO24-2908']  # test that it works with some bads
    ctf_raw.apply_gradient_compensation(2)
    fwd_py = make_forward_solution(ctf_raw.info, fname_trans, src,
                                   fname_bem_meg, eeg=False, meg=True)
    fwd = _do_forward_solution('sample', ctf_raw, mri=fname_trans,
                               src=fname_src_small, bem=fname_bem_meg,
                               eeg=False, meg=True,
                               subjects_dir=subjects_dir)
    _compare_forwards(fwd, fwd_py, 274, n_src)
    # round-trip the python CTF solution through disk
    fname_temp = tmpdir.join('test-ctf-fwd.fif')
    write_forward_solution(fname_temp, fwd_py)
    fwd_py2 = read_forward_solution(fname_temp)
    _compare_forwards(fwd_py, fwd_py2, 274, n_src)
    repr(fwd_py)  # smoke test __repr__
@pytest.mark.slowtest
@testing.requires_testing_data
def test_make_forward_solution():
    """Test making M-EEG forward solution from python."""
    # Reference solution stored in the testing dataset.
    fwd_ref = read_forward_solution(fname_meeg)
    assert isinstance(fwd_ref, Forward)
    # Solution computed here by mne-python.
    fwd_py = make_forward_solution(fname_raw, fname_trans, fname_src,
                                   fname_bem, mindist=5.)
    assert isinstance(fwd_py, Forward)
    _compare_forwards(fwd_ref, fwd_py, 366, 1494, meg_rtol=1e-3)
    # A 1-layer (homogeneous) BEM must be rejected when EEG is requested.
    with pytest.raises(RuntimeError, match='homogeneous.*1-layer.*EEG'):
        make_forward_solution(fname_raw, fname_trans, fname_src,
                              fname_bem_meg)
@testing.requires_testing_data
def test_make_forward_solution_discrete(tmpdir):
    """Test making and converting a forward solution with discrete src."""
    # smoke test for depth weighting and discrete source spaces
    surf_src = setup_source_space('sample', 'oct2', subjects_dir=subjects_dir,
                                  add_dist=False)
    # Append a discrete source space built from three surface vertices.
    hemi = surf_src[0]
    sel = hemi['vertno'][:3]
    disc_src = setup_volume_source_space(
        pos=dict(rr=hemi['rr'][sel].copy(), nn=hemi['nn'][sel].copy()))
    src = surf_src + disc_src
    fwd = make_forward_solution(fname_raw, fname_trans, src,
                                make_sphere_model(), meg=True, eeg=False)
    convert_forward_solution(fwd, surf_ori=True)
@testing.requires_testing_data
@requires_mne
@pytest.mark.timeout(90)  # can take longer than 60 sec on Travis
def test_make_forward_solution_sphere(tmpdir):
    """Test making a forward solution with a sphere model."""
    fname_src_small = tmpdir.join('sample-oct-2-src.fif')
    src = setup_source_space('sample', 'oct2', subjects_dir=subjects_dir,
                             add_dist=False)
    write_source_spaces(fname_src_small, src)  # to enable working with MNE-C
    out_name = tmpdir.join('tmp-fwd.fif')
    # Reference solution from MNE-C.
    run_subprocess(['mne_forward_solution', '--meg', '--eeg',
                    '--meas', fname_raw, '--src', fname_src_small,
                    '--mri', fname_trans, '--fwd', out_name])
    fwd = read_forward_solution(out_name)
    sphere = make_sphere_model(verbose=True)
    fwd_py = make_forward_solution(fname_raw, fname_trans, src, sphere,
                                   meg=True, eeg=True, verbose=True)
    _compare_forwards(fwd, fwd_py, 366, 108,
                      meg_rtol=5e-1, meg_atol=1e-6,
                      eeg_rtol=5e-1, eeg_atol=5e-1)
    # Since the above is pretty lax, let's check a different way
    for meg, eeg in zip([True, False], [False, True]):
        fwd_ = pick_types_forward(fwd, meg=meg, eeg=eeg)
        # BUG FIX: this previously picked from `fwd` again, so the
        # correlation below compared the C solution with itself and was
        # trivially 1.0. Pick from the python solution instead.
        fwd_py_ = pick_types_forward(fwd_py, meg=meg, eeg=eeg)
        assert_allclose(np.corrcoef(fwd_['sol']['data'].ravel(),
                                    fwd_py_['sol']['data'].ravel())[0, 1],
                        1.0, rtol=1e-3)
    # Number of layers in the sphere model doesn't matter for MEG
    # (as long as no sources are omitted due to distance)
    assert len(sphere['layers']) == 4
    fwd = make_forward_solution(fname_raw, fname_trans, src, sphere,
                                meg=True, eeg=False)
    sphere_1 = make_sphere_model(head_radius=None)
    assert len(sphere_1['layers']) == 0
    assert_array_equal(sphere['r0'], sphere_1['r0'])
    fwd_1 = make_forward_solution(fname_raw, fname_trans, src, sphere,
                                  meg=True, eeg=False)
    _compare_forwards(fwd, fwd_1, 306, 108, meg_rtol=1e-12, meg_atol=1e-12)
    # Homogeneous model (no shells) must be rejected for EEG
    sphere = make_sphere_model(head_radius=None)
    with pytest.raises(RuntimeError, match='zero shells.*EEG'):
        make_forward_solution(fname_raw, fname_trans, src, sphere)
@pytest.mark.slowtest
@testing.requires_testing_data
@requires_nibabel()
def test_forward_mixed_source_space(tmpdir):
    """Test making the forward solution for a mixed source space."""
    # get the surface source space
    rng = np.random.RandomState(0)
    surf = read_source_spaces(fname_src)
    # setup two volume source spaces from seeded-randomly chosen aseg labels
    label_names = get_volume_labels_from_aseg(fname_aseg)
    vol_labels = rng.choice(label_names, 2)
    with pytest.warns(RuntimeWarning, match='Found no usable.*CC_Mid_Ant.*'):
        vol1 = setup_volume_source_space('sample', pos=20., mri=fname_aseg,
                                         volume_label=vol_labels[0],
                                         add_interpolator=False)
    vol2 = setup_volume_source_space('sample', pos=20., mri=fname_aseg,
                                     volume_label=vol_labels[1],
                                     add_interpolator=False)
    # merge surfaces and volume
    src = surf + vol1 + vol2
    # calculate forward solution
    fwd = make_forward_solution(fname_raw, fname_trans, src, fname_bem)
    assert (repr(fwd))
    # extract source spaces
    src_from_fwd = fwd['src']
    # get the coordinate frame of each source space
    coord_frames = np.array([s['coord_frame'] for s in src_from_fwd])
    # assert that all source spaces are in head coordinates
    assert ((coord_frames == FIFF.FIFFV_COORD_HEAD).all())
    # run tests for SourceSpaces.export_volume
    fname_img = tmpdir.join('temp-image.mgz')
    # head coordinates and mri_resolution, but trans file missing
    with pytest.raises(ValueError, match='trans containing mri to head'):
        src_from_fwd.export_volume(fname_img, mri_resolution=True, trans=None)
    # head coordinates and mri_resolution, but wrong trans file
    vox_mri_t = vol1[0]['vox_mri_t']
    with pytest.raises(ValueError, match='head<->mri, got mri_voxel->mri'):
        src_from_fwd.export_volume(fname_img, mri_resolution=True,
                                   trans=vox_mri_t)
@pytest.mark.slowtest
@testing.requires_testing_data
def test_make_forward_dipole(tmpdir):
    """Test forward-projecting dipoles.

    Simulate evoked data from known dipoles, refit them, and check the
    fits recover position/orientation/amplitude; then exercise BEM
    rejection and the evenly-sampled I/O path.
    """
    rng = np.random.RandomState(0)
    evoked = read_evokeds(fname_evo)[0]
    cov = read_cov(fname_cov)
    cov['projs'] = []  # avoid proj warning
    dip_c = read_dipole(fname_dip)
    # Only use magnetometers for speed!
    picks = pick_types(evoked.info, meg='mag', eeg=False)[::8]
    evoked.pick_channels([evoked.ch_names[p] for p in picks])
    evoked.info.normalize_proj()
    info = evoked.info
    # Make new Dipole object with n_test_dipoles picked from the dipoles
    # in the test dataset.
    n_test_dipoles = 3  # minimum 3 needed to get uneven sampling in time
    dipsel = np.sort(rng.permutation(np.arange(len(dip_c)))[:n_test_dipoles])
    dip_test = Dipole(times=dip_c.times[dipsel],
                      pos=dip_c.pos[dipsel],
                      amplitude=dip_c.amplitude[dipsel],
                      ori=dip_c.ori[dipsel],
                      gof=dip_c.gof[dipsel])
    sphere = make_sphere_model(head_radius=0.1)
    # Warning emitted due to uneven sampling in time
    with pytest.warns(RuntimeWarning, match='unevenly spaced'):
        fwd, stc = make_forward_dipole(dip_test, sphere, info,
                                       trans=fname_trans)
    # stc is list of VolSourceEstimate's
    assert isinstance(stc, list)
    for n_dip in range(n_test_dipoles):
        assert isinstance(stc[n_dip], VolSourceEstimate)
    # Now simulate evoked responses for each of the test dipoles,
    # and fit dipoles to them (sphere model, MEG and EEG)
    times, pos, amplitude, ori, gof = [], [], [], [], []
    nave = 200  # add a tiny amount of noise to the simulated evokeds
    for s in stc:
        evo_test = simulate_evoked(fwd, s, info, cov,
                                   nave=nave, random_state=rng)
        # evo_test.add_proj(make_eeg_average_ref_proj(evo_test.info))
        dfit, resid = fit_dipole(evo_test, cov, sphere, None)
        times += dfit.times.tolist()
        pos += dfit.pos.tolist()
        amplitude += dfit.amplitude.tolist()
        ori += dfit.ori.tolist()
        gof += dfit.gof.tolist()
    # Create a new Dipole object with the dipole fits
    dip_fit = Dipole(times, pos, amplitude, ori, gof)
    # check that true (test) dipoles and fits are "close"
    # cf. mne/tests/test_dipole.py
    diff = dip_test.pos - dip_fit.pos
    corr = np.corrcoef(dip_test.pos.ravel(), dip_fit.pos.ravel())[0, 1]
    dist = np.sqrt(np.mean(np.sum(diff * diff, axis=1)))
    gc_dist = 180 / np.pi * \
        np.mean(np.arccos(np.sum(dip_test.ori * dip_fit.ori, axis=1)))
    amp_err = np.sqrt(np.mean((dip_test.amplitude - dip_fit.amplitude) ** 2))
    # Make sure each coordinate is close to reference
    # NB tolerance should be set relative to snr of simulated evoked!
    assert_allclose(dip_fit.pos, dip_test.pos, rtol=0, atol=1e-2,
                    err_msg='position mismatch')
    assert dist < 1e-2  # within 1 cm
    assert corr > 0.985
    assert gc_dist < 20  # less than 20 degrees
    assert amp_err < 10e-9  # within 10 nAm
    # Make sure rejection works with BEM: one dipole at z=1m
    # NB _make_forward.py:_prepare_for_forward will raise a RuntimeError
    # if no points are left after min_dist exclusions, hence 2 dips here!
    dip_outside = Dipole(times=[0., 0.001],
                         pos=[[0., 0., 1.0], [0., 0., 0.040]],
                         amplitude=[100e-9, 100e-9],
                         ori=[[1., 0., 0.], [1., 0., 0.]], gof=1)
    with pytest.raises(ValueError, match='outside the inner skull'):
        make_forward_dipole(dip_outside, fname_bem, info, fname_trans)
    # if we get this far, can safely assume the code works with BEMs too
    # -> use sphere again below for speed
    # Now make an evenly sampled set of dipoles, some simultaneous,
    # should return a VolSourceEstimate regardless
    times = [0., 0., 0., 0.001, 0.001, 0.002]
    # BUG FIX: use the seeded local `rng` rather than the global np.random
    # so the test is reproducible and unaffected by other tests' RNG state.
    pos = rng.rand(6, 3) * 0.020 + \
        np.array([0., 0., 0.040])[np.newaxis, :]
    amplitude = rng.rand(6) * 100e-9
    ori = np.eye(6, 3) + np.eye(6, 3, -3)
    gof = np.arange(len(times)) / len(times)  # arbitrary
    dip_even_samp = Dipole(times, pos, amplitude, ori, gof)
    # I/O round-trip
    fname = str(tmpdir.join('test-fwd.fif'))
    with pytest.warns(RuntimeWarning, match='free orientation'):
        write_forward_solution(fname, fwd)
    fwd_read = convert_forward_solution(
        read_forward_solution(fname), force_fixed=True)
    assert_forward_allclose(fwd, fwd_read, rtol=1e-6)
    fwd, stc = make_forward_dipole(dip_even_samp, sphere, info,
                                   trans=fname_trans)
    assert isinstance(stc, VolSourceEstimate)
    assert_allclose(stc.times, np.arange(0., 0.003, 0.001))
@testing.requires_testing_data
def test_make_forward_no_meg(tmpdir):
    """Test that we can make and I/O forward solution with no MEG channels."""
    # Single discrete source and a sphere conductor model.
    src = setup_volume_source_space(
        pos=dict(rr=[[0.05, 0, 0]], nn=[[0, 0, 1.]]))
    bem = make_sphere_model()
    # One EEG channel positioned via a standard montage; trans is None.
    montage = make_standard_montage('standard_1020')
    info = create_info(['Cz'], 1000., 'eeg').set_montage(montage)
    fwd = make_forward_solution(info, None, src, bem)
    # Round-trip through disk and confirm the gain matrix survives.
    fname = tmpdir.join('test-fwd.fif')
    write_forward_solution(fname, fwd)
    fwd_read = read_forward_solution(fname)
    assert_allclose(fwd['sol']['data'], fwd_read['sol']['data'])
|
# Remove duplicates from mylist while preserving first-seen order.
mylist = [1, 2, 3, 2, 5, 8, 8]

# dict.fromkeys keeps insertion order, so this is an order-preserving dedupe.
newList = list(dict.fromkeys(mylist))

print(newList)
|
<filename>jgrapht-master/jgrapht-io/src/test/java/org/jgrapht/io/DOTImporter1Test.java
/*
* (C) Copyright 2015-2017, by <NAME> and Contributors.
*
* JGraphT : a free Java graph-theory library
*
* This program and the accompanying materials are dual-licensed under
* either
*
* (a) the terms of the GNU Lesser General Public License version 2.1
* as published by the Free Software Foundation, or (at your option) any
* later version.
*
* or (per the licensee's choosing)
*
* (b) the terms of the Eclipse Public License v1.0 as published by
* the Eclipse Foundation.
*/
package org.jgrapht.io;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.fail;
import java.io.*;
import java.util.*;
import org.jgrapht.*;
import org.jgrapht.graph.*;
import org.junit.*;
/**
* 1st part of tests for DOTImporter. See also {@link DOTImporter2Test}.
*/
public class DOTImporter1Test
{
@Test
public void testImportID()
throws ImportException
{
String id = "MyGraph";
String input = "strict graph " + id + " {\n}\n";
GraphWithID expected = new GraphWithID();
expected.id = id;
GraphWithID result = new GraphWithID();
assertNull(result.id);
buildGraphIDImporter().importGraph(result, new StringReader(input));
assertEquals(expected.toString(), result.toString());
assertEquals(expected.id, id);
}
    @Test
    public void testImportWrongID()
        throws ImportException
    {
        // DOT identifiers may not start with a digit, so this must fail.
        String invalidID = "2test";
        String input = "graph " + invalidID + " {\n}\n";
        GraphWithID result = new GraphWithID();
        try {
            buildGraphIDImporter().importGraph(result, new StringReader(input));
            fail("Should not get here");
        } catch (ImportException e) {
            // The error points at the trailing "test" after the digit.
            assertEquals(
                "Failed to import DOT graph: line 1:7 extraneous input 'test' expecting '{'",
                e.getMessage());
        }
    }
    @Test
    public void testInvalidHeader()
        throws ImportException
    {
        // testing all cases of missing keywords or wrong order
        for (String invalidInput : new String[] { " {}", "strict {}", "id {}", "strict id {}",
            "id strict {}", "id strict graph {}", "graph strict id {}" })
        {
            GraphWithID result = new GraphWithID();
            try {
                buildGraphIDImporter().importGraph(result, new StringReader(invalidInput));
                fail("Correctly loaded incorrect graph: " + invalidInput);
            } catch (ImportException e) {
                // this is the expected exception
            } catch (Exception e) {
                // any other exception type indicates a bug in the importer
                fail("Expected ImportException but found " + e.getClass().getSimpleName());
            }
        }
    }
    @Test
    public void testImportOnlyGraphKeyword()
        throws ImportException
    {
        // a bare "graph" header is valid and yields no graph id
        String input = "graph {\n}\n";
        GraphWithID result = new GraphWithID();
        buildGraphIDImporter().importGraph(result, new StringReader(input));
        assertNull(result.id);
    }
    @Test
    public void testImportNoID()
        throws ImportException
    {
        // "strict graph" without an identifier is valid; id stays null
        String input = "strict graph {\n}\n";
        GraphWithID result = new GraphWithID();
        buildGraphIDImporter().importGraph(result, new StringReader(input));
        assertNull(result.id);
    }
    @Test
    public void testUndirectedWithLabels()
        throws ImportException
    {
        // Vertex attribute lists are parsed; the expected vertices are keyed
        // by their DOT ids ("1", "2"), not the labels.
        String input = "graph G {\n" + " 1 [ \"label\"=\"abc123\" ];\n"
            + " 2 [ label=\"fred\" ];\n" + " 1 -- 2;\n" + "}";
        Multigraph<String, DefaultEdge> expected = new Multigraph<>(DefaultEdge.class);
        expected.addVertex("1");
        expected.addVertex("2");
        expected.addEdge("1", "2");
        GraphImporter<String, DefaultEdge> importer = buildImporter();
        Multigraph<String, DefaultEdge> result = new Multigraph<>(DefaultEdge.class);
        importer.importGraph(result, new StringReader(input));
        assertEquals(expected.toString(), result.toString());
        assertEquals(2, result.vertexSet().size());
        assertEquals(1, result.edgeSet().size());
    }
    @Test
    public void testDirectedNoLabels()
        throws ImportException
    {
        // Chained edge statements (a -> b -> c) expand to one edge per arrow.
        String input =
            "digraph graphname {\r\n" + " a -> b -> c;\r\n" + " b -> d;\r\n" + " }";
        DirectedMultigraph<String, DefaultEdge> expected =
            new DirectedMultigraph<>(DefaultEdge.class);
        expected.addVertex("a");
        expected.addVertex("b");
        expected.addVertex("c");
        expected.addVertex("d");
        expected.addEdge("a", "b");
        expected.addEdge("b", "c");
        expected.addEdge("b", "d");
        GraphImporter<String, DefaultEdge> importer = buildImporter();
        DirectedMultigraph<String, DefaultEdge> result =
            new DirectedMultigraph<>(DefaultEdge.class);
        importer.importGraph(result, new StringReader(input));
        assertEquals(expected.toString(), result.toString());
        assertEquals(4, result.vertexSet().size());
        assertEquals(3, result.edgeSet().size());
    }
    @Test
    public void testDirectedSameLabels()
        throws ImportException
    {
        // Three distinct vertices share the same label; the custom vertex
        // provider below returns its first argument unchanged, keeping them
        // distinct in the result graph.
        String input =
            "digraph sample {\n" + " a -> b;" + " b -> c;\n" + " a [ label=\"Test\"];\n"
                + " b [ label=\"Test\"];\n" + " c [ label=\"Test\"];\n" + "}";
        DirectedMultigraph<String, DefaultEdge> expected =
            new DirectedMultigraph<>(DefaultEdge.class);
        expected.addVertex("a");
        expected.addVertex("b");
        expected.addVertex("c");
        expected.addEdge("a", "b");
        expected.addEdge("b", "c");
        // vertex provider: use the incoming name, ignore attributes
        VertexProvider<String> vp = (label, attrs) -> label;
        // edge provider: plain edges, attributes ignored
        EdgeProvider<String, DefaultEdge> ep = (f, t, l, attrs) -> new DefaultEdge();
        // component updater: no post-processing of vertices
        ComponentUpdater<String> cu = (v, attrs) -> {
        };
        DOTImporter<String, DefaultEdge> importer = new DOTImporter<>(vp, ep, cu);
        DirectedMultigraph<String, DefaultEdge> result =
            new DirectedMultigraph<>(DefaultEdge.class);
        importer.importGraph(result, new StringReader(input));
        assertEquals(expected.toString(), result.toString());
    }
    @Test
    public void testMultiLinksUndirected()
        throws ImportException
    {
        String input = "graph G {\n" + " 1 [ label=\"bob\" ];\n" + " 2 [ label=\"fred\" ];\n"
        // the extra label will be ignored but not cause any problems.
            + " 1 -- 2 [ label=\"friend\"];\n" + " 1 -- 2;\n" + "}";
        // A Multigraph permits parallel edges, so both "1 -- 2" statements
        // must survive the import.
        Multigraph<String, DefaultEdge> expected = new Multigraph<>(DefaultEdge.class);
        expected.addVertex("1");
        expected.addVertex("2");
        expected.addEdge("1", "2", new DefaultEdge());
        expected.addEdge("1", "2", new DefaultEdge());
        GraphImporter<String, DefaultEdge> importer = buildImporter();
        Multigraph<String, DefaultEdge> result = new Multigraph<>(DefaultEdge.class);
        importer.importGraph(result, new StringReader(input));
        assertEquals(expected.toString(), result.toString());
        assertEquals(2, result.vertexSet().size());
        assertEquals(2, result.edgeSet().size());
    }
    // Round-trip: export a graph with DOTExporter, re-import the produced text,
    // and verify the result is structurally identical to the original.
    @Test
    public void testExportImportLoop()
        throws ImportException, ExportException, UnsupportedEncodingException
    {
        DirectedMultigraph<String, DefaultEdge> start = new DirectedMultigraph<>(DefaultEdge.class);
        start.addVertex("a");
        start.addVertex("b");
        start.addVertex("c");
        start.addVertex("d");
        start.addEdge("a", "b");
        start.addEdge("b", "c");
        start.addEdge("b", "d");
        // Vertices export under their own names; edges get generated integer ids.
        DOTExporter<String, DefaultEdge> exporter = new DOTExporter<>(
            vertex -> vertex, null, new IntegerComponentNameProvider<DefaultEdge>());
        GraphImporter<String, DefaultEdge> importer = buildImporter();
        ByteArrayOutputStream os = new ByteArrayOutputStream();
        exporter.exportGraph(start, os);
        String output = new String(os.toByteArray(), "UTF-8");
        DirectedMultigraph<String, DefaultEdge> result =
            new DirectedMultigraph<>(DefaultEdge.class);
        importer.importGraph(result, new StringReader(output));
        assertEquals(start.toString(), result.toString());
        assertEquals(4, result.vertexSet().size());
        assertEquals(3, result.edgeSet().size());
    }
    // Dashes inside a quoted label must be treated as plain text, not parsed as
    // edge operators; the attribute map handed to the vertex provider keeps them.
    @Test
    public void testDashLabelVertex()
        throws ImportException
    {
        String input =
            "graph G {\n" + "a [label=\"------this------contains-------dashes------\"]\n" + "}";
        Multigraph<String, DefaultEdge> result = new Multigraph<>(DefaultEdge.class);
        // Capture each vertex's attribute map so the label can be asserted afterwards.
        Map<String, Map<String, Attribute>> attrs = new HashMap<>();
        VertexProvider<String> vp = (label, a) -> {
            attrs.put(label, a);
            return label;
        };
        EdgeProvider<String, DefaultEdge> ep = (f, t, l, a) -> new DefaultEdge();
        ComponentUpdater<String> cu = (v, a) -> {
        };
        DOTImporter<String, DefaultEdge> importer =
            new DOTImporter<String, DefaultEdge>(vp, ep, cu);
        importer.importGraph(result, new StringReader(input));
        assertEquals(1, result.vertexSet().size());
        String v = result.vertexSet().stream().findFirst().get();
        assertEquals("a", v);
        assertEquals("------this------contains-------dashes------", attrs.get("a").get("label").getValue());
    }
    // Attribute values may be unquoted identifiers ("=bar", "= wibble"); both
    // vertex and edge attribute maps must contain them alongside quoted values.
    @Test
    public void testAttributesWithNoQuotes()
        throws ImportException
    {
        String input =
            "graph G {\n" + "   1 [ label = \"bob\" \"foo\"=bar ];\n" + "   2 [ label = \"fred\" ];\n"
        // the extra label will be ignored but not cause any problems.
                + "   1 -- 2 [ label = \"friend\" \"foo\" = wibble];\n" + "}";
        Multigraph<TestVertex, TestEdge> result =
            new Multigraph<TestVertex, TestEdge>(TestEdge.class);
        DOTImporter<TestVertex, TestEdge> importer = new DOTImporter<TestVertex, TestEdge>(
            (l, a) -> new TestVertex(l, a), (f, t, l, a) -> new TestEdge(l, a));
        importer.importGraph(result, new StringReader(input));
        assertEquals("wrong size of vertexSet", 2, result.vertexSet().size());
        assertEquals("wrong size of edgeSet", 1, result.edgeSet().size());
        for (TestVertex v : result.vertexSet()) {
            if ("1".equals(v.getId())) {
                // Vertex 1 carries both the quoted label and the unquoted foo=bar.
                assertEquals("wrong number of attributes", 2, v.getAttributes().size());
                assertEquals("Wrong attribute values", "bar", v.getAttributes().get("foo").getValue());
                assertEquals("Wrong attribute values", "bob", v.getAttributes().get("label").getValue());
            } else {
                assertEquals("wrong number of attributes", 1, v.getAttributes().size());
                assertEquals("Wrong attribute values", "fred", v.getAttributes().get("label").getValue());
            }
        }
        for (TestEdge e : result.edgeSet()) {
            assertEquals("wrong id", "friend", e.getId());
            assertEquals("wrong number of attributes", 2, e.getAttributes().size());
            assertEquals("Wrong attribute value", "wibble", e.getAttributes().get("foo").getValue());
            assertEquals("Wrong attribute value", "friend", e.getAttributes().get("label").getValue());
        }
    }
@Test
public void testEmptyString()
{
testGarbage(
"",
"Failed to import DOT graph: line 1:0 mismatched input '' expecting {STRICT, GRAPH, DIGRAPH}");
}
@Test
public void testGarbageStringEnoughLines()
{
String input =
"jsfhg kjdsf hgkfds\n" + "fdsgfdsgfd\n" + "gfdgfdsgfdsg\n" + "jdhgkjfdshgsjkhl\n";
testGarbage(
input,
"Failed to import DOT graph: line 1:0 mismatched input 'jsfhg' expecting {STRICT, GRAPH, DIGRAPH}");
}
@Test
public void testGarbageStringInvalidFirstLine()
{
String input = "jsfhgkjdsfhgkfds\n" + "fdsgfdsgfd\n";
testGarbage(
input,
"Failed to import DOT graph: line 1:0 mismatched input 'jsfhgkjdsfhgkfds' expecting {STRICT, GRAPH, DIGRAPH}");
}
@Test
public void testGarbageStringNotEnoughLines()
{
String input = "jsfhgkjdsfhgkfds\n";
testGarbage(
input,
"Failed to import DOT graph: line 1:0 mismatched input 'jsfhgkjdsfhgkfds' expecting {STRICT, GRAPH, DIGRAPH}");
}
@Test
public void testIncompatibleDirectedGraph()
{
String input = "digraph G {\n" + "a -- b\n" + "}";
Multigraph<String, DefaultEdge> result = new Multigraph<>(DefaultEdge.class);
testGarbageGraph(
input, "Failed to import DOT graph: Provided graph is not directed", result);
}
@Test
public void testIncompatibleGraph()
{
String input = "graph G {\n" + "a -- b\n" + "}";
DirectedMultigraph<String, DefaultEdge> result =
new DirectedMultigraph<>(DefaultEdge.class);
testGarbageGraph(
input, "Failed to import DOT graph: Provided graph is not undirected", result);
}
    // An attribute name with no "= value" is a syntax error; the import must throw.
    @Test
    public void testAttributesWithNoValues()
        throws ImportException
    {
        String input =
            "graph G {\n" + "   1 [ label = \"bob\" \"foo\" ];\n" + "   2 [ label = \"fred\" ];\n"
        // the extra label will be ignored but not cause any problems.
                + "   1 -- 2 [ label = friend foo];\n" + "}";
        Multigraph<TestVertex, TestEdge> graph = new Multigraph<>(TestEdge.class);
        VertexProvider<TestVertex> vp = (label, attrs) -> new TestVertex(label, attrs);
        EdgeProvider<TestVertex, TestEdge> ep = (f, t, l, attrs) -> new TestEdge(l, attrs);
        DOTImporter<TestVertex, TestEdge> importer = new DOTImporter<TestVertex, TestEdge>(vp, ep);
        try {
            importer.importGraph(graph, new StringReader(input));
            // NOTE(review): fail() is passed the parser's expected error text rather
            // than a "should have thrown" message, and the catch below asserts
            // nothing about the exception — consider asserting e.getMessage().
            fail("Failed to import DOT graph: line 2:26 mismatched input ']' expecting '='");
        } catch (ImportException e) {
        }
    }
    // A node statement appearing after the vertex was created by an edge ("a -- b"
    // then "a [foo=bar]") must be routed to the ComponentUpdater, which here merges
    // the late attributes into the existing vertex.
    @Test
    public void testUpdatingVertex()
        throws ImportException
    {
        String input = "graph G {\n" + "a -- b;\n" + "a [foo=\"bar\"];\n" + "}";
        Multigraph<TestVertex, DefaultEdge> result = new Multigraph<>(DefaultEdge.class);
        VertexProvider<TestVertex> vp = (label, attrs) -> new TestVertex(label, attrs);
        EdgeProvider<TestVertex, DefaultEdge> ep = (f, t, l, attrs) -> new DefaultEdge();
        ComponentUpdater<TestVertex> cu = (v, attrs) -> v.getAttributes().putAll(attrs);
        DOTImporter<TestVertex, DefaultEdge> importer = new DOTImporter<>(vp, ep, cu);
        importer.importGraph(result, new StringReader(input));
        assertEquals("wrong size of vertexSet", 2, result.vertexSet().size());
        assertEquals("wrong size of edgeSet", 1, result.edgeSet().size());
        for (TestVertex v : result.vertexSet()) {
            if ("a".equals(v.getId())) {
                // "a" received the late foo attribute via the updater.
                assertEquals("wrong number of attributes", 1, v.getAttributes().size());
            } else {
                assertEquals("attributes are populated", 0, v.getAttributes().size());
            }
        }
    }
    // A semicolon inside a quoted label must not terminate the statement.
    @Test
    public void testParametersWithSemicolons()
        throws ImportException
    {
        String input = "graph G {\n 1 [ label=\"this label; contains a semi colon\" ];\n}\n";
        Multigraph<TestVertex, DefaultEdge> result =
            new Multigraph<TestVertex, DefaultEdge>(DefaultEdge.class);
        DOTImporter<TestVertex, DefaultEdge> importer = new DOTImporter<TestVertex, DefaultEdge>(
            (l, a) -> new TestVertex(l, a), (f, t, l, a) -> new DefaultEdge());
        importer.importGraph(result, new StringReader(input));
        assertEquals("wrong size of vertexSet", 1, result.vertexSet().size());
        assertEquals("wrong size of edgeSet", 0, result.edgeSet().size());
    }
    // Escaped quotes (\") inside a label must be unescaped on import, and the
    // "node" keyword statement must set the default label without creating a vertex;
    // only "node0" becomes a vertex.
    @Test
    public void testLabelsWithEscapedSemicolons()
        throws ImportException
    {
        String escapedLabel = "this \\\"label; \\\"contains an escaped semi colon";
        String input = "graph G {\n node [ label=\"" + escapedLabel + "\" ];\n node0 }\n";
        Multigraph<TestVertex, DefaultEdge> result =
            new Multigraph<TestVertex, DefaultEdge>(DefaultEdge.class);
        DOTImporter<TestVertex, DefaultEdge> importer = new DOTImporter<TestVertex, DefaultEdge>(
            (label, attrs) -> new TestVertex(label, attrs), (f, t, l, a) -> new DefaultEdge());
        importer.importGraph(result, new StringReader(input));
        assertEquals("wrong size of vertexSet", 1, result.vertexSet().size());
        assertEquals("wrong size of edgeSet", 0, result.edgeSet().size());
        assertEquals(
            "wrong parsing", "node0", ((TestVertex) result.vertexSet().toArray()[0]).getId());
        assertEquals(
            "wrong parsing", "this \"label; \"contains an escaped semi colon",
            ((TestVertex) result.vertexSet().toArray()[0]).getAttributes().get("label").getValue());
    }
    // Two node statements on one physical line (separated only by ';') must both parse.
    @Test
    public void testNoLineEndBetweenNodes()
        throws ImportException
    {
        String input =
            "graph G {\n 1 [ label=\"this label; contains a semi colon\" ]; 2 [ label=\"wibble\" ] \n}\n";
        Multigraph<TestVertex, DefaultEdge> result =
            new Multigraph<TestVertex, DefaultEdge>(DefaultEdge.class);
        DOTImporter<TestVertex, DefaultEdge> importer = new DOTImporter<TestVertex, DefaultEdge>(
            (l, a) -> new TestVertex(l, a), (f, t, l, a) -> new DefaultEdge());
        importer.importGraph(result, new StringReader(input));
        assertEquals("wrong size of vertexSet", 2, result.vertexSet().size());
        assertEquals("wrong size of edgeSet", 0, result.edgeSet().size());
    }
    // Quoted attribute names ("label"=...) are accepted the same as bare ones;
    // basic two-vertex one-edge import through the Reader entry point.
    @Test
    public void testWithReader()
        throws ImportException
    {
        String input = "graph G {\n" + "  1 [ \"label\"=\"abc123\" ];\n"
            + "  2 [ label=\"fred\" ];\n" + "  1 -- 2;\n" + "}";
        Multigraph<String, DefaultEdge> expected = new Multigraph<>(DefaultEdge.class);
        expected.addVertex("1");
        expected.addVertex("2");
        expected.addEdge("1", "2");
        GraphImporter<String, DefaultEdge> importer = buildImporter();
        Graph<String, DefaultEdge> result = new Multigraph<>(DefaultEdge.class);
        importer.importGraph(result, new StringReader(input));
        assertEquals(expected.toString(), result.toString());
        assertEquals(2, result.vertexSet().size());
        assertEquals(1, result.edgeSet().size());
    }
private void testGarbage(String input, String expected)
{
DirectedMultigraph<String, DefaultEdge> result =
new DirectedMultigraph<>(DefaultEdge.class);
testGarbageGraph(input, expected, result);
}
private void testGarbageGraph(
String input, String expected, AbstractBaseGraph<String, DefaultEdge> graph)
{
GraphImporter<String, DefaultEdge> importer = buildImporter();
try {
importer.importGraph(graph, new StringReader(input));
fail("Should not get here");
} catch (ImportException e) {
assertEquals(expected, e.getMessage());
}
}
private GraphImporter<String, DefaultEdge> buildImporter()
{
return new DOTImporter<String, DefaultEdge>(
(label, attributes) -> label, (from, to, label, attributes) -> new DefaultEdge());
}
    // Minimal graph subclass whose only extra state is the graph id captured from
    // the DOT header by the component updater built in buildGraphIDImporter.
    private static class GraphWithID
        extends AbstractBaseGraph<String, DefaultEdge>
    {
        private static final long serialVersionUID = 1L;
        // Populated by the importer's graph updater; stays null until then.
        String id = null;
        protected GraphWithID()
        {
            // NOTE(review): the four boolean flags are positional AbstractBaseGraph
            // constructor arguments (edge-related capabilities) — confirm their
            // meaning against the JGraphT version in use before changing them.
            super(new ClassBasedEdgeFactory<>(DefaultEdge.class), false, false, false, false);
        }
    }
    // Importer whose graph-level ComponentUpdater copies the DOT "ID" attribute
    // onto a GraphWithID target, defaulting to "G" when the header has no id.
    private GraphImporter<String, DefaultEdge> buildGraphIDImporter()
    {
        return new DOTImporter<String, DefaultEdge>(
            (label, attributes) -> label, (from, to, label, attributes) -> new DefaultEdge(), null,
            (component, attributes) -> {
                // The updater is invoked for the graph component itself; only a
                // GraphWithID target can store the id.
                if (component instanceof GraphWithID) {
                    Attribute idAttribute = attributes.get("ID");
                    String id = "G";
                    if (idAttribute != null) {
                        id = idAttribute.getValue();
                    }
                    ((GraphWithID) component).id = id;
                }
            });
    }
private class TestVertex
{
String id;
Map<String, Attribute> attributes;
public TestVertex(String id, Map<String, Attribute> attributes)
{
this.id = id;
this.attributes = attributes;
}
public String getId()
{
return id;
}
public Map<String, Attribute> getAttributes()
{
return attributes;
}
@Override
public String toString()
{
return id + ", " + attributes;
}
}
private class TestEdge
extends DefaultEdge
{
private static final long serialVersionUID = 1L;
String id;
Map<String, Attribute> attributes;
public TestEdge(String id, Map<String, Attribute> attributes)
{
super();
this.id = id;
this.attributes = attributes;
}
public String getId()
{
return id;
}
public Map<String, Attribute> getAttributes()
{
return attributes;
}
@Override
public String toString()
{
return id + ", " + attributes;
}
}
}
|
<reponame>Daniel-Liu-c0deb0t/UMICollapse
package umicollapse.util;
import java.util.Arrays;
import htsjdk.samtools.fastq.FastqRecord;
import static umicollapse.util.Utils.toBitSet;
import static umicollapse.util.Utils.toPhred33ByteArray;
import static umicollapse.util.Utils.toPhred33String;
/**
 * A FASTQ read whose UMI + nucleotide sequence is packed into a BitSet, with
 * Phred+33 quality scores and a precomputed (truncated) average quality.
 */
public class FASTQRead extends Read{
    private String desc;   // FASTQ description/header line
    private BitSet seq;    // UMI prepended to the sequence, bit-packed via Utils.toBitSet
    private byte[] qual;   // per-base Phred scores decoded from the Phred+33 string
    private int avgQual;   // mean Phred score, truncated toward zero

    /**
     * @param desc read description line
     * @param umi  UMI string, prepended to the sequence before encoding
     * @param seq  nucleotide sequence
     * @param qual Phred+33 encoded quality string
     */
    public FASTQRead(String desc, String umi, String seq, String qual){
        // Delegate: uppercasing is idempotent, so the second constructor's
        // toUpperCase() on the concatenation yields the same encoding.
        this(desc, umi.toUpperCase() + seq.toUpperCase(), qual);
    }

    /**
     * @param desc      read description line
     * @param umiAndSeq UMI and sequence already concatenated
     * @param qual      Phred+33 encoded quality string
     */
    public FASTQRead(String desc, String umiAndSeq, String qual){
        this.desc = desc;
        this.seq = toBitSet(umiAndSeq.toUpperCase());
        this.qual = toPhred33ByteArray(qual);
        this.avgQual = computeAvgQual(this.qual);
    }

    // Mean Phred score (float accumulation, truncated to int) — extracted to
    // remove the duplicated loop the two constructors previously carried.
    private static int computeAvgQual(byte[] qual){
        float avg = 0.0f;
        for(byte b : qual)
            avg += b;
        return (int)(avg / qual.length);
    }

    @Override
    public BitSet getUMI(){
        return seq;
    }

    @Override
    public int getUMILength(){
        return -1; // should never be called!
    }

    @Override
    public int getAvgQual(){
        return avgQual;
    }

    @Override
    public boolean equals(Object o){
        if(this == o)
            return true;
        // Previously a blind cast: a null or non-FASTQRead argument threw
        // ClassCastException/NPE instead of returning false.
        if(!(o instanceof FASTQRead))
            return false;
        FASTQRead r = (FASTQRead)o;
        return seq.equals(r.seq) && desc.equals(r.desc) && Arrays.equals(qual, r.qual);
    }

    @Override
    public int hashCode(){
        // Was missing even though equals is overridden; equal reads must hash equally
        // for hash-based collections to behave.
        return 31 * (31 * seq.hashCode() + desc.hashCode()) + Arrays.hashCode(qual);
    }

    /**
     * Converts this read back to a FastqRecord, stripping the first
     * {@code umiLength} characters (the UMI) from both sequence and qualities.
     */
    public FastqRecord toFASTQRecord(int length, int umiLength){
        return new FastqRecord(desc, Utils.toString(seq, length).substring(umiLength), "", Utils.toPhred33String(qual).substring(umiLength));
    }
}
|
<reponame>python-programmer/web_frameworks_benchmark<gh_stars>1-10
from django.contrib import admin
from .models import Order, OrderItem
# Expose both order models in the Django admin with the default ModelAdmin.
admin.site.register(OrderItem)
admin.site.register(Order)
|
require 'rails_helper'
RSpec.describe 'Events Management' do
  # Shared UI flow duplicated verbatim in both examples before: sign up a user
  # and create the 'E3 Conference' event, asserting each intermediate page.
  def sign_up_and_create_event
    visit('/')
    click_link('Sign up')
    expect(current_path).to have_content('/users/new')
    fill_in('user[name]', with: 'user1004')
    click_button('Create User')
    expect(current_path).to have_content('/')
    click_link('New event')
    expect(current_path).to have_content('/events/new')
    fill_in('event[description]', with: 'E3 Conference')
    fill_in('event[location]', with: 'California')
    fill_in('event[event_date]', with: Time.zone.now)
    click_button('Create Event')
    expect(page).to have_content('E3 Conference')
  end

  describe 'add a new event' do
    it 'creates a new user and generates a new event' do
      sign_up_and_create_event
    end
  end

  describe 'update an event' do
    it 'updates the recent created event' do
      sign_up_and_create_event
      # Edit the freshly created event and verify the new description renders.
      click_link('Edit')
      fill_in('event[description]', with: 'Rubykaigi 2020')
      fill_in('event[location]', with: 'Japan')
      fill_in('event[event_date]', with: Time.zone.now)
      click_button('Update Event')
      expect(page).to have_content('Rubykaigi 2020')
    end
  end
end
|
# Fine-tune a pretrained multilingual ProphetNet on the English NTG
# (news title generation) data with fairseq.
DATA_DIR=./finetune_data/NTG_processed_en
USER_DIR=./prophetnet
ARCH=ngram_transformer_prophet_large
CRITERION=ngram_language_loss
SAVE_DIR=./models/xprophetnet_ntg_en
TENSORBOARD_LOGDIR=./models/logs_xprophetnet_ntg_en
PRETRAINED_MODEL=./prophetnet_multi.pt
# Low LR with inverse-sqrt schedule; update-freq 32 x max-sentences 8 gives the
# effective batch size; checkpoints for all 20 epochs are kept.
fairseq-train \
--user-dir $USER_DIR --task translation_prophetnet --arch $ARCH \
--optimizer adam --adam-betas '(0.9, 0.999)' --clip-norm 0.1 \
--lr 0.00001 \
--lr-scheduler inverse_sqrt --warmup-init-lr 1e-07 --warmup-updates 1000 \
--dropout 0.1 --attention-dropout 0.1 --weight-decay 0.01 \
--criterion $CRITERION --label-smoothing 0.1 \
--update-freq 32 --max-sentences 8 \
--num-workers 4 --load-from-pretrained-model $PRETRAINED_MODEL \
--ddp-backend=no_c10d --max-epoch 20 \
--max-source-positions 512 --max-target-positions 512 \
--skip-invalid-size-inputs-valid-test \
--seed 1 \
--save-dir $SAVE_DIR \
--keep-last-epochs 20 \
--tensorboard-logdir $TENSORBOARD_LOGDIR \
$DATA_DIR
|
#!/bin/bash
# Toggle the ibus input engine between US English and Cantonese (rime).
# With "print" as the first argument, emit a human-readable name of the
# current engine instead (used by the polybar module).
eng="xkb:us::eng"
yue="rime"
if [ "$1" = "print" ]; then
    ibus engine | sed 's/.*eng.*/英文/;s/rime/廣東話/;'
else
    current="$(ibus engine)"
    if [ "$current" = "$eng" ]; then
        ibus engine $yue
    elif [ "$current" = "$yue" ]; then
        ibus engine $eng
    fi
    # Ask polybar to refresh its ibus hook so the label updates immediately.
    polybar-msg hook ibus 1 > /dev/null
fi
|
#!/bin/bash
# <bitbar.title>Kubernetes</bitbar.title>
# <bitbar.version>v1.0</bitbar.version>
# <bitbar.author>Peter Golm</bitbar.author>
# <bitbar.author.github>pgolm</bitbar.author.github>
# <bitbar.desc>Nagios info</bitbar.desc>
# <bitbar.dependencies>bash</bitbar.dependencies>
# "copy" mode: invoked by the menu items below; put $2 on the macOS clipboard and exit.
if [[ "$1" = "copy" ]]; then
  echo "$2" | pbcopy
  exit
fi
KUBERNETES_LOGO="iVBORw0KGgoAAAANSUhEUgAAABwAAAAbCAYAAABvCO8sAAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAACXBIWXMAABGIAAARiAE3kcxvAAABWWlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNS40LjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczp0aWZmPSJodHRwOi8vbnMuYWRvYmUuY29tL3RpZmYvMS4wLyI+CiAgICAgICAgIDx0aWZmOk9yaWVudGF0aW9uPjE8L3RpZmY6T3JpZW50YXRpb24+CiAgICAgIDwvcmRmOkRlc2NyaXB0aW9uPgogICA8L3JkZjpSREY+CjwveDp4bXBtZXRhPgpMwidZAAAGTUlEQVRIDYVWaUiWWRS+nzmWTWI1VqJFE5qWYaZpZZlaudBQ/giS8scQQZBDEP0I+lHMj3BahAaRpIWxaVA0p8VAECpbsMi0bSYpimgxy3Jtcf+WM89z/N63L5lmDpz3nnvuuee599xzz32N+QqJiB/Y33cY/UhwAbgF3Ab+FTxvlI0/dH6+uv+UaQz+xtcI/Thw4dDQUGtTU5Ns375dtm7dKjdv3pSBgYF2jBWDk0bN+X9gTBq9o2ToSj59+tTe0NAgmzdvFjh1e9nDduPGjXL9+nV5//59D2x/A6eBHRY4ZALbfdX7KiiDM8B/dHV19Vy5ckXoFIZOsGvZsmWeRYsWyeLFi4UydRzLycmRCxcuSHt7+0fMrQT/AB6DMSXII6C2ADXkdHB1W1tbb01NjdAJ1ARypqenS0ZGBvtSW1srN27cUDk1NVVWrlxJmcDDK1askHPnzsmrV6/64asGnAW9EmQHQfSA0WbijHrOnj0r8+fPt4HokGDJycmyYMECBWF4nz59qnJUVJQsXbpU0tLShLbwrDuOiIiQsrIy+fDhQy985xDRwrLQSxk+dAZnzpzJSeowJSVFYmJiVKaOO3j48KG0trbKmjVrbH1kZKSCjh07VnWzZ8+mj4Hq6mrgyCnw53NE5ztwY0FBAY2HuNoTJ07Ivn37bIdHjhzh+cDsS+rs7JTS0lLbbtu2bXLy5ElZu3YtdcM7duwQRO5vzApDf4TQSeju7u70GunuGhsbaShFRUVSX19vo3B3FvnKd+7ckd27d0tPT488e/ZMZsyYQUB3YmKivH79momUYuExtj8+efJEV7lw4UKP9wzl1ClGYoSeP38uPC9MEixOnE6nyuz7Al+7dk3i4+MFxyI4Dmax5/79+3SST0CrIsS9efOGfVdQUJBjwoQJlM2tW7e05Qd6g1Brf9y4ccbff+TKIiomODjYtsNOzb1790x4eLgJCAjgubmRsRyPUyMgB4Ivl5SUcMXDq1atkjlz5ujquSsS75dFvOQWsdJYVFdXJ263W0MKPzJ9+nTrujj37t1LMxoHM5xR/f39rRs2bCCIi3cqLCxMVq9eTSN5+fKlgjOhvkZHjx5VmwcPHqgJE4eg1v1cvnw5j6EDg3EMaczbt2/DKysrTWxsrAPghuENDAzEkDEvXrzQFtfAdHR0qFxeXm6Ki4tVpj3upMrIA22nTp1qjyUlJfkh6QyKSQiU8wgY73XkmTx5ssMbb5OQkKCTUMZMb2+vQcjNlClTVMczts5t/Pjx5uDBg2ZwcNBkZmbqOBJGWxQIa+Eeb44kMqR1x48fZwhYlOXYsWNMY8Ei7Aj29fXJ48ePhVdlNPH1YChRUew
h3ldejTNnzjBbtcjzToMaNKRXr17lirQSuFwu09LSYnD/THNzM/Vm165dJjo62lRVVWmfH0xWGcnCozDr1q3TPl4WU1hYaABokES0cyAafufPnxf4jqWi/vTp05ztREjY2pybm4th0XtHvZUUhw4dkj179ugYFqf23BEJZ2vP5xxmPX2zUoH+ImAquJtVggMEZf1klnICyxbp40cWi89Zy7FHjx6pjuWNxKJOPestC35WVpb6zM/PZ9ViEc/GOCxENuEBlezsbBq4mMZz584VVB11cPjwYfpTQlbK7du3hWfne24XL15UWxZxViq+IPTFV+Pdu3ec+xOxbIKigC8AFJ5p06a5+cAuWbJEZs2apY6YNMhWQbYpMD8scUwu1k/uCHP17nGhOHP9M+AzBiryBdIaBWUA+E+UJU50oh56LCcVFRXCnW3ZskWdskjk5eWpTBDcY7l79672Way9fwLDSEaC8RH+loBoR+ohBP1pQhsKbuQjTFA+U7RjEb906ZLKoaGh2lLPsLPlQ0vnqJ+C+0vdMJ83EEvP9+gT7IsfM1uBgSRw24EDBzjRyXNlqeMcvvo8X77sXAxlMtJex60k2blzp3g8ni74SfWCffFzRp0SDPSnB20uLvvw+vXrFXTSpEls9alBa8kKYuknTpyotjx3b9HYhDFuZAzYfu1HI9OpcTgcVTCKRAUqQAj9ecf4bLEoeMl2QGd8qvB/Kih9/vv37zchISG/wMfvXluBzMX8O8GBvpF0BP4Z3I6KMYC2F2Hin1if1VL29i0dw8gaZkXK/k200P4BmoPJHd1nlIYAAAAASUVORK5CYII="
# kubectl/jq live in /usr/local/bin, which is not on bitbar's default PATH.
PATH=/usr/local/bin:$PATH
# Snapshot cluster state once; every menu section below filters these JSON blobs.
GET_ALL=$(kubectl get all --all-namespaces -o json)
GET_NODES=$(kubectl get nodes -o json)
# NOTE(review): KINDS is computed but never referenced below — dead, or used by
# an omitted section; confirm before removing.
KINDS=$(echo ${GET_ALL}|jq "[.items[].kind]|unique|sort|.[]"|tr -d '"')
NAMESPACES=$(echo ${GET_ALL}|jq "[.items[].metadata.namespace]|unique|sort|.[]"|tr -d '"')
# Emit one bitbar menu section: $1 is the visible heading, $2 the Kubernetes
# resource kind to list. "--" / "----" prefixes create bitbar submenu levels;
# each item gets a Copy action that re-invokes this script in "copy" mode.
function menuItem {
  echo "$1"
  ITEMS=$(echo ${GET_ALL}|jq ".items[]|select(.kind == \"$2\")|[.metadata.name]|sort|.[]"|tr -d '"')
  for item in $ITEMS; do
    echo "--$item"
    echo "----Copy|bash=$0 param1=copy param2=$item terminal=false"
    # echo "--Delete $item|alternate=true bash=kubectl"
    # echo "--Delete|alternate=true bash=kubectl param1=delete param2=pod param3=$item"
    # echo "----Describe|bash=kubectl param1=describe param2='--namespace=loadbalancer' param3=pod/$item"
    # echo "----Logs|bash=kubectl param1=logs param2=-f param3=pod/$item"
  done
}
# Menu bar icon (base64 PNG), then the dropdown: "---" is bitbar's section divider.
echo "|image=$KUBERNETES_LOGO"
echo "---"
echo "Namespaces"
for namespace in $NAMESPACES; do
  echo "--$namespace"
done
echo "Nodes"
for node in $(echo ${GET_NODES}| jq ".items[]|[.metadata.name]|sort|.[]"|tr -d '"'); do
  echo "--$node"
done
echo "---"
# Workload sections, one per resource kind.
menuItem "Deployments" "Deployment"
menuItem "ReplicaSets" "ReplicaSet"
menuItem "Replication Controllers" "Replication Controller"
menuItem "Daemon Sets" "Daemon Set"
menuItem "Stateful Sets" "Stateful Set"
menuItem "Jobs" "Job"
menuItem "Pods" "Pod"
echo "---"
# Networking sections.
menuItem "Ingresses" "Ingress"
menuItem "Services" "Service"
echo "---"
echo "🔄 Refresh|refresh=true"
|
from skimage import io, transform, color
from os import remove
class GAN:
    """Stand-in generative model: ``apply`` is an identity pass-through."""

    def apply(self, image):
        # Placeholder for real GAN inference; currently returns the input unchanged.
        return image
def safe_remove(models, file_name):
    """Remove ``file_name`` from ``models`` when present; silently no-op otherwise."""
    if file_name not in models:
        return
    models.remove(file_name)
def rgb2yuv(image):
    # Thin wrapper over skimage's RGB->YUV conversion, kept for call-site brevity.
    return color.rgb2yuv(image)
def process_image_with_gan(file_path, models):
    """Load an image, normalize it to 128x128 YUV, and run it through the GAN.

    Also drops the macOS '.DS_Store' artifact from the model list if present.
    """
    safe_remove(models, '.DS_Store')
    rgb = io.imread(file_path)
    resized = transform.resize(rgb, (128, 128))
    yuv = rgb2yuv(resized)
    gan = GAN()
    return gan.apply(yuv)
|
<gh_stars>10-100
// Wire the dark-mode toggle to persisted state on page load.
// NOTE(review): `body` is captured but never used below — confirm before removing.
const body = document.body;
const onOff = document.querySelector("#isDark")
const Darkbtn = document.querySelector("#DarkBtn")
// Restore checkbox + label from localStorage ("On"/"Off", seeded by load()).
if (load("dark") == "On") Darkbtn.checked = true
else Darkbtn.checked = false
onOff.innerText = load("dark")
Darkbtn.addEventListener("click", changeDark)
// Flip the persisted dark-mode state and keep the checkbox and label in sync.
function changeDark(event){
    const state = load("dark")
    if (state === "Off"){
        onOff.innerText = "On"
        Darkbtn.checked = true
        save("dark", "On")
    }
    else if (state === "On"){
        onOff.innerText = "Off"
        Darkbtn.checked = false
        save("dark", "Off")
    }
}
// Persist a value under the given key. NOTE(review): the parameter name
// `coordsObj` looks copied from another project — only strings are stored here.
function save(item, coordsObj) {
    localStorage.setItem(item, coordsObj);
}
// Read a persisted value; on first run seed the "dark" key with "Off" so
// callers always receive a string.
function load(item) {
    const stored = localStorage.getItem(item);
    if (stored !== null) {
        return stored;
    }
    save("dark", "Off")
    return "Off"
}
|
# Run an instance of a web service.
#
# Run in folder with Dockerfile.
#
# Command line options:
#    $1: port number for container (default: 8090)
#    $2: name for container (default: spase-validate)
#
port=${1:-8090}
name=${2:-spase-validate}
# Set SUDO appropriate for OS (docker needs sudo on unix-likes, not on msys/Windows)
SUDO=""
case "$OSTYPE" in
  solaris*) SUDO="sudo" ;;
  darwin*)  SUDO="sudo" ;;
  linux*)   SUDO="sudo" ;;
  bsd*)     SUDO="sudo" ;;
  msys*)    SUDO="" ;;
  *)        SUDO="" ;;
esac
#
# Build the image if it doesn't exist
# (inspect succeeds when an object named $name already exists; otherwise build)
#
$SUDO docker inspect "$name" > /dev/null 2>&1 || $SUDO docker build -t $name .
# Run container, mapping host $port to the service's port 8080.
# Use option "-p" to set host port map.
$SUDO docker run -p $port:8080 -d --name $name --restart unless-stopped $name
|
<reponame>jiangshaofen/spring-boot-example
package com.example.starter;
//Annotations relevant to Starters:
//@ConditionalOnClass: auto-configure when the given class is present on the classpath.
//@ConditionalOnMissingBean: apply only when that bean does not already exist in the Spring context.
//@ConditionalOnProperty(prefix = "example.service", value = "enabled", havingValue = "true"): apply when example.service.enabled=true in the configuration.
//
//Finally, create a spring.factories file under resources/META-INF/.
//Summary of how a Starter works:
//
//1. At startup, Spring Boot scans the dependency JARs for ones containing a spring.factories file.
//2. It loads the AutoConfigure classes listed in spring.factories.
//3. Based on the @Conditional annotations it performs auto-configuration and registers the beans in the Spring context.
|
package io.opensphere.tracktool.model.persist.v1;
import java.awt.Color;
import java.awt.Font;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import io.opensphere.core.math.Vector2i;
import io.opensphere.core.model.time.TimeSpan;
import io.opensphere.core.units.duration.Duration;
import io.opensphere.core.units.duration.Seconds;
import io.opensphere.core.units.length.Length;
import io.opensphere.core.units.length.Meters;
import io.opensphere.core.util.collections.New;
import io.opensphere.core.util.lang.EqualsHelper;
import io.opensphere.myplaces.util.PlacemarkUtils;
import io.opensphere.tracktool.model.Track;
import io.opensphere.tracktool.model.TrackNode;
/**
 * JAXB-serializable snapshot of a {@link Track}. Only the name, description,
 * node list and the two bubble-visibility flags are persisted; every other
 * Track property (colors, font, units, offset, ...) is reported as a fixed
 * default by the overrides below.
 */
@XmlRootElement(name = "Track")
@XmlAccessorType(XmlAccessType.FIELD)
public class JAXBTrack implements Track
{
    /** The description. */
    @XmlAttribute(name = "description")
    private String myDescription;
    /** The name. */
    @XmlAttribute(name = "name")
    private String myName;
    /** The Node list. */
    @XmlElement(name = "TrackNode")
    private List<JAXBTrackNode> myNodeList;
    /** When true, show the description in the annotation bubble. */
    @XmlAttribute(name = "showDescription")
    private boolean myShowDescription;
    /** When true, show the name in the annotation bubble. */
    @XmlAttribute(name = "showName")
    private boolean myShowName;
    /**
     * Instantiates a new jAXB track. No-arg constructor required by JAXB.
     */
    public JAXBTrack()
    {
    }
    /**
     * Instantiates a new jAXB track, deep-copying the source track's nodes into
     * JAXB-serializable {@link JAXBTrackNode}s.
     *
     * @param track the track
     */
    public JAXBTrack(Track track)
    {
        List<? extends TrackNode> nodeList = track.getNodes();
        myNodeList = nodeList == null || nodeList.isEmpty() ? New.<JAXBTrackNode>list()
                : New.<JAXBTrackNode>list(nodeList.size());
        if (nodeList != null)
        {
            for (TrackNode node : nodeList)
            {
                myNodeList.add(new JAXBTrackNode(node));
            }
        }
        myName = track.getName();
        myDescription = track.getDescription();
        myShowDescription = track.isShowDescription();
        myShowName = track.isShowName();
    }
    // Equality is based on name and node list only, matching hashCode() below.
    @Override
    public boolean equals(Object obj)
    {
        if (this == obj)
        {
            return true;
        }
        if (obj == null || getClass() != obj.getClass())
        {
            return false;
        }
        JAXBTrack other = (JAXBTrack)obj;
        return EqualsHelper.equals(myName, other.myName, myNodeList, other.myNodeList);
    }
    // Presentation defaults below are not persisted; they are fixed values.
    @Override
    public Color getColor()
    {
        return Color.orange;
    }
    @Override
    public String getDescription()
    {
        return myDescription;
    }
    @Override
    public Font getFont()
    {
        return PlacemarkUtils.DEFAULT_FONT;
    }
    // The persisted form has no id.
    @Override
    public String getId()
    {
        return null;
    }
    @Override
    public String getName()
    {
        return myName;
    }
    @Override
    public List<? extends TrackNode> getNodes()
    {
        return myNodeList;
    }
    @Override
    public Vector2i getOffset()
    {
        return new Vector2i(10, 10);
    }
    @Override
    public Color getTextColor()
    {
        return Color.white;
    }
    // Time span is not persisted.
    @Override
    public TimeSpan getTimeSpan()
    {
        return null;
    }
    // Default measurement units for distance/duration display.
    @Override
    public Class<? extends Length> getDistanceUnit()
    {
        return Meters.class;
    }
    @Override
    public Class<? extends Duration> getDurationUnit()
    {
        return Seconds.class;
    }
    // Consistent with equals(): hashes name and node list only.
    @Override
    public int hashCode()
    {
        final int prime = 31;
        int result = 1;
        result = prime * result + (myName == null ? 0 : myName.hashCode());
        result = prime * result + (myNodeList == null ? 0 : myNodeList.hashCode());
        return result;
    }
    // Fixed display-behavior flags; only the two "show" fields below are persisted.
    @Override
    public boolean isAnimate()
    {
        return false;
    }
    @Override
    public boolean isFillBubble()
    {
        return false;
    }
    @Override
    public boolean isShowBubble()
    {
        return true;
    }
    @Override
    public boolean isShowDescription()
    {
        return myShowDescription;
    }
    @Override
    public boolean isShowDistance()
    {
        return true;
    }
    @Override
    public boolean isShowHeading()
    {
        return true;
    }
    /**
     * {@inheritDoc}
     *
     * @see io.opensphere.tracktool.model.Track#isShowVelocity()
     */
    @Override
    public boolean isShowVelocity()
    {
        return true;
    }
    /**
     * {@inheritDoc}
     *
     * @see io.opensphere.tracktool.model.Track#isShowDuration()
     */
    @Override
    public boolean isShowDuration()
    {
        return true;
    }
    @Override
    public boolean isShowName()
    {
        return myShowName;
    }
    /**
     * {@inheritDoc}
     *
     * @see io.opensphere.tracktool.model.Track#isShowFieldTitles()
     */
    @Override
    public boolean isShowFieldTitles()
    {
        return false;
    }
}
|
import React from 'react'
import { compose, withHandlers } from 'recompose'
import { MentionsInput, Mention } from '../../../src'
import { provideExampleValue } from './higher-order'
import defaultStyle from './defaultStyle'
import defaultMentionStyle from './defaultMentionStyle'
import { merge } from '../../../src/utils'
// Example defaults plus an absolutely-positioned, scrollable suggestions list
// that opens upward (anchored 14px above the input).
const style = merge({}, defaultStyle, {
  suggestions: {
    list: {
      maxHeight: 100,
      overflow: 'auto',
      position: 'absolute',
      bottom: 14,
    },
  },
})
// Demonstrates the less common MentionsInput options: a custom markup/display
// transform, an a11y label, and programmatic focus via inputRef.
function Advanced({ value, data, onChange, onBlur, onAdd }) {
  // Ref to the underlying input element so the button below can focus it.
  let inputEl = React.createRef()
  return (
    <div className="advanced">
      <h3>Advanced options</h3>
      <MentionsInput
        value={value}
        onChange={onChange}
        onBlur={onBlur}
        style={style}
        inputRef={inputEl}
        a11ySuggestionsListLabel={"Suggested mentions"}
      >
        <Mention
          markup="{{__id__}}"
          displayTransform={id => `<-- ${id} -->`}
          data={data}
          onAdd={onAdd}
          style={defaultMentionStyle}
        />
      </MentionsInput>
      <button
        onClick={() => {
          inputEl.current.focus()
        }}
      >
        focus programmatically
      </button>
    </div>
  )
}
// Wrap the example with a seeded value and an onBlur handler; the second
// handler argument distinguishes real blurs from suggestion clicks.
export default compose(
  provideExampleValue('Hi {{johndoe}}!'),
  withHandlers({
    onBlur: () => (ev, clickedOnSuggestion) => {
      if (!clickedOnSuggestion) {
        console.log('finished editing')
      }
    },
  })
)(Advanced)
|
<?php
namespace App\Models;
use Illuminate\Database\Eloquent\Model;
class Order extends Model {
    /**
     * The database table backing this model.
     *
     * @var string
     */
    protected $table = 'orders';

    /**
     * Sum quantity * unit_price across every item belonging to this order.
     *
     * @return float
     */
    public function calculateTotalPrice()
    {
        $total = 0;
        foreach ($this->orderItems as $orderItem) {
            $lineTotal = $orderItem->quantity * $orderItem->unit_price;
            $total += $lineTotal;
        }
        return $total;
    }

    /**
     * One-to-many relation to this order's line items.
     *
     * @return \Illuminate\Database\Eloquent\Relations\HasMany
     */
    public function orderItems()
    {
        return $this->hasMany(OrderItem::class, 'order_id', 'id');
    }
}
|
// Copyright 2018, RadiantBlue Technologies, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package app
import (
"encoding/json"
"log"
s "github.com/venicegeo/vzutil-versioning/web/app/structs"
"github.com/venicegeo/vzutil-versioning/web/es"
"github.com/venicegeo/vzutil-versioning/web/es/types"
)
// FireAndForget launches background ("fire and forget") scan work against the
// owning application's worker and persists results to its index.
type FireAndForget struct {
	app *Application
}
// NewFireAndForget returns a FireAndForget bound to the given application.
func NewFireAndForget(app *Application) *FireAndForget {
	ff := &FireAndForget{app: app}
	return ff
}
// FireRequest runs a single scan request in the background. If the worker
// reports an already-existing scan on the 'exists' channel, only that scan's
// ref list is refreshed; otherwise the new scan result is awaited and persisted.
func (ff *FireAndForget) FireRequest(request *SingleRunnerRequest) {
	go func(request *SingleRunnerRequest) {
		exists := make(chan *types.Scan, 1)
		ret := make(chan *types.Scan, 1)
		defer func() {
			close(exists)
			close(ret)
		}()
		ff.app.wrkr.AddTask(request, exists, ret)
		// The worker answers on 'exists' first; nil means no prior scan was found.
		e := <-exists
		if e != nil {
			ff.tryUpdateScan(request.ref, e)
		} else {
			r := <-ret
			if r != nil {
				ff.postScan(r)
			}
		}
	}(request)
}
// FireGit handles a push webhook asynchronously: it finds every project that
// tracks the pushed repository and, for each, runs a scan of the pushed sha/ref
// in its own goroutine, persisting results via postScan.
func (ff *FireAndForget) FireGit(git *s.GitWebhook) {
	go func(git *s.GitWebhook) {
		// fire runs one scan for a single (repository, project) pairing and
		// stores the result when the worker produces one.
		fire := func(git *s.GitWebhook, repo *Repository) {
			ret := make(chan *types.Scan, 1)
			defer close(ret)
			request := &SingleRunnerRequest{
				repository: repo,
				sha:        git.AfterSha,
				ref:        git.Ref,
			}
			ff.app.wrkr.AddTask(request, nil, ret)
			r := <-ret
			if r != nil {
				ff.postScan(r)
			}
		}
		// Fixed misspelled log tag ("RECIEVED" -> "RECEIVED"); update any log
		// grep/alerting that matched the old spelling.
		log.Println("[RECEIVED WEBHOOK]", git.Repository.FullName, git.AfterSha, git.Ref)
		if projects, err := ff.app.rtrvr.GetAllProjectNamesUsingRepository(git.Repository.FullName); err != nil {
			log.Println("FAILED TO FIND PROJECTS USING REPOSITORY FOR WEBHOOK", git.AfterSha)
		} else {
			for _, p := range projects {
				go func(p string) {
					if repo, _, err := ff.app.rtrvr.GetRepository(git.Repository.FullName, p); err != nil {
						log.Println("FAILED TO GET THE REPO INSTANCE UNDER", p)
					} else {
						go fire(git, repo)
					}
				}(p)
			}
		}
	}(git)
}
// tryUpdateScan appends ref to the scan's ref list and re-posts the scan to
// the index. It is a no-op when the scan already carries the ref.
func (ff *FireAndForget) tryUpdateScan(ref string, scan *types.Scan) {
	for _, existing := range scan.Refs {
		if existing == ref {
			// Ref already recorded; nothing to persist.
			return
		}
	}
	scan.Refs = append(scan.Refs, ref)
	if _, err := ff.app.index.PostData(RepositoryEntryType, scan.Sha+"-"+scan.ProjectId, scan); err != nil {
		log.Printf("[ES-WORKER] Unable to update entry %s: %s\n", scan.Sha, err.Error())
		return
	}
	log.Println("[ES-WORKER] Updated", scan.Sha, "for", scan.ProjectId, "with ref", ref)
}
// postScan persists a freshly completed scan and then, for each ref the scan
// covers, diffs it against the most recent earlier scan of the same
// repository/project/ref.
func (ff *FireAndForget) postScan(scan *types.Scan) {
	log.Println("[ES-WORKER] Starting work on", scan.Sha, "for", scan.ProjectId)
	var err error
	// Map of ref -> newest earlier scan to diff against after persisting.
	testAgainstEntries := make(map[string]*types.Scan, len(scan.Refs))
	for _, ref := range scan.Refs {
		// Match same repo, ref, and project, strictly older than this scan.
		boolq := es.NewBool().
			SetMust(es.NewBoolQ(
				es.NewTerm(types.Scan_FullnameField, scan.RepoFullname),
				es.NewTerm(types.Scan_RefsField, ref),
				es.NewTerm(types.Scan_ProjectIdField, scan.ProjectId),
				es.NewRange(types.Scan_TimestampField, "lt", scan.Timestamp)))
		// Newest-first, single hit: the immediate predecessor for this ref.
		q := map[string]interface{}{
			"query": map[string]interface{}{"bool": boolq},
			"sort": map[string]interface{}{
				types.Scan_TimestampField: "desc",
			},
			"size": 1,
		}
		// NOTE: ":=" shadows the outer err inside the loop; lookup failures
		// are deliberately swallowed — a ref simply gets no diff.
		result, err := ff.app.index.SearchByJSON(RepositoryEntryType, q)
		if err == nil && result.Hits.TotalHits == 1 {
			entry := new(types.Scan)
			if err = json.Unmarshal(*result.Hits.Hits[0].Source, entry); err == nil {
				testAgainstEntries[ref] = entry
			}
		}
	}
	// Persist the new scan under the composite id "<sha>-<projectId>".
	resp, err := ff.app.index.PostData(RepositoryEntryType, scan.Sha+"-"+scan.ProjectId, scan)
	if err != nil {
		log.Printf("[ES-WORKER] Unable to create entry %s: %s\n", scan.Sha, err.Error())
		return
	} else if !resp.Created {
		log.Printf("[ES-WORKER] Unable to create entry %s. No error\n", scan.Sha)
		return
	}
	log.Println("[ES-WORKER] Finished work on", scan.RepoFullname, scan.Sha)
	// Diffs run concurrently; they only log on failure.
	for ref, old := range testAgainstEntries {
		go ff.runDiff(scan.RepoFullname, scan.ProjectId, ref, old, scan)
	}
}
// runDiff delegates to the diff manager to compare two scans of the same
// ref, logging (but otherwise ignoring) any failure.
// Receiver renamed from "w" to "ff" for consistency with the other methods.
func (ff *FireAndForget) runDiff(repoName, projectName, ref string, oldEntry, newEntry *types.Scan) {
	if _, err := ff.app.diffMan.webhookCompare(repoName, projectName, ref, oldEntry, newEntry); err != nil {
		log.Println("[ES-WORKER] Error creating diff:", err.Error())
	}
}
|
for (int i = 1; i <= 50; i++) {
System.out.println(i);
}
|
/*
* Copyright The Stargate Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.stargate.it.http.graphql.cqlfirst;
import static org.assertj.core.api.Assertions.assertThat;
import com.apollographql.apollo.ApolloCall;
import com.apollographql.apollo.ApolloClient;
import com.apollographql.apollo.ApolloMutationCall;
import com.apollographql.apollo.api.CustomTypeAdapter;
import com.apollographql.apollo.api.CustomTypeValue;
import com.apollographql.apollo.api.Error;
import com.apollographql.apollo.api.Mutation;
import com.apollographql.apollo.api.Operation;
import com.apollographql.apollo.api.Response;
import com.apollographql.apollo.exception.ApolloException;
import com.datastax.oss.driver.api.core.CqlIdentifier;
import com.datastax.oss.driver.api.core.CqlSession;
import com.example.graphql.client.betterbotz.products.DeleteProductsMutation;
import com.example.graphql.client.betterbotz.products.GetProductsWithFilterQuery;
import com.example.graphql.client.betterbotz.type.CustomType;
import com.example.graphql.client.betterbotz.type.ProductsFilterInput;
import com.example.graphql.client.betterbotz.type.ProductsInput;
import com.example.graphql.client.betterbotz.type.QueryConsistency;
import com.example.graphql.client.betterbotz.type.QueryOptions;
import com.example.graphql.client.betterbotz.type.UuidFilterInput;
import io.stargate.it.driver.TestKeyspace;
import io.stargate.it.http.RestUtils;
import io.stargate.it.storage.StargateConnectionInfo;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.Duration;
import java.time.Instant;
import java.time.ZoneId;
import java.util.List;
import java.util.Optional;
import java.util.TimeZone;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;
import okhttp3.OkHttpClient;
import org.jetbrains.annotations.NotNull;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Base class for GraphQL tests that use the apollo-runtime client library.
*
* <p>Note that we are trying to limit usage of that library in our tests. Do not subclass this in
* new tests; instead, use {@link CqlFirstClient} (see {@link SelectTest}, {@link InsertTest}, etc).
*
* <p>Eventually, {@link ApolloTest} should be the only subclass, and we might merge this into it.
*/
public class ApolloTestBase extends BetterbotzTestBase {
  // Fixed: was LoggerFactory.getLogger(ApolloTest.class), which mislabeled
  // log lines emitted by other subclasses of this base class.
  protected static final Logger logger = LoggerFactory.getLogger(ApolloTestBase.class);
  protected static CqlSession session;
  protected static String authToken;
  protected static StargateConnectionInfo stargate;
  protected static String keyspace;

  /** Captures connection info, the CQL session, and an auth token for all tests. */
  @BeforeAll
  public static void setup(
      StargateConnectionInfo stargateInfo,
      CqlSession session,
      @TestKeyspace CqlIdentifier keyspaceId)
      throws Exception {
    stargate = stargateInfo;
    // Fixed: was "ApolloTest.session = session" — the static field lives on
    // this base class, not on the ApolloTest subclass.
    ApolloTestBase.session = session;
    keyspace = keyspaceId.asInternal();
    authToken = RestUtils.getAuthToken(stargate.seedAddress());
  }

  /** Deletes every product created by the test, so each test starts from a clean table. */
  @AfterEach
  public void cleanUpProducts() {
    ApolloClient client = getApolloClient("/graphql/" + keyspace);
    getProducts(client, 100, Optional.empty())
        .flatMap(GetProductsWithFilterQuery.Products::getValues)
        .ifPresent(
            products ->
                products.forEach(p -> p.getId().ifPresent(id -> cleanupProduct(client, id))));
  }

  /**
   * Fetches a page of products with an empty filter.
   *
   * @param pageSize maximum number of rows to return; the result is asserted to respect it
   * @param pageState paging state from a previous call, if any
   */
  protected static Optional<GetProductsWithFilterQuery.Products> getProducts(
      ApolloClient client, int pageSize, Optional<String> pageState) {
    ProductsFilterInput filterInput = ProductsFilterInput.builder().build();
    QueryOptions.Builder optionsBuilder =
        QueryOptions.builder().pageSize(pageSize).consistency(QueryConsistency.LOCAL_QUORUM);
    pageState.ifPresent(optionsBuilder::pageState);
    QueryOptions options = optionsBuilder.build();
    GetProductsWithFilterQuery query =
        GetProductsWithFilterQuery.builder().filter(filterInput).options(options).build();
    GetProductsWithFilterQuery.Data result = getObservable(client.query(query));
    assertThat(result.getProducts())
        .hasValueSatisfying(
            products -> {
              assertThat(products.getValues())
                  .hasValueSatisfying(
                      values -> {
                        assertThat(values).hasSizeLessThanOrEqualTo(pageSize);
                      });
            });
    return result.getProducts();
  }

  /** Deletes a single product by id (used by {@link #cleanUpProducts()}). */
  private DeleteProductsMutation.Data cleanupProduct(ApolloClient client, Object productId) {
    DeleteProductsMutation mutation =
        DeleteProductsMutation.builder()
            .value(ProductsInput.builder().id(productId).build())
            .build();
    DeleteProductsMutation.Data result = getObservable(client.mutate(mutation));
    return result;
  }

  /** Fetches a product by id and returns its first value row. Assumes the product exists. */
  protected GetProductsWithFilterQuery.Value getProduct(ApolloClient client, String productId) {
    List<GetProductsWithFilterQuery.Value> valuesList = getProductValues(client, productId);
    return valuesList.get(0);
  }

  /** Fetches all value rows for a product id, asserting that the response is well-formed. */
  protected List<GetProductsWithFilterQuery.Value> getProductValues(
      ApolloClient client, String productId) {
    ProductsFilterInput filterInput =
        ProductsFilterInput.builder().id(UuidFilterInput.builder().eq(productId).build()).build();
    QueryOptions options =
        QueryOptions.builder().consistency(QueryConsistency.LOCAL_QUORUM).build();
    GetProductsWithFilterQuery query =
        GetProductsWithFilterQuery.builder().filter(filterInput).options(options).build();
    GetProductsWithFilterQuery.Data result = getObservable(client.query(query));
    assertThat(result.getProducts()).isPresent();
    GetProductsWithFilterQuery.Products products = result.getProducts().get();
    assertThat(products.getValues()).isPresent();
    return products.getValues().get();
  }

  /**
   * Executes an Apollo call synchronously, unwrapping RuntimeExceptions so test
   * assertions surface cleanly. The call is cancelled on any exit path.
   */
  protected static <T> T getObservable(ApolloCall<Optional<T>> observable) {
    CompletableFuture<T> future = new CompletableFuture<>();
    observable.enqueue(queryCallback(future));
    try {
      return future.get();
    } catch (ExecutionException e) {
      // Unwrap exception
      if (e.getCause() instanceof RuntimeException) {
        throw (RuntimeException) e.getCause();
      }
      throw new RuntimeException("Unexpected exception", e);
    } catch (Exception e) {
      throw new RuntimeException("Operation could not be completed", e);
    } finally {
      observable.cancel();
    }
  }

  @SuppressWarnings("unchecked")
  private static <D extends Operation.Data, T, V extends Operation.Variables> D mutateAndGet(
      ApolloClient client, Mutation<D, T, V> mutation) {
    return getObservable((ApolloMutationCall<Optional<D>>) client.mutate(mutation));
  }

  /** HTTP client with generous timeouts that attaches the auth token to every request. */
  protected OkHttpClient getHttpClient() {
    return new OkHttpClient.Builder()
        .connectTimeout(Duration.ofMinutes(3))
        .callTimeout(Duration.ofMinutes(3))
        .readTimeout(Duration.ofMinutes(3))
        .writeTimeout(Duration.ofMinutes(3))
        .addInterceptor(
            chain ->
                chain.proceed(
                    chain.request().newBuilder().addHeader("X-Cassandra-Token", authToken).build()))
        .build();
  }

  /** Apollo client for the given GraphQL path, with an Instant adapter for CQL timestamps. */
  protected ApolloClient getApolloClient(String path) {
    return ApolloClient.builder()
        .serverUrl(String.format("http://%s:8080%s", stargate.seedAddress(), path))
        .okHttpClient(getHttpClient())
        .addCustomTypeAdapter(
            CustomType.TIMESTAMP,
            new CustomTypeAdapter<Instant>() {
              @NotNull
              @Override
              public CustomTypeValue<?> encode(Instant instant) {
                return new CustomTypeValue.GraphQLString(instant.toString());
              }

              @Override
              public Instant decode(@NotNull CustomTypeValue<?> customTypeValue) {
                return parseInstant(customTypeValue.value.toString());
              }
            })
        .build();
  }

  /**
   * Bridges an Apollo callback onto a CompletableFuture, failing the future when
   * the response carries GraphQL errors or empty data.
   */
  protected static <U> ApolloCall.Callback<Optional<U>> queryCallback(CompletableFuture<U> future) {
    return new ApolloCall.Callback<Optional<U>>() {
      @Override
      public void onResponse(@NotNull Response<Optional<U>> response) {
        if (response.getErrors() != null && response.getErrors().size() > 0) {
          logger.info(
              "GraphQL error found in test: {}",
              response.getErrors().stream().map(Error::getMessage).collect(Collectors.toList()));
          future.completeExceptionally(
              new GraphQLTestException("GraphQL error response", response.getErrors()));
          return;
        }
        if (response.getData().isPresent()) {
          future.complete(response.getData().get());
          return;
        }
        future.completeExceptionally(
            new IllegalStateException("Unexpected empty data and errors properties"));
      }

      @Override
      public void onFailure(@NotNull ApolloException e) {
        future.completeExceptionally(e);
      }
    };
  }

  /** Exception carrying the GraphQL errors of a failed response. */
  protected static class GraphQLTestException extends RuntimeException {
    protected final List<Error> errors;

    GraphQLTestException(String message, List<Error> errors) {
      super(message);
      this.errors = errors;
    }
  }

  private static Instant parseInstant(String source) {
    try {
      return TIMESTAMP_FORMAT.get().parse(source).toInstant();
    } catch (ParseException e) {
      throw new AssertionError("Unexpected error while parsing timestamp in response", e);
    }
  }

  // SimpleDateFormat is not thread-safe, hence the ThreadLocal.
  private static final ThreadLocal<SimpleDateFormat> TIMESTAMP_FORMAT =
      ThreadLocal.withInitial(
          () -> {
            SimpleDateFormat parser = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSXXX");
            parser.setTimeZone(TimeZone.getTimeZone(ZoneId.systemDefault()));
            return parser;
          });
}
|
<reponame>LukasVolgger/delivery-service<gh_stars>0
/**
 * Returns the HTML for the country/language picker pop-up.
 * The entries are static; the bold entries (Österreich / Deutsch) mark the
 * current selection. Clicking the backdrop closes the pop-up.
 */
function generateLanguageAndCountrySectionHTML() {
    return `
        <div class="pop-up-container-language-country hide-mobile" onclick="closePopUp()">
            <div id="language-country-container" class="language-country-container" onclick="event.stopPropagation()">
                <div class="countries-container">
                    <h3 class="languague-country-heading">Land</h3>
                    <div class="countries">
                        <div class="country">
                            <img src="./img/icons/countries/austria.svg" alt="Österreich">
                            <span class="language-country-text"><b>Österreich</b></span>
                        </div>
                        <div class="country">
                            <img src="./img/icons/countries/australia.svg" alt="Australien">
                            <span class="language-country-text">Australien</span>
                        </div>
                        <div class="country">
                            <img src="./img/icons/countries/belgium.svg" alt="Belgien">
                            <span class="language-country-text">Belgien</span>
                        </div>
                        <div class="country">
                            <img src="./img/icons/countries/bulgaria.svg" alt="Bulgarien">
                            <span class="language-country-text">Bulgarien</span>
                        </div>
                        <div class="country">
                            <img src="./img/icons/countries/denmark.svg" alt="Dänemark">
                            <span class="language-country-text">Dänemark</span>
                        </div>
                        <div class="country">
                            <img src="./img/icons/countries/germany.svg" alt="Deutschland">
                            <span class="language-country-text">Deutschland</span>
                        </div>
                        <div class="country">
                            <img src="./img/icons/countries/france.svg" alt="Frankreich">
                            <span class="language-country-text">Frankreich</span>
                        </div>
                        <div class="country">
                            <img src="./img/icons/countries/israel.svg" alt="Israel">
                            <span class="language-country-text">Israel</span>
                        </div>
                        <div class="country">
                            <img src="./img/icons/countries/canada.svg" alt="Kanada">
                            <span class="language-country-text">Kanada</span>
                        </div>
                        <div class="country">
                            <img src="./img/icons/countries/new-zealand.svg" alt="Neuseeland">
                            <span class="language-country-text">Neuseeland</span>
                        </div>
                        <div class="country">
                            <img src="./img/icons/countries/norway.svg" alt="Norwegen">
                            <span class="language-country-text">Norwegen</span>
                        </div>
                        <div class="country">
                            <img src="./img/icons/countries/portugal.svg" alt="Portugal">
                            <span class="language-country-text">Portugal</span>
                        </div>
                        <div class="country">
                            <img src="./img/icons/countries/romania.svg" alt="Rumänien">
                            <span class="language-country-text">Rumänien</span>
                        </div>
                        <div class="country">
                            <img src="./img/icons/countries/spain.svg" alt="Spanien">
                            <span class="language-country-text">Spanien</span>
                        </div>
                        <div class="country">
                            <img src="./img/icons/countries/united-kingdom.svg" alt="Vereinigtes Königreich">
                            <span class="language-country-text">Vereinigtes Königreich</span>
                        </div>
                    </div>
                </div>
                <div class="languages-container">
                    <h3 class="languague-country-heading">Sprache</h3>
                    <div class="languages">
                        <span class="language-country-text"><b>Deutsch</b></span>
                        <span class="language-country-text">English</span>
                        <span class="language-country-text">Français</span>
                        <span class="language-country-text">Nederlands</span>
                        <span class="language-country-text">Polski</span>
                        <span class="language-country-text">Norsk</span>
                        <span class="language-country-text">Dansk</span>
                        <span class="language-country-text">Português</span>
                        <span class="language-country-text">Română</span>
                        <span class="language-country-text">Български</span>
                        <span class="language-country-text">Italiano</span>
                    </div>
                </div>
            </div>
        </div>
    `;
}
/**
 * Returns the HTML for the account/login pop-up (login and sign-up buttons).
 * Clicking the backdrop or the close button closes the pop-up.
 */
function generateLoginSectionHTML() {
    return `
        <div class="pop-up-container" onclick="closePopUp()">
            <div class="login-section" onclick="event.stopPropagation()">
                <div class="login-section-header">
                    <div class="login-section-header-subcontainer">
                        <div class="login-img-container">
                            <img src="./img/icons/smile.svg" alt="Smile">
                        </div>
                        <div>
                            <h3 class="login-section-heading">Mein Account</h3>
                            <span class="login-section-text">Persönliche Informationen anzeigen</span>
                        </div>
                    </div>
                    <button class="close-btn" onclick="closePopUp()">
                        <img src="./img/icons/close_white.svg" alt="Schließen">
                    </button>
                </div>
                <div>
                </div>
                <div>
                </div>
                <div class="login-section-body">
                    <button class="login-btn btns">Anmelden</button>
                    <button class="create-acc-btn btns">Account erstellen</button>
                </div>
            </div>
        </div>
    `;
}
/**
 * Returns the HTML for menu item `i`, reading name/description/price from the
 * global `menuItems` array.
 *
 * NOTE(review): the `${itemCounter[i] = 1}` interpolation is an assignment —
 * rendering a menu item resets its quantity counter to 1 as a side effect.
 * Presumably intentional (fresh counter per render), but worth confirming.
 */
function generateMenuItemsHTML(i) {
    return `
        <div id="menu-item-${i}" class="menu-items" onclick="openMenuItem(${i})">
            <div class="menu-item-header menu-items-border-spacing">
                <h3 class="menu-item-title">${menuItems[i].name}</h3>
                <a class="product-information" onclick="showAllergenic(${i})">Produktinfo</a>
            </div>
            <div class="add-to-basket-container">
                <img src="./img/icons/plus.svg" alt="Plus" id="menu-item-icon-${i}" class="menu-item-icon">
            </div>
            <div class="menu-item-body menu-items-border-spacing">
                <span class="menu-item-description">${menuItems[i].description}</span>
                <h3 class="menu-item-price">${convertPrice(menuItems[i].price)} €</h3>
            </div>
            <div id="menu-item-basket-section-${i}" class="d-none menu-item-basket-section menu-items-border-spacing" onclick="event.stopPropagation()">
                <div class="menu-item-add-to-basket-counter">
                    <button id="minus-btn-${i}" class="minus-btn btns" onclick="itemCounterMinus(${i})"><img src="./img/icons/minus.svg" alt="Minus" id="minus-btn-img-${i}" class="menu-item-icon-img"></button>
                    <div id="menu-item-amount-${i}" class="menu-item-amount">${itemCounter[i] = 1}</div>
                    <button class="plus-btn btns" onclick="itemCounterPlus(${i})"><img src="./img/icons/plus.svg" alt="Plus" class="menu-item-icon-img"></button>
                </div>
                <button id="add-to-basket-btn-${i}" class="add-to-basket-btn btns" onclick="addToBasket(${i})"></button>
            </div>
        </div>
    `;
}
/**
 * Appends the "empty basket" placeholder into #basket-container.
 * Uses insertAdjacentHTML instead of `innerHTML +=`: the latter re-serializes
 * and re-parses all existing children, destroying their event listeners.
 */
function generateEmptyBasketHTML() {
    let basket = document.getElementById('basket-container');
    basket.insertAdjacentHTML('beforeend', `
        <div class="empty-basket-container">
            <img src="./img/icons/shopping_bag.svg" alt="Warenkorb" class="shopping-bag-icon">
            <h3>Fülle deinen Warenkorb</h3>
            <span class="empty-basket-text">Füge einige leckere Gerichte aus der Speisekarte hinzu und bestelle dein Essen.</span>
        </div>
    `);
}
/**
 * Returns the HTML for basket entry `i`, reading amount/name/total/annotation
 * from the global `menuItemsInBasket` array. The annotation output and
 * annotation editor sections start hidden (d-none).
 */
function generateBasketItemsHTML(i) {
    return `
        <div class="basket-item-container">
            <div class="basket-item-header">
                <div class="basket-item-name-and-amount">
                    <span><b id="item-amount-in-basket-${i}">${menuItemsInBasket[i].amount}</b></span>
                    <span><b>${menuItemsInBasket[i].name}</b></span>
                </div>
                <span id="item-total-in-basket-${i}">${convertPrice(menuItemsInBasket[i].total) + ' €'}</span>
            </div>
            <div id="basket-item-body-${i}" class="basket-item-body">
                <div class="basket-item-body-subcontainer">
                    <div id="annotation-btn-${i}"></div>
                    <div class="basket-item-counter-btns">
                        <button class="minus-btn btns" onclick="decreaseItemInBasket(${i})"><img src="./img/icons/minus.svg" alt="Minus" class="menu-item-icon-img"></button>
                        <button class="plus-btn btns" onclick="increaseItemInBasket(${i})"><img src="./img/icons/plus.svg" alt="Plus" class="menu-item-icon-img"></button>
                    </div>
                </div>
            </div>
            <span id="annotation-output-${i}" class="annotation-output d-none">${menuItemsInBasket[i].annotation}</span>
            <div id="annotation-section-${i}" class="annotation-section d-none"></div>
        </div>
    `;
}
/**
 * Returns the HTML for the annotation editor of basket item `item`:
 * a textarea limited to 160 characters with a live character countdown.
 */
function generateAnnotationHTML(item) {
    return `
        <div id="annotation-container-${item}" class="annotation-container">
            <textarea onkeyup="textCounter(this,'counter-${item}', 160);" id="annotation-input-${item}" class="annotation-input"></textarea>
            <div class="annotation-counter">
                <span id="counter-${item}" class="counter-number">160</span><span class="counter-number">/160</span>
            </div>
            <div id="annotation-btns-${item}" class="annotation-control"></div>
        </div>
    `;
}
/**
 * Renders the "add annotation" button into the annotation slot of basket
 * item `item`. The assignment replaces the slot's content, so the previous
 * redundant `innerHTML = ''` clear has been removed.
 */
function generateAddAnnotation(item) {
    let container = document.getElementById(`annotation-btn-${item}`);
    container.innerHTML = `
        <button class="annotation-btn btns" onclick="openAnnotation(${item})">Anmerkung hinzufügen</button>
    `;
}
/**
 * Renders the "edit annotation" button into the annotation slot of basket
 * item `item`. The assignment replaces the slot's content, so the previous
 * redundant `innerHTML = ''` clear has been removed.
 */
function generateEditAddAnnotation(item) {
    let container = document.getElementById(`annotation-btn-${item}`);
    container.innerHTML = `
        <button class="annotation-btn btns" onclick="editAnnotation(${item})">Anmerkung bearbeiten</button>
    `;
}
/**
 * Renders the cancel/add control buttons of the annotation editor for basket
 * item `item`. The assignment replaces the slot's content, so the previous
 * redundant `innerHTML = ''` clear has been removed.
 */
function generateAddAnnotationBtns(item) {
    let container = document.getElementById(`annotation-btns-${item}`);
    container.innerHTML = `
        <button class="annotation-control-btn btns" onclick="deleteAnnotation(${item})">Abbrechen</button><button class="annotation-control-btn btns" onclick="addAnnotation(${item})">Hinzufügen</button>
    `;
}
/**
 * Renders the delete/save control buttons of the annotation editor for basket
 * item `item`. The assignment replaces the slot's content, so the previous
 * redundant `innerHTML = ''` clear has been removed.
 */
function generateEditAnnotationBtns(item) {
    let container = document.getElementById(`annotation-btns-${item}`);
    container.innerHTML = `
        <button class="annotation-control-btn btns" onclick="deleteAnnotation(${item})">Löschen</button><button class="annotation-control-btn btns" onclick="addAnnotation(${item})">Speichern</button>
    `;
}
/**
 * Returns the HTML for the basket totals table, reading the globals
 * `basketSubTotal`, `deliveryCosts`, and `basketTotal`.
 */
function generateBasketSummaryHTML() {
    return `
        <div class="basket-summary">
            <table>
                <tr>
                    <td>Zwischensumme</td>
                    <td>${convertPrice(basketSubTotal) + ' €'}</td>
                </tr>
                <tr>
                    <td>Lieferkosten</td>
                    <td>${convertPrice(deliveryCosts) + ' €'}</td>
                </tr>
                <tr>
                    <td><b>Gesamt</b></td>
                    <td><b>${convertPrice(basketTotal) + ' €'}</b></td>
                </tr>
            </table>
        </div>
    `;
}
/**
 * Returns the HTML shown when the basket subtotal is below the minimum order
 * value, including the amount still missing.
 * NOTE(review): function name has a typo ("Reache"); kept as-is because
 * callers elsewhere reference it by this name.
 */
function generateMinOrderValueNotReacheHTML() {
    return `
        <div class="min-order-value-not-reached">
            <span class="min-order-value-text">Benötigter Betrag, um den<br> Mindestbestellwert zu erreichen</span>
            <span class="min-order-value">${convertPrice(minimumOrderValue - basketSubTotal) + ' €'}</span>
        </div>
        <div class="min-order-value-not-reached-sub-text">
            Leider kannst du noch nicht bestellen. Japanisches Restaurant liefert erst ab einem Mindestbestellwert von ${convertPrice(minimumOrderValue) + ' €'} (exkl. Lieferkosten).
        </div>
    `;
}
/**
 * Returns the HTML for the allergen information pop-up.
 * The #allergenic-list <ul> is left empty here; presumably it is filled in
 * later by the caller (e.g. showAllergenic) — confirm against call sites.
 */
function generateAllergenicHTML() {
    return `
        <div id="allergenic-container" class="pop-up-container" onclick="closePopUp()">
            <div class="allergenic" onclick="event.stopPropagation()">
                <div class="pop-up-header">
                    <h3>Weitere Produktinformationen</h3>
                    <button class="close-btn" onclick="closePopUp()">
                        <img src="./img/icons/close_black.svg" alt="Schließen">
                    </button>
                </div>
                <div class="allergenic-body">
                    <h3>Allergene</h3>
                    <ul id="allergenic-list">
                    </ul>
                    <span class="allergenic-text">Wir halten Dich stets zu relevanten Informationen über Essen auf dem Laufenden, die wir von dem Restaurant bezüglich ihrer Speisekarten erhalten. Es kann jedoch vorkommen, dass die angezeigten Informationen unvollständig sind bzw. automatisch generiert und/oder von den Restaurants noch nicht auf Korrektheit überprüft wurden. Bitte wende Dich an unseren Kundenservice, wenn Allergien oder Intoleranzen vorliegen oder Du Fragen zu bestimmten Speisen auf der Karte hast.</span>
                </div>
            </div>
        </div>
    `;
}
/**
 * Returns the HTML for the "about the restaurant" pop-up: embedded map,
 * delivery hours, delivery costs, payment methods, and imprint.
 * All content is static.
 */
function generateRestaurantInfoHTML() {
    return `
        <div id="restaurant-info-container" class="pop-up-container" onclick="closePopUp()">
            <div class="restaurant-info" onclick="event.stopPropagation()">
                <div class="pop-up-header">
                    <h3>Über das Restaurant</h3>
                    <button class="close-btn" onclick="closePopUp()">
                        <img src="./img/icons/close_black.svg" alt="Schließen">
                    </button>
                </div>
                <div class="restaurant-info-body">
                    <iframe src="https://www.google.com/maps/embed?pb=!1m18!1m12!1m3!1d1298.492968466941!2d14.286479291670172!3d48.305697215691204!2m3!1f0!2f0!3f0!3m2!1i1024!2i768!4f13.1!3m3!1m2!1s0x47739719dfa50243%3A0xeecd03e396b6c102!2sHauptplatz!5e0!3m2!1sde!2sat!4v1644690648793!5m2!1sde!2sat"
                        class="google-maps">
                    </iframe>
                    <div class="restaurant-info-heading">
                        <img src="./img/icons/time_left.svg" alt="Lieferzeiten">
                        <h3>Lieferzeiten</h3>
                    </div>
                    <div class="restaurant-info-content">
                        <table>
                            <tr>
                                <td>Montag</td>
                                <td>11:00 - 22:15</td>
                            </tr>
                            <tr>
                                <td>Dienstag</td>
                                <td>11:00 - 22:15</td>
                            </tr>
                            <tr>
                                <td>Mittwoch</td>
                                <td>11:00 - 22:15</td>
                            </tr>
                            <tr>
                                <td>Donnerstag</td>
                                <td>11:00 - 22:15</td>
                            </tr>
                            <tr>
                                <td>Freitag</td>
                                <td>11:00 - 22:15</td>
                            </tr>
                            <tr>
                                <td>Samstag</td>
                                <td>11:00 - 22:15</td>
                            </tr>
                            <tr>
                                <td>Sonntag</td>
                                <td>11:00 - 22:15</td>
                            </tr>
                        </table>
                    </div>
                    <div class="restaurant-info-heading">
                        <img src="./img/icons/delivery_motorbike.svg" alt="Lieferung Motorrad">
                        <h3>Lieferkosten</h3>
                    </div>
                    <div class="restaurant-info-content">
                        <table>
                            <tr>
                                <td>Mindestbestellwert</td>
                                <td>15,00 €</td>
                            </tr>
                            <tr>
                                <td>Lieferkosten</td>
                                <td>Gratis</td>
                            </tr>
                        </table>
                    </div>
                    <div class="restaurant-info-heading">
                        <img src="./img/icons/payment.svg" alt="Bezahlung">
                        <h3>Bezahlmethoden</h3>
                    </div>
                    <div class="restaurant-info-content">
                        <div class="payment-method"><img src="./img/icons/paypal.svg" alt="PayPal" title="PayPal"></div>
                        <div class="payment-method"><img src="./img/icons/credit_card.svg" alt="Credit Card" title="Kreditkarte"></div>
                        <div class="payment-method"><img src="./img/icons/money.svg" alt="Bargeld" title="Bargeld"></div>
                        <div class="payment-method"><img src="./img/icons/coupon.svg" alt="Gutschein" title="Gutschein"></div>
                    </div>
                    <div class="restaurant-info-heading">
                        <img src="./img/icons/building.svg" alt="Impressum">
                        <h3>Impressum</h3>
                    </div>
                    <div class="restaurant-info-content bottom-spacing">
                        Japanisches Restaurant<br> Straße Hausnummer<br> Postleitzahl Ort<br> Land
                    </div>
                </div>
            </div>
        </div>
    `;
}
/**
 * Returns the HTML for the order-confirmation pop-up shown after checkout.
 */
function generateOrderSuccessfulHTML() {
    return `
        <div id="order-successful-container" class="pop-up-container" onclick="closePopUp()">
            <div class="order-successful" onclick="event.stopPropagation()">
                <div class="pop-up-header">
                    <h3>Vielen Dank für deine Bestellung!</h3>
                    <button class="close-btn" onclick="closePopUp()">
                        <img src="./img/icons/close_black.svg" alt="Schließen">
                    </button>
                </div>
                <div class="order-successful-body">
                    <h3>Deine Bestellung wird in ungefähr 60 Minuten bei dir zu Hause ankommen!</h3>
                    <img src="./img/icons/smile.svg" alt="Smile">
                </div>
            </div>
        </div>
    `;
}
|
#!/bin/bash
# Mounts the /GVFS.FT test root, loads the ProjFS kext, and runs the full
# GVFS functional test suite. Optional $1 selects the build configuration
# (defaults to Debug); $2 is passed through to the test runner.
CONFIGURATION=$1
if [ -z "$CONFIGURATION" ]; then
  CONFIGURATION=Debug
fi
# NOTE(review): CONFIGURATION is set but not referenced below — presumably
# consumed by a publish step that once lived here; confirm before removing.
SCRIPTDIR=$(dirname "${BASH_SOURCE[0]}")
SRCDIR=$SCRIPTDIR/../..
ROOTDIR=$SRCDIR/..
PUBLISHDIR=$ROOTDIR/Publish
# The functional tests need a mount root owned by the invoking user.
sudo mkdir /GVFS.FT
sudo chown "$USER" /GVFS.FT
"$SRCDIR"/ProjFS.Mac/Scripts/LoadPrjFSKext.sh
# $2 intentionally left unquoted: when absent it must expand to no argument.
"$PUBLISHDIR"/GVFS.FunctionalTests --full-suite $2
|
#!/bin/bash
# Container entrypoint: start PHP-FPM in the foreground.
# The composer step below is intentionally disabled; re-enable to install
# dependencies at container start.
# composer install -o --working-dir="$WORKDIR"/application
php-fpm
|
<reponame>artprojectteam/lsc
/**
 * Convert Stylus definition data authored in YAML into JSON.
 */
import config from 'config'
import { relativePath } from '../modules/getPath'
import MultipleYamlBuild from '../modules/MultipleYamlBuild'
// Source and output both live under the Stylus _assets directory.
const stylusDir = [config.get('input.root'), config.get('input.files.stylus'), '_assets']
const outputPath = relativePath([...stylusDir, 'const'])
const operation = new MultipleYamlBuild('stylusConst', outputPath, [...stylusDir, 'data'])
operation.reset() // Remove previously generated JSON first
operation.run(false)
|
<gh_stars>0
/**
 * Deletes the worksheet named `name` (if it exists) and adds a fresh empty
 * sheet with the same name, returning the new sheet.
 */
export default function recreateSheet(context: Excel.RequestContext, name: string) {
  const worksheets = context.workbook.worksheets;
  // getItemOrNullObject avoids a thrown error when the sheet does not exist yet.
  const stale = worksheets.getItemOrNullObject(name);
  stale.delete();
  return worksheets.add(name);
}
|
import random
def get_user_choice():
    """Prompt until the user enters rock, paper, or scissors; return it lowercased."""
    valid = ('rock', 'paper', 'scissors')
    while True:
        choice = input("Enter your choice (rock, paper, or scissors): ").lower()
        if choice in valid:
            return choice
        print("Invalid choice. Please enter rock, paper, or scissors.")
def get_computer_choice():
    """Return a uniformly random move for the computer."""
    moves = ('rock', 'paper', 'scissors')
    return random.choice(moves)
def determine_winner(user_choice, computer_choice):
    """Return the round outcome string for the two moves."""
    if user_choice == computer_choice:
        return "It's a tie!"
    # Each move maps to the move it defeats.
    beats = {'rock': 'scissors', 'paper': 'rock', 'scissors': 'paper'}
    if beats.get(user_choice) == computer_choice:
        return "You win!"
    return "Computer wins!"
if __name__ == "__main__":
user_choice = get_user_choice()
computer_choice = get_computer_choice()
result = determine_winner(user_choice, computer_choice)
print(f"Your choice: {user_choice}")
print(f"Computer's choice: {computer_choice}")
print(result)
|
<gh_stars>1-10
import functools
import socket
def read_socket_bytes(s):
    """Yield the bytes of ``s`` one integer at a time via ``s.read(1)``.

    Stops at EOF (an empty read) and silently stops on a socket timeout.
    """
    try:
        while True:
            chunk = s.read(1)
            if chunk == b"":
                return
            yield ord(chunk)
    except socket.timeout:
        pass
|
<gh_stars>1-10
/**
* Copyright 2014 isandlaTech
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.psem2m.isolates.ui;
import java.util.ArrayList;
import java.util.List;
import javax.swing.event.TreeModelListener;
import javax.swing.tree.TreeModel;
import javax.swing.tree.TreePath;
import org.cohorte.herald.Peer;
/**
* @author ogattaz
*
*/
public class CIsolatesTreeModel implements TreeModel {
/** The snapshots list */
private List<CSnapshotNode> pSnapshotNodes = new ArrayList<CSnapshotNode>();
/**
* Adds an isolate in the tree
*
* @param aPeer
* The Herald peer representing the isolate
*/
public synchronized void addIsolate(final Peer aPeer) {
// Find or create the node
CSnapshotNode wNode = findNode(aPeer.getNodeUid());
if (wNode == null) {
wNode = new CSnapshotNode(aPeer.getNodeUid(), aPeer.getNodeName());
// Store the new node
pSnapshotNodes.add(wNode);
}
// Store the isolate
final CSnapshotIsolate snapshot = new CSnapshotIsolate(aPeer);
wNode.add(snapshot);
}
/*
* (non-Javadoc)
*
* @see javax.swing.tree.TreeModel#addTreeModelListener(javax.swing.event.
* TreeModelListener)
*/
@Override
public void addTreeModelListener(final TreeModelListener aListener) {
}
/**
* Cleans up the model
*/
void destroy() {
pSnapshotNodes.clear();
pSnapshotNodes = null;
}
/**
* @param aId
* @return
*/
private CSnapshotNode findNode(final String aId) {
synchronized (pSnapshotNodes) {
final int index = findNodeIdx(aId);
if (index < 0) {
return null;
}
return pSnapshotNodes.get(index);
}
}
/**
* @param aId
* @return
*/
private int findNodeIdx(final String aId) {
synchronized (pSnapshotNodes) {
int wIdx = 0;
for (final CSnapshotNode wNode : pSnapshotNodes) {
if (wNode.getName().equals(aId)) {
return wIdx;
}
wIdx++;
}
}
return -1;
}
/*
* (non-Javadoc)
*
* @see javax.swing.tree.TreeModel#getChild(java.lang.Object, int)
*/
@Override
public Object getChild(final Object aParent, final int aIndex) {
if (aParent instanceof String) {
// Root
synchronized (pSnapshotNodes) {
return pSnapshotNodes.get(aIndex);
}
}
final CSnapshotAbstract wCompositionSnapshot = (CSnapshotAbstract) aParent;
return wCompositionSnapshot.getChild(aIndex);
}
/*
* (non-Javadoc)
*
* @see javax.swing.tree.TreeModel#getChildCount(java.lang.Object)
*/
@Override
public int getChildCount(final Object aParent) {
if (aParent instanceof String) {
// Root
synchronized (pSnapshotNodes) {
return pSnapshotNodes.size();
}
}
final CSnapshotAbstract wCompositionSnapshot = (CSnapshotAbstract) aParent;
return wCompositionSnapshot.getChildCount();
}
/*
* (non-Javadoc)
*
* @see javax.swing.tree.TreeModel#getIndexOfChild(java.lang.Object,
* java.lang.Object)
*/
@Override
public int getIndexOfChild(final Object aParent, final Object aChild) {
if (aParent instanceof String) {
// Root
synchronized (pSnapshotNodes) {
return pSnapshotNodes.indexOf(aChild);
}
}
final CSnapshotAbstract wCompositionSnapshot = (CSnapshotAbstract) aParent;
return wCompositionSnapshot.getIndexOfChild((CSnapshotAbstract) aChild);
}
/*
* (non-Javadoc)
*
* @see javax.swing.tree.TreeModel#getRoot()
*/
@Override
public Object getRoot() {
return "Isolates";
}
/*
* (non-Javadoc)
*
* @see javax.swing.tree.TreeModel#isLeaf(java.lang.Object)
*/
@Override
public boolean isLeaf(final Object aObject) {
if (aObject instanceof String) {
synchronized (pSnapshotNodes) {
return pSnapshotNodes.isEmpty();
}
}
return aObject instanceof CSnapshotIsolate;
}
/**
* Removes an isolate from the tree
*
* @param aPeer
* Peer going away
*/
public synchronized void removeIsolate(final Peer aPeer) {
// Find the node index in the list
final CSnapshotNode wNode = findNode(aPeer.getNodeUid());
if (wNode != null) {
wNode.removeChild(aPeer.getUid());
if (wNode.getChildCount() <= 0) {
pSnapshotNodes.remove(wNode);
}
}
}
/*
* (non-Javadoc)
*
* @see
* javax.swing.tree.TreeModel#removeTreeModelListener(javax.swing.event.
* TreeModelListener)
*/
@Override
public void removeTreeModelListener(final TreeModelListener aListener) {
}
/**
 * Sets the local peer information, creating its host node if necessary.
 *
 * Declared {@code synchronized} (like {@link #removeIsolate(Peer)})
 * because it mutates {@code pSnapshotNodes}, which the read-side methods
 * guard with {@code synchronized (pSnapshotNodes)} blocks; the original
 * unsynchronized add was inconsistent with the rest of the class.
 *
 * @param aLocalPeer
 *            The local peer bean
 */
public synchronized void setLocalPeer(final Peer aLocalPeer) {
    // Find or create the node hosting this peer
    CSnapshotNode wNode = findNode(aLocalPeer.getNodeUid());
    if (wNode == null) {
        wNode = new CSnapshotNode(aLocalPeer.getNodeUid(),
                aLocalPeer.getNodeName());
        // Store the new node
        pSnapshotNodes.add(wNode);
    }
    // Store the isolate (the boolean presumably flags it as the local
    // peer — TODO confirm against CSnapshotIsolate's constructor)
    final CSnapshotIsolate snapshot = new CSnapshotIsolate(aLocalPeer, true);
    wNode.add(snapshot);
}
/*
 * (non-Javadoc)
 *
 * @see
 * javax.swing.tree.TreeModel#valueForPathChanged(javax.swing.tree.TreePath,
 * java.lang.Object)
 */
@Override
public void valueForPathChanged(final TreePath aArg0, final Object aArg1) {
    // No-op: tree values are not edited through the model.
}
}
|
package commands
import (
"context"
"errors"
"github.com/urfave/cli/v2"
"golang.org/x/xerrors"
cmd "github.com/aquasecurity/trivy/pkg/commands/artifact"
"github.com/aquasecurity/trivy/pkg/k8s/report"
"github.com/aquasecurity/trivy/pkg/k8s/scanner"
"github.com/aquasecurity/trivy/pkg/log"
"github.com/aquasecurity/trivy-kubernetes/pkg/artifacts"
"github.com/aquasecurity/trivy-kubernetes/pkg/k8s"
)
// Artifact kinds accepted as the first positional argument of
// "trivy k8s"; anything else is treated as a single resource (see Run).
const (
	clusterArtifact = "cluster"
	allArtifact     = "all"
)
// Run runs a k8s scan, dispatching on the first positional argument:
// "cluster", "all", or a single resource reference.
func Run(cliCtx *cli.Context) error {
	opt, err := cmd.InitOption(cliCtx)
	if err != nil {
		return xerrors.Errorf("option error: %w", err)
	}
	cluster, err := k8s.GetCluster(opt.KubernetesOption.ClusterContext)
	if err != nil {
		return xerrors.Errorf("failed getting k8s cluster: %w", err)
	}
	// Choose the scan mode from the first CLI argument.
	target := cliCtx.Args().Get(0)
	if target == clusterArtifact {
		return clusterRun(cliCtx, opt, cluster)
	}
	if target == allArtifact {
		return namespaceRun(cliCtx, opt, cluster)
	}
	// Anything else names a single resource (e.g. "pod myapp").
	return resourceRun(cliCtx, opt, cluster)
}
// run executes the scan over the collected artifacts, writes the report
// and exits with a non-zero status on findings (via cmd.Exit).
func run(ctx context.Context, opt cmd.Option, cluster string, artifacts []*artifacts.Artifact) error {
	ctx, cancel := context.WithTimeout(ctx, opt.Timeout)
	defer cancel()
	var err error
	// This deferred closure reads the named err declared above. The later
	// "runner, err :=" and "r, err :=" reuse that same variable (only the
	// other left-hand names are new), so a DeadlineExceeded from NewRunner
	// or Scan is seen here. NOTE(review): the "if err := report.Write"
	// below shadows it, so a timeout there would not trigger this hint.
	defer func() {
		if xerrors.Is(err, context.DeadlineExceeded) {
			log.Logger.Warn("Increase --timeout value")
		}
	}()
	runner, err := cmd.NewRunner(opt)
	if err != nil {
		// SkipScan is a sentinel meaning "nothing to do", not a failure.
		if errors.Is(err, cmd.SkipScan) {
			return nil
		}
		return xerrors.Errorf("init error: %w", err)
	}
	// Close the runner before cancel() fires (defers run LIFO).
	defer func() {
		if err := runner.Close(ctx); err != nil {
			log.Logger.Errorf("failed to close runner: %s", err)
		}
	}()
	s := scanner.NewScanner(cluster, runner, opt)
	r, err := s.Scan(ctx, artifacts)
	if err != nil {
		return xerrors.Errorf("k8s scan error: %w", err)
	}
	if err := report.Write(r, report.Option{
		Format:     opt.Format,
		Report:     opt.KubernetesOption.ReportFormat,
		Output:     opt.Output,
		Severities: opt.Severities,
	}); err != nil {
		return xerrors.Errorf("unable to write results: %w", err)
	}
	// Sets the process exit code when findings failed the scan policy.
	cmd.Exit(opt, r.Failed())
	return nil
}
// Full-cluster scanning with '--format table' without explicit '--report all' is not allowed so that it won't mess up user's terminal.
// To show all the results, user needs to specify "--report all" explicitly
// even though the default value of "--report" is "all".
//
// e.g. $ trivy k8s --report all cluster
//      $ trivy k8s --report all all
//
// Or they can use "--format json" with implicit "--report all".
//
// e.g. $ trivy k8s --format json cluster // All the results are shown in JSON
//
// Single resource scanning is allowed with implicit "--report all".
//
// e.g. $ trivy k8s pod myapp
func validateReportArguments(cliCtx *cli.Context) error {
	// "--report" defaulted to "all" (not set by the user) combined with
	// the table format would flood the terminal: reject it.
	reportDefaultedToAll := cliCtx.String("report") == "all" && !cliCtx.IsSet("report")
	if !reportDefaultedToAll || cliCtx.String("format") != "table" {
		return nil
	}
	m := "All the results in the table format can mess up your terminal. Use \"--report all\" to tell Trivy to output it to your terminal anyway, or consider \"--report summary\" to show the summary output."
	return xerrors.New(m)
}
|
// src/infrastructure/prisma/repository/query/LoginCredential.ts (beluga-v3-api-server)
import { LoginCredential, PrismaClient } from "@prisma/client"
import {
RepositoryError,
UnknownRepositoryError,
} from "../../../../domain/repository/RepositoryError"
import { ILoginCredentialQueryRepository } from "../../../../domain/repository/query/LoginCredential"
import { LoginCredentialEntity } from "../../../../domain/entity/LoginCredential"
import { UserId } from "../../../../domain/types"
import { prisma } from "../client"
/** Maps a Prisma LoginCredential row onto its domain entity. */
function toEntity(loginCredential: LoginCredential) {
    const { userId, passwordHash } = loginCredential
    return new LoginCredentialEntity({ userId, passwordHash })
}
/**
 * Read-only access to login credentials, backed by Prisma.
 */
export class LoginCredentialQueryRepository implements ILoginCredentialQueryRepository {
    private _prisma: PrismaClient
    /**
     * @param transaction Optional transaction-scoped client; the shared
     *     module-level client is used when omitted.
     */
    constructor(transaction?: PrismaClient) {
        this._prisma = transaction ?? prisma
    }
    /**
     * Looks up the login credential of a user.
     *
     * @param userId Owner of the credential
     * @returns the credential entity, or null when the user has none
     * @throws RepositoryError when the underlying query fails
     */
    async findByUserId(userId: UserId): Promise<LoginCredentialEntity | null> {
        try {
            const row = await this._prisma.loginCredential.findUnique({
                where: { userId },
            })
            // findUnique resolves to null when no row matches.
            return row == null ? null : toEntity(row)
        } catch (error) {
            if (error instanceof Error) {
                throw new RepositoryError(
                    error.message,
                    error.stack,
                    "LoginCredentialQueryRepository::findByUserId"
                )
            }
            throw new UnknownRepositoryError()
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.