text
stringlengths
1
1.04M
language
stringclasses
25 values
<filename>standalone/parser.py #!/usr/bin/env python import argparse from argparse import ArgumentParser,ArgumentDefaultsHelpFormatter import subprocess mkbleedhelp = """ Detects bleedtrails in Y for incoming FITS image. Sets BADPIX_TRAIL for detected bleedtrails, and BADPIX_INTERP if interpolation is enabled, and BADPIX_STAR if star masking is enabled. mkbleedmask Usage: mkbleedmask [-aeghimzD] [-b <factor> -d <npix> -f <value> -j <npix> -l <value> -n <npix> -o <filename> -r <factor> -s <n> -t <npixels> -v <level> -w <value> -x <filename> -y <value> -E <num_x_dilations> -L <npix> -W <factor> ] <infile> <outfile> -a,--interpolate-stars Do radial interpolation over saturated stars for which bleedtrails were detected. -b,--bgreject <factor> Use specified <factor> as scalefactor for background rejection. (5.0) -d,--edgesize <npix> Size of edges used to detect edgebleed. (15) -e,--version Print version and exit -f,--scalefactor <value> Use specified <value> as scalefactor on background to detect bleedtrails. (1.0) -g,--global_only Do not use local statistics. -h,--help Prints this long version of help. -i,--interpolate Interpolate over bleedtrails. -j,--trailreject <npix> Reject bleedtrails of size <npix> or smaller. (1) -l,--starlevel <value> Use specified <value> as scalefactor on background to detect stars. (5.0) -m,--starmask Create a mask for the detected bright objects. (No) -n,--numtrail <npix> Number of contiguous pixels required for trail detection. (trail_length/2) -o,--saturated_objects <filename> Filename for saturated star table.(None) -r,--growbox <factor> Factor by which to grow blob boxes for determining background level. (2) -s,--bgiters <n> Number of iterations for background estimation. (1) -t,--trail_length <npixels> Use specified <npixels> for trail structuring element in Y. (20) -v,--verbose <level> Verbosity level [0-3], default = 1. -w,--growrad <value> Factor by which to grow detected star radii. 
(1.0) -x,--trailboxes <filename> Filename for trail box table.(None) -y,--holelevel <value> Number of sigma below sky for hole detection. (3.0) -z,--zerostarweights Set weights to zero for masked stars. -D,--Debug Turn on debug/development mode. -E,--Expand <num_x_dilations> Expand bleed trails using dilation in order to capture anomalous adjacent pixels (0). -L,--Long_strong <npix> Length scale used to check when dilating long/strong bleed trails (30). -W,--trail_weight_factor <factor> Factor by which to downweight pixels flagged as bleed trails. (0) <infile> Input FITS file. <outfile> Output FITS file. """ xtalkhelp = """ DECam_crosstalk <infile.fits> <outfile> <options> -crosstalk <crosstalk matrix- file> -linear -crossatthresh <factor> -photflag <0 or 1> -presatmask -postsatmask -overscan -overscansample <-1 for MEDIAN, 0 for MEAN, 1 for MEAN w/MINMAX>, -overscanfunction < -50 < N < -1 for cubic SPLINE, 0 for LINE_BY_LINE, 1 < N < 50 for legendre polynomial> -overscanorder <1-6>, default=1 <order for legendre polynomial> -overscantrim <ncols>, default=0 <trim ncols at both edges of overscan, default = 0> -maxhdunum <integer>, default=62 <all valid images> -ccdlist <comma-separated list of CCDs to process> -hdulist <comma-separated list of HDUs to process> -focuschipsout -replace <filename> -verbose <0-3> -help (print help message and exit) -version (print version and exit) """ class DESDMApp(object): _prog = "desdm_app" _description = 'Python wrapper around %s.'%_prog _section = 'desdmapp' def __init__(self,options=None,**kwargs): self.namespace = argparse.Namespace() self.parser = self._create_parser() self.namespace = self.parser.parse_args(options,self.namespace) self.namespace.__dict__.update(**kwargs) def __call__(self,dryrun=False): return self.run(dryrun=dryrun) def _create_parser(self): parser = ArgumentParser(prog=self._prog,description=self._description, formatter_class=ArgumentDefaultsHelpFormatter, add_help=False) for args,kwargs in self._arguments: 
parser.add_argument(*args,**kwargs) return parser def run(self,dryrun=False): cmdline = self.cmdline() if dryrun: return cmdline return subprocess.check_output(cmdline,shell=True) def parse_args(self,args=None): namespace = getattr(self,'namespace',None) self.namespace = self.parser.parse_args(args,namespace) return self.namespace def parse_config(self,config,section=None): if section is None: section = self._section if isinstance(config,basestring): configfile = config config = SafeConfigParser() config.read(configfile) for key,value in config.items(section): setattr(self.namespace,key,value) return self.namespace def cmdlist(self): cmdlist = [self._prog] for action in self.parser._actions: dest = action.dest default = action.default value = getattr(self.namespace,dest) if value == default: continue if not action.option_strings: option = dest else: option = action.option_strings[0] if not action.option_strings: # Positional argument cmdlist += [value] elif isinstance(action,argparse._StoreTrueAction) and value: cmdlist += [option] elif isinstance(action,argparse._StoreFalseAction) and not value: cmdlist += [option] elif isinstance(action,argparse._HelpAction): cmdlist += [option] elif isinstance(action,argparse._CountAction): cmdlist += value*[option] elif isinstance(action,argparse._StoreAction): cmdlist += [option,value] else: msg = "Invalid action type: %s"%type(action) raise Exception(msg) return cmdlist def cmdline(self): cmdlist = self.cmdlist() return ' '.join([str(x) for x in cmdlist]) class crosstalk(DESDMApp): _prog = 'DECam_crosstalk' _section = 'crosstalk' def _create_parser(self): """ Create an ArgumentParser from help message. 
""" parser = ArgumentParser(prog=self._prog,description=self._description, formatter_class=ArgumentDefaultsHelpFormatter, add_help=False) cmd = '%s -help'%self._prog try: out = subprocess.check_output(cmd,shell=True) except subprocess.CalledProcessError: out = xtalkhelp lines = out.strip().split('\n') arguments = ['<infile>','<outfile>'] options = [l.strip() for l in lines if l.strip().startswith('-')] for arg in arguments: args = (arg.strip('<>'),) kwargs = dict(nargs='?') parser.add_argument(*args,**kwargs) for opt in options: olist = opt.split() if (len(olist) == 1) or (olist[1][0] != '<'): args = olist[0].split(',') kwargs = dict(action='store_true') else: args = (olist[0],) kwargs = dict(action='store') parser.add_argument(*args,**kwargs) return parser class mkbleedmask(DESDMApp): _prog = 'mkbleedmask' _section = 'bleedmask' def _create_parser(self): """ Create an ArgumentParser from 'mkbleedmask -h'. """ parser = ArgumentParser(prog=self._prog,description=self._description, formatter_class=ArgumentDefaultsHelpFormatter, add_help=False) cmd = '%s -h'%self._prog try: out = subprocess.check_output(cmd,shell=True) except subprocss.CalledProcessError: out = mkbleedhelp lines = [l.strip() for l in out.strip().split('\n')] options = [l for l in lines if l.startswith('-')] arguments = [l for l in lines if l.startswith('<') and l.endswith('>')] for arg in arguments: args = (arg.strip('<>'),) kwargs = dict(nargs='?') parser.add_argument(*args,**kwargs) for opt in options: olist = opt.split() if len(olist) == 1: args = olist[0].split(',') kwargs = dict(action='store_true') else: args = olist[0].split(',') kwargs = dict(action='store') parser.add_argument(*args,**kwargs) return parser if __name__ == '__main__': # cmdtest = 'DECam_crosstalk DECam_00229650.fits.fz D00229650_g_%02d_r0000p01_xtalk.fits -crosstalk DECam_20130606.xtalk -ccdlist 3,4 -overscanfunction 0 -overscansample 1 -overscantrim 5 -photflag 1 -verbose 0 -replace DES_header_update.20151120' # app = 
crosstalk() # app.parse_args(cmdtest.split()[1:]) # print cmdtest # print app(dryrun=True) # print # cmdtest='pixcorrect_im --verbose --in xtalk.fits -o detrend.fits --bias biascor.fits --bpm bpm.fits --lincor lin.fits --bf bfmodel.fits --gain --flat dflatcor.fits --resaturate --fixcols --addweight' # app = pixcorrect() # app.parse_args(cmdtest.split()[1:]) # print cmdtest # print app(dryrun=True) # print # cmdtest='mkbleedmask inname.fits outname.fits -m -b 5 -f 1.0 -l 7 -n 7 -r 5 -t 20 -v 3 -w 2.0 -y 1.0 -s 100 -v 3 -E 6 -L 30 -x trailbox.fits -o satstars.fits' # app = mkbleedmask() # app.parse_args(cmdtest.split()[1:]) # print cmdtest # print app(dryrun=True) # print cmdtest = 'sky_compress --in D00229650_g_03_r0000p01_bleedmasked.fits --skyfilename D00229650_g_03_r0000p01_bleedmask-mini.fits --blocksize 128' app = skycompress() app.parse_args(cmdtest.split()[1:]) print cmdtest print app(dryrun=True) print cmdtest = "sky_combine --miniskylist listpcain -o D00229650_g_r0000p01_bleedmask-mini-fp.fits --ccdnums 1,3,4 --invalid S30,N30" cmdtest = "sky_fit --infilename D00229650_g_r0000p01_bleedmask-mini-fp.fits --outfilename D00229650_g_r0000p01_skyfit-binned-fp.fits --pcfilename pca_mini_y2_e2_g_n04.fits" cmdtest = "sky_subtract -i D00229650_g_03_r0000p01_bleedmasked.fits -o D00229650_g_03_r0000p01_skysub.fits --fitfilename D00229650_g_r0000p01_skyfit-binned-fp.fits --pcfilename skytemp_y2_e2_g_n04_c03.fits --domefilename D_n20150105t0115_g_c03_r2050p02_norm-dflatcor.fits --weight sky" cmdtest = "sex D00229650_g_03_r0000p01_nullwtbkg.fits[0] -c sexforscamp.config -CHECKIMAGE_TYPE BACKGROUND -DETECT_THRESH 1000 -FILTER N -CHECKIMAGE_NAME D00229650_g_03_r0000p01_bkg.fits -WEIGHT_TYPE MAP_WEIGHT -WEIGHT_IMAGE D00229650_g_03_r0000p01_nullwtbkg.fits[2],D00229650_g_03_r0000p01_nullwtbkg.fits[2] -PARAMETERS_NAME sex.param_bkg -CATALOG_TYPE NONE -INTERP_TYPE ALL -INTERP_MAXXLAG 16 -INTERP_MAXYLAG 16" cmdtest = "immask all D00229650_g_03_r0000p01_starflat.fits 
D00229650_g_03_r0000p01_immask.fits --minSigma 7.0 --max_angle 75 --max_width 300 --nsig_detect 18 --nsig_mask 12 --nsig_merge 12 --nsig_sky 1.5 --min_fill 0.33 --draw --write_streaks --streaksfile D00229650_g_03_r0000p01_streaksfile.fits" """ DECam_crosstalk <infile.fits> <outfile> <options> -crosstalk <crosstalk matrix- file> -linear -crossatthresh <factor> -photflag <0 or 1> -presatmask -postsatmask -overscan -overscansample <-1 for MEDIAN, 0 for MEAN, 1 for MEAN w/MINMAX>, -overscanfunction < -50 < N < -1 for cubic SPLINE, 0 for LINE_BY_LINE, 1 < N < 50 for legendre polynomial> -overscanorder <1-6>, default=1 <order for legendre polynomial> -overscantrim <ncols>, default=0 <trim ncols at both edges of overscan, default = 0> -maxhdunum <integer>, default=62 <all valid images> -ccdlist <comma-separated list of CCDs to process> -hdulist <comma-separated list of HDUs to process> -focuschipsout -replace <filename> -verbose <0-3> -help (print help message and exit) -version (print version and exit) """ """ usage: pixcorrect_im [-h] [-s SAVECONFIG] [-l LOG] [-v] [-i IN] [-o OUT] [--bias BIAS] [--lincor LINCOR] [--bf BF] [--gain] [--bpm BPM] [--flat FLAT] [--fixcols] [--mini MINI] [--blocksize BLOCKSIZE] [--sky SKY] [--skyfit SKYFIT] [--starflat STARFLAT] [--addweight] [--resaturate] [--null_mask NULL_MASK] [config] Do image-by-image pixel level corrections positional arguments: config Configuration file filename optional arguments: -h, --help show this help message and exit -s SAVECONFIG, --saveconfig SAVECONFIG output config file -l LOG, --log LOG the name of the logfile -v, --verbose be verbose -i IN, --in IN input image file name -o OUT, --out OUT output image file name --bias BIAS Bias correction image --lincor LINCOR linearity correction Table --bf BF brighter/fatter correction Table --gain convert ADU to e- using gain values in hdr --bpm BPM bad pixel mask filename --flat FLAT Dome flat correction image --fixcols fix bad columns --mini MINI compressed sky image 
filename --blocksize BLOCKSIZE blocksize for compressed sky image --sky SKY Template file for sky subtraction and weight creation. Requires flat and skyfit files be given. --skyfit SKYFIT MiniDECam file holding sky fit coefficients --starflat STARFLAT Star flat correction image --addweight Add a weight map to the image if none exists --resaturate Put saturated value in BADPIX_SATURATE pixels --null_mask NULL_MASK Names of mask bits to null (or an integer mask) """ """ usage: sky_compress [-h] [-s SAVECONFIG] [-l LOG] [-v] [-i IN] [-o OUT] [--skyfilename SKYFILENAME] [--blocksize BLOCKSIZE] [--bitmask BITMASK] [config] Produce compressed image of sky background positional arguments: config Configuration file filename optional arguments: -h, --help show this help message and exit -s SAVECONFIG, --saveconfig SAVECONFIG output config file -l LOG, --log LOG the name of the logfile -v, --verbose be verbose -i IN, --in IN input image file name -o OUT, --out OUT output image file name --skyfilename SKYFILENAME Filename for compressed sky image --blocksize BLOCKSIZE Size of squares in which median is taken for sky --bitmask BITMASK Mask image bits for pixels to ignore in sky estimate """ """ usage: sky_combine [-h] [-s SAVECONFIG] [-l LOG] [-v] [--ccdnums CCDNUMS] [--miniskyfiles MINISKYFILES] [--miniskylist MINISKYLIST] [-o OUTFILENAME] [--mask_value MASK_VALUE] [--invalid INVALID] [config] Combine sky images of all CCDs in one exposure positional arguments: config Configuration file filename optional arguments: -h, --help show this help message and exit -s SAVECONFIG, --saveconfig SAVECONFIG output config file -l LOG, --log LOG the name of the logfile -v, --verbose be verbose --ccdnums CCDNUMS Range(s) of ccdnums to combine --miniskyfiles MINISKYFILES Filename template for single-chip minisky images --miniskylist MINISKYLIST File containing a list of single-chip minisky images -o OUTFILENAME, --outfilename OUTFILENAME Filename for combined minisky FITS image --mask_value 
MASK_VALUE Value of pixels without valid sky information --invalid INVALID Value(s) of DETPOS to ignore in sky image """ """ usage: sky_fit [-h] [-s SAVECONFIG] [-l LOG] [-v] [-i IN] [-o OUT] [--infilename INFILENAME] [--outfilename OUTFILENAME] [--pcfilename PCFILENAME] [--clip_sigma CLIP_SIGMA] [config] Fit coefficients of sky templates to mini-sky image positional arguments: config Configuration file filename optional arguments: -h, --help show this help message and exit -s SAVECONFIG, --saveconfig SAVECONFIG output config file -l LOG, --log LOG the name of the logfile -v, --verbose be verbose -i IN, --in IN input image file name -o OUT, --out OUT output image file name --infilename INFILENAME Filename for input minisky FITS image to fit --outfilename OUTFILENAME Filename for minisky FITS image with fit results/resids --pcfilename PCFILENAME Filename for minisky principal components --clip_sigma CLIP_SIGMA Rejection threshold for robust fitting/statistics """ """ usage: sky_subtract [-h] [-s SAVECONFIG] [-l LOG] [-v] [-i IN] [-o OUT] [--fitfilename FITFILENAME] [--pcfilename PCFILENAME] [--domefilename DOMEFILENAME] [--weight {sky,all,none}] [--resaturate] [--null_mask NULL_MASK] [config] Subtract sky from images based on principal-component fit and calculate weight image positional arguments: config Configuration file filename optional arguments: -h, --help show this help message and exit -s SAVECONFIG, --saveconfig SAVECONFIG output config file -l LOG, --log LOG the name of the logfile -v, --verbose be verbose -i IN, --in IN input image file name -o OUT, --out OUT output image file name --fitfilename FITFILENAME Filename for minisky FITS image with PC coefficients --pcfilename PCFILENAME Filename for full-res sky principal components --domefilename DOMEFILENAME Filename for dome flat (for weight calculation) --weight {sky,all,none} Construct weight from sky photons, from all photons, or not at all --resaturate Put saturated value in BADPIX_SATURATE pixels 
--null_mask NULL_MASK Names of mask bits to null (or an integer mask) """ """ SYNTAX: scamp catalog1 [catalog2,...][@catalog_list1 [@catalog_list2 ...]] [-c <config_file>][-<keyword> <value>] > to dump a default configuration file: SCAMP -d > to dump a default extended configuration file: SCAMP -dd PLPLOT-specific options: Usage: scamp [options] PLplot options: -h Print out this message -v Print out the PLplot library version number -verbose Be more verbose than usual -debug Print debugging info (implies -verbose) -dev name Output device name -o name Output filename -display name X server to contact -px number Plots per page in x -py number Plots per page in y -geometry geom Window size/position specified as in X, e.g., 400x300, 400x3 00-100+200, +100-200, etc. -wplt xl,yl,xr,yr Relative coordinates [0-1] of window into plot -mar margin Margin space in relative coordinates (0 to 0.5, def 0) -a aspect Page aspect ratio (def: same as output device) -jx justx Page justification in x (-0.5 to 0.5, def 0) -jy justy Page justification in y (-0.5 to 0.5, def 0) -ori orient Plot orientation (0,1,2,3=landscape,portrait,seascape,upside -down) -freeaspect Allow aspect ratio to adjust to orientation swaps -portrait Sets portrait mode (both orientation and aspect ratio) -width width Sets pen width (0 <= width) -bg color Background color (FF0000=opaque red, 0000FF_0.1=blue with al pha of 0.1) -ncol0 n Number of colors to allocate in cmap 0 (upper bound) -ncol1 n Number of colors to allocate in cmap 1 (upper bound) -fam Create a family of output files -fsiz size[kKmMgG] Output family file size (e.g. -fsiz 0.5G, def MB) -fbeg number First family member number on output -finc number Increment between family members -fflen length Family member number minimum field width -nopixmap Don't use pixmaps in X-based drivers -db Double buffer X window output -np No pause between pages -server_name name Main window name of PLplot server (tk driver) -dpi dpi Resolution, in dots per inch (e.g. 
-dpi 360x360) -compression num Sets compression level in supporting devices -cmap0 file name Initializes color table 0 from a cmap0.pal format file in on e of standard PLplot paths. -cmap1 file name Initializes color table 1 from a cmap1.pal format file in on e of standard PLplot paths. -locale Use locale environment (e.g., LC_ALL, LC_NUMERIC, or LANG) t o set LC_NUMERIC locale (which affects decimal point separator). -eofill For the case where the boundary of the filled region is self -intersecting, use the even-odd fill rule rather than the default nonzero fill rule. -drvopt option[=value][,option[=value]]* Driver specific options All parameters must be white-space delimited. Some options are driver dependent. Please see the PLplot reference document for more detail. """ """ > SYNTAX: sex <image> [<image2>][-c <configuration_file>][-<keyword> <value>] > to dump a default configuration file: sex -d > to dump a default extended configuration file: sex -dd > to dump a full list of measurement parameters: sex -dp """ """ _description = 'Python wrapper around DECam_crosstalk' _arguments = ( (('infile',), dict(metavar='infile.fits',help='input file')), (('outfile',), dict(metavar='outfile.fits',help='output file')), (('-crosstalk',), dict(metavar='matrix',type=str,help='crosstalk matrix file')), (('-linear',), dict(action='store_true',help='')), (('-crossatthresh',), dict(metavar='<factor>',type=float,help='')), (('-photflag',), dict(type=int,choices=[0,1],help='')), (('-presatmask',), dict(action='store_true',help='')), (('-postsatmask',), dict(action='store_true',help='')), (('-overscan',), dict(action='store_true', help='')), (('-overscansample',), dict(metavar='type {-1,0,1}', type=int,choices=[-1,0,1], help='-1 for MEDIAN, 0 for MEAN, 1 for MEAN w/MINMAX')), (('-overscanfunction',), dict(metavar='func {-50,50}',default=0,type=int,choices=range(-50,51), help='-50 < N < -1 for cubic SPLINE, 0 for LINE_BY_LINE, 1 < N < 50 for legendre polynomial')), 
(('-overscanorder',), dict(default=1,type=int,choices=range(1,7), help='order for legendre polynomial')), (('-overscantrim',), dict(metavar='ncols',type=int,default=0, help='trim ncols at both edges of overscan')), (('-maxhdunum',), dict(type=int,default=62, help='all valid images (1-indexed)')), (('-ccdlist',), dict(help='comma-separated list of CCDs to process')), (('-hdulist',), dict(help='comma-separated list of HDUs to process')), (('-focuschipsout',), dict(action='store_true',help='')), (('-replace',), dict(metavar='filename', help='replace filename')), (('-verbose',), dict(choices=range(4),default=0,type=int, help='Output verbosity')), (('-version',), dict(action='store_true', help='print version and exit')), (('-help',), dict(action='store_true',help='print help message and exit')), ) """ """ # All of the pixcorrect steps could be done better... class pixcorrect(DESDMApp): _prog = 'pixcorrect_im' _section = 'pixcorrect' def _create_parser(self): from pixcorrect.pixcorrect_im import PixCorrectIm parser = PixCorrectIm.parser() parser.prog = self._prog parser.description = self._description self.namespace.help = '==SUPPRESS==' return parser class skycompress(DESDMApp): _prog = 'sky_compress' _section = 'skycompress' def _create_parser(self): from pixcorrect.sky_compress import sky_compress parser = sky_compress.parser() parser.prog = self._prog parser.description = self._description self.namespace.help = '==SUPPRESS==' return parser class skycompress(DESDMApp): _prog = 'sky_compress' _section = 'skycompress' def _create_parser(self): from pixcorrect.sky_compress import sky_compress parser = sky_compress.parser() parser.prog = self._prog parser.description = self._description self.namespace.help = '==SUPPRESS==' return parser class skycombine(DESDMApp): _prog = 'sky_combine' _section = 'skycombine' def _create_parser(self): from pixcorrect.sky_combine import sky_combine parser = sky_compress.parser() parser.prog = self._prog parser.description = 
self._description self.namespace.help = '==SUPPRESS==' return parser """
python
<reponame>odelavia/foodbears import { IRead, IWrite } from "./interfaces"; import * as Sql from "sequelize"; export default abstract class BaseRepository<T extends ModelInstance<Y>, Y> implements IWrite<T, Y>, IRead<T, Y> { protected readonly _collection: Sql.Model<T, Y>; constructor(collection: Sql.Model<T, Y>) { this._collection = collection; } async create(item: Y) { return await this._collection.create(item); } async update(id: string, item: Partial<Y>) { const instance = await this.findById(id); await instance.update(item); return instance; } async delete(id: string) { const instance = await this._collection.findById(id); if (!instance) return false; await instance.destroy(); return true; } async findAll(item: Y) { const results = await this._collection.findAll({ where: item }); return results || []; } async findById(id: string) { const result = await this._collection.findById(id); if (!result) throw new Error("Not Found"); return result; } }
typescript
I am an assistant professor in the VLSI group of the department of Electronics and Electrical Engineering at Indian Institute of Technology, Guwahati. I primarily work in the area of analog and RF integrated circuits. I did my M.Tech and Ph.D from IIT Delhi, during 2007-09 and 2009-13 respectively. During the Ph.D, I worked in the area of Multi-band Radio Frequency Integrated Circuits (RFIC) under the guidance of Dr. Shouri Chatterjee. From May 2013 to April 2014, I worked as a senior R&D Engineer with Synopsys, Hyderabad.
english
<filename>undp-transparency-portal-be/undp_extra_features/templates/donors.html {% load custom_filters %} <html lang="en"> <head> <meta charset="UTF-8"> <title>Donors</title> </head> <style> * { box-sizing: border-box; } ul, li { list-style-type: none; margin: 0; padding: 0; } .sectionWrapper { margin-top: 30px; } .sectionWrapper .donor_heading { font-size: 20px; font-weight: 700; } .sectionWrapper .sectionHeader { display: -webkit-box; display: -ms-flexbox; display: flex; -webkit-box-pack: justify; -ms-flex-pack: justify; justify-content: space-between; -webkit-box-align: center; -ms-flex-align: center; align-items: center; width: 100%; font-size: 20px; font-weight: 600; } .sectionWrapper .filterWrapper { display: table; width: 100%; padding: 20px 0; margin: 0; } /* ---------- Progress Bar style start------- */ .statsWrapper { height: 125px; background-color: #f4f5fa; padding: 5px; border-radius: 5px; } .contributionBarWrapper { display: block; margin: 0 auto; width: 100%; height: 30px; } .contributionBar { display: inline-block; position: relative; text-align: center; padding: 6px 0; color: white; height: 100%; float: left; } contributionBar::after { clear: both; content: ''; } .contributionBarleft { width: 10%; background-color: #16537d; } .contributionBarRight { width: 90%; background-color: #52abe9; } .barLabel { position: absolute; bottom: calc(100% + 5px); left: 0; right: 0; font-size: 12px; text-align: center; color: #303030; } .barValue { position: absolute; top: calc(100% + 5px); left: 0; right: 0; font-weight: 600; text-align: center; color: #303030; } .barValue span { display: block; font-weight: 400; margin-left: 10px; color: #303030; } .textLabel { margin-Bottom: 5px; font-size: 18px; color: rgba(48, 48, 48, 0.7) } .barTextWrapper { width: 100%; font-size: 18px; display: inline-block; color: #303030; } .barTextLeft { width: 50%; display: inline-block; text-align: left; font-weight: 600; float: left; } .barTextLeft::after { clear: both; content: '' } 
.barTextRight { width: 50%; display: inline-block; text-align: right; font-weight: 600; float: left; } .barTextRight::after { clear: both; content: ''; } .textPerc { color: rgba(48, 48, 48, 0.5); font-size: 16px; } .totalContributionWrapper { width: 100%; text-align: center; font-size: 20px; color: #444; } .totalContributionWrapper .label { font-size: 18px; } .totalContributionWrapper .value { font-weight: 600; } /* ---------- Progress Bar style end ------- */ /* ---------- Filter bar style start ------- */ .filterSection { width: 50%; display: inline-block; margin-top: 20px; } .filterElement { width: 50%; display: inline-block; float: left; } .filterElement::after { clear: both; content: ''; } .filterElement .label { color: rgba(48, 48, 48, 0.7); font-size: 16px; font-weight: 600; } .filterElement .value { font-weight: 600; font-size: 18px; } /* ---------- Filter bar style end ------- */ /* ---------- Donor table style start ------- */ .donorTable { width: 100%; margin: 25px 0; border-collapse: collapse; border: 1px solid #666666; } .donorTable th, .donorTable td, .donorTable tr { page-break-inside: avoid; } .donorTable th, .donorTable td { border: 1px solid #666666; padding: 15px; } .donorTable th { background-color: #CBCBCB; font-size: 20px; } .donorTable td { font-size: 18px; } /* ---------- Donor table style end ------- */ </style> <body> <!-- Page Header start --> {% include "header.html" %} <!-- Page Header end --> <section class="sectionWrapper statsWrapper"> <div class="totalContributionWrapper"> <span class="label">Total Contribution: </span> <span class="value">${{data.totalContribution|numberToCommaFormatter}}</span> </div> <div class="contributionBarWrapper"> <div class="barTextWrapper"> <div class="barTextLeft textLabel"> Regular</div> <div class="barTextRight textLabel">Other</div> </div> <span class="contributionBar contributionBarleft"></span> <span class="contributionBar contributionBarRight"></span> <div class="barTextWrapper"> <div 
class="barTextLeft"> <span>{{data.regularContribution|numberToCurrencyFormatter:"2"}}</span> <span class="textPerc">({{data.regularPercentage}}%)</span> </div> <div class="barTextRight"> <span>{{data.otherContribution|numberToCurrencyFormatter:"2"}}</span> <span class="textPerc">({{data.OtherPercentage}}%)</span> </div> </div> </div> </section> <section class="filterSection"> {% if data.donorType|length >= 1 %} <div class="filterElement"> <div class="label">Donor Type</div> <div class="value">{{data.donorType}}</div> </div> {% endif %} {% if data.fundStreams|length >= 1 %} <div class="filterElement"> <div class="label">Fund Category</div> <div class="value">{{data.fundStreams}}</div> </div> {% endif %} </section> <section class="sectionWrapper"> {% if data.tabSelected|length > 1 %} <p class="donor_heading">List of Donors contributing in UNDP regular and other resources</p> {% elif data.tabSelected.0 == "Regular" %} <p class="donor_heading">List of Donors contributing in UNDP regular resources</p> {% elif data.tabSelected.0 == "Others" %} <p class="donor_heading">List of Donors contributing in UNDP other resources</p> {% endif %} <table class="donorTable"> <tr> <th>Donors</th> {% for tabs in data.tabSelected %} <th>{{tabs}}</th> {% endfor %} </tr> {% for donor in data.aggerateSummary %} <tr> {% for item in data.tabMapper %} {% if item == "country" %} <td>{{donor.country}}</td> {% elif item == "regular_contribution" %} <td style="text-align: right">${{donor.regular_contribution.amount|numberToCommaFormatter}}</td> {% elif item == "other_contribution" %} <td style="text-align: right">${{donor.other_contribution.amount|numberToCommaFormatter}}</td> {% elif item == "total_contribution" %} <td style="text-align: right">${{donor.total_contribution|numberToCommaFormatter}}</td> {% endif %} {% endfor %} </tr> {% endfor %} </table> </section> </body> </html>
html
<reponame>liuyi0501/IwaraCollector<gh_stars>0 {"title":"【HoneySelect×MMD】ロキ(HimeHina covered)","author":"loxoprofen","description":"music:undefined原曲undefined【ロキ】みきとP様undefined(<aundefinedhref='https://youtu.be/Xg-qfsKN2_E'>https://youtu.be/Xg-qfsKN2_E</a>)<br>Sing:田中ヒメ様undefined@HimeTanaka_HHundefined/undefined鈴木ヒナ様undefined@HinaSuzuki_HH<br>motion:HimeHinaundefinedChannel/田中工務店様undefined<aundefinedhref='https://www.youtube.com/watch?v=mtb-qa8xvFU'>https://www.youtube.com/watch?v=mtb-qa8xvFU</a><br>camera:むらびと様undefined<aundefinedhref='https://www.nicovideo.jp/watch/sm34193997'>https://www.nicovideo.jp/watch/sm34193997</a>","thumb":"//i.iwara.tv/sites/default/files/styles/thumbnail/public/videos/thumbnails/531943/thumbnail-531943_0005.jpg?itok=J3CDKR-f","download":"https://www.iwara.tv/api/video/mewqjhnkyc87n2qv","origin":"https://www.iwara.tv/videos/mewqjhnkyc87n2qv"}
json
<reponame>TheHolyRoger/RogerXplorer<filename>lib/markets/altmarkets.js var request = require('request'); var base_url = 'https://v2.altmarkets.io/api/v2/peatio/public/markets/'; function get_summary(coin, exchange, cb) { console.log("Fetching altmarkets summary"); var req_url = base_url + coin.toLowerCase() + exchange.toLowerCase() + '/tickers/'; var summary = {}; request({uri: req_url, json: true}, function (error, response, body) { if (error) { return cb(error, null); } else { if (body.error) { return cb(body.error, null); } else { summary['volume'] = parseFloat(body['ticker']['amount']).toFixed(8); summary['volume_btc'] = parseFloat(body['ticker']['volume']).toFixed(8); summary['high'] = parseFloat(body['ticker']['high']).toFixed(8); summary['low'] = parseFloat(body['ticker']['low']).toFixed(8); summary['last'] = parseFloat(body['ticker']['last']).toFixed(8); summary['change'] = parseFloat(body['ticker']['price_change_percent']).toFixed(8); return cb(null, summary); } } }); } function get_trades(coin, exchange, cb) { console.log("Fetching altmarkets trades"); var req_url = base_url + coin.toLowerCase() + exchange.toLowerCase() + '/trades/?limit=50&order_by=desc'; request({uri: req_url, json: true}, function (error, response, body) { if (body.error) { return cb(body.error, null); } else { return cb (null, body); } }); } function get_orders(coin, exchange, cb) { console.log("Fetching altmarkets orders"); var req_url = base_url + coin.toLowerCase() + exchange.toLowerCase() + '/order-book/'; request({uri: req_url, json: true}, function (error, response, body) { if (body.error) { return cb(body.error, [], []) } else { var orders = body; var buys = []; var sells = []; if (orders['bids'].length > 0){ for (var i = 0; i < orders['bids'].length; i++) { var order = { amount: parseFloat(orders.bids[i].remaining_volume).toFixed(8), price: parseFloat(orders.bids[i].price).toFixed(8), // total: parseFloat(orders.bids[i].Total).toFixed(8) // Necessary because API will return 
0.00 for small volume transactions total: (parseFloat(orders.bids[i].remaining_volume).toFixed(8) * parseFloat(orders.bids[i].price)).toFixed(8) } buys.push(order); } } else {} if (orders['asks'].length > 0) { for (var x = 0; x < orders['asks'].length; x++) { var order = { amount: parseFloat(orders.asks[x].remaining_volume).toFixed(8), price: parseFloat(orders.asks[x].price).toFixed(8), // total: parseFloat(orders.asks[x].Total).toFixed(8) // Necessary because API will return 0.00 for small volume transactions total: (parseFloat(orders.asks[x].remaining_volume).toFixed(8) * parseFloat(orders.asks[x].price)).toFixed(8) } sells.push(order); } } else {} var sells = sells.reverse(); return cb(null, buys, sells); } }); } function get_chartdata(coin, exchange, cb) { console.log("Fetching altmarkets chartdata"); var end = Date.now(); end = parseInt(end / 1000); start = end - (86400 * 14); var req_url = base_url + coin.toLowerCase() + exchange.toLowerCase() + '/k-line/?time_from=' + start + '&time_to=' + end + '&period=1'; request({uri: req_url, json: true}, function (error, response, chartdata) { if (error) { return cb(error, []); } else { if (chartdata.error == null) { var processed = []; for (var i = 0; i < chartdata.length; i++) { processed.push([chartdata[i][0] * 1000, parseFloat(chartdata[i][1]), parseFloat(chartdata[i][2]), parseFloat(chartdata[i][3]), parseFloat(chartdata[i][4])]); if (i == chartdata.length - 1) { return cb(null, processed); } } } else { return cb(chartdata.error, []); } } }); } module.exports = { get_data: function(settings, cb) { console.log("Fetching altmarkets full market data"); var error = null; get_chartdata(settings.coin, settings.exchange, function (err, chartdata){ if (err) { chartdata = []; error = err; } get_orders(settings.coin, settings.exchange, function(err, buys, sells) { if (err) { buys = []; sells = []; error = err; } get_trades(settings.coin, settings.exchange, function(err, trades) { if (err) { trades = []; error = err; } 
get_summary(settings.coin, settings.exchange, function(err, stats) { if (err) { stats = []; error = err; } return cb(error, {buys: buys, sells: sells, chartdata: chartdata, trades: trades, stats: stats}); }); }); }); }); } };
javascript
from sklearn.naive_bayes import BernoulliNB

from run_binary_classifier import run

# Hyper-parameter "grid" for the binary-classification pipeline.
# Every entry holds exactly one candidate value, so the search
# evaluates a single fixed configuration.
_VECTORIZER_PARAMS = {
    'bag_of_words__stop_words': ['english'],
    'bag_of_words__ngram_range': [(1, 2)],
    'bag_of_words__max_features': [500],
}
_TRANSFORM_PARAMS = {
    'dim_reduct__n_components': [300],
    'normalizer__norm': ['l2'],
}
_CLASSIFIER_PARAMS = {
    'classifier__alpha': [1.0],
    'classifier__binarize': [0.0],
}

param_grid = {**_VECTORIZER_PARAMS, **_TRANSFORM_PARAMS, **_CLASSIFIER_PARAMS}

clf = BernoulliNB()
run(param_grid, clf)
python
{ "networkCallRecords" : [ { "Method" : "POST", "Uri" : "https://REDACTED.azurecr.io/oauth2/exchange", "Headers" : { "User-Agent" : "azsdk-java-UnknownName/UnknownVersion (11.0.10; Windows 10; 10.0)", "x-ms-client-request-id" : "23969ea5-d6ec-4000-ac1b-c20fb6d24c94", "Content-Type" : "application/x-www-form-urlencoded" }, "Response" : { "Transfer-Encoding" : "chunked", "X-Ms-Correlation-Request-Id" : "fd38c758-4209-4185-a2ba-5755ee8c5f51", "Strict-Transport-Security" : "max-age=31536000; includeSubDomains", "Server" : "openresty", "Connection" : "keep-alive", "x-ms-ratelimit-remaining-calls-per-second" : "166.433333", "retry-after" : "0", "StatusCode" : "200", "Body" : "{\"refresh_token\":\"REDACTED\"}", "Date" : "Wed, 21 Jul 2021 22:54:32 GMT", "Content-Type" : "application/json; charset=utf-8" }, "Exception" : null }, { "Method" : "POST", "Uri" : "https://REDACTED.azurecr.io/oauth2/token", "Headers" : { "User-Agent" : "azsdk-java-UnknownName/UnknownVersion (11.0.10; Windows 10; 10.0)", "x-ms-client-request-id" : "767c407a-ab5e-415c-ae37-fc7a11f5272f", "Content-Type" : "application/x-www-form-urlencoded" }, "Response" : { "Transfer-Encoding" : "chunked", "X-Ms-Correlation-Request-Id" : "144706b9-5b5c-4455-9cd8-fa1cb55e0aea", "Strict-Transport-Security" : "max-age=31536000; includeSubDomains", "Server" : "openresty", "Connection" : "keep-alive", "x-ms-ratelimit-remaining-calls-per-second" : "166.55", "retry-after" : "0", "StatusCode" : "200", "Body" : "{\"access_token\":\"<PASSWORD>ACTED\"}", "Date" : "Wed, 21 Jul 2021 22:54:32 GMT", "Content-Type" : "application/json; charset=utf-8" }, "Exception" : null }, { "Method" : "DELETE", "Uri" : "https://REDACTED.azurecr.io/acr/v1/library%2Falpine/_tags/v3", "Headers" : { "User-Agent" : "azsdk-java-UnknownName/UnknownVersion (11.0.10; Windows 10; 10.0)", "x-ms-client-request-id" : "4fc5ef5c-ab3b-4699-8944-b23a47318ef5" }, "Response" : { "content-length" : "0", "Server" : "openresty", "X-Content-Type-Options" : "nosniff", 
"Connection" : "keep-alive", "retry-after" : "0", "StatusCode" : "202", "Date" : "Wed, 21 Jul 2021 22:54:32 GMT", "X-Ms-Int-Docker-Content-Digest" : "sha256:234cb88d3020898631af0ccbbcca9a66ae7306ecd30c9720690858c1b007d2a0", "Docker-Distribution-Api-Version" : "registry/2.0", "X-Ms-Correlation-Request-Id" : "8b35d6ed-8f22-4b6f-8631-773f44295888", "Access-Control-Expose-Headers" : "Docker-Content-Digest,WWW-Authenticate,Link,X-Ms-Correlation-Request-Id", "Strict-Transport-Security" : "max-age=31536000; includeSubDomains,max-age=31536000; includeSubDomains", "X-Ms-Client-Request-Id" : "4fc5ef5c-ab3b-4699-8944-b23a47318ef5", "X-Ms-Request-Id" : "5d2ba029-9d89-47d7-aa85-9aaefcd73580", "X-Ms-Ratelimit-Remaining-Calls-Per-Second" : "8.000000" }, "Exception" : null } ], "variables" : [ ] }
json
The Chinese government said on Thursday it was “vigorously committed” to developing military ties with India, drawing a line over nine months of strained defence ties and indicating it was in favour of expanding military-to-military contacts. The two countries on Wednesday agreed to resume defence exchanges following a meeting between Prime Minister Manmohan Singh and Chinese President Hu Jintao, along the sidelines of the BRICS summit in Sanya, in southern China's Hainan province. Defence exchanges were suspended in July last, when China voiced objections to issuing a regular visa to the head of the Army's Northern Command, saying the “sensitive” region of Kashmir was under his control. Since 2008, China has been issuing stapled visa to Indian citizens in Jammu and Kashmir, a move that Indian officials felt is tantamount to questioning Indian sovereignty over the State, and one that has strained diplomatic ties. “India is an important neighbour of China. China is vigorously committed to developing military-to-military relations with India,” Foreign Ministry spokesperson Hong Lei said on Thursday. “China always values our military exchanges with India, and believes the two sides could proceed from the overall interest of bilateral relations, and follow the principle of seeking for common ground while solving differences, to promote the sound and stable development of our military relations,” he said. According to Indian officials, China had agreed to receive a delegation comprising officers from the Northern Command later this year, and will issue them regular visas. China has not issued stapled visas since November, Indian officials said adding that it was not, however, clear whether the policy had been stopped. The resumption of exchanges would allow both countries to address persisting mistrust, said Zhao Gancheng, Director of South Asia Studies at the Shanghai Institutes for International Studies.
english
<filename>Serving Files with Nodejs/2.2 - Practical Exercise/solutions/sync.js /* * Read the comments to understand why this works - and add console.log statements * too. It's important you understand how this works, so take as much time as you need! */ const http = require('http'); const fs = require('fs'); //Using the new arrow notation we introduced in the video to make the code a bit shorter and cleaner http.createServer( (req, res) => { /* We read the file and return it to the client. If you paste this code into your own file it won't work. You need to change the directory passed to readFileSync to just 'index.html' if you move it to the file up a folder */ var data = fs.readFileSync("../index.html"); res.end(data); }).listen(8080);
javascript
import FirestorePage from "./pages/FirestorePage/FirestorePage"; const Routes = [ { path: ['/'], sidebarName: ['Firestore'], // icon: ['home'], component: [FirestorePage], }, ]; export default Routes;
typescript
import Component from "@biotope/element"; import template from "./template"; import PortfolioHeadline from '../portfolio-headline/portfolio-headline'; import { PortfolioAccordionProps, PortfolioAccordionState, PortfolioAccordionMethods } from "./defines"; class PortfolioAccordion extends Component< PortfolioAccordionProps, PortfolioAccordionState > { static componentName = "portfolio-accordion"; static attributes = []; public static dependencies = [ PortfolioHeadline as typeof Component, ] public methods: PortfolioAccordionMethods = {}; rendered() { const allAccordionItems = this.querySelectorAll( "portfolio-accordion-items" ); this.addEventListener("accordionOpened", (event: CustomEvent) => { const currentItem = event.target as HTMLElement; if (currentItem.parentElement === this) { if (event.detail) { currentItem.removeAttribute("is-open"); } else { allAccordionItems.forEach(item => { item.removeAttribute("is-open"); }); currentItem.setAttribute("is-open", "true"); } } }); } get defaultState() { return {}; } get defaultProps() { return {}; } render() { return template( this.html, { ...this.props, ...this.state, ...this.methods }, this.createStyle ); } } export default PortfolioAccordion;
typescript
<reponame>BenjaminLoison/ic
// Criterion benchmark helpers for measuring Hypervisor::execute_update
// throughput (instructions per second) on canister update calls.
use criterion::{BatchSize, Criterion};
use ic_config::execution_environment::Config;
use ic_execution_environment::Hypervisor;
use ic_interfaces::{
    execution_environment::{ExecutionMode, ExecutionParameters, SubnetAvailableMemory},
    messages::RequestOrIngress,
};
use ic_metrics::MetricsRegistry;
use ic_registry_routing_table::{CanisterIdRange, RoutingTable};
use ic_registry_subnet_type::SubnetType;
use ic_replicated_state::{CallContextAction, CanisterState, NetworkTopology, SubnetTopology};
use ic_test_utilities::{
    cycles_account_manager::CyclesAccountManagerBuilder,
    get_test_replica_logger,
    mock_time,
    state::canister_from_exec_state,
    types::ids::{canister_test_id, subnet_test_id},
    types::messages::IngressBuilder,
};
use ic_types::{
    CanisterId, Cycles, MemoryAllocation, NumBytes, NumInstructions, PrincipalId, Time, UserId,
};
use lazy_static::lazy_static;
use maplit::btreemap;
use std::convert::TryFrom;
use std::str::FromStr;
use std::sync::Arc;

// Per-message instruction budget handed to every benchmarked execution.
const MAX_NUM_INSTRUCTIONS: NumInstructions = NumInstructions::new(10_000_000_000);

lazy_static! {
    // Effectively unlimited subnet memory so memory limits never interfere
    // with the instruction-count measurement.
    static ref MAX_SUBNET_AVAILABLE_MEMORY: SubnetAvailableMemory =
        SubnetAvailableMemory::new(i64::MAX);
}

// Everything execute_update() needs, bundled so iter_batched can clone a
// fresh copy per benchmark iteration.
#[derive(Clone)]
struct ExecuteUpdateArgs(
    CanisterState,
    RequestOrIngress,
    Time,
    Arc<NetworkTopology>,
    ExecutionParameters,
);

/// Build a Hypervisor for an application subnet plus a scratch directory
/// for canister state.
pub fn get_hypervisor() -> (Hypervisor, std::path::PathBuf) {
    let log = get_test_replica_logger();
    let tmpdir = tempfile::Builder::new().prefix("test").tempdir().unwrap();
    let metrics_registry = MetricsRegistry::new();
    let cycles_account_manager = Arc::new(CyclesAccountManagerBuilder::new().build());
    let hypervisor = Hypervisor::new(
        Config::default(),
        &metrics_registry,
        subnet_test_id(1),
        SubnetType::Application,
        log,
        cycles_account_manager,
    );
    // NOTE(review): `tmpdir` (a tempfile::TempDir) is dropped when this function
    // returns, which normally deletes the directory; only its PathBuf survives.
    // Confirm callers do not rely on the directory still existing.
    (hypervisor, tmpdir.path().into())
}

/// Compile `wat`, install it in a fresh test canister, and assemble the full
/// argument bundle for a single execute_update() call.
fn setup_update<W>(
    hypervisor: &Hypervisor,
    canister_root: &std::path::Path,
    wat: W,
) -> ExecuteUpdateArgs
where
    W: AsRef<str>,
{
    let mut features = wabt::Features::new();
    features.enable_multi_value();
    let canister_id = canister_test_id(0);
    let execution_state = hypervisor
        .create_execution_state(
            wabt::wat2wasm_with_features(wat.as_ref(), features).unwrap(),
            canister_root.into(),
            canister_id,
        )
        .expect("Failed to create execution state");
    let mut canister_state = canister_from_exec_state(execution_state, canister_id);
    // Best-effort memory allocation (0 bytes reserved).
    canister_state.system_state.memory_allocation =
        MemoryAllocation::try_from(NumBytes::from(0)).unwrap();
    // Ingress message invoking method "test" with an 8 KiB zero payload.
    let request = RequestOrIngress::Ingress(
        IngressBuilder::new()
            .method_name("test")
            .method_payload(vec![0; 8192])
            .source(UserId::from(
                PrincipalId::from_str(
                    "mvlzf-grr7q-nhzpd-geghp-zdgtp-ib3yt-hzgi6-texkf-kk6rz-p2ejr-iae",
                )
                .expect("we know this converts OK"),
            ))
            .build(),
    );
    let time = mock_time();
    // Minimal single-subnet topology covering canister ids 0..0xff.
    let routing_table = Arc::new(RoutingTable::try_from(btreemap! {
        CanisterIdRange{ start: CanisterId::from(0), end: CanisterId::from(0xff) } => subnet_test_id(1),
    }).unwrap());
    let network_topology = Arc::new(NetworkTopology {
        routing_table,
        subnets: btreemap! {
            subnet_test_id(1) => SubnetTopology {
                subnet_type: SubnetType::Application,
                ..SubnetTopology::default()
            }
        },
        ..NetworkTopology::default()
    });
    let execution_parameters = ExecutionParameters {
        instruction_limit: MAX_NUM_INSTRUCTIONS,
        canister_memory_limit: canister_state.memory_limit(NumBytes::new(std::u64::MAX)),
        subnet_available_memory: MAX_SUBNET_AVAILABLE_MEMORY.clone(),
        compute_allocation: canister_state.scheduler_state.compute_allocation,
        subnet_type: SubnetType::Application,
        execution_mode: ExecutionMode::Replicated,
    };
    ExecuteUpdateArgs(
        canister_state,
        request,
        time,
        network_topology,
        execution_parameters,
    )
}

/// Run execute_update() benchmark for a given WAT snippet.
///
/// Throughput is reported in "elements" = expected_instructions so criterion
/// prints an instructions-per-second figure; the exact instruction count is
/// asserted each iteration to keep that metric honest.
pub fn run_benchmark<I, W>(
    c: &mut Criterion,
    id: I,
    wat: W,
    expected_instructions: u64,
    hypervisor: &Hypervisor,
    canister_root: &std::path::Path,
) where
    I: AsRef<str>,
    W: AsRef<str>,
{
    let mut group = c.benchmark_group("update");
    let mut bench_args = None;
    group
        .throughput(criterion::Throughput::Elements(expected_instructions))
        .bench_function(id.as_ref(), |b| {
            b.iter_batched(
                || {
                    // Lazily setup the benchmark arguments
                    // (done once; later iterations clone the cached bundle).
                    if bench_args.is_none() {
                        println!(
                            "\n Instructions per bench iteration: {} ({}M)",
                            expected_instructions,
                            expected_instructions / 1_000_000
                        );
                        println!(" WAT: {}", wat.as_ref());
                        bench_args = Some(setup_update(hypervisor, canister_root, wat.as_ref()));
                    }
                    // let (hypervisor, args) = bench_setup.take().unwrap();
                    bench_args.as_ref().unwrap().clone()
                },
                |ExecuteUpdateArgs(
                    cloned_canister_state,
                    cloned_request,
                    cloned_time,
                    cloned_network_topology,
                    cloned_execution_parameters,
                )| {
                    let (_state, instructions, action, _bytes) = hypervisor.execute_update(
                        cloned_canister_state,
                        cloned_request,
                        cloned_time,
                        cloned_network_topology,
                        cloned_execution_parameters,
                    );
                    // Any action other than NoResponse/Reply/Reject means the
                    // call failed; surface it via the assert message.
                    match action {
                        CallContextAction::NoResponse { .. } => {}
                        CallContextAction::Reply { .. } => {}
                        CallContextAction::Reject { .. } => {}
                        _ => assert_eq!(
                            action,
                            CallContextAction::NoResponse {
                                refund: Cycles::from(0),
                            },
                            "The system call should not fail"
                        ),
                    }
                    assert_eq!(
                        expected_instructions,
                        MAX_NUM_INSTRUCTIONS.get() - instructions.get(),
                        "Expected number of instructions is required for IPS metric"
                    );
                },
                BatchSize::SmallInput,
            );
        });
    group.finish();
}
rust
<filename>boboleetcode/Play-Leetcode-master/0049-Group-Anagrams/cpp-0049/main.cpp
/// Source : https://leetcode.com/problems/group-anagrams/description/
/// Author : liuyubobobo
/// Time : 2018-09-12

#include <iostream>
#include <vector>
#include <unordered_map>
#include <algorithm>   // std::sort — was previously available only via transitive includes

using namespace std;

/// Using HashMap
/// Using sorted string as key
///
/// Time Complexity: O(n*klogk) where k is the max length of string in strs
/// Space Complexity: O(n*k)
class Solution {
public:
    /// Group the input strings into anagram classes.
    ///
    /// Two strings belong to the same group iff they are equal after sorting
    /// their characters, so the sorted string serves as the canonical key.
    /// The order of the returned groups follows unordered_map iteration
    /// order and is therefore unspecified.
    vector<vector<string>> groupAnagrams(vector<string>& strs) {

        // Renamed from `map` to avoid shadowing std::map (in scope via
        // `using namespace std`).
        unordered_map<string, vector<string>> groups;
        for(const string& s: strs){
            string key = s;
            sort(key.begin(), key.end());
            groups[key].push_back(s);
        }

        vector<vector<string>> res;
        res.reserve(groups.size());
        for(const auto& p: groups)
            res.push_back(p.second);
        return res;
    }
};


int main() {

    return 0;
}
cpp
<reponame>Agunderman10/HackOHI-O<gh_stars>1-10
/* Styles for the "active recipe" detail view. */

/* Center the recipe container horizontally. */
.active-recipe {
  margin: 0 auto;
  width: 60%;
}

/* Card wrapper: white panel with a drop shadow. */
.card {
  box-shadow: 0 4px 8px 0 rgba(0,0,0,0.3);
  width: 90%;
  border-radius: 5px;
  background-color: white;
  padding: 15px;
  margin-left: 50px;
  margin-top:50px;
}

/* Fixed-height hero image for the recipe. */
.active-recipe__img {
  height: 500px !important;
}

.active-recipe__title {
  text-transform: uppercase;
  letter-spacing: 2px;
  margin: 1rem 0;
}

/* Publisher and website lines share the same base typography. */
.active-recipe__publisher,
.active-recipe__website {
  text-transform: uppercase;
  font-size: 1.2rem;
  margin-bottom: 1rem;
}

/* The value part of the publisher line (normal case, lighter weight). */
.active-recipe__publisher span {
  text-transform: none;
  font-weight: 300;
  letter-spacing: 2px;
  margin-left: 5px;
}

/* The value part of the website line (lowercase, accent color). */
.active-recipe__website span {
  text-transform: lowercase;
  font-weight: 300;
  letter-spacing: 2px;
  color: #e74c3c;
  margin-left: 10px;
}

/* Outlined accent-colored action button. */
.active-recipe__button {
  border: solid 2px #e74c3c;
  background: transparent;
  padding: 0.2rem 0.9rem;
  margin: 0 0.2rem;
  border-radius: 1px;
  font-size: 1rem;
  text-transform: uppercase;
  font-weight: 500;
}
css
{ "name": "cool-ascii-faces", "version": "1.3.4", "description": "get some cool ascii faces ヽ༼ຈل͜ຈ༽ノ", "main": "index.js", "bin": { "cool-face": "./cli.js" }, "scripts": { "test": "node test.js" }, "author": "<NAME>", "license": "BSD", "repository": { "type": "git", "url": "https://github.com/maxogden/cool-ascii-faces.git" }, "bugs": { "url": "https://github.com/maxogden/cool-ascii-faces/issues" }, "homepage": "https://github.com/maxogden/cool-ascii-faces", "dependencies": { "stream-spigot": "~3.0.3" } }
json
<reponame>MarioJim/alvap-store
// Product detail page: loads one product by route id, shows it, and lets the
// user add it to the cart stored in the "cart" cookie.
import React, { useEffect, useState } from 'react';
import { RouteComponentProps, Link } from 'react-router-dom';
import { getFromApi, postToApi } from '../../utils';
import styled from '@emotion/styled';
import unavailable from '../Store/unavailable.png';
import { Button, Header, Message } from 'semantic-ui-react';
import { withCookies, Cookies } from 'react-cookie';

// Route parameters: the product id from the URL.
interface MatchParams {
  id: string;
}

interface Props extends RouteComponentProps<MatchParams> {
  cookies: Cookies;
}

// Styled container for the product layout. It is parameterized on
// MatchParams only so the `id` prop can be passed through below.
const StyleProduct = styled.div<MatchParams>`
  h1, h2, p {
    text-align: center;
    margin: 0 5%;
  }
  h1 {
    margin: 5%;
    margin-bottom:0;
    font-size: 2.25rem;
    background-color: #e6fffe;
    padding: 2.5% 0;
  }
  .description {
    margin: 0 5% 5% 5%;
    font-size: 1.61rem;
    border: 1px solid #333;
    padding: 2.5% 0;
  }
  img {
    width: 75%;
    margin: 2.5% 12.5%;
  }
  button {
    width: 75%;
    margin: 0 12.5% 5% 12.5% !important;
    font-size: 1.5rem !important;
    padding: 2.5% 0;
    border: 1px solid #bbb;
  }
  a {
    color: black;
  }
  .regresar {
    background-color: #fff;
  }
`;

const Product: React.FunctionComponent<Props> = ({ cookies, match }) => {
  // `id: -1` is the sentinel for "not loaded yet" (see the ternary below).
  const [product, setProduct] = useState<any>({ id: -1 });
  // Whether the add-to-cart request succeeded (shows the success Message).
  const [success, setSuccess] = useState(false);

  // Fetch the product whenever the route id changes.
  useEffect(() => {
    getFromApi(`/products/${match.params.id}`).then((res) => {
      console.log(res); // NOTE(review): leftover debug logging
      setProduct(res);
    });
  }, [match.params.id]);

  // Cart id read from the "cart" cookie; presumably set elsewhere — verify.
  const cart = cookies.get('cart');

  // POST the (cart, product) pair; only flips `success` when the API
  // reports no errors.
  const handleAddToCart = () => {
    postToApi('/carts/addProduct', {
      id_carrito: cart,
      id_producto: match.params.id,
    }).then((res) => {
      if (!res.errors) {
        setSuccess(true);
      }
    });
  };

  return (
    <StyleProduct id={match.params.id}>
      {product.id === -1 ? (
        <div>Loading</div>
      ) : (
        <div>
          <h1>{product.nombre}</h1>
          <h2>${product.precio}.00</h2>
          {product.foto === null ? (
            <img src={unavailable} alt="Not available" />
          ) : (
            <img src={product.foto} alt={product.nombre + ' foto'} />
          )}
          <Header as="p" className="description">{product.descripcion}</Header>
          <Button color="teal" onClick={handleAddToCart}>
            Añadir al carrito
          </Button>
          {success && (
            <Message success header="Se ha añadido el producto a tu carrito" />
          )}
          <Link to="/tienda">
            <Button color="blue">Regresar a la tienda</Button>
          </Link>
        </div>
      )}
    </StyleProduct>
  );
};

export default withCookies(Product);
typescript
<reponame>hugoCANNEDDU/Doga_Analytics {"data":{"token":[{"reveal_status":2,"timestamp":"2022-02-22T23:06:00+00:00","owner_id":"tz1fBBHT1oAhWch2W7RukS8V5u6RqpjNraDw","creator_id":"tz1fBBHT1oAhWch2W7RukS8V5u6RqpjNraDw","metadata":{"description":"DOGAMI, Adopt Raise, Earn.","name":"DOGAMI #1402","display_uri":"https://nft-zzz.mypinata.cloud/ipfs/QmZBZkXdTX513V1HLpLYDNCF9nCbyqJtVXvN7f2sxvk5eT","thumbnail_uri":"https://nft-zzz.mypinata.cloud/ipfs/QmSTPgqvkVukhW7m7TM21iNEx5uzE75NSWY8XJJwyNhYcc","artifact_uri":"https://nft-zzz.mypinata.cloud/ipfs/QmSeMxZUm5RRUkxeFWy8oWXqQYVSrkwZ6EbPExwo8FeAMe","decimals":0,"attributes":{"rarity_score":78,"ranking":547,"rarity_tier":{"name":"Gold","__typename":"rarity_tier"},"generation":"Alpha","gender":"Female","breed":{"name":"<NAME>","__typename":"breed"},"fur_color":"Red #8","friendliness":5,"eyes_color":"Orange #3","intelligence":3,"strength":4,"obedience":7,"vitality":7,"secondary_personality":"Curious","bonding_level":1,"primary_personality":"Funny","stats":{"bonding_level_top_pct":0,"breed_pct":8,"eyes_color_pct":3,"friendliness_top_pct":30,"fur_color_pct":0,"gender_pct":50,"generation_pct":778,"intelligence_top_pct":76,"obedience_top_pct":35,"rarity_tier_pct":7,"primary_personality_pct":5,"size_pct":15,"vitality_top_pct":26,"strength_top_pct":46,"secondary_personality_pct":4,"__typename":"attributes_stats"},"__typename":"attributes"},"is_boolean_amount":true,"__typename":"metadata"},"id":1402,"swaps":[{"price":22022022000000,"creator_id":"tz1fBBHT1oAhWch2W7RukS8V5u6RqpjNraDw","timestamp":"2022-02-23T18:18:10+00:00","status":2,"__typename":"swap"}],"__typename":"token"}]}}
json
To see if you're eligible for immigration, click here. In 2013, Forbes magazine ranked Ireland as the best country for business, and since then it has retained its status as country that provides ease of business for companies. The brightest talent from across Europe and the world comes together in Ireland, offering a multinational and multilingual melting pot of skills and employees. Ireland has featured as Site Selection’s ‘Best country in western Europe to invest in.’ Not only that, it consistently ranks in the top 15 most innovative countries in the world by the Global Innovation Index. Let’s take a closer look at why Ireland remains a choice destination for companies. Ireland is home to the world’s leading high-performance companies which include Facebook, Pfizer, Intel, Citi, Huawei, Twitter, Novartis to name a few. The country is also positioning itself to become a world leader in various fields- IT, Big Data, Health Innovation and Cloud Computing. Ireland is a hub for the top 5 global software companies in the world, the top 10 pharmaceutical corporations, and is home to 14 out of the top 15 medical technology companies, 18 of the top financial services companies, and the top 8 industrial automation companies worldwide. Over 1,000 multinational corporations have been expanding their headquarters and facilities in Ireland due to the productivity and success of their Irish operations. The country has a demonstrated track record with some of the world’s biggest and most successful companies across a range of industry sectors, such as Johnson & Johnson, Pfizer, IBM, and HP, which is proof of their confidence and belief in Ireland and its future. The country is also the only English speaking member of the EU following Brexit, with a legal system based on common law principles. This also makes it an ideal hub for organisations seeking a base in Europe. In addition, Ireland offers companies a barrier-free access to over 500 million consumers within Europe. 
Ireland’s favourable tax regime makes it one of the most attractive global investment locations. The tax regime is designed to be competitive and to facilitate innovation and enterprise. With a 12.5% corporate tax rate, it has one of the lowest statutory Corporate Tax Rates in the world. Ireland’s worldwide tax system gives relief for foreign tax credit and offers a 25% Research and Development tax credit to companies. Ireland is a stable, competitive, secure, and pro-business country, and its economy is the fastest growing in the Eurozone. It has a well-established FDI sector generating significant exports across business sectors. This means companies can set up operations swiftly, and with minimum red tape in an open environment. From green field sites and business parks to edgy architectural offices - Ireland offers property solutions that are welcoming to local and international clients and businesses. Under the National Development Plan 2018-2027, Ireland plans to invest €116 billion in public infrastructure and capital works. A significant number of new construction projects are already underway in the country, including office buildings and other commercial properties. Rental prices for offices over here are among the most competitive across Europe. Another key reason for companies investing in Ireland is its highly skilled, educated, young and multicultural population. The country ranks in the top 10 globally for its education system that meets the needs of a competitive economy. This bright and well educated workforce has been a vital factor in attracting foreign direct investment into the country. Moreover, Ireland’s state agencies are responsible for supporting the development of manufacturing and internationally traded services companies. 
They provide funding and support across a wide range – Covid-19 sustainability and recovery funds to companies, as well as business planning and accelerated growth funding to small, medium and large sized industries- be it entrepreneurs and potential start-ups or large companies expanding their activities and growing international sales. Ireland has a long history of achievements in science and technology, and continues to invest in research and technological skills and competencies. It continues to be a centre of excellence for technology as many large technological companies develop their innovation and data centres here. What makes Ireland the destination of choice for incoming and investing companies is evident from the above outlined factors. A third of multinationals in Ireland have had operations in the country for over 20 years, which demonstrates the durability and commitment of these companies to Ireland. Not just large multinationals, Ireland also encourages potential multinationals by helping them forge their future in the country. The experience of working with international business partners has helped the Irish develop an understanding and a significant level of experience which benefits its investors. With this backdrop, international students can certainly take advantage of studying in a place like Ireland- where opportunities and a bright future are within reach.
english
<reponame>sigurasg/ghidra
/* ###
 * IP: GHIDRA
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ghidra.app.plugin.core.functiongraph;

import java.awt.Rectangle;
import java.awt.datatransfer.Transferable;

import org.apache.commons.lang3.exception.ExceptionUtils;

import docking.ActionContext;
import docking.widgets.fieldpanel.Layout;
import docking.widgets.fieldpanel.internal.EmptyLayoutBackgroundColorManager;
import docking.widgets.fieldpanel.internal.LayoutBackgroundColorManager;
import generic.text.TextLayoutGraphics;
import ghidra.app.context.ListingActionContext;
import ghidra.app.plugin.core.clipboard.CodeBrowserClipboardProvider;
import ghidra.app.plugin.core.functiongraph.graph.FunctionGraph;
import ghidra.app.plugin.core.functiongraph.graph.vertex.FGVertex;
import ghidra.app.plugin.core.functiongraph.mvc.FGController;
import ghidra.app.plugin.core.functiongraph.mvc.FGData;
import ghidra.app.util.viewer.listingpanel.ListingModel;
import ghidra.framework.plugintool.PluginTool;
import ghidra.program.model.address.*;
import ghidra.util.Msg;
import ghidra.util.task.TaskMonitor;

/**
 * Clipboard provider for the Function Graph view. Unlike the code browser,
 * the selection here may span multiple graph vertices, each with its own
 * listing model, so text copying walks vertex-by-vertex.
 */
public class FGClipboardProvider extends CodeBrowserClipboardProvider {
	private FGController controller;

	FGClipboardProvider(PluginTool tool, FGController controller) {
		super(tool, controller.getProvider());
		this.controller = controller;
	}

	@Override
	public boolean isValidContext(ActionContext context) {
		// Only listing contexts originating from our own component provider apply.
		if (!(context instanceof ListingActionContext)) {
			return false;
		}
		return context.getComponentProvider() == componentProvider;
	}

	/**
	 * Overridden because we don't have a single listing model from which to copy, but rather
	 * many different ones, depending upon the which vertex contains the selection.
	 */
	@Override
	protected Transferable copyCode(TaskMonitor monitor) {
		try {
			// Collect rendered text for every address in every selected range.
			TextLayoutGraphics g = new TextLayoutGraphics();

			Rectangle rect = new Rectangle(2048, 2048);

			AddressRangeIterator rangeItr = currentSelection.getAddressRanges();
			while (rangeItr.hasNext()) {
				AddressRange curRange = rangeItr.next();
				Address curAddress = curRange.getMinAddress();
				Address maxAddress = curRange.getMaxAddress();

				// Walk the range one layout at a time until the end of the
				// range is passed, the walk returns null, or the user cancels.
				while (!monitor.isCancelled()) {
					if (curAddress != null && curAddress.compareTo(maxAddress) > 0) {
						break;
					}

					curAddress = copyDataForAddress(curAddress, curRange, g, rect);
					if (curAddress == null) {
						break;
					}
				}
			}

			return createStringTransferable(g.getBuffer().toString());
		}
		catch (Exception e) {
			String message = "Copy failed: " + ExceptionUtils.getMessage(e);
			Msg.error(this, message, e);
			tool.setStatusInfo(message, true);
		}

		return null;
	}

	/**
	 * Renders the layout at {@code address} into {@code g} and returns the next
	 * address to visit, or null when the range is exhausted (or the address has
	 * no owning vertex).
	 */
	private Address copyDataForAddress(Address address, AddressRange currentRange,
			TextLayoutGraphics g, Rectangle rectangle) {
		FGData functionGraphData = controller.getFunctionGraphData();
		FunctionGraph functionGraph = functionGraphData.getFunctionGraph();
		FGVertex vertex = functionGraph.getVertexForAddress(address);
		if (vertex == null) {
			return null; // shouldn't happen
		}

		ListingModel listingModel = vertex.getListingModel(address);

		// Add the layout for the present address
		Layout layout = listingModel.getLayout(address, false);
		if (layout != null) {
			LayoutBackgroundColorManager layoutColorMap =
				new EmptyLayoutBackgroundColorManager(PAINT_CONTEXT.getBackground());
			layout.paint(null, g, PAINT_CONTEXT, rectangle, layoutColorMap, null);
			g.flush();
		}

		// Get the next Address and update the page index
		if (address.equals(currentRange.getMaxAddress())) {
			return null;
		}

		Address addressAfter = listingModel.getAddressAfter(address);
		if (addressAfter != null) {
			return addressAfter;
		}

		// A null address could mean that we have reached the end of the listing for the given
		// vertex.  If that is the case, we should look the next address by adding to the current
		// address.  This will allow a future call to this method to get the vertex that contains
		// that address.
		Address nextAddress = null;
		try {
			nextAddress = address.add(layout.getIndexSize());
		}
		catch (AddressOutOfBoundsException oobe) {
			// ignore and give up!
		}
		return nextAddress;
	}
}
java
{"deprecated": "false", "authority-code": "XCE", "identifier": "XCE", "about": "http://publications.europa.eu/resource/authority/language/XCE", "prefLabel": {"@lang": "xce", "#text": "celtiberian"}, "start.use": "1950-05-09"}
json
/**
 * @addtogroup DFNs
 * @{
 */

#include "PointCloudAssemblyExecutor.hpp"
#include <Errors/Assert.hpp>

using namespace PointCloudWrapper;
using namespace PoseWrapper;

namespace CDFF
{
namespace DFN
{
namespace Executors
{

// Executor overloads for the PointCloudAssembly DFN. Each comes in four
// flavors per input set:
//  - pointer inputs / const-pointer output (DFN owns the output storage)
//  - pointer inputs / non-const pointer output (caller owns the storage)
//  - reference inputs with the same two output conventions.
// The "instance creation" variants require the output pointer to be NULL on
// entry and alias the DFN's internal output; the copying variants require a
// valid destination and deep-copy the result into it.

// Assemble two clouds; output aliases the DFN's internal result.
void Execute(PointCloudAssemblyInterface* dfn, PointCloudConstPtr inputFirstCloud, PointCloudConstPtr inputSecondCloud, PointCloudConstPtr& outputAssembledCloud)
	{
	Execute(dfn, *inputFirstCloud, *inputSecondCloud, outputAssembledCloud);
	}

// Assemble two clouds; result is copied into caller-provided storage.
void Execute(PointCloudAssemblyInterface* dfn, PointCloudConstPtr inputFirstCloud, PointCloudConstPtr inputSecondCloud, PointCloudPtr outputAssembledCloud)
	{
	ASSERT(outputAssembledCloud != NULL, "PointCloudAssemblyExecutor, Calling NO instance creation Executor with a NULL pointer");
	Execute(dfn, *inputFirstCloud, *inputSecondCloud, *outputAssembledCloud);
	}

// Reference-input core: runs the DFN and points output at its internal result.
void Execute(PointCloudAssemblyInterface* dfn, const PointCloud& inputFirstCloud, const PointCloud& inputSecondCloud, PointCloudConstPtr& outputAssembledCloud)
	{
	ASSERT( dfn!= NULL, "PointCloudAssemblyExecutor, input dfn is null");
	ASSERT( outputAssembledCloud == NULL, "PointCloudAssemblyExecutor, Calling instance creation executor with a non-NULL pointer");
	dfn->firstPointCloudInput(inputFirstCloud);
	dfn->secondPointCloudInput(inputSecondCloud);
	dfn->process();
	// Alias, not a copy: valid only as long as the DFN's output is.
	outputAssembledCloud = & ( dfn->assembledCloudOutput() );
	}

// Reference-input core: runs the DFN and deep-copies the result out.
void Execute(PointCloudAssemblyInterface* dfn, const PointCloud& inputFirstCloud, const PointCloud& inputSecondCloud, PointCloud& outputAssembledCloud)
	{
	ASSERT( dfn!= NULL, "PointCloudAssemblyExecutor, input dfn is null");
	dfn->firstPointCloudInput(inputFirstCloud);
	dfn->secondPointCloudInput(inputSecondCloud);
	dfn->process();
	Copy( dfn->assembledCloudOutput(), outputAssembledCloud);
	}

// Single-cloud variant with a view center/radius; output aliases DFN storage.
void Execute(PointCloudAssemblyInterface* dfn, PointCloudConstPtr cloud, Pose3DConstPtr viewCenter, float viewRadius, PointCloudConstPtr& outputAssembledCloud)
	{
	Execute(dfn, *cloud, *viewCenter, viewRadius, outputAssembledCloud);
	}

// Single-cloud variant with a view center/radius; result copied to caller storage.
void Execute(PointCloudAssemblyInterface* dfn, PointCloudConstPtr cloud, Pose3DConstPtr viewCenter, float viewRadius, PointCloudPtr outputAssembledCloud)
	{
	ASSERT(outputAssembledCloud != NULL, "PointCloudAssemblyExecutor, Calling NO instance creation Executor with a NULL pointer");
	Execute(dfn, *cloud, *viewCenter, viewRadius, *outputAssembledCloud);
	}

// Reference-input core for the view-center variant; output aliases DFN storage.
void Execute(PointCloudAssemblyInterface* dfn, const PointCloud& cloud, const Pose3D& viewCenter, float viewRadius, PointCloudConstPtr& outputAssembledCloud)
	{
	ASSERT( dfn!= NULL, "PointCloudAssemblyExecutor, input dfn is null");
	ASSERT( outputAssembledCloud == NULL, "PointCloudAssemblyExecutor, Calling instance creation executor with a non-NULL pointer");
	dfn->firstPointCloudInput(cloud);
	dfn->viewCenterInput(viewCenter);
	dfn->viewRadiusInput(viewRadius);
	dfn->process();
	// Alias, not a copy: valid only as long as the DFN's output is.
	outputAssembledCloud = & ( dfn->assembledCloudOutput() );
	}

// Reference-input core for the view-center variant; result deep-copied out.
void Execute(PointCloudAssemblyInterface* dfn, const PointCloud& cloud, const Pose3D& viewCenter, float viewRadius, PointCloud& outputAssembledCloud)
	{
	ASSERT( dfn!= NULL, "PointCloudAssemblyExecutor, input dfn is null");
	dfn->firstPointCloudInput(cloud);
	dfn->viewCenterInput(viewCenter);
	dfn->viewRadiusInput(viewRadius);
	dfn->process();
	Copy( dfn->assembledCloudOutput(), outputAssembledCloud);
	}

}
}
}

/** @} */
cpp
{ "type": "entry", "author": { "type": "card", "name": "<NAME>", "url": "http://notsorelevant.com", "photo": "https://www.gravatar.com/avatar/96cbbd21f8d60106ac4ab1f93d7aa7a8?s=256&d=404" }, "url": "http://notsorelevant.com", "published": "2007-04-06T22:27:27-07:00", "content": { "html": "Your plugin seems to work for most people, though I have encountered problem while testing it on my blog. I can't access any article in IE 7, I just see a blank page.\r\nI am running a K2 derivative, http://www.obharath.net/blog/2006/12/17/3-column-k2-093/, on WordPress 2.1.2. I have disabled AJAX commenting but it didn't work either.\r\nDo you have any idea what might be the problem?\r\n\r\nThanks\r\nCarsten", "text": "" }, "wm-id": "2228", "wm-property": "in-reply-to", "in-reply-to": "https://willnorris.com/2007/02/new-wpopenid-r13/" }
json
<filename>kiba-backend/jobdata/4879.json {"id":4879,"group":"27302100","joblink":true,"name":"Robotereinsteller/in","potential":100,"skills":[{"id":60714,"skill":"Maschineneinrichtung, Anlageneinrichtung","replaceable":true},{"id":60715,"skill":"Maschinenführung, Anlagenführung, -bedienung","replaceable":true},{"id":60731,"skill":"Roboter- und Handhabungstechnik","replaceable":true},{"id":61650,"skill":"Mechatronik","replaceable":true}]}
json
The study of society: nature and limitations; Individual and group/society, community, social interaction, socialisation and social consciousness, conformity, deviance, culture and civilisation; Institutions: social, economic and political; Social structure: social stratification and inequality; Social change: forms and processes; Perspectives on the study of society: Comte, Spencer, Durkheim, Weber and Marx. A. Giddens, Sociology, Polity, 2010.^$^C. Jenks, ed., Core Sociological Dichotomies, Sage, 1998.^$^ A. Giddens, Capitalism and Modern Social Theory, Cambridge University Press, 1996.^$^D. Gupta, ed., Social Stratification, Oxford University Press, 1992.^$^ V. Das, Handbook of Indian Sociology, Oxford University Press, 2008.
english
{"title": "Sensor synaesthesia: touch in motion, and motion in touch.", "fields": ["computer vision", "reverse perspective", "zoom", "gesture", "mobile device"], "abstract": "We explore techniques for hand-held devices that leverage the multimodal combination of touch and motion. Hybrid touch + motion gestures exhibit interaction properties that combine the strengths of multi-touch with those of motion-sensing. This affords touch-enhanced motion gestures, such as one-handed zooming by holding one's thumb on the screen while tilting a device. We also consider the reverse perspective, that of motion-enhanced touch, which uses motion sensors to probe what happens underneath the surface of touch. Touching the screen induces secondary accelerations and angular velocities in the sensors. For example, our prototype uses motion sensors to distinguish gently swiping a finger on the screen from 'Sdrags with a hard onset' - to enable more expressive touch interactions.", "citation": "Citations (154)", "year": "2011", "departments": ["Microsoft", "University of Maryland, College Park"], "conf": "chi", "authors": ["<NAME>.....http://dblp.org/pers/hd/h/Hinckley:Ken", "<NAME>.....http://dblp.org/pers/hd/s/Song:Hyunyoung"], "pages": 10}
json
What do you think? "It's going to fall, oh man, half the format codes have been cracked already. No way. I mean no fucking way! Do you know what kind of encryption I used for that thing? Eighty dimensional geometry. Eighty! That should take like a century to break, if you're lucky." He seemed more angry than worried by the event. Rob was starting to get a real bad feeling about the mission. "So what can crack that kind of encryption?" The tech became very still. "The SI." His gaze found a ceiling camera that was lined up on his console, and he looked straight into the tiny lens. "Oh shit."
english
import GetValue from '../../../utils/object/GetValue.js'; import CSVParser from 'papaparse/papaparse.min.js'; import DefaultConvertFn from '../../../utils/string/TypeConvert.js'; var ParseCSV = function (csvString, config) { var delimiter = GetValue(config, 'delimiter', ','); var arr = CSVParser.parse(csvString, { header: true, delimiter: delimiter, }).data; var questionType = GetValue(config, 'types.question', 'q'); var optionType = GetValue(config, 'types.option', ''); var convertFn = GetValue(config, 'convert', true); if (convertFn === true) { convertFn = DefaultConvertFn; } var items = []; var rowObj, rowType, item, option; for (var i = 0, cnt = arr.length; i < cnt; i++) { rowObj = arr[i]; rowType = rowObj.type; delete rowObj.type; if (rowType === questionType) { item = rowObj; if (item.key === '') { delete item.key; } convert(item, convertFn); item.options = []; items.push(item); } else if (rowType === optionType) { if (item) { option = rowObj; if (option.key === '') { delete option.key; } convert(option, convertFn); item.options.push(option); } else { // Error } } } return items; }; var convert = function (item, convertFn) { if (!convertFn) { return item; } for (var key in item) { item[key] = convertFn(item[key], key); } return item; } export default ParseCSV;
javascript
<filename>services/trader.py from bitso_requester import get_account_balance, get_tricker, place_order, get_last_transactions def make_order(currency, action): balances = get_account_balance() action = action.upper() if action == 'BUY': fiat_currency = filter(lambda x: x["currency"] == "mxn", balances)[0] cripto_currency = filter(lambda x: x["currency"] == currency, balances)[0] if currency in map(lambda x: x["currency"], balances) else {'available': 0} # print('## fiat currency: ', fiat_currency) balance = (float(fiat_currency['available']), float(cripto_currency['available'])) print("user account currency balance: ", balance) if balance[0] > 0 and balance[1] <= 0: # gettin' the last trade prices for that asset! prices = get_last_transactions(currency + "_mxn") if prices: # TRyin' to apply saffe rules budget = 2000 #balance/(2 * 4) # calculation the amount of crypto per budget. amount = float("{:.5f}".format(budget/prices[1])) #balance/(2 * 4) #Getting the highest price to buy price = prices[0] + 1 print("$$$$$$ the order to ", action, " will be place with price ", price, " and amount of cryptos", amount, "\n") # placing the order! return place_order(amount, price, book=currency+"_mxn" ,side=action.lower()) elif action == 'SELL': cripto_currency = filter(lambda x: x["currency"] == currency, balances)[0] print('## cripto currency: ', cripto_currency) if cripto_currency: balance = float(cripto_currency['available']) print("user account currency balance: ", balance) if balance > 0: # gettin' the last trade prices for that asset! prices = get_last_transactions(currency + "_mxn") if prices: # Sell all the cryptos mutherfucker! amount = balance # Getting the lowest price to sell :/ price = prices[0] - 1 print("$$$$$$ the order to ", action, " will be place with price ", price, " and amount of cryptos", amount, "\n") return place_order(amount, price, book=currency+"_mxn" ,side=action.lower()) else: return Exception('No crypto assets available for transaction!!')
python
{ "occupation": "Adventurer, Student", "base": "formerly <NAME>'s mind, Utopia, San Francisco Bay, California; Westcliffe, Colorado; \"No-Time\"; <NAME>'s Mutant Research Center, Muir Island, off the coast of Scotland; Haifa; Paris, France" }
json
{ "id": 7989, "cites": 17, "cited_by": 12, "reference": [ "<NAME>. (1998), \u00c3\u0082\u00e2\u0080\u009cEstimating the Labor Market Impact of Voluntary Military Service Using Social Security Data on Military Applicants,\u00c3\u0082\u00e2\u0080\u009d Econometrica , 66:2, 249-288.", "<NAME>., and <NAME>. (1995), \u00c3\u0082\u00e2\u0080\u009cTwo-Stage Least Squares Estimation of Average Causal Effects in Models with Variable Treatment Intensity,\u00c3\u0082\u00e2\u0080\u009d Journal of the American Statistical Association , 90, 431-442.", "<NAME>., <NAME>, and <NAME>. (1996), \u00c3\u0082\u00e2\u0080\u009cIdentification of Causal Effects Using Instrumental Variables,\u00c3\u0082\u00e2\u0080\u009d Journal of the American Statistical Association , 91, 444-455.", "<NAME>., and <NAME>. (1991), \u00c3\u0082\u00e2\u0080\u009cDoes Compulsory School Attendance Affect Schooling and Earnings,\u00c3\u0082\u00e2\u0080\u009d Quarterly Journal of Economics , 106, 979-1014.", "<NAME>. (1975), Human Capital. Chicago: University of Chicago Press.", "Center for Human Resources Research, (1994), \u00c3\u0082\u00e2\u0080\u009cNLS Users Guide,\u00c3\u0082\u00e2\u0080\u009d Columbus: Ohio State University.", "<NAME>., and <NAME>. (2000) \u00c3\u0082\u00e2\u0080\u009cBorrowing Constraints and the Returns to Schooling,\u00c3\u0082\u00e2\u0080\u009d NBER Working Paper No. 7761.", "<NAME>. (1995a), \u00c3\u0082\u00e2\u0080\u009cEarnings, Schooling, and Ability Revisited,\u00c3\u0082\u00e2\u0080\u009d Research in Labor Economics , 14, 23-48.", "Schooling,\u00c3\u0082\u00e2\u0080\u009d in Aspects of Labour Market Behaviour: Essays in Honor of <NAME> , eds. L. N. Christofides et al. Toronto: University of Toronto Press. 201-221. 19 ----- (1999), \u00c3\u0082\u00e2\u0080\u009cThe Causal Effect of Education on Earnings,\u00c3\u0082\u00e2\u0080\u009d in Handbook of Labor Economics, Volume 3A, eds. <NAME> and <NAME>. Amsterdam: North-Holland, 1801-1863.", "<NAME>. 
(1977), \u00c3\u0082\u00e2\u0080\u009cEstimating the Returns to Schooling: Some Econometric Problems.\u00c3\u0082\u00e2\u0080\u009d Econometrica, 45, 1-22.", "<NAME>., and <NAME>. (1998) \u00c3\u0082\u00e2\u0080\u009cInstrumental Variables Methods for the Correlated Random Coefficient Model,\u00c3\u0082\u00e2\u0080\u009d Journal of Human Resources, 33:4, 974-1002.", "<NAME>. (1997) \u00c3\u0082\u00e2\u0080\u009cHow the Changing Market Structure of U.S. Higher Education Explains College Tuition,\u00c3\u0082\u00e2\u0080\u009d NBER Working Paper No. 6323.", "<NAME>., and <NAME>. (1994) \u00c3\u0082\u00e2\u0080\u009cIdentification and Estimation of Local Average Treatment Effects,\u00c3\u0082\u00e2\u0080\u009d Econometrica , 62, 467-476.", "<NAME>. (1999), The Price of Admission, Washington, D.C.: The Brookings Institution.", "<NAME>. (1995), \u00c3\u0082\u00e2\u0080\u009cNatural and Quasi-experiments in Economics,\u00c3\u0082\u00e2\u0080\u009d Journal of Business and Economic Statistics , 13:2, 151-161.", "<NAME>. (1986), \u00c3\u0082\u00e2\u0080\u009cWage Determinants: A Survey and Reinterpretation of Human Capital Earnings Functions,\u00c3\u0082\u00e2\u0080\u009d in Handbook of Labor Economics, Volume 1 , eds. <NAME> and <NAME>, Amsterdam: North-Holland, 525-602.", "<NAME>. (1997), \u00c3\u0082\u00e2\u0080\u009cOn two stage least squares estimation of the average treatment effect in a random coefficients model,\u00c3\u0082\u00e2\u0080\u009d Economic Letters , 56, 129-133." ] }
json
Awesome GameLift [![Awesome](https://cdn.rawgit.com/sindresorhus/awesome/d7305f38d29fed78fa85652e3a63e154dd8e8829/media/badge.svg)](https://github.com/sindresorhus/awesome) =============== > A curated list of awesome packages, articles, and other cool resources from the GameLift community. > [GameLift](https://aws.amazon.com/gamelift/) provides solutions for hosting session-based multiplayer game servers in the cloud, including a fully managed service for deploying, operating, and scaling game servers, based on AWS global computing infrastructure. You might also like [Awesome AWS](https://github.com/donnemartin/awesome-aws). ## General resources - [Official site](https://aws.amazon.com/gamelift/) - [SDKs](https://aws.amazon.com/gamelift/getting-started/#Developer_Resources_and_Documentation) - [Documentation](https://docs.aws.amazon.com/gamelift/index.html) - Twitter accounts: [@AmazonGameLift](https://twitter.com/AmazonGameLift) - [@AWSGameTech](https://twitter.com/AWSGameTech) - YouTube channels: [AWS Online Tech Talks](https://www.youtube.com/channel/UCT-nPlVzJI-ccQXlxjSvJmw) - [AWS Game Tech](https://www.youtube.com/channel/UCQH55cT_em5E8XU2J8erMKA) - [Blog](https://aws.amazon.com/blogs/gametech/category/game-development/amazon-gamelift/) - [Forums](https://forums.awsgametech.com/c/amazon-gamelift/7) - [GitHub topic](https://github.com/topics/gamelift) - [Stack Overflow tag [amazon-gamelift]](https://stackoverflow.com/questions/tagged/amazon-gamelift) - [Other resources](#resources) ## Solutions ### GameLift Realtime Servers (NodeJS) - [node-gameloop](https://github.com/tangmi/node-gameloop) - A game loop for NodeJS applications, uses a combination of setTimeout and setImmediate to achieve accurate update ticks with minimal CPU usage. - [Unofficial Typescript definitions for GameLift realtime server API](https://github.com/jcular/unofficial-gamelift-realtime-server-api) - Make Realtime interface work with Typescript. 
### Integrations - [Defold extension-gamelift](https://github.com/defold/extension-gamelift) - GameLift Amazon GameLift extension for the Defold game engine. Run Defold on the server with GameLift SDK support. - [GomeLift](https://github.com/neguse/gomelift) - Unofficial GameLift Server SDK in Go. ### Devops - [amazon-gamelift-remote-plus](https://github.com/aws-samples/amazon-gamelift-remote-plus) - A tool creates remote connections to access fleet instances using Secure Shell (SSH) for fleets running Linux, and Remote Desktop Protocol (RDP) for fleets running Windows. - [fleetiq-adapter-for-agones](https://github.com/awslabs/fleetiq-adapter-for-agones) - Allows you to run containerized game servers on Spot instances while decreasing the likelihood of Spot interruptions by using Agones and Gamelift FleetIQ. ## Resources ### Articles #### Official articles - [Creating a Battle Royale Game Using Unity and Amazon GameLift](https://aws.amazon.com/blogs/gametech/creating-a-battle-royale-game-using-unity-and-amazon-gamelift/) ([Source Repo](https://github.com/aws-samples/amazon-gamelift-ultrafrogroyale-large-match-sample)) #### Other articles ### Samples #### Official samples - [amazon-gamelift-largematch-sample-ultrafrogroyale](https://github.com/aws-samples/amazon-gamelift-ultrafrogroyale-large-match-sample) - A small game built with Unity to demonstrate how to use the new Amazon GameLift large match features. - [amazon-gamelift-unity](https://github.com/aws-samples/amazon-gamelift-unity) - A sample code shows how to set up a basic GameLift server and client for games using the Unity Game Engine. - [aws-gamelift-sample (with FlexMatch)](https://github.com/aws-samples/aws-gamelift-sample) - A sample Gomoku board game project using Amazon GameLift and AWS serverless services including DynamoDB, Lambda, SQS, S3, and so on. 
- [Game Server Hosting on Amazon Elastic Container Service with Amazon GameLift FleetIQ](https://github.com/aws-samples/amazon-gamelift-fleetiq-with-amazon-ecs) - A sample solution on how to scale a fleet of game servers on Elastic Container Service and match players to game sessions using a Serverless backend. Game Sessions are managed by Amazon GameLift FleetIQ. All resources are deployed with Infrastructure as Code using CloudFormation, Serverless Application Model, Docker and bash scripts. - [GameLift Example for Unity with Serverless Backend](https://github.com/aws-samples/aws-gamelift-and-serverless-backend-sample) - A simple 3D game GameLift example with a Serverless backend service designed especially for getting started with MacOS and mobile development and leveraging deployment automation and Infrastructure as Code. - [megafrogRace-gameLift-realtime-server-sample](https://github.com/aws-samples/megafrograce-gamelift-realtime-servers-sample) - A sample 2D racing game using Amazon GameLift (Realtime Servers) and AWS services including AWS Lambda and Amazon Cognito. - [Persistent Game Server with Amazon GameLift](https://github.com/aws-samples/amazon-gamelift-persistent-sample) - A sample includes how to implement persistent game server based on Amazon GameLift. #### Other samples - [Unity GameLift/Mirror Sample](https://github.com/joaoborks/unity-gamelift-mirror-sample) - Features a working multiplayer sample using AWS GameLift and Mirror on Unity. Supports IL2CPP and mobile devices. ### Videos #### Official videos - [Dedicated Servers with Amazon GameLift AWS Game Tech Series](https://www.youtube.com/playlist?list=PLuGWzrvNze7KQO5mREeae2eIR8rJws0At). - [Getting Started with Amazon GameLift FleetIQ](https://www.youtube.com/watch?v=p07ueG4A3qA) by AWS Online Tech Talks. #### Other videos - [Better together: running containerized game servers with Agones and GameLift FleetIQ](https://www.youtube.com/watch?v=RvBjgKME21U). 
- [Unity + Amazon GameLift RealTime Servers Series](https://www.youtube.com/playlist?list=PLOtt3_R1rR9VMkqZvMF-39TeKrbpKZocW) by [Battery Acid](https://www.youtube.com/channel/UCtTEc7bBzP7Tq5U2Jzf5lPw). - [Unity + GameLift Custom Server Series](https://www.youtube.com/playlist?list=PLOtt3_R1rR9XlvhJZXtiHQuIkisdXIxlc) by [Battery Acid](https://www.youtube.com/channel/UCtTEc7bBzP7Tq5U2Jzf5lPw). ### Made with GameLift - [For Honor](https://aws.amazon.com/blogs/gametech/for-honor-friday-the-13th-the-game-move-from-p2p-to-the-cloud-to-improve-player-experience/) by Ubisoft. - [Ninjala](https://aws.amazon.com/blogs/gametech/amazon-gamelift-announces-general-availability-of-six-new-regions/) by GungHo. ## Contribute Contributions are always welcome! Please read the [contribution guidelines](./CONTRIBUTING.md) first. ## License The project is licensed under [BSD 3-Clause License](./LICENSE). AWS and Amazon GameLift are trademarks and brands of Amazon Technologies. > DISCLAIMER: Awesome-GameLift is an open-source project, not an official project provided by Amazon Technologies.
markdown
<reponame>aaujayasena/identy-apps { "_args": [ [ "react-password-strength-bar@0.3.2", "/home/achini/projects/cloudtest/identity-apps/31122020-clone/identity-apps" ] ], "_from": "react-password-strength-bar@0.3.2", "_id": "react-password-strength-bar@0.3.2", "_inBundle": false, "_integrity": "<KEY> "_location": "/react-password-strength-bar", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, "raw": "react-password-strength-bar@0.3.2", "name": "react-password-strength-bar", "escapedName": "react-password-strength-bar", "rawSpec": "0.3.2", "saveSpec": null, "fetchSpec": "0.3.2" }, "_requiredBy": [ "/" ], "_resolved": "https://registry.npmjs.org/react-password-strength-bar/-/react-password-strength-bar-0.3.2.tgz", "_spec": "0.3.2", "_where": "/home/achini/projects/cloudtest/identity-apps/31122020-clone/identity-apps", "author": { "name": "<EMAIL>" }, "bugs": { "url": "https://github.com/lannex/react-password-strength-bar/issues" }, "dependencies": { "zxcvbn": "4.4.2" }, "description": "A React component that displays the password strength bar", "devDependencies": { "@types/enzyme": "3.10.5", "@types/enzyme-adapter-react-16": "1.0.6", "@types/eslint": "6.1.8", "@types/eslint-plugin-prettier": "2.2.0", "@types/jest": "25.1.3", "@types/prettier": "1.19.0", "@types/react": "16.9.21", "@types/react-dom": "16.9.5", "@types/zxcvbn": "4.4.0", "@typescript-eslint/eslint-plugin": "2.20.0", "@typescript-eslint/parser": "2.20.0", "coveralls": "3.0.9", "enzyme": "3.11.0", "enzyme-adapter-react-16": "1.15.2", "eslint": "6.8.0", "eslint-config-airbnb": "18.0.1", "eslint-config-prettier": "6.10.0", "eslint-plugin-import": "2.20.1", "eslint-plugin-jsx-a11y": "6.2.3", "eslint-plugin-prettier": "3.1.2", "eslint-plugin-react": "7.18.3", "eslint-plugin-react-hooks": "2.4.0", "jest": "25.1.0", "prettier": "1.19.1", "react": "16.12.0", "react-dom": "16.12.0", "rollup": "1.31.1", "rollup-plugin-commonjs": "10.1.0", "rollup-plugin-node-resolve": "5.2.0", 
"rollup-plugin-peer-deps-external": "2.2.2", "rollup-plugin-replace": "2.2.0", "rollup-plugin-typescript2": "0.26.0", "rollup-plugin-uglify": "6.0.4", "ts-jest": "25.2.1", "typescript": "3.7.5" }, "homepage": "https://github.com/lannex/react-password-strength-bar#readme", "keywords": [ "react", "password", "strength", "bar", "zxcvbn" ], "license": "MIT", "main": "dist/index.js", "name": "react-password-strength-bar", "peerDependencies": { "react": "16.8.6", "react-dom": "16.8.6" }, "repository": { "type": "git", "url": "git+https://github.com/lannex/react-password-strength-bar.git" }, "scripts": { "build": "NODE_ENV=production rollup -c --environment INCLUDE_DEPS,BUILD:production", "dev": "rollup -cw", "start": "npm run build", "test": "jest --coverage --coverageReporters=text-lcov | coveralls" }, "version": "0.3.2" }
json
<reponame>c089/expect-redux { "name": "redux-obversable-example", "version": "1.0.0", "description": "A simple example of how to use expect-redux", "main": "index.js", "scripts": { "test": "jest" }, "author": "<NAME> <<EMAIL>>", "license": "MIT", "dependencies": { "redux": "^4.0.0", "redux-observable": "^1.0.0", "rxjs": "^6.3.3" }, "devDependencies": { "jest": "^23.6.0" } }
json
const uuidv1 = require('uuid/v1') const { REACTIONS, FEEDBACK } = require('../src/actions') const { getNormalizedData, PostSchema } = require('../src/utils/dataSchema') const definedUsers = { goldXFive: { id: uuidv1(), profileName: '金乘五', profileLink: 'https://www.facebook.com/takeshikaneshirofanspage/', profileImg: 'goldxfive.png', isVerified: false }, theMazu: { id: uuidv1(), profileName: '媽祖', profileLink: 'https://www.facebook.com/themazhou/', profileImg: 'mazu.png', isVerified: false }, terryGoodTiming: { id: uuidv1(), profileName: '鍋苔冥', profileLink: 'https://www.facebook.com/TerryGou1018/', profileImg: 'terry.png', isVerified: true }, dingDing: { id: uuidv1(), profileName: '叮守鐘', profileLink: 'https://www.youtube.com/watch?v=_97bLScvHWs', profileImg: 'dingding.png', isVerified: false }, toolMan: { id: uuidv1(), profileName: '台灣工具伯 汪進坪', profileLink: 'https://www.facebook.com/jingping.tw/', profileImg: 'toolman.png', isVerified: true }, english: { id: uuidv1(), profileName: '菜應蚊', profileLink: 'https://www.facebook.com/tsaiingwen/', profileImg: 'english.png', isVerified: true }, koreanFish: { id: uuidv1(), profileName: '韓國魚', profileLink: 'https://www.facebook.com/twherohan/', profileImg: 'korean-fish.png', isVerified: true }, careWheelEveryday: { id: uuidv1(), profileName: '每天關心愛情摩天輪的興建狀況', profileLink: 'https://www.facebook.com/CareLoveFerrisWheelEveryday/', profileImg: 'wheel.png', isVerified: false }, universityFoundField: { id: uuidv1(), profileName: '找到田大學', profileLink: '#', profileImg: 'anonymous-university.png', isVerified: false }, sparkJoy: { id: uuidv1(), profileName: '尛理惠的整理魔法', profileLink: '#', profileImg: 'sparkJoy.jpg', isVerified: true } } const definedReactors = [ definedUsers.terryGoodTiming, definedUsers.koreanFish, definedUsers.universityFoundField, definedUsers.english, definedUsers.toolMan, definedUsers.dingDing ] /** * Get user object for reactions with the following properties: * @prop {id} * @prop {profileName} * @prop {profileLink} 
* @prop {profileImg} * @param {nameId} nameId * An id to distinguish from users, * will append to @prop {profileName}. * @param {gender} gender * Choose an img represent gender, `MALE` if ommited. */ const getReactor = (nameId, gender = 'MALE') => { const user = nameId < definedReactors.length ? definedReactors[nameId] : getFakeUser(nameId, gender) return user } /** * Get fake user object with the following properties: * @prop {id} * @prop {profileName} * @prop {profileLink} * @prop {profileImg} * @param {nameId} nameId * An id to distinguish from users, * will append to @prop {profileName}. * @param {gender} gender * Choose an img represent gender, `MALE` if ommited. */ const getFakeUser = (nameId, gender = 'MALE') => { const maleUserImg = 'anonymous-male.png' const femaleUserImg = 'anonymous-female.png' const userImg = gender === 'MALE' ? maleUserImg : femaleUserImg return { id: uuidv1(), profileName: `假帳號${nameId}`, profileLink: '#', profileImg: userImg, isVerified: false } } const createCommentReactions = ( commentId, commentReacts, commentReactsTotal = 200, ratio = [3, 2, 1] ) => { if (typeof commentId !== 'string') { throw new Error('Comment id must be string.') } if (ratio.length > 6 || ratio.length < 1) { throw new Error('Ratio must be 1 to 6 numbers.') } if (ratio.filter(r => typeof r !== 'number').length > 0) { throw new Error('Array of ratio must be type of number.') } // Calculate reations feeling distribution by ratio // If commentReactsTotal < ratioTotal, ratio will be [1] for full of Likes const ratioTotal = ratio.reduce((r, acc) => r + acc) const ratioAvailable = commentReactsTotal >= ratioTotal const divisorLike = ratioAvailable ? ratioTotal / ratio[0] : 1 const divisorHaha = ratioAvailable && ratio[1] ? ratioTotal / ratio[1] : 0 const divisorLove = ratioAvailable && ratio[2] ? ratioTotal / ratio[2] : 0 const divisorWow = ratioAvailable && ratio[3] ? ratioTotal / ratio[3] : 0 const divisorSad = ratioAvailable && ratio[4] ? 
ratioTotal / ratio[4] : 0 const divisorAnger = ratioAvailable && ratio[5] ? ratioTotal / ratio[5] : 0 const totalLikes = divisorLike === 0 ? 0 : Math.ceil(commentReactsTotal / divisorLike) const totalHahas = divisorHaha === 0 ? 0 : Math.ceil(commentReactsTotal / divisorHaha) const totalLoves = divisorLove === 0 ? 0 : Math.ceil(commentReactsTotal / divisorLove) const totalWows = divisorWow === 0 ? 0 : Math.ceil(commentReactsTotal / divisorWow) const totalSads = divisorSad === 0 ? 0 : Math.ceil(commentReactsTotal / divisorSad) const totalAngers = divisorAnger === 0 ? 0 : Math.ceil(commentReactsTotal / divisorAnger) for (let i = 0; i < commentReactsTotal; i++) { if (i < totalLikes) { commentReacts.push({ id: uuidv1(), user: getFakeUser(i), feeling: REACTIONS.LIKE, targetId: commentId }) } else if (i < totalLikes + totalHahas) { commentReacts.push({ id: uuidv1(), user: getFakeUser(i), feeling: REACTIONS.HAHA, targetId: commentId }) } else if (i < totalLikes + totalHahas + totalLoves) { commentReacts.push({ id: uuidv1(), user: getFakeUser(i), feeling: REACTIONS.LOVE, targetId: commentId }) } else if (i < totalLikes + totalHahas + totalLoves + totalWows) { commentReacts.push({ id: uuidv1(), user: getFakeUser(i), feeling: REACTIONS.WOW, targetId: commentId }) } else if ( i < totalLikes + totalHahas + totalLoves + totalWows + totalSads ) { commentReacts.push({ id: uuidv1(), user: getFakeUser(i), feeling: REACTIONS.SAD, targetId: commentId }) } else if ( i < totalLikes + totalHahas + totalLoves + totalWows + totalSads + totalAngers ) { commentReacts.push({ id: uuidv1(), user: getFakeUser(i), feeling: REACTIONS.ANGRY, targetId: commentId }) } } } /** * create fake reactions with default ratio of * 1/2 of Likes, 1/3 of Hahas and 1/6 of Loves * if @param {ratio} ratio was omitted. * @param {totalReactions} totalReactions * How many comments to make. 
* @param {ratio} ratio * How to distribute the ratio of reaction type, * accept only an array of 1 to 6 numbers, * and ignore the omitted numbers. * eg. `[3,2,1]` will distribute the ratio for * 3/(3+2+1) of `Like`, 2/(3+2+1) of `Haha`, * 1/(3+2+1) of `Love` and empty ratio for the rest of types. * eg2. `[1]` will distribute the ratio for full of `Like`. * eg3. `[0,0,0,0,0,1]` will distribute the ratio for full of `Anger`. * eg4. `[0,0,0,0,1]` will distribute the ratio for full of `Sad`. * @returns {reactObj} * An object contains th following properties: * @prop {reacts} * An array of objects contains the following properties: * @prop {name} * The user's profile name * @prop {feeling} * The react's feeling: `Like`, `Haha`, `Love`, * `Wow`, `Sad`, `Anger` * @prop {commentIds} * An array of @prop {commentId}, type is string. * For @function createComments . */ const createReactions = (totalReactions, ratio = [3, 2, 1]) => { if (totalReactions < 1) { throw new Error('Number of total reactions at least to be 1.') } if (ratio.length > 6 || ratio.length < 1) { throw new Error('Ratio must be 1 to 6 numbers.') } if (ratio.filter(r => typeof r !== 'number').length > 0) { throw new Error('Array of ratio must be type of number.') } // Calculate reations feeling distribution by ratio // If totalReactions < ratioTotal, ratio will be [1] for full of Likes const ratioTotal = ratio.reduce((r, acc) => r + acc) const ratioAvailable = totalReactions >= ratioTotal const divisorLike = ratioAvailable ? ratioTotal / ratio[0] : 1 const divisorHaha = ratioAvailable && ratio[1] ? ratioTotal / ratio[1] : 0 const divisorLove = ratioAvailable && ratio[2] ? ratioTotal / ratio[2] : 0 const divisorWow = ratioAvailable && ratio[3] ? ratioTotal / ratio[3] : 0 const divisorSad = ratioAvailable && ratio[4] ? ratioTotal / ratio[4] : 0 const divisorAnger = ratioAvailable && ratio[5] ? ratioTotal / ratio[5] : 0 const totalLikes = divisorLike === 0 ? 
0 : Math.ceil(totalReactions / divisorLike) const totalHahas = divisorHaha === 0 ? 0 : Math.ceil(totalReactions / divisorHaha) const totalLoves = divisorLove === 0 ? 0 : Math.ceil(totalReactions / divisorLove) const totalWows = divisorWow === 0 ? 0 : Math.ceil(totalReactions / divisorWow) const totalSads = divisorSad === 0 ? 0 : Math.ceil(totalReactions / divisorSad) const totalAngers = divisorAnger === 0 ? 0 : Math.ceil(totalReactions / divisorAnger) // Add reactions as many as totalReactions // And the first n reactions is pre-defined reactors // (n = length of pre-defined reactors) // Or (totalReactions + commentReactsTotal) reactions for comments // if totalComments > 0 const reacts = [] for (let i = 0; i < totalReactions; i++) { if (i < totalLikes) { reacts.push({ id: uuidv1(), user: getReactor(i), feeling: REACTIONS.LIKE, targetId: FEEDBACK.TARGET }) } else if (i < totalLikes + totalHahas) { reacts.push({ id: uuidv1(), user: getReactor(i), feeling: REACTIONS.HAHA, targetId: FEEDBACK.TARGET }) } else if (i < totalLikes + totalHahas + totalLoves) { reacts.push({ id: uuidv1(), user: getReactor(i), feeling: REACTIONS.LOVE, targetId: FEEDBACK.TARGET }) } else if (i < totalLikes + totalHahas + totalLoves + totalWows) { reacts.push({ id: uuidv1(), user: getReactor(i), feeling: REACTIONS.WOW, targetId: FEEDBACK.TARGET }) } else if ( i < totalLikes + totalHahas + totalLoves + totalWows + totalSads ) { reacts.push({ id: uuidv1(), user: getReactor(i), feeling: REACTIONS.SAD, targetId: FEEDBACK.TARGET }) } else if ( i < totalLikes + totalHahas + totalLoves + totalWows + totalSads + totalAngers ) { reacts.push({ id: uuidv1(), user: getReactor(i), feeling: REACTIONS.ANGRY, targetId: FEEDBACK.TARGET }) } } return reacts } /** * create fake comments. * @param {commentIds} commentIds * How many comments to make, based on output of * @function createReactions . 
* @returns {comments} * An array of objects contains the following properties: * @prop {name} * The user's profile name */ const createComments = commentsTotal => { const commentReacts = [] const comments = [ { id: uuidv1(), user: definedUsers.dingDing, saying: '可以托夢讓我重選台北市長嗎?', time: '4天', targetId: FEEDBACK.TARGET, reactId: uuidv1(), isHidden: false }, { id: uuidv1(), user: definedUsers.toolMan, saying: '這個我想,要查證比較難啦', time: '5天', targetId: FEEDBACK.TARGET, reactId: uuidv1(), isHidden: false }, { id: uuidv1(), user: definedUsers.english, saying: '我也這麼覺得', time: '3天', targetId: FEEDBACK.TARGET, reactId: uuidv1(), isHidden: false }, { id: uuidv1(), user: definedUsers.terryGoodTiming, saying: '謝謝樓主托夢,三樓的民主不能當飯吃!', time: '3天', targetId: FEEDBACK.TARGET, reactId: uuidv1(), isHidden: false }, { id: uuidv1(), user: definedUsers.koreanFish, saying: '樓上為什麼不考慮吃個包子呢?', time: '3天', targetId: FEEDBACK.TARGET, reactId: uuidv1(), isHidden: false }, { id: uuidv1(), user: definedUsers.careWheelEveryday, saying: '五樓,我快等不及了', time: '5天', targetId: FEEDBACK.TARGET, reactId: uuidv1(), isHidden: false }, { id: uuidv1(), user: definedUsers.universityFoundField, saying: '五樓要不要藉這個機會在神明的面前澄清一下?', attachMedia: 'wvWFAMT.jpg', mediaType: 'pic', time: '4天', targetId: FEEDBACK.TARGET, reactId: uuidv1(), isHidden: false }, { id: uuidv1(), user: definedUsers.sparkJoy, saying: `臺灣的碰有打家好~ 今天要來教打家年後清理臉書版面的小妙招分享 1. 點進去五樓的粉絲團 2. 啊~原來XXX和其他 10 位朋友都說這個讚 3. 
果斷斷開好友連結 #怦然心動的臉書整理魔法 #簡單三步驟打家學會了嗎`, time: '5天', targetId: FEEDBACK.TARGET, reactId: uuidv1(), isHidden: false } ] createCommentReactions(comments[0].id, commentReacts, 452, [2, 0, 0, 1]) createCommentReactions(comments[1].id, commentReacts, 582, [3, 2, 0, 1]) createCommentReactions(comments[2].id, commentReacts, 3120, [2, 1]) createCommentReactions(comments[3].id, commentReacts, 187, [0, 0, 0, 0, 0, 1]) createCommentReactions(comments[4].id, commentReacts, 987, [2, 0, 0, 0, 0, 1]) createCommentReactions(comments[5].id, commentReacts, 501, [2, 3]) createCommentReactions(comments[6].id, commentReacts, 872, [2, 1, 0, 1, 2]) createCommentReactions(comments[7].id, commentReacts, 789, [2, 1, 0, 1]) for (let i = 0; i < commentsTotal; i++) { if (comments[i] === undefined) { comments.push({ id: uuidv1(), user: getFakeUser(i), saying: '假留言', time: `${i}天`, targetId: FEEDBACK.TARGET, reactId: uuidv1(), isHidden: false }) } } if (comments.length > commentsTotal) { comments.splice(commentsTotal) } const commentObj = { comments, commentReacts } return commentObj } /** * create fake shares. * @param {sharesTotal} sharesTotal * How many shares to make, 290 will pass down if omitted. 
* @returns {shares} * An array of objects contains the following properties: * @prop {name} * The user's profile name */ const createShares = sharesTotal => { const shares = [ { id: uuidv1(), user: definedUsers.terryGoodTiming }, { id: uuidv1(), user: definedUsers.koreanFish }, { id: uuidv1(), user: definedUsers.english }, { id: uuidv1(), user: definedUsers.toolMan }, { id: uuidv1(), user: definedUsers.dingDing }, { id: uuidv1(), user: definedUsers.universityFoundField }, { id: uuidv1(), user: definedUsers.careWheelEveryday }, { id: uuidv1(), user: definedUsers.sparkJoy } ] for (let i = 0; i < sharesTotal; i++) { if (shares[i] === undefined) { shares.push({ id: uuidv1(), user: getFakeUser(i) }) } } if (shares.length > sharesTotal) { shares.splice(sharesTotal) } return shares } module.exports = { createResponseData: ( reactsTotal = 17419, commentsTotal = 1552, sharesTotal = 2871 ) => { const reacts = createReactions(reactsTotal) const { comments, commentReacts } = createComments(commentsTotal) const shares = createShares(sharesTotal) const normalizedData = getNormalizedData( { reacts, commentReacts, comments, shares }, PostSchema ) return { status: 'success', data: normalizedData } } }
javascript
<gh_stars>0
//! Structs and functions for interacting with a Pact Broker

use std::collections::HashMap;

use futures::stream::*;
use itertools::Itertools;
use maplit::*;
use pact_models::http_utils::HttpAuth;
use pact_models::pact::{load_pact_from_json, Pact};
use pact_models::{http_utils, PACT_RUST_VERSION};
use regex::{Captures, Regex};
use reqwest::Method;
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
use serde_with::skip_serializing_none;
use tracing::{debug, error, info, trace, warn};

use pact_matching::Mismatch;

use crate::MismatchResult;
use crate::utils::with_retries;

use super::provider_client::join_paths;

/// Returns true only if `field` exists in the JSON object and is the boolean `true`.
fn is_true(object: &serde_json::Map<String, serde_json::Value>, field: &str) -> bool {
  match object.get(field) {
    Some(json) => match *json {
      serde_json::Value::Bool(b) => b,
      _ => false
    },
    None => false
  }
}

/// Renders a JSON value as a string: string values are returned unquoted,
/// anything else via its `Display` (JSON) representation.
fn as_string(json: &serde_json::Value) -> String {
  match *json {
    serde_json::Value::String(ref s) => s.clone(),
    _ => format!("{}", json)
  }
}

/// Extracts the `content-type` header from the response, defaulting to
/// `text/plain` when the header is missing or not valid UTF-8.
fn content_type(response: &reqwest::Response) -> String {
  match response.headers().get("content-type") {
    Some(value) => value.to_str().unwrap_or("text/plain").into(),
    None => "text/plain".to_string()
  }
}

/// Returns true when the response content type is `application/json` or
/// `application/hal+json` (parameters such as charset are ignored by the
/// mime matching below).
fn json_content_type(response: &reqwest::Response) -> bool {
  match content_type(response).parse::<mime::Mime>() {
    Ok(mime) => {
      match (mime.type_().as_str(), mime.subtype().as_str(), mime.suffix()) {
        ("application", "json", None) => true,
        ("application", "hal", Some(mime::JSON)) => true,
        _ => false
      }
    }
    Err(_) => false
  }
}

/// Case-insensitive lookup of `key` in a JSON object. Note the returned tuple
/// contains the *requested* key (`key.to_string()`), not the actual casing of
/// the key found in the map.
fn find_entry(map: &serde_json::Map<String, serde_json::Value>, key: &str) -> Option<(String, serde_json::Value)> {
  match map.keys().find(|k| k.to_lowercase() == key.to_lowercase()) {
    Some(k) => map.get(k).map(|v| (key.to_string(), v.clone())),
    None => None
  }
}

/// Errors that can occur with a Pact Broker
#[derive(Debug, Clone, thiserror::Error)]
pub enum PactBrokerError {
  /// Error with a HAL link
  #[error("Error with a HAL link - {0}")]
  LinkError(String),
  /// Error with the content of a HAL resource
  #[error("Error with the content of a HAL resource - {0}")]
  ContentError(String),
  #[error("IO Error - {0}")]
  /// IO Error
  IoError(String),
  /// Link/Resource was not found
  #[error("Link/Resource was not found - {0}")]
  NotFound(String),
  /// Invalid URL
  #[error("Invalid URL - {0}")]
  UrlError(String)
}

// Allows comparing an error directly against an expected message string
// (used by the tests at the end of this file).
impl PartialEq<String> for PactBrokerError {
  fn eq(&self, other: &String) -> bool {
    let message = match *self {
      PactBrokerError::LinkError(ref s) => s.clone(),
      PactBrokerError::ContentError(ref s) => s.clone(),
      PactBrokerError::IoError(ref s) => s.clone(),
      PactBrokerError::NotFound(ref s) => s.clone(),
      PactBrokerError::UrlError(ref s) => s.clone()
    };
    message == *other
  }
}

impl <'a> PartialEq<&'a str> for PactBrokerError {
  fn eq(&self, other: &&str) -> bool {
    let message = match *self {
      PactBrokerError::LinkError(ref s) => s.clone(),
      PactBrokerError::ContentError(ref s) => s.clone(),
      PactBrokerError::IoError(ref s) => s.clone(),
      PactBrokerError::NotFound(ref s) => s.clone(),
      PactBrokerError::UrlError(ref s) => s.clone()
    };
    message.as_str() == *other
  }
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)]
/// Structure to represent a HAL link
pub struct Link {
  /// Link name
  pub name: String,
  /// Link HREF
  pub href: Option<String>,
  /// If the link is templated (has expressions in the HREF that need to be expanded)
  pub templated: bool,
  /// Link title
  pub title: Option<String>
}

impl Link {
  /// Create a link from serde JSON data
  pub fn from_json(link: &str, link_data: &serde_json::Map<String, serde_json::Value>) -> Link {
    Link {
      name: link.to_string(),
      // href lookup is case-insensitive via find_entry
      href: find_entry(link_data, &"href".to_string())
        .map(|(_, href)| as_string(&href)),
      templated: is_true(link_data, "templated"),
      title: link_data.get("title").map(|title| as_string(title))
    }
  }

  /// Converts the Link into a JSON representation
  pub fn as_json(&self) -> serde_json::Value {
    // Only present fields are emitted; `templated` is always included.
    match (self.href.clone(), self.title.clone()) {
      (Some(href), Some(title)) => json!({
        "href": href,
        "title": title,
        "templated": self.templated
      }),
      (Some(href), None) => json!({
        "href": href,
        "templated": self.templated
      }),
      (None, Some(title)) => json!({
        "title": title,
        "templated": self.templated
      }),
      (None, None) => json!({
        "templated": self.templated
      })
    }
  }
}

impl Default for Link {
  fn default() -> Self {
    Link {
      name: "link".to_string(),
      href: None,
      templated: false,
      title: None
    }
  }
}

/// HAL aware HTTP client
#[derive(Clone)]
pub struct HALClient {
  client: reqwest::Client,       // underlying HTTP client (reused across requests)
  url: String,                   // base URL of the broker
  path_info: Option<serde_json::Value>, // body of the last fetched HAL resource
  auth: Option<HttpAuth>,        // optional basic/bearer authentication
  retries: u8                    // number of attempts for retried requests
}

impl HALClient {
  /// Initialise a client with the URL and authentication
  pub fn with_url(url: &str, auth: Option<HttpAuth>) -> HALClient {
    HALClient { url: url.to_string(), auth, ..HALClient::default() }
  }

  /// Returns a copy of this client with `path_info` replaced by the given
  /// resource body (the client is otherwise unchanged).
  fn update_path_info(self, path_info: serde_json::Value) -> HALClient {
    HALClient {
      client: self.client.clone(),
      url: self.url.clone(),
      path_info: Some(path_info),
      auth: self.auth,
      retries: self.retries
    }
  }

  /// Navigate to the resource from the link name
  pub async fn navigate(
    self,
    link: &'static str,
    template_values: &HashMap<String, String>
  ) -> Result<HALClient, PactBrokerError> {
    trace!("navigate(link='{}', template_values={:?})", link, template_values);

    // Lazily fetch the broker index ("/") first if no resource has been
    // fetched yet, so that its _links are available for the lookup below.
    let client = if self.path_info.is_none() {
      let path_info = self.clone().fetch("/".into()).await?;
      self.update_path_info(path_info)
    } else {
      self
    };
    let path_info = client.clone().fetch_link(link, template_values).await?;
    let client = client.update_path_info(path_info);
    Ok(client)
  }

  /// Looks up `link` in the `_links` section of the last fetched resource.
  fn find_link(&self, link: &'static str) -> Result<Link, PactBrokerError> {
    match self.path_info {
      None => Err(PactBrokerError::LinkError(format!("No previous resource has been fetched from the pact broker. URL: '{}', LINK: '{}'", self.url, link))),
      Some(ref json) => match json.get("_links") {
        Some(json) => match json.get(link) {
          Some(link_data) => link_data.as_object()
            .map(|link_data| Link::from_json(&link.to_string(), &link_data))
            .ok_or_else(|| PactBrokerError::LinkError(format!("Link is malformed, expected an object but got {}. URL: '{}', LINK: '{}'", link_data, self.url, link))),
          // sic: "where found" typo retained — message text is part of runtime behaviour
          None => Err(PactBrokerError::LinkError(format!("Link '{}' was not found in the response, only the following links where found: {:?}. URL: '{}', LINK: '{}'", link, json.as_object().unwrap_or(&json!({}).as_object().unwrap()).keys().join(", "), self.url, link)))
        },
        None => Err(PactBrokerError::LinkError(format!("Expected a HAL+JSON response from the pact broker, but got a response with no '_links'. URL: '{}', LINK: '{}'", self.url, link)))
      }
    }
  }

  /// Resolves the named link on the current resource and fetches it.
  async fn fetch_link(
    self,
    link: &'static str,
    template_values: &HashMap<String, String>
  ) -> Result<serde_json::Value, PactBrokerError> {
    trace!("fetch_link(link='{}', template_values={:?})", link, template_values);
    let link_data = self.find_link(link)?;
    self.fetch_url(&link_data, template_values).await
  }

  /// Fetch the resource at the Link from the Pact broker
  pub async fn fetch_url(
    self,
    link: &Link,
    template_values: &HashMap<String, String>
  ) -> Result<serde_json::Value, PactBrokerError> {
    trace!("fetch_url(link={:?}, template_values={:?})", link, template_values);
    // Templated links have their {placeholders} expanded first.
    let link_url = if link.templated {
      debug!("Link URL is templated");
      self.clone().parse_link_url(&link, &template_values)
    } else {
      link.href.clone()
        .ok_or_else(|| PactBrokerError::LinkError(
          format!("Link is malformed, there is no href. URL: '{}', LINK: '{}'", self.url, link.name)
        ))
    }?;

    let base_url = self.url.parse::<reqwest::Url>()
      .map_err(|err| PactBrokerError::UrlError(format!("{}", err)))?;
    let joined_url = base_url.join(&link_url)
      .map_err(|err| PactBrokerError::UrlError(format!("{}", err)))?;
    // NOTE(review): only the path of the joined URL is fetched; any query
    // string in the link is dropped here — confirm this is intended.
    self.fetch(joined_url.path().into()).await
  }

  /// Performs an authenticated GET of `path` relative to the broker base URL,
  /// with retries, and parses the response as HAL+JSON.
  async fn fetch(self, path: &str) -> Result<serde_json::Value, PactBrokerError> {
    info!("Fetching path '{}' from pact broker", path);
    let url = join_paths(&self.url, path).parse::<reqwest::Url>()
      .map_err(|err| PactBrokerError::UrlError(format!("{}", err)))?;
    let request_builder = match self.auth {
      Some(ref auth) => match auth {
        HttpAuth::User(username, password) => self.client.get(url).basic_auth(username, password.clone()),
        HttpAuth::Token(token) => self.client.get(url).bearer_auth(token),
        _ => self.client.get(url)
      },
      None => self.client.get(url)
    }.header("accept", "application/hal+json, application/json");

    let response = with_retries(self.retries, request_builder).await
      .map_err(|err| {
        PactBrokerError::IoError(format!("Failed to access pact broker path '{}' - {}. URL: '{}'", &path, err, &self.url,))
      })?;

    self.parse_broker_response(path.to_string(), response)
      .await
  }

  /// Maps a broker HTTP response to either the parsed JSON body or a
  /// PactBrokerError (NotFound for 404, ContentError for non-JSON bodies,
  /// IoError for other failures).
  async fn parse_broker_response(
    &self,
    path: String,
    response: reqwest::Response,
  ) -> Result<serde_json::Value, PactBrokerError> {
    // Capture content-type info before the response body is consumed.
    let is_json_content_type = json_content_type(&response);
    let content_type = content_type(&response); // shadows the free function
    if response.status().is_success() {
      let body = response.bytes()
        .await
        .map_err(|_| PactBrokerError::IoError(
          format!("Failed to download response body for path '{}'. URL: '{}'", &path, self.url)
        ))?;
      if is_json_content_type {
        serde_json::from_slice(&body)
          .map_err(|err| PactBrokerError::ContentError(
            format!("Did not get a valid HAL response body from pact broker path '{}' - {}. URL: '{}'", path, err, self.url)
          ))
      } else {
        Err(PactBrokerError::ContentError(
          format!("Did not get a HAL response from pact broker path '{}', content type is '{}'. URL: '{}'", path, content_type, self.url)
        ))
      }
    } else if response.status() == reqwest::StatusCode::NOT_FOUND {
      Err(PactBrokerError::NotFound(
        format!("Request to pact broker path '{}' failed: {}. URL: '{}'", path, response.status(), self.url)
      ))
    } else {
      Err(PactBrokerError::IoError(
        format!("Request to pact broker path '{}' failed: {}. URL: '{}'", path, response.status(), self.url)
      ))
    }
  }

  /// Expands `{placeholder}` expressions in a templated link HREF using
  /// `values`; unresolved placeholders are kept verbatim (with a warning).
  /// Substituted values are URL-encoded.
  fn parse_link_url(self, link: &Link, values: &HashMap<String, String>) -> Result<String, PactBrokerError> {
    match link.href {
      Some(ref href) => {
        debug!("templated URL = {}", href);
        let re = Regex::new(r"\{(\w+)}").unwrap();
        let final_url = re.replace_all(href, |caps: &Captures| {
          let lookup = caps.get(1).unwrap().as_str();
          trace!("Looking up value for key '{}'", lookup);
          match values.get(lookup) {
            Some(val) => urlencoding::encode(val.as_str()).to_string(),
            None => {
              warn!("No value was found for key '{}', mapped values are {:?}", lookup, values);
              format!("{{{}}}", lookup)
            }
          }
        });
        debug!("final URL = {}", final_url);
        Ok(final_url.to_string())
      },
      None => Err(PactBrokerError::LinkError(
        format!("Expected a HAL+JSON response from the pact broker, but got a link with no HREF. URL: '{}', LINK: '{}'", self.url, link.name)))
    }
  }

  /// Iterate over all the links by name
  pub fn iter_links(&self, link: &str) -> Result<Vec<Link>, PactBrokerError> {
    match self.path_info {
      None => Err(PactBrokerError::LinkError(format!("No previous resource has been fetched from the pact broker. URL: '{}', LINK: '{}'", self.url, link))),
      Some(ref json) => match json.get("_links") {
        Some(json) => match json.get(&link) {
          // The named entry must be an array; each element may be a full link
          // object, a bare HREF string, or anything else (stringified).
          Some(link_data) => link_data.as_array()
            .map(|link_data| link_data.iter().map(|link_json| match link_json {
              Value::Object(data) => Link::from_json(&link, data),
              Value::String(s) => Link { name: link.to_string(), href: Some(s.clone()), templated: false, title: None },
              _ => Link { name: link.to_string(), href: Some(link_json.to_string()), templated: false, title: None }
            }).collect())
            .ok_or_else(|| PactBrokerError::LinkError(format!("Link is malformed, expected an object but got {}. URL: '{}', LINK: '{}'", link_data, self.url, link))),
          None => Err(PactBrokerError::LinkError(format!("Link '{}' was not found in the response, only the following links where found: {:?}. URL: '{}', LINK: '{}'", link, json.as_object().unwrap_or(&json!({}).as_object().unwrap()).keys().join(", "), self.url, link)))
        },
        None => Err(PactBrokerError::LinkError(format!("Expected a HAL+JSON response from the pact broker, but got a response with no '_links'. URL: '{}', LINK: '{}'", self.url, link)))
      }
    }
  }

  /// POSTs a JSON document to the given URL.
  async fn post_json(&self, url: &str, body: &str) -> Result<serde_json::Value, PactBrokerError> {
    trace!("post_json(url='{}', body='{}')", url, body);
    self.send_document(url, body, Method::POST).await
  }

  /// PUTs a JSON document to the given URL.
  async fn put_json(&self, url: &str, body: &str) -> Result<serde_json::Value, PactBrokerError> {
    trace!("put_json(url='{}', body='{}')", url, body);
    self.send_document(url, body, Method::PUT).await
  }

  /// Sends a JSON body with the given method, with retries and auth. Note
  /// that only the *path* of `url` is used — it is re-joined onto the client
  /// base URL, so any query/fragment on `url` is dropped.
  async fn send_document(&self, url: &str, body: &str, method: Method) -> Result<serde_json::Value, PactBrokerError> {
    debug!("Sending JSON to {} using {}: {}", url, method, body);
    let url = url.parse::<reqwest::Url>()
      .map_err(|err| PactBrokerError::UrlError(format!("{}", err)))?;
    let base_url = self.url.parse::<reqwest::Url>()
      .map_err(|err| PactBrokerError::UrlError(format!("{}", err)))?;
    let url = base_url.join(&url.path())
      .map_err(|err| PactBrokerError::UrlError(format!("{}", err)))?;
    let request_builder = match self.auth {
      Some(ref auth) => match auth {
        HttpAuth::User(username, password) => self.client
          .request(method, url.clone())
          .basic_auth(username, password.clone()),
        HttpAuth::Token(token) => self.client
          .request(method, url.clone())
          .bearer_auth(token),
        _ => self.client.request(method, url.clone())
      },
      None => self.client.request(method, url.clone())
    }
      .header("Content-Type", "application/json")
      .header("Accept", "application/hal+json")
      .header("Accept", "application/json")
      .body(body.to_string());

    let response = with_retries(self.retries, request_builder)
      .await
      .map_err(|err| PactBrokerError::IoError(
        format!("Failed to send JSON to the pact broker URL '{}' - {}", url, err)
      ))?
      .error_for_status()
      .map_err(|err| PactBrokerError::ContentError(
        format!("Request to pact broker URL '{}' failed - {}", url, err)
      ));

    match response {
      Ok(res) => {
        // Body parse failures are swallowed here: an unparseable success
        // response yields Value::Null rather than an error.
        let res = self.parse_broker_response(url.path().to_string(), res).await;
        Ok(res.unwrap_or_default())
      },
      Err(err) => Err(err)
    }
  }

  /// Seeds the client's path_info with a synthetic `_links` document built
  /// from the given links, so subsequent navigate()/find_link() calls work
  /// without fetching the broker index.
  fn with_doc_context(self, doc_attributes: &[Link]) -> Result<HALClient, PactBrokerError> {
    let links: serde_json::Map<String, serde_json::Value> = doc_attributes.iter()
      .map(|link| (link.name.clone(), link.as_json())).collect();
    let links_json = json!({ "_links": json!(links) });
    Ok(self.update_path_info(links_json))
  }
}

impl Default for HALClient {
  fn default() -> Self {
    HALClient {
      client: reqwest::ClientBuilder::new()
        .build()
        .unwrap(),
      url: "".to_string(),
      path_info: None,
      auth: None,
      retries: 3
    }
  }
}

/// Extracts all links from the `_links` section of a HAL document; returns an
/// empty vector when there is no `_links` object.
fn links_from_json(json: &Value) -> Vec<Link> {
  match json.get("_links") {
    Some(json) => match json {
      Value::Object(v) => {
        v.iter().map(|(link, json)| match json {
          Value::Object(attr) => Link::from_json(link, attr),
          _ => Link { name: link.clone(), .. Link::default() }
        }).collect()
      },
      _ => vec![]
    },
    None => vec![]
  }
}

/// Fetches the pacts from the broker that match the provider name
pub async fn fetch_pacts_from_broker(
  broker_url: &str,
  provider_name: &str,
  auth: Option<HttpAuth>
) -> anyhow::Result<Vec<anyhow::Result<(Box<dyn Pact + Send + Sync>, Option<PactVerificationContext>, Vec<Link>)>>> {
  trace!("fetch_pacts_from_broker(broker_url='{}', provider_name='{}', auth={})", broker_url, provider_name, auth.clone().unwrap_or_default());
  let mut hal_client = HALClient::with_url(broker_url, auth);
  let template_values = hashmap!{ "provider".to_string() => provider_name.to_string() };
  // Navigate to the latest pacts for the provider; a 404 is rewritten into a
  // friendlier NotFound message.
  hal_client = hal_client.navigate("pb:latest-provider-pacts", &template_values)
    .await
    .map_err(move |err| {
      match err {
        PactBrokerError::NotFound(_) => PactBrokerError::NotFound(
          format!("No pacts for provider '{}' where found in the pact broker. URL: '{}'", provider_name, broker_url)),
        _ => err
      }
    })?;

  // Fetch each pact link concurrently as a stream, loading each body into a
  // Pact model along with its HAL links. Per-pact failures are collected as
  // Err entries rather than aborting the whole fetch.
  let pact_links = hal_client.clone().iter_links("pacts")?;
  let results: Vec<_> = futures::stream::iter(pact_links)
    .map(|ref pact_link| {
      match pact_link.href {
        Some(_) => Ok((hal_client.clone(), pact_link.clone())),
        None => Err(
          PactBrokerError::LinkError(
            format!("Expected a HAL+JSON response from the pact broker, but got a link with no HREF. URL: '{}', LINK: '{:?}'", &hal_client.url, pact_link)
          )
        )
      }
    })
    .and_then(|(hal_client, pact_link)| async {
      let pact_json = hal_client.fetch_url(
        &pact_link.clone(),
        &template_values.clone()
      ).await?;
      Ok((pact_link, pact_json))
    })
    .map(|result| {
      match result {
        Ok((pact_link, pact_json)) => {
          let href = pact_link.href.unwrap_or_default();
          let links = links_from_json(&pact_json);
          load_pact_from_json(href.as_str(), &pact_json)
            .map(|pact| (pact, None, links))
        },
        Err(err) => Err(err.into())
      }
    })
    .into_stream()
    .collect()
    .await;

  Ok(results)
}

/// Fetch Pacts from the broker using the "provider-pacts-for-verification" endpoint
pub async fn fetch_pacts_dynamically_from_broker(
  broker_url: &str,
  provider_name: String,
  pending: bool,
  include_wip_pacts_since: Option<String>,
  provider_tags: Vec<String>,
  provider_branch: Option<String>,
  consumer_version_selectors: Vec<ConsumerVersionSelector>,
  auth: Option<HttpAuth>
) -> Result<Vec<Result<(Box<dyn Pact + Send + Sync>, Option<PactVerificationContext>, Vec<Link>), PactBrokerError>>, PactBrokerError> {
  trace!("fetch_pacts_dynamically_from_broker(broker_url='{}', provider_name='{}', pending={}, \
    include_wip_pacts_since={:?}, provider_tags: {:?}, consumer_version_selectors: {:?}, auth={})",
    broker_url, provider_name, pending, include_wip_pacts_since, provider_tags,
    consumer_version_selectors, auth.clone().unwrap_or_default());
  let mut hal_client = HALClient::with_url(broker_url, auth);
  let template_values = hashmap!{ "provider".to_string() => provider_name.clone() };
  hal_client = hal_client.navigate("pb:provider-pacts-for-verification", &template_values)
    .await
    .map_err(move |err| {
      match err {
        PactBrokerError::NotFound(_) => PactBrokerError::NotFound(
          format!("No pacts for provider '{}' were found in the pact broker. URL: '{}'", provider_name.clone(), broker_url)),
        _ => err
      }
    })?;

  // Construct the Pacts for verification payload
  let pacts_for_verification = PactsForVerificationRequest {
    provider_version_tags: provider_tags,
    provider_version_branch: provider_branch,
    include_wip_pacts_since,
    consumer_version_selectors,
    include_pending_status: pending,
  };
  let request_body = serde_json::to_string(&pacts_for_verification).unwrap();

  // Post the verification request
  let response = match hal_client.find_link("self") {
    Ok(link) => {
      let link = hal_client.clone().parse_link_url(&link, &hashmap!{})?;
      match hal_client.clone().post_json(link.as_str(), request_body.as_str()).await {
        Ok(res) => Some(res),
        Err(err) => {
          debug!("error Response for pacts for verification {:?} ", err);
          return Err(err)
        }
      }
    },
    Err(e) => return Err(e)
  };

  // Find all of the Pact links
  let pact_links = match response {
    Some(v) => {
      // An unparseable response is treated as an empty pact list.
      let pfv: PactsForVerificationResponse = serde_json::from_value(v)
        .unwrap_or(PactsForVerificationResponse { embedded: PactsForVerificationBody { pacts: vec!() } });
      if pfv.embedded.pacts.len() == 0 {
        return Err(PactBrokerError::NotFound(format!("No pacts were found for this provider")))
      };
      // Pair each pact's "self" link with its verification context
      // (pending flag + broker notices).
      let links: Result<Vec<(Link, PactVerificationContext)>, PactBrokerError> = pfv.embedded.pacts.iter().map(|p| {
        match p.links.get("self") {
          Some(l) => Ok((l.clone(), PactVerificationContext {
            short_description: p.short_description.clone(),
            verification_properties: PactVerificationProperties {
              pending: p.verification_properties.pending,
              notices: p.verification_properties.notices.clone(),
            }
          })),
          None => Err(
            PactBrokerError::LinkError(
              format!("Expected a HAL+JSON response from the pact broker, but got a link with no HREF. URL: '{}', PATH: '{:?}'", &hal_client.url, &p.links,)
            )
          )
        }
      }).collect();
      links
    },
    None => Err(PactBrokerError::NotFound(format!("No pacts were found for this provider")))
  }?;

  // Fetch each pact concurrently, keeping its verification context and HAL
  // links; per-pact failures become Err entries in the result vector.
  let results: Vec<_> = futures::stream::iter(pact_links)
    .map(|(ref pact_link, ref context)| {
      match pact_link.href {
        Some(_) => Ok((hal_client.clone(), pact_link.clone(), context.clone())),
        None => Err(
          PactBrokerError::LinkError(
            format!("Expected a HAL+JSON response from the pact broker, but got a link with no HREF. URL: '{}', LINK: '{:?}'", &hal_client.url, pact_link)
          )
        )
      }
    })
    .and_then(|(hal_client, pact_link, context)| async {
      let pact_json = hal_client.fetch_url(
        &pact_link.clone(),
        &template_values.clone()
      ).await?;
      Ok((pact_link, pact_json, context))
    })
    .map(|result| {
      match result {
        Ok((pact_link, pact_json, context)) => {
          let href = pact_link.href.unwrap_or_default();
          let links = links_from_json(&pact_json);
          load_pact_from_json(href.as_str(), &pact_json)
            .map(|pact| (pact, Some(context), links))
            .map_err(|err| PactBrokerError::ContentError(format!("{}", err)))
        },
        Err(err) => Err(err)
      }
    })
    .into_stream()
    .collect()
    .await;

  Ok(results)
}

/// Fetch the Pact from the given URL, using any required authentication. This will use a GET
/// request to the given URL and parse the result into a Pact model. It will also look for any HAL
/// links in the response, returning those if found.
pub async fn fetch_pact_from_url(url: &str, auth: &Option<HttpAuth>) -> anyhow::Result<(Box<dyn Pact + Send + Sync>, Vec<Link>)> { let url = url.to_string(); let auth = auth.clone(); let (url, pact_json) = tokio::task::spawn_blocking(move || { http_utils::fetch_json_from_url(&url, &auth) }).await??; let pact = load_pact_from_json(&url, &pact_json)?; let links = links_from_json(&pact_json); Ok((pact, links)) } /// Struct that wraps the result of a verification test pub enum TestResult { /// Test was OK Ok(Vec<Option<String>>), /// Test failed verification Failed(Vec<(Option<String>, Option<MismatchResult>)>) } impl TestResult { /// Convert this test result to a boolean value pub fn to_bool(&self) -> bool { match self { TestResult::Ok(_) => true, _ => false } } } /// Publishes the result to the "pb:publish-verification-results" link in the links associated with the pact pub async fn publish_verification_results( links: Vec<Link>, broker_url: &str, auth: Option<HttpAuth>, result: TestResult, version: String, build_url: Option<String>, provider_tags: Vec<String>, branch: Option<String> ) -> Result<serde_json::Value, PactBrokerError> { let hal_client = HALClient::with_url(broker_url, auth.clone()); if branch.is_some() { publish_provider_branch(&hal_client, &links, &branch.unwrap(), &version).await?; } if !provider_tags.is_empty() { publish_provider_tags(&hal_client, &links, provider_tags, &version).await?; } let publish_link = links .iter() .cloned() .find(|item| item.name.to_ascii_lowercase() == "pb:publish-verification-results") .ok_or_else(|| PactBrokerError::LinkError( "Response from the pact broker has no 'pb:publish-verification-results' link".into() ))?; let json = build_payload(result, version, build_url); hal_client.post_json(publish_link.href.unwrap_or_default().as_str(), json.to_string().as_str()).await } fn build_payload(result: TestResult, version: String, build_url: Option<String>) -> serde_json::Value { let mut json = json!({ "success": result.to_bool(), 
"providerApplicationVersion": version, "verifiedBy": { "implementation": "Pact-Rust", "version": PACT_RUST_VERSION } }); let json_obj = json.as_object_mut().unwrap(); if build_url.is_some() { json_obj.insert("buildUrl".into(), json!(build_url.unwrap())); } match result { TestResult::Failed(mismatches) => { let values = mismatches.iter() .group_by(|(id, _)| id.clone().unwrap_or_default()) .into_iter() .map(|(key, mismatches)| { let acc: (Vec<serde_json::Value>, Vec<serde_json::Value>) = (vec![], vec![]); let values = mismatches.fold(acc, |mut acc, (_, result)| { if let Some(mismatch) = result { match mismatch { MismatchResult::Mismatches { mismatches, .. } => { for mismatch in mismatches { match mismatch { Mismatch::MethodMismatch { expected, actual } => acc.0.push(json!({ "attribute": "method", "description": format!("Expected method of {} but received {}", expected, actual) })), Mismatch::PathMismatch { mismatch, .. } => acc.0.push(json!({ "attribute": "path", "description": mismatch })), Mismatch::StatusMismatch { mismatch, .. } => acc.0.push(json!({ "attribute": "status", "description": mismatch })), Mismatch::QueryMismatch { parameter, mismatch, .. } => acc.0.push(json!({ "attribute": "query", "identifier": parameter, "description": mismatch })), Mismatch::HeaderMismatch { key, mismatch, .. } => acc.0.push(json!({ "attribute": "header", "identifier": key, "description": mismatch })), Mismatch::BodyTypeMismatch { expected, actual, .. } => acc.0.push(json!({ "attribute": "body", "identifier": "$", "description": format!("Expected body type of '{}' but received '{}'", expected, actual) })), Mismatch::BodyMismatch { path, mismatch, .. } => acc.0.push(json!({ "attribute": "body", "identifier": path, "description": mismatch })), Mismatch::MetadataMismatch { key, mismatch, .. 
} => acc.0.push(json!({ "attribute": "metadata", "identifier": key, "description": mismatch })) } } }, MismatchResult::Error(err, _) => acc.1.push(json!({ "message": err })) }; }; acc }); let mut json = json!({ "interactionId": key, "success": values.0.is_empty() && values.1.is_empty() }); if !values.0.is_empty() { json.as_object_mut().unwrap().insert("mismatches".into(), json!(values.0)); } if !values.1.is_empty() { json.as_object_mut().unwrap().insert("exceptions".into(), json!(values.1)); } json }).collect::<Vec<serde_json::Value>>(); json_obj.insert("testResults".into(), serde_json::Value::Array(values)); } TestResult::Ok(ids) => { let values = ids.iter().filter(|id| id.is_some()) .map(|id| json!({ "interactionId": id.clone().unwrap_or_default(), "success": true })).collect(); json_obj.insert("testResults".into(), serde_json::Value::Array(values)); } } json } async fn publish_provider_tags( hal_client: &HALClient, links: &[Link], provider_tags: Vec<String>, version: &str) -> Result<(), PactBrokerError> { let hal_client = hal_client.clone().with_doc_context(links)? .navigate("pb:provider", &hashmap!{}).await?; match hal_client.find_link("pb:version-tag") { Ok(link) => { for tag in &provider_tags { let template_values = hashmap! { "version".to_string() => version.to_string(), "tag".to_string() => tag.clone() }; match hal_client.clone().put_json(hal_client.clone().parse_link_url(&link, &template_values)?.as_str(), "{}").await { Ok(_) => debug!("Pushed tag {} for provider version {}", tag, version), Err(err) => { error!("Failed to push tag {} for provider version {}", tag, version); return Err(err); } } }; Ok(()) }, Err(_) => Err(PactBrokerError::LinkError("Can't publish provider tags as there is no 'pb:version-tag' link".to_string())) } } async fn publish_provider_branch( hal_client: &HALClient, links: &[Link], branch: &str, version: &str) -> Result<(), PactBrokerError> { let hal_client = hal_client.clone().with_doc_context(links)? 
.navigate("pb:provider", &hashmap!{}).await?; match hal_client.find_link("pb:branch-version") { Ok(link) => { let template_values = hashmap! { "branch".to_string() => branch.to_string(), "version".to_string() => version.to_string(), }; match hal_client.clone().put_json(hal_client.clone().parse_link_url(&link, &template_values)?.as_str(), "{}").await { Ok(_) => debug!("Pushed branch {} for provider version {}", branch, version), Err(err) => { error!("Failed to push branch {} for provider version {}", branch, version); return Err(err); } } Ok(()) }, Err(_) => Err(PactBrokerError::LinkError("Can't publish provider branch as there is no 'pb:branch-version' link. Please ugrade to Pact Broker version 2.86.0 or later for branch support".to_string())) } } #[skip_serializing_none] #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "camelCase")] /// Structure to represent a HAL link pub struct ConsumerVersionSelector { /// Application name to filter the results on pub consumer: Option<String>, /// Tag pub tag: Option<String>, /// Fallback tag if Tag doesn't exist pub fallback_tag: Option<String>, /// Only select the latest (if false, this selects all pacts for a tag) pub latest: Option<bool>, /// Applications that have been deployed or released pub deployed_or_released: Option<bool>, /// Applications that have been deployed pub deployed: Option<bool>, /// Applications that have been released pub released: Option<bool>, /// Applications in a given environment pub environment: Option<String>, /// Applications with the default branch set in the broker pub main_branch: Option<bool>, /// Applications with the given branch pub branch: Option<String>, /// Applications that match the the provider version branch sent during verification pub matching_branch: Option<bool>, } #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "camelCase")] struct PactsForVerificationResponse { #[serde(rename(deserialize = "_embedded"))] pub embedded: 
PactsForVerificationBody } #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "camelCase")] struct PactsForVerificationBody { pub pacts: Vec<PactForVerification> } #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "camelCase")] struct PactForVerification { pub short_description: String, #[serde(rename(deserialize = "_links"))] pub links: HashMap<String, Link>, pub verification_properties: PactVerificationProperties, } #[skip_serializing_none] #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "camelCase")] /// Request to send to determine the pacts to verify pub struct PactsForVerificationRequest { /// Provider tags to use for determining pending pacts (if enabled) pub provider_version_tags: Vec<String>, /// Enable pending pacts feature pub include_pending_status: bool, /// Find WIP pacts after given date pub include_wip_pacts_since: Option<String>, /// Detailed pact selection criteria , see https://docs.pact.io/pact_broker/advanced_topics/consumer_version_selectors/ pub consumer_version_selectors: Vec<ConsumerVersionSelector>, /// Current provider version branch if used (instead of tags) pub provider_version_branch: Option<String> } #[skip_serializing_none] #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "camelCase")] /// Provides the context on why a Pact was included pub struct PactVerificationContext { /// Description pub short_description: String, /// Properties pub verification_properties: PactVerificationProperties, } #[skip_serializing_none] #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "camelCase")] /// Properties associated with the verification context pub struct PactVerificationProperties { #[serde(default)] /// If the Pact is pending pub pending: bool, /// Notices provided by the Pact Broker pub notices: Vec<HashMap<String, String>>, } #[cfg(test)] mod tests { use env_logger::*; use expectest::expect; use expectest::prelude::*; use pact_models::{Consumer, 
PactSpecification, Provider}; use pact_models::prelude::RequestResponsePact; use pact_models::sync_interaction::RequestResponseInteraction; use pact_consumer::*; use pact_consumer::prelude::*; use pact_matching::Mismatch::MethodMismatch; use super::*; use super::{content_type, json_content_type}; #[tokio::test] async fn fetch_returns_an_error_if_there_is_no_pact_broker() { let client = HALClient::with_url("http://idont.exist:6666", None); expect!(client.fetch("/").await).to(be_err()); } #[tokio::test] async fn fetch_returns_an_error_if_it_does_not_get_a_success_response() { let pact_broker = PactBuilder::new("RustPactVerifier", "PactBroker") .interaction("a request to a non-existant path", "", |mut i| { i.given("the pact broker has a valid pact"); i.request.path("/hello"); i.response.status(404); futures::future::ready(i) }) .await .start_mock_server(None); let client = HALClient::with_url(pact_broker.url().as_str(), None); let result = client.fetch("/hello").await; expect!(result).to(be_err().value(format!("Request to pact broker path \'/hello\' failed: 404 Not Found. URL: '{}'", pact_broker.url()))); } #[tokio::test] async fn fetch_returns_an_error_if_it_does_not_get_a_hal_response() { let pact_broker = PactBuilder::new("RustPactVerifier", "PactBrokerStub") .interaction("a request to a non-json resource", "", |mut i| { i.request.path("/nonjson"); i.response .header("Content-Type", "text/html") .body("<html></html>"); futures::future::ready(i) }) .await .start_mock_server(None); let client = HALClient::with_url(pact_broker.url().as_str(), None); let result = client.fetch("/nonjson").await; expect!(result).to(be_err().value(format!("Did not get a HAL response from pact broker path \'/nonjson\', content type is 'text/html'. 
URL: '{}'", pact_broker.url()))); } #[test] fn content_type_test() { let response = reqwest::Response::from( http::response::Builder::new() .header("content-type", "application/hal+json; charset=utf-8") .body("null") .unwrap() ); expect!(content_type(&response)).to(be_equal_to("application/hal+json; charset=utf-8".to_string())); } #[test] fn json_content_type_test() { let response = reqwest::Response::from( http::response::Builder::new() .header("content-type", "application/json") .body("null") .unwrap() ); expect!(json_content_type(&response)).to(be_true()); } #[test] fn json_content_type_utf8_test() { let response = reqwest::Response::from( http::response::Builder::new() .header("content-type", "application/hal+json;charset=utf-8") .body("null") .unwrap() ); expect!(json_content_type(&response)).to(be_true()); } #[tokio::test] async fn fetch_returns_an_error_if_it_does_not_get_a_valid_hal_response() { let pact_broker = PactBuilder::new("RustPactVerifier", "PactBrokerStub") .interaction("a request to a non-hal resource", "", |mut i| { i.request.path("/nonhal"); i.response.header("Content-Type", "application/hal+json"); futures::future::ready(i) }) .await .interaction("a request to a non-hal resource 2", "", |mut i| { i.request.path("/nonhal2"); i.response .header("Content-Type", "application/hal+json") .body("<html>This is not JSON</html>"); futures::future::ready(i) }) .await .start_mock_server(None); let client = HALClient::with_url(pact_broker.url().as_str(), None); let result = client.clone().fetch("/nonhal").await; expect!(result).to(be_err().value(format!("Did not get a valid HAL response body from pact broker path \'/nonhal\' - EOF while parsing a value at line 1 column 0. URL: '{}'", pact_broker.url()))); let result = client.clone().fetch("/nonhal2").await; expect!(result).to(be_err().value(format!("Did not get a valid HAL response body from pact broker path \'/nonhal2\' - expected value at line 1 column 1. 
URL: '{}'", pact_broker.url()))); } #[tokio::test] async fn fetch_retries_the_request_on_50x_errors() { let _ = env_logger::try_init(); let pact_broker = PactBuilder::new("RustPactVerifier", "PactBrokerStub") .interaction("a request to a hal resource", "", |mut i| async move { i.given("server returns a gateway error"); i.request.path("/"); i.response.status(503); i }) .await .start_mock_server(None); let client = HALClient::with_url(pact_broker.url().as_str(), None); let expected_requests = client.retries as usize; let result = client.fetch("/").await; expect!(result).to(be_err()); expect!(pact_broker.metrics().requests).to(be_equal_to(expected_requests )); } #[tokio::test] async fn post_json_retries_the_request_on_50x_errors() { let _ = env_logger::try_init(); let pact_broker = PactBuilder::new("RustPactVerifier", "PactBrokerStub") .interaction("a POST request", "", |mut i| async move { i.given("server returns a gateway error"); i.request.path("/").method("POST"); i.response.status(503); i }) .await .start_mock_server(None); let client = HALClient::with_url(pact_broker.url().as_str(), None); let expected_requests = client.retries as usize; let result = client.post_json(pact_broker.url().as_str(), "{}").await; expect!(result.clone()).to(be_err()); expect!(pact_broker.metrics().requests).to(be_equal_to(expected_requests )); } #[tokio::test] async fn put_json_retries_the_request_on_50x_errors() { let _ = env_logger::try_init(); let pact_broker = PactBuilder::new("RustPactVerifier", "PactBrokerStub") .interaction("a PUT request", "", |mut i| async move { i.given("server returns a gateway error"); i.request.path("/").method("PUT"); i.response.status(503); i }) .await .start_mock_server(None); let client = HALClient::with_url(pact_broker.url().as_str(), None); let expected_requests = client.retries as usize; let result = client.put_json(pact_broker.url().as_str(), "{}").await; expect!(result.clone()).to(be_err()); 
expect!(pact_broker.metrics().requests).to(be_equal_to(expected_requests )); } #[test] fn parse_link_url_returns_error_if_there_is_no_href() { let client = HALClient::default(); let link = Link { name: "link".to_string(), href: None, templated: false, title: None }; expect!(client.parse_link_url(&link, &hashmap!{})).to(be_err().value( "Expected a HAL+JSON response from the pact broker, but got a link with no HREF. URL: '', LINK: 'link'")); } #[test] fn parse_link_url_replaces_all_tokens_in_href() { let client = HALClient::default(); let values = hashmap!{ "valA".to_string() => "A".to_string(), "valB".to_string() => "B".to_string() }; let link = Link { name: "link".to_string(), href: Some("http://localhost".to_string()), templated: false, title: None }; expect!(client.clone().parse_link_url(&link, &values)).to(be_ok().value("http://localhost")); let link = Link { name: "link".to_string(), href: Some("http://{valA}/{valB}".to_string()), templated: false, title: None }; expect!(client.clone().parse_link_url(&link, &values)).to(be_ok().value("http://A/B")); let link = Link { name: "link".to_string(), href: Some("http://{valA}/{valC}".to_string()), templated: false, title: None }; expect!(client.clone().parse_link_url(&link, &values)).to(be_ok().value("http://A/{valC}")); } #[test] fn parse_link_url_encodes_the_tokens_in_href() { let client = HALClient::default(); let values = hashmap!{ "valA".to_string() => "A".to_string(), "valB".to_string() => "B/C".to_string() }; let link = Link { name: "link".to_string(), href: Some("http://{valA}/{valB}".to_string()), templated: false, title: None }; expect!(client.clone().parse_link_url(&link, &values)).to(be_ok().value("http://A/B%2FC")); } #[tokio::test] async fn fetch_link_returns_an_error_if_a_previous_resource_has_not_been_fetched() { let client = HALClient::with_url("http://localhost", None); let result = client.fetch_link("anything_will_do", &hashmap!{}).await; expect!(result).to(be_err().value("No previous resource has 
been fetched from the pact broker. URL: 'http://localhost', LINK: 'anything_will_do'".to_string())); } #[tokio::test] async fn fetch_link_returns_an_error_if_the_previous_resource_was_not_hal() { try_init().unwrap_or(()); let pact_broker = PactBuilder::new("RustPactVerifier", "PactBrokerStub") .interaction("a request to a non-hal json resource", "", |mut i| async move { i.request.path("/"); i.response .header("Content-Type", "application/hal+json") .body("{}"); i }) .await .start_mock_server(None); let mut client = HALClient::with_url(pact_broker.url().as_str(), None); let result = client.clone().fetch("/").await; expect!(result.clone()).to(be_ok()); client.path_info = result.ok(); let result = client.clone().fetch_link("hal2", &hashmap!{}).await; expect!(result).to(be_err().value(format!("Expected a HAL+JSON response from the pact broker, but got a response with no '_links'. URL: '{}', LINK: 'hal2'", pact_broker.url()))); } #[tokio::test] async fn fetch_link_returns_an_error_if_the_previous_resource_links_are_not_correctly_formed() { try_init().unwrap_or(()); let pact_broker = PactBuilder::new("RustPactVerifier", "PactBrokerStub") .interaction("a request to a hal resource with invalid links", "", |mut i| async move { i.request.path("/"); i.response .header("Content-Type", "application/hal+json") .body("{\"_links\":[{\"next\":{\"href\":\"abc\"}},{\"prev\":{\"href\":\"def\"}}]}"); i }) .await .start_mock_server(None); let mut client = HALClient::with_url(pact_broker.url().as_str(), None); let result = client.clone().fetch("/").await; expect!(result.clone()).to(be_ok()); client.path_info = result.ok(); let result = client.clone().fetch_link("any", &hashmap!{}).await; expect!(result).to(be_err().value(format!("Link 'any' was not found in the response, only the following links where found: \"\". 
URL: '{}', LINK: 'any'", pact_broker.url()))); } #[tokio::test] async fn fetch_link_returns_an_error_if_the_previous_resource_does_not_have_the_link() { let pact_broker = PactBuilder::new("RustPactVerifier", "PactBrokerStub") .interaction("a request to a hal resource", "", |mut i| async move { i.request.path("/"); i.response .header("Content-Type", "application/hal+json") .body("{\"_links\":{\"next\":{\"href\":\"/abc\"},\"prev\":{\"href\":\"/def\"}}}"); i }) .await .start_mock_server(None); let mut client = HALClient::with_url(pact_broker.url().as_str(), None); let result = client.clone().fetch("/").await; expect!(result.clone()).to(be_ok()); client.path_info = result.ok(); let result = client.clone().fetch_link("any", &hashmap!{}).await; expect!(result).to(be_err().value(format!("Link 'any' was not found in the response, only the following links where found: \"next, prev\". URL: '{}', LINK: 'any'", pact_broker.url()))); } #[tokio::test] async fn fetch_link_returns_the_resource_for_the_link() { let pact_broker = PactBuilder::new("RustPactVerifier", "PactBrokerStub") .interaction("a request to a hal resource", "", |mut i| async move { i.request.path("/"); i.response .header("Content-Type", "application/hal+json") .body("{\"_links\":{\"next\":{\"href\":\"/abc\"},\"prev\":{\"href\":\"/def\"}}}"); i }) .await .interaction("a request to next", "", |mut i| async move { i.request.path("/abc"); i.response .header("Content-Type", "application/json") .json_body(json_pattern!("Yay! You found your way here")); i }) .await .start_mock_server(None); let mut client = HALClient::with_url(pact_broker.url().as_str(), None); let result = client.clone().fetch("/").await; expect!(result.clone()).to(be_ok()); client.path_info = result.ok(); let result = client.clone().fetch_link("next", &hashmap!{}).await; expect!(result).to(be_ok().value(serde_json::Value::String("Yay! 
You found your way here".to_string()))); } #[tokio::test] async fn fetch_link_returns_handles_absolute_resource_links() { try_init().unwrap_or(()); let pact_broker = PactBuilder::new("RustPactVerifier", "PactBrokerStub") .interaction("a request to a hal resource with absolute paths", "", |mut i| async move { i.request.path("/"); i.response .header("Content-Type", "application/hal+json") .body("{\"_links\":{\"next\":{\"href\":\"http://localhost/abc\"},\"prev\":{\"href\":\"http://localhost/def\"}}}"); i }) .await .interaction("a request to next", "", |mut i| async move { i.request.path("/abc"); i.response .header("Content-Type", "application/json") .json_body(json_pattern!("Yay! You found your way here")); i }) .await .start_mock_server(None); let mut client = HALClient::with_url(pact_broker.url().as_str(), None); let result = client.clone().fetch("/").await; expect!(result.clone()).to(be_ok()); client.path_info = result.ok(); let result = client.clone().fetch_link("next", &hashmap!{}).await; expect!(result).to(be_ok().value(serde_json::Value::String("Yay! You found your way here".to_string()))); } #[tokio::test] async fn fetch_link_returns_the_resource_for_the_templated_link() { try_init().unwrap_or(()); let pact_broker = PactBuilder::new("RustPactVerifier", "PactBrokerStub") .interaction("a request to a templated hal resource", "", |mut i| async move { i.request.path("/"); i.response .header("Content-Type", "application/hal+json") .body("{\"_links\":{\"document\":{\"href\":\"/doc/{id}\",\"templated\":true}}}"); i }) .await .interaction("a request for a document", "", |mut i| async move { i.request.path("/doc/abc"); i.response .header("Content-Type", "application/json") .json_body(json_pattern!("Yay! 
You found your way here")); i }) .await .start_mock_server(None); let mut client = HALClient::with_url(pact_broker.url().as_str(), None); let result = client.clone().fetch("/").await; expect!(result.clone()).to(be_ok()); client.path_info = result.ok(); let result = client.clone().fetch_link("document", &hashmap!{ "id".to_string() => "abc".to_string() }).await; expect!(result).to(be_ok().value(serde_json::Value::String("Yay! You found your way here".to_string()))); } #[tokio::test] async fn fetch_pacts_from_broker_returns_empty_list_if_there_are_no_pacts() { try_init().unwrap_or(()); let pact_broker = PactBuilder::new("RustPactVerifier", "PactBroker") .interaction("a request to the pact broker root", "", |mut i| async move { i.request .path("/") .header("Accept", "application/hal+json") .header("Accept", "application/json"); i.response .header("Content-Type", "application/hal+json") .json_body(json_pattern!({ "_links": { "pb:latest-provider-pacts": { "href": "http://localhost/pacts/provider/{provider}/latest", "templated": true, } } })); i }) .await .interaction("a request for a providers pacts", "", |mut i| async move { i.given("There are no pacts in the pact broker"); i.request .path("/pacts/provider/sad_provider/latest") .header("Accept", "application/hal+json") .header("Accept", "application/json"); i.response.status(404); i }) .await .start_mock_server(None); let result = fetch_pacts_from_broker(pact_broker.url().as_str(), "sad_provider", None).await; match result { Ok(_) => { panic!("Expected an error result, but got OK"); }, Err(err) => { println!("err: {}", err); expect!(err.to_string().starts_with("Link/Resource was not found - No pacts for provider 'sad_provider' where found in the pact broker")).to(be_true()); } } } #[tokio::test] async fn fetch_pacts_from_broker_returns_a_list_of_pacts() { try_init().unwrap_or(()); let pact = RequestResponsePact { consumer: Consumer { name: "Consumer".to_string() }, provider: Provider { name: "happy_provider".to_string() 
}, .. RequestResponsePact::default() } .to_json(PactSpecification::V3).unwrap().to_string(); let pact2 = RequestResponsePact { consumer: Consumer { name: "Consumer2".to_string() }, provider: Provider { name: "happy_provider".to_string() }, interactions: vec![ RequestResponseInteraction { description: "a request friends".to_string(), .. RequestResponseInteraction::default() } ], .. RequestResponsePact::default() } .to_json(PactSpecification::V3).unwrap().to_string(); let pact_broker = PactBuilder::new("RustPactVerifier", "PactBroker") .interaction("a request to the pact broker root", "", |mut i| async move { i.request .path("/") .header("Accept", "application/hal+json") .header("Accept", "application/json"); i.response .header("Content-Type", "application/hal+json") .json_body(json_pattern!({ "_links": { "pb:latest-provider-pacts": { "href": "http://localhost/pacts/provider/{provider}/latest", "templated": true, } } })); i }) .await .interaction("a request for a providers pacts", "", |mut i| async move { i.given("There are two pacts in the pact broker"); i.request .path("/pacts/provider/happy_provider/latest") .header("Accept", "application/hal+json") .header("Accept", "application/json"); i.response .header("Content-Type", "application/hal+json") .json_body(json_pattern!({ "_links":{ "pacts":[ {"href":"http://localhost/pacts/provider/happy_provider/consumer/Consumer/version/1.0.0"}, {"href":"http://localhost/pacts/provider/happy_provider/consumer/Consumer2/version/1.0.0"} ] } })); i }) .await .interaction("a request for the first provider pact", "", |mut i| async move { i.given("There are two pacts in the pact broker"); i.request .path("/pacts/provider/happy_provider/consumer/Consumer/version/1.0.0") .header("Accept", "application/hal+json") .header("Accept", "application/json"); i.response .header("Content-Type", "application/json") .body(pact.clone()); i }) .await .interaction("a request for the second provider pact", "", |mut i| async move { i.given("There are 
two pacts in the pact broker"); i.request .path("/pacts/provider/happy_provider/consumer/Consumer2/version/1.0.0") .header("Accept", "application/hal+json") .header("Accept", "application/json"); i.response .header("Content-Type", "application/json") .body(pact2.clone()); i }) .await .start_mock_server(None); let result = fetch_pacts_from_broker(pact_broker.url().as_str(), "happy_provider", None).await; match &result { Ok(_) => (), Err(err) => panic!("Expected an Ok result, got a error {}", err) } let pacts = &result.unwrap(); expect!(pacts.len()).to(be_equal_to(2)); for pact in pacts { match pact { Ok(_) => (), Err(err) => panic!("Expected an Ok result, got a error {}", err) } } } #[tokio::test] async fn fetch_pacts_for_verification_from_broker_returns_a_list_of_pacts() { try_init().unwrap_or(()); let pact = RequestResponsePact { consumer: Consumer { name: "Consumer".to_string() }, provider: Provider { name: "happy_provider".to_string() }, .. RequestResponsePact::default() } .to_json(PactSpecification::V3).unwrap().to_string(); let pact_broker = PactBuilder::new("RustPactVerifier", "PactBroker") .interaction("a request to the pact broker root", "", |mut i| async move { i.given("Pacts for verification is enabled"); i.request .path("/") .header("Accept", "application/hal+json") .header("Accept", "application/json"); i.response .header("Content-Type", "application/hal+json") .json_body(json_pattern!({ "_links": { "pb:provider-pacts-for-verification": { "href": like!("http://localhost/pacts/provider/{provider}/for-verification"), "title": like!("Pact versions to be verified for the specified provider"), "templated": like!(true) } } })); i }) .await .interaction("a request to the pacts for verification endpoint", "", |mut i| async move { i.given("There are pacts to be verified"); i.request .get() .path("/pacts/provider/happy_provider/for-verification") .header("Accept", "application/hal+json") .header("Accept", "application/json"); i.response .header("Content-Type", 
"application/hal+json") .json_body(json_pattern!({ "_links": { "self": { "href": like!("http://localhost/pacts/provider/happy_provider/for-verification"), "title": like!("Pacts to be verified") } } })); i }) .await .interaction("a request to fetch pacts to be verified", "", |mut i| async move { i.given("There are pacts to be verified"); i.request .post() .path("/pacts/provider/happy_provider/for-verification") .header("Accept", "application/hal+json") .header("Accept", "application/json") .json_body(json_pattern!({ "consumerVersionSelectors": each_like!({ "tag": "prod" }), "providerVersionTags": each_like!("master"), "includePendingStatus": like!(false), "providerVersionBranch": like!("main") })); i.response .header("Content-Type", "application/hal+json") .json_body(json_pattern!({ "_embedded": { "pacts": each_like!({ "shortDescription": "latest prod", "verificationProperties": { "pending": false, "notices": [ { "when": "before_verification", "text": "The pact at http://localhost/pacts/provider/happy_provider/consumer/Consumer/pact-version/12345678 is being verified because it matches the following configured selection criterion: latest pact for a consumer version tagged 'prod'" }, { "when": "before_verification", "text": "This pact has previously been successfully verified by a version of happy_provider with tag 'master'. If this verification fails, it will fail the build. 
Read more at https://pact.io/pending" } ] }, "_links": { "self": { "href": "http://localhost/pacts/provider/happy_provider/consumer/Consumer/pact-version/12345678", "name": "Pact between Consumer (239aa5048a7de54fe5f231116c6d603eab0c6fde) and happy_provider" } } }) }, "_links": { "self": { "href": like!("http://localhost/pacts/provider/happy_provider/for-verification"), "title":like!("Pacts to be verified") } } })); i }) .await .interaction("a request for a pact by version", "", |mut i| async move { i.given("There is a pact with version 12345678"); i.request .path("/pacts/provider/happy_provider/consumer/Consumer/pact-version/12345678") .header("Accept", "application/hal+json") .header("Accept", "application/json"); i.response .header("Content-Type", "application/json") .body(pact.clone()); i }) .await .start_mock_server(None); let result = fetch_pacts_dynamically_from_broker(pact_broker.url().as_str(), "happy_provider".to_string(), false, None, vec!("master".to_string()), Some("main".to_string()), vec!(ConsumerVersionSelector { consumer: None, tag: Some("prod".to_string()), fallback_tag: None, latest: None, branch: None, deployed_or_released: None, deployed: None, released: None, main_branch: None, matching_branch: None, environment: None, }), None).await; match &result { Ok(_) => (), Err(err) => panic!("Expected an Ok result, got a error {}", err) } let pacts = &result.unwrap(); expect!(pacts.len()).to(be_equal_to(1)); for pact in pacts { match pact { Ok(_) => (), Err(err) => panic!("Expected an Ok result, got a error {}", err) } } } #[tokio::test] async fn fetch_pacts_for_verification_from_broker_returns_empty_list_if_there_are_no_pacts() { try_init().unwrap_or(()); let pact_broker = PactBuilder::new("RustPactVerifier", "PactBroker") .interaction("a request to the pact broker root", "", |mut i| async move { i.given("Pacts for verification is enabled"); i.request .path("/") .header("Accept", "application/hal+json") .header("Accept", "application/json"); 
i.response .header("Content-Type", "application/hal+json") .json_body(json_pattern!({ "_links": { "pb:provider-pacts-for-verification": { "href": like!("http://localhost/pacts/provider/{provider}/for-verification"), "title": like!("Pact versions to be verified for the specified provider"), "templated": like!(true) } } })); i }) .await .interaction("a request to the pacts for verification endpoint", "", |mut i| async move { i.request .get() .path("/pacts/provider/sad_provider/for-verification") .header("Accept", "application/hal+json") .header("Accept", "application/json"); i.response .header("Content-Type", "application/hal+json") .json_body(json_pattern!({ "_links": { "self": { "href": like!("http://localhost/pacts/provider/sad_provider/for-verification"), "title": like!("Pacts to be verified") } } })); i }) .await .interaction("a request to fetch pacts to be verified", "", |mut i| async move { i.given("There are no pacts to be verified"); i.request .post() .path("/pacts/provider/sad_provider/for-verification") .header("Accept", "application/hal+json") .header("Accept", "application/json") .json_body(json_pattern!({ "consumerVersionSelectors": each_like!({ "tag": "prod" }), "providerVersionTags": each_like!("master"), "includePendingStatus": like!(false), "providerVersionBranch": like!("main") })); i.response .json_body(json_pattern!({ "_embedded": { "pacts": [] } })); i }) .await .start_mock_server(None); let result = fetch_pacts_dynamically_from_broker(pact_broker.url().as_str(), "sad_provider".to_string(), false, None, vec!("master".to_string()), Some("main".to_string()), vec!(ConsumerVersionSelector { consumer: None, tag: Some("prod".to_string()), fallback_tag: None, latest: None, branch: None, deployed_or_released: None, deployed: None, released: None, main_branch: None, matching_branch: None, environment: None, }), None).await; match result { Ok(_) => { panic!("Expected an error result, but got OK"); }, Err(err) => { println!("err: {}", err); 
expect!(err.to_string().starts_with("Link/Resource was not found - No pacts were found for this provider")).to(be_true()); } } } #[test] fn test_build_payload_with_success() { let result = TestResult::Ok(vec![]); let payload = super::build_payload(result, "1".to_string(), None); expect!(payload).to(be_equal_to(json!({ "providerApplicationVersion": "1", "success": true, "testResults": [], "verifiedBy": { "implementation": "Pact-Rust", "version": PACT_RUST_VERSION } }))); } #[test] fn test_build_payload_adds_the_build_url_if_provided() { let result = TestResult::Ok(vec![]); let payload = super::build_payload(result, "1".to_string(), Some("http://build-url".to_string())); expect!(payload).to(be_equal_to(json!({ "providerApplicationVersion": "1", "success": true, "buildUrl": "http://build-url", "testResults": [], "verifiedBy": { "implementation": "Pact-Rust", "version": PACT_RUST_VERSION } }))); } #[test] fn test_build_payload_adds_a_result_for_each_interaction() { let result = TestResult::Ok(vec![Some("1".to_string()), Some("2".to_string()), Some("3".to_string()), None]); let payload = super::build_payload(result, "1".to_string(), Some("http://build-url".to_string())); expect!(payload).to(be_equal_to(json!({ "providerApplicationVersion": "1", "success": true, "buildUrl": "http://build-url", "testResults": [ { "interactionId": "1", "success": true }, { "interactionId": "2", "success": true }, { "interactionId": "3", "success": true } ], "verifiedBy": { "implementation": "Pact-Rust", "version": PACT_RUST_VERSION } }))); } #[test] fn test_build_payload_with_failure() { let result = TestResult::Failed(vec![]); let payload = super::build_payload(result, "1".to_string(), None); expect!(payload).to(be_equal_to(json!({ "providerApplicationVersion": "1", "success": false, "testResults": [], "verifiedBy": { "implementation": "Pact-Rust", "version": PACT_RUST_VERSION } }))); } #[test] fn test_build_payload_with_failure_with_mismatches() { let result = TestResult::Failed(vec![ 
(Some("1234abc".to_string()), Some(MismatchResult::Mismatches { mismatches: vec![ MethodMismatch { expected: "PUT".to_string(), actual: "POST".to_string() } ], expected: Box::new(RequestResponseInteraction::default()), actual: Box::new(RequestResponseInteraction::default()), interaction_id: Some("1234abc".to_string()) })) ]); let payload = super::build_payload(result, "1".to_string(), None); expect!(payload).to(be_equal_to(json!({ "providerApplicationVersion": "1", "success": false, "testResults": [ { "interactionId": "1234abc", "mismatches": [ { "attribute": "method", "description": "Expected method of PUT but received POST" } ], "success": false } ], "verifiedBy": { "implementation": "Pact-Rust", "version": PACT_RUST_VERSION } }))); } #[test] fn test_build_payload_with_failure_with_exception() { let result = TestResult::Failed(vec![ (Some("1234abc".to_string()), Some(MismatchResult::Error("Bang".to_string(), Some("1234abc".to_string())))) ]); let payload = super::build_payload(result, "1".to_string(), None); expect!(payload).to(be_equal_to(json!({ "providerApplicationVersion": "1", "success": false, "testResults": [ { "exceptions": [ { "message": "Bang" } ], "interactionId": "1234abc", "success": false } ], "verifiedBy": { "implementation": "Pact-Rust", "version": PACT_RUST_VERSION } }))); } #[test] fn test_build_payload_with_mixed_results() { let result = TestResult::Failed(vec![ (Some("1234abc".to_string()), Some(MismatchResult::Mismatches { mismatches: vec![ MethodMismatch { expected: "PUT".to_string(), actual: "POST".to_string() } ], expected: Box::new(RequestResponseInteraction::default()), actual: Box::new(RequestResponseInteraction::default()), interaction_id: Some("1234abc".to_string()) })), (Some("12345678".to_string()), Some(MismatchResult::Error("Bang".to_string(), Some("1234abc".to_string())))), (Some("abc123".to_string()), None) ]); let payload = super::build_payload(result, "1".to_string(), None); expect!(payload).to(be_equal_to(json!({ 
"providerApplicationVersion": "1", "success": false, "testResults": [ { "interactionId": "1234abc", "mismatches": [ { "attribute": "method", "description": "Expected method of PUT but received POST" } ], "success": false }, { "exceptions": [ { "message": "Bang" } ], "interactionId": "12345678", "success": false }, { "interactionId": "abc123", "success": true } ], "verifiedBy": { "implementation": "Pact-Rust", "version": PACT_RUST_VERSION } }))); } #[test] fn build_link_from_json() { let json = json!({ "href": "localhost" }); let link = Link::from_json(&"link name".to_string(), json.as_object().unwrap()); expect!(link.name).to(be_equal_to("link name")); expect!(link.href).to(be_some().value("localhost")); expect!(link.templated).to(be_false()); let json2 = json!({ "templated": true }); let link2 = Link::from_json(&"link name".to_string(), json2.as_object().unwrap()); expect!(link2.name).to(be_equal_to("link name")); expect!(link2.href).to(be_none()); expect!(link2.templated).to(be_true()); } #[test] fn build_json_from_link() { let link = Link { name: "Link Name".to_string(), href: Some("1234".to_string()), templated: true, title: None }; let json = link.as_json(); expect!(json.to_string()).to(be_equal_to( "{\"href\":\"1234\",\"templated\":true}")); let link = Link { name: "Link Name".to_string(), href: Some("1234".to_string()), templated: true, title: Some("title".to_string()) }; let json = link.as_json(); expect!(json.to_string()).to(be_equal_to( "{\"href\":\"1234\",\"templated\":true,\"title\":\"title\"}")); } }
rust
package com.lpf.traffic.light.turn;

/**
 * Concrete {@link TurnHandle} that performs a left turn by delegating
 * to the supplied {@link TurnControl}.
 */
public class TurnLeftHandle implements TurnHandle {

    /** Control object that actually steers the car; never reassigned. */
    private final TurnControl control;

    /**
     * @param turnControl the control used to execute the left turn
     */
    public TurnLeftHandle(TurnControl turnControl) {
        this.control = turnControl;
    }

    /** Turn the car left by delegating to the underlying control. */
    @Override
    public void turnCar() {
        control.goTurnLeft();
    }
}
java
# Package metadata.
# NOTE(review): author/email are anonymized dataset placeholders ('<NAME>',
# '<EMAIL>') — fill in real values before publishing.
__author__ = '<NAME>'
__email__ = '<EMAIL>'
__version__ = '0.0.1'
python
{ "name": "ratio-bot", "version": "0.0.0", "description": "", "main": "index.ts", "scripts": { "start": "node ." }, "license": "MIT", "bugs": { "url": "https://github.com/divinitybot/bot/issues" }, "homepage": "https://github.com/divinitybot/bot#readme", "dependencies": { "discord.js": "^13.3.1" } }
json
<reponame>elhachimi/fahd import React from "react" import { Image, CloudinaryContext } from "cloudinary-react" export default ({ name, description, image }) => ( <a className="db center mw5 black link dim" title="Frank Ocean's Blonde on Apple Music" href="javascript:function() { return false; }" > <CloudinaryContext cloudName="dvgmggxmh"> <Image publicId={image} className="db ba b--black-10" /> </CloudinaryContext> {/* <dl className="mt2 f6 lh-copy"> <dt className="clip">Name</dt> <dd className="ml0 fw9">{name}</dd> <dt className="clip">Mesures</dt> <dd className="ml0 gray">Frank Ocean</dd> </dl> */} </a> )
javascript
{ "id":"18356419", "tms:id":"61636", "accession_number":"1938-88-1387", "title":"Drawing, \"Tabernacle\"", "url":"http:\/\/collection.cooperhewitt.org\/objects\/18356419\/", "department_id":"35347493", "period_id":null, "media_id":"35410933", "type_id":"35237093", "date":null, "year_start":null, "year_end":null, "year_acquired":"1938", "decade":null, "woe:country":"23424853", "medium":"Pen and brown ink, brush and brown wash", "markings":"Verso: Cooper Union Museum for the Arts of Decoration (Lugt#457d and 457e)", "signed":null, "inscribed":null, "provenance":"Ex collection: Piancastelli and Mrs. <NAME>.", "dimensions":"Image: 27 x 18.6 cm (10 5\/8 x 7 5\/16 in.)", "creditline":"Museum purchase through gift of various donors and from Eleanor G. Hewitt Fund", "description":"Vertical rectangle. Below are the moldings of the altar furniture. The tabernacle has the shape of a building. Seen from its left corner. Below is a dado. The central part is bordered by a convex semi-circle with six free columns and a cupola. Inside is a group of figures. The lateral panels contain a niche with a statue. Above the upper entablature are on either side two figures, in the center are two angels upon clouds, in adoration of a central motif.", "justification":null, "type":{ "id":"35237093", "name":"Drawing" }, "images":[ ], "participants":[ ], "tombstone":{ "epitaph":"Drawing, \"Tabernacle\".\nPen and brown ink, brush and brown wash. Museum purchase through gift of various donors and from Eleanor G. Hewitt Fund. 1938-88-1387." }, "colors":[ ] }
json
Tom Graveney enjoyed his best days in Test cricket after turning 39. However, even when well into his 60s and 70s he continued to dazzle onlookers with his elegance and class. Arunabha Sengupta recounts two of his magical acts with the willow at seriously advanced age — the last of which came on May 6, 2003. Class proved to be permanent as the most aesthetically pleasing of England’s post-War batsmen came back at the age of 39 to take on Wes Hall and Charlie Griffith; and he scored two hundreds against the fiery men, plonking his left foot down the wicket and hooking hair-flattening bouncers disdainfully off the front foot. In the Tests that followed in the final days of his career, there were further hundreds of quality and grace, along with more than one and a half thousand runs. There were, however, an epilogue and a post-script, inscribed by that most splendid of willows, long after the story of Graveney the cricketer had been closed with the magnificent final chapters. Graveney, a delight to watch all his career, remained a man who could spontaneously pick up the willow when men of his age were wont to reach for the supporting cane. Every time he did so, he unfurled magic. Those were deeds performed by an increasingly venerable man and they remained everlastingly imprinted on the minds of those lucky enough to witness the acts — as the happiest of recollections, to be accessed with awe. Broadcaster and journalist Sir Michael Parkinson recalled one such occasion. In his brief foreword to Andrew Murtagh’s recent biography of the maestro, Parkinson describes two scenes. One, he confesses, is the cream that emerges on the surface when his memory churns through a long lifetime of watching cricket: the image of Fred Trueman bowling to Graveney in the Yorkshire-Worcestershire match at the New Road Ground in the summer of 1962. 
The purity and grace of Trueman’s action, and the languid elegance of Graveney’s strokeplay, in the foreground of the cathedral with the Severn flowing along in the distance; Graveney scored 89 that day on a seaming pitch. Parkinson stood as the umpire as Graveney took guard. The broadcaster hoped that the fast bowler, a recent recruit from Australia and genuinely quick, would have the sense and courtesy to give the old man an easy one off the mark. However, the young man from Down Under had no clue about the identity or the stature of the man in his sixties who stood ready to face him. He rushed in with all the exuberance and speed of youth. The ball sped through nasty and short. Graveney just about managed to sway away and cast at the bowler what Parkinson termed ‘an old-fashioned look’. The youthful Australian fast bowler, whom Parkinson does not name, returned to his mark and sprinted in again. There was another bouncer. This time, however, Graveney plonked his foot down the wicket as in the days of yore, and hooked him off the front foot into the adjoining churchyard. The response of the fast bowler could not be printed. As I wrote at the beginning of the article, there was a further postscript. It took place on May 6, 2003, when Graveney was 76. The setting was a beautiful college cricket ground, nestling on the slopes of the Malvern Hills, overlooking a breath-taking landscape etched with the Severn and the Vale of Eversham. Far into the distance, in the blue outline of the valley, one could even glimpse Cotswold. The occasion was the formal opening of the new, all-weather cricket nets at the Malvern College grounds. It was to be dedicated to George Chesterton, the former Worcestershire opening bowler, a geography teacher at Malvern and for long the master in charge of cricket. Some 54 summers before that day, Chesterton had played for Oxford University against Gloucestershire, in only his second First-Class match. 
He had captured six on that occasion, his first victim being a 21-year-old Graveney. They had become friends along the way, each serving Worcestershire CCC as president. So, Chesterton had invited Graveney to the event. The former was 81, the latter, as already mentioned, 76. A crowd gathered, the atmosphere was festive, the boys were having a hit, some of the masters had joined in, there were parents, some interested onlookers, and some members of the press. And suddenly Graveney’s twinkling eyes fell upon a spare bat. Chesterton, 263 wickets from 72 First-Class matches at 22.78, had played his final game for Worcestershire in 1957. He had continued to turn out for Free Foresters after that, but had called it a day in 1961. His final couple of matches had come for a touring MCC side in Dublin, way back in 1966. An octogenarian now, he needed no further encouragement. He grabbed a ball. Chesterton measured his run up. In his days of youth and glory he used ten paces. Now it was more of, as Murtagh describes, ‘a hop and a step.’ But the arm had remained as straight as ever. The ball was pitched perfectly on the off stump. Graveney leaned forward, his bat went through the classical downswing and the ball was struck into the side netting. But for the nets, it would have sped through where covers would have been. An entirely new generation had witnessed the famous Graveney cover drive. It was, according to Murtagh, the last time that Chesterton ever bowled or Graveney ever batted. But the cover-drive was as unblemished as ever. This website uses cookies so that we can provide you with the best user experience possible. Cookie information is stored in your browser and performs functions such as recognising you when you return to our website and helping our team to understand which sections of the website you find most interesting and useful. Strictly Necessary Cookie should be enabled at all times so that we can save your preferences for cookie settings. 
If you disable this cookie, we will not be able to save your preferences. This means that every time you visit this website you will need to enable or disable cookies again.
english
<filename>examples/src/main.rs
// Example runner: executes a chosen example program on the Miden VM,
// proves the execution, and then verifies the resulting STARK proof.

use examples::{Example, ExampleOptions, ExampleType};
use log::debug;
use miden::StarkProof;
use std::{io::Write, time::Instant};
use structopt::StructOpt;

fn main() {
    // configure logging: raw message output (no timestamps/targets), debug level
    env_logger::Builder::new()
        .format(|buf, record| writeln!(buf, "{}", record.args()))
        .filter_level(log::LevelFilter::Debug)
        .init();

    // read command-line args
    let options = ExampleOptions::from_args();

    debug!("============================================================");
    let proof_options = options.get_proof_options();

    // instantiate and prepare the example
    let example = match options.example {
        ExampleType::Fib { sequence_length } => examples::fibonacci::get_example(sequence_length),
    };
    let Example {
        program,
        inputs,
        num_outputs,
        pub_inputs,
        expected_result,
    } = example;
    #[cfg(feature = "std")]
    debug!("--------------------------------");

    // execute the program and generate the proof of execution;
    // execution timing is only measured when the `std` feature is enabled
    #[cfg(feature = "std")]
    let now = Instant::now();
    let (outputs, proof) = miden::execute(&program, &inputs, num_outputs, &proof_options).unwrap();
    debug!("--------------------------------");

    #[cfg(feature = "std")]
    debug!(
        "Executed program in {} ms",
        //hex::encode(program.hash()), // TODO: include into message
        now.elapsed().as_millis()
    );
    debug!("Program output: {:?}", outputs);
    // sanity-check the VM output against the example's known expected answer
    assert_eq!(
        expected_result, outputs,
        "Program result was computed incorrectly"
    );

    // serialize the proof to see how big it is
    let proof_bytes = proof.to_bytes();
    debug!("Execution proof size: {} KB", proof_bytes.len() / 1024);
    debug!(
        "Execution proof security: {} bits",
        proof.security_level(true)
    );
    debug!("--------------------------------");

    // verify that executing a program with a given hash and given inputs
    // results in the expected output; the proof is round-tripped through
    // bytes first, so deserialization is exercised as well
    let proof = StarkProof::from_bytes(&proof_bytes).unwrap();
    let now = Instant::now();
    match miden::verify(*program.hash(), &pub_inputs, &outputs, proof) {
        Ok(_) => debug!("Execution verified in {} ms", now.elapsed().as_millis()),
        Err(err) => debug!("Failed to verify execution: {}", err),
    }
}
rust
These Starfield spaceships will blow your mind. In Starfield, ships are built piece by piece, with a few essential elements necessary for the vessel to take flight. Starfield's high level of customization has inspired many to push the engine to its limits when designing their own space-worthy vessels. While the game has a fleet of iconic starships in its own right, like the Starfield Star Eagle ship, there's still plenty of room for creativity and invention. Whether it's a spaceship inspired by popular movies or TV franchises or just a really great concept, there are many cool ship designs that have already been created in the game. Related: How To Steal (& Sell) a Ship In Starfield 10 Capybara Ship Mechanical things like spaceships modeled after organic living things like animals make for a great self-imposed mission in Starfield's ship builder. User Kurbs on Reddit had that exact thought when they constructed a Starfield vessel modeled after a Capybara with their boyfriend. Less known outside of South America, the Capybara is a giant rodent resembling a landlocked beaver. Read more: Starfield: Will Starfield on Xbox Have Mods? Will the console version of Starfield get to enjoy the wonders of the modding community? How To Increase Ship Storage Capacity In Starfield. More space for the additions to your collection. Butt-ugly Starfield ship defeats the enemy AI's perfect aim. A Starfield player has created what they claim is an “unbeatable” spacecraft after figuring out that the enemy AI targets the center of ships. Starfield High Price To Pay: Should You Build the Armillary On Your Ship or An Outpost. The Starborn are after the Armillary in Starfield, and you have a choice to keep it on your ship or at an outpost. How To Get Starfield’s Best Ships For Free, And No, Not Space Hijacking. I have acquired a career in Starfield it seems, and that is stealing ships. It’s my favorite thing to do and I genuinely cannot believe the kinds of ships this tactic has landed me over time. 
So I’m here to share what I know. Which Starfield Faction You Should Join FirstOne Starfield faction is clearly the best.
english
What’s the story? South African opener Dean Elgar has accepted a deal from Somerset to play as their overseas player in the 2017 season. The 29-year-old will be available to take part in all three formats for Somerset, who came agonisingly close to winning Division One last season. The Proteas opening batsman who has established himself as one of the mainstays of the Test side will be available to play for Somerset across all formats in the 2017 season when he isn't busy with international matches. He admitted that he really enjoyed his time at the club back in 2013 and is eager to come back. The Proteas have already lost Kyle Abbott, Rilee Rossouw and Marchant de Lange to Kolpak deals, which has hurt their chances of building a strong side that is capable of regaining the No. 1 spot in Tests. But the news that their Test opener has taken an overseas deal ahead of a crucial tour will come as good news. With plenty of doubt surrounding AB’s future as a Test player, the news that he has opted out of the England series won’t come as great news for the fans. However, the fact that he is just returning from injury has played a large part in that decision. He admitted that he isn't ready for Tests just yet but is hopeful that everything will be fine. The Proteas star also added that he will take part in the domestic first-class competition for the Titans, in order to prove himself ahead of the series against India, the details of the tour that will take place in 2017 haven’t been revealed as yet. What next? South Africa will now take on Sri Lanka in the limited-overs leg of their tour before playing New Zealand in March and then travelling to England later in the year, without AB de Villiers, who opted out of the series as he is still not completely fine after his elbow surgery. The recent trend of South Africa's Test players opting for Kolpak deals with County sides has not been one that fans would have wanted. 
But this deal makes a lot of sense as it gives Elgar a taste of the conditions in England ahead of their tour in the summer.
english
Bayern Munich legend Franck Ribery has claimed his former teammate Robert Lewandowski deserved the 2021 Ballon d'Or. Despite the Polish star being among the frontrunners for the prize, the title eventually went to Lionel Messi. During an interview with Sky Italia, Ribery lavished praise on French youngster Kylian Mbappe but also suggested that the Bayern Munich talisman is currently even better than the PSG star. Ribery said: "Mbappé is the strongest in general, he knows he has great qualities but he doesn't let himself be taken down. He does his job, he plays, he has fun. It's a pleasure for him and it's pleasant when you see him play. " "But Lewandowski is the number one striker for me at the moment, the strongest in the world for two or three years. He works a lot and likes to help the team. (…) I think he deserved the Ballon d'Or," he added. While PSG star Lionel Messi took home the coveted individual award, many experts and fans believe the Ballon d'Or should have gone to Lewandowski. The Polish international has stood out as one of the best strikers in the Bundesliga and Champions League in recent years. Ribery's thoughts were echoed by former teammate Thomas Muller, who openly stated that the Pole should've won the Ballon d'Or. Other players like Zlatan Ibrahimovic, Ilkay Gundogan, and Alphonso Davies mirrored Muller's thoughts, admitting that Lewandowski deserved the award. The Bavarians' key marksman has already flourished this campaign, scoring 28 goals in 23 Bundesliga games and leading Bayern to the top of the table. His nine goals in seven Champions League appearances saw Julian Nagelsmann's men through the group stages and into the knockout round. Last season, the Polish star famously scored 41 Bundesliga goals, beating Gerd Muller's record for scoring the most goals in a season in the German top flight. He may not reach those heights again, but the striker remains a world-class striker by any standard. 
According to a report from Sport Bild via Bavarian Football Works, Bayern Munich are actively making progress on extending the contracts of three important players. Thomas Muller, Robert Lewandowski, and Manuel Neuer are reportedly set to receive two-year extensions with the Bavarians. Muller's party was reportedly contacted first by the club, and his contract extension is expected to be a fairly straightforward affair. With Lewandowski's extension, negotiations are said to be pending and could be a potentially tricky situation. Manuel Neuer's captaincy and longtime relationship with the Bavarians will help with negotiations, especially with the star reportedly intent on hanging up his gloves at the club. However, a deal has not yet been announced.
english
New Delhi: The Supreme Court on Friday agreed to hear bail plea of Indrani Mukerjea, the prime accused in the Sheena Bora murder case, and sought a response from the CBI. Senior advocate Mukul Rohatgi, appearing for Mukerjea, submitted before the bench that his client has been in jail for more than six years and taking into account the ongoing state of affairs it is apparent that the trial will not come to an end in 10 years. “Six and a half years she has been in jail. The trial is not going to end in the next 10 years,” he said. The bench asked Rohatgi, how many witnesses are there in the case. Rohatgi responded that 185 witnesses are yet to be examined. He added that no witness has been examined in the past one and half years, and her husband is already on bail. Rohatgi further submitted the trial court has been vacant without a presiding officer since June 2021. He also informed the top court that his client is also not well. “My husband is on bail. This lady is not well. She is suffering,” he said. Mukerjea has been in jail since her arrest in the murder case in 2015. Recently Mukerjea created a sensation by sending a letter to the CBI claiming that her daughter Sheena Bora was alive. The CBI has made it clear this particular angle will not be taken up, unless there is an intervention from the court. In April 2012, a case was filed with the Mumbai Police alleging kidnapping and murder of Sheena Bora. In 2015, the CBI took up the investigation. Indrani was arrested and so was her husband Peter Mukerjea, who was granted bail in March 2020. In December last year, Indrani wrote a letter to CBI stating that she would move the special court to record the statement of an inmate who had claimed to have met Bora in Kashmir. The court has rejected her bail on several occasions. In November last year, the Bombay High Court rejected her bail plea, noting that material in the form of circumstantial evidence strongly supported her direct involvement in the murder.
english
--- layout: post title: "Research on Emotion Index of Students 大学生情绪指数分析" categories: Projects excerpt: We Collected relevant emotional indicators of college students and public investors, used principal component analysis method to construct a sentiment index and found the emotion index do have predictability to the stock price.我们小组通过对“安信证券杯”大学生模拟炒股大赛中的数据,运用主成分分析的方法对大学生的投资行为和心理状况,量化成指数并与A股指数进行对比,进而分析大学生情绪指数和A股走势的关系。随后再将市场上的总体水平,与大学生情绪指数进行量化比较,确定出大学生群体投资特点以及情绪指数确实有预见能力 --- 我们小组通过对“安信证券杯”大学生模拟炒股大赛中的数据,运用主成分分析的方法对大学生的投资行为和心理状况,量化成指数并与A股指数进行对比,进而分析大学生情绪指数和A股走势的关系。随后再将市场上的总体水平,与大学生情绪指数进行量化比较,确定出大学生群体投资特点以及情绪指数确实有预见能力。以下内容为结项论文摘选。 由于变量多且有一定的相关性,我们采取主成分分析方法,主成分分析把大量原始指标组合成较少的几个综合指标,可以使分析简化,这种方法是用线性组合的方法将原始指标组合起来,得到反映原始指标变动程度最大的综合指标。每个情绪指标都在不同程度上反映了投资者情绪。一方面,不同的指标在反映投资者情绪有重复,另一方面,各个投资者情绪只能反映情绪的一个角度。因此我们有必要通过主成分分析对初始情绪指标降维,提取出主成分作为投资者情绪的代理变量。 对于市场情绪,我们获得了,每日开户数,每日交易额,大盘换手率的数据。对于大学生投资者,我们使用爬虫工具获得了参赛者收益率、周转率、仓位、最大回撤深度、个股交易量,成交价格等数据,并将其计算成资金余额,单日交易额,股票总市值,仓位,收益率,最大回撤作为情绪指数因子。根据以上数据分别进行主成分分析,并与大盘指数对比,结果如下图。 根据以上图示结果,可以认为大学生群体总体偏被动,受情绪影响并没有想象的大,以及情绪指数确实有预见能力。 We used the principal component analysis method to quantify the investment behavior and psychological status of college students by using the data in the “Essence Securities Cup” college students' and compare this emotion index with the benchmark(A-share index). Then, the college students' sentiment index is compared with the overall emotion of the market. We determined the investment characteristics of the college students and we noticed the sentiment index do have predictive ability. The following is an excerpt of the final paper. Because of the large number of variables and correlation, we adopt the principal component analysis method. Principal component analysis combines a large number of original indicators into a few comprehensive indicators, which can simplify the analysis. 
This method uses a linear combination method on the indicators to obtain a comprehensive indicator that captures the greatest degree of variation in the original indicators. Each sentiment indicator reflects investor sentiment to varying degrees. On the one hand, different indicators may have repetition of investor sentiment. On the other hand, each investor's sentiment can only reflect an angle of emotion. Therefore, it is necessary to use the principal component analysis to reduce the initial emotional indicators and extract the principal components as the proxy variables of investor sentiment. For market sentiment, we obtained data on daily account opening, daily trading volume, and market turnover. For college student investors, we use the Web crawler tool to obtain the entrant's rate of return, turnover rate, position, maximum drawdown, individual stock trading volume, transaction price and other data, and calculate it into balance, single day trading amount, total stock market value, position, return, and maximum drawdown as sentiment index factors. Principal component analysis was performed according to the above data, and compared with the market index, the results are shown below. According to the results, it can be considered that the college students are generally passive to the market, the emotions are not as significant as imagined, and the sentiment index does have predictive ability. <center> <img src="https://i.ibb.co/Nj4Rzr1/image.png" width="50%"/> </center> <center> <img src="https://i.ibb.co/6XXPg3z/image.png" width="50%"/> </center>
markdown
<filename>chasm/src/main/java/org/quiltmc/chasm/internal/metadata/OriginMetadata.java
package org.quiltmc.chasm.internal.metadata;

import org.quiltmc.chasm.api.Transformation;
import org.quiltmc.chasm.api.metadata.Metadata;

/**
 * Metadata recording which transformer a piece of data originated from,
 * identified by the transformer's id.
 */
public class OriginMetadata implements Metadata {
    private final String transformerId;

    /**
     * Records the origin of the given {@link Transformation}, i.e. the id
     * of the transformer that owns it.
     *
     * @param origin the transformation whose parent transformer is recorded
     */
    public OriginMetadata(Transformation origin) {
        this.transformerId = origin.getParent().getId();
    }

    /**
     * {@inheritDoc}
     *
     * <p>The copy carries the same transformer id as this instance.
     */
    @Override
    public OriginMetadata copy() {
        return new OriginMetadata(this.transformerId);
    }

    // Internal constructor used by copy(); bypasses the Transformation lookup.
    private OriginMetadata(String transformerId) {
        this.transformerId = transformerId;
    }
}
java
<filename>Kamek/src/bossKoopaThrow.cpp
// Boss-thrown projectile actor: a single actor class that can appear as a
// wrench, exploding wrench, bomb, hammer, thwomp or present, depending on
// its sprite settings (see the "Types" list below).

#include <common.h>
#include <game.h>
#include <g3dhax.h>
#include <sfx.h>
#include "boss.h"

// Per-type resources, sounds and physical parameters.
struct TypeInfo {
	const char *arcName;     // resource archive the model is loaded from
	const char *brresName;   // brres file inside that archive
	const char *modelName;   // model within the brres
	const char *deathEffect; // effect name spawned when the projectile dies
	int launchSound;         // played once on spawn (0 = none)
	int breakSound;          // played when the projectile breaks
	int flySound;            // played while flying (0 = none)
	float size;              // collision extent (used for both axes)
	float scale;             // model scale
	u16 xrot;                // per-frame spin applied to each rotation axis
	u16 yrot;
	u16 zrot;
};

// Indexed by Type (0..5).
static const TypeInfo types[6] = {
	{"choropoo", "g3d/choropoo.brres", "spanner", "Wm_en_hit", 0, SE_BOSS_JR_FLOOR_BREAK, 0, 8.0f, 2.0f, 0, 0, 0x1000},
	{"choropoo", "g3d/choropoo.brres", "spanner", "Wm_en_burst_s", 0, SE_BOSS_JR_BOMB_BURST, 0, 12.0f, 2.0f, 0, 0, 0x1000},
	{"koopa_clown_bomb", "g3d/koopa_clown_bomb.brres", "koopa_clown_bomb", "Wm_en_burst_s", SE_EMY_ELCJ_THROW, SE_BOSS_JR_BOMB_BURST, 0, 16.0f, 0.8f, 0x200, 0x800, 0x1000},
	{"bros", "g3d/t00.brres", "bros_hammer", "Wm_en_hit", 0, SE_OBJ_HAMMER_HIT_BOTH, 0, 16.0f, 2.0f, 0, 0, 0x1000},
	{"dossun", "g3d/t02.brres", "dossun", "Wm_en_hit", SE_EMY_DOSSUN, SE_EMY_DOSSUN_DEAD, 0, 14.0f, 1.0f, 0, 0, 0},
	{"KoopaShip", "g3d/present.brres", "PresentBox_penguin", "Wm_dm_presentopen", SE_DEMO_OP_PRESENT_THROW_2400f, SE_DEMO_OP_PRESENT_BOX_BURST, 0, 20.0f, 1.0f, 0x20, 0x40, 0x200}
};

// Archives this actor needs loaded (NULL-terminated list).
const char* KPTarcNameList [] = {
	"choropoo",
	"koopa_clown_bomb",
	"dossun",
	"KoopaShip",
	NULL
};

class daKoopaThrow : public dEn_c {
	int onCreate();
	int onExecute();
	int onDelete();
	int onDraw();

	mHeapAllocator_c allocator;
	m3d::mdl_c bodyModel;

	int timer;
	char Type;       // projectile kind, 0..5 (see "Types" list below)
	char direction;  // throw direction nibble from settings (0 or 1)
	char front;      // 1 = place at foreground z, else background z
	float ymod;
	int lifespan;
	u32 cmgr_returnValue;
	bool playsAnim;  // true when a chr animation is bound (thwomp only)
	m3d::anmChr_c chrAnim;
	nw4r::snd::SoundHandle hammerSound; // handle for hammer sound loop (Type 3)
	const TypeInfo *currentInfo;        // &types[Type], set in onCreate

	static daKoopaThrow *build();
	void updateModelMatrices();

	void playerCollision(ActivePhysics *apThis, ActivePhysics *apOther);
	void spriteCollision(ActivePhysics *apThis, ActivePhysics *apOther);
	bool collisionCat1_Fireball_E_Explosion(ActivePhysics *apThis, ActivePhysics *apOther);
	bool collisionCat2_IceBall_15_YoshiIce(ActivePhysics *apThis, ActivePhysics *apOther);
	bool collisionCat9_RollingObject(ActivePhysics *apThis, ActivePhysics *apOther);
	bool collisionCat13_Hammer(ActivePhysics *apThis, ActivePhysics *apOther);
	bool collisionCat14_YoshiFire(ActivePhysics *apThis, ActivePhysics *apOther);
	bool collisionCat7_GroundPound(ActivePhysics *apThis, ActivePhysics *apOther);

	USING_STATES(daKoopaThrow);
	DECLARE_STATE(Straight);
};

CREATE_STATE(daKoopaThrow, Straight);

// Types:
//
// 0 - Wrench
// 1 - Exploding Wrench
// 2 - Bomb
// 3 - Hammer
// 4 - Thwomp
// 5 - Present
//

extern "C" void *PlayWrenchSound(dEn_c *);
extern "C" void *dAcPy_c__ChangePowerupWithAnimation(void * Player, int powerup);
extern "C" int CheckExistingPowerup(void * Player);

// Player touched the projectile. Presents power the player up instead of
// hurting them; explosive types (1, 2) detonate on contact.
void daKoopaThrow::playerCollision(ActivePhysics *apThis, ActivePhysics *apOther) {
	if (Type == 5) {
		// Present: burst open and upgrade a small/mini player to big.
		PlaySoundAsync(this, currentInfo->breakSound);
		SpawnEffect(currentInfo->deathEffect, 0, &this->pos, &(S16Vec){0,0,0}, &(Vec){3.0, 3.0, 3.0});
		// dStageActor_c *spawned = CreateActor(EN_ITEM, 0x20000063, this->pos, 0, 0);
		// spawned->pos.x = this->pos.x;
		// spawned->pos.y = this->pos.y;
		int p = CheckExistingPowerup(apOther->owner);
		if (p == 0 || p == 3) {
			// Powerups - 0 = small; 1 = big; 2 = fire; 3 = mini; 4 = prop; 5 = peng; 6 = ice; 7 = hammer
			dAcPy_c__ChangePowerupWithAnimation(apOther->owner, 1);
		}
		this->Delete(1);
		return;
	}

	DamagePlayer(this, apThis, apOther);

	if (Type == 1 || Type == 2) {
		// Exploding wrench / bomb: detonate on the player and despawn.
		PlaySoundAsync(this, SE_BOSS_JR_BOMB_BURST);
		SpawnEffect("Wm_en_burst_s", 0, &this->pos, &(S16Vec){0,0,0}, &(Vec){0.75, 0.75, 0.75});
		SpawnEffect("Wm_mr_wirehit", 0, &this->pos, &(S16Vec){0,0,0}, &(Vec){1.25, 1.25, 1.25});
		this->Delete(1);
	}
}

// Collisions with other sprites are ignored.
void daKoopaThrow::spriteCollision(ActivePhysics *apThis, ActivePhysics *apOther) {}

// NOTE(review): return value semantics of these callbacks (presumably
// true = hit consumed) come from dEn_c -- confirm before relying on them.
bool daKoopaThrow::collisionCat1_Fireball_E_Explosion(ActivePhysics *apThis, ActivePhysics *apOther) { return true; }
bool daKoopaThrow::collisionCat2_IceBall_15_YoshiIce(ActivePhysics *apThis, ActivePhysics *apOther) { return false; }
bool daKoopaThrow::collisionCat9_RollingObject(ActivePhysics *apThis, ActivePhysics *apOther) { return true; }

// Hit by a hammer: spawn the appropriate break/burst effects, play the
// type's break sound, and despawn.
bool daKoopaThrow::collisionCat13_Hammer(ActivePhysics *apThis, ActivePhysics *apOther) {
	if (Type == 1 || Type == 2) {
		SpawnEffect("Wm_en_burst_s", 0, &this->pos, &(S16Vec){0,0,0}, &(Vec){0.75, 0.75, 0.75});
		SpawnEffect("Wm_mr_wirehit", 0, &this->pos, &(S16Vec){0,0,0}, &(Vec){1.25, 1.25, 1.25});
	} else {
		SpawnEffect("Wm_ob_cmnboxgrain", 0, &this->pos, &(S16Vec){0,0,0}, &(Vec){0.5, 0.5, 0.5});
	}
	PlaySoundAsync(this, currentInfo->breakSound);
	this->Delete(1);
	return true;
}

bool daKoopaThrow::collisionCat14_YoshiFire(ActivePhysics *apThis, ActivePhysics *apOther) { return true; }

// Ground-pounded: still damages the player; explosive types also detonate.
bool daKoopaThrow::collisionCat7_GroundPound(ActivePhysics *apThis, ActivePhysics *apOther) {
	DamagePlayer(this, apThis, apOther);
	if (Type == 1 || Type == 2) {
		PlaySoundAsync(this, SE_BOSS_JR_BOMB_BURST);
		SpawnEffect("Wm_en_burst_s", 0, &this->pos, &(S16Vec){0,0,0}, &(Vec){0.75, 0.75, 0.75});
		SpawnEffect("Wm_mr_wirehit", 0, &this->pos, &(S16Vec){0,0,0}, &(Vec){1.25, 1.25, 1.25});
		this->Delete(1);
	}
	return true;
}

// Factory entry point: placement-allocate the actor on the game heap.
daKoopaThrow *daKoopaThrow::build() {
	void *buffer = AllocFromGameHeap1(sizeof(daKoopaThrow));
	return new(buffer) daKoopaThrow;
}

// Spawn-time setup: decode sprite settings, load model (plus throw animation
// for thwomps), configure physics and collision sensors, position the actor,
// and enter the Straight state.
int daKoopaThrow::onCreate() {
	// settings layout: bits 0-3 = direction, 4-7 = type, 8-11 = front flag
	this->direction = this->settings & 0xF;
	this->Type = (this->settings >> 4) & 0xF;
	this->front = (this->settings >> 8) & 0xF;
	currentInfo = &types[Type];

	allocator.link(-1, GameHeaps[0], 0, 0x20);

	nw4r::g3d::ResFile rf(getResource(currentInfo->arcName, currentInfo->brresName));
	nw4r::g3d::ResMdl resMdl = rf.GetResMdl(currentInfo->modelName);
	bodyModel.setup(resMdl, &allocator, (Type == 4 ? 0x224 : 0), 1, 0);
	SetupTextures_Enemy(&bodyModel, 0);

	if (Type == 4) { // Thwomp: bind its throw animation so it plays in flight
		playsAnim = true;
		nw4r::g3d::ResAnmChr anmChr = rf.GetResAnmChr("boss_throw");
		chrAnim.setup(resMdl, anmChr, &allocator, 0);
		chrAnim.bind(&bodyModel, anmChr, 1);
		bodyModel.bindAnim(&chrAnim, 0.0);
		chrAnim.setUpdateRate(1.0);
	}
	allocator.unlink();

	// Square hitbox sized from the type table; thwomps get it offset upward.
	ActivePhysics::Info KoopaJunk;
	KoopaJunk.xDistToCenter = 0.0f;
	KoopaJunk.yDistToCenter = (Type == 4) ? currentInfo->size : 0.0;
	KoopaJunk.xDistToEdge = currentInfo->size;
	KoopaJunk.yDistToEdge = currentInfo->size;

	this->scale.x = currentInfo->scale;
	this->scale.y = currentInfo->scale;
	this->scale.z = currentInfo->scale;

	KoopaJunk.category1 = 0x3;
	KoopaJunk.category2 = 0x0;
	KoopaJunk.bitfield1 = 0x47;
	KoopaJunk.bitfield2 = 0xFFFFFFFF;
	KoopaJunk.unkShort1C = 0;
	KoopaJunk.callback = &dEn_c::collisionCallback;
	this->aPhysics.initWithStruct(this, &KoopaJunk);
	this->aPhysics.addToList();

	// Rectangle used by the out-of-zone despawn check in onExecute.
	spriteSomeRectX = currentInfo->size;
	spriteSomeRectY = currentInfo->size;
	_320 = 0.0f;
	_324 = currentInfo->size;

	// These structs tell stupid collider what to collide with - these are from koopa troopa
	static const lineSensor_s below(12<<12, 4<<12, 0<<12);
	static const pointSensor_s above(0<<12, 12<<12);
	static const lineSensor_s adjacent(6<<12, 9<<12, 6<<12);
	collMgr.init(this, &below, &above, &adjacent);
	collMgr.calculateBelowCollisionWithSmokeEffect();
	cmgr_returnValue = collMgr.isOnTopOfTile();

	if (this->direction == 0) { // Ground Facing Left
		this->pos.x -= 0.0; // -32 to +32
		this->pos.y += 36.0;
		// this->rot.z = 0x2000;
	} else if (this->direction == 1) { // Ground Facing Right
		this->pos.x += 0.0; // +32 to -32
		this->pos.y += 36.0;
		// this->rot.z = 0xE000;
	}

	// Z placement depends on the 'front' setting nibble.
	if (this->front == 1) {
		this->pos.z = -1804.0;
	} else {
		this->pos.z = 3300.0;
	}

	if (currentInfo->launchSound != 0) {
		PlaySound(this, currentInfo->launchSound);
	}

	if (Type == 3) {
		// Hammer: keep the sound handle so the loop can be stopped in onDelete.
		PlaySoundWithFunctionB4(SoundRelatedClass, &hammerSound, SE_EMY_MEGA_BROS_HAMMER, 1);
	}

	doStateChange(&StateID_Straight);

	this->onExecute();
	return true;
}

// Teardown: stop the looping hammer sound if it is still playing.
int daKoopaThrow::onDelete() {
	if (hammerSound.Exists())
		hammerSound.Stop(10);
	return true;
}

int daKoopaThrow::onDraw() {
	bodyModel.scheduleForDrawing();
	return true;
}

// Push current position/rotation/scale into the model's draw matrix.
void daKoopaThrow::updateModelMatrices() {
	matrix.translation(pos.x, pos.y, pos.z);
	matrix.applyRotationYXZ(&rot.x, &rot.y, &rot.z);
	bodyModel.setDrawMatrix(matrix);
	bodyModel.setScale(&scale);
	bodyModel.calcWorld(false);
}

// Per-frame update: run the state machine, loop the bound animation (if
// any), and despawn once the projectile leaves its zone.
int daKoopaThrow::onExecute() {
	acState.execute();
	updateModelMatrices();

	if (playsAnim) {
		if (chrAnim.isAnimationDone())
			chrAnim.setCurrentFrame(0.0f);
		bodyModel._vf1C();
	}

	float rect[] = {this->_320, this->_324, this->spriteSomeRectX, this->spriteSomeRectY};
	int ret = this->outOfZone(this->pos, (float*)&rect, this->currentZoneID);
	if(ret) {
		this->Delete(1);
	}
	return true;
}

// Launch: randomized horizontal speed plus a fixed upward kick.
// NOTE(review): the direction comments here and in executeState_Straight
// look mutually inconsistent (0 vs 1 labelled "rightwards") -- confirm which
// direction value actually flies right before changing anything.
void daKoopaThrow::beginState_Straight() {
	float rand = (float)GenerateRandomNumber(10) * 0.4;
	if (this->direction == 0) {
		// directions 1 spins clockwise, fly rightwards
		speed.x = 1.5 + rand;
	} else {
		// directions 0 spins anti-clockwise, fly leftwards
		speed.x = -1.5 - rand;
	}
	speed.y = 9.0;
}

// Ballistic flight: light gravity each frame, per-type spin, plus the
// type's flying sound (wrenches use a dedicated helper).
void daKoopaThrow::executeState_Straight() {
	speed.y = speed.y - 0.01875; // gentle gravity
	HandleXSpeed();
	HandleYSpeed();
	doSpriteMovement();

	// cmgr_returnValue = collMgr.isOnTopOfTile();
	// collMgr.calculateBelowCollisionWithSmokeEffect();
	// if (collMgr.isOnTopOfTile() || (collMgr.outputMaybe & (0x15 << direction))) {
	// 	// hit the ground or wall
	// 	PlaySoundAsync(this, currentInfo->breakSound);
	// 	SpawnEffect(currentInfo->deathEffect, 0, &this->pos, &(S16Vec){0,0,0}, &(Vec){0.75, 0.75, 0.75});
	// 	this->Delete(1);
	// }

	if (this->direction == 1) {
		// directions 1 spins clockwise, fly rightwards
		this->rot.x -= currentInfo->xrot;
		this->rot.y -= currentInfo->yrot;
		this->rot.z -= currentInfo->zrot;
	} else {
		// directions 0 spins anti-clockwise, fly leftwards
		this->rot.x -= currentInfo->xrot;
		this->rot.y -= currentInfo->yrot;
		this->rot.z += currentInfo->zrot;
	}

	if (Type < 2) {
		PlayWrenchSound(this);
	} else if (currentInfo->flySound == 0) {
		return;
	} else {
		PlaySound(this, currentInfo->flySound);
	}
}

void daKoopaThrow::endState_Straight() { }
cpp
TheGrefg's Fortnite Icon Series outfit is all set to release in the game, with the LTM coming back to the game. The 'Floor Is Lava' LTM was introduced back in Fortnite Chapter 1 - Season 8. It became instantly popular, and players enjoyed this game mode for a limited period. The fourth and final Fortnite Icon Series outfit belongs to TheGrefg. The outfit was delayed for a while as it was initially scheduled to release in October 2020. This was followed up by a prank that TheGrefg pulled on the entire Fortnite community. TheGrefg pranked the Fortnite community by posting a tweet mentioning how his outfit has been indefinitely delayed due to his behavior. However, he soon followed this up with an announcement on revealing his outfit on stream. Ninja, Loserfruit, and Lachlan were the three content creators that received their Fortnite Icon Series outfit in 2020. Since then, fans have been looking forward to TheGrefg's introduction to Fortnite. Leaks on Twitter revealed TheGrefg's Icon Series Fortnite outfit. The design is definitely inspired by a Dragon Ball character. The outfit also features three balls, just like the iconic Dragon balls from the anime series. The outfit has a bare body design with a golden belt and golden body stripes running through his torso. There is also a huge TheGrefg logo on the chest, which makes the outfit look like a superhero suit. As one of the pillars of the LATAM Fortnite community, TheGrefg hinted that a surprise tournament might be hosted following his outfit reveal. This was confirmed with the Floor Is Lava LTM returning to Fortnite Chapter 2 - Season 5. The popular LTM features a unique game mode where players have to save themselves from the lava. The lava begins to rise five minutes into the match and keeps rising at a steady pace. Touching the lava instantly reduces the player's HP by 20, although players can build on the lava to protect themselves. 
The lava also sends the player bouncing on the surface, which makes it difficult to build. Players get a small number of materials to build, and the entire Fortnite map gets submerged under lava. While this has been the dynamic of "Floor Is Lava" in Fortnite, it might change with the introduction of TheGrefg's outfit. For instance, the balls in the outfit might have some special use during this LTM in Fortnite. At the same time, players will get a chance to win this tournament, just like they did during the Lachlan Pickaxe Frenzy tournament. On January 11th, 2021, TheGrefg will be showcasing his outfit in Fortnite on the live stream. GTA 5's mammoth $7,700,000,000 earnings set to be challenged by upcoming game! Know more here.
english
<reponame>xanthian/variant_sticks_and_stuff<filename>src/main/resources/assets/vsas/models/item/bows/crimson_bow_pulling_0.json { "parent": "vsas:item/bows/crimson_bow", "textures": { "layer0": "vsas:item/bows/crimson_bow_pulling_0" } }
json
<filename>analyses/verbose/testdata/verbosedata.json { "analysed_dependencies": [ { "name": "click", "transitives": [{ "name": "Transitive 1", "version": "1.1" }], "latest_version": "7.1.2", "publicly_available_vulnerabilities": [ {"id": "ABC-PYTHON-CODECOV-12345", "severity": "medium", "title": "Command Injection1"}, {"id": "ABC-PYTHON-CODECOV-67890", "severity": "medium", "title": "Command Injection2"}, {"id": "ABC-PYTHON-CODECOV-abcde", "severity": "high", "title": "Command Injection3"} ], "vulnerabilities_unique_with_snyk": [ { "severity": "critical", "id": "Critical-12345", "title": "SQL Attack" } ] } ], "total_transitives_scanned": 1, "total_direct_vulnerabilities": 4, "severity": { "medium": [ {"id": "ABC-PYTHON-CODECOV-12345", "severity": "medium", "title": "Command Injection1"}, {"id": "ABC-PYTHON-CODECOV-67890", "severity": "medium", "title": "Command Injection2"} ], "high": [ {"id": "ABC-PYTHON-CODECOV-abcde", "severity": "high", "title": "Command Injection3"} ], "critical": [ {"id": "Critical-12345", "severity": "critical", "title": "SQL Attack"} ] }, "report_link": "https://recommender.api.openshift.io/api/v2/stack-report/123456789" }
json
If John Coates was trying to stir controversy, he succeeded. An International Olympic Committee vice president, Coates was asked a few days ago by a Japanese reporter at an online news conference if the Tokyo Olympics would go ahead, even if a state of emergency were in force in Japan. Coates replied: "Absolutely, yes." Coates said what the IOC and local organisers have been trying to persuade the Japanese public about for months: The postponed Olympics with 11,000 athletes from 200 nations and territories will open on July 23 and will be "safe and secure." But his defiant tone has stirred a backlash in Japan where 60-80 percent in polls say they do not want the Olympics to open in two months in the midst of a pandemic. Just over 12,000 deaths in Japan — good by global standards, but poor in Asia — have been attributed to COVID-19. But Tokyo and Osaka and several other areas are under a state of emergency until May 31. And it's likely to be extended. There is fear of new variants spreading with only a tiny percentage of Japanese vaccinated. Estimates range between 2 percent and 4 percent. "Right now, more than 80 percent of the nation’s people want the Olympics postponed or cancelled," Japanese billionaire businessman Masayoshi Son said over the weekend. He is the founder and CEO of SoftBank Group Corp . He also owns the SoftBank Hawks baseball team. "Who is forcing this to go ahead, and under what rights?” Son added. Technically, the games belong to the International Olympic Committee and only it has the power to cancel. Of course, any move would have to be negotiated with Japanese organisers. There is no suggestion this will happen. Social media criticised Coates, and also went after IOC president Thomas Bach who has said repeatedly that everyone must "sacrifice" to pull off these Olympics, which have already banned fans from abroad. A decision on local fans attending — if any — will be made next month. 
The IOC relies on selling television rights for 75 percent of its income, and Japan has officially spent USD 15.4 billion to prepare the games. Government audits suggest the figure is much higher. All but USD 6.7 billion is public money. The Shukan Post magazine said in its latest issue that organisers have booked all the rooms during the Olympics in at least four of Tokyo's most expensive hotels. The magazine called the accommodations "fitting or royalty" for the IOC and others. Tokyo organising committee president Seiko Hashimoto said on Friday the "Olympic family, IOC and international federations" would amount to 23,000 visitors. The magazine said the IOC would pay up to USD 400 per night for rooms, with local organisers making up any difference. 'Fear, anxiety' Many of Japan's newspapers are among more than 60 local Olympic sponsors that have contributed more than USD 3 billion to local organisers. They have been restrained in their criticism. The Shinano Mainichi Shimbun , which is not a sponsor, called for a cancellation in an editorial on Sunday. Organisers and the IOC say that the Games will be safe because of extensive testing and building a bubble around the athletes. It says more than 80 percent of the residents in the Olympics Village, located on Tokyo Bay, will be vaccinated. The comments of Atsuko Saitoh, who identifies herself as midwife and former university professor, are representative of the criticism on social media. She has run unsuccessfully for Japan's upper house and is running in the next lower house election. "Bach and Coates do not value the lives of the athletes, others involved, or the people of the host nation. It’s tantamount to predicting terrorism to say that the Games will be held under an emergency, despite the overwhelming opposition in public opinion."
english
32. Tripura 33. Uttar Pradesh 34. Uttaranchal 35. West Bengal Note: D.W.F. denotes drinking water facilities. Infrastructural Facilities in KVS 2465. SHRI PRABHUNATH SINGH: Will the Minister of HUMAN RESOURCE DEVELOPMENT be pleased to state: (a) whether Kendriya Vidyalaya Sangathan (KVS) has failed to cater to the education needs of the children; (b) if so, the measures taken in this regard; (c) whether infrastructural facilities; such as building, playground, laboratories, computers, drinking water and sewerage etc. for the students are lacking in many schools despite enhancement of monthly contribution from the students; (d) if so, the action taken in this regard; (e) whether large scale irregularities in the management of schools have been reported to KVS; (f) if so, the details of the steps initiated by the Government to strengthen control and monitoring of KVS over the functioning of schools? THE MINISTER OF STATE IN THE MINISTRY OF HUMAN RESOURCE DEVELOPMENT (SHRI M.A.A. FATMI): (a) No, Sir. (b) Does not arise. (c) and (d) No, Sir. Kendriya Vidyalayas are initially opened in temporary/hired buildings. When the land is To Questions 344 Financial Assistance by Japan for Delhi Metro Rail Project made available free of cost by the concerned State Government the Sangathan construct their permanent buildings with all necessary infrastructure. As on the 1st April 2005, only 207 out of a total of 931 Kendriya Vidyalayas were functioning in temporary/hired buildings. (e) Some irregularities with regard to recruitment of teachers, diversion of funds, concessions to private parties, purchase of land, etc. have come to light and are under detailed enquiry. (1) Kendriya Vidyalaya Sangathan is administered and controlled by a Board of Governors chaired by the Minister for Human Resource Development, which takes all decisions towards ensuring proper functioning of the Sangathan and of the schools. 
The accounts of Kendriya Vidyalaya Sangathan is audited by the office of Director General Audit Central Revenue, New Delhi. There is also a Vidyalaya Management Committee for day-to-day Management of the Vidyalaya in accordance with the rules/directions issued by Kendriya Vidyalaya Sangathan (Hqrs.) from time to time. 2466. SHRI ASADUDDIN OWAISI: Will the Minister of URBAN DEVELOPMENT be pleased to state: (a) whether Delhi Metro Rail Project is being partially financed by the Government of Japan; (b) If so, the total assistance so far provided for the said project; (c) whether the Metro Rail Projects in other States are likely to be financed by the Government of Japan; and Sravana 18, 1927 (Saka) (d) if so, the details thereof? THE MINISTER OF PARLIAMENTARY AFFAIRS AND MINISTER OF URBAN DEVELOPMENT (SHRI GHULAM NABI AZAD): (a) and (b) Delhi Metro Rail Project is partially financed by loan from the Japan Bank of International Cooperation (JBIC). The assistance so far is of the order of Rs. 4440 crore. (c) and (d) Government of India has posed the Bangalore Metro Project to Government of Japan for funding under Financial Year 2005-2006 ODA Loan package for JBIC loan amounting to Rs. 1500 crore. Another project, namely, Kolkata Metro Project has been posed to Government of Japan for JBIC loan under Rolling Plan 2006-2007. Ban on Sale of Non-lodised Salt 2467. SHRI N. JANARDHANA REDDY: SHRI P.S. 
GADHAVI: Will the Minister of COMMERCE AND INDUSTRY be pleased to state: (a) whether the Government is planning to reimpose ban on production and the sale of non-lodised salt in the country; (b) if so, the details thereof alongwith the reasons therefor; (c) the policy adopted by the Government to rehabilitate the displaced persons as a result thereof; (d) whether the commercial production of fortified salt with iron and lodine has begun/is expected to begin; (e) if so, the details thereof; (f) whether the existing lodised salt manufacturing units are capable of meeting the salt requirement in the country; To Questions 346 (g) if not, the steps being taken by the Government to boost the production of iodised salt; (h) whether the Government is aware of the adverse effects of non-iodised salt on the health of the people; and (i) if so, the action taken/being taken by the Government to make iodised salt available to the poor people at cheaper rate compared to the non-iodised salt? THE MINISTER OF COMMERCE AND INDUSTRY (SHRI KAMAL NATH): (a) and (b) The Government of India has taken a decision to restrict the sale of edible common salt for direct human consumption unless it is iodised. However, it may be sold for iodisation, iron fortification, animal use, preservation, manufacturing medicines and industrial use under proper lable declaration. The Government has under the Prevention of Food Adulteration (PFA) Rules, 1955, issued a draft notification GSR 340(E) dated 27-5-2005 for this purpose for inviting objections/suggestions. (c) As there is no ban on the production of edible common salt, there would be no displacement of any person. (d) and (e) As per PFA Rules, 1955, production and sale of iodised salt and iron fortified salt are already permitted. However, double fortification of salt with both iron and lodine is not permitted under PFA Rules at present. (f) and (g) Existing lodised salt manufacturing units can meet the requirement of the country. 
(h) and (i) The use of lodised salt prevents several lodine deficiency disorders like goltre, abortions, stillbirths, congenital anomalies, mental deficiency, etc. lodised salt is available at a price ranging from Rs. 2.50 per kg. to Rs. 8.50 per kg. depending on the variety, quality and
english
NIPER-Hyderabad is home to some of the most venerated academic programs in India. Qualifying Criteria for award of Masters degree in various programmes : Requirements for Master of Pharmacy/Master of Technology (Pharm.)/Master of Sciences (Pharm.)/Master of Business Administration (Pharm.) Requirements in undertaking Doctor of Pharmacy:
english
<filename>src/RNQrScanView.js /* eslint-disable no-underscore-dangle */ import React, { Component } from 'react'; import { requireNativeComponent, ViewPropTypes, View, Platform } from 'react-native'; import PropTypes from 'prop-types'; import { request, PERMISSIONS, RESULTS } from 'react-native-permissions'; const EventThrottleMs = 500; export default class RNQrScanner extends Component { _lastEvents = []; _lastEventsTimes = []; constructor(props) { super(props); this.state = { grant: false, }; } componentDidMount() { const requestPermission = Platform.OS === 'android' ? PERMISSIONS.ANDROID.CAMERA : PERMISSIONS.IOS.CAMERA; request(requestPermission).then((result) => { const isGranted = result === RESULTS.GRANTED; this.setState({ grant: isGranted, }); }); } _onObjectDetected = (callback) => ({ nativeEvent }) => { const { type } = nativeEvent; if ( this._lastEvents[type] && this._lastEventsTimes[type] && JSON.stringify(nativeEvent) === this._lastEvents[type] && new Date() - this._lastEventsTimes[type] < EventThrottleMs ) { return; } if (callback) { callback(nativeEvent); this._lastEventsTimes[type] = new Date(); this._lastEvents[type] = JSON.stringify(nativeEvent); } }; render() { const { grant } = this.state; if (!grant) { return (<View style={this.props.style} />); } return ( <RNQrScanView style={this.props.style} boxTopOffset={this.props.boxTopOffset} boxSize={this.props.boxSize} flashMode={this.props.flashMode} noticeText={this.props.noticeText} scanMode={this.props.scanMode} enableScan={this.props.enableScan} onBarCodeRead={this._onObjectDetected(this.props.onBarCodeRead)} /> ); } } RNQrScanner.defaultProps = { onBarCodeRead: () => {}, enableScan: true, flashMode: false, scanMode: 1, boxTopOffset: 0, boxSize: 200, noticeText: null, }; RNQrScanner.propTypes = { ...ViewPropTypes, boxSize: PropTypes.number, scanMode: PropTypes.number, boxTopOffset: PropTypes.number, noticeText: PropTypes.string, onBarCodeRead: PropTypes.func, flashMode: PropTypes.bool, enableScan: 
PropTypes.bool, }; const RNQrScanView = requireNativeComponent('RNQrScanView', RNQrScanner, { nativeOnly: { onBarCodeRead: true, scanMode: true, boxTopOffset: true, noticeText: true, flashMode: true, }, });
javascript
Perhaps the greatest single leap forward any beginner can make when starting out with Photoshop is learning to work with Adjustment Layers. In this easy-to-follow guide we explain exactly how. What is an Adjustment Layer? An Adjustment Layer is a transparent layer that is placed over an image in Photoshop to add an effect without making any destructive changes to the original photo below. An Adjustment Layer functions like a filter but with an added degree of flexibility and control. Imagine sheets of acetate stacked over a printed photograph, each one altering the look of the photo at the bottom of the pile. As the acetate sheets aren’t permanently attached to the photo, they can be switched out for others, or shuffled into a different order. That’s pretty much how Photoshop Adjustment Layers work: As Adjustment Layers are totally separate from the background image, you can keep tweaking them until you get the photo looking just how you want it. All without making any destructive changes to your original file. This has three main advantages: the strength of the effect can be adjusted at any time by changing the opacity of the layer; an Adjustment Layer can be made to alter only a specific section of the image by adding a layer mask; and if you totally change your mind about the effect of an Adjustment Layer you can easily switch it off or even just delete it. Going to Image > Adjustments > Color Balance would permanently change the colors of the background image. Instead you should go to Layer > New Adjustment Layer > Color Balance, where you’ll be prompted to choose a name for the Adjustment Layer. Now you can work on the colors of your photo, safe in the knowledge that the changes can easily be modified or discarded later. For example, once you’ve made your adjustments to the colors, you can fine-tune the strength of the effect with the Opacity slider on the Adjustment Layer panel. 
This is particularly handy if you come back to an image you were working on earlier, only to decide that you went overboard with the color adjustments and want to bring things back a little closer to how they were before. Drop the opacity to somewhere between the two, and you’ve got an instant compromise without having to re-do your work. Editing color is just one example of how you can use an Adjustment Layer. In fact, you should use Adjustment Layers for almost any photo editing task. Take the contrast, for example. Depending upon the lighting conditions when shooting, images straight out of the camera can often look a little flat and lifeless. This is especially true when photographing on a cloudy day or in the shade. Increasing the contrast will help to give a photo a little extra graphic impact, but effectively also reduces the amount of information in your photo. For this reason it needs to be done carefully. Although Photoshop has an Auto Contrast function, this should generally be avoided, as it gives no control over the degree of contrast that will be applied to your image. On top of which, the Auto Contrast command is applied destructively to the background image, not as an Adjustment Layer, and so will permanently alter your original file. Never a good idea. Instead, go to Layer > New Adjustment Layer > Brightness/Contrast, and once again choose a name for the Adjustment Layer. Now tweak the Contrast slider until you achieve the desired degree of punch. More advanced users might want to go to Layer > New Adjustment Layer > Curves instead. The Curves panel offers a much greater level of control over adjustments to brightness and contrast, but is notoriously scary for beginners to use. (Learn more about Curves in this article.) As with many Photoshop editing techniques, adjustments to the contrast of an image can easily be overdone – making your photos look trashy and amateurish. 
For a more sophisticated and technically accomplished look, pay particular attention to shadow and highlight areas when adjusting the contrast. Is there still some detail in the shadows, or have they all turned to black? What about the highlights, is info still visible there? Or have the highlights burned out completely, becoming just an empty white space? Subtlety is key: You’re looking for extra bite, but without losing too much information. To check for burned out areas, go to Window > Histogram, and a panel will appear displaying histogram information for your photo. It’s rare that a photo isn’t significantly improved by a bit of creative “dodging and burning.” While Photoshop has dedicated Dodge and Burn tools, there’s actually a better way of achieving this using Adjustment Layers. Before we get to that, though, what do we actually mean by the term “dodge and burn”? And what’s it used for? The expression dodge and burn is a hangover from the days of analog film photography. It refers to the technique of partially blocking the light from a photographic enlarger in the darkroom – exposing some areas of the photographic paper for more (or less) time than others. This has the effect of making those particular sections darker (or lighter) than the overall exposure of the print. There are several reasons why we might want to do this to a photo. First, the balance of an image is extremely important. Darker areas feel “heavy,” while lighter ones feel, well, “light.” For an image to appear balanced, our eyes expect to see heavier (i.e. darker) areas at the bottom of the frame. But as they don’t always come out that way when we shoot them, we’ll often need to give the image a helping hand in the editing stage. Similarly, if the main subject of the photo (for example, a person) is on the right side of the shot, then the composition might feel more balanced if some “weight” (i.e. density) is added on the left side in order to compensate. 
The best way to do this is to create a new Adjustment Layer (Layer > New Adjustment Layer) either for Brightness/Contrast or, if you’re feeling brave, Curves. As we want this Adjustment Layer only to alter the exposure of the image – rather than the colors or anything else – we’ll also need to change the Mode setting in the Adjustment Layer panel from Normal to Luminosity. Let’s say we want to darken the bottom third of the frame to give a greater feeling of solidity and balance to the image. Our next step, then, would be to create a second Brightness/contrast Adjustment Layer (don’t re-use the first one we already made, though, as this would just undo its effects). Now move the Brightness slider left to the degree that you want to darken the bottom of the frame. Of course, this darkens the whole image all at once. So now we need to select the area of the image we wish to mask from the effects of the Adjustment Layer (in this case the top two-thirds of the frame). Painting with black on an Adjustment Layer creates a mask which blocks that part of the Adjustment Layer so that it doesn’t have any effect on the background image. What we want to do now, then, is to select the bottom third of the image using the Gradient tool. To do this, make sure the background and foreground are set to white and black respectively, and draw a line using the gradient tool descending from approximately the top 1/3 mark down to the bottom 2/3 mark. Now the effects of the Adjustment Layer will be visible only on the bottom part of the image, gradually fading out about a third of the way up. If the effect is still too subtle – if the bottom part of the image isn’t dark enough – we have two main options open to us. The easiest solution is to move the Brightness slider further to the left on the Adjustments panel, darkening the bottom of the frame. However, the feathered gradient might not be gradual enough to handle this, and so the effect might become too obvious. 
If that happens, the alternative is to keep our first Adjustment Layer as it was, and then repeat the above process, only this time making the gradient begin a little lower down or higher up the frame. This way it will overlap with, but not exactly match, our first adjustment layer; likely making for a smoother transition. We can go on stacking adjustment layers in this way as much as we need to, until we get a nice smooth gradient and the desired degree of burning at the bottom of the frame. Another highly effective editing technique is vignetting; that is, subtly darkening the corners and edges of a photo while leaving the center normally exposed. This has the effect of drawing the viewer’s eye into the frame and can be achieved in exactly the same way that we added weight to the bottom of the frame in the steps above. Only this time we draw a circle with the Lasso or Elipse tool in the center of the photo so that the Adjustment Layer only alters the edges and corners of the frame. You may need to experiment with the degree of feathering (gradient) here, as the exact results will depend upon the size of your image. In our example we feathered the selection by 250 pixels, but if you are working on a much larger file you may need to input a higher value here (the maximum amount of feathering Photoshop will currently allow is 1000px). Be careful with this vignetting effect, though, as it is often overused and can look amateurish if applied heavy-handedly. Vignetting should be subconsciously felt rather than seen. As a general rule, if the effect is immediately noticeable, then it’s probably too much. There’s no reason not to use vignetting in tandem with other burning techniques, too. For example, we might want to go for a subtle vignette and darken other areas of the image as well. Clever use of dodging and burning can also help to draw the eye toward the main subject of a photo by toning down distractions. 
For example, if a person is photographed in a busy scene, with a lot of competing information all around her, the viewer may be unsure where to look. By lightening up the main subject, or even just the face or eyes if the subject is a person, and burning down any bright or distracting areas of the foreground or background, there will be less information competing for the viewer’s attention. This way it will be more obvious precisely which element is the main subject of the photo, and its impact will be much greater. What’s the best way to do this? Let’s imagine that we want to darken down one particularly distracting element in a photograph. There are various ways we can about go it, but perhaps the most effective is to create a new Brightness/Contrast Adjustment Layer and slide the Brightness control to the right to darken the image, just as we did above. This time, though, we don’t make a selection with the Lasso or Gradient tools, but instead immediately fill the entire Adjustment Layer with black using the Paint Bucket tool or by pressing Alt+Backspace (alternatively, Alt-click the Layer Mask icon) to add a mask that blocks the effect of the entire Adjustment Layer. Now the effects of the Adjustment Layer are no longer visible anywhere, masked as they are by the black paint. But just as black blocks the effects of an Adjustment Layer, white reveals those effects again (or serves to delete the black, if you prefer to think of it that way). This means that if we now select the Paintbrush tool (right-click for options), with a moderately soft brush and pure white selected from the color palette, we can go in and effectively paint shadow onto the areas of the image that we want to darken down. 
Be careful not overdo your edits in this way, though, as there is a risk that a photo can end up looking very fake and unnatural if the rules of light are not respected – in other words, if you start playing around with the balance of light between different elements of the image too much, highlights can end up darker than shadows and vice versa. This would result in totally improbable lighting, undermining the credibility of your photo and making it look more like an illustration than reality. Nonetheless, this technique is particularly handy for toning down small but distracting details, such as a bright button on a coat or an annoying highlight area. Good photo editing is often really just about problem solving. We’ve barely touched upon the many possible uses of Adjustment Layers here. All the same, the above techniques should have armed you with the sufficient skills and, perhaps most importantly, put you in the right frame of mind, to tackle all kinds of photo editing and retouching problems on your own. So the next time you’re faced with a particularly troublesome photo editing challenge, consider whether the creative use of Adjustment Layers might be the answer. That’s what the pros do!
english
Chennai’s Presidency College retained its third rank. PSGR Krishnammal College for Women, Coimbatore secured the fourth spot, a jump from its previous rank (6th) in 2022. St. Xavier’s College, Kolkata’s rank also improved from 8th spot in 2022 to 5th spot this year. You have exhausted your monthly limit of free stories. Atma Ram Sanatan Dharm College, DU secured the sixth spot, followed by Loyola College which secured the seventh spot, a decline from the fourth spot in 2022. Rama Krishna Mission Vivekananda Centenary College, Kolkata secured the 8th spot, Kirori Mal College and Lady Shri Ram of Delhi University jointly secured the 9th rank. LSR was ranked second in 2020 and 2021. A total of five Delhi University colleges are in the top 10.
english
Fatorda (Goa), Nov 30: NorthEast United FC head coach Ricki Herbert Sunday kept his cards close to his chest as he downplayed the significance of his team's clash against FC Goa in an Indian Super League (ISL) match here Monday. NorthEast United, who are currently sixth on the table, can displace the hosts from the third spot if they win Monday. Both sides come into the fixture after convincing wins in their last outings but Goa have been in top form in the second half of the tournament, having won thrice in the last five games. Herbert said he was aware of this fact but countered that his team will be looking to win as well. "I don't think anybody is really hard to beat in this league. All the teams are probably in the same mix. Every team has had their ups and downs in the competition and with three games to go, everyone is still in contention for a spot in the knockout stages. We're just looking at the Goa game as another fixture. It's a game we want to win," he said at the pre-match press conference here Sunday. Looking back at the last game, when the northeasterners humbled league leaders Chennaiyin FC 3-0 in their own backyard, the New Zealander was all praises for his players. "I thought we were terrific in the last game. We played probably the best team in the league and it was a very convincing result for us. We're very pleased to be here in Goa and we look forward to another three points," he said. A loss in the previous match would have left the Highlanders, who were bottom of the league table, in a precarious position in terms of qualifying for the knockout stages. Herbert, however, chose to look at things positively. "I don't think we would have been out of the competition had we not won that game. It would certainly make things very, very difficult for us, but as it turns out, we put our best foot forward and took three points from a tough game," he said. 
Not divulging too much team information, the coach revealed that every player has travelled and will be training on the eve of the match.
english
{"limit":100,"name":"<NAME>","value":90,"highalch":54,"id":4825,"lowalch":36,"name_pt":"Arco composto sem corda","price":814,"last":814}
json
<reponame>Rudracool/MotherCareBackend- { "type": "object", "subtab": { "id": "recentlyvieweditems", "group": "catalog", "title": "Recently Viewed Items", "docRef": "bridgehead_4667037511", "description": "Recently Viewed Items" }, "properties": { "recentlyViewedItems.useCookie": { "group": "catalog", "subtab": "recentlyvieweditems", "type": "boolean", "title": "Use cookies for Recently Viewed Items", "description": "Check this box to track recently viewed items with a browser cookie.", "default": true }, "recentlyViewedItems.numberOfItemsDisplayed": { "group": "catalog", "subtab": "recentlyvieweditems", "type": "integer", "title": "Number of displayed items", "description": "Enter the number of recently viewed items to display in the browser.", "default": 6 } } }
json
An airstrike killed a senior Taliban commander and several of his fighters in the southern province of Kandahar, NATO said on Monday. The strike on Sunday targeted Haji Amir, one of the top two rebel commanders in Kandahar’s Panjwayee district, the NATO—led International Security Assistance Force (ISAF) said. The air raid was called in after Amir and his fighters had been tracked for several days and when they stopped at a small, unoccupied hut in a farming area near the village of Zangabad, it said. Amir was among hundreds of Taliban prisoners who escaped from a jail in the provincial capital, also called Kandahar, in June 2008. He had since been leading Taliban attacks in the Dand, Zherai and Panjwayee districts, the ISAF said. “Most recently, Amir had been in Pakistan planning the Taliban’s upcoming attacks, and he returned to Afghanistan in April to lead attacks against coalition and Afghan forces,” it said. The strike came amid preparations by around 12,000 Afghan and NATO forces for a major offensive in Kandahar province. A NATO official recently said that the allied forces planned to take out “as many Taliban leaders as we can” before the start of the operation in Kandahar. Kandahar is the spiritual home of the Taliban movement and was its main headquarters from 1995 to late 2001 when it was ousted from power in a US—led invasion. Afghan and NATO officials said they hope to turn around the tide of war against the militants in Kandahar and force them to negotiate peace with the government.
english
{ "name": "webdriverio-simple-example", "version": "1.0.0", "description": "simple mobile example", "main": "", "scripts": { "test.android.local.emu": "npx wdio test/configs/wdio.android.local.emu.conf.ts", "test.sayucelabs.android.emu.eu": "REGION=eu npx wdio test/configs/wdio.android.sauce.emu.conf.ts", "test.saucelabs.android.emu.us": "npx wdio test/configs/wdio.android.sauce.emu.conf.ts", "test.ios.local.sim": "npx wdio test/configs/wdio.ios.local.sim.conf.ts", "test.saucelabs.ios.sim.eu": "npx REGION=eu npx wdio test/configs/wdio.ios.sauce.sim.conf.ts", "test.saucelabs.ios.sim.us": "npx wdio test/configs/wdio.ios.sauce.sim.conf.ts" }, "keywords": [], "license": "MIT", "devDependencies": { "@wdio/cli": "^7.16.15", "@wdio/local-runner": "^7.16.15", "@wdio/mocha-framework": "^7.16.15", "@wdio/sauce-service": "^7.16.15", "@wdio/appium-service": "^7.19.1", "@wdio/spec-reporter": "^7.16.14", "@wdio/types": "^7.16.14", "ts-node": "^10.5.0", "typescript": "^4.5.5" } }
json
Manchester City are reportedly interested in signing Real Madrid legend Luka Modric next summer on a free transfer. The Croatian midfielder's contract with the club expires next summer. Hence, he will be free to negotiate with foreign clubs in January. As per El Nacional, Manchester City have shared a proposal with Modric and asked him to share his decision in the winter transfer window. The 37-year-old is still performing at the top level despite his age. He has contributed two goals and one assist in nine games across competitions this season. The 2018 Ballon d'Or winner wants to retire at Real Madrid, but perhaps after at least one more year. Los Blancos would be happy to grant him a contract extension at the end of the season. Modric has played 445 games for the club, contributing 33 goals and 73 assists. He has been key in their five UEFA Champions League and three La Liga triumphs, among other honours. However, Manchester City could offer him a big salary and a good project, with Pep Guardiola rating the midfielder highly. Modric also has experience of playing in the Premier League. He played 127 games for Tottenham Hotspur in the competition before moving to Real Madrid in 2012. Los Blancos, meanwhile, are also looking for potential replacements for the veteran midfielder. They have identified Jude Bellingham, Sergej Milinkovic-Savic and Nicolo Barella as their targets. City, meanwhile, could go for Modric's teammate Toni Kroos if the Croat decides to extend his stay at the Santiago Bernabeu. Los Blancos beat Getafe 1-0 away in La Liga on Saturday (October 8) to return to the top of the table. While Real Madrid have dropped points just once in all competitions this season, it was their first clean sheet in La Liga. Speaking after the game, manager Carlo Ancelotti hailed Eder Militao and Antonio Rudiger for their performances against Getafe, saying (via Marca): "It's very important. 
We have defended very well, and I think that Militao and Rüdiger have done their job, especially in the defence of aerial balls. Also Tchouameni. " Militao scored the winner via a Modric delivery from a corner. Real Madrid will next travel to face Shakhtar Donetsk in the UEFA Champions League on October 11 before hosting rivals Barcelona in La Liga five days later.
english
After a blistering campaign against the ruling Samajwadi Party government in Uttar Pradesh for past several months, the Congress has suddenly pulled itself back and has instead decided to train guns at the Narendra Modi government. The party has now announced “pol kholo abhiyan”, a village level protest across state, on July 22 with complete focus on “suit boot ki Modi sarkar” and not a single word of mention about any issue being raised by other opposition parties against the state government. Monsoon session of the Parliament is set to begin on July 21. Initially, when the protest was planned, Congress had announced to take up issues of non-payment of sugarcane dues and lesser assessment of farmers crop loss by the state government, too. However, party suddenly went on back foot Monday and based its protest completely on BJP government at the Centre. The protest has been planned to help Congress develop its base before upcoming panchayat elections in the state, for which it has asked all its district units to field party-supported candidates. While workers are confused about how the party would strengthen its base in the state by only hitting on central government and not the ruling Samajwadi Party, senior Congress leaders inform that the situation is likely to continue till Bihar Assembly polls, which are due later this year. Asked about the sudden change and leaving Samajwadi Party untouched in its upcoming agitations in the state, state Congress president Nirmal Khatri said, “We have been raising issues against the Samajwadi party government in the past and now it is clear that they are on their way out in the upcoming elections. We are now focusing on the wrong policies and corrupt practices of Modi government at the Centre”. “Moreover, everyone knows that both the BJP and the SP have been working in co-ordination to create polarisation for a long time and Muzaffarnagar was an example. 
Now, we are not required to blow the trumpet over it again and again,” he added. The party workers have been asked to raise issues such as amendments proposed in the Land Acquisition Act, inflation, corruption of BJP leaders, promises made before Lok Sabha elections, reduction of funds in the welfare schemes launched during UPA regime and trying to disturb communal harmony. They have been asked to burn effigy of Prime Minister Narendra Modi and hold meetings to inform villagers about failures of Modi Government. Meanwhile, answering to a question on the party facing difficulties in showing its strength in UP despite a good leadership and ideology, Khatri said “The generation which raised the voice of Congress is no more and we did not cultivate the new generation which we are doing now”.
english
ISI Kolkata has recently published a notification for the recruitment of 1 Project linked person vacancy. Apply now! ISI Kolkata has recently published a notification for the recruitment of 1 Project linked person vacancy. Desirous candidates can check all the job details of no of posts, age limit, salary, qualification etc prescribed below: Rs. 25,000-31,000/- p. m. Educational Qualification & Eligibility Criteria for Project linked person vacancy: To be suitable for the advertised post of Project linked person in ISI , candidate should have qualified M. Sc. in Statistics or equivalent from a recognised University or Institute with acquiring good academic record. Freshers may as well apply for the position. How to Apply for ISI Job Opening: Candidate who are willing to apply for the vacant post are advised to submit with a cover letter superscribed to the Head, Statistical Quality Control and Operation Research Unit of the Institute, Kolkata stating the following details mentioned below: - Name (In block letters), - Permanent/ Present Address, - E-mail Address, - Telephone/Mobile No. . - Parent's/Spouse's Name, - Date of birth, - Academie Qualifications (with the percentage of marks obtained in each examination starting from 10th class), - Experience (if any), - Aadhar Card No. , G) SC/ST/OBC/Differently abled status, along with self-attested soft copies of all the requisite documents latest by 24th December 2020 through e-mail to: akchakraborty123@rediffmail. com. Selection Procedure of Project linked person vacancy: Disclaimer: Provided by Indian Statistical Institute; Kolkata.
english
.post .text-level- { font-weight: bold; } .post .text-level-1 { font-size: 2.5rem; } .post .text-level-2 { font-size: 2rem; } .post .text-level-3 { font-size: 1.75rem; } .post .text-level-4 { font-size: 1.5rem; } .post .text-level-5 { font-size: 1.25rem; } .post .text-level-6 { font-size: 1rem; } .post .image { width: 100%; } .post .authors_photo { width: 35px; height: auto; border-radius: 50%; } .post .authors_name { display: inline-block; } /*# sourceMappingURL=post.component.css.map */
css
U Yadav to Willey, out Bowled!! India are doing well to keep the wickets column ticking. Fine yorker outside off stump, Willey tries to dig it out, drags it back onto the stumps. Willey b U Yadav 1(2)
english
<div class="tickets"> <div class="participants-table"> <h1>{{ page.title }}<a href="/pages/for-editors">Back to Editor's pages</a></h1>{% assign participants = site.data.mapped.tickets.by_Participant %} {% assign totals = site.data.mapped.tickets.stats.count %} {% assign no-regonline = site.data.mapped.tickets.stats.no_regonline %} {% assign regonline = totals | minus, no-regonline %} <h4>Total # Participants: {{ totals }}</h4> <ul> <li>{{ regonline }} registered on Regonline</li> <li>{{ no-regonline }} not registered on Regonline</li> </ul> <table class="table table-striped"> <thead> <th>#</th> <th>Regonline Name</th> <th></th> <th>ticket</th> <th>when-day</th> <th>regonline</th> </thead>{% for participant in participants %} <tr class="text-center regonline-{{participant[1].regonline}}"> <td class="text-center">{{ forloop.index }}</td> <td><a href="{{ participant[1].url }}" target="_blank">{{ participant[1].regonline_Name }}</a></td> <td><a href="https://github.com/OWASP/owasp-summit-2017/blob/master/{{ participant[1].url | replace: '.html','.md'}}" target="_blank">edit</a></td> <td>{{ participant[1].ticket }}</td> <td>{{ participant[1].when-day }}</td> <td>{{ participant[1].regonline }}</td> </tr>{% endfor %} </table> <h3>Participants NOT on Regonline</h3> <ul> {% for participant in participants %} {% if participant[1].regonline == 'No' %} <li><a href="{{ participant[1].url }}" target="_blank">{{ participant[1].regonline_Name }}</a></li> {% endif %} {% endfor %} </ul> </div> </div>
html
Paper 2 Topic: Statutory, regulatory and various quasi-judicial bodies. In its first official forecast of the season, the IMD expects India to have normal monsoons this year. Key facts: - Rains are likely to be 96% of the 50-year average of 89cm for the monsoon season of June to September. They are expected to fan out favourably and “help agriculture.” This estimate falls at the bottom edge of what it considers ‘normal’ monsoon rains. However, due to the looming threat of El Nino and adoption of new weather model, the IMD says there is significant element of uncertainty in this forecast. - Another climate phenomenon, called the Indian Ocean Dipole (IOD), which refers to a swing in the sea surface temperatures in the western and eastern Indian oceans, is also known to influence the Indian monsoon. A ‘positive’ IOD can counter an ominous El Nino. In its assessment, the IMD expects “weak positive IOD” to develop in the latter half of the monsoon, which means that it’s unlikely to be a potent ally this year. India saw drought years in 2014 and 2015. As for 2016, it received 3% less than the 89 cm average, despite an IMD forecast of ‘above normal’ rains. Given the deficient pre-monsoon rains over large parts of Karnataka, Tamil Nadu and Kerala, good rains are essential this year to trap enough soil moisture for a healthy kharif crop, which is vital to keep inflation down and rural consumption up. About IMD: The India Meteorological Department (IMD), also referred to as the Met Department, is an agency of the Ministry of Earth Sciences of the Government of India. - It is the principal agency responsible for meteorological observations, weather forecasting and seismology. - IMD is also one of the six Regional Specialised Meteorological Centres of the World Meteorological Organization. 
- It has the responsibility for forecasting, naming and distribution of warnings for tropical cyclones in the Northern Indian Ocean region, including the Malacca Straits, the Bay of Bengal, the Arabian Sea and the Persian Gulf. Sources: pib. Paper 2 Topic: Effect of policies and politics of developed and developing countries on India’s interests, Indian diaspora. The Australian government has scrapped the Australian “457” visa programme that allowed Australian companies to hire foreign workers in a number of skilled jobs for a period up to four years. The programme will be replaced with a more stringent system, making it difficult to hire non-Australian citizens. This is aimed at giving priority for Australian workers for Australian jobs. - According to the notification, the new visa programme will cut more than 200 eligible jobs for skilled migrants — from 651 to 435, visa fees are set to increase, and the visas will be restricted to filling “critical skills shortages”. New regime: - But both streams will come with several riders such as mandatory labour market testing with limited exemptions, a new non-discriminatory workforce test, criminal history checks, a market salary rate assessment and a new two-year work experience requirement. Further, English language requirements will be tightened for the medium-term stream. - The new visa will also include a strengthened training obligation for employers sponsoring foreign skilled workers to provide enhanced training outcomes for Australians in high-need industries and occupations. How would this affect Indians? The move is expected to impact Indian IT and other companies. India provides the highest number of temporary skilled workers to Australia of any country; eight out of the top 10 occupations for Indian 457 visa holders (as at December 2016) were IT professionals. 
According to the Australian Department of Immigration and Border Protection, Indians constituted 76% of the total ‘457’ visas issued in the three IT streams, and 57% of permanent migrant visas issued in the skilled stream of workers last year. Way ahead: The Centre is studying the implications of the Australian government’s decision to scrap a visa programme that benefited Indians. It has warned that the move could have an impact on negotiations on the free trade agreement — Comprehensive Economic Cooperation Agreements (CECA) — between both countries. Sources: the hindu. Paper 2 Topic: Government policies and interventions for development in various sectors and issues arising out of their design and implementation. The Railway Ministry has sought compensation for operating railway lines of strategic and national importance — a practice discontinued following the merger of Railway and Union Budgets. What’s the issue? Every year, the Indian Railways used to get subsidy from the Finance Ministry for losses incurred on railway operations on strategic lines. After the Budget merger, the Finance Ministry had discontinued the practice of providing annual subsidy to the Railways for operating loss-making strategic routes as the Railways was no longer required to pay an annual dividend. However, recently, two Parliamentary bodies — Standing Committee on Railways and Estimates Committee — recommended that the Finance Ministry should continue compensating the Railways for losses in operating strategic lines for the following reasons: - Considering the role of Railways in nation building, reimbursement to Railways on their operating losses on strategic lines is justifiable. - Also, these lines mainly cater to defence movements on border areas and development of social and backward region. - The compensation also helps the Railways in providing relief towards “socially desirable projects” which are usually loss-making projects. Sources: the hindu. 
Paper 3 Topic: Indian Economy and issues relating to planning, mobilization of resources, growth, development and employment. - RBI has asked the banks to make provisions for standard assets in this sector at higher rates so that necessary resilience is built in the balance sheets should the stress reflect on the quality of exposure to the sector at a future date. What’s the concern? Telecom players are reeling under a huge debt — estimated to be about ₹4.2 lakh crore — amid a tariff war and a consolidation among some of the players. The telecom sector is reporting stressed financial conditions, and presently interest coverage ratio for the sector is less than one. Interest coverage ratio of less than one means the company’s EBIDTA (Earnings Before Interest, Taxes, Depreciation and Amortization) is not sufficient to repay interest, let alone principal. Debt accumulation and potential NPAs have been on the rise across a range of sectors including telecom. Alarming debt levels in the backdrop of recent declining revenues increase debt serviceability risks for the telecom sector. Five sectors, including telecom, contribute to 61% of the stress in the banking system. Sources: the hindu. Paper 1 Topic: Indian culture will cover the salient aspects of Art Forms, Literature and Architecture from ancient to modern times. The Channakeshava temple in Belur — a masterpiece of Hoysala architecture — turned 900 this year. Belur, along with Halebid, is proposed as a UNESCO heritage site. About the temple: The temple, constructed by Vishnuvardhana of the Hoysala dynasty, is a monument recognised by the ASI. Considered to be the first temple of Hoysala style, it was built between 1106 and 1117. The temple is regarded a marvel of architecture in typical Hoysala style. It commemorates the king’s victory in 1104. Sources: the hindu. Paper 2 Topic: Indian culture will cover the salient aspects of Art Forms, Literature and Architecture from ancient to modern times. 
History lovers, curious onlookers, veterans and NCC cadets gathered recently in Chennai to commemorate The Battle of Adyar, a turning point in modern Indian history. About Battle of Adyar: The Battle of Adyar took place on 24 October 1746. The battle was between the French East India Company men and Nawab of Arcot forces over the St. George Fort, which was held by the French. It was part of the First Carnatic War between the English and the French. - The French captured Fort St. George from the British East Indian Company. Nawab of Arcot, a close ally of the British, set out to regain it by sending troops, led by his son Mahfuz Khan, to Madras. While leading an army of 10,000, he was dispersed by French forces, forcing him to move south. Khan seized San Thomé and formed a battle line on the north bank of the Adyar River on October 22 to prevent the French from moving up reinforcements from Pondicherry. - 200 French and French-trained Indian troops led by Captain Paradis, force marched from Pondicherry, crossed Quibble Island and took positions on the south bank of the Adyar River and faced ineffective artillery fire from Khan’s forces. - On 24 October, Paradis was informed that a similar sized army led by de le Tour was on its way from St. George Fort. He decided to ford the Adayar river to attack the rear of Mahfuz Khan’s battle line. de la Tour arrived too late to support Paradis, who with disciplined firing and then charging with bayonets, broke the Nawab’s line. Mahfuz Khan’s troops fled and the Battle of the Adyar River ended with the French retaining control over Fort St. George. Sources: the hindu. Paper 2 Topic: Effect of policies and politics of developed and developing countries on India’s interests, Indian diaspora. President Trump has signed an executive order that directs federal agencies to review employment immigration laws to promote “Hire American” policies. 
Key facts: - The order makes no immediate changes to work visa programs but tells the Departments of Labor, Justice, Homeland Security and State to study existing laws and procedures and recommend changes. - In the case of one program, H-1B temporary visas, the order directs the agencies to suggest changes to help ensure that the visas are awarded to the most skilled, best-paid immigrant workers. - The order also calls for a crackdown on fraud and abuse in the current system of work visas — which the agencies had already signaled they would do. What is the H-1B program, and why is the president focused on it? The H-1B program grants 65,000 work visas a year to foreigners, with 20,000 more visas granted to foreigners with advanced degrees and an unlimited number of extra visas available for universities, teaching hospitals and other nonprofit organizations. The program is aimed at highly skilled workers, and most of the visas are awarded to technology companies, although other specialty industries such as fashion also use the visas. - Under federal law, employers that use a large number of H-1B workers are supposed to document that they tried to hire Americans for the jobs. All H-1B employers are also required to pay prevailing wages to the immigrant workers who receive the visas. - Labor Department data shows that about 40% of the visas go to entry-level workers and another 40% go to people with limited experience and skills. The Trump administration says that more of the jobs performed by those people could and should be filled by Americans. How are the visas allocated? Visa applications must be sponsored by employers. Every April 1, the government begins accepting applications for the next fiscal year, which starts Oct. 1. This year, the government received 199,000 H-1B applications in the first five days and then stopped accepting them. Because there are more applicants than visas, the visas are awarded by lottery. 
Each visa is good for three years and can be renewed for three more years. Immigrants who are applying for a permanent residency visa, known as a green card, can typically stay longer while waiting for approval. Sources: the hindu. Facts for Prelims: Textiles India 2017: - It is the first ever global B2B Textile and handicrafts event in India. Hosted by the Ministry of Textiles, the event was inaugurated recently. - Textiles India 2017 is a landmark global trade event for the Indian Textile and handicraft Sector that will showcase the entire range of textile products from ‘Fibre to Fashion’. - The aim of the event is to make it one of the key annual destinations for International Buyers. Buzz over photograph of ‘Indian wolf’ in Sunderbans: - An Indian wolf (Canis lupus pallipes) has reportedly been sighted for the first time in the Sunderbans. the forest department is yet to verify the veracity of the claim. - The sighting is significant since wolves in Bengal are mostly found in the western parts bordering Chhattisgarh and Jharkhand. - The Indian wolf is a Schedule I animal in the Wildlife (Protection) Act 1972 of India. - It prefers to live in scrub lands, grasslands and semi-arid pastoral/ agricultural landscape. Less than 2,000 wolves are currently found in the forests of India. - It is categorised as ‘endangered’ by the International Union for Conservation of Nature. - Less than 2,000 wolves are there in the Indian forests.
english
{ "version": "0.1", "language": "en", "dictionaryDefinitions": [ { "name": "projectDictionary", "path": "./dictionary.txt"} ], "dictionaries": ["projectDictionary"], "languageSettings": [ { "languageId": "*", "dictionaries": ["projectDictionary"] } ], "ignoreRegExpList": [ "\\\\cite{[A-Za-z0-9, -]+}", "\\\\begin{\\w+}", "\\\\end{\\w+}", "\\\\usepackage{\\w+}", "\\\\bibliographystyle{\\w+}", "\\\\hyphenation{[A-Za-z0-9, -]+}", "\\w+{?"] } { "cSpell.enabled": true, "editor.cursorBlinking": "solid", "editor.wordWrap": "on", "editor.wordWrapColumn": 80, "editor.wrappingIndent": "same", "latex-workshop.debug.showUpdateMessage": false, "telemetry.enableCrashReporter": false, "telemetry.enableTelemetry": false }
json
<gh_stars>0 /* * Copyright (C) 2013 Google Inc. * Licensed to The Android Open Source Project. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.mail.ui; import android.app.Fragment; import com.android.mail.utils.LogTag; import com.android.mail.utils.LogUtils; /** * Small Runnable-like wrapper that first checks that the Fragment is in a good state before * doing any work. Ideal for use with a {@link android.os.Handler}. */ public abstract class FragmentRunnable implements Runnable { private static final String LOG_TAG = LogTag.getLogTag(); private final String mOpName; private final Fragment mFragment; public FragmentRunnable(String opName, Fragment fragment) { mOpName = opName; mFragment = fragment; } public abstract void go(); @Override public void run() { if (!mFragment.isAdded()) { LogUtils.i(LOG_TAG, "Unable to run op='%s' b/c fragment is not attached: %s", mOpName, mFragment); return; } go(); } }
java
--- title: Structure SSVARIANT | Microsoft Docs ms.custom: '' ms.date: 06/13/2017 ms.prod: sql-server-2014 ms.reviewer: '' ms.technology: native-client ms.topic: reference f1_keywords: - SSVARIANT helpviewer_keywords: - SSVARIANT struct ms.assetid: d13c6aa6-bd49-467a-9093-495df8f1e2d9 author: MightyPen ms.author: genemi manager: craigg ms.openlocfilehash: ff6e37986378a66d94dc113c4e3fe072fe3c077f ms.sourcegitcommit: 3026c22b7fba19059a769ea5f367c4f51efaf286 ms.translationtype: MT ms.contentlocale: fr-FR ms.lasthandoff: 06/15/2019 ms.locfileid: "63062497" --- # <a name="ssvariant-structure"></a>Structure SSVARIANT La structure `SSVARIANT`, qui est définie dans sqlncli.h, correspond à une valeur DBTYPE_SQLVARIANT dans le fournisseur OLE DB [!INCLUDE[ssNoVersion](../../includes/ssnoversion-md.md)] Native Client. `SSVARIANT` est une union de discrimination. En fonction de la valeur du membre vt, le consommateur peut identifier le membre à lire. Les valeurs de vt correspondent aux types de données [!INCLUDE[ssNoVersion](../../includes/ssnoversion-md.md)]. Par conséquent, la structure `SSVARIANT` peut contenir tout type SQL Server. Pour plus d’informations sur la structure de données pour les types OLE DB standard, consultez [indicateurs de Type](https://go.microsoft.com/fwlink/?LinkId=122171). ## <a name="remarks"></a>Notes Lorsque DataTypeCompat==80, plusieurs sous-types `SSVARIANT` deviennent des chaînes. Par exemple, les valeurs vt suivantes apparaissent dans `SSVARIANT` en tant que VT_SS_WVARSTRING : - VT_SS_DATETIMEOFFSET - VT_SS_DATETIME2 - VT_SS_TIME2 - VT_SS_DATE Lorsque DateTypeCompat == 0, ces types s'affichent sous leur forme native. Pour plus d’informations sur SSPROP_INIT_DATATYPECOMPATIBILITY, consultez [Using Connection String Keywords with SQL Server Native Client](../native-client/applications/using-connection-string-keywords-with-sql-server-native-client.md). 
Le fichier sqlncli.h contient des macros d'accès de type Variant qui simplifient l'annulation de la référence des types de membres dans la structure `SSVARIANT`. Un exemple est V_SS_DATETIMEOFFSET, que vous pouvez utiliser comme suit : ``` memcpy(&V_SS_DATETIMEOFFSET(pssVar).tsoDateTimeOffsetVal, pDTO, cbNative); V_SS_DATETIMEOFFSET(pssVar).bScale = bScale; ``` Pour obtenir le jeu complet de macros d'accès pour chaque membre de la structure `SSVARIANT`, consultez le fichier sqlncli.hi. Le tableau ci-dessous décrit les membres de la structure `SSVARIANT` : |Membre|Indicateur de type OLE DB|Type de données OLE DB C|Valeur vt|Commentaires| |------------|---------------------------|------------------------|--------------|--------------| |vt|SSVARTYPE|||Spécifie le type de valeur contenu dans la structure `SSVARIANT`.| |bTinyIntVal|DBTYPE_UI1|`BYTE`|`VT_SS_UI1`|Prend en charge le type de données `tinyint`[!INCLUDE[ssNoVersion](../../includes/ssnoversion-md.md)].| |sShortIntVal|DBTYPE_I2|`SHORT`|`VT_SS_I2`|Prend en charge le type de données `smallint`[!INCLUDE[ssNoVersion](../../includes/ssnoversion-md.md)].| |lIntVal|DBTYPE_I4|`LONG`|`VT_SS_I4`|Prend en charge le type de données `int`[!INCLUDE[ssNoVersion](../../includes/ssnoversion-md.md)].| |llBigIntVal|DBTYPE_I8|`LARGE_INTEGER`|`VT_SS_I8`|Prend en charge le type de données `bigint`[!INCLUDE[ssNoVersion](../../includes/ssnoversion-md.md)].| |fltRealVal|DBTYPE_R4|`float`|`VT_SS_R4`|Prend en charge le type de données `real`[!INCLUDE[ssNoVersion](../../includes/ssnoversion-md.md)].| |dblFloatVal|DBTYPE_R8|`double`|`VT_SS_R8`|Prend en charge le type de données `float`[!INCLUDE[ssNoVersion](../../includes/ssnoversion-md.md)].| |cyMoneyVal|DBTYPE_CY|`LARGE_INTEGER`|**VT_SS_MONEY VT_SS_SMALLMONEY**|Prend en charge la `money` et **smallmoney** [!INCLUDE[ssNoVersion](../../includes/ssnoversion-md.md)] types de données.| |fBitVal|DBTYPE_BOOL|`VARIANT_BOOL`|`VT_SS_BIT`|Prend en charge le type de données 
`bit`[!INCLUDE[ssNoVersion](../../includes/ssnoversion-md.md)].| |rgbGuidVal|DBTYPE_GUID|`GUID`|`VT_SS_GUID`|Prend en charge le type de données `uniqueidentifier`[!INCLUDE[ssNoVersion](../../includes/ssnoversion-md.md)].| |numNumericVal|DBTYPE_NUMERIC|`DB_NUMERIC`|`VT_SS_NUMERIC`|Prend en charge le type de données `numeric`[!INCLUDE[ssNoVersion](../../includes/ssnoversion-md.md)].| |dDateVal|DBTYPE_DATE|`DBDATE`|`VT_SS_DATE`|Prend en charge le type de données `date`[!INCLUDE[ssNoVersion](../../includes/ssnoversion-md.md)].| |tsDateTimeVal|DBTYPE_DBTIMESTAMP|`DBTIMESTAMP`|`VT_SS_SMALLDATETIME VT_SS_DATETIME VT_SS_DATETIME2`|Prend en charge les types de données `smalldatetime`, `datetime` et `datetime2`[!INCLUDE[ssNoVersion](../../includes/ssnoversion-md.md)].| |Time2Val|DBTYPE_DBTIME2|`DBTIME2`|`VT_SS_TIME2`|Prend en charge le type de données `time`[!INCLUDE[ssNoVersion](../../includes/ssnoversion-md.md)].<br /><br /> Inclut les membres suivants :<br /><br /> *tTime2Val* (`DBTIME2`)<br /><br /> *bScale* (`BYTE`) spécifie l’échelle pour *tTime2Val* valeur.| |DateTimeVal|DBTYPE_DBTIMESTAMP|`DBTIMESTAMP`|`VT_SS_DATETIME2`|Prend en charge le type de données `datetime2`[!INCLUDE[ssNoVersion](../../includes/ssnoversion-md.md)].<br /><br /> Inclut les membres suivants :<br /><br /> *tsDataTimeVal* (DBTIMESTAMP)<br /><br /> *bScale* (`BYTE`) spécifie l’échelle pour *tsDataTimeVal* valeur.| |DateTimeOffsetVal|DBTYPE_DBTIMESTAMPOFSET|`DBTIMESTAMPOFFSET`|`VT_SS_DATETIMEOFFSET`|Prend en charge le type de données `datetimeoffset`[!INCLUDE[ssNoVersion](../../includes/ssnoversion-md.md)].<br /><br /> Inclut les membres suivants :<br /><br /> *tsoDateTimeOffsetVal* (`DBTIMESTAMPOFFSET`)<br /><br /> *bScale* (`BYTE`) spécifie l’échelle pour *tsoDateTimeOffsetVal* valeur.| |NCharVal|Aucun indicateur de type OLE DB correspondant.|`struct _NCharVal`|`VT_SS_WVARSTRING,`<br /><br /> `VT_SS_WSTRING`|Prend en charge la `nchar` et **nvarchar** 
[!INCLUDE[ssNoVersion](../../includes/ssnoversion-md.md)] types de données.<br /><br /> Inclut les membres suivants :<br /><br /> *sActualLength* (`SHORT`) spécifie la longueur réelle de la chaîne vers laquelle *pwchNCharVal* points. N'inclut pas le zéro de fin.<br /><br /> *sMaxLength* (`SHORT`) spécifie la longueur maximale de la chaîne vers laquelle *pwchNCharVal* points.<br /><br /> *pwchNCharVal* (`WCHAR` \*) pointeur vers la chaîne.<br /><br /> Membres non utilisés : *rgbReserved*, *dwReserved*, et *pwchReserved*.| |CharVal|Aucun indicateur de type OLE DB correspondant.|`struct _CharVal`|`VT_SS_STRING,`<br /><br /> `VT_SS_VARSTRING`|Prend en charge la `char` et **varchar** [!INCLUDE[ssNoVersion](../../includes/ssnoversion-md.md)] types de données.<br /><br /> Inclut les membres suivants :<br /><br /> *sActualLength* (`SHORT`) spécifie la longueur réelle de la chaîne vers laquelle *pchCharVal* points. N'inclut pas le zéro de fin.<br /><br /> *sMaxLength* (`SHORT`) spécifie la longueur maximale de la chaîne vers laquelle *pchCharVal* points.<br /><br /> *pchCharVal* (`CHAR` \*) pointeur vers la chaîne.<br /><br /> Membres non utilisés :<br /><br /> *rgbReserved*, *dwReserved*, et *pwchReserved*.| |BinaryVal|Aucun indicateur de type OLE DB correspondant.|`struct _BinaryVal`|`VT_SS_VARBINARY,`<br /><br /> `VT_SS_BINARY`|Prend en charge la `binary` et **varbinary** [!INCLUDE[ssNoVersion](../../includes/ssnoversion-md.md)] types de données.<br /><br /> Inclut les membres suivants :<br /><br /> *sActualLength* (`SHORT`) indique la longueur réelle des données vers lesquelles *prgbBinaryVal* points.<br /><br /> *sMaxLength* (`SHORT`) spécifie la longueur maximale des données vers lesquelles *prgbBinaryVal* points.<br /><br /> *prgbBinaryVal* (`BYTE` \*) pointeur vers les données binaires.<br /><br /> Membre non utilisé : *dwReserved*.| |UnknownType|UNUSED|UNUSED|UNUSED|UNUSED| |BLOBType|UNUSED|UNUSED|UNUSED|UNUSED| ## <a name="see-also"></a>Voir aussi [Types de 
données &#40;OLE DB&#41;](data-types-ole-db.md)
markdown
We have a round up of the best dressed celebrities at the red carpet of the Nykaa.com Femina Beauty Awards 2015. From Sonam Kapoor to Kajol, these ladies sure did fire up the red carpet. Huma Qureshi: Huma looked every bit of a princess in the pale pink strapless gown. Her unkempt curly bob perfectly complimented the dress. Sarah-Jane Dias: Sarah looked smouldering in the Amit Aggarwal creation. Smokey eyes and a slick low bun completed her look. Shriya Saran: Shriya cut a regal figure in this red Gaurav Gupta creation. We love the minimal jewellery look. Sonam Kapoor: Our favourite fashionista did not disappoint us a tiny bit in her monochrome gown. She added the perfect amount of quirk to her outfit with the embellished cat clutch. Kajol: This classic beauty upped her fashion game with a monotone gown and blazer.
english
Colombo News: Police produced the two lesbian lovers before the Akkaraipattu Magistrate who ordered the two women to be examined by a psychiatrist and a report of psychiatric evaluation be prepared on them. Colombo: The LGBTQ+ community in Sri Lanka held a pride march in Colombo on June 25 to change the public perspective regarding the marginalized section of society. In the meantime, a pair of Indian and Sri Lankan women in the eastern city of Akkaraipattue was arrested for having an ‘abnormal relationship’ by the police. The lesbian couple have threatened to commit suicide if they were not allowed to leave the country and go to India, as per the news agency IANS report. As per reports, the two women, one is a 24-year-old from Tamil Nadu in India and the other is a 33-year-old married woman with one child from Akkaraipattu in Sri Lanka, met on social media two years ago. When the two developed relationship, the Indian woman invited the Sri Lanka woman to India but due to some overwork Immigration Department amid the ongoing economic crisis, she was not able to get her passport prepared. On Monday, the Indian woman, arrived in Sri Lanka on a tourist visa and went to Akkaraipattu, some 220 km from Colombo, and stayed at her friend's house at the night. The father of the 33-year-old local woman, however, objected to their relationship and lodged a complaint at the local police station in Akkaraipattu. Following the complaint, police arrested the two women and during interrogation, the Sri Lankan woman said that she wanted to go to India with her friend. She also threatened that both would commit suicide if they were not allowed to leave the country. Police later produced them before the Akkaraipattu Magistrate who ordered the two women to be examined by a psychiatrist and a report of psychiatric evaluation be prepared on them, reported IANS. 
They had been admitted to a hospital near Kalmunai under the protection of prison officials and were ordered to be produced before the court on Monday with examination reports. Homosexuality is illegal and homosexual acts are punishable by a jail term of up to ten years under Sri Lanka's laws, which date from the time of colonial British Ceylon, reported IANS.
english
Government has approved on 27th October, 2006 a Centrally Sponsored Scheme titled “National Bamboo Mission” under the Department of Agriculture & Cooperation for the financial year 2006-07 of the X Plan and first four year (2007-2011) of the XI Five Year Plan at a total outlay of Rs. 56823.00 lakhs. The Scheme is being implemented in 27 States, including 8 in North East Region. During 2008-09, a total amount of Rs. 8432.86 lakhs was released out of which an amount of Rs. 4482.43 lakhs was released to North Eastern States. An area of 67564 ha has been brought under bamboo plantation and 18412 ha of the existing plantations have been improved in North East region. To supply quality planting material, 548 bamboo nurseries have been established in the region. The Annual Action Plans for the year 2009-10 from the States, including those in the North East region, are under consideration of Department of Agriculture & Cooperation. State Bamboo Steering Committee Assam under National Bamboo Mission has not approved any consolidated plan for the full 11th Five Year Plan. However, the State proposes to raise about 12,000 ha of bamboo plantation and improve 10,000 ha of existing degraded bamboo stock during 11th Five Year Plan. Seasoning and treatment plant at Kokrajhar and Silchar will be developed. Capacity building through institutional trainings, both within and outside State, for farmers and field functionaries, will be carried out. Innovation in bamboo handicraft and utility products will be encouraged. The artisans will be provided marketing support for the sale of their bamboo products. Bamboo will be encouraged as industrial raw material in the State. This information was given by Prof. K.V. Thomas, Minister of State for Agriculture in a written reply to a question in the Rajya Sabha.
english
{ "recommendations": [ "ms-vscode.powershell", "ms-azuretools.vscode-azurefunctions" ] }
json
<gh_stars>0
html { font-size: 16px; }

body, body > * { width: 100%; }

/* Fixed, initially transparent page header; gains the .solid class for an opaque state. */
header {
  height: 80px;
  background-color: transparent;
  padding: 0 2rem;
  position: fixed;
  top: 0;
  left: 0;
}

/* Soft shadow cast below the header over the page content. */
header:after {
  content: '';
  display: block;
  position: fixed;
  top: 80px;
  bottom: 0;
  left: 0;
  right: 0;
  width: 100%;
  pointer-events: none;
  box-shadow: 0 0 4px 5px #F5F6EB;
}

header > .logo {
  width: 300px;
  height: 80px;
  float: left;
  line-height: 80px;
  font-size: 1.5rem;
  text-align: right;
  /* BUG FIX: text-shadow takes at most three lengths (offset-x, offset-y, blur)
     plus a color. The original `0 0 3px 5px #34352C` carried a box-shadow-style
     spread radius, which made the whole declaration invalid (silently dropped). */
  text-shadow: 0 0 3px #34352C;
  color: white;
  background: url("../../image/120x80-logo.png") no-repeat 0 50%;
}

header > .links {
  height: 80px;
  line-height: 80px;
  float: right;
  font-size: 1.5rem;
}

header > .links > a {
  padding: 0 0.5em;
  text-decoration: none;
  color: white;
}

header.solid { background-color: #F5F6EB; }
header.solid:after { box-shadow: 0 0 4px 5px #C9CABB; }
header.solid > .logo { color: #34352C; }
header.solid > .links > a { color: #34352C; }

/* Page sections are absolutely stacked below the full-viewport banner. */
.banner, .intro, .pros, .price, .contact, footer {
  position: absolute;
  z-index: -1;
  text-align: center;
}

/* Zero-width inline strut used to vertically center each .wrapper. */
.banner:after, .intro:after, .pros > *:after, .price > *:after, .contact:after, footer:after {
  content: '';
  display: inline-block;
  width: 0;
  height: 100%;
  vertical-align: middle;
}

.banner > .wrapper, .intro > .wrapper, .pros > * > .wrapper, .price > * > .wrapper, .contact > .wrapper {
  display: inline-block;
  width: 40%;
  vertical-align: middle;
}

.banner {
  height: 100%;
  background: #34352C url("../../image/banner.jpg") no-repeat;
  background-size: 100% 100%;
}

.banner > .wrapper { color: #F5F6EB; }
.banner > .wrapper > h1 { font-size: 3rem; padding: 0.5em; }
.banner > .wrapper > p { font-size: 1.5rem; padding: 0.5em 0.5em 2em; }
.banner > .wrapper > .btns > * + * { margin-left: 1em; }

/* Pill-shaped call-to-action buttons in the banner. */
.banner > .wrapper a {
  display: inline-block;
  text-decoration: none;
  border-radius: 1em;
  font-size: 1.5rem;
  font-weight: bolder;
  line-height: 2em;
  padding: 0 1em;
  cursor: pointer;
}

.banner > .wrapper a.register { color: #F5F6EB; background-color: red; }
.banner > .wrapper a.register:hover { background-color: orangered; }
.banner > .wrapper a.login { color: #34352C; background-color: white; }
.banner > .wrapper a.login:hover { background-color: #F5F6EB; }

.intro { top: 100%; height: 420px; background-color: white; }

/* Shared section heading with a short underline bar rendered via :after. */
.intro > .wrapper > h1, .pros > *:nth-child(1) > .wrapper > h1, .price > *:nth-child(1) > .wrapper > h1, .contact > .wrapper > h1 {
  width: 100%;
  font-size: 2rem;
}

.intro > .wrapper > h1:after, .pros > *:nth-child(1) > .wrapper > h1:after, .price > *:nth-child(1) > .wrapper > h1:after, .contact > .wrapper > h1:after {
  content: '';
  display: block;
  width: 20%;
  height: 10px;
  position: relative;
  left: 40%;
  margin: 1em 0;
  background-color: black;
}

.intro > .wrapper > p, .contact > .wrapper > p { font-size: 1.1rem; margin-bottom: 1em; }

.intro > .wrapper > a {
  display: inline-block;
  text-decoration: none;
  border-radius: 1em;
  font-size: 1.5rem;
  font-weight: bolder;
  line-height: 2em;
  padding: 0 1em;
  cursor: pointer;
  color: #34352C;
  background-color: #F5F6EB;
  transition: transform .1s;
}

.intro > .wrapper > a:hover { color: #34352C; background-color: #EFF0DC; }

.pros { top: calc(100% + 420px); height: 460px; background-color: #CCCCCC; }
.pros > * { width: 100%; text-align: center; }
.pros > *:nth-child(1) { height: 160px; }
.pros > *:nth-child(1) > .wrapper > h1, .price > *:nth-child(1) > .wrapper > h1 { color: white; }
.pros > *:nth-child(1) > .wrapper > h1:after, .price > *:nth-child(1) > .wrapper > h1:after { background-color: white; }
.pros > *:nth-child(2) { height: 300px; }
.pros > *:nth-child(2) > .wrapper { width: 80%; }

.pros > *:nth-child(2) > .wrapper > .pro {
  width: 25%;
  height: 250px;
  float: left;
  color: white;
  padding: 1em;
}

/* Feature icon supplied via a data-icon attribute; it springs in through the transform. */
.pros > *:nth-child(2) > .wrapper > .pro:before {
  content: attr(data-icon);
  display: block;
  color: #34352C;
  font-size: 5rem;
  transform: scale(1) rotate(0deg);
  transition: transform 1s;
  transition-timing-function: cubic-bezier(0,1,1,1);
}

.pros > *:nth-child(2) > .wrapper > .pro.stage:before { transform: scale(.1) rotate(-180deg); }

.pros > *:nth-child(2) > .wrapper > .pro > h1 {
  color: #F5F6EB;
  font-size: 2rem;
  padding: 0.5em;
}

.price { top: calc(100% + 880px); height: 750px; background-color: #CCCCCC; }
.price > *:nth-child(1) { height: 250px; }
.price > *:nth-child(2) { height: 500px; }
.price > *:nth-child(2) > .wrapper { width: 100%; }

/* Pricing card; grows slightly on hover. */
.price > *:nth-child(2) > .wrapper > .package {
  width: calc(25% - 2rem);
  height: 450px;
  float: left;
  margin: 1rem;
  background-color: #34352C;
  transform: scale(1);
  transition: transform 1s;
}

.price > *:nth-child(2) > .wrapper > .package:before, .price > *:nth-child(2) > .wrapper > .package:after {
  display: block;
  width: 100%;
  height: 50px;
  font-weight: bolder;
  text-align: center;
  line-height: 50px;
}

.price > *:nth-child(2) > .wrapper > .package:last-child:before, .price > *:nth-child(2) > .wrapper > .package:last-child:after {
  color: white;
  background-color: red;
}

.price > *:nth-child(2) > .wrapper > .package:before { content: attr(data-rel); background-color: #EFF0DC; }
.price > *:nth-child(2) > .wrapper > .package:after { content: '购 买'; color: white; background-color: #60C0DC; }

.price > *:nth-child(2) > .wrapper > .package > .content {
  width: 100%;
  height: 350px;
  line-height: 50px;
  font-weight: lighter;
  color: #F5F6EB;
  text-align: center;
}

.price > *:nth-child(2) > .wrapper > .package:hover { transform: scale(1.1); }

.contact { top: calc(100% + 1630px); height: 420px; }

footer {
  top: calc(100% + 2050px);
  height: 60px;
  color: #EFF0DC;
  background-color: #34352C;
  font-size: 1rem;
  line-height: 60px;
  text-align: center;
  text-shadow: 1px 1px 2px #ffffff;
}

@media all and (max-height: 500px) { body { height: 150%; } }
css
The US President said India imposing an import tax, while the US does not was an example of ‘unfair trade’ practices. United States President Donald Trump on Tuesday criticised India for imposing a high import tax on Harley-Davidson motorcycles. He said the US charges no tax on Indian bike imports and that India’s new tax was an example of “unfair” trade practices. Trump said the United States must impose “reciprocal taxes” to fight back. “I’m not blaming India,” Trump said while talking to lawmakers in the White House. “I think it’s great that they can get away with it. I don’t know why people allowed them to get away with it, but there’s an example that’s very unfair,” he said. Trump had first brought up the matter soon after taking office, during his inaugural address to a joint sitting of the US Congress in 2017, the Hindustan Times reported. He had not named India then, but said the tariff on Harley-Davidson was so high that it put the brand at a disadvantage against its competition. “We pay a tremendous tax to get into their countries – motorcycles, Harley Davidson – it goes into a certain country. I won’t mention the fact that it happens to be India, in this case,” he said on Tuesday. Trump’s comments come soon after India’s Central Board of Excise and Customs said in a February 12 notification that it has reduced the duty on imported motorcycles such as the Harley-Davidson and Triumph to 50%. Trump’s claim that “thousands and thousands” of Indian motorcycles sell in the US was an exaggeration, the Hindustan Times quoted experts as saying.
english
A new report suggests that Apple previously dropped a large-scale user encryption plan due to pressure from the FBI. The news comes as Apple is mired in a spat with law enforcement over access to iPhones that belonged to the suspect in the December 2019 shooting at a U.S. Naval base in Pensacola, Florida. According to the report from Reuters, Apple alerted the FBI a few years ago that the company "planned to offer users end-to-end encryption when storing their phone data on iCloud." That meant that Apple would no longer be able to access a user's encrypted backup data and turn it over to law enforcement. This prompted an outcry from the FBI, sources told Reuters, and when the two entities met again a year later, Apple had dropped the plan. According to Reuters, a former Apple employed claimed the company, "did not want to risk being attacked by public officials for protecting criminals, sued for moving previously accessible data out of reach of government agencies or used as an excuse for new legislation against encryption." While an exact timeline of the back-and-forth isn't detailed by Reuters, it does appear it was affected by the very public battle between the two sides in 2016 over access to an iPhone belonging to the San Bernardino shooter. In that case, the FBI found its own way into the phone, and dropped a court order it brought against Apple for refusing to access the phone's data. The government knows all about devices and companies that can unlock iPhones. Most notably, in 2019 U.S. Immigration and Customs Enforcement (ICE) inked an $820,000 deal with Greykey, creators of an iPhone-hacking tool, that would allow the agency to search phones obtained from undocumented migrants at the U.S.-Mexico border. Indeed, in both the first and second halves of 2018, Apple granted about 80 percent of the U.S. government's request for data. 
In this most recent dust-up, Apple stuck to the claim it laid out in 2016: that a backdoor for law enforcement doesn't exist, and creating one could give bad actors access to user data, too. (Experts agree.) Additionally, the company claimed that it has, in fact, helped out the government with the Pensacola case even if it's not exactly the way the FBI, Attorney General William Barr, and President Donald Trump want. Reuters notes that Apple still uses end-to-end encryption to protect a portion of user data, like passwords and health data, but that contact and messaging information remains accessible via iCloud backups. We've reached out to Apple for comment on the story and will update if we hear back.
english
import { basename } from "path"; import { Storage, File, Bucket } from "@google-cloud/storage"; import mimeDB from "mime-db"; import easyDB, { getRandomId, addToQueue, Data } from "easy-db-core"; // export {} from "easy-db-core"; type Configuration = { bucketName: string, /** * If is not set, the files will not be convert to URL. * The bucketNameFiles should be public for `allUsers`. */ bucketNameFiles?: string, projectId?: string, keyFilename?: string, cacheExpirationTime?: number, distanceWriteFileTime?: number, readable?: boolean, }; export default function easyDBGoogleCloud(configuration: Configuration) { const { bucketName, bucketNameFiles, keyFilename, projectId, readable, cacheExpirationTime, distanceWriteFileTime } = configuration; const storage = new Storage({ keyFilename, projectId }); const bucket = storage.bucket(bucketName); const bucketFiles = bucketNameFiles ? storage.bucket(bucketNameFiles) : null; const distanceWriteFile = distanceWriteFileTime ? distance(writeFile, distanceWriteFileTime) : writeFile; return easyDB({ // cacheExpirationTime shouldn't be smaller than cacheExpirationTime cacheExpirationTime: distanceWriteFileTime > cacheExpirationTime ? distanceWriteFileTime : cacheExpirationTime, async saveCollection(name: string, data: Data) { const file = bucket.file(`${name}.json`); const fileContent = readable === true ? 
JSON.stringify(data, null, " ") : JSON.stringify(data); const bufferContent = Buffer.from(fileContent, "utf8"); await distanceWriteFile(file, bufferContent, "application/json", false); return; }, async loadCollection(name: string): Promise<null | Data> { const file = bucket.file(`${name}.json`); const [exists] = await file.exists(); if (exists) { const content = await readFile(file); try { const data = JSON.parse(content); if (data !== null && typeof data === "object") { return data; } else { return null; } } catch (e) { // save inconsistent data const wrongFileName = `${name}-wrong-${new Date().toISOString()}.json`; const wrongFile = bucket.file(wrongFileName); const wrongBufferContent = Buffer.from(content, "utf8"); await distanceWriteFile(wrongFile, wrongBufferContent, "application/json", false); console.error(`Collection "${name}" is not parsable. It is save to "${wrongFileName}".`); return null; } } else { return null; } }, ...(bucketFiles ? { async saveFile(base64: string) { const extension = getFileExtension(base64); const fileName = await getFreeFileName(bucketFiles, extension); const file = bucketFiles.file(`${fileName}`); const fileContent = Buffer.from(getClearBase64(base64), "base64"); await writeFile(file, fileContent, getType(extension), true); return file.publicUrl(); }, async removeFile(path: string) { const fileName = basename(path); const file = bucketFiles.file(fileName); await file.delete(); } } : {}), }); } function writeFile(file: File, fileContent: Buffer, contentType: string, dbFile: boolean): Promise<void> { return new Promise((resolve, reject) => { file.createWriteStream({ resumable: false, gzip: true, metadata: { contentType }, }) .on("error", err => reject(err)) .on("finish", () => resolve()) .end(fileContent); }); } function readFile(file: File): Promise<string> { return new Promise((resolve, reject) => { let buffer = Buffer.alloc(0); file.createReadStream() .on("error", err => reject(err)) .on("data", chunk => buffer = 
Buffer.concat([buffer, chunk])) .on("end", () => resolve(buffer.toString("utf8"))) .read(); }); } async function getFreeFileName(bucket: Bucket, extension: string): Promise<string> { const [files] = await bucket.getFiles(); const nameFiles = files.map(file => file.name); // TODO: throw after full dictionary while (true) { const fileName = `${getRandomId()}.${extension}`; if (!nameFiles.includes(fileName)) { return fileName; } } } // TODO: merge with easy-db-node // parser for most popular extensions const regexMimeType = new RegExp("^data:(.*);base64,", "gi"); function getFileExtension(base64: string): string { regexMimeType.lastIndex = 0; const result = regexMimeType.exec(base64); if (result && result[1]) { return getExtension(result[1]); } else { return "bin"; } } function getClearBase64(base64: string): string { const result = base64.split(';base64,'); return result[1] || base64; } type WriteFile = typeof writeFile; function distance(writeFile: WriteFile, delay: number): WriteFile { const fileQueues: { [fileName: string]: { queue: null | Promise<any>, lastRef: () => Promise<void>, }, } = {}; // Save data in queue with SIGINT process.on("SIGINT", async () => { await Promise.all(Object.keys(fileQueues) .filter(fileName => fileQueues[fileName].queue !== null) .map(fileName => { fileQueues[fileName].queue = null; return fileQueues[fileName].lastRef() }) ); process.exit(); }); const distanceWriteFile: WriteFile = async (file, ...arg) => { const lastRef = async () => await distanceWriteFile(file, ...arg); if (!(file.name in fileQueues) || fileQueues[file.name].queue === null) { const queue = addToQueue<void>(null, () => new Promise(async resolve => { await writeFile(file, ...arg); setTimeout(() => { fileQueues[file.name].queue = null; resolve(); }, delay); })); fileQueues[file.name] = { queue, lastRef }; } else { fileQueues[file.name].lastRef = lastRef; (async () => { await fileQueues[file.name].queue; if (fileQueues[file.name].lastRef === lastRef) { // this is last 
called function await distanceWriteFile(file, ...arg); } else { // there is newer content } })(); } }; return distanceWriteFile; } function getExtension(type: string): string { if (mimeDB[type]?.extensions) { return mimeDB[type]?.extensions[0]; } else { return "bin"; } } // TODO: improve performance function getType(extension: string): string { for (const type in mimeDB) { if (mimeDB[type]?.extensions.includes(extension)) { return type; } } return "application/binary"; }
typescript
The U. S. says it expects India to enforce U. N. sanctions against Iran, but would leave it to New Delhi to decide how to convince Tehran to give up its pursuit of nuclear weapons. “This is about the danger of a nuclear arms race in the Middle East, which will affect countries outside of the region, including India,” State Department spokesman Phillip J. Crowley told reporters on Monday. Asked to comment on Indian Foreign Secretary Nirupama Rao’s reported remarks that such “unilateral sanctions” can have “a direct and adverse impact” on India’s energy security, Mr. Crowley said the U. S. understands that “every country obviously pursues its own self-interest of its citizens”. Mr. Crowley also offered no immediate comment “as to whether we have concerns about” the proposed Iran-Pakistan-India gas pipeline. The U. S. has ongoing concerns about the nature of Iran’s nuclear programme, he said noting Russian President Dmitry Medvedev too had voiced a concern that Tehran could reach a “tipping point” in its nuclear drive. “This is just indicative of the cooperation and shared perspective that the United States and Russia have reached on this issue,” Mr. Crowley said. Iran’s continued pursuit of sensitive uranium enrichment work was narrowing the “leap from a civilian programme to a military programme,” he said. “We have definite concerns that if this trajectory continues, that Iran will at some point approach that moment-that tipping point, if you will-where it has a de facto military capability,” Mr. Crowley said. “We are doing everything in our power to delay and deter that moment from occurring,” he said.
english
Rappers Divine and King are all set to perform at the Indian Premier League (IPL) 2023 final on Sunday. Taking to Twitter, Indian Premiere League announced the news and wrote, "Ahmedabad - You are in for a treat! Brace yourselves for an iconic evening as @VivianDivine & King have some power-packed performances in store for you. How excited are you to witness the two in action. " King and Divine shared this news on their respective social media account. The final will be held at the Narendra Modi Stadium in Ahmedabad, Gujarat on May 28 (Sunday) On Tuesday, Chennai Super Kings defeated Gujarat Titans by 15 runs in the first qualifier at Chepuk Stadium in Chennai which confirmed their seat in the final of IPL 2023. This win also marked their tenth entry in the IPL final. GT who has finished at the top of the table will get another shot to play final by facing Mumbai Indians, the winner of the qualifier 2 against Lucknow Super Giants. MI defeated LSG by 81 runs. Chennai Super Kings will be waiting to take on the Qualifier 2 winner in the IPL 2023 final on Sunday at the Narendra Modi Stadium in Ahmedabad. Only two teams in Indian Premier League (IPL) history have won the title two times in a row. While one of those teams -- Chennai Super Kings-- has already reached the summit clash, the Mumbai Indians (MI) will face Gujarat Titans (GT) on Friday in Qualifier 2 of the IPL 2023. Gujarat Titans' hunt to reach the summit clash now heads to Ahmedabad, where they will face Rohit Sharma's Mumbai Indians on Friday. Gujarat Titans Squad: Wriddhiman Saha(w), Shubman Gill, Hardik Pandya(c), Vijay Shankar, David Miller, Dasun Shanaka, Rahul Tewatia, Rashid Khan, Darshan Nalkande, Noor Ahmad, Mohammed Shami, Mohit Sharma, Sai Sudharsan, Srikar Bharat, Jayant Yadav, Shivam Mavi, Joshua Little, Pradeep Sangwan, Matthew Wade, Abhinav Manohar, Odean Smith, Alzarri Joseph, Ravisrinivasan Sai Kishore, Urvil Patel and Yash Dayal. 
Mumbai Indians Squad: Ishan Kishan(w), Rohit Sharma(c), Cameron Green, Suryakumar Yadav, Tilak Varma, Tim David, Chris Jordan, Hrithik Shokeen, Piyush Chawla, Akash Madhwal, Jason Behrendorff, Nehal Wadhera, Vishnu Vinod, Ramandeep Singh, Sandeep Warrier, Kumar Kartikeya, Riley Meredith, Shams Mulani, Arjun Tendulkar, Duan Jansen, Arshad Khan, Tristan Stubbs, Dewald Brevis and Raghav Goyal. READ ALSO:
english
Recently Rajya Sabha passed The Compensatory Afforestation Fund Bill, 2016. The Bill had earlier been passed by Lok Sabha in May 2016. - India is one of the ten most forest-rich countries of the world along with the Russian Federation, Brazil, Canada, United States of America, China, Democratic Republic of the Congo, Australia, Indonesia and Sudan. - The 2013 Forest Survey of India states its forest cover increased to 69.8 million hectares or more than 21% of the country’s area. - For the past two decades concern for climate change and sustainable development is the talk over all major international platform but on the other side there is increasing deforestation, forest fires, encroachments etc. - For sustainable development ecological balance should also be given due importance along with the economic development. - In 2002, the Supreme Court of India observed that collected funds for afforestation were underutilized by the states and it ordered for centrally pooling of funds under Compensatory Afforestation Fund. - The court had set up the National Compensatory Afforestation Fund Management and Planning Authority (National CAMPA) to manage the Fund. - In 2009, states also had set up State CAMPAs that receive 10% of funds form National CAMPA to use for afforestation and forest conservation. - However, in 2013, a CAG report identified that the funds continued to be underutilized. What is Compensatory Afforestation? - The simple principle at work here is that since forests are an important natural resource and render a variety of ecological services, they must not be destroyed. - However, because of developmental or industrial requirements, forests are routinely cut, or, as it is said in official language, “diverted for non-forest purposes”. - In such cases, the Forest (Conservation) Act of 1980 requires that non-forest land, equal to the size of the forest being “diverted”, is afforested. 
- But since afforested land does not become a forest overnight, there is still a loss of the goods and services that the diverted forest would have provided in the interim period. - These goods and services include timber, bamboo, fuel wood, carbon sequestration, soil conservation, water recharge, and seed dispersal. - Afforested land is expected to take no less than 50 years to start delivering comparable goods and services. - To compensate for the loss in the interim, the law requires that the Net Present Value (NPV) of the diverted forest is calculated for a period of 50 years, and recovered from the “user agency” that is “diverting” the forests. - So in short, Compensatory afforestation is defined as afforestation done in lieu of the diversion of forest land for non-forest use. - An expert committee calculates the NPV for every patch of forest. Currently, the NPV ranges from Rs 4.38 lakh per hectare in case of poor quality forests to Rs 10.43 lakh/ha for very dense forests. An expert committee has recently recommended that this be revised to Rs 5.65 lakh and Rs 55.55 lakh respectively. - “User agencies”, which are often private parties, are not expected to undertake afforestation work themselves. - This work has to be done by the state government. But the entire expenditure to be incurred on creating this new ‘forest’, including purchase of land for the purpose, has to be borne by the user. - The state government eventually has to transfer this land to the forest department for maintenance and management. - Thus, if any user agency wants to divert forest land for non-forest purposes, it has to deposit money for compensatory afforestation as well as pay the NPV, besides a few other charges. - Since forests are being diverted routinely (at the rate of about 20,000-25,000 ha per year according to the Ministry of Environment and Forests) a large sum of money is accruing to the government. 
Currently, more than Rs 40,000 crore has accumulated from these sources, and the fund is increasing at the rate of about Rs 6,000 crore every year. Need of Compensatory Afforestation fund Management and Planning Authority (CAMPA) It is to manage this money, and to use it for the designated purposes that CAMPA is proposed to be set up. The compensatory afforestation money and NPV are supposed to be collected from the user agency by the government of the state in which the project is located, and deposited with the central government. - The Bill establishes the National Compensatory Afforestation Fund under the Public Account of India, and a State Compensatory Afforestation Fund under the Public Account of each state. - These Funds will receive payments for: (i) compensatory afforestation, (ii) net present value of forest (NPV), and (iii) other project specific payments. The National Fund will receive 10% of these funds, and the State Funds will receive the remaining 90%. - These Funds will be primarily spent on afforestation to compensate for loss of forest cover, regeneration of forest ecosystem, wildlife protection and infrastructure development. - The Bill also establishes the National and State Compensatory Afforestation Fund Management and Planning Authorities to manage the National and State Funds. - The passing of the Bill has ended the long era of ad-hocism and will help the Centre and State Governments to utilise these amounts in a planned manner. - It will facilitate make available more than Rs. 6,000 crores per annum to the States/UTs for conservation, protection, improvement and expansion of forest and wildlife resources of the country. - Availability of these amounts will not only help the States/UTs and local communities to ensure better management of their forest resources but will also result in creation of more than 15 crores man-days of direct employment. 
- A major part of these amounts will be used to restock and improve quality of degraded forests, which constitutes more than 40 % of the total forest cover of the country. - Rules to be framed by the Central Government in consultation with the States/ UTs will provide for use of native species in afforestation activities to be undertaken from these funds. - Majority of the employment will be generated in tribal dominated and backward areas of the country. - Apart from creation of direct employment, utilisation of these amounts will result in increased availability of timber and various other non-timber forest products, and will thus help in improvement of the overall living standards of the forest dependent communities. - The Bill establishes the Funds for compensatory afforestation and forest conservation. However, there are several factors (other than administration of funds) which affect compensatory afforestation and forest conservation. These factors are mentioned below. - A 2013 CAG report noted that state forest departments lack the planning and implementation capacity to carry out compensatory afforestation and forest conservation. With the share of funds transferred to states increasing from 10% to 90%, effective utilisation of these funds will depend on the capacity of state forest departments. - Procuring land for compensatory afforestation is difficult as land is a limited resource, and is required for multiple purposes, such as agriculture, industry, etc. This is compounded by unclear land titles, and difficulties in complying with procedures for land use. - A High Level Committee on Environment Laws observed that quality of forest cover has declined between 1951 and 2014, with poor quality of compensatory afforestation plantations being one of the reasons behind the decline. - The Bill delegates the determination of NPV (value of loss of forest ecosystem) to an expert committee constituted by the central government. 
As NPV constitutes about half of the total funds collected, its computation methodology would be important. - Loss of biodiversity: – Since it leads to diversion of original forests, the result is fragmentation, that is, the breaking up of large forest blocks into smaller and more vulnerable patches. Fragmentation in turn leads to biodiversity loss. Moreover, non-native species planted in the name of artificial plantation often have served as a threat to even the existing ecosystem. - .Artificial vs original: –Natural ecosystems take thousands of years to develop over a place. Raising artificial plantations elsewhere such as those along the flanks of railway lines, highways, and so on can’t be supposed to have the same biodiversity value as the original ones. Often, they have a poor survival rate. - .Unavailability of land for planting new forests: –which has often led to use of CAMPA funds for purchasing forest department vehicles or repairing buildings defeating the original purpose. What is required is actually an ecosystems approach with focus on climate justice and the rights and role of local communities. It should also address biodiversity and poverty effectively and challenge the underlying causes of deforestation directly, resolving governance, poverty and land tenure issues. What do you understand by Compensatory Afforestation? Critically comment on the provisions of Compensatory Afforestation Fund Bill.
english
import { Road, Response } from "roads";
import { Server, Http2Server } from "roads-server";

const road = new Road();

// HTTP/1.1 server: maps road-level errors to HTTP responses.
const server = new Server(road, (error: any) => {
    switch (error.code) {
        case 404:
            return new Response("Not Found", 404);
        case 405:
            return new Response("Not Allowed", 405);
        case 500:
        default:
            // Anything unrecognised is treated as an internal server error.
            return new Response(error.message, 500);
    }
});

server.listen(3000, () => {
    console.log("Server is running on port 3000");
});

// BUG FIX: the HTTP/2 server previously also called listen(3000), which
// collides with the HTTP/1.1 server above (EADDRINUSE at startup).
// It now binds its own port.
const server2 = new Http2Server(road);

server2.listen(3001, () => {
    console.log("HTTP/2 server is running on port 3001");
});
typescript
--- layout: record subheadline: 'Collection: TRAP Mounds' title: TRAP Mound - 3113 record_id: '3113' categories: - 3000-3999 header: no tags: - '' certainty: Certain elevation: '436' max_diameter: '13' min_diameter: '8.5' height: '0.8' size_category: Small surrounding_land_use: Pasture surface_condition: Pasture robbed: Yes crm_condition: '3' crm_intactness: '3' campaign: '2010' source: Survey municipality: Golyamo Dryano vo locality: '' bgcode: DS001 notes: |- Digged through, looks like already excavated - maybe Kitov ?. rights: standard maps: lat: 42.6285 lon: 25.2442 date: 2018-12-05 land_use_source: Photo photo: Good bg_code: GD 010 akb_code: '' latitude: '42.671734' longitude: '25.242546' images: - image_path: https://drive.google.com/uc?id=0B3Rg88wZDQsccE5sa3FUQjJlb1U title: 3113_Overveiw_SE.JPG ---
markdown
/*
 * Copyright (C) 2017 Seoul National University
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package edu.snu.spl.cruise.services.et.evaluator.impl;

import edu.snu.spl.cruise.services.et.configuration.parameters.KeyCodec;
import edu.snu.spl.cruise.services.et.configuration.parameters.UpdateValueCodec;
import edu.snu.spl.cruise.services.et.configuration.parameters.ValueCodec;
import org.apache.reef.io.network.impl.StreamingCodec;
import org.apache.reef.io.serialization.Codec;
import org.apache.reef.tang.annotations.Parameter;

import javax.inject.Inject;

/**
 * A serializer that provides codecs for the key, value, and update value of a table.
 * Keys and values use {@link StreamingCodec}s so they can be (de)serialized directly
 * from streams; the update value only needs a plain {@link Codec}.
 *
 * @param <K> a key type in table
 * @param <V> a value type in table
 * @param <U> an update value type in table
 */
final class KVUSerializer<K, V, U> {

  private final StreamingCodec<K> keyCodec;
  private final StreamingCodec<V> valueCodec;
  private final Codec<U> updateValueCodec;

  // Injected by Tang; the concrete codec classes are bound through the
  // KeyCodec / ValueCodec / UpdateValueCodec named parameters.
  @Inject
  private KVUSerializer(@Parameter(KeyCodec.class) final StreamingCodec<K> keyCodec,
                        @Parameter(ValueCodec.class) final StreamingCodec<V> valueCodec,
                        @Parameter(UpdateValueCodec.class) final Codec<U> updateValueCodec) {
    this.keyCodec = keyCodec;
    this.valueCodec = valueCodec;
    this.updateValueCodec = updateValueCodec;
  }

  /**
   * @return a key codec
   */
  StreamingCodec<K> getKeyCodec() {
    return keyCodec;
  }

  /**
   * @return a value codec
   */
  StreamingCodec<V> getValueCodec() {
    return valueCodec;
  }

  /**
   * @return an update value codec
   */
  Codec<U> getUpdateValueCodec() {
    return updateValueCodec;
  }
}
java
from hamcrest import assert_that, has_entries, all_of, contains_string
from os.path import join

from testing_utils import BaseTestCase as TestCase

from vznncv.cubemx.tools._make_parser import parse_variables


class MakeParserTestCase(TestCase):
    """Tests for :func:`parse_variables` against a fixture CubeMX Makefile."""

    def test_variable_parsing(self):
        """The parser extracts every relevant variable from the fixture Makefile."""
        makefile_path = join(self.FIXTURE_DIR, 'stm32f3_project', 'Makefile')
        parsed = parse_variables(makefile_path)

        # The include list is long; build it from its parts for readability.
        include_flags = ' '.join([
            '-IInc',
            '-IDrivers/STM32F3xx_HAL_Driver/Inc',
            '-IDrivers/STM32F3xx_HAL_Driver/Inc/Legacy',
            '-IDrivers/CMSIS/Device/ST/STM32F3xx/Include',
            '-IDrivers/CMSIS/Include',
            '-IMiddlewares/Third_Party/FreeRTOS/Source/portable/GCC/ARM_CM4F',
            '-IMiddlewares/Third_Party/FatFs/src',
            '-IMiddlewares/Third_Party/FreeRTOS/Source/include',
            '-IMiddlewares/Third_Party/FreeRTOS/Source/CMSIS_RTOS',
        ])

        expected = {
            'TARGET': 'DemoProject',
            # C_SOURCES is large; spot-check two representative entries.
            'C_SOURCES': all_of(
                contains_string('Src/main.c'),
                contains_string('Middlewares/Third_Party/FreeRTOS/Source/queue.c'),
            ),
            'ASM_SOURCES': 'startup_stm32f303xc.s',
            'MCU': '-mcpu=cortex-m4 -mthumb -mfpu=fpv4-sp-d16 -mfloat-abi=hard',
            'AS_DEFS': '',
            'C_DEFS': '-DUSE_FULL_LL_DRIVER -DUSE_HAL_DRIVER -DSTM32F303xC',
            'AS_INCLUDES': '',
            'C_INCLUDES': include_flags,
            'LDSCRIPT': 'STM32F303VCTx_FLASH.ld',
        }

        assert_that(parsed, has_entries(expected))
python
package analyze import ( "github.com/Mirantis/dataeng/pkg/client" "github.com/Mirantis/dataeng/cmd/analyze/jira" "github.com/spf13/cobra" ) // Wrapper for Jira Client func NewAnalyzeCmd(dataClient *client.DataClient) *cobra.Command { //salesforcecfg := &sfconfig.Config{} //jiracfg := &jiracfg.Config cmd := &cobra.Command{ Use: "analyze", Short: "Analyze something", } // Wrapper for Output Flag to All Base Commands /*func OutPutCmd() *cobra.Command { var output string output := &output{} cmd := &cobra.Command{ Use: "output" }, }*/ // Universal Flag Commands //cmd.PersistentFlags().StringVar(&output), "output", "", "csv,dataframe,json,yaml") // Universal Base Commands cmd.AddCommand(jira.NewAnalyzeJiraCommand(dataClient)) return cmd }
go
import re


def _compile_replacements(mapping):
    """Compile an alternation pattern over *mapping*'s keys.

    Keys are sorted longest-first so that e.g. ``"i'd've"`` is matched in
    preference to its prefix ``"i'd"``.  Previously the keys were joined in
    dict insertion order and, because ``re`` tries alternatives left to
    right, ``"i'd've"`` expanded to the wrong ``"i would've"``.  Keys are
    also ``re.escape``d so any punctuation in them is matched literally.
    """
    keys = sorted(mapping, key=len, reverse=True)
    return re.compile('|'.join(map(re.escape, keys)))


def _replace_all(mapping, pattern, sentence):
    """Replace every occurrence of a *mapping* key in *sentence*."""
    return pattern.sub(lambda match: mapping[match.group(0)], sentence)


# reference
# https://www.enchantedlearning.com/grammar/contractions/list.shtml
# https://www.analyticsvidhya.com/blog/2021/06/must-known-techniques-for-text-preprocessing-in-nlp/
_CONTRACTIONS = {
    "'cause": "because", "'em": "them", "'tis": "it is", "'twas": "it was",
    "I'd": "I would", "I'd've": "I would have", "I'll": "I will", "I'll've": "I will have",
    "I'm": "I am", "I'm'a": "I am about to", "I'm'o": "I am going to", "I've": "I have",
    "Whatcha": "What are you", "ain't": "are not", "amn't": "am not", "aren't": "are not",
    "can't": "cannot", "could've": "could have", "couldn't": "could not",
    "daren't": "dare not", "daresn't": "dare not", "dasn't": "dare not",
    "didn't": "did not", "doesn't": "does not", "don't": "do not",
    "everyone's": "everyone is", "finna": "fixing to", "gimme": "give me",
    "gon't": "go not", "gonna": "going to", "gotta": "got to",
    "hadn't": "had not", "hasn't": "has not", "haven't": "have not",
    "he'd": "he had", "he'll": "he will", "he's": "he is", "here's": "here is",
    "how'd": "how did", "how'd'y": "how do you", "how'll": "how will",
    "how're": "how are", "how's": "how is",
    "i'd": "i would", "i'd've": "i would have", "i'll": "i will",
    "i'll've": "i will have", "i'm": "i am", "i've": "i have",
    "isn't": "is not", "it'd": "it would", "it'd've": "it would have",
    "it'll": "it will", "it'll've": "it will have", "it's": "it is",
    "kinda": "kind of", "let's": "let us", "luv": "love", "ma'am": "madam",
    "may've": "may have", "mayn't": "may not", "might've": "might have",
    "mightn't": "might not", "mightn't've": "might not have",
    "must've": "must have", "mustn't": "must not", "mustn't've": "must not have",
    "ne'er": "never", "needn't": "need not", "needn't've": "need not have",
    "o'": "of", "o'clock": "of the clock", "ol'": "old",
    "oughtn't": "ought not", "oughtn't've": "ought not have",
    "sha'n't": "shall not", "shan't": "shall not", "shan't've": "shall not have",
    "she'd": "she had", "she'd've": "she would have", "she'll": "she will",
    "she'll've": "she will have", "she's": "she is",
    "should've": "should have", "shouldn't": "should not", "shouldn't've": "should not have",
    "so's": "so is", "so've": "so have",
    "somebody's": "somebody is", "someone's": "someone is", "something's": "something is",
    "that'd've": "that would have", "that'll": "that will", "that's": "that is",
    "there'd": "there would", "there'd've": "there would have", "there'll": "there will",
    "there're": "there are", "there's": "there is", "these're": "these are",
    "they'd": "they would", "they'd've": "they would have", "they'll": "they will",
    "they'll've": "they will have", "they're": "they are", "they've": "they have",
    "those're": "those are", "to've": "to have", "wanna": "want to",
    "wasn't": "was not", "we'd": "we would", "we'd've": "we would have",
    "we'll": "we will", "we'll've": "we will have", "we're": "we are", "we've": "we have",
    "weren't": "were not", "what'd": "what would", "what'll": "what will",
    "what'll've": "what will have", "what're": "what are", "what's": "what is",
    "what've": "what have", "when'd": "when would", "when'll": "when will",
    "when's": "when is", "when've": "when have", "where'd": "where would",
    "where'll": "where will", "where's": "where is", "which's": "which is",
    "who'd": "who would", "who'd've": "who would have", "who'll": "who will",
    "who'll've": "who will have", "who're": "who are", "who's": "who is",
    "why'd": "why would", "why'll": "why will", "why're": "why are",
    "why's": "why is", "why've": "why have", "will've": "will have",
    "won't": "will not", "won't've": "will not have",
    "would've": "would have", "wouldn't": "would not", "wouldn't've": "would not have",
    "y'all": "you all", "y'all'd": "you all would", "y'all'd've": "you all would have",
    "y'all're": "you all are", "y'all've": "you all have",
    "you'd": "you would", "you'd've": "you would have", "you'll": "you will",
    "you'll've": "you will have", "you're": "you are", "you've": "you have",
}
_CONTRACTIONS_RE = _compile_replacements(_CONTRACTIONS)

# reference https://www.macmillandictionary.com/misspells.html
_MISSPELLINGS = {
    'colour': 'color', 'centre': 'center', 'favourite': 'favorite',
    'travelling': 'traveling', 'counselling': 'counseling', 'theatre': 'theater',
    'cancelled': 'canceled', 'labour': 'labor', 'organisation': 'organization',
    'wwii': 'world war 2', 'citicise': 'criticize', 'youtu ': 'youtube ',
    'Qoura': 'Quora', 'sallary': 'salary', 'Whta': 'What',
    'narcisist': 'narcissist', 'howdo': 'how do', 'whatare': 'what are',
    'howcan': 'how can', 'howmuch': 'how much', 'howmany': 'how many',
    'whydo': 'why do', 'doI': 'do I', 'theBest': 'the best',
    'howdoes': 'how does', 'Etherium': 'Ethereum', 'narcissit': 'narcissist',
    'bigdata': 'big data', '2k17': '2017', '2k18': '2018', 'qouta': 'quota',
    'exboyfriend': 'ex boyfriend', 'airhostess': 'air hostess', 'whst': 'what',
    'watsapp': 'whatsapp', 'demonitisation': 'demonetization',
    'demonitization': 'demonetization', 'demonetisation': 'demonetization',
    'accomodation': 'accommodation', 'adress': 'address',
    'accomodate': 'accommodate',
    'wether': 'weather',  # whether
    'rehersal': 'rehearsal', 'commited': 'committed', 'persue': 'pursue',
    'occurence': 'occurrence', 'lenght': 'length', 'strenght': 'strength',
    'seperate': 'separate', 'appaling': 'appalling',
    'tought': 'thought',  # taught
    'throught': 'through', 'commision': 'commission', 'comission': 'commission',
    'recieve': 'receive', 'collegue': 'colleague', 'desease': 'disease',
    'compell': 'compel', 'bizzare': 'bizarre', 'concious': 'conscious',
    'advertisment': 'advertisement', 'succint': 'succinct', 'rythm': 'rhythm',
    'wich': 'which',  # witch
    'wheather': 'weather',  # whether
    'percieve': 'perceive', 'occure': 'occur', 'enterpreneur': 'entrepreneur',
    'aquire': 'acquire', 'convinient': 'convenient', 'devide': 'divide',
    'agressive': 'aggressive', 'enviroment': 'environment', 'supress': 'suppress',
    'embarassed': 'embarrassed', 'miniscule': 'minuscule', 'occured': 'occurred',
    'strech': 'stretch', 'embarrased': 'embarrassed',
    'responsability': 'responsibility', 'assesment': 'assessment',
    'akward': 'awkward', 'endevour': 'endeavour', 'belive': 'believe',
    'wierd ': 'weird', 'achive': 'achieve', 'greatful': 'grateful',
    'biogrophay': 'biography',
}
_MISSPELLINGS_RE = _compile_replacements(_MISSPELLINGS)

# reference https://github.com/kootenpv/contractions/blob/master/contractions/data/leftovers_dict.json
_ENDINGS = {
    "'all": " all", "'am": " am", "'cause": "because", "'d": " would",
    "'ll": " will", "'re": " are", "'em": "them", "'er": " her",
    "doin'": "doing", "goin'": "going", "nothin'": "nothing",
    "somethin'": "something", "havin'": "having", "lovin'": "loving",
    "'coz": "because", "thats": "that is", "whats": "what is",
}
_ENDINGS_RE = _compile_replacements(_ENDINGS)

# reference https://github.com/kootenpv/contractions/blob/master/contractions/data/slang_dict.json
_SLANG = {
    "'aight": "alright", "dunno": "do not know", "howdy": "how do you do",
    "ima": "I am going to", "innit": "is it not", "iunno": "I do not know",
    "g'day": "good day", "gonna": "going to", "gotta": "got to",
    "wanna": "want to", "woulda": "would have", "gimme": "give me",
    "asap": "as soon as possible", " u ": " you ", " r ": " are ",
}
_SLANG_RE = _compile_replacements(_SLANG)

# Characters preserved by preprocess_sentence (lowercase ASCII letters,
# space, apostrophe, newline).
_ALLOWED_CHARS = frozenset("abcdefghijklmnopqrstuvwxyz '\n")


def replace_contractions(sentence: str) -> str:
    """Expand English contractions (e.g. "can't" -> "cannot")."""
    return _replace_all(_CONTRACTIONS, _CONTRACTIONS_RE, sentence)


def replace_misspell(sentence: str) -> str:
    """Normalise common misspellings and British spellings to American ones."""
    return _replace_all(_MISSPELLINGS, _MISSPELLINGS_RE, sentence)


def replace_endings(sentence: str) -> str:
    """Expand leftover clipped endings (e.g. "doin'" -> "doing")."""
    return _replace_all(_ENDINGS, _ENDINGS_RE, sentence)


def replace_slang(sentence: str) -> str:
    """Expand common slang shorthands (e.g. "dunno" -> "do not know")."""
    return _replace_all(_SLANG, _SLANG_RE, sentence)


def preprocess_sentence(sentence: str) -> str:
    """Lowercase *sentence* and keep only ASCII letters, spaces, apostrophes and newlines.

    Note: digits and punctuation are removed as well (the old docstring
    claimed digits were kept); runs of spaces are collapsed to one space.
    """
    sentence = sentence.lower()
    sentence = ''.join(ch for ch in sentence if ch in _ALLOWED_CHARS)
    return re.sub(' +', ' ', sentence)


def clean_en_text(sentence: str) -> str:
    """Run the full English cleaning pipeline over *sentence*.

    Order matters: contractions are expanded while the apostrophes are
    still present, and preprocess_sentence runs last to lowercase and
    strip the remaining punctuation.
    """
    pipeline = [replace_contractions, replace_misspell, replace_endings,
                replace_slang, preprocess_sentence]
    for step in pipeline:
        sentence = step(sentence)
    return sentence


def clean_ar_text(sentence: str) -> str:
    """Keep only characters from the Arabic Unicode block (U+0600-U+06FF).

    reference
    https://jrgraphix.net/r/Unicode/0600-06FF
    https://en.wikipedia.org/wiki/Arabic_alphabet
    """
    # Drop everything outside the Arabic block except spaces, commas, newlines.
    sentence = re.sub("[^؀-ۿ ,\n]", '', sentence)
    # Then drop the remaining punctuation (Latin and Arabic).
    sentence = re.sub("['.,؟?!،]", '', sentence)
    return re.sub(' +', ' ', sentence)
python
QUT vs BOB Dream11 Team Prediction and Suggestions for today’s Oman D10 2022 match between Qurum Thunders and Bousher Busters: Qurum Thunders will kick off the proceedings in the Oman D10 2022 with a battle with Bousher Busters. The two teams will play against each other at the Al Amerat Cricket Ground in Oman at 11:30 PM IST on March 12, Saturday. This is the first time that the two teams will play in the 10-over format against each other. Qurum Thunders will start the game as favourites. They have a strong squad on paper. The franchise did a decent job in the Oman D20 by finishing second in the points table. Thunders won four games while two matches didn’t go as per the plan. However, the team couldn’t lift the cup as they suffered a heartbreaking loss in the Super Four round. Speaking of Bousher Busters, they will hope to forget the T20 league. The team failed to make an impact and ended up at the second-last place in the points table. Busters played a total of seven games and won just two encounters. Ahead of the match between Qurum Thunders and Bousher Busters; here is everything you need to know: The Oman D10 2022 will be streamed live on the FanCode app and website. The match will be hosted at the Al Amerat Cricket Ground in Oman at 11:30 PM IST on March 12, Saturday.
english
A court in military-ruled Myanmar on Friday jailed American journalist Danny Fenster for 11 years, his lawyer and his employer said, despite U. S. calls for his release from what it said was unjust detention. Fenster, 37, the managing editor of online magazine Frontier Myanmar, was found guilty of incitement and violations of immigration and unlawful associations laws, his magazine said, describing the sentences as “the harshest possible under the law". He is the first Western journalist sentenced to prison in recent years in Myanmar, where a Feb. 1 coup by the military against an elected government led by Nobel laureate Aung San Suu Kyi ended a decade of tentative steps towards democracy and triggered nationwide protests. “There is absolutely no basis to convict Danny of these charges," said Thomas Kean, editor-in-chief of Frontier Myanmar, one of the country’s top independent news outlets. “Everyone at Frontier is disappointed and frustrated at this decision. We just want to see Danny released as soon as possible so he can go home to his family. " Fenster was arrested while trying to leave the country in May and has since been held in Yangon’s notorious Insein prison, where hundreds of opponents of the Tatmadaw, as the military is known, were jailed, many beaten and tortured, during decades of dictatorship. He was charged with additional, and more serious, offences of sedition and violations of the terrorism act earlier this week, without an explanation by authorities. Those charges are punishable by a maximum 20 years in prison each. Phil Robertson, deputy Asia director of Human Rights Watch, said Fenster’s jailing was also intended as warnings to the United States and the media. “The junta’s rationale for this outrageous, rights abusing sentence is first to shock and intimidate all remaining Burmese journalists inside Myanmar by punishing a foreign journalist this way," he said. “The second message is more strategic, focused on sending a message to the U. S. 
that the Tatmadaw’s generals don’t appreciate being hit with economic sanctions and can bite back with hostage diplomacy," he said. Fenster’s family has repeatedly called for his release, saying they were heartbroken about his detention. His trial had not been made public and a spokesman for the junta did not immediately respond to a request for comment. The United States has been pushing for Fenster’s release. The U. S. embassy in Myanmar did not immediately respond to a request for comment on Friday’s verdict. The State Department had earlier said his detention was “profoundly unjust" and “plain for the world to see", urging the junta to release him immediately. The American is among dozens of journalists who were detained in Myanmar after protests and strikes erupted following the coup, hampering the military’s efforts to consolidate power. Independent media has been accused by the junta of incitement. More than 1,200 civilians have been killed in protests and thousands detained since the coup, according to activists cited by the United Nations. Myanmar authorities overlooked Fenster in a recent amnesty for hundreds of people detained over anti-junta protests, which included some journalists. During nearly half a century of harsh rule by the military, news reporting was tightly controlled by the state but Myanmar’s media blossomed after a quasi-civilian government introduced tentative reforms from 2011. Since the February coup, however, the military has rescinded media licenses, curbed the internet and satellite broadcasts and arrested dozens of journalists, in what human rights groups have called an assault on the truth. The Committee to Protect Journalists said in a report in July that Myanmar’s rulers had effectively criminalised independent journalism. Ming Yu Hah, Amnesty International’s deputy regional director for campaigns, called the sentence a “reprehensible outcome" in a deeply flawed case. 
“Danny should have never been arrested in the first place and to sentence him to a combined 11 years shows how far Myanmar authorities are willing to go to signal that they do not respect independent media," she said. Frontier Myanmar’s publisher, Sonny Swe, who spent eight years in prison during the previous era of military rule, announced Fenster’s imprisonment on Twitter under the message: “A lot of things are going so wrong in this country. " Read all the Latest News here(This story has not been edited by News18 staff and is published from a syndicated news agency feed - Reuters)
english
John Morrison is currently part of the WWE roster on Monday Night RAW. He's currently part of a storyline with Riddle, whilst his friend The Miz is confined to a wheelchair. This is Morrison's second stint in WWE after the former IC Champion was released from the company back in 2011. At the time, Morrison was in a relationship with fellow WWE star Melina, but the couple went their separate ways back in 2015. Morrison then moved on to a relationship with Taya Valkyrie in 2016, who he has worked alongside with in Lucha Underground and IMPACT Wrestling. The couple announced their engagement the following year before going on to marry in 2018. Morrison later returned to WWE, but this time around, his wife was able to follow him to the company. Taya currently wrestles for WWE on their NXT brand under the name Franky Monet. The couple haven't yet been given the chance to work together in WWE but, with Monet now being part of the company, a number of interesting possibilities have opened up.
english
// FC.Time: controller for the "time" page section.  Lets the user pick the
// temporal domain (years / days / hours) of a FetchClimate request via three
// tabbed sub-controls, and mirrors every change into FC.state.temporal.
(function (FC, $) {
    (function (Time) {
        "use strict";

        // Shared closure state: jQuery handles and the three sub-controls.
        // _selectedTimeMode / _activeTimeControl track which tab is open.
        var _$page,
            _timeSelectorPanel,
            _selectedTimeMode,
            _activeTimeControl,
            _yearControl,
            _dayControl,
            _hourControl,
            _$yearsTabHeader,
            _$daysTabHeader,
            _$hoursTabHeader,
            _$selectedTabHeader;

        // Build the full default domain: every configured year, all days
        // (1..366) and all hours (0..24), in "cell" (interval) mode.
        function getDefaultDomain() {
            var temporalDomain = new FC.TemporalDomainBuilder();
            temporalDomain.parseYearCells(FC.state.config.boundaries.yearMin + "," + FC.state.config.boundaries.yearMax);
            temporalDomain.parseDayCells("1,366");
            temporalDomain.parseHourCells("0,24");
            return temporalDomain;
        }

        // True when the years part of domain `d` still equals the default
        // (single cell spanning the configured min..max years).
        function isDefaultYearsDomain(d) {
            return d.yearCellMode && d.years.length == 2 && d.years[0] == FC.state.config.boundaries.yearMin && d.years[1] == FC.state.config.boundaries.yearMax;
        }

        // True when the days part of `d` is the default 1..366 cell.
        function isDefaultDaysDomain(d) {
            return d.dayCellMode && d.days.length == 2 && d.days[0] == 1 && d.days[1] == 366;
        }

        // True when the hours part of `d` is the default 0..24 cell.
        function isDefaultHoursDomain(d) {
            return d.hourCellMode && d.hours.length == 2 && d.hours[0] == 0 && d.hours[1] == 24;
        }

        // True when all three axes are still at their defaults.
        function isDefaultDomain(d) {
            return isDefaultYearsDomain(d) && isDefaultDaysDomain(d) && isDefaultHoursDomain(d);
        }

        // Wire up the whole page: create the sub-controls, register change /
        // click handlers, and open the "years" tab.  Must run after the DOM
        // for "section.time" exists and FC.state.config is loaded.
        Time.initialize = function () {
            _$page = $("section.time");
            // First visit: seed FC.state with the default temporal domain.
            if (!FC.state.temporal) {
                var temporalDomain = getDefaultDomain();
                FC.state.setTemporal(temporalDomain.getTemporalDomain());
                if (FC.Results) FC.Results.updateSliders();
            }
            // Each control wraps the matching DOM node (the jQuery handle is
            // deliberately reassigned to the control instance).
            _timeSelectorPanel = _$page.find(".time-selection-panel");
            _timeSelectorPanel = new FC.Controls.TimeSectionControlsPanel(_timeSelectorPanel);
            _yearControl = _$page.find(".time-selection-control.year-control");
            _yearControl = new FC.Controls.TimeSelectYearsControl(_yearControl);
            _dayControl = _$page.find(".time-selection-control.day-control");
            _dayControl = new FC.Controls.TimeSelectDaysControl(_dayControl);
            _hourControl = _$page.find(".time-selection-control.hour-control");
            _hourControl = new FC.Controls.TimeSelectHoursControl(_hourControl);

            // Time control state changed event handlers.
            // Update tile control info text and Temporal Domain.
            // Shows/hides the per-axis reset buttons and the "default days/
            // hours" labels depending on which axes differ from the default.
            function updateUI() {
                if(isDefaultDomain(FC.state.temporal))
                    _timeSelectorPanel.$resetButton.addClass("hidden");
                else
                    _timeSelectorPanel.$resetButton.removeClass("hidden");

                if (isDefaultYearsDomain(FC.state.temporal)) {
                    _timeSelectorPanel.yearBtn.$resetButton.addClass("hidden");
                }
                else
                    _timeSelectorPanel.yearBtn.$resetButton.removeClass("hidden");

                var isDefaultDays = isDefaultDaysDomain(FC.state.temporal);
                var isDefaultHours = isDefaultHoursDomain(FC.state.temporal);

                // When days are default the day tile is hidden and the year
                // tile shows a "all days" label instead (same for hours below).
                if (isDefaultDays) {
                    _timeSelectorPanel.yearBtn.$defaultDaysLabel.removeClass("hidden");
                    _timeSelectorPanel.dayBtn.$tile.addClass("hidden");
                } else {
                    _timeSelectorPanel.yearBtn.$defaultDaysLabel.addClass("hidden");
                    _timeSelectorPanel.dayBtn.$tile.removeClass("hidden");
                }

                if (isDefaultHours) {
                    _timeSelectorPanel.dayBtn.$defaultHoursLabel.removeClass("hidden");
                    _timeSelectorPanel.hourBtn.$tile.addClass("hidden");
                } else {
                    _timeSelectorPanel.dayBtn.$defaultHoursLabel.addClass("hidden");
                    _timeSelectorPanel.hourBtn.$tile.removeClass("hidden");
                }

                if (isDefaultHours && isDefaultDays)
                    _timeSelectorPanel.yearBtn.$defaultHoursLabel.removeClass("hidden");
                else
                    _timeSelectorPanel.yearBtn.$defaultHoursLabel.addClass("hidden");
            }

            // When one axis changes, rebuild the domain by merging the changed
            // axis from its control with the other two axes from FC.state.
            _yearControl.$control.on("yearsstatechanged", function () {
                _timeSelectorPanel.yearBtn.$info = _yearControl.state.status;
                var domain = new FC.TemporalDomain(
                    _yearControl.state.temporal.years,
                    _yearControl.state.temporal.yearCellMode,
                    FC.state.temporal.days,
                    FC.state.temporal.dayCellMode,
                    FC.state.temporal.hours,
                    FC.state.temporal.hourCellMode);
                FC.state.setTemporal(domain);
                updateUI();
            });

            _dayControl.$control.on("daysstatechanged", function () {
                _timeSelectorPanel.dayBtn.$info = _dayControl.state.status;
                var domain = new FC.TemporalDomain(
                    FC.state.temporal.years,
                    FC.state.temporal.yearCellMode,
                    _dayControl.state.temporal.days,
                    _dayControl.state.temporal.dayCellMode,
                    FC.state.temporal.hours,
                    FC.state.temporal.hourCellMode);
                FC.state.setTemporal(domain);
                updateUI();
            });

            _hourControl.$control.on("hoursstatechanged", function () {
                _timeSelectorPanel.hourBtn.$info = _hourControl.state.status;
                var domain = new FC.TemporalDomain(
                    FC.state.temporal.years,
                    FC.state.temporal.yearCellMode,
                    FC.state.temporal.days,
                    FC.state.temporal.dayCellMode,
                    _hourControl.state.temporal.hours,
                    _hourControl.state.temporal.hourCellMode);
                FC.state.setTemporal(domain);
                updateUI();
            });

            // Time control click event handlers.
            // Global reset: restore the full default domain on every axis.
            _timeSelectorPanel.$resetButton.on("click", function () {
                var temporalDomain = getDefaultDomain().getTemporalDomain();
                FC.state.setTemporal(temporalDomain);
                _yearControl.initialize(temporalDomain);
                _dayControl.initialize(temporalDomain);
                _hourControl.initialize(temporalDomain);
            });

            // Per-axis resets: mutate only that axis back to its default.
            _timeSelectorPanel.yearBtn.$resetButton.on("click", function () {
                var temporalDomain = FC.state.temporal;
                temporalDomain.yearCellMode = true;
                temporalDomain.years = [FC.state.config.boundaries.yearMin, FC.state.config.boundaries.yearMax];
                FC.state.setTemporal(temporalDomain);
                _yearControl.initialize(temporalDomain);
            });

            _timeSelectorPanel.dayBtn.$resetButton.on("click", function () {
                var temporalDomain = FC.state.temporal;
                temporalDomain.dayCellMode = true;
                temporalDomain.days = [1, 366];
                FC.state.setTemporal(temporalDomain);
                _dayControl.initialize(temporalDomain);
            });

            _timeSelectorPanel.hourBtn.$resetButton.on("click", function () {
                var temporalDomain = FC.state.temporal;
                temporalDomain.hourCellMode = true;
                temporalDomain.hours = [0, 24];
                FC.state.setTemporal(temporalDomain);
                _hourControl.initialize(temporalDomain);
            });

            // Tab headers; the years tab starts out selected.
            _$selectedTabHeader = _$yearsTabHeader = _$page.find('.time-selection-tab-header[data-mode="years"]');
            _$daysTabHeader = _$page.find('.time-selection-tab-header[data-mode="days"]');
            _$hoursTabHeader = _$page.find(".time-selection-tab-header[data-mode='hours']");

            // Tab switchers: deselect the previous tile/control, mark the new
            // tab + tile as selected and show the matching control.
            function setYearsTimeMode() {
                if (_selectedTimeMode === _timeSelectorPanel.yearBtn.$tile) return;
                if (_selectedTimeMode) {
                    _selectedTimeMode.removeClass("selected");
                    _activeTimeControl.hide();
                }
                _$selectedTabHeader.removeClass("selected");
                (_$selectedTabHeader = _$yearsTabHeader).addClass("selected");
                (_selectedTimeMode = _timeSelectorPanel.yearBtn.$tile).addClass("selected");
                _yearControl.show();
                _activeTimeControl = _yearControl;
            }
            _$yearsTabHeader.on("click", setYearsTimeMode);
            _timeSelectorPanel.yearBtn.$editButton.on("click", setYearsTimeMode);

            function setDaysTimeMode() {
                if (_selectedTimeMode === _timeSelectorPanel.dayBtn.$tile) return;
                if (_selectedTimeMode) {
                    _selectedTimeMode.removeClass("selected");
                    _activeTimeControl.hide();
                }
                _$selectedTabHeader.removeClass("selected");
                (_selectedTimeMode = _timeSelectorPanel.dayBtn.$tile).addClass("selected");
                (_$selectedTabHeader = _$daysTabHeader).addClass("selected");
                // Show day 366 only when a single (or repeated) leap year is
                // selected; otherwise the table stays at 365 days.
                var isLeap = false;
                if (1 === FC.state.temporal.years.length || (2 === FC.state.temporal.years.length && FC.state.temporal.years[0] === FC.state.temporal.years[1])) {
                    isLeap = FC.isLeapYear(FC.state.temporal.years[0]);
                }
                _dayControl.updateDaysTable(isLeap);
                _dayControl.show();
                _activeTimeControl = _dayControl;
            }
            _$daysTabHeader.on("click", setDaysTimeMode);
            _timeSelectorPanel.dayBtn.$editButton.on("click", setDaysTimeMode);

            function setHoursTimeMode() {
                if (_selectedTimeMode === _timeSelectorPanel.hourBtn.$tile) return;
                if (_selectedTimeMode) {
                    _selectedTimeMode.removeClass("selected");
                    _activeTimeControl.hide();
                }
                _$selectedTabHeader.removeClass("selected");
                (_selectedTimeMode = _timeSelectorPanel.hourBtn.$tile).addClass("selected");
                (_$selectedTabHeader = _$hoursTabHeader).addClass("selected");
                _hourControl.show();
                _activeTimeControl = _hourControl;
            }
            _$hoursTabHeader.on("click", setHoursTimeMode);
            _timeSelectorPanel.hourBtn.$editButton.on("click", setHoursTimeMode);

            // Seed all three controls from the current state.
            _yearControl.initialize(FC.state.temporal);
            _dayControl.initialize(FC.state.temporal);
            _hourControl.initialize(FC.state.temporal);

            // set year time control as active
            _timeSelectorPanel.yearBtn.$editButton.trigger("click");
        };
    })(FC.Time || (FC.Time = {}));
})(window.FC = window.FC || {}, jQuery);
javascript
{ "accountLinkingWhitelistedDomains": null, "asin": "B01HLF94AI", "averageRating": 1, "canDisable": true, "capabilities": null, "category": null, "description": "trivia about india.test and build your general knowledge about india", "enablement": null, "exampleInteractions": [ "Alexa, Open india quiz game", "my answer is one", "repeat" ], "firstReleaseDate": 1467206867.875, "homepageLinkText": null, "homepageLinkUrl": null, "id": "amzn1.echo-sdk-ams.app.b83813d5-4bbd-42f4-a958-1dc36411dbe1", "imageAltText": "india quiz game icon", "imageUrl": "https://github.com/dale3h/alexa-skills-list/raw/master/skills/B01HLF94AI/skill_icon", "inAppPurchasingSupported": false, "launchPhrase": "india quiz game", "name": "india quiz game", "numberOfReviews": 1, "pamsPartnerId": null, "permissions": null, "privacyPolicyUrl": null, "shortDescription": "india quiz game", "skillTypes": null, "stage": "live", "termsOfUseUrl": null, "vendorId": "M2TPGKTBR2P4OQ", "vendorName": "danmullaguru" }
json
<filename>docs/files/index_js/report.history.json<gh_stars>0 [{"date":"Thu, 29 Jun 2017 12:57:33 GMT","sloc":66,"lloc":15,"functions":1,"deliveredBugs":0.343,"maintainability":100,"lintErrors":4,"difficulty":12.87},{"date":"Wed, 05 Jul 2017 12:11:35 GMT","sloc":66,"lloc":15,"functions":1,"deliveredBugs":0.343,"maintainability":100,"lintErrors":4,"difficulty":12.87},{"date":"Wed, 05 Jul 2017 12:14:44 GMT","sloc":66,"lloc":15,"functions":1,"deliveredBugs":0.343,"maintainability":100,"lintErrors":4,"difficulty":12.87},{"date":"Tue, 11 Jul 2017 13:34:06 GMT","sloc":67,"lloc":15,"functions":1,"deliveredBugs":0.343,"maintainability":100,"lintErrors":4,"difficulty":12.87},{"date":"Tue, 11 Jul 2017 13:35:06 GMT","sloc":67,"lloc":15,"functions":1,"deliveredBugs":0.343,"maintainability":100,"lintErrors":4,"difficulty":12.87}]
json
{"title":{"a":"Nombre de los Candidatos","b":"Partido","c":"Votos","d":"Porcentaje","e":"Candidatos","f":"Electo","sd":null},"isVisible":{"a":"true","b":"false","c":"true","d":"true","e":"false","f":"true","sd":null},"dataType":{"a":"String","b":"String","c":"Integer","d":"Integer","e":"Integer","f":"Integer","sd":null},"data":[{"a":"1. <NAME>","b":null,"c":"172","d":"9,20%","e":null,"f":"","sd":null},{"a":"2. <NAME>","b":null,"c":"88","d":"4,71%","e":null,"f":"","sd":null},{"a":"3. <NAME>","b":null,"c":"985","d":"52,70%","e":null,"f":"*","sd":null},{"a":"4. <NAME>","b":null,"c":"365","d":"19,53%","e":null,"f":"*","sd":null},{"a":"5. <NAME>","b":null,"c":"151","d":"8,08%","e":null,"f":"","sd":null},{"a":"6. <NAME>","b":null,"c":"90","d":"4,82%","e":null,"f":"","sd":null},{"a":"7. <NAME>","b":null,"c":"7","d":"0,37%","e":null,"f":"","sd":null},{"a":"8. <NAME>","b":null,"c":"11","d":"0,59%","e":null,"f":"","sd":null}],"resumen":[{"a":"Válidamente Emitidos","b":null,"c":"1.869","d":"97,39%","e":null,"f":null,"sd":null},{"a":"Votos Nulos","b":null,"c":"27","d":"1,41%","e":null,"f":null,"sd":null},{"a":"Votos en Blanco","b":null,"c":"23","d":"1,20%","e":null,"f":null,"sd":null},{"a":"Total Votación","b":null,"c":"1.919","d":"100,00%","e":null,"f":null,"sd":null}],"colegioEscrutador":null,"labels":null,"mesasEscrutadas":"13","totalMesas":"13","totalMesasPorcent":"100,00%","tipoGlosaComputo":"1","mostrarGlosaNominados":true,"tipoGlosaNominados":"2"}
json
<reponame>admariner/trending_archive ### 2017-12-25 diff between today and yesterday #### python * [ruiminshen/yolo2-pytorch](https://github.com/ruiminshen/yolo2-pytorch): PyTorch implementation of the YOLO (You Only Look Once) v2 * [kutoga/going_deeper](https://github.com/kutoga/going_deeper): Going Deeper: Infinite Deep Neural Networks * [soimort/you-get](https://github.com/soimort/you-get): Dumb downloader that scrapes the web * [bitcoinbook/bitcoinbook](https://github.com/bitcoinbook/bitcoinbook): Mastering Bitcoin 2nd Edition - Programming the Open Blockchain * [sxcurity/230-OOB](https://github.com/sxcurity/230-OOB): An Out-of-Band XXE server for retrieving file contents over FTP. * [yeasy/docker_practice](https://github.com/yeasy/docker_practice): Learn and understand Docker technologies, with real DevOps practice! #### go * [snail007/goproxy](https://github.com/snail007/goproxy): proxygolanghttp,https,websocket,tcp,socks5,.,,N.,,,tls,.,();,,,.:https://github.com/snail007/goproxy/releases QQ:189618940 * [ncw/rclone](https://github.com/ncw/rclone): "rsync for cloud storage" - Google Drive, Amazon Drive, S3, Dropbox, Backblaze B2, One Drive, Swift, Hubic, Cloudfiles, Google Cloud Storage, Yandex Files * [yunabe/lgo](https://github.com/yunabe/lgo): Go (golang) REPL and Jupyter Notebook kernel * [mojocn/base64Captcha](https://github.com/mojocn/base64Captcha): Golangsupport digits, numbers,alphabet, arithmetic, audio and digit-alphabet captcha. * [prometheus/prometheus](https://github.com/prometheus/prometheus): The Prometheus monitoring system and time series database. 
* [moby/moby](https://github.com/moby/moby): Moby Project - a collaborative project for the container ecosystem to assemble container-based systems * [tinode/chat](https://github.com/tinode/chat): Instant messaging server - backend in Go, client-side binding in Java and Javascript * [pingcap/tidb](https://github.com/pingcap/tidb): TiDB is a distributed HTAP database compatible with MySQL protocol #### cpp * [grpc/grpc](https://github.com/grpc/grpc): The C based gRPC (C++, Python, Ruby, Objective-C, PHP, C#) * [CMU-Perceptual-Computing-Lab/openpose](https://github.com/CMU-Perceptual-Computing-Lab/openpose): OpenPose: Real-time multi-person keypoint detection library for body, face, and hands estimation * [baidu/bigflow](https://github.com/baidu/bigflow): Baidu Bigflow is an interface that allows for writing distributed computing programs and provides lots of simple, flexible, powerful APIs. Using Bigflow, you can easily handle data of any scale. Bigflow processes 4P+ data inside Baidu and runs about 5000 jobs every day. * [davisking/dlib](https://github.com/davisking/dlib): A toolkit for making real world machine learning and data analysis applications in C++ * [siavash119/qtchan](https://github.com/siavash119/qtchan): 4chan browser in qt5 * [cld378632668/leveldb_chinese_comments](https://github.com/cld378632668/leveldb_chinese_comments): leveldbstarfork #### javascript * [wdlhao/vue2-element-touzi-admin](https://github.com/wdlhao/vue2-element-touzi-admin): vue2.0 +vuex+ element-ui * [justjavac/free-programming-books-zh_CN](https://github.com/justjavac/free-programming-books-zh_CN): * [sentsin/layui](https://github.com/sentsin/layui): UI HTML/CSS/JS * [egoist/poi](https://github.com/egoist/poi): Delightful web development. * [mrdoob/three.js](https://github.com/mrdoob/three.js): JavaScript 3D library. 
* [Tencent/wepy](https://github.com/Tencent/wepy): * [ant-design/ant-design-pro](https://github.com/ant-design/ant-design-pro): An out-of-box UI solution for enterprise applications * [gulzar1996/auto-like-my-gf-insta-pic](https://github.com/gulzar1996/auto-like-my-gf-insta-pic): Bot to automatically like your friend's Instagram post and notify you on Slack #### coffeescript * [snd/url-pattern](https://github.com/snd/url-pattern): easier than regex string matching patterns for urls and other strings. turn strings into data or data into strings. * [github/hubot-scripts](https://github.com/github/hubot-scripts): DEPRECATED, see https://github.com/github/hubot-scripts/issues/1113 for details - optional scripts for hubot, opt in via hubot-scripts.json * [basecamp/pow](https://github.com/basecamp/pow): Zero-configuration Rack server for Mac OS X * [JoelBesada/activate-power-mode](https://github.com/JoelBesada/activate-power-mode): Atom package - Activate POWER MODE to write your code in style.
markdown
{"id": "QmbUnVpbaESfVBefrn8i9bR3EpmwDSeT1C3AjHPCHHyUqw", "title": "NEST 3.7 Update Announcement", "body": "\nIn order to make the NEST oracle quotation data output more high-quality and continue to increase the scale of DAO's capital utilization, the following modifications are proposed for reference:\n\n1\u3001Add earning rate for nest price interface.\n\n2\u3001Adjust the postFeeUnit to 0.01eth for nestMining and ntokenMining.\n\n3\u3001Adjust the repurchase quota per block to 10000, and the quota limit to \n 3000000 for NEST Token.\n\n Adjust the repurchase quota per block to 100, and the quota limit to \n 30000 for nToken.\n\n4\u3001The allowable deviation of repurchase price is \u00b110%\n\nNow submit this proposal to the NEST community, NEST gamers can vote whether to support this proposal through Snapshot.\n\n\u4e3a\u4e86\u4f7f NEST \u9884\u8a00\u673a\u62a5\u4ef7\u6570\u636e\u8f93\u51fa\u66f4\u4f18\u8d28\uff0c\u5e76\u7ee7\u7eed\u63d0\u5347 DAO \u7684\u56de\u8d2d\u901f\u5ea6\uff0c\u62df\u5c06\u8fdb\u884c\u4ee5\u4e0b\u4fee\u6539\u4f9b\u53c2\u8003\uff1a\n\n1\u3001 \u4e3a\u62a5\u4ef7\u754c\u9762\u6dfb\u52a0\u6536\u76ca\u7387\u3002\n\n2\u3001\u5c06 nestMining \u548c ntokenMining \u7684 postFeeUnit \u8c03\u6574\u4e3a 0.01eth\u3002\n\n3\u3001NEST Token \u8c03\u6574\uff1a\u6bcf\u4e2a\u533a\u5757\u7684\u56de\u8d2d\u989d\u5ea6\u4e3a 10000\uff0c\u5355\u6b21\u989d\u5ea6\u6700\u9ad8\u9650\u5236\u4e3a 3000000\u3002\n\n nToken \u8c03\u6574\uff1a\u5c06\u6bcf\u4e2a\u533a\u5757\u7684\u56de\u8d2d\u914d\u989d\u8c03\u6574\u4e3a 100\uff0c\u5355\u6b21\u989d\u5ea6\u9650\u5236\u6700\u9ad8\u4e3a 30000\u3002\n\n4\u3001\u56de\u8d2d\u4ef7\u683c\u7684\u5141\u8bb8\u504f\u5dee\u4e3a\u00b110%\n\n\u73b0\u5728\u5c06\u672c\u63d0\u6848\u63d0\u4ea4\u5230 NEST \u793e\u533a\uff0cNEST \u73a9\u5bb6\u4eec\u53ef\u901a\u8fc7 Snapshot \u8fdb\u884c\u6295\u7968\u662f\u5426\u652f\u6301\u672c\u63d0\u6848\u3002\n", "choices": ["\u540c\u610f", "\u4e0d\u540c\u610f"], "start": 1626080400, "end": 1626253200, 
"snapshot": "12811048", "state": "closed", "author": "0x<PASSWORD>FCbbaeCd<PASSWORD>", "space": {"id": "nestprotocols.eth", "name": "NEST"}, "votes": 4, "votes_data": [{"id": "QmbpKdh83c8fMDCb9hyj8PdcqspdUok9FDNx78SebJNBUT", "voter": "0xc944C75513525C03807D44cadD9314dcCc81E4D8", "created": 1626163318, "choice": 1, "space": {"id": "nestprotocols.eth"}}, {"id": "QmWUW7QyowheewbRZUcpfUQJZsTDbcYqtgQfn42NrZ1smi", "voter": "0x3C8eC2338C2dAE8ba5251b677F6dAF55990E4780", "created": 1626144501, "choice": 1, "space": {"id": "nestprotocols.eth"}}, {"id": "<KEY>", "voter": "0x7ccaD45FCbbaeCd43f960c5af4E82ed2a0021d7C", "created": 1626144363, "choice": 1, "space": {"id": "nestprotocols.eth"}}, {"id": "<KEY>", "voter": "<KEY>", "created": 1626083902, "choice": 1, "space": {"id": "nestprotocols.eth"}}]}
json