text
stringlengths
27
775k
package com.test.luxpmtestapp

import android.os.Bundle
import androidx.appcompat.app.AppCompatActivity

/**
 * Base class for all activities: inflates the layout identified by
 * [resourceId] during [onCreate] so subclasses only declare their layout.
 */
abstract class BaseActivity : AppCompatActivity() {

    /** Layout resource id that the subclass wants as its content view. */
    abstract var resourceId: Int

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(resourceId)
    }
}
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
################################################################################
#                                                                              #
# nullscan - a modular framework designed to chain and automate security      #
# tests.                                                                       #
#                                                                              #
# FILE                                                                         #
#   tools.py                                                                   #
#                                                                              #
# AUTHOR                                                                       #
#   noptrix@nullsecurity.net                                                   #
#                                                                              #
################################################################################

# sys imports
import ipaddress
import dns.resolver
import whois
import requests
import concurrent.futures as cf
from ipwhois import IPWhois
from googlesearch import search as gsearch
from requests.auth import HTTPBasicAuth, HTTPDigestAuth
from collections import deque

# own imports
from modules.libs.toolsattr import ToolsAttr
import core.nmap


class Tools(ToolsAttr):
  """ tools (wrapper) class: implement shared tools or wrap existing tools. """

  def __init__(self, target, opts):
    """ init """
    ToolsAttr.__init__(self, target, opts)
    return

  def _crack_http_auth(self, url, logfile, threads=20, exit_on_success=True):
    """ check for http auth type (basic/digest) and crack the login.
        results are written to `logfile`; stops after the first hit when
        `exit_on_success` is set. """
    futures = deque()
    # bugfix: the user-agent used to be wrapped in extra single quotes
    headers = {'User-Agent': self.useragent}
    s = requests.session()
    h = s.head(url, verify=False, headers=headers).headers
    auth_header = ''
    if 'WWW-Authenticate' in h:
      auth_header = h['WWW-Authenticate']

    def crack(s, url, h, a, u, p):
      """ try a single credential pair; return a hit message or None """
      code = s.head(url, verify=False, headers=h,
        auth=a(f'{u}', f'{p}')).status_code
      if code == 200:
        return f'Login found: {u}:{p}'
      return

    if 'Basic realm' in auth_header:
      auth_type = HTTPBasicAuth
    elif 'Digest realm' in auth_header:
      auth_type = HTTPDigestAuth
    else:
      # todo: proxy auth etc.
      return

    # single username + single password
    if self.opts['user'] and self.opts['pass']:
      us = self.opts['user']
      pw = self.opts['pass']
      r = s.head(url, headers=headers, verify=False,
        auth=auth_type(f'{us}', f'{pw}'))
      if r.status_code == 200:
        self._log(logfile, f'Login found: {us}:{pw}')
        if exit_on_success:
          return

    # single username + password list
    if self.opts['user'] and self.opts['plists']:
      us = self.opts['user']
      for pwlist in self.opts['plists']:
        pws = self._read_file(pwlist)
        with cf.ThreadPoolExecutor(threads) as exe:
          for pw in pws:
            futures.append(exe.submit(crack, s, url, headers, auth_type,
              us, pw))
          for r in cf.as_completed(futures):
            if r.result():
              self._log(logfile, f'{r.result()}')
              if exit_on_success:
                return
      futures = deque()

    # username list + password list
    if self.opts['ulists'] and self.opts['plists']:
      for uslist in self.opts['ulists']:
        for pwlist in self.opts['plists']:
          usrs = self._read_file(uslist)
          pws = self._read_file(pwlist)
          with cf.ThreadPoolExecutor(threads) as exe:
            for us in usrs:
              for pw in pws:
                futures.append(exe.submit(crack, s, url, headers, auth_type,
                  us, pw))
            for r in cf.as_completed(futures):
              if r.result():
                self._log(logfile, f'{r.result()}')
                if exit_on_success:
                  return
    return

  def _crack_tomcat(self, url, user, password, logfile, timeout=5):
    """ crack tomcat login using default tomcat creds """
    headers = {'User-Agent': self.useragent}
    session = requests.session()
    try:
      res = session.get(url, timeout=timeout, auth=(user, password),
        verify=False, headers=headers)
      if res.status_code == 200:
        data = f'Login found: {url} ({user}:{password})'
        self._log(f'{logfile}', data)
    except Exception:
      # best effort: connection/timeout errors just mean no login found
      pass
    return

  def _sparty(self, url, log, mode='frontpage', opts=''):
    """ wrapper for sparty """
    checks = ('rpc_version_check', 'rpc_service_listing', 'rpc_file_upload',
      'author_config_check', 'author_remove_folder')
    if not opts:
      if mode == 'frontpage':
        for i in ('pvt', 'bin'):
          for j in checks:
            opts = f'-u {url} -f {i} -v ms_frontpage -d extract -l list -e {j}'
            self._run_tool('sparty', opts, nullscan_tool=log, newlines=True)
      elif mode == 'sharepoint':
        for i in ('forms', 'layouts', 'catalog'):
          for j in checks:
            opts = f'-u {url} -s {i} -v ms_sharepoint -d extract -l list -e {j}'
            self._run_tool('sparty', opts, nullscan_tool=log, newlines=True)
    else:
      # bugfix: '-u {url}' was a plain string, not an f-string
      opts += f' -u {url}'
      self._run_tool('sparty', opts, nullscan_tool=log)
    return

  def _snallygaster(self, target, log, opts='', timeout=300):
    """ wrapper for snallygaster """
    if not opts:
      opts = f"-i -n --nowww --useragent '{self.useragent}'"
    opts = f'{opts} {target}'
    self._run_tool('snallygaster', opts, logfile=log, timeout=timeout)
    return

  def _jexboss(self, host, port, log, scheme='http', opts=''):
    """ wrapper for jexboss """
    if not opts:
      opts = ' -D'
      if self.opts['post_data']:
        opts += f" -H {self.opts['post_data']}"
      if self.opts['cookies']:
        opts += f" --cookies '{self.cookies}'"
      if self.opts['proxy']:
        opts += f" --proxy {self.opts['proxy']}"
    opts = f'{opts} -u {scheme}://{host}:{port}/'
    self._run_tool('jexboss', opts, logfile=log, precmd='yes "NO" |',
      escape_codes=True)
    return

  def _gobuster(self, host, port, flist, scheme='http', log=None, opts=''):
    """ wrapper for gobuster """
    if not log:
      log = f'gobuster_{scheme}'
    if not opts:
      opts = f"dir -a '{self.useragent}' -e -f -k -l -q -r --timeout 5s -t 20"
      # bugfix: the target url is appended once below; it used to be added
      # here as well, producing a duplicate '-u' option
      opts += f' -w {flist}'
      if self.opts['cookies']:
        opts += f" -c '{self.cookies}'"
      if self.opts['web_user'] and self.opts['web_pass']:
        opts += f" -U {self.opts['web_user']} -P {self.opts['web_pass']}"
      if self.opts['proxy']:
        opts += f" --proxy {self.opts['proxy']}"
    opts = f'{opts} -u {scheme}://{host}:{port}/'
    self._run_tool('gobuster', opts, logfile=log)
    return

  def _droopescan(self, cms='drupal', opts=''):
    """ wrapper for droopescan """
    if not opts:
      opts = '--enumerate a -t 10 -o standard --timeout 5 --hide-progressbar'
      opts += ' --threads-enumerate 10'
    opts = f'{cms} -u {self.target} {opts}'
    self._run_tool('droopescan scan', opts, logfile=f'droopescan_{cms}')
    return

  def _domi_owned(self, target, log, mode='', newlines=False, opts=''):
    """ wrapper for domi-owned """
    if not opts:
      modes = ('fingerprint', 'enumerate', 'hashdump')
      for m in modes:
        # fingerprint, enumerate, hashdump without creds
        opts = f'{m} {target}'
        self._run_tool('domi-owned', opts, logfile=log, newlines=newlines,
          escape_codes=True)
        # fingerprint, enumerate, hashdump with creds
        if self.opts['user'] and self.opts['pass']:
          opts = f"{m} {target} --username {self.opts['user']}"
          opts += f" --password {self.opts['pass']}"
          self._run_tool('domi-owned', opts, logfile=log, newlines=newlines,
            escape_codes=True)
      # bruteforce accounts
      if self.opts['ulists']:
        for ulist in self.opts['ulists']:
          opts = f'bruteforce {target} {ulist} '
          self._run_tool('domi-owned', opts, logfile=log, newlines=newlines,
            escape_codes=True)
    else:
      self._run_tool('domi-owned', f'{mode} {target} {opts}', logfile=log,
        escape_codes=True)
    return

  def _commix(self, url, opts='', timeout=3600):
    """ wrapper for commix """
    if not opts:
      # bugfix: '--sys-info' used to be passed twice
      opts = '--batch --sys-info --level 3 --crawl 1 --disable-coloring'
      opts += f" --user-agent '{self.useragent}' --output-dir /tmp/"
      opts += ' --retries 2 --flush-session --os-cmd dir'
      if self.opts['post_data']:
        opts += f" -d {self.opts['post_data']}"
      if self.opts['referer']:
        opts += f" --referer {self.opts['referer']}"
      if self.opts['login_url']:
        # bugfix: used to read self.opts['auth_url'] after checking
        # 'login_url', raising KeyError when only 'login_url' is set
        opts += f" --auth-url {self.opts['login_url']}"
      if self.opts['cookies']:
        opts += f' --cookie {self.cookies}'
      if self.opts['web_user'] and self.opts['web_pass']:
        opts += ' --auth-type Basic'  # we need to add auth type opt later
        opts += f" --auth-cred {self.opts['web_user']}:{self.opts['web_pass']}"
    opts = f'-u {url} {opts}'
    self._run_tool('commix', opts, escape_codes=True, timeout=timeout)
    return

  def _brutemap(self, target, opts='', logfile='brutemap'):
    """ crack website logins using dictionary attack. """
    if not opts:
      _opts = '--retries 2 -oD /tmp/brutemap'
      # single user + pass
      if self.opts['user'] and self.opts['pass']:
        opts = f"-t {target} -u {self.opts['user']} -p {self.opts['pass']}"
        opts += f' {_opts}'
        self._run_tool('brutemap', opts, logfile=logfile, newlines=True)
      # single user + passlist
      if self.opts['user'] and self.opts['plists']:
        for plist in self.opts['plists']:
          if self._check_file(plist, block=False):
            opts = f" -t {target} -u {self.opts['user']} -p {plist} {_opts}"
            self._run_tool('brutemap', opts, logfile=logfile, newlines=True)
      # userlist + passlist
      if self.opts['ulists'] and self.opts['plists']:
        for ulist in self.opts['ulists']:
          if self._check_file(ulist, block=False):
            for plist in self.opts['plists']:
              if self._check_file(plist, block=False):
                opts = f'-t {target} -u {ulist} -p {plist} {_opts}'
                self._run_tool('brutemap', opts, logfile=logfile)
    else:
      self._run_tool('brutemap', opts, logfile=logfile)
    return

  def _httprint(self, host, port, scheme='http', opts=''):
    """ wrapper to fingerprint http server using httprint """
    if not opts:
      # bugfix: '-h <url>' is prepended once below; it used to be built
      # into the default opts as well, duplicating the option
      opts = '-s /usr/share/httprint/signatures.txt'
      opts += ' -r 2 -P0'
    opts = f'-h {scheme}://{host}:{port}/ {opts}'
    self._run_tool('httprint', opts, f'httprint_{scheme}')
    return

  def _httping(self, host, port, scheme='http', opts=''):
    """ wrapper to ping http server using httping """
    if not opts:
      opts = f"-a -t 3 -c 3 -I '{self.useragent}' -R {scheme}://nullscan.net/"
      if scheme == 'https':
        opts += ' -l'
      else:
        opts += ' -F'
      if self.opts['web_user'] and self.opts['web_pass']:
        opts += f" -A -U {self.opts['web_user']} -P {self.opts['web_pass']}"
      if self.opts['proxy']:
        h, p, s, pa = self._parse_url(self.opts['proxy'])
        opts += f' --proxy {h}:{p}'
        if self.opts['proxy_user'] and self.opts['proxy_pass']:
          opts += f" --proxy-user {self.opts['proxy_user']}"
          opts += f" --proxy-password {self.opts['proxy_pass']}"
      # bugfix: '-p <port> -g <host>' is appended once below; it used to be
      # added here too, duplicating both options
    opts = f'{opts} -p {port} -g {host}'
    self._run_tool('httping', opts, f'httping_{scheme}')
    return

  def _metoscan(self, host, port, scheme='http', opts=''):
    """ wrapper to scan for available HTTP methods using metoscan """
    if not opts:
      opts = f'{scheme}://{host}:{port}/'
    self._run_tool('metoscan', opts, f'metoscan_{scheme}')
    return

  def _lbmap(self, host, port, scheme='http', opts=''):
    """ wrapper to fingerprint web-server using lbmap """
    if not opts:
      # bugfix: the url is prepended once below; it used to be part of the
      # default opts too, duplicating the target argument
      opts = '--timeout 10 --batch'
    opts = f'{scheme}://{host}:{port} {opts}'
    self._run_tool('lbmap', opts, nullscan_tool=f'lbmap_{scheme}')
    return

  def _halberd(self, host, port, scheme='http', opts=''):
    """ wrapper to discover http load balancers using halberd """
    if not opts:
      # bugfix: the url is appended once below; it used to be part of the
      # default opts too, duplicating the target argument
      opts = '-t 10 -p 15 -q'
    opts = f'{opts} {scheme}://{host}:{port}/'
    self._run_tool('halberd', opts, nullscan_tool=f'halberd_{scheme}')
    return

  def _nikto(self, host, port, scheme='http', opts=''):
    """ wrapper to crawl the web-server for dirs,files and vulns using nikto """
    if not opts:
      opts = f'-C all -no404 -nointeractive -useragent "{self.useragent}"'
      # bugfix: '-p <port> -h <host>' is appended once below; it used to be
      # added here too
      if self.opts['proxy']:
        # bugfix: the proxy value was passed as the literal text
        # "self.opts['proxy']" (missing f-string braces)
        opts += f" -useproxy {self.opts['proxy']}"
    opts = f'{opts} -p {port} -h {host}'
    if scheme == 'https':
      opts += ' -ssl'
    self._run_tool('nikto', opts, nullscan_tool=f'nikto_{scheme}',
      timeout=3600)
    return

  def _fpdns(self, prot='udp', opts=''):
    """ wrapper to fingerprint remote DNS server using fpdns. """
    if not opts:
      if prot == 'tcp':
        opts = '-f -F 16 -T'
      else:
        opts = '-f -F 16'
      if self.opts['shost']:
        # bugfix: the source host was passed as the literal text
        # "self.opts['shost']" (missing f-string braces)
        opts = f"{opts} -Q {self.opts['shost']}"
    opts = f"{opts} -p {self.target['port']} {self.target['host']}"
    self._run_tool('fpdns', opts, f'fpdns_{prot}')
    return

  def _snoop_cache(self, prot='udp', opts=''):
    """ test for DNS cache snoop leak using dig. """
    if not opts:
      if prot == 'tcp':
        opts = 'A +norecurse +tcp +timeout=5'
      else:
        opts = 'A +norecurse +timeout=5'
    site = 'gmail.com'
    cmd1 = f"dig @{self.target['host']} {site} {opts}"
    cmd2 = f"dig @{self.target['host']} {site} {opts} | grep -i 'answer'"
    # make first request
    self._run_cmd(cmd1, f'snoop_cache_{prot}', newlines=True)
    # make second request (grep for ANSWER flag)
    self._run_cmd(cmd2, f'snoop_cache_{prot}')
    return

  def _dig_dns_version(self, prot='udp', opts=''):
    """ determine remote DNS server version using 'dig' cmd. """
    if not opts:
      if prot == 'tcp':
        opts = '+short +tcp +timeout=5 chaos txt version.bind'
        opts += f" @{self.target['host']}"
      else:
        # bugfix: this branch was a plain string containing the literal text
        # "self.target['host']" and was missing the '@' server marker
        opts = '+short +timeout=5 chaos txt version.bind'
        opts += f" @{self.target['host']}"
    # bugfix: the logfile used to be passed both positionally and as a kwarg
    self._run_tool('dig', opts, logfile=f'dig_dns_version_{prot}')
    return

  def _host_dns_version(self, prot='udp', opts=''):
    """ determine remote DNS server version using 'host' cmd. """
    if not opts:
      if prot == 'tcp':
        opts = f"-T -W 5 -c chaos -t txt version.bind {self.target['host']}"
      else:
        opts = f"-W 5 -c chaos -t txt version.bind {self.target['host']}"
    self._run_tool('host', opts, logfile=f'host_dns_version_{prot}')
    return

  def _googlesearch(self, query, **kwargs):
    """ simple google search; yields result urls """
    # defaults; caller-supplied kwargs win on merge
    defaults = {'stop': 100, 'user_agent': self.useragent}
    kwargs = {**defaults, **kwargs}
    for url in gsearch(query, **kwargs):
      yield url
    return

  def _testssl(self, host, port, opts=''):
    """ wrapper for testssl """
    if not opts:
      opts = f'--color 0 --vulnerable {host}:{port}'
    else:
      opts = f'{opts} {host}:{port}'
    self._run_tool('testssl', opts)
    return

  def _lulzbuster(self, host, port, scheme='http', flist=None, log=None,
    opts=''):
    """ wrapper for lulzbuster """
    if not log:
      log = f'lulzbuster_{scheme}.log'
    else:
      if '.log' not in log:
        log = f'{log}.log'
    if not opts:
      opts = f'-S -f -i -U -l {log}'
      if self.opts['proxy']:
        h, p, s, pa = self._parse_url(self.opts['proxy'])
        opts += f' -p {s}://{h}:{p}'
        if self.opts['proxy_user'] and self.opts['proxy_pass']:
          opts += f" -P {self.opts['proxy_user']}:{self.opts['proxy_pass']}"
      if self.opts['web_user'] and self.opts['web_pass']:
        opts += f" -a {self.opts['web_user']}:{self.opts['web_pass']}"
      if flist:
        opts += f' -w {flist}'
    opts = f'-s {scheme}://{host}:{port}/ {opts}'
    self._run_tool('lulzbuster', opts, create_log=False)
    return

  def _dirsearch(self, host, port, scheme='http', flist=None, log=None,
    opts=''):
    """ wrapper for dirsearch """
    if not log:
      log = f'dirsearch_{scheme}.log'
    else:
      if '.log' not in log:
        log = f'{log}.log'
    if not opts:
      opts = "-b -e ' ' -t 25 -x 300,301,302,303,400,401,402,404,430,500,501,"
      opts += f"502,503 --plain-text-report={log} --ua='{self.useragent}'"
      if self.opts['cookies']:
        opts += f" --cookie='{self.cookies}'"
      if self.opts['proxy']:
        h, p, s, pa = self._parse_url(self.opts['proxy'])
        opts += f' --proxy={h}:{p}'
      if flist:
        opts += f' -w {flist}'
    opts = f'-u {scheme}://{host}:{port}/ {opts}'
    self._run_tool('dirsearch', opts, create_log=False)
    return

  def _ikescan(self, opts, log):
    """ wrapper for all ikescan methods """
    # RFC compliant auth types
    for types in self.ike_auth_types.values():
      for t in types:
        _opts = f"{opts} --auth={t} {self.target['host']}"
        self._run_tool('ike-scan', _opts, logfile=log, newlines=True)
    return

  def _whois(self, _type, target=None):
    """ perform whois on domain or ipv4 addr; returns list of results """
    res = []

    def lookup(t):
      """ single whois (domain) or rdap (ip) lookup """
      if _type == 'domain':
        return whois.whois(t)
      return IPWhois(t).lookup_rdap(depth=1)

    try:
      if target:
        res.append(lookup(target))
      else:
        # no explicit target: read targets from the previously written logs
        if _type == 'domain':
          log = self._read_log('domainname')
        else:
          log = self._read_log('ipv4addr')
        for t in log:
          if t:
            res.append(lookup(t))
    except Exception:
      # best effort: failed lookups simply yield fewer results
      pass
    return res

  def _portscan(self, nmap_opts, logfile, output=None):
    """ wrapper to perform nmap portscan """
    nmap = core.nmap.Nmap(nmap_opts)
    nmap.set_logfile(logfile)
    nmap.build_cmd()
    nmap.scan(output=output)
    return

  def _icmp_req(self, name, icmp_type, icmp_code, count):
    """ wrapper for all icmp requests """
    opts = f'--icmp --icmp-type {icmp_type} --icmp-code {icmp_code}'
    opts += f" --delay 0.2s -c {count} {self.target['host']}"
    self._run_tool('nping', opts, name)
    return

  def _hydra(self, protocol, _opts):
    """ wrapper for hydra to crack logins """
    log = f'hydra_{protocol}.log'
    _opts += f' -o {log}'
    service = f"{protocol}://{self.target['host']}:{self.target['port']}"
    # single username + single password mode
    # robustness: only run when both creds were actually supplied (the other
    # modes already guard their inputs)
    if self.opts['user'] and self.opts['pass']:
      opts = f"{_opts} -l {self.opts['user']} -p {self.opts['pass']} {service}"
      self._run_tool('hydra', opts, create_log=False)
    # single username + password list mode
    if self.opts['user']:
      for pwlist in self.opts['plists']:
        if self._check_file(pwlist):
          opts = f"{_opts} -l {self.opts['user']} -P {pwlist} {service}"
          self._run_tool('hydra', opts, create_log=False)
    # username list and password list mode
    for userlist in self.opts['ulists']:
      if self._check_file(userlist, block=False):
        for passlist in self.opts['plists']:
          if self._check_file(passlist, block=False):
            opts = f"{_opts} -L {userlist} -P {passlist} {service}"
            self._run_tool('hydra', opts, create_log=False)
    return

  def _dns_query(self, qtype, tool, logfile=None):
    """ perform dns query and log results """
    res = []
    if logfile:
      for host in self._read_log(logfile):
        try:
          res.append(dns.resolver.query(host, qtype))
        except Exception:
          pass  # unresolvable hosts are skipped
    # try with target['host'] directly
    if not res:
      try:
        res.append(dns.resolver.query(self.target['host'], qtype))
      except Exception:
        pass
    for answers in res:
      for a in answers:
        # idiom: replaces the fourfold 'mx'/'MX'/'Mx'/'mX' comparison
        if qtype.lower() != 'mx':
          if not logfile:
            # output for dnsrecords()
            self._log(tool, f"{qtype.upper()}: {a.to_text().rstrip('.')}")
          else:
            # normal output
            self._log(tool, a.to_text().rstrip('.'))
        else:
          # MX answers carry the host in the 'exchange' field
          self._log(tool, a.exchange.to_text().rstrip('.'))
    return

# EOF
// Build script for the optics-ksp module: multiplatform Kotlin with
// JVM-only KSP compile-testing dependencies.
plugins {
  id(libs.plugins.kotlin.multiplatform.get().pluginId)
  alias(libs.plugins.arrowGradleConfig.kotlin)
  alias(libs.plugins.arrowGradleConfig.publish)
}

kotlin {
  explicitApi = null
}

apply(from = property("TEST_COVERAGE"))
apply(from = property("ANIMALSNIFFER_MPP"))

kotlin {
  sourceSets {
    jvmMain {
      dependencies {
        implementation(libs.ksp)
      }
    }
    jvmTest {
      dependencies {
        implementation(libs.kotlin.stdlibJDK8)
        implementation(libs.junitJupiter)
        implementation(libs.junitJupiterEngine)
        implementation(libs.assertj)
        implementation(libs.classgraph)
        implementation(libs.kotlinCompileTesting)
        implementation(libs.kotlinCompileTestingKsp)
        runtimeOnly(projects.arrowOpticsKspPlugin)
        runtimeOnly(projects.arrowAnnotations)
        runtimeOnly(projects.arrowCore)
        runtimeOnly(projects.arrowOptics)
      }
    }
  }
}
--- title: 'Chapter 3: R0' permalink: 'chapters/ob18/c3/intro' previouschapter: url: chapters/ob18/c2/r title: 'Original R code' nextchapter: url: chapters/ob18/c3/r title: 'Original R code' redirect_from: - 'chapters/ob18/c3/intro' --- ## Chapter 3: R0
import io.kotest.core.spec.style.FunSpec
import io.kotest.matchers.shouldBe

/** Verifies that the expect/actual platformName() resolves to "native". */
class PlatformNameTestNative : FunSpec({
  test("platform name should be native") {
    platformName() shouldBe "native"
  }

  // Comment this in for a failed test
  // test("my only purpose is to fail!") {
  //   platformName() shouldBe "jvm"
  // }
})
subroutine nurbl(ioption, * k1,k1points, * m1,ia,s,w,x,y,z,v, * irow,ipt,ict,icttot, * npoints,ntets,nbpoints,nbtets, * itoff,jtoff) C C####################################################################### C C PURPOSE - C C THIS ROUTINE PROCESSES AN IGES TYPE "126" ENTITY (A CURVE). C C INPUT ARGUMENTS - C C C OUTPUT ARGUMENTS - C C C CHANGE HISTORY - C C $Log: nurbl.f,v $ C Revision 2.00 2007/11/05 19:46:02 spchu C Import to CVS C CPVCS CPVCS Rev 1.2 08 Feb 2006 14:35:36 dcg CPVCS "enforce lower case - add external statements for shift routines CPVCS these changes needed to compile with absoft pro fortran" CPVCS CPVCS Rev 1.1 30 Sep 2004 09:18:52 dcg CPVCS replace calls to real( with calls to dble( CPVCS CPVCS Rev 1.0 27 Jan 2000 12:30:52 dcg CPVCS Initial revision. CPVCS CPVCS Rev 1.5 Fri Oct 23 13:11:34 1998 dcg CPVCS declare k1, ia before use - DEC compiler complaint CPVCS CPVCS Rev 1.4 Fri Aug 28 14:24:58 1998 dcg CPVCS remove single precision constants CPVCS CPVCS Rev 1.3 Mon Apr 14 16:55:54 1997 pvcs CPVCS No change. CPVCS CPVCS Rev 1.2 Thu Oct 10 08:41:52 1996 het CPVCS Do an automatic addatt for "vels" to contain normal directions. CPVCS CPVCS Rev 1.1 Thu Jun 27 14:52:30 1996 het CPVCS Put unit normals into the vels array for each NURB. CPVCS CPVCS Rev 1.0 Tue Jan 30 15:20:22 1996 dcg CPVCS Initial revision. 
C C####################################################################### C implicit none C C C ###################################################################### C integer ia,k1 real*8 s(ia) real*8 w(k1), x(k1), y(k1), z(k1) real*8 v(*) C pointer (ipxs, xs(2,1000000)) pointer (ipbs, bs(ia,1000000)) pointer (ipgx, gx(k1)) pointer (ipgy, gy(k1)) pointer (ipgz, gz(k1)) C character*32 cmo character*32 isubname C pointer (ipimt1, imt1) pointer (ipitp1, itp1) integer itp1(10000000), imt1(10000000) pointer (ipxic, xic) pointer (ipyic, yic) pointer (ipzic, zic) real*8 xic(10000000), yic(10000000), zic(10000000) pointer(ipvels,vels) real*8 vels(3,1000000) C pointer (ipitet, itet1) pointer (ipjtet, jtet1) integer itet1(4*1000000), jtet1(4*1000000) pointer (ipitetclr, itetclr) pointer (ipitettyp, itettyp) pointer (ipitetoff, itetoff) pointer (ipjtetoff, jtetoff) integer itetclr(1000000), itettyp(1000000), * itetoff(1000000), jtetoff(1000000) C character*32 cvelnm character*8092 cbuff real*8 bs,gx,gy,gz,epsilon,smin,smax,ds1,s1,term1,term2, * bsum,xsum,ysum,zsum,x1,y1,z1,x2,y2,z2,dx1,dx,xf1,xf2, * xs,rout integer it,ityp,i1,i2,nnodesmm,ntetsinc,inc,iout,lout, * nelementsmm,ierr,npointsinc,npsave,ntetsave,j,ik1,ks,js, * mbndry,ioption,k1points,irow,ipt integer npoints,length,icmotype,ierror,itin,lin,m1, * icscode,ict,icttot,ntets,nbpoints,nbtets, * itoff,jtoff,k1point,i,nx,ix,k,ilen pointer(ipout,out) real*8 out(*) real*8 alargenumber parameter (alargenumber=1.d+99) C C ###################################################################### C C data epsilon / 1.0d-10 / C C ###################################################################### C isubname="nurbl" C call cmo_get_name(cmo,ierror) C call cmo_get_info('nnodes',cmo,npoints,length,icmotype,ierror) call cmo_get_info('nelements',cmo,ntets,length,icmotype,ierror) call cmo_get_info('mbndry',cmo,mbndry,length,icmotype,ierror) call cmo_get_info('imt1',cmo,ipimt1,length,icmotype,ierror) call 
cmo_get_info('itp1',cmo,ipitp1,length,icmotype,ierror) call cmo_get_info('itetclr',cmo,ipitetclr,length,icmotype,ierror) call cmo_get_info('itettyp',cmo,ipitettyp,length,icmotype,ierror) call cmo_get_info('itetoff',cmo,ipitetoff,length,icmotype,ierror) call cmo_get_info('jtetoff',cmo,ipjtetoff,length,icmotype,ierror) call cmo_get_info('itet',cmo,ipitet,length,icmotype,ierror) call cmo_get_info('jtet',cmo,ipjtet,length,icmotype,ierror) C if(k1points.le.0) then k1point=k1 else k1point=k1points endif C length=2*ia call mmgetblk("xs",isubname,ipxs,length,2,icscode) length=ia*(m1+1) call mmgetblk("bs",isubname,ipbs,length,2,icscode) length=k1point call mmgetblk("gx",isubname,ipgx,length,2,icscode) call mmgetblk("gy",isubname,ipgy,length,2,icscode) call mmgetblk("gz",isubname,ipgz,length,2,icscode) C smin=alargenumber smax=-smin js=0 ks=0 do 100 i=1,ia-1 C***** if(s(i).ne.s(i+1)) then if(abs(s(i)-s(i+1)).gt.epsilon) then js=js+1 xs(1,js)=s(i) xs(2,js)=s(i+1) smin=min(smin,xs(1,js),xs(2,js)) smax=max(smax,xs(1,js),xs(2,js)) else ks=ks+1 endif 100 continue ds1=(v(2)-v(1))/(k1point-1) s1=v(1)-ds1 do 240 ik1=1,k1point s1=s1+ds1 do 245 j=1,m1+1 do 246 i=1,ia bs(i,j)=0.0 246 continue 245 continue do 250 i=1,ia-1 C***** if(s1.ge.s(i).and.s1.le.s(i+1).and.s(i).ne.s(i+1)) then if((s1-s(i)).gt.-epsilon .and. * (s1-s(i+1)).lt.epsilon .and. 
* abs(s(i)-s(i+1)).gt.epsilon) then bs(i,1)=1 else bs(i,1)=0 endif 250 continue do 260 j=1,m1 do 270 i=1,ia-j-1 C***** if(s(i+j).eq.s(i)) then if(abs(s(i+j)-s(i)).lt.epsilon) then term1=0.0 else term1=(s1-s(i))/(s(i+j)-s(i)) endif C***** if(s(i+j+1).eq.s(i+1)) then if(abs(s(i+j+1)-s(i+1)).lt.epsilon) then term2=0.0 else term2=(s(i+j+1)-s1)/(s(i+j+1)-s(i+1)) endif bs(i,j+1)=term1*bs(i,j)+term2*bs(i+1,j) 270 continue 260 continue bsum=0.0 xsum=0.0 ysum=0.0 zsum=0.0 do 310 i=1,k1 bsum=bsum+w(i)*bs(i,m1+1) xsum=xsum+w(i)*x(i)*bs(i,m1+1) ysum=ysum+w(i)*y(i)*bs(i,m1+1) zsum=zsum+w(i)*z(i)*bs(i,m1+1) 310 continue if(abs(bsum).lt.epsilon) then gx(ik1)=0.0 gy(ik1)=0.0 gz(ik1)=0.0 else gx(ik1)=xsum/bsum gy(ik1)=ysum/bsum gz(ik1)=zsum/bsum endif 240 continue call cmo_get_info('itp1',cmo, * ipitp1,length,icmotype,ierror) call cmo_get_info('imt1',cmo, * ipimt1,length,icmotype,ierror) call cmo_get_info('xic',cmo, * ipxic,length,icmotype,ierror) call cmo_get_info('yic',cmo, * ipyic,length,icmotype,ierror) call cmo_get_info('zic',cmo, * ipzic,length,icmotype,ierror) ntetsave=ntets do 510 i=1,k1point-1 x1=gx(i) y1=gy(i) z1=gz(i) x2=gx(i+1) y2=gy(i+1) z2=gz(i+1) npsave=npoints nx=1 dx1=1.0/dble(nx) dx=-dx1 do 530 ix=1,nx+1 dx=dx+dx1 xf1=(1.0-dx) xf2=dx npoints=npoints+1 call mmfindbk('xic',cmo,ipxic,length,icscode) if((npoints+1).gt.length) then npointsinc=npoints+1000 call cmo_set_info('nnodes',cmo,npointsinc,1,1,ierr) call mmgetlen(ipitetclr,nelementsmm,icscode) call cmo_set_info('nelements',cmo, * nelementsmm,1,1,ierror) call cmo_newlen(cmo,ierror) call cmo_get_info('itp1',cmo, * ipitp1,length,icmotype,ierror) call cmo_get_info('imt1',cmo, * ipimt1,length,icmotype,ierror) call cmo_get_info('xic',cmo, * ipxic,length,icmotype,ierror) call cmo_get_info('yic',cmo, * ipyic,length,icmotype,ierror) call cmo_get_info('zic',cmo, * ipzic,length,icmotype,ierror) endif imt1(npoints)=1+mod(irow-1,64) itp1(npoints)=0 xic(npoints)=xf1*x1+xf2*x2 yic(npoints)=xf1*y1+xf2*y2 
zic(npoints)=xf1*z1+xf2*z2 530 continue do 560 k=1,nx call mmgetlen(ipitetclr,length,icscode) if((ntets+1).gt.length) then inc=1000 ntetsinc=ntets+inc call cmo_set_info('nelements',cmo,ntetsinc,1,1,ierr) call mmfindbk('xic',cmo,ipxic,nnodesmm,icscode) call cmo_set_info('nnodes',cmo,nnodesmm,1,1,ierror) call cmo_newlen(cmo,ierror) call cmo_get_info('itetclr',cmo, * ipitetclr,length,icmotype,ierror) call cmo_get_info('itettyp',cmo, * ipitettyp,length,icmotype,ierror) call cmo_get_info('itetoff',cmo, * ipitetoff,length,icmotype,ierror) call cmo_get_info('jtetoff',cmo, * ipjtetoff,length,icmotype,ierror) call cmo_get_info('itet',cmo, * ipitet,length,icmotype,ierror) call cmo_get_info('jtet',cmo, * ipjtet,length,icmotype,ierror) endif i1=k+npsave i2=k+1+npsave ntets=ntets+1 itetclr(ntets)=imt1(i1) itettyp(ntets)=2 itetoff(ntets)=itoff jtetoff(ntets)=jtoff itoff=itoff+2 jtoff=jtoff+2 itet1(1+itetoff(ntets))=i1 itet1(2+itetoff(ntets))=i2 jtet1(1+jtetoff(ntets))=-1 jtet1(2+jtetoff(ntets))=-1 560 continue 510 continue call cmo_get_attinfo('velname',cmo,iout,rout,cvelnm, * ipout,lout,ityp,ierror) if(ierror.ne.0) cvelnm='vels' call cmo_get_info(cvelnm,cmo,ipvels,ilen,ityp,ierr) if(ierr.ne.0) then cbuff ='cmo/addatt/-def-/vels/VDOUBLE/vector/' // * 'nnodes/linear/permanent/gxa/0.0 ; ' // * 'finish' call dotaskx3d(cbuff,ierror) endif call cmo_get_info(cvelnm,cmo,ipvels,lin,itin,ierror) do it=ntetsave+1,ntets i1=itet1(itetoff(it)+1) i2=itet1(itetoff(it)+2) vels(1,i1)=vels(1,i1)+(xic(i2)-xic(i1)) vels(2,i1)=vels(2,i1)+(yic(i2)-yic(i1)) vels(3,i1)=vels(3,i1)+(zic(i2)-zic(i1)) vels(1,i2)=vels(1,i2)+(xic(i2)-xic(i1)) vels(2,i2)=vels(2,i2)+(yic(i2)-yic(i1)) vels(3,i2)=vels(3,i2)+(zic(i2)-zic(i1)) enddo call cmo_set_info('nnodes',cmo,npoints,1,1,ierror) call cmo_set_info('nelements',cmo,ntets,1,1,ierror) call mmrelprt(isubname,icscode) goto 9999 9999 continue return end
// Renders one button per animal; clicking an animal button appends that
// animal's image to #body. The size buttons ("small"/"big") are created
// but have no click handlers wired up yet (as before).
// Refactor: the four copy-pasted button builders and four copy-pasted
// click handlers are collapsed into two helpers; the original global
// function names (dog/horse/cat/cow/small/big) are kept for callers.

// Create a <button> with the given id (also used as its label) and CSS
// class, and append it to #body.
function makeButton(id, className) {
  const btn = document.createElement("button");
  btn.setAttribute("type", "button");
  btn.appendChild(document.createTextNode(id));
  btn.setAttribute("id", id);
  btn.setAttribute("class", className);
  document.getElementById("body").appendChild(btn);
}

// Append a fresh <img> for the given animal. NOTE: like the original,
// every click appends a new image rather than reusing the existing one.
function showImage(name) {
  const image = document.createElement("img");
  image.setAttribute("id", `${name}Image`);
  image.setAttribute("class", "zurag");
  image.setAttribute("src", `/image/${name}.jpg`);
  document.getElementById("body").append(image);
}

// Animal buttons (class "mmm").
function dog() { makeButton("dog", "mmm"); }
function horse() { makeButton("horse", "mmm"); }
function cat() { makeButton("cat", "mmm"); }
function cow() { makeButton("cow", "mmm"); }

// Size buttons (class "sm") — no behavior attached yet.
function small() { makeButton("small", "sm"); }
function big() { makeButton("big", "sm"); }

dog();
horse();
cat();
cow();

for (const name of ["dog", "horse", "cat", "cow"]) {
  document.querySelector(`#${name}`).addEventListener("click", () => showImage(name));
}

small();
big();
# Chef cookbook metadata for the scm_helper library cookbook.
name        "scm_helper"
description "Offers a few library functions to interact with source control systems"
maintainer  "AWS OpsWorks"
license     "Apache 2.0"
version     "1.0.0"

depends "opsworks_commons"
depends "s3_file"
{-# LANGUAGE CPP #-}

-- | Derive a machine identifier from the first usable MAC address.
-- Two build flavours: the Eta/JVM path queries java.net.NetworkInterface,
-- the native path uses the network-info package.
module MacAddress where

import Data.Binary (decode)
import qualified Data.ByteString.Lazy as BSL
import Safe (headDef)

#ifdef ETA_VERSION
import Java
import Data.Maybe (catMaybes)
import Data.Traversable (for)
import Data.Word (Word64, Word8)
#else /* !defined ETA_VERSION */
import Data.Word (Word64)
import Network.Info (MAC (MAC), getNetworkInterfaces, mac)
#endif /* ETA_VERSION */

-- | Return the MAC address packed into the low 48 bits of a 'Word64'.
getMacAddress :: IO Word64
#ifdef ETA_VERSION
getMacAddress = java $ do
  interfaces <- fromJava <$> getNetworkInterfaces
  macs <- for interfaces (<.> getHardwareAddress)
  let macBytes =
        headDef (error "Can't get any non-zero MAC address of this machine") $
          catMaybes macs
  let mac = foldBytes $ fromJava macBytes
  pure mac

data NetworkInterface = NetworkInterface @java.net.NetworkInterface
  deriving Class

foreign import java unsafe
  "@static java.net.NetworkInterface.getNetworkInterfaces"
  getNetworkInterfaces :: Java a (Enumeration NetworkInterface)

foreign import java unsafe
  getHardwareAddress :: Java NetworkInterface (Maybe JByteArray)

-- Left-pad the byte list to eight bytes and decode them big-endian.
foldBytes :: [Word8] -> Word64
foldBytes bytes = decode . BSL.pack $ replicate (8 - length bytes) 0 ++ bytes
#else /* !defined ETA_VERSION */
getMacAddress = decodeMac <$> getMac

-- First interface whose MAC is not all-zero.
getMac :: IO MAC
getMac =
  headDef (error "Can't get any non-zero MAC address of this machine")
    . filter (/= minBound)
    . map mac
    <$> getNetworkInterfaces

-- Pack the six MAC bytes (big-endian) into a Word64.
decodeMac :: MAC -> Word64
decodeMac (MAC b5 b4 b3 b2 b1 b0) =
  decode $ BSL.pack [0, 0, b5, b4, b3, b2, b1, b0]
#endif /* ETA_VERSION */
from __future__ import annotations
from abc import abstractmethod
from typing import TypeVar, Iterator, List, Tuple

from .Typing import POS_T, PTS_T


class SexpNode():
    """Sexp Node Implementation

    This class is used to represent a single node in an s-expression.
    It can be used to represent a list of nodes or a single node.
    """

    def __init__(self):
        # Mixed payload: child SexpNode instances and atom strings,
        # in document order.
        self.sexp: List[SexpNode|str] = []

    def __contains__(self, key: str) -> bool:
        """True when at least one child node's first element equals ``key``."""
        return len(self[key]) > 0

    def __getitem__(self, key: str|int|slice) -> List[SexpNode]:
        """Return every child node whose first element equals ``key``.

        NOTE(review): although typed str|int|slice, int/slice keys are not
        treated as positional indexing here — they only match via equality
        with a child's first element. Confirm intent with callers.
        """
        res: List[SexpNode] = []
        for node in self.sexp:
            if isinstance(node, SexpNode) and node.sexp[0] == key:
                res.append(node)
        return res

    def __iter__(self) -> Iterator[SexpNode]:
        # Iterate child nodes only; atom strings are skipped.
        return iter([x for x in self.sexp if isinstance(x, SexpNode)])

    def __repr__(self):
        return f'({self.sexp})'

    def __len__(self) -> int:
        return len(self.sexp)

    def values(self) -> List[str]:
        """Return the string value of this Node"""
        res: List[str] = []
        for node in self.sexp:
            if isinstance(node, str):
                res.append(node)
        return res

    def pos(self) -> POS_T:
        """Return the POS_T from this Node"""
        # Assumes elements 1 and 2 of this node are the coordinates
        # (e.g. a node of shape (at X Y ...)).
        return (float(str(self.sexp[1])), float(str(self.sexp[2])))

    def pts(self) -> PTS_T:
        """Return PTS_T from this node"""
        pts = []
        for _xy in self.sexp:
            # Collect every child of the form (xy X Y).
            if not isinstance(_xy, str) and _xy.get(0, '') == 'xy':
                pts.append((_xy.get(1, 0.0), _xy.get(2, 0.0)))
        return pts

    T = TypeVar("T")

    def get(self, path: int, default: T) -> T:
        """
        Get the element at position ``path``, coerced to the type of
        ``default``; return ``default`` when out of range or falsy.

        :param path int: position.
        :param default T: default value.
        :rtype T: Type of default value.
        """
        if len(self.sexp) <= path:
            return default
        #_node = self.__getitem__(path)
        _node = self.sexp[path]
        if _node:
            # Coerce the raw element to match the type of ``default``.
            if isinstance(default, str):
                return str(_node)  # type: ignore
            if isinstance(default, int):
                return int(_node)  # type: ignore
            if isinstance(default, float):
                return float(_node)  # type: ignore
        return default


def load_tree(sexp: str) -> SexpNode:
    """
    Parse an s-expression string into a SexpNode tree.

    :param sexp str: Input string.
    :rtype SexpNode: The parsed result.
    """
    length = len(sexp)

    def traverse(index: int) -> Tuple[SexpNode, int]:
        # Parse one parenthesised node starting just after its '('.
        # Returns the node and the index of its closing ')'.
        res = SexpNode()
        #items = []
        buffer: List[str] = []
        item = sexp[index]
        while item != ")":
            if item in [' ', '\n', '\r']:
                pass  # whitespace between items
            elif item == '(':
                subtree, index = traverse(index + 1)
                res.sexp.append(subtree)
            elif item == '"':
                # Quoted atom: scan to the closing quote, keeping
                # backslash escapes verbatim.
                buffer = []
                index += 1
                while index < length:
                    if sexp[index] == '"':
                        break
                    if sexp[index] == '\\':
                        buffer.append(sexp[index])
                        index += 1
                    buffer.append(sexp[index])
                    index += 1
                res.sexp.append("".join(buffer))
            else:
                # Bare atom: read up to whitespace or ')'.
                buffer = []
                while index < length:
                    if sexp[index] == ')':
                        res.sexp.append("".join(buffer))
                        index -= 1  # step back so ')' terminates the outer loop
                        break
                    if sexp[index] in [' ', '\n', '\r']:
                        res.sexp.append("".join(buffer))
                        break
                    buffer.append(sexp[index])
                    index += 1
            index += 1
            item = sexp[index]
        return res, index

    # Start parsing at the document's first '('.
    return traverse(sexp.find('(')+1)[0]


class SexpVisitor():
    """Visit the SexpNode items."""

    @abstractmethod
    def start(self) -> None:
        """Called once before traversal begins."""

    @abstractmethod
    def end(self) -> None:
        """Called once after traversal completes."""

    @abstractmethod
    def node(self, name: str, sexp: SexpNode) -> None:
        """Called for each node found at the requested level."""

    def visit(self, sexp: SexpNode, level: int = 0, act_level: int=0) -> None:
        """Visit the sexp nodes."""
        self.start()
        self._visit(sexp, level, act_level)
        self.end()

    def _visit(self, sexp: SexpNode, level: int = 0, act_level: int=0) -> None:
        # Fire node() only for nodes at exactly the requested depth;
        # recurse deeper otherwise.
        for node in sexp.sexp:
            if isinstance(node, SexpNode):
                if act_level == level:
                    self.node(str(node.sexp[0]), node)
                else:
                    self._visit(node, level=level, act_level=act_level+1)
package appshop.modules.sys.dto

import appshop.annotation.*
import org.hibernate.validator.constraints.*

/**
 * Credentials payload for account login/registration requests.
 * Both fields are bean-validated: required and 3-100 characters long.
 *
 * Created with IntelliJ IDEA, USER: jin, DATE: 2021/3/26, JDK 1.8
 */
@noArg
data class AccountDTO(
    // Required; 3-100 characters.
    @field:NotBlank(message = "username can not null")
    @field:Length(min = 3, max = 100, message = "username length in 3-100")
    var username: String,

    // Required; 3-100 characters.
    @field:NotBlank(message = "password can not null")
    @field:Length(min = 3, max = 100, message = "password length in 3-100")
    var password: String)
package com.example.aquam.base

import androidx.fragment.app.Fragment
import com.example.aquam.ui.MainActivity

/**
 * Base fragment that forwards progress-dialog calls to the host activity.
 *
 * NOTE(review): the unchecked cast assumes every subclass is hosted by
 * [MainActivity]; attaching to any other activity throws ClassCastException.
 */
abstract class BaseFragment : Fragment(), BaseView {
    override fun showProgressDialog() {
        (activity as MainActivity).showProgressDialog()
    }

    override fun hideProgressDialog() {
        (activity as MainActivity).hideProgressDialog()
    }
}
import os

from koosli.search_providers import bing, yahoo

# Test/development configuration: debug enabled, mock search providers,
# in-memory database.
DEBUG = True
SECRET_KEY = 'not a secret'  # placeholder value; never use in production
SPLASH_REGISTRATION = False

#=========================================
# Search Providers
#=========================================

# Mock implementations so tests run without hitting external search APIs.
SEARCH_PROVIDERS = {
    'bing': bing.BingMock,
    'yahoo': yahoo.YahooMock,
}

#=========================================
# Database Config
#=========================================

# In-memory SQLite for quick test runs
SQLALCHEMY_DATABASE_URI = 'sqlite://'
#ifndef MDSU_INSN_H
#define MDSU_INSN_H

#include <inttypes.h>

/* Storage types for register indices and instruction offsets. */
typedef uint8_t reg_t;
typedef uint32_t insn_off_t;

/* Instruction class tags (2-bit values). */
#define MACHINIC 0b00 // base machine commands
#define CONSTANT 0b01 // 24Bit constant
#define CALL 0b10 // Optimized call
#define EXTENDED 0b11 // Extended constant definition

/* 20-bit operand helpers; bit 19 flags a partial instruction. */
#define MAX20BIT ((1u << 20u) - 1)
#define PARTIAL_INSN (1u << 19u)
#define FULL_INSN (0u << 19u)

// REGISTERS
// NOTE: encodings start at 0b0001 for R0 — 0b0000 is left unassigned.
#define R0 0b0001
#define R1 0b0010
#define R2 0b0011
#define R3 0b0100
#define R4 0b0101
#define R5 0b0110
#define R6 0b0111
#define R7 0b1000
#define R8 0b1001
#define R9 0b1010
#define R10 0b1011
#define R11 0b1100
#define R12 0b1101
#define R13 0b1110

// MACHINIC commands
#define NOOP 0x0
#define PRINTLN 0x1 // Print string with 24bit length
#define JMP 0x2 // Unconditional jump
#define EJMP 0x3 // Conditional jump to 23bit global condition offset
#define NEJMP 0x4 // same as EJMP, but jumps if condition is false
#define IPRINT 0x5 // Print i32 constant(debugging)

// Arithmetic commands
#define ADD 0x0

// Extended
#define CONST_CHUNK 0x0 // argument continuation followed by other commands

#endif
using System.Collections;

namespace Smith
{
    /// <summary>
    /// Base class for deep-clone helpers. Carries a shared clone context
    /// mapping each original object to its clone (filled via AddToContext)
    /// — presumably so shared references are cloned only once; confirm
    /// against Smith.Clone.
    /// </summary>
    public abstract class SmithBase : ISmith
    {
        // Original -> clone map for the current clone operation.
        private Hashtable _context;

        public void SetContext(Hashtable context)
        {
            _context = context;
        }

        protected void AddToContext(object original, object clone)
        {
            _context.Add(original, clone);
        }

        /// <summary>Clone a single value through the shared context.</summary>
        protected virtual object CloneProp(object original)
        {
            return Smith.Clone(original, _context);
        }

        /// <summary>Append a clone of every item of <paramref name="original"/> to <paramref name="clone"/>.</summary>
        protected virtual void CloneList(IList original, IList clone)
        {
            foreach (var item in original)
            {
                clone.Add(Smith.Clone(item, _context));
            }
        }

        /// <summary>Clone element-by-element; <paramref name="clone"/> must already have the same length.</summary>
        protected virtual void CloneArray(IList original, IList clone)
        {
            for (var i = 0; i < original.Count; i++)
            {
                clone[i] = Smith.Clone(original[i], _context);
            }
        }

        /// <summary>Clone both keys and values into <paramref name="clone"/>.</summary>
        protected virtual void CloneDictionary(IDictionary original, IDictionary clone)
        {
            foreach (var key in original.Keys)
            {
                var value = original[key];
                clone.Add(Smith.Clone(key, _context), Smith.Clone(value, _context));
            }
        }

        public abstract object Clone(object original);
    }
}
<?
// Ukrainian localisation strings for the person-type (payer-type) admin
// pages. Values are runtime UI text — do not translate or edit casually.
$MESS['F_ACTIVE'] = "Активність";
$MESS['PROPS_GROUP_DEFAULT_NAME'] = "Властивості замовлення";
$MESS['SPTEN_2FLIST'] = "Список типів платників";
$MESS['SPTEN_ACTIVE'] = "Активність";
$MESS['SPTEN_ADDING'] = "Додавання нового типу платника";
$MESS['SPTEN_APPLY'] = "Застосувати";
$MESS['SPTEN_CANCEL'] = "Скинути";
$MESS['SPTEN_CODE'] = "Код";
$MESS['SPTEN_DELETE_PERSON_TYPE'] = "Видалити тип";
$MESS['SPTEN_DELETE_PERSON_TYPE_CONFIRM'] = "Ви впевнені, що хочете видалити цей тип платника?";
$MESS['SPTEN_DOMAIN_P_TYPE'] = "Відповідність з фіз. та юр. Особою";
$MESS['SPTEN_DOMAIN_P_TYPE_E'] = "Юридична особа";
$MESS['SPTEN_DOMAIN_P_TYPE_I'] = "Фізична особа";
$MESS['SPTEN_DOMAIN_P_TYPE_NONE'] = "Не встановлено";
$MESS['SPTEN_ERROR'] = "Помилка збереження";
$MESS['SPTEN_ERROR_PERSON_TYPE_EXISTS'] = "Тип платника з даним кодом вже існує";
$MESS['SPTEN_ERROR_SAVING_PERSON_TYPE'] = "Помилка збереження типу платника";
$MESS['SPTEN_ERROR_SAVING_PROPS_GRP'] = "Помилка збереження групи властивостей";
$MESS['SPTEN_NAME'] = "Назва";
$MESS['SPTEN_NEW_PERSON_TYPE'] = "Створити новий тип";
$MESS['SPTEN_NO_PERMS2ADD'] = "У вас недостатньо прав для додавання нового типу платника";
$MESS['SPTEN_NO_PERSON_TYPE'] = "Тип платника ##ID# не знайдений";
$MESS['SPTEN_PERSON_TYPE_VALUE'] = "Величина знижки";
$MESS['SPTEN_PRICE'] = "Застосовується при сумі замовлення";
$MESS['SPTEN_PRICE_FROM'] = "від";
$MESS['SPTEN_PRICE_TO'] = "до";
$MESS['SPTEN_SAVE'] = "Зберегти";
$MESS['SPTEN_SAVE_ADD'] = "Додати";
$MESS['SPTEN_SITE'] = "Сайт";
$MESS['SPTEN_SORT'] = "Індекс сортування";
$MESS['SPTEN_TAB_PERSON_TYPE'] = "Тип платника";
$MESS['SPTEN_TAB_PERSON_TYPE_DESCR'] = "Параметри типу платника";
$MESS['SPTEN_TIMESTAMP'] = "Дата останнього змінення:";
$MESS['SPTEN_TO_LIST'] = "Список типів платників";
$MESS['SPTEN_UPDATING'] = "Змінення параметрів типу платника";
$MESS['SPTEN_XML_ID'] = "Зовнішній код";
?>
# frozen_string_literal: true require 'rails_helper' RSpec.describe Jobs::CleanUpDeprecatedUrlSiteSettings do before do @original_provider = SiteSetting.provider SiteSetting.provider = SiteSettings::DbProvider.new(SiteSetting) end after do SiteSetting.delete_all SiteSetting.provider = @original_provider end it 'should clean up the old deprecated site settings correctly' do logo_upload = Fabricate(:upload) SiteSetting.logo = logo_upload SiteSetting.set("logo_url", '/test/some/url', warn: false) SiteSetting.set("logo_small_url", '/test/another/url', warn: false) expect do described_class.new.execute({}) end.to change { SiteSetting.logo_url }.from("/test/some/url").to("") expect(SiteSetting.exists?(name: "logo_url")).to eq(false) expect(SiteSetting.logo).to eq(logo_upload) expect(SiteSetting.logo_small_url).to eq('/test/another/url') end end
const assert = require('assert');
//const os = require("os");
const { execFile } = require('child_process');

// Smoke tests for the CLI help screen, run once for each help flag.
//
// Fix over the original: assertions now run BEFORE done(), and failures are
// routed through done(err). The original called done() first, so a failing
// assertion afterwards escaped mocha's reporting as an unhandled exception.
// The copy-pasted -h / --help suites are also collapsed into one loop.
describe("Command Line - Help", function() {

  // Fragments that must appear somewhere in the help text.
  const snippets = [
    "Usage",
    "[files...|STDIN]",
    "Output file (default STDOUT)",
    "Add comments lines with file names",
    "Add extra new lines",
  ];

  ['-h', '--help'].forEach(function(flag) {
    describe("noglifyjs " + flag, function() {

      // Spawn the CLI with `flag` and hand stdout to `check`; any assertion
      // failure is reported to mocha via done(err).
      function runHelp(done, check) {
        execFile('./bin/noglify-js', [flag], {}, function(error, stdout, stderr) {
          try {
            assert.equal(error, null);
            check(stdout);
            done();
          } catch (err) {
            done(err);
          }
        });
      }

      it("is a string", function(done) {
        runHelp(done, function(stdout) {
          assert.equal(typeof stdout, 'string');
        });
      });

      it("contains Usage", function(done) {
        runHelp(done, function(stdout) {
          assert.notEqual(stdout.indexOf(snippets[0]), -1, "Substring is missing");
        });
      });

      snippets.slice(1).forEach(function(snippet) {
        it("contains Usage parameters", function(done) {
          runHelp(done, function(stdout) {
            assert.notEqual(stdout.indexOf(snippet), -1, "Substring is missing");
          });
        });
      });
    });
  });
});
## Modules <dl> <dt><a href="#module_cognito-login">cognito-login</a></dt> <dd><p>Module</p> </dd> <dt><a href="#module_cognito-login-factory">cognito-login-factory</a></dt> <dd><p>Factory module</p> </dd> </dl> <a name="module_cognito-login"></a> ## cognito-login Module * [cognito-login](#module_cognito-login) * [.package()](#module_cognito-login+package) * [.login(username, password)](#module_cognito-login+login) <a name="module_cognito-login+package"></a> ### cognito-login.package() Returns the package name **Kind**: instance method of [<code>cognito-login</code>](#module_cognito-login) <a name="module_cognito-login+login"></a> ### cognito-login.login(username, password) Login method. **Kind**: instance method of [<code>cognito-login</code>](#module_cognito-login) | Param | Type | Description | | --- | --- | --- | | username | <code>string</code> | Cognito user name | | password | <code>string</code> | Cognito user password | **Example** *(Usage Example)* ```js var factory = require("@mitchallen/cognito-login"); factory.create({ userPoolId: COGNITO_TEST_USER_POOL_ID, clientId: COGNITO_TEST_CLIENT_ID }) .then( obj => obj.login({ username: COGNITO_TEST_USER, password: COGNITO_TEST_PASSWORD }) ) .then( token => { // console.log(token); // user has successfully logged in // update state or redux store }) .catch( err => { console.error(err); // login failed }); ``` <a name="module_cognito-login-factory"></a> ## cognito-login-factory Factory module <a name="module_cognito-login-factory.create"></a> ### cognito-login-factory.create(userPool, userPoolId, clientId) ⇒ <code>Promise</code> Factory method. 
**Kind**: static method of [<code>cognito-login-factory</code>](#module_cognito-login-factory) **Returns**: <code>Promise</code> - that resolves to {module:cognito-login} | Param | Type | Description | | --- | --- | --- | | userPool | <code>Object</code> | Cognito user pool | | userPoolId | <code>string</code> | Cognito user pool id | | clientId | <code>string</code> | Cognito client id | **Example** *(Use existing pool)* ```js var factory = require("@mitchallen/cognito-login"); factory.create({ userPool: userPool }) .then( obj => obj.login({ ... }) ) .catch( err => { console.error(err); }); ``` **Example** *(Create pool from id&#x27;s example)* ```js var factory = require("@mitchallen/cognito-login"); factory.create({ userPoolId: COGNITO_TEST_USER_POOL_ID, clientId: COGNITO_TEST_CLIENT_ID }) .then( obj => obj.login({ ... }) ) .catch( err => { console.error(err); }); ```
import 'package:flutter/material.dart';
import 'package:flutter_bloc/flutter_bloc.dart';
import 'package:two_miners_monitor_oss/extension.dart';
import 'package:two_miners_monitor_oss/l10n/l10n.dart';
import 'package:two_miners_monitor_oss/miners/bloc/miners_bloc.dart';
import 'package:two_miners_monitor_oss/workers/bloc/workers_bloc.dart';
import 'package:two_miners_monitor_oss/workers/view/worker_tab.dart';

/// Entry point for the workers screen; provides the [WorkersBloc].
class WorkersPage extends StatelessWidget {
  const WorkersPage({Key? key}) : super(key: key);

  @override
  Widget build(BuildContext context) {
    return BlocProvider(
      create: (context) => WorkersBloc(),
      child: const WorkersView(),
    );
  }
}

class WorkersView extends StatefulWidget {
  const WorkersView({Key? key}) : super(key: key);

  @override
  State<WorkersView> createState() => _WorkersViewState();
}

class _WorkersViewState extends State<WorkersView>
    with TickerProviderStateMixin {
  // Controller for the per-miner tab bar.
  TabController? _controller;

  @override
  void dispose() {
    _controller?.dispose();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    return BlocBuilder<MinersBloc, MinersState>(
      builder: (context, state) {
        final isEmpty = state.miners.isEmpty;
        // NOTE(review): a fresh TabController (and listener) is created on
        // every rebuild, but dispose() only disposes the most recent one.
        // Consider moving creation to initState/didUpdateWidget — confirm.
        _controller = TabController(length: state.miners.length, vsync: this);
        _controller?.addListener(() {
          // Keep the WorkersBloc in sync with the selected tab.
          context
              .read<WorkersBloc>()
              .add(WorkersEvent.tabTapped(tabIndex: _controller?.index ?? 0));
        });
        return CustomScrollView(
          slivers: [
            SliverAppBar(
              automaticallyImplyLeading: false,
              actions: [
                // Manual refresh of the miners list.
                IconButton(
                  icon: const Icon(Icons.refresh),
                  onPressed: () {
                    context
                        .read<MinersBloc>()
                        .add(const MinersEvent.loadMinersRequested());
                  },
                ),
              ],
              title: Text(context.l10n.workersNavBarTitle),
              expandedHeight: kToolbarHeight,
              // One tab per tracked miner; no tab bar when there are none.
              bottom: !isEmpty
                  ? TabBar(
                      controller: _controller,
                      isScrollable: true,
                      tabs: state.miners
                          .map(
                            (miner) => Tab(
                              icon: SizedBox(
                                width: 25,
                                height: 25,
                                child: miner.repository?.logo,
                              ),
                              text: miner.walletID.shortenAddress,
                            ),
                          )
                          .toList(),
                    )
                  : null,
              floating: !isEmpty,
              pinned: isEmpty,
            ),
            if (isEmpty) const NoWorkersView() else const WorkersTab()
          ],
        );
      },
    );
  }
}

/// Placeholder shown when no miners are tracked yet.
class NoWorkersView extends StatelessWidget {
  const NoWorkersView({Key? key}) : super(key: key);

  @override
  Widget build(BuildContext context) {
    return SliverFillRemaining(
      child: Padding(
        padding: const EdgeInsets.symmetric(horizontal: 25),
        child: FittedBox(
          child: Text(
            context.l10n.workersNoWorkers,
            style: Theme.of(context)
                .textTheme
                .headlineMedium!
                .copyWith(color: Theme.of(context).colorScheme.onBackground),
            textAlign: TextAlign.center,
          ),
        ),
      ),
    );
  }
}

/// Shows the worker list for the miner selected in the tab bar.
class WorkersTab extends StatelessWidget {
  const WorkersTab({
    Key? key,
  }) : super(key: key);

  @override
  Widget build(BuildContext context) {
    return BlocBuilder<MinersBloc, MinersState>(
      builder: (context, minersState) {
        return BlocBuilder<WorkersBloc, WorkersState>(
          builder: (context, workersState) {
            final miner = minersState.miners[workersState.tabIndex];
            return WorkerTab(
              // Identity key derived from the miner so the tab's state is
              // tied to a specific wallet/repository pair.
              key: ValueKey('workers${miner.walletID}${miner.repositoryIndex}'),
              miner: miner,
            );
          },
        );
      },
    );
  }
}
package com.webank.wedatasphere.linkis.governance.common.protocol.job

import org.apache.commons.lang.builder.{EqualsBuilder, HashCodeBuilder}

import java.util

import scala.beans.BeanProperty

/**
 * Generic job response carrier: a numeric status, a human-readable message
 * and an arbitrary key/value payload.
 *
 * `id` is deliberately excluded from equals/hashCode/toString, matching the
 * original contract (presumably a storage-assigned identifier — confirm).
 */
class JobRespProtocol {

  @BeanProperty var id: Long = _
  @BeanProperty var status: Int = _
  @BeanProperty var msg: String = _
  @BeanProperty var data: util.Map[String, Object] = new util.HashMap[String, Object]()

  override def equals(o: Any): Boolean = {
    if (o == null || (getClass != o.getClass)) return false
    val that = o.asInstanceOf[JobRespProtocol]
    // Identity fast path must use `eq`: the original's `this == o` delegates
    // back to equals() and recursed infinitely (StackOverflowError).
    if (this eq that) return true
    new EqualsBuilder()
      .append(status, that.status)
      .append(msg, that.msg)
      .append(data, that.data)
      .isEquals
  }

  override def hashCode(): Int = {
    // Must stay consistent with equals: same fields, same order.
    new HashCodeBuilder(17, 37)
      .append(status)
      .append(msg)
      .append(data)
      .toHashCode()
  }

  override def toString: String = {
    "JobResponse{" +
      "status=" + status +
      ", msg='" + msg + "'" +
      ", data=" + data +
      "}"
  }
}
"""Test the API by running a simple query against one of the public datasets
"""
import os
import tempfile

import pytest

import bqtools

# Query run against the NOAA GSOD database. Based on the one at:
# https://cloud.google.com/bigquery/public-data/noaa-gsod
# Finds each US state's 2015 record-high temperature, keeping the top ten.
query = """
SELECT
  max, (max-32)*5/9 celsius, mo, da, state, stn, name
FROM (
  SELECT
    max, mo, da, state, stn, name,
    ROW_NUMBER() OVER(PARTITION BY state ORDER BY max DESC) rn
  FROM
    [bigquery-public-data:noaa_gsod.gsod2015] a
  JOIN
    [bigquery-public-data:noaa_gsod.stations] b
  ON
    a.stn=b.usaf AND a.wban=b.wban
  WHERE
    state IS NOT NULL AND max<1000 AND country='US' )
WHERE
  rn=1
ORDER BY
  max DESC, name
LIMIT 10;
"""

# Expected CSV produced by `query`: header plus ten rows.
expected_query_result = """
max,celsius,mo,da,state,stn,name
129.9,54.388888888888886,06,29,AK,703605,PLATINUM AIRPORT
127.4,53,05,17,CO,740002,LA VETA PASS AWOS-3 ARPT
126.1,52.277777777777779,07,22,TX,720647,LAMPASAS AIRPORT
121.8,49.888888888888886,06,21,CA,999999,STOVEPIPE WELLS 1 SW
117,47.222222222222221,08,15,AZ,722780,PHOENIX SKY HARBOR INTL AIRPO
115,46.111111111111114,07,17,OK,723525,HOBART MUNICIPAL AIRPORT
113,45,06,27,NV,723860,MCCARRAN INTERNATIONAL AIRPOR
113,45,06,29,WA,727846,WALLA WALLA REGIONAL ARPT
111.2,44,06,26,UT,724754,ST GEORGE MUNICIPAL ARPT
111,43.888888888888886,06,28,OR,726883,HERMISTON MUNICIPAL ARPT
""".strip()

# Local scratch file the extracted results are downloaded to for comparison.
local_path = tempfile.gettempdir() + "/test_bqtools.csv"

# Currently only testing the small result interface. Testing the larger
# result interface requires also having a scratch BQ table path
# XXX add that as a passed parameter and only run if set. (XXX -> issues)
# XXX Many of the options still untested as well.
def test_basic_async_query(proj_id, std_gcs_path):
    """Exercise the async pipeline: query -> poll -> extract -> poll -> check."""
    bigq = bqtools.BigQuery()
    query_job = bigq.async_query(proj_id, query, allow_large_results=False)
    bigq.poll_job(query_job)
    extract_job = bigq.async_extract_query(query_job, std_gcs_path,
                                           compression=None)
    bigq.poll_job(extract_job)
    _download_and_check(std_gcs_path)


def test_basic_query_and_extract(proj_id, std_gcs_path):
    "Test the basic, small-result interface"
    bigq = bqtools.BigQuery()
    bigq.query_and_extract(proj_id, query, std_gcs_path, compression="NONE")
    _download_and_check(std_gcs_path)


def test_basic_parallel_query_and_extract(proj_id, gcs_temp_dir):
    """Run several queries in parallel and verify every extracted result."""
    queries = []
    for i in range(3):
        # We are creating three identical queries here,
        # typically, you'd use different queries
        gcs_path = gcs_temp_dir + "temp_bqtools_{}.csv".format(i)
        queries.append(dict(
            proj_id=proj_id,
            query=query,
            compression="NONE",
            path=gcs_path))
    bigq = bqtools.BigQuery()
    for gcs_path in bigq.parallel_query_and_extract(queries):
        _download_and_check(gcs_path)


def test_broken_query(proj_id, std_gcs_path):
    """An invalid query should surface as a RuntimeError."""
    # Deliberately malformed SQL; shadows the module-level `query`.
    query = "SELECT COUNT(*) FRM_MISPELLED [bigquery-public-data:noaa_gsod.gsod2015]"
    bigq = bqtools.BigQuery()
    assert pytest.raises(RuntimeError, bigq.query_and_extract,
                         proj_id, query, std_gcs_path, compression="NONE")


def _download_and_check(gcs_path):
    """Download the extracted CSV from GCS and compare it to the expectation."""
    bqtools.gs_mv(gcs_path, local_path)
    with open(local_path) as f:
        query_result = f.read().strip()
    assert query_result == expected_query_result, query_result
# Streaming This example assumes you have a configured Twitter Streaming `client`. Instructions on how to configure a client can be found in [examples/Configuration.md][cfg]. [cfg]: https://github.com/sferik/twitter/blob/master/examples/Configuration.md Here's a simple example of how to stream tweets from San Francisco: ```ruby client.filter(locations: "-122.75,36.8,-121.75,37.8") do |tweet| puts tweet.text end ```
//! The `graphql` module implements the GraphQL types, queries and mutations //! that are available to users. use crate::repository::Repository; /// The query object defines all queries that the schema supports. pub struct Query; juniper::graphql_object!(Query: Repository |&self| { field apiVersion() -> & str { "1.0" } }); /// The mutation object defines all mutations that the schema supports. pub struct Mutation; juniper::graphql_object!(Mutation: Repository | &self | {}); /// The GraphQL schema can be queries by users. pub type Schema = juniper::RootNode<'static, Query, Mutation>; /// Create a schema. /// /// This method initializes the schema with the default query and mutation /// objects. pub fn create_schema() -> Schema { Schema::new(Query, Mutation) } #[cfg(test)] mod tests { use crate::config::{Config, Environment}; use crate::graphql::create_schema; use crate::repository::Repository; use juniper::Variables; fn repo() -> Repository { let config = Config { env: Environment::Test, ..Default::default() }; Repository::with_test_transactions(&config.database_url().as_str()) } #[test] fn execute_schema() { let (result, _errors) = juniper::execute( "query { apiVersion }", None, &create_schema(), &Variables::new(), &repo(), ) .unwrap(); assert_eq!(result, graphql_value!({ "apiVersion" : "1.0" })); } }
How to install the developer edition of Explain

Explain is completely open-source and only uses external open-source software packages to run.

Install VS Code : https://code.visualstudio.com/download <br>
Install Git : https://git-scm.com/downloads <br>
Install NodeJs : https://nodejs.org/en/download/ <br>
Sign up for GitHub : https://github.com <br>

Go to the command prompt (Windows) or Terminal (Linux/OSX).<br><br>
Register your Git username and email with the commands: <br>
<i>git config --global user.name "your username"</i> <br>
<i>git config --global user.email "your email"</i> <br>
<br>
Install the Yarn package manager with command: <i>npm install -g yarn</i> <br>
<br>
Install the Quasar framework with command: <i>npm install -g @quasar/cli</i><br>
<br>
Go to the command prompt (Windows) or Terminal (Linux/OSX) if not already open.<br>
Make a directory where you want to put the Explain application.<br>
<i>mkdir projects</i><br>
Change directory to the newly created folder with command<br>
<i>cd projects</i><br>
Clone the GitHub Explain project with command<br>
<i>git clone https://github.com/Dobutamine/explain_dev.git</i><br>
Navigate into the explain directory with command<br>
<i>cd explain_dev</i><br>
Update the dependencies with command<br>
<i>yarn</i><br>
<br>
WINDOWS USERS ONLY!<br>
To allow the scripts to run, open PowerShell with administrator rights.<br>
You can find PowerShell in your Start menu. Right-click PowerShell and select Run as administrator. <br>
In the PowerShell window type:<br>
<i>Set-ExecutionPolicy unrestricted</i><br>
<br>
FIRST TIME RUN<br>
Open VS Code from the desktop or Start menu.<br>
- Go to File -> Open<br>
- Navigate to the explain directory and select Open.<br>
- If asked by VS Code in the lower right corner -> Install recommended extensions -> yes!<br>
- First click on the master branch in the lower left corner of VS Code.<br>
- Create a new branch for yourself to work in (top middle in VS Code).
You can choose any name.<br>
<br>
RUNNING EXPLAIN<br>
- In VS Code, with the Explain project loaded (see the step above), go to Terminal in the menu bar and choose New Terminal<br>
In the terminal window below, enter the command<br>
<i>quasar dev</i><br>
Go to any browser (preferably Chrome) and type in the address bar<br>
<i>localhost:8080</i>

Have fun!
#!/usr/bin/env bash
# Replace the local ixo node's genesis file with the bundled one, enable the
# REST API and Swagger in app.toml, and start the node without pruning.
# NOTE(review): no `set -e` — a failed cp/validate does not stop the script.

echo "Backing up existing genesis file..."
cp "$HOME"/.ixod/config/genesis.json "$HOME"/.ixod/config/genesis.json.backup
echo "Copying new genesis file to $HOME/.ixod/config/genesis.json..."
cp genesis.json "$HOME"/.ixod/config/genesis.json

# Wipe local chain state and sanity-check the new genesis.
ixod unsafe-reset-all
ixod validate-genesis

# Enable REST API (assumed to be at line 104 of app.toml)
# NOTE(review): line-number-addressed sed is brittle across app.toml versions.
FROM="enable = false"
TO="enable = true"
sed -i "104s/$FROM/$TO/" "$HOME"/.ixod/config/app.toml

# Enable Swagger docs (assumed to be at line 107 of app.toml)
FROM="swagger = false"
TO="swagger = true"
sed -i "107s/$FROM/$TO/" "$HOME"/.ixod/config/app.toml

# Uncomment the below to broadcast node RPC endpoint
#FROM="laddr = \"tcp:\/\/127.0.0.1:26657\""
#TO="laddr = \"tcp:\/\/0.0.0.0:26657\""
#sed -i "s/$FROM/$TO/" "$HOME"/.ixod/config/config.toml

# Uncomment the below to set timeouts to 1s for shorter block times
#sed -i 's/timeout_commit = "5s"/timeout_commit = "1s"/g' "$HOME"/.ixod/config/config.toml
#sed -i 's/timeout_propose = "3s"/timeout_propose = "1s"/g' "$HOME"/.ixod/config/config.toml

# Keep full history (no pruning).
ixod start --pruning "nothing"
require File.expand_path(File.dirname(__FILE__) + '/../utilities')

# Capistrano tasks for starting/stopping/restarting the delayed_job daemon.
Capistrano::Configuration.instance(true).load do
  # Defaults; override in deploy.rb (but see the role caveat below).
  set :delayed_script_path, "#{current_path}/script/delayed_job"
  set :delayed_job_env, 'production'
  set :delayed_job_role, :app
  set :base_ruby_path, '/usr'

  # TODO: I think the with_role pattern is broken, if you override delayed_job_role in your deploy.rb it's too late.
  # the task has been associated with :app at recipe load time. Thats probably why with_role was created so that the
  # role could be re-evaluated at execution time making setting the role at load time meaningless. A better pattern
  # that I've started elsewhere is to create an empty role and associate servers with the role which CAN happen at
  # load time. See: riak as an example. -- donnoman

  namespace :delayed_job do
    desc "Start delayed_job process"
    task :start, :roles => delayed_job_role do
      utilities.with_role(delayed_job_role) do
        try_sudo "RAILS_ENV=#{delayed_job_env} #{base_ruby_path}/bin/ruby #{delayed_script_path} start"
      end
    end

    desc "Stop delayed_job process"
    task :stop, :roles => delayed_job_role do
      utilities.with_role(delayed_job_role) do
        try_sudo "RAILS_ENV=#{delayed_job_env} #{base_ruby_path}/bin/ruby #{delayed_script_path} stop"
      end
    end

    desc "Restart delayed_job process"
    task :restart, :roles => delayed_job_role do
      utilities.with_role(delayed_job_role) do
        delayed_job.stop
        # Give the daemon time to exit, then force-kill any stragglers;
        # `; true` keeps a non-zero killall exit from aborting the task.
        sleep(4)
        try_sudo "killall -s TERM delayed_job; true"
        delayed_job.start
      end
    end
  end
end
using Root.Coding.Code.Enums.E01D.Json;

namespace Root.Coding.Code.Models.E01D.Json.Conversion
{
    /// <summary>
    /// JSON converter for enum values represented by their string names.
    /// </summary>
    class StringEnumConverter : JsonConverter
    {
        /// <summary>Kind tag identifying this converter for dispatch.</summary>
        public override JsonConverterKind Kind => JsonConverterKind.StringEnum;
    }
}
class Tables():
    """A table described by its length, width and height (in metres)."""

    def __init__(self, newLength, newWidth, newHeight):
        self.length = newLength
        self.width = newWidth
        self.height = newHeight

    def getSize(self):
        """Print a human-readable summary of the table's dimensions."""
        summary = (
            'Размеры данного стола: \n'
            'Длина - {0} м \n'
            'Ширина - {1} м \n'
            'Высота - {2} м'
        ).format(self.length, self.width, self.height)
        print(summary)


class KitchenTables(Tables):
    """Dining table whose seating is estimated from the table-top area."""

    def getNumberOfSeats(self):
        """Estimate seats: one per 0.6 m x 0.35 m patch of table area."""
        seat_footprint = 0.6 * 0.35
        return int(self.length * self.width // seat_footprint)


class Desk(Tables):
    """Writing desk; the whole top counts as working area."""

    def getWorkingArea(self):
        """Return the working area of the desk top in square metres."""
        return self.length * self.width


class ComputerDesk(Desk):
    """Desk whose top is partially occupied by devices."""

    def __init__(self, newLength, newWidth, newHeight, newDevices):
        super().__init__(newLength, newWidth, newHeight)
        self.devices = newDevices

    def getWorkingArea(self):
        """Return the free working area (m^2), or a message when the
        devices occupy the whole top."""
        free_area = self.length * self.width - self.devices
        if free_area > 0:
            return free_area
        return 'Недостаточно рабочего места'


# Demo: describe three tables and print their derived figures.
table1 = KitchenTables(2, 1, 0.75)
print('Обеденный стол')
print('**********')
table1.getSize()
print('Количество посадочных мест - ', table1.getNumberOfSeats())
print('**********')
print()

table2 = Desk(1.2, 0.8, 0.75)
print('Письменный стол')
print('**********')
table2.getSize()
print('Площадь рабочей зоны - ', table2.getWorkingArea(), 'м^2')
print('**********')
print()

table3 = ComputerDesk(2, 1, 0.75, 0.45)
print('Компьютерный стол')
print('**********')
table3.getSize()
print('Площадь рабочей зоны - ', table3.getWorkingArea(), ' м^2')
print('**********')
import React from 'react';
import * as layout from '../three-columns';
import { layoutWrapper } from '../layout-wrapper';
import LinksPanel from '../containers/links-panel-container';

const leaderBoardTableStyle = {
  width: "85%",
  margin: "10px auto",
};

// Emphasis applied to the first ten rows only.
const topTenStyle = {
  fontSize: "1.3em",
  fontWeight: "bold",
};

// Leaderboard page: ranks `users` by the order they arrive in (callers are
// expected to pass them pre-sorted) and renders ranking/name/WPM rows.
const LeaderBoard = ({ users }) => {
  return (
    <div style={layoutWrapper}>
      <div style={layout.rightColumn}>
        <LinksPanel />
      </div>
      <div style={layout.centerColumn}>
        <h1 className="headerBar">Leaderboard</h1>
        <div className="borderedContainer">
          <h2>Fastest typists</h2>
          <table className="borderedContainer" style={leaderBoardTableStyle}>
            <thead>
              <tr>
                <th>Ranking</th>
                <th>Name</th>
                <th>WPM</th>
              </tr>
            </thead>
            <tbody>
              {users.map((u, i) => {
                // Bold the top ten; 1-based rank shown, WPM to one decimal.
                const rowStyle = i < 10 ? topTenStyle : {};
                return (
                  <tr style={rowStyle} key={i}>
                    <td>{i + 1}</td>
                    <td>{u.name}</td>
                    <td>{u.avgWpm.toFixed(1)}</td>
                  </tr>);
              })}
            </tbody>
          </table>
        </div>
      </div>
    </div>);
};

export default LeaderBoard;
--- layout: watch title: TLP9 - 25/03/2020 - M20200325_051556_TLP_9T.jpg date: 2020-03-25 05:15:56 permalink: /2020/03/25/watch/M20200325_051556_TLP_9 capture: TLP9/2020/202003/20200324/M20200325_051556_TLP_9T.jpg ---
<?php

/*
|--------------------------------------------------------------------------
| Web Routes
|--------------------------------------------------------------------------
|
| Here is where you can register web routes for your application. These
| routes are loaded by the RouteServiceProvider within a group which
| contains the "web" middleware group. Now create something great!
|
*/

// Dashboard
Route::get('/')->name('dashboard')->uses('DashboardController')->middleware('auth');

// Auth (login/register are guest-only; logout is open to the session owner)
Route::get('login')->name('login')->uses('Auth\LoginController@showLoginForm')->middleware('guest');
Route::get('register')->name('register')->uses('Auth\LoginController@showRegisterForm')->middleware('guest');
Route::post('login')->name('login.attempt')->uses('Auth\LoginController@login')->middleware('guest');
Route::post('register')->name('register.attempt')->uses('Auth\RegisterController@register')->middleware('guest');
Route::post('logout')->name('logout')->uses('Auth\LoginController@logout');

// Users (full CRUD plus soft-delete restore; all behind auth)
Route::get('users')->name('users')->uses('UsersController@index')->middleware('remember', 'auth');
Route::get('users/create')->name('users.create')->uses('UsersController@create')->middleware('auth');
Route::post('users')->name('users.store')->uses('UsersController@store')->middleware('auth');
Route::get('users/{user}/edit')->name('users.edit')->uses('UsersController@edit')->middleware('auth');
Route::put('users/{user}')->name('users.update')->uses('UsersController@update')->middleware('auth');
Route::delete('users/{user}')->name('users.destroy')->uses('UsersController@destroy')->middleware('auth');
Route::put('users/{user}/restore')->name('users.restore')->uses('UsersController@restore')->middleware('auth');

// Images (catch-all path segment)
Route::get('/img/{path}', 'ImagesController@show')->where('path', '.*');

// Tasks (POST-only endpoints; NOTE(review): no auth middleware — confirm)
Route::post('tasks.add', 'TaskController@addTask')->name('tasks.add');
Route::post('tasks.delete', 'TaskController@deleteTask')->name('tasks.delete');
Route::post('tasks.edit', 'TaskController@editTask')->name('tasks.edit');
import 'package:books_app/models/book.dart';
import 'package:books_app/services/utils.dart';
import 'package:flutter/material.dart';

/// Row of three actions for a [Book]: preview, buy, and read/sample.
class ActionsWidget extends StatelessWidget {
  final Book book;

  ActionsWidget(this.book);

  @override
  Widget build(BuildContext context) {
    // Map the book's access status onto a label/icon pair for the last button.
    String availability;
    IconData icon;
    switch (book.accessViewStatus) {
      case 'SAMPLE':
        availability = 'SAMPLE';
        icon = Icons.description;
        break;
      case 'FULL_PUBLIC_DOMAIN':
        availability = 'READ';
        icon = Icons.book;
        break;
      default:
        availability = 'PAID';
        icon = Icons.attach_money;
    }

    final bool forSale = book.saleability == 'FOR_SALE';
    final bool readable = book.accessViewStatus == 'FULL_PUBLIC_DOMAIN' ||
        book.accessViewStatus == 'SAMPLE';

    return Row(
      mainAxisAlignment: MainAxisAlignment.spaceEvenly,
      children: <Widget>[
        // Always enabled: open the preview link.
        ActionButton(
          icon: Icons.chrome_reader_mode,
          label: 'PREVIEW',
          onPressed: () async {
            await Utils.launchURL(book.previewLink);
          },
        ),
        SizedBox(width: 5.0),
        // Buy button: disabled (null handler) unless the book is for sale.
        ActionButton(
          icon: Icons.shop,
          label: forSale ? '${book.amount} ${book.currencyCode}' : 'N/A',
          onPressed: forSale
              ? () async {
                  await Utils.launchURL(book.buyLink);
                }
              : null,
        ),
        SizedBox(width: 5.0),
        // Read/sample button: disabled unless the book is readable online.
        ActionButton(
          icon: icon,
          label: availability,
          onPressed: readable
              ? () async {
                  await Utils.launchURL(book.webReaderLink);
                }
              : null,
        ),
      ],
    );
  }
}

/// Compact outlined icon button used by [ActionsWidget].
class ActionButton extends StatelessWidget {
  final IconData icon;
  final String label;
  final VoidCallback onPressed;

  ActionButton(
      {@required this.icon, @required this.label, @required this.onPressed});

  @override
  Widget build(BuildContext context) {
    final Text caption = Text(
      label,
      style: TextStyle(fontSize: 10.0),
    );
    return OutlineButton.icon(
      icon: Icon(
        icon,
        size: 16.0,
      ),
      label: caption,
      onPressed: onPressed,
    );
  }
}
<?php

namespace App\Models;

use Illuminate\Database\Eloquent\Model;
use Cviebrock\EloquentSluggable\Sluggable;

/**
 * A single named permission that can be attached to users and that belongs
 * to a permissions group. Slugs are generated from the permission name.
 */
class UserPermission extends Model
{
    use Sluggable;

    // table has no created_at / updated_at columns
    public $timestamps = false;

    // mass assignment is unrestricted for this model
    protected $guarded = [];

    protected $casts = [
        'user_permissions_group_id' => 'integer',
    ];

    /**
     * Sluggable configuration: derive the slug from the name column.
     */
    public function sluggable()
    {
        return [
            'slug' => [
                'source' => 'name',
            ],
        ];
    }

    /** Relation: users that have this permission enabled. */
    public function users()
    {
        return $this->belongsToMany('App\Models\User', 'user_has_permissions');
    }

    /** Relation: group that owns this permission. */
    public function group()
    {
        return $this->belongsTo('App\Models\UserPermissionsGroup');
    }

    // non relational methods

    /** Data view of this record; currently just the model itself. */
    public function _data()
    {
        return $this;
    }

    /** Deletion denial reasons; empty array means deletion is allowed (admin only). */
    public function _deleteAllowed()
    {
        return [];
    }

    /** Post-deletion hook; intentionally a no-op. */
    public function _afterDelete()
    {
    }
}
/****************************************************************************
 *
 * MODULE:  lin.cpp
 * PURPOSE: nifs for liblinear
 *
 * for abbreviated names:
 * . m is the number of training examples
 * . n is the number of features
 * . k is the number of classes
 * . x is a feature matrix/vector/value
 * . y is a class vector/value
 *
 * The basic functionality of liblinear is extended to include predicting
 * calibrated probabilites for SVM models, using Platt scaling. Binary Platt
 * scaling is extended to OVR multiclass using simple normalization.
 *
 * see https://github.com/cjlin1/liblinear for details
 *
 ***************************************************************************/
/*-------------------[       Pre Include Defines       ]-------------------*/
/*-------------------[      Library Include Files      ]-------------------*/
#include <math.h>
/*-------------------[      Project Include Files      ]-------------------*/
#include "deps/liblinear/linear.h"
#include "penelope.hpp"
/*-------------------[      Macros/Constants/Types     ]-------------------*/
// extend the linear model structure to include an optional calibration model;
// prob_a/prob_b are per-class Platt sigmoid parameters (slope/intercept),
// or NULL when the model was trained without probability calibration
typedef struct tag_model : model {
   double* prob_a;
   double* prob_b;
} LINEAR_MODEL;
typedef struct problem      LINEAR_PROBLEM;
typedef struct feature_node LINEAR_NODE;
typedef struct parameter    LINEAR_PARAM;
/*-------------------[        Global Variables         ]-------------------*/
/*-------------------[        Global Prototypes        ]-------------------*/
/*-------------------[        Module Variables         ]-------------------*/
// erlang resource type holding LINEAR_MODEL* instances; registered in
// nif_lin_init and destructed via nif_destruct_model
static ErlNifResourceType* g_model_type = NULL;
/*-------------------[        Module Prototypes        ]-------------------*/
static bool erl2lin_must_calibrate (
   ErlNifEnv*    env,
   ERL_NIF_TERM  options,
   LINEAR_PARAM& params);
static void erl2lin_problem (
   ErlNifEnv*      env,
   ERL_NIF_TERM    x,
   ERL_NIF_TERM    y,
   ERL_NIF_TERM    params,
   LINEAR_PROBLEM* problem);
static LINEAR_MODEL* erl2lin_model (
   ErlNifEnv*   env,
   ERL_NIF_TERM params);
static void erl2lin_params (
   ErlNifEnv*    env,
   ERL_NIF_TERM  options,
   LINEAR_PARAM* params,
   int           training);
static LINEAR_NODE** erl2lin_features (
   ErlNifEnv*   env,
   ERL_NIF_TERM x,
   unsigned     m,
   double       bias);
static LINEAR_NODE* erl2lin_feature (
   ErlNifEnv*   env,
   ERL_NIF_TERM x,
   double       bias);
static double* erl2lin_targets (
   ErlNifEnv*   env,
   ERL_NIF_TERM y,
   unsigned     m);
static void erl2lin_free_problem (
   LINEAR_PROBLEM* problem);
static void erl2lin_free_params (
   LINEAR_PARAM* params);
static void erl2lin_free_model (
   LINEAR_MODEL* model);
static ERL_NIF_TERM lin2erl_model (
   ErlNifEnv*    env,
   LINEAR_MODEL* model);
static LINEAR_MODEL* lin2lin_model (
   model*  source,
   double* prob_a,
   double* prob_b);
static void nif_destruct_model (
   ErlNifEnv* env,
   void*      object);
static void lin_print (
   const char* message);
static void lin_calibrate_train (
   int     m,
   double* decision,
   double* labels,
   double& prob_a,
   double& prob_b);
static double lin_calibrate_predict (
   double decision,
   double prob_a,
   double prob_b);
/*-------------------[          Implementation         ]-------------------*/
/*-----------< FUNCTION: nif_lin_init >--------------------------------------
// Purpose:    linear module initialization
// Parameters: env - erlang environment
// Returns:    1 if successful
//             0 otherwise
---------------------------------------------------------------------------*/
int nif_lin_init (ErlNifEnv* env)
{
   // register the model resource type,
   // which holds trained linear model instances
   ErlNifResourceFlags flags = ERL_NIF_RT_CREATE;
   g_model_type = enif_open_resource_type(
      env,
      NULL,
      "lin_model",
      &nif_destruct_model,
      flags,
      &flags);
   if (!g_model_type)
      return 0;
   // suppress liblinear debug output
   set_print_string_function(&lin_print);
   return 1;
}
/*-----------< FUNCTION: nif_lin_train >-------------------------------------
// Purpose:    trains a linear model
// Parameters: x      - list of feature vectors (floats)
//             y      - list of target labels (integer)
//             params - map of linear parameters
// Returns:    reference to a trained linear model resource
---------------------------------------------------------------------------*/ ERL_NIF_TERM nif_lin_train ( ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) { // validate parameters if (!enif_is_list(env, argv[0])) return enif_make_badarg(env); if (!enif_is_list(env, argv[1])) return enif_make_badarg(env); if (!enif_is_map(env, argv[2])) return enif_make_badarg(env); // train the linear model LINEAR_PROBLEM problem; memset(&problem, 0, sizeof(LINEAR_PROBLEM)); LINEAR_PARAM params; memset(&params, 0, sizeof(LINEAR_PARAM)); model* linear = NULL; double* prob_a = NULL; double* prob_b = NULL; double* prob_d = NULL; double* prob_l = NULL; LINEAR_MODEL** resource = NULL; ERL_NIF_TERM result; try { // extract training parameters and feature/target vectors erl2lin_problem(env, argv[0], argv[1], argv[2], &problem); erl2lin_params(env, argv[2], &params, 1); const char* errors = check_parameter(&problem, &params); if (errors) throw NifError(errors); // train the prediction model linear = CHECKALLOC(train(&problem, &params)); // train the calibration model if (erl2lin_must_calibrate(env, argv[2], params)) { int model_count = linear->nr_class == 2 ? 1 : linear->nr_class; prob_a = nif_alloc<double>(model_count); prob_b = nif_alloc<double>(model_count); prob_d = nif_alloc<double>(problem.l); prob_l = nif_alloc<double>(problem.l); for (int i = 0; i < model_count; i++) { // train a calibration logistic regression model per class for (int j = 0; j < problem.l; j++) { double decision[model_count]; double label = predict_values(linear, problem.x[j], decision); prob_d[j] = decision[i]; prob_l[j] = label == linear->label[i] ? 
1 : -1; } lin_calibrate_train( problem.l, prob_d, prob_l, prob_a[i], prob_b[i]); } } // create an erlang resource to wrap the model CHECKALLOC(resource = (LINEAR_MODEL**)enif_alloc_resource( g_model_type, sizeof(LINEAR_MODEL*))); CHECKALLOC(*resource = lin2lin_model(linear, prob_a, prob_b)); prob_a = NULL; prob_b = NULL; result = enif_make_resource(env, resource); // relinquish the resource to erlang enif_release_resource(resource); } catch (NifError& e) { if (resource && *resource) erl2lin_free_model((LINEAR_MODEL*)*resource); result = e.to_term(env); } // free the model using the liblinear allocator if (linear != NULL) free_and_destroy_model(&linear); if (prob_a != NULL) { nif_free(prob_a); nif_free(prob_b); } nif_free(prob_d); nif_free(prob_l); // release the training parameters erl2lin_free_problem(&problem); erl2lin_free_params(&params); return result; } /*-----------< FUNCTION: nif_lin_export >------------------------------------ // Purpose: extracts model parameters from a linear model resource, // which is useful for persisting a model externally // Parameters: model - erlang resource wrapping the trained model // Returns: a map containing the model parameters ---------------------------------------------------------------------------*/ ERL_NIF_TERM nif_lin_export ( ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) { // validate parameters if (!enif_is_ref(env, argv[0])) return enif_make_badarg(env); // extract the model resource LINEAR_MODEL** resource = NULL; if (!enif_get_resource(env, argv[0], g_model_type, (void**)&resource)) return enif_make_badarg(env); // convert the resource to a map try { return lin2erl_model(env, *resource); } catch (NifError& e) { return e.to_term(env); } } /*-----------< FUNCTION: nif_lin_compile >----------------------------------- // Purpose: converts the map representation of a model to the // native linear model structure // Parameters: model - map containing model parameters // Returns: reference to a trained linear 
model resource
---------------------------------------------------------------------------*/
ERL_NIF_TERM nif_lin_compile (
   ErlNifEnv*         env,
   int                argc,
   const ERL_NIF_TERM argv[])
{
   // validate parameters
   if (!enif_is_map(env, argv[0]))
      return enif_make_badarg(env);
   // compile the model
   LINEAR_MODEL* model = NULL;
   try {
      // decode the map into a freshly allocated model
      model = erl2lin_model(env, argv[0]);
      // create an erlang resource to wrap the model;
      // on success the resource owns the model pointer
      LINEAR_MODEL** resource = (LINEAR_MODEL**)enif_alloc_resource(
         g_model_type,
         sizeof(LINEAR_MODEL*));
      CHECKALLOC(resource);
      *resource = model;
      ERL_NIF_TERM result = enif_make_resource(env, resource);
      // relinquish the resource to erlang
      enif_release_resource(resource);
      return result;
   } catch (NifError& e) {
      // model not yet owned by a resource here, so free it directly
      if (model)
         erl2lin_free_model(model);
      return e.to_term(env);
   }
}
/*-----------< FUNCTION: nif_lin_predict_class >-----------------------------
// Purpose:    predicts a single target class from a feature vector
// Parameters: model - reference to the trained linear model
//             x     - feature vector to predict (binary of floats)
// Returns:    predicted integer class
---------------------------------------------------------------------------*/
ERL_NIF_TERM nif_lin_predict_class (
   ErlNifEnv*         env,
   int                argc,
   const ERL_NIF_TERM argv[])
{
   ERL_NIF_TERM result;
   LINEAR_NODE* features = NULL;
   LINEAR_MODEL** resource = NULL;
   // validate parameters
   if (!enif_get_resource(env, argv[0], g_model_type, (void**)&resource))
      return enif_make_badarg(env);
   LINEAR_MODEL* model = *resource;
   if (!enif_is_binary(env, argv[1]))
      return enif_make_badarg(env);
   try {
      // extract the feature vector (sparse, bias appended when configured)
      features = erl2lin_feature(env, argv[1], model->bias);
      // predict the target class
      double cls = predict(model, features);
      // liblinear returns the class label as a double; truncate to int
      result = enif_make_int(env, (int)cls);
   } catch (NifError& e) {
      result = e.to_term(env);
   }
   nif_free(features);
   return result;
}
/*-----------< FUNCTION: nif_lin_predict_probability >-----------------------
// Purpose:    predicts class probabilities from a feature vector
// Parameters: model - reference to the trained
linear model
//             x     - feature vector to predict
// Returns:    list of probabilities (double) for each class, in the order
//             that the classes appear in the model
---------------------------------------------------------------------------*/
ERL_NIF_TERM nif_lin_predict_probability (
   ErlNifEnv*         env,
   int                argc,
   const ERL_NIF_TERM argv[])
{
   ERL_NIF_TERM result;
   LINEAR_NODE* features = NULL;
   LINEAR_MODEL** resource = NULL;
   // validate parameters
   if (!enif_get_resource(env, argv[0], g_model_type, (void**)&resource))
      return enif_make_badarg(env);
   LINEAR_MODEL* model = *resource;
   if (!enif_is_binary(env, argv[1]))
      return enif_make_badarg(env);
   try {
      // the model must either support probabilities natively (LR solvers)
      // or carry a trained Platt calibration model (prob_a)
      CHECK(check_probability_model(model) || model->prob_a,
         "probability_not_trained");
      // extract the feature vector
      features = erl2lin_feature(env, argv[1], model->bias);
      // predict the class probabilities directly if supported
      double prob[model->nr_class];
      if (check_probability_model(model))
         predict_probability(model, features, prob);
      else {
         // otherwise, compute calibrated probabilities;
         // binary models have a single OVR sub-model
         int model_count = model->nr_class == 2 ? 1 : model->nr_class;
         double decision[model_count];
         predict_values(model, features, decision);
         for (int i = 0; i < model_count; i++)
            prob[i] = lin_calibrate_predict(
               decision[i],
               model->prob_a[i],
               model->prob_b[i]);
         // normalize the calibrated probabilities
         if (model_count == 1)
            prob[1] = 1 - prob[0];   // binary: complement of the positive class
         else {
            double sum = 0;
            for (int i = 0; i < model->nr_class; i++)
               sum += prob[i];
            for (int i = 0; i < model->nr_class; i++)
               prob[i] = prob[i] / sum;
         }
      }
      // return the list of {label, probability} tuples
      ERL_NIF_TERM results[model->nr_class];
      for (int i = 0; i < model->nr_class; i++)
         results[i] = enif_make_tuple2(env,
            enif_make_int(env, model->label[i]),
            enif_make_double(env, prob[i]));
      result = enif_make_list_from_array(env, results, model->nr_class);
   } catch (NifError& e) {
      result = e.to_term(env);
   }
   nif_free(features);
   return result;
}
/*-----------< FUNCTION: nif_destruct_model >--------------------------------
// Purpose:    frees the memory associated with a linear model resource
// Parameters: env    - current erlang environment
//             object - model resource reference to free
// Returns:    none
---------------------------------------------------------------------------*/
void nif_destruct_model (ErlNifEnv* env, void* object)
{
   // object is the resource payload: a LINEAR_MODEL* slot
   erl2lin_free_model(*(LINEAR_MODEL**)object);
}
/*-----------< FUNCTION: erl2lin_must_calibrate >----------------------------
// Purpose:    determines whether a calibration model should be built
//             (only SVM-type solvers need Platt calibration; LR solvers
//             support probabilities natively)
// Parameters: env     - current erlang environment
//             options - training parameters
//             params  - linear model parameters
// Returns:    true if a calibration model is required
//             false otherwise
---------------------------------------------------------------------------*/
bool erl2lin_must_calibrate (
   ErlNifEnv*    env,
   ERL_NIF_TERM  options,
   LINEAR_PARAM& params)
{
   ERL_NIF_TERM key;
   ERL_NIF_TERM value;
   switch (params.solver_type) {
      case L2R_L2LOSS_SVC_DUAL:
      case L2R_L2LOSS_SVC:
      case L2R_L1LOSS_SVC_DUAL:
      case MCSVM_CS:
      case L1R_L2LOSS_SVC:
         // calibrate only when the caller requested probabilities
         key = enif_make_atom(env, "probability?");
         CHECK(enif_get_map_value(env, options, key, &value), "missing_prob");
         CHECK(enif_is_atom(env, value), "invalid_prob");
         return enif_is_identical(value, enif_make_atom(env, "true"))
            ? true
            : false;
      default:
         // todo: proxy auth etc.
         return false;
   }
}
/*-----------< FUNCTION: erl2lin_problem >-----------------------------------
// Purpose:    constructs a linear problem from feature/target vectors
// Parameters: env     - current erlang environment
//             x       - training feature vector list
//             y       - list of target class labels
//             params  - additional problem parameters
//             problem - return the linear problem via here
// Returns:    pointer to problem
---------------------------------------------------------------------------*/
void erl2lin_problem (
   ErlNifEnv*      env,
   ERL_NIF_TERM    x,
   ERL_NIF_TERM    y,
   ERL_NIF_TERM    params,
   LINEAR_PROBLEM* problem)
{
   ERL_NIF_TERM key;
   ERL_NIF_TERM value;
   ERL_NIF_TERM head;
   ERL_NIF_TERM tail;
   ErlNifBinary vector;
   // get bias value (negative disables the bias feature)
   key = enif_make_atom(env, "bias");
   CHECK(enif_get_map_value(env, params, key, &value), "missing_bias");
   CHECK(enif_get_double(env, value, &problem->bias), "invalid_bias");
   // get sample matrix size; requires at least one example
   unsigned m;
   CHECK(enif_get_list_length(env, x, &m), "invalid_x");
   CHECK(enif_get_list_cell(env, x, &head, &tail), "missing_features");
   problem->l = m;
   // get feature vector size from the first example's binary;
   // features are packed 32-bit floats
   CHECK(enif_inspect_binary(env, head, &vector), "invalid_features");
   int n = vector.size / sizeof(float);
   problem->n = problem->bias < 0 ? n : n + 1;
   // copy feature/target values
   problem->x = erl2lin_features(env, x, m, problem->bias);
   problem->y = erl2lin_targets(env, y, m);
}
/*-----------< FUNCTION: erl2lin_model >-------------------------------------
// Purpose:    constructs a linear model structure from a map representation
// Parameters: env    - current erlang environment
//             params - model parameters (constructed via nif_lin_export)
// Returns:    pointer to the allocated and constructed model
---------------------------------------------------------------------------*/
LINEAR_MODEL* erl2lin_model (ErlNifEnv* env, ERL_NIF_TERM params)
{
   // NOTE: assumes nif_alloc zero-initializes, so unassigned pointer members
   // are NULL for erl2lin_free_model in the catch path — confirm in penelope.hpp
   LINEAR_MODEL* model = nif_alloc<LINEAR_MODEL>();
   try {
      ERL_NIF_TERM key;
      ERL_NIF_TERM value;
      ERL_NIF_TERM tail;
      ErlNifBinary vector;
      // extract model parameters
      erl2lin_params(env, params, &model->param, 0);
      // extract model version (only version 1 is supported)
      int version = 0;
      key = enif_make_atom(env, "version");
      CHECK(enif_get_map_value(env, params, key, &value), "missing_version");
      CHECK(enif_get_int(env, value, &version), "invalid_version");
      CHECK(version == 1, "invalid_version");
      // extract class count
      unsigned class_count = 0;
      key = enif_make_atom(env, "classes");
      CHECK(enif_get_map_value(env, params, key, &tail), "missing_classes");
      CHECK(enif_get_list_length(env, tail, &class_count), "invalid_classes");
      model->nr_class = class_count;
      // extract class list
      model->label = nif_alloc<int>(model->nr_class);
      for (int i = 0; i < model->nr_class; i++) {
         CHECK(enif_get_list_cell(env, tail, &value, &tail), "missing_class");
         CHECK(enif_get_int(env, value, &model->label[i]), "invalid_class");
      }
      // extract bias: a binary intercept entry implies bias was trained
      model->bias = -1;
      key = enif_make_atom(env, "intercept");
      if (enif_get_map_value(env, params, key, &value))
         if (enif_is_binary(env, value))
            model->bias = 1;
      // extract feature count from the first coefficient binary (floats)
      key = enif_make_atom(env, "coef");
      CHECK(enif_get_map_value(env, params, key, &tail), "missing_coef");
      CHECK(enif_get_list_cell(env, tail, &value, &tail), "missing_coef");
      CHECK(enif_inspect_binary(env, value, &vector), "invalid_coef");
      model->nr_feature = vector.size / sizeof(float);
      // extract coefficients; liblinear stores w interleaved by class
      // (w[feature * model_count + class])
      int model_count = model->nr_class == 2 ? 1 : model->nr_class;
      int weight_count = model->bias >= 0
         ? model->nr_feature + 1
         : model->nr_feature;
      model->w = nif_alloc<double>(model_count * weight_count);
      for (int i = 0; i < model_count; i++) {
         for (int j = 0; j < model->nr_feature; j++)
            model->w[j * model_count + i] = ((float*)vector.data)[j];
         if (i < model_count - 1) {
            CHECK(enif_get_list_cell(env, tail, &value, &tail), "missing_coef");
            CHECK(enif_inspect_binary(env, value, &vector), "invalid_coef");
         }
      }
      // extract intercepts (stored as the final weight row)
      if (model->bias >= 0) {
         key = enif_make_atom(env, "intercept");
         CHECK(enif_get_map_value(env, params, key, &value),
            "missing_intercept");
         CHECK(enif_inspect_binary(env, value, &vector), "invalid_intercept");
         for (int i = 0; i < model_count; i++)
            model->w[model->nr_feature * model_count + i] =
               ((float*)vector.data)[i];
      }
      // extract prob_a (Platt slopes; nil when model is uncalibrated)
      key = enif_make_atom(env, "prob_a");
      CHECK(enif_get_map_value(env, params, key, &value), "missing_prob_a");
      if (!enif_is_identical(value, enif_make_atom(env, "nil"))) {
         CHECK(enif_inspect_binary(env, value, &vector), "invalid_prob_a");
         model->prob_a = nif_alloc<double>(vector.size / sizeof(float));
         for (int i = 0; i < (int)(vector.size / sizeof(float)); i++)
            model->prob_a[i] = ((float*)vector.data)[i];
      }
      // extract prob_b (Platt intercepts; nil when model is uncalibrated)
      key = enif_make_atom(env, "prob_b");
      CHECK(enif_get_map_value(env, params, key, &value), "missing_prob_b");
      if (!enif_is_identical(value, enif_make_atom(env, "nil"))) {
         CHECK(enif_inspect_binary(env, value, &vector), "invalid_prob_b");
         model->prob_b = nif_alloc<double>(vector.size / sizeof(float));
         for (int i = 0; i < (int)(vector.size / sizeof(float)); i++)
            model->prob_b[i] = ((float*)vector.data)[i];
      }
      return model;
   } catch (NifError& e) {
      erl2lin_free_model(model);
      throw;
   }
}
/*-----------< FUNCTION: erl2lin_params >------------------------------------
// Purpose:    constructs a linear param
structure from an options map // Parameters: env - current erlang environment // options - linear model options map // params - returns linear parameters via here // training - true if the model is being trained // false if we are loading it from a trained model // Returns: pointer to params ---------------------------------------------------------------------------*/ void erl2lin_params ( ErlNifEnv* env, ERL_NIF_TERM options, LINEAR_PARAM* params, int training) { ERL_NIF_TERM key; ERL_NIF_TERM value; // decode solver type key = enif_make_atom(env, "solver"); CHECK(enif_get_map_value(env, options, key, &value), "missing_solver"); if (enif_is_identical(value, enif_make_atom(env, "l2r_lr"))) params->solver_type = L2R_LR; else if (enif_is_identical(value, enif_make_atom(env, "l2r_l2loss_svc_dual"))) params->solver_type = L2R_L2LOSS_SVC_DUAL; else if (enif_is_identical(value, enif_make_atom(env, "l2r_l2loss_svc"))) params->solver_type = L2R_L2LOSS_SVC; else if (enif_is_identical(value, enif_make_atom(env, "l2r_l1loss_svc_dual"))) params->solver_type = L2R_L1LOSS_SVC_DUAL; else if (enif_is_identical(value, enif_make_atom(env, "mcsvm_cs"))) params->solver_type = MCSVM_CS; else if (enif_is_identical(value, enif_make_atom(env, "l1r_l2loss_svc"))) params->solver_type = L1R_L2LOSS_SVC; else if (enif_is_identical(value, enif_make_atom(env, "l1r_lr"))) params->solver_type = L1R_LR; else if (enif_is_identical(value, enif_make_atom(env, "l2r_lr_dual"))) params->solver_type = L2R_LR_DUAL; else if (enif_is_identical(value, enif_make_atom(env, "l2r_l2loss_svr"))) params->solver_type = L2R_L2LOSS_SVR; else if (enif_is_identical(value, enif_make_atom(env, "l2r_l2loss_svr_dual"))) params->solver_type = L2R_L2LOSS_SVR_DUAL; else if (enif_is_identical(value, enif_make_atom(env, "l2r_l1loss_svr_dual"))) params->solver_type = L2R_L1LOSS_SVR_DUAL; else throw NifError("invalid_solver"); // decode training parameters if (training) { // decode cost parameter key = enif_make_atom(env, "c"); 
CHECK(enif_get_map_value(env, options, key, &value), "missing_c"); CHECK(enif_get_double(env, value, &params->C), "invalid_c"); // decode class weights size_t weight_count; key = enif_make_atom(env, "weights"); CHECK(enif_get_map_value(env, options, key, &value), "missing_weights"); CHECK(enif_get_map_size(env, value, &weight_count), "invalid_weights"); if (weight_count > 0) { params->nr_weight = weight_count; params->weight_label = nif_alloc<int>(weight_count); params->weight = nif_alloc<double>(weight_count); ErlNifMapIterator iter; CHECKALLOC(enif_map_iterator_create( env, value, &iter, ERL_NIF_MAP_ITERATOR_FIRST)); try { for (int i = 0; i < (int)weight_count; i++) { CHECK(enif_map_iterator_get_pair( env, &iter, &key, &value), "invalid_weight"); CHECK(enif_get_int( env, key, &params->weight_label[i]), "invalid_weight"); CHECK(enif_get_double( env, value, &params->weight[i]), "invalid_weight"); enif_map_iterator_next(env, &iter); } enif_map_iterator_destroy(env, &iter); } catch (...) { enif_map_iterator_destroy(env, &iter); throw; } } // decode stopping criteria key = enif_make_atom(env, "epsilon"); CHECK(enif_get_map_value(env, options, key, &value), "missing_epsilon"); CHECK(enif_get_double(env, value, &params->eps)); // decode SVR sensitivity key = enif_make_atom(env, "p"); CHECK(enif_get_map_value(env, options, key, &value), "missing_p"); CHECK(enif_get_double(env, value, &params->p)); } } /*-----------< FUNCTION: erl2lin_features >---------------------------------- // Purpose: converts a list of feature vectors to a linear sparse matrix // Parameters: env - current erlang environment // x - list of feature vectors (floats) // m - number of vectors in the feature matrix // bias - feature vector bias term // Returns: pointer to an array of sparse feature vectors ---------------------------------------------------------------------------*/ LINEAR_NODE** erl2lin_features ( ErlNifEnv* env, ERL_NIF_TERM x, unsigned m, double bias) { LINEAR_NODE** nodes = 
nif_alloc<LINEAR_NODE*>(m); try { for (int i = 0; i < (int)m; i++) { ERL_NIF_TERM head; CHECK(enif_get_list_cell(env, x, &head, &x), "missing_features"); nodes[i] = erl2lin_feature(env, head, bias); } return nodes; } catch (...) { for (int i = 0; i < (int)m; i++) nif_free(nodes[i]); nif_free(nodes); throw; } } /*-----------< FUNCTION: erl2lin_feature >----------------------------------- // Purpose: converts a feature vector to a linear sparse vector // Parameters: env - current erlang environment // x - feature vector (floats) // bias - bias term // Returns: pointer to a sparse feature vector ---------------------------------------------------------------------------*/ LINEAR_NODE* erl2lin_feature (ErlNifEnv* env, ERL_NIF_TERM x, double bias) { ErlNifBinary vector; CHECK(enif_inspect_binary(env, x, &vector), "invalid_feature"); // copy the feature vector to the sparse array int n = vector.size / sizeof(float); LINEAR_NODE* nodes = nif_alloc<LINEAR_NODE>(bias >= 0 ? n + 2 : n + 1); int j = 0; while (j < n) { nodes[j] = (LINEAR_NODE){ .index = j + 1, .value = ((float*)vector.data)[j] }; j++; } // add the bias term if specified if (bias >= 0) { nodes[j] = (LINEAR_NODE){ .index = j + 1, .value = bias }; j++; } // terminate the sparse vector with -1 per liblinear spec nodes[j] = (LINEAR_NODE){ .index = -1, .value = 0 }; return nodes; } /*-----------< FUNCTION: erl2lin_targets >----------------------------------- // Purpose: converts a list of target labels to an array of labels // Parameters: env - current erlang environment // y - list of target labels (integers) // m - number of training labels // Returns: array of doubles representing the target labels ---------------------------------------------------------------------------*/ double* erl2lin_targets (ErlNifEnv* env, ERL_NIF_TERM y, unsigned m) { double* targets = nif_alloc<double>(m); try { for (int i = 0; i < (int)m; i++) { // get the list head and advance the tail ERL_NIF_TERM head; CHECK(enif_get_list_cell(env, 
y, &head, &y), "missing_target"); // retrieve the target label value int cls; CHECK(enif_get_int(env, head, &cls), "invalid_target"); targets[i] = cls; } return targets; } catch (...) { nif_free(targets); throw; } } /*-----------< FUNCTION: erl2lin_free_problem >------------------------------ // Purpose: frees the memory associated with a linear problem structure // Parameters: problem - structure to free // Returns: none ---------------------------------------------------------------------------*/ void erl2lin_free_problem (LINEAR_PROBLEM* problem) { if (problem->x) for (int i = 0; i < problem->l; i++) nif_free(problem->x[i]); nif_free(problem->x); problem->x = NULL; nif_free(problem->y); problem->y = NULL; } /*-----------< FUNCTION: erl2lin_free_params >------------------------------- // Purpose: frees the memory associated with a linear parameter structure // Parameters: params - structure to free // Returns: none ---------------------------------------------------------------------------*/ void erl2lin_free_params (LINEAR_PARAM* params) { nif_free(params->weight_label); params->weight_label = NULL; nif_free(params->weight); params->weight = NULL; } /*-----------< FUNCTION: lin2erl_model >------------------------------------- // Purpose: converts a linear model to an erlang map // Parameters: env - current erlang environment // model - linear model structure to convert // Returns: erlang map containing the model parameters ---------------------------------------------------------------------------*/ ERL_NIF_TERM lin2erl_model (ErlNifEnv* env, LINEAR_MODEL* model) { ERL_NIF_TERM result = enif_make_new_map(env); ERL_NIF_TERM key; ERL_NIF_TERM value; ErlNifBinary vector; // encode version key = enif_make_atom(env, "version"); value = enif_make_int(env, 1); CHECKALLOC(enif_make_map_put(env, result, key, value, &result)); // encode solver type key = enif_make_atom(env, "solver"); switch (model->param.solver_type) { case L2R_LR: value = enif_make_atom(env, "l2r_lr"); 
break; case L2R_L2LOSS_SVC_DUAL: value = enif_make_atom(env, "l2r_l2loss_svc_dual"); break; case L2R_L2LOSS_SVC: value = enif_make_atom(env, "l2r_l2loss_svc"); break; case L2R_L1LOSS_SVC_DUAL: value = enif_make_atom(env, "l2r_l1loss_svc_dual"); break; case MCSVM_CS: value = enif_make_atom(env, "mcsvm_cs"); break; case L1R_L2LOSS_SVC: value = enif_make_atom(env, "l1r_l2loss_svc"); break; case L1R_LR: value = enif_make_atom(env, "l1r_lr"); break; case L2R_LR_DUAL: value = enif_make_atom(env, "l2r_lr_dual"); break; case L2R_L2LOSS_SVR: value = enif_make_atom(env, "l2r_l2loss_svr"); break; case L2R_L2LOSS_SVR_DUAL: value = enif_make_atom(env, "l2r_l2loss_svr_dual"); break; case L2R_L1LOSS_SVR_DUAL: value = enif_make_atom(env, "l2r_l1loss_svr_dual"); break; default: value = enif_make_atom(env, "nil"); } CHECKALLOC(enif_make_map_put(env, result, key, value, &result)); // encode classes ERL_NIF_TERM classes[model->nr_class]; memset(classes, 0, sizeof(classes)); for (int i = 0; i < model->nr_class; i++) classes[i] = enif_make_int(env, model->label[i]); key = enif_make_atom(env, "classes"); value = enif_make_list_from_array(env, classes, model->nr_class); CHECKALLOC(enif_make_map_put(env, result, key, value, &result)); // encode coefficients int model_count = model->nr_class == 2 ? 
1 : model->nr_class; ERL_NIF_TERM coefs[model_count]; memset(coefs, 0, sizeof(coefs)); for (int i = 0; i < model_count; i++) { int coef_count = model->nr_feature; CHECKALLOC(enif_alloc_binary(coef_count * sizeof(float), &vector)); for (int j = 0; j < coef_count; j++) ((float*)vector.data)[j] = model->w[j * model_count + i]; coefs[i] = enif_make_binary(env, &vector); } key = enif_make_atom(env, "coef"); value = enif_make_list_from_array(env, coefs, model_count); CHECKALLOC(enif_make_map_put(env, result, key, value, &result)); // encode intercepts key = enif_make_atom(env, "intercept"); value = enif_make_double(env, 0); if (model->bias >= 0) { CHECKALLOC(enif_alloc_binary(model_count * sizeof(float), &vector)); for (int i = 0; i < model_count; i++) ((float*)vector.data)[i] = model->w[model->nr_feature * model_count + i]; value = enif_make_binary(env, &vector); } CHECKALLOC(enif_make_map_put(env, result, key, value, &result)); // encode prob_a key = enif_make_atom(env, "prob_a"); if (model->prob_a) { CHECKALLOC(enif_alloc_binary(model_count * sizeof(float), &vector)); for (int i = 0; i < model_count; i++) ((float*)vector.data)[i] = model->prob_a[i]; value = enif_make_binary(env, &vector); CHECKALLOC(enif_make_map_put(env, result, key, value, &result)); } else { value = enif_make_atom(env, "nil"); CHECKALLOC(enif_make_map_put(env, result, key, value, &result)); } // encode prob_b key = enif_make_atom(env, "prob_b"); if (model->prob_b) { CHECKALLOC(enif_alloc_binary(model_count * sizeof(float), &vector)); for (int i = 0; i < model_count; i++) ((float*)vector.data)[i] = model->prob_b[i]; value = enif_make_binary(env, &vector); CHECKALLOC(enif_make_map_put(env, result, key, value, &result)); } else { value = enif_make_atom(env, "nil"); CHECKALLOC(enif_make_map_put(env, result, key, value, &result)); } return result; } /*-----------< FUNCTION: lin2lin_model >------------------------------------- // Purpose: clones a linear model structure // Parameters: source - linear 
model structure to copy // prob_a - calibration probability slope variables // prob_b - calibration probability intercept variables // Returns: cloned linear model ---------------------------------------------------------------------------*/ LINEAR_MODEL* lin2lin_model (model* source, double* prob_a, double* prob_b) { LINEAR_MODEL* target = nif_alloc<LINEAR_MODEL>(); try { // copy scalar fields + clear class weights (training-only) memcpy(&target->param, &source->param, sizeof(LINEAR_PARAM)); target->nr_class = source->nr_class; target->nr_feature = source->nr_feature; target->bias = source->bias; target->param.weight_label = NULL; target->param.weight = NULL; target->prob_a = prob_a; target->prob_b = prob_b; // copy weights int model_count = source->nr_class == 2 ? 1 : source->nr_class; int weight_count = source->bias >= 0 ? source->nr_feature + 1 : source->nr_feature; target->w = nif_clone(source->w, model_count * weight_count); // copy labels target->label = nif_clone(source->label, target->nr_class); } catch (...) 
{ erl2lin_free_model(target); throw; } return target; } /*-----------< FUNCTION: erl2lin_free_model >-------------------------------- // Purpose: frees the memory associated with a linear model // Parameters: model - linear model structure to free // Returns: none ---------------------------------------------------------------------------*/ void erl2lin_free_model (LINEAR_MODEL* model) { if (model != NULL) { erl2lin_free_params(&model->param); nif_free(model->w); nif_free(model->label); nif_free(model->prob_a); nif_free(model->prob_b); } nif_free(model); } /*-----------< FUNCTION: lin_print >----------------------------------------- // Purpose: liblinear debug output callback // Parameters: message - message to display // Returns: none ---------------------------------------------------------------------------*/ void lin_print (const char* message) { // suppress debug output } /*-----------< FUNCTION: lin_calibrate_train >------------------------------- // Purpose: calibrates decision outputs to class probabilities using // univariate binary logistic regression // lifted from libsvm sigmoid_train, which is not exported // Parameters: m - number of training examples // decision - decision ouputs for each training example // used as X in the calibration model // labels - class labels for each training example (pos/neg) // prob_a - return regression slope parameter via here // prob_b - return regression intercept parameter via here // Returns: none ---------------------------------------------------------------------------*/ void lin_calibrate_train ( int m, double* decision, double* labels, double& prob_a, double& prob_b) { double prior1=0, prior0 = 0; int i; for (i=0;i<m;i++) if (labels[i] > 0) prior1+=1; else prior0+=1; int max_iter=100; // Maximal number of iterations double min_step=1e-10; // Minimal step taken in line search double sigma=1e-12; // For numerically strict PD of Hessian double eps=1e-5; double hiTarget=(prior1+1.0)/(prior1+2.0); double 
loTarget=1/(prior0+2.0); double *t=nif_alloc<double>(m); double fApB,p,q,h11,h22,h21,g1,g2,det,dA,dB,gd,stepsize; double newA,newB,newf,d1,d2; int iter; // Initial Point and Initial Fun Value prob_a=0.0; prob_b=log((prior0+1.0)/(prior1+1.0)); double fval = 0.0; for (i=0;i<m;i++) { if (labels[i]>0) t[i]=hiTarget; else t[i]=loTarget; fApB = decision[i]*prob_a+prob_b; if (fApB>=0) fval += t[i]*fApB + log(1+exp(-fApB)); else fval += (t[i] - 1)*fApB +log(1+exp(fApB)); } for (iter=0;iter<max_iter;iter++) { // Update Gradient and Hessian (use H' = H + sigma I) h11=sigma; // numerically ensures strict PD h22=sigma; h21=0.0;g1=0.0;g2=0.0; for (i=0;i<m;i++) { fApB = decision[i]*prob_a+prob_b; if (fApB >= 0) { p=exp(-fApB)/(1.0+exp(-fApB)); q=1.0/(1.0+exp(-fApB)); } else { p=1.0/(1.0+exp(fApB)); q=exp(fApB)/(1.0+exp(fApB)); } d2=p*q; h11+=decision[i]*decision[i]*d2; h22+=d2; h21+=decision[i]*d2; d1=t[i]-p; g1+=decision[i]*d1; g2+=d1; } // Stopping Criteria if (fabs(g1)<eps && fabs(g2)<eps) break; // Finding Newton direction: -inv(H') * g det=h11*h22-h21*h21; dA=-(h22*g1 - h21 * g2) / det; dB=-(-h21*g1+ h11 * g2) / det; gd=g1*dA+g2*dB; stepsize = 1; // Line Search while (stepsize >= min_step) { newA = prob_a + stepsize * dA; newB = prob_b + stepsize * dB; // New function value newf = 0.0; for (i=0;i<m;i++) { fApB = decision[i]*newA+newB; if (fApB >= 0) newf += t[i]*fApB + log(1+exp(-fApB)); else newf += (t[i] - 1)*fApB +log(1+exp(fApB)); } // Check sufficient decrease if (newf<fval+0.0001*stepsize*gd) { prob_a=newA;prob_b=newB;fval=newf; break; } else stepsize = stepsize / 2.0; } CHECK(stepsize >= min_step, "calibration line search failed"); } CHECK(iter < max_iter, "exceeded calibration max iterations"); nif_free(t); } /*-----------< FUNCTION: lin_calibrate_predict >----------------------------- // Purpose: produces a calibrated probability estimate for a // non-probabilistic classifier (like svm) // lifted from libsvm sigmoid_predict, which is not exported // Parameters: 
decision - decision output, used as x in the calibration model // prob_a - regression slope parameter // prob_b - regression intercept parameter // Returns: none ---------------------------------------------------------------------------*/ double lin_calibrate_predict ( double decision, double prob_a, double prob_b) { double fApB = decision*prob_a+prob_b; // 1-p used later; avoid catastrophic cancellation if (fApB >= 0) return exp(-fApB)/(1.0+exp(-fApB)); else return 1.0/(1+exp(fApB)) ; }
select r.a, s.a, t.a from R2 r, S2 s, T2 t where r.b=s.a and s.b=t.a;
<div align="center">
  <a href="http://vue-scaff.joenix.com/" target="_blank">
    <img width="180" src="http://oss.joenix.com/vue-scaff/logo.png" alt="logo">
  </a>
</div>

<div align="center">
  <a target="_blank" href="https://github.com/vue-scaff/vue-scaff-demo/archive/vue-scaff-demo-v1.0.zip">
    <img src="https://img.shields.io/badge/Download-v5.1.12-brightgreen" alt="download">
  </a>
  <a target="_blank" href="https://www.npmjs.com/package/@scaff/vue-cli-scaff">
    <img src="https://img.shields.io/badge/npm-v5.2.12-ea2039" alt="npm">
  </a>
  <a target="_blank" href="https://github.com/vue-scaff/vue-cli-scaff">
    <img src="https://img.shields.io/badge/github-v5.2.12-blue" alt="github">
  </a>
  <a target="_blank" href="https://github.com/vue-scaff/vue-cli-scaff/blob/master/LICENSE">
    <img src="https://img.shields.io/badge/license-MIT-green" alt="license">
  </a>
  <a target="_blank" href="https://discord.gg/xxu5V5C">
    <img src="https://img.shields.io/badge/chat-discord-738bd7" alt="discord">
  </a>
</div>

# Vue Scaff

Immersion, Freedom, Out-Of-The-Box

## Core Team

- [Joenix](http://joenix.com/)
- Anko

## Documentation

See [http://vue-scaff.joenix.com/](http://vue-scaff.joenix.com/)

## Repo

See [Github Vue-Scaff](https://github.com/vue-scaff)

### Get started with Vue-Scaff

```sh
# install
yarn global add @vue/cli

# create an application
vue create -p vue-scaff/vue-cli-preset --clone <project-name>

# start coding
yarn serve

# build project
yarn build
```

## Demos

### H5

- [Preview](http://demo.vue-scaff.com)
- [Download](https://github.com/vue-scaff/vue-scaff-demo/archive/vue-scaff-demo-v1.0.zip)

### Admin

- [Preview](http://admin.vue-scaff.com)
- [Download](https://github.com/vue-scaff/vue-scaff-admin/archive/vue-scaff-admin-v1.0.zip)

### Electron

- [Download](https://github.com/vue-scaff/vue-scaff-electron/archive/vue-scaff-electron.zip)

## License

[MIT](https://github.com/vue-scaff/vue-cli-scaff/blob/master/LICENSE)
package otto import ( "math" "strconv" ) // Number func numberValueFromNumberArgumentList(argumentList []Value) Value { if len(argumentList) > 0 { return toNumber(argumentList[0]) } return toValue_int(0) } func builtinNumber(call FunctionCall) Value { return numberValueFromNumberArgumentList(call.ArgumentList) } func builtinNewNumber(self *_object, _ Value, argumentList []Value) Value { return toValue_object(self.runtime.newNumber(numberValueFromNumberArgumentList(argumentList))) } func builtinNumber_toString(call FunctionCall) Value { // Will throw a TypeError if ThisObject is not a Number value := call.thisClassObject("Number").primitiveValue() radix := 10 radixArgument := call.Argument(0) if radixArgument.IsDefined() { integer := toIntegerFloat(radixArgument) if integer < 2 || integer > 36 { panic(newRangeError("RangeError: toString() radix must be between 2 and 36")) } radix = int(integer) } if radix == 10 { return toValue_string(toString(value)) } return toValue_string(numberToStringRadix(value, radix)) } func builtinNumber_valueOf(call FunctionCall) Value { return call.thisClassObject("Number").primitiveValue() } func builtinNumber_toFixed(call FunctionCall) Value { precision := toIntegerFloat(call.Argument(0)) if 20 < precision || 0 > precision { panic(newRangeError("toFixed() precision must be between 0 and 20")) } if call.This.IsNaN() { return toValue_string("NaN") } value := toFloat(call.This) if math.Abs(value) >= 1e21 { return toValue_string(floatToString(value, 64)) } return toValue_string(strconv.FormatFloat(toFloat(call.This), 'f', int(precision), 64)) } func builtinNumber_toExponential(call FunctionCall) Value { if call.This.IsNaN() { return toValue_string("NaN") } precision := float64(-1) if value := call.Argument(0); value.IsDefined() { precision = toIntegerFloat(value) if 0 > precision { panic(newRangeError("RangeError: toExponential() precision must be greater than 0")) } } return toValue_string(strconv.FormatFloat(toFloat(call.This), 'e', 
int(precision), 64)) } func builtinNumber_toPrecision(call FunctionCall) Value { if call.This.IsNaN() { return toValue_string("NaN") } value := call.Argument(0) if value.IsUndefined() { return toValue_string(toString(call.This)) } precision := toIntegerFloat(value) if 1 > precision { panic(newRangeError("RangeError: toPrecision() precision must be greater than 1")) } return toValue_string(strconv.FormatFloat(toFloat(call.This), 'g', int(precision), 64)) } func builtinNumber_toLocaleString(call FunctionCall) Value { return builtinNumber_toString(call) }
# Question

Design a system to keep track of employee data. The system should keep track of an employee's name, ID number and hourly pay rate in a class called Employee. You may also store any additional data you may need (hint: you will need something extra). This data is stored in a file (user selectable) with the id number, hourly pay rate, and the employee's full name (example):

17 5.25 Daniel Katz
18 6.75 John F. Jones

Additionally we would like to be able to print payroll information from data in a different file. The data is the employee's id number and a number of hours that they worked (example):

17 40
18 20
18 20

As you see, the same id number can be listed twice, in which case that person should be paid for the sum of the hours (John Jones did 40 hours of work, but it's listed as 20+20).

You should start by reading in the first file and storing the data as objects in a linked list. You will need to create the linked list class and the Employee data class. You may choose to use the Linked List class we created, or you may opt to create your own doubly linked list class. (Note: if you cannot get either working, you can use the STL list class, but 20% will be deducted.) The Linked list could be templated or not; it's up to you. However, templating it would allow it to be used for other projects, so it might be a good idea.

Once you have read in the information from the first file, read in the second file. Ultimately we would like to print payroll information based on the hourly wage from the first file multiplied by the number of hours an employee worked in the second file. How you do this is entirely up to you.

The output must be in sorted (descending, so the person who gets paid most prints first) order in the form of:

*********Payroll Information********
John F. Jones, $270
Daniel Katz, $210
*********End payroll**************

(Question text copyright belongs to the professor)
package com.morladim.mario.sample.ipc.aidl import android.app.Service import android.content.Intent import android.os.IBinder import com.morladim.mario.sample.ipc.ITalkAidlInterface import com.morladim.mario.sample.ipc.ITalkCallbackAidlInterface import java.util.* import java.util.concurrent.Executors import java.util.concurrent.TimeUnit class AidlTalkService : Service() { override fun onBind(intent: Intent): IBinder { val e = Executors.newSingleThreadScheduledExecutor() e.scheduleAtFixedRate({ callbacks.forEach { c -> c.onMessage("sdf") } }, 0, 5, TimeUnit.SECONDS) return binder } private val callbacks = ArrayList<ITalkCallbackAidlInterface>() private val binder = object : ITalkAidlInterface.Stub() { override fun tellServer(s: String?) { println(s) } override fun getMessage(): String { return "gggg message" } override fun registerCallback(callback: ITalkCallbackAidlInterface?) { if (callback != null) { callbacks.add(callback) } } override fun unRegisterCallback(callback: ITalkCallbackAidlInterface?) { callbacks.remove(callback) } } }
namespace TheRace { using System; public class UnitDriver { private string name; public UnitDriver(string name, UnitCar car) { Name = name; Car = car; } public string Name { get => name; private set => name = value ?? throw new ArgumentNullException(nameof(Name), "Name cannot be null!"); } public UnitCar Car { get; } } }
package org.foraci.mxf.mxfReader.parsers; import org.foraci.mxf.mxfReader.MxfInputStream; import java.io.IOException; import java.math.BigInteger; import java.util.Vector; import java.util.LinkedList; import java.util.List; /** * A default parser for a KLV's value * @author jforaci */ public class Parser implements Cloneable { protected final BigInteger length; protected final MxfInputStream in; protected BigInteger count = BigInteger.ZERO; public Parser(BigInteger length, MxfInputStream in) { this.length = length; this.in = in; } public Object read() throws IOException { skip(); return null; } public List readAll() throws IOException { LinkedList v = new LinkedList(); Object o; while ((o = read()) != null) { v.add(o); } return v; } public void skip() throws IOException { BigInteger length = this.length.subtract(this.count); in.skip(length); } protected final Parser clone() { try { return (Parser)super.clone(); } catch (CloneNotSupportedException e) { in.error("unable to create parser"); return null; } } }
declare module 'inquirer-dynamic-list' { interface Choice { name: string; value: any; } class DynamicList { opt: { choices: { choices: Choice[]; realChoices: Choice[]; }; }; constructor(options: { message?: string; emptyMessage?: string; choices: Choice[]; }); addChoice(choice: Choice): void; render(): void; run(): Promise<any>; } export = DynamicList; }
require 'spec_helper' describe 'Enumerable#drop_last' do it 'drops the last n elements' do expect([1, 2, 3, 4].drop_last(2)).to eq([1, 2]) end it 'returns an empty array if you request to drop too many elems' do expect((1..10).drop_last(100)).to eq([]) end it 'does not accept negative argument' do expect { [1, 2, 3, 4].drop_last(-1) }.to raise_error(ArgumentError) end end
unit Map.Plotter; interface uses System.Types, Graphics, Map, Map.Plotter.Intf, Map.Projection.Intf; type TMapPlotter = class(TInterfacedObject, IMapPlotter) private FProjection: IMapProjection; public constructor Create(projection: IMapProjection); procedure Plot(map: TMap; canvas: TCanvas); end; implementation constructor TMapPlotter.Create(projection: IMapProjection); begin inherited Create; FProjection := projection; end; procedure TMapPlotter.Plot(map: TMap; canvas: TCanvas); var area: IArea; polygon: TPolygon; points: array of TPoint; x, y: Integer; i: Integer; begin Canvas.Font.Size := 6; for area in map.Areas do begin x := FProjection.LatLonToPoint(area.GetShapes[0][0]).X; y := FProjection.LatLonToPoint(area.GetShapes[0][0]).Y; Canvas.TextOut(x, y, area.Code); end; for area in map.Areas do begin for polygon in area.GetShapes do begin SetLength(points, Length(polygon)); for i := Low(polygon) to High(polygon) do points[i] := FProjection.LatLonToPoint(polygon[i]); Canvas.Polyline(points); end; end; end; end.
// Copyright 2012 Jacob Trimble // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. using System; namespace ModMaker.Lua.Parser.Items { /// <summary> /// Defines a parse item that represents a numerical for loop. /// e.g. for i = 2, 4 do ... end. /// </summary> public sealed class ForNumItem : IParseStatement { /// <summary> /// Creates a new ForNumItem with the given name. /// </summary> /// <param name="name">The name of the variable defined.</param> /// <param name="limit">The item that defines the limit of the loop.</param> /// <param name="start">The item that defines the start of the loop.</param> /// <param name="step">The item that defines the step of the loop.</param> /// <exception cref="System.ArgumentNullException">If name, start, or limit is null.</exception> public ForNumItem(NameItem name, IParseExp start, IParseExp limit, IParseExp step, BlockItem block) { if (name == null) { throw new ArgumentNullException(nameof(name)); } if (start == null) { throw new ArgumentNullException(nameof(start)); } if (limit == null) { throw new ArgumentNullException(nameof(limit)); } Start = start; Limit = limit; Step = step; Name = name; Block = block; } /// <summary> /// Gets or sets the name of the variable in the loop. /// </summary> public NameItem Name { get; set; } /// <summary> /// Gets or sets the expression that determines the start of the loop. 
/// </summary> /// <exception cref="System.ArgumentNullException">If setting to null.</exception> public IParseExp Start { get; set; } /// <summary> /// Gets or sets the expression that determines the limit of the loop. /// </summary> /// <exception cref="System.ArgumentNullException">If setting to null.</exception> public IParseExp Limit { get; set; } /// <summary> /// Gets or sets the expression that determines the step of the loop. /// </summary> public IParseExp Step { get; set; } /// <summary> /// Gets or sets the block of the for loop. /// </summary> public BlockItem Block { get; set; } /// <summary> /// Gets the label that represents a break from the loop. /// </summary> public LabelItem Break { get; } = new LabelItem("<break>"); /// <summary> /// Contains the DebugInfo for the whole block. /// </summary> public DebugInfo Debug { get; set; } /// <summary> /// Contains the DebugInfo for the 'for' line. /// </summary> public DebugInfo ForDebug { get; set; } /// <summary> /// Contains the DebugInfo for the 'end' token. /// </summary> public DebugInfo EndDebug { get; set; } public IParseItem Accept(IParseItemVisitor visitor) { if (visitor == null) { throw new ArgumentNullException(nameof(visitor)); } return visitor.Visit(this); } } }
&dotnet pack "..\Truncon.Collections\Truncon.Collections.csproj" --configuration Release --output $PWD .\NuGet.exe push Truncon.Collections.*.nupkg -Source https://www.nuget.org/api/v2/package Remove-Item Truncon.Collections.*.nupkg
using My-Julia-ML-Package using Test @testset "My-Julia-ML-Package.jl" begin # Write your tests here. @test sum_of_three(3,4,5) == 12 @test sum_of_three(5,5,5) == 15 @test sum_of_three(6,6,6) == 18 end
import sys import pyjade sys.modules["pypug"] = sys.modules["pyjade"]
# frozen_string_literal: true module Alchemy class Config class << self # Returns the configuration for given parameter name. # # @param name [String] # def get(name) check_deprecation(name) show[name.to_s] end alias_method :parameter, :get # Returns a merged configuration of the following files # # Alchemys default config: +gems/../alchemy_cms/config/alchemy/config.yml+ # Your apps default config: +your_app/config/alchemy/config.yml+ # Environment specific config: +your_app/config/alchemy/development.config.yml+ # # An environment specific config overwrites the settings of your apps default config, # while your apps default config has precedence over Alchemys default config. # def show @config ||= merge_configs!(alchemy_config, main_app_config, env_specific_config) end # A list of deprecated configurations # a value of nil means there is no new default # any not nil value is the new default def deprecated_configs {} end private # Alchemy default configuration def alchemy_config read_file(File.join(File.dirname(__FILE__), "..", "..", "config/alchemy/config.yml")) end # Application specific configuration def main_app_config read_file("#{Rails.root}/config/alchemy/config.yml") end # Rails Environment specific configuration def env_specific_config read_file("#{Rails.root}/config/alchemy/#{Rails.env}.config.yml") end # Tries to load yaml file from given path. # If it does not exist, or its empty, it returns an empty Hash. # def read_file(file) YAML.safe_load(ERB.new(File.read(file)).result, YAML_WHITELIST_CLASSES, [], true) || {} rescue Errno::ENOENT {} end # Merges all given configs together # def merge_configs!(*config_files) raise LoadError, "No Alchemy config file found!" if config_files.map(&:blank?).all? config = {} config_files.each { |h| config.merge!(h.stringify_keys!) } config end def check_deprecation(name) if deprecated_configs.key?(name.to_sym) config = deprecated_configs[name.to_sym] if config.nil? 
Alchemy::Deprecation.warn("#{name} configuration is deprecated and will be removed from Alchemy 5.1") else value = show[name.to_s] if value != config Alchemy::Deprecation.warn("Setting #{name} configuration to #{value} is deprecated and will be always #{config} in Alchemy 5.1") end end end end end end end
<?php declare(strict_types = 1); use Alf\AlfBasicClass; use Alf\AlfBasicType; use Alf\AlfBasicTypeScalar; use Alf\Interfaces\Integers\AlfIntGetTrait; use Alf\Interfaces\Integers\AlfIntLikeTrait; use Alf\Interfaces\Integers\AlfIntSetTrait; use Alf\Interfaces\Integers\AlfIntWorkTrait; use Alf\Interfaces\Strings\AlfCharGetTrait; use Alf\Interfaces\Strings\AlfStringGetTrait; use Alf\Interfaces\Values\AlfEmptyGetTrait; use Alf\Interfaces\Values\AlfEmptySetTrait; use Alf\Interfaces\Values\AlfEmptyWorkTrait; use Alf\Interfaces\Values\AlfHumanDataTrait; use Alf\Interfaces\Values\AlfNullGetTrait; use Alf\Interfaces\Values\AlfNullOrEmptyWorkTrait; use Alf\Interfaces\Values\AlfNullSetTrait; use Alf\Interfaces\Values\AlfNullWorkTrait; use Alf\Interfaces\Values\AlfValueGetTrait; use Alf\Services\AlfPhpClassManager; use Alf\Types\Scalars\AlfInt; test('getParent AlfInt', function () : void { $obj = new AlfInt(); $this->assertSame(AlfPhpClassManager::_()->getParent($obj), AlfBasicTypeScalar::class); }); test('listParents AlfInt', function () : void { $obj = new AlfInt(); $parents = AlfPhpClassManager::_()->listParents($obj); $this->assertCount(3, $parents); $this->assertArrayHasKey(AlfBasicTypeScalar::class, $parents); $this->assertArrayHasKey(AlfBasicType::class, $parents); $this->assertArrayHasKey(AlfBasicClass::class, $parents); }); test('listTraits AlfInt', function () : void { $obj = new AlfInt(); $traits = AlfPhpClassManager::_()->listTraits($obj); $this->assertCount(15, $traits); $this->assertArrayHasKey(AlfNullOrEmptyWorkTrait::class, $traits); $this->assertArrayHasKey(AlfNullWorkTrait::class, $traits); $this->assertArrayHasKey(AlfNullGetTrait::class, $traits); $this->assertArrayHasKey(AlfNullSetTrait::class, $traits); $this->assertArrayHasKey(AlfEmptyWorkTrait::class, $traits); $this->assertArrayHasKey(AlfEmptyGetTrait::class, $traits); $this->assertArrayHasKey(AlfEmptySetTrait::class, $traits); $this->assertArrayHasKey(AlfValueGetTrait::class, $traits); 
$this->assertArrayHasKey(AlfIntGetTrait::class, $traits); $this->assertArrayHasKey(AlfIntSetTrait::class, $traits); $this->assertArrayHasKey(AlfIntWorkTrait::class, $traits); $this->assertArrayHasKey(AlfIntLikeTrait::class, $traits); $this->assertArrayHasKey(AlfHumanDataTrait::class, $traits); $this->assertArrayHasKey(AlfStringGetTrait::class, $traits); $this->assertArrayHasKey(AlfCharGetTrait::class, $traits); });
--- layout: issue title: "Zend_Dojo_Form Checkbox element generates incorrect HTML" id: ZF-4274 --- ZF-4274: Zend\_Dojo\_Form Checkbox element generates incorrect HTML ------------------------------------------------------------------- Issue Type: Patch Created: 2008-09-12T21:50:41.000+0000 Last Updated: 2009-01-15T00:24:06.000+0000 Status: Closed Fix version(s): Reporter: Andrew Yager (yogel) Assignee: Bart McLeod (mcleod@spaceweb.nl) Tags: - Zend\_Dojo Related issues: Attachments: - [DijitElement-ZF-4274-3.patch](/issues/secure/attachment/11644/DijitElement-ZF-4274-3.patch) - [DijitElement-ZF-4274-4.patch](/issues/secure/attachment/11645/DijitElement-ZF-4274-4.patch) - [DijitElement-ZF-4274-test.patch](/issues/secure/attachment/11625/DijitElement-ZF-4274-test.patch) - [DijitElement-ZF-4274.patch](/issues/secure/attachment/11626/DijitElement-ZF-4274.patch) - [DijitElement-ZF-4274.patch](/issues/secure/attachment/11624/DijitElement-ZF-4274.patch) - [DijitElement-ZF-4274.patch](/issues/secure/attachment/11623/DijitElement-ZF-4274.patch) ### Description Zend\_Dojo\_Form checkbox generates incorrect HTML. Assuming a working Zend\_Dojo environment, the following code produces the error: <pre class="highlight"> class TestController extends Zend_Controller_Action { function indexAction () { $this->view->addHelperPath('Zend/Dojo/View/Helper/', 'Zend_Dojo_View_Helper'); $form = new Zend_Dojo_Form(); $form->addElement( 'CheckBox', 'checkboxValue', array( 'label' => 'Label', 'checkedValue' => 'checkedValue', 'uncheckedValue' => 'notCheckedValue', ) ); $form->addDecorators(array('FormElements', 'Form')); $this->view->form = $form; } } This produces the following HTML: <pre class="highlight"> The correct HTML should be: <pre class="highlight"> Tested against SVN Trunk r 11380. ### Comments Posted by Andrew Yager (yogel) on 2008-09-12T22:12:26.000+0000 This is the wrong fix, but it is a fix. The problem is that the $checkedOptions parameter is not set when passed to the Helper. 
<pre class="highlight"> --- Zend/Dojo/View/Helper/CheckBox.php (revision 11380) +++ Zend/Dojo/View/Helper/CheckBox.php (working copy) /** Zend_Dojo_View_Helper_Dijit */ @@ -72,7 +72,11 @@ } elseif (isset($attribs['checked'])) { $checked = false; } + + $checkedOptions = $attribs['options']; + $checkboxInfo = Zend_View_Helper_FormCheckbox::determineCheckboxInfo($value, $checked, $checkedOptions); $attribs['checked'] = $checkboxInfo['checked']; if (!array_key_exists('id', $attribs)) { $attribs['id'] = $id; Posted by Benjamin Jeanjean (apsy) on 2008-10-01T08:34:46.000+0000 This component is unusable in it's actual state... It don't work at all ! Posted by Bernd Matzner (bmatzner) on 2008-11-04T05:35:05.000+0000 In addition, the view helper adds ``` even if the form element value is set to the unChecked value. I assume this is so because the value of the hidden field differs from the unchecked value, because of which it is incorrectly assumed that the field should thus be checked. Posted by Bart McLeod (mcleod@spaceweb.nl) on 2008-11-06T14:20:54.000+0000 The problem was the options set in FormCheckbox.php were left unused. We merge with existing options, because options may already be in use by multioptions. This is a risk, so we must further test if this wil work with a checkbox group that has multioptions. This code is added in DijitElement::render(): <pre class="highlight"> if(array_key_exists('options', $attribs)){ if(is_array($options)){ $options = array_merge($options, $attribs['options']); }else{ $options = $attribs['options']; } } Posted by Bart McLeod (mcleod@spaceweb.nl) on 2008-11-06T14:51:26.000+0000 trying to overwrite wrong patch file (is whole file instead of patch) Posted by Bart McLeod (mcleod@spaceweb.nl) on 2008-11-06T14:53:42.000+0000 Patch for tests/Zend/Dojo/Form/Element/CheckboxTest.php Posted by Bart McLeod (mcleod@spaceweb.nl) on 2008-11-06T14:56:08.000+0000 Do not use the last patch in the list (the oldest). It is the whole file (sorry). 
It's size is 6kb, you should use the 2kb version that is listed one higher up. Posted by Bart McLeod (mcleod@spaceweb.nl) on 2008-11-06T14:57:49.000+0000 Please review. I think it is quite allright, but you should take a look at how checkboxgroups behave in Dojo after patching. I should write a test for that myself, but I need to get some sleep. Posted by Bart McLeod (mcleod@spaceweb.nl) on 2008-11-07T03:32:15.000+0000 Found that the test patch and the dijit patch are the same: they both are the testpatch. So I try once again and upload the decorator/DijitElement.php patch. Posted by Bart McLeod (mcleod@spaceweb.nl) on 2008-11-18T05:23:47.000+0000 better batch, leaves multioptions intact instead of doubling them by using array\_merge Posted by Bart McLeod (mcleod@spaceweb.nl) on 2008-11-18T06:11:32.000+0000 Better patch (number 4) deletes the lines where getMultiOptions() is called internally. This line is no longer necessary and existing tests still pass. Also a manual test proved that these lines were no longer needed. Posted by Bart McLeod (mcleod@spaceweb.nl) on 2008-11-18T06:15:36.000+0000 Committed to svn after contacting Matthew about this and resolving the conflict with multioptions. Posted by Bart McLeod (mcleod@spaceweb.nl) on 2008-11-18T06:18:09.000+0000 affects version 1.7 and prior.
namespace Roket.NET.Models.API { public class ApiClient { public string BaseApiURL { get; set; } public string ApiRequestURL { get; set; } } }
--- title: Karrimor description: ~ image: "/images/brands/karrimor.png" remote-image: "https://upload.wikimedia.org/wikipedia/fa/9/95/Karrimor_logo.jpg" purposes: [bivouac, hike, mountaineer] types: [hiking-shoes, mountaineering-boots, backpack, tent, sleeping-bag] countries: [united-kingdom] website: "http://www.karrimor.com" shop: ~ store-finder: ~ about: ~ wiki: "https://en.wikipedia.org/wiki/Karrimor" ---
use super::item::Item; use chrono::Local; use colored::*; pub struct ItemPresenter<'a> { item: &'a Item, separator_spacing: usize, id_spacing: usize, } impl<'a> ItemPresenter<'a> { pub fn new(item: &'a Item, separator_spacing: usize, id_spacing: usize) -> Self { Self { item, separator_spacing, id_spacing, } } pub fn present(&self) -> String { let (completed, completed_len) = self.present_completed(); let (date, date_len) = self.present_date(); format!( "{:id_width$} {:completed_width$} {:date_width$} {} {}", self.present_id(), completed, date, self.present_desc(), self.present_tags(), id_width = self.id_spacing + self.separator_spacing, completed_width = completed_len + self.separator_spacing, date_width = date_len + self.separator_spacing, ) } fn present_id(&self) -> String { self.item.id.to_string() } fn present_completed(&self) -> (String, usize) { if self.item.completed { ("[X]".to_string().green().to_string(), 12) } else { ("[ ]".to_string(), 3) } } fn present_date(&self) -> (String, usize) { let _today = Local::now().naive_local().date(); let _yesterday = _today.pred(); let _tomorrow = _today.succ(); let (date, uncolored_len) = match self.item.date.date() { a if a == _yesterday => { let mut date_str = String::from("@"); date_str.push_str("Yesterday"); (date_str.magenta().to_string(), date_str.len()) } a if a == _today => { let mut date_str = String::from("@"); date_str.push_str("Today"); (date_str.bold().cyan().to_string(), date_str.len()) } a if a == _tomorrow => { let mut date_str = String::from("@"); date_str.push_str("Tomorrow"); (date_str.cyan().to_string(), date_str.len()) } a => { let mut date_str = String::from("@"); date_str.push_str(&a.to_string()); (date_str.cyan().to_string(), date_str.len()) } }; let longest_date_str_len = 11; let date_len = date.len() + (longest_date_str_len - uncolored_len); (date, date_len) } fn present_desc(&self) -> String { self.item.desc.clone() } fn present_tags(&self) -> String { let tags = &self.item.tags; let 
tags_augmented: Vec<String> = tags .into_iter() .map(|t| { let mut t = t.clone(); t.insert_str(0, "+"); t }) .collect(); tags_augmented.join(" ").blue().to_string() } } #[cfg(test)] mod tests { use super::*; use chrono::Local; #[test] fn it_presents_item_id() { let item = Item::new( 1, "Hello".to_string(), Local::today().naive_local(), vec![String::from("tag1")], false, 0, ); let item_presenter = ItemPresenter::new(&item, 2, 1); assert!(&item_presenter.present().contains(&1.to_string())); } #[test] fn it_presents_item_desc() { let item = Item::new( 1, "Hello".to_string(), Local::today().naive_local(), vec![String::from("tag1")], false, 0, ); let item_presenter = ItemPresenter::new(&item, 2, 1); assert!(&item_presenter.present().contains("Hello")); } #[test] fn it_presents_item_date() { let item = Item::new( 1, "Hello".to_string(), Local::today().naive_local(), vec![String::from("tag1")], false, 0, ); let item_presenter = ItemPresenter::new(&item, 2, 1); println!("{}", item_presenter.present()); assert!(&item_presenter.present().contains("@Today")); } #[test] fn it_presents_item_tags() { let item = Item::new( 1, "Hello".to_string(), Local::today().naive_local(), vec![String::from("tag1"), String::from("tag2")], false, 0, ); let item_presenter = ItemPresenter::new(&item, 2, 1); println!("{}", item_presenter.present()); assert!(&item_presenter.present().contains("+tag1 +tag2")); } #[test] fn it_presents_item_completed() { let item = Item::new( 1, "Hello".to_string(), Local::today().naive_local(), vec![String::from("tag1"), String::from("tag2")], true, 0, ); let item_presenter = ItemPresenter::new(&item, 2, 1); println!("{}", item_presenter.present()); assert!(&item_presenter.present().contains("[X]")); } }
{{-- Search results page: lists each matching Pokemon as a link to its detail route. --}}
@extends('layouts.master')

@section('content')
<div class="container content-container">
    <h2 class="text-center">Search Results</h2>
    @foreach($result as $pokemon)
        <a href="{{ route('pokemon.show', $pokemon->id) }}">
            {{-- The partial reads $pokemon from this loop's scope. --}}
            @include('partials.pokemon-preview')
        </a>
    @endforeach
</div>
@endsection
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.quarkus.core.it.annotations;

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;
import javax.inject.Named;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;

import org.apache.camel.CamelContext;
import org.apache.camel.EndpointInject;
import org.apache.camel.FluentProducerTemplate;
import org.apache.camel.Produce;
import org.apache.camel.ProducerTemplate;
import org.apache.camel.Route;

/**
 * Test resource exercising Camel's injection annotations
 * ({@code @EndpointInject}, {@code @Produce}) from JAX-RS endpoints.
 * Each POST sends a payload into a direct endpoint and then waits for the
 * routed result to land in the shared {@code results} map.
 */
@Path("/core/annotations")
@ApplicationScoped
public class CoreAnnotationsResource {

    @Inject
    ProducerTemplate producerTemplate;

    @Inject
    CamelContext context;

    @EndpointInject("direct:endpointInjectTemplate")
    ProducerTemplate endpointInjectTemplateProducer;

    @EndpointInject("direct:endpointInjectFluentTemplate")
    FluentProducerTemplate endpointInjectFluentTemplateProducer;

    @Produce("direct:produceProducer")
    ProducerTemplate produceProducer;

    @Produce("direct:produceProducerFluent")
    FluentProducerTemplate produceProducerFluent;

    /** Shared sink the Camel routes write their results into, keyed by test name. */
    @Inject
    @Named("results")
    Map<String, List<String>> results;

    @Path("/routes/lookup-routes")
    @GET
    @Produces(MediaType.TEXT_PLAIN)
    public String lookupRoutes() {
        // there should be 2 routes, the one with LambdaRouteBuilder method above and from CoreRoutes.java
        return context.getRoutes().stream().map(Route::getId).sorted().collect(Collectors.joining(","));
    }

    @Path("/endpointInjectTemplate")
    @POST
    @Consumes(MediaType.TEXT_PLAIN)
    @Produces(MediaType.TEXT_PLAIN)
    public String endpointInjectTemplate(String payload) {
        endpointInjectTemplateProducer.sendBody("Sent to an @EndpointInject: " + payload);
        return awaitFirst("endpointInjectTemplate");
    }

    @Path("/endpointInjectFluentTemplate")
    @POST
    @Consumes(MediaType.TEXT_PLAIN)
    @Produces(MediaType.TEXT_PLAIN)
    public String endpointInjectFluentTemplate(String payload) {
        endpointInjectFluentTemplateProducer
                .withBody("Sent to an @EndpointInject fluent: " + payload)
                .send();
        return awaitFirst("endpointInjectFluentTemplate");
    }

    @Path("/endpointInjectDirect/{index}")
    @POST
    @Consumes(MediaType.TEXT_PLAIN)
    @Produces(MediaType.TEXT_PLAIN)
    public String endpointInjectDirect(String payload, @PathParam("index") String index) {
        producerTemplate.sendBody("direct:endpointInjectDirectStart" + index, payload);
        return awaitFirst("endpointInjectDirect" + index);
    }

    @Path("/produceProducer")
    @POST
    @Consumes(MediaType.TEXT_PLAIN)
    @Produces(MediaType.TEXT_PLAIN)
    public String produceProducer(String payload) {
        produceProducer.sendBody("Sent to an @Produce: " + payload);
        return awaitFirst("produceProducer");
    }

    @Path("/produceProducerFluent")
    @POST
    @Consumes(MediaType.TEXT_PLAIN)
    @Produces(MediaType.TEXT_PLAIN)
    public String produceProducerFluent(String payload) {
        produceProducerFluent
                .withBody("Sent to an @Produce fluent: " + payload)
                .send();
        return awaitFirst("produceProducerFluent");
    }

    /**
     * Polls the {@code results} list registered under {@code key} until the
     * first element arrives or ~10 seconds elapse.
     *
     * Fix: the original dereferenced {@code results.get(key)} without a null
     * check (NPE for an unregistered key) and returned {@code list.get(0)}
     * unconditionally, which raised a contextless
     * {@code IndexOutOfBoundsException} when the wait timed out. Both cases
     * now fail fast with a diagnostic message.
     *
     * @param key name of the result list to await
     * @return the first routed result for {@code key}
     * @throws IllegalStateException if no list is registered for {@code key},
     *         or no result arrived before the timeout / interruption
     */
    String awaitFirst(String key) {
        final List<String> list = results.get(key);
        if (list == null) {
            throw new IllegalStateException("No result list registered for key: " + key);
        }
        final long timeout = System.currentTimeMillis() + 10000;
        do {
            try {
                Thread.sleep(50);
            } catch (InterruptedException e) {
                // Restore the interrupt flag and stop waiting.
                Thread.currentThread().interrupt();
                break;
            }
        } while (list.isEmpty() && System.currentTimeMillis() < timeout);
        if (list.isEmpty()) {
            throw new IllegalStateException("Timed out waiting for a result for key: " + key);
        }
        return list.get(0);
    }
}
# C-Help - The Calgary Helper

### Created by:

- [Artem Golovin](https://github.com/awave1)
- [Alex Zecevic](https://github.com/Zeka17)
- [Daniel Artuso](https://github.com/dartuso)
- [Igor Pieters](https://github.com/ipieters)
- [Rakheem Dewji](https://github.com/raksdewji)

## The Problem

Even in a digital age, many people still lack the resources to communicate, navigate, and connect within our cities.

## The Solution

C-Help is a smart billboard solution that can be deployed as an IoT device around the city. It enables people without those resources to navigate and communicate with the cities of today.

#### Features:

- AI chatbot to answer questions that citizens may have
- Get navigation information
- Get date, time and weather information
- SOS/911 Emergency contact
using System.Runtime.InteropServices;
using System.Text;

namespace Ab3d.OculusWrap
{
    /// <summary>
    /// Interop struct describing an external camera as reported by the Oculus
    /// runtime. Field order and the fixed-size name buffer must match the
    /// native layout, so do not reorder or resize members.
    /// </summary>
    public struct ExternalCamera
    {
        /// <summary>
        /// Byte array for camera Name string
        /// </summary>
        [MarshalAs(UnmanagedType.ByValArray, SizeConst = 32)]
        public byte[] NameBytes;

        /// <summary>
        /// Camera Name
        /// </summary>
        public string Name
        {
            get { return OvrWrap.GetAsciiString(NameBytes); }
        }

        /// <summary>
        /// Intrinsic camera parameters (lens/projection data) for this camera.
        /// </summary>
        public CameraIntrinsics Intrinsics;

        /// <summary>
        /// Extrinsic camera parameters (pose data) for this camera.
        /// </summary>
        public CameraExtrinsics Extrinsics;
    }
}
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
import numpy as np


class Plotter:
    """Plotting helpers for the synth/pima classification experiments.

    Holds the four data splits and renders dataset overviews, k-NN accuracy
    curves, decision boundaries, and clustering membership-change curves.
    """

    # Training/test splits; last column of each array is the class label.
    synth_tr: np.ndarray
    synth_te: np.ndarray
    pima_tr: np.ndarray
    pima_te: np.ndarray

    def __init__(self, synth_tr: np.ndarray, synth_te: np.ndarray, pima_tr: np.ndarray, pima_te: np.ndarray):
        """Store the four dataset splits for later plotting."""
        self.synth_tr = synth_tr
        self.synth_te = synth_te
        self.pima_tr = pima_tr
        self.pima_te = pima_te

    def plot_dataset(self) -> None:
        """Show a 1x3 overview of synth_tr: class scatter plot plus one
        density histogram per feature."""
        fig, ax = plt.subplots(1, 3, figsize=(11, 4))
        plot_color = 'dodgerblue'
        # synth_tr f1-f2 Scatter Plot, split by the label in the last column
        ax[0].scatter(self.synth_tr[:, 0][self.synth_tr[:, -1] == 0], self.synth_tr[:, 1][self.synth_tr[:, -1] == 0],
                      color='royalblue', s=12, marker='o', label="Class 0")
        ax[0].scatter(self.synth_tr[:, 0][self.synth_tr[:, -1] == 1], self.synth_tr[:, 1][self.synth_tr[:, -1] == 1],
                      color='red', s=12, marker='o', label="Class 1")
        ax[0].margins(0.1)  # 1% padding in all directions
        ax[0].set_title("Synth Dataset Scatter Plot")
        ax[0].set_xlabel("Feature 1")
        ax[0].set_ylabel("Feature 2")
        ax[0].legend()
        ax[0].grid(True)
        # f1 Hist
        hist, bins, patches = ax[1].hist(self.synth_tr[:, 0], density=True, bins=20, color=plot_color,
                                         edgecolor='black', linewidth=0.5)  # density=False would make counts
        ax[1].set_title("Synth Dataset Density Histogram")
        ax[1].set_xlabel("Feature 1")
        ax[1].set_ylabel("Density")
        ax[1].margins(0.1)  # 1% padding in all directions
        # f2 Hist
        hist, bins, patches = ax[2].hist(self.synth_tr[:, 1], density=True, bins=20, color=plot_color,
                                         edgecolor='black', linewidth=0.5)  # density=False would make counts
        ax[2].set_title("Synth Dataset Density Histogram")
        ax[2].set_xlabel("Feature 2")
        ax[2].set_ylabel("Density")
        ax[2].margins(0.1)  # 1% padding in all directions
        fig.tight_layout()
        fig.show()

    @staticmethod
    def plot_knn_overall_accuracies(synth_k_range, synth_accuracies, pima_k_range, pima_accuracies) -> None:
        """Plot overall k-NN classification accuracy vs k, one subplot per
        dataset (synthetic on top, Pima below)."""
        fig, ax = plt.subplots(2, 1, figsize=(9, 9))
        # Synth Dataset
        ax[0].plot(synth_k_range, synth_accuracies, label='Synthetic Dataset', color='deepskyblue')
        ax[0].set_title('Overall Classification accuracy vs k for the Synthetic Dataset')
        ax[0].set_xlabel('k')
        ax[0].set_ylabel('Overall Classification Accuracy')
        _ = ax[0].set_xticks(synth_k_range)
        ax[0].legend()
        # Pima Dataset
        ax[1].plot(pima_k_range, pima_accuracies, label='Pima Dataset', color='orange')
        ax[1].set_title('Overall Classification accuracy vs k for the Pima Dataset')
        ax[1].set_xlabel('k')
        ax[1].set_ylabel('Overall Classification Accuracy')
        _ = ax[1].set_xticks(pima_k_range)
        ax[1].legend()
        # Show plot
        fig.tight_layout()
        fig.show()

    @staticmethod
    def plot_decision_boundaries(knn, h: float = 0.2) -> None:
        """Overlay the k-NN decision regions with the three analytic decision
        boundaries (Euclidean, Mahalanobis, quadratic) from Project 1.

        :param knn: fitted classifier exposing train_x, train_y and predict()
        :param h: mesh step size for the k-NN decision-region grid
        """
        # Init values statically from Project 1
        # (line coefficients y = a*x + b for the Euclidean/Mahalanobis
        # boundaries, conic coefficients A..F for the quadratic one, plus the
        # class means and the line linking them)
        a_eucl = -0.8326229483927666
        b_eucl = 0.44378197841356054
        a_maha = -0.13486408662390306
        b_maha = 0.49454949088419903
        A = -2.9353736949690252
        B = -7.122064910873636
        C = -9.131232270572491
        D = -4.023021305932989
        E = 29.777685196099192
        F = -14.251862334038359
        means = np.array([[-0.22147024, 0.32575494], [0.07595431, 0.68296891]])
        means_center = np.array([-0.07275796159999995, 0.5043619269200001])
        a_m = 1.2010238270880302
        b_m = 0.591745972411956
        # Plot the Decision Boundaries
        fig, ax = plt.subplots(1, 1, figsize=(11, 9))
        eucl_x_range = np.linspace(-0.8, 0.9, 50)
        maha_x_range = np.linspace(-1, 1, 50)
        quadr_x_range = np.linspace(-1.1, 1.1, 50)
        quadr_y_range = np.linspace(-0.2, 1.1, 50)
        # KNN Decision Boundaries
        cmap_light = ListedColormap(['lightblue', 'moccasin'])
        # KNN Decision Boundaries: classify every point of a mesh over the
        # training data's bounding box (padded by 1) and color the regions.
        x, y = knn.train_x, knn.train_y
        x_min, x_max = x[:, 0].min() - 1, x[:, 0].max() + 1
        y_min, y_max = x[:, 1].min() - 1, x[:, 1].max() + 1
        xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
        x_target = np.c_[xx.ravel(), yy.ravel()]
        Z = knn.predict(x_target, only_x=True)
        Z = Z.reshape(xx.shape)
        knn_contour_plot = ax.contourf(xx, yy, Z, cmap=cmap_light)
        # Class 0 Scatter plot
        ax.scatter(x[:, 0][y == 0], x[:, 1][y == 0],
                   color='royalblue', s=10, label='Class 0')
        # Class 1 Scatter plot
        ax.scatter(x[:, 0][y == 1], x[:, 1][y == 1],
                   color='red', s=10, label='Class 1')
        # Decision Boundaries
        # Euclidean
        ax.plot(eucl_x_range, a_eucl * eucl_x_range + b_eucl, color='orange', label=f'Euclidean Decision Boundary')
        # Mahalanobis
        ax.plot(maha_x_range, a_maha * maha_x_range + b_maha, color='deepskyblue',
                label=f'Mahalanobis Decision Boundary')
        # Quadratic: implicit conic A*x^2 + B*y^2 + C*x*y + D*x + E*y + F = 0,
        # drawn as the zero level set of the mesh evaluation.
        x_quad, y_quad = np.meshgrid(quadr_x_range, quadr_y_range)
        quadr_equation = A * x_quad ** 2 + B * y_quad ** 2 + C * x_quad * y_quad + D * x_quad + E * y_quad + F
        quad_contour_plt = ax.contour(x_quad, y_quad, quadr_equation, [0], colors='limegreen')
        ax.clabel(quad_contour_plt, inline=1, fontsize=10)
        quad_contour_plt.collections[0].set_label('Quadratic Decision Boundary')
        # Line that links the means of the two classes
        mline_x_range = np.linspace(means[0][0], means[1][0], 5)
        ax.plot(mline_x_range, a_m * mline_x_range + b_m, color='m', linestyle='dashed',
                label='Line linking the two means')
        # Class 0 Mean value
        ax.plot(means[0][0], means[0][1], 'bo', markersize=11, markeredgecolor='w', label='Class 0 Mean value')
        # Class 1 Mean value
        ax.plot(means[1][0], means[1][1], 'ro', markersize=11, markeredgecolor='w', label='Class 1 Mean value')
        # Center of the linking line
        ax.plot(means_center[0], means_center[1], 'mo', markersize=11, markeredgecolor='w',
                label=f'Center of the linking line')
        # Show figure
        ax.set_title(
            "The three Decision Boundaries plotted against the scatter plot of the two features")
        # ax.axis('equal')
        ax.set_xlim(-1.35, 1.3)
        ax.set_ylim(-0.35, 1.15)
        ax.set_xlabel("Feature 1")
        ax.set_ylabel("Feature 2")
        ax.legend(loc='upper left')
        # ax.margins(0.1)
        fig.show()

    @staticmethod
    def plot_membership_changes(kmeans_membership_changes, wta_membership_changes, epsilon) -> None:
        """Plot per-epoch membership changes for K-means (top) and WTA
        (bottom) on the Pima dataset.

        Epoch numbering starts at 2 because no change can be measured for the
        first epoch.
        """
        fig, ax = plt.subplots(2, 1, figsize=(9, 9))
        # Pima, Kmeans
        kmeans_range = range(2, len(kmeans_membership_changes)+2)
        ax[0].plot(kmeans_range, kmeans_membership_changes, label=f'Kmeans', color='deepskyblue')
        ax[0].set_title('Membership Changes per epoch for Kmeans on Pima Dataset')
        ax[0].set_xlabel('Epoch')
        ax[0].set_ylabel('Membership Changes')
        _ = ax[0].set_xticks(kmeans_range)
        ax[0].legend()
        # Pima, WTA
        wta_range = range(2, len(wta_membership_changes) + 2)
        ax[1].plot(wta_range, wta_membership_changes, label=f'WTA: epsilon={epsilon}', color='orange')
        ax[1].set_title('Membership Changes per epoch for WTA on Pima Dataset')
        ax[1].set_xlabel('Epoch')
        ax[1].set_ylabel('Membership Changes')
        _ = ax[1].set_xticks(wta_range)
        ax[1].legend()
        # Show plot
        fig.tight_layout()
        fig.show()
package com.copperleaf.kodiak.swift.formatter

import com.copperleaf.kodiak.common.RichTextComponent
import com.copperleaf.kodiak.common.RichTextComponent.Companion.PUNCTUATION
import com.copperleaf.kodiak.common.RichTextComponent.Companion.TEXT
import com.copperleaf.kodiak.common.RichTextComponent.Companion.TYPE_NAME
import com.copperleaf.kodiak.swift.internal.models.SourceKittenSubstructure
import com.copperleaf.kodiak.swift.models.SwiftField

/**
 * Converts this SourceKitten substructure into a [SwiftField] documentation
 * node, carrying over its name, modifiers, comment, type information, and a
 * rendered variable signature.
 */
fun SourceKittenSubstructure.toVariableDoc(structure: SourceKittenSubstructure): SwiftField =
    SwiftField(
        this,
        this.name,
        this.name,
        this.getModifiers(),
        this.getComment(),
        this.typenameRawValue,
        this.typename,
        this.variableSignature()
    )

/**
 * Renders this substructure as a Swift variable declaration signature,
 * e.g. `var name: Type`, as a list of rich-text components.
 */
fun SourceKittenSubstructure.variableSignature(): List<RichTextComponent> =
    listOf(
        RichTextComponent(TEXT, "var"),
        RichTextComponent(TEXT, " ${this.name}"),
        RichTextComponent(PUNCTUATION, ":"),
        RichTextComponent(TYPE_NAME, " ${this.typenameRawValue}", this.typename)
    )
/** * The MIT License * * Copyright (c) 2010 Adam Abrons and Misko Hevery http://getangular.com * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ /* NUGGGGGH MUST TONGUE WANGS \ ..... C C / /< / ___ __________/_#__=o /(- /(\_\________ \ \ ) \ )_ \o \ /|\ /|\ |' | | _| /o __\ / ' | / / | /_/\______| ( _( < \ \ \ \ \ | \____\____\ ____\_\__\_\ /` /` o\ |___ |_______|.. . b'ger IN THE FINAL BUILD THIS FILE DOESN'T HAVE DIRECT ACCESS TO GLOBAL FUNCTIONS DEFINED IN Angular.js YOU *MUST* REFER TO THEM VIA angular OBJECT (e.g. angular.forEach(...)) AND MAKE SURE THAT THE GIVEN FUNCTION IS EXPORTED TO THE angular NAMESPACE in AngularPublic.js */ /** * @ngdoc overview * @name angular.mock * @namespace Namespace for all built-in angular mocks. * * @description * `angular.mock` is a namespace for all built-in mocks that ship with angular and automatically * replace real services if `angular-mocks.js` file is loaded after `angular.js` and before any * tests. 
 */
// Root namespace object for all mocks defined in this file.
angular.mock = {};

/**
 * @workInProgress
 * @ngdoc service
 * @name angular.mock.service.$browser
 *
 * @description
 * Mock browser: records expected XHRs and replays their canned responses on
 * flush(); also mocks url polling, cookies and deferred (timeout) functions.
 */
function MockBrowser() {
  var self = this,
      expectations = {},   // expectations[method][url] -> {code, response, headers}
      requests = [];       // queued response callbacks, replayed by xhr.flush()

  this.isMock = true;
  self.url = "http://server";
  self.lastUrl = self.url; // used by url polling fn
  self.pollFns = [];

  // register url polling fn
  self.onHashChange = function(listener) {
    self.pollFns.push(
      function() {
        if (self.lastUrl != self.url) {
          self.lastUrl = self.url;
          listener();
        }
      }
    );
    return listener;
  };

  // Mock XHR: looks up a previously registered expectation for
  // (method, url [+ serialized body]) and queues its canned response.
  // Throws immediately on an unexpected request.
  self.xhr = function(method, url, data, callback, headers) {
    headers = headers || {};
    if (data && angular.isObject(data)) data = angular.toJson(data);
    if (data && angular.isString(data)) url += "|" + data;
    var expect = expectations[method] || {};
    var expectation = expect[url];
    if (!expectation) {
      throw new Error("Unexpected request for method '" + method + "' and url '" + url + "'.");
    }
    requests.push(function(){
      angular.forEach(expectation.headers, function(value, key){
        if (headers[key] !== value) {
          throw new Error("Missing HTTP request header: " + key + ": " + value);
        }
      });
      callback(expectation.code, expectation.response);
    });
  };
  self.xhr.expectations = expectations;
  self.xhr.requests = requests;
  // Registers an expectation; the returned object's respond() sets the canned
  // status/response (status defaults to 200 when only a response is given).
  self.xhr.expect = function(method, url, data, headers) {
    if (data && angular.isObject(data)) data = angular.toJson(data);
    if (data && angular.isString(data)) url += "|" + data;
    var expect = expectations[method] || (expectations[method] = {});
    return {
      respond: function(code, response) {
        if (!angular.isNumber(code)) {
          response = code;
          code = 200;
        }
        expect[url] = {code:code, response:response, headers: headers || {}};
      }
    };
  };
  // Method-specific shorthands for expect().
  self.xhr.expectGET    = angular.bind(self, self.xhr.expect, 'GET');
  self.xhr.expectPOST   = angular.bind(self, self.xhr.expect, 'POST');
  self.xhr.expectDELETE = angular.bind(self, self.xhr.expect, 'DELETE');
  self.xhr.expectPUT    = angular.bind(self, self.xhr.expect, 'PUT');
  self.xhr.expectJSON   = angular.bind(self, self.xhr.expect, 'JSON');
  // Replays all queued responses (LIFO via pop); throws if none are queued.
  self.xhr.flush = function() {
    if (requests.length == 0) {
      throw new Error("No xhr requests to be flushed!");
    }
    while(requests.length) {
      requests.pop()();
    }
  };

  self.cookieHash = {};
  self.lastCookieHash = {};
  self.deferredFns = [];

  // Collects deferred fns; defer.flush() runs them in FIFO order.
  self.defer = function(fn) {
    self.deferredFns.push(fn);
  };

  self.defer.flush = function() {
    while (self.deferredFns.length) self.deferredFns.shift()();
  };
}
MockBrowser.prototype = {

  // Runs all registered poll functions (simulates the browser poller).
  poll: function poll(){
    angular.forEach(this.pollFns, function(pollFn){
      pollFn();
    });
  },

  addPollFn: function(pollFn) {
    this.pollFns.push(pollFn);
    return pollFn;
  },

  hover: function(onHover) {
  },

  getUrl: function(){
    return this.url;
  },

  setUrl: function(url){
    this.url = url;
  },

  // With (name, value): set/delete a single cookie, enforcing string values
  // and the 4kB size limit. With no args: return the current cookie hash.
  cookies: function(name, value) {
    if (name) {
      if (value == undefined) {
        delete this.cookieHash[name];
      } else {
        if (angular.isString(value) &&       //strings only
            value.length <= 4096) {          //strict cookie storage limits
          this.cookieHash[name] = value;
        }
      }
    } else {
      if (!angular.equals(this.cookieHash, this.lastCookieHash)) {
        this.lastCookieHash = angular.copy(this.cookieHash);
        // NOTE(review): this copies cookieHash onto itself; presumably meant
        // to detach the returned hash from future mutations — confirm intent.
        this.cookieHash = angular.copy(this.cookieHash);
      }
      return this.cookieHash;
    }
  }
};

angular.service('$browser', function(){
  return new MockBrowser();
});

/**
 * @workInProgress
 * @ngdoc service
 * @name angular.mock.service.$exceptionHandler
 *
 * @description
 * Mock implementation of {@link angular.service.$exceptionHandler} that rethrows any error passed
 * into `$exceptionHandler`. If any errors are passed into the handler in tests, it typically
 * means that there is a bug in the application or test, so this mock will make these tests fail.
 *
 * See {@link angular.mock} for more info on angular mocks.
 */
angular.service('$exceptionHandler', function(e) {
  return function(e) {throw e;};
});

/**
 * @workInProgress
 * @ngdoc service
 * @name angular.mock.service.$log
 *
 * @description
 * Mock implementation of {@link angular.service.$log} that gathers all logged messages in arrays
 * (one array per logging level). These arrays are exposed as `logs` property of each of the
 * level-specific log function, e.g. for level `error` the array is exposed as `$log.error.logs`.
 *
 * See {@link angular.mock} for more info on angular mocks.
 */
angular.service('$log', MockLogFactory);

function MockLogFactory() {
  var $log = {
    log: function(){ $log.log.logs.push(arguments); },
    warn: function(){ $log.warn.logs.push(arguments); },
    info: function(){ $log.info.logs.push(arguments); },
    error: function(){ $log.error.logs.push(arguments); }
  };

  $log.log.logs = [];
  $log.warn.logs = [];
  $log.info.logs = [];
  $log.error.logs = [];

  return $log;
}

/**
 * Mock of the Date type which has its timezone specified via constructor arg.
 *
 * The main purpose is to create Date-like instances with timezone fixed to the specified timezone
 * offset, so that we can test code that depends on local timezone settings without dependency on
 * the time zone settings of the machine where the code is running.
 *
 * @param {number} offset Offset of the *desired* timezone in hours (fractions will be honored)
 * @param {(number|string)} timestamp Timestamp representing the desired time in *UTC*
 *
 * @example
 * !!!! WARNING !!!!!
 * This is not a complete Date object so only methods that were implemented can be called safely.
 * To make matters worse, TzDate instances inherit stuff from Date via a prototype.
 *
 * We do our best to intercept calls to "unimplemented" methods, but since the list of methods is
 * incomplete we might be missing some non-standard methods. This can result in errors like:
 * "Date.prototype.foo called on incompatible Object".
* * <pre> * var newYearInBratislava = new TzDate(-1, '2009-12-31T23:00:00Z'); * newYearInBratislava.getTimezoneOffset() => -60; * newYearInBratislava.getFullYear() => 2010; * newYearInBratislava.getMonth() => 0; * newYearInBratislava.getDate() => 1; * newYearInBratislava.getHours() => 0; * newYearInBratislava.getMinutes() => 0; * </pre> * */ function TzDate(offset, timestamp) { if (angular.isString(timestamp)) { var tsStr = timestamp; this.origDate = angular.String.toDate(timestamp); timestamp = this.origDate.getTime(); if (isNaN(timestamp)) throw { name: "Illegal Argument", message: "Arg '" + tsStr + "' passed into TzDate constructor is not a valid date string" }; } else { this.origDate = new Date(timestamp); } var localOffset = new Date(timestamp).getTimezoneOffset(); this.offsetDiff = localOffset*60*1000 - offset*1000*60*60; this.date = new Date(timestamp + this.offsetDiff); this.getTime = function() { return this.date.getTime() - this.offsetDiff; }; this.toLocaleDateString = function() { return this.date.toLocaleDateString(); }; this.getFullYear = function() { return this.date.getFullYear(); }; this.getMonth = function() { return this.date.getMonth(); }; this.getDate = function() { return this.date.getDate(); }; this.getHours = function() { return this.date.getHours(); }; this.getMinutes = function() { return this.date.getMinutes(); }; this.getSeconds = function() { return this.date.getSeconds(); }; this.getTimezoneOffset = function() { return offset * 60; }; this.getUTCFullYear = function() { return this.origDate.getUTCFullYear(); }; this.getUTCMonth = function() { return this.origDate.getUTCMonth(); }; this.getUTCDate = function() { return this.origDate.getUTCDate(); }; this.getUTCHours = function() { return this.origDate.getUTCHours(); }; this.getUTCMinutes = function() { return this.origDate.getUTCMinutes(); }; this.getUTCSeconds = function() { return this.origDate.getUTCSeconds(); }; //hide all methods not implemented in this mock that the Date prototype 
exposes var unimplementedMethods = ['getDay', 'getMilliseconds', 'getTime', 'getUTCDay', 'getUTCMilliseconds', 'getYear', 'setDate', 'setFullYear', 'setHours', 'setMilliseconds', 'setMinutes', 'setMonth', 'setSeconds', 'setTime', 'setUTCDate', 'setUTCFullYear', 'setUTCHours', 'setUTCMilliseconds', 'setUTCMinutes', 'setUTCMonth', 'setUTCSeconds', 'setYear', 'toDateString', 'toJSON', 'toGMTString', 'toLocaleFormat', 'toLocaleString', 'toLocaleTimeString', 'toSource', 'toString', 'toTimeString', 'toUTCString', 'valueOf']; angular.forEach(unimplementedMethods, function(methodName) { this[methodName] = function() { throw { name: "MethodNotImplemented", message: "Method '" + methodName + "' is not implemented in the TzDate mock" }; }; }); } //make "tzDateInstance instanceof Date" return true TzDate.prototype = Date.prototype;
#!/bin/bash
# Benchmark harness for the pipesquare binary: runs it 10 times under the
# `time` builtin for wall-clock measurement, then 10 times under
# /usr/bin/time -f "%M" to record peak resident set size.

# Output directory two levels up — presumably written to by the measured
# binary; confirm against pipesquare's source.
mkdir -p ../../out

# Absolute directory of this script, so the binary is found regardless of the
# caller's working directory.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"

#echo "Compiling"
#g++ -std=c++11 -O2 -lpthread pipesquare.cpp -o pipesquare.o

echo "Starting time measurement"
# One `time` over the whole loop: reports the total for all 10 runs.
time for i in {1..10} ; do "$DIR/pipesquare" ; done

echo "Starting memory measurement"
# %M = maximum resident set size (KiB), printed once per run.
for i in {1..10}; do /usr/bin/time -f "%M" "$DIR/pipesquare"; done

#perf record "$DIR/pipesquare"
using System;
using Microsoft.Maui.Handlers;
using Microsoft.Maui;
using Microsoft.Maui.Graphics.Platform;

namespace Comet.Handlers
{
	/// <summary>
	/// Platform handler for <c>ShapeView</c>: hosts the view's drawing inside a
	/// MAUI <see cref="PlatformGraphicsView"/>.
	/// </summary>
	public partial class ShapeViewHandler : ViewHandler<ShapeView, PlatformGraphicsView>
	{
		/// <summary>
		/// Creates the native graphics view, bound to the current MAUI context's
		/// platform <c>Context</c>.
		/// </summary>
		protected override PlatformGraphicsView CreatePlatformView() => new PlatformGraphicsView(MauiContext.Context);

		/// <summary>
		/// Property-mapper callback: assigns the virtual view itself as the native
		/// view's drawable, so the shape paints directly onto the native canvas.
		/// </summary>
		public static void MapShapeProperty(IElementHandler viewHandler, ShapeView virtualView)
		{
			var nativeView = (PlatformGraphicsView)viewHandler.PlatformView;
			nativeView.Drawable = virtualView;
		}
	}
}
SET DEFINE OFF;

-- Add a unique constraint so that each (REF_ELEMN_CONFG_PUBLC, REF_DOMN) pair
-- appears at most once in AFW_25_VALR_ELEMN_CONFG; enabled and validated
-- against existing rows immediately.
ALTER TABLE AFW_25_VALR_ELEMN_CONFG ADD (
  CONSTRAINT AFW_25_VALR_ELEMN_CONFG_UK1
  UNIQUE (REF_ELEMN_CONFG_PUBLC, REF_DOMN)
  ENABLE VALIDATE)
/
--- jupytext: formats: ipynb,md:myst text_representation: extension: .md format_name: myst kernelspec: display_name: ai language: python name: ai --- (dash:dcc/range-slider)= # `dcc.RangeSlider` 参考:[dcc.RangeSlider | Dash for Python Documentation | Plotly](https://dash.plotly.com/dash-core-components/rangeslider) ## 简单例子 一个绑定回调的基本 RangeSlider 的例子。 ```{include} ../examples/simple_range_slider.py :code: python ``` ```{code-cell} ipython3 :tags: [remove-input] from sanstyle.display.html import Embed snippet_url = 'https://dash-tests.herokuapp.com' Embed(snippet_url + '/examples/simple-range-slider', className='w3-pale-blue', height=100) ``` ## 标记和步长 如果滑块 `marks` 被定义并且 `step` 被设置为 `None`,那么滑块将只能选择标记预定义的值。注意,默认值是 `step=1`,因此必须显式指定 `None` 以获得此行为。 ```{include} ../examples/mark_range_slider.py :code: python ``` ```{code-cell} ipython3 :tags: [remove-input] from sanstyle.display.html import Embed snippet_url = 'https://dash-tests.herokuapp.com' Embed(snippet_url + '/examples/mark-range-slider', className='w3-pale-blue', height=100) ``` 待续。。。
using Api.Helper;
using Core.Contracts;
using Core.DataTransferObjects;
using Microsoft.AspNetCore.Mvc;

namespace Api.Controllers
{
    /// <summary>
    /// Controller to manage measurements
    /// </summary>
    [Route("api/[controller]/[action]")]
    [ApiController]
    public class MeasurementsController : ControllerBase
    {
        /// <summary>
        /// Unit of work giving this controller access to the measurement repository.
        /// </summary>
        public IUnitOfWork UnitOfWork { get; }

        /// <summary>
        /// Creates the controller with its injected unit of work.
        /// </summary>
        /// <param name="unitOfWork">Unit of work resolved from the DI container.</param>
        public MeasurementsController(IUnitOfWork unitOfWork)
        {
            UnitOfWork = unitOfWork;
        }

        /// <summary>
        /// get all measurements ordered by time desc
        /// optional filtered by itemname
        /// </summary>
        /// <response code="200">Die Abfrage war erfolgreich.</response>
        [HttpGet]
        [ProducesResponseType(typeof(MeasurementGetDto[]), StatusCodes.Status200OK)]
        public async Task<IActionResult> Get([FromQuery]string? itemname)
        {
            var measurements = await UnitOfWork.MeasurementRepository
                .GetFilteredAsync(itemname);
            return Ok(measurements);
        }
    }
}
# nsd1902_devops_day01

程序是计算机上存储的可执行文件,当它运行起来就会加载到内存,所以进程可以认为是程序的一次执行,或加载到内存中的一系列指令。进程的内部可以由一到多个线程构成。

## 多进程编程

Windows 系统不支持 os.fork(),以下基于 fork 的多进程代码需要在 Linux / Unix 类系统上运行。

```python
# vim myfork
import os

print('Starting...')
os.fork()
print('Hello World!')

# python3 myfork.py
Starting...
Hello World!
Hello World!
```

```mermaid
graph LR
p(主进程)
c(子进程)
ph(打印)
ch(打印)
p --生成-->c
p --> ph
c --> ch
```

os.fork它的返回值是数字,这个数字在父子进程中不一样,在父进程中是非零值(子进程的PID),子进程中是0。

### 多进程编程的思路

- 想清楚父子进程分别负责哪些工作
- 一般来说,父进程只管生成子进程
- 子进程负责做具体的工作
- 一定要注意,子进程做完它的工作之后,要彻底结束。

### 僵尸进程

- 如果程序只是短时间的运行,不用操心僵尸进程
- 如果父进程尚未结束,子进程已经没有可执行代码了,子进程将会变成僵尸进程
- 父进程通过waitpid()函数进行检测子进程。如果子进程已经变成僵尸进程,则处理它。
- waitpid(m, n)接受的参数,m取值为-1,表示与wait()有相同的功能;n的值为0表示挂起父进程(父进程暂停执行),1表示不挂起。
- waitpid()返回值是元组(a, b),a表示进程号,b表示退出状态;如果回收到了僵尸子进程,a为非零值(该子进程的pid),否则a是0。

## 多线程编程

- 一个进程的内部可以有一到多个线程
- 每个进程都有自己的资源,线程共享进程的资源。

### 多线程的编程思路

- 思考主线程和工作线程分别负责哪些工作
- 一般来说,主线程只负责产生工作线程
- 工作线程做具体的工作
- 多线程没有递归生成工作线程的问题
- 多线程也没有僵尸进程的问题

## urllib模块

- urllib.request可以用来发送request和获取request的结果
- urllib.error包含了urllib.request产生的异常
- urllib.parse用来解析和处理URL
- urllib.robotparse用来解析页面的robots.txt文件

### 获取网络资源

```python
>>> from urllib import request
>>> html = request.urlopen('http://www.163.com')
>>> data = html.read()
>>> with open('/tmp/163.html', 'wb') as fobj:
...     fobj.write(data)

[root@room8pc16 day01]# firefox /tmp/163.html

>>> html = request.urlopen('https://upload-images.jianshu.io/upload_images/12347101-bc5e84e92e23c692.jpg')
>>> with open('/tmp/myimg.jpg', 'wb') as fobj:
...     fobj.write(html.read())

[root@room8pc16 day01]# eog /tmp/myimg.jpg
```

下载网上资源也可以使用wget模块

```python
(nsd1902) [root@room8pc16 day01]# pip3 install wget
>>> import wget
>>> wget.download('https://upload-images.jianshu.io/upload_images/12347101-bc5e84e92e23c692.jpg', '/tmp/abc.jpg')
```

### 修改请求头,模拟firefox请求

```python
>>> headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:52.0) Gecko/20100101 Firefox/52.0'}
>>> r = request.Request('http://127.0.0.1', headers=headers)
>>> html = request.urlopen(r)
# tail -f /var/log/httpd/access_log  发现客户端显示的是Firefox
```

### 数据编码

在URL中只允许一部分ascii字符,所以在url中如果有非法字符,需要先对其进行编码。

```python
>>> html = request.urlopen('https://www.sogou.com/web?query=中国')  # 报错,因为存在中文字符
# 对汉字进行编码
>>> url = 'https://www.sogou.com/web?query=' + request.quote('中国')
>>> url
'https://www.sogou.com/web?query=%E4%B8%AD%E5%9B%BD'
>>> html = request.urlopen(url)   # 正常
```

### 异常处理

配置一个没有权限的目录

```shell
# mkdir -m 000 /var/www/html/ban/
```

- http://127.0.0.1/abc -> 不存在
- http://127.0.0.1/ban -> 没权限

```python
>>> html = request.urlopen('http://127.0.0.1/abc')
urllib.error.HTTPError: HTTP Error 404: Not Found
>>> html = request.urlopen('http://127.0.0.1/ban')
urllib.error.HTTPError: HTTP Error 403: Forbidden
```

## paramiko模块

通过ssh远程管理服务器

安装

```shell
# cd zzg_pypkgs/paramiko_pkgs/
# pip3 install *
# 或在线安装
# pip3 install paramiko
```

应用

```python
>>> import paramiko

# 创建一个SSHClient对象
>>> ssh = paramiko.SSHClient()
# 服务器发来密钥是自动接收。相当于回答yes
>>> ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# 连接服务器
>>> ssh.connect('192.168.4.3', username='root', password='123456', port=22)
# 在服务器上执行命令后,返回值是一个元组,分别是输入、输出和错误的类文件对象
# 把这三个类文件对象分别赋值
>>> stdin, stdout, stderr = ssh.exec_command('id root; id john')
# 把输出和错误信息读取出来
>>> out = stdout.read()
>>> err = stderr.read()
>>> out
b'uid=0(root) gid=0(root) \xe7\xbb\x84=0(root)\n'
>>> err
b'id: john: no such user\n'
>>> out.decode()   # 将bytes类型转成str类型
'uid=0(root) gid=0(root) 组=0(root)\n'
```
/*************************************************************
    Event Agent checkpoint feature
**************************************************************/

/*************************************************************
    Event Agent checkpoint table
**************************************************************/

-- Idempotent migration: create the table only if it does not exist yet.
IF NOT EXISTS (SELECT 1 FROM sys.tables WHERE name = 'EventAgentCheckpoint')
BEGIN
    CREATE TABLE dbo.EventAgentCheckpoint
    (
        CheckpointId varchar(64) NOT NULL,                          -- identifies the checkpoint (primary key)
        LastProcessedDateTime datetimeoffset(7),                    -- timestamp of the last processed event
        LastProcessedIdentifier varchar(64),                        -- identifier of the last processed event
        UpdatedOn datetime2(7) NOT NULL DEFAULT sysutcdatetime(),   -- last row update time, defaults to UTC now
        CONSTRAINT PK_EventAgentCheckpoint PRIMARY KEY CLUSTERED (CheckpointId)
    )
    ON [PRIMARY]
END
GO
use crate::input::Input;

/// Day 2 spreadsheet checksum.
///
/// Part one sums, per row, the spread between the largest and smallest value.
/// Part two sums, per row, the quotient of the unique pair where one value
/// evenly divides the other.
pub fn solve(input: &mut Input) -> Result<u32, String> {
    /// Difference between the row's maximum and minimum (0 for an empty row).
    fn spread(row: &[u32]) -> u32 {
        let lo = row.iter().min().copied().unwrap_or(0);
        let hi = row.iter().max().copied().unwrap_or(0);
        hi - lo
    }

    /// Quotient of the first ordered pair (a, b) at distinct positions with
    /// b nonzero and a divisible by b; 0 if no such pair exists.
    fn even_quotient(row: &[u32]) -> u32 {
        for (i, &a) in row.iter().enumerate() {
            for (j, &b) in row.iter().enumerate() {
                if i != j && b != 0 && a % b == 0 {
                    return a / b;
                }
            }
        }
        0
    }

    // Select the per-row metric once, up front, via a plain fn pointer.
    let evaluate: fn(&[u32]) -> u32 = if input.is_part_one() {
        spread
    } else {
        even_quotient
    };

    let mut checksum = 0;
    for line in input.text.lines() {
        let row: Vec<u32> = line
            .split_ascii_whitespace()
            .map(|cell| cell.parse::<u32>().map_err(|_| "Invalid input"))
            .collect::<Result<_, _>>()?;
        checksum += evaluate(&row);
    }
    Ok(checksum)
}

#[test]
fn test() {
    use crate::input::{test_part_one, test_part_two};

    test_part_one!("5 1 9 5\n7 5 3\n2 4 6 8" => 18);
    test_part_two!("5 9 2 8\n9 4 7 3\n3 8 6 5" => 9);

    let input = include_str!("day02_input.txt");
    test_part_one!(input => 41919);
    test_part_two!(input => 303);
}
package com.powerspace.openrtb.json.bidrequest

import com.google.openrtb.BidRequest.Imp
import com.google.openrtb.BidRequest.Imp.Banner.Format
import com.powerspace.openrtb.json.EncoderProvider
import com.powerspace.openrtb.json.OpenRtbExtensions.ExtensionRegistry
import com.powerspace.openrtb.json.util.EncodingUtils
import com.powerspace.openrtb.json.common.OpenRtbProtobufEnumEncoders
import com.powerspace.openrtb.json.common.OpenRtbProtobufEnumDecoders

/**
  * OpenRTB Banner Encoder and Decoder.
  *
  * Builds circe codecs for `Imp.Banner` through the extension-aware
  * `extendedEncoder`/`extendedDecoder` helpers, so extensions registered in
  * the implicit [[ExtensionRegistry]] are serialized alongside the standard
  * fields.
  */
class OpenRtbBannerSerde(implicit er: ExtensionRegistry) extends EncoderProvider[Imp.Banner] {

  import io.circe._
  import EncodingUtils._
  import OpenRtbProtobufEnumEncoders._
  import OpenRtbProtobufEnumDecoders._

  // Codec for the nested Format message; kept implicit — presumably required
  // in scope when deriving the Banner codecs below (confirm in EncodingUtils).
  implicit val formatEncoder: Encoder[Format] = extendedEncoder[Format]
  def encoder: Encoder[Imp.Banner] = extendedEncoder[Imp.Banner]

  implicit val formatDecoder: Decoder[Format] = extendedDecoder[Format]
  def decoder: Decoder[Imp.Banner] = extendedDecoder[Imp.Banner]
}
package contacts.entities.custom.googlecontacts.userdefined

import contacts.core.entities.custom.CustomDataRegistry.Entry
import contacts.entities.custom.googlecontacts.GoogleContactsFields
import contacts.entities.custom.googlecontacts.GoogleContactsMimeType
import contacts.entities.custom.googlecontacts.UserDefinedField

// Keep this internal. Consumers don't need to see this stuff. Less visibility the better!
// Registry entry wiring together everything needed to read/write the
// Google Contacts "user defined" custom data kind.
internal class UserDefinedEntry :
    Entry<UserDefinedField, UserDefinedDataCursor, UserDefinedEntity, UserDefined> {

    override val mimeType = GoogleContactsMimeType.UserDefined
    override val fieldSet = GoogleContactsFields.UserDefined
    override val fieldMapper = UserDefinedFieldMapper()
    override val countRestriction = USER_DEFINED_COUNT_RESTRICTION
    override val mapperFactory = UserDefinedMapperFactory()
    override val operationFactory = UserDefinedOperationFactory()
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef TRUST_STS_CLIENT
#define TRUST_STS_CLIENT

/**
 * @file trust_sts_client.h
 * @brief contains the specific sts client interface
 */

#include <stdio.h>
#include <stdlib.h>
#include <axiom.h>
#include <axutil_utils.h>
#include <axis2_client.h>
#include <rp_includes.h>
#include <rp_secpolicy.h>
#include <neethi_policy.h>
#include <neethi_util.h>
#include <rampart_util.h>
#include <trust_constants.h>
#include <trust_util.h>
#include <trust_policy_util.h>
#include <trust_token.h>
#include <rampart_config.h>
#include <trust_rst.h>
#include <trust_rstr.h>
#include <trust_context.h>

#ifdef __cplusplus
extern "C"
{
#endif

    /* Opaque handle for a Security Token Service (STS) client. */
    typedef struct trust_sts_client trust_sts_client_t;

    /* Allocate a new STS client instance. Free with trust_sts_client_free. */
    AXIS2_EXTERN trust_sts_client_t *AXIS2_CALL
    trust_sts_client_create(
        const axutil_env_t * env);

    /* Release a client created by trust_sts_client_create. */
    AXIS2_EXTERN void AXIS2_CALL
    trust_sts_client_free(
        trust_sts_client_t * sts_client,
        const axutil_env_t * env);

    /* Send an RST (Request Security Token) to the configured STS/IP.
     * The RST node built from the RST context in trust_context is used. */
    AXIS2_EXTERN void AXIS2_CALL
    trust_sts_client_request_security_token(
        trust_sts_client_t * sts_client,
        const axutil_env_t * env,
        trust_context_t *trust_context);

    /* Process issuer- and service-side security policies before issuing. */
    AXIS2_EXTERN axis2_status_t AXIS2_CALL
    trust_sts_client_process_policies(
        trust_sts_client_t * sts_client,
        const axutil_env_t * env,
        neethi_policy_t * issuer_policy,
        neethi_policy_t * service_policy);

    /* Build the underlying Axis2 service client used to talk to the STS. */
    AXIS2_EXTERN axis2_svc_client_t *AXIS2_CALL
    trust_sts_client_get_svc_client(
        trust_sts_client_t * sts_client,
        const axutil_env_t * env,
        axis2_char_t * action,
        axis2_char_t * address_version,
        axis2_bool_t is_soap11);

    /* Set the endpoint address of the token issuer. */
    AXIS2_EXTERN axis2_status_t AXIS2_CALL
    trust_sts_client_set_issuer_address(
        trust_sts_client_t * sts_client,
        const axutil_env_t * env,
        axis2_char_t * address);

    /* Set the home directory used to resolve configuration/policy files. */
    AXIS2_EXTERN axis2_status_t AXIS2_CALL
    trust_sts_client_set_home_dir(
        trust_sts_client_t * sts_client,
        const axutil_env_t * env,
        axis2_char_t * directory);

    /* Request a security token using an explicit issuer policy; returns
     * the raw token buffer. */
    AXIS2_EXTERN oxs_buffer_t* AXIS2_CALL
    trust_sts_client_request_security_token_using_policy(
        trust_sts_client_t * sts_client,
        const axutil_env_t * env,
        trust_context_t *trust_context,
        neethi_policy_t *issuer_policy,
        axis2_char_t *address_version,
        axis2_bool_t is_soap11,
        rampart_context_t *rampart_context);

    /* Issuer policy file location accessors. */
    AXIS2_EXTERN axis2_status_t AXIS2_CALL
    trust_sts_client_set_issuer_policy_location(
        trust_sts_client_t * sts_client,
        const axutil_env_t * env,
        axis2_char_t * file_path);

    AXIS2_EXTERN axis2_char_t *AXIS2_CALL
    trust_sts_client_get_issuer_policy_location(
        trust_sts_client_t * sts_client,
        const axutil_env_t * env);

    /* Service policy file location accessors. */
    AXIS2_EXTERN axis2_char_t *AXIS2_CALL
    trust_sts_client_get_service_policy_location(
        trust_sts_client_t * sts_client,
        const axutil_env_t * env);

    AXIS2_EXTERN axis2_status_t AXIS2_CALL
    trust_sts_client_set_service_policy_location(
        trust_sts_client_t * sts_client,
        const axutil_env_t * env,
        axis2_char_t * file_path);

    /* Set credentials and authentication type used when contacting the STS. */
    AXIS2_EXTERN axis2_status_t AXIS2_CALL
    trust_sts_client_set_auth_info(
        trust_sts_client_t * sts_client,
        const axutil_env_t * env,
        axis2_char_t *username,
        axis2_char_t *password,
        axis2_char_t * auth_type);

    /* Attach an already-issued SAML token to the client. */
    AXIS2_EXTERN axis2_status_t AXIS2_CALL
    trust_sts_client_set_issued_token(
        trust_sts_client_t * sts_client,
        const axutil_env_t * env,
        rampart_saml_token_t *saml_token);

    /* Register a callback invoked when a token is issued. */
    AXIS2_EXTERN axis2_status_t AXIS2_CALL
    trust_sts_client_set_issued_token_func(
        trust_sts_client_t * sts_client,
        const axutil_env_t * env,
        issued_token_callback_func issue_token_func);

#ifdef __cplusplus
}
#endif

#endif  /* TRUST_STS_CLIENT */
<?php

namespace App\Http\Controllers\Admin;

use App\Http\Controllers\Controller;
use App\Http\Requests\Admin\AdminLogin;
use App\Providers\RouteServiceProvider;
use Illuminate\Http\Request;
use Illuminate\Support\Facades\Auth;

/**
 * Authentication for the `admin` guard: login form, login and logout.
 */
class LoginController extends Controller
{
    /**
     * Create a new controller instance.
     *
     * Only guests of the `admin` guard may reach these actions,
     * except logout (which obviously requires a logged-in admin).
     *
     * @return void
     */
    public function __construct(Request $request)
    {
        $this->middleware('guest:admin')->except(['logout']);
    }

    /** Show the admin login form. */
    public function showLoginForm()
    {
        return view('admin::layout.login');
    }

    /**
     * Attempt to authenticate against the admin guard.
     *
     * `status => 1` restricts login to active accounts; failures flash
     * a toastr error and return to the previous page.
     */
    public function login(AdminLogin $request)
    {
        $credentials = ['email' => $request->email, 'password' => $request->password, 'status' => 1];
        if(Auth::guard('admin')->attempt($credentials,$request->filled('remember'))){
            return redirect()->route('admin.home');
        }
        toastr()->error(__('base.error.notLoggedInDesc'), __('base.error.notLoggedIn'));
        return back();
    }

    /** Log out of the admin guard and redirect to the login page. */
    public function logout(Request $request)
    {
        Auth::guard('admin')->logout();
        return redirect()->route('admin.login');
    }
}
# Dashboard showing every trip alongside the recorded conditions.
# All actions require an authenticated user (see :require_user).
class TripsDashboardController < ApplicationController
  before_action :require_user

  # Loads all trips and conditions for the dashboard view.
  def index
    @trips = Trip.all
    @conditions = Condition.all
  end
end
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using MongoDB.Bson;
using AuthoBson.Shared.Data.Models;
using MongoDB.Bson.Serialization.Attributes;
using Newtonsoft.Json;

namespace AuthoBson.Shared.Data.Models
{
    /// <summary>
    /// Lightweight reference to another model: its id plus a single
    /// key/value pair describing the route it is reachable at.
    /// </summary>
    public class ModelReference : IModelBase
    {
        // Identifier of the referenced document.
        public string Id { get; set; }

        // NOTE(review): get-only property. MongoDB materializes it via the
        // [BsonConstructor] mapping below; confirm Newtonsoft.Json can also
        // populate it (it matches constructor parameters by name).
        [BsonElement("Route")]
        [JsonProperty("Route")]
        public KeyValuePair<string, string> Route { get; }

        // Maps the constructor parameters to the "Id" and "Route" members
        // for BSON deserialization.
        [BsonConstructor("Id", "Route")]
        public ModelReference(string Id, KeyValuePair<string, string> Route)
        {
            this.Id = Id;
            this.Route = Route;
        }
    }
}
/**
 * // Definition for a Node.
 * function Node(val,children) {
 *    this.val = val;
 *    this.children = children;
 * };
 */

/**
 * Maximum depth of an N-ary tree (number of nodes on the longest
 * root-to-leaf path). An empty tree has depth 0.
 *
 * Fix: the original dereferenced `node.children.length` unconditionally,
 * which throws when a leaf node carries `children: null`/`undefined`
 * (common in serialized N-ary tree inputs). A leaf now counts its own
 * depth regardless of how "no children" is represented.
 *
 * @param {Node} root
 * @return {number}
 */
var maxDepth = function(root) {
    return root ? recursion(root, 1) : 0;
};

// Returns the deepest level reachable from `node`, which sits at `depth`.
function recursion(node, depth) {
    const children = node.children;
    // Guard against null/undefined children as well as empty arrays.
    if (children && children.length) {
        let biggest = 0;
        for (let i = 0, l = children.length; i < l; ++i) {
            const temp = recursion(children[i], depth + 1);
            if (temp > biggest) {
                biggest = temp;
            }
        }
        return biggest;
    }
    // Leaf node: the path ends here.
    return depth;
}
<?php

namespace App\Repositories;

use App\Models\User;
use App\Repositories\BaseRepositoryTrait;

/**
 * Repository for User models; generic CRUD comes from BaseRepositoryTrait.
 */
class UserRepository
{
    use BaseRepositoryTrait;

    /** @var User model instance the trait operates on */
    private $model;

    /**
     * Constructor.
     *
     * @param User $contents
     */
    public function __construct(User $contents)
    {
        $this->model = $contents;
    }
}
<?php

/*
 * (c) Jérémy Marodon <marodon.jeremy@gmail.com>
 *
 * For the full copyright and license information, please view the LICENSE
 * file that was distributed with this source code.
 */

namespace Th3Mouk\RxTraining\Consumers\Simple;

use Rx\Observer\CallbackObserver;
use Rx\Scheduler\EventLoopScheduler;
use Rxnet\RabbitMq\RabbitMessage;

/**
 * Consumer that survives RabbitMQ disconnections: the connection is
 * retried with a delay and consumption is restarted after reconnect.
 *
 * NOTE(review): $this->rabbit, $this->output and $this->loop are
 * presumably provided by SimpleBaseConsumer — confirm in the base class.
 */
class SimpleDisconnectedConsumer extends SimpleBaseConsumer
{
    /**
     * Stores the consumption disposable so it can be disposed and the
     * consumption restarted after a reconnect.
     * @var \Rx\DisposableInterface
     */
    protected $consumer;

    /**
     * @var \Rxnet\RabbitMq\RabbitQueue
     */
    protected $queue;

    public function start()
    {
        $this->rabbit->connect()
            // This operator catches any error thrown by the rabbit
            // connection and relaunches the connect method.
            ->retryWhen(function ($errors) {
                // When an error is caught, wait 2s before trying to
                // reconnect so we don't brute-force the reconnection.
                return $errors->delay(2000)
                    // Once the 2s have elapsed, print a message and
                    // start the reconnection.
                    ->doOnNext(function () {
                        echo "Rabbit is disconnected, retrying\n";
                    });
            })
            ->subscribe(new CallbackObserver(function () {
                // Reaching this point means the connection is (re)established,
                // but consumption must be restarted. To do that, dispose the
                // disposable returned by the previous consumer subscription.
                // Disposing closes all streams and events bound on the loop.
                if ($this->consumer) {
                    $this->consumer->dispose();
                }

                $this->queue = $this->rabbit->queue('simple_queue', []);
                $this->queue->setQos(1);

                // Don't forget to store the return of the subscription.
                // The disposable is returned before any execution, so it can
                // be disposed at any moment.
                $this->consumer = $this->queue->consume()
                    ->delay(2000)
                    ->subscribe(new CallbackObserver(function (RabbitMessage $message) {
                        $data = $message->getData();
                        $perso_name = $data['name'];

                        $message->ack();
                        $this->output->writeln('<info>Just received ' . $perso_name . ' order</info>');
                    }), new EventLoopScheduler($this->loop));
            }), new EventLoopScheduler($this->loop));
    }
}
# Core library plumbing.
require 'prima/version'
require 'prima/configuration'
require 'prima/publisher'
require 'prima/subscriber'
require 'prima/benchmark'
require 'prima/transformation'

# ETL step framework and MessagePack-backed IO.
require 'prima/step'
require 'prima/etl_step'
require 'prima/missing_msgpack_types'
require 'prima/msgpack_step'
require 'prima/msgpack_input'
require 'prima/msgpack_io_reader'
require 'prima/msgpack_io_writer'
require 'prima/msgpack_output'
require 'prima/sink_step'
require 'prima/source_step'
require 'prima/transform_step'

# Concrete step implementations.
require 'prima/active_record_upsert_step'
require 'prima/container_step'
require 'prima/csv_parser_step'
require 'prima/filter_step'
require 'prima/mapper_step'
require 'prima/null_step'
require 'prima/regex_filter_step'
require 'prima/text_file_input_step'

# Instrumentation.
require 'prima/progress_reporter'
require 'prima/step_profiler'

module Prima
  class << self
    # Global library configuration (see Prima::Configuration).
    attr_accessor :configuration
  end

  self.configuration = Configuration.new

  # Yields the global configuration for block-style setup.
  def self.configure
    yield configuration
  end

  # Shared logger taken from the configuration.
  def self.logger
    configuration.logger
  end

  unless Prima.configuration.use_threads
    # NOTE(review): $SAFE levels were deprecated in Ruby 2.7 and have no
    # effect from Ruby 3.0 — confirm the supported Ruby version.
    $SAFE = 1 # Because we're using DRb
  end
end
/**
 * Factorial of n, computed iteratively.
 * Returns null for negative input (no factorial is defined there);
 * 0! and 1! are both 1.
 */
function factorial(n) {
    if (n < 0) {
        return null;
    }
    let result = 1;
    // Multiplying from 2 upward covers n <= 1 for free (empty loop).
    for (let k = 2; k <= n; k++) {
        result *= k;
    }
    return result;
}

console.log(factorial(2));
console.log(factorial(3));
console.log(factorial(4));
console.log(factorial(5));
console.log(factorial(10));
<?php

// Random student age in [1, 15].
$edad = rand(1,15);

/**
 * Prints where a student's toys are stored, based on age.
 *
 * Fixes:
 * - The original used `switch ($edad)` with boolean case expressions,
 *   which only matched through PHP's loose int == bool comparison.
 *   `switch (true)` is the correct idiom for range dispatch.
 * - Typo in the default message ("En estudiante" -> "El estudiante").
 *
 * @param int $edad student age
 */
function lugarJuguetes($edad)
{
    switch (true) {
        case $edad <= 5:
            echo "El estudiante con $edad años tendrá sus juguetes en la parte inferior de la bodega.";
            break;
        case ($edad > 5 && $edad <= 7):
            echo "El estudiante con $edad años tendrá sus juguetes en la parte media de la bodega.";
            break;
        case ($edad > 7 && $edad <= 12):
            echo "El estudiante con $edad años tendrá sus juguetes en la parte alta de la bodega.";
            break;
        default:
            echo "El estudiante con $edad años tendrá sus juguetes en la parte de a lado de la bodega.";
            break;
    }
}

lugarJuguetes($edad);
<?php /* Site header: issue-archive link plus session-aware user menu
         (username/logout when logged in, login link otherwise). */ ?>
<header class="header headroom headroom--not-bottom headroom--pinned headroom--top">
    <div class="backdrop"></div>
    <div class="site-nav">
        <a class="site-search" href="all-issues.php">往期</a>
        <!--<a class="site-search" href="devices.php">设备</a>-->
        <!--<a class="site-search" href="companys.php">公司</a>-->
        <? if(isset($_SESSION['user_id'])){ ?>
        <a class="site-search" href="user.php">
            <?=$_SESSION['user_name']?>
        </a>
        <a class="site-search" href="logout.php">退出</a>
        <? }else{ ?>
        <a class="site-search" href="login.php">登录</a>
        <? } ?>
    </div>
    <div class="site-info">
        <a href="index.php" class="site-title">煮豆</a>
        <span class="site-description"></span>
    </div>
</header>
# Create an empty capture file on the device and report its disk usage.
# The here-doc lines run inside the device shell: `su` elevates, the
# first `exit` leaves the root shell, the second leaves `adb shell`.
adb shell << EOF
su
touch /data/a.cap
du /data/a.cap
exit
exit
EOF
using FakeItEasy;
using Faker;
using FluentAssertions;
using NUnit.Framework;
using System;
using System.Threading.Tasks;
using Watchster.Application.Features.Commands;
using Watchster.Application.Interfaces;
using Watchster.Domain.Entities;

namespace Watchster.Application.UnitTests.Features.Commands
{
    /// <summary>
    /// Unit tests for <see cref="CreateMovieCommandHandler"/> using a
    /// faked movie repository.
    /// </summary>
    public class CreateMovieCommandTests
    {
        private readonly CreateMovieCommandHandler handler;
        private readonly IMovieRepository movieRepository;

        public CreateMovieCommandTests()
        {
            movieRepository = A.Fake<IMovieRepository>();
            handler = new CreateMovieCommandHandler(movieRepository);
        }

        [Test]
        public async Task Given_CreateMovieCommand_When_HandlerIsCalled_Should_CallAddAsyncMethodAsync()
        {
            // arrange: a command populated with randomized Faker data
            var command = new CreateMovieCommand
            {
                Title = Lorem.Sentence(),
                Overview = Lorem.Sentence(20),
                TMDbId = RandomNumber.Next(),
                ReleaseDate = DateTime.Now,
                Genres = Lorem.Sentences(RandomNumber.Next(1, 3)),
                PosterUrl = Internet.Url(),
                Popularity = RandomNumber.Next(),
                TMDbVoteAverage = RandomNumber.Next(0, 10)
            };

            // act
            var response = await handler.Handle(command, default);

            // assert: the repository was asked to persist exactly once
            A.CallTo(() => movieRepository.AddAsync(A<Movie>._)).MustHaveHappenedOnceExactly();
            response.Should().BeOfType(typeof(int));
        }
    }
}
#include <nlohmann/json.hpp>

#include "ircbot/engine.hxx"

using nlohmann::json;
using tikirc::message;

using namespace ircbot;

// Engine with no channels joined yet and the default plugin set loaded.
engine::engine(bool verbose_msgin) : channels(), log_msgin(verbose_msgin) {
    add_default_plugins();
}

// Convenience constructor: additionally wires the engine to a codec so
// decoded messages flow in and produced messages flow back out.
engine::engine(tikirc::stream_codec& conn, bool verbose_msgin)
    : engine(verbose_msgin) {
    conn >> *this;
    *this >> conn;
}

// Registers a named handler factory for use by make_handler.
void engine::add_plugin(const std::string& name, make_handler_fn plugin) {
    plugins.emplace(name, plugin);
}

// Instantiates an event handler from a JSON config node:
// - {"enabled": false} yields an inert default handler;
// - a bare string is shorthand for the "respond" plugin with that response;
// - otherwise the "plugin" key selects a registered factory (throws
//   via json::at / map::at on missing plugin name — by design).
std::shared_ptr<event_handler> engine::make_handler(const json& config) {
    auto enabled_it = config.find("enabled");
    if (enabled_it != config.end() && !enabled_it->get<bool>()) {
        return std::make_shared<event_handler>();
    }
    if (config.is_string()) {
        return make_handler(json {
            {"plugin", "respond"},
            {"response", config.get<std::string>()}
        });
    }
    auto handler_name = config.at("plugin").get<std::string>();
    auto make = plugins.at(handler_name);
    return make(config);
}

// Tracks a channel with an empty handler list.
void engine::join(const std::string& channel) {
    channels.emplace(channel, engine::channel());
}

void engine::add_handler(const std::string& channel,
                         std::shared_ptr<event_handler> handler) {
    channels.at(channel).handlers.push_back(handler);
}

// JSON form: {"channel": "...", "handlers": [...]}; each handler entry is
// passed through make_handler via the config overload below.
void engine::join(const json& config) {
    auto channel = config.at("channel").get<std::string>();
    auto handlers = config.at("handlers");
    join(channel);
    for (auto& handler : handlers) {
        add_handler(channel, handler);
    }
}

void engine::add_handler(const std::string& channel, const json& config) {
    add_handler(channel, make_handler(config));
}

// Dispatches one inbound IRC message: answers PINGs, and fans PRIVMSGs
// out to the handlers of the addressed channel (params()[0]).
// NOTE(review): std::cout is presumably pulled in via engine.hxx —
// confirm <iostream> is included somewhere.
void engine::on_data(message msg) {
    if (msg.command() == "PING") {
        produce_data(message::pong(msg));
    } else if (msg.command() == "PRIVMSG") {
        using std::placeholders::_1;
        auto ch_it = channels.find(msg.params()[0]);
        if (ch_it != channels.end()) {
            auto& channel = ch_it->second;
            channel_event event(
                std::bind(&engine::produce_data, this, _1),
                channel.globals,
                msg
            );
            for (auto& handler : channel.handlers) {
                handler->handle(event);
            }
        }
    }
    if (log_msgin) {
        std::cout << msg.line(false) << std::endl;
    }
}

// NOTE(review): `throw error` copies the argument and slices it down to
// std::exception, losing the derived type and message subclassing —
// consider propagating via std::exception_ptr if callers need the
// original type; confirm intended semantics.
void engine::on_error(const std::exception& error) {
    throw error;
}

void engine::on_end() {
    produce_end();
}
// Copyright (c) SimpleIdServer. All rights reserved.
// Licensed under the Apache License, Version 2.0. See LICENSE in the project root for license information.
using System.Collections.Concurrent;
using System.Collections.Generic;

namespace SimpleIdServer.OAuth.Extensions
{
    public static class ConcurrentBagExtensions
    {
        /// <summary>
        /// Removes one occurrence of <paramref name="item"/> from the bag by
        /// draining it and re-adding every other element.
        ///
        /// Fixes over the original:
        /// - The TryTake return value is now checked, so a concurrent drain
        ///   cannot make the loop act on a stale/default value.
        /// - Equality goes through EqualityComparer&lt;T&gt;, so a stored null
        ///   element no longer aborts the scan early (the original's
        ///   `result == null` check silently dropped that element and left
        ///   <paramref name="item"/> in the bag).
        ///
        /// Note: like the original, this is not atomic with respect to
        /// concurrent writers; elements added mid-scan may be missed.
        /// </summary>
        public static void Remove<T>(this ConcurrentBag<T> bag, T item)
        {
            var kept = new List<T>();
            T result;
            while (bag.TryTake(out result))
            {
                if (EqualityComparer<T>.Default.Equals(result, item))
                {
                    // One occurrence removed: stop scanning.
                    break;
                }
                kept.Add(result);
            }

            // Put back everything that wasn't the removed item.
            foreach (var l in kept)
            {
                bag.Add(l);
            }
        }
    }
}
package id.walt.essif

import io.kotest.core.spec.style.AnnotationSpec

// Placeholder spec for the ESSIF Authorization API flow; the real test
// stays commented out until it can run against a live service.
class AuthorizationApiTest : AnnotationSpec() {

    //TODO @Test
    // fun testAuthApiFlow() {
    //     EssifFlowRunner.authApi()
    // }
}
// On DOM ready (jQuery), render the monkey list from a Handlebars
// template fetched at runtime, then wire up the info-toggle buttons.
$(() => {
    let monkeysDiv = document.querySelector('.monkeys');

    fetch('./monkeyTemplate.hbs')
        .then(res => res.text())
        .then(monkeyTemplate => {
            let template = Handlebars.compile(monkeyTemplate);
            // `monkeys` is expected to be a global dataset loaded before
            // this script runs — TODO confirm against the page markup.
            let htmlResult = template({ monkeys })
            monkeysDiv.innerHTML = htmlResult;

            let buttons = monkeysDiv.querySelectorAll('button');
            buttons.forEach(x => x.addEventListener('click', showInformation));
        });
})

// Toggles visibility of the paragraph immediately after the clicked button.
// NOTE(review): this reads the *inline* display style; if the template
// does not set display:none inline, the first click hides rather than
// shows — confirm against monkeyTemplate.hbs.
function showInformation(e) {
    let pElement = e.target.nextElementSibling;

    if (pElement.style.display == 'none') {
        pElement.style.display = 'block';
    } else {
        pElement.style.display = 'none';
    }
}
<?php /* Editor-area navbar: common editor links, admin-only dropdown,
         and the current user's profile menu (avatar + logout). */ ?>
<style>
    #navbar .nav-link {
        display: flex;
        height: 100%;
        align-items: center;
        text-align: center;
    }

    canvas {
        -moz-user-select: none;
        -webkit-user-select: none;
        -ms-user-select: none;
    }
</style>

<?php
// Load the current editor's profile image.
$img = $this->editores_m->obtenerImagen($_SESSION['id']);
?>

<nav class="navbar navbar-expand-sm navbar-dark bg-dark">
    <div class="container">
        <a class="navbar-brand text-white">Edicción</a>
        <button class="navbar-toggler" data-target="#navbar" data-toggle="collapse" aria-controls="navbar" aria-expanded="false" aria-label="Toggle navigation">
            <span class="navbar-toggler-icon"></span>
        </button>
        <div id="navbar" class="collapse navbar-collapse">
            <ul class="navbar-nav mx-auto">
                <li class="nav-item">
                    <a class="nav-link" href="<?php echo base_url() ?>editor">Estadísticas</a>
                </li>
                <li class="nav-item">
                    <a class="nav-link" href="<?php echo base_url() ?>editor/publicarNoticia">Publicar noticia</a>
                </li>
                <li class="nav-item">
                    <a class="nav-link" href="<?php echo base_url() ?>editor/moderarNoticias">Moderar noticias</a>
                </li>
                <?php if ($_SESSION['admin']) : ?>
                    <!-- Menu solo para los administradores -->
                    <li class="nav-item dropdown">
                        <a class="nav-link dropdown-toggle" href="#" id="navbarDropdown" role="button" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">
                            Opciones Admin
                        </a>
                        <div class="dropdown-menu bg-dark px-1 px-sm-0" aria-labelledby="navbarDropdown">
                            <a class="nav-link" href="<?php echo base_url() ?>editor/categoria">Admin categorias</a>
                            <div class="dropdown-divider"></div>
                            <a class="nav-link" href="<?php echo base_url() ?>editor/adminNoticias">Admin Noticias</a>
                            <a class="nav-link" href="<?php echo base_url() ?>editor/adminSolicitudes">Admin Solicitudes</a>
                        </div>
                    </li>
                <?php endif; ?>
            </ul>
            <ul class="navbar-nav ">
                <li class="nav-item dropdown">
                    <a class="nav-link dropdown-toggle" href="#" id="navbarDropdown" role="button" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">
                        <img class="img-fluid mr-2 rounded-circle" src="<?= base_url('assets/img/') . $img ?>" width="28px">
                        <?= $_SESSION['username'] ?>
                    </a>
                    <div class="dropdown-menu bg-dark px-1 px-sm-0" aria-labelledby="navbarDropdown">
                        <a class="nav-link" href="<?php echo base_url() ?>editor/modificarPerfil">Modificar perfil</a>
                        <div class="dropdown-divider"></div>
                        <a class="nav-link" href="<?php echo base_url() ?>">Zeitung</a>
                        <div class="dropdown-divider"></div>
                        <a class="nav-link" href="<?php echo base_url() ?>editor/cerrarSesion">Cerrar Sesión</a>
                    </div>
                </li>
                <li class="nav-item">
                </li>
            </ul>
        </div>
    </div>
</nav>

<script src="<?= base_url('assets/libraries/chartjs/moment-with-locales.js') ?>"></script>
<script src="<?= base_url('assets/libraries/chartjs/Chart.bundle.js') ?>"></script>
package com.dev.boot.ws.app.service.impl;

import java.util.ArrayList;
import java.util.List;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import com.dev.boot.ws.app.commons.model.Producto;
import com.dev.boot.ws.app.repository.ProductoRepository;
import com.dev.boot.ws.app.service.ProductoService;

/**
 * CRUD service for {@link Producto}, delegating to the Spring Data
 * repository. Read operations run in read-only transactions.
 */
@Service("productoService")
public class ProductoServiceImpl implements ProductoService {

    @Autowired
    @Qualifier("productoRepository")
    private ProductoRepository productoRepository;

    /** Copies the repository's Iterable into a concrete List. */
    @Override
    @Transactional(readOnly = true)
    public List<Producto> findAll() {
        List<Producto> resultado = new ArrayList<>();
        for (Producto producto : productoRepository.findAll()) {
            resultado.add(producto);
        }
        return resultado;
    }

    /** Returns the product with the given id, or null when absent. */
    @Override
    public Producto findById(Long id) {
        return productoRepository.findById(id).orElse(null);
    }

    /** Persists (inserts or updates) the given product. */
    @Override
    @Transactional
    public Producto saveProducto(Producto producto) {
        return productoRepository.save(producto);
    }

    /** Deletes the product with the given id. */
    @Override
    @Transactional
    public void deleteById(Long id) {
        productoRepository.deleteById(id);
    }
}
require "pdfjs_viewer-rails/version"
require "pdfjs_viewer-rails/helpers"

module PdfjsViewer
  module Rails
    # Rails engine that serves the bundled pdf.js viewer assets and
    # exposes the view helpers to the host application.
    class Engine < ::Rails::Engine
      isolate_namespace PdfjsViewer

      # Serve the engine's public/ directory (the pdf.js distribution)
      # through the host app's middleware stack.
      initializer 'pdfjs_viewer-rails.load_static_assets' do |app|
        app.middleware.use ::ActionDispatch::Static, "#{root}/public"
      end

      # Mix the viewer helpers into every view.
      initializer "pdfjs_viewer-rails.view_helpers" do
        ActionView::Base.send :include, ViewHelpers
      end
    end
  end
end
const router = require('express').Router()
const {Listing} = require('../db/models')
module.exports = router

// REST routes for Listing resources.
//
// Fixes over the original:
// - `router.update(...)` is not an Express method (it threw a TypeError
//   the moment this module loaded) — updates are mapped to HTTP PUT.
// - The 404 branch of GET /:id no longer calls `next()` after having
//   already sent the response, which would hand the finished request to
//   the next matching route.

// GET / — all listings.
router.get('/', function(req, res, next) {
  Listing.findAll().then(listings => {
    res.json(listings)
  })
})

// GET /search — listings matching criteria from the request body.
router.get('/search', function(req, res, next) {
  Listing.findByCriteria(req.body)
    .then(listings => {
      res.status(200)
      res.json(listings)
    })
    .catch(next)
})

// GET /:id — one listing, or an empty 404 when it doesn't exist.
router.get('/:id', function(req, res, next) {
  Listing.findById(req.params.id)
    .then(listing => {
      if (listing) {
        res.status(200)
        res.json(listing)
      } else {
        res.status(404)
        res.json()
      }
    })
    .catch(next)
})

// POST / — create a listing from the request body.
router.post('/', function(req, res, next) {
  Listing.create(req.body)
    .then(listing => {
      res.status(200)
      res.json(listing)
    })
    .catch(next)
})

// PUT /:id — update a listing with the request body.
router.put('/:id', function(req, res, next) {
  Listing.findById(req.params.id)
    .then(listing => listing.update(req.body))
    .then(listing => res.json(listing))
    .catch(next)
})

// DELETE /:id — remove a listing.
router.delete('/:id', function(req, res, next) {
  Listing.findById(req.params.id)
    .then(listing => listing.destroy(req.body))
    .then(() => res.sendStatus(204))
    .catch(next)
})
// Copyright 2014 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

using UnityEngine;
using System.Collections;

/// @ingroup Scripts
/// Applies the inverse of the lens distortion to the image.  The image is "undistorted" so
/// that when viewed through the lenses (which redistort), the image looks normal.  In the case
/// of Cardboard, the lenses apply a pincushion distortion, so this effect applies a barrel
/// distortion to counteract that.
///
/// It is used to show the effect of distortion correction when playing the
/// scene in the Editor, and as the fall back when the native code distortion
/// is not available or disabled.
[RequireComponent(typeof(Camera))]
public class RadialUndistortionEffect : MonoBehaviour {
#if UNITY_EDITOR
  // Used to decide whether this eye belongs to the main camera rig.
  private StereoController controller;
#endif
  // Undistortion material; null when render textures or the shader are
  // unavailable, in which case the effect is a pass-through.
  private Material material;

  // Locates the undistortion shader and builds the material. Logs and
  // bails (leaving material null) when the platform can't support it.
  void Awake() {
    if (!SystemInfo.supportsRenderTextures) {
      Debug.Log("Radial Undistortion disabled: render textures not supported.");
      return;
    }
    Shader shader = Shader.Find("Cardboard/Radial Undistortion");
    if (shader == null) {
      Debug.Log("Radial Undistortion disabled: shader not found.");
      return;
    }
    material = new Material(shader);
  }

#if UNITY_EDITOR
  // Cache the StereoController of the eye this effect is attached to.
  void Start() {
    var eye = GetComponent<CardboardEye>();
    if (eye != null) {
      controller = eye.Controller;
    }
  }
#endif

  void OnRenderImage(RenderTexture source, RenderTexture dest) {
    // Check if we found our shader, and that native distortion correction is OFF (except maybe in
    // the editor, since native is not available here).
    bool disabled = material == null || !Cardboard.SDK.UseDistortionEffect;
#if UNITY_EDITOR
    // In the editor, only apply the effect to the main camera's eyes.
    bool mainCamera = controller != null && controller.GetComponent<Camera>().tag == "MainCamera";
    disabled |= !mainCamera;
#endif
    if (disabled) {
      // Pass through, no effect.
      Graphics.Blit(source, dest);
    } else {
      // Undistort the image.
      Graphics.Blit(source, dest, material);
    }
  }
}
import tn from "./tn.png";
import config from "./Chenshazhifangchui.wcfg.vue";

// Weapon data entry: Cinnabar Spindle (4-star sword).
export default {
  name: "chenshazhifangchui",
  // Display name (Chinese).
  chs: "辰砂之纺锤",
  // Thumbnail image.
  url: tn,
  // Rarity (stars).
  star: 4,
  type: "sword",
  // Per-weapon configuration component.
  config,
  // Passive-effect description shown in the UI (runtime text).
  effect: "元素战技造成的伤害值提高,提高数值相当于防御力的40/50/60/70/80%。该效果每1.5秒最多触发一次,并将在元素战技造成伤害后的0.1秒后清除效果。",
}
package com.agenthun.eseallite;

import android.app.Application;

import com.agenthun.eseallite.utils.update.UpdateConfig;

/**
 * @project ESeal
 * @authors agenthun
 * @date 16/3/4 上午6:48.
 */
public class App extends Application {
    // NOTE(review): API key committed in source — consider moving it to
    // build config / secrets management and rotating the key.
    public static final String GOOGLE_MAP_API_KEY = "AIzaSyBy5WtHdZ7Pbe-A2N57Kbf7iR0OIgo3yuY";

    @Override
    public void onCreate() {
        super.onCreate();
        // Initialize the in-app update checker at process start.
        UpdateConfig.initGet(this);
    }
}
# Returns the object_id of a freshly allocated 'hey' string.
# Without `# frozen_string_literal: true`, each call builds a new String
# object, so the two ids printed below normally differ — this script
# demonstrates exactly that.
def hey_id
  str = 'hey'
  str.object_id
end

puts hey_id
puts hey_id
#!/usr/bin/env bash
# Run the cautious / best-effort observation program through the shared
# driver one directory up, selecting the matching reasoning modes.
../run.sh run_cautious_best_effort_observations.lp --authorization-mode cautious --obligation-mode best_effort
NixArchive ========== NixArchive is a 'nix-based CLI tool for people who can't bear to 'rm' stuff ;-) Usage: archive foo bar baz Result: Archiving ./foo<br> Move successful.<br> File compressed.<br> Archiving ./bar<br> Move successful.<br> File compressed.<br> Archiving ./baz<br> Move successful.<br> File compressed.<br>
var combinedStream = require('combined-stream');
var parse = require('csv-stream');
var fs = require('fs');
var through2 = require('through2');
var path = require('path');

var logger = require( 'pelias-logger' ).get( 'whosonfirst' );

var isValidId = require('./components/isValidId');
var loadJSON = require('./components/loadJSON');
var recordHasIdAndProperties = require('./components/recordHasIdAndProperties');
var isActiveRecord = require('./components/isActiveRecord');
var extractFields = require('./components/extractFields');
var recordHasName = require('./components/recordHasName');
var notVisitingNullIsland = require('./components/recordNotVisitingNullIsland');

/*
 * Convert a base directory and list of types into a list of meta file paths.
 * NOTE(review): the parameter is called `bundles` here but the caller
 * (createReadStream) passes its `types` array — same data, confirm and
 * unify the naming.
 */
function getMetaFilePaths(directory, bundles) {
  return bundles.map(function(bundle) {
    return path.join(directory, 'meta', bundle);
  });
}

/*
 * Given the path to a meta CSV file, return a stream of the individual records
 * within that CSV file.
 */
function createOneMetaRecordStream(metaFilePath) {
  // All of these arguments are optional.
  var options = {
    escapeChar : '"', // default is an empty string
    enclosedChar : '"' // default is an empty string
  };

  return fs.createReadStream(metaFilePath)
    .pipe(parse.createStream(options));
}

/*
 * Given a list of meta file paths, create a combined stream that reads all the
 * records via the csv parser. Each sub-stream is created lazily, only when
 * the combined stream reaches it.
 */
function createMetaRecordStream(metaFilePaths, types) {
  var metaRecordStream = combinedStream.create();

  metaFilePaths.forEach(function appendToCombinedStream(metaFilePath, idx) {
    var type = types[idx];
    metaRecordStream.append( function ( next ){
      logger.info( 'Loading ' + type + ' records from ' + metaFilePath );
      next(createOneMetaRecordStream(metaFilePath));
    });
  });

  return metaRecordStream;
}

/*
  This function creates a stream that finds all the `latest` files in `meta/`,
  CSV parses them, extracts the required fields, stores only admin records for
  later, and passes all records on for further processing

  NOTE(review): `directory + 'data/'` assumes `directory` ends with a
  path separator — unlike getMetaFilePaths, which uses path.join. Confirm
  what callers pass before changing it.
*/
function createReadStream(directory, types, wofAdminRecords) {
  var metaFilePaths = getMetaFilePaths(directory, types);

  return createMetaRecordStream(metaFilePaths, types)
  .pipe(isValidId.create())
  .pipe(loadJSON.create(directory + 'data/'))
  .pipe(recordHasIdAndProperties.create())
  .pipe(isActiveRecord.create())
  .pipe(extractFields.create())
  .pipe(notVisitingNullIsland.create())
  .pipe(recordHasName.create())
  .pipe(through2.obj(function(wofRecord, enc, callback) {
    // store admin records in memory to traverse the heirarchy
    if (wofRecord.place_type !== 'venue' && wofRecord.place_type !== 'postalcode') {
      wofAdminRecords[wofRecord.id] = wofRecord;
    }

    callback(null, wofRecord);
  }));
}

module.exports = {
  create: createReadStream
};