answer stringlengths 15 1.25M |
|---|
// closure_node.h
#ifndef CLOSURE_NODE_H_
#define CLOSURE_NODE_H_
#include "branch_node.h"
namespace regex {
// AST node for a regex closure (repetition) such as *, +, ?, or {m,n}:
// the subexpression is matched between min_repetition and max_repetition
// times; greedy selects longest-first vs shortest-first matching.
class ClosureNode : public BranchNode {
 public:
  ClosureNode(int min_repetition, int max_repetition, bool greedy);
  ~ClosureNode() override {}
  // Visitor dispatch (see VisitorInterface).
  void Accept(VisitorInterface* visitor) override;
  // Operator priority of this node (overrides BranchNode).
  int GetPri() const override;
  int min_repetition() const;  // minimum repetition count
  int max_repetition() const;  // maximum repetition count
  bool greedy() const;         // true when matching greedily
 private:
  // Textual representation of this node (used by the base class).
  std::string ConvertString() const override;
  int min_repetition_;
  int max_repetition_;
  bool greedy_;
};
} // namespace regex
#endif // CLOSURE_NODE_H_ |
<!DOCTYPE html>
<html>
<head>
<meta charset='utf-8'>
<meta http-equiv="X-UA-Compatible" content="chrome=1">
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1">
<link href='https://fonts.googleapis.com/css?family=Architects+Daughter' rel='stylesheet' type='text/css'>
<link rel="stylesheet" type="text/css" href="stylesheets/stylesheet.css" media="screen">
<link rel="stylesheet" type="text/css" href="stylesheets/github-light.css" media="screen">
<link rel="stylesheet" type="text/css" href="stylesheets/print.css" media="print">
<!--[if lt IE 9]>
<script src="//html5shiv.googlecode.com/svn/trunk/html5.js"></script>
<![endif]-->
<title>Undo Git Pull</title>
</head>
<body>
<header>
<div class="inner">
<h1><a href="http://theredfoxfire.github.io/">Hasan Setiawan</a></h1>
<h2>Write, write, write give your wings on code!</h2>
<a href="https://github.com/theredfoxfire" class="button"><small>Follow me on</small> GitHub</a>
</div>
</header>
<div id="content-wrapper">
<div class="inner clearfix">
<section id="main-content">
<h2>Undo Git Pull</h2>
<h3>
You just made unwanted Pull
</h3>
<p>When it's happened you can do this trick to undo</p>
<p>
<blockquote>
<pre>
<code>
// you do this before
$ git pull
// then you have unwanted pull, you prefer to undo
$ git reset HEAD@{1}
</code>
</pre>
</blockquote>
</p>
</section>
<aside id="sidebar">
<p>This page was generated by <a href="https://pages.github.com">GitHub Pages</a> using the Architect theme by <a href="https://twitter.com/jasonlong">Jason Long</a>.<br><br><hr/><a action="action" onclick="window.history.go(-1); return false;" href="#">Back</a></p>
<h3>
</h3>
</aside>
</div>
</div>
</body>
</html> |
module JockeyCli
  # API client for the /workers resource (inherits request helpers
  # such as get/put/parsed_response from Base).
  class Worker < Base
    self.path = '/workers'

    class << self
      # GET /workers/:id/deploys -- list deploys for one worker.
      # options is forwarded as the query string.
      def deploys(id, options = {})
        parsed_response get("#{path}/#{id}/deploys", query: options)
      end

      # PUT /workers/restart -- restart workers described by data (request body).
      def restart(data)
        parsed_response put("#{path}/restart", body: data)
      end
    end
  end
end
<?php
// SdzBlogBundle:Article:new.html.twig
// Generated cache stub for the template above: returns an empty
// metadata array (no extracted messages/resources).
return array (
);
<!DOCTYPE html>
<html>
<head>
<title>Shawn Wilkinson - Blog</title>
<meta charset="UTF-8">
<script type="text/javascript">
window.location = "http://shawndav.github.io/blog/index#variable_scope"
</script>
</head>
<body>
</body>
</html> |
{% extends "layout.html" %}
{% block body %}
{# Job-application page: instructions panel followed by a form with the
   applicant's name and one text input per configured question.
   NOTE(review): the url_for endpoint names were redacted upstream as
   '<API key>' -- restore the real endpoint names from the Flask app. #}
<h2>Apply for a Job</h2>
<div> </div>
<div class="panel panel-info">
<div class="panel-heading">
<h3 class="panel-title">Instructions</h3>
</div>
<div class="panel-body">
<p>
Be sure you answer all of the questions. Failure to answer all questions will result in your application
being rejected. Keep all of your answers short and to the point for the best chance of making it to the
next round!
</p>
</div>
</div>
{# NOTE(review): <fieldset> wraps the <form>; valid HTML expects the
   fieldset inside the form -- confirm before restructuring. #}
<fieldset>
<form action="{{ url_for('<API key>') }}" method="post" class="form-horizontal">
<div class="form-group">
<label for="inputName" class="col-lg-2 control-label">Name</label>
<div class="col-lg-10">
<input type="text" class="form-control" id="inputName" name="applicant_name" placeholder="Your Name">
</div>
</div>
{# one text input per question; answers are posted keyed by question id #}
{% for question in questions %}
<div class="form-group">
<label for="input{{ question['id'] }}" class="col-lg-2 control-label">{{ question['question'] }}</label>
<div class="col-lg-10">
<input type="text" class="form-control" id="input{{ question['id'] }}" name="{{ question['id'] }}">
</div>
</div>
{% endfor %}
<div class="form-group">
<div class="col-lg-10 col-lg-offset-2">
<button type="reset" class="btn btn-default">Reset</button>
<a href="{{ url_for('<API key>') }}" class="btn btn-default">Cancel</a>
<button type="submit" class="btn btn-primary">Submit</button>
</div>
</div>
</form>
</fieldset>
{% endblock %}
// Webpack build config: two entry bundles (app + vendor), Babel for
// modern JS, and compile-time constants injected from ./config.js
// and the environment (.env via dotenv).
import webpack from 'webpack'
import { join, sep } from 'path'
require('dotenv').config()
import * as config from './config.js'
export default {
  entry: {
    // the `.${sep}` prefix keeps paths explicitly relative on any OS
    bundle: [`.${sep}` + join('src', 'js', 'index.js')],
    vendor: [`.${sep}` + join('src', 'js', 'vendor.js')]
  },
  output: {
    filename: `.${sep}` + join('dist', 'js', '[name].js'),
    publicPath: config.PP
  },
  module: {
    rules: [
      {
        // transpile project JS (excluding node_modules) with Babel
        test: /\.js$/,
        exclude: /(node_modules)/,
        use: [{
          loader: 'babel-loader',
          options: {
            presets: [
              ['env', {modules: false}],
              'stage-0'
            ]
          }
        }]
      }
    ]
  },
  plugins: [
    // expose site/build constants to application code as free variables
    new webpack.DefinePlugin({
      NODE_ENV: JSON.stringify(process.env.NODE_ENV),
      PP: JSON.stringify(config.PP),
      SITE_TITLE: JSON.stringify(config.SITE_TITLE),
      SITE_SLUG: JSON.stringify(config.SITE_SLUG),
      SITE_IMAGE: JSON.stringify(config.SITE_IMAGE),
      DESCRIPTION: JSON.stringify(config.DESCRIPTION),
      SITE_URL: JSON.stringify(config.SITE_URL),
      DEVELOPER_NAME: JSON.stringify(config.DEVELOPER_NAME),
      DEVELOPER_URL: JSON.stringify(config.DEVELOPER_URL)
    })
  ]
}
import os
import io
import sys
import time
import threading
import struct
import pickle
import tempfile
import shutil
import types
from threading import Thread
from Queue import Queue
from multiprocessing import cpu_count
from mapping import DetMap2
import dbg
import util
import services
import translators
from blobs import *
from params import *
# basic use of ThreadPool:
# pool.submit(func, arg1, arg2)
# pool.join()
class Worker(Thread):
    """Daemon thread executing tasks from a given tasks queue.

    Each task is a tuple (clone, func, args, kargs); clone() produces a
    private service instance that is prepended to args before func runs.
    """
    def __init__(self, tasks):
        Thread.__init__(self)
        self.tasks = tasks
        self.daemon = True
        self.start()
    def run(self):
        while True:
            clone, func, args, kargs = self.tasks.get()
            #print 'work on job now %s' % self.ident
            # BUGFIX: srv must exist before the try block -- if clone()
            # itself raised, the except handler's print(str(srv)) hit a
            # NameError that masked the original exception.
            srv = None
            try:
                srv = clone()
                args = (srv, ) + args
                func(*args, **kargs)
            except Exception as e:
                # report and keep the worker alive; a dead worker would
                # silently shrink the pool
                print(str(srv))
                print(str(func))
                print(e)
            self.tasks.task_done()
class ThreadPool:
    """Fixed-size pool of Worker threads fed from one shared queue."""
    def __init__(self, num_threads):
        # Bounded queue: submit() blocks once num_threads jobs are pending.
        self.tasks = Queue(num_threads)
        workers_left = num_threads
        while workers_left > 0:
            Worker(self.tasks)
            workers_left -= 1
    def submit(self, c, func, *args, **kargs):
        # Enqueue one job; c is a callable producing the service instance.
        job = (c, func, args, kargs)
        self.tasks.put(job)
    def join(self):
        # Block until every submitted job has been processed.
        self.tasks.join()
class ServiceThread(Thread):
    """A dedicated daemon thread per service; requests submitted to this
    thread run one at a time, so per-service operations are serialized."""
    def __init__(self, service):
        Thread.__init__(self)
        self.srv = service
        # small bound gives backpressure on submit()
        self.tasks = Queue(5) # TODO: what's the proper number
        self.daemon = True
        self.start()
    def run(self):
        # Consume forever; the owned service is prepended as first argument.
        while True:
            func, args, kargs = self.tasks.get()
            args = (self.srv, ) + args
            try: func(*args, **kargs)
            except Exception as e:
                # best-effort report; thread keeps serving
                print e
            self.tasks.task_done()
    def submit(self, func, *args, **kargs):
        # Enqueue a job; blocks when the queue is full.
        self.tasks.put((func, args, kargs))
    def join(self):
        # Wait until all submitted jobs have completed.
        self.tasks.join()
class Scheduler:
    """Routes jobs either to a per-service serial thread (ordered) or to a
    shared thread pool (parallel, on a cloned service instance)."""
    def __init__(self, services, maxthreads=None):
        self.srv_threads = {}
        # XXX. inflexible in dealing with changes of services (list)
        if(maxthreads is None or cpu_count() > maxthreads): maxthreads = cpu_count()
        nthreads = maxthreads - len(services)
        for srv in services:
            self.srv_threads[srv] = ServiceThread(srv)
        # thredpool thread number?
        dbg.dbg("init scheduler: %s" % nthreads)
        # NOTE(review): nthreads can be <= 0 when len(services) >= maxthreads;
        # Queue(n<=0) is unbounded and no Worker would drain it -- confirm.
        self.pool = ThreadPool(min(nthreads, 3*len(services)))
    def submit(self, srv, serialize, func, *args, **kargs):
        # serialize=True  -> run on srv's dedicated thread (ordered);
        # serialize=False -> hand a clone (srv.copy) to the shared pool.
        if serialize:
            thread = self.srv_threads[srv]
            thread.submit(func, *args, **kargs)
        else:
            """
            # haichen what's the purpose of the following code?
            if srv in self.srv_threads:
                dbg.dbg("putting into service thread")
                thread = self.srv_threads[srv]
                if thread.tasks.empty():
                    thread.submit(func, *args, **kargs)
            else:
            """
            #dbg.dbg("putting into thread pool")
            self.pool.submit(srv.copy, func, *args, **kargs)
    def join(self):
        # Drain the shared pool first, then each per-service queue.
        self.pool.join()
        for srv in self.srv_threads:
            self.srv_threads[srv].join()
# handle user's inputs of config options
def _get_conf_services(default):
    """Return a comma-joined backend list; prompt interactively when
    default is None (py2: raw_input)."""
    assert type(default) in [types.NoneType, list]
    if default is not None:
        return ",".join(default)
    # dirty user's input
    slugs = ",".join(slug for (slug, _) in services.backends())
    print "input storage backends, (e.g., %s)" % slugs
    for (_, doc) in services.backends():
        print " %s" % doc
    return raw_input("> ").strip()
def _get_conf_nreplicas(default, nservices):
    """Return the replica count as a string; prompt (default "2") when not
    given, rejecting values larger than the number of services."""
    assert type(default) in [types.NoneType, int]
    if default is not None:
        return str(default)
    # dirty user's input
    print "input the number of replicas (default=2)"
    while True:
        replicas = raw_input("> ").strip()
        if replicas == "":
            replicas = "2"
        if replicas.isdigit():
            if int(replicas) > nservices:
                dbg.err("the number of replicas should not be larger than the number of services")
            else:
                return replicas
        else:
            print "input the number"
def <API key>(default):
    """Return the encryption keyphrase ("" disables encryption).

    NOTE(review): the identifier was redacted upstream as '<API key>';
    it is the encrypt-key prompt helper called from cmd_init/cmd_clone.
    """
    assert type(default) in [types.NoneType, str]
    if default is not None:
        return default
    # NOTE.
    # empty encrypt_key means, no-encryption
    encrypt_key = ""
    print "do you use encryption (y/n)?"
    while True:
        encrypt_yn = raw_input("> ").strip().lower()
        if(encrypt_yn not in ['y','n']):
            dbg.err("input with y/n")
            continue
        break
    if(encrypt_yn == 'y'):
        print "input keyphrase:"
        encrypt_key = raw_input("> ").strip()
    return encrypt_key
# in charge of a local machine (.metasync)
class MetaSync:
    """Per-checkout handle: resolves paths under <root>/.metasync, then
    loads config, services, scheduler, translators and the blob mapping."""
    def __init__(self, root, opts=None):
        # repo/.metasync/
        # | +-- meta
        # +-- root
        # useful path info
        self.path_root = self._find_root(root)
        self.path_meta = os.path.join(self.path_root, META_DIR)
        self.path_conf = self.get_path("config")
        self.path_objs = self.get_path("objects")
        self.path_master = self.get_path("master")
        self.path_head_history = self.get_path("head_history")
        self.options = opts
        # local blob store
        self.blobstore = BlobStore2(self) #BlobStore(self.path_objs)
        # load on demand
        self.config = None
        self.srvmap = {}
        self.scheduler = None
        self.translators = []
        self.mapping = None
        # post init
        self._load()
def _find_root(self, curpath):
# find repo
curpath = os.path.abspath(curpath)
orgpath = curpath
auth_dir = os.path.join(os.path.expanduser("~"), ".metasync")
while True:
path = os.path.join(curpath, META_DIR)
if(path != auth_dir and os.path.exists(os.path.join(curpath, META_DIR))): return curpath
sp = os.path.split(curpath)
if(sp[1] == ""): break
curpath = sp[0]
return orgpath
    @property
    def services(self):
        # All configured backend service objects (values of srvmap).
        return self.srvmap.values()
    # load member variables from config
    def _load(self):
        """(Re)load config, service map, scheduler, translator pipe and the
        deterministic blob mapping (cached in mapping.pcl)."""
        if not self.check_sanity():
            return
        if(not os.path.exists(AUTH_DIR)): os.mkdir(AUTH_DIR)
        # load config
        self.config = util.load_config(self.path_conf)
        self.namespace = self.config.get("core", "namespace")
        self.clientid = self.config.get("core", "clientid")
        # load services from config
        self.srvmap = {}
        for tok in self.config.get("backend", "services").split(","):
            srv = services.factory(tok)
            self.srvmap[srv.sid()] = srv
        self.nreplicas = int(self.config.get("backend", "nreplicas"))
        nthreads = self.options.nthreads if self.options is not None else 2
        self.scheduler = Scheduler(self.services, (nthreads+1)*len(self.srvmap))
        # load translator pipe
        if self.is_encypted():
            self.translators.append(translators.TrEncrypt(self))
        # TODO. for integrity option
        # if self.is_signed():
        #     self.translators.append(TrSigned(self))
        beg = time.time()
        if(os.path.exists(self.get_path("mapping.pcl"))):
            with open(self.get_path("mapping.pcl")) as f:
                self.mapping = pickle.load(f)
        else:
            # first load: size the hash space from per-service storage (GB)
            mapconfig = []
            for srv in self.services:
                mapconfig.append((srv.sid(), srv.info_storage()/GB))
            hspacesum = sum(map(lambda x:x[1], mapconfig))
            hspace = max(hspacesum+1, 1024)
            self.mapping = DetMap2(mapconfig, hspace=hspace, replica=self.nreplicas)
            self.mapping.pack()
            with open(self.get_path("mapping.pcl"), "w") as f:
                pickle.dump(self.mapping, f)
        end = time.time()
        dbg.time("mapping init %s" % (end-beg))
        dbg.dbg("head: %s", self.get_head_name())
    def cmd_reconfigure(self, backends, replica):
        """Re-map stored blobs onto a new backend list / replication factor,
        uploading blobs to the services gaining them and deleting from
        services losing them."""
        srvmap = {}
        for tok in backends.split(","):
            srv = services.factory(tok)
            srvmap[srv.sid()] = srv
        lst_services = srvmap.values()
        mapconfig = []
        lock_dic = {}
        for srv in lst_services:
            mapconfig.append((srv.sid(), srv.info_storage()/GB))
        for srv in srvmap:
            lock_dic[srv] = threading.Lock()
            if srv not in self.srvmap:
                # brand-new backend: create the remote objects dir
                srvmap[srv].putdir(self.get_remote_path("objects"))
        for srv in self.srvmap:
            if srv not in lock_dic:
                lock_dic[srv] = threading.Lock()
        beg = time.time()
        # NOTE(review): replica arrives as a string and goes through eval();
        # acceptable only for trusted CLI input -- confirm callers.
        self.mapping.reconfig(mapconfig, eval(replica))
        end = time.time()
        dbg.info("remap: %.3fs" % (end-beg))
        beg = time.time()
        lst_objs = self.blobstore.list()
        added, removed = self.mapping.get_remapping(lst_objs)
        nthreads = self.options.nthreads if self.options is not None else 2
        #REFACTOR
        def __put_next(srv, lst, lock):
            # worker: pop hashes from the shared list under lock and upload
            # each blob (through the translator pipe) with exponential backoff
            dbg.job("submitted to: %s" % srv)
            while True:
                lock.acquire()
                if(len(lst) == 0):
                    lock.release()
                    break
                next = lst.pop()
                lock.release()
                if next is not None:
                    with open(self.get_local_obj_path(next), "rb") as f:
                        blob = f.read()
                    for tr in self.translators:
                        blob = tr.put(blob)
                    # XXX HACK
                    backoff = 0.5
                    remote_path = self.get_remote_obj_path(next)
                    while not srv.put(remote_path, blob):
                        time.sleep(backoff)
                        backoff *= 2
        def __rm_next(srv, lst, lock):
            # worker: pop-and-delete counterpart of __put_next
            dbg.job("submitted to: %s" % srv)
            while True:
                lock.acquire()
                if(len(lst) == 0):
                    lock.release()
                    break
                next = lst.pop()
                lock.release()
                if next is not None:
                    remote_path = self.get_remote_obj_path(next)
                    srv.rm(remote_path)
        cnt_added = 0
        for srv in added:
            if(len(added[srv]) == 0): continue
            cnt_added += len(added[srv])
            for i in range(nthreads):
                self.scheduler.submit(srvmap[srv], False, __put_next, added[srv], lock_dic[srv])
        self._join()
        end = time.time()
        dbg.info("remap put: %.3fs" % (end-beg))
        beg = time.time()
        cnt_removed = 0
        for srv in removed:
            if(len(removed[srv]) == 0): continue
            cnt_removed += len(removed[srv])
            for i in range(nthreads):
                self.scheduler.submit(self.srvmap[srv], False, __rm_next, removed[srv], lock_dic[srv])
        self._join()
        end = time.time()
        dbg.info("remap rm: %.3fs" % (end-beg))
        dbg.info("added %d, removed %d" % (cnt_added, cnt_removed))
    # config-related parser
    def is_encypted(self):
        # True when a non-empty encryptkey is configured.
        # (sic: method name typo'd upstream; kept for caller compatibility)
        key = self.config.get('core', 'encryptkey').strip()
        return key != ""
    # handling dir/path names
    def get_path(self, path):
        # absolute path of a file inside .metasync/
        return os.path.join(self.path_meta, path)
    def get_head(self):
        # per-client head pointer file
        return self.get_path(self.get_head_name())
    def get_head_name(self):
        return "head_%s" % self.get_client_id()
    def get_head_value(self):
        # head content format: roothash.confighash[:6].version -> roothash
        with open(self.get_head()) as f:
            return f.read().strip().split(".")[0]
        return None  # NOTE(review): unreachable -- open() failures propagate
    def get_head_and_config(self):
        # full head pointer string (hash.confighash.version)
        with open(self.get_head()) as f:
            return f.read().strip()
        return None  # unreachable (see get_head_value)
    def get_prev(self):
        # per-client previously-synced pointer file
        return self.get_path(self.get_prev_name())
    def get_prev_name(self):
        return "prev_%s" % self.get_client_id()
    def get_prev_value(self):
        with open(self.get_prev()) as f:
            return f.read().strip()
        return None  # unreachable (see get_head_value)
    def get_next_version(self):
        # third dotted field of prev is the version counter
        with open(self.get_prev()) as f:
            return int(f.read().strip().split(".")[2]) + 1
        return None  # unreachable (see get_head_value)
    #XXX: Cache?
    def get_config_hash(self):
        # second dotted field of head is the config hash prefix
        with open(self.get_head()) as f:
            return f.read().strip().split(".")[1]
        return None  # unreachable (see get_head_value)
    def get_client_id(self):
        return self.clientid
    def get_relative_path(self, path):
        # path relative to the repo root
        return os.path.relpath(os.path.abspath(path), self.path_root)
    def get_local_path(self, *path):
        return os.path.join(self.path_root, *[p.strip("/") for p in path])
    def get_local_obj_path(self, hv):
        # blob file under .metasync/objects/
        return os.path.join(self.path_objs, hv)
    def get_remote_path(self, *path):
        #return os.path.join(self.namespace, *path).rstrip("/")
        return "/".join([self.namespace] + list(path)).rstrip("/").rstrip("\\")
    def get_remote_obj_path(self, *hashes):
        return self.get_remote_path("objects", *hashes)
    def get_root_blob(self):
        return self.blobstore.get_root_blob()
    # check basic sanity of repo's meta info
    def check_sanity(self, whynot=False):
        """True iff .metasync/, its config and objects/ all exist.
        With whynot=True the first failing check is printed to stderr."""
        def __err(why):
            if whynot:
                print >> sys.stderr, why
            return False
        if not os.path.exists(self.path_meta):
            return __err("Can't find the root of repo (%s)" % self.path_meta)
        if not os.path.exists(self.path_conf):
            return __err("Can't find config (%s)" % self.path_conf)
        if not os.path.exists(self.path_objs):
            return __err("Can't find objects store (%s)" % self.path_objs)
        return True
    # schedule-related
    def _put_all_content(self, content, remote_path, serial=False):
        # Upload one in-memory blob to every service.
        def __put(srv):
            #dbg.job("submitted to: %s" % srv)
            srv.put(remote_path, content)
        # submit jobs
        for srv in self.services:
            self.scheduler.submit(srv, serial, __put)
    def _put_all_dir(self, remote_path):
        # Create remote_path as a directory on every service (serialized).
        # XXX. handle errs
        def __putdir(srv):
            srv.putdir(remote_path)
        # submit jobs
        for srv in self.services:
            self.scheduler.submit(srv, True, __putdir)
    def _put_all(self, path, remote_path):
        # Upload a local file to every service (serialized per service).
        # XXX. handle errs
        def __put(srv):
            with open(path, "rb") as f:
                srv.put(remote_path, f.read())
        # submit jobs
        for srv in self.services:
            self.scheduler.submit(srv, True, __put)
    def _update_all(self, path, remote_path):
        # Overwrite an existing remote file on every service (serialized).
        # XXX. handle errs
        def __update(srv):
            #dbg.job("submitted to: %s" % srv)
            with open(path, "rb") as f:
                #print 'start to put'
                srv.update(remote_path, f.read())
                #print 'put ends'
        # submit jobs
        for srv in self.services:
            self.scheduler.submit(srv, True, __update)
    def _join(self):
        # Wait for all scheduled jobs to finish.
        self.scheduler.join()
    def _get(self, srv, path, remote_path):
        # Download remote_path from srv into local file path (async job).
        def __get(srv, path, remote_path):
            dbg.job("submitted to: %s (%s)" % (srv, path))
            with open(path, "wb") as f:
                blob = srv.get(remote_path)
                if(blob is None):
                    # one retry after a short pause
                    time.sleep(1)
                    blob = srv.get(remote_path)
                for tr in reversed(self.translators):
                    blob = tr.get(blob)
                f.write(blob)
        self.scheduler.submit(srv, False, __get, path, remote_path)
    # bstore-related
    def bstore_download(self):
        """Fetch every remote object not already present in the local store,
        spreading the work over nthreads jobs per service."""
        # TODO, handle when R > 1
        lst = self.blobstore.list()
        #dbg.dbg("lst files:%s" % lst)
        lock = threading.Lock()
        def __get_next(srv, hash_dic, lock, allset, srvname):
            # worker: pop hashes for srvname, skip ones another worker already
            # claimed (allset), and download with exponential backoff
            if(len(hash_dic[srvname]) == 0): return
            while True:
                lock.acquire()
                try:
                    next = hash_dic[srvname].pop()
                    l = len(hash_dic[srvname])
                    if(l%10 == 0):
                        dbg.dbg("%s left %d" % (srvname, l))
                    if(next not in allset):
                        allset.add(next)
                    else:
                        next = None
                except:
                    # list exhausted (pop from empty list)
                    lock.release()
                    break
                lock.release()
                if(next is not None):
                    remote_path = self.get_remote_obj_path(next)
                    path = os.path.join(self.path_objs, next)
                    with open(path, "wb") as f:
                        backoff = 0.5
                        while True:
                            blob = srv.get(remote_path)
                            if(blob is not None): break
                            dbg.dbg("back off %s" % srvname)
                            time.sleep(backoff)
                            backoff*=2
                        for tr in reversed(self.translators):
                            blob = tr.get(blob)
                        f.write(blob)
        hash_dic = {}
        allset = set([])
        for srv in self.services:
            hash_dic[str(srv)] = []
            srvlist = srv.listdir(self.get_remote_obj_path())
            backoff = 1
            while srvlist is None:
                # NOTE(review): backoff never grows here -- retries every 1s;
                # probably meant to double like the loops above. Confirm.
                dbg.dbg("back off - listdir %s" % str(srv))
                time.sleep(backoff)
                srvlist = srv.listdir(self.get_remote_obj_path())
            for hashname in srvlist:
                if(hashname in lst):
                    #dbg.dbg("%s is already in bstore" % hashname)
                    continue
                hash_dic[str(srv)].append(hashname)
        nthreads = self.options.nthreads if self.options is not None else 2
        for srv in self.services:
            dbg.dbg("%s:%d dn" % (str(srv), len(hash_dic[str(srv)])))
            ##HACK
            for i in range(nthreads):
                self.scheduler.submit(srv, False, __get_next, hash_dic, lock, allset, str(srv))
    def bstore_sync_left(self, hashdic):
        """Upload leftover replica blobs (srv-id -> hash list) to their
        services; no-op when every list is empty."""
        cnt = 0
        for i in hashdic:
            cnt += len(hashdic[i])
        if(cnt == 0): return
        def __put_next(srv, lst, lock):
            # worker: drain lst under lock; upload each blob through the
            # translator pipe with exponential backoff
            dbg.job("submitted to: %s" % srv)
            while True:
                lock.acquire()
                if(len(lst) == 0):
                    lock.release()
                    break
                next = lst.pop()
                lock.release()
                if next is not None:
                    with open(self.get_local_obj_path(next), "rb") as f:
                        blob = f.read()
                    for tr in self.translators:
                        blob = tr.put(blob)
                    # XXX HACK
                    backoff = 0.5
                    remote_path = self.get_remote_obj_path(next)
                    while not srv.put(remote_path, blob):
                        time.sleep(backoff)
                        backoff *= 2
        lock_dic = {}
        for i in hashdic:
            lock_dic[i] = threading.Lock()
        nthreads = self.options.nthreads if self.options is not None else 2
        for srv in hashdic:
            for i in range(nthreads):
                self.scheduler.submit(self.srvmap[srv], False, __put_next, hashdic[srv], lock_dic[srv])
        self._join()
    #XXX: it needs to return after one set is put, and continue on replication.
    def bstore_sync(self, hashnames):
        """Push new blobs to the services chosen by the mapping.

        Returns a dict (srv-id -> hashes) of replica copies still owed to
        each service (the first uploader of a hash wins; later replicas are
        deferred into hashdic_left).
        """
        # NOTE(review): hashnames[0] raises IndexError when the list is
        # empty -- callers appear to guarantee non-empty input; confirm.
        dbg.dbg("need to sync: %s..@%d" % (hashnames[0], len(hashnames)))
        def __put_next(srv, hashdic, hashdic_left, allset, key, lock):
            # worker: claim hashes for this service; a hash still in allset
            # is uploaded now, otherwise it is recorded as a pending replica
            dbg.job("submitted to: %s" % srv)
            while True:
                lock.acquire()
                if(len(hashdic[key]) == 0 or len(allset) == 0):
                    lock.release()
                    break
                next = hashdic[key].pop()
                if(next in allset):
                    allset.remove(next)
                else:
                    hashdic_left[key].append(next)
                    next = None
                lock.release()
                if next is not None:
                    with open(self.get_local_obj_path(next), "rb") as f:
                        blob = f.read()
                    for tr in self.translators:
                        blob = tr.put(blob)
                    # XXX HACK
                    backoff = 0.5
                    remote_path = self.get_remote_obj_path(next)
                    while not srv.put(remote_path, blob):
                        dbg.dbg("backoff %s" % srv)
                        time.sleep(backoff)
                        backoff *= 2
        nthreads = self.options.nthreads if self.options is not None else 2
        hashdic = {}
        hashdic_left = {}
        allset = set()
        lock = threading.Lock()
        for srv in self.srvmap:
            hashdic[srv] = []
            hashdic_left[srv] = []
        for hashname in hashnames:
            allset.add(hashname)
            for i in self.mapping.get_mapping(hashname):
                hashdic[i].append(hashname)
        for srv in hashdic:
            for i in range(nthreads):
                self.scheduler.submit(self.srvmap[srv], False, __put_next, hashdic, hashdic_left, allset, srv, lock)
        self._join()
        return hashdic_left
    # iterate bstore
    def bstore_iter(self):
        # Yield the hash names of all objects in the local store.
        for root, dirs, files in os.walk(self.path_objs):
            for name in files:
                yield name
    def bstore_iter_remote(self, srv):
        # Yield the hash names stored remotely on the given service.
        assert srv in self.services
        # NOTE. at some point, we need cascaded directory hierarchy
        for obj in srv.listdir(self.get_remote_obj_path()):
            yield obj
    #XXX. update only changed files (SY)
    def restore_from_master(self):
        """Materialize the whole working tree from the current root blob.
        thv codes: F=file (stream), m=small/inline file, D/M=directory."""
        root = self.get_root_blob()
        dbg.dbg("restore")
        for name, blob in root.walk():
            pn = os.path.join(self.path_root, name)
            if blob.thv == "F":
                content = blob.read()
                util.write_file(pn, content.getvalue())
                content.close()
            if blob.thv == "m":
                # inline blob: read() returns the bytes directly
                content = blob.read()
                util.write_file(pn, content)
            elif blob.thv == "D" or blob.thv == "M":
                try:
                    os.mkdir(pn)
                except:
                    # directory already exists
                    pass
        return True
    def propose_value(self, prev, newvalue):
        # Run one Paxos round (keyed by prev) to agree on the next master.
        from paxos import Proposer
        self.proposer = Proposer(self.clientid, self.services, self.get_remote_path("pPaxos/"+prev))
        return self.proposer.propose(newvalue)
    # need to truncate if history is too long.
    def get_history(self, is_master=False):
        """Return the pointer history, newest first (empty list when the
        history file is empty).

        NOTE(review): self.path_master_history is never assigned in the
        visible code -- is_master=True likely raises AttributeError;
        confirm against the full source.
        """
        pn = self.path_master_history if is_master else self.path_head_history
        content = util.read_file(pn).strip()
        if content:
            history = content.split("\n")
            history.reverse()
        else:
            history = []
        return history
    def get_common_ancestor(self, head_history, master_history, <API key>=None):
        # First head entry also present in master history (both are
        # newest-first), or None when the histories do not intersect.
        # NOTE(review): third parameter name was redacted upstream and is
        # currently unused here.
        # change to use <API key>
        for head in head_history:
            if(head in master_history):
                return head
        return None
    def try_merge(self, head_history, master_history):
        """Attempt an automatic merge of local head with remote master;
        returns False when both sides changed the same path."""
        # this need to be fixed.
        dbg.dbg("Trying to merge")
        # we may need to cache the last branched point
        common = self.get_common_ancestor(head_history, master_history)
        dbg.dbg("%s %s %s", head_history[0], master_history[0], common)
        common = self.blobstore.get_blob(common, "D")
        head = self.get_root_blob()
        master = self.blobstore.get_blob(master_history[0], "D")
        added1 = head.diff(common)
        added2 = master.diff(common)
        def intersect(a, b):
            return list(set(a) & set(b))
        # both sides touched the same path -> cannot auto-merge
        if(len(intersect(added1.keys(), added2.keys())) != 0):
            dbg.err("both modified--we need to handle it")
            return False
        # graft master's additions into the local tree
        for i in added2.keys():
            path = os.path.join(self.path_root, i)
            dirblob = self.blobstore.load_dir(os.path.dirname(path), dirty=True)
            dirblob.add(os.path.basename(path), added2[i], dirty=False)
        # HACK, need to go through all the non-overlapped history.
        self.append_history(master.hv)
        head.store()
        self.append_history(head.hv)
        # HACK, need to be changed
        newblobs = self.blobstore.get_added_blobs()
        # push new blobs remotely
        self.bstore_sync(newblobs)
        self._join()
        return True
    def get_uptodate_master(self, includeself=True, srv=None):
        """Return the newest master pointer among all clients' prev_* files
        (newest = largest version, the third dotted field)."""
        if srv == None:
            srv = self.services[0]
        prev_clients = filter(lambda x:x.startswith("prev_"), srv.listdir(self.get_remote_path("")))
        pointers = set()
        for prev in prev_clients:
            if not includeself or not prev.endswith(self.clientid):
                # fetch the remote pointer and cache it locally as well
                with open(self.get_path(prev), "w") as f:
                    pointer = srv.get(self.get_remote_path(prev))
                    pointers.add(pointer)
                    f.write(pointer)
        if includeself:
            pointers.add(self.get_prev_value())
        return max(pointers, key=lambda x:int(x.split(".")[2]))
    def <API key>(self):
        # True iff the locally cached master pointer matches the remote one.
        # NOTE(review): method name was redacted upstream; behaves like a
        # "is local master up to date?" check.
        srv = self.services[0]
        remote_master = srv.get(self.get_remote_path("master"))
        with open(self.path_master) as f:
            master_head = f.read().strip()
        if(master_head != remote_master): return False
        return True
    def cmd_poll(self):
        # Poll the primary service for changes under our namespace.
        srv = self.services[0]
        srv.poll(self.namespace)
    # end-user's interfaces (starting with cmd_ prefix)
    # NOTE. explicitly return True/False to indicate status of 'cmd'
    def cmd_share(self, target_email):
        """Share the remote namespace with another user on every backend."""
        if not self.check_sanity():
            dbg.err("this is not metasync repo")
            return False
        for srv in self.services:
            srv.share(self.namespace, target_email)
    def cmd_diff(self):
        """Print files added (+++) / removed (---) in the cwd relative to
        the tracked root blob (top level only)."""
        # work only for 1-level directory
        # need to add diff for file
        if not self.check_sanity():
            dbg.err("this is not metasync repo")
            return False
        root = self.get_root_blob()
        added = []
        removed = []
        files = os.listdir(".")
        for f in files:
            if(f == ".metasync"): continue
            # root.files keys are "/"-prefixed paths
            if("/"+f not in root.files):
                added.append(f)
        for f in root.files:
            if(f[1:] not in files):
                removed.append(f[1:])
        for f in added:
            print("+++ %s" % f)
        for f in removed:
            print("--- %s" % f)
def cmd_mv(self, src_pn, dst_pn):
if not self.check_sanity():
dbg.err("it's not a metasync repo.")
return False
src_pn = os.path.abspath(src_pn)
dst_pn = os.path.abspath(dst_pn)
#TODO: check src_pn exists
beg = time.time()
try:
dirname = os.path.dirname(src_pn)
dirblob = self.blobstore.load_dir(dirname, False, dirty=True)
if(dirblob is None):
dbg.err("%s does not exist" % src_pn)
return False
except NotTrackedException as e:
dbg.err(str(e))
return False
fname = os.path.basename(src_pn)
if(not fname in dirblob):
dbg.err("%s does not exist" % pn)
return False
fblob = dirblob[fname]
dirblob.rm(fname)
dst_dirname = os.path.dirname(dst_pn)
if(dirname != dst_dirname):
dirblob = self.blobstore.load_dir(dirname, True, dirty=True)
assert dirblob is not None
dst_fname = os.path.basename(dst_pn)
dirblob.add(dst_fname, fblob, dirty=False)
root = self.get_root_blob()
root.store()
newblobs = self.blobstore.get_added_blobs()
util.write_file(self.get_head(), root.hv)
self.append_history(root.hv)
end = time.time()
dbg.time("local write: %f" % (end-beg))
# push new blobs remotely
self.bstore_sync(newblobs)
self._put_all(self.get_head(), self.get_remote_path(self.get_head_name()))
end = time.time()
dbg.time("remote write: %f" % (end-beg))
# move the file
shutil.move(src_pn, dst_pn)
self._join()
return True
    def cmd_peek(self):
        # Print every (path, blob) entry reachable from the root blob.
        root = self.get_root_blob()
        for i in root.walk():
            print(i)
        # print("hash: %s" % root.hash_head)
        # print(root.dump_info())
        # with open(self.path_master) as f:
        #     master_head = f.read().strip()
        # with open(self.get_head()) as f:
        #     head = f.read().strip()
        # print("head_history %s" % ",".join(self.get_history(head)))
        # print("master_history %s" % ",".join(self.get_history(master_head)))
    def cmd_fetch(self):
        """Download all missing objects from the backends."""
        if not self.check_sanity():
            dbg.err("it's not a metasync repo.")
            return False
        # TODO: change it into comparing between masters
        self.bstore_download()
        self._join()
        return True
    def update_changed(self, head, master):
        """Apply files that differ between head and master to the worktree
        (head/master are root directory-blob hashes)."""
        def _file_create(blob, pn):
            # recursively materialize blob at path pn
            # thv codes: D/M=directory, F=file (stream), m=inline file
            if(blob.thv == "D" or blob.thv == "M"):
                util.mkdirs(pn)
                for i in blob.entries:
                    _file_create(blob[i], os.path.join(pn, i))
            elif(blob.thv == "F"):
                content = blob.read()
                util.write_file(pn, content.getvalue())
                content.close()
                # touch metadata blob (for cmd_status)
                os.utime(os.path.join(self.path_objs, blob.hv), None)
            elif(blob.thv == "m"):
                content = blob.read()
                util.write_file(pn, content)
                # touch metadata blob (for cmd_status)
                os.utime(os.path.join(self.path_objs, blob.hv), None)
            else:
                assert False
        def _update(old_dirblob, new_dirblob, path):
            # walk the new tree, creating/refreshing entries that differ
            for fname in new_dirblob.entries:
                blob = new_dirblob[fname]
                if(fname not in old_dirblob):
                    _file_create(blob, os.path.join(path, fname))
                elif(blob.hv != old_dirblob[fname].hv):
                    if(blob.thv == "D"):
                        _update(old_dirblob[fname], blob, os.path.join(path, fname))
                    elif(blob.thv == "F"):
                        _file_create(blob, os.path.join(path, fname))
                    else:
                        print(blob.thv)
                        assert False
        headblob = self.blobstore.get_blob(head, "D")
        masterblob = self.blobstore.get_blob(master, "D")
        _update(headblob, masterblob, self.path_root)
    def <API key>(self, master):
        # Point both prev_* and head_* files at the given master value.
        # NOTE(review): method name was redacted upstream; referenced from
        # cmd_update after a successful fast-forward.
        with open(self.get_prev(), "w") as f:
            f.write(master)
        with open(self.get_head(), "w") as f:
            f.write(master)
def cmd_update(self):
master = self.get_uptodate_master()
# already up-to-date
prev = self.get_prev_value()
if (master == prev):
return True
head = self.get_head_and_config()
# XXX: need to check if non-checked in but modified files.
if (head == prev):
self.update_changed(head.split(".")[0], master.split(".")[0])
else:
need to merge
raise Exception('Merge required')
self.<API key>(master)
self.blobstore.rootblob = None
dbg.info("update done %s" % time.ctime())
return True
    #XXX: Seungyeop is working on it.
    def cmd_clone(self, namespace, backend=None, encrypt_key=None):
        """Clone an existing remote namespace into ./<namespace>."""
        # if wrong target
        if self.check_sanity():
            return False
        # reset all the path by including the namespace
        self.path_root = os.path.join(self.path_root, namespace)
        self.path_meta = os.path.join(self.path_root, META_DIR)
        self.path_conf = self.get_path("config")
        self.path_objs = self.get_path("objects")
        #self.path_head_history = self.get_path("head_history")
        if os.path.exists(self.path_root):
            dbg.err("%s already exists." % self.path_root)
            return False
        if backend is None:
            print "input one of the storage backends, (e.g., dropbox,google,box)"
            print " for testing, use disk@/path (e.g., disk@/tmp)"
            backend = raw_input("> ")
        srv = services.factory(backend)
        self.namespace = namespace
        # create repo directory
        os.mkdir(self.path_root)
        os.mkdir(self.path_meta)
        os.mkdir(self.path_objs)
        # master pointer format: roothash.confighash[:6].version
        curmaster = self.get_uptodate_master(False, srv)
        sp = curmaster.split(".")
        master = sp[0]
        seed = sp[1]
        # fetch the shared seed config and fill in client-specific fields
        seed = srv.get(self.get_remote_path("configs/%s" % seed))
        conf = util.loads_config(seed)
        # setup client specific info
        conf.set('core', 'clientid' , util.gen_uuid())
        conf.set('core', 'encryptkey', <API key>(encrypt_key))
        with open(self.path_conf, "w") as fd:
            conf.write(fd)
        self._load()
        beg = time.time()
        self.bstore_download()
        self._join()
        with open(self.get_head(), "w") as f:
            f.write(curmaster)
        with open(self.get_prev(), "w") as f:
            f.write(curmaster)
        # send my head to remote
        self._put_all(self.get_head(), self.get_remote_path(self.get_head_name()))
        self._put_all(self.get_prev(), self.get_remote_path(self.get_prev_name()))
        self._join()
        if (master):
            ret = self.restore_from_master()
        end = time.time()
        dbg.dbg("clone: %ss" % (end-beg))
        return True
    def cmd_init(self, namespace, backend=None, nreplicas=None, encrypt_key=None):
        """Create a new repo in-place (config, blob dirs) and publish the
        seed config, head/prev pointers and Paxos object to the backends."""
        # already initialized?
        if self.check_sanity():
            dbg.err("already initialized %s (%s)" \
                        % (self.path_root, self.namespace))
            return False
        os.mkdir(self.path_meta)
        os.mkdir(self.path_objs)
        # build config opts
        conf = util.new_config()
        # core: unique/permanent info about local machine (often called client)
        # NOTE. not sure if encryption_key should be in core, or unchangable
        conf.add_section('core')
        conf.set('core', 'namespace' , namespace)
        conf.set('core', 'clientid' , util.gen_uuid())
        conf.set('core', 'encryptkey', <API key>(encrypt_key))
        # backend: info about sync service providers
        # XXX: Error handling
        conf.add_section('backend')
        try:
            services = _get_conf_services(backend)
            conf.set('backend', 'services' , services)
            conf.set('backend', 'nreplicas', _get_conf_nreplicas(nreplicas, len(services.split(","))))
        except:
            # NOTE(review): bare except swallows every error and may leave
            # the backend section empty -- confirm this is intended.
            pass
        # flush
        with open(self.path_conf, "w") as fd:
            conf.write(fd)
        try:
            self._load()
        except NameError:
            # bad backend slug: roll back the half-created meta dir
            shutil.rmtree(self.path_meta)
            return False
        # put config into remote
        conf.remove_option('core','clientid')
        conf.remove_option('core','encryptkey')
        with io.BytesIO() as out:
            conf.write(out)
            val = out.getvalue()
        configname = util.sha1(val)
        self._put_all_content(val, self.get_remote_path("configs/%s" % configname[:6]), True)
        #self._put_all_content(configname[:6], self.get_remote_path("config"), True)
        # Format for master: headhash.config[:6].version
        prev_master = "." + configname[:6] + ".0"
        # do we need both? or shall we put them into a file together.
        with open(self.get_head(), "w") as f:
            f.write(prev_master)
        with open(self.get_prev(), "w") as f:
            f.write(prev_master)
        self._put_all_dir(self.get_remote_path("objects"))
        # change to put_content
        self._put_all(self.get_head() , self.get_remote_path(self.get_head_name()))
        self._put_all(self.get_prev() , self.get_remote_path(self.get_prev_name()))
        from paxos import Proposer
        self.proposer = Proposer(None, self.services, self.get_pPaxos_path(prev_master))
        self._join()
        return True
def get_pPaxos_path(self, path):
    """Return the remote location for a Paxos proposal keyed by `path`."""
    paxos_prefix = "pPaxos/"
    return self.get_remote_path(paxos_prefix + path)
def cmd_gc(self):
    """Garbage-collect blobs reachable from neither head nor master.

    Walks the blob graph from the local head and the master pointer to
    build the set of live blob hashes, then deletes every other blob both
    remotely (via the scheduler) and locally.

    Returns:
        True on completion, False if this is not a metasync repo.
    """
    if not self.check_sanity():
        dbg.err("this is not a metasync repo")
        return False

    def _find_all_blobs(blob, tracked):
        # Recursively collect hashes reachable from `blob` into `tracked`.
        # we may need to move this to blobstore
        if(blob.hv in tracked): return
        tracked.add(blob.hv)
        # "C" blobs are chunks (leaves): no children to descend into
        if(blob.thv == "C"): return
        for name, childblob in blob.entries.iteritems():
            _find_all_blobs(childblob, tracked)

    # check head
    head = self.get_head_value()
    tracked = set([])
    if(head is not None and len(head)>0):
        blob = self.blobstore.get_blob(head, "D")
        _find_all_blobs(blob, tracked)
    # check master
    with open(self.path_master) as f:
        master_head = f.read().strip()
    if(len(master_head) > 0):
        blob = self.blobstore.get_blob(master_head, "D")
        _find_all_blobs(blob, tracked)

    allblobs = set(self.blobstore.list())
    # remove following: everything not reachable from head or master
    blobs_to_remove = allblobs - tracked

    def __rm(srv, remote_path):
        # worker executed by the scheduler against one backend service
        dbg.job("submitted to: %s (%s)" % (srv, remote_path))
        srv.rm(remote_path)

    for hashname in blobs_to_remove:
        # delete the blob from every service it was mapped to, then locally
        for i in self.mapping.get_mapping(hashname):
            self.scheduler.submit(self.srvmap[i], True, __rm, self.get_remote_obj_path(hashname))
        os.unlink(self.get_local_obj_path(hashname))
    return True
def cmd_rm(self, pn):
    """Remove a tracked file from the repository and from disk.

    Drops `pn` from its directory blob, stores the new root, advances the
    head pointer locally and remotely, then unlinks the local copy.

    Args:
        pn: path of the file to remove.

    Returns:
        True on success, False on any failure (untracked path, unlink error).
    """
    if not self.check_sanity():
        dbg.err("this is not a metasync repo")
        return False
    #TODO: check if the file exists
    beg = time.time()
    try:
        dirname = os.path.dirname(pn)
        dirblob = self.blobstore.load_dir(dirname, False)
        if(dirblob is None):
            dbg.err("%s does not exist" % pn)
            return False
    except NotTrackedException as e:
        dbg.err(str(e))
        return False
    fname = os.path.basename(pn)
    if(not fname in dirblob):
        dbg.err("%s does not exist" % pn)
        return False
    dirblob.rm(fname)
    root = self.get_root_blob()
    root.store()
    newblobs = self.blobstore.get_added_blobs()
    # we may need to include pointer for previous version.
    util.write_file(self.get_head(), root.hv)
    self.append_history(root.hv)
    end = time.time()
    dbg.time("local write: %f" % (end-beg))
    # push new blobs remotely
    self.bstore_sync(newblobs)
    self._put_all(self.get_head(), self.get_remote_path(self.get_head_name()))
    end = time.time()
    dbg.time("remote write: %f" % (end-beg))
    self._join()
    # drop local copy
    # TODO: rm only tracked files if removing file.
    try:
        os.unlink(pn)
    except:
        # metadata is already updated at this point; only the local unlink failed
        dbg.err("failed to rm %s" % pn)
        return False
    return True
def append_history(self, hv):
    """Append the given head hash as a new line to the head-history log."""
    record = "%s\n" % hv
    util.append_file(self.path_head_history, record)
def cmd_checkin(self, paths, unit=BLOB_UNIT, upload_only_first=False):
    """Check one or more paths into the repository.

    Files and directory trees under `paths` are chunked into blobs; a new
    root blob is stored and the head pointer is advanced locally and on the
    first replica. Remaining replicas are synced unless
    `upload_only_first` is set.

    Args:
        paths: a single path or a list of paths to check in.
        unit: blob chunk size in bytes.
        upload_only_first: if True, only the first replica is written and
            the leftover upload work is returned to the caller.

    Returns:
        False on error; [] when fully synced; otherwise the leftover
        upload set (when upload_only_first is True). Returns True early
        when nothing changed.
    """
    if not self.check_sanity():
        dbg.err("this is not a metasync repo")
        return False
    # Python 2 idiom: normalize a single path into a list
    if type(paths) != types.ListType:
        paths = [paths]
    for pn in paths:
        if not os.path.exists(pn):
            dbg.err("File %s doesn't exits." % pn)
            return False
    beg = time.time()
    #XXX: considering mtime, check hash of chunks?
    changed = False
    for path in paths:
        if(not os.path.isfile(path)):
            # directory: walk it and blob every file underneath
            changed = True
            for root, dirs, files in os.walk(path):
                fsizesum = 0
                for fname in files:
                    fsizesum += os.stat(os.path.join(root,fname)).st_size
                print(root + " " + str(fsizesum))
                if(fsizesum < unit):
                    # small directory: merge all files into one dir blob
                    dirblob = self.blobstore.load_dir(root, dirty=True, merge=True)
                    for fname in files:
                        dirblob.add_file(fname, os.path.join(root, fname))
                    dirblob.done_adding()
                else:
                    # large directory: one file blob per file, skip unchanged
                    dirblob = self.blobstore.load_dir(root, dirty=True)
                    for fname in files:
                        fileblob = self.blobstore.load_file(os.path.join(root, fname), unit)
                        if(fname in dirblob and dirblob[fname].hv == fileblob.hv):
                            continue
                        dirblob.add(fname, fileblob)
        else:
            # single file check-in
            fileblob = self.blobstore.load_file(path, unit)
            dirname = os.path.dirname(path)
            if(dirname == ""): dirname = "."
            dirblob = self.blobstore.load_dir(dirname, dirty=True)
            fname = os.path.basename(path)
            # unchanged content (same hash) needs no new version
            if(fname in dirblob and dirblob[fname].hv == fileblob.hv):
                continue
            changed = True
            dirblob.add(fname, fileblob)
    if(not changed): return True
    root = self.get_root_blob()
    root.store()
    newblobs = self.blobstore.get_added_blobs()
    # head format: roothash.confighash.version
    util.write_file(self.get_head(), "%s.%s.%d" % (root.hv, self.get_config_hash(), self.get_next_version()))
    end = time.time()
    dbg.time("local write: %f" % (end-beg))
    # push new blobs remotely
    leftover = self.bstore_sync(newblobs)
    self._update_all(self.get_head(), self.get_remote_path(self.get_head_name()))
    self._join()
    end = time.time()
    dbg.time("remote write for R1: %f" % (end-beg))
    if(not upload_only_first):
        self.bstore_sync_left(leftover)
        end = time.time()
        dbg.time("remote write for left: %f" % (end-beg))
        return []
    else:
        return leftover
def cmd_push(self):
    """Propose the local head as the new master via Paxos.

    If the proposal round returns a different value, another client won
    and the local head is stale; the caller must fetch first.

    Returns:
        True if the head was accepted as master, False otherwise.
    """
    prev = self.get_prev_value()
    newvalue = self.get_head_and_config()
    val = self.propose_value(prev, newvalue)
    if(val != newvalue):
        # our proposal lost: remote master moved past our prev pointer
        dbg.err("You should fetch first")
        return False
    # with open(self.path_master) as f:
    #     master_head = f.read().strip()
    # with open(self.get_head()) as f:
    #     head = f.read().strip()
    # if(len(master_head) > 0):
    #     head_history = self.get_history()
    #     if(not master_head in head_history):
    #         dbg.err("You should update first")
    #         self.unlock_master()
    #         return False
    # check master is ancestor of the head
    shutil.copyfile(self.get_head(), self.get_prev())
    self._update_all(self.get_prev(), self.get_remote_path(self.get_prev_name()))
    # start a fresh proposer round keyed by the newly accepted value
    from paxos import Proposer
    self.proposer = Proposer(None, self.services, self.get_pPaxos_path(newvalue))
    self._join()
    return True
def cmd_status(self, unit=BLOB_UNIT):
    """Print working-tree status relative to head and master.

    Reports how far the local branch has diverged from master, then walks
    the working tree listing modified tracked files and untracked entries.

    Args:
        unit: chunk size used when re-hashing files to detect changes.

    Returns:
        True on success, False if this is not a metasync repo.
    """
    def simple_walk(folder):
        # simple_walk will skip dipping into the folder
        # that are not tracked in the repo
        untracked = []
        changed = []
        for f in os.listdir(folder):
            if f == META_DIR:
                continue
            basename = os.path.basename(folder)
            if basename == '.' or basename == '':
                relpath = f
            else:
                relpath = os.path.join(folder, f)
            if relpath in tracked:
                # NOTE(review): isdir is tested on `f`, not `relpath`; for
                # nested folders this resolves against the cwd — confirm.
                if os.path.isdir(f):
                    _untracked, _changed = simple_walk(relpath)
                    untracked.extend(_untracked)
                    changed.extend(_changed)
                else:
                    fblob = tracked[relpath]
                    # compare the file modified time and its metadata blob modified time
                    curr_mtime = os.path.getmtime(relpath)
                    last_mtime = os.path.getmtime(os.path.join(self.path_objs, fblob.hv))
                    if curr_mtime > last_mtime:
                        # only load file when the file modified time is greater than metadata modified time
                        fblob._load()
                        flag = False
                        # compare chunk hash
                        for (offset, chunk) in util.each_chunk2(relpath, unit):
                            if util.sha1(chunk) != fblob.entries[offset].hv:
                                flag = True
                                break
                        if flag:
                            changed.append(relpath)
            else:
                # untracked directories are reported with a trailing separator
                if os.path.isdir(relpath):
                    relpath = os.path.join(relpath, '')
                untracked.append(relpath)
        return untracked, changed

    if not self.check_sanity():
        dbg.err("this is not a metasync repo")
        return False
    # switch to metasync repo root folder
    os.chdir(self.path_root)
    # compare the head and master history
    head_history = self.get_history()
    master_history = self.get_history(True)
    # count head commits until we hit one that master also has
    head_diverge = 0
    for head in head_history:
        if (head in master_history):
            break
        head_diverge += 1
    if head_diverge == len(head_history):
        master_diverge = len(master_history)
    else:
        master_diverge = master_history.index(head_history[head_diverge])
    if head_diverge == 0 and master_diverge == 0:
        print "\nYour branch is up-to-date with master."
    elif head_diverge == 0:
        print "\nYour branch is behind master by %d commit(s)." % master_diverge
    elif master_diverge == 0:
        print "\nYour branch is ahead of master by %d commit(s)." % head_diverge
    else:
        print "\nYour branch and master have diverged,"
        print "and have %d and %d different commits each, respectively" % (head_diverge, master_diverge)
    root = self.get_root_blob()
    # index every tracked path for simple_walk's membership tests
    tracked = {}
    for (path, blob) in root.walk():
        tracked[path] = blob
    untracked, changed = simple_walk('.')
    if changed:
        print("\nChanges not checked in:")
        for f in changed:
            print("\033[31m\tmodified: %s\033[m" % f)
    if untracked:
        print("\nUntracked files:")
        for f in untracked:
            print("\033[31m\t%s\033[m" % f)
    return True
using System;
using AdventureWorks.UILogic.ViewModels;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Data;
namespace AdventureWorks.Shopper.Converters
{
/// <summary>
/// Value converter that translates FormStatus.Complete or FormStatus.Invalid to <see cref="Visibility.Visible"/>
/// and FormStatus.Incomplete to <see cref="Visibility.Collapsed"/>.
/// </summary>
public sealed class <API key> : IValueConverter
{
    // Maps a bound form-status value to a XAML Visibility.
    public object Convert(object value, Type targetType, object parameter, string language)
    {
        // NOTE(review): the value only collapses when it arrives boxed as an
        // int equal to FormStatus.Incomplete; if the binding supplies a
        // FormStatus enum value directly, `value is int` is false and the
        // result is always Visible — confirm this is intended.
        return (value is int && ((int)value) == FormStatus.Incomplete) ? Visibility.Collapsed : Visibility.Visible;
    }

    // One-way converter: converting back from Visibility is unsupported.
    public object ConvertBack(object value, Type targetType, object parameter, string language)
    {
        throw new <API key>();
    }
}
} |
angular.module('eventApp')
    // Controller for the insert-return route. Currently an empty stub:
    // all dependencies are injected but none are used yet.
    .controller('insertReturn_Ctr', function ($scope, executeResults, $http, $route, $rootScope, $location, $routeParams) {
    });
module Pione
  module Front
    # <API key> is a front interface for
    # <API key> command.
    class <API key> < BasicFront
      # Tuple space obtained from the parent front at construction time.
      attr_reader :tuple_space

      def initialize(cmd)
        # Grab the tuple space from the parent front before calling super,
        # then cache it on this instance once the front is initialized.
        tuple_space = cmd.model[:parent_front].get_tuple_space(nil)
        super(cmd, Global.<API key>)
        @tuple_space = tuple_space
      end
    end
  end
end
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.6.0_27) on Thu Jan 23 20:13:40 EST 2014 -->
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<title>API Help (Lucene 4.6.1 API)</title>
<meta name="date" content="2014-01-23">
<link rel="stylesheet" type="text/css" href="stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
    if (location.href.indexOf('is-external=true') == -1) {
        parent.document.title="API Help (Lucene 4.6.1 API)";
    }
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<div class="topNav"><a name="navbar_top">
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
</a>
<ul class="navList" title="Navigation">
<li><a href="overview-summary.html">Overview</a></li>
<li>Package</li>
<li>Class</li>
<li>Use</li>
<li><a href="overview-tree.html">Tree</a></li>
<li><a href="deprecated-list.html">Deprecated</a></li>
<li class="navBarCell1Rev">Help</li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>PREV</li>
<li>NEXT</li>
</ul>
<ul class="navList">
<li><a href="index.html?help-doc.html" target="_top">FRAMES</a></li>
<li><a href="help-doc.html" target="_top">NO FRAMES</a></li>
</ul>
<ul class="navList" id="<API key>">
<li><a href="allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
  allClassesLink = document.getElementById("<API key>");
  if(window==top) {
    allClassesLink.style.display = "block";
  }
  else {
    allClassesLink.style.display = "none";
  }
//-->
</script>
</div>
<a name="skip-navbar_top">
</a></div>
<div class="header">
<h1 class="title">How This API Document Is Organized</h1>
<p class="subTitle">This API (Application Programming Interface) document has pages corresponding to the items in the navigation bar, described as follows.</p>
</div>
<div class="contentContainer">
<ul class="blockList">
<li class="blockList">
<h2>Overview</h2>
<p>The <a href="overview-summary.html">Overview</a> page is the front page of this API document and provides a list of all packages with a summary for each. This page can also contain an overall description of the set of packages.</p>
</li>
<li class="blockList">
<h2>Package</h2>
<p>Each package has a page that contains a list of its classes and interfaces, with a summary for each. This page can contain four categories:</p>
<ul>
<li>Interfaces (italic)</li>
<li>Classes</li>
<li>Enums</li>
<li>Exceptions</li>
<li>Errors</li>
<li>Annotation Types</li>
</ul>
</li>
<li class="blockList">
<h2>Class/Interface</h2>
<p>Each class, interface, nested class and nested interface has its own separate page. Each of these pages has three sections consisting of a class/interface description, summary tables, and detailed member descriptions:</p>
<ul>
<li>Class inheritance diagram</li>
<li>Direct Subclasses</li>
<li>All Known Subinterfaces</li>
<li>All Known Implementing Classes</li>
<li>Class/interface declaration</li>
<li>Class/interface description</li>
</ul>
<ul>
<li>Nested Class Summary</li>
<li>Field Summary</li>
<li>Constructor Summary</li>
<li>Method Summary</li>
</ul>
<ul>
<li>Field Detail</li>
<li>Constructor Detail</li>
<li>Method Detail</li>
</ul>
<p>Each summary entry contains the first sentence from the detailed description for that item. The summary entries are alphabetical, while the detailed descriptions are in the order they appear in the source code. This preserves the logical groupings established by the programmer.</p>
</li>
<li class="blockList">
<h2>Annotation Type</h2>
<p>Each annotation type has its own separate page with the following sections:</p>
<ul>
<li>Annotation Type declaration</li>
<li>Annotation Type description</li>
<li>Required Element Summary</li>
<li>Optional Element Summary</li>
<li>Element Detail</li>
</ul>
</li>
<li class="blockList">
<h2>Enum</h2>
<p>Each enum has its own separate page with the following sections:</p>
<ul>
<li>Enum declaration</li>
<li>Enum description</li>
<li>Enum Constant Summary</li>
<li>Enum Constant Detail</li>
</ul>
</li>
<li class="blockList">
<h2>Use</h2>
<p>Each documented package, class and interface has its own Use page. This page describes what packages, classes, methods, constructors and fields use any part of the given class or package. Given a class or interface A, its Use page includes subclasses of A, fields declared as A, methods that return A, and methods and constructors with parameters of type A. You can access this page by first going to the package, class or interface, then clicking on the "Use" link in the navigation bar.</p>
</li>
<li class="blockList">
<h2>Tree (Class Hierarchy)</h2>
<p>There is a <a href="overview-tree.html">Class Hierarchy</a> page for all packages, plus a hierarchy for each package. Each hierarchy page contains a list of classes and a list of interfaces. The classes are organized by inheritance structure starting with <code>java.lang.Object</code>. The interfaces do not inherit from <code>java.lang.Object</code>.</p>
<ul>
<li>When viewing the Overview page, clicking on "Tree" displays the hierarchy for all packages.</li>
<li>When viewing a particular package, class or interface page, clicking "Tree" displays the hierarchy for only that package.</li>
</ul>
</li>
<li class="blockList">
<h2>Deprecated API</h2>
<p>The <a href="deprecated-list.html">Deprecated API</a> page lists all of the API that have been deprecated. A deprecated API is not recommended for use, generally due to improvements, and a replacement API is usually given. Deprecated APIs may be removed in future implementations.</p>
</li>
<li class="blockList">
<h2>Prev/Next</h2>
<p>These links take you to the next or previous class, interface, package, or related page.</p>
</li>
<li class="blockList">
<h2>Frames/No Frames</h2>
<p>These links show and hide the HTML frames. All pages are available with or without frames.</p>
</li>
<li class="blockList">
<h2>Serialized Form</h2>
<p>Each serializable or externalizable class has a description of its serialization fields and methods. This information is of interest to re-implementors, not to developers using the API. While there is no link in the navigation bar, you can get to this information by going to any serialized class and clicking "Serialized Form" in the "See also" section of the class description.</p>
</li>
<li class="blockList">
<h2>Constant Field Values</h2>
<p>The <a href="constant-values.html">Constant Field Values</a> page lists the static final fields and their values.</p>
</li>
</ul>
<em>This help file applies to API documentation generated using the standard doclet.</em></div>
<div class="bottomNav"><a name="navbar_bottom">
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="<API key>">
</a>
<ul class="navList" title="Navigation">
<li><a href="overview-summary.html">Overview</a></li>
<li>Package</li>
<li>Class</li>
<li>Use</li>
<li><a href="overview-tree.html">Tree</a></li>
<li><a href="deprecated-list.html">Deprecated</a></li>
<li class="navBarCell1Rev">Help</li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>PREV</li>
<li>NEXT</li>
</ul>
<ul class="navList">
<li><a href="index.html?help-doc.html" target="_top">FRAMES</a></li>
<li><a href="help-doc.html" target="_top">NO FRAMES</a></li>
</ul>
<ul class="navList" id="<API key>">
<li><a href="allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
  allClassesLink = document.getElementById("<API key>");
  if(window==top) {
    allClassesLink.style.display = "block";
  }
  else {
    allClassesLink.style.display = "none";
  }
//-->
</script>
</div>
<a name="skip-navbar_bottom">
</a></div>
<p class="legalCopy"><small>
<i>Copyright © 2000-2014 Apache Software Foundation. All Rights Reserved.</i>
<script src='/prettify.js' type='text/javascript'></script>
<script type='text/javascript'>
(function(){
var oldonload = window.onload;
if (typeof oldonload != 'function') {
window.onload = prettyPrint;
} else {
window.onload = function() {
oldonload();
prettyPrint();
}
}
})();
</script>
</small></p>
</body>
</html> |
const Model = require('../libraries/model');
const Card = require('../schemas/card-schema');
class CardModel extends Model {
  /**
   * Fetch every card and deal a random hand.
   *
   * The deck size was previously hard-coded to 8; it is now a parameter
   * defaulting to 8, so existing callers are unaffected.
   *
   * @param {number} [size=8] number of cards to deal
   * @returns {Promise<Array>} resolves to `size` randomly chosen cards
   */
  findRandomDeck(size = 8) {
    return this.SchemaModel.find().execAsync()
      .then(collection => this.shuffle(collection).slice(0, size));
  }

  /**
   * In-place Fisher–Yates shuffle.
   *
   * @param {Array} array array to shuffle (mutated)
   * @returns {Array} the same array, in random order
   */
  shuffle(array) {
    let currentIndex = array.length;
    let temporaryValue;
    let randomIndex;

    // While there remain elements to shuffle...
    while (currentIndex !== 0) {
      // Pick a remaining element...
      randomIndex = Math.floor(Math.random() * currentIndex);
      currentIndex -= 1;

      // And swap it with the current element.
      temporaryValue = array[currentIndex];
      array[currentIndex] = array[randomIndex]; // eslint-disable-line no-param-reassign
      array[randomIndex] = temporaryValue; // eslint-disable-line no-param-reassign
    }

    return array;
  }
}
module.exports = new CardModel(Card); |
# MOTU `auexamine`
This application performs a thorough test on a Mac [Audio Unit](https://developer.apple.com/library/mac/documentation/MusicAudio/Conceptual/<API key>/Introduction/Introduction.html) plug-in to ensure it is free of common defects.
## Usage
`auexamine` can be run from a terminal with the following command:
auexamine <au type> <au subtype> <au manufacturer> <[Optional] requires initialization (defaults true)> <[Optional] numeric arguments (defaults false)>
### Arguments
<dl>
<dt>au type</dt>
<dd>The type of the au (example: <code>aufx</code> for an audio unit effect)</dd>
<dt>au subtype</dt>
<dd>The <API key> subtype (example: <code>pmeq</code> for parametric eq.)</dd>
<dt>au manufacturer</dt>
<dd>The manufacturer code (example: <code>appl</code> for Apple, Inc.)</dd>
<dt>requires initialization</dt>
<dd>An optional numeric argument to the test, defaulting to 1. If set to 0, this will make the test skip initialization of the audio-unit. Many audio units need initialization.</dd>
<dt>numeric arguments</dt>
<dd>An optional numeric argument to the test, defaulting to 0. If set to 1, this will make the test interpret the first three arguments as numbers rather than strings.</dd>
</dl>
### Exit codes
`auexamine` uses non-standard exit codes for use as part of a build process. The meaning of each exit code is defined in the `AUValStatus.h` file. In addition to exit codes, `auexamine` reports on its status through informative messages to standard out and error.
## Building
Build the `auexamine` app under Xcode 4 or 5 on Mac 10.7 and above. Requires C++11 support.
The code is under copyright, but provided under an MIT-style license in the LICENSE file.
The third_party directory contains sources from other projects. |
<!-- Drag-and-drop list: drop/dragover events are delegated to the component,
     and the is-dragging class is toggled while a drag is in progress. -->
<ol id="list" (drop)="onDrop($event)" (dragover)="onDragOver($event)" [ngClass]="{'is-dragging': isDragging}">
  <li *ngFor="let item of items" (click)="onItemClick(item)">{{item.path}}</li>
</ol>
require 'vesr/<API key>'
module VESR
  # Builds an ESR reference string by concatenating the ESR id with a
  # zero-padded customer id and a zero-padded invoice id.
  class ReferenceBuilder
    # Convenience entry point: build the reference in one call.
    def self.call(customer_id, invoice_id, esr_id)
      new(customer_id, invoice_id, esr_id).call
    end

    attr_reader :customer_id, :invoice_id, :esr_id

    def initialize(customer_id, invoice_id, esr_id)
      @customer_id = customer_id
      @invoice_id = invoice_id
      @esr_id = esr_id
    end

    # Concatenate the three components into the final reference string.
    def call
      "#{esr_id}#{<API key>}#{<API key>}"
    end

    private

    # Customer id left-padded with zeros so esr_id + customer id together
    # always occupy 19 digits.
    def <API key>
      format "%0#{customer_id_length}i", customer_id
    end

    def customer_id_length
      19 - esr_id.to_s.length
    end

    # Invoice id zero-padded to 7 digits.
    def <API key>
      format '%07i', invoice_id
    end
  end
end
# frozen_string_literal: true
require "spec_helper"
# Spec for the DogmaAttribute ESI model: checks inheritance, the as_json
# hash projection, and that every reader delegates to options[...] keys.
describe EveOnline::ESI::Models::DogmaAttribute do
  it { should be_a(EveOnline::ESI::Models::Base) }

  let(:options) { double }

  subject { described_class.new(options) }

  describe "#initialize" do
    its(:options) { should eq(options) }
  end

  describe "#as_json" do
    # Every reader is stubbed so as_json can be verified as a pure
    # field-by-field hash projection.
    let(:dogma_attribute) { described_class.new(options) }

    let(:icon_id) { double }

    let(:published) { double }

    let(:unit_id) { double }

    before { expect(dogma_attribute).to receive(:attribute_id).and_return(2) }

    before { expect(dogma_attribute).to receive(:default_value).and_return(0.0) }

    before { expect(dogma_attribute).to receive(:description).and_return("Boolean to store status of online effect") }

    before { expect(dogma_attribute).to receive(:display_name).and_return("") }

    before { expect(dogma_attribute).to receive(:high_is_good).and_return(true) }

    before { expect(dogma_attribute).to receive(:icon_id).and_return(icon_id) }

    before { expect(dogma_attribute).to receive(:name).and_return("isOnline") }

    before { expect(dogma_attribute).to receive(:published).and_return(published) }

    before { expect(dogma_attribute).to receive(:stackable).and_return(true) }

    before { expect(dogma_attribute).to receive(:unit_id).and_return(unit_id) }

    subject { dogma_attribute.as_json }

    its([:attribute_id]) { should eq(2) }

    its([:default_value]) { should eq(0.0) }

    its([:description]) { should eq("Boolean to store status of online effect") }

    its([:display_name]) { should eq("") }

    its([:high_is_good]) { should eq(true) }

    its([:icon_id]) { should eq(icon_id) }

    its([:name]) { should eq("isOnline") }

    its([:published]) { should eq(published) }

    its([:stackable]) { should eq(true) }

    its([:unit_id]) { should eq(unit_id) }
  end

  # Each reader should fetch exactly its own key from the options hash
  # without raising.
  describe "#attribute_id" do
    before { expect(options).to receive(:[]).with("attribute_id") }

    specify { expect { subject.attribute_id }.not_to raise_error }
  end

  describe "#default_value" do
    before { expect(options).to receive(:[]).with("default_value") }

    specify { expect { subject.default_value }.not_to raise_error }
  end

  describe "#description" do
    before { expect(options).to receive(:[]).with("description") }

    specify { expect { subject.description }.not_to raise_error }
  end

  describe "#display_name" do
    before { expect(options).to receive(:[]).with("display_name") }

    specify { expect { subject.display_name }.not_to raise_error }
  end

  describe "#high_is_good" do
    before { expect(options).to receive(:[]).with("high_is_good") }

    specify { expect { subject.high_is_good }.not_to raise_error }
  end

  describe "#icon_id" do
    before { expect(options).to receive(:[]).with("icon_id") }

    specify { expect { subject.icon_id }.not_to raise_error }
  end

  describe "#name" do
    before { expect(options).to receive(:[]).with("name") }

    specify { expect { subject.name }.not_to raise_error }
  end

  describe "#published" do
    before { expect(options).to receive(:[]).with("published") }

    specify { expect { subject.published }.not_to raise_error }
  end

  describe "#stackable" do
    before { expect(options).to receive(:[]).with("stackable") }

    specify { expect { subject.stackable }.not_to raise_error }
  end

  describe "#unit_id" do
    before { expect(options).to receive(:[]).with("unit_id") }

    specify { expect { subject.unit_id }.not_to raise_error }
  end
end
require "isolation/abstract_unit"
require "stringio"
require "rack/test"
module RailtiesTest
class EngineTest < ActiveSupport::TestCase
include ActiveSupport::Testing::Isolation
include Rack::Test::Methods
def setup
  # Build a throw-away Rails app plus a "bukkits" engine that the tests
  # below mutate and boot individually.
  build_app

  @plugin = engine "bukkits" do |plugin|
    plugin.write "lib/bukkits.rb", <<-RUBY
      module Bukkits
        class Engine < ::Rails::Engine
          railtie_name "bukkits"
        end
      end
    RUBY
    plugin.write "lib/another.rb", "class Another; end"
  end
end
def teardown
  # Remove the generated app between tests (isolation harness helper).
  teardown_app
end
def boot_rails
  # Boot the generated app by loading its environment file.
  require "#{app_path}/config/environment"
end
test "serving sprocket's assets" do
@plugin.write "app/assets/javascripts/engine.js.erb", "<%= :alert %>();"
add_to_env_config "development", "config.assets.digest = false"
boot_rails
get "/assets/engine.js"
assert_match "alert()", last_response.body
end
test "rake environment can be called in the engine" do
boot_rails
@plugin.write "Rakefile", <<-RUBY
APP_RAKEFILE = '#{app_path}/Rakefile'
load 'rails/tasks/engine.rake'
task :foo => :environment do
puts "Task ran"
end
RUBY
Dir.chdir(@plugin.path) do
output = `bundle exec rake foo`
assert_match "Task ran", output
end
end
test "copying migrations" do
@plugin.write "db/migrate/1_create_users.rb", <<-RUBY
class CreateUsers < ActiveRecord::Migration::Current
end
RUBY
@plugin.write "db/migrate/<API key>.rb", <<-RUBY
class AddLastNameToUsers < ActiveRecord::Migration::Current
end
RUBY
@plugin.write "db/migrate/3_create_sessions.rb", <<-RUBY
class CreateSessions < ActiveRecord::Migration::Current
end
RUBY
app_file "db/migrate/1_create_sessions.rb", <<-RUBY
class CreateSessions < ActiveRecord::Migration::Current
def up
end
end
RUBY
add_to_config "ActiveRecord::Base.<API key> = false"
boot_rails
Dir.chdir(app_path) do
output = `bundle exec rake bukkits:install:migrations`
assert File.exist?("#{app_path}/db/migrate/2_create_users.bukkits.rb")
assert File.exist?("#{app_path}/db/migrate/<API key>.bukkits.rb")
assert_match(/Copied migration 2_create_users\.bukkits\.rb from bukkits/, output)
assert_match(/Copied migration <API key>\.bukkits\.rb from bukkits/, output)
assert_match(/NOTE: Migration 3_create_sessions\.rb from bukkits has been skipped/, output) |
package ivan.cstm.dao;
import java.sql.SQLException;
import jdbcUtils.TxQueryRunner;
import ivan.cstm.domain.Customer;
import org.apache.commons.dbutils.QueryRunner;
public class CustomerDao {
//QueryRunner
private QueryRunner qr = new TxQueryRunner();
public void add(Customer c) {
try {
String sql = "insert into t_customer values(?,?,?,?,?,?,?)";
Object[] params = { c.getCid(), c.getCname(), c.getGender(),
c.getBirthday(), c.getCellphone(), c.getEmail(),
c.getDescription() };
qr.update(sql, params);
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
} |
<?php
class Menu extends CI_Model {

    /**
     * Fetch every row from the `menu` table.
     *
     * Dead commented-out insert code was removed: it carried a copy-paste
     * bug ($name was assigned from the 'quantity' field, $quantity was
     * never defined) and targeted the unrelated `inventory` table.
     *
     * @return array list of menu rows as associative arrays
     */
    function getMenu()
    {
        return $this->db->get('menu')->result_array();
    }
}
<?php
namespace Networking\InitCmsBundle\Entity;
use Networking\InitCmsBundle\Model\GalleryView as ModelGalleryView;
/**
 * Class GalleryView.
 *
 * Lifecycle hooks that stamp creation/update timestamps on the entity.
 */
class GalleryView extends ModelGalleryView
{
    /**
     * Hook on pre-persist operations: set both timestamps to now.
     */
    public function prePersist()
    {
        $now = new \DateTime('now');
        $this->createdAt = $now;
        $this->updatedAt = $now;
    }

    /**
     * Hook on pre-update operations: refresh only the update timestamp.
     */
    public function preUpdate()
    {
        $this->updatedAt = new \DateTime('now');
    }
}
<?php
namespace Sonata\CacheBundle\DependencyInjection;
use Symfony\Component\Config\Definition\Processor;
use Symfony\Component\Config\FileLocator;
use Symfony\Component\DependencyInjection\ContainerBuilder;
use Symfony\Component\DependencyInjection\Loader\XmlFileLoader;
use Symfony\Component\DependencyInjection\Reference;
use Symfony\Component\HttpKernel\DependencyInjection\Extension;
/**
* PageExtension.
*
*
* @author Thomas Rabaix <thomas.rabaix@sonata-project.org>
*/
class <API key> extends Extension
{
/**
 * Loads the cache bundle configuration and wires the configured
 * services into the container.
 *
 * @param array            $configs   An array of configuration settings
 * @param ContainerBuilder $container A ContainerBuilder instance
 */
public function load(array $configs, ContainerBuilder $container)
{
    $processor = new Processor();
    $configuration = new Configuration();
    $config = $processor-><API key>($configuration, $configs);

    $loader = new XmlFileLoader($container, new FileLocator(__DIR__.'/../Resources/config'));
    $loader->load('cache.xml');
    $loader->load('counter.xml');

    // 'auto' enables the ORM listener only when Doctrine ORM is present;
    // any other value is treated as an explicit boolean switch.
    $useOrm = 'auto' == $config['cache_invalidation']['orm_listener'] ?
        class_exists('Doctrine\\ORM\\Version') :
        $config['cache_invalidation']['orm_listener'];
    if ($useOrm) {
        $loader->load('orm.xml');
    }

    // Same auto-detection for the PHPCR-ODM listener.
    $usePhpcrOdm = 'auto' == $config['cache_invalidation']['phpcr_odm_listener'] ?
        class_exists('Doctrine\\PHPCR\\ODM\\Version') :
        $config['cache_invalidation']['phpcr_odm_listener'];
    if ($usePhpcrOdm) {
        $loader->load('phpcr_odm.xml');
    }

    $this-><API key>($container, $config);

    if ($useOrm) {
        $this->configureORM($container, $config);
    }

    if ($usePhpcrOdm) {
        $this->configurePHPCRODM($container, $config);
    }

    $this->configureCache($container, $config);
    $this->configureCounter($container, $config);
}
/**
 * Wires the cache manager's invalidation service, registers the
 * per-class identifier methods, and attaches the configured recorder.
 *
 * @param ContainerBuilder $container
 * @param array            $config
 */
public function <API key>(ContainerBuilder $container, $config)
{
    $cacheManager = $container->getDefinition('sonata.cache.manager');
    $cacheManager->replaceArgument(0, new Reference($config['cache_invalidation']['service']));

    $recorder = $container->getDefinition('sonata.cache.model_identifier');
    foreach ($config['cache_invalidation']['classes'] as $class => $method) {
        $recorder->addMethodCall('addClass', array($class, $method));
    }

    $cacheManager->addMethodCall('setRecorder', array(new Reference($config['cache_invalidation']['recorder'])));
}
/**
 * Tags the ORM event subscriber for every configured Doctrine connection.
 *
 * @param ContainerBuilder $container
 * @param array            $config
 */
public function configureORM(ContainerBuilder $container, $config)
{
    $subscriber = $container->getDefinition('sonata.cache.orm.event_subscriber');

    foreach (array_keys($container->getParameter('doctrine.connections')) as $connectionName) {
        $subscriber->addTag('doctrine.event_subscriber', array('connection' => $connectionName));
    }
}
/**
 * Tags the PHPCR-ODM event subscriber for every configured session.
 *
 * @param ContainerBuilder $container
 * @param array            $config
 */
public function configurePHPCRODM(ContainerBuilder $container, $config)
{
    $subscriber = $container->getDefinition('sonata.cache.phpcr_odm.event_subscriber');

    foreach (array_keys($container->getParameter('doctrine_phpcr.odm.sessions')) as $sessionName) {
        $subscriber->addTag('doctrine_phpcr.event_subscriber', array('session' => $sessionName));
    }
}
/**
 * Configures the concrete cache backend services (esi, ssi, mongo,
 * memcached, predis, apc, symfony). Each backend present in the
 * configuration gets its constructor arguments injected; every absent
 * backend has its service definition removed from the container.
 *
 * @param ContainerBuilder $container
 * @param array $config
 *
 * @throws \RuntimeException if a configured backend's client library
 *                           (Mongo, Memcached, Predis, APC) is not installed
 */
public function configureCache(ContainerBuilder $container, $config)
{
if ($config['default_cache']) {
$container->setAlias('sonata.cache', $config['default_cache']);
}
if (isset($config['caches']['esi'])) {
// Varnish 3 invalidates with BAN requests; older versions use PURGE.
$container
->getDefinition('sonata.cache.esi')
->replaceArgument(0, $config['caches']['esi']['token'])
->replaceArgument(1, $config['caches']['esi']['servers'])
->replaceArgument(3, 3 === $config['caches']['esi']['version'] ? 'ban' : 'purge');
} else {
$container->removeDefinition('sonata.cache.esi');
}
if (isset($config['caches']['ssi'])) {
$container
->getDefinition('sonata.cache.ssi')
->replaceArgument(0, $config['caches']['ssi']['token'])
;
} else {
$container->removeDefinition('sonata.cache.ssi');
}
if (isset($config['caches']['mongo'])) {
$this->checkMongo();
$database = $config['caches']['mongo']['database'];
$servers = array();
// Build "user:password@host:port/db" strings for authenticated servers,
// plain "host:port" otherwise.
foreach ($config['caches']['mongo']['servers'] as $server) {
if ($server['user']) {
$servers[] = sprintf('%s:%s@%s:%s/%s', $server['user'], $server['password'], $server['host'], $server['port'], $database);
} else {
$servers[] = sprintf('%s:%s', $server['host'], $server['port']);
}
}
$container
->getDefinition('sonata.cache.mongo')
->replaceArgument(0, $servers)
->replaceArgument(1, $database)
->replaceArgument(2, $config['caches']['mongo']['collection'])
;
} else {
$container->removeDefinition('sonata.cache.mongo');
}
if (isset($config['caches']['memcached'])) {
$this->checkMemcached();
$container
->getDefinition('sonata.cache.memcached')
->replaceArgument(0, $config['caches']['memcached']['prefix'])
->replaceArgument(1, $config['caches']['memcached']['servers'])
;
} else {
$container->removeDefinition('sonata.cache.memcached');
}
if (isset($config['caches']['predis'])) {
$this->checkPRedis();
$container
->getDefinition('sonata.cache.predis')
->replaceArgument(0, $config['caches']['predis']['servers'])
;
} else {
$container->removeDefinition('sonata.cache.predis');
}
if (isset($config['caches']['apc'])) {
$this->checkApc();
// Servers get their basic-auth credentials base64-encoded via configureServers().
$container
->getDefinition('sonata.cache.apc')
->replaceArgument(1, $config['caches']['apc']['token'])
->replaceArgument(2, $config['caches']['apc']['prefix'])
->replaceArgument(3, $this->configureServers($config['caches']['apc']['servers']))
->replaceArgument(4, $config['caches']['apc']['timeout'])
;
} else {
$container->removeDefinition('sonata.cache.apc');
}
if (isset($config['caches']['symfony'])) {
$container
->getDefinition('sonata.cache.symfony')
->replaceArgument(3, $config['caches']['symfony']['token'])
->replaceArgument(4, $config['caches']['symfony']['php_cache_enabled'])
->replaceArgument(5, $config['caches']['symfony']['types'])
->replaceArgument(6, $this->configureServers($config['caches']['symfony']['servers']))
;
} else {
$container->removeDefinition('sonata.cache.symfony');
}
}
/**
 * Configures the counter backend services (mongo, memcached, predis, apc).
 * Mirrors configureCache(): configured backends get their arguments
 * injected, absent ones have their definitions removed.
 *
 * @param ContainerBuilder $container
 * @param array $config
 *
 * @throws \RuntimeException if a configured backend's client library
 *                           (Mongo, Memcached, Predis, APC) is not installed
 */
public function configureCounter(ContainerBuilder $container, $config)
{
if ($config['default_counter']) {
$container->setAlias('sonata.counter', $config['default_counter']);
}
if (isset($config['counters']['mongo'])) {
$this->checkMongo();
// "user:password@host:port" for authenticated servers, "host:port" otherwise.
$servers = array();
foreach ($config['counters']['mongo']['servers'] as $server) {
if ($server['user']) {
$servers[] = sprintf('%s:%s@%s:%s', $server['user'], $server['password'], $server['host'], $server['port']);
} else {
$servers[] = sprintf('%s:%s', $server['host'], $server['port']);
}
}
$container
->getDefinition('sonata.cache.counter.mongo')
->replaceArgument(0, $servers)
->replaceArgument(1, $config['counters']['mongo']['database'])
->replaceArgument(2, $config['counters']['mongo']['collection'])
;
} else {
$container->removeDefinition('sonata.cache.counter.mongo');
}
if (isset($config['counters']['memcached'])) {
$this->checkMemcached();
$container
->getDefinition('sonata.cache.counter.memcached')
->replaceArgument(0, $config['counters']['memcached']['prefix'])
->replaceArgument(1, $config['counters']['memcached']['servers'])
;
} else {
$container->removeDefinition('sonata.cache.counter.memcached');
}
if (isset($config['counters']['predis'])) {
$this->checkPRedis();
$container
->getDefinition('sonata.cache.counter.predis')
->replaceArgument(0, $config['counters']['predis']['servers'])
;
} else {
$container->removeDefinition('sonata.cache.counter.predis');
}
if (isset($config['counters']['apc'])) {
$this->checkApc();
$container
->getDefinition('sonata.cache.counter.apc')
->replaceArgument(0, $config['counters']['apc']['prefix'])
;
} else {
$container->removeDefinition('sonata.cache.counter.apc');
}
}
// Fails fast at container-build time when the PHP Memcached extension is
// missing but a memcached backend was configured.
protected function checkMemcached()
{
if (!class_exists('\Memcached', true)) {
throw new \RuntimeException(<<<HELP
The `sonata.cache.memcached` service is configured, however the Memcached class is not available.
To resolve this issue, please install the related library : http://php.net/manual/en/book.memcached.php
or remove the memcached cache settings from the configuration file.
HELP
);
}
}
// Fails fast at container-build time when the APC extension is missing but
// an APC backend was configured.
protected function checkApc()
{
if (!function_exists('apc_fetch')) {
throw new \RuntimeException(<<<HELP
The `sonata.cache.apc` service is configured, however the apc_* functions are not available.
To resolve this issue, please install the related library : http://php.net/manual/en/book.apc.php
or remove the APC cache settings from the configuration file.
HELP
);
}
}
// Fails fast at container-build time when the legacy Mongo extension is
// missing but a mongo backend was configured.
protected function checkMongo()
{
if (!class_exists('\Mongo', true)) {
throw new \RuntimeException(<<<HELP
The `sonata.cache.mongo` service is configured, however the Mongo class is not available.
To resolve this issue, please install the related library : http://php.net/manual/en/book.mongo.php
or remove the mongo cache settings from the configuration file.
HELP
);
}
}
// Fails fast at container-build time when the Predis client library is
// missing but a predis backend was configured.
protected function checkPRedis()
{
if (!class_exists('\Predis\Client', true)) {
throw new \RuntimeException(<<<HELP
The `sonata.cache.predis` service is configured, however the Predis\Client class is not available.
Please add the lib in your composer.json file: "predis/predis": "~0.8".
HELP
);
}
}
/**
 * Normalizes a list of server definitions: when a server carries HTTP basic
 * auth credentials in its 'basic' entry, they are base64-encoded in place.
 * Array keys are preserved.
 *
 * @param array $servers
 *
 * @return array
 */
public function configureServers(array $servers)
{
    $normalized = array();

    foreach ($servers as $index => $server) {
        if ($server['basic']) {
            $server['basic'] = base64_encode($server['basic']);
        }
        $normalized[$index] = $server;
    }

    return $normalized;
}
} |
require 'helper'
# Auto-generated placeholder suite (Test::Unit + shoulda style): the single
# `should` block deliberately flunks as a reminder to replace this file with
# real tests for the Safari cookiejar code.
class TestSafariCookiejar < Test::Unit::TestCase
should "probably rename this file and start testing for real" do
flunk "hey buddy, you should probably rename this file and start testing for real"
end
end
# Build pipeline for a LaTeX figure: compiles $(SOURCE).tex with pdflatex and
# converts the resulting PDF into gif/png/svg variants.
SOURCE = dot-product-4
DELAY = 80
DENSITY = 300
WIDTH = 512

# None of these targets produce a file named after themselves, so mark them
# phony to avoid being fooled by stray files with the same names.
.PHONY: make clean gif png transparentGif svg

make:
	# Options must precede the input file for pdflatex to honour them.
	pdflatex -output-format=pdf $(SOURCE).tex
	# Use $(MAKE) (not a bare `make`) so sub-invocations inherit flags/jobserver.
	$(MAKE) clean

clean:
	# $(TARGET) is not defined in this Makefile; it expands to nothing here
	# (kept for compatibility in case it is passed on the command line).
	rm -rf $(TARGET) *.class *.html *.log *.aux *.data *.gnuplot

gif:
	pdfcrop $(SOURCE).pdf
	convert -verbose -delay $(DELAY) -loop 0 -density $(DENSITY) $(SOURCE)-crop.pdf $(SOURCE).gif
	$(MAKE) clean

png:
	$(MAKE)
	$(MAKE) svg
	inkscape $(SOURCE).svg -w $(WIDTH) --export-png=$(SOURCE).png

transparentGif:
	convert $(SOURCE).pdf -transparent white result.gif
	$(MAKE) clean

svg:
	$(MAKE)
#inkscape $(SOURCE).pdf --export-plain-svg=$(SOURCE).svg
	pdf2svg $(SOURCE).pdf $(SOURCE).svg
# Necessary, as pdf2svg does not always create valid svgs:
	inkscape $(SOURCE).svg --export-plain-svg=$(SOURCE).svg
	rsvg-convert -a -w $(WIDTH) -f svg $(SOURCE).svg -o $(SOURCE)2.svg
	inkscape $(SOURCE)2.svg --export-plain-svg=$(SOURCE).svg
	rm $(SOURCE)2.svg
<?php
namespace TwbsHelper\Form\View\Helper\Factory;
/**
 * Service-manager factory for the TwbsHelper FormCollection view helper.
 * Pulls the module options out of the container and injects them into the
 * helper. Supports both the legacy ZF2 createService() entry point and the
 * Laminas/ZF3 __invoke() entry point.
 */
class <API key> implements \Laminas\ServiceManager\FactoryInterface
{
/**
 * Compatibility with ZF2 (>= 2.2) -> proxy to __invoke
 *
 * @param \Laminas\ServiceManager\<API key> $oServiceLocator
 * @param mixed $sCanonicalName unused; kept for interface compatibility
 * @param mixed $sRequestedName forwarded to __invoke()
 * @return \TwbsHelper\Form\View\Helper\FormCollection
 */
public function createService(
\Laminas\ServiceManager\<API key> $oServiceLocator,
$sCanonicalName = null,
$sRequestedName = null
): \TwbsHelper\Form\View\Helper\FormCollection {
return $this($oServiceLocator, $sRequestedName);
}
/**
 * Compatibility with Laminas and ZF3
 *
 * @param \Interop\Container\ContainerInterface $oContainer
 * @param mixed $sRequestedName unused; the helper class is fixed
 * @param array $aOptions unused; kept for interface compatibility
 * @return \TwbsHelper\Form\View\Helper\FormCollection
 */
public function __invoke(
\Interop\Container\ContainerInterface $oContainer,
$sRequestedName,
array $aOptions = null
): \TwbsHelper\Form\View\Helper\FormCollection {
$oOptions = $oContainer->get(\TwbsHelper\Options\ModuleOptions::class);
return new \TwbsHelper\Form\View\Helper\FormCollection($oOptions);
}
}
# TODO
## Retract rules
Facts already asserted into the working memory can be retracted using the FactHandle.
## Conflict resolution
Implement one or more of the following resolution strategies.
* Salience
* Recency
* Primacy
* Fifo
* Lifo
* Complexity
* Simplicity
* LoadOrder
* Random
Resolution strategies are chained together, so that when one conflict resolver finds two equal Activations, the next conflict resolver in the chain will try to decide. <API key> is used and takes an array of ConflictResolvers as its constructor argument.
Salience
Each rule has a salience attribute that can be assigned an Integer number; it defaults to zero and can be negative or positive.
Salience is a form of priority where rules with higher salience values are given higher priority when ordered in the activation queue. When a conflict occurs, i.e. more than one matching salience value for the current activation, then a sublist of those conflicts is returned.
Recency
Recency looks at the counter assigned to each Fact in an Activation. Activations with the highest counter are placed at the top of the Agenda
Primacy
Primacy looks at the counter assigned to each Fact in an Activation. Activations with the lowest counter are placed at the top of the Agenda
Fifo
A depth based strategy dictated by the order of activation. New Activations are placed on the top of the agenda. Due to each Activation creation being given a unique number, conflicts will not occur for this strategy.
Lifo
A breadth based strategy dictated by the order of activation. New Activations are placed on the bottom of the agenda. Due to each Activation creation being given a unique number, conflicts will not occur for this strategy.
Complexity
This is a specificity based strategy and takes into account the complexity of the conflicting rules. The more complex the rule, i.e. the more conditions it has, the more specific it is — the number of parameters is not taken into account. Activations with a higher specificity are placed at the top of the Agenda.
Simplicity
This is a specificity based strategy and takes into account the simplicity of the conflicting rules. The simpler the rule, i.e. the fewer conditions it has, the less specific it is — the number of parameters is not taken into account. Activations with a lower specificity are placed at the top of the Agenda.
LoadOrder
As each rule is added to a ruleset it is given a loadOrder number; this can be used to "arbitrarily" resolve conflicts. Rules with a higher loadOrder number are placed at the top of the Agenda. As each number is unique, conflicts will not occur for this strategy. This numbering is specific to the semantic module implementation and should not be considered a constant; as semantic modules are updated, loadOrder may change.
Random
Activations are randomly inserted into the Agenda;
http://legacy.drools.codehaus.org/Conflict+Resolution
## Observer pattern for events
* After an object is asserted, modified or retracted.
* After each condition check
* After an Activation creation and cancellation
* After a consequence is executed.
workingMemory.addEventListener(new <API key>());
agenda.addEventListener(new <API key>());
http://legacy.drools.codehaus.org/Event+Model |
# venus-runner package
An Atom plugin that'll run your venus tests for the current file on ⌃⌥R (CTRL+Option+R) ;
## Things to Note
You need to add your path to your init.coffee file in Atom. If anyone knows a fix for this, pull request me PLEASE.
process.env.PATH = ["/usr/bin",
"/usr/local/bin",
process.env.PATH].join(":")
You also need Venus.js and Phantom.js installed (if you've found this plugin, you probably already have them installed).
## Use
Open the file that you want to run Venus tests for
Press CTRL+Option+R
Venus will run and a Modal will pop up with tests results.
Then, the modal will casually disappear and allow you to carry on with your work after 2 seconds.
The colors should help make your test results obvious. If not, file an issue!
## FAQ
*Why are my test results returning NaN?*
It's one of two things:
1. You have a malformed test file, or
2. Your active tab isn't your test file.
(function (GRA) {
    "use strict";

    GRA.kernel = GRA.kernel || {};

    /**
     * Simple publish/subscribe message bus keyed by application name.
     *
     * @constructor
     */
    GRA.kernel.Bus = function Bus() {
        /**
         * Registered applications, indexed by application name.
         * @type {object}
         */
        var subscribers = {},
            /**
             * Number of entries currently held in `subscribers`.
             * @type {number}
             */
            subscribersLength = 0;

        /**
         * Delivers a message to one subscriber, if it is registered.
         *
         * @param {string} applicationId Application identifier
         * @param {*} message Payload passed to the subscriber's receive()
         */
        this.notify = function notify(applicationId, message) {
            if (subscribers.hasOwnProperty(applicationId)) {
                subscribers[applicationId].receive(message);
            }
        };

        /**
         * Broadcasts a message to every registered subscriber.
         *
         * @param {*} message Payload passed to each subscriber's receive()
         */
        this.notifyAll = function notifyAll(message) {
            var subscriberId;

            for (subscriberId in subscribers) {
                if (subscribers.hasOwnProperty(subscriberId)) {
                    subscribers[subscriberId].receive(message);
                }
            }
        };

        /**
         * @returns {number} Number of registered subscribers
         */
        this.size = function size() {
            return subscribersLength;
        };

        /**
         * Registers an application on the bus. Re-subscribing under an
         * already-used name replaces the previous entry.
         *
         * @param {object} application Must expose getName() and receive()
         */
        this.subscribe = function subscribe(application) {
            // Bug fix: only grow the counter for genuinely new names —
            // previously it was incremented even when overwriting an
            // existing entry, so size() drifted upwards.
            if (!subscribers.hasOwnProperty(application.getName())) {
                subscribersLength += 1;
            }
            subscribers[application.getName()] = application;
        };

        /**
         * Removes an application from the bus.
         *
         * @param {string} applicationId Application identifier
         */
        this.unsubscribe = function unsubscribe(applicationId) {
            // Bug fix: guard so that unsubscribing an unknown id cannot
            // drive the counter negative (previously it decremented
            // unconditionally).
            if (subscribers.hasOwnProperty(applicationId)) {
                delete subscribers[applicationId];
                subscribersLength -= 1;
            }
        };
    };
}(GRA || {}));
'use strict';
// Production specific configuration
module.exports = {
// Server IP: OpenShift supplies OPENSHIFT_NODEJS_IP; `undefined` makes the
// HTTP server bind to all available interfaces.
ip: process.env.OPENSHIFT_NODEJS_IP ||
process.env.IP ||
undefined,
// Server port: platform-provided variable first, then the generic PORT,
// finally the 8080 default.
port: process.env.<API key> ||
process.env.PORT ||
8080,
// MongoDB connection options
mongo: {
// Tries hosted providers (MongoLab, MongoHQ, OpenShift) before the local
// development database. NOTE(review): the OpenShift entry concatenates
// two env vars — presumably a connection-URL prefix plus the app name;
// verify both are always defined together in production, otherwise the
// expression yields a string containing "undefined".
uri: process.env.MONGOLAB_URI ||
process.env.MONGOHQ_URL ||
process.env.<API key>+process.env.OPENSHIFT_APP_NAME ||
'mongodb://localhost/fastorders'
}
};
// Splash layer: shows a "START!" label and a full-screen team splash image,
// then automatically advances to the SplashGame scene after ~3 seconds.
var SplashLudusLayer = cc.Layer.extend({
init:function()
{
this._super();
// Despite the name, `background` holds the window size in pixels.
var background = cc.Director.getInstance().getWinSizeInPixels();
var label = cc.LabelTTF.create("START!", "GhoulySolidRegular", 70);
label.setPosition(new cc.Point(background.width / 2, background.height / 2));
label.setColor(new cc.Color4B(0, 0, 0, 255));
this.addChild(label);
var fundo = cc.Sprite.create("res/screenshots/splashTeam_800-480.png");
fundo.setPositionX(background.width/2);
fundo.setPositionY(background.height/2);
// NOTE(review): the sprite is added after the label with the same default
// z-order, so it is drawn on top and likely hides the label — confirm
// whether the label should be added last or given a higher z-order.
this.addChild(fundo);
// One-shot timer: advance to the next scene after 3 seconds.
this.schedule(this.onTick1, 3);
return this;
},onTick1:function (dt) {
// Cross-fades (1.5 s, white) into the SplashGame scene.
var scene = cc.Scene.create();
scene.addChild(new SplashGame());
cc.Director.getInstance().replaceScene(cc.TransitionFade.create(1.5, scene, cc.c3b(255, 255, 255)));
}
});
// Scene wrapper: builds and attaches the splash layer when the scene is
// entered.
var SplashLudus = cc.Scene.extend({
onEnter:function(){
this._super();
var layer = new SplashLudusLayer();
layer.init();
this.addChild(layer);
}
});
<?php
// Login page: authenticates a user against the `users` table and opens a
// session. Expects db_connect.php to define $connect (mysqli) and $siteurl.
require_once 'php_action/db_connect.php';

session_start();

// Already-authenticated users go straight to the dashboard.
if (isset($_SESSION['userId'])) {
    header('location: '.$siteurl.'/dashboard.php');
    // Bug fix: without exit the rest of the page would still execute and be
    // sent after the redirect header.
    exit;
}

$errors = array();

if ($_POST) {
    $username = $_POST['username'];
    $password = $_POST['password'];

    if (empty($username) || empty($password)) {
        if ($username == "") {
            $errors[] = "Username is required";
        }
        if ($password == "") {
            $errors[] = "Password is required";
        }
    } else {
        // Security fix: use a prepared statement — the previous
        // string-interpolated query was vulnerable to SQL injection
        // through the username field.
        $stmt = $connect->prepare("SELECT * FROM users WHERE username = ?");
        $stmt->bind_param('s', $username);
        $stmt->execute();
        $result = $stmt->get_result();

        if ($result->num_rows == 1) {
            // NOTE(security): md5 is kept only because the stored hashes use
            // it; migrate to password_hash()/password_verify() when the user
            // table can be rehashed.
            $passwordHash = md5($password);

            $mainStmt = $connect->prepare("SELECT * FROM users WHERE username = ? AND password = ?");
            $mainStmt->bind_param('ss', $username, $passwordHash);
            $mainStmt->execute();
            $mainResult = $mainStmt->get_result();

            if ($mainResult->num_rows == 1) {
                $value = $mainResult->fetch_assoc();
                // set session
                $_SESSION['userId'] = $value['user_id'];
                header('location: '.$siteurl.'/dashboard.php');
                exit;
            } else {
                $errors[] = "Incorrect username/password combination";
            } // /else
        } else {
            $errors[] = "Username does not exist";
        } // /else
    } // /else not empty username // password
} // /if $_POST
?>
<!DOCTYPE html>
<html>
<head>
<title>Stock Management System</title>
<!-- bootstrap -->
<link rel="stylesheet" href="assests/bootstrap/css/bootstrap.min.css">
<!-- bootstrap theme-->
<link rel="stylesheet" href="assests/bootstrap/css/bootstrap-theme.min.css">
<!-- font awesome -->
<link rel="stylesheet" href="assests/font-awesome/css/font-awesome.min.css">
<!-- custom css -->
<link rel="stylesheet" href="custom/css/custom.css">
<!-- jquery -->
<script src="assests/jquery/jquery.min.js"></script>
<!-- jquery ui -->
<link rel="stylesheet" href="assests/jquery-ui/jquery-ui.min.css">
<script src="assests/jquery-ui/jquery-ui.min.js"></script>
<!-- bootstrap js -->
<script src="assests/bootstrap/js/bootstrap.min.js"></script>
</head>
<body>
<div class="container">
<div class="row vertical">
<div class="col-md-5 col-md-offset-4">
<div class="panel panel-info">
<div class="panel-heading">
<h3 class="panel-title">Please Sign in</h3>
</div>
<div class="panel-body">
<div class="messages">
<?php if($errors) {
foreach ($errors as $key => $value) {
echo '<div class="alert alert-warning" role="alert">
<i class="glyphicon <API key>"></i>
'.$value.'</div>';
}
} ?>
</div>
<form class="form-horizontal" action="<?php echo $_SERVER['PHP_SELF'] ?>" method="post" id="loginForm">
<fieldset>
<div class="form-group">
<label for="username" class="col-sm-2 control-label">Username</label>
<div class="col-sm-10">
<input type="text" class="form-control" id="username" name="username" placeholder="Username" autocomplete="off" />
</div>
</div>
<div class="form-group">
<label for="password" class="col-sm-2 control-label">Password</label>
<div class="col-sm-10">
<input type="password" class="form-control" id="password" name="password" placeholder="Password" autocomplete="off" />
</div>
</div>
<div class="form-group">
<div class="col-sm-offset-2 col-sm-10">
<button type="submit" class="btn btn-default"> <i class="glyphicon glyphicon-log-in"></i> Sign in</button>
</div>
</div>
</fieldset>
</form>
</div>
<!-- panel-body -->
</div>
<!-- /panel -->
</div>
<!-- /col-md-5 -->
</div>
<!-- /row -->
</div>
<!-- container -->
</body>
</html> |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;

namespace Included.Areas.TestArea.Controllers
{
    /// <summary>
    /// Demo controller for the TestArea MVC area.
    /// </summary>
    public class ExampleController : Controller
    {
        /// <summary>
        /// GET: /TestArea/Example/ — renders the default view with a
        /// success marker placed in the ViewBag.
        /// </summary>
        public ActionResult Index()
        {
            // Message consumed by the Index view.
            this.ViewBag.Message = "SUCCESS!!!";

            return this.View();
        }
    }
}
module.exports = estimateFloat;

/**
 * Sums all entries of `predicate` left-to-right and returns the total.
 * (The original unrolled lengths 1-4 as a micro-optimization; a single
 * accumulation loop adds the elements in the same order and yields the
 * same result. The single-element case is returned directly, exactly as
 * before.)
 *
 * @param {number[]} predicate Array of addends
 * @returns {number} The left-to-right sum; 0 for an empty array
 */
function estimateFloat(predicate) {
    var count = predicate.length;

    if (count === 1) {
        return predicate[0];
    }

    var total = 0;
    for (var idx = 0; idx < count; idx++) {
        total += predicate[idx];
    }
    return total;
}
// Modal "cool down" overlay: dims the screen, shows an icon plus two text
// labels, and offers a close button and an OK button that both notify the
// parent via its coolDown() callback before removing the overlay.
// NOTE(review): the label strings below (" ?", " .\n \n!") appear
// mis-encoded — the non-ASCII characters were stripped somewhere; restore
// the original localized text from version control.
var DialogCoolDownLayer = cc.Layer.extend({
ctor:function () {
this._super();
var size = cc.winSize;
// Nearly-opaque black backdrop that dims the scene underneath.
var backgroundLayer = new cc.LayerColor();
backgroundLayer.setColor(cc.color(0,0,0));
backgroundLayer.opacity = 230;
this.addChild(backgroundLayer);
var pageView = new ccui.PageView();
pageView.setTouchEnabled(true);
pageView.setContentSize(cc.size(size.width, size.height));
pageView.x = (size.width - pageView.width) / 2;
pageView.y = (size.height - pageView.height) / 2;
this.addChild(pageView);
// Close button: fades the dialog out, reports coolDown(1) to the parent
// and removes the overlay.
var backBtn = new SimpleMenu(res.close_png, res.close_png, res.close_png, function(sender) {
sender.parent.parent.runAction(new cc.Sequence(
new cc.FadeOut(0.1),
new cc.CallFunc(function(sender) {
sender.parent.coolDown(1);
sender.removeFromParent(true);
})
));
sender.runAction(new cc.ScaleTo(0.1, 1.5));
}, this);
backBtn.attr({
x : 470,
y : -50,
scale : 0.3
});
this.addChild(backBtn);
var sprite = new cc.Sprite(res.cooldown_yellow_png);
sprite.attr({
x : 180,
y : size.height / 2,
scale : 2
});
this.addChild(sprite);
var label = new cc.LabelTTF(" ?", "Arial", 60);
label.attr({
x : size.width / 2 + 50,
y : size.height / 2 + 250
});
this.addChild(label);
var label2 = new cc.LabelTTF(" .\n \n!", "Arial", 40);
label2.attr({
x : size.width / 2 + 50,
y : size.height / 2 + 50,
color : cc.color(200, 200, 200)
});
this.addChild(label2);
// OK button: pops briefly, reports coolDown(0) to the grandparent's
// parent and removes the overlay (note the different parent depth
// compared to the close button above).
var okBtn = new SimpleMenu(res.<API key>, res.<API key>, res.<API key>, function(sender) {
sender.runAction(new cc.Sequence(new cc.ScaleTo(0.1, 1.5),
new cc.CallFunc(function(sender) {
sender.parent.parent.parent.coolDown(0);
sender.parent.parent.removeFromParent();
})));
}, this);
okBtn.attr({
x : 200,
y : 0,
scale : 0.4
});
this.addChild(okBtn);
// Android hardware back button: return to the main menu scene.
if (cc.sys.capabilities.hasOwnProperty('keyboard')) {
cc.eventManager.addListener(cc.EventListener.create({
event: cc.EventListener.KEYBOARD,
onKeyReleased: function(keyCode, event){
if (keyCode == cc.KEY.back) {
cc.neurostudy.isSelectPkgShow = true;
cc.director.runScene(new MenuScene());
}
}
}), this);
}
}
});
// Includes
#include <stdio.h>
#include <windows.h>
#include "resource.h"
#include "Terrain.h"
// Globals
char szWinName[] = "CHAOSWindow";   // registered window class name
HINSTANCE g_hGlobalInstance;        // application instance handle
HWND g_hWnd;                        // main window handle
CTerrain terrTile;                  // terrain tile being generated/edited
CLogFunc g_LogFunc;                 // function object seeded in FaultLineDialog (logistic option)
// Definitions (forward declarations of the window/dialog procedures and
// the input-event dispatch helpers defined below)
LRESULT CALLBACK WindowProc( HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam );
BOOL FAR PASCAL FaultLineDialog( HWND hWnd, UINT msg, WPARAM wParam, LPARAM lParam );
BOOL FAR PASCAL SetGridDialog( HWND hWnd, UINT msg, WPARAM wParam, LPARAM lParam );
int ProcMenuEvent( HWND hWnd, WPARAM wParam, LPARAM lParam );
int ProcKeyEvent( HWND hWnd, WPARAM wParam, LPARAM lParam );
int ProcMouseEvent( HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam );
// Main entry point for the application.
// Registers the window class, creates the main window (title shows the
// current terrain filename) and pumps messages until WM_QUIT.
int WINAPI WinMain( HINSTANCE hThisAppInstance,
                    HINSTANCE hPrevInstance,
                    LPSTR lpszCmdArguments,
                    int iWindowShowState )
{
    MSG message;
    WNDCLASSEX wndClass;
    TCHAR acBuffer[MAX_PATH];
    BOOL bMessageStatus;

    g_hGlobalInstance = hThisAppInstance;

    // Describe and register the single window class used by the app.
    wndClass.cbSize = sizeof(WNDCLASSEX);
    wndClass.hInstance = g_hGlobalInstance;
    wndClass.lpszClassName = szWinName;
    wndClass.lpfnWndProc = WindowProc;
    wndClass.style = CS_HREDRAW | CS_VREDRAW;
    wndClass.hIcon = LoadIcon( NULL, IDI_APPLICATION );
    wndClass.hIconSm = LoadIcon( NULL, IDI_WINLOGO );
    wndClass.hCursor = LoadCursor( NULL, IDC_ARROW );
    wndClass.lpszMenuName = MAKEINTRESOURCE( CHAOSMENU );
    wndClass.cbClsExtra = 0;
    wndClass.cbWndExtra = 0;
    wndClass.hbrBackground = (HBRUSH)GetStockObject( WHITE_BRUSH );

    if ( !RegisterClassEx( &wndClass ) )
    {
        return 0;
    }

    sprintf( acBuffer, "Fractal Terrain Generator - [%s]", terrTile.GetFilename() );

    g_hWnd = CreateWindow( szWinName,
                           acBuffer,
                           WS_OVERLAPPEDWINDOW,
                           CW_USEDEFAULT, CW_USEDEFAULT,
                           800, 600,
                           HWND_DESKTOP,
                           NULL,
                           g_hGlobalInstance,
                           NULL );
    // Robustness: the previous code used the handle without checking it.
    if ( g_hWnd == NULL )
    {
        return 0;
    }

    ShowWindow( g_hWnd, iWindowShowState );
    UpdateWindow( g_hWnd );

    // Standard message pump. GetMessage returns -1 on error and 0 on
    // WM_QUIT; the previous loop treated -1 (nonzero) as "keep going" and
    // could spin forever on a broken message queue.
    while ( ( bMessageStatus = GetMessage( &message, NULL, 0, 0 ) ) != 0 )
    {
        if ( bMessageStatus == -1 )
        {
            return 0;
        }
        TranslateMessage( &message );
        DispatchMessage( &message );
    }

    // wParam of the WM_QUIT message is the process exit code.
    return (int)message.wParam;
}
// Main window procedure: repaints the terrain on WM_PAINT and routes menu,
// keyboard and mouse input to the dedicated Proc*Event helpers.
LRESULT CALLBACK WindowProc( HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam )
{
static HDC hdc; // NOTE(review): `static` is unnecessary — hdc is reassigned on every WM_PAINT
PAINTSTRUCT ps;
switch( message )
{
case WM_PAINT:
// Repaint the whole terrain. NOTE(review): the (-1, -1) arguments
// presumably mean "no specific cell" — confirm against CTerrain::Draw.
hdc = BeginPaint( hWnd, &ps );
terrTile.Draw( hWnd, hdc, -1, -1 );
EndPaint( hWnd, &ps );
break;
case WM_COMMAND:
ProcMenuEvent( hWnd, wParam, lParam );
break;
case WM_KEYDOWN:
ProcKeyEvent( hWnd, wParam, lParam );
break;
// All mouse buttons and movement share one handler.
case WM_LBUTTONDOWN:
case WM_LBUTTONUP:
case WM_LBUTTONDBLCLK:
case WM_RBUTTONDOWN:
case WM_RBUTTONUP:
case WM_RBUTTONDBLCLK:
case WM_MBUTTONDOWN:
case WM_MBUTTONUP:
case WM_MBUTTONDBLCLK:
case WM_MOUSEMOVE:
ProcMouseEvent( hWnd, message, wParam, lParam );
break;
case WM_DESTROY:
PostQuitMessage( 0 );
break;
case WM_SIZE:
// Force a full repaint after a resize.
InvalidateRect( hWnd, NULL, TRUE );
break;
default:
return DefWindowProc( hWnd, message, wParam, lParam );
}
return 0;
}
// Process any menu events that occur.
// Dispatches WM_COMMAND notifications from the main menu to the terrain
// object or to the relevant dialog box.
int ProcMenuEvent( HWND hWnd, WPARAM wParam, LPARAM lParam )
{
    switch( LOWORD( wParam ) )
    {
        // File menu options
        case CHAOS_FILE_EXIT:
        {
            PostQuitMessage( 0 );
        }
        break;
        case CHAOS_FILE_SAVE:
        {
            terrTile.Save();
        }
        break;
        case CHAOS_FILE_SAVEAS:
        {
            TCHAR szFilenameBuffer[MAX_PATH];
            TCHAR szFullPathBuffer[MAX_PATH];
            OPENFILENAME ofn;

            // Bug fix: zero the whole structure first — the previous code
            // left several members uninitialized, which makes the common
            // dialog behave unpredictably.
            ZeroMemory( &ofn, sizeof(OPENFILENAME) );
            szFilenameBuffer[0] = TEXT('\0');
            szFullPathBuffer[0] = TEXT('\0');

            ofn.lStructSize = sizeof(OPENFILENAME);
            ofn.hwndOwner = hWnd;
            ofn.hInstance = NULL;
            // Bug fix: filter entries are description/pattern pairs, each
            // terminated by '\0'; a lone "*.tga" string is not a valid filter.
            ofn.lpstrFilter = TEXT( "Targa Image (*.tga)\0*.tga\0" );
            ofn.lpstrCustomFilter = NULL;
            ofn.nMaxCustFilter = 0;
            ofn.nFilterIndex = 0;
            // Bug fix: lpstrFile must point to a writable buffer that
            // receives the full path; it was previously NULL, which makes
            // GetSaveFileName fail.
            ofn.lpstrFile = szFullPathBuffer;
            ofn.nMaxFile = MAX_PATH;
            ofn.lpstrFileTitle = &szFilenameBuffer[0];
            ofn.nMaxFileTitle = MAX_PATH;
            ofn.lpstrInitialDir = NULL;
            ofn.lpstrTitle = TEXT( "Save As" );
            ofn.lpstrDefExt = TEXT ( "tga" );
            ofn.Flags = OFN_OVERWRITEPROMPT;

            if ( GetSaveFileName( &ofn ) )
            {
                TCHAR szFilename[MAX_PATH];

                // Append the default extension if the user left it off.
                if ( strstr( ofn.lpstrFileTitle, ".tga" ) != NULL )
                {
                    sprintf( szFilename, "%s", ofn.lpstrFileTitle );
                }
                else
                {
                    sprintf( szFilename, "%s%s%s", ofn.lpstrFileTitle, TEXT("."), ofn.lpstrDefExt );
                }

                terrTile.SetFilename( szFilename );
                terrTile.Save();
            }
        }
        break;
        // Terrain menu options
        case <API key>:
        {
            InvalidateRect( hWnd, NULL, TRUE );
        }
        break;
        case <API key>:
        {
            char acBuffer[MAX_PATH];
            FLOAT fDimension = terrTile.<API key>();

            sprintf( acBuffer, "Fractal Dimension: %.3f", fDimension );
            MessageBox( hWnd, acBuffer, "", MB_OK );
        }
        break;
        case <API key>:
        {
            DialogBox( g_hGlobalInstance, MAKEINTRESOURCE(IDD_SETGRID), hWnd, (DLGPROC)SetGridDialog );
        }
        break;
        case <API key>:
        {
            DialogBox( g_hGlobalInstance, MAKEINTRESOURCE(IDD_FAULTDLG), hWnd, (DLGPROC)FaultLineDialog );
        }
        break;
        case CHAOS_TERRAIN_BLUR:
        {
            terrTile.Blur( 1 );
        }
        break;
        case <API key>:
        {
            terrTile.Blur( 4 );
        }
        break;
    }
    return 1;
}
// Request settings for fault line terrain generation.
// Dialog procedure: reads iteration count, start/finish fault depths and the
// logistic-function / retain-all options from the dialog controls, then runs
// terrTile.GenerateFaultLines() with them.
BOOL FAR PASCAL FaultLineDialog( HWND hWnd, UINT msg, WPARAM wParam, LPARAM lParam )
{
switch (msg)
{
case WM_INITDIALOG:
{
// Pre-fill the edit controls with default values.
SendDlgItemMessage( hWnd, IDC_ITERATION_NUM, WM_SETTEXT, 0, (LPARAM)(LPCTSTR)"512" );
SendDlgItemMessage( hWnd, <API key>, WM_SETTEXT, 0, (LPARAM)(LPCTSTR)"10" );
SendDlgItemMessage( hWnd, <API key>, WM_SETTEXT, 0, (LPARAM)(LPCTSTR)"1" );
}
break;
case WM_COMMAND:
switch(LOWORD(wParam))
{
case IDOK:
{
char acBuffer[64];
int iIterations, iFaultDepthStart, iFaultDepthFinish, iFixedFaultDepth, iNumChars;
bool bUseLogisticFunc, bRetainAllValues;
// Get number of iterations
iNumChars = SendDlgItemMessage( hWnd, IDC_ITERATION_NUM, WM_GETTEXTLENGTH, 0, 0 );
SendDlgItemMessage( hWnd, IDC_ITERATION_NUM, WM_GETTEXT, 62, (LPARAM)(LPCTSTR)acBuffer );
iIterations = atoi( &acBuffer[0] );
// Get fault depth start
iNumChars = SendDlgItemMessage( hWnd, <API key>, WM_GETTEXTLENGTH, 0, 0 );
SendDlgItemMessage( hWnd, <API key>, WM_GETTEXT, 62, (LPARAM)(LPCTSTR)acBuffer );
iFaultDepthStart = atoi( &acBuffer[0] );
// Get fault depth finish
iNumChars = SendDlgItemMessage( hWnd, <API key>, WM_GETTEXTLENGTH, 0, 0 );
SendDlgItemMessage( hWnd, <API key>, WM_GETTEXT, 62, (LPARAM)(LPCTSTR)acBuffer );
iFaultDepthFinish = atoi( &acBuffer[0] );
// Determine whether to interpolate the fault line depth
// (0 here appears to mean "interpolate between start and finish").
if ( IsDlgButtonChecked( hWnd, <API key> ) == BST_CHECKED )
{
iFixedFaultDepth = 0;
}
else
{
iFixedFaultDepth = iFaultDepthStart;
}
bUseLogisticFunc = IsDlgButtonChecked( hWnd, <API key> ) == BST_CHECKED ? true : false;
if ( bUseLogisticFunc )
{
// NOTE(review): when the "random seed" box is CHECKED the seed is
// derived from the height field, and only when UNCHECKED is rand()
// used — this looks inverted relative to the control's name; confirm
// against the dialog resource before changing.
if ( IsDlgButtonChecked( hWnd, IDC_CHK_RANDOM_SEED ) == BST_CHECKED )
{
g_LogFunc.Seed() = terrTile.GetAvgHeight() / terrTile.MaxHeight();
g_LogFunc.Reset();
}
else
{
FLOAT fMax = RAND_MAX;
FLOAT fRandomSeed = (FLOAT)rand() / fMax;
g_LogFunc.Seed() = fRandomSeed;
g_LogFunc.Reset();
}
}
bRetainAllValues = IsDlgButtonChecked( hWnd, IDC_CHK_RETAINALL ) == BST_CHECKED ? true : false;
terrTile.GenerateFaultLines( iIterations, iFaultDepthStart, iFaultDepthFinish, iFixedFaultDepth, bUseLogisticFunc, bRetainAllValues, hWnd );
EndDialog( hWnd, TRUE );
}
break;
case IDCANCEL:
{
EndDialog( hWnd, TRUE );
}
break;
case <API key>:
{
// The height-field seed option is only meaningful when the logistic
// function is enabled, so toggle its checkbox accordingly.
if ( IsDlgButtonChecked( hWnd, <API key> ) == BST_CHECKED )
{
// Enable the "seed from height field" check box
EnableWindow( GetDlgItem( hWnd, IDC_CHK_RANDOM_SEED ), TRUE );
}
else
{
// Disable the "seed from height field" check box
EnableWindow( GetDlgItem( hWnd, IDC_CHK_RANDOM_SEED ), FALSE );
}
}
break;
default:
break;
}
default:
break;
}
return FALSE;
}
// Set the grid to a specified value.
// Dialog procedure for the "set grid" box: reads an integer from the
// IDC_GRIDVALUE edit control and clears the whole terrain grid to it.
BOOL FAR PASCAL SetGridDialog( HWND hWnd, UINT msg, WPARAM wParam, LPARAM lParam )
{
    switch (msg)
    {
        case WM_INITDIALOG:
        {
            // Pre-fill the edit control with a default of 0.
            SendDlgItemMessage( hWnd, IDC_GRIDVALUE, WM_SETTEXT, 0, (LPARAM)(LPCTSTR)"0" );
        }
        break;
        case WM_COMMAND:
            switch(LOWORD(wParam))
            {
                case IDOK:
                {
                    char acBuffer[64];
                    int iGridValue, iNumChars;

                    // Get required grid value
                    iNumChars = SendDlgItemMessage( hWnd, IDC_GRIDVALUE, WM_GETTEXTLENGTH, 0, 0 );
                    SendDlgItemMessage( hWnd, IDC_GRIDVALUE, WM_GETTEXT, 62, (LPARAM)(LPCTSTR)acBuffer );
                    iGridValue = atoi( &acBuffer[0] );

                    terrTile.ClearGrid( iGridValue );
                    EndDialog( hWnd, TRUE );
                }
                break;
                case IDCANCEL:
                {
                    EndDialog( hWnd, TRUE );
                }
                break;
                // (a stray, unreachable second `break;` was removed here)
            }
        break;
        default:
        break;
    }
    // FALSE lets the default dialog handler process anything we ignored.
    return FALSE;
}
// Process any key events that occur.
// Currently a stub: the arrow keys are recognised but no behaviour is bound
// to them yet; every key results in the same return value.
int ProcKeyEvent( HWND hWnd, WPARAM wParam, LPARAM lParam )
{
    // Retrieve key scan code
    const int iVirtualKey = (int)wParam;

    if ( iVirtualKey == VK_UP ||
         iVirtualKey == VK_LEFT ||
         iVirtualKey == VK_DOWN ||
         iVirtualKey == VK_RIGHT )
    {
        // Arrow keys: reserved for future navigation — intentionally no-op.
    }

    return 1;
}
// Process any mouse events that occur.
// Currently a stub: coordinates are extracted but no action is taken yet,
// and iXPos/iYPos are never read after being assigned.
int ProcMouseEvent( HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam )
{
unsigned int iXPos, iYPos;
// Retrieve mouse coords. NOTE(review): LOWORD/HIWORD yield unsigned values;
// if multi-monitor/negative client coordinates ever matter, switch to
// GET_X_LPARAM/GET_Y_LPARAM.
switch( message )
{
case WM_LBUTTONDOWN:
case WM_LBUTTONUP:
case WM_RBUTTONDOWN:
case WM_RBUTTONUP:
case WM_MOUSEMOVE:
iXPos = LOWORD( lParam );
iYPos = HIWORD( lParam );
break;
default:
break;
}
// Process event (placeholder — all cases currently fall through to break).
switch( message )
{
case WM_LBUTTONDOWN:
case WM_LBUTTONUP:
case WM_RBUTTONDOWN:
case WM_RBUTTONUP:
case WM_MOUSEMOVE:
default:
break;
}
return 1;
}
package model;
/**
 * Base class for every object that can be placed on the game map. A map
 * object has a {@link Position} and a visibility flag; equality is defined
 * purely by position.
 *
 * @author Philipp Winter
 * @author Jonas Heidecke
 * @author Niklas Kaddatz
 */
@SuppressWarnings("unused")
public abstract class MapObject {

    // Current position on the map; managed exclusively via setPosition().
    protected Position position;

    // Whether this object should currently be rendered.
    protected boolean visible = true;

    /**
     * @param pos the position to compare against
     * @return true when this object currently occupies {@code pos}
     */
    public boolean isOnPosition(Position pos) {
        return this.getPosition().equals(pos);
    }

    public Position getPosition() {
        return position;
    }

    /**
     * Moves this object to {@code pos}, unregistering it from its previous
     * position and queueing that position for re-rendering so no stale
     * sprite is left behind.
     *
     * @param pos the new position, never null
     */
    protected void setPosition(Position pos) {
        if (pos == null) {
            throw new <API key>("Position cannot be null.");
        }
        Position oldPos = this.position;
        if (oldPos != null) {
            oldPos.remove(this);
            Map.positionsToRender.add(oldPos);
        }
        this.position = pos;
        this.position.add(this);
    }

    public boolean isVisible() {
        return visible;
    }

    protected void setVisible(boolean value) {
        visible = value;
    }

    /**
     * Two map objects are equal when they occupy the same position.
     * (instanceof already returns false for null, so the previous explicit
     * null check was redundant.)
     */
    @Override
    public boolean equals(Object o) {
        if (o instanceof MapObject) {
            MapObject mO = (MapObject) o;
            return this.getPosition().equals(mO.getPosition());
        }
        return false;
    }

    /**
     * Bug fix: equals() was overridden without hashCode(), which breaks the
     * Object contract and hash-based collections. Hashes by position to stay
     * consistent with equals().
     */
    @Override
    public int hashCode() {
        return position == null ? 0 : position.hashCode();
    }
}
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" /><meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1" />
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1" />
<meta name="viewport" content="width=device-width, user-scalable=no, initial-scale=1, maximum-scale=1.0, minimum-scale=1" />
<title>AXSearch - AXISJ</title>
<link rel="shortcut icon" href="../../ui/axisj.ico" type="image/x-icon" />
<link rel="icon" href="../../ui/axisj.ico" type="image/x-icon" />
<link rel="<API key>" sizes="114x114" href="../../ui/AXJ.png" />
<link rel="<API key>" href="../../ui/AXJ.png" />
<meta property="og:image" content="/samples/_img/axisj_sns.png" />
<meta property="og:site_name" content="Axis of Javascript - axisj.com" />
<meta property="og:description" id="meta_description" content="Javascript UI Library based on JQuery" />
<!-- css block -->
<link rel="stylesheet" type="text/css" href="../../ui/arongi/font-awesome.min.css">
<link rel="stylesheet" type="text/css" href="../../ui/arongi/page.css">
<link rel="stylesheet" type="text/css" href="../../ui/arongi/AXJ.min.css">
<script type="text/javascript" src="../../jquery/jquery.min.js"></script>
<script type="text/javascript" src="../../dist/AXJ.min.js"></script>
<script type="text/javascript" src="../page.js"></script>
<script type="text/javascript" src="pageTab.js"></script>
<!-- js block -->
<script>
/**
* Require Files for AXISJ UI Component...
* Based : jQuery
* Javascript : AXJ.js, AXInput.js, AXSelect.js, AXSearch.js, AXGrid.js, AXTab.js, AXButton.js
* CSS : AXJ.css, AXInput.css, AXSelect.css, AXSearch.css, AXGrid.css, AXTab.css, AXButton.css
*/
var pageID = "nonScript";
var fnObj = {
pageStart: function(){
jQuery("#expandHandle").click(function(){
if(jQuery(this).data("expand") == "open"){
jQuery(this).data("expand", "close");
jQuery(".<API key>").hide();
jQuery(this).html("");
}else{
jQuery(this).data("expand", "open");
jQuery(".<API key>").show();
jQuery(this).html(" ");
}
});
}
};
jQuery(document).ready(fnObj.pageStart.delay(0.1));
</script>
<style type="text/css">
</style>
</head>
<body>
<div id="AXPage">
<!-- s.AXPageBody -->
<div id="AXPageBody" class="SampleAXSelect">
<div id="demoPageTabTarget" class="AXdemoPageTabTarget"></div>
<div class="AXdemoPageContent">
<div class="title"><h1>AXSearch</h1></div>
<h3>style 3 - </h3>
<div>
<table cellpadding="0" cellspacing="0" class="AXSearchTable">
<colgroup>
<col width="100" />
<col />
<col width="100" />
<col />
</colgroup>
<tbody>
<tr class="gray">
<th>
<div class="tdRel"></div>
</th>
<td>
<div class="tdRel">
<input type="text" name="" value="" class="AXInput" />
</div>
</td>
<th>
<div class="tdRel"></div>
</th>
<td class="last">
<div class="tdRel">
<input type="text" name="" value="" class="AXInput" />
<input type="button" class="AXButton" value="Search" />
</div>
</td>
</tr>
<tr class="<API key>" style="display:none;">
<th>
<div class="tdRel"></div>
</th>
<td>
<div class="tdRel">
<input type="text" name="" value="" class="AXInput" />
</div>
</td>
<th>
<div class="tdRel"></div>
</th>
<td class="last">
<div class="tdRel">
<input type="text" name="" value="" class="AXInput" />
<input type="button" class="AXButton" value="Search" />
</div>
</td>
</tr>
<tr class="<API key>" style="display:none;">
<th>
<div class="tdRel"></div>
</th>
<td>
<div class="tdRel">
<input type="text" name="" value="" class="AXInput" />
</div>
</td>
<th>
<div class="tdRel"></div>
</th>
<td class="last">
<div class="tdRel">
<input type="text" name="" value="" class="AXInput" />
<input type="button" class="AXButton" value="Search" />
</div>
</td>
</tr>
</tbody>
</table>
<div class="AXSearch">
<a href="#axexec" class="expandHandle" id="expandHandle"></a>
</div>
</div>
</div>
</div>
<!-- e.AXPageBody -->
</div>
</body>
</html> |
"""Solvebio DatasetField API Resource"""
from .solveobject import <API key>
from .apiresource import <API key>
from .apiresource import ListableAPIResource
from .apiresource import <API key>
from .apiresource import <API key>
class DatasetField(<API key>,
                   ListableAPIResource,
                   <API key>,
                   <API key>):
    """
    Each SolveBio dataset has a different set of fields, some of
    which can be used as filters. Dataset field resources provide
    users with documentation about each field.
    """
    # API resource version used when constructing this resource's URLs.
    RESOURCE_VERSION = 2

    def facets(self, **params):
        """Return the facets (value distribution) for this field.

        Issues a GET against this field's ``facets_url``; keyword
        arguments are forwarded as query parameters.
        """
        response = self._client.get(self.facets_url, params)
        return <API key>(response, client=self._client)

    def help(self):
        """Convenience alias for :meth:`facets`."""
        return self.facets()
# Use the widgets beneath and the content will be
# inserted automagically in the webpage. To make
layout: page-fullwidth
header:
image_fullwidth: <API key>.jpg
permalink: /laos/index.html
<div class="medium-12" markdown="1">
<img width="424" src="{{ site.urlimg }}seaacc-logo.png">
<font face="Alice_5" size="5">
<!-- WHO WE ARE -->
<font size="12">r;dgIqk7n.z</font><br />
<div style="line-height: 1.0">
raomt,y8ly]xtc]t;ammtotma,8kg;aovvdlP’.8H0v’c-oc2iolyl3d wfH4ndd+8A’0bho.oxu @W!!
3fpd5J,8Jk’-k8gzqjkrao8Jk’Mmjuvklap1J6.og08c-oc2iolyl3dg[p%gvgiaP r;<API key>’glu,
vto5]adl%;ammtotma,.o-5,-qogvg-aP8kg;aovvdlP’.8Hc]tIH6;Jk9t8Hv’,udkoxJPocx’ dJ5,raomt,y84nd8A’0Bo
grnjvmju9t-J;p.sH-5,-qo.orNomjuoUwfHIa[I6Hg]njv’;ammtotma,0v’-k;8kg;aovvdlP’.8HwfHs]kp0Bo
<br /><br />
pJko Tenderloin ,uxts;aflklvaopk;oko;<API key>[9kdgvg-aP8kg;aovvdlP’.8HlJ;o.sP
vklap1J6 .o0totmju7qovqrpq[9kdgvg-aP8kg;aovvdlP’.8H .orNomjuoU9t9af’kol]v’<API key>’da[
-k8gzqjkrao0v’8q;gv’ r;dgIqk1kd9tlHk’raomt,y8mjuc0’cI’mjugxao8q;cmo0v’s]kp-qo-k8mju9tgIaf;PdIJ;,daogrnjvc[J’xaoc’J,5,0v’;ammtotma,,6o,=]tfqdmjuc8d8Jk’dao0v’c8J]txtgml.omjufP;daoc]tg;]k
fP;dao
<br /><br />
r;dgIqk8Hv’dkomju9tgl]u,l]v’7;k,s]kds]kp0v’-5,-qo.orNomju .sHxt-k-qomqj;wxwfHIH6dJP;da[dJ5,7qo
8Jk’<API key> c]tgrnjv lqj’glu, c]tltcf’ly]xt,=itfqd 0v’s]kpM-k80v’-k;vqrpq[mjuvklap
1J6.orNomjuoU ‘ko0v’ SEAACC <API key>\J5 ’ \<API key> gmogfu]vpo
wxmqj;c-oc2iolyl3dg[p%gvgiaPma’ \qf
<br /><br />
37’dko SEAACC-SF 4ndlHk’0Bo,k9kd Xc8J[+9edaflgrkt? sHk dJ5,8Jk’-k8gzqjkrao-bj’,uG rt,Hkvtg,iydaoF
0g,o vtg,iydaoF ]k;vtg,iydaoF wmp%vtg,iydaoF c]t ;Pfok,vtg,iydaoD dkoI;,8q;9kds]kpdJ5,.o
vq’dvo0v’r;dgIqk-Jvp.sHgIqklk,kfoegvqkzH67qomju,urNo4ko-u;yfm5dMmk’g0Qk,kF c8J]t7qowfHc[J ’ xaoly ’ j .\JM
mjugxaogvd]ad0v’8q;gv’.sHda[ vq’dvo0v’r;dgIqkD lt,k-ydmjud+8ah ’ I;,wxg4y ’ 8q;cmo9kd|J;p’ko8Jk ’m
it[5w;Hmk’0Hk ’]5J,oUD
<br /><br />
</div>
</font>
- Au Co Vietnamese Cultural Center
- Burmese Youth Association (BYA)
- International Lao New Year Festival
- Lao Seri Association
- Laotian American National Alliance
- Samaki Project
- San Francisco Recreation & Park
- South East Asian Cultural Heritage & Music Performing Arts (SEACHAMPA)
<font face="Alice_5" size="5">
<div style="line-height: 1.0">
r;dgIqkg]nvdmju9tgoAo|adwxmk’ly]xtc]t;ammtotma,0v’c8J]tdJ5,-k8gzqjkrao gxao;ymudkoI;,dJ5,0v’gIqk
.o0totfP;daopa ’ wfHIPoIH6g4y ’ 7;k,c8d8Jk ’0v’c8J]t-k8D ly ’ j mjur;dgIqk9tlcf’,usHkly ’ j fH;pdaoG rklkF
vkskoF cr8jecrm=F dko2Hvo;aoot0tfuc]tg7njv ’fqo8iuF c]tdkolg]u,l]v’xu.\J0v’c8J]t-k8D
grktgxaovq ’ dvomju[+lcs;’skdewimju,J5 ’ goAowxmjus]addkolqj ’ glu,.sH7;k,IH6fHkoly]xtmju ’ qf ’k,c]tfHko
;amotma,0v’s]kpr6,rkd.ogvg-aP8kg;aovvdlP’.8H SEAACC-SF 3fpdkooe0v’-5,-qo
r;dgIqk[+,u7;k,z6draomk’dkog,nv ’.fMc]tgIqk[+loa[lo5o,5,,v’mk’dkog,nv ’.fM
<br /><br />
r;dgIqkpa ’ 9t9af ’ko.og08 gmogfu]vpo% vud8+wxF c8Jr;dgIqkd=pa ’ s;a ’ mju9t-5dp6H.sH-5,-qomjuvklap.orNom
vnjoMg0Qk,kIJ;,’ko.og080v’r;dgIqkgrnjvgruj,7;k,IH6c]tla,zafda[7;k,’qf ’k,0v’g-Nv-k8;ammtotma,
0v’-k;8tg;aovvdlP’.8
</div>
<!-- OUR VISION -->
<font size="12">;ylapmalo%0v’r;dgIqk</font><br />
<div style="line-height: 1.0">
;ylapmalo%0v’ SEAACC oAo,k9kd 38 P lk,38G<br />
<strong>Iadlk </strong>– 3v[vH5,c]txt7a[xt7v’zqorvp7A ’ xts;aflklmjuly]xtc]t;amotma,wfHoe,k.s
-5,-qo-k;8kg;aovvdlP’.8<br />
<strong>lq ’ j glu,</strong> – r;dgIqkg-njv;Jk7;k,dHk;|Hkmk’;ammtotma,c]tgvd]ad0v’-k8gduf,k9kd
dkoIa[IH6;Jk;y4u-u;yfIkdgs’Qk0v’c8J]t7qo,u[qf[kfmjule7ao.odkoramok-5,-qo<br />
<strong>xt8y[af</strong> – dkolbdlkc]tmblfu9t[+,uzqo3fp[+xt8y[afD dy9da,mk’;amotma,c]tla ’ 7q,m
c8d8Jk ’4nd9af8A ’ 0Bo.og08c-oc2iolyl3dg[p%gvgiaPgrnjv.sHm5dM7qolk,kfg0Qk,kIJ;,c]t,
lJ;oIJ;,.o-5,-qo<br />
</div>
<!-- MISSION STATEMENT -->
<font size="12">4c]’rk]tdy9</font><br />
<div style="line-height: 1.0">
rk]tdy90v’ SEAACC 7ndkodHk;ramokc8J]t;amotma,0v’-k;8kg;aovvdlP’.8H.sH,u-u;yf-u;kc]tgrnjv.sH7qovnjo
wfHgsao3fpzJko-Jv ’mk’8Jk ’M dkolbdlkF dy9da,mk’la ’ 7q,c]t;amotma, fH;p95f,J5 ’ \kpmju9tglu,lHk ’7;k,la,rao
]ts;Jk ’-6,-qo-k;8kg;aovvdlP’.8H .og08c-oc2iolyl3dg[p%gvgiaP .sHc|Hoc2Ho0bho
</div>
</font>
</div> |
# encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::DeploymentManager::Mgmt::V2019_11_01_preview
module Models
# The resource that defines the source location where the artifacts are
# located.
    # Model describing where deployment artifacts live.
    # NOTE: AutoRest-generated code — edits are lost on regeneration.
    class ArtifactSource < TrackedResource

      include MsRestAzure

      # @return [String] The type of artifact source used.
      attr_accessor :source_type

      # @return [String] The path from the location that the 'authentication'
      # property [say, a SAS URI to the blob container] refers to, to the
      # location of the artifacts. This can be used to differentiate different
      # versions of the artifacts. Or, different types of artifacts like
      # binaries or templates. The location referenced by the authentication
      # property concatenated with this optional artifactRoot path forms the
      # artifact source location where the artifacts are expected to be found.
      attr_accessor :artifact_root

      # @return [Authentication] The authentication method to use to access the
      # artifact source.
      attr_accessor :authentication


      #
      # Mapper for ArtifactSource class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      def self.mapper()
        {
          <API key>: true,
          required: false,
          serialized_name: 'ArtifactSource',
          type: {
            name: 'Composite',
            class_name: 'ArtifactSource',
            model_properties: {
              # Standard tracked-resource fields (id/name/type are read-only).
              id: {
                <API key>: true,
                required: false,
                read_only: true,
                serialized_name: 'id',
                type: {
                  name: 'String'
                }
              },
              name: {
                <API key>: true,
                required: false,
                read_only: true,
                serialized_name: 'name',
                type: {
                  name: 'String'
                }
              },
              type: {
                <API key>: true,
                required: false,
                read_only: true,
                serialized_name: 'type',
                type: {
                  name: 'String'
                }
              },
              tags: {
                <API key>: true,
                required: false,
                serialized_name: 'tags',
                type: {
                  name: 'Dictionary',
                  value: {
                    <API key>: true,
                    required: false,
                    serialized_name: 'StringElementType',
                    type: {
                      name: 'String'
                    }
                  }
                }
              },
              location: {
                <API key>: true,
                required: true,
                serialized_name: 'location',
                type: {
                  name: 'String'
                }
              },
              # Properties below are flattened from 'properties.*' on the wire.
              source_type: {
                <API key>: true,
                required: true,
                serialized_name: 'properties.sourceType',
                type: {
                  name: 'String'
                }
              },
              artifact_root: {
                <API key>: true,
                required: false,
                serialized_name: 'properties.artifactRoot',
                type: {
                  name: 'String'
                }
              },
              # Polymorphic: concrete type selected by the 'type' discriminator.
              authentication: {
                <API key>: true,
                required: true,
                serialized_name: 'properties.authentication',
                type: {
                  name: 'Composite',
                  <API key>: 'type',
                  uber_parent: 'Authentication',
                  class_name: 'Authentication'
                }
              }
            }
          }
        }
      end
    end
end
end |
# Linked List Utils - LIFO
#Interface required:
# Linked List class must have attributes:
# - lltail (tail node linker)
# - llhead (head node linker)
# Node class must have attributes:
# - llprev (previous node linker)
# - llnext (next node linker)
# Node class must have methods (for debug):
# __int__(self)
def isEmpty(ll):
    """Return True when the list has neither a head nor a tail node.

    Fix: returns the boolean expression directly instead of the
    redundant ``True if ... else False`` ternary, and compares to
    None with ``is`` (identity) as is idiomatic.
    """
    return ll.lltail is None and ll.llhead is None
def hasOneElement(ll):
    """Return True when head and tail reference the same node.

    Note: an empty list (both linkers None) also satisfies this;
    callers are expected to check isEmpty() first.

    Fix: returns the comparison directly instead of the redundant
    ``True if ... else False`` ternary.
    """
    return ll.lltail == ll.llhead
def getTail(ll):
    # Accessor for the tail node (most recently pushed; None when empty).
    return ll.lltail
def getHead(ll):
    # Accessor for the head node (oldest node; None when empty).
    return ll.llhead
def push(ll, new):
    """Append `new` as the tail of `ll` (LIFO insert)."""
    if isEmpty(ll):
        # First node: it becomes both head and tail, with no neighbours.
        ll.lltail = ll.llhead = new
        new.llnext = new.llprev = None
        return
    old_tail = ll.lltail
    # Hook the new node behind the current tail, then advance the tail.
    new.llprev = old_tail
    new.llnext = None
    old_tail.llnext = new
    ll.lltail = new
def pop(ll):
    """Detach and return the tail node, or None when the list is empty."""
    if isEmpty(ll):
        return None
    node = ll.lltail
    if hasOneElement(ll):
        # Removing the only node empties the list.
        ll.llhead = ll.lltail = None
        return node
    # General case: the predecessor becomes the new tail.
    ll.lltail = node.llprev
    ll.lltail.llnext = None
    # Fully detach the popped node before handing it back.
    node.llprev = node.llnext = None
    return node
def increment(ll, node):
    """Move `node` one step toward the head by swapping it with its predecessor."""
    switch(ll, node.llprev, node)
# Swap two CONSECUTIVE nodes (`before` directly precedes `after`),
# fixing up the list's head/tail linkers and all four neighbour links.
def switch(ll, before, after):
    # Degenerate lists: nothing to swap.
    if isEmpty(ll) or hasOneElement(ll):
        return
    # `before` is None when `after` is already the head — no predecessor.
    if before == None:
        return
    # Re-point whatever referenced `before` (the list head or a node).
    if ll.llhead == before:
        ll.llhead = after
    else:
        before.llprev.llnext = after
    # Re-point whatever referenced `after` (the list tail or a node).
    if ll.lltail == after:
        ll.lltail = before
    else:
        after.llnext.llprev = before
    # Rewire the pair's own links. Order matters: the outer links are
    # read before being overwritten.
    before.llnext = after.llnext
    after.llprev = before.llprev
    before.llprev = after
    after.llnext = before
    return
def lastsWillBeFirsts(ll):
    """Rotate the list: the current tail becomes the new head."""
    if not (isEmpty(ll) or hasOneElement(ll)):
        bringToFirst(ll, ll.lltail)
def bringToFirst(ll, node):
    # Unlink `node` from wherever it is and re-attach it as the head.
    if isEmpty(ll) or hasOneElement(ll):
        return
    oldtail = ll.lltail
    oldhead = ll.llhead
    newhead = node
    # Already at the front: nothing to do.
    if oldhead == node:
        return
    # update tail if necessary
    if oldtail == node:
        # Node was the tail: its predecessor becomes the new tail.
        newtail = oldtail.llprev
        ll.lltail = newtail
        newtail.llnext = None
    else:
        # Interior node: bridge its successor back to its predecessor.
        node.llnext.llprev = newhead.llprev
    # NOTE(review): the two statements below must run in BOTH branches
    # (the source's indentation was ambiguous); if they ran only in the
    # else-branch, lastsWillBeFirsts (which always passes the tail)
    # would never update ll.llhead — confirm against the original file.
    ll.llhead = newhead
    newhead.llprev.llnext = newhead.llnext
    #update new head
    newhead.llnext = oldhead
    newhead.llprev = None
    oldhead.llprev = newhead
def nodeIndex(node):
    """Debug helper: the node's index rendered as a string, "NN" for None."""
    if node == None:
        return "NN"
    return str(int(node))
def nodeIndexPlus(node):
    """Debug helper: render a node as "<idx>(<prev idx>,<next idx>)"."""
    return "%s(%s,%s)" % (nodeIndex(node),
                          nodeIndex(node.llprev),
                          nodeIndex(node.llnext))
def listToString(ll):
    """Render the whole list for debugging.

    At most 20 nodes are walked so a corrupted (cyclic) list cannot
    hang the caller.

    Fix: corrected the typo "elment" -> "element" in the single-node
    output string.
    """
    if isEmpty(ll):
        return "LL: Empty list"
    if hasOneElement(ll):
        return "LL 1 element: " + nodeIndexPlus(ll.llhead)
    # Header shows the head and tail indices, then each node in order.
    string = "LL" + "(" + nodeIndex(ll.llhead) + "," + nodeIndex(ll.lltail) + ")" + ": "
    node = ll.llhead
    it = 0
    while node != None and it < 20:
        string += nodeIndexPlus(node) + "-"
        node = node.llnext
        it += 1
    return string
def printList(ll):
    # Dump the list to stdout for debugging (Python 2 print statement).
    print listToString(ll)
class LinkedList:
    # Minimal doubly linked list container: it only stores the two end
    # linkers; all operations live in the module-level functions above.
    def __init__(self):
        self.lltail = None  # most recently pushed node
        self.llhead = None  # oldest node
class Node:
    """Doubly linked list node carrying an integer index and optional payload.

    Fixes:
      * ``__init`` was missing its trailing underscores, so the
        constructor never ran and ``Node(i)`` raised TypeError.
      * ``__int__`` returned a string; it must return an int so that
        ``int(node)`` (used by nodeIndex) works.
    """
    def __init__(self, index, data = None):
        self.llnext = None   # next node linker
        self.llprev = None   # previous node linker
        self.data = data     # optional payload
        self.index = index   # identifier used by the debug helpers
    def __int__(self):
        # Must return an int (was: str(self.index)).
        return self.index
package com.zimbra.qa.selenium.projects.ajax.tests.briefcase.document;
import org.testng.annotations.Test;
import com.zimbra.qa.selenium.framework.items.DocumentItem;
import com.zimbra.qa.selenium.framework.ui.Action;
import com.zimbra.qa.selenium.framework.ui.Button;
import com.zimbra.qa.selenium.framework.ui.Shortcut;
import com.zimbra.qa.selenium.framework.util.GeneralUtility;
import com.zimbra.qa.selenium.framework.util.HarnessException;
import com.zimbra.qa.selenium.framework.util.SleepUtil;
import com.zimbra.qa.selenium.framework.util.XmlStringUtil;
import com.zimbra.qa.selenium.framework.util.ZAssert;
import com.zimbra.qa.selenium.framework.util.ZimbraAccount;
import com.zimbra.qa.selenium.framework.util.<API key>;
import com.zimbra.qa.selenium.projects.ajax.core.<API key>;
import com.zimbra.qa.selenium.projects.ajax.ui.*;
import org.testng.annotations.AfterMethod;
import com.zimbra.qa.selenium.framework.items.FolderItem;
import com.zimbra.qa.selenium.framework.items.FolderItem.SystemFolder;
/**
 * Briefcase document tests: move a document between folders via the
 * toolbar Move button, the 'm' keyboard shortcut, and the right-click
 * context menu, verifying each move through the GUI list view.
 * Documents and folders are created via SOAP; verification is GUI-side.
 */
public class MoveDocument extends <API key> {

	public MoveDocument() {
		logger.info("New " + MoveDocument.class.getCanonicalName());

		// All tests start on the Briefcase page with the reading pane
		// docked at the bottom.
		super.startingPage = app.zPageBriefcase;
		super.<API key>.put("<API key>", "bottom");

		// Make sure we are using an account with message view
		// super.<API key>.put("<API key>", "message");
	}

	/**
	 * Move via the toolbar button. On 7.2.x a chooser dialog is used;
	 * on later versions the move is done through the toolbar pulldown.
	 */
	@Test(description = "Create document through SOAP - move & verify through GUI", groups = { "smoke" })
	public void MoveDocument_01() throws HarnessException {
		ZimbraAccount account = app.zGetActiveAccount();

		FolderItem briefcaseFolder = FolderItem.importFromSOAP(account,
				SystemFolder.Briefcase);

		String briefcaseFolderId = briefcaseFolder.getId();

		String name = "folder" + <API key>.getUniqueString();

		// Create a subfolder to move the message into i.e. Briefcase/subfolder
		account.soapSend("<CreateFolderRequest xmlns='urn:zimbraMail'>"
				+ "<folder name='" + name + "' l='" + briefcaseFolderId + "'/>"
				+ "</CreateFolderRequest>");

		FolderItem subFolderItem = FolderItem.importFromSOAP(account, name);

		// refresh briefcase page
		app.zTreeBriefcase.zTreeItem(Action.A_LEFTCLICK, briefcaseFolder, true);

		// double click on created subfolder
		app.zPageBriefcase.zListItem(Action.A_DOUBLECLICK, subFolderItem);

		// Create document item
		DocumentItem docItem = new DocumentItem();

		// Create document using SOAP
		String contentHTML = XmlStringUtil.escapeXml("<html>" + "<body>"
				+ docItem.getDocText() + "</body>" + "</html>");

		account.soapSend("<SaveDocumentRequest requestId='0' xmlns='urn:zimbraMail'>"
				+ "<doc name='"
				+ docItem.getName()
				+ "' l='"
				+ briefcaseFolderId
				+ "' ct='application/x-zimbra-doc'>"
				+ "<content>"
				+ contentHTML
				+ "</content>"
				+ "</doc>"
				+ "</SaveDocumentRequest>");

		GeneralUtility.<API key>(app.zGetActiveAccount());

		// document.importFromSOAP(account, document.getDocName());

		// refresh briefcase page
		app.zTreeBriefcase.zTreeItem(Action.A_LEFTCLICK, briefcaseFolder, true);

		SleepUtil.sleepVerySmall();

		// Click on created document
		app.zPageBriefcase.zListItem(Action.A_LEFTCLICK, docItem);

		// Click on 'Move selected item' icon in toolbar
		// (7.2.x shows a folder-chooser dialog; later builds use the
		// toolbar pulldown directly)
		if (<API key>.<API key>().contains("7.2.")) {
			DialogMove chooseFolder = (DialogMove) app.zPageBriefcase
					.zToolbarPressButton(Button.B_MOVE, docItem);

			// Choose folder on Confirmation dialog
			chooseFolder.zClickTreeFolder(subFolderItem);

			// Click OK on Confirmation dialog
			app.zPageBriefcase.zClick("//div[@id='<API key>']//td[contains(@id,'OK_')]//td[contains(@id,'_title')]");
		} else {
			// Click move -> subfolder
			app.zPageBriefcase.<API key>(Button.B_MOVE, subFolderItem);
		}

		// refresh briefcase page
		app.zTreeBriefcase
				.zTreeItem(Action.A_LEFTCLICK, briefcaseFolder, false);

		// Verify document was moved from the folder
		ZAssert.assertFalse(
				app.zPageBriefcase.isPresentInListView(docItem.getName()),
				"Verify document was moved from the folder");

		SleepUtil.sleepVerySmall();

		// click on subfolder in tree view
		app.zTreeBriefcase.zTreeItem(Action.A_LEFTCLICK, subFolderItem, true);

		// Verify document was moved to the selected folder
		ZAssert.assertTrue(
				app.zPageBriefcase.isPresentInListView(docItem.getName()),
				"Verify document was moved to the selected folder");
	}

	/**
	 * Move via the 'm' keyboard shortcut between two sibling subfolders.
	 */
	@Test(description = "Move Document using 'm' keyboard shortcut", groups = { "functional" })
	public void MoveDocument_02() throws HarnessException {
		ZimbraAccount account = app.zGetActiveAccount();

		FolderItem briefcaseRootFolder = FolderItem.importFromSOAP(account,
				SystemFolder.Briefcase);

		String <API key> = briefcaseRootFolder.getId();

		Shortcut shortcut = Shortcut.S_MOVE;

		String[] subFolderNames = {
				"subFolderName1" + <API key>.getUniqueString(),
				"subFolderName2" + <API key>.getUniqueString() };

		FolderItem[] subFolders = new FolderItem[subFolderNames.length];

		// Create folders to move the message from/to: Briefcase/sub-folder
		for (int i = 0; i < subFolderNames.length; i++) {
			account.soapSend("<CreateFolderRequest xmlns='urn:zimbraMail'>"
					+ "<folder name='" + subFolderNames[i] + "' l='"
					+ <API key> + "'/>" + "</CreateFolderRequest>");

			subFolders[i] = FolderItem.importFromSOAP(account,
					subFolderNames[i]);
		}

		// refresh briefcase page
		app.zTreeBriefcase.zTreeItem(Action.A_LEFTCLICK, briefcaseRootFolder,
				true);

		// Create document item
		DocumentItem docItem = new DocumentItem();

		// Create document in sub-folder1 using SOAP
		String contentHTML = XmlStringUtil.escapeXml("<html>" + "<body>"
				+ docItem.getDocText() + "</body>" + "</html>");

		account.soapSend("<SaveDocumentRequest requestId='0' xmlns='urn:zimbraMail'>"
				+ "<doc name='"
				+ docItem.getName()
				+ "' l='"
				+ subFolders[0].getId()
				+ "' ct='application/x-zimbra-doc'>"
				+ "<content>"
				+ contentHTML
				+ "</content>"
				+ "</doc>"
				+ "</SaveDocumentRequest>");

		// refresh briefcase page
		app.zTreeBriefcase.zTreeItem(Action.A_LEFTCLICK, briefcaseRootFolder,
				true);

		// double-click on sub-folder1 in list view
		app.zPageBriefcase.zListItem(Action.A_DOUBLECLICK, subFolders[0]);

		// click on sub-folder1 in tree view to refresh view
		// app.zTreeBriefcase.zTreeItem(Action.A_LEFTCLICK, subFolders[0],
		// true);

		SleepUtil.sleepVerySmall();

		// Click on created document in list view
		app.zPageBriefcase.zListItem(Action.A_LEFTCLICK, docItem);

		// Click the Move keyboard shortcut
		DialogMove chooseFolder = (DialogMove) app.zPageBriefcase
				.zKeyboardShortcut(shortcut);

		// Choose destination folder and Click OK on Confirmation dialog
		chooseFolder.zClickTreeFolder(subFolders[1]);

		// Click OK on Confirmation dialog
		app.zPageBriefcase.zClick("//div[@id='<API key>']//td[contains(@id,'OK_')]//td[contains(@id,'_title')]");
		// app.zPageBriefcase.zClickAt("css=div[id=<API key>]","0,0");

		// refresh briefcase page
		app.zTreeBriefcase.zTreeItem(Action.A_LEFTCLICK, briefcaseRootFolder,
				true);

		SleepUtil.sleepVerySmall();

		// click on sub-folder1 in tree view
		app.zTreeBriefcase.zTreeItem(Action.A_LEFTCLICK, subFolders[0], false);

		// Verify document is no longer in the sub-folder1
		ZAssert.assertFalse(
				app.zPageBriefcase.isPresentInListView(docItem.getName()),
				"Verify document is no longer in the folder: "
						+ subFolders[0].getName());

		SleepUtil.sleepVerySmall();

		// click on sub-folder2 in tree view
		app.zTreeBriefcase.zTreeItem(Action.A_LEFTCLICK, subFolders[1], true);

		// Verify document was moved to sub-folder2
		ZAssert.assertTrue(
				app.zPageBriefcase.isPresentInListView(docItem.getName()),
				"Verify document was moved to the folder: "
						+ subFolders[1].getName());
	}

	/**
	 * Move via the right-click context menu's Move item.
	 */
	@Test(description = "Create document through SOAP - move using Right Click Context Menu & verify through GUI", groups = { "functional" })
	public void MoveDocument_03() throws HarnessException {
		ZimbraAccount account = app.zGetActiveAccount();

		FolderItem briefcaseFolder = FolderItem.importFromSOAP(account,
				SystemFolder.Briefcase);

		String briefcaseFolderId = briefcaseFolder.getId();

		String name = "subFolder" + <API key>.getUniqueString();

		// Create a subfolder to move the message into i.e. Briefcase/subfolder
		account.soapSend("<CreateFolderRequest xmlns='urn:zimbraMail'>"
				+ "<folder name='" + name + "' l='" + briefcaseFolderId + "'/>"
				+ "</CreateFolderRequest>");

		FolderItem subFolderItem = FolderItem.importFromSOAP(account, name);

		// refresh briefcase page
		app.zTreeBriefcase.zTreeItem(Action.A_LEFTCLICK, briefcaseFolder, true);

		// double click on created subfolder
		app.zPageBriefcase.zListItem(Action.A_DOUBLECLICK, subFolderItem);

		// Create document item
		DocumentItem docItem = new DocumentItem();

		// Create document using SOAP
		String contentHTML = XmlStringUtil.escapeXml("<html>" + "<body>"
				+ docItem.getDocText() + "</body>" + "</html>");

		account.soapSend("<SaveDocumentRequest requestId='0' xmlns='urn:zimbraMail'>"
				+ "<doc name='"
				+ docItem.getName()
				+ "' l='"
				+ briefcaseFolderId
				+ "' ct='application/x-zimbra-doc'>"
				+ "<content>"
				+ contentHTML
				+ "</content>"
				+ "</doc>"
				+ "</SaveDocumentRequest>");

		// document.importFromSOAP(account, document.getDocName());

		// refresh briefcase page
		app.zTreeBriefcase.zTreeItem(Action.A_LEFTCLICK, briefcaseFolder, true);

		SleepUtil.sleepVerySmall();

		// Click on created document
		app.zPageBriefcase.zListItem(Action.A_LEFTCLICK, docItem);

		// Move using Right Click Context Menu
		DialogMove chooseFolder = (DialogMove) app.zPageBriefcase.zListItem(
				Action.A_RIGHTCLICK, Button.O_MOVE, docItem);

		// Choose folder on Confirmation dialog
		chooseFolder.zClickTreeFolder(subFolderItem);

		// Click OK on Confirmation dialog
		app.zPageBriefcase.zClick("//div[@id='<API key>']//td[contains(@id,'OK_')]//td[contains(@id,'_title')]");

		// refresh briefcase page
		app.zTreeBriefcase
				.zTreeItem(Action.A_LEFTCLICK, briefcaseFolder, false);

		// Verify document was moved from the folder
		ZAssert.assertFalse(
				app.zPageBriefcase.isPresentInListView(docItem.getName()),
				"Verify document was moved from the folder");

		SleepUtil.sleepVerySmall();

		// click on subfolder in tree view
		app.zTreeBriefcase.zTreeItem(Action.A_LEFTCLICK, subFolderItem, true);

		// Verify document was moved to the selected folder
		ZAssert.assertTrue(
				app.zPageBriefcase.isPresentInListView(docItem.getName()),
				"Verify document was moved to the selected folder");
	}

	/**
	 * Cleanup: dismiss the Move dialog if a test left it open, so the
	 * next test starts from a clean page.
	 */
	@AfterMethod(groups = { "always" })
	public void afterMethod() throws HarnessException {
		logger.info("Checking for the Move Dialog ...");

		// Check if the "Move Dialog is still open
		DialogMove dialog = new DialogMove(app,
				((AppAjaxClient) app).zPageBriefcase);
		if (dialog.zIsActive()) {
			logger.warn(dialog.myPageName()
					+ " was still active. Cancelling ...");
			dialog.zClickButton(Button.B_CANCEL);
		}
	}
}
#pragma once
#pragma comment(lib, "dxguid.lib")
#pragma comment(lib, "d3dcompiler.lib")
#include <d3d11.h>
#include <d3dcompiler.h>
#include <DirectXMath.h>
#include <unordered_map>
#include <vector>
#include <string>
// Used by simple shaders to store information about
// specific variables in constant buffers
struct <API key>
{
	unsigned int ByteOffset;          // Offset of the variable within its constant buffer
	unsigned int Size;                // Size of the variable in bytes
	unsigned int ConstantBufferIndex; // Index of the owning constant buffer
};
// Contains information about a specific
// constant buffer in a shader, as well as
// the local data buffer for it
struct <API key>
{
	std::string Name;                 // Buffer name from shader reflection
	unsigned int Size;                // Total buffer size in bytes
	unsigned int BindIndex;           // Register the buffer binds to
	ID3D11Buffer* ConstantBuffer;     // GPU-side buffer object
	unsigned char* LocalDataBuffer;   // CPU-side staging copy of the buffer contents
	std::vector<<API key>> Variables; // Variables contained in this buffer
};
// Contains info about a single SRV in a shader
struct SimpleSRV
{
	unsigned int Index;		// The raw index of the SRV
	unsigned int BindIndex; // The register of the SRV
};
// Contains info about a single Sampler in a shader
struct SimpleSampler
{
	unsigned int Index;		// The raw index of the Sampler
	unsigned int BindIndex; // The register of the Sampler
};
// Base abstract class for simplifying shader handling
class ISimpleShader
{
public:
	ISimpleShader(ID3D11Device* device, ID3D11DeviceContext* context);
	virtual ~ISimpleShader();

	// Initialization method (since we can't invoke derived class
	// overrides in the base class constructor)
	bool LoadShaderFile(LPCWSTR shaderFile);

	// Simple helpers
	bool IsShaderValid() { return shaderValid; }

	// Activating the shader and copying data
	void SetShader();
	void CopyAllBufferData();
	void CopyBufferData(unsigned int index);
	void CopyBufferData(std::string bufferName);

	// Sets arbitrary shader data (copied into the local staging buffer;
	// pushed to the GPU by the CopyBufferData calls above)
	bool SetData(std::string name, const void* data, unsigned int size);
	bool SetInt(std::string name, int data);
	bool SetFloat(std::string name, float data);
	bool SetFloat2(std::string name, const float data[2]);
	bool SetFloat2(std::string name, const DirectX::XMFLOAT2 data);
	bool SetFloat3(std::string name, const float data[3]);
	bool SetFloat3(std::string name, const DirectX::XMFLOAT3 data);
	bool SetFloat4(std::string name, const float data[4]);
	bool SetFloat4(std::string name, const DirectX::XMFLOAT4 data);
	bool SetMatrix4x4(std::string name, const float data[16]);
	bool SetMatrix4x4(std::string name, const DirectX::XMFLOAT4X4 data);

	// Setting shader resources (implemented per shader stage)
	virtual bool <API key>(std::string name, <API key>* srv) = 0;
	virtual bool SetSamplerState(std::string name, ID3D11SamplerState* samplerState) = 0;

	// Getting data about variables and resources
	const <API key>* GetVariableInfo(std::string name);

	const SimpleSRV* <API key>(std::string name);
	const SimpleSRV* <API key>(unsigned int index);
	// NOTE(review): size() is size_t; implicit narrowing to unsigned int
	// here — harmless for realistic resource counts.
	unsigned int <API key>() { return textureTable.size(); }

	const SimpleSampler* GetSamplerInfo(std::string name);
	const SimpleSampler* GetSamplerInfo(unsigned int index);
	unsigned int GetSamplerCount() { return samplerTable.size(); }

	// Get data about constant buffers
	unsigned int GetBufferCount();
	unsigned int GetBufferSize(unsigned int index);
	const <API key>* GetBufferInfo(std::string name);
	const <API key>* GetBufferInfo(unsigned int index);

	// Misc getters
	ID3DBlob* GetShaderBlob() { return shaderBlob; }

protected:
	bool shaderValid;                   // Set once the shader file loads successfully
	ID3DBlob* shaderBlob;               // Compiled shader bytecode
	ID3D11Device* mDevice;
	ID3D11DeviceContext* deviceContext;

	// Resource counts
	unsigned int constantBufferCount;

	// Maps for variables and buffers
	<API key>* constantBuffers; // For index-based lookup
	std::vector<SimpleSRV*> shaderResourceViews;
	std::vector<SimpleSampler*> samplerStates;
	std::unordered_map<std::string, <API key>*> cbTable;
	std::unordered_map<std::string, <API key>> varTable;
	std::unordered_map<std::string, SimpleSRV*> textureTable;
	std::unordered_map<std::string, SimpleSampler*> samplerTable;

	// Pure virtual functions for dealing with shader types
	virtual bool CreateShader(ID3DBlob* shaderBlob) = 0;
	virtual void SetShaderAndCBs() = 0;

	virtual void CleanUp();

	// Helpers for finding data by name
	<API key>* FindVariable(std::string name, int size);
	<API key>* FindConstantBuffer(std::string name);
};
// Derived class for VERTEX shaders ///////////////////////
class SimpleVertexShader : public ISimpleShader
{
public:
	SimpleVertexShader(ID3D11Device* device, ID3D11DeviceContext* context);
	// Alternate constructor taking an externally-created input layout —
	// presumably bypasses layout creation from the blob; confirm in the .cpp.
	SimpleVertexShader(ID3D11Device* device, ID3D11DeviceContext* context, ID3D11InputLayout* inputLayout, bool <API key>);
	~SimpleVertexShader();

	ID3D11VertexShader* GetDirectXShader() { return shader; }
	ID3D11InputLayout* GetInputLayout() { return inputLayout; }
	bool <API key>() { return <API key>; }

	bool <API key>(std::string name, <API key>* srv);
	bool SetSamplerState(std::string name, ID3D11SamplerState* samplerState);

protected:
	bool <API key>;
	ID3D11InputLayout* inputLayout;
	ID3D11VertexShader* shader;

	// ISimpleShader overrides
	bool CreateShader(ID3DBlob* shaderBlob);
	void SetShaderAndCBs();
	void CleanUp();
};
// Derived class for PIXEL shaders ////////////////////////
class SimplePixelShader : public ISimpleShader
{
public:
	SimplePixelShader(ID3D11Device* device, ID3D11DeviceContext* context);
	~SimplePixelShader();

	ID3D11PixelShader* GetDirectXShader() { return shader; }

	bool <API key>(std::string name, <API key>* srv);
	bool SetSamplerState(std::string name, ID3D11SamplerState* samplerState);

protected:
	ID3D11PixelShader* shader;

	// ISimpleShader overrides
	bool CreateShader(ID3DBlob* shaderBlob);
	void SetShaderAndCBs();
	void CleanUp();
};
// Derived class for DOMAIN shaders ///////////////////////
class SimpleDomainShader : public ISimpleShader
{
public:
	SimpleDomainShader(ID3D11Device* device, ID3D11DeviceContext* context);
	~SimpleDomainShader();

	ID3D11DomainShader* GetDirectXShader() { return shader; }

	bool <API key>(std::string name, <API key>* srv);
	bool SetSamplerState(std::string name, ID3D11SamplerState* samplerState);

protected:
	ID3D11DomainShader* shader;

	// ISimpleShader overrides
	bool CreateShader(ID3DBlob* shaderBlob);
	void SetShaderAndCBs();
	void CleanUp();
};
// Derived class for HULL shaders /////////////////////////
class SimpleHullShader : public ISimpleShader
{
public:
	SimpleHullShader(ID3D11Device* device, ID3D11DeviceContext* context);
	~SimpleHullShader();

	ID3D11HullShader* GetDirectXShader() { return shader; }

	bool <API key>(std::string name, <API key>* srv);
	bool SetSamplerState(std::string name, ID3D11SamplerState* samplerState);

protected:
	ID3D11HullShader* shader;

	// ISimpleShader overrides
	bool CreateShader(ID3DBlob* shaderBlob);
	void SetShaderAndCBs();
	void CleanUp();
};
// Derived class for GEOMETRY shaders /////////////////////
// Derived class for GEOMETRY shaders /////////////////////
class <API key> : public ISimpleShader
{
public:
	// Fix: the two flag parameters are bool, so default them with
	// `false` rather than the integer literal 0.
	<API key>(ID3D11Device* device, ID3D11DeviceContext* context, bool useStreamOut = false, bool <API key> = false);
	~<API key>();

	<API key>* GetDirectXShader() { return shader; }

	bool <API key>(std::string name, <API key>* srv);
	bool SetSamplerState(std::string name, ID3D11SamplerState* samplerState);

	// Stream-output support: create/bind a buffer sized for vertexCount
	// vertices of this shader's output signature.
	bool <API key>(ID3D11Buffer** buffer, int vertexCount);
	static void <API key>(ID3D11DeviceContext* deviceContext);

protected:
	// Shader itself
	<API key>* shader;

	// Stream out related
	bool useStreamOut;
	bool <API key>;
	unsigned int streamOutVertexSize;

	// ISimpleShader overrides
	bool CreateShader(ID3DBlob* shaderBlob);
	bool <API key>(ID3DBlob* shaderBlob);
	void SetShaderAndCBs();
	void CleanUp();

	// Helpers
	unsigned int CalcComponentCount(unsigned int mask);
};
// Derived class for COMPUTE shaders //////////////////////
class SimpleComputeShader : public ISimpleShader
{
public:
	SimpleComputeShader(ID3D11Device* device, ID3D11DeviceContext* context);
	~SimpleComputeShader();

	ID3D11ComputeShader* GetDirectXShader() { return shader; }

	// Dispatch either by raw thread-group counts or by total thread
	// counts (the latter presumably divides by the shader's declared
	// group size — confirm in the .cpp).
	void DispatchByGroups(unsigned int groupsX, unsigned int groupsY, unsigned int groupsZ);
	void DispatchByThreads(unsigned int threadsX, unsigned int threadsY, unsigned int threadsZ);

	bool <API key>(std::string name, <API key>* srv);
	bool SetSamplerState(std::string name, ID3D11SamplerState* samplerState);
	// NOTE(review): appendConsumeOffset is unsigned but defaulted to -1;
	// this wraps to UINT_MAX, D3D's "keep current offset" sentinel —
	// confirm against the implementation.
	bool <API key>(std::string name, <API key>* uav, unsigned int appendConsumeOffset = -1);
	int <API key>(std::string name);

protected:
	ID3D11ComputeShader* shader;
	std::unordered_map<std::string, unsigned int> uavTable;

	// Thread-group dimensions (assumed to come from shader reflection —
	// TODO confirm)
	unsigned int threadsX;
	unsigned int threadsY;
	unsigned int threadsZ;
	unsigned int threadsTotal;

	// ISimpleShader overrides
	bool CreateShader(ID3DBlob* shaderBlob);
	void SetShaderAndCBs();
	void CleanUp();
};
# This migration comes from authentify (originally 20120608223732)
class CreateUsers < ActiveRecord::Migration
  # Creates the authentify_users table holding authentication state:
  # identity columns, hashed credentials, tokens and audit references.
  def change
    create_table :authentify_users do |t|
      t.string :name
      t.string :email
      t.string :login
      # Password is stored hashed, with its salt alongside.
      t.string :encrypted_password
      t.string :salt
      # Accounts default to 'active'.
      t.string :status, :default => 'active'
      t.integer :last_updated_by_id
      t.integer :customer_id
      t.string :auth_token
      t.string :<API key>
      t.datetime :<API key>
      # created_at / updated_at
      t.timestamps
    end
  end
end
using System;
using NetOffice;
using NetOffice.Attributes;
namespace NetOffice.MSFormsApi.Enums
{
    // Fix: the XML documentation blocks were written as bare
    // <summary>/<remarks> tags without the `///` delimiter, which is
    // not valid C# — restored them as proper doc comments.

    /// <summary>
    /// SupportByVersion MSForms 2
    /// </summary>
    [SupportByVersion("MSForms", 2)]
    [EntityType(EntityType.IsEnum)]
    public enum <API key>
    {
        /// <summary>
        /// SupportByVersion MSForms 2
        /// </summary>
        /// <remarks>0</remarks>
        [SupportByVersion("MSForms", 2)]
        <API key> = 0,

        /// <summary>
        /// SupportByVersion MSForms 2
        /// </summary>
        /// <remarks>1</remarks>
        [SupportByVersion("MSForms", 2)]
        <API key> = 1
    }
}
class <API key> < ActiveRecord::Migration[5.2]
  # Replaces the plain index on unpublishings.edition_id with a unique
  # one: drop the existing index if present, then re-add it with
  # unique: true (each edition may have at most one unpublishing row).
  def change
    remove_index :unpublishings, :edition_id if index_exists?(:unpublishings, :edition_id)
    add_index :unpublishings, :edition_id, unique: true
  end
end
#include <htool/htool.hpp>
using namespace std;
using namespace htool;
// Dense kernel matrix A(i,j) = 1 / |p1[i] - p2[j]| over two point clouds.
// Used as the reference operator when validating the hierarchical matrix.
class MyMatrix: public IMatrix<double>{
    const vector<R3>& p1; // row points (must outlive this object)
    const vector<R3>& p2; // column points (must outlive this object)
public:
    // Stores references to the point clouds and sizes the IMatrix accordingly.
    MyMatrix(const vector<R3>& p10,const vector<R3>& p20 ):IMatrix(p10.size(),p20.size()),p1(p10),p2(p20) {}
    // Single coefficient: inverse Euclidean distance between the two points.
    double get_coef(const int& i, const int& j)const {return 1./(norm2(p1[i]-p2[j]));}
    // Dense matrix-vector product. FIX: loop indices are now std::size_t to
    // avoid the signed/unsigned comparison against vector::size().
    std::vector<double> operator*(std::vector<double> a){
        std::vector<double> result(p1.size(),0);
        for (std::size_t i=0;i<p1.size();i++){
            for (std::size_t k=0;k<p2.size();k++){
                result[i]+=this->get_coef(static_cast<int>(i),static_cast<int>(k))*a[k];
            }
        }
        return result;
    }
};
// Benchmarks the hierarchical-matrix product against randomly generated
// point clouds on two parallel unit disks separated by `distance`.
// Timing statistics (mean of means / mean of maxima over the last 5 of 10
// runs) are appended to outputpath/outputfile by rank 0.
// Returns 0 on success, 1 on bad usage.
template<typename ClusterImpl, template<typename,typename> class LowRankMatrix>
int hmat(int argc, char *argv[]){
    // Initialize the MPI environment
    MPI_Init(&argc,&argv);
    // Get the number of processes
    int size;
    MPI_Comm_size(MPI_COMM_WORLD, &size);
    // Get the rank of the process
    int rank;
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);
    // Check the number of parameters
    if (argc < 3) {
        // Tell the user how to run the program
        cerr << "Usage: " << argv[0] << " distance \b outputfile \b outputpath \b epsilon \b eta \b minclustersize \b nr \b nc" << endl;
        /* "Usage messages" are a conventional way of telling the user
         * how to run a program if they enter the command incorrectly.
         */
        return 1;
    }
    double distance = StrToNbr<double>(argv[1]);
    std::string outputfile = argv[2];
    std::string outputpath = argv[3];
    double epsilon = StrToNbr<double>(argv[4]);
    double eta = StrToNbr<double>(argv[5]);
    double minclustersize = StrToNbr<double>(argv[6]);
    int nr = StrToNbr<int>(argv[7]);
    int nc = StrToNbr<int>(argv[8]);
    SetEpsilon(epsilon);
    SetEta(eta);
    SetMinClusterSize(minclustersize);
    // Create points randomly
    srand (1);
    // we set a constant seed for rand because we want always the same result if we run the check many times
    // (two different initializations with the same seed will generate the same succession of results in the subsequent calls to rand)
    vector<int> Ir(nr); // row indices for the lrmatrix
    vector<int> Ic(nc); // column indices for the lrmatrix
    // p1: points in a unit disk of the plane z=z1
    double z1 = 1;
    vector<R3> p1(nr);
    vector<double> r1(nr,0);
    vector<double> g1(nr,1);
    vector<int> tab1(nr);
    for(int j=0; j<nr; j++){
        Ir[j] = j;
        double rho = ((double) rand() / (double)(RAND_MAX)); // (double) otherwise integer division!
        double theta = ((double) rand() / (double)(RAND_MAX));
        p1[j][0] = sqrt(rho)*cos(2*M_PI*theta); p1[j][1] = sqrt(rho)*sin(2*M_PI*theta); p1[j][2] = z1;
        tab1[j]=j;
        // sqrt(rho) otherwise the points would be concentrated in the center of the disk
    }
    // p2: points in a unit disk of the plane z=z2
    double z2 = 1+distance;
    vector<R3> p2(nc);
    vector<double> r2(nc,0);
    vector<double> g2(nc,1);
    vector<int> tab2(nc);
    for(int j=0; j<nc; j++){
        Ic[j] = j;
        double rho = ((double) rand() / (RAND_MAX)); // (double) otherwise integer division!
        double theta = ((double) rand() / (RAND_MAX));
        p2[j][0] = sqrt(rho)*cos(2*M_PI*theta); p2[j][1] = sqrt(rho)*sin(2*M_PI*theta); p2[j][2] = z2;
        tab2[j]=j;
    }
    // Matrix
    MyMatrix A(p1,p2);
    // Clustering
    std::shared_ptr<ClusterImpl> t=make_shared<ClusterImpl>();
    std::shared_ptr<ClusterImpl> s=make_shared<ClusterImpl>();
    t->build(p1,r1,tab1,g1,2);
    s->build(p2,r2,tab2,g2,2);
    // Hmatrix
    HMatrix<double,LowRankMatrix,ClusterImpl> HA(A,p1,p2);
    // FIX: these accumulators were previously uninitialized; meanmean/meanmax
    // were incremented (+=) starting from indeterminate values, and meantime
    // was divided on non-root ranks where MPI_Reduce leaves it untouched.
    double mytime = 0, maxtime = 0, meantime = 0;
    double meanmax = 0, meanmean = 0;
    // Global vectors
    std::vector<double> x_global(nc,1),f_global(nr);
    // Global products: 10 runs, statistics taken over the last 5 (warm-up excluded)
    for (int i =0;i<10;i++){
        MPI_Barrier(HA.get_comm());
        mytime = MPI_Wtime();
        f_global=HA*x_global;
        mytime= MPI_Wtime() - mytime;
        MPI_Reduce(&mytime, &maxtime, 1, MPI_DOUBLE, MPI_MAX, 0,HA.get_comm());
        MPI_Reduce(&mytime, &meantime, 1, MPI_DOUBLE, MPI_SUM, 0,HA.get_comm());
        meantime/=size;
        if (i>4){
            meanmean += meantime;
            meanmax += maxtime;
        }
    }
    meanmax /= 5;
    meanmean /= 5;
    std::ofstream output;
    if (rank==0){
        output.open((outputpath+"/"+outputfile).c_str());
        output<<"# Hmatrix"<<std::endl;
    }
    HA.add_info("<API key>",NbrToStr(meanmean));
    HA.add_info("<API key>",NbrToStr(meanmax));
    HA.save_infos((outputpath+"/"+outputfile).c_str(),std::ios::app,": ");
    // Finalize the MPI environment.
    MPI_Finalize();
    return 0;
}
#include "uml/impl/InteractionImpl.hpp"
#ifdef NDEBUG
#define DEBUG_MESSAGE(a)
#else
#define DEBUG_MESSAGE(a) a
#endif
#ifdef ACTIVITY_DEBUG_ON
#define ACT_DEBUG(a) a
#else
#define ACT_DEBUG(a)
#endif
//#include "util/ProfileCallCount.hpp"
#include <cassert>
#include <iostream>
#include <sstream>
#include <stdexcept>
#include "abstractDataTypes/SubsetUnion.hpp"
#include "abstractDataTypes/AnyEObject.hpp"
#include "abstractDataTypes/AnyEObjectBag.hpp"
#include "abstractDataTypes/SubsetUnion.hpp"
#include "ecore/EAnnotation.hpp"
#include "ecore/EClass.hpp"
#include "ecore/EAttribute.hpp"
#include "ecore/EStructuralFeature.hpp"
#include "ecore/ecorePackage.hpp"
//Forward declaration includes
#include "persistence/interfaces/XLoadHandler.hpp" // used for Persistence
#include "persistence/interfaces/XSaveHandler.hpp" // used for Persistence
#include <exception> // used in Persistence
#include "uml/umlFactory.hpp"
#include "uml/Action.hpp"
#include "uml/Behavior.hpp"
#include "uml/BehavioralFeature.hpp"
#include "uml/<API key>.hpp"
#include "uml/Class.hpp"
#include "uml/Classifier.hpp"
#include "uml/CollaborationUse.hpp"
#include "uml/Comment.hpp"
#include "uml/ConnectableElement.hpp"
#include "uml/Connector.hpp"
#include "uml/Constraint.hpp"
#include "uml/Dependency.hpp"
#include "uml/Element.hpp"
#include "uml/ElementImport.hpp"
#include "uml/Extension.hpp"
#include "uml/Feature.hpp"
#include "uml/Gate.hpp"
#include "uml/GeneralOrdering.hpp"
#include "uml/Generalization.hpp"
#include "uml/GeneralizationSet.hpp"
#include "uml/Interaction.hpp"
#include "uml/InteractionFragment.hpp"
#include "uml/InteractionOperand.hpp"
#include "uml/<API key>.hpp"
#include "uml/Lifeline.hpp"
#include "uml/Message.hpp"
#include "uml/NamedElement.hpp"
#include "uml/Namespace.hpp"
#include "uml/Operation.hpp"
#include "uml/Package.hpp"
#include "uml/PackageImport.hpp"
#include "uml/PackageableElement.hpp"
#include "uml/Parameter.hpp"
#include "uml/ParameterSet.hpp"
#include "uml/Port.hpp"
#include "uml/Property.hpp"
#include "uml/Reception.hpp"
#include "uml/RedefinableElement.hpp"
#include "uml/StringExpression.hpp"
#include "uml/Substitution.hpp"
#include "uml/TemplateBinding.hpp"
#include "uml/TemplateParameter.hpp"
#include "uml/TemplateSignature.hpp"
#include "uml/UseCase.hpp"
//Factories and Package includes
#include "uml/umlPackage.hpp"
using namespace uml;
/*
NOTE: Due to virtual inheritance, base class constrcutors may not be called correctly
*/
}
// Destructor; optionally traces deletions when SHOW_DELETION is defined.
InteractionImpl::~InteractionImpl()
{
#ifdef SHOW_DELETION
	// FIX: the trace string literal was truncated (unterminated) in the source.
	// NOTE(review): message reconstructed -- confirm against the generator template.
	std::cout << "-------------------- deleting Interaction " << this << " --------------------" << std::endl;
#endif
}
//Additional constructor for the containments back reference
// Delegates default initialization, then records the containing classifier
// (member name redacted) as both container and namespace back reference.
InteractionImpl::InteractionImpl(std::weak_ptr<uml::<API key>> <API key>)
:InteractionImpl()
{
<API key> = <API key>;
m_namespace = <API key>;
}
//Additional constructor for the containments back reference
// Delegates default initialization, then records the enclosing Interaction
// (member name redacted) as container and namespace back reference.
InteractionImpl::InteractionImpl(std::weak_ptr<uml::Interaction> <API key>)
:InteractionImpl()
{
<API key> = <API key>;
m_namespace = <API key>;
}
//Additional constructor for the containments back reference
// Delegates default initialization, then records the enclosing
// InteractionOperand as both enclosing operand and namespace.
InteractionImpl::InteractionImpl(std::weak_ptr<uml::InteractionOperand> <API key>)
:InteractionImpl()
{
m_enclosingOperand = <API key>;
m_namespace = <API key>;
}
//Additional constructor for the containments back reference
// Delegates default initialization, then records the containing Namespace
// as both namespace and owner back reference.
InteractionImpl::InteractionImpl(std::weak_ptr<uml::Namespace> par_namespace)
:InteractionImpl()
{
m_namespace = par_namespace;
m_owner = par_namespace;
}
//Additional constructor for the containments back reference
// Delegates default initialization, then records the owning Element.
InteractionImpl::InteractionImpl(std::weak_ptr<uml::Element> par_owner)
:InteractionImpl()
{
m_owner = par_owner;
}
//Additional constructor for the containments back reference
// The containing Package can play two different roles; reference_id selects
// which package reference (owningPackage vs. package) is being set.
InteractionImpl::InteractionImpl(std::weak_ptr<uml::Package> par_Package, const int reference_id)
:InteractionImpl()
{
switch(reference_id)
{
case uml::umlPackage::<API key>:
m_owningPackage = par_Package;
m_namespace = par_Package;
return;
case uml::umlPackage::<API key>:
m_package = par_Package;
m_namespace = par_Package;
return;
default:
// Unknown reference id: leave back references unset and report the misuse.
std::cerr << __PRETTY_FUNCTION__ <<" Reference not found in class with the given ID" << std::endl;
}
}
//Additional constructor for the containments back reference
// Delegates default initialization, then records the owning TemplateParameter
// (member name redacted) and the owner back reference.
InteractionImpl::InteractionImpl(std::weak_ptr<uml::TemplateParameter> <API key>)
:InteractionImpl()
{
<API key> = <API key>;
m_owner = <API key>;
}
// Copy constructor: default-initializes, then deep-copies via operator= below.
InteractionImpl::InteractionImpl(const InteractionImpl & obj): InteractionImpl()
{
*this = obj;
}
// Deep-copy assignment: copies base-class state, then clones every contained
// reference list (action, formalGate, fragment, lifeline, message) element by
// element via copy(), and finally re-registers each Subset with its union set.
InteractionImpl& InteractionImpl::operator=(const InteractionImpl & obj)
{
//call overloaded =Operator for each base class
BehaviorImpl::operator=(obj);
<API key>::operator=(obj);
/* TODO: Find out if this call is necessary
* Currently, this causes an error because it calls an implicit assignment operator of Interaction
* which is generated by the compiler (as Interaction is an abstract class and does not have a user-defined assignment operator).
* Implicit compiler-generated assignment operators however only create shallow copies of members,
* which implies, that not a real deep copy is created when using the copy()-method.
*
* NOTE: Since all members are deep-copied by this assignment-operator anyway, why is it even necessary to call this implicit assignment-operator?
* This is only done for ecore-models, not for UML-models.
*/
//Interaction::operator=(obj);
//create copy of all Attributes
#ifdef SHOW_COPIES
std::cout << "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++\r\ncopy Interaction "<< this << "\r\n+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ " << std::endl;
#endif
//Clone Attributes with (deep copy)
//copy references with no containment (soft copy)
//Clone references with containment (deep copy)
//clone reference 'action'
std::shared_ptr<Subset<uml::Action, uml::Element>> actionList = obj.getAction();
if(actionList)
{
/*Subset*/
m_action.reset(new Subset<uml::Action, uml::Element >());
#ifdef SHOW_SUBSET_UNION
std::cout << "Initialising shared pointer Subset: " << "m_action - Subset<uml::Action, uml::Element >()" << std::endl;
#endif
/*Subset*/
getAction()->initSubset(getOwnedElement());
#ifdef SHOW_SUBSET_UNION
std::cout << "Initialising value Subset: " << "m_action - Subset<uml::Action, uml::Element >(getOwnedElement())" << std::endl;
#endif
for(const std::shared_ptr<uml::Action> actionindexElem: *actionList)
{
std::shared_ptr<uml::Action> temp = std::<API key><uml::Action>((actionindexElem)->copy());
m_action->push_back(temp);
}
}
else
{
DEBUG_MESSAGE(std::cout << "Warning: container is nullptr action."<< std::endl;)
}
//clone reference 'formalGate'
std::shared_ptr<Subset<uml::Gate, uml::NamedElement>> formalGateList = obj.getFormalGate();
if(formalGateList)
{
/*Subset*/
m_formalGate.reset(new Subset<uml::Gate, uml::NamedElement >());
#ifdef SHOW_SUBSET_UNION
std::cout << "Initialising shared pointer Subset: " << "m_formalGate - Subset<uml::Gate, uml::NamedElement >()" << std::endl;
#endif
/*Subset*/
getFormalGate()->initSubset(getOwnedMember());
#ifdef SHOW_SUBSET_UNION
std::cout << "Initialising value Subset: " << "m_formalGate - Subset<uml::Gate, uml::NamedElement >(getOwnedMember())" << std::endl;
#endif
for(const std::shared_ptr<uml::Gate> formalGateindexElem: *formalGateList)
{
std::shared_ptr<uml::Gate> temp = std::<API key><uml::Gate>((formalGateindexElem)->copy());
m_formalGate->push_back(temp);
}
}
else
{
DEBUG_MESSAGE(std::cout << "Warning: container is nullptr formalGate."<< std::endl;)
}
//clone reference 'fragment'
std::shared_ptr<Subset<uml::InteractionFragment, uml::NamedElement>> fragmentList = obj.getFragment();
if(fragmentList)
{
/*Subset*/
m_fragment.reset(new Subset<uml::InteractionFragment, uml::NamedElement >());
#ifdef SHOW_SUBSET_UNION
std::cout << "Initialising shared pointer Subset: " << "m_fragment - Subset<uml::InteractionFragment, uml::NamedElement >()" << std::endl;
#endif
/*Subset*/
getFragment()->initSubset(getOwnedMember());
#ifdef SHOW_SUBSET_UNION
std::cout << "Initialising value Subset: " << "m_fragment - Subset<uml::InteractionFragment, uml::NamedElement >(getOwnedMember())" << std::endl;
#endif
for(const std::shared_ptr<uml::InteractionFragment> fragmentindexElem: *fragmentList)
{
std::shared_ptr<uml::InteractionFragment> temp = std::<API key><uml::InteractionFragment>((fragmentindexElem)->copy());
m_fragment->push_back(temp);
}
}
else
{
DEBUG_MESSAGE(std::cout << "Warning: container is nullptr fragment."<< std::endl;)
}
//clone reference 'lifeline'
std::shared_ptr<Subset<uml::Lifeline, uml::NamedElement>> lifelineList = obj.getLifeline();
if(lifelineList)
{
/*Subset*/
m_lifeline.reset(new Subset<uml::Lifeline, uml::NamedElement >());
#ifdef SHOW_SUBSET_UNION
std::cout << "Initialising shared pointer Subset: " << "m_lifeline - Subset<uml::Lifeline, uml::NamedElement >()" << std::endl;
#endif
/*Subset*/
getLifeline()->initSubset(getOwnedMember());
#ifdef SHOW_SUBSET_UNION
std::cout << "Initialising value Subset: " << "m_lifeline - Subset<uml::Lifeline, uml::NamedElement >(getOwnedMember())" << std::endl;
#endif
for(const std::shared_ptr<uml::Lifeline> lifelineindexElem: *lifelineList)
{
std::shared_ptr<uml::Lifeline> temp = std::<API key><uml::Lifeline>((lifelineindexElem)->copy());
m_lifeline->push_back(temp);
}
}
else
{
DEBUG_MESSAGE(std::cout << "Warning: container is nullptr lifeline."<< std::endl;)
}
//clone reference 'message'
std::shared_ptr<Subset<uml::Message, uml::NamedElement>> messageList = obj.getMessage();
if(messageList)
{
/*Subset*/
m_message.reset(new Subset<uml::Message, uml::NamedElement >());
#ifdef SHOW_SUBSET_UNION
std::cout << "Initialising shared pointer Subset: " << "m_message - Subset<uml::Message, uml::NamedElement >()" << std::endl;
#endif
/*Subset*/
getMessage()->initSubset(getOwnedMember());
#ifdef SHOW_SUBSET_UNION
std::cout << "Initialising value Subset: " << "m_message - Subset<uml::Message, uml::NamedElement >(getOwnedMember())" << std::endl;
#endif
for(const std::shared_ptr<uml::Message> messageindexElem: *messageList)
{
std::shared_ptr<uml::Message> temp = std::<API key><uml::Message>((messageindexElem)->copy());
m_message->push_back(temp);
}
}
else
{
DEBUG_MESSAGE(std::cout << "Warning: container is nullptr message."<< std::endl;)
}
// Re-run subset/union registration after all containers have been rebuilt,
// so each cloned Subset is wired into its enclosing union set.
/*Subset*/
getAction()->initSubset(getOwnedElement());
#ifdef SHOW_SUBSET_UNION
std::cout << "Initialising value Subset: " << "m_action - Subset<uml::Action, uml::Element >(getOwnedElement())" << std::endl;
#endif
/*Subset*/
getFormalGate()->initSubset(getOwnedMember())
;
#ifdef SHOW_SUBSET_UNION
std::cout << "Initialising value Subset: " << "m_formalGate - Subset<uml::Gate, uml::NamedElement >(getOwnedMember())" << std::endl;
#endif
/*Subset*/
getFragment()->initSubset(getOwnedMember());
#ifdef SHOW_SUBSET_UNION
std::cout << "Initialising value Subset: " << "m_fragment - Subset<uml::InteractionFragment, uml::NamedElement >(getOwnedMember())" << std::endl;
#endif
/*Subset*/
getLifeline()->initSubset(getOwnedMember());
#ifdef SHOW_SUBSET_UNION
std::cout << "Initialising value Subset: " << "m_lifeline - Subset<uml::Lifeline, uml::NamedElement >(getOwnedMember())" << std::endl;
#endif
/*Subset*/
getMessage()->initSubset(getOwnedMember());
#ifdef SHOW_SUBSET_UNION
std::cout << "Initialising value Subset: " << "m_message - Subset<uml::Message, uml::NamedElement >(getOwnedMember())" << std::endl;
#endif
return *this;
}
// Creates a deep copy of this Interaction via operator= and hands the clone
// its own shared_ptr (redacted setter) so internal back references are valid.
std::shared_ptr<ecore::EObject> InteractionImpl::copy() const
{
std::shared_ptr<InteractionImpl> element(new InteractionImpl());
*element =(*this);
element-><API key>(element);
return element;
}
/* Getter & Setter for reference action */
/*Subset*/
/*Subset*/
/* Getter & Setter for reference formalGate */
/*Subset*/
/*Subset*/
/* Getter & Setter for reference fragment */
/*Subset*/
/*Subset*/
/* Getter & Setter for reference lifeline */
/*Subset*/
/*Subset*/
/* Getter & Setter for reference message */
/*Subset*/
/*Subset*/
/*SubsetUnion*/
/*SubsetUnion*/
/*SubsetUnion*/
/*SubsetUnion*/
/*Union*/
/*Union*/
/*SubsetUnion*/
/*SubsetUnion*/
/*Union*/
/*Union*/
/*SubsetUnion*/
/*SubsetUnion*/ |
package jacobi.core.decomp.qr;
import jacobi.api.Matrices;
import jacobi.api.Matrix;
import jacobi.core.decomp.qr.step.DefaultQRStep;
import jacobi.core.givens.Givens;
import jacobi.test.annotations.JacobiImport;
import jacobi.test.annotations.JacobiInject;
import jacobi.test.util.Jacobi;
import jacobi.test.util.<API key>;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.IntStream;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
*
* @author Y.K. Chan
*/
@JacobiImport("/jacobi/test/data/SymmTriDiagQRTest.xlsx")
@RunWith(<API key>.class)
public class SymmTriDiagQRTest {

    @JacobiInject(1)
    public Matrix input;

    @JacobiInject(2)
    public Matrix shifted;

    @JacobiInject(3)
    public Matrix intermit;

    @JacobiInject(4)
    public Matrix output;

    @JacobiInject(10)
    public Matrix ans;

    @Test
    @JacobiImport("ToTriDiag 6x6")
    public void testToTriDiag6x6() {
        double[] diags = new SymmTriDiagQR((m, p, up) -> m).toTriDiag(this.input).get();
        Jacobi.assertEquals(this.output, this.diagsToRows(diags));
    }

    @Test
    @JacobiImport("ToTriDiag Non-Symm 5x5")
    public void <API key>() {
        Assert.assertFalse(new SymmTriDiagQR((m, p, up) -> m).toTriDiag(this.input).isPresent());
    }

    @Test
    @JacobiImport("ToTriDiag Non-Symm 4x4")
    public void <API key>() {
        Assert.assertFalse(new SymmTriDiagQR((m, p, up) -> m).toTriDiag(this.input).isPresent());
    }

    @Test
    @JacobiImport("Step 6x6")
    public void testStep6x6() {
        double[] diags = this.toZNotation(this.input.getRow(0), this.input.getRow(1));
        this.mockForStepTest(shifted, intermit, output).step(diags, null, 0, diags.length / 2);
    }

    @Test
    @JacobiImport("Step 5x5")
    public void testStep5x5One() {
        double[] diags = this.toZNotation(this.input.getRow(0), this.input.getRow(1));
        this.mockForStepTest(shifted, intermit, output).step(diags, null, 0, diags.length / 2);
    }

    @Test
    @JacobiImport("Step 5x5 (2)")
    public void testStep5x5Two() {
        double[] diags = this.toZNotation(this.input.getRow(0), this.input.getRow(1));
        this.mockForStepTest(shifted, intermit, output).step(diags, null, 0, diags.length / 2);
    }

    @Test
    @JacobiImport("Step 5x5 (3)")
    public void testStep5x5Three() {
        // FIX: removed unused locals diag/subDiag left over from an older API.
        double[] diags = this.toZNotation(this.input.getRow(0), this.input.getRow(1));
        this.mockForStepTest(shifted, intermit, output).step(diags, null, 0, diags.length / 2);
    }

    @Test
    @JacobiImport("Step 5x5 (4)")
    public void testStep5x5Four() {
        double[] diags = this.toZNotation(this.input.getRow(0), this.input.getRow(1));
        this.mockForStepTest(shifted, intermit, output).step(diags, null, 0, diags.length / 2);
    }

    @Test
    @JacobiImport("6x6")
    public void test6x6() {
        new HessenbergDecomp().compute(this.input);
        Matrix values = this.mock().compute(this.input, null, true);
        List<Double> eigs = new ArrayList<>(values.getRowCount());
        for(int i = 0; i < values.getRowCount(); i++){
            eigs.add(values.get(i, i));
        }
        List<Double> exp = new ArrayList<>(ans.getRowCount());
        for(int i = 0; i < ans.getRowCount(); i++){
            exp.add(ans.get(i, 0));
        }
        Jacobi.assertEquals(exp, eigs, 1e-12);
    }

    @Test
    @SuppressWarnings("InfiniteRecursion") // false positive
    public void <API key>() {
        AtomicBoolean marker = new AtomicBoolean(false);
        new SymmTriDiagQR((mat, par, full) -> {
            marker.set(true);
            return mat;
        }).compute(Matrices.zeros(2, 2), null, true);
        Assert.assertTrue(marker.get());
    }

    /**
     * Unpacks interleaved z-notation [d0, s0, d1, s1, ...] into a 2-row matrix
     * of diagonal and super-diagonal entries for comparison against fixtures.
     */
    private Matrix diagsToRows(double[] diags) {
        double[] diag = new double[diags.length / 2];
        double[] supDiag = new double[diag.length];
        for(int i = 0; i < diag.length; i++){
            diag[i] = diags[2*i];
            supDiag[i] = diags[2*i + 1];
        }
        return Matrices.wrap(new double[][]{diag, supDiag});
    }

    /**
     * Interleaves diagonal and super-diagonal arrays into z-notation
     * [d0, s0, d1, s1, ...].
     */
    private double[] toZNotation(double[] diags, double[] supDiags) {
        return IntStream.range(0, 2 * diags.length)
                .mapToDouble((i) -> i % 2 == 0 ? diags[i/2] : supDiags[i/2])
                .toArray();
    }

    /** Full QR pipeline backed by the default step, used for eigenvalue tests. */
    private SymmTriDiagQR mock() {
        return new SymmTriDiagQR(new BasicQR(new DefaultQRStep()));
    }

    /**
     * SymmTriDiagQR whose intermediate states are asserted against the given
     * fixture matrices after qrDecomp and after each step.
     */
    private SymmTriDiagQR mockForStepTest(Matrix afterShift, Matrix afterQR, Matrix result) {
        return new SymmTriDiagQR((m, p, up) -> m){

            @Override
            protected List<Givens> qrDecomp(double[] diags, int begin, int end, double shift) {
                List<Givens> giv = super.qrDecomp(diags, begin, end, shift);
                Jacobi.assertEquals(afterQR, diagsToRows(diags));
                return giv;
            }

            @Override
            protected int step(double[] diags, Matrix partner, int begin, int end) {
                int split = super.step(diags, partner, begin, end);
                Jacobi.assertEquals(result, diagsToRows(diags));
                return split;
            }
        };
    }
}
#!/bin/bash
# Reads a memory region from a serial device in BSIZE-byte chunks using an
# expect script, appending each chunk to the requested output file.
#   usage: readblock.sh outfile tty address size
EXPECTED_ARGS=4
BSIZE=1024
EXPSCRIPT="expect -d `pwd`/readblock.exp"

if [ $# -ne $EXPECTED_ARGS ]
then
	echo "usage: `basename $0` file tty address size"
	exit 1   # FIX: was exit 0 -- misuse must signal failure to callers
fi

if [ -f "$1" ]
then
	echo "$1 already exists, exiting"
	exit 1   # FIX: was exit 0 -- refusing to overwrite is a failure
fi

if [ ! -c "$2" ]
then
	echo "couldn't find device $2"
	exit 1   # FIX: was exit 0
fi

#TODO check ADDR and size
TMPDIR=`mktemp -d`
OUTFILE="`pwd`/$1"
TTY=$2
#address and size are probably hex
STARTADDR=`echo $(($3))`
SIZE=`echo $(($4))`
ENDADDR=`expr $STARTADDR + $SIZE`
TMPFILE="$TMPDIR/tmp_$BSIZE"
echo "using $TMPDIR"
pushd "$TMPDIR"
for (( ADDR=$STARTADDR; ADDR<=$ENDADDR; ADDR=$ADDR+$BSIZE ))
do
	echo "Processing address `printf "0x%08X" $ADDR` "
	##todo check if less than BSIZE left
	##todo while not ok
	$EXPSCRIPT "$2" "$TMPFILE" `printf "0x%08X" $ADDR` $BSIZE
	if [ $? -gt 0 ]
	then
		echo "error when sending file"
		popd
		rm -rf "$TMPDIR"   # FIX: previously leaked the temp dir on failure
		exit 1
	fi
	##todo convert tmpfile and add to outfile
	cat "$TMPFILE" >> "$OUTFILE"
done
popd
rm -rf "$TMPDIR"
<?php
namespace Bolt\Extension\DanielKulbe\Shariff;
/**
* Class Reddit.
*/
class Reddit extends Request implements ServiceInterface
{
    /**
     * {@inheritdoc}
     */
    public function getName()
    {
        return 'reddit';
    }

    /**
     * {@inheritdoc}
     */
    public function getRequest($url)
    {
        // FIX: the URL literal was truncated at "https:" in the source
        // (the "//..." remainder was apparently stripped as a comment).
        // NOTE(review): reconstructed as Reddit's info endpoint -- confirm
        // against the upstream Shariff backend implementation.
        return new \GuzzleHttp\Message\Request('GET', 'https://www.reddit.com/api/info.json?url=' . $url);
    }

    /**
     * {@inheritdoc}
     *
     * Sums the score of every returned listing child; missing or empty
     * payloads yield a count of 0.
     */
    public function extractCount(array $data)
    {
        $count = 0;
        if (!empty($data['data']['children'])) {
            foreach ($data['data']['children'] as $child) {
                if (!empty($child['data']['score'])) {
                    $count += $child['data']['score'];
                }
            }
        }
        return $count;
    }
}
using System;
using System.Collections.ObjectModel;
using System.Linq;
using Windows.UI;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;
using Windows.UI.Xaml.Documents;
using Windows.UI.Xaml.Media;
using Windows.UI.Xaml.Navigation;
using WorkOrganizer.Specs;
namespace WorkOrganizer
{
<summary>
An empty page that can be used on its own or navigated to within a Frame.
</summary>
/// <summary>
/// Create/edit page for a House. When navigated to with a House parameter the
/// page runs in edit mode; with null it creates a new House.
/// </summary>
public sealed partial class HousePage : Page
{
    /// <summary>Backing collection for the owners combo box.</summary>
    public <API key><Owner> ComboDataOwners { get; set; }

    /// <summary>House being edited, or null when creating a new one.</summary>
    public House HouseOnEdit { get; private set; }

    /// <summary>True when the page is editing an existing house.</summary>
    public bool IsEdit { get; private set; }

    public HousePage()
    {
        this.InitializeComponent();
        ComboDataOwners = new <API key><Owner>( App.DB.ActiveOwners );
        ComboOwners.ItemsSource = ComboDataOwners;
        HouseOnEdit = null;
    }

    protected override void OnNavigatedTo(NavigationEventArgs e)
    {
        HouseOnEdit = (House)e.Parameter;
        IsEdit = (HouseOnEdit != null);
        if (IsEdit)
        {
            <API key>.Content = "Edit House";
            TextBoxName.Text = HouseOnEdit.Name.ToString();
            // The combo only lists active owners; if the house belongs to an
            // inactive owner, add that owner so the selection can be shown.
            bool HasItem = false;
            foreach (Owner item in ComboOwners.Items)
            {
                if (item.IdOwner == HouseOnEdit.IdOwner)
                    HasItem = true;
            }
            if (!HasItem)
            {
                ComboDataOwners.Add(App.DB.Owners.First(o => o.IdOwner == HouseOnEdit.IdOwner));
            }
            ComboOwners.SelectedValue = HouseOnEdit.IdOwner;
        }
        else
            <API key>.Content = "Create House";
    }

    private async void <API key>(object sender, RoutedEventArgs e)
    {
        try
        {
            if (ComboOwners.SelectedIndex != -1)
            {
                DatabaseMessage msg = null;
                House h = new House(TextBoxName.Text, ((int)ComboOwners.SelectedValue));
                if (IsEdit)
                    msg = await App.DB.EditHouse(HouseOnEdit.IdHouse, h);
                else
                    msg = await App.DB.AddHouse(h);
                if (msg.State == <API key>.OK)
                {
                    TextError.Text = "";
                    TextError.Visibility = Visibility.Collapsed;
                    Frame.GoBack();
                }
                else
                {
                    TextError.Text = msg.Error;
                    TextError.Visibility = Visibility.Visible;
                }
            }
            else
            {
                TextError.Text = "You have to select an owner";
                TextError.Inlines.Add(new Run { Text = " Sweetie", Foreground = new SolidColorBrush(Colors.HotPink) });
                TextError.Inlines.Add(new Run { Text = "." });
                TextError.Visibility = Visibility.Visible;
            }
        }
        catch (Exception ex)
        {
            // FIX: "throw ex;" resets the stack trace; bare rethrow preserves it.
            throw;
        }
    }

    private void ButtonGoBack_Click(object sender, RoutedEventArgs e)
    {
        Frame.GoBack();
    }
}
---
layout: page
title: "Jesus Francisco Villalpando Quinonez"
comments: true
description: "blanks"
keywords: "Jesus Francisco Villalpando Quinonez,CU,Boulder"
---
<head>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.3/jquery.min.js"></script>
<script src="https://dl.dropboxusercontent.com/s/pc42nxpaw1ea4o9/highcharts.js?dl=0"></script>
<!-- <script src="../assets/js/highcharts.js"></script> -->
<style type="text/css">@font-face {
font-family: "Bebas Neue";
src: url(https://dl.dropboxusercontent.com/s/bebasneue.woff?dl=0); /* URL was truncated at "https:" in the source; restore the original font file URL */
}
h1.Bebas {
font-family: "Bebas Neue", Verdana, Tahoma;
}
</style>
</head>
# TEACHING INFORMATION
**College**: College of Arts and Sciences
**Classes taught**: LING 4420
# LING 4420: Morphology and Syntax
**Terms taught**: Spring 2017
**Instructor rating**: 5.08
**Standard deviation in instructor rating**: 0.0
**Average grade** (4.0 scale): 3.29
**Standard deviation in grades** (4.0 scale): 0.0
**Average workload** (raw): 2.36
**Standard deviation in workload** (raw): 0.0 |
package main
import "fmt"
func main () {
x := []int{
48,96,86,68,
57,82,63,70,
37,34,83,27,
19,97, 9,17,
}
smallest := x[0]
for i := 1; i < len(x); i++ {
if x[i] < smallest {
smallest = x[i]
}
}
fmt.Println(smallest)
} |
<?php
// Hungarian (hu) translations for the file manager module.
return [
'files' => 'Fájlok',
'filemanager' => 'Fájlkezelő',
];
/* Custom display font served from the local fonts directory. */
@font-face {
font-family: aaargh;
src: url('../fonts/aaargh.ttf');
}
/* All headings share the display font, centered and pushed into the row. */
h1, h2, h3, h4, h5 {
font-family: 'aaargh', sans-serif;
font-weight: 100;
text-align: center;
margin-top: 80%;
color: #F5F1EF;
text-transform: uppercase;
}
h1 {
font-size: 11rem;
}
/* Each .row is a tall full-width band; row-N variants only set the color. */
.row {
height: 100rem;
}
.row-1 {
background-color: #093;
}
.row-2 {
background-color: #903;
}
.row-3 {
background-color: #309;
}
.row-4 {
background-color: #960;
}
#include <utility>
#include <stdexcept>
#include <easy/udp_socket.hpp>
namespace fd {
namespace easy {
namespace udp_socket {

// Creates a UDP socket bound to the given IPv4 address and port
// (both supplied in host byte order).
socket server(uint32_t addr, uint16_t port)
{
	// FIX: value-initialize so the sin_zero padding bytes are not left
	// indeterminate when the struct is handed to bind().
	struct sockaddr_in saddr{};
	saddr.sin_family = AF_INET;
	saddr.sin_addr.s_addr = htonl(addr);
	saddr.sin_port = htons(port);
	auto socket = fd::socket(AF_INET, SOCK_DGRAM);
	socket.bind(saddr);
	return socket;
}

// Creates a UDP socket bound to the given IPv6 address and port
// (port in host byte order); flowinfo and scope id default to 0.
socket server(const struct in6_addr &addr, uint16_t port)
{
	struct sockaddr_in6 saddr{};
	saddr.sin6_family = AF_INET6;
	saddr.sin6_addr = addr;
	saddr.sin6_port = htons(port);
	saddr.sin6_flowinfo = 0;
	saddr.sin6_scope_id = 0;
	return server(saddr);
}

// Creates a UDP socket bound to a fully specified IPv6 socket address.
socket server(const struct sockaddr_in6 &saddr)
{
	auto socket = fd::socket(AF_INET6, SOCK_DGRAM);
	socket.bind(saddr);
	return socket;
}

}
}
}
namespace System.Runtime.InteropServices
{
#if CONFIG_COM_INTEROP && <API key>
// COM class-object registration flags; values mirror the unmanaged
// REGCLS-style bit flags -- TODO confirm (enum name redacted in source).
[ComVisible(false)]
public enum <API key>
{
SingleUse = 0x0000,
MultipleUse = 0x0001,
MultiSeparate = 0x0002,
Suspended = 0x0004,
Surrogate = 0x0008
}; // enum <API key>
#endif // CONFIG_COM_INTEROP && <API key>
}; // namespace System.Runtime.InteropServices
// Turkish (tr) translations for the Labs module.
export default {
  labs: {
    label: 'Laboratuvarlar',
    filterTitle: 'Duruma Göre Filtrele',
    search: 'Laboratuvarlar İçinde Ara',
    successfullyUpdated: 'Başarılı bir şekilde güncellendi',
    <API key>: 'Başarılı bir şekilde tamamlandı',
    successfullyCreated: 'Başarılı bir şekilde oluşturuldu',
    status: {
      requested: 'Talep Oluşturuldu',
      completed: 'Tamamlandı',
      canceled: 'İptal Edildi',
    },
    filter: {
      all: 'Tüm Durumlar',
    },
    requests: {
      label: 'Laboratuvar Talepleri',
      new: 'Yeni Laboratuvar Talebi',
      save: 'Request', // TODO(review): still in English -- translate?
      view: 'Laboratuvarı Görüntüle',
      cancel: 'Laboratuvarı İptal Et',
      complete: 'Laboratuvarı Tamamla',
      update: 'Update Lab', // TODO(review): still in English -- translate?
      error: {
        unableToRequest: 'Yeni laboratuvar talebi oluşturulamıyor.',
        unableToComplete: 'Laboratuvar talebi tamamlanamıyor.',
        typeRequired: 'Tür alanının doldurulması zorunludur.',
        patientRequired: 'Patient is required.', // TODO(review): still in English -- translate?
        <API key>: 'Tamamlama işlemi için sonuç gerekli.',
      },
    },
    lab: {
      code: 'Laboratuvar Kodu',
      status: 'Durum',
      for: 'İçin',
      type: 'Tür',
      result: 'Sonuç',
      notes: 'Notlar',
      requestedOn: 'Talep Tarihi',
      completedOn: 'Tamamlanma Tarihi',
      canceledOn: 'İptal Tarih',
      patient: 'Hasta',
    },
  },
}
var _ = require('underscore')
, pg = require('pg')
, config = require('../config.js');
// Aggregates a trip's sensor rows into options.windows equal-sized time
// windows (via Postgres NTILE) and returns per-window min/avg/max of x/y/z
// plus start/mid/end timestamps. callback(err, data).
function getWindowsForId (id, options, callback) {
if(_.isFunction(options)) {
// Check if we want to use the default options
callback = options;
options = { windows: 200 };
} else if(!_.isObject(options)) {
throw new TypeError("Bad arguments");
}
pg.connect(config.pgCon, function (err, client, done) {
if(err) { callback(err); done(); return; }
// $1 = number of windows, $2 = trip id; time bounds appended as $3/$4 below.
var values = [options.windows, id];
var query = "SELECT w,\
avg(x) AS avgX,\
min(x) AS minX,\
max(x) AS maxX,\
avg(y) AS avgY,\
min(y) AS minY,\
max(y) AS maxY,\
avg(z) AS avgZ,\
min(z) AS minZ,\
max(z) AS maxZ,\
min(ts) AS startTime,\
max(ts) AS endTime\
FROM (\
SELECT x, y, z, ts\
, NTILE($1) OVER (ORDER BY ts) AS w\
FROM <API key> WHERE trip_id=$2 {{LIMIT_TIME}}) A\
GROUP BY w\
ORDER BY w;";
// Optional inclusive time-range filter, substituted into the template above.
var limitTime = "";
if(options.startTime && !options.endTime) {
limitTime = "AND ts>=$3";
values.push(options.startTime);
} else if(!options.startTime && options.endTime) {
limitTime = "AND ts<=$3";
values.push(options.endTime);
} else if(options.startTime && options.endTime) {
limitTime = "AND ts>=$3 AND ts<=$4";
values.push(options.startTime);
values.push(options.endTime);
}
query = query.replace("{{LIMIT_TIME}}", limitTime);
client.query(query, values, function (err, result) {
done();
if(err) { callback(err); return; }
// Postgres lowercases unquoted aliases, hence e.avgx etc. below.
var data = _.map(result.rows, function (e) {
return {
window: e.w,
avgX: e.avgx,
minX: e.minx,
maxX: e.maxx,
avgY: e.avgy,
minY: e.miny,
maxY: e.maxy,
avgZ: e.avgz,
minZ: e.minz,
maxZ: e.maxz,
startTime: parseInt(e.starttime),
endTime: parseInt(e.endtime),
midTime: (parseInt(e.starttime) + parseInt(e.endtime)) / 2
};
});
callback(null, data);
});
}) ;
}
// Fetches the raw sensor rows for a trip, optionally bounded (inclusive) by
// options.startTime / options.endTime. callback(err, rows).
function getRawForId (id, options, callback) {
  if(_.isFunction(options)) {
    // Called as getRawForId(id, callback): no time bounds.
    callback = options;
    // FIX: options previously stayed bound to the callback function here.
    options = {};
  } else if(!_.isObject(options)) {
    throw new TypeError("Bad arguments");
  }
  pg.connect(config.pgCon, function (err, client, done) {
    if(err) { callback(err); done(); return; }
    var query = 'SELECT * FROM <API key> WHERE trip_id=$1';
    var values = [id];
    // FIX: limitTime was never declared, so calls without time bounds threw a
    // ReferenceError at `query += limitTime` (and otherwise leaked a global).
    // Also added the leading space that was missing before "AND".
    var limitTime = "";
    if(options.startTime && !options.endTime) {
      limitTime = " AND ts>=$2";
      values.push(options.startTime);
    } else if(!options.startTime && options.endTime) {
      limitTime = " AND ts<=$2";
      values.push(options.endTime);
    } else if(options.startTime && options.endTime) {
      limitTime = " AND ts>=$2 AND ts<=$3";
      values.push(options.startTime);
      values.push(options.endTime);
    }
    query += limitTime;
    client.query(query, values, function (err, result) {
      done();
      if(err) { callback(err); return; }
      callback(null, result.rows);
    });
  });
}
// Counts the raw sensor rows stored for a trip.
// Delivers callback(null, { count }) on success, callback(err) on failure.
function getCountForId (id, options, callback) {
  if (_.isFunction(options)) {
    // Two-argument form: options omitted, second argument is the callback.
    callback = options;
  } else if (!_.isObject(options)) {
    throw new TypeError("Bad arguments");
  }
  pg.connect(config.pgCon, function (connErr, client, done) {
    if (connErr) { callback(connErr); done(); return; }
    var countSql = 'SELECT COUNT(*) FROM <API key> WHERE trip_id=$1;';
    client.query(countSql, [id], function (queryErr, result) {
      done();
      if (queryErr) { callback(queryErr); return; }
      callback(null, {count: result.rows[0].count});
    });
  });
}
// Public API: per-trip sensor queries (windowed aggregates, raw rows, count).
module.exports = {
getWindowsForId: getWindowsForId,
getCountForId: getCountForId,
getRawForId: getRawForId
};
#ifndef curl_multi_H
#define curl_multi_H
#include "curl_easy.h"
using curl::curl_easy;
using curl::<API key>;
namespace curl {
/**
* As libcurl documentation says, the multi interface offers several abilities that
* the easy interface doesn't. They are mainly:
* 1. Enable a "pull" interface. The application that uses libcurl decides where and
* when to ask libcurl to get/send data.
* 2. Enable multiple simultaneous transfers in the same thread without making it
* complicated for the application.
* 3. Enable the application to wait for action on its own file descriptors and curl's
* file descriptors simultaneous easily.
*/
class curl_multi : public curl_interface<CURLMcode> {
public:
    /**
     * The multi interface gives users the opportunity to get information
     * about transfers. That information is wrapped in the following class,
     * so users can access it in an easy and efficient way. This class is
     * nested because these messages only make sense when using the
     * multi interface.
     */
    class curl_message {
    public:
        /**
         * The attributes will be initialized with the constructor's
         * parameters. With this constructor we provide a fast way to
         * build this kind of object.
         */
        curl_message(const CURLMsg *);
        /**
         * Inline getter method used to return
         * the message for a single handler.
         */
        const CURLMSG get_message() const;
        /**
         * Inline getter method used to return
         * the code for a single handler.
         */
        const CURLcode get_code() const;
        /**
         * Inline getter method used to return
         * other data.
         */
        const void *get_other() const;
    private:
        const CURLMSG message;
        const void *whatever;
        const CURLcode code;
    };
    /**
     * Simple default constructor. It is used to give a
     * default value to all the attributes and provide a
     * fast way to create an object of this type. It also
     * initializes the curl environment with the default
     * values.
     */
    curl_multi();
    /**
     * Overloaded constructor. Gives users the opportunity
     * to initialize the entire curl environment using custom
     * options.
     */
    explicit curl_multi(const long);
    /**
     * Copy constructor to perform a correct copy of the curl
     * handler and attributes.
     */
    curl_multi(const curl_multi &);
    /**
     * Assignment operator. Let's apply the rule of three to
     * avoid strange situations!
     */
    curl_multi &operator=(const curl_multi &);
    /**
     * Destructor to deallocate all the resources using
     * libcurl.
     */
    ~curl_multi() noexcept;
    /**
     * This method allows users to add an option to the multi
     * handler, using an object of curl_pair type.
     */
    template<typename T> void add(const curl_pair<CURLMoption,T> &);
    /**
     * Allows users to specify a list of options for the current
     * easy handler. In this way, you can specify any iterable data
     * structure.
     */
    template<typename Iterator> void add(Iterator, const Iterator);
    /**
     * Overloaded add method. Allows users to specify an easy handler
     * to add to the multi handler, to perform more transfers at the same
     * time.
     */
    void add(const curl_easy &);
    /**
     * This method allows to remove an easy handler from the multi handler.
     */
    void remove(const curl_easy &);
    /**
     * This method tries to obtain information about all the handlers added
     * to the multi handler.
     */
    vector<unique_ptr<curl_message>> get_info();
    /**
     * This method tries to obtain information regarding an easy handler
     * that has been added to the multi handler.
     */
    unique_ptr<curl_message> get_info(const curl_easy &);
    /**
     * This method checks if the transfer on a curl_easy object is finished.
     */
    bool is_finished(const curl_easy &);
    /**
     * Perform all the operations. Go baby! If the performing operations
     * have finished, the method returns true. Else, returns false. Check
     * the online documentation for further information.
     */
    bool perform();
    /**
     * This method wraps the libcurl function that reads/writes available data
     * given an action. Read the libcurl online documentation to learn more
     * about this function!
     */
    bool socket_action(const curl_socket_t, const int);
    /**
     * This method wraps the libcurl function that extracts file descriptor
     * information from the multi handler.
     * Read the libcurl online documentation to learn more about this function.
     */
    void set_descriptors(fd_set *, fd_set *, fd_set *, int *);
    /**
     * This function polls on all file descriptors used by the curl easy handles
     * contained in the given multi handle set.
     */
    void wait(struct curl_waitfd [], const unsigned int, const int, int *);
    /**
     * This function creates an association in the multi handle between the given
     * socket and a private pointer of the application.
     */
    void assign(const curl_socket_t, void *);
    /**
     * If you are using the libcurl multi interface you should call this method
     * to figure out how long your application should wait for socket actions
     * - at most - before proceeding.
     */
    void timeout(long *);
    /**
     * Inline getter method used to return the currently active transfers.
     */
    int <API key>() const noexcept;
    /**
     * Inline getter method used to return the currently queued messages.
     */
    int get_message_queued() const noexcept;
private:
    // Counter of messages still queued on the multi handle.
    int message_queued;
    // Counter of transfers currently in progress.
    int active_transfers;
    // The raw libcurl multi handle wrapped by this class.
    CURLM *curl;
};
// Implementation of add method.
// Applies a single CURLMoption/value pair to the wrapped multi handle via
// curl_multi_setopt, translating any libcurl failure into an exception.
template<typename T> void curl_multi::add(const curl_pair<CURLMoption,T> &pair) {
    const CURLMcode code = curl_multi_setopt(this->curl,pair.first(),pair.second());
    if (code != CURLM_OK) {
        throw <API key>(code,__FUNCTION__);
    }
}
// Implementation of the overloaded add method: applies every option in the
// iterator range [first, last) to this multi handler, one at a time.
template<typename Iterator> void curl_multi::add(Iterator first, const Iterator last) {
    while (first != last) {
        this->add(*first);
        ++first;
    }
}
// Implementation of the active-transfers getter: reports how many transfers
// are currently running on this multi handler.
inline int curl_multi::<API key>() const noexcept {
    return this->active_transfers;
}
// Implementation of get_message_queued method.
// BUG FIX: this getter previously returned `active_transfers`, so callers
// could never observe the queued-message count tracked in `message_queued`.
inline int curl_multi::get_message_queued() const noexcept {
    return this->message_queued;
}
// Implementation of curl_message get_message method: the CURLMSG kind
// (e.g. CURLMSG_DONE) carried by this message.
inline const CURLMSG curl_multi::curl_message::get_message() const {
    return this->message;
}
// Implementation of curl_message get_code method: the per-transfer result.
inline const CURLcode curl_multi::curl_message::get_code() const {
    return this->code;
}
// Implementation of curl_message get_other method: opaque extra data.
inline const void *curl_multi::curl_message::get_other() const {
    return this->whatever;
}
}
#endif /* curl_multi_H */ |
package fireload
import (
"container/ring"
"testing"
"github.com/kr/pretty"
)
// testNodes is the fixed set of Firebase namespaces every pool test starts from.
var testNodes = []Namespace{
	{Domain: "node-1.firebaseio.com"},
	{Domain: "node-2.firebaseio.com"},
	{Domain: "node-3.firebaseio.com"},
	{Domain: "node-4.firebaseio.com"},
	{Domain: "node-5.firebaseio.com"},
}
// Test_NewPool verifies a freshly-built pool contains exactly the test nodes.
func Test_NewPool(t *testing.T) {
	p, err := NewPool(testNodes...)
	if err != nil {
		t.Fatal(err)
	}
	assertSeenSet(t, p.Nodes, testNodes)
}
// assertSeenSet walks the ring once and verifies its contents match `expected`
// as a multiset: every expected domain is present, and nothing extra remains.
func assertSeenSet(t *testing.T, nodes *ring.Ring, expected []Namespace) {
	seen := map[string]int{}
	for i := 0; i < nodes.Len(); i++ {
		val := nodes.Value
		ns, ok := val.(Namespace)
		if !ok {
			t.Fatalf("Expected nodes to be a Ring with Namespace values. Got %T", val)
		}
		seen[ns.Domain]++
		nodes = nodes.Next()
	}
	for _, ns := range expected {
		if _, ok := seen[ns.Domain]; !ok {
			t.Logf("Expected: %# v", pretty.Formatter(expected))
			t.Logf("Saw: %# v", pretty.Formatter(seen))
			t.Fatalf("Missing %s.", ns.Domain)
		}
		// BUG FIX: the original bare expression `seen[ns.Domain]` is not valid
		// Go. Decrement instead, so any surplus or deficit shows up as a
		// non-zero count in the final check below.
		seen[ns.Domain]--
	}
	for domain, count := range seen {
		if count != 0 {
			t.Fatalf("Expected count for %s off by %d", domain, count)
		}
	}
}
// Verifies a new pool defaults to the random selection strategy.
func <API key>(t *testing.T) {
	p, err := NewPool(testNodes...)
	if err != nil {
		t.Fatal(err)
	}
	if p.Strategy != StrategyRandom {
		t.Fatalf("Expected default strategy to be %d but got %d", StrategyRandom, p.Strategy)
	}
}

// Verifies SetStrategy switches the pool to round-robin selection.
func <API key>(t *testing.T) {
	p, err := NewPool(testNodes...)
	if err != nil {
		t.Fatal(err)
	}
	if err := p.SetStrategy(StrategyRoundRobin); err != nil {
		t.Fatal(err)
	}
	if p.Strategy != StrategyRoundRobin {
		t.Fatalf("Expected default strategy to be %d but got %d", StrategyRoundRobin, p.Strategy)
	}
}

// Verifies SetStrategy rejects an unknown strategy value with ErrInvalidStrategy.
func <API key>(t *testing.T) {
	p, err := NewPool(testNodes...)
	if err != nil {
		t.Fatal(err)
	}
	if err := p.SetStrategy(Strategy(-1)); err != ErrInvalidStrategy {
		t.Fatalf("Expected SetStrategy with invalid strategy to return %s but got %v", ErrInvalidStrategy, err)
	}
}
// Test_Pool_Add_Pass verifies Add grows the node set by the new namespace.
func Test_Pool_Add_Pass(t *testing.T) {
	p, err := NewPool(testNodes...)
	if err != nil {
		t.Fatal(err)
	}
	newNS := NewNamespace("node-6.firebaseio.com")
	p.Add(newNS)
	assertSeenSet(t, p.Nodes, append(testNodes, newNS))
}

// Test_Pool_Drop_Pass verifies Drop removes exactly the named node.
func Test_Pool_Drop_Pass(t *testing.T) {
	p, err := NewPool(testNodes...)
	if err != nil {
		t.Fatal(err)
	}
	if err := p.Drop("node-1.firebaseio.com"); err != nil {
		t.Fatal(err)
	}
	assertSeenSet(t, p.Nodes, testNodes[1:])
}

// Verifies dropping an unknown domain is a no-op: no error, node set unchanged.
func <API key>(t *testing.T) {
	p, err := NewPool(testNodes...)
	if err != nil {
		t.Fatal(err)
	}
	if err := p.Drop("node-nope.firebaseio.com"); err != nil {
		t.Fatal(err)
	}
	assertSeenSet(t, p.Nodes, testNodes)
}
// Verifies NextRandom never yields a zero-value Namespace, even when asked
// for more nodes than the pool holds (it wraps, never running dry).
func <API key>(t *testing.T) {
	p, err := NewPool(testNodes...)
	if err != nil {
		t.Fatal(err)
	}
	for i := 0; i <= 2*p.Nodes.Len(); i++ {
		next := p.NextRandom()
		if next.Domain == "" {
			t.Fatalf("Expected NextRandom() not to yield nil value Namespace")
		}
	}
}

// Verifies Next() under the explicit random strategy also never yields a
// zero-value Namespace.
func <API key>(t *testing.T) {
	p, err := NewPool(testNodes...)
	if err != nil {
		t.Fatal(err)
	}
	if err := p.SetStrategy(StrategyRandom); err != nil {
		t.Fatal(err)
	}
	for i := 0; i <= 2*p.Nodes.Len(); i++ {
		if got := p.Next(); got.Domain == "" {
			t.Fatalf("Expected Next() not to yield nil value Namespace")
		}
	}
}
// Verifies NextRoundRobin walks the ring in order, tracking the ring's own
// Next() traversal for more than one full cycle.
func <API key>(t *testing.T) {
	p, err := NewPool(testNodes...)
	if err != nil {
		t.Fatal(err)
	}
	for i := 0; i <= 2*p.Nodes.Len(); i++ {
		node := p.Nodes.Next()
		expected, ok := node.Value.(Namespace)
		if !ok {
			// Typo fix in the failure message: "Namesapce" -> "Namespace".
			t.Fatalf("Couldn't typecast %v as Namespace", node.Value)
		}
		if got := p.NextRoundRobin(); got.String() != expected.String() {
			t.Fatalf("Expected NextRoundRobin() to yield %v but got %v", expected, got)
		}
	}
}
func <API key>(t *testing.T) {
p, err := NewPool(testNodes...)
if err != nil {
t.Fatal(err)
}
if err := p.SetStrategy(StrategyRoundRobin); err != nil {
t.Fatal(err)
}
for i := 0; i <= 2*p.Nodes.Len(); i++ {
node := p.Nodes.Next()
expected, ok := node.Value.(Namespace)
if !ok {
t.Fatalf("Couldn't typecast %v as Namesapce", node.Value)
}
if got := p.Next(); got.String() != expected.String() {
t.Fatalf("Expected Next() to yield %v but got %v", expected, got)
}
}
} |
import math
import random
def test(block_size=1024, repeats=1024, iterations=5000):
    """Micro-benchmark: hex-encode a (block_size * repeats)-byte buffer
    `iterations` times.

    Ported from Python 2 to Python 3: `xrange` no longer exists and
    `str.encode('hex')` was removed, so the buffer is built as `bytes` and
    encoded with `binascii.hexlify`. The workload sizes are now parameters;
    the defaults reproduce the original 1 MiB x 5000 benchmark exactly.

    Returns the length of the final hex string (2 characters per input
    byte), or 0 when `iterations` is 0.
    """
    import binascii
    import random

    print('build')
    # Random 8-bit values; randrange(256) replaces the original
    # chr(int(math.floor(random.random() * 256))) per-byte construction.
    chunk = bytes(random.randrange(256) for _ in range(block_size))
    data = chunk * repeats
    print(len(data))
    print('run')
    res = b''
    for _ in range(iterations):
        res = binascii.hexlify(data)
    return len(res)


if __name__ == '__main__':
    # Guarded so importing this module does not kick off the benchmark.
    test()
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Linq;
using EvolveDb.Metadata;
using EvolveDb.Migration;
namespace EvolveDb.Dialect.CockroachDB
{
internal class <API key> : MetadataTable
{
    /// <summary>
    ///     Builds the CockroachDB-specific Evolve metadata table accessor for
    ///     the given schema/table pair.
    /// </summary>
    public <API key>(string schema, string tableName, DatabaseHelper database)
        : base(schema, tableName, database)
    {
    }

    /// <summary>
    ///     Implementing advisory locks in CockroachDB is being discussed, see:
    ///     https://forum.cockroachlabs.com/t/<API key>/742
    ///     Until then, locking is emulated with a sentinel row (id = 0)
    ///     inserted inside a transaction.
    ///     (Fix: this summary previously lacked the leading "///" markers,
    ///     which is a C# syntax error.)
    /// </summary>
    [SuppressMessage("Design", "CA1031: Do not catch general exception types")]
    protected override bool InternalTryLock()
    {
        string sqlGetLock = $"SELECT * FROM \"{Schema}\".\"{TableName}\" WHERE id = 0";
        string sqlAddLock = $"INSERT INTO \"{Schema}\".\"{TableName}\" (id, type, version, description, name, checksum, installed_by, success) " +
                            $"values(0, 0, '0', 'lock', 'lock', '', '{_database.CurrentUser}', true)";

        try
        {
            _database.WrappedConnection.BeginTransaction();
            var locks = _database.WrappedConnection.QueryForList(sqlGetLock, r => r.GetInt32(0));
            if (!locks.Any())
            {
                _database.WrappedConnection.ExecuteNonQuery(sqlAddLock);
                _database.WrappedConnection.Commit();
                return true;
            }
            else
            {
                // Someone else holds the lock row; end the transaction cleanly.
                _database.WrappedConnection.Commit();
                return false;
            }
        }
        catch
        {
            _database.WrappedConnection.TryRollback();
            return false;
        }
    }

    /// <summary>
    ///     Deletes the sentinel lock row. Returns false on any failure.
    /// </summary>
    [SuppressMessage("Design", "CA1031: Do not catch general exception types")]
    protected override bool InternalReleaseLock()
    {
        try
        {
            _database.WrappedConnection.ExecuteNonQuery($"DELETE FROM \"{Schema}\".\"{TableName}\" WHERE id = 0");
            return true;
        }
        catch
        {
            return false;
        }
    }

    /// <summary>
    ///     Checks whether the metadata table already exists in the target
    ///     database/schema.
    /// </summary>
    protected override bool InternalIsExists()
    {
        if (!_database.GetSchema(Schema).IsExists())
        { // database does not exist, so neither does the metadata table
            return false;
        }

        return _database.WrappedConnection.QueryForLong($"SELECT COUNT(*) FROM \"{Schema}\".information_schema.tables " +
                                                        $"WHERE table_catalog = '{Schema}' " +
                                                        $"AND table_schema = 'public' " +
                                                        $"AND table_name = '{TableName}'") == 1;
    }

    /// <summary>
    ///     Creates the metadata table and its backing id sequence.
    /// </summary>
    protected override void InternalCreate()
    {
        string sequenceName = $"{TableName}_id_seq";
        string createSequenceSql = $"CREATE SEQUENCE \"{Schema}\".\"{sequenceName}\" MAXVALUE {Int32.MaxValue};";

        string createTableSql = $"CREATE TABLE \"{Schema}\".\"{TableName}\" " +
                                 "( " +
                                $"id INT4 PRIMARY KEY NOT NULL DEFAULT nextval('\"{Schema}\".\"{sequenceName}\"'), " +
                                 "type SMALLINT, " +
                                 "version VARCHAR(50), " +
                                 "description VARCHAR(200) NOT NULL, " +
                                 "name VARCHAR(300) NOT NULL, " +
                                 "checksum VARCHAR(32), " +
                                 "installed_by VARCHAR(100) NOT NULL, " +
                                 "installed_on TIMESTAMP NOT NULL DEFAULT now(), " +
                                 "success BOOLEAN NOT NULL " +
                                 ");";

        _database.WrappedConnection.ExecuteNonQuery(createSequenceSql + "\n" + createTableSql);
    }

    /// <summary>
    ///     Inserts one migration metadata row.
    /// </summary>
    protected override void InternalSave(MigrationMetadata metadata)
    {
        // NOTE(review): name is normalized to 1000 chars but the column is
        // VARCHAR(300) — confirm the intended limit.
        string sql = $"INSERT INTO \"{Schema}\".\"{TableName}\" (type, version, description, name, checksum, installed_by, success) VALUES" +
                      "( " +
                     $"{(int)metadata.Type}, " +
                     $"{(metadata.Version is null ? "null" : $"'{metadata.Version}'")}, " +
                     $"'{metadata.Description.NormalizeSqlString(200)}', " +
                     $"'{metadata.Name.NormalizeSqlString(1000)}', " +
                     $"'{metadata.Checksum}', " +
                     // BUG FIX: installed_by was previously emitted without
                     // quotes, producing invalid SQL for any user name
                     // (every other call site quotes this value).
                     $"'{_database.CurrentUser}', " +
                     $"{(metadata.Success ? "true" : "false")}" +
                      ")";

        _database.WrappedConnection.ExecuteNonQuery(sql);
    }

    /// <summary>
    ///     Rewrites the stored checksum of an existing migration row.
    /// </summary>
    protected override void <API key>(int migrationId, string checksum)
    {
        string sql = $"UPDATE \"{Schema}\".\"{TableName}\" " +
                     $"SET checksum = '{checksum}' " +
                     $"WHERE id = {migrationId}";

        _database.WrappedConnection.ExecuteNonQuery(sql);
    }

    /// <summary>
    ///     Reads back every stored migration row as MigrationMetadata.
    /// </summary>
    protected override IEnumerable<MigrationMetadata> <API key>()
    {
        string sql = $"SELECT id, type, version, description, name, checksum, installed_by, installed_on, success FROM \"{Schema}\".\"{TableName}\"";
        return _database.WrappedConnection.QueryForList(sql, r =>
        {
            return new MigrationMetadata(r[2] as string, r.GetString(3), r.GetString(4), (MetadataType)r.GetInt16(1))
            {
                Id = r.GetInt32(0),
                Checksum = r.GetString(5),
                InstalledBy = r.GetString(6),
                InstalledOn = r.GetDateTime(7),
                Success = r.GetBoolean(8)
            };
        });
    }
}
} |
# Merge requests
Merge requests allow you to exchange changes you made to source code and
collaborate with other people on the same project.
## Authorization for merge requests
There are two main ways to have a merge request flow with GitLab:
1. Working with [protected branches][] in a single repository
1. Working with forks of an authoritative project
[Learn more about the authorization for merge requests.](<API key>.md)
## Cherry-pick changes
Cherry-pick any commit in the UI by simply clicking the **Cherry-pick** button
in a merged merge request or a commit.
[Learn more about cherry-picking changes.](cherry_pick_changes.md)
## Merge when pipeline succeeds
When reviewing a merge request that looks ready to merge but still has one or
more CI jobs running, you can set it to be merged automatically when CI
pipeline succeeds. This way, you don't have to wait for the pipeline to finish
and remember to merge the request manually.
[Learn more about merging when pipeline succeeds.](<API key>.md)
## Resolve discussion comments in merge requests reviews
Keep track of the progress during a code review with resolving comments.
Resolving comments prevents you from forgetting to address feedback and lets
you hide discussions that are no longer relevant.
[Read more about resolving discussion comments in merge requests reviews.](<API key>.md)
## Resolve conflicts
When a merge request has conflicts, GitLab may provide the option to resolve
those conflicts in the GitLab UI.
[Learn more about resolving merge conflicts in the UI.](resolve_conflicts.md)
## Revert changes
GitLab implements Git's powerful feature to revert any commit by introducing
a **Revert** button in merge requests and commit details.
[Learn more about reverting changes in the UI](revert_changes.md)
## Merge requests versions
Every time you push to a branch that is tied to a merge request, a new version
of merge request diff is created. When you visit a merge request that contains
more than one push, you can select and compare the versions of those merge
request diffs.
[Read more about the merge requests versions.](versions.md)
## Work In Progress merge requests
To prevent merge requests from accidentally being accepted before they're
completely ready, GitLab blocks the "Accept" button for merge requests that
have been marked as a **Work In Progress**.
[Learn more about setting a merge request as "Work In Progress".](<API key>.md)
## Ignore whitespace changes in Merge Request diff view
If you click the **Hide whitespace changes** button, you can see the diff
without whitespace changes (if there are any). This is also working when on a
specific commit page.

>**Tip:**
You can append `?w=1` while on the diffs page of a merge request to ignore any
whitespace changes.
## Tips
Here are some tips that will help you be more efficient with merge requests in
the command line.
> **Note:**
This section might move in its own document in the future.
Checkout merge requests locally
A merge request contains all the history from a repository, plus the additional
commits added to the branch associated with the merge request. Here's a few
tricks to checkout a merge request locally.
Please note that you can checkout a merge request locally even if the source
project is a fork (even a private fork) of the target project.
# Checkout locally by adding a git alias
Add the following alias to your `~/.gitconfig`:
[alias]
mr = !sh -c 'git fetch $1 merge-requests/$2/head:mr-$1-$2 && git checkout mr-$1-$2' -
Now you can check out a particular merge request from any repository and any
remote. For example, to check out the merge request with ID 5 as shown in GitLab
from the `upstream` remote, do:
git mr upstream 5
This will fetch the merge request into a local `mr-upstream-5` branch and check
it out.
# Checkout locally by modifying `.git/config` for a given repository
Locate the section for your GitLab remote in the `.git/config` file. It looks
like this:
[remote "origin"]
url = https://gitlab.com/gitlab-org/gitlab-ce.git
fetch = +refs/merge-requests/*/head:refs/remotes/origin/merge-requests/*
fetch = +refs/merge-requests/*/head:refs/remotes/origin/merge-requests/* |
# README
Simple PHP function which checks the price of a given SMS number (PL) and
returns it with or without specified tax rate.
## Basic usage
// We need to include our function
include_once(__DIR__.'/GetSmsCost/GetSmsCost.php');
// Getting price with 23% TAX
$priceWithTax = getSmsCost('925123', 0.23);
// Getting price without TAX
$priceWithoutTax = getSmsCost('925123');
## Unit testing
To run unit tests just execute the following command:
php phpunit.phar /path/to/GetSmsCost/GetSmsCostTest.php
The files in this archive are released under the [MIT LICENSE](LICENSE). |
#include "ISFileManager.h"
#include <regex>
#if PLATFORM_IS_EVB_2
#include <ff.h>
#else
#include <sys/stat.h>
#include <cstdio>
#include <time.h>
#endif
#if PLATFORM_IS_LINUX || PLATFORM_IS_APPLE
#include <sys/statvfs.h>
#endif
namespace ISFileManager {
// Returns true when `path` exists and refers to a directory.
// On EVB-2 hardware this uses FatFs (f_stat / AM_DIR); everywhere else it
// uses POSIX stat() and S_ISDIR. A nonexistent path yields false.
bool PathIsDir(const std::string& path)
{
#if PLATFORM_IS_EVB_2
    FILINFO info;
    FRESULT result = f_stat(path.c_str(), &info);
    return (result == FR_OK) && (info.fattrib & AM_DIR);
#else
    struct stat sb;
    return (stat(path.c_str(), &sb) == 0 && S_ISDIR(sb.st_mode));
#endif
}
bool <API key>(const std::string& directory, bool recursive, const std::string& regexPattern, std::vector<std::string>& files)
{
size_t startSize = files.size();
std::regex* rePtr = NULL;
std::regex re;
if (regexPattern.length() != 0)
{
re = std::regex(regexPattern, std::regex::icase);
rePtr = &re;
}
#if PLATFORM_IS_EVB_2
{
FRESULT result;
FILINFO info;
DIR dir;
char *file_name;
#if _USE_LFN
static char longFileName[_MAX_LFN + 1];
info.lfname = longFileName;
info.lfsize = sizeof(longFileName);
#endif
result = f_opendir(&dir, directory.c_str());
if (result == FR_OK) {
while (true) {
result = f_readdir(&dir, &info);
if (result != FR_OK || info.fname[0] == 0)
{
break;
}
#if _USE_LFN
file_name = *info.lfname ? info.lfname : info.fname;
#else
file_name = info.fname;
#endif
std::string full_file_name = directory + "/" + file_name;
if (file_name[0] == '.' || (rePtr != NULL && !regex_search(full_file_name, re)))
{
continue;
}
else if (info.fattrib & AM_DIR) {
if (recursive)
{
<API key>(full_file_name, true, files);
}
continue;
}
files.push_back(full_file_name);
}
}
}
#elif PLATFORM_IS_WINDOWS
HANDLE dir;
WIN32_FIND_DATAA file_data;
/* Get volume information and free clusters of drive 1 */ |
import os.path
import tempfile
from time import time
from streaming_form_data import <API key>
from streaming_form_data.targets import FileTarget, ValueTarget
from tornado.ioloop import IOLoop
from tornado.web import Application, RequestHandler, stream_request_body
one_hundred_gb = 100 * 1024 * 1024 * 1024
@stream_request_body
class UploadHandler(RequestHandler):
    """Streams a multipart upload to disk instead of buffering it in memory.

    The ``name`` form field is collected in memory (ValueTarget); the
    ``file`` field is written to a uniquely-named temp file (FileTarget)
    as chunks arrive.
    """

    def prepare(self):
        # Lift tornado's default body-size cap so very large uploads pass.
        self.request.connection.set_max_body_size(one_hundred_gb)

        name = '<API key>-{}.dat'.format(int(time()))

        self.value = ValueTarget()
        self.file_ = FileTarget(os.path.join(tempfile.gettempdir(), name))

        # Route each multipart field to its target as the body streams in.
        self._parser = <API key>(headers=self.request.headers)
        self._parser.register('name', self.value)
        self._parser.register('file', self.file_)

    def data_received(self, chunk):
        # Called by tornado for every body chunk; feed it to the parser.
        self._parser.data_received(chunk)

    def post(self):
        self.render(
            'upload.html', name=self.value.value, filename=self.file_.filename
        )
class IndexHandler(RequestHandler):
    """Serves the static upload form."""

    def get(self):
        self.render('index.html')
def main():
    """Wire up the handlers and run the server on localhost:9999 forever."""
    handlers = [(r'/', IndexHandler), (r'/upload', UploadHandler)]
    settings = dict(debug=True, template_path=os.path.dirname(__file__))

    app = Application(handlers, **settings)
    app.listen(9999, address='localhost')

    # BUG FIX: `IOLoop().current()` constructed a brand-new IOLoop only to
    # then look up the current one; call the class method directly.
    IOLoop.current().start()
if __name__ == '__main__':
    # Announce before entering the blocking event loop.
    print('Listening on localhost:9999')
    main()
<?php
namespace Bellwether\BWCMSBundle\Classes\Service;
use Symfony\Component\DependencyInjection\ContainerInterface;
use Symfony\Component\HttpFoundation\RequestStack;
use Bellwether\BWCMSBundle\Classes\Base\BaseService;
use Bellwether\BWCMSBundle\Entity\PreferenceEntity;
use Bellwether\BWCMSBundle\Entity\ContentEntity;
use Bellwether\BWCMSBundle\Entity\SearchEntity;
use Bellwether\BWCMSBundle\Classes\Constants\PreferenceFieldType;
use Bellwether\BWCMSBundle\Classes\Content\ContentType;
use Bellwether\BWCMSBundle\Classes\Constants\ContentFieldType;
use Bellwether\BWCMSBundle\Classes\Constants\ContentPublishType;
use Bellwether\Common\Pagination;
class SearchService extends BaseService
{
private $commonWords = null;
function __construct(ContainerInterface $container = null, RequestStack $request_stack = null)
{
$this->setContainer($container);
$this->setRequestStack($request_stack);
}
/**
* @return SearchService
*/
public function getManager()
{
return $this;
}
/**
* Service Init.
*/
public function init()
{
if (!$this->loaded) {
}
$this->loaded = true;
}
/**
* @param string $searchString
* @param Pagination $pager
* @param null $type
* @param null $schema
* @return Pagination
*/
public function searchIndex($searchString, Pagination $pager, $type = null, $schema = null)
{
if (!empty($searchString)) {
$searchString = filter_var($searchString, <API key>);
}
$searchString = $this->search()->cleanText($searchString);
$searchWords = explode(' ', $searchString);
$searchWords = array_map('trim', $searchWords);
$searchWords = array_filter($searchWords);
if (empty($searchWords)) {
return $pager;
}
$searchRepo = $this->getSearchRepository();
$qb = $searchRepo->createQueryBuilder('si');
$qb->leftJoin('si.content', 'c');
$qb->leftJoin('si.site', 's');
$qb->select('si', 'c');
$searchLikeExp = $qb->expr()->orX();
foreach ($searchWords as $index => $value) {
$searchLikeExp->add($qb->expr()->like('si.keywords', ':KEYWORD_' . $index));
$qb->setParameter(':KEYWORD_' . $index, '%' . $value . '%');
}
$qb->andWhere($searchLikeExp);
$qb->andWhere(" c.status ='" . ContentPublishType::Published . "' ");
$qb->andWhere(" si.site ='" . $this->sm()->getCurrentSite()->getId() . "' ");
$qb->add('orderBy', 'c.publishDate DESC');
$qb->setFirstResult($pager->getStart());
$qb->setMaxResults($pager->getLimit());
$result = $qb->getQuery()->getResult();
$pager->setItems($result);
$qb2 = clone $qb; // don't modify existing query
$qb2->resetDQLPart('orderBy');
$qb2->resetDQLPart('having');
$qb2->select('COUNT(si) AS cnt');
$countResult = $qb2->getQuery()->setFirstResult(0)->getScalarResult();
$totalCount = $countResult[0]['cnt'];
$pager->setTotalItems($totalCount);
return $pager;
}
public function runIndex()
{
$indexContentTypes = $this->cm()-><API key>();
if (empty($indexContentTypes)) {
return;
}
$contentRepository = $this->cm()-><API key>();
$qb = $contentRepository->createQueryBuilder('node');
$qb->add('orderBy', 'node.modifiedDate ASC');
$qb->add('orderBy', 'node.createdDate ASC');
/**
* @var ContentType $contentType
*/
$condition = array();
foreach ($indexContentTypes as $contentType) {
$condition[] = " (node.type = '" . $contentType->getType() . "' AND node.schema = '" . $contentType->getSchema() . "' )";
}
if (!empty($condition)) {
$qb->andWhere(' ( ' . implode(' OR ', $condition) . ' ) ');
}
$qb->andWhere(
$qb->expr()->orX(
$qb->expr()->andX(
$qb->expr()->isNull('node.modifiedDate'),
$qb->expr()->gt('node.createdDate', ':date_modified')),
$qb->expr()->gt('node.modifiedDate', ':date_modified'))
);
$qb->setParameter('date_modified', $this->getLastIndexedDate(), \Doctrine\DBAL\Types\Type::DATETIME);
$qb->setFirstResult(0);
// $qb->setMaxResults(99999);
$result = $qb->getQuery()->getResult();
$<API key> = new \DateTime();
if (!empty($result)) {
/**
* @var ContentEntity $content
*/
foreach ($result as $content) {
print "Indexing: {" . $content->getId() . "} - " . $content->getTitle() . "\n";
$this->indexContent($content);
}
$<API key> = $content->getModifiedDate();
if (empty($<API key>)) {
$<API key> = $content->getCreatedDate();
}
}
$this->saveLastIndexDate($<API key>);
}
public function dropSearchIndex()
{
$connection = $this->em()->getConnection();
$platform = $connection->getDatabasePlatform();
print "Emptying search index.\n";
$connection->executeUpdate($platform->getTruncateTableSQL('BWSearch', false));
print "Emptying search last search date.\n";
$this->saveLastIndexDate(new \DateTime('@0'));
}
public function indexContent(ContentEntity $content = null)
{
if (is_null($content)) {
return;
}
$contentClass = $this->cm()->getContentClass($content->getType(), $content->getSchema());
if (!$contentClass->isIndexed()) {
return;
}
$contentFields = $this->cm()->getContentFields();
$indexedFields = $contentClass->getIndexedFields();
$indexCoreFields = array_intersect($contentFields, $indexedFields);
$indexMetaFields = array_diff($indexedFields, $contentFields);
$indexText = null;
if (!empty($indexCoreFields)) {
foreach ($indexCoreFields as $field) {
$indexText = $indexText . ' ' . $content->{'get' . $field}(true);
}
}
if (!empty($indexMetaFields)) {
$metaFields = $this->cm()->getContentAllMeta($content);
foreach ($indexMetaFields as $field) {
if (isset($metaFields[$field])) {
$fieldType = $contentClass->getFieldType($field);
if ($fieldType == ContentFieldType::String) {
$indexText = $indexText . ' ' . $metaFields[$field];
} else {
$indexText = $indexText . ' ' . $contentClass-><API key>($field, $metaFields[$field]);
}
}
}
}
$searchEntity = $this->searchIndexEntity($content);
if (is_null($indexText)) {
if (!is_null($searchEntity)) {
$this->em()->remove($searchEntity);
$this->em()->flush();
}
return;
}
if (is_null($searchEntity)) {
$searchEntity = new SearchEntity();
$searchEntity->setContent($content);
$searchEntity->setSite($content->getSite());
}
$indexText = $this->cleanText($indexText);
$searchEntity->setKeywords($indexText);
$searchEntity->setIndexedDate(new \DateTime());
$this->em()->persist($searchEntity);
$this->em()->flush();
return true;
}
public function searchIndexEntity(ContentEntity $content)
{
$criteria = array(
'site' => $content->getSite(),
'content' => $content
);
return $this->getSearchRepository()->findOneBy($criteria);
}
public function cleanText($string)
{
//remove html
$string = strip_tags($string);
//remove html entites
$string = preg_replace("/&#?[a-z0-9]+;/i", "", $string);
//$only words
//$string = preg_replace('/\W+/', ' ', $string);
//unique iy
$arrayWords = explode(" ", $string);
$arrayWords = array_unique($arrayWords);
$string = implode(" ", $arrayWords);
//remove stop words
$string = preg_replace('/\b(' . implode('|', $this->getCommonWords()) . ')\b/i', '', $string);
$string = str_replace(' ', ' ', $string);
$string = str_replace(' ', ' ', $string);
return $string;
}
public function getCommonWords()
{
if (!is_null($this->commonWords)) {
return $this->commonWords;
}
// MYSQL FULLtext Stop words
$this->commonWords = array(" a ", "a's", "able", "about", "above", "according", "accordingly", "across", "actually",
"after", "afterwards", "again", "against", "ain't", "all", "allow", "allows", "almost", "alone", "along",
"already", "also", "although", "always", "am", "among", "amongst", "an", "and", "another", "any", "anybody",
"anyhow", "anyone", "anything", "anyway", "anyways", "anywhere", "apart", "appear", "appreciate", "appropriate",
"are", "aren't", "around", "as", "aside", "ask", "asking", "associated", "at", "available", "away", "awfully",
"be", "became", "because", "become", "becomes", "becoming", "been", "before", "beforehand", "behind", "being",
"believe", "below", "beside", "besides", "best", "better", "between", "beyond", "both", "brief", "but", "by",
"c'mon", "c's", "came", "can", "can't", "cannot", "cant", "cause", "causes", "certain", "certainly", "changes",
"clearly", "co", "com", "come", "comes", "concerning", "consequently", "consider", "considering", "contain",
"containing", "contains", "corresponding", "could", "couldn't", "course", "currently", "definitely", "described",
"despite", "did", "didn't", "different", "do", "does", "doesn't", "doing", "don't", "done", "down", "downwards",
"during", "each", "edu", "eg", "eight", "either", "else", "elsewhere", "enough", "entirely", "especially", "et",
"etc", "even", "ever", "every", "everybody", "everyone", "everything", "everywhere", "ex", "exactly", "example",
"except", "far", "few", "fifth", "first", "five", "followed", "following", "follows", "for", "former", "formerly",
"forth", "four", "from", "further", "furthermore", "get", "gets", "getting", "given", "gives", "go", "goes", "going",
"gone", "got", "gotten", "greetings", "had", "hadn't", "happens", "hardly", "has", "hasn't", "have", "haven't", "having",
"he", "he's", "hello", "help", "hence", "her", "here", "here's", "hereafter", "hereby", "herein", "hereupon", "hers",
"herself", "hi", "him", "himself", "his", "hither", "hopefully", "how", "howbeit", "however", "i'd", "i'll", "i'm",
"i've", "ie", "if", "ignored", "immediate", "in", "inasmuch", "inc", "indeed", "indicate", "indicated", "indicates",
"inner", "insofar", "instead", "into", "inward", "is", "isn't", "it", "it'd", "it'll", "it's", "its", "itself", "just",
"keep", "keeps", "kept", "know", "knows", "known", "last", "lately", "later", "latter", "latterly", "least", "less", "lest",
"let", "let's", "like", "liked", "likely", "little", "look", "looking", "looks", "ltd", "mainly", "many", "may", "maybe", "me",
"mean", "meanwhile", "merely", "might", "more", "moreover", "most", "mostly", "much", "must", "my", "myself", "name", "namely",
"nd", "near", "nearly", "necessary", "need", "needs", "neither", "never", "nevertheless", "new", "next", "nine", "no", "nobody",
"non", "none", "noone", "nor", "normally", "not", "nothing", "novel", "now", "nowhere", "obviously", "of", "off", "often", "oh",
"ok", "okay", "old", "on", "once", "one", "ones", "only", "onto", "or", "other", "others", "otherwise", "ought", "our", "ours",
"ourselves", "out", "outside", "over", "overall", "own", "particular", "particularly", "per", "perhaps", "placed", "please", "plus",
"possible", "presumably", "probably", "provides", "que", "quite", "qv", "rather", "rd", "re", "really", "reasonably", "regarding",
"regardless", "regards", "relatively", "respectively", "right", "said", "same", "saw", "say", "saying", "says", "second", "secondly",
"see", "seeing", "seem", "seemed", "seeming", "seems", "seen", "self", "selves", "sensible", "sent", "serious", "seriously", "seven",
"several", "shall", "she", "should", "shouldn't", "since", "six", "so", "some", "somebody", "somehow", "someone", "something", "sometime",
"sometimes", "somewhat", "somewhere", "soon", "sorry", "specified", "specify", "specifying", "still", "sub", "such", "sup", "sure",
"t's", "take", "taken", "tell", "tends", "th", "than", "thank", "thanks", "thanx", "that", "that's", "thats", "the", "their",
"theirs", "them", "themselves", "then", "thence", "there", "there's", "thereafter", "thereby", "therefore", "therein", "theres",
"thereupon", "these", "they", "they'd", "they'll", "they're", "they've", "think", "third", "this", "thorough", "thoroughly",
"those", "though", "three", "through", "throughout", "thru", "thus", "to", "together", "too", "took", "toward", "towards", "tried",
"tries", "truly", "try", "trying", "twice", "two", "un", "under", "unfortunately", "unless", "unlikely", "until", "unto", "up",
"upon", "us", "use", "used", "useful", "uses", "using", "usually", "value", "various", "very", "via", "viz", "vs", "want", "wants",
"was", "wasn't", "way", "we", "we'd", "we'll", "we're", "we've", "welcome", "well", "went", "were", "weren't", "what", "what's",
"whatever", "when", "whence", "whenever", "where", "where's", "whereafter", "whereas", "whereby", "wherein", "whereupon", "wherever",
"whether", "which", "while", "whither", "who", "who's", "whoever", "whole", "whom", "whose", "why", "will", "willing", "wish", "with",
"within", "without", "won't", "wonder", "would", "wouldn't", "yes", "yet", "you", "you'd", "you'll", "you're", "you've", "your",
"yours", "yourself", "yourselves", "zer");
return $this->commonWords;
}
    /**
     * Fetches the timestamp of the last successful search indexing run.
     *
     * The timestamp is stored as a site-agnostic preference entity keyed by
     * the internal '_SEARCH_LIT_' field. When no such preference exists yet,
     * the Unix epoch is returned so a first indexing run considers everything.
     *
     * @return \DateTime last indexed date, or epoch (1970-01-01) when never indexed
     */
    public function getLastIndexedDate()
    {
        // Lookup key for the "last indexed" preference record.
        $criteria = array(
            'field' => '_SEARCH_LIT_',
            'fieldType' => PreferenceFieldType::Internal,
            'type' => '_SEARCH_',
            'site' => null
        );
        /**
         * @var \Bellwether\BWCMSBundle\Entity\PreferenceEntity $preferenceEntity
         */
        $preferenceRepo = $this->pref()-><API key>();
        $preferenceEntity = $preferenceRepo->findOneBy($criteria);
        if (is_null($preferenceEntity)) {
            // Never indexed before: '@0' is the Unix epoch.
            return new \DateTime('@0');
        }
        return new \DateTime($preferenceEntity->getValue());
    }
    /**
     * Persists the given timestamp as the "last indexed" marker.
     *
     * Creates the preference entity on first use, otherwise updates the
     * existing record in place. The value is stored as an ISO-8601 string
     * (readable back by getLastIndexedDate()).
     *
     * @param \DateTime $dateTime moment the indexing run completed
     * @return bool always true once the entity has been flushed
     */
    public function saveLastIndexDate(\DateTime $dateTime)
    {
        // Same lookup key used by getLastIndexedDate().
        $criteria = array(
            'field' => '_SEARCH_LIT_',
            'fieldType' => PreferenceFieldType::Internal,
            'type' => '_SEARCH_',
            'site' => null
        );
        /**
         * @var \Bellwether\BWCMSBundle\Entity\PreferenceEntity $preferenceEntity
         */
        $preferenceRepo = $this->pref()-><API key>();
        $preferenceEntity = $preferenceRepo->findOneBy($criteria);
        if (is_null($preferenceEntity)) {
            // First save: build the preference record with the shared key fields.
            $preferenceEntity = new PreferenceEntity();
            $preferenceEntity->setField('_SEARCH_LIT_');
            $preferenceEntity->setType('_SEARCH_');
            $preferenceEntity->setFieldType(PreferenceFieldType::Internal);
            $preferenceEntity->setSite(null);
        }
        $dateString = $dateTime->format(\DateTime::ISO8601);
        $preferenceEntity->setValue($dateString);
        $this->em()->persist($preferenceEntity);
        $this->em()->flush();
        return true;
    }
/**
* @return \Bellwether\BWCMSBundle\Entity\SearchRepository
*/
public function getSearchRepository()
{
return $this->em()->getRepository('BWCMSBundle:SearchEntity');
}
} |
package command;
public class ForwardCommand extends MoveCommand {
public ForwardCommand (String s) {
super(s, 1);
}
@Override
public int getDirection () {
return FORWARD;
}
} |
<!DOCTYPE html>
<!-- lang attribute aids screen readers and search engines. -->
<html lang="en">
<head>
<!-- Declare the encoding explicitly so browsers never have to guess. -->
<meta charset="utf-8">
<title>Page Title</title>
<link rel="stylesheet" href="style.css">
</head>
<body>
<!-- alt text keeps the page accessible when the image cannot be displayed. -->
<img src="w3schools-logo.jpg" alt="W3Schools logo">
<h1>This is a Heading</h1>
<p>This is a paragraph.</p>
</body>
</html>
require 'spec_helper'
# Specs for the PNG presentation of a Gliffy document: content fetching is
# delegated to the API facade, and each named size helper wraps #content.
describe Gliffy::Document::Presentation::PNG do
  # Shared fixtures: canned bytes, a stubbed document, and the subject.
  let(:content) { "SAMPLE CONTENT" }
  let(:document) { double(Gliffy::Document) }
  let(:png) { Gliffy::Document::Presentation::PNG.new(document) }
  <API key> "a document presentation" do
    let(:presentation) { png }
  end
  it "has content" do
    expect(png).to respond_to :content
  end
  # The PNG presentation must not fetch bytes itself; it asks the API facade
  # using the owner account id, document id and the requested size code.
  it "delegates task of fetching image content to the API facade" do
    account = double(Gliffy::Account, :id => 11)
    api = double(Gliffy::API)
    document.stub(:api).and_return(api)
    document.stub(:owner).and_return(account)
    document.stub(:id).and_return(22)
    api.should_receive(
      :raw
    ).with(
      "/accounts/11/documents/22.png",
      hash_including(
        :action => "get",
        :size => "T"
      )
    ).and_return(content)
    expect(png.content(Gliffy::Document::Presentation::PNG::SIZE_THUMBNAIL)).to eq content
  end
  # Each size helper below is a thin wrapper calling #content with the
  # matching SIZE_* constant.
  it "has a thumbnail" do
    png.stub(:content).and_return(content)
    expect(png.thumbnail).to eq content
    expect(png).to have_received(:content).with(
      Gliffy::Document::Presentation::PNG::SIZE_THUMBNAIL
    )
  end
  it "has a small image" do
    png.stub(:content).and_return(content)
    expect(png.small).to eq content
    expect(png).to have_received(:content).with(
      Gliffy::Document::Presentation::PNG::SIZE_SMALL
    )
  end
  it "has a medium image" do
    png.stub(:content).and_return(content)
    expect(png.medium).to eq content
    expect(png).to have_received(:content).with(
      Gliffy::Document::Presentation::PNG::SIZE_MEDIUM
    )
  end
  it "has a full image" do
    png.stub(:content).and_return(content)
    expect(png.full).to eq content
    expect(png).to have_received(:content).with(
      Gliffy::Document::Presentation::PNG::SIZE_FULL
    )
  end
end
"""Test flash_air_music.configuration functions/classes."""
import pytest
from flash_air_music import configuration, exceptions
from flash_air_music.__main__ import __doc__ as doc
def common_init(monkeypatch, tmpdir, skip_ffmpeg=False):
    """Perform common test setup.
    :param monkeypatch: pytest fixture.
    :param tmpdir: pytest fixture.
    :param bool skip_ffmpeg: Do not setup fake ffmpeg.
    :return: argv list, config dict, ffmpeg py.path
    :rtype: tuple
    """
    argv, config, ffmpeg = ['FlashAirMusic'], dict(), None
    # Set fake ffmpeg.
    if not skip_ffmpeg:
        # Empty executable file so the default --ffmpeg-bin validation passes.
        ffmpeg = tmpdir.ensure('ffmpeg')
        ffmpeg.chmod(0o0755)
        monkeypatch.setattr(configuration, '<API key>', ffmpeg)
    # Patch functions and global variables.
    monkeypatch.setattr(configuration, '<API key>', config)
    # Logging setup is irrelevant to these tests; stub it out.
    monkeypatch.setattr(configuration, 'setup_logging', lambda _: None)
    monkeypatch.setattr('sys.argv', argv)
    # Patch home dir and create default directories.
    monkeypatch.setenv('HOME', tmpdir)
    tmpdir.ensure_dir('fam_working_dir')
    tmpdir.ensure_dir('fam_music_source')
    return argv, config, ffmpeg
@pytest.mark.parametrize('bad', [False, True])
def test_config_file(monkeypatch, tmpdir, caplog, bad):
    """Test for DocoptcfgFileError in initialize_config().
    :param monkeypatch: pytest fixture.
    :param tmpdir: pytest fixture.
    :param caplog: pytest extension fixture
    :param bool bad: Error?
    """
    argv, config = common_init(monkeypatch, tmpdir)[:2]
    # Point FAM_CONFIG at a config file, valid or corrupt per the scenario.
    config_file = tmpdir.join('config.ini')
    monkeypatch.setenv('FAM_CONFIG', config_file)
    config_file.write('Bad File' if bad else '[FlashAirMusic]\nverbose = true')
    # Build the command line.
    argv += ['run', '--music-source', str(tmpdir.ensure_dir('source'))]
    if bad:
        # Invalid file: expect a ConfigError and a log record explaining why.
        with pytest.raises(exceptions.ConfigError):
            configuration.initialize_config(doc)
        records = [r.message for r in caplog.records]
        assert records[-1] == 'Config file specified but invalid: Unable to parse config file.'
    else:
        # Valid file: the value from the file must land in the config dict.
        configuration.initialize_config(doc)
        assert config['--verbose'] is True
@pytest.mark.parametrize('mode', ['not_used', 'used', 'no_parent', 'dir_perm', 'file_perm'])
def <API key>(monkeypatch, tmpdir, caplog, mode):
    """Test _validate_config() --log validation via initialize_config().
    :param monkeypatch: pytest fixture.
    :param tmpdir: pytest fixture.
    :param caplog: pytest extension fixture.
    :param str mode: Scenario to test for.
    """
    argv, config = common_init(monkeypatch, tmpdir)[:2]
    # Setup argv.
    argv.extend(['run', '--music-source', str(tmpdir.ensure_dir('source'))])
    if mode != 'not_used':
        argv.extend(['--log', str(tmpdir.join('parent', 'logfile.log'))])
    # Populate tmpdir.
    if mode != 'no_parent':
        tmpdir.ensure_dir('parent')
    if mode == 'dir_perm':
        # Read-only parent dir: the log file cannot be created inside it.
        tmpdir.join('parent').chmod(0o444)
    elif mode == 'file_perm':
        # Pre-existing log file that is not read/writable.
        tmpdir.ensure('parent', 'logfile.log').chmod(0o444)
    # Run.
    if mode in ('not_used', 'used'):
        # Happy paths: config initializes and --log reflects the CLI value.
        configuration.initialize_config(doc)
        if mode == 'used':
            assert config['--log'] == str(tmpdir.join('parent', 'logfile.log'))
        else:
            assert config['--log'] is None
        return
    # Run.
    with pytest.raises(exceptions.ConfigError):
        configuration.initialize_config(doc)
    # Verify.
    messages = [r.message for r in caplog.records]
    assert messages[-1].startswith('Log file')
    if mode == 'no_parent':
        assert messages[-1].endswith('not a directory.')
    elif mode == 'dir_perm':
        assert messages[-1].endswith('not writable.')
    else:
        assert messages[-1].endswith('not read/writable.')
@pytest.mark.parametrize('mode', ['good', 'default', 'dne', 'perm'])
def <API key>(monkeypatch, tmpdir, caplog, mode):
    """Test _validate_config() --music-source validation via initialize_config().
    :param monkeypatch: pytest fixture.
    :param tmpdir: pytest fixture.
    :param caplog: pytest extension fixture.
    :param str mode: Scenario to test for.
    """
    argv, config = common_init(monkeypatch, tmpdir)[:2]
    # Setup argv.
    argv.extend(['run'])
    if mode != 'default':
        argv.extend(['--music-source', str(tmpdir.join('music'))])
    # Populate tmpdir.
    if mode != 'dne':
        tmpdir.ensure_dir('music')
    if mode == 'perm':
        # Directory exists but is not accessible.
        tmpdir.join('music').chmod(0o444)
    # Run.
    if mode == 'good':
        configuration.initialize_config(doc)
        assert config['--music-source'] == str(tmpdir.join('music'))
        return
    elif mode == 'default':
        # No CLI flag: falls back to ~/fam_music_source created by common_init.
        configuration.initialize_config(doc)
        assert config['--music-source'] == str(tmpdir.join('fam_music_source'))
        return
    # Run.
    with pytest.raises(exceptions.ConfigError):
        configuration.initialize_config(doc)
    # Verify.
    messages = [r.message for r in caplog.records]
    if mode == 'dne':
        assert messages[-1].startswith('Music source directory does not exist')
    else:
        assert messages[-1].startswith('No access to music source directory')
@pytest.mark.parametrize('mode', ['specified', 'default', 'dne', 'perm', 'collision1', 'collision2'])
def <API key>(monkeypatch, tmpdir, caplog, mode):
    """Test _validate_config() --working-dir validation via initialize_config().
    :param monkeypatch: pytest fixture.
    :param tmpdir: pytest fixture.
    :param caplog: pytest extension fixture.
    :param str mode: Scenario to test for.
    """
    argv, config = common_init(monkeypatch, tmpdir)[:2]
    # Setup argv.
    argv.extend(['run'])
    if mode == 'collision1':
        # Working dir nested inside the music source.
        argv.extend(['--music-source', str(tmpdir)])
    elif mode == 'collision2':
        # Music source nested inside the working dir.
        argv.extend(['--music-source', str(tmpdir.ensure_dir('fam_working_dir', 'source'))])
    else:
        argv.extend(['--music-source', str(tmpdir.ensure_dir('source'))])
    if mode != 'default':
        argv.extend(['--working-dir', str(tmpdir.join('not_default'))])
    # Populate tmpdir.
    if mode != 'dne':
        tmpdir.ensure_dir('not_default')
    if mode == 'perm':
        tmpdir.join('not_default').chmod(0o444)
    # Run.
    if mode in ('specified', 'default'):
        configuration.initialize_config(doc)
        assert config['--working-dir'] == str(tmpdir.join('fam_working_dir' if mode == 'default' else 'not_default'))
        return
    # Run.
    with pytest.raises(exceptions.ConfigError):
        configuration.initialize_config(doc)
    # Verify.
    messages = [r.message for r in caplog.records]
    if mode == 'dne':
        assert messages[-1].startswith('Working directory does not exist')
    elif mode == 'perm':
        assert messages[-1].startswith('No access to working directory')
    elif mode == 'collision1':
        assert messages[-1] == 'Working directory cannot be in music source dir.'
    else:
        assert messages[-1] == 'Music source dir cannot be in working directory.'
@pytest.mark.parametrize('mode', ['missing', 'ip', 'hostname', 'bad'])
def <API key>(monkeypatch, tmpdir, caplog, mode):
    """Test _validate_config() --ip-addr validation via initialize_config().
    :param monkeypatch: pytest fixture.
    :param tmpdir: pytest fixture.
    :param caplog: pytest extension fixture.
    :param str mode: Scenario to test for.
    """
    argv, config = common_init(monkeypatch, tmpdir)[:2]
    # Setup argv.
    argv.extend(['run', '--music-source', str(tmpdir.ensure_dir('source'))])
    if mode == 'ip':
        argv.extend(['--ip-addr', '127.0.0.1'])
    elif mode == 'hostname':
        argv.extend(['--ip-addr', 'flashair.home.net'])
    elif mode == 'bad':
        # '@' is not valid in a hostname or IP address.
        argv.extend(['--ip-addr', 'Inv@lid'])
    # Run.
    if mode != 'bad':
        configuration.initialize_config(doc)
        if mode == 'missing':
            assert config['--ip-addr'] is None
        else:
            # argv[-1] is whichever address was appended above.
            assert config['--ip-addr'] == argv[-1]
        return
    # Run.
    with pytest.raises(exceptions.ConfigError):
        configuration.initialize_config(doc)
    # Verify.
    messages = [r.message for r in caplog.records]
    assert messages[-1] == 'Invalid hostname/IP address: Inv@lid'
@pytest.mark.parametrize('mode', ['default', '0', '1', '10', '5.5', 'a'])
def <API key>(monkeypatch, tmpdir, caplog, mode):
"""Test _validate_config() --threads validation via initialize_config().
:param monkeypatch: pytest fixture.
:param tmpdir: pytest fixture.
:param caplog: pytest extension fixture.
:param str mode: Scenario to test for.
"""
argv, config = common_init(monkeypatch, tmpdir)[:2]
# Setup argv.
argv.extend(['run', '--music-source', str(tmpdir.ensure_dir('source'))])
if mode != 'default':
argv.extend(['--threads', mode])
# Run.
if mode not in ('a', '5.5'):
configuration.initialize_config(doc)
assert config['--threads'] == '0' if mode == 'default' else mode
return
# Run.
with pytest.raises(exceptions.ConfigError):
configuration.initialize_config(doc)
# Verify.
messages = [r.message for r in caplog.records]
assert messages[-1] == 'Thread count must be a number: {}'.format(mode)
@pytest.mark.parametrize('mode', ['specified', 'default', 'default missing', 'dne', 'perm'])
def <API key>(monkeypatch, tmpdir, caplog, mode):
    """Test _validate_config() --ffmpeg-bin validation via initialize_config().
    :param monkeypatch: pytest fixture.
    :param tmpdir: pytest fixture.
    :param caplog: pytest extension fixture.
    :param str mode: Scenario to test for.
    """
    argv, config = common_init(monkeypatch, tmpdir, skip_ffmpeg=True)[:2]
    # Set fake ffmpeg.
    ffmpeg = tmpdir.join('ffmpeg')
    if mode != 'dne':
        ffmpeg.ensure()
    if mode != 'perm':
        # Executable bit only set outside the permission-error scenario.
        ffmpeg.chmod(0o0755)
    monkeypatch.setattr(configuration, '<API key>', ffmpeg if mode != 'default missing' else None)
    # Setup argv.
    argv.extend(['run', '--music-source', str(tmpdir.ensure_dir('source'))])
    if mode == 'specified':
        argv.extend(['--ffmpeg-bin', str(ffmpeg)])
    # Run.
    if mode in ('specified', 'default'):
        configuration.initialize_config(doc)
        assert config['--ffmpeg-bin'] == str(ffmpeg)
        return
    # Run.
    with pytest.raises(exceptions.ConfigError):
        configuration.initialize_config(doc)
    # Verify.
    messages = [r.message for r in caplog.records]
    if mode == 'dne':
        assert messages[-1].startswith('ffmpeg binary does not exist')
    elif mode == 'perm':
        assert messages[-1].startswith('No access to ffmpeg')
    else:
        assert messages[-1] == 'Unable to find ffmpeg in PATH.'
@pytest.mark.parametrize('mode', ['good', 'no_config', 'corrupted', 'bad_config'])
def test_update_config(monkeypatch, tmpdir, caplog, mode):
    """Test update_config() and SIGHUP handling.
    :param monkeypatch: pytest fixture.
    :param tmpdir: pytest fixture.
    :param caplog: pytest extension fixture.
    :param str mode: Scenario to test for.
    """
    argv, config, ffmpeg = common_init(monkeypatch, tmpdir)
    # Setup argv.
    argv.extend(['run', '--music-source', str(tmpdir.ensure_dir('source'))])
    if mode != 'no_config':
        argv.extend(['--config', str(tmpdir.join('config.ini'))])
        tmpdir.join('config.ini').write('[FlashAirMusic]\n')
    # Initialize.
    configuration.initialize_config(doc)
    # Setup config file.
    sample = tmpdir.join('sample.log')
    if mode == 'good':
        tmpdir.join('config.ini').write('[FlashAirMusic]\nlog = {}\nffmpeg-bin = {}'.format(sample, ffmpeg))
    elif mode == 'corrupted':
        # Binary garbage the INI parser cannot read.
        tmpdir.join('config.ini').write('\x00\x00\x00\x00'.encode(), mode='wb')
    elif mode == 'bad_config':
        # Parseable INI, but working-dir points at a file, failing validation.
        tmpdir.join('config.ini').write('[FlashAirMusic]\nworking-dir = {}'.format(ffmpeg))
    # Call.
    before = config.copy()
    # Second argument mimics the signal number passed by the SIGHUP handler.
    configuration.update_config(doc, 1)
    # Verify
    messages = [r.message for r in caplog.records]
    if mode == 'good':
        assert config['--log'] == str(sample)
        assert messages[-1] == 'Done reloading configuration.'
    elif mode == 'corrupted':
        # On any failure the previous configuration must remain untouched.
        assert config == before
        assert 'Unable to parse' in messages[-1]
    elif mode == 'no_config':
        assert config == before
        assert messages[-1] == 'No previously defined configuration file. Nothing to read.'
    else:
        assert config == before
        assert messages[-1].startswith('Working directory does not exist')
    assert [m for m in messages if 'Caught signal' in m]
/* globals msgStream */
import _ from 'underscore';
// Drop `username` from the given reaction's user list, deleting the whole
// reaction entry once nobody is left. Mutates and returns `message`.
const removeUserReaction = (message, reaction, username) => {
	const usernames = message.reactions[reaction].usernames;
	const position = usernames.indexOf(username);
	usernames.splice(position, 1);
	if (usernames.length === 0) {
		delete message.reactions[reaction];
	}
	return message;
};
Meteor.methods({
	// Adds or removes an emoji reaction on a message for the current user.
	// `shouldReact` forces the final state; when omitted, the reaction is
	// toggled. Returns false when the action is disallowed, undefined on
	// success or no-op. Throws Meteor.Error for auth/validation failures.
	setReaction(reaction, messageId, shouldReact) {
		if (!Meteor.userId()) {
			throw new Meteor.Error('error-invalid-user', 'Invalid user', { method: 'setReaction' });
		}
		const message = RocketChat.models.Messages.findOneById(messageId);
		if (!message) {
			throw new Meteor.Error('error-not-allowed', 'Not allowed', { method: 'setReaction' });
		}
		const room = Meteor.call('canAccessRoom', message.rid, Meteor.userId());
		if (!room) {
			throw new Meteor.Error('error-not-allowed', 'Not allowed', { method: 'setReaction' });
		}
		// Normalise to the canonical ':name:' form regardless of input colons.
		reaction = `:${ reaction.replace(/:/g, '') }:`;
		// Reject emoji that are neither built-in nor custom.
		if (!RocketChat.emoji.list[reaction] && RocketChat.models.EmojiCustom.findByNameOrAlias(reaction).count() === 0) {
			throw new Meteor.Error('error-not-allowed', 'Invalid emoji provided.', { method: 'setReaction' });
		}
		const user = Meteor.user();
		// Muted users may not react unless the room allows reactions while
		// read-only; notify them and bail out.
		if (Array.isArray(room.muted) && room.muted.indexOf(user.username) !== -1 && !room.reactWhenReadOnly) {
			RocketChat.Notifications.notifyUser(Meteor.userId(), 'message', {
				_id: Random.id(),
				rid: room._id,
				ts: new Date(),
				msg: TAPi18n.__('You_have_been_muted', {}, user.language)
			});
			return false;
		} else if (!RocketChat.models.Subscriptions.findOne({ rid: message.rid })) {
			// Not subscribed to the room: silently refuse.
			return false;
		}
		const userAlreadyReacted = Boolean(message.reactions) && Boolean(message.reactions[reaction]) && message.reactions[reaction].usernames.indexOf(user.username) !== -1;
		// When shouldReact was not informed, toggle the reaction.
		if (shouldReact === undefined) {
			shouldReact = !userAlreadyReacted;
		}
		if (userAlreadyReacted === shouldReact) {
			// Already in the requested state; nothing to do.
			return;
		}
		if (userAlreadyReacted) {
			removeUserReaction(message, reaction, user.username);
			if (_.isEmpty(message.reactions)) {
				// Last reaction removed: clear the field entirely.
				delete message.reactions;
				RocketChat.models.Messages.unsetReactions(messageId);
				RocketChat.callbacks.run('unsetReaction', messageId, reaction);
			} else {
				RocketChat.models.Messages.setReactions(messageId, message.reactions);
				RocketChat.callbacks.run('setReaction', messageId, reaction);
			}
		} else {
			if (!message.reactions) {
				message.reactions = {};
			}
			if (!message.reactions[reaction]) {
				message.reactions[reaction] = {
					usernames: []
				};
			}
			message.reactions[reaction].usernames.push(user.username);
			RocketChat.models.Messages.setReactions(messageId, message.reactions);
			RocketChat.callbacks.run('setReaction', messageId, reaction);
		}
		// Push the updated message to connected clients.
		msgStream.emit(message.rid, message);
		return;
	}
});
# Rails::Clean::Logs
TODO: Write a gem description
## Installation
Add this line to your application's Gemfile:
gem 'rails-clean-logs'
And then execute:
$ bundle
Or install it yourself as:
$ gem install rails-clean-logs
## Usage
TODO: Write usage instructions here
## Contributing
1. Fork it
2. Create your feature branch (`git checkout -b my-new-feature`)
3. Commit your changes (`git commit -am 'Added some feature'`)
4. Push to the branch (`git push origin my-new-feature`)
5. Create a new Pull Request
var should = require('should'),
sinon = require('sinon'),
_ = require('lodash'),
Promise = require('bluebird'),
hbs = require('express-hbs'),
utils = require('./utils'),
configUtils = require('../../utils/configUtils'),
moment = require('moment'),
// Stuff we are testing
handlebars = hbs.handlebars,
helpers = require('../../../server/helpers'),
api = require('../../../server/api'),
labs = require('../../../server/utils/labs'),
sandbox = sinon.sandbox.create();
describe('{{ghost_head}} helper', function () {
var settingsReadStub;
before(function () {
utils.loadHelpers();
});
afterEach(function () {
sandbox.restore();
configUtils.restore();
});
// TODO: stub `getImageDimensions` to make things faster
beforeEach(function () {
settingsReadStub = sandbox.stub(api.settings, 'read').returns(new Promise.resolve({
settings: [
{value: ''}
]
}));
sandbox.stub(api.clients, 'read').returns(new Promise.resolve({
clients: [
{slug: 'ghost-frontend', secret: 'a1bcde23cfe5', status: 'enabled'}
]
}));
sandbox.stub(labs, 'isSet').returns(true);
});
describe('without Code Injection', function () {
beforeEach(function () {
configUtils.set({
url: 'http://testurl.com/',
theme: {
title: 'Ghost',
description: 'blog description',
cover: '/content/images/blog-cover.png',
amp: true
}
});
});
it('has loaded ghost_head helper', function () {
should.exist(handlebars.helpers.ghost_head);
});
it('returns meta tag string on paginated index page without structured data and schema', function (done) {
helpers.ghost_head.call(
{safeVersion: '0.3', relativeUrl: '/page/2/', context: ['paged', 'index']},
{data: {root: {context: ['paged', 'index']}}}
).then(function (rendered) {
should.exist(rendered);
rendered.string.should.match(/<link rel="canonical" href="http:\/\/testurl.com\/page\/2\/" \/>/);
rendered.string.should.match(/<meta name="generator" content="Ghost 0.3" \/>/);
rendered.string.should.match(/<link rel="alternate" type="application\/rss\+xml" title="Ghost" href="http:\/\/testurl.com\/rss\/" \/>/);
rendered.string.should.not.match(/<meta property="og/);
rendered.string.should.not.match(/<script type=\"application\/ld\+json\">/);
done();
}).catch(done);
});
it('returns structured data on first index page', function (done) {
helpers.ghost_head.call(
{safeVersion: '0.3', relativeUrl: '/', context: ['home', 'index']},
{data: {root: {context: ['home', 'index']}}}
).then(function (rendered) {
should.exist(rendered);
rendered.string.should.match(/<link rel="canonical" href="http:\/\/testurl.com\/" \/>/);
rendered.string.should.match(/<meta name="referrer" content="<API key>" \/>/);
rendered.string.should.match(/<meta property="og:site_name" content="Ghost" \/>/);
rendered.string.should.match(/<meta property="og:type" content="website" \/>/);
rendered.string.should.match(/<meta property="og:title" content="Ghost" \/>/);
rendered.string.should.match(/<meta property="og:description" content="blog description" \/>/);
rendered.string.should.match(/<meta property="og:url" content="http:\/\/testurl.com\/" \/>/);
rendered.string.should.match(/<meta property="og:image" content="http:\/\/testurl.com\/content\/images\/blog-cover.png" \/>/);
rendered.string.should.match(/<meta name="twitter:card" content="summary_large_image" \/>/);
rendered.string.should.match(/<meta name="twitter:title" content="Ghost" \/>/);
rendered.string.should.match(/<meta name="<TwitterConsumerkey>" content="blog description" \/>/);
rendered.string.should.match(/<meta name="twitter:url" content="http:\/\/testurl.com\/" \/>/);
rendered.string.should.match(/<meta name="twitter:image" content="http:\/\/testurl.com\/content\/images\/blog-cover.png" \/>/);
rendered.string.should.match(/<meta name="generator" content="Ghost 0.3" \/>/);
rendered.string.should.match(/<link rel="alternate" type="application\/rss\+xml" title="Ghost" href="http:\/\/testurl.com\/rss\/" \/>/);
rendered.string.should.match(/<script type=\"application\/ld\+json\">/);
rendered.string.should.match(/"@context": "https:\/\/schema.org"/);
rendered.string.should.match(/"@type": "Website"/);
rendered.string.should.match(/"publisher": {\n "@type": "Organization",\n "name": "Ghost",/);
rendered.string.should.match(/"url": "http:\/\/testurl.com\/"/);
rendered.string.should.match(/"image": "http:\/\/testurl.com\/content\/images\/blog-cover.png"/);
rendered.string.should.match(/"description": "blog description"/);
done();
}).catch(done);
});
it('returns structured data on static page', function (done) {
var post = {
meta_description: 'all about our blog',
title: 'About',
image: '/content/images/test-image-about.png',
published_at: moment('2008-05-31T19:18:15').toISOString(),
updated_at: moment('2014-10-06T15:23:54').toISOString(),
page: true,
author: {
name: 'Author name',
url: 'http://testauthorurl.com',
slug: 'Author',
image: '/content/images/test-author-image.png',
website: 'http://authorwebsite.com',
facebook: 'testuser',
twitter: '@testuser',
bio: 'Author bio'
}
};
helpers.ghost_head.call(
{safeVersion: '0.3', relativeUrl: '/about/', context: ['page'], post: post},
{data: {root: {context: ['page']}}}
).then(function (rendered) {
should.exist(rendered);
rendered.string.should.match(/<link rel="canonical" href="http:\/\/testurl.com\/about\/" \/>/);
rendered.string.should.match(/<meta name="referrer" content="<API key>" \/>/);
rendered.string.should.match(/<meta property="og:site_name" content="Ghost" \/>/);
rendered.string.should.match(/<meta property="og:type" content="website" \/>/);
rendered.string.should.match(/<meta property="og:title" content="About" \/>/);
rendered.string.should.match(/<meta property="og:description" content="all about our blog" \/>/);
rendered.string.should.match(/<meta property="og:url" content="http:\/\/testurl.com\/about\/" \/>/);
rendered.string.should.match(/<meta property="og:image" content="http:\/\/testurl.com\/content\/images\/test-image-about.png" \/>/);
rendered.string.should.match(/<meta property="article:author" content="https:\/\/www.facebook.com\/testuser" \/>/);
rendered.string.should.match(/<meta name="twitter:card" content="summary_large_image" \/>/);
rendered.string.should.match(/<meta name="twitter:title" content="About" \/>/);
rendered.string.should.match(/<meta name="twitter:creator" content="@testuser" \/>/);
rendered.string.should.match(/<meta name="<TwitterConsumerkey>" content="all about our blog" \/>/);
rendered.string.should.match(/<meta name="twitter:url" content="http:\/\/testurl.com\/about\/" \/>/);
rendered.string.should.match(/<meta name="twitter:image" content="http:\/\/testurl.com\/content\/images\/test-image-about.png" \/>/);
rendered.string.should.match(/<meta name="generator" content="Ghost 0.3" \/>/);
rendered.string.should.match(/<link rel="alternate" type="application\/rss\+xml" title="Ghost" href="http:\/\/testurl.com\/rss\/" \/>/);
rendered.string.should.match(/<script type=\"application\/ld\+json\">/);
rendered.string.should.match(/"@context": "https:\/\/schema.org"/);
rendered.string.should.match(/"@type": "Article"/);
rendered.string.should.match(/"publisher": {/);
rendered.string.should.match(/"@type": "Organization"/);
rendered.string.should.match(/"name": "Ghost"/);
rendered.string.should.match(/"url": "http:\/\/testurl.com\/about\/"/);
rendered.string.should.match(/"sameAs": \[\n "http:\/\/authorwebsite.com",\n "https:\/\/www.facebook.com\/testuser",\n "https:\/\/twitter.com\/testuser"\n \]/);
rendered.string.should.match(/"image": "http:\/\/testurl.com\/content\/images\/test-image-about.png"/);
rendered.string.should.match(/"image\": \"http:\/\/testurl.com\/content\/images\/test-author-image.png\"/);
rendered.string.should.match(/"description": "all about our blog"/);
done();
}).catch(done);
});
it('returns structured data and schema first tag page with meta description and meta title', function (done) {
var tag = {
meta_description: 'tag meta description',
name: 'tagtitle',
meta_title: 'tag meta title',
image: '/content/images/tag-image.png'
};
helpers.ghost_head.call(
{safeVersion: '0.3', relativeUrl: '/tag/tagtitle/', tag: tag, context: ['tag']},
{data: {root: {context: ['tag']}}}
).then(function (rendered) {
should.exist(rendered);
rendered.string.should.match(/<link rel="canonical" href="http:\/\/testurl.com\/tag\/tagtitle\/" \/>/);
rendered.string.should.match(/<meta property="og:site_name" content="Ghost" \/>/);
rendered.string.should.match(/<meta property="og:type" content="website" \/>/);
rendered.string.should.match(/<meta property="og:title" content="tag meta title" \/>/);
rendered.string.should.match(/<meta property="og:description" content="tag meta description" \/>/);
rendered.string.should.match(/<meta property="og:url" content="http:\/\/testurl.com\/tag\/tagtitle\/" \/>/);
rendered.string.should.match(/<meta property="og:image" content="http:\/\/testurl.com\/content\/images\/tag-image.png" \/>/);
rendered.string.should.match(/<meta name="twitter:card" content="summary_large_image" \/>/);
rendered.string.should.match(/<meta name="twitter:title" content="tag meta title" \/>/);
rendered.string.should.match(/<meta name="<TwitterConsumerkey>" content="tag meta description" \/>/);
rendered.string.should.match(/<meta name="twitter:url" content="http:\/\/testurl.com\/tag\/tagtitle\/" \/>/);
rendered.string.should.match(/<meta name="twitter:image" content="http:\/\/testurl.com\/content\/images\/tag-image.png" \/>/);
rendered.string.should.match(/<meta name="generator" content="Ghost 0.3" \/>/);
rendered.string.should.match(/<link rel="alternate" type="application\/rss\+xml" title="Ghost" href="http:\/\/testurl.com\/rss\/" \/>/);
rendered.string.should.match(/<script type=\"application\/ld\+json\">/);
rendered.string.should.match(/"@context": "https:\/\/schema.org"/);
rendered.string.should.match(/"@type": "Series"/);
rendered.string.should.match(/"publisher": {\n "@type": "Organization",\n "name": "Ghost",/);
rendered.string.should.match(/"url": "http:\/\/testurl.com\/tag\/tagtitle\/"/);
rendered.string.should.match(/"image": "http:\/\/testurl.com\/content\/images\/tag-image.png"/);
rendered.string.should.match(/"name": "tagtitle"/);
rendered.string.should.match(/"description": "tag meta description"/);
done();
}).catch(done);
});
it('tag first page without meta description uses tag description, and title if no meta title', function (done) {
var tag = {
meta_description: '',
description: 'tag description',
name: 'tagtitle',
meta_title: '',
image: '/content/images/tag-image.png'
};
helpers.ghost_head.call(
{safeVersion: '0.3', relativeUrl: '/tag/tagtitle/', tag: tag, context: ['tag']},
{data: {root: {context: ['tag']}}}
).then(function (rendered) {
should.exist(rendered);
rendered.string.should.match(/<link rel="canonical" href="http:\/\/testurl.com\/tag\/tagtitle\/" \/>/);
rendered.string.should.match(/<meta property="og:site_name" content="Ghost" \/>/);
rendered.string.should.match(/<meta property="og:type" content="website" \/>/);
rendered.string.should.match(/<meta property="og:title" content="tagtitle - Ghost" \/>/);
rendered.string.should.match(/<meta property="og:description" content="tag description" \/>/);
rendered.string.should.match(/<meta property="og:url" content="http:\/\/testurl.com\/tag\/tagtitle\/" \/>/);
rendered.string.should.match(/<meta property="og:image" content="http:\/\/testurl.com\/content\/images\/tag-image.png" \/>/);
rendered.string.should.match(/<meta name="twitter:card" content="summary_large_image" \/>/);
rendered.string.should.match(/<meta name="twitter:title" content="tagtitle - Ghost" \/>/);
rendered.string.should.match(/<meta name="<TwitterConsumerkey>" content="tag description" \/>/);
rendered.string.should.match(/<meta name="twitter:url" content="http:\/\/testurl.com\/tag\/tagtitle\/" \/>/);
rendered.string.should.match(/<meta name="twitter:image" content="http:\/\/testurl.com\/content\/images\/tag-image.png" \/>/);
rendered.string.should.match(/<meta name="generator" content="Ghost 0.3" \/>/);
rendered.string.should.match(/<link rel="alternate" type="application\/rss\+xml" title="Ghost" href="http:\/\/testurl.com\/rss\/" \/>/);
rendered.string.should.match(/<script type=\"application\/ld\+json\">/);
rendered.string.should.match(/"@context": "https:\/\/schema.org"/);
rendered.string.should.match(/"@type": "Series"/);
rendered.string.should.match(/"publisher": {\n "@type": "Organization",\n "name": "Ghost",/);
rendered.string.should.match(/"url": "http:\/\/testurl.com\/tag\/tagtitle\/"/);
rendered.string.should.match(/"image": "http:\/\/testurl.com\/content\/images\/tag-image.png"/);
rendered.string.should.match(/"name": "tagtitle"/);
rendered.string.should.match(/"description": "tag description"/);
done();
}).catch(done);
});
it('tag first page with meta and model description returns no description fields', function (done) {
var tag = {
meta_description: '',
name: 'tagtitle',
meta_title: '',
image: '/content/images/tag-image.png'
};
helpers.ghost_head.call(
{safeVersion: '0.3', relativeUrl: '/tag/tagtitle/', tag: tag, context: ['tag']},
{data: {root: {context: ['tag']}}}
).then(function (rendered) {
should.exist(rendered);
rendered.string.should.not.match(/<meta property="og:description" \/>/);
rendered.string.should.not.match(/<meta name="<TwitterConsumerkey>"\/>/);
rendered.string.should.not.match(/"description":/);
done();
}).catch(done);
});
it('does not return structured data on paginated tag pages', function (done) {
var tag = {
meta_description: 'tag meta description',
title: 'tagtitle',
meta_title: 'tag meta title',
image: '/content/images/tag-image.png'
};
helpers.ghost_head.call(
{safeVersion: '0.3', relativeUrl: '/tag/tagtitle/page/2/', tag: tag, context: ['paged', 'tag']},
{data: {root: {context: ['tag']}}}
).then(function (rendered) {
should.exist(rendered);
rendered.string.should.match(/<link rel="canonical" href="http:\/\/testurl.com\/tag\/tagtitle\/page\/2\/" \/>/);
rendered.string.should.match(/<meta name="generator" content="Ghost 0.3" \/>/);
rendered.string.should.match(/<link rel="alternate" type="application\/rss\+xml" title="Ghost" href="http:\/\/testurl.com\/rss\/" \/>/);
rendered.string.should.not.match(/<meta property="og/);
rendered.string.should.not.match(/<script type=\"application\/ld\+json\">/);
done();
}).catch(done);
});
it('returns structured data and schema on first author page with cover image', function (done) {
var author = {
name: 'Author name',
slug: 'AuthorName',
bio: 'Author bio',
image: '/content/images/test-author-image.png',
cover: '/content/images/author-cover-image.png',
website: 'http://authorwebsite.com',
facebook: 'testuser',
twitter: '@testuser'
}, authorBk = _.cloneDeep(author);
helpers.ghost_head.call(
{safeVersion: '0.3', relativeUrl: '/author/AuthorName/', author: author, context: ['author']},
{data: {root: {context: ['author']}}}
).then(function (rendered) {
should.exist(rendered);
rendered.string.should.match(/<link rel="canonical" href="http:\/\/testurl.com\/author\/AuthorName\/" \/>/);
rendered.string.should.match(/<meta property="og:site_name" content="Ghost" \/>/);
rendered.string.should.match(/<meta property="og:type" content="profile" \/>/);
rendered.string.should.match(/<meta property="og:description" content="Author bio" \/>/);
rendered.string.should.match(/<meta property="og:url" content="http:\/\/testurl.com\/author\/AuthorName\/" \/>/);
rendered.string.should.match(/<meta property="og:image" content="http:\/\/testurl.com\/content\/images\/author-cover-image.png" \/>/);
rendered.string.should.match(/<meta property="article:author" content="https:\/\/www.facebook.com\/testuser\" \/>/);
rendered.string.should.match(/<meta name="twitter:card" content="summary_large_image" \/>/);
rendered.string.should.match(/<meta name="twitter:title" content="Author name - Ghost" \/>/);
rendered.string.should.match(/<meta name="<TwitterConsumerkey>" content="Author bio" \/>/);
rendered.string.should.match(/<meta name="twitter:url" content="http:\/\/testurl.com\/author\/AuthorName\/" \/>/);
rendered.string.should.match(/<meta name="twitter:creator" content="@testuser" \/>/);
rendered.string.should.match(/<meta name="twitter:image" content="http:\/\/testurl.com\/content\/images\/author-cover-image.png" \/>/);
rendered.string.should.match(/<meta name="generator" content="Ghost 0.3" \/>/);
rendered.string.should.match(/<link rel="alternate" type="application\/rss\+xml" title="Ghost" href="http:\/\/testurl.com\/rss\/" \/>/);
rendered.string.should.match(/<script type=\"application\/ld\+json\">/);
rendered.string.should.match(/"@context": "https:\/\/schema.org"/);
rendered.string.should.match(/"@type": "Person"/);
rendered.string.should.match(/"sameAs": \[\n "http:\/\/authorwebsite.com",\n "https:\/\/www.facebook.com\/testuser",\n "https:\/\/twitter.com\/testuser"\n \]/);
rendered.string.should.match(/"url": "http:\/\/testurl.com\/author\/AuthorName\/"/);
rendered.string.should.match(/"image": "http:\/\/testurl.com\/content\/images\/author-cover-image.png"/);
rendered.string.should.match(/"name": "Author name"/);
rendered.string.should.match(/"description": "Author bio"/);
author.should.eql(authorBk);
done();
}).catch(done);
});
it('does not return structured data on paginated author pages', function (done) {
var author = {
name: 'Author name',
slug: 'AuthorName',
bio: 'Author bio',
image: '/content/images/test-author-image.png',
cover: '/content/images/author-cover-image.png',
website: 'http://authorwebsite.com'
};
helpers.ghost_head.call(
{safeVersion: '0.3', relativeUrl: '/author/AuthorName/page/2/', author: author, context: ['paged', 'author']},
{data: {root: {context: ['paged', 'author']}}}
).then(function (rendered) {
should.exist(rendered);
rendered.string.should.match(/<link rel="canonical" href="http:\/\/testurl.com\/author\/AuthorName\/page\/2\/" \/>/);
rendered.string.should.match(/<meta name="generator" content="Ghost 0.3" \/>/);
rendered.string.should.match(/<link rel="alternate" type="application\/rss\+xml" title="Ghost" href="http:\/\/testurl.com\/rss\/" \/>/);
rendered.string.should.not.match(/<meta property="og/);
rendered.string.should.not.match(/<script type=\"application\/ld\+json\">/);
done();
}).catch(done);
});
it('returns meta tag string even if safeVersion is invalid', function (done) {
helpers.ghost_head.call(
{safeVersion: '0.9', context: []},
{data: {root: {context: []}}}
).then(function (rendered) {
should.exist(rendered);
rendered.string.should.match(/<meta name="generator" content="Ghost 0.9" \/>/);
rendered.string.should.match(/<link rel="alternate" type="application\/rss\+xml" title="Ghost" href="http:\/\/testurl.com\/rss\/" \/>/);
done();
}).catch(done);
});
it('returns structured data on post page with author image and post cover image', function (done) {
var post = {
meta_description: 'blog description',
title: 'Welcome to Ghost',
image: '/content/images/test-image.png',
published_at: moment('2008-05-31T19:18:15').toISOString(),
updated_at: moment('2014-10-06T15:23:54').toISOString(),
tags: [{name: 'tag1'}, {name: 'tag2'}, {name: 'tag3'}],
author: {
name: 'Author name',
url: 'http://testauthorurl.com',
slug: 'Author',
image: '/content/images/test-author-image.png',
website: 'http://authorwebsite.com',
bio: 'Author bio',
facebook: 'testuser',
twitter: '@testuser'
}
}, postBk = _.cloneDeep(post);
helpers.ghost_head.call(
{relativeUrl: '/post/', safeVersion: '0.3', context: ['post'], post: post},
{data: {root: {context: ['post']}}}
).then(function (rendered) {
var re1 = new RegExp('<meta property="article:published_time" content="' + post.published_at),
re2 = new RegExp('<meta property="article:modified_time" content="' + post.updated_at),
re3 = new RegExp('"datePublished": "' + post.published_at),
re4 = new RegExp('"dateModified": "' + post.updated_at);
should.exist(rendered);
rendered.string.should.match(/<link rel="canonical" href="http:\/\/testurl.com\/post\/" \/>/);
rendered.string.should.match(/<link rel="amphtml" href="http:\/\/testurl.com\/post\/amp\/" \/>/);
rendered.string.should.match(/<meta property="og:site_name" content="Ghost" \/>/);
rendered.string.should.match(/<meta property="og:type" content="article" \/>/);
rendered.string.should.match(/<meta property="og:title" content="Welcome to Ghost" \/>/);
rendered.string.should.match(/<meta property="og:description" content="blog description" \/>/);
rendered.string.should.match(/<meta property="og:url" content="http:\/\/testurl.com\/post\/" \/>/);
rendered.string.should.match(/<meta property="og:image" content="http:\/\/testurl.com\/content\/images\/test-image.png" \/>/);
rendered.string.should.match(re1);
rendered.string.should.match(re2);
rendered.string.should.match(/<meta property="article:tag" content="tag1" \/>/);
rendered.string.should.match(/<meta property="article:tag" content="tag2" \/>/);
rendered.string.should.match(/<meta property="article:tag" content="tag3" \/>/);
rendered.string.should.match(/<meta property="article:author" content="https:\/\/www.facebook.com\/testuser" \/>/);
rendered.string.should.match(/<meta name="twitter:title" content="Welcome to Ghost" \/>/);
rendered.string.should.match(/<meta name="<TwitterConsumerkey>" content="blog description" \/>/);
rendered.string.should.match(/<meta name="twitter:url" content="http:\/\/testurl.com\/post\/" \/>/);
rendered.string.should.match(/<meta name="twitter:image" content="http:\/\/testurl.com\/content\/images\/test-image.png" \/>/);
rendered.string.should.match(/<meta name="twitter:creator" content="@testuser" \/>/);
rendered.string.should.match(/"@context": "https:\/\/schema.org"/);
rendered.string.should.match(/"@type": "Article"/);
rendered.string.should.match(/"publisher": {/);
rendered.string.should.match(/"@type": "Organization"/);
rendered.string.should.match(/"name": "Ghost"/);
rendered.string.should.match(/"author": {/);
rendered.string.should.match(/"@type": "Person"/);
rendered.string.should.match(/"name": "Author name"/);
rendered.string.should.match(/"image\": \"http:\/\/testurl.com\/content\/images\/test-author-image.png\"/);
rendered.string.should.match(/"url": "http:\/\/testurl.com\/author\/Author\/"/);
rendered.string.should.match(/"sameAs": \[\n "http:\/\/authorwebsite.com",\n "https:\/\/www.facebook.com\/testuser",\n "https:\/\/twitter.com\/testuser"\n \]/);
rendered.string.should.match(/"description": "Author bio"/);
rendered.string.should.match(/"headline": "Welcome to Ghost"/);
rendered.string.should.match(/"url": "http:\/\/testurl.com\/post\/"/);
rendered.string.should.match(re3);
rendered.string.should.match(re4);
rendered.string.should.match(/"image": "http:\/\/testurl.com\/content\/images\/test-image.png"/);
rendered.string.should.match(/"keywords": "tag1, tag2, tag3"/);
rendered.string.should.match(/"description": "blog description"/);
rendered.string.should.match(/"@context": "https:\/\/schema.org"/);
rendered.string.should.match(/<meta name="generator" content="Ghost 0.3" \/>/);
rendered.string.should.match(/<link rel="alternate" type="application\/rss\+xml" title="Ghost" href="http:\/\/testurl.com\/rss\/" \/>/);
post.should.eql(postBk);
done();
}).catch(done);
});
it('returns structured data on AMP post page with author image and post cover image', function (done) {
var post = {
meta_description: 'blog description',
title: 'Welcome to Ghost',
image: '/content/images/test-image.png',
published_at: moment('2008-05-31T19:18:15').toISOString(),
updated_at: moment('2014-10-06T15:23:54').toISOString(),
tags: [{name: 'tag1'}, {name: 'tag2'}, {name: 'tag3'}],
author: {
name: 'Author name',
url: 'http://testauthorurl.com',
slug: 'Author',
image: '/content/images/test-author-image.png',
website: 'http://authorwebsite.com',
bio: 'Author bio',
facebook: 'testuser',
twitter: '@testuser'
}
}, postBk = _.cloneDeep(post);
helpers.ghost_head.call(
{relativeUrl: '/post/amp/', safeVersion: '0.3', context: ['amp', 'post'], post: post},
{data: {root: {context: ['amp', 'post']}}}
).then(function (rendered) {
var re1 = new RegExp('<meta property="article:published_time" content="' + post.published_at),
re2 = new RegExp('<meta property="article:modified_time" content="' + post.updated_at),
re3 = new RegExp('"datePublished": "' + post.published_at),
re4 = new RegExp('"dateModified": "' + post.updated_at);
should.exist(rendered);
rendered.string.should.match(/<link rel="canonical" href="http:\/\/testurl.com\/post\/" \/>/);
rendered.string.should.not.match(/<link rel="amphtml" href="http:\/\/testurl.com\/post\/amp\/" \/>/);
rendered.string.should.match(/<meta property="og:site_name" content="Ghost" \/>/);
rendered.string.should.match(/<meta property="og:type" content="article" \/>/);
rendered.string.should.match(/<meta property="og:title" content="Welcome to Ghost" \/>/);
rendered.string.should.match(/<meta property="og:description" content="blog description" \/>/);
rendered.string.should.match(/<meta property="og:url" content="http:\/\/testurl.com\/post\/" \/>/);
rendered.string.should.match(/<meta property="og:image" content="http:\/\/testurl.com\/content\/images\/test-image.png" \/>/);
rendered.string.should.match(re1);
rendered.string.should.match(re2);
rendered.string.should.match(/<meta property="article:tag" content="tag1" \/>/);
rendered.string.should.match(/<meta property="article:tag" content="tag2" \/>/);
rendered.string.should.match(/<meta property="article:tag" content="tag3" \/>/);
rendered.string.should.match(/<meta property="article:author" content="https:\/\/www.facebook.com\/testuser" \/>/);
rendered.string.should.match(/<meta name="twitter:title" content="Welcome to Ghost" \/>/);
rendered.string.should.match(/<meta name="<TwitterConsumerkey>" content="blog description" \/>/);
rendered.string.should.match(/<meta name="twitter:url" content="http:\/\/testurl.com\/post\/" \/>/);
rendered.string.should.match(/<meta name="twitter:image" content="http:\/\/testurl.com\/content\/images\/test-image.png" \/>/);
rendered.string.should.match(/<meta name="twitter:creator" content="@testuser" \/>/);
rendered.string.should.match(/"@context": "https:\/\/schema.org"/);
rendered.string.should.match(/"@type": "Article"/);
rendered.string.should.match(/"publisher": {/);
rendered.string.should.match(/"@type": "Organization"/);
rendered.string.should.match(/"name": "Ghost"/);
rendered.string.should.match(/"author": {/);
rendered.string.should.match(/"@type": "Person"/);
rendered.string.should.match(/"name": "Author name"/);
rendered.string.should.match(/"image\": \"http:\/\/testurl.com\/content\/images\/test-author-image.png\"/);
rendered.string.should.match(/"url": "http:\/\/testurl.com\/author\/Author\/"/);
rendered.string.should.match(/"sameAs": \[\n "http:\/\/authorwebsite.com",\n "https:\/\/www.facebook.com\/testuser",\n "https:\/\/twitter.com\/testuser"\n \]/);
rendered.string.should.match(/"description": "Author bio"/);
rendered.string.should.match(/"headline": "Welcome to Ghost"/);
rendered.string.should.match(/"url": "http:\/\/testurl.com\/post\/"/);
rendered.string.should.match(re3);
rendered.string.should.match(re4);
rendered.string.should.match(/"image": "http:\/\/testurl.com\/content\/images\/test-image.png"/);
rendered.string.should.match(/"keywords": "tag1, tag2, tag3"/);
rendered.string.should.match(/"description": "blog description"/);
rendered.string.should.match(/"@context": "https:\/\/schema.org"/);
rendered.string.should.match(/<meta name="generator" content="Ghost 0.3" \/>/);
rendered.string.should.match(/<link rel="alternate" type="application\/rss\+xml" title="Ghost" href="http:\/\/testurl.com\/rss\/" \/>/);
post.should.eql(postBk);
done();
}).catch(done);
});
it('returns structured data if metaTitle and metaDescription have double quotes', function (done) {
var post = {
meta_description: 'blog "test" description',
title: 'title',
meta_title: 'Welcome to Ghost "test"',
image: '/content/images/test-image.png',
published_at: moment('2008-05-31T19:18:15').toISOString(),
updated_at: moment('2014-10-06T15:23:54').toISOString(),
tags: [{name: 'tag1'}, {name: 'tag2'}, {name: 'tag3'}],
author: {
name: 'Author name',
url: 'http//:testauthorurl.com',
slug: 'Author',
image: '/content/images/test-author-image.png',
website: 'http://authorwebsite.com',
facebook: 'testuser',
twitter: '@testuser'
}
};
helpers.ghost_head.call(
{relativeUrl: '/post/', safeVersion: '0.3', context: ['post'], post: post},
{data: {root: {context: ['post']}}}
).then(function (rendered) {
var re1 = new RegExp('<meta property="article:published_time" content="' + post.published_at),
re2 = new RegExp('<meta property="article:modified_time" content="' + post.updated_at),
re3 = new RegExp('"datePublished": "' + post.published_at),
re4 = new RegExp('"dateModified": "' + post.updated_at);
should.exist(rendered);
rendered.string.should.match(/<link rel="canonical" href="http:\/\/testurl.com\/post\/" \/>/);
rendered.string.should.match(/<link rel="amphtml" href="http:\/\/testurl.com\/post\/amp\/" \/>/);
rendered.string.should.match(/<meta property="og:site_name" content="Ghost" \/>/);
rendered.string.should.match(/<meta property="og:type" content="article" \/>/);
rendered.string.should.match(/<meta property="og:title" content="Welcome to Ghost "test"" \/>/);
rendered.string.should.match(/<meta property="og:description" content="blog "test" description" \/>/);
rendered.string.should.match(/<meta property="og:url" content="http:\/\/testurl.com\/post\/" \/>/);
rendered.string.should.match(/<meta property="og:image" content="http:\/\/testurl.com\/content\/images\/test-image.png" \/>/);
rendered.string.should.match(/<meta property="article:author" content="https:\/\/www.facebook.com\/testuser" \/>/);
rendered.string.should.match(re1);
rendered.string.should.match(re2);
rendered.string.should.match(/<meta property="article:tag" content="tag1" \/>/);
rendered.string.should.match(/<meta property="article:tag" content="tag2" \/>/);
rendered.string.should.match(/<meta property="article:tag" content="tag3" \/>/);
rendered.string.should.match(/<meta name="twitter:card" content="summary_large_image" \/>/);
rendered.string.should.match(/<meta name="twitter:title" content="Welcome to Ghost "test"" \/>/);
rendered.string.should.match(/<meta name="<TwitterConsumerkey>" content="blog "test" description" \/>/);
rendered.string.should.match(/<meta name="twitter:url" content="http:\/\/testurl.com\/post\/" \/>/);
rendered.string.should.match(/<meta name="twitter:creator" content="@testuser" \/>/);
rendered.string.should.match(/<meta name="twitter:image" content="http:\/\/testurl.com\/content\/images\/test-image.png" \/>/);
rendered.string.should.match(/<script type=\"application\/ld\+json\">/);
rendered.string.should.match(/"@context": "https:\/\/schema.org"/);
rendered.string.should.match(/"@type": "Article"/);
rendered.string.should.match(/"publisher": {/);
rendered.string.should.match(/"@type": "Organization"/);
rendered.string.should.match(/"name": "Ghost"/);
rendered.string.should.match(/"author": {/);
rendered.string.should.match(/"@type": "Person"/);
rendered.string.should.match(/"name": "Author name"/);
rendered.string.should.match(/"image\": \"http:\/\/testurl.com\/content\/images\/test-author-image.png\"/);
rendered.string.should.match(/"url": "http:\/\/testurl.com\/author\/Author\/"/);
rendered.string.should.match(/"sameAs": \[\n "http:\/\/authorwebsite.com",\n "https:\/\/www.facebook.com\/testuser",\n "https:\/\/twitter.com\/testuser"\n \]/);
rendered.string.should.match(/"headline": "Welcome to Ghost "test""/);
rendered.string.should.match(/"url": "http:\/\/testurl.com\/post\/"/);
rendered.string.should.match(/"@context": "https:\/\/schema.org"/);
rendered.string.should.match(re3);
rendered.string.should.match(re4);
rendered.string.should.match(/"image": "http:\/\/testurl.com\/content\/images\/test-image.png"/);
rendered.string.should.match(/"keywords": "tag1, tag2, tag3"/);
rendered.string.should.match(/"description": "blog "test" description"/);
rendered.string.should.match(/"@context": "https:\/\/schema.org"/);
rendered.string.should.match(/<meta name="generator" content="Ghost 0.3" \/>/);
rendered.string.should.match(/<link rel="alternate" type="application\/rss\+xml" title="Ghost" href="http:\/\/testurl.com\/rss\/" \/>/);
done();
}).catch(done);
});
it('returns structured data without tags if there are no tags', function (done) {
var post = {
meta_description: 'blog description',
title: 'Welcome to Ghost',
image: '/content/images/test-image.png',
published_at: moment('2008-05-31T19:18:15').toISOString(),
updated_at: moment('2014-10-06T15:23:54').toISOString(),
tags: [],
author: {
name: 'Author name',
url: 'http//:testauthorurl.com',
slug: 'Author',
image: '/content/images/test-author-image.png',
website: 'http://authorwebsite.com',
facebook: 'testuser',
twitter: '@testuser'
}
};
helpers.ghost_head.call(
{relativeUrl: '/post/', safeVersion: '0.3', context: ['post'], post: post},
{data: {root: {context: ['post']}}}).then(function (rendered) {
var re1 = new RegExp('<meta property="article:published_time" content="' + post.published_at),
re2 = new RegExp('<meta property="article:modified_time" content="' + post.updated_at),
re3 = new RegExp('"datePublished": "' + post.published_at),
re4 = new RegExp('"dateModified": "' + post.updated_at);
should.exist(rendered);
rendered.string.should.match(/<link rel="canonical" href="http:\/\/testurl.com\/post\/" \/>/);
rendered.string.should.match(/<link rel="amphtml" href="http:\/\/testurl.com\/post\/amp\/" \/>/);
rendered.string.should.match(/<meta property="og:site_name" content="Ghost" \/>/);
rendered.string.should.match(/<meta property="og:type" content="article" \/>/);
rendered.string.should.match(/<meta property="og:title" content="Welcome to Ghost" \/>/);
rendered.string.should.match(/<meta property="og:description" content="blog description" \/>/);
rendered.string.should.match(/<meta property="og:url" content="http:\/\/testurl.com\/post\/" \/>/);
rendered.string.should.match(/<meta property="og:image" content="http:\/\/testurl.com\/content\/images\/test-image.png" \/>/);
rendered.string.should.match(/<meta property="article:author" content="https:\/\/www.facebook.com\/testuser" \/>/);
rendered.string.should.match(re1);
rendered.string.should.match(re2);
rendered.string.should.not.match(/<meta property="article:tag"/);
rendered.string.should.match(/<meta name="twitter:card" content="summary_large_image" \/>/);
rendered.string.should.match(/<meta name="twitter:title" content="Welcome to Ghost" \/>/);
rendered.string.should.match(/<meta name="<TwitterConsumerkey>" content="blog description" \/>/);
rendered.string.should.match(/<meta name="twitter:url" content="http:\/\/testurl.com\/post\/" \/>/);
rendered.string.should.match(/<meta name="twitter:image" content="http:\/\/testurl.com\/content\/images\/test-image.png" \/>/);
rendered.string.should.match(/<meta name="twitter:creator" content="@testuser" \/>/);
rendered.string.should.match(/<script type=\"application\/ld\+json\">/);
rendered.string.should.match(/"@context": "https:\/\/schema.org"/);
rendered.string.should.match(/"@type": "Article"/);
rendered.string.should.match(/"publisher": {/);
rendered.string.should.match(/"@type": "Organization"/);
rendered.string.should.match(/"author": {/);
rendered.string.should.match(/"@type": "Person"/);
rendered.string.should.match(/"name": "Author name"/);
rendered.string.should.match(/"image\": \"http:\/\/testurl.com\/content\/images\/test-author-image.png\"/);
rendered.string.should.match(/"url": "http:\/\/testurl.com\/author\/Author\/"/);
rendered.string.should.match(/"sameAs": \[\n "http:\/\/authorwebsite.com",\n "https:\/\/www.facebook.com\/testuser",\n "https:\/\/twitter.com\/testuser"\n \]/);
rendered.string.should.match(/"headline": "Welcome to Ghost"/);
rendered.string.should.match(/"url": "http:\/\/testurl.com\/post\/"/);
rendered.string.should.match(/"@context": "https:\/\/schema.org"/);
rendered.string.should.match(re3);
rendered.string.should.match(re4);
rendered.string.should.match(/"image": "http:\/\/testurl.com\/content\/images\/test-image.png"/);
rendered.string.should.not.match(/"keywords":/);
rendered.string.should.match(/"description": "blog description"/);
rendered.string.should.match(/"@context": "https:\/\/schema.org"/);
rendered.string.should.match(/<meta name="generator" content="Ghost 0.3" \/>/);
rendered.string.should.match(/<link rel="alternate" type="application\/rss\+xml" title="Ghost" href="http:\/\/testurl.com\/rss\/" \/>/);
done();
}).catch(done);
});
it('returns structured data on post page with null author image and post cover image', function (done) {
var post = {
meta_description: 'blog description',
title: 'Welcome to Ghost',
image: null,
published_at: moment('2008-05-31T19:18:15').toISOString(),
updated_at: moment('2014-10-06T15:23:54').toISOString(),
tags: [{name: 'tag1'}, {name: 'tag2'}, {name: 'tag3'}],
author: {
name: 'Author name',
url: 'http//:testauthorurl.com',
slug: 'Author',
image: null,
website: 'http://authorwebsite.com',
facebook: 'testuser',
twitter: '@testuser'
}
};
helpers.ghost_head.call(
{relativeUrl: '/post/', safeVersion: '0.3', context: ['post'], post: post},
{data: {root: {context: ['post']}}}
).then(function (rendered) {
var re1 = new RegExp('<meta property="article:published_time" content="' + post.published_at),
re2 = new RegExp('<meta property="article:modified_time" content="' + post.updated_at),
re3 = new RegExp('"datePublished": "' + post.published_at),
re4 = new RegExp('"dateModified": "' + post.updated_at);
should.exist(rendered);
rendered.string.should.match(/<link rel="canonical" href="http:\/\/testurl.com\/post\/" \/>/);
rendered.string.should.match(/<link rel="amphtml" href="http:\/\/testurl.com\/post\/amp\/" \/>/);
rendered.string.should.match(/<meta property="og:site_name" content="Ghost" \/>/);
rendered.string.should.match(/<meta property="og:type" content="article" \/>/);
rendered.string.should.match(/<meta property="og:title" content="Welcome to Ghost" \/>/);
rendered.string.should.match(/<meta property="og:description" content="blog description" \/>/);
rendered.string.should.match(/<meta property="og:url" content="http:\/\/testurl.com\/post\/" \/>/);
rendered.string.should.match(/<meta property="article:author" content="https:\/\/www.facebook.com\/testuser" \/>/);
rendered.string.should.not.match(/<meta property="og:image"/);
rendered.string.should.match(re1);
rendered.string.should.match(re2);
rendered.string.should.match(/<meta property="article:tag" content="tag1" \/>/);
rendered.string.should.match(/<meta property="article:tag" content="tag2" \/>/);
rendered.string.should.match(/<meta property="article:tag" content="tag3" \/>/);
rendered.string.should.match(/<meta name="twitter:card" content="summary" \/>/);
rendered.string.should.match(/<meta name="twitter:title" content="Welcome to Ghost" \/>/);
rendered.string.should.match(/<meta name="<TwitterConsumerkey>" content="blog description" \/>/);
rendered.string.should.match(/<meta name="twitter:url" content="http:\/\/testurl.com\/post\/" \/>/);
rendered.string.should.match(/<meta name="twitter:creator" content="@testuser" \/>/);
rendered.string.should.not.match(/<meta name="twitter:image"/);
rendered.string.should.match(/<script type=\"application\/ld\+json\">/);
rendered.string.should.match(/"@context": "https:\/\/schema.org"/);
rendered.string.should.match(/"@type": "Article"/);
rendered.string.should.match(/"publisher": {/);
rendered.string.should.match(/"@type": "Organization"/);
rendered.string.should.match(/"name": "Ghost"/);
rendered.string.should.match(/"author": {/);
rendered.string.should.match(/"@type": "Person"/);
rendered.string.should.match(/"name": "Author name"/);
rendered.string.should.not.match(/"image\"/);
rendered.string.should.match(/"url": "http:\/\/testurl.com\/author\/Author\/"/);
rendered.string.should.match(/"sameAs": \[\n "http:\/\/authorwebsite.com",\n "https:\/\/www.facebook.com\/testuser",\n "https:\/\/twitter.com\/testuser"\n \]/);
rendered.string.should.match(/"headline": "Welcome to Ghost"/);
rendered.string.should.match(/"url": "http:\/\/testurl.com\/post\/"/);
rendered.string.should.match(re3);
rendered.string.should.match(re4);
rendered.string.should.match(/"keywords": "tag1, tag2, tag3"/);
rendered.string.should.match(/"description": "blog description"/);
rendered.string.should.match(/<meta name="generator" content="Ghost 0.3" \/>/);
rendered.string.should.match(/<link rel="alternate" type="application\/rss\+xml" title="Ghost" href="http:\/\/testurl.com\/rss\/" \/>/);
done();
}).catch(done);
});
it('outputs structured data but not schema for custom channel', function (done) {
helpers.ghost_head.call(
{safeVersion: '0.3', relativeUrl: '/featured/', context: ['featured']},
{data: {root: {context: ['featured']}}}
).then(function (rendered) {
should.exist(rendered);
rendered.string.should.match(/<link rel="canonical" href="http:\/\/testurl.com\/featured\/" \/>/);
rendered.string.should.match(/<meta name="generator" content="Ghost 0.3" \/>/);
rendered.string.should.match(/<link rel="alternate" type="application\/rss\+xml" title="Ghost" href="http:\/\/testurl.com\/rss\/" \/>/);
rendered.string.should.match(/<meta property="og:site_name" content="Ghost" \/>/);
rendered.string.should.match(/<meta property="og:type" content="website" \/>/);
rendered.string.should.match(/<meta property="og:title" content="Ghost" \/>/);
rendered.string.should.match(/<meta property="og:url" content="http:\/\/testurl.com\/featured\/" \/>/);
rendered.string.should.not.match(/<script type=\"application\/ld\+json\">/);
done();
}).catch(done);
});
it('returns twitter and facebook descriptions if no meta description available', function (done) {
var post = {
title: 'Welcome to Ghost',
html: '<p>This is a short post</p>',
author: {
name: 'Author name'
}
};
helpers.ghost_head.call(
{relativeUrl: '/post/', safeVersion: '0.3', context: ['post'], post: post},
{data: {root: {context: ['post']}}}
).then(function (rendered) {
should.exist(rendered);
rendered.string.should.match(/<link rel="amphtml" href="http:\/\/testurl.com\/post\/amp\/" \/>/);
rendered.string.should.match(/<meta property="og:description" content="This is a short post" \/>/);
rendered.string.should.match(/<meta name="<TwitterConsumerkey>" content="This is a short post" \/>/);
done();
}).catch(done);
});
it('returns canonical URL', function (done) {
var post = {
title: 'Welcome to Ghost',
html: '<p>This is a short post</p>',
author: {
name: 'Author name'
}
};
helpers.ghost_head.call(
{safeVersion: '0.3', relativeUrl: '/about/', context: ['page'], post: post},
{data: {root: {context: ['page']}}}
).then(function (rendered) {
should.exist(rendered);
rendered.string.should.match(/<link rel="canonical" href="http:\/\/testurl.com\/about\/" \/>/);
rendered.string.should.match(/<meta name="generator" content="Ghost 0.3" \/>/);
rendered.string.should.match(/<link rel="alternate" type="application\/rss\+xml" title="Ghost" href="http:\/\/testurl.com\/rss\/" \/>/);
done();
}).catch(done);
});
it('returns next & prev URL correctly for middle page', function (done) {
helpers.ghost_head.call(
{safeVersion: '0.3', relativeUrl: '/page/3/', context: ['paged', 'index'], pagination: {next: '4', prev: '2'}},
{data: {root: {context: ['index', 'paged'], pagination: {total: 4, page: 3, next: 4, prev: 2}}}}
).then(function (rendered) {
should.exist(rendered);
rendered.string.should.match(/<link rel="canonical" href="http:\/\/testurl.com\/page\/3\/" \/>/);
rendered.string.should.match(/<meta name="generator" content="Ghost 0.3" \/>/);
rendered.string.should.match(/<link rel="prev" href="http:\/\/testurl.com\/page\/2\/" \/>/);
rendered.string.should.match(/<link rel="next" href="http:\/\/testurl.com\/page\/4\/" \/>/);
rendered.string.should.match(/<link rel="alternate" type="application\/rss\+xml" title="Ghost" href="http:\/\/testurl.com\/rss\/" \/>/);
rendered.string.should.not.match(/<meta property="og/);
rendered.string.should.not.match(/<script type=\"application\/ld\+json\">/);
done();
}).catch(done);
});
it('returns next & prev URL correctly for second page', function (done) {
helpers.ghost_head.call(
{safeVersion: '0.3', relativeUrl: '/page/2/', context: ['paged', 'index'], pagination: {next: '3', prev: '1'}},
{data: {root: {context: ['index', 'paged'], pagination: {total: 3, page: 2, next: 3, prev: 1}}}}
).then(function (rendered) {
should.exist(rendered);
rendered.string.should.match(/<link rel="canonical" href="http:\/\/testurl.com\/page\/2\/" \/>/);
rendered.string.should.match(/<meta name="generator" content="Ghost 0.3" \/>/);
rendered.string.should.match(/<link rel="prev" href="http:\/\/testurl.com\/" \/>/);
rendered.string.should.match(/<link rel="next" href="http:\/\/testurl.com\/page\/3\/" \/>/);
rendered.string.should.match(/<link rel="alternate" type="application\/rss\+xml" title="Ghost" href="http:\/\/testurl.com\/rss\/" \/>/);
rendered.string.should.not.match(/<meta property="og/);
rendered.string.should.not.match(/<script type=\"application\/ld\+json\">/);
done();
}).catch(done);
});
describe('with /blog subdirectory', function () {
beforeEach(function () {
configUtils.set({
url: 'http://testurl.com/blog/',
theme: {
title: 'Ghost',
description: 'blog description',
cover: '/content/images/blog-cover.png'
}
});
});
it('returns correct rss url with subdirectory', function (done) {
helpers.ghost_head.call(
{safeVersion: '0.3', context: ['paged', 'index']},
{data: {root: {context: []}}}
).then(function (rendered) {
should.exist(rendered);
rendered.string.should.match(/<link rel="canonical" href="http:\/\/testurl.com\/blog\/" \/>/);
rendered.string.should.match(/<meta name="generator" content="Ghost 0.3" \/>/);
rendered.string.should.match(/<link rel="alternate" type="application\/rss\+xml" title="Ghost" href="http:\/\/testurl.com\/blog\/rss\/" \/>/);
done();
}).catch(done);
});
});
});
describe('with changed origin in config file', function () {
beforeEach(function () {
configUtils.set({
url: 'http://testurl.com/blog/',
theme: {
title: 'Ghost',
description: 'blog description',
cover: '/content/images/blog-cover.png',
amp: true
},
referrerPolicy: 'origin'
});
});
it('contains the changed origin', function (done) {
helpers.ghost_head.call(
{safeVersion: '0.3', context: ['paged', 'index']},
{data: {root: {context: []}}}
).then(function (rendered) {
should.exist(rendered);
rendered.string.should.match(/<meta name="referrer" content="origin" \/>/);
done();
}).catch(done);
});
});
// privacy.useStructuredData = false must suppress Open Graph meta tags and
// the JSON-LD <script> block, while plain meta/link tags are still emitted.
describe('with useStructuredData is set to false in config file', function () {
beforeEach(function () {
configUtils.set({
url: 'http://testurl.com/',
theme: {
title: 'Ghost',
description: 'blog description',
cover: '/content/images/blog-cover.png',
amp: true
},
privacy: {
useStructuredData: false
}
});
});
it('does not return structured data', function (done) {
var post = {
meta_description: 'blog description',
title: 'Welcome to Ghost',
image: 'content/images/test-image.png',
published_at: moment('2008-05-31T19:18:15').toISOString(),
updated_at: moment('2014-10-06T15:23:54').toISOString(),
tags: [{name: 'tag1'}, {name: 'tag2'}, {name: 'tag3'}],
author: {
name: 'Author name',
// NOTE(review): 'http//:' looks like a typo in the fixture URL, but the
// assertions below never check it — left as-is to preserve behaviour.
url: 'http//:testauthorurl.com',
slug: 'Author',
image: 'content/images/test-author-image.png',
website: 'http://authorwebsite.com',
facebook: 'testuser',
twitter: '@testuser'
}
};
helpers.ghost_head.call(
{relativeUrl: '/post/', safeVersion: '0.3', context: ['post'], post: post},
{data: {root: {context: ['post']}}}
).then(function (rendered) {
should.exist(rendered);
rendered.string.should.match(/<link rel="canonical" href="http:\/\/testurl.com\/post\/" \/>/);
rendered.string.should.match(/<link rel="amphtml" href="http:\/\/testurl.com\/post\/amp\/" \/>/);
rendered.string.should.match(/<meta name="generator" content="Ghost 0.3" \/>/);
rendered.string.should.match(/<link rel="alternate" type="application\/rss\+xml" title="Ghost" href="http:\/\/testurl.com\/rss\/" \/>/);
// Structured data is disabled: no OG tags, no JSON-LD script.
rendered.string.should.not.match(/<meta property="og/);
rendered.string.should.not.match(/<script type=\"application\/ld\+json\">/);
done();
}).catch(done);
});
});
// Code injected via the settings (ghost_head code injection) must be appended
// to the normal output — except in the AMP context, where injected code is
// stripped to keep the page AMP-valid.
describe('with Code Injection', function () {
    beforeEach(function () {
        // Fixed: `Promise.resolve` is a plain function, not a constructor —
        // the original `new Promise.resolve({...})` throws a TypeError with
        // native promises; the `new` was spurious and has been removed.
        settingsReadStub.returns(Promise.resolve({
            settings: [{value: '<style>body {background: red;}</style>'}]
        }));
        configUtils.set({
            url: 'http://testurl.com/',
            theme: {
                title: 'Ghost',
                description: 'blog description',
                cover: '/content/images/blog-cover.png'
            }
        });
    });
    it('returns meta tag plus injected code', function (done) {
        helpers.ghost_head.call(
            {safeVersion: '0.3', context: ['paged', 'index'], post: false},
            {data: {root: {context: []}}}
        ).then(function (rendered) {
            should.exist(rendered);
            rendered.string.should.match(/<link rel="canonical" href="http:\/\/testurl.com\/" \/>/);
            rendered.string.should.match(/<meta name="generator" content="Ghost 0.3" \/>/);
            rendered.string.should.match(/<link rel="alternate" type="application\/rss\+xml" title="Ghost" href="http:\/\/testurl.com\/rss\/" \/>/);
            // The injected style block must be present in the rendered head.
            rendered.string.should.match(/<style>body {background: red;}<\/style>/);
            done();
        }).catch(done);
    });
    it('returns meta tag without injected code for amp context', function (done) {
        var post = {
            meta_description: 'blog description',
            title: 'Welcome to Ghost',
            image: 'content/images/test-image.png',
            published_at: moment('2008-05-31T19:18:15').toISOString(),
            updated_at: moment('2014-10-06T15:23:54').toISOString(),
            tags: [{name: 'tag1'}, {name: 'tag2'}, {name: 'tag3'}],
            author: {
                name: 'Author name',
                url: 'http//:testauthorurl.com',
                slug: 'Author',
                image: 'content/images/test-author-image.png',
                website: 'http://authorwebsite.com',
                facebook: 'testuser',
                twitter: '@testuser'
            }
        };
        helpers.ghost_head.call(
            {safeVersion: '0.3', context: ['amp', 'post'], post: post},
            {data: {root: {context: []}}}
        ).then(function (rendered) {
            should.exist(rendered);
            rendered.string.should.match(/<link rel="canonical" href="http:\/\/testurl.com\/" \/>/);
            rendered.string.should.match(/<meta name="generator" content="Ghost 0.3" \/>/);
            rendered.string.should.match(/<link rel="alternate" type="application\/rss\+xml" title="Ghost" href="http:\/\/testurl.com\/rss\/" \/>/);
            // AMP pages must not contain the injected code.
            rendered.string.should.not.match(/<style>body {background: red;}<\/style>/);
            done();
        }).catch(done);
    });
});
// The ghost-url.js bootstrap script and its ghost.init(...) call should be
// rendered for normal pages but omitted in the AMP context.
describe('with Ajax Helper', function () {
    it('renders script tag with src', function (done) {
        helpers.ghost_head.call(
            {safeVersion: '0.3', context: ['paged', 'index'], post: false},
            {data: {root: {context: []}}}
        ).then(function (rendered) {
            should.exist(rendered);
            rendered.string.should.match(/<script type="text\/javascript" src="\/shared\/ghost-url\.js\?v=/);
            done();
        }).catch(done); // Fixed: without .catch(done) a rejection surfaced as a timeout, not a failure.
    });
    it('renders script tag with init correctly', function (done) {
        helpers.ghost_head.call(
            {safeVersion: '0.3', context: ['paged', 'index'], post: false},
            {data: {root: {context: []}}}
        ).then(function (rendered) {
            should.exist(rendered);
            rendered.string.should.match(/<script type="text\/javascript">\n/);
            rendered.string.should.match(/ghost\.init\(\{/);
            rendered.string.should.match(/\tclientId: "/);
            rendered.string.should.match(/\tclientSecret: "/);
            rendered.string.should.match(/}\);\n/);
            rendered.string.should.match(/\n<\/script>/);
            done();
        }).catch(done); // Fixed: propagate assertion/promise failures to mocha.
    });
    it('does not render script tag with for amp context', function (done) {
        var post = {
            meta_description: 'blog description',
            title: 'Welcome to Ghost',
            image: 'content/images/test-image.png',
            published_at: moment('2008-05-31T19:18:15').toISOString(),
            updated_at: moment('2014-10-06T15:23:54').toISOString(),
            tags: [{name: 'tag1'}, {name: 'tag2'}, {name: 'tag3'}],
            author: {
                name: 'Author name',
                url: 'http//:testauthorurl.com',
                slug: 'Author',
                image: 'content/images/test-author-image.png',
                website: 'http://authorwebsite.com',
                facebook: 'testuser',
                twitter: '@testuser'
            }
        };
        helpers.ghost_head.call(
            {safeVersion: '0.3', context: ['amp', 'post'], post: post},
            {data: {root: {context: ['amp', 'post']}}}
        ).then(function (rendered) {
            should.exist(rendered);
            // No bootstrap script of any kind on AMP pages.
            rendered.string.should.not.match(/<script type="text\/javascript">\n/);
            rendered.string.should.not.match(/ghost\.init\(\{/);
            rendered.string.should.not.match(/\tclientId: "/);
            rendered.string.should.not.match(/\tclientSecret: "/);
            rendered.string.should.not.match(/}\);\n/);
            rendered.string.should.not.match(/\n<\/script>/);
            done();
        }).catch(done); // Fixed: propagate assertion/promise failures to mocha.
    });
});
// theme.amp = false must suppress the <link rel="amphtml"> element on posts.
describe('amp is disabled', function () {
beforeEach(function () {
configUtils.set({
url: 'http://testurl.com/',
theme: {
amp: false
}
});
});
it('does not contain amphtml link', function (done) {
var post = {
meta_description: 'blog description',
title: 'Welcome to Ghost',
image: 'content/images/test-image.png',
published_at: moment('2008-05-31T19:18:15').toISOString(),
updated_at: moment('2014-10-06T15:23:54').toISOString(),
tags: [{name: 'tag1'}, {name: 'tag2'}, {name: 'tag3'}],
author: {
name: 'Author name',
url: 'http//:testauthorurl.com',
slug: 'Author',
image: 'content/images/test-author-image.png',
website: 'http://authorwebsite.com',
facebook: 'testuser',
twitter: '@testuser'
}
};
helpers.ghost_head.call(
{relativeUrl: '/post/', safeVersion: '0.3', context: ['post'], post: post},
{data: {root: {context: ['post']}}}
).then(function (rendered) {
should.exist(rendered);
rendered.string.should.not.match(/<link rel="amphtml"/);
done();
}).catch(done);
});
});
}); |
from django.apps import AppConfig


class AccountsConfig(AppConfig):
    """Django application configuration for the accounts app."""

    # Full dotted path of the application package.
    name = 'clarityv2.accounts'
<?php
defined('BASEPATH') OR exit('No direct script access allowed');
/**
 * Admin controller for managing company ticket counters.
 *
 * Provides the counter list (with a DataTables server-side endpoint),
 * per-company listing, counter details, and create/edit/delete actions.
 * All actions require an authenticated session.
 */
class Counters extends RM_Controller {

    public function __construct()
    {
        parent::__construct();
        $this->load->model('companies_model');
        $this->load->helper('url');
        // Guard every action behind the login session.
        if ( ! $this->session->userdata('logged_in') ) {
            redirect('/login');
        }
    }

    /**
     * Counter list page (DataTable shell; rows are fetched via get_all()).
     *
     * @param int|null $company_id Unused here; kept for route compatibility.
     */
    public function index($company_id = NULL)
    {
        $this->data['css_files'] = array(
            // Fixed: the original used backslashes ('seatassets\css\...'),
            // which is not a valid URL path separator.
            base_url('seatassets/css/counters-view.css'),
            base_url('assets/global/plugins/datatables/datatables.min.css'),
            base_url('assets/global/plugins/datatables/plugins/bootstrap/datatables.bootstrap.css'),
        );
        $this->data['js_files'] = array(
            base_url('assets/global/scripts/datatable.js'),
            base_url('assets/global/plugins/datatables/datatables.min.js'),
            base_url('assets/global/plugins/datatables/plugins/bootstrap/datatables.bootstrap.js'),
            base_url('seatassets/js/<API key>.js'),
            base_url('seatassets/js/counters-view.js'),
        );
        $this->data['title'] = 'Company Counters';
        $breadcrumb[] = array('name' => 'Companies', 'url' => 'admin/companies');
        $breadcrumb[] = array('name' => 'Counters', 'url' => '');
        $this->data['breadcrumb'] = $breadcrumb;
        $this->data['current_page'] = 'counters';
        $this->load->view('templates/header',$this->data);
        $this->load->view('templates/sidebar', $this->data);
        $this->load->view('admin/counters/counters', $this->data);
        $this->load->view('templates/footer', $this->data);
    }

    /**
     * DataTables server-side processing endpoint: emits the JSON page of
     * counter rows for the list view.
     */
    function get_all()
    {
        $keyword = '';
        if( isset( $_REQUEST['search']['value'] ) && $_REQUEST['search']['value'] != '' ) {
            // Fixed: escape the user-supplied search term before embedding it
            // in the LIKE condition — the original concatenated it raw, which
            // is a SQL injection vector.
            $keyword = $this->db->escape_like_str($_REQUEST['search']['value']);
        }
        $join_arr_left = array();
        $condition = '';
        if( $keyword != '' ) {
            $condition .= '(c.ID LIKE "%'.$keyword.'%" OR c.counter_name LIKE "%'.$keyword.'%" OR c.address LIKE "%'.$keyword.'%")';
        }
        $iTotalRecords = $this->common->get_total_count( 'company_counter c', $condition, $join_arr_left );
        $iDisplayLength = isset($_REQUEST['length']) ? intval($_REQUEST['length']) : 10;
        // DataTables sends -1 for "show all".
        $iDisplayLength = $iDisplayLength < 0 ? $iTotalRecords : $iDisplayLength;
        $iDisplayStart = isset($_REQUEST['start']) ? intval($_REQUEST['start']) : 0;
        $sEcho = isset($_REQUEST['draw']) ? intval($_REQUEST['draw']) : 0;
        $records = array();
        $records["data"] = array();
        $limit = $iDisplayLength;
        $offset = $iDisplayStart;
        // Whitelist of sortable columns (DataTables column index => DB column).
        $columns = array(
            1 => 'ID',
            2 => 'counter_name',
            8 => 'address',
        );
        // Fixed: validate the requested sort column and direction — the
        // original indexed $columns with raw request input (undefined-index
        // notice for any other column) and passed the direction unchecked.
        $order_col = isset($_REQUEST['order'][0]['column']) ? intval($_REQUEST['order'][0]['column']) : 1;
        $order_by = isset($columns[$order_col]) ? $columns[$order_col] : 'ID';
        $order = (isset($_REQUEST['order'][0]['dir']) && strtolower($_REQUEST['order'][0]['dir']) === 'desc') ? 'desc' : 'asc';
        $sort = $order_by.' '.$order;
        $result = $this->common->get_all( 'company_counter c', $condition, 'c.*', $sort, $limit, $offset, $join_arr_left );
        foreach( $result as $row ) {
            $counter_slug = $row->counter_slug;
            $counter_name = $row->counter_name;
            // Address is stored as HTML; show a plain-text 50-char excerpt.
            $content = strip_tags(html_entity_decode($row->address));
            $address = substr($content,0,50);
            $incharge = $row->incharge_name;
            $mobile = $row->incharge_mobile;
            $thana = $row->thana_id;
            $zone = $row->zone_id;
            // NOTE(review): $counter_name appears twice; the column layout of
            // the DataTable appears to expect this — confirm against the view.
            $records["data"][] = array(
                $counter_name,
                $address,
                '<a href="'.site_url('admin/counters/details/'.$counter_slug).'" title="">Details</a>',
                $counter_name,
                $incharge,
                $mobile,
                $thana,
                $zone,
                '<div class="center-block"><a href="'.site_url('admin/counters/edit/'.encrypt($row->ID)).'" title="Edit"><i class="fa fa-edit font-blue-ebonyclay"></i></a> <a onclick="return confirm(\'Are you sure you want to delete this counter?\');" href="'.site_url('admin/counters/delete/'.encrypt($row->ID)).'" title="Delete"><i class="fa fa-trash-o text-danger"></i></a></div>',
            );
        }
        $records["draw"] = $sEcho;
        $records["recordsTotal"] = $iTotalRecords;
        $records["recordsFiltered"] = $iTotalRecords;
        header('Content-type: application/json');
        echo json_encode($records);
    }

    /**
     * Lists the counters belonging to one company.
     *
     * @param string $company_salt_id Encrypted company ID from the URL.
     */
    public function company($company_salt_id)
    {
        // decrypt() returns a string; '*1' coerces it to a number so the
        // is_int() / truthiness checks below reject tampered IDs.
        $company_id = decrypt($company_salt_id)*1;
        if( !is_int($company_id) || !$company_id ) {
            $this->session->set_flashdata('delete_msg','No company found');
            redirect('admin/counters');
        }else{
            $this->data['css_files'] = array(
                base_url('assets/global/plugins/datatables/datatables.min.css'),
                base_url('assets/global/plugins/datatables/plugins/bootstrap/datatables.bootstrap.css'),
            );
            $result = $this->common->get_all( 'company_counter', array('company_id' => $company_id ) );
            $this->data['counter_rows'] = $result;
            $this->data['js_files'] = array(
                base_url('assets/global/scripts/datatable.js'),
                base_url('assets/global/plugins/datatables/datatables.min.js'),
                base_url('assets/global/plugins/datatables/plugins/bootstrap/datatables.bootstrap.js'),
                base_url('seatassets/js/<API key>.js'),
            );
            $this->data['title'] = 'Company Counters';
            $breadcrumb[] = array('name' => 'Companies', 'url' => 'admin/companies');
            $breadcrumb[] = array('name' => 'Counters', 'url' => '');
            $this->data['breadcrumb'] = $breadcrumb;
            $this->data['current_page'] = 'counters';
            $this->load->view('templates/header',$this->data);
            $this->load->view('templates/sidebar', $this->data);
            $this->load->view('admin/counters/counters', $this->data);
            $this->load->view('templates/footer', $this->data);
        }
    }

    /**
     * Counter details page, looked up by slug. 404s when the slug is unknown.
     *
     * @param string|null $slug Counter slug from the URL.
     */
    public function details($slug = NULL)
    {
        $counter_details = $this->companies_model->get_counters($slug);
        $this->data['counter_data'] = $counter_details;
        if (empty($this->data['counter_data']))
        {
            show_404();
        }
        $this->data['title'] = html_escape($counter_details['counter_name']);
        $breadcrumb[] = array('name' => 'Companies', 'url' => 'admin/companies');
        $breadcrumb[] = array('name' => 'Counters', 'url' => 'admin/counters');
        $breadcrumb[] = array('name' => html_escape($counter_details['counter_name']), 'url' => '');
        $this->data['breadcrumb'] = $breadcrumb;
        $this->data['current_page'] = 'counter_details';
        $this->load->view('templates/header',$this->data);
        $this->load->view('templates/sidebar', $this->data);
        $this->load->view('admin/counters/counter-details', $this->data);
        $this->load->view('templates/footer', $this->data);
    }

    /**
     * Register-new-counter page. Form submission is handled by the shared
     * private processor before the view is rendered.
     */
    public function register()
    {
        $this->data['css_files'] = array(
            base_url('assets/global/plugins/bootstrap-fileinput/bootstrap-fileinput.css'),
            base_url('assets/global/plugins/select2/css/select2.min.css'),
            base_url('assets/global/plugins/select2/css/select2-bootstrap.min.css'),
        );
        $this->data['js_files'] = array(
            base_url('assets/global/plugins/ckeditor/ckeditor.js'),
            base_url('assets/global/plugins/bootstrap-fileinput/bootstrap-fileinput.js'),
            base_url('seatassets/js/seat-editor.js'),
            base_url('assets/global/plugins/select2/js/select2.full.min.js'),
            base_url('assets/global/plugins/jquery-validation/js/jquery.validate.min.js'),
        );
        // Start: Process register counter
        $this-><API key>();
        // End: Process register counter
        $this->data['title'] = 'Register New Counter';
        $breadcrumb[] = array('name' => 'Companies', 'url' => 'admin/companies');
        $breadcrumb[] = array('name' => 'Counters', 'url' => 'admin/counters');
        $breadcrumb[] = array('name' => 'New Counter', 'url' => '');
        $this->data['breadcrumb'] = $breadcrumb;
        $this->data['current_page'] = 'counter_add';
        $this->load->view('templates/header', $this->data);
        $this->load->view('templates/sidebar', $this->data);
        $this->load->view('admin/counters/register', $this->data);
        $this->load->view('templates/footer', $this->data);
    }

    /**
     * Edit-counter page. Loads the existing row by decrypted ID, then defers
     * form handling to the shared private processor.
     *
     * @param string|int $row_salt_id Encrypted counter row ID from the URL.
     */
    public function edit($row_salt_id = 0)
    {
        $this->data['css_files'] = array(
            base_url('assets/global/plugins/bootstrap-fileinput/bootstrap-fileinput.css'),
            base_url('assets/global/plugins/select2/css/select2.min.css'),
            base_url('assets/global/plugins/select2/css/select2-bootstrap.min.css'),
        );
        $this->data['js_files'] = array(
            base_url('assets/global/plugins/ckeditor/ckeditor.js'),
            base_url('assets/global/plugins/bootstrap-fileinput/bootstrap-fileinput.js'),
            base_url('seatassets/js/seat-editor.js'),
            base_url('assets/global/plugins/select2/js/select2.full.min.js'),
            base_url('assets/global/plugins/jquery-validation/js/jquery.validate.min.js'),
        );
        //Get row ID of this Entry
        $row_id = decrypt($row_salt_id)*1;
        if( !is_int($row_id) || !$row_id ) {
            $this->session->set_flashdata('delete_msg','Can not be edited');
            redirect('admin/counters');
        }else{
            $counter_details = $this->common->get( 'company_counter', array( 'ID' => $row_id ), 'array' );
            $this->data['counter_data'] = $counter_details;
            if (empty($this->data['counter_data']))
            {
                show_404();
            }
            $this->data['title'] = html_escape($counter_details['counter_name']);
            $breadcrumb[] = array('name' => 'Companies', 'url' => 'admin/companies');
            $breadcrumb[] = array('name' => 'Counters', 'url' => 'admin/counters');
            $breadcrumb[] = array('name' => 'Edit Counter', 'url' => '');
            $this->data['breadcrumb'] = $breadcrumb;
            $this->data['current_page'] = 'counter_edit';
        }
        // Start: Process register company
        $this-><API key>();
        // End: Process register company
        $this->load->view('templates/header', $this->data);
        $this->load->view('templates/sidebar', $this->data);
        $this->load->view('admin/counters/edit-counter', $this->data);
        $this->load->view('templates/footer', $this->data);
    }

    /**
     * Deletes a counter row (hard delete) and redirects back to the list.
     *
     * @param string|int $row_salt_id Encrypted counter row ID from the URL.
     */
    public function delete($row_salt_id = 0)
    {
        //Get row ID of this Entry
        $row_id = decrypt($row_salt_id)*1;
        if( !is_int($row_id) || !$row_id ) {
            redirect('admin/counters');
        }else{
            $this->data['row_id'] = $row_id;
            $this->common->delete( 'company_counter', array( 'ID' => $row_id ) );
            $this->session->set_flashdata('delete_msg','Counter have been successfully deleted!');
            redirect('admin/counters');
        }
    }

    /**
     * Shared insert/update handler for the counter form. Validates the posted
     * fields, then either updates the posted row (update_counter flow) or
     * inserts a new row and derives its URL slug from name + new ID.
     */
    //Process Regsiter New Company
    private function <API key>(){
        //Add New Company
        if(($this->input->post('<API key>') !== NULL) || ($this->input->post('update_counter') !== NULL)){
            $this->form_validation->set_rules('counter_name', 'Counter Name', 'trim|required|htmlspecialchars|min_length[2]');
            $this->form_validation->set_rules('incharge_name', 'Incharge Name', 'trim|required|htmlspecialchars|min_length[2]');
            $this->form_validation->set_rules('incharge_mobile', 'Incharge Mobile', 'trim|required|htmlspecialchars|min_length[2]');
            $this->form_validation->set_rules('incharge_email', 'Incharge Email', 'trim|valid_email|htmlspecialchars|min_length[2]');
            $this->form_validation->set_rules('contact_info', 'Contact Info', 'trim|htmlspecialchars|min_length[2]');
            $this->form_validation->set_rules('address', 'Address', 'trim|htmlspecialchars|min_length[2]');
            $this->form_validation->set_rules('thana_id', 'Thana', 'trim|htmlspecialchars');
            $this->form_validation->set_rules('district_id', 'District', 'trim|htmlspecialchars');
            if( !$this->form_validation->run() ) {
                // Hand validation errors back to the form via flashdata.
                $error_message_array = $this->form_validation->error_array();
                $this->session->set_flashdata('error_msg_arr', $error_message_array);
            }else{
                $data_arr = array(
                    'company_id'=> trim($this->input->post('company_id')),
                    'counter_name'=> trim($this->input->post('counter_name')),
                    'incharge_name'=> trim($this->input->post('incharge_name')),
                    'incharge_mobile'=> trim($this->input->post('incharge_mobile')),
                    'incharge_email'=> trim($this->input->post('incharge_email')),
                    'contact_info'=> trim($this->input->post('contact_info')),
                    'address'=> trim($this->input->post('address')),
                    'thana_id'=> trim($this->input->post('thana_id')),
                    'district_id'=> trim($this->input->post('district_id')),
                    'updated_at'=> date('Y-m-d H:i:s'),
                    'updated_by'=> $this->session->userdata('user_id'),
                );
                if(($this->input->post('update_counter_id') !== NULL) && ($this->input->post('update_counter') !== NULL)){
                    // Update flow.
                    $counter_id = $this->input->post('update_counter_id');
                    $this->common->update( 'company_counter', $data_arr, array( 'ID' => $counter_id ) );
                    $this->session->set_flashdata('success_msg','Updated done!');
                    redirect('admin/counters');
                }else{
                    // Insert flow: slug is derived from name + new row ID so it
                    // is unique, then written back in a second update.
                    $counter_id = $this->common->insert( 'company_counter', $data_arr );
                    $<API key> = $this->input->post('counter_name').'-'.$counter_id;
                    $counter_slug = url_title($<API key>, 'dash', TRUE);
                    $this->common->update( 'company_counter', array('counter_slug' => $counter_slug), array( 'ID' => $counter_id ) );
                    $this->session->set_flashdata('success_msg','Added done!');
                    redirect('admin/counters');
                }
            }
        }
    }//EOF process company register info
}
/**
* @fileoverview Used for creating a suggested configuration based on project code.
* @author Ian VanSchooten
*/
"use strict";
// Requirements
const assert = require("chai").assert,
autoconfig = require("../../../lib/init/autoconfig"),
sourceCodeUtils = require("../../../lib/init/source-code-utils"),
baseDefaultOptions = require("../../../conf/default-cli-options"),
recommendedConfig = require("../../conf/eslint-recommended");
const defaultOptions = Object.assign({}, baseDefaultOptions, { cwd: process.cwd() });
// Data
// Fixture paths and the rule-configuration matrices exercised by the tests
// below. Each entry maps a rule ID to [defaultSeverityConfig, ...variants].
const <API key> = "./tests/fixtures/autoconfig/source.js";
const <API key> = "./tests/fixtures/autoconfig/<API key>.js";
const SEVERITY = 2;
// Tests
const rulesConfig = {
semi: [SEVERITY, [SEVERITY, "always"], [SEVERITY, "never"]],
"semi-spacing": [SEVERITY,
[SEVERITY, { before: true, after: true }],
[SEVERITY, { before: true, after: false }],
[SEVERITY, { before: false, after: true }],
[SEVERITY, { before: false, after: false }]
],
quotes: [SEVERITY,
[SEVERITY, "single"],
[SEVERITY, "double"],
[SEVERITY, "backtick"],
[SEVERITY, "single", "avoid-escape"],
[SEVERITY, "double", "avoid-escape"],
[SEVERITY, "backtick", "avoid-escape"]]
};
// Config whose rules are expected to produce lint errors on the fixture.
const errorRulesConfig = {
"no-unused-vars": [SEVERITY],
"semi-spacing": [SEVERITY,
[SEVERITY, { before: true, after: true }],
[SEVERITY, { before: true, after: false }],
[SEVERITY, { before: false, after: true }],
[SEVERITY, { before: false, after: false }]
]
};
// Tests for lib/init/autoconfig: the Registry that tracks candidate rule
// configurations, lints fixture source against each, and distils a config.
// Expected counts below are tied to the fixture files declared above.
describe("autoconfig", () => {
describe("Registry", () => {
it("should set up a registry for rules in a provided rulesConfig", () => {
const expectedRules = Object.keys(rulesConfig);
const registry = new autoconfig.Registry(rulesConfig);
assert.strictEqual(Object.keys(registry.rules).length, 3);
assert.sameMembers(Object.keys(registry.rules), expectedRules);
assert.isArray(registry.rules.semi);
assert.isArray(registry.rules["semi-spacing"]);
assert.isArray(registry.rules.quotes);
assert.lengthOf(registry.rules.semi, 3);
assert.lengthOf(registry.rules["semi-spacing"], 5);
assert.lengthOf(registry.rules.quotes, 7);
});
it("should not have any rules if constructed without a config argument", () => {
const registry = new autoconfig.Registry();
assert.isObject(registry.rules);
assert.lengthOf(Object.keys(registry.rules), 0);
});
it("should create registryItems for each rule with the proper keys", () => {
const registry = new autoconfig.Registry(rulesConfig);
assert.isObject(registry.rules.semi[0]);
assert.isObject(registry.rules["semi-spacing"][0]);
assert.isObject(registry.rules.quotes[0]);
assert.property(registry.rules.semi[0], "config");
assert.property(registry.rules.semi[0], "specificity");
assert.property(registry.rules.semi[0], "errorCount");
});
it("should populate the config property correctly", () => {
const registry = new autoconfig.Registry(rulesConfig);
assert.strictEqual(registry.rules.quotes[0].config, SEVERITY);
assert.deepStrictEqual(registry.rules.quotes[1].config, [SEVERITY, "single"]);
assert.deepStrictEqual(registry.rules.quotes[2].config, [SEVERITY, "double"]);
assert.deepStrictEqual(registry.rules.quotes[3].config, [SEVERITY, "backtick"]);
assert.deepStrictEqual(registry.rules.quotes[4].config, [SEVERITY, "single", "avoid-escape"]);
assert.deepStrictEqual(registry.rules.quotes[5].config, [SEVERITY, "double", "avoid-escape"]);
assert.deepStrictEqual(registry.rules.quotes[6].config, [SEVERITY, "backtick", "avoid-escape"]);
});
it("should assign the correct specificity", () => {
// Specificity == number of elements in the config entry.
const registry = new autoconfig.Registry(rulesConfig);
assert.strictEqual(registry.rules.quotes[0].specificity, 1);
assert.strictEqual(registry.rules.quotes[1].specificity, 2);
assert.strictEqual(registry.rules.quotes[6].specificity, 3);
});
it("should initially leave the errorCount as undefined", () => {
const registry = new autoconfig.Registry(rulesConfig);
assert.isUndefined(registry.rules.quotes[0].errorCount);
assert.isUndefined(registry.rules.quotes[1].errorCount);
assert.isUndefined(registry.rules.quotes[6].errorCount);
});
describe("<API key>()", () => {
it("should add core rules to registry", () => {
const registry = new autoconfig.Registry();
registry.<API key>();
const finalRuleCount = Object.keys(registry.rules).length;
assert(finalRuleCount > 0);
assert.include(Object.keys(registry.rules), "eqeqeq");
});
it("should not add duplicate rules", () => {
const registry = new autoconfig.Registry(rulesConfig);
registry.<API key>();
const semiCount = Object.keys(registry.rules).filter(ruleId => ruleId === "semi").length;
assert.strictEqual(semiCount, 1);
});
});
describe("buildRuleSets()", () => {
let ruleSets;
beforeEach(() => {
const registry = new autoconfig.Registry(rulesConfig);
ruleSets = registry.buildRuleSets();
});
it("should create an array of rule configuration sets", () => {
assert.isArray(ruleSets);
});
it("should include configs for each rule (at least for the first set)", () => {
assert.sameMembers(Object.keys(ruleSets[0]), ["semi", "semi-spacing", "quotes"]);
});
it("should create the first set from default rule configs (severity only)", () => {
assert.deepStrictEqual(ruleSets[0], { semi: SEVERITY, "semi-spacing": SEVERITY, quotes: SEVERITY });
});
it("should create as many ruleSets as the highest number of configs in a rule", () => {
// `quotes` has 7 possible configurations
assert.lengthOf(ruleSets, 7);
});
});
describe("lintSourceCode()", () => {
let registry;
beforeEach(() => {
const config = { ignore: false };
const sourceCode = sourceCodeUtils.<API key>(<API key>, config);
registry = new autoconfig.Registry(rulesConfig);
registry = registry.lintSourceCode(sourceCode, defaultOptions);
});
it("should populate the errorCount of all registryItems", () => {
const expectedRules = ["semi", "semi-spacing", "quotes"];
assert.sameMembers(Object.keys(registry.rules), expectedRules);
expectedRules.forEach(ruleId => {
assert(registry.rules[ruleId].length > 0);
registry.rules[ruleId].forEach(conf => {
assert.isNumber(conf.errorCount);
});
});
});
it("should correctly set the error count of configurations", () => {
// Error counts here are tied to the contents of the fixture file.
assert.strictEqual(registry.rules.semi[0].config, SEVERITY);
assert.strictEqual(registry.rules.semi[0].errorCount, 0);
assert.deepStrictEqual(registry.rules.semi[2].config, [SEVERITY, "never"]);
assert.strictEqual(registry.rules.semi[2].errorCount, 3);
});
it("should respect inline eslint config comments (and not crash when they make linting errors)", () => {
const config = { ignore: false };
const sourceCode = sourceCodeUtils.<API key>(<API key>, config);
const expectedRegistry = [
{ config: 2, specificity: 1, errorCount: 3 },
{ config: [2, "always"], specificity: 2, errorCount: 3 },
{ config: [2, "never"], specificity: 2, errorCount: 3 }
];
registry = new autoconfig.Registry(rulesConfig);
registry = registry.lintSourceCode(sourceCode, defaultOptions);
assert.deepStrictEqual(registry.rules.semi, expectedRegistry);
});
});
describe("stripFailingConfigs()", () => {
let registry;
beforeEach(() => {
const config = { ignore: false };
const sourceCode = sourceCodeUtils.<API key>(<API key>, config);
registry = new autoconfig.Registry(rulesConfig);
registry = registry.lintSourceCode(sourceCode, defaultOptions);
registry = registry.stripFailingConfigs();
});
it("should remove all registryItems with a non-zero errorCount", () => {
assert.lengthOf(registry.rules.semi, 2);
assert.lengthOf(registry.rules["semi-spacing"], 3);
assert.lengthOf(registry.rules.quotes, 1);
registry.rules.semi.forEach(registryItem => {
assert.strictEqual(registryItem.errorCount, 0);
});
registry.rules["semi-spacing"].forEach(registryItem => {
assert.strictEqual(registryItem.errorCount, 0);
});
registry.rules.quotes.forEach(registryItem => {
assert.strictEqual(registryItem.errorCount, 0);
});
});
});
describe("<API key>()", () => {
let failingRegistry;
beforeEach(() => {
const config = { ignore: false };
const sourceCode = sourceCodeUtils.<API key>(<API key>, config);
let registry = new autoconfig.Registry(errorRulesConfig);
registry = registry.lintSourceCode(sourceCode, defaultOptions);
failingRegistry = registry.<API key>();
});
it("should return a registry with no registryItems with an errorCount of zero", () => {
const failingRules = Object.keys(failingRegistry.rules);
assert.deepStrictEqual(failingRules, ["no-unused-vars"]);
assert.lengthOf(failingRegistry.rules["no-unused-vars"], 1);
assert(failingRegistry.rules["no-unused-vars"][0].errorCount > 0);
});
});
describe("createConfig()", () => {
let createdConfig;
beforeEach(() => {
const config = { ignore: false };
const sourceCode = sourceCodeUtils.<API key>(<API key>, config);
let registry = new autoconfig.Registry(rulesConfig);
registry = registry.lintSourceCode(sourceCode, defaultOptions);
registry = registry.stripFailingConfigs();
createdConfig = registry.createConfig();
});
it("should create a config with a rules property", () => {
assert.property(createdConfig, "rules");
});
it("should add rules which have only one registryItem to the config", () => {
const configuredRules = Object.keys(createdConfig.rules);
assert.deepStrictEqual(configuredRules, ["quotes"]);
});
it("should set the configuration of the rule to the registryItem's `config` value", () => {
assert.deepStrictEqual(createdConfig.rules.quotes, [2, "double", "avoid-escape"]);
});
it("should not care how many errors the config has", () => {
const config = { ignore: false };
const sourceCode = sourceCodeUtils.<API key>(<API key>, config);
let registry = new autoconfig.Registry(errorRulesConfig);
registry = registry.lintSourceCode(sourceCode, defaultOptions);
const failingRegistry = registry.<API key>();
createdConfig = failingRegistry.createConfig();
const configuredRules = Object.keys(createdConfig.rules);
assert.deepStrictEqual(configuredRules, ["no-unused-vars"]);
});
});
describe("filterBySpecificity()", () => {
let registry;
beforeEach(() => {
registry = new autoconfig.Registry(rulesConfig);
});
it("should return a registry where all configs have a desired specificity", () => {
const filteredRegistry1 = registry.filterBySpecificity(1);
const filteredRegistry2 = registry.filterBySpecificity(2);
const filteredRegistry3 = registry.filterBySpecificity(3);
assert.lengthOf(filteredRegistry1.rules.semi, 1);
assert.lengthOf(filteredRegistry1.rules["semi-spacing"], 1);
assert.lengthOf(filteredRegistry1.rules.quotes, 1);
assert.lengthOf(filteredRegistry2.rules.semi, 2);
assert.lengthOf(filteredRegistry2.rules["semi-spacing"], 4);
assert.lengthOf(filteredRegistry2.rules.quotes, 3);
assert.lengthOf(filteredRegistry3.rules.quotes, 3);
});
});
});
// Merging an existing config with eslint:recommended: `extends` must stay an
// array, keep prior entries, and put "eslint:recommended" first.
describe("<API key>()", () => {
it("should return a configuration which has `extends` key with Array type value", () => {
const oldConfig = { extends: [], rules: {} };
const newConfig = autoconfig.<API key>(oldConfig);
assert.exists(newConfig.extends);
assert.isArray(newConfig.extends);
});
it("should return a configuration which has array property `extends`", () => {
const oldConfig = { extends: [], rules: {} };
const newConfig = autoconfig.<API key>(oldConfig);
assert.include(newConfig.extends, "eslint:recommended");
});
it("should return a configuration which preserves the previous extending configurations", () => {
const oldConfig = { extends: ["previous:configuration1", "previous:configuration2"], rules: {} };
const newConfig = autoconfig.<API key>(oldConfig);
assert.includeMembers(newConfig.extends, oldConfig.extends);
});
it("should return a configuration which has `eslint:recommended` at the first of `extends`", () => {
const oldConfig = { extends: ["previous:configuration1", "previous:configuration2"], rules: {} };
const newConfig = autoconfig.<API key>(oldConfig);
const [<API key>] = newConfig.extends;
assert.strictEqual(<API key>, "eslint:recommended");
});
it("should return a configuration which not includes rules configured in `eslint:recommended`", () => {
const oldConfig = { extends: [], rules: { ...recommendedConfig.rules } };
const newConfig = autoconfig.<API key>(oldConfig);
assert.notInclude(newConfig.rules, oldConfig.rules);
});
});
});
/* @echo header */
// NOTE(review): the @echo header/footer directives are build-time includes
// (preprocessor) that wrap this file in a module/IIFE; the top-level `return`
// below is only valid after that wrapping — confirm against the build config.
var
_local
;
// Public module object; members are attached below and returned to the wrapper.
var jira = {
};
/**
* Do stuff
*
* @method publicMethod
*/
jira.publicMethod = function() {
};
return jira;
/* @echo footer */
/* Base: full-viewport page and shared heading typography */
body,
html {
width: 100%;
height: 100%;
}
body,
h1,
h2,
h3,
h4,
h5,
h6 {
font-family: "Lato","Helvetica Neue",Helvetica,Arial,sans-serif;
font-weight: 700;
}
.lead {
font-size: 18px;
font-weight: 400;
}
/* Hero/intro header with full-bleed cover image */
.intro-header {
padding-top: 50px; /* If you're making other pages, make sure there is 50px of padding to make sure the navbar doesn't overlap content! */
padding-bottom: 50px;
text-align: center;
color: #f8f8f8;
background: url(../img/intro-bg.jpg) no-repeat center center;
background-size: cover;
}
.intro-message {
position: relative;
padding-top: 20%;
padding-bottom: 20%;
}
.intro-message > h1 {
margin: 0;
text-shadow: 2px 2px 3px rgba(0,0,0,0.6);
font-size: 5em;
}
.intro-divider {
width: 400px;
border-top: 1px solid #f8f8f8;
border-bottom: 1px solid rgba(0,0,0,0.2);
}
.intro-message > h3 {
text-shadow: 2px 2px 3px rgba(0,0,0,0.6);
}
/* Mobile: stack the intro list items and let the divider span full width */
@media(max-width:767px) {
.intro-message {
padding-bottom: 15%;
}
.intro-message > h1 {
font-size: 3em;
}
ul.<API key> > li {
display: block;
margin-bottom: 20px;
padding: 0;
}
ul.<API key> > li:last-child {
margin-bottom: 0;
}
.intro-divider {
width: 100%;
}
}
.network-name {
text-transform: uppercase;
font-size: 14px;
font-weight: 400;
letter-spacing: 2px;
}
/* Alternating content bands */
.content-section-a {
padding: 50px 0;
background-color: #f8f8f8;
}
.content-section-b {
padding: 50px 0;
border-top: 1px solid #e7e7e7;
border-bottom: 1px solid #e7e7e7;
}
.section-heading {
margin-bottom: 30px;
}
.<API key> {
float: left;
width: 200px;
border-top: 3px solid #e7e7e7;
}
/* Full-bleed call-to-action banner */
.banner {
padding: 100px 0;
color: #f8f8f8;
background: url(../img/banner-bg.jpg) no-repeat center center;
background-size: cover;
}
.banner h2 {
margin: 0;
text-shadow: 2px 2px 3px rgba(0,0,0,0.6);
font-size: 3em;
}
.banner ul {
margin-bottom: 0;
}
.<API key> {
float: right;
margin-top: 0;
}
@media(max-width:1199px) {
ul.<API key> {
float: left;
margin-top: 15px;
}
}
@media(max-width:767px) {
.banner h2 {
margin: 0;
text-shadow: 2px 2px 3px rgba(0,0,0,0.6);
font-size: 3em;
}
ul.<API key> > li {
display: block;
margin-bottom: 20px;
padding: 0;
}
ul.<API key> > li:last-child {
margin-bottom: 0;
}
}
/* Footer and contact details */
footer {
padding: 50px 0;
background-color: #f8f8f8;
}
p.copyright {
margin: 15px 0 0;
}
.pnumber {
font-size: 1.5em;
}
.email {
font-size: 1.5em;
}
using MagicCuisine.Models;
using MagicCuisine.Providers;
using Services.Contracts;
using System;
using System.Linq;
using System.Web.Mvc;
namespace MagicCuisine.Controllers
{
    /// <summary>
    /// Serves the landing page: maps every visible recipe to its view
    /// model and renders the index view.
    /// </summary>
    public class HomeController : Controller
    {
        private readonly IRecipeService recipeService;
        private readonly IMapProvider mapProvider;

        /// <summary>
        /// Creates the controller. Both collaborators are mandatory;
        /// a null service is rejected before a null map provider.
        /// </summary>
        public HomeController(IRecipeService recipeService, IMapProvider mapProvider)
        {
            if (recipeService == null)
            {
                throw new <API key>();
            }

            if (mapProvider == null)
            {
                throw new <API key>();
            }

            this.recipeService = recipeService;
            this.mapProvider = mapProvider;
        }

        /// <summary>
        /// GET /: renders the home page with all (non-deleted) recipes.
        /// </summary>
        public ActionResult Index()
        {
            var recipeModels = this.recipeService
                .GetAll(false)
                .Select(recipe => this.mapProvider.GetMap<RecipeViewModel>(recipe))
                .ToList();

            var viewModel = new HomeIndexViewModel();
            viewModel.LogoImg = "Content/Images/general/logo.png";
            viewModel.Recipes = recipeModels;

            return View(viewModel);
        }
    }
}
layout: page
title: Other Projects
permalink: /other_projects/
order: 6
Outlining my non-published projects here (open source contributions, course research projects, designed homeworks). My research work can be seen [here](/research/).
# **Other Research (Course Projects)**
[Self-supervised Learning on 3D Point Clouds](https:
[MixMatch on Vision + Language Tasks (NLVR2)](https:
[Research Exchange - A Collaborative Paper Annotation Tool](https://github.com/<API key>/research-exchange) - A platform to collaboratively annotate scientific literature to help newcomers understand research papers, built during a Human-Computer Interaction course project ([report]({{ site.url }}/assets/research-exchange.pdf)).
[Inference Networks for Structured Prediction](https:
[Diversity Sampling in Machine Learning](http:
[Macro Actions in Reinforcement Learning](https:
[Single Image Haze Removal](https:
[CNNs for Sentence Classification](https:
[Brittle Fracture Simulation](https:
[ECG Signal Analysis](https:
# **Course Materials**
[Homework](https:
# **Open Source Contributions**
* Primary Contributor / Maintainer - [mozilla/wptview](https://github.com/mozilla/wptview)
* Significant Contributions - [mozilla/gecko-dev](https:
* Other Contributions - [mozilla-b2g/gaia](https:
# **Indian Language Datasets**
As a part of my RnD project at [IIT Bombay](http:
* Malayalam (denoted by `ml`)
* Tamil (denoted by `ta`)
* Kannada (denoted by `kn`)
* Telugu (denoted by `te`)
* Hindi (denoted by `hi`)
* PTB (denoted by `ptb`)
All these datasets are compatible with [SRILM](http: |
<html lang="en" class=" js csstransforms3d">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<link href="./inc/style.css" type="text/css" rel="stylesheet">
<link href="./inc/custom.css" type="text/css" rel="stylesheet">
</head>
<body>
<h1>Reset Settings to Defaults</h1>
<p>
Let's say that you're not satisfied with the changes you made in Devlom Configurator. If you want to revert the forum back to its initial settings (all Devlom Configurator sections will be reverted to their defaults, including footer content, copyright, etc.), you just have to delete one file:
</p>
<pre>solace/config.yaml</pre>
<p>
That's it. You may also <strong>Purge the cache</strong> and you're done.
</p>
<br />
<script src="./inc/iframeResizer.contentWindow.min.js" type="text/javascript" ></script>
</body>
</html> |
<?php
namespace Tlt\Bundle\<API key>\Model;
use Tlt\Bundle\<API key>\Entity\GeneralLocation;
use Doctrine\ORM\EntityManager;
use Oro\Bundle\SecurityBundle\ORM\Walker\AclHelper;
class <API key>
{
    /**
     * Doctrine entity manager used for persistence.
     *
     * NOTE(review): neither injected dependency is used by the visible
     * methods -- presumably they serve responsibilities not shown here,
     * or are kept for future use. Confirm before removing.
     *
     * @var EntityManager
     */
    protected $entityManager;

    /**
     * ACL helper for applying access-control restrictions to queries.
     *
     * @var AclHelper
     */
    protected $aclHelper;

    /**
     * @param EntityManager $entityManager
     * @param AclHelper     $aclHelper
     */
    public function __construct(
        EntityManager $entityManager,
        AclHelper $aclHelper
    )
    {
        $this->entityManager = $entityManager;
        $this->aclHelper = $aclHelper;
    }

    /**
     * Public entry point: hands out a GeneralLocation entity built by the
     * protected factory method below.
     *
     * @return GeneralLocation
     */
    public function <API key>()
    {
        return $this-><API key>();
    }

    /**
     * Creates a fresh, empty GeneralLocation entity (overridable in
     * subclasses to customise construction).
     *
     * @return GeneralLocation
     */
    protected function <API key>()
    {
        return new GeneralLocation();
    }
}
(function(global) {
  "use strict";

  // Environment detection: under Node.js we fetch with request(),
  // in the browser we fall back to JSON-P script injection.
  var inNodeJS = false;
  if (typeof process !== 'undefined') {
    inNodeJS = true;
    var request = require('request');
  }

  // ES5 Array.prototype.indexOf polyfill for legacy browsers.
  if (!Array.prototype.indexOf) {
    Array.prototype.indexOf = function (searchElement, fromIndex) {
      if (this == null) {
        throw new TypeError();
      }
      var t = Object(this);
      var len = t.length >>> 0;  // coerce length to uint32
      if (len === 0) {
        return -1;
      }
      var n = 0;  // normalised start index
      if (arguments.length > 1) {
        n = Number(arguments[1]);
        if (n != n) { // shortcut for verifying if it's NaN
          n = 0;
        } else if (n != 0 && n != Infinity && n != -Infinity) {
          // truncate toward zero, preserving sign
          n = (n > 0 || -1) * Math.floor(Math.abs(n));
        }
      }
      if (n >= len) {
        return -1;
      }
      // A negative fromIndex counts back from the end of the array.
      var k = n >= 0 ? n : Math.max(len - Math.abs(n), 0);
      for (; k < len; k++) {
        if (k in t && t[k] === searchElement) {
          return k;
        }
      }
      return -1;
    }
  }
/*
  Tabletop constructor. Accepts either an options object or a bare
  spreadsheet-key string, and works with or without `new`.
*/
var Tabletop = function(options) {
  // Make sure Tabletop is being used as a constructor no matter what.
  if(!this || !(this instanceof Tabletop)) {
    return new Tabletop(options);
  }

  // Shorthand: Tabletop('abc') is equivalent to Tabletop({ key: 'abc' }).
  if(typeof(options) === 'string') {
    options = { key : options };
  }

  this.callback = options.callback;
  this.wanted = options.wanted || [];
  this.key = options.key;
  this.simpleSheet = !!options.simpleSheet;
  this.parseNumbers = !!options.parseNumbers;
  this.wait = !!options.wait;
  this.postProcess = options.postProcess;
  this.debug = !!options.debug;
  this.query = options.query || '';
  this.endpoint = options.endpoint || "https://spreadsheets.google.com";
  this.singleton = !!options.singleton;
  this.simple_url = !!options.simple_url;
  this.callbackContext = options.callbackContext;

  // A proxy implies the simple URL scheme and a single shared instance.
  if(typeof(options.proxy) !== 'undefined') {
    this.endpoint = options.proxy;
    this.simple_url = true;
    this.singleton = true;
  }

  this.parameterize = options.parameterize || false;

  if(this.singleton) {
    if(typeof(Tabletop.singleton) !== 'undefined') {
      this.log("WARNING! Tabletop singleton already defined");
    }
    Tabletop.singleton = this;
  }

  /* Be friendly about what you accept */
  // If a whole URL was passed instead of a key, pull the key out of it.
  if(/key=/.test(this.key)) {
    this.log("You passed a key as a URL! Attempting to parse.");
    this.key = this.key.match("key=(.*?)&")[1];
  }

  if(!this.key) {
    this.log("You need to pass Tabletop a key!");
    return;
  }

  this.log("Initializing with key " + this.key);

  this.models = {};
  this.model_names = [];

  // Worksheet-feed path; the browser needs the JSON-P ("in-script") variant.
  this.base_json_path = "/feeds/worksheets/" + this.key + "/public/basic?alt=";

  if (inNodeJS) {
    this.base_json_path += 'json';
  } else {
    this.base_json_path += 'json-in-script';
  }

  if(!this.wait) {
    this.fetch();
  }
};

// A global storage for callbacks.
Tabletop.callbacks = {};

// Backwards compatibility.
Tabletop.init = function(options) {
  return new Tabletop(options);
};

// Deprecated entry point, kept only to point old users at the new API.
// NOTE(review): `this` here is the Tabletop function itself, which has
// no .log method -- this call likely throws. Confirm intent.
Tabletop.sheets = function() {
  this.log("Times have changed! You'll want to use var tabletop = Tabletop.init(...); tabletop.sheets(...); instead of Tabletop.sheets(...)");
};
Tabletop.prototype = {

  /*
    Kick off loading the worksheet feed. An optional callback argument
    overrides the callback supplied at construction time.
  */
  fetch: function(callback) {
    if(typeof(callback) !== "undefined") {
      this.callback = callback;
    }
    this.requestData(this.base_json_path, this.loadSheets);
  },

  /*
    This will call the environment appropriate request method.

    In browser it will use JSON-P, in node it will use request()
  */
  requestData: function(path, callback) {
    if (inNodeJS) {
      this.serverSideFetch(path, callback);
    } else {
      this.injectScript(path, callback);
    }
  },
  /*
    Insert a JSON-P <script> tag into the page. Singleton instances use
    stable, well-known callback names; otherwise a uniquely named
    temporary callback is registered that cleans itself up after firing.
  */
  injectScript: function(path, callback) {
    var script = document.createElement('script');
    var callbackName;

    if(this.singleton) {
      if(callback === this.loadSheets) {
        callbackName = 'Tabletop.singleton.loadSheets';
      } else if (callback === this.loadSheet) {
        callbackName = 'Tabletop.singleton.loadSheet';
      }
    } else {
      var self = this;
      // Unique name: 'tt' + timestamp + random suffix.
      callbackName = 'tt' + (+new Date()) + (Math.floor(Math.random()*100000));
      // Create a temp callback which will get removed once it has executed,
      // this allows multiple instances of Tabletop to coexist.
      Tabletop.callbacks[ callbackName ] = function () {
        var args = Array.prototype.slice.call( arguments, 0 );
        callback.apply(self, args);
        script.parentNode.removeChild(script);
        delete Tabletop.callbacks[callbackName];
      };
      callbackName = 'Tabletop.callbacks.' + callbackName;
    }

    var url = path + "&callback=" + callbackName;

    if(this.simple_url) {
      // We've gone down a rabbit hole of passing injectScript the path, so let's
      // just pull the sheet_id out of the path like the least efficient worker bees
      if(path.indexOf("/list/") !== -1) {
        script.src = this.endpoint + "/" + this.key + "-" + path.split("/")[4];
      } else {
        script.src = this.endpoint + "/" + this.key;
      }
    } else {
      script.src = this.endpoint + url;
    }

    if (this.parameterize) {
      // Route the request through a parameterizing proxy prefix.
      script.src = this.parameterize + encodeURIComponent(script.src);
    }

    // NOTE(review): the DOM method name is redacted here; presumably
    // getElementsByTagName('script') -- confirm against upstream source.
    document.<API key>('script')[0].parentNode.appendChild(script);
  },

  /*
    This will only run if tabletop is being run in node.js
  */
  serverSideFetch: function(path, callback) {
    var self = this

    request({url: this.endpoint + path, json: true}, function(err, resp, body) {
      if (err) {
        return console.error(err);
      }
      callback.call(self, body);
    });
  },
/*
Is this a sheet you want to pull?
If { wanted: ["Sheet1"] } has been specified, only Sheet1 is imported
Pulls all sheets if none are specified
*/
isWanted: function(sheetName) {
if(this.wanted.length === 0) {
return true;
} else {
return this.wanted.indexOf(sheetName) !== -1;
}
},
data: function() {
// If the instance is being queried before the data's been fetched
// then return undefined.
if(this.model_names.length === 0) {
return undefined;
}
if(this.simpleSheet) {
if(this.model_names.length > 1 && this.debug) {
this.log("WARNING You have more than one sheet but are using simple sheet mode! Don't blame me when something goes wrong.");
}
return this.models[ this.model_names[0] ].all();
} else {
return this.models;
}
},
/*
Add another sheet to the wanted list
*/
addWanted: function(sheet) {
if(this.wanted.indexOf(sheet) === -1) {
this.wanted.push(sheet);
}
},
  /*
    Load all worksheets of the spreadsheet, turning each into a Tabletop Model.
    Need to use injectScript because the worksheet view that you're working from
    doesn't actually include the data. The list-based feed (/feeds/list/key..) does, though.
    Calls back to loadSheet in order to get the real work done.

    Used as a callback for the worksheet-based JSON
  */
  loadSheets: function(data) {
    var i, ilen;
    var toLoad = [];
    this.foundSheetNames = [];

    for(i = 0, ilen = data.feed.entry.length; i < ilen ; i++) {
      this.foundSheetNames.push(data.feed.entry[i].title.$t);
      // Only pull in desired sheets to reduce loading
      // NOTE(review): the wanted-filter checks content.$t while the names
      // collected above come from title.$t -- confirm these always match.
      if( this.isWanted(data.feed.entry[i].content.$t) ) {
        // Sheet id is assumed to be the last 3 chars of the link href.
        var sheet_id = data.feed.entry[i].link[3].href.substr( data.feed.entry[i].link[3].href.length - 3, 3);

        var json_path = "/feeds/list/" + this.key + "/" + sheet_id + "/public/values?sq=" + this.query + '&alt='

        if (inNodeJS) {
          json_path += 'json';
        } else {
          json_path += 'json-in-script';
        }

        toLoad.push(json_path);
      }
    }

    // One request per wanted sheet; loadSheet counts them back down.
    this.sheetsToLoad = toLoad.length;
    for(i = 0, ilen = toLoad.length; i < ilen; i++) {
      this.requestData(toLoad[i], this.loadSheet);
    }
  },
/*
Access layer for the this.models
.sheets() gets you all of the sheets
.sheets('Sheet1') gets you the sheet named Sheet1
*/
sheets: function(sheetName) {
if(typeof sheetName === "undefined") {
return this.models;
} else {
if(typeof(this.models[ sheetName ]) === "undefined") {
// alert( "Can't find " + sheetName );
return;
} else {
return this.models[ sheetName ];
}
}
},
/*
Parse a single list-based worksheet, turning it into a Tabletop Model
Used as a callback for the list-based JSON
*/
loadSheet: function(data) {
var model = new Tabletop.Model( { data: data,
parseNumbers: this.parseNumbers,
postProcess: this.postProcess,
tabletop: this } );
this.models[ model.name ] = model;
if(this.model_names.indexOf(model.name) === -1) {
this.model_names.push(model.name);
}
this.sheetsToLoad
if(this.sheetsToLoad === 0)
this.doCallback();
},
/*
Execute the callback upon loading! Rely on this.data() because you might
only request certain pieces of data (i.e. simpleSheet mode)
Tests this.sheetsToLoad just in case a race condition happens to show up
*/
doCallback: function() {
if(this.sheetsToLoad === 0) {
this.callback.apply(this.callbackContext || this, [this.data(), this]);
}
},
log: function(msg) {
if(this.debug) {
if(typeof console !== "undefined" && typeof console.log !== "undefined") {
Function.prototype.apply.apply(console.log, [console, arguments]);
}
}
}
};
/*
  Tabletop.Model stores the attribute names and parses the worksheet data
  to turn it into something worthwhile

  Options should be in the format { data: XXX }, with XXX being the list-based worksheet
*/
Tabletop.Model = function(options) {
  var i, j, ilen, jlen;
  this.column_names = [];
  this.name = options.data.feed.title.$t;
  this.elements = [];
  this.raw = options.data; // A copy of the sheet's raw data, for accessing minutiae

  // A sheet with no data rows has no `entry` array at all.
  if(typeof(options.data.feed.entry) === 'undefined') {
    options.tabletop.log("Missing data for " + this.name + ", make sure you didn't forget column headers");
    this.elements = [];
    return;
  }

  // Column names are the gsx$-prefixed keys of the first row.
  for(var key in options.data.feed.entry[0]){
    if(/^gsx/.test(key))
      this.column_names.push( key.replace("gsx$","") );
  }

  // Build one plain object per row, keyed by column name.
  for(i = 0, ilen = options.data.feed.entry.length ; i < ilen; i++) {
    var source = options.data.feed.entry[i];
    var element = {};
    for(var j = 0, jlen = this.column_names.length; j < jlen ; j++) {
      var cell = source[ "gsx$" + this.column_names[j] ];
      if (typeof(cell) !== 'undefined') {
        // Optionally coerce numeric-looking, non-empty strings to numbers.
        if(options.parseNumbers && cell.$t !== '' && !isNaN(cell.$t))
          element[ this.column_names[j] ] = +cell.$t;
        else
          element[ this.column_names[j] ] = cell.$t;
      } else {
        // Missing cell -> empty string, so every row has every column.
        element[ this.column_names[j] ] = '';
      }
    }
    // 1-based row number, unless the sheet itself provides one.
    if(element.rowNumber === undefined)
      element.rowNumber = i + 1;
    if( options.postProcess )
      options.postProcess(element);
    this.elements.push(element);
  }
};

Tabletop.Model.prototype = {
  /*
    Returns all of the elements (rows) of the worksheet as objects
  */
  all: function() {
    return this.elements;
  },

  /*
    Return the elements as an array of arrays, instead of an array of objects
  */
  toArray: function() {
    var array = [],
        i, j, ilen, jlen;

    for(i = 0, ilen = this.elements.length; i < ilen; i++) {
      var row = [];
      for(j = 0, jlen = this.column_names.length; j < jlen ; j++) {
        row.push( this.elements[i][ this.column_names[j] ] );
      }
      array.push(row);
    }

    return array;
  }
};

// Export for Node; attach to the global object in the browser.
if(inNodeJS) {
  module.exports = Tabletop;
} else {
  global.Tabletop = Tabletop;
}

})(this);
"""Single slice vgg with normalised scale.
"""
import functools
import lasagne as nn
import numpy as np
import theano
import theano.tensor as T
import data_loader
import <API key>
import image_transform
import layers
import preprocess
import postprocess
import objectives
import theano_printer
import updates
import utils
# Random params
rng = np.random
take_a_dump = False # dump a lot of data in a pkl-dump file. (for debugging)
<API key> = False # dump the outputs from the dataloader (for debugging)
# Memory usage scheme
caching = None
# Save and validation frequency
validate_every = 20
validate_train_set = True
save_every = 20
restart_from_save = False
<API key> = False
# Training (schedule) parameters
# - batch sizes
batch_size = 4
sunny_batch_size = 4
batches_per_chunk = 32
num_epochs_train = 150
# - learning rate and method
base_lr = 0.0001
<API key> = {
0: base_lr,
8*num_epochs_train/10: base_lr/10,
19*num_epochs_train/20: base_lr/100,
}
momentum = 0.9
build_updates = updates.build_adam_updates
# Preprocessing stuff
cleaning_processes = [
preprocess.set_upside_up,]
<API key> = [
functools.partial(preprocess.<API key>, z=2)]
augmentation_params = {
"rotation": (-180, 180),
"shear": (0, 0),
"translation": (-8, 8),
"flip_vert": (0, 1),
"roll_time": (0, 0),
"flip_time": (0, 0),
}
use_hough_roi = True
preprocess_train = functools.partial( # <API key> has a bug
preprocess.<API key>,
<API key>=functools.partial(
image_transform.<API key>,
<API key>=(64,64)))
<API key> = functools.partial(preprocess_train, augment=False)
preprocess_test = preprocess_train
<API key> = preprocess.<API key>
<API key> = preprocess.<API key>
<API key> = preprocess.<API key>
# Data generators
create_train_gen = data_loader.<API key>
<API key> = functools.partial(data_loader.<API key>, set="validation")
<API key> = functools.partial(data_loader.<API key>, set="train")
create_test_gen = functools.partial(data_loader.generate_test_batch, set=["validation", "test"])
def filter_samples(folders):
    """Hook to drop unwanted patient folders from the dataset.

    Currently a no-op passthrough: every folder is kept. The original
    intent (per the old comment) was to exclude patients that do not
    have more than 6 slices, but that filter was never implemented.
    The unused ``import glob`` left over from that attempt is removed.

    :param folders: list of patient folder paths
    :return: the (unfiltered) list of folders
    """
    return folders
# Input sizes
image_size = 64
nr_slices = 22
data_sizes = {
"sliced:data:sax": (batch_size, nr_slices, 30, image_size, image_size),
"sliced:data:sax:locations": (batch_size, nr_slices),
"sliced:data:sax:is_not_padded": (batch_size, nr_slices),
"sliced:data:randomslices": (batch_size, nr_slices, 30, image_size, image_size),
"sliced:data:singleslice:2ch": (batch_size, 30, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
"sliced:data:singleslice:4ch": (batch_size, 30, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
"sliced:data:singleslice:difference:middle": (batch_size, 29, image_size, image_size),
"sliced:data:singleslice:difference": (batch_size, 29, image_size, image_size),
"sliced:data:singleslice": (batch_size, 30, image_size, image_size),
"sliced:data:ax": (batch_size, 30, 15, image_size, image_size),
"sliced:data:shape": (batch_size, 2,),
"sunny": (sunny_batch_size, 1, image_size, image_size)
# TBC with the metadata
}
# Objective
l2_weight = 0.000
l2_weight_out = 0.000
def build_objective(interface_layers):
    """Build the training objective: the Kaggle competition objective on
    the network outputs, plus an L2 penalty on the layers that the
    model-building code tagged as "regularizable".
    """
    # l2 regu on certain layers
    l2_penalty = nn.regularization.<API key>(
        interface_layers["regularizable"], nn.regularization.l2)
    # build objective
    return objectives.KaggleObjective(interface_layers["outputs"], penalty=l2_penalty)
# Testing
postprocess = postprocess.postprocess
<API key> = 100 # More augmentations since a we only use single slices
tta_average_method = lambda x: np.cumsum(utils.<API key>(utils.cdf_to_pdf(x)))
# nonlinearity that puts a lower bound on its output
def lb_softplus(lb):
    # softplus(x) > 0 for every x, so adding lb keeps the output above lb
    def _shifted_softplus(x):
        return nn.nonlinearities.softplus(x) + lb
    return _shifted_softplus
init = nn.init.Orthogonal()
rnn_layer = functools.partial(nn.layers.RecurrentLayer,
W_in_to_hid=init,
W_hid_to_hid=init,
b=nn.init.Constant(0.1),
nonlinearity=nn.nonlinearities.rectify,
hid_init=nn.init.Constant(0.),
backwards=False,
learn_init=True,
gradient_steps=-1,
grad_clipping=False,
unroll_scan=False,
precompute_input=False)
# Architecture
def build_model():
    """Assemble the full patient-level network.

    Combines three sources: a per-slice SAX submodel whose per-slice
    mu/sigma outputs are aggregated across slices per patient, plus
    pretrained 2ch and 4ch gaussian models. A small meta-network
    predicts mixing weights, and the final systole/diastole outputs are
    weighted means over the three candidate distributions.
    """
    import j6_2ch_gauss, j6_4ch_gauss
    meta_2ch = j6_2ch_gauss.build_model()
    meta_4ch = j6_4ch_gauss.build_model()

    # Dense embeddings of the 2ch/4ch meta outputs, consumed by the
    # weight-predicting network below.
    l_meta_2ch_systole = nn.layers.DenseLayer(meta_2ch["meta_outputs"]["systole"], num_units=64, W=nn.init.Orthogonal(), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)
    l_meta_2ch_diastole = nn.layers.DenseLayer(meta_2ch["meta_outputs"]["diastole"], num_units=64, W=nn.init.Orthogonal(), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)
    l_meta_4ch_systole = nn.layers.DenseLayer(meta_4ch["meta_outputs"]["systole"], num_units=64, W=nn.init.Orthogonal(), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)
    l_meta_4ch_diastole = nn.layers.DenseLayer(meta_4ch["meta_outputs"]["diastole"], num_units=64, W=nn.init.Orthogonal(), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)

    # Regular model #
    # Input layers: SAX slice stack plus its padding mask and slice locations.
    input_size = data_sizes["sliced:data:sax"]
    input_size_mask = data_sizes["sliced:data:sax:is_not_padded"]
    <API key> = data_sizes["sliced:data:sax:locations"]

    l0 = nn.layers.InputLayer(input_size)
    lin_slice_mask = nn.layers.InputLayer(input_size_mask)
    lin_slice_locations = nn.layers.InputLayer(<API key>)

    # PREPROCESS SLICES SEPERATELY
    # Convolutional layers and some dense layers are defined in a submodel
    # (flatten patient x slice into one batch axis for the slice submodel).
    l0_slices = nn.layers.ReshapeLayer(l0, (-1, [2], [3], [4]))
    import <API key>
    submodel = <API key>.build_model(l0_slices)

    # Systole Dense layers
    l_sys_mu = submodel["meta_outputs"]["systole:mu"]
    l_sys_sigma = submodel["meta_outputs"]["systole:sigma"]
    l_sys_meta = submodel["meta_outputs"]["systole"]

    # Diastole Dense layers
    l_dia_mu = submodel["meta_outputs"]["diastole:mu"]
    l_dia_sigma = submodel["meta_outputs"]["diastole:sigma"]
    l_dia_meta = submodel["meta_outputs"]["diastole"]

    # AGGREGATE SLICES PER PATIENT
    # Slice locations, scaled by a fixed (non-trainable) 0.1 factor.
    <API key> = layers.TrainableScaleLayer(lin_slice_locations, scale=nn.init.Constant(0.1), trainable=False)

    # Systole
    # Un-flatten mu/sigma back to (patient, slice) and combine across slices.
    l_pat_sys_ss_mu = nn.layers.ReshapeLayer(l_sys_mu, (-1, nr_slices))
    l_pat_sys_ss_sigma = nn.layers.ReshapeLayer(l_sys_sigma, (-1, nr_slices))
    <API key> = layers.JeroenLayer([l_pat_sys_ss_mu, l_pat_sys_ss_sigma, lin_slice_mask, <API key>], rescale_input=100.)
    l_systole = layers.MuSigmaErfLayer(<API key>)

    # Embed the per-slice systole meta features and predict 3 mixing weights.
    l_sys_meta = nn.layers.DenseLayer(nn.layers.ReshapeLayer(l_sys_meta, (-1, nr_slices, 512)), num_units=64, W=nn.init.Orthogonal(), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)
    l_meta_systole = nn.layers.ConcatLayer([l_meta_2ch_systole, l_meta_4ch_systole, l_sys_meta])
    l_weights = nn.layers.DenseLayer(l_meta_systole, num_units=512, W=nn.init.Orthogonal(), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)
    # NOTE(review): this weight head uses rectify while the diastole one
    # below uses identity -- confirm the asymmetry is intentional.
    l_weights = nn.layers.DenseLayer(l_weights, num_units=3, W=nn.init.Orthogonal(), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)
    systole_output = layers.WeightedMeanLayer(l_weights, [l_systole, meta_2ch["outputs"]["systole"], meta_4ch["outputs"]["systole"]])

    # Diastole (mirrors the systole path above)
    l_pat_dia_ss_mu = nn.layers.ReshapeLayer(l_dia_mu, (-1, nr_slices))
    l_pat_dia_ss_sigma = nn.layers.ReshapeLayer(l_dia_sigma, (-1, nr_slices))
    <API key> = layers.JeroenLayer([l_pat_dia_ss_mu, l_pat_dia_ss_sigma, lin_slice_mask, <API key>], rescale_input=100.)
    l_diastole = layers.MuSigmaErfLayer(<API key>)

    l_dia_meta = nn.layers.DenseLayer(nn.layers.ReshapeLayer(l_dia_meta, (-1, nr_slices, 512)), num_units=64, W=nn.init.Orthogonal(), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)
    l_meta_diastole = nn.layers.ConcatLayer([l_meta_2ch_diastole, l_meta_4ch_diastole, l_dia_meta])
    l_weights = nn.layers.DenseLayer(l_meta_diastole, num_units=512, W=nn.init.Orthogonal(), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)
    l_weights = nn.layers.DenseLayer(l_weights, num_units=3, W=nn.init.Orthogonal(), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.identity)
    diastole_output = layers.WeightedMeanLayer(l_weights, [l_diastole, meta_2ch["outputs"]["diastole"], meta_4ch["outputs"]["diastole"]])

    submodels = [submodel, meta_2ch, meta_4ch]
    # Interface dict consumed by the training harness: inputs merge this
    # model's input layers with those of the 2ch/4ch submodels.
    return {
        "inputs":dict({
            "sliced:data:sax": l0,
            "sliced:data:sax:is_not_padded": lin_slice_mask,
            "sliced:data:sax:locations": lin_slice_locations,
        }, **{ k: v for d in [model["inputs"] for model in [meta_2ch, meta_4ch]]
                for k, v in d.items() }
        ),
        "outputs": {
            "systole": systole_output,
            "diastole": diastole_output,
        },
        "regularizable": dict(
            {},
            **{
                k: v
                for d in [model["regularizable"] for model in submodels if "regularizable" in model]
                for k, v in d.items() }
        ),
        "pretrained":{
            <API key>.__name__: submodel["outputs"],
            j6_2ch_gauss.__name__: meta_2ch["outputs"],
            j6_4ch_gauss.__name__: meta_4ch["outputs"],
        },
        #"cutoff_gradients": [
        #] + [ v for d in [model["meta_outputs"] for model in [meta_2ch, meta_4ch] if "meta_outputs" in model]
        #    for v in d.values() ]
    }
'use strict';

var mongoose = require('mongoose');
var Schema = mongoose.Schema;
var crypto = require('crypto');

// User model: the plaintext password is never stored -- only a salt and
// a PBKDF2-derived hash (see the `password` virtual and the methods below).
var UserSchema = new Schema({
  name: String,
  email: { type: String, lowercase: true },
  role: {
    type: String,
    default: 'user'
  },
  hashedPassword: String,
  provider: String,
  salt: String
});

/**
 * Virtuals
 */

// Setting `password` derives a fresh salt and hash; the plaintext is kept
// only on the in-memory document (this._password), never persisted.
UserSchema
  .virtual('password')
  .set(function(password) {
    this._password = password;
    this.salt = this.makeSalt();
    this.hashedPassword = this.encryptPassword(password);
  })
  .get(function() {
    return this._password;
  });

// Public profile information
UserSchema
  .virtual('profile')
  .get(function() {
    return {
      'name': this.name,
      'role': this.role
    };
  });

// Non-sensitive info we'll be putting in the token
UserSchema
  .virtual('token')
  .get(function() {
    return {
      '_id': this._id,
      'role': this.role
    };
  });

/**
 * Validations
 */

// Validate empty email
UserSchema
  .path('email')
  .validate(function(email) {
    return email.length;
  }, 'Email cannot be blank');

// Validate empty password
UserSchema
  .path('hashedPassword')
  .validate(function(hashedPassword) {
    return hashedPassword.length;
  }, 'Password cannot be blank');

// Validate email is not taken. Async validator: `respond` is called with
// the verdict; a hit on our own id means we're re-saving this document.
UserSchema
  .path('email')
  .validate(function(value, respond) {
    var self = this;
    this.constructor.findOne({email: value}, function(err, user) {
      if(err) throw err;
      if(user) {
        if(self.id === user.id) return respond(true);
        return respond(false);
      }
      respond(true);
    });
  }, 'The specified email address is already in use.');
// Truthy when `value` exists and is non-empty (returns the falsy value
// itself, or its length, so callers can use it in boolean context).
var validatePresenceOf = function(value) {
  if (!value) {
    return value;
  }
  return value.length;
};
/**
 * Pre-save hook
 */

// Reject brand-new users that have no derived password hash; existing
// documents may be re-saved without touching the password.
UserSchema
  .pre('save', function(next) {
    if (!this.isNew) return next();

    if (!validatePresenceOf(this.hashedPassword))
      next(new Error('Invalid password'));
    else
      next();
  });
/**
 * Methods
 */

UserSchema.methods = {
  /**
   * Authenticate - check if the passwords are the same
   *
   * @param {String} plainText
   * @return {Boolean}
   * @api public
   */
  authenticate: function(plainText) {
    return this.encryptPassword(plainText) === this.hashedPassword;
  },

  /**
   * Make salt
   *
   * @return {String} 16 random bytes, base64-encoded
   * @api public
   */
  makeSalt: function() {
    return crypto.randomBytes(16).toString('base64');
  },

  /**
   * Encrypt password
   *
   * Derives a 64-byte PBKDF2 key from the password and the stored salt.
   * Returns '' when either the password or the salt is missing, so the
   * equality check in authenticate() cannot accidentally succeed.
   *
   * @param {String} password
   * @return {String} base64-encoded derived key, or '' if not derivable
   * @api public
   */
  encryptPassword: function(password) {
    if (!password || !this.salt) return '';
    var salt = new Buffer(this.salt, 'base64');
    // FIX: pass the digest explicitly. 'sha1' was the implicit default,
    // so existing hashes stay valid, but calling pbkdf2Sync without a
    // digest is deprecated (DEP0009) and an error on current Node.
    return crypto.pbkdf2Sync(password, salt, 10000, 64, 'sha1').toString('base64');
  }
};
module.exports = mongoose.model('User', UserSchema); |
# -*- coding: utf-8 -
# See the NOTICE for more information.
from __future__ import with_statement
import os
import sys
from datetime import datetime
# workaround on osx, disable kqueue
# (set before gevent is imported below so it can take effect)
if sys.platform == "darwin":
    os.environ['EVENT_NOKQUEUE'] = "1"

# gevent is an optional dependency: fail early with a clear message.
try:
    import gevent
except ImportError:
    raise RuntimeError("You need gevent installed to use this worker.")
from gevent.pool import Pool
from gevent.server import StreamServer
from gevent import pywsgi

import gunicorn
from gunicorn.workers.async import AsyncWorker

# Advertised in SERVER_SOFTWARE, e.g. "gevent/1.0 gunicorn/19.0".
VERSION = "gevent/%s gunicorn/%s" % (gevent.__version__, gunicorn.__version__)

# Static part of the WSGI environ shared by every request.
BASE_WSGI_ENV = {
    'GATEWAY_INTERFACE': 'CGI/1.1',
    'SERVER_SOFTWARE': VERSION,
    'SCRIPT_NAME': '',
    'wsgi.version': (1, 0),
    'wsgi.multithread': False,
    'wsgi.multiprocess': False,
    'wsgi.run_once': False
}
class GeventWorker(AsyncWorker):
    """Gunicorn worker serving requests on gevent greenlets.

    Subclasses may set ``server_class``/``wsgi_handler`` to serve through
    a gevent WSGI server instead of the raw StreamServer + handle() path.
    """

    server_class = None
    wsgi_handler = None

    @classmethod
    def setup(cls):
        # Monkey-patch the stdlib so blocking calls cooperate with the
        # gevent event loop.
        from gevent import monkey
        monkey.noisy = False
        monkey.patch_all()

    def timeout_ctx(self):
        # Keep-alive timeout context; False -> expire silently, no raise.
        return gevent.Timeout(self.cfg.keepalive, False)

    def run(self):
        self.socket.setblocking(1)

        # Cap concurrent connections with a greenlet pool.
        pool = Pool(self.worker_connections)
        if self.server_class is not None:
            server = self.server_class(
                self.socket, application=self.wsgi, spawn=pool, log=self.log,
                handler_class=self.wsgi_handler)
        else:
            server = StreamServer(self.socket, handle=self.handle, spawn=pool)

        server.start()
        try:
            while self.alive:
                self.notify()
                # Master process gone? Shut down gracefully.
                if self.ppid != os.getppid():
                    self.log.info("Parent changed, shutting down: %s", self)
                    break

                gevent.sleep(1.0)

        except KeyboardInterrupt:
            pass

        try:
            # Try to stop connections until timeout
            self.notify()
            server.stop(timeout=self.timeout)
        except:
            # best-effort shutdown: the worker is exiting anyway
            pass

    def handle_request(self, *args):
        try:
            super(GeventWorker, self).handle_request(*args)
        except gevent.GreenletExit:
            # Greenlet was killed (e.g. on timeout); not an error.
            pass

    # Only defined on old gevent releases that expose dns_shutdown.
    if hasattr(gevent.core, 'dns_shutdown'):
        def init_process(self):
            #gevent 0.13 and older doesn't reinitialize dns for us after forking
            #here's the workaround
            gevent.core.dns_shutdown(fail_requests=1)
            gevent.core.dns_init()

            super(GeventWorker, self).init_process()
class GeventResponse(object):
    """Lightweight snapshot of a finished response (status line, header
    list, body length), handed to the access logger."""

    status = None
    headers = None
    response_length = None

    def __init__(self, status, headers, clength):
        # Record the response summary for later access logging.
        self.status, self.headers = status, headers
        self.response_length = clength
class PyWSGIHandler(pywsgi.WSGIHandler):
    """gevent pywsgi handler wired into gunicorn's access log."""

    def log_request(self):
        # Compute the request duration from the handler's timestamps and
        # hand a response summary to gunicorn's access logger.
        start = datetime.fromtimestamp(self.time_start)
        finish = datetime.fromtimestamp(self.time_finish)
        response_time = finish - start
        resp = GeventResponse(self.status, self.response_headers,
            self.response_length)
        # Split raw "Name: value" header lines into (name, value) pairs.
        req_headers = [h.split(":", 1) for h in self.headers.headers]
        self.server.log.access(resp, req_headers, self.environ, response_time)

    def get_environ(self):
        env = super(PyWSGIHandler, self).get_environ()
        # Expose the raw socket and the unparsed request URI to the app.
        env['gunicorn.sock'] = self.socket
        env['RAW_URI'] = self.path
        return env
class PyWSGIServer(pywsgi.WSGIServer):
    # Share the static WSGI environ defined at module level.
    base_env = BASE_WSGI_ENV

class GeventPyWSGIWorker(GeventWorker):
    "The Gevent PyWSGI server based worker."
    server_class = PyWSGIServer
    wsgi_handler = PyWSGIHandler
package com.devicehive;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Select;
import com.datastax.driver.core.utils.UUIDs;
import com.devicehive.domain.DeviceNotification;
import org.hamcrest.core.IsEqual;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.cassandra.core.CassandraOperations;
import java.sql.Timestamp;
import java.util.List;
import java.util.UUID;
import static org.hamcrest.CoreMatchers.hasItem;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
;
/**
 * Integration test: a DeviceNotification written through the Cassandra
 * template can be read back, both via selectOne and via a list select.
 */
public class <API key> extends BaseIntegrationTest {

    @Autowired
    private CassandraOperations cassandraTemplate;

    // Per-test-instance fixtures: a fixed timestamp and a random device
    // GUID so the query below only ever sees this test's row.
    final private Timestamp date = new Timestamp(System.currentTimeMillis());
    final private String deviceGuid = UUID.randomUUID().toString();

    @Test
    public void <API key>() {
        // Insert one notification keyed by this test's device GUID.
        final DeviceNotification notif = new DeviceNotification(String.valueOf(UUIDs.timeBased().timestamp()), deviceGuid, date, "notification1", null);
        cassandraTemplate.insert(notif);

        Select select = QueryBuilder.select().from("device_notification").where(QueryBuilder.eq("device_guid", deviceGuid)).limit(10);

        // Round-trip: single-row and list reads must both see the row.
        DeviceNotification <API key> = cassandraTemplate.selectOne(select, DeviceNotification.class);
        assertThat(<API key>, IsEqual.equalTo(notif));

        List<DeviceNotification> <API key> = cassandraTemplate.select(select, DeviceNotification.class);
        assertThat(<API key>.size(), is(1));
        assertThat(<API key>, hasItem(notif));
    }
}
#include "ByteEncoder.h"
#include "EncodingHelper.h"
#include "../utils/QrException.h"
using namespace qr::encoding;
// Encode one character in Byte mode: reinterpret the (possibly signed)
// char as an unsigned byte so its bit pattern maps directly onto an
// 8-bit binary string.
std::string ByteEncoder::encode(char ch, bool) {
    const auto octet = static_cast<unsigned char>(ch);
    return EncodingHelper::getBinaryOf(octet, 8, true);
}
/**
 * Build the character-count indicator for Byte mode.
 *
 * Indicator width in Byte mode (per the QR spec, ISO/IEC 18004):
 *   Versions 1  through 9:  8 bits
 *   Versions 10 through 40: 16 bits
 * (The former 10-26 and 27-40 branches were identical, so they are
 * merged here; behavior is unchanged.)
 *
 * @param count   number of data characters being encoded
 * @param version QR version (1..40)
 * @return the count as a fixed-width binary string
 * @throws QRException for versions greater than 40
 */
std::string ByteEncoder::getCharacterCount(int count, int version) {
    int totalLength = 0;
    if (version < 10) {
        totalLength = 8;
    } else if (version < 41) {
        totalLength = 16;
    } else {
        // Message fixed: versions up to and including 40 are accepted,
        // so the old "smaller than 40" wording contradicted the code.
        throw QRException("Wrong version! Version must be at most 40.");
    }
    // filling buffer with reverse bit-pattern of count
    return EncodingHelper::getBinaryOf(count, totalLength, true);
}
// Human-readable name of this encoding mode.
std::string ByteEncoder::getEncoderName() {
    static const std::string kName = "Byte";
    return kName;
}

// 4-bit mode indicator prefix for Byte mode.
std::string ByteEncoder::getModeIndicator() {
    static const std::string kIndicator = "0100";
    return kIndicator;
}
/**
 * Find the smallest QR version whose Byte-mode capacity can hold
 * `length` characters at the given error-correction level.
 *
 * @param length  number of data characters to encode
 * @param EClevel error-correction level (also the starting column index)
 * @return version 1..40, or 0 when the data fits no version at this level
 */
uint ByteEncoder::getVersion(uint length, <API key> EClevel) {
    // Byte-mode capacities (in characters) for QR versions 1..40.
    // Each version contributes four consecutive entries, one per EC
    // level, so table[(v - 1) * 4 + ec] is version v's capacity at ec.
    constexpr uint table[160] = {
        17, 14, 11, 7, 32, 26, 20, 14, 53, 42, 32, 24, 78, 62, 46,
        34, 106, 84, 60, 44, 134, 106, 74, 58, 154, 122, 86, 64, 192,
        152, 108, 84, 230, 180, 130, 98, 271, 213, 151, 119, 321, 251,
        177, 137, 367, 287, 203, 155, 425, 331, 241, 177, 458, 362,
        258, 194, 520, 412, 292, 220, 586, 450, 322, 250, 644, 504,
        364, 280, 718, 560, 394, 310, 792, 624, 442, 338, 858, 666,
        482, 382, 929, 711, 509, 403, 1003, 779, 565, 439, 1091, 857,
        611, 461, 1171, 911, 661, 511, 1273, 997, 715, 535, 1367, 1059,
        751, 593, 1465, 1125, 805, 625, 1528, 1190, 868, 658, 1628,
        1264, 908, 698, 1732, 1370, 982, 742, 1840, 1452, 1030, 790,
        1952, 1538, 1112, 842, 2068, 1628, 1168, 898, 2188, 1722,
        1228, 958, 2303, 1809, 1283, 983, 2431, 1911, 1351, 1051,
        2563, 1989, 1423, 1093, 2699, 2099, 1499, 1139, 2809, 2213,
        1579, 1219, 2953, 2331, 1663, 1273
    };
    // Walk this EC level's column version by version; the first version
    // whose capacity can hold `length` wins.
    // BUG FIX: compare with >=. The old strict `>` rejected exact fits
    // (e.g. 17 chars at the first EC level should fit version 1, whose
    // capacity is exactly 17) and bumped them to the next version.
    for (uint i = static_cast<uint>(EClevel); i < 160; i += 4) {
        if (table[i] >= length) {
            return i / 4 + 1;
        }
    }
    // Too long to fit any version at this EC level.
    return 0;
}
// Nothing to release: this encoder holds no resources of its own.
ByteEncoder::~ByteEncoder() = default;
<div class="row">
{{! One card per game: the title links to its play view; the badge shows the game status. }}
{{#games}}
<div class="col-lg-4">
<ul class="list-group">
<li class="list-group-item">
<a href="#/game/play/{{id}}/{{status}}"><b>{{title}}</b></a> from <i>{{creator}}</i>
<span class="badge">{{status}}</span>
</li>
</ul>
</div>
{{/games}}
</div>
/* Fixed-size square board for the 2048 game. */
.game2048{
width: 500px;
height: 500px;
}
{% extends "base.html" %}
{% load comments %}
{# Page title: the shared bookmark's title, HTML-escaped. #}
{% block title %}
: {{ shared_bookmark.bookmark.title|escape }}
{% endblock title %}
{% block content %}
{# Vote link, bookmark owner, and current vote count. #}
<a href="/vote/?id={{ shared_bookmark.id }}" class="vote">[+]</a>
:
<a href="/user/{{ shared_bookmark.bookmark.user.username }}" class="username">{{ shared_bookmark.bookmark.user.username }}</a> | <span class="vote-count">: {{ shared_bookmark.votes }}</span>
<h2></h2>
{# django.contrib.comments: fetch count and list for this shared bookmark. #}
{% get_comment_count for bookmarks.sharedbookmark shared_bookmark.id as comment_count %}
{% get_comment_list for bookmarks.sharedbookmark shared_bookmark.id as comment_list %}
{% for comment in comment_list %}
<div class="comment">
<p><b>{{ comment.user_name }}</b> :</p>
{# Escape first, then linkify URLs (truncated to 40 chars) and add paragraphs. #}
{{ comment.comment|escape|urlizetrunc:40|linebreaks }}
</div>
{% endfor %}
<p> : {{ comment_count }}</p>
{% render_comment_form for bookmarks.sharedbookmark shared_bookmark.id %}
{% endblock content %}
module.exports = {
staticFileGlobs: [ |
-- Report tables that were not updated recently
-- They may contain archive data or the script that updates it broke.
-- https://github.com/macbre/index-digest/issues/28
DROP TABLE IF EXISTS `<API key>`;
CREATE TABLE `<API key>` (
`item_id` int(8) unsigned NOT NULL AUTO_INCREMENT,
`cnt` int(8) unsigned NOT NULL,
-- auto-set on insert and refreshed on every update
`timestamp` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`item_id`)
) ENGINE=InnoDB;
-- seed rows with stale timestamps (40-50 days old)
INSERT INTO <API key>(cnt, `timestamp`) VALUES
(20, NOW() - INTERVAL 50 DAY),
(20, NOW() - INTERVAL 45 DAY),
(20, NOW() - INTERVAL 40 DAY);
#include <graph.h>
#include <conio.h>
#include <ctype.h>
short rand(short);
void main(void)
{
    short a;
    short g;
    /* Star field: screen-relative position (x, y) and animation stage s.
       Stages run 1..12 after the increment/wrap below. */
    struct {
        short x;
        short y;
        short s;
    } s[100];
    /* Growth percentage per stage.  Sized 13 because p[s[a].s] is read
       with s[a].s up to 12 (the original p[12] was read and written out
       of bounds).  Note a/20 is integer division, so every entry is 0
       for a < 20 -- preserved from the original. */
    short p[13];

    /* BUG FIX: the original loop ran a <= 13 against a 12-element array,
       writing two entries past the end. */
    for (a = 0; a <= 12; a++)
        p[a] = (a / 20) * 100;
    _setvideomode (19);  /* mode 19: 320x200, 256 colours */
    /* Seed stars at random positions around the screen centre.
       Index 0 is deliberately unused, as in the original. */
    for (a = 1; a < 100; a++)
    {
        s[a].x = rand(200) - 100;
        s[a].y = rand(200) - 100;
        s[a].s = rand(12);
    }
    do
    {
        for (g = 1; g < 10; g++)
        {
            /* BUG FIX: the original inner loop ran a < 101, reading the
               uninitialized out-of-bounds element s[100]. */
            for (a = 1; a < 100; a++)
            {
                _setcolor (0);  /* erase the star's previous pixel */
                _setpixel(s[a].x + 160, s[a].y + 100);
                s[a].s = s[a].s + 1;
                if (s[a].s > 12)
                {
                    /* stage wrapped: respawn the star elsewhere */
                    s[a].s = 1;
                    s[a].x = rand(200) - 100;
                    s[a].y = rand(200) - 100;
                }
                s[a].x = s[a].x * (1 + p[s[a].s] / 100);
                s[a].y = s[a].y * (1 + p[s[a].s] / 100);
                _setcolor (s[a].s);  /* colour encodes the stage */
                _setpixel(s[a].x + 160, s[a].y + 100);
            }
        }
    }
    while (isspace(getch()));  /* any whitespace key continues; others quit */
}
/* Pseudo-random number generator: 16-bit linear congruential step
   (seed = seed * 25173 + 13849 mod 65536) scaled into [0, rg).
   Deterministic: the seed always starts at 100.  NOTE: this shadows the
   C library's rand().
   BUG FIX (portability): converted the K&R-style parameter list to an
   ANSI prototype -- K&R definitions are invalid C++ and were removed
   from C in C23.  The arithmetic is byte-for-byte the original. */
short rand(short rg)
{
    static short seed = 100;

    seed = (seed * 25173 + 13849) % 65536;
    return (((seed + 32768) * rg) / 2 / 32768);
}
// flow-typed signature: <API key>
// Auto-generated flow libdef stub: every export below is declared as
// `any` so the package type-checks until real definitions are written.
declare module '<API key>' {
declare module.exports: any;
}
/**
 * We include stubs for each file inside this npm package in case you need to
 * require those files directly. Feel free to delete any files that aren't
 * needed.
 */
declare module '<API key>/build/adapter' {
declare module.exports: any;
}
declare module '<API key>/gruntfile' {
declare module.exports: any;
}
declare module '<API key>/lib/adapter' {
declare module.exports: any;
}
declare module '<API key>/test/adapter.spec' {
declare module.exports: any;
}
declare module '<API key>/test/error.handling.spec' {
declare module.exports: any;
}
declare module '<API key>/test/fixtures/error.handling.async.spec' {
declare module.exports: any;
}
declare module '<API key>/test/fixtures/error.handling.promise.async.spec' {
declare module.exports: any;
}
declare module '<API key>/test/fixtures/error.handling.promise.spec' {
declare module.exports: any;
}
declare module '<API key>/test/fixtures/error.handling.spec' {
declare module.exports: any;
}
declare module '<API key>/test/fixtures/hooks.using.async.conf' {
declare module.exports: any;
}
declare module '<API key>/test/fixtures/hooks.using.custom.commands' {
declare module.exports: any;
}
declare module '<API key>/test/fixtures/hooks.using.native.promises' {
declare module.exports: any;
}
declare module '<API key>/test/fixtures/hooks.using.q.promises' {
declare module.exports: any;
}
declare module '<API key>/test/fixtures/hooks.using.wdio.commands' {
declare module.exports: any;
}
declare module '<API key>/test/fixtures/sample.spec' {
declare module.exports: any;
}
declare module '<API key>/test/fixtures/sample2.spec' {
declare module.exports: any;
}
declare module '<API key>/test/fixtures/sample3.spec' {
declare module.exports: any;
}
declare module '<API key>/test/fixtures/sample4.spec' {
declare module.exports: any;
}
declare module '<API key>/test/fixtures/sample5.spec' {
declare module.exports: any;
}
declare module '<API key>/test/fixtures/tests.async.spec' {
declare module.exports: any;
}
declare module '<API key>/test/fixtures/tests.only.spec' {
declare module.exports: any;
}
declare module '<API key>/test/fixtures/tests.options.compilers' {
declare module.exports: any;
}
declare module '<API key>/test/fixtures/tests.options.compilers.spec' {
declare module.exports: any;
}
declare module '<API key>/test/fixtures/tests.options.require' {
declare module.exports: any;
}
declare module '<API key>/test/fixtures/tests.options.require.spec' {
declare module.exports: any;
}
declare module '<API key>/test/fixtures/tests.retry.async.spec' {
declare module.exports: any;
}
declare module '<API key>/test/fixtures/tests.retry.sync.spec' {
declare module.exports: any;
}
declare module '<API key>/test/fixtures/tests.sync.async.spec' {
declare module.exports: any;
}
declare module '<API key>/test/fixtures/tests.sync.spec' {
declare module.exports: any;
}
declare module '<API key>/test/hooks.spec' {
declare module.exports: any;
}
declare module '<API key>/test/options.spec' {
declare module.exports: any;
}
declare module '<API key>/test/retry.spec' {
declare module.exports: any;
}
declare module '<API key>/test/tests.spec' {
declare module.exports: any;
}
// Filename aliases: map each stub above to its explicit `.js` filename so
// direct `require('<pkg>/path/file.js')` calls resolve to the same type.
declare module '<API key>/build/adapter.js' {
declare module.exports: $Exports<'<API key>/build/adapter'>;
}
declare module '<API key>/gruntfile.js' {
declare module.exports: $Exports<'<API key>/gruntfile'>;
}
declare module '<API key>/lib/adapter.js' {
declare module.exports: $Exports<'<API key>/lib/adapter'>;
}
declare module '<API key>/test/adapter.spec.js' {
declare module.exports: $Exports<'<API key>/test/adapter.spec'>;
}
declare module '<API key>/test/error.handling.spec.js' {
declare module.exports: $Exports<'<API key>/test/error.handling.spec'>;
}
declare module '<API key>/test/fixtures/error.handling.async.spec.js' {
declare module.exports: $Exports<'<API key>/test/fixtures/error.handling.async.spec'>;
}
declare module '<API key>/test/fixtures/error.handling.promise.async.spec.js' {
declare module.exports: $Exports<'<API key>/test/fixtures/error.handling.promise.async.spec'>;
}
declare module '<API key>/test/fixtures/error.handling.promise.spec.js' {
declare module.exports: $Exports<'<API key>/test/fixtures/error.handling.promise.spec'>;
}
declare module '<API key>/test/fixtures/error.handling.spec.js' {
declare module.exports: $Exports<'<API key>/test/fixtures/error.handling.spec'>;
}
declare module '<API key>/test/fixtures/hooks.using.async.conf.js' {
declare module.exports: $Exports<'<API key>/test/fixtures/hooks.using.async.conf'>;
}
declare module '<API key>/test/fixtures/hooks.using.custom.commands.js' {
declare module.exports: $Exports<'<API key>/test/fixtures/hooks.using.custom.commands'>;
}
declare module '<API key>/test/fixtures/hooks.using.native.promises.js' {
declare module.exports: $Exports<'<API key>/test/fixtures/hooks.using.native.promises'>;
}
declare module '<API key>/test/fixtures/hooks.using.q.promises.js' {
declare module.exports: $Exports<'<API key>/test/fixtures/hooks.using.q.promises'>;
}
declare module '<API key>/test/fixtures/hooks.using.wdio.commands.js' {
declare module.exports: $Exports<'<API key>/test/fixtures/hooks.using.wdio.commands'>;
}
declare module '<API key>/test/fixtures/sample.spec.js' {
declare module.exports: $Exports<'<API key>/test/fixtures/sample.spec'>;
}
declare module '<API key>/test/fixtures/sample2.spec.js' {
declare module.exports: $Exports<'<API key>/test/fixtures/sample2.spec'>;
}
declare module '<API key>/test/fixtures/sample3.spec.js' {
declare module.exports: $Exports<'<API key>/test/fixtures/sample3.spec'>;
}
declare module '<API key>/test/fixtures/sample4.spec.js' {
declare module.exports: $Exports<'<API key>/test/fixtures/sample4.spec'>;
}
declare module '<API key>/test/fixtures/sample5.spec.js' {
declare module.exports: $Exports<'<API key>/test/fixtures/sample5.spec'>;
}
declare module '<API key>/test/fixtures/tests.async.spec.js' {
declare module.exports: $Exports<'<API key>/test/fixtures/tests.async.spec'>;
}
declare module '<API key>/test/fixtures/tests.only.spec.js' {
declare module.exports: $Exports<'<API key>/test/fixtures/tests.only.spec'>;
}
declare module '<API key>/test/fixtures/tests.options.compilers.js' {
declare module.exports: $Exports<'<API key>/test/fixtures/tests.options.compilers'>;
}
declare module '<API key>/test/fixtures/tests.options.compilers.spec.js' {
declare module.exports: $Exports<'<API key>/test/fixtures/tests.options.compilers.spec'>;
}
declare module '<API key>/test/fixtures/tests.options.require.js' {
declare module.exports: $Exports<'<API key>/test/fixtures/tests.options.require'>;
}
declare module '<API key>/test/fixtures/tests.options.require.spec.js' {
declare module.exports: $Exports<'<API key>/test/fixtures/tests.options.require.spec'>;
}
declare module '<API key>/test/fixtures/tests.retry.async.spec.js' {
declare module.exports: $Exports<'<API key>/test/fixtures/tests.retry.async.spec'>;
}
declare module '<API key>/test/fixtures/tests.retry.sync.spec.js' {
declare module.exports: $Exports<'<API key>/test/fixtures/tests.retry.sync.spec'>;
}
declare module '<API key>/test/fixtures/tests.sync.async.spec.js' {
declare module.exports: $Exports<'<API key>/test/fixtures/tests.sync.async.spec'>;
}
declare module '<API key>/test/fixtures/tests.sync.spec.js' {
declare module.exports: $Exports<'<API key>/test/fixtures/tests.sync.spec'>;
}
declare module '<API key>/test/hooks.spec.js' {
declare module.exports: $Exports<'<API key>/test/hooks.spec'>;
}
declare module '<API key>/test/options.spec.js' {
declare module.exports: $Exports<'<API key>/test/options.spec'>;
}
declare module '<API key>/test/retry.spec.js' {
declare module.exports: $Exports<'<API key>/test/retry.spec'>;
}
declare module '<API key>/test/tests.spec.js' {
declare module.exports: $Exports<'<API key>/test/tests.spec'>;
}
'use strict';
// Shared bootstrap: runs on both server and client.
require('./init');
require('./lib');
// Server-only modules (the old comment wrongly said "Import Init").
if (Meteor.isServer) {
require('./server');
}
// Client-only modules.
if (Meteor.isClient) {
require('./client');
}
<?php
class CronjobMigration extends Migration {
    /**
     * Registers the stat-creation cron job and schedules it to run once
     * a minute from now and then daily at 23:59.
     */
    function up() {
        // BUG FIX: dirname() returns the directory WITHOUT a trailing
        // slash, so the original '.../migrations../cronjobs/...' path
        // was missing the separator before '..' and never resolved.
        $job_file = dirname(__FILE__).'/../cronjobs/createStatCronJob.php';
        $task_id = CronjobScheduler::registerTask($job_file, true);
        // Schedule job to run 1 minute from now
        CronjobScheduler::scheduleOnce($task_id, strtotime('+1 minute'));
        // Schedule job to run every day at 23:59
        CronjobScheduler::scheduleRegular($task_id, 23, 59);
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.