answer
stringlengths
15
1.25M
#include "mega/file.h" #include "mega/transfer.h" #include "mega/transferslot.h" #include "mega/megaclient.h" #include "mega/sync.h" #include "mega/command.h" #include "mega/logging.h" #include "mega/heartbeats.h" namespace mega { File::File() { transfer = NULL; chatauth = NULL; hprivate = true; hforeign = false; syncxfer = false; temporaryfile = false; tag = 0; } File::~File() { // if transfer currently running, stop if (transfer) { transfer->client->stopxfer(this, nullptr); } delete [] chatauth; } bool File::serialize(string *d) { char type = char(transfer->type); d->append((const char*)&type, sizeof(type)); if (!FileFingerprint::serialize(d)) { LOG_err << "Error serializing File: Unable to serialize FileFingerprint"; return false; } unsigned short ll; bool flag; ll = (unsigned short)name.size(); d->append((char*)&ll, sizeof(ll)); d->append(name.data(), ll); auto tmpstr = localname.platformEncoded(); ll = (unsigned short)tmpstr.size(); d->append((char*)&ll, sizeof(ll)); d->append(tmpstr.data(), ll); ll = (unsigned short)targetuser.size(); d->append((char*)&ll, sizeof(ll)); d->append(targetuser.data(), ll); ll = (unsigned short)privauth.size(); d->append((char*)&ll, sizeof(ll)); d->append(privauth.data(), ll); ll = (unsigned short)pubauth.size(); d->append((char*)&ll, sizeof(ll)); d->append(pubauth.data(), ll); d->append((const char*)&h, sizeof(h)); d->append((const char*)filekey, sizeof(filekey)); flag = hprivate; d->append((const char*)&flag, sizeof(flag)); flag = hforeign; d->append((const char*)&flag, sizeof(flag)); flag = syncxfer; d->append((const char*)&flag, sizeof(flag)); flag = temporaryfile; d->append((const char*)&flag, sizeof(flag)); char hasChatAuth = (chatauth && chatauth[0]) ? 
1 : 0; d->append((char *)&hasChatAuth, 1); d->append("\0\0\0\0\0\0\0", 8); if (hasChatAuth) { ll = (unsigned short) strlen(chatauth); d->append((char*)&ll, sizeof(ll)); d->append(chatauth, ll); } return true; } File *File::unserialize(string *d) { if (!d->size()) { LOG_err << "Error unserializing File: Empty string"; return NULL; } d->erase(0, 1); FileFingerprint *fp = FileFingerprint::unserialize(d); if (!fp) { LOG_err << "Error unserializing File: Unable to unserialize FileFingerprint"; return NULL; } const char* ptr = d->data(); const char* end = ptr + d->size(); if (ptr + sizeof(unsigned short) > end) { LOG_err << "File unserialization failed - serialized string too short"; delete fp; return NULL; } // read name unsigned short namelen = MemAccess::get<unsigned short>(ptr); ptr += sizeof(namelen); if (ptr + namelen + sizeof(unsigned short) > end) { LOG_err << "File unserialization failed - name too long"; delete fp; return NULL; } const char *name = ptr; ptr += namelen; // read localname unsigned short localnamelen = MemAccess::get<unsigned short>(ptr); ptr += sizeof(localnamelen); if (ptr + localnamelen + sizeof(unsigned short) > end) { LOG_err << "File unserialization failed - localname too long"; delete fp; return NULL; } const char *localname = ptr; ptr += localnamelen; // read targetuser unsigned short targetuserlen = MemAccess::get<unsigned short>(ptr); ptr += sizeof(targetuserlen); if (ptr + targetuserlen + sizeof(unsigned short) > end) { LOG_err << "File unserialization failed - targetuser too long"; delete fp; return NULL; } const char *targetuser = ptr; ptr += targetuserlen; // read private auth unsigned short privauthlen = MemAccess::get<unsigned short>(ptr); ptr += sizeof(privauthlen); if (ptr + privauthlen + sizeof(unsigned short) > end) { LOG_err << "File unserialization failed - private auth too long"; delete fp; return NULL; } const char *privauth = ptr; ptr += privauthlen; unsigned short pubauthlen = MemAccess::get<unsigned short>(ptr); ptr += 
sizeof(pubauthlen); if (ptr + pubauthlen + sizeof(handle) + FILENODEKEYLENGTH + sizeof(bool) + sizeof(bool) + sizeof(bool) + 10 > end) { LOG_err << "File unserialization failed - public auth too long"; delete fp; return NULL; } const char *pubauth = ptr; ptr += pubauthlen; File *file = new File(); *(FileFingerprint *)file = *(FileFingerprint *)fp; delete fp; file->name.assign(name, namelen); file->localname = LocalPath::fromPlatformEncoded(std::string(localname, localnamelen)); file->targetuser.assign(targetuser, targetuserlen); file->privauth.assign(privauth, privauthlen); file->pubauth.assign(pubauth, pubauthlen); file->h.set6byte(MemAccess::get<handle>(ptr)); ptr += sizeof(handle); memcpy(file->filekey, ptr, FILENODEKEYLENGTH); ptr += FILENODEKEYLENGTH; file->hprivate = MemAccess::get<bool>(ptr); ptr += sizeof(bool); file->hforeign = MemAccess::get<bool>(ptr); ptr += sizeof(bool); file->syncxfer = MemAccess::get<bool>(ptr); ptr += sizeof(bool); file->temporaryfile = MemAccess::get<bool>(ptr); ptr += sizeof(bool); char hasChatAuth = MemAccess::get<char>(ptr); ptr += sizeof(char); if (memcmp(ptr, "\0\0\0\0\0\0\0", 8)) { LOG_err << "File unserialization failed - invalid version"; delete file; return NULL; } ptr += 8; if (hasChatAuth) { if (ptr + sizeof(unsigned short) <= end) { unsigned short chatauthlen = MemAccess::get<unsigned short>(ptr); ptr += sizeof(chatauthlen); if (!chatauthlen || ptr + chatauthlen > end) { LOG_err << "File unserialization failed - incorrect size of chat auth"; delete file; return NULL; } file->chatauth = new char[chatauthlen + 1]; memcpy(file->chatauth, ptr, chatauthlen); file->chatauth[chatauthlen] = '\0'; ptr += chatauthlen; } else { LOG_err << "File unserialization failed - chat auth not found"; delete file; return NULL; } } d->erase(0, ptr - d->data()); return file; } void File::prepare() { transfer->localfilename = localname; } void File::start() { } void File::progress() { } void File::completed(Transfer* t, LocalNode* l) { if 
(t->type == PUT) { vector<NewNode> newnodes(1); NewNode* newnode = &newnodes[0]; // build new node newnode->source = NEW_UPLOAD; // upload handle required to retrieve/include pending file attributes newnode->uploadhandle = t->uploadhandle; // reference to uploaded file memcpy(newnode->uploadtoken, t->ultoken.get(), sizeof newnode->uploadtoken); // file's crypto key newnode->nodekey.assign((char*)t->filekey, FILENODEKEYLENGTH); newnode->type = FILENODE; newnode->parenthandle = UNDEF; #ifdef ENABLE_SYNC if (l) { l->newnode.crossref(newnode, l); newnode->syncid = l->syncid; } #endif AttrMap attrs; t->client-><API key>(previousNode, attrs); // store filename attrs.map['n'] = name; // store fingerprint t-><API key>(&attrs.map['c']); string tattrstring; attrs.getjson(&tattrstring); newnode->attrstring.reset(new string); t->client->makeattr(t->transfercipher(), newnode->attrstring, tattrstring.c_str()); if (targetuser.size()) { // drop file into targetuser's inbox int creqtag = t->client->reqtag; t->client->reqtag = tag; t->client->putnodes(targetuser.c_str(), move(newnodes)); t->client->reqtag = creqtag; } else { handle th = h.as8byte(); // inaccessible target folder - use //bin instead if (!t->client->nodebyhandle(th)) { th = t->client->rootnodes[RUBBISHNODE - ROOTNODE]; } #ifdef ENABLE_SYNC if (l) { // tag the previous version in the synced folder (if any) or move to SyncDebris if (l->node && l->node->parent && l->node->parent->localnode) { if (t->client->versions_disabled) { t->client->movetosyncdebris(l->node, l->sync->inshare); t->client->execsyncdeletions(); } else { newnode->ovhandle = l->node->nodehandle; } } t->client->syncadding++; } #endif if (!t->client->versions_disabled && ISUNDEF(newnode->ovhandle)) { newnode->ovhandle = t->client->getovhandle(t->client->nodebyhandle(th), &name); } t->client->reqs.add(new CommandPutNodes(t->client, th, NULL, move(newnodes), tag, #ifdef ENABLE_SYNC l ? 
PUTNODES_SYNC : PUTNODES_APP)); #else PUTNODES_APP)); #endif } } } void File::terminated() { } // do not retry crypto errors or administrative takedowns; retry other types of // failuresup to 16 times, except I/O errors (6 times) bool File::failed(error e) { if (e == API_EKEY) { if (!transfer->hascurrentmetamac) { // several integrity check errors uploading chunks return transfer->failcount < 1; } if (transfer->hasprevmetamac && transfer->prevmetamac == transfer->currentmetamac) { // integrity check failed after download, two times with the same value return false; } // integrity check failed once, try again transfer->prevmetamac = transfer->currentmetamac; transfer->hasprevmetamac = true; return transfer->failcount < 16; } return // Non fatal errors, up to 16 retries ((e != API_EBLOCKED && e != API_ENOENT && e != API_EINTERNAL && e != API_EACCESS && e != API_ETOOMANY && transfer->failcount < 16) // I/O errors up to 6 retries && !((e == API_EREAD || e == API_EWRITE) && transfer->failcount > 6)) // Retry sync transfers up to 8 times for erros that doesn't have a specific management // to prevent immediate retries triggered by the sync engine || (syncxfer && e != API_EBLOCKED && e != API_EKEY && transfer->failcount <= 8) // Infinite retries for storage overquota errors || e == API_EOVERQUOTA || e == API_EGOINGOVERQUOTA; } void File::displayname(string* dname) { if (name.size()) { *dname = name; } else { Node* n; if ((n = transfer->client->nodeByHandle(h))) { *dname = n->displayname(); } else { *dname = "DELETED/UNAVAILABLE"; } } } #ifdef ENABLE_SYNC SyncFileGet::SyncFileGet(Sync* csync, Node* cn, const LocalPath& clocalname) { sync = csync; n = cn; h = n->nodeHandle(); *(FileFingerprint*)this = *n; localname = clocalname; syncxfer = true; n->syncget = this; sync->mUnifiedSync.mNextHeartbeat-><API key>(0, 1, size, 0) ; } SyncFileGet::~SyncFileGet() { if (n) { n->syncget = NULL; } } // create sync-specific temp download directory and set unique filename void 
SyncFileGet::prepare() { if (transfer->localfilename.empty()) { LocalPath tmpname = LocalPath::fromName("tmp", *sync->client->fsaccess, sync->mFilesystemType); if (!sync->tmpfa) { sync->tmpfa = sync->client->fsaccess->newfileaccess(); int i = 3; while (i { LOG_verbose << "Creating tmp folder"; transfer->localfilename = sync->localdebris; sync->client->fsaccess->mkdirlocal(transfer->localfilename, true); transfer->localfilename.appendWithSeparator(tmpname, true); sync->client->fsaccess->mkdirlocal(transfer->localfilename); // lock it LocalPath lockname = LocalPath::fromName("lock", *sync->client->fsaccess, sync->mFilesystemType); transfer->localfilename.appendWithSeparator(lockname, true); if (sync->tmpfa->fopen(transfer->localfilename, false, true)) { break; } } // if we failed to create the tmp dir three times in a row, fall // back to the sync's root if (i < 0) { sync->tmpfa.reset(); } } if (sync->tmpfa) { transfer->localfilename = sync->localdebris; transfer->localfilename.appendWithSeparator(tmpname, true); } else { transfer->localfilename = sync->localroot->localname; } LocalPath tmpfilename; sync->client->fsaccess->tmpnamelocal(tmpfilename); transfer->localfilename.appendWithSeparator(tmpfilename, true); } if (n->parent && n->parent->localnode) { n->parent->localnode->treestate(TREESTATE_SYNCING); } } bool SyncFileGet::failed(error e) { bool retry = File::failed(e); if (n->parent && n->parent->localnode) { n->parent->localnode->treestate(TREESTATE_PENDING); if (!retry && (e == API_EBLOCKED || e == API_EKEY)) { if (e == API_EKEY) { int creqtag = n->parent->client->reqtag; n->parent->client->reqtag = 0; n->parent->client->sendevent(99433, "Undecryptable file"); n->parent->client->reqtag = creqtag; } n->parent->client->movetosyncdebris(n, n->parent->localnode->sync->inshare); } } return retry; } void SyncFileGet::progress() { File::progress(); if (n->parent && n->parent->localnode && n->parent->localnode->ts != TREESTATE_SYNCING) { 
n->parent->localnode->treestate(TREESTATE_SYNCING); } } // update localname (parent's localnode) void SyncFileGet::updatelocalname() { attr_map::iterator ait; if ((ait = n->attrs.map.find('n')) != n->attrs.map.end()) { if (n->parent && n->parent->localnode) { localname = n->parent->localnode->getLocalPath(); localname.appendWithSeparator(LocalPath::fromName(ait->second, *sync->client->fsaccess, sync->mFilesystemType), true); } } } // add corresponding LocalNode (by path), then self-destruct void SyncFileGet::completed(Transfer*, LocalNode*) { sync->mUnifiedSync.mNextHeartbeat-><API key>(0, -1, 0, size); LocalNode *ll = sync->checkpath(NULL, &localname, nullptr, nullptr, false, nullptr); if (ll && ll != (LocalNode*)~0 && n && (*(FileFingerprint *)ll) == (*(FileFingerprint *)n)) { LOG_debug << "LocalNode created, associating with remote Node"; ll->setnode(n); ll->treestate(TREESTATE_SYNCED); ll->sync->statecacheadd(ll); ll->sync->cachenodes(); } delete this; } void SyncFileGet::terminated() { sync->mUnifiedSync.mNextHeartbeat-><API key>(0, -1, -size, 0); delete this; } #endif } // namespace
#!/usr/bin/env python from setuptools import setup, find_packages # Always prefer setuptools over distutils from codecs import open # To use a consistent encoding from os import path here = path.abspath(path.dirname(__file__)) # Get the long description from the relevant file readme = path.join(here, 'README.md') try: from pypandoc import convert long_description = convert(readme, 'rst') except ImportError: print("warning: pypandoc module not found, could not convert Markdown to RST") with open(readme, 'r', encoding='utf-8') as f: long_description = f.read() setup( name='callipy', description='Calling IPython notebooks with arguments', long_description=long_description, version='0.3.2', author='Damien Drix', author_email='damien.drix+pypi@gmail.com', url='https://github.com/damiendr/callipy', classifiers=[ 'Development Status :: 3 - Alpha', 'Framework :: IPython', ], py_modules=['callipy'], install_requires=[ "runipy", "ipython", ], )
<?xml version="1.0" encoding="utf-8"?> <!DOCTYPE TS> <TS version="2.0" language="az_AZ"> </TS>
package org.jvnet.hyperjaxb3.ejb.strategy.model; import java.util.Collection; import com.sun.tools.xjc.model.CPropertyInfo; public interface <API key> extends ClassInfoProcessor<Collection<CPropertyInfo>, ProcessModel>, <API key><Collection<CPropertyInfo>, ProcessModel> { }
<?php require_once("guiconfig.inc"); require_once("interfaces.inc"); $a_vip = &config_read_array('virtualip', 'vip'); $act = null; if ($_SERVER['REQUEST_METHOD'] === 'POST') { if (!empty($_POST['<API key>'])) { $act = "maintenance"; if (isset($config["<API key>"])) { unset($config["<API key>"]); $<API key> = '0'; foreach ($config['sysctl']['item'] as $tunable) { if ($tunable['tunable'] == 'net.inet.carp.demotion' && ctype_digit($tunable['value'])) { $<API key> = $tunable['value']; } } $carp_diff = $<API key> - get_single_sysctl('net.inet.carp.demotion'); set_single_sysctl('net.inet.carp.demotion', $carp_diff); write_config("Leave CARP maintenance mode"); } else { $config["<API key>"] = true; set_single_sysctl('net.inet.carp.demotion', '240'); write_config("Enter CARP maintenance mode"); } } elseif (!empty($_POST['disablecarp'])) { if (get_single_sysctl('net.inet.carp.allow') > 0) { $act = "disable"; $savemsg = gettext("All virtual IPs have been disabled. Please note that disabling does not survive a reboot."); set_single_sysctl('net.inet.carp.allow', '0'); } else { $act = "enable"; $savemsg = gettext("CARP has been enabled."); <API key>(); set_single_sysctl('net.inet.carp.allow', '1'); } } foreach ($a_vip as $vip) { if (!empty($vip['vhid'])) { switch ($act) { case 'maintenance': break; case 'enable': if ($vip['mode'] == 'carp') { <API key>($vip); } else { <API key>($vip); } break; case 'disable': <API key>($vip); break; default: break; } } } header(url_safe('Location: /carp_status.php?savemsg=%s', array($savemsg))); exit; } elseif ($_SERVER['REQUEST_METHOD'] === 'GET') { if (!empty($_GET['savemsg'])) { $savemsg = htmlspecialchars($_GET['savemsg']); } } $carpcount = 0; foreach ($a_vip as $carp) { if ($carp['mode'] == "carp") { $carpcount++; break; } } // fetch pfsync info $pfsyncnodes = json_decode(configd_run("filter list pfsync json"), true); $<API key> = get_single_sysctl("net.inet.carp.demotion"); <API key>($a_vip); $status = 
(get_single_sysctl('net.inet.carp.allow') > 0); if (!empty($config["<API key>"])) { $<API key> = false; } else { $<API key> = $<API key> > 0; } include("head.inc"); ?> <body> <?php include("fbegin.inc"); ?> <section class="page-content-main"> <div class="container-fluid"> <div class="row"> <?php if (isset($savemsg)) { print_info_box($savemsg); } if ($<API key>) { print_info_box(gettext("CARP has detected a problem and this unit has been demoted to BACKUP status.") . "<br />" . gettext("Check link status on all interfaces with configured CARP VIPs.")); } ?> <section class="col-xs-12"> <div class="content-box"> <form method="post"> <table class="table table-condensed"> <tbody> <tr> <td> <input type="submit" class="btn btn-primary" name="disablecarp" value="<?= ($carpcount > 0 && !$status) ? html_safe(gettext('Enable CARP')) : html_safe(gettext('Temporarily Disable CARP')) ?>" /> <input type="submit" class="btn btn-primary" name="<API key>" value="<?= isset($config["<API key>"]) ? html_safe(gettext('Leave Persistent CARP Maintenance Mode')) : html_safe(gettext('Enter Persistent CARP Maintenance Mode')) ?>" /> </td> </tr> </tbody> </table> </form> </div> </section> <section class="col-xs-12"> <div class="content-box"> <div class="table-responsive"> <table class="table table-striped table-condensed"> <thead> <tr> <td><?=gettext("CARP Interface"); ?></td> <td><?=gettext("Virtual IP"); ?></td> <td><?=gettext("Status"); ?></td> </tr> </thead> <tbody> <?php if ($carpcount == 0):?> <tr> <td colspan="3"><?=gettext("Could not locate any defined CARP interfaces.");?></td> </tr> <?php else: $intf_details = <API key>(); foreach ($a_vip as $carp): if ($carp['mode'] != "carp") { continue; } $icon = ''; $intf = get_real_interface($carp['interface']); if (!empty($intf_details[$intf]) && !empty($intf_details[$intf]['carp'][$carp['vhid']])) { $intf_status = $intf_details[$intf]['carp'][$carp['vhid']]['status']; } else { $intf_status = null; } if (($carpcount > 0 && !$status)) { $icon = 
"fa fa-remove fa-fw text-danger"; $intf_status_i18n = gettext('DISABLED'); } elseif ($intf_status == 'MASTER') { $icon = "fa fa-play fa-fw text-success"; $intf_status_i18n = gettext('MASTER'); } elseif ($intf_status == 'BACKUP') { $icon = "fa fa-play fa-fw text-muted"; $intf_status_i18n = gettext('BACKUP'); } elseif ($intf_status == 'INIT') { $icon = "fa fa-info-circle fa-fw"; $intf_status_i18n = gettext('INIT'); }?> <tr> <td><?=<API key>($carp['interface']) . "@{$carp['vhid']}" ;?></td> <td><?=$carp['subnet'];?></td> <td><span class="<?=$icon;?>"></span> <?=$intf_status_i18n;?></td> </tr> <?php endforeach; endif;?> </tbody> <tfoot> <tr> <td colspan="2"><?=gettext("Current CARP demotion level");?></td> <td><?=$<API key>;?> </tr> </tfoot> </table> </div> </div> </section> <section class="col-xs-12"> <div class="content-box"> <div class="table-responsive"> <table class="table table-striped table-condensed"> <thead> <tr> <td><?=gettext("pfSync nodes");?></td> </tr> </thead> <tbody> <?php if (isset($pfsyncnodes['nodes'])): foreach ($pfsyncnodes['nodes'] as $node):?> <tr> <td><?=$node;?></td> </tr> <?php endforeach; endif;?> </tbody> </table> </div> </div> </section> </div> </div> </section> <?php include("foot.inc"); ?>
FILE(REMOVE_RECURSE "CMakeFiles/<API key>.dir/src/<API key>.cpp.o" "devel/lib/laser_scanner_eric/<API key>.pdb" "devel/lib/laser_scanner_eric/<API key>" ) # Per-language clean rules from dependency scanning. FOREACH(lang CXX) INCLUDE(CMakeFiles/<API key>.dir/cmake_clean_${lang}.cmake OPTIONAL) ENDFOREACH(lang)
// Programmer: Craig Stuart Sapp <craig@ccrma.stanford.edu> // Last Modified: Thu Jul 16 19:05:13 PDT 2020 // Filename: tool-tie.cpp // Syntax: C++11; humlib // vim: ts=3 noexpandtab // Description: Interface for splitting or merging tied notes. // Todo: Currently does not handle chords in a fully generalized manner (all // notes in chord have to have the same duration. But splitting // does not even allow ties for now as well. // Fix case where merged tie group exceeds one measure and -i is also used. // Limitation: At least one part in a score must not have an overfill note // at the end of any given measure in order for the timing analysis // to be done correctly. #include "tool-tie.h" #include "Convert.h" #include "HumRegex.h" using namespace std; namespace hum { // START_MERGE // Tool_tie::Tool_tie -- Set the recognized options for the tool. Tool_tie::Tool_tie(void) { define("s|split=b", "split overfill notes into tied notes across barlines."); define("m|merge=b", "merge tied notes into a single note."); define("p|printable=b", "merge tied notes only if single note is a printable note."); define("M|mark=b", "Mark overfill notes."); define("i|invisible=b", "Mark overfill barlines invisible."); } // Tool_tie::run -- Do the main work of the tool. 
bool Tool_tie::run(HumdrumFileSet& infiles) { bool status = true; for (int i=0; i<infiles.getCount(); i++) { status &= run(infiles[i]); } return status; } bool Tool_tie::run(const string& indata, ostream& out) { HumdrumFile infile(indata); bool status = run(infile); if (hasAnyText()) { getAllText(out); } else { out << infile; } return status; } bool Tool_tie::run(HumdrumFile& infile, ostream& out) { bool status = run(infile); if (hasAnyText()) { getAllText(out); } else { out << infile; } return status; } bool Tool_tie::run(HumdrumFile& infile) { initialize(); processFile(infile); infile.<API key>(); m_humdrum_text << infile; return true; } // Tool_tie::initialize -- void Tool_tie::initialize(void) { m_printQ = getBoolean("printable"); m_mergeQ = getBoolean("merge"); m_splitQ = getBoolean("split"); m_markQ = getBoolean("mark"); m_invisibleQ = getBoolean("invisible"); } // Tool_tie::processFile -- void Tool_tie::processFile(HumdrumFile& infile) { if (m_mergeQ) { mergeTies(infile); } else if (m_splitQ) { splitOverfills(infile); } else if (m_markQ) { int count = markOverfills(infile); if (count > 0) { string rdfline = "!!!RDF**kern: "; rdfline += m_mark; rdfline += " = marked note, overfill (total: "; rdfline += to_string(count); rdfline += ")"; infile.appendLine(rdfline); } } } // Tool_tie::splitOverfills -- Both notes and rests that extend // past the end of the measure are split into two or more notes/rests, // with the notes connected with ties. 
void Tool_tie::splitOverfills(HumdrumFile& infile) { for (int i=0; i<infile.getStrandCount(); i++) { HTp stok = infile.getStrandStart(i); if (!stok->isKern()) { continue; } HTp etok = infile.getStrandEnd(i); HTp tok = stok; while (tok && (tok != etok)) { if (!tok->isData()) { tok = tok->getNextToken(); continue; } if (tok->isNull()) { tok = tok->getNextToken(); continue; } bool overQ = checkForOverfill(tok); if (overQ) { splitToken(tok); } tok = tok->getNextToken(); } } infile.<API key>(); } // Tool_tie::splitToken -- void Tool_tie::splitToken(HTp tok) { HumNum duration = tok->getDuration(); HumNum toBarline = tok-><API key>(); HumNum newdur = toBarline; duration = duration - toBarline; string text = "["; text += tok->getText(); HumRegex hre; string recip = Convert::durationToRecip(newdur); hre.replaceDestructive(text, recip, "\\d+(?:%\\d+)?\\.*", "g"); tok->setText(text); <API key>(duration, tok); } // Tool_tie::<API key> -- void Tool_tie::<API key>(HumNum duration, HTp tok) { if (duration <= 0) { return; } HTp current = tok->getNextToken(); // find next barline: while (current) { if (current->isBarline()) { break; } current = current->getNextToken(); } if (!current) { // strange problem: no next barline return; } if (!current->isBarline()) { // strange problem that cannot happen return; } HTp barline = current; if (m_invisibleQ && (barline->find('-') != string::npos)) { HumRegex hre; string text = *barline; hre.replaceDestructive(text, "", "-", "g"); barline->setText(text); } HumNum bardur = current-><API key>(); // find first null token after barline (that is not on a grace-note line) // if the original note is an overfill note, there must be // a null data token. 
current = current->getNextToken(); bool foundQ = false; while (current) { if (current->isNull()) { HLp line = current->getOwner(); if (!line) { // strange error return; } if (line->getDuration() > 0) { // non-grace note null token to exit loop foundQ = true; break; } } current = current->getNextToken(); } if (!foundQ) { // strange error return; } if (!current->isNull()) { // strange error return; } HTp storage = current; // get next note or barline after null token current = current->getNextToken(); foundQ = 0; while (current) { if (current->isBarline()) { foundQ = true; break; } if (current->isData()) { foundQ = true; break; } current = current->getNextToken(); } if (!foundQ) { // strange error return; } HumNum barstart = barline-><API key>(); HumNum nextstart = current-><API key>(); HumNum available = nextstart - barstart; if (duration < available) { cerr << "DURATION " << duration << " IS LESS THAN AVAILABLE " << available << endl; // strange error return; } string text = *tok; HumRegex hre; hre.replaceDestructive(text, "", "[_[]", "g"); string recip = Convert::durationToRecip(available); hre.replaceDestructive(text, recip, "\\d+(?:%\\d+)?\\.*", "g"); if (available == duration) { // this is the last note in the tie group; text += ']'; storage->setText(text); return; } // There is some more space for the remaining duration, but not // big enough for all of it. Place the piece that can fit here // and then kick the can down the road for the remainder. 
text += '_'; storage->setText(text); duration = duration - available; <API key>(duration, storage); } // Tool_tie::mergeTies -- void Tool_tie::mergeTies(HumdrumFile& infile) { // infile.analyzeKernTies(); for (int i=0; i<infile.getStrandCount(); i++) { HTp stok = infile.getStrandStart(i); if (!stok->isKern()) { continue; } HTp etok = infile.getStrandEnd(i); HTp tok = stok; while (tok && (tok != etok)) { if (!tok->isData()) { tok = tok->getNextToken(); continue; } if (tok->isNull()) { tok = tok->getNextToken(); continue; } if (tok->find('[') == string::npos) { tok = tok->getNextToken(); continue; } mergeTie(tok); tok = tok->getNextToken(); } } infile.<API key>(); } // Tool_tie::mergeTie -- void Tool_tie::mergeTie(HTp token) { if (token->find('[') == string::npos) { return; } vector<HTp> tiednotes; HumNum totaldur = token->getDuration(); HTp current = token->getNextToken(); while (current) { if (!current->isData()) { current = current->getNextToken(); continue; } if (current->isNull()) { current = current->getNextToken(); continue; } bool isMiddle = current->find('_') != string::npos; bool isEnd = current->find(']') != string::npos; if (!(isMiddle ^ isEnd)) { // strange problem so don't merge any more notes break; } tiednotes.push_back(current); totaldur += current->getDuration(); if (isEnd) { break; } current = current->getNextToken(); } string recip = Convert::durationToRecip(totaldur); // cerr << "TOTAL DURATION OF " << token << " IS " << totaldur << " RECIP " << recip << endl; bool makeinvis = false; if (m_invisibleQ) { makeinvis = checkForInvisible(token); } for (int i=0; i<(int)tiednotes.size(); i++) { if (m_invisibleQ) { if (checkForInvisible(tiednotes[i])) { <API key>(tiednotes[i]); } } tiednotes[i]->setText("."); } // set initial tied notes with updated recip. 
string text = *token; HumRegex hre; hre.replaceDestructive(text, recip, "\\d+(?:%\\d+)?\\.*", "g"); hre.replaceDestructive(text, "", "\\[", "g"); token->setText(text); if (makeinvis) { <API key>(token); } } // Tool_tie::<API key> -- Multiple layers are not dealt with yet. void Tool_tie::<API key>(HTp tok) { HTp current = tok; while (current) { if (!current->isBarline()) { current = current->getNextToken(); continue; } if (current->find('-') != string::npos) { break; } string text = *current; text += '-'; current->setText(text); break; } } // Tool_tie::markOverfills -- int Tool_tie::markOverfills(HumdrumFile& infile) { int counter = 0; for (int i=0; i<infile.getStrandCount(); i++) { HTp stok = infile.getStrandStart(i); if (!stok->isKern()) { continue; } HTp etok = infile.getStrandEnd(i); HTp tok = stok; while (tok && (tok != etok)) { if (!tok->isData()) { tok = tok->getNextToken(); continue; } if (tok->isNull()) { tok = tok->getNextToken(); continue; } bool overQ = checkForOverfill(tok); if (overQ) { string text = *tok; text += m_mark; tok->setText(text); counter++; } tok = tok->getNextToken(); } } return counter; } // Tool_tie::checkForInvisible -- bool Tool_tie::checkForInvisible(HTp tok) { HumNum duration = tok->getDuration(); HumNum tobarline = tok-><API key>(); if ((tok->find('[') != string::npos) || (tok->find('_') != string::npos)) { if (duration >= tobarline) { return true; } else { return false; } } if (duration > tobarline) { return true; } else { return false; } } // Tool_tie::checkForOverfill -- bool Tool_tie::checkForOverfill(HTp tok) { HumNum duration = tok->getDuration(); HumNum tobarline = tok-><API key>(); if (duration > tobarline) { return true; } else { return false; } } // END_MERGE } // end namespace hum
import json class Cache(object): def __init__(self, backend): self.backend = backend def set(self, key, data): if type(data) is not str: data = ('json', json.dumps(data)) self.backend.store(key, data) def get(self, key): data = self.backend.retrieve(key) if type(data) is tuple: encoding, data = data if encoding != 'json': raise TypeError('No decoder found for encoding "{0}".' + ' Available decoder: "json"'.format(encoding)) return json.loads(data) return data def has(self, key): return self.backend.check(key) def delete(self, key): self.backend.remove(key) def default(): import Local return Cache(Local.LocalBackend())
#include "conf.h" #include <stdlib.h> #include <algorithm> #include <memory> using namespace std; namespace handy { static string makeKey(string section, string name) { string key = section + "." + name; // Convert to lower case to make section/name lookups case-insensitive std::transform(key.begin(), key.end(), key.begin(), ::tolower); return key; } string Conf::get(string section, string name, string default_value) { string key = makeKey(section, name); auto p = values_.find(key); return p == values_.end() ? default_value : p->second.back(); } list<string> Conf::getStrings(string section, string name) { string key = makeKey(section, name); auto p = values_.find(key); return p == values_.end() ? list<string>() : p->second; } long Conf::getInteger(string section, string name, long default_value) { string valstr = get(section, name, ""); const char *value = valstr.c_str(); char *end; // This parses "1234" (decimal) and also "0x4D2" (hex) long n = strtol(value, &end, 0); return end > value ? n : default_value; } double Conf::getReal(string section, string name, double default_value) { string valstr = get(section, name, ""); const char *value = valstr.c_str(); char *end; double n = strtod(value, &end); return end > value ? 
n : default_value; } bool Conf::getBoolean(string section, string name, bool default_value) { string valstr = get(section, name, ""); // Convert to lower case to make string comparisons case-insensitive std::transform(valstr.begin(), valstr.end(), valstr.begin(), ::tolower); if (valstr == "true" || valstr == "yes" || valstr == "on" || valstr == "1") return true; else if (valstr == "false" || valstr == "no" || valstr == "off" || valstr == "0") return false; else return default_value; } namespace { struct LineScanner { char *p; int err; LineScanner(char *ln) : p(ln), err(0) {} LineScanner &skipSpaces() { while (!err && *p && isspace(*p)) { p++; } return *this; } string rstrip(char *s, char *e) { while (e > s && isspace(e[-1])) { e } return string(s, e); } int peekChar() { skipSpaces(); return *p; } LineScanner &skip(int i) { p += i; return *this; } LineScanner &match(char c) { skipSpaces(); err = *p++ != c; return *this; } string consumeTill(char c) { skipSpaces(); char *e = p; while (!err && *e && *e != c) { e++; } if (*e != c) { err = 1; return ""; } char *s = p; p = e; return rstrip(s, e); } string consumeTillEnd() { skipSpaces(); char *e = p; int wasspace = 0; while (!err && *e && *e != ';' && *e != ' if (wasspace) { break; } wasspace = isspace(*e); e++; } char *s = p; p = e; return rstrip(s, e); } }; } // namespace int Conf::parse(const string &filename) { this->filename = filename; FILE *file = fopen(this->filename.c_str(), "r"); if (!file) return -1; unique_ptr<FILE, decltype(fclose) *> release2(file, fclose); static const int MAX_LINE = 16 * 1024; char *ln = new char[MAX_LINE]; unique_ptr<char[]> release1(ln); int lineno = 0; string section, key; int err = 0; while (!err && fgets(ln, MAX_LINE, file) != NULL) { lineno++; LineScanner ls(ln); int c = ls.peekChar(); if (c == ';' || c == '#' || c == '\0') { continue; } else if (c == '[') { section = ls.skip(1).consumeTill(']'); err = ls.match(']').err; key = ""; } else if (isspace(ln[0])) { /* Non-black line with 
leading whitespace, treat as continuation of previous name's value (as per Python ConfigParser). */ if (!key.empty()) { values_[makeKey(section, key)].push_back(ls.consumeTill('\0')); } else { err = 1; } } else { LineScanner lsc = ls; key = ls.consumeTill('='); if (ls.peekChar() == '=') { ls.skip(1); } else { ls = lsc; key = ls.consumeTill(':'); err = ls.match(':').err; } string value = ls.consumeTillEnd(); values_[makeKey(section, key)].push_back(value); } } return err ? lineno : 0; } } // namespace handy
// Tests for the bloom filter package (willf/bloom-style API: New,
// NewWithEstimates, Add/Test/TestAndAdd, WriteTo/ReadFrom, Merge, Copy).
//
// NOTE(review): several function names and one BloomFilter method were
// redacted to the literal token <API key> in this copy (e.g. the
// estimated-false-positive-rate method and the TestEstimated*/Benchmark*
// wrappers). The original identifiers must be restored before this file
// can compile.
package bloom

import (
	"bytes"
	"encoding/binary"
	"encoding/gob"
	"encoding/json"
	"fmt"
	"runtime"
	"sync"
	"testing"
)

// TestConcurrent verifies that two goroutines can Test concurrently
// and both see elements that were added before the goroutines started.
func TestConcurrent(t *testing.T) {
	gmp := runtime.GOMAXPROCS(2)
	defer runtime.GOMAXPROCS(gmp)

	f := New(1000, 4)
	n1 := []byte("Bess")
	n2 := []byte("Jane")
	f.Add(n1)
	f.Add(n2)

	var wg sync.WaitGroup
	const try = 1000
	var err1, err2 error

	wg.Add(1)
	go func() {
		for i := 0; i < try; i++ {
			n1b := f.Test(n1)
			if !n1b {
				err1 = fmt.Errorf("%v should be in", n1)
				break
			}
		}
		wg.Done()
	}()

	wg.Add(1)
	go func() {
		for i := 0; i < try; i++ {
			n2b := f.Test(n2)
			if !n2b {
				err2 = fmt.Errorf("%v should be in", n2)
				break
			}
		}
		wg.Done()
	}()

	wg.Wait()
	if err1 != nil {
		t.Fatal(err1)
	}
	if err2 != nil {
		t.Fatal(err2)
	}
}

// TestBasic exercises Add, Test and TestAndAdd with byte-slice keys:
// no false negatives for added keys, and TestAndAdd reports absence
// the first time only.
func TestBasic(t *testing.T) {
	f := New(1000, 4)
	n1 := []byte("Bess")
	n2 := []byte("Jane")
	n3 := []byte("Emma")
	f.Add(n1)
	n3a := f.TestAndAdd(n3)
	n1b := f.Test(n1)
	n2b := f.Test(n2)
	n3b := f.Test(n3)
	if !n1b {
		t.Errorf("%v should be in.", n1)
	}
	if n2b {
		t.Errorf("%v should not be in.", n2)
	}
	if n3a {
		t.Errorf("%v should not be in the first time we look.", n3)
	}
	if !n3b {
		t.Errorf("%v should be in the second time we look.", n3)
	}
}

// TestBasicUint32 repeats TestBasic with 4-byte big-endian integer keys.
func TestBasicUint32(t *testing.T) {
	f := New(1000, 4)
	n1 := make([]byte, 4)
	n2 := make([]byte, 4)
	n3 := make([]byte, 4)
	n4 := make([]byte, 4)
	binary.BigEndian.PutUint32(n1, 100)
	binary.BigEndian.PutUint32(n2, 101)
	binary.BigEndian.PutUint32(n3, 102)
	binary.BigEndian.PutUint32(n4, 103)
	f.Add(n1)
	n3a := f.TestAndAdd(n3)
	n1b := f.Test(n1)
	n2b := f.Test(n2)
	n3b := f.Test(n3)
	f.Test(n4)
	if !n1b {
		t.Errorf("%v should be in.", n1)
	}
	if n2b {
		t.Errorf("%v should not be in.", n2)
	}
	if n3a {
		t.Errorf("%v should not be in the first time we look.", n3)
	}
	if !n3b {
		t.Errorf("%v should be in the second time we look.", n3)
	}
}

// TestString exercises the string-key convenience wrappers.
func TestString(t *testing.T) {
	f := NewWithEstimates(1000, 0.001)
	n1 := "Love"
	n2 := "is"
	n3 := "in"
	n4 := "bloom"
	f.AddString(n1)
	n3a := f.TestAndAddString(n3)
	n1b := f.TestString(n1)
	n2b := f.TestString(n2)
	n3b := f.TestString(n3)
	f.TestString(n4)
	if !n1b {
		t.Errorf("%v should be in.", n1)
	}
	if n2b {
		t.Errorf("%v should not be in.", n2)
	}
	if n3a {
		t.Errorf("%v should not be in the first time we look.", n3)
	}
	if !n3b {
		t.Errorf("%v should be in the second time we look.", n3)
	}
}

// testEstimated checks that a filter sized by EstimateParameters keeps its
// measured false-positive rate within 10% of the requested maximum.
// The redacted <API key> call is presumably the estimated-FP-rate method.
func testEstimated(n uint, maxFp float64, t *testing.T) {
	m, k := EstimateParameters(n, maxFp)
	f := NewWithEstimates(n, maxFp)
	fpRate := f.<API key>(n)
	if fpRate > 1.10*maxFp {
		t.Errorf("False positive rate too high: n: %v; m: %v; k: %v; maxFp: %f; fpRate: %f, fpRate/maxFp: %f", n, m, k, maxFp, fpRate, fpRate/maxFp)
	}
}

// Grid of estimated-parameter checks: n in {1e3..1e7}, maxFp in {1e-4, 1e-3, 1e-2}.
func <API key>(t *testing.T) { testEstimated(1000, 0.000100, t) }
func <API key>(t *testing.T) { testEstimated(10000, 0.000100, t) }
func <API key>(t *testing.T) { testEstimated(100000, 0.000100, t) }
func <API key>(t *testing.T) { testEstimated(1000000, 0.000100, t) }
func <API key>(t *testing.T) { testEstimated(10000000, 0.000100, t) }
func <API key>(t *testing.T) { testEstimated(1000, 0.001000, t) }
func <API key>(t *testing.T) { testEstimated(10000, 0.001000, t) }
func <API key>(t *testing.T) { testEstimated(100000, 0.001000, t) }
func <API key>(t *testing.T) { testEstimated(1000000, 0.001000, t) }
func <API key>(t *testing.T) { testEstimated(10000000, 0.001000, t) }
func <API key>(t *testing.T) { testEstimated(1000, 0.010000, t) }
func <API key>(t *testing.T) { testEstimated(10000, 0.010000, t) }
func <API key>(t *testing.T) { testEstimated(100000, 0.010000, t) }
func <API key>(t *testing.T) { testEstimated(1000000, 0.010000, t) }
func <API key>(t *testing.T) { testEstimated(10000000, 0.010000, t) }

// JSON round-trip: marshalling then unmarshalling must preserve m, k and
// the underlying bitset.
func <API key>(t *testing.T) {
	f := New(1000, 4)
	data, err := json.Marshal(f)
	if err != nil {
		t.Fatal(err.Error())
	}
	var g BloomFilter
	err = json.Unmarshal(data, &g)
	if err != nil {
		t.Fatal(err.Error())
	}
	if g.m != f.m {
		t.Error("invalid m value")
	}
	if g.k != f.k {
		t.Error("invalid k value")
	}
	if g.b == nil {
		t.Fatal("bitset is nil")
	}
	if !g.b.Equal(f.b) {
		t.Error("bitsets are not equal")
	}
}

// WriteTo/ReadFrom round-trip through a bytes.Buffer; ReadFrom must
// overwrite the destination filter's own (different) parameters.
func TestWriteToReadFrom(t *testing.T) {
	var b bytes.Buffer
	f := New(1000, 4)
	_, err := f.WriteTo(&b)
	if err != nil {
		t.Fatal(err)
	}
	g := New(1000, 1)
	_, err = g.ReadFrom(&b)
	if err != nil {
		t.Fatal(err)
	}
	if g.m != f.m {
		t.Error("invalid m value")
	}
	if g.k != f.k {
		t.Error("invalid k value")
	}
	if g.b == nil {
		t.Fatal("bitset is nil")
	}
	if !g.b.Equal(f.b) {
		t.Error("bitsets are not equal")
	}
	g.Test([]byte(""))
}

// Binary round-trip also checks the reported byte counts of WriteTo/ReadFrom.
func TestReadWriteBinary(t *testing.T) {
	f := New(1000, 4)
	var buf bytes.Buffer
	bytesWritten, err := f.WriteTo(&buf)
	if err != nil {
		t.Fatal(err.Error())
	}
	if bytesWritten != int64(buf.Len()) {
		t.Errorf("incorrect write length %d != %d", bytesWritten, buf.Len())
	}
	var g BloomFilter
	bytesRead, err := g.ReadFrom(&buf)
	if err != nil {
		t.Fatal(err.Error())
	}
	if bytesRead != bytesWritten {
		t.Errorf("read unexpected number of bytes %d != %d", bytesRead, bytesWritten)
	}
	if g.m != f.m {
		t.Error("invalid m value")
	}
	if g.k != f.k {
		t.Error("invalid k value")
	}
	if g.b == nil {
		t.Fatal("bitset is nil")
	}
	if !g.b.Equal(f.b) {
		t.Error("bitsets are not equal")
	}
}

// Gob round-trip must preserve parameters, the bitset, and membership.
func TestEncodeDecodeGob(t *testing.T) {
	f := New(1000, 4)
	f.Add([]byte("one"))
	f.Add([]byte("two"))
	f.Add([]byte("three"))
	var buf bytes.Buffer
	err := gob.NewEncoder(&buf).Encode(f)
	if err != nil {
		t.Fatal(err.Error())
	}
	var g BloomFilter
	err = gob.NewDecoder(&buf).Decode(&g)
	if err != nil {
		t.Fatal(err.Error())
	}
	if g.m != f.m {
		t.Error("invalid m value")
	}
	if g.k != f.k {
		t.Error("invalid k value")
	}
	if g.b == nil {
		t.Fatal("bitset is nil")
	}
	if !g.b.Equal(f.b) {
		t.Error("bitsets are not equal")
	}
	if !g.Test([]byte("three")) {
		t.Errorf("missing value 'three'")
	}
	if !g.Test([]byte("two")) {
		t.Errorf("missing value 'two'")
	}
	if !g.Test([]byte("one")) {
		t.Errorf("missing value 'one'")
	}
}

// Equal must distinguish filters by contents (f vs f1), by k (f vs g)
// and by m (f vs h), and a filter equals itself.
func TestEqual(t *testing.T) {
	f := New(1000, 4)
	f1 := New(1000, 4)
	g := New(1000, 20)
	h := New(10, 20)
	n1 := []byte("Bess")
	f1.Add(n1)
	if !f.Equal(f) {
		t.Errorf("%v should be equal to itself", f)
	}
	if f.Equal(f1) {
		t.Errorf("%v should not be equal to %v", f, f1)
	}
	if f.Equal(g) {
		t.Errorf("%v should not be equal to %v", f, g)
	}
	if f.Equal(h) {
		t.Errorf("%v should not be equal to %v", f, h)
	}
}

// NOTE(review): the outer loop runs exactly once (n starts and ends at
// 100000); presumably the bound was meant to be larger — confirm.
func BenchmarkEstimated(b *testing.B) {
	for n := uint(100000); n <= 100000; n *= 10 {
		for fp := 0.1; fp >= 0.0001; fp /= 10.0 {
			f := NewWithEstimates(n, fp)
			f.<API key>(n)
		}
	}
}

// Benchmark: separate Test then Add per key (name redacted).
func <API key>(b *testing.B) {
	f := NewWithEstimates(uint(b.N), 0.0001)
	key := make([]byte, 100)
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		binary.BigEndian.PutUint32(key, uint32(i))
		f.Test(key)
		f.Add(key)
	}
}

// Benchmark: combined TestAndAdd per key (name redacted).
func <API key>(b *testing.B) {
	f := NewWithEstimates(uint(b.N), 0.0001)
	key := make([]byte, 100)
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		binary.BigEndian.PutUint32(key, uint32(i))
		f.TestAndAdd(key)
	}
}

// NOTE(review): this name matches neither Test* nor Benchmark*, so the
// `go test` runner never executes it — it was probably meant to be
// TestMerge(t *testing.T). Confirm before renaming.
func MergeTest(b *testing.B) {
	f := New(1000, 4)
	n1 := []byte("f")
	f.Add(n1)

	g := New(1000, 4)
	n2 := []byte("g")
	g.Add(n2)

	h := New(999, 4)
	n3 := []byte("h")
	h.Add(n3)

	j := New(1000, 5)
	n4 := []byte("j")
	j.Add(n4)

	var err error
	err = f.Merge(g)
	if err != nil {
		b.Errorf("There should be no error when merging two similar filters")
	}
	err = f.Merge(h)
	if err == nil {
		b.Errorf("There should be an error when merging filters with mismatched m")
	}
	err = f.Merge(j)
	if err == nil {
		b.Errorf("There should be an error when merging filters with mismatched k")
	}

	n2b := f.Test(n2)
	if !n2b {
		b.Errorf("The value doesn't exist after a valid merge")
	}
	n3b := f.Test(n3)
	if n3b {
		b.Errorf("The value exists after an invalid merge")
	}
	n4b := f.Test(n4)
	if n4b {
		b.Errorf("The value exists after an invalid merge")
	}
}

// TestCopy verifies Copy produces an independent filter: additions to the
// copy are not visible in the original.
func TestCopy(t *testing.T) {
	f := New(1000, 4)
	n1 := []byte("f")
	f.Add(n1)

	// copy here instead of New
	g := f.Copy()
	n2 := []byte("g")
	g.Add(n2)

	n1fb := f.Test(n1)
	if !n1fb {
		t.Errorf("The value doesn't exist in original after making a copy")
	}
	n1gb := g.Test(n1)
	if !n1gb {
		t.Errorf("The value doesn't exist in the copy")
	}
	n2fb := f.Test(n2)
	if n2fb {
		t.Errorf("The value exists in the original, it should only exist in copy")
	}
	n2gb := g.Test(n2)
	if !n2gb {
		t.Errorf("The value doesn't exist in copy after Add()")
	}
}
const util = require("util"); const assert = module.exports = ok; class AssertionError extends Error { constructor(options) { if (typeof options == "string") { options = { message: options }; } options.message = options.message || "Default message"; super(options.message); this.name = "AssertionError"; this.message = options.message; this.stack = (new Error()).stack; if (options.stackStart) { let frames = this.stack.split("\n"); for (let i = 0; i < frames.length; ++i) { if (frames[i].indexOf(options.stackStart) === 0) { this.stack = frames.splice(i + 1).join("\n"); // format: <function>@<file>:<line>:<col> let frame = frames[i]; let split1 = frame.split("@"); // 0 = function let split2 = split1[1].split(":"); // 0 = file, 1 = line, 2 = col this.fileName = split2[0]; this.lineNumber = parseInt(split2[1]); this.columnNumber = parseInt(split2[2]); break; } } } } } assert.AssertionError = AssertionError; function fail(actual, expected, message, operator, stackStart) { // throw new Error(message); // throw new AssertionError(message); throw new assert.AssertionError({ message: message, actual: actual, expected: expected, operator: operator, stackStart: stackStart, }); } assert.fail = fail; function ok(guard, message) { if (!guard) { fail(guard, true, message, "==", "ok"); } } assert.ok = ok; assert.equal = function (actual, expected, message) { if (actual != expected) { fail(actual, expected, message, "==", "assert.equal"); } }; assert.notEqual = function (actual, expected, message) { if (actual == expected) { fail(actual, expected, message, "!=", "assert.notEqual"); } }; assert.deepEqual = function (actual, expected, message) { if (!deepEqual(actual, expected)) { fail(actual, expected, message, "deepEqual", "assert.deepEqual"); } }; assert.notDeepEqual = function (actual, expected, message) { if (deepEqual(actual, expected)) { fail(actual, expected, message, "notDeepEqual", "assert.notDeepEqual"); } }; function deepEqual(actual, expected) { if (actual === expected) { 
return true; } // 2) If the expected value is a Date object, the actual value is equivalent // if it is also a Date object that refers to the same time. else if (util.isDate(actual) && util.isDate(expected)) { return actual.getTime() === expected.getTime(); } // If the expected value is a RegExp, the value and flags must match. else if (util.isRegExp(actual) && util.isRegExp(expected)) { return actual.source === expected.source && actual.global === expected.global && actual.multiline === expected.multiline && actual.lastIndex === expected.lastIndex && actual.ignoreCase === expected.ignoreCase; } // 3) Other pairs that do not both pass typeof value == "object", equivalence is determined by ==. else if ((actual === null || typeof actual !== "object") && (expected === null || typeof expected !== "object")) { return actual == expected; // For strict, use ==. } // 4) or all other Object pairs, including Array objects, equivalence is determined by having the same // number of owned properties (as verified with Object.prototype.hasOwnProperty.call), the same set // of keys (although not necessarily the same order), equivalent values for every corresponding key, // and an identical "prototype" property. Note: this accounts for both named and indexed properties on Arrays. else { return objectEquivalence(actual, expected); } } function objectEquivalence(a, b) { if (a === null || a === undefined || b === null || b === undefined) { return false; } if (util.isPrimitive(a) || util.isPrimitive(b)) { return a === b; } } assert.strictEqual = function (actual, expected, message) { if (actual !== expected) { fail(actual, expected, message, "===", "assert.strictEqual"); } }; assert.notStrictEqual = function (actual, expected, message) { if (actual === expected) { fail(actual, expected, message, "!==", "assert.notStrictEqual"); } }; assert.throws = function (block, error, message) { }; assert.doesNotThrow = function (block, error, message) { };
//
// FFV-C : Frontflow / violet Cartesian
//

/*
 * @file   IP_Sphere.C
 * @brief  IP_Sphere class
 * @author aics
 */

#include "IP_Sphere.h"

using namespace Vec3class;

//
// Fraction (in cell widths) of the segment from point p to the sphere
// surface of radius r, measured along axis direction `dir`
// (1/2 = X-/X+, 3/4 = Y-/Y+, 5/6 = Z-/Z+). The sign of the surface
// coordinate c is flipped when the point lies on the negative side.
// NOTE(review): sqrtf() is called on REAL_TYPE, which may be double —
// precision is silently reduced if so; confirm REAL_TYPE.
REAL_TYPE IP_Sphere::cut_line(const Vec3r p, const int dir, const REAL_TYPE r)
{
  REAL_TYPE x, y, z, s;
  REAL_TYPE c;

  x = p.x;
  y = p.y;
  z = p.z;
  s = 0.0;

  switch (dir) {
    case 1: // X direction (minus side); same formula as case 2
      c = sqrtf(r*r - y*y - z*z);
      if ( x < 0.0 ) c *= -1.0;
      s = fabs(c-x) / pitch[0];
      break;

    case 2: // X direction (plus side)
      c = sqrtf(r*r - y*y - z*z);
      if ( x < 0.0 ) c *= -1.0;
      s = fabs(c-x) / pitch[0];
      break;

    case 3: // Y direction (minus side)
      c = sqrtf(r*r - x*x - z*z);
      if ( y < 0.0 ) c *= -1.0;
      s = fabs(c-y) / pitch[1];
      break;

    case 4: // Y direction (plus side)
      c = sqrtf(r*r - x*x - z*z);
      if ( y < 0.0 ) c *= -1.0;
      s = fabs(c-y) / pitch[1];
      break;

    case 5: // Z direction (minus side)
      c = sqrtf(r*r - x*x - y*y);
      if ( z < 0.0 ) c *= -1.0;
      s = fabs(c-z) / pitch[2];
      break;

    case 6: // Z direction (plus side)
      c = sqrtf(r*r - x*x - y*y);
      if ( z < 0.0 ) c *= -1.0;
      s = fabs(c-z) / pitch[2];
      break;

    default:
      Exit(0); // unknown direction code is fatal
      break;
  }

  return s;
}

//
// Cell index (1-based, Fortran-style) containing point p, given the
// region origin `ol` and cell pitch `pch`; the result is clamped to
// the local array bounds [1, size].
// Fortran index
Vec3i IP_Sphere::find_index(const Vec3r p, const Vec3r ol, const Vec3r pch)
{
  Vec3r q = (p-ol)/pch;
  Vec3i idx( ceil(q.x), ceil(q.y), ceil(q.z) );

  int ix = size[0];
  int jx = size[1];
  int kx = size[2];

  if ( idx.x < 1 ) idx.x = 1;
  if ( idx.y < 1 ) idx.y = 1;
  if ( idx.z < 1 ) idx.z = 1;
  if ( idx.x > ix ) idx.x = ix;
  if ( idx.y > jx ) idx.y = jx;
  if ( idx.z > kx ) idx.z = kx;

  return idx;
}

//
// Read the sphere parameters from the TextParser input:
// Radius (required), Driver length (required, >= 0), fluid/solid medium
// labels, and — when a driver section is present — the driver media.
// Dimensional inputs are used as-is; non-dimensional ones are scaled by RefL.
// Returns false (with a message) on any missing/invalid entry.
bool IP_Sphere::getTP(Control* R, TextParser* tpCntl)
{
  std::string str;
  std::string label;
  REAL_TYPE ct;

  // radius
  label = "/IntrinsicExample/Radius";
  if ( !(tpCntl->getInspectedValue(label, ct)) ) {
    Hostonly_ stamped_printf("\tParsing error : fail to get '%s'\n", label.c_str());
    return false;
  }
  else {
    radius = ( R->Unit.Param == DIMENSIONAL ) ? ct : ct * RefL;
  }

  // driver length
  label = "/IntrinsicExample/Driver";
  if ( tpCntl->getInspectedValue(label, ct ) ) {
    drv_length = ( R->Unit.Param == DIMENSIONAL ) ? ct : ct * RefL;
  }
  else {
    Hostonly_ stamped_printf("\tParsing error : fail to get '%s'\n", label.c_str());
    return false;
  }

  if ( drv_length < 0.0 ) {
    Hostonly_ stamped_printf("\tError : Value of 'Driver' in 'IntrinsicExample' must be positive.\n");
    return false;
  }

  // a positive driver length enables driver mode
  if ( drv_length > 0.0 ) {
    drv_mode = ON;
  }
  else{
    drv_mode = OFF;
  }

  // fluid medium label
  label = "/IntrinsicExample/FluidMedium";
  if ( !(tpCntl->getInspectedValue(label, str)) ) {
    Hostonly_ stamped_printf("\tParsing error : fail to get '%s'\n", label.c_str());
    return false;
  }
  m_fluid = str;

  // solid medium label
  label = "/IntrinsicExample/SolidMedium";
  if ( !(tpCntl->getInspectedValue(label, str)) ) {
    Hostonly_ stamped_printf("\tParsing error : fail to get '%s'\n", label.c_str());
    return false;
  }
  m_solid = str;

  // driver media are only required when the driver is enabled
  if (drv_length > 0.0 ) {
    label = "/IntrinsicExample/DriverMedium";
    if ( !(tpCntl->getInspectedValue(label, str)) ) {
      Hostonly_ stamped_printf("\tParsing error : fail to get '%s'\n", label.c_str());
      return false;
    }
    m_driver = str;

    label = "/IntrinsicExample/DriverFaceMedium";
    if ( !(tpCntl->getInspectedValue(label, str)) ) {
      Hostonly_ stamped_printf("\tParsing error : fail to get '%s'\n", label.c_str());
      return false;
    }
    m_driver_face = str;
  }

  return true;
}

//
// Print the parsed parameters (dimensional and non-dimensional) to fp.
void IP_Sphere::printPara(FILE* fp, const Control* R)
{
  if ( !fp ) {
    stamped_printf("\tFail to write into file\n");
    Exit(0);
  }

  // NOTE(review): the banner fprintf below was truncated in this copy —
  // the string literal is unterminated; restore the original separator
  // line before compiling.
  fprintf(fp,"\n
  fprintf(fp,"\n\t>> Intrinsic Sphere Class Parameters\n\n");

  fprintf(fp,"\tRadius of Sphere [m] / [-] : %12.5e / %12.5e\n", radius, radius/RefL);
  if ( drv_mode == ON ) {
    fprintf(fp,"\tDriver Length [m] / [-] : %12.5e / %12.5e\n", drv_length, drv_length/RefL);
  }
}

//
// Voxelize the sphere: for every cell in (a 2-cell margin around) the
// sphere's bounding box, test the cell centre and its six axis
// neighbours against the sphere, record quantized cut distances in
// cutL/cutU and the solid medium id in bid for both sides of each cut
// face. Afterwards, when driver mode is on, mark driver / driver-face /
// solid regions in bcd for cells with x below the driver length.
void IP_Sphere::setup(int* bcd, Control* R, const int NoMedium, const MediumList* mat, const int NoCompo, const CompoList* cmp, int* cutL, int* cutU, int* bid)
{
  int mid_fluid;
  int mid_solid;
  int mid_driver;
  int mid_driver_face;

  REAL_TYPE dx = pitch[0];
  REAL_TYPE dy = pitch[1];
  REAL_TYPE dz = pitch[2];

  // non-dimensional radius
  REAL_TYPE rs = radius/R->RefLength;

  REAL_TYPE Lx = region[0];
  REAL_TYPE Ly = region[1];
  REAL_TYPE Lz = region[2];

  Vec3r pch(pitch);
  Vec3r org(origin);

  // bbox
  Vec3r box_min; ///< Bounding box
  Vec3r box_max; ///< Bounding box
  Vec3i box_st;  ///< Bounding box
  Vec3i box_ed;  ///< Bounding box

  // sphere is centred at the coordinate origin
  box_min = - rs;
  box_max = + rs;
  box_st = find_index(box_min, org, pch);
  box_ed = find_index(box_max, org, pch);

  int ix = size[0];
  int jx = size[1];
  int kx = size[2];
  int gd = guide;

  // resolve medium labels to ids (0 means "not found")
  if ( (mid_fluid = FBUtility::findIDfromLabel(mat, NoMedium, m_fluid)) == 0 ) {
    Hostonly_ printf("\tLabel '%s' is not listed in MediumList\n", m_fluid.c_str());
    Exit(0);
  }

  if ( (mid_solid = FBUtility::findIDfromLabel(mat, NoMedium, m_solid)) == 0 ) {
    Hostonly_ printf("\tLabel '%s' is not listed in MediumList\n", m_solid.c_str());
    Exit(0);
  }

  // p[0] is the cell centre, p[1..6] its six axis neighbours
  Vec3r p[7];
  Vec3r base, b;
  REAL_TYPE r_min=10.0, r_max=0.0; // running min/max of cut fractions (cut <= 1 cell, so 10.0 is a safe "infinity")
  REAL_TYPE lb[7];

  // sweep the bounding box plus a 2-cell margin
  for (int k=box_st.z-2; k<=box_ed.z+2; k++) {
    for (int j=box_st.y-2; j<=box_ed.y+2; j++) {
      for (int i=box_st.x-2; i<=box_ed.x+2; i++) {

        base.assign((REAL_TYPE)i-0.5, (REAL_TYPE)j-0.5, (REAL_TYPE)k-0.5);
        b = org + base*pch;

        p[0].assign(b.x   , b.y   , b.z   );
        p[1].assign(b.x-dx, b.y   , b.z   );
        p[2].assign(b.x+dx, b.y   , b.z   );
        p[3].assign(b.x   , b.y-dy, b.z   );
        p[4].assign(b.x   , b.y+dy, b.z   );
        p[5].assign(b.x   , b.y   , b.z-dz);
        p[6].assign(b.x   , b.y   , b.z+dz);

        // inside/outside flag wrt the sphere centred at (0.0, 0.0, 0.0)
        for (int l=0; l<7; l++) {
          lb[l] = ( p[l].length() <= rs ) ? -1.0 : 1.0;
        }

        // cut test
        // 1-6: a sign change between centre and neighbour means the face is cut
        for (int l=1; l<=6; l++) {
          if ( lb[0]*lb[l] < 0.0 ) {
            REAL_TYPE s = cut_line(p[0], l, rs);
            size_t m = _F_IDX_S3D(i, j, k, ix, jx, kx, gd);
            int r = quantize9(s);

            // faces 0-2 are packed in cutL, faces 3-5 in cutU
            (l<=3) ? setCutL9(cutL[m], r, l-1) : setCutU9(cutU[m], r, l-1);
            setBit5(bid[m], mid_solid, l-1);

            // mirror the complementary cut (1-s) on the neighbour's opposite face
            int rr = quantize9(1.0-s);
            size_t m1;
            switch (l-1) {
              case X_minus:
                m1 = _F_IDX_S3D(i-1, j, k, ix, jx, kx, gd);
                setBit5(bid[m1], mid_solid, X_plus);
                setCutL9(cutL[m1], rr, X_plus);
                break;

              case X_plus:
                m1 = _F_IDX_S3D(i+1, j, k, ix, jx, kx, gd);
                setBit5(bid[m1], mid_solid, X_minus);
                setCutL9(cutL[m1], rr, X_minus);
                break;

              case Y_minus:
                m1 = _F_IDX_S3D(i, j-1, k, ix, jx, kx, gd);
                setBit5(bid[m1], mid_solid, Y_plus);
                setCutU9(cutU[m1], rr, Y_plus);
                break;

              case Y_plus:
                m1 = _F_IDX_S3D(i, j+1, k, ix, jx, kx, gd);
                setBit5(bid[m1], mid_solid, Y_minus);
                setCutL9(cutL[m1], rr, Y_minus);
                break;

              case Z_minus:
                m1 = _F_IDX_S3D(i, j, k-1, ix, jx, kx, gd);
                setBit5(bid[m1], mid_solid, Z_plus);
                setCutU9(cutU[m1], rr, Z_plus);;
                break;

              case Z_plus:
                m1 = _F_IDX_S3D(i, j, k+1, ix, jx, kx, gd);
                setBit5(bid[m1], mid_solid, Z_minus);
                setCutU9(cutU[m1], rr, Z_minus);
                break;
            }

            //printf("(%2d %2d %2d) %2d %d %f\n", i,j,k,mid_solid, l-1, s);
            r_min = (std::min)(r_min, s);
            r_max = (std::max)(r_max, s);
          }
        }

      }
    }
  }

  Hostonly_ {
    printf("\n\tCut info. for Sphere\n");
    printf("\tmin. cut = %f\n", r_min);
    printf("\tmax. cut = %f\n", r_max);
  }

  // driver
  // only when driver mode is enabled
  if ( drv_mode == OFF ) return;

  if ( (mid_driver = FBUtility::findIDfromLabel(mat, NoMedium, m_driver)) == 0 ) {
    Hostonly_ printf("\tLabel '%s' is not listed in MediumList\n", m_driver.c_str());
    Exit(0);
  }

  if ( (mid_driver_face = FBUtility::findIDfromLabel(mat, NoMedium, m_driver_face)) == 0 ) {
    Hostonly_ printf("\tLabel '%s' is not listed in MediumList\n", m_driver_face.c_str());
    Exit(0);
  }

  REAL_TYPE x, y, z;
  // NOTE(review): ox, oy, oz are read below but never assigned in this
  // copy — presumably they should be set from the (sub)domain origin
  // before the loops; confirm against the original source.
  REAL_TYPE ox, oy, oz;

  // length: x-limit of the driver region in global coordinates
  REAL_TYPE len = G_origin[0] + (drv_length)/R->RefLength;

  // mark every cell with x below the driver limit as driver medium
  if ( drv_length > 0.0 ) {
    for (int k=1; k<=kx; k++) {
      for (int j=1; j<=jx; j++) {
        for (int i=1; i<=ix; i++) {
          size_t m = _F_IDX_S3D(i, j, k, ix, jx, kx, gd);
          x = ox + 0.5*dx + dx*(i-1);
          if ( x < len ) bcd[m] |= mid_driver;
        }
      }
    }
  }

  // a driver cell whose +x neighbour is fluid becomes a driver face
  if ( drv_length > 0.0 ) {
    for (int k=1; k<=kx; k++) {
      for (int j=1; j<=jx; j++) {
        for (int i=1; i<=ix; i++) {
          size_t m = _F_IDX_S3D(i, j, k, ix, jx, kx, gd);
          size_t m1= _F_IDX_S3D(i+1, j, k, ix, jx, kx, gd);
          if ( (DECODE_CMP(bcd[m]) == mid_driver) && (DECODE_CMP(bcd[m1]) == mid_fluid) ) {
            bcd[m] |= mid_driver_face;
          }
        }
      }
    }
  }

  // NOTE(review): this pass also ORs the solid id into every x < len cell,
  // and `y` is computed but unused — confirm the intended condition.
  for (int k=1; k<=kx; k++) {
    for (int j=1; j<=jx; j++) {
      for (int i=1; i<=ix; i++) {
        size_t m = _F_IDX_S3D(i, j, k, ix, jx, kx, gd);
        x = ox + 0.5*dx + dx*(i-1);
        y = oy + 0.5*dy + dy*(j-1);
        if ( x < len ) {
          bcd[m] |= mid_solid;
        }
      }
    }
  }
}
#include <Core/Core.h>

using namespace Upp;

// Print the local system time followed by the UTC time, one per line.
CONSOLE_APP_MAIN
{
	Cout() << GetSysTime() << "\n"
	       << GetUtcTime() << "\n";
}
// Declaration-only file: defines the ol.TransformFunction typedef for the
// Closure compiler; there is no runtime behavior here.
goog.provide('ol.TransformFunction');


/**
 * A transform function accepts an array of input coordinate values, an optional
 * output array, and an optional dimension (default should be 2). The function
 * transforms the input coordinate values, populates the output array, and
 * returns the output array.
 *
 * @typedef {function(Array.<number>, Array.<number>=, number=): Array.<number>}
 * @todo api
 */
ol.TransformFunction;
import os
import shutil
import time

from xlog import getLogger
xlog = getLogger("gae_proxy")

from .config import config


class Scan_ip_log():
    """Append-only log of IP-scan events, rolled when it grows too long.

    NOTE(review): the class attribute name was redacted to the literal
    token <API key> in this copy — restore the original name (the
    maximum line count before rolling) before running.
    """
    # Maximum number of lines kept in scan_ip.log before it is rolled.
    <API key> = 3000

    def __init__(self):
        # Log lives in the configured data directory.
        self.log_path = os.path.join(config.DATA_PATH, "scan_ip.log")
        self.open_log()

    def get_log_content(self):
        """Return the whole current log file as a string ('' if absent)."""
        if not os.path.isfile(self.log_path):
            return ""

        with open(self.log_path, "r") as fd:
            content = fd.read()
        return content

    def open_log(self):
        """Open the log for appending, rolling it first if it is too long."""
        if os.path.isfile(self.log_path):
            with open(self.log_path, "r") as fd:
                lines = fd.readlines()
            line_num = len(lines)
            if line_num >= self.<API key>:
                self.roll_log()

        self.log_fd = open(self.log_path, "a")

    def roll_log(self):
        """Rename the current log to the first free scan_ip.N.log slot.

        NOTE(review): if all 1000 slots exist, this silently does nothing
        and the log keeps growing — confirm that is acceptable.
        """
        for i in range(1000):
            file_name = os.path.join(config.DATA_PATH, "scan_ip.%d.log" % i)
            if os.path.isfile(file_name):
                continue

            xlog.info("scan_ip_log roll %s -> %s", self.log_path, file_name)
            shutil.move(self.log_path, file_name)
            return

    def log(self, level, fmt, *args, **kwargs):
        """Format one timestamped line and append it; flush is best-effort."""
        string = '%s - [%s] %s\n' % (time.ctime()[4:-5], level, fmt % args)
        #print string
        #sys.stderr.write(string)
        self.log_fd.write(string)
        try:
            # flush may fail (e.g. closed/duplicated fd); deliberately ignored
            self.log_fd.flush()
        except:
            pass

    def debug(self, fmt, *args, **kwargs):
        self.log('DEBUG', fmt, *args, **kwargs)

    def info(self, fmt, *args, **kwargs):
        # NOTE(review): unlike debug()/warn(), kwargs are dropped here —
        # confirm whether that asymmetry is intentional.
        self.log('INFO', fmt, *args)

    def warn(self, fmt, *args, **kwargs):
        self.log('WARNING', fmt, *args, **kwargs)


# Module-level singleton; opening the log happens at import time.
scan_ip_log = Scan_ip_log()

if __name__ == '__main__':
    scan_ip_log.info("ADD abc")
    scan_ip_log.info("ADD ab1")
    scan_ip_log.info("ADD ab2")
    scan_ip_log.info("ADD ab3")
#define MODULE MAIN #include <emmc.h> #include <framebuffer.h> #include <gpio.h> #include <led.h> #include <log.h> #include <timer.h> #include <util.h> /* * entry point to our operating system */ slice_main() { int i, ret; /* prepare LED pin */ <API key>(16, 1); /* initialize framebuffer */ ret = fb_init(); if (ret != 0) { error_blink(); } /* initialize console */ console_init(); kprintf("Console initialized, welcome to Slice.\n"); /* initialize SD card */ emmc_init(); kprintf("Done.\n"); error_solid(); }
<?php

/*
 * Tests for Twig's error reporting: template file/line attribution on
 * Twig_Error and on runtime errors raised while rendering.
 *
 * NOTE(review): the class names and test method names in this copy were
 * redacted to the literal token <API key>; restore the original
 * identifiers (the base class is presumably PHPUnit's test case) before
 * running this suite.
 */
class <API key> extends <API key>
{
    // setTemplateFile() with an SplFileInfo should surface the file path
    // in the error message.
    public function <API key>()
    {
        $error = new Twig_Error( 'foo' );
        $error->setTemplateFile( new SplFileInfo( __FILE__ ) );

        $this->assertContains( 'test'.DIRECTORY_SEPARATOR.'Twig'.DIRECTORY_SEPARATOR.'Tests'.DIRECTORY_SEPARATOR.'ErrorTest.php', $error->getMessage() );
    }

    // setTemplateFile() with an array should be JSON-encoded into the message.
    public function <API key>()
    {
        $error = new Twig_Error( 'foo' );
        $error->setTemplateFile( array( 'foo' => 'bar' ) );

        $this->assertEquals( 'foo in {"foo":"bar"}', $error->getMessage() );
    }

    // Errors raised while rendering a filesystem-loaded template must carry
    // the template name and line, both for strict-variable misses and for
    // exceptions thrown inside the template.
    public function <API key>()
    {
        $loader = new <API key>( dirname( __FILE__ ).'/Fixtures/errors' );
        $twig = new Twig_Environment( $loader, array( 'strict_variables' => true, 'debug' => true, 'cache' => false ) );

        $template = $twig->loadTemplate( 'index.html' );
        try
        {
            $template->render( array() );

            $this->fail();
        }
        catch( Twig_Error_Runtime $e )
        {
            $this->assertEquals( 'Variable "foo" does not exist in "index.html" at line 3', $e->getMessage() );
            $this->assertEquals( 3, $e->getTemplateLine() );
            $this->assertEquals( 'index.html', $e->getTemplateFile() );
        }

        try
        {
            $template->render( array( 'foo' => new <API key>() ) );

            $this->fail();
        }
        catch( Twig_Error_Runtime $e )
        {
            $this->assertEquals( 'An exception has been thrown during the rendering of a template ("Runtime error...") in "index.html" at line 3.', $e->getMessage() );
            $this->assertEquals( 3, $e->getTemplateLine() );
            $this->assertEquals( 'index.html', $e->getTemplateFile() );
        }
    }

    /**
     * Same checks as above, but across includes / inheritance: the reported
     * template name and line must point at the template that actually failed.
     *
     * @dataProvider getErroredTemplates
     */
    public function <API key>( $templates, $name, $line )
    {
        $loader = new Twig_Loader_Array( $templates );
        $twig = new Twig_Environment( $loader, array( 'strict_variables' => true, 'debug' => true, 'cache' => false ) );

        $template = $twig->loadTemplate( 'index' );
        try
        {
            $template->render( array() );

            $this->fail();
        }
        catch( Twig_Error_Runtime $e )
        {
            $this->assertEquals( sprintf( 'Variable "foo" does not exist in "%s" at line %d', $name, $line ), $e->getMessage() );
            $this->assertEquals( $line, $e->getTemplateLine() );
            $this->assertEquals( $name, $e->getTemplateFile() );
        }

        try
        {
            $template->render( array( 'foo' => new <API key>() ) );

            $this->fail();
        }
        catch( Twig_Error_Runtime $e )
        {
            $this->assertEquals( sprintf( 'An exception has been thrown during the rendering of a template ("Runtime error...") in "%s" at line %d.', $name, $line ), $e->getMessage() );
            $this->assertEquals( $line, $e->getTemplateLine() );
            $this->assertEquals( $name, $e->getTemplateFile() );
        }
    }

    // Data provider: (template map, expected failing template name, expected line).
    public function getErroredTemplates()
    {
        return array(
            // error occurs in a template
            array(
                array(
                    'index' => "\n\n{{ foo.bar }}\n\n\n{{ 'foo' }}",
                ),
                'index', 3,
            ),

            // error occurs in an included template
            array(
                array(
                    'index' => "{% include 'partial' %}",
                    'partial' => '{{ foo.bar }}',
                ),
                'partial', 1,
            ),

            // error occurs in a parent block when called via parent()
            array(
                array(
                    'index' => "{% extends 'base' %} {% block content %} {{ parent() }} {% endblock %}",
                    'base' => '{% block content %}{{ foo.bar }}{% endblock %}',
                ),
                'base', 1,
            ),

            // error occurs in a block from the child
            array(
                array(
                    'index' => "{% extends 'base' %} {% block content %} {{ foo.bar }} {% endblock %} {% block foo %} {{ foo.bar }} {% endblock %}",
                    'base' => '{% block content %}{% endblock %}',
                ),
                'index', 3,
            ),
        );
    }
}

// Fixture object whose bar() always throws, used to trigger runtime errors
// inside templates.
class <API key>
{
    public function bar()
    {
        throw new Exception( 'Runtime error...' );
    }
}
/*!
 * \example httpd_simple/httpd_simple.c
 *
 * Simple Webserver
 *
 */
#include <dev/board.h>
#include <dev/urom.h>

#include <sys/version.h>
#include <sys/timer.h>
#include <sys/confnet.h>
#include <sys/socket.h>

#include <arpa/inet.h>
#include <pro/dhcp.h>
#include <pro/httpd.h>

#include <io.h>
#include <errno.h>

/*!
 * \brief Main application routine.
 *
 * Nut/OS automatically calls this entry after initialization.
 *
 * Boot sequence: console UART at 115200, Ethernet + DHCP, UROM file
 * system; then serve HTTP on port 80 forever, one connection at a time.
 * All fatal initialization failures spin in an endless loop.
 */
int main(void)
{
    uint32_t baud = 115200;
    TCPSOCKET *sock;
    FILE *stream;

    /*
     * Initialize the console.
     */
    NutRegisterDevice(&DEV_CONSOLE, 0, 0);
    freopen(DEV_CONSOLE.dev_name, "w", stdout);
    _ioctl(_fileno(stdout), UART_SETSPEED, &baud);
    printf("\n\nSimple HTTP Daemon running on Nut/OS %s\n", NutVersionString());

    /*
     * Initialize the network interface.
     */
    printf("Configure %s...", DEV_ETHER_NAME);
    if (NutRegisterDevice(&DEV_ETHER, 0, 0)) {
        puts("failed. Cannot register Ethernet device.");
        for (;;);
    }
    /* 60 s DHCP timeout; falls back to asking for manual configuration. */
    if (NutDhcpIfConfig(DEV_ETHER_NAME, NULL, 60000)) {
        puts("failed. Cannot configure network.\nUse editconf.");
        for (;;);
    }
    printf("%s ready\n", inet_ntoa(confnet.cdn_ip_addr));

    /*
     * Initialize the file system.
     */
    printf("Register UROM file system...");
    if (NutRegisterDevice(&devUrom, 0, 0)) {
        puts("failed.");
        for (;;);
    }
    puts("OK.");

    /*
     * Now loop endless for connections.
     */
    for (;;) {
        /* Create a socket. */
        sock = NutTcpCreateSocket();
        if (sock == NULL) {
            printf("Error %d creating socket.\n", errno);
            NutSleep(1000);
            continue;
        }

        /* Listen on port 80. NutTcpAccept() will block until we get a
           connection from a client. */
        printf("Listening...");
        NutTcpAccept(sock, 80);
        printf("Connected...");

        /* Associate a binary stdio stream with the socket. */
        stream = _fdopen((int) ((uintptr_t) sock), "r+b");
        if (stream == NULL) {
            printf("Error %d creating stream.\n", errno);
        } else {
            /* This API call saves us a lot of work. It will parse the
               client's HTTP request, send the requested file.
               NOTE(review): the call was redacted to <API key>; presumably
               Nut/OS's HTTP request processor — confirm against the
               original example. */
            <API key>(stream);

            /* Destroy the associated stream. */
            fclose(stream);
        }

        /* Close the socket. */
        NutTcpCloseSocket(sock);
        puts("Disconnected");
    }
    return 0;
}
# Package initializer: re-exports the converter API so callers can use
# `from notedown import convert`, etc.
from __future__ import absolute_import

from .notedown import *
from .main import convert, markdown_template, __version__

# avoid having to require the notebook to install notedown
# The Jupyter contents manager is optional: if the notebook package is
# absent, bind the (redacted) name to an explanatory string instead of
# failing at import time. Attribute access still works; only *using* it
# as a contents manager will fail, with a readable value.
# NOTE(review): the imported name was redacted in this copy — restore it
# from the original package before running.
try:
    from .contentsmanager import <API key>
except ImportError:
    <API key> = 'You need to install the jupyter notebook.'
<!DOCTYPE HTML PUBLIC "- <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (version 1.7.0_02) on Tue Apr 29 11:29:43 CEST 2014 --> <meta http-equiv="Content-Type" content="text/html" charset="utf-8"> <title>org.gradle.api.tasks (Gradle API 1.12)</title> <meta name="date" content="2014-04-29"> <link rel="stylesheet" type="text/css" href="../../../../javadoc.css" title="Style"> </head> <body> <script type="text/javascript"><! if (location.href.indexOf('is-external=true') == -1) { parent.document.title="org.gradle.api.tasks (Gradle API 1.12)"; } </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <div class="topNav"><a name="navbar_top"> </a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow"> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../overview-summary.html">Overview</a></li> <li class="navBarCell1Rev">Package</li> <li>Class</li> <li><a href="package-tree.html">Tree</a></li> <li><a href="../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../index-all.html">Index</a></li> <li><a href="../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../org/gradle/api/specs/package-summary.html">Prev Package</a></li> <li><a href="../../../../org/gradle/api/tasks/ant/package-summary.html">Next Package</a></li> </ul> <ul class="navList"> <li><a href="../../../../index.html?org/gradle/api/tasks/package-summary.html" target="_top">Frames</a></li> <li><a href="package-summary.html" target="_top">No Frames</a></li> </ul> <ul class="navList" id="<API key>"> <li><a href="../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><! 
allClassesLink = document.getElementById("<API key>"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } </script> </div> <a name="skip-navbar_top"> </a></div> <div class="header"> <h1 title="Package" class="title">Package&nbsp;org.gradle.api.tasks</h1> <div class="docSummary"> <div class="block">The standard <a href="../../../../org/gradle/api/Task.html" title="interface in org.gradle.api"><code>Task</code></a> implementations.</div> </div> <p>See:&nbsp;<a href="#package_description">Description</a></p> </div> <div class="contentContainer"> <ul class="blockList"> <li class="blockList"> <table class="packageSummary" border="0" cellpadding="3" cellspacing="0" summary="Interface Summary table, listing interfaces, and an explanation"> <caption><span>Interface Summary</span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Interface</th> <th class="colLast" scope="col">Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/ConventionValue.html" title="interface in org.gradle.api.tasks">ConventionValue</a></td> <td class="colLast">Deprecated <div class="block"><i>Use <a href="http: </td> </tr> <tr class="rowColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/GroovySourceSet.html" title="interface in org.gradle.api.tasks">GroovySourceSet</a></td> <td class="colLast"> <div class="block">A <code><API key></code> defines the properties and methods added to a <a href="../../../../org/gradle/api/tasks/SourceSet.html" title="interface in org.gradle.api.tasks"><code>SourceSet</code></a> by the <a href="../../../../org/gradle/api/plugins/GroovyPlugin.html" title="class in org.gradle.api.plugins"><code>GroovyPlugin</code></a>.</div> </td> </tr> <tr class="altColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/ScalaSourceSet.html" title="interface in 
org.gradle.api.tasks">ScalaSourceSet</a></td> <td class="colLast"> <div class="block">A <code><API key></code> defines the properties and methods added to a <a href="../../../../org/gradle/api/tasks/SourceSet.html" title="interface in org.gradle.api.tasks"><code>SourceSet</code></a> by the <code>ScalaPlugin</code>.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/SourceSet.html" title="interface in org.gradle.api.tasks">SourceSet</a></td> <td class="colLast"> <div class="block">A <code>SourceSet</code> represents a logical group of Java source and resources.</div> </td> </tr> <tr class="altColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/SourceSetContainer.html" title="interface in org.gradle.api.tasks">SourceSetContainer</a></td> <td class="colLast"> <div class="block">A <code>SourceSetContainer</code> manages a set of <a href="../../../../org/gradle/api/tasks/SourceSet.html" title="interface in org.gradle.api.tasks"><code>SourceSet</code></a> objects.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/SourceSetOutput.html" title="interface in org.gradle.api.tasks">SourceSetOutput</a></td> <td class="colLast"> <div class="block">A collection of all output directories (compiled classes, processed resources, etc.) 
- notice that <a href="../../../../org/gradle/api/tasks/SourceSetOutput.html" title="interface in org.gradle.api.tasks"><code>SourceSetOutput</code></a> extends <a href="../../../../org/gradle/api/file/FileCollection.html" title="interface in org.gradle.api.file"><code>FileCollection</code></a>.</div> </td> </tr> <tr class="altColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/TaskCollection.html" title="interface in org.gradle.api.tasks">TaskCollection</a>&lt;T extends <a href="../../../../org/gradle/api/Task.html" title="interface in org.gradle.api">Task</a>&gt;</td> <td class="colLast"> <div class="block">A <code>TaskCollection</code> contains a set of <a href="../../../../org/gradle/api/Task.html" title="interface in org.gradle.api"><code>Task</code></a> instances, and provides a number of query methods.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/TaskContainer.html" title="interface in org.gradle.api.tasks">TaskContainer</a></td> <td class="colLast"> <div class="block">A <code>TaskContainer</code> is responsible for managing a set of <a href="../../../../org/gradle/api/Task.html" title="interface in org.gradle.api"><code>Task</code></a> instances.</div> </td> </tr> <tr class="altColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/TaskDependency.html" title="interface in org.gradle.api.tasks">TaskDependency</a></td> <td class="colLast"> <div class="block">A <code>TaskDependency</code> represents an <em>unordered</em> set of tasks which a <a href="../../../../org/gradle/api/Task.html" title="interface in org.gradle.api"><code>Task</code></a> depends on.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/TaskInputs.html" title="interface in org.gradle.api.tasks">TaskInputs</a></td> <td class="colLast"> <div class="block">A <code>TaskInputs</code> represents the inputs for a task.</div> </td> </tr> <tr 
class="altColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/TaskOutputs.html" title="interface in org.gradle.api.tasks">TaskOutputs</a></td> <td class="colLast"> <div class="block">A <code>TaskOutputs</code> represents the outputs of a task.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/TaskState.html" title="interface in org.gradle.api.tasks">TaskState</a></td> <td class="colLast"> <div class="block"><code>TaskState</code> provides information about the execution state of a <a href="../../../../org/gradle/api/Task.html" title="interface in org.gradle.api"><code>Task</code></a>.</div> </td> </tr> <tr class="altColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/VerificationTask.html" title="interface in org.gradle.api.tasks">VerificationTask</a></td> <td class="colLast"> <div class="block">A <code>VerificationTask</code> is a task which performs some verification of the artifacts produced by a build.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/WorkResult.html" title="interface in org.gradle.api.tasks">WorkResult</a></td> <td class="colLast"> <div class="block">Provides information about some work which was performed.</div> </td> </tr> </tbody> </table> </li> <li class="blockList"> <table class="packageSummary" border="0" cellpadding="3" cellspacing="0" summary="Class Summary table, listing classes, and an explanation"> <caption><span>Class Summary</span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Class</th> <th class="colLast" scope="col">Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/AbstractCopyTask.html" title="class in org.gradle.api.tasks">AbstractCopyTask</a></td> <td class="colLast"> <div class="block"><code>AbstractCopyTask</code> is the base class for all copy tasks.</div> </td> </tr> <tr 
class="rowColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/Copy.html" title="class in org.gradle.api.tasks">Copy</a></td> <td class="colLast"> <div class="block">Copies files into a destination directory.</div> </td> </tr> <tr class="altColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/Delete.html" title="class in org.gradle.api.tasks">Delete</a></td> <td class="colLast"> <div class="block">Deletes files or directories.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/Exec.html" title="class in org.gradle.api.tasks">Exec</a></td> <td class="colLast"> <div class="block">Executes a command line process.</div> </td> </tr> <tr class="altColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/GradleBuild.html" title="class in org.gradle.api.tasks">GradleBuild</a></td> <td class="colLast"> <div class="block">Executes a Gradle build.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/GroovyRuntime.html" title="class in org.gradle.api.tasks">GroovyRuntime</a></td> <td class="colLast"> <div class="block">Provides information related to the Groovy runtime(s) used in a project.</div> </td> </tr> <tr class="altColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/JavaExec.html" title="class in org.gradle.api.tasks">JavaExec</a></td> <td class="colLast"> <div class="block">Executes a Java application in a child process.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/SourceTask.html" title="class in org.gradle.api.tasks">SourceTask</a></td> <td class="colLast"> <div class="block">A <code>SourceTask</code> performs some operation on source files.</div> </td> </tr> <tr class="altColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/Sync.html" title="class in org.gradle.api.tasks">Sync</a></td> <td class="colLast"> <div 
class="block">Synchronises the contents of a destination directory with some source directories and files.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/Upload.html" title="class in org.gradle.api.tasks">Upload</a></td> <td class="colLast"> <div class="block">Uploads the artifacts of a <a href="../../../../org/gradle/api/artifacts/Configuration.html" title="interface in org.gradle.api.artifacts"><code>Configuration</code></a> to a set of repositories.</div> </td> </tr> </tbody> </table> </li> <li class="blockList"> <table class="packageSummary" border="0" cellpadding="3" cellspacing="0" summary="Exception Summary table, listing exceptions, and an explanation"> <caption><span>Exception Summary</span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Exception</th> <th class="colLast" scope="col">Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/StopActionException.html" title="class in org.gradle.api.tasks">StopActionException</a></td> <td class="colLast"> <div class="block">A <code>StopActionException</code> is be thrown by a task <a href="../../../../org/gradle/api/Action.html" title="interface in org.gradle.api"><code>Action</code></a> or task action closure to stop its own execution and to start execution of the task's next action.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/<API key>.html" title="class in org.gradle.api.tasks"><API key></a></td> <td class="colLast"> <div class="block">A <code><API key></code> is thrown by a <a href="../../../../org/gradle/api/Action.html" title="interface in org.gradle.api"><code>Action</code></a> or task action closure to stop execution of the current task and start execution of the next task.</div> </td> </tr> <tr class="altColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/<API key>.html" 
title="class in org.gradle.api.tasks"><API key></a></td> <td class="colLast"> <div class="block">A <code><API key></code> is thrown when a task fails to execute successfully.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/<API key>.html" title="class in org.gradle.api.tasks"><API key></a></td> <td class="colLast"> <div class="block">A <code><API key></code> is thrown when a task cannot be instantiated for some reason.</div> </td> </tr> <tr class="altColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/<API key>.html" title="class in org.gradle.api.tasks"><API key></a></td> <td class="colLast"> <div class="block">A <code><API key></code> is thrown when there is some validation problem with a task.</div> </td> </tr> </tbody> </table> </li> <li class="blockList"> <table class="packageSummary" border="0" cellpadding="3" cellspacing="0" summary="Annotation Types Summary table, listing annotation types, and an explanation"> <caption><span>Annotation Types Summary</span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Annotation Type</th> <th class="colLast" scope="col">Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/Input.html" title="annotation in org.gradle.api.tasks">Input</a></td> <td class="colLast"> <div class="block">Attached to a task property to indicate that the property specifies some input value for the task.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/InputDirectory.html" title="annotation in org.gradle.api.tasks">InputDirectory</a></td> <td class="colLast"> <div class="block">Marks a property as specifying an input directory for a task.</div> </td> </tr> <tr class="altColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/InputFile.html" title="annotation in org.gradle.api.tasks">InputFile</a></td> <td 
class="colLast"> <div class="block">Marks a property as specifying an input file for a task.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/InputFiles.html" title="annotation in org.gradle.api.tasks">InputFiles</a></td> <td class="colLast"> <div class="block">Marks a property as specifying the input files for a task.</div> </td> </tr> <tr class="altColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/Nested.html" title="annotation in org.gradle.api.tasks">Nested</a></td> <td class="colLast"> <div class="block">Marks a property as specifying a nested bean, whose properties should be checked for annotations.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/Optional.html" title="annotation in org.gradle.api.tasks">Optional</a></td> <td class="colLast"> <div class="block">Marks a task property as optional.</div> </td> </tr> <tr class="altColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/OutputDirectories.html" title="annotation in org.gradle.api.tasks">OutputDirectories</a></td> <td class="colLast"> <div class="block">Marks a property as specifying one or more output directories for a task.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/OutputDirectory.html" title="annotation in org.gradle.api.tasks">OutputDirectory</a></td> <td class="colLast"> <div class="block">Marks a property as specifying an output directory for a task.</div> </td> </tr> <tr class="altColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/OutputFile.html" title="annotation in org.gradle.api.tasks">OutputFile</a></td> <td class="colLast"> <div class="block">Marks a property as specifying an output file for a task.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/OutputFiles.html" title="annotation in 
org.gradle.api.tasks">OutputFiles</a></td> <td class="colLast"> <div class="block">Marks a property as specifying one or more output files for a task.</div> </td> </tr> <tr class="altColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/SkipWhenEmpty.html" title="annotation in org.gradle.api.tasks">SkipWhenEmpty</a></td> <td class="colLast"> <div class="block">Attached to a task property to indicate that the task should be skipped when the value of the property is an empty <a href="../../../../org/gradle/api/file/FileCollection.html" title="interface in org.gradle.api.file"><code>FileCollection</code></a> or directory.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><a href="../../../../org/gradle/api/tasks/TaskAction.html" title="annotation in org.gradle.api.tasks">TaskAction</a></td> <td class="colLast"> <div class="block">Marks a method as the action to run when the task is executed.</div> </td> </tr> </tbody> </table> </li> </ul> <a name="package_description"> </a> <h2 title="Package org.gradle.api.tasks Description">Package org.gradle.api.tasks Description</h2> <div class="block">The standard <a href="../../../../org/gradle/api/Task.html" title="interface in org.gradle.api"><code>Task</code></a> implementations.</div> </div> <div class="bottomNav"><a name="navbar_bottom"> </a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="<API key>"> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../overview-summary.html">Overview</a></li> <li class="navBarCell1Rev">Package</li> <li>Class</li> <li><a href="package-tree.html">Tree</a></li> <li><a href="../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../index-all.html">Index</a></li> <li><a href="../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../org/gradle/api/specs/package-summary.html">Prev Package</a></li> <li><a 
href="../../../../org/gradle/api/tasks/ant/package-summary.html">Next Package</a></li> </ul> <ul class="navList"> <li><a href="../../../../index.html?org/gradle/api/tasks/package-summary.html" target="_top">Frames</a></li> <li><a href="package-summary.html" target="_top">No Frames</a></li> </ul> <ul class="navList" id="<API key>"> <li><a href="../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><! allClassesLink = document.getElementById("<API key>"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } </script> </div> <a name="skip-navbar_bottom"> </a></div> </body> </html>
# If you did not, you can find one at the following link. from core.plugins import ProtocolPlugin from ConfigParser import RawConfigParser as ConfigParser from core.decorators import * class FetchPlugin(ProtocolPlugin): commands = { "fetch": "commandFetch", "bring": "commandFetch", "invite": "commandInvite", "fp": "commandFetchProtect", "fo": "<API key>", } hooks = { "chatmsg": "message" } def gotClient(self): self.client.var_fetchrequest = False self.client.var_fetchdata = () def message(self, message): if self.client.var_fetchrequest: self.client.var_fetchrequest = False if message in ["y", "yes"]: sender, world, rx, ry, rz = self.client.var_fetchdata if self.client.world == world: self.client.teleportTo(rx, ry, rz) else: self.client.changeToWorld(world.id, position=(rx, ry, rz)) self.client.sendServerMessage("You have accepted the fetch request.") sender.sendServerMessage("%s has accepted your fetch request." % self.client.username) elif message in ["n", "no"]: sender = self.client.var_fetchdata[0] self.client.sendServerMessage("You did not accept the fetch request.") sender.sendServerMessage("%s did not accept your request." % self.client.username) else: sender = self.client.var_fetchdata[0] self.client.sendServerMessage("You have ignored the fetch request.") sender.sendServerMessage("%s has ignored your request." % self.client.username) return return True @player_list @username_command def commandInvite(self, user, fromloc, overriderank): "/invite username - Guest\Invites a user to be where you are." # Shift the locations right to make them into block coords rx = self.client.x >> 5 ry = self.client.y >> 5 rz = self.client.z >> 5 user.var_prefetchdata = (self.client, self.client.world) if self.client.world.id == user.world.id: user.sendServerMessage("%s would like to fetch you." % self.client.username) else: user.sendServerMessage("%s would like to fetch you to %s." % (self.client.username, self.client.world.id)) user.sendServerMessage("Do you wish to accept? 
[y]es [n]o") user.var_fetchrequest = True user.var_fetchdata = (self.client, self.client.world, rx, ry, rz) self.client.sendServerMessage("The fetch request has been sent.") @mod_only def commandFetchProtect(self, parts, fromloc, overriderank): "/fp on|off - Mod\nToggles Fetch Protection for yourself." if len(parts) != 2: self.client.sendServerMessage("You must specify either \'on\' or \'off\'.") elif parts[1] == "on": config = ConfigParser() config.read('config/data/fprot.meta') config.add_section(self.client.username) fp = open('config/data/fprot.meta', "w") config.write(fp) fp.close() self.client.sendServerMessage("Fetch protection is now on.") elif parts[1] == "off": config = ConfigParser() config.read('config/data/fprot.meta') config.remove_section(self.client.username) fp = open('config/data/fprot.meta', "w") config.write(fp) fp.close() self.client.sendServerMessage("Fetch protection is now off.") else: self.client.sendServerMessage("You must specify either \'on\' or \'off\'.") @player_list @admin_only @username_command def <API key>(self, user, fromloc, overriderank): "/fo username - Mod\nTeleports a user to be where you are" # Shift the locations right to make them into block coords rx = self.client.x >> 5 ry = self.client.y >> 5 rz = self.client.z >> 5 if user.world == self.client.world: user.teleportTo(rx, ry, rz) else: if self.client.isModPlus(): user.changeToWorld(self.client.world.id, position=(rx, ry, rz)) else: self.client.sendServerMessage("%s cannot be fetched from '%s'" % (self.client.username, user.world.id)) return user.sendServerMessage("You have been fetched by %s" % self.client.username) @player_list @op_only @username_command def commandFetch(self, user, fromloc, overriderank): "/fetch username - Op\nAliases: bring\nTeleports a user to be where you are" # Shift the locations right to make them into block coords rx = self.client.x >> 5 ry = self.client.y >> 5 rz = self.client.z >> 5 config = ConfigParser() 
config.read('config/data/fprot.meta') if config.has_section(user.username): self.client.sendServerMessage("You can't fetch this person; they're Fetch Protected!") else: if user.world == self.client.world: user.teleportTo(rx, ry, rz) else: if self.client.isModPlus(): user.changeToWorld(self.client.world.id, position=(rx, ry, rz)) else: self.client.sendServerMessage("%s cannot be fetched from '%s'" % (self.client.username, user.world.id)) return user.sendServerMessage("You have been fetched by %s" % self.client.username)
/*
 * Varnish shared-memory-log (VSL) tag table.
 *
 * Each SLTM(tag, flags, sdesc, ldesc) entry declares one log-record tag
 * with its flags, a one-line summary and a long reStructuredText
 * description. The including file defines SLTM() to expand into whatever
 * it needs (enum values, documentation, etc.) before including this one.
 *
 * NOTE(review): many of the "\t| ... +----" legend strings below are
 * visibly truncated in this copy of the file (runs like "\t| | + ").
 * Restore them from the upstream include/tbl/vsl_tags.h before compiling.
 */
/*lint -save -e525 -e539 */

/* Shared notice appended to descriptions of tags masked by default. */
#define NODEF_NOTICE \
    "NB: This log record is masked by default.\n\n"

SLTM(Debug, SLT_F_BINARY, "Debug messages",
    "Debug messages can normally be ignored, but are sometimes"
    " helpful during trouble-shooting. Most debug messages must"
    " be explicitly enabled with parameters.\n\n"
)

SLTM(Error, 0, "Error messages",
    "Error messages are stuff you probably want to know.\n\n"
)

SLTM(CLI, 0, "CLI communication",
    "CLI communication between varnishd master and child process.\n\n"
)

/* Session (client connection) lifecycle records. */
SLTM(SessOpen, 0, "Client connection opened",
    "The first record for a client connection, with the socket-endpoints"
    " of the connection.\n\n"
    "The format is::\n\n"
    "\t%s %d %s %s %s %d\n"
    "\t| | | | | |\n"
    "\t| | | | | +- File descriptor number\n"
    "\t| | | | +---- Local TCP port ('-' if !$log_local_addr)\n"
    "\t| | | + "\t| | + "\t| + "\t+ "\n"
)

/*
 * XXX: compilers are _so_ picky, and won't let us do an #include
 * XXX: in the middle of a macro invocation :-(
 * XXX: If we could, these three lines would have described the
 * XXX: 'reason' field below.
#define SESS_CLOSE(nm, s, err, desc) " " #nm "\n\t" desc "\n\n"
#include "tbl/sess_close.h"
#undef SESS_CLOSE
*/

SLTM(SessClose, 0, "Client connection closed",
    "SessionClose is the last record for any client connection.\n\n"
    "The format is::\n\n"
    "\t%s %f\n"
    "\t| |\n"
    "\t| +- How long the session was open\n"
    "\t+---- Why the connection closed\n"
    "\n"
)

/* Backend connection lifecycle records. */
SLTM(BackendOpen, 0, "Backend connection opened",
    "Logged when a new backend connection is opened.\n\n"
    "The format is::\n\n"
    "\t%d %s %s %s %s %s\n"
    "\t| | | | | |\n"
    "\t| | | | | +- Local port\n"
    "\t| | | | +---- Local address\n"
    "\t| | | + "\t| | + "\t| + "\t+ "\n"
)

SLTM(BackendReuse, 0, "Backend connection put up for reuse",
    "Logged when a backend connection is put up for reuse by a later"
    " connection.\n\n"
    "The format is::\n\n"
    "\t%d %s\n"
    "\t| |\n"
    "\t| +- Backend display name\n"
    "\t+---- Connection file descriptor\n"
    "\n"
)

SLTM(BackendClose, 0, "Backend connection closed",
    "Logged when a backend connection is closed.\n\n"
    "The format is::\n\n"
    "\t%d %s [ %s ]\n"
    "\t| | |\n"
    "\t| | +- Optional reason\n"
    "\t| + "\t+ "\n"
)

SLTM(HttpGarbage, SLT_F_BINARY, "Unparseable HTTP request",
    "Logs the content of unparseable HTTP requests.\n\n"
)

SLTM(Proxy, 0, "PROXY protocol information",
    "PROXY protocol information.\n\n"
    "The format is::\n\n"
    "\t%d %s %d %s %d [key value]...\n"
    "\t| | | | | |\n"
    "\t| | | | | +- optional information\n"
    "\t| | | | +- server port\n"
    "\t| | | +- server ip\n"
    "\t| | +- client port\n"
    "\t| +- client ip\n"
    "\t+---- PROXY protocol version\n"
)

SLTM(ProxyGarbage, 0, "Unparseable PROXY request",
    "A PROXY protocol header was unparseable.\n\n"
)

SLTM(Backend, 0, "Backend selected",
    "Logged when a connection is selected for handling a backend"
    " request.\n\n"
    "The format is::\n\n"
    "\t%d %s %s\n"
    "\t| | |\n"
    "\t| | +- Backend display name\n"
    "\t| +---- VCL name\n"
    "\t+ "\n"
)

SLTM(Length, 0, "Size of object body",
    "Logs the size of a fetch object body.\n\n"
)

SLTM(FetchError, 0, "Error while fetching object",
    "Logs the error message of a failed fetch operation.\n\n"
)

/*
 * Per-HTTP-header tags: tbl/vsl_tags_http.h is expanded five times with
 * different SLTH() definitions to generate the Req*, Resp*, Bereq*,
 * Beresp* and Obj* families of header tags.
 */
#define SLTH(tag, ind, req, resp, sdesc, ldesc) \
    SLTM(Req##tag, (req ? 0 : SLT_F_UNUSED), "Client request " sdesc, ldesc)
#include "tbl/vsl_tags_http.h"
#undef SLTH

#define SLTH(tag, ind, req, resp, sdesc, ldesc) \
    SLTM(Resp##tag, (resp ? 0 : SLT_F_UNUSED), "Client response " sdesc, \
    ldesc)
#include "tbl/vsl_tags_http.h"
#undef SLTH

#define SLTH(tag, ind, req, resp, sdesc, ldesc) \
    SLTM(Bereq##tag, (req ? 0 : SLT_F_UNUSED), "Backend request " sdesc, \
    ldesc)
#include "tbl/vsl_tags_http.h"
#undef SLTH

#define SLTH(tag, ind, req, resp, sdesc, ldesc) \
    SLTM(Beresp##tag, (resp ? 0 : SLT_F_UNUSED), "Backend response " \
    sdesc, ldesc)
#include "tbl/vsl_tags_http.h"
#undef SLTH

#define SLTH(tag, ind, req, resp, sdesc, ldesc) \
    SLTM(Obj##tag, (resp ? 0 : SLT_F_UNUSED), "Object " sdesc, ldesc)
#include "tbl/vsl_tags_http.h"
#undef SLTH

SLTM(BogoHeader, 0, "Bogus HTTP received",
    "Contains the first 20 characters of received HTTP headers we could"
    " not make sense of. Applies to both req.http and beresp.http.\n\n"
)

SLTM(LostHeader, 0, "Failed attempt to set HTTP header",
    "Logs the header name of a failed HTTP header operation due to"
    " resource exhaustion or configured limits.\n\n"
)

SLTM(TTL, 0, "TTL set on object",
    "A TTL record is emitted whenever the ttl, grace or keep"
    " values for an object is set.\n\n"
    "The format is::\n\n"
    "\t%s %d %d %d %d [ %d %d %u %u ]\n"
    "\t| | | | | | | | |\n"
    "\t| | | | | | | | +- Max-Age from Cache-Control header\n"
    "\t| | | | | | | +---- Expires header\n"
    "\t| | | | | | + "\t| | | | | + "\t| | | | + "\t| | | + "\t| | + "\t| + "\t+ "\n"
    "The last four fields are only present in \"RFC\" headers.\n\n"
    "Examples::\n\n"
    "\tRFC 60 10 -1 1312966109 1312966109 1312966109 0 60\n"
    "\tVCL 120 10 0 1312966111\n"
    "\n"
)

SLTM(Fetch_Body, 0, "Body fetched from backend",
    "Ready to fetch body from backend.\n\n"
    "The format is::\n\n"
    "\t%d (%s) %s\n"
    "\t| | |\n"
    "\t| | +---- 'stream' or '-'\n"
    "\t| + "\t+ "\n"
)

SLTM(VCL_acl, 0, "VSL ACL check results",
    "Logs VCL ACL evaluation results.\n\n"
)

SLTM(VCL_call, 0, "VCL method called",
    "Logs the VCL method name when a VCL method is called.\n\n"
)

SLTM(VCL_trace, 0, "VCL trace data",
    "Logs VCL execution trace data.\n\n"
    "The format is::\n\n"
    "\t%u %u.%u\n"
    "\t| | |\n"
    "\t| | +- VCL program line position\n"
    "\t| +---- VCL program line number\n"
    "\t+ "\n"
    NODEF_NOTICE
)

SLTM(VCL_return, 0, "VCL method return value",
    "Logs the VCL method terminating statement.\n\n"
)

SLTM(ReqStart, 0, "Client request start",
    "Start of request processing. Logs the client IP address and port"
    " number.\n\n"
    "The format is::\n\n"
    "\t%s %s\n"
    "\t| |\n"
    "\t| +- Client Port number\n"
    "\t+---- Client IP4/6 address\n"
    "\n"
)

SLTM(Hit, 0, "Hit object in cache",
    "Object looked up in cache. Shows the VXID of the object.\n\n"
)

SLTM(HitPass, 0, "Hit for pass object in cache.",
    "Hit-for-pass object looked up in cache. Shows the VXID of the"
    " hit-for-pass object.\n\n"
)

SLTM(ExpBan, 0, "Object evicted due to ban",
    "Logs the VXID when an object is banned.\n\n"
)

SLTM(ExpKill, 0, "Object expiry event",
    "Logs events related to object expiry. The events are:\n\n"
    "EXP_Rearm\n"
    "\tLogged when the expiry time of an object changes.\n\n"
    "EXP_Inbox\n"
    "\tLogged when the expiry thread picks an object from the inbox for"
    " processing.\n\n"
    "EXP_Kill\n"
    "\tLogged when the expiry thread kills an object from the inbox.\n\n"
    "EXP_When\n"
    "\tLogged when the expiry thread moves an object on the binheap.\n\n"
    "EXP_Expired\n"
    "\tLogged when the expiry thread expires an object.\n\n"
    "LRU_Cand\n"
    "\tLogged when an object is evaluated for LRU force expiry.\n\n"
    "LRU\n"
    "\tLogged when an object is force expired due to LRU.\n\n"
    "LRU_Fail\n"
    "\tLogged when no suitable candidate object is found for LRU force"
    " expiry.\n\n"
    "The format is::\n\n"
    "\tEXP_Rearm p=%p E=%f e=%f f=0x%x\n"
    "\tEXP_Inbox p=%p e=%f f=0x%x\n"
    "\tEXP_Kill p=%p e=%f f=0x%x\n"
    "\tEXP_When p=%p e=%f f=0x%x\n"
    "\tEXP_Expired x=%u t=%f\n"
    "\tLRU_Cand p=%p f=0x%x r=%d\n"
    "\tLRU x=%u\n"
    "\tLRU_Fail\n"
    "\t\n"
    "\tLegend:\n"
    "\tp=%p Objcore pointer\n"
    "\tt=%f Remaining TTL (s)\n"
    "\te=%f Expiry time (unix epoch)\n"
    "\tE=%f Old expiry time (unix epoch)\n"
    "\tf=0x%x Objcore flags\n"
    "\tr=%d Objcore refcount\n"
    "\tx=%u Object VXID\n"
    "\n"
)

SLTM(WorkThread, 0, "Logs thread start/stop events",
    "Logs worker thread creation and termination events.\n\n"
    "The format is::\n\n"
    "\t%p %s\n"
    "\t| |\n"
    "\t| +- [start|end]\n"
    "\t+---- Worker struct pointer\n"
    "\n"
    NODEF_NOTICE
)

SLTM(ESI_xmlerror, 0, "ESI parser error or warning message",
    "An error or warning was generated during parsing of an ESI object."
    " The log record describes the problem encountered."
)

SLTM(Hash, SLT_F_BINARY, "Value added to hash",
    "This value was added to the object lookup hash.\n\n"
    NODEF_NOTICE
)

SLTM(Backend_health, 0, "Backend health check",
    "The result of a backend health probe.\n\n"
    "The format is::\n\n"
    "\t%s %s %s %u %u %u %f %f %s\n"
    "\t| | | | | | | | |\n"
    "\t| | | | | | | | +- Probe HTTP response\n"
    "\t| | | | | | | +---- Average response time\n"
    "\t| | | | | | + "\t| | | | | + "\t| | | | + "\t| | | + "\t| | + "\t| + "\t+ "\n"
)

SLTM(VCL_Log, 0, "Log statement from VCL",
    "User generated log messages insert from VCL through std.log()"
)

SLTM(VCL_Error, 0, "VCL execution error message",
    "Logs error messages generated during VCL execution.\n\n"
)

SLTM(Gzip, 0, "G(un)zip performed on object",
    "A Gzip record is emitted for each instance of gzip or gunzip"
    " work performed. Worst case, an ESI transaction stored in"
    " gzip'ed objects but delivered gunziped, will run into many of"
    " these.\n\n"
    "The format is::\n\n"
    "\t%c %c %c %d %d %d %d %d\n"
    "\t| | | | | | | |\n"
    "\t| | | | | | | +- Bit length of compressed data\n"
    "\t| | | | | | +---- Bit location of 'last' bit\n"
    "\t| | | | | + "\t| | | | + "\t| | | + "\t| | + "\t| + "\t+ "\n"
    "Examples::\n\n"
    "\tU F E 182 159 80 80 1392\n"
    "\tG F E 159 173 80 1304 1314\n"
    "\n"
)

/* VXID transaction linkage records. */
SLTM(Link, 0, "Links to a child VXID",
    "Links this VXID to any child VXID it initiates.\n\n"
    "The format is::\n\n"
    "\t%s %d %s\n"
    "\t| | |\n"
    "\t| | +- Reason\n"
    "\t| +---- Child vxid\n"
    "\t+ "\n"
)

SLTM(Begin, 0, "Marks the start of a VXID",
    "The first record of a VXID transaction.\n\n"
    "The format is::\n\n"
    "\t%s %d %s\n"
    "\t| | |\n"
    "\t| | +- Reason\n"
    "\t| +---- Parent vxid\n"
    "\t+ "\n"
)

SLTM(End, 0, "Marks the end of a VXID",
    "The last record of a VXID transaction.\n\n"
)

SLTM(VSL, 0, "VSL API warnings and error message",
    "Warnings and error messages genererated by the VSL API while"
    " reading the shared memory log.\n\n"
)

SLTM(Storage, 0, "Where object is stored",
    "Type and name of the storage backend the object is stored in.\n\n"
    "The format is::\n\n"
    "\t%s %s\n"
    "\t| |\n"
    "\t| +- Name of storage backend\n"
    "\t+---- Type (\"malloc\", \"file\", \"persistent\" etc.)\n"
    "\n"
)

SLTM(Timestamp, 0, "Timing information",
    "Contains timing information for the Varnish worker threads.\n\n"
    "Time stamps are issued by Varnish on certain events,"
    " and show the absolute time of the event, the time spent since the"
    " start of the work unit, and the time spent since the last timestamp"
    " was logged. See vsl(7) for information about the individual"
    " timestamps.\n\n"
    "The format is::\n\n"
    "\t%s: %f %f %f\n"
    "\t| | | |\n"
    "\t| | | +- Time since last timestamp\n"
    "\t| | +---- Time since start of work unit\n"
    "\t| + "\t+ "\n"
)

/* Byte-count accounting records. */
SLTM(ReqAcct, 0, "Request handling byte counts",
    "Contains byte counts for the request handling.\n"
    "ESI sub-request counts are also added to their parent request.\n"
    "The body bytes count does not include transmission "
    "(ie: chunked encoding) overhead.\n"
    "The format is::\n\n"
    "\t%d %d %d %d %d %d\n"
    "\t| | | | | |\n"
    "\t| | | | | +- Total bytes transmitted\n"
    "\t| | | | +---- Body bytes transmitted\n"
    "\t| | | + "\t| | + "\t| + "\t+ "\n"
)

SLTM(PipeAcct, 0, "Pipe byte counts",
    "Contains byte counters for pipe sessions.\n\n"
    "The format is::\n\n"
    "\t%d %d %d %d\n"
    "\t| | | |\n"
    "\t| | | + "\t| | + "\t| + "\t+ "\n"
)

SLTM(BereqAcct, 0, "Backend request accounting",
    "Contains byte counters from backend request processing.\n\n"
    "The format is::\n\n"
    "\t%d %d %d %d %d %d\n"
    "\t| | | | | |\n"
    "\t| | | | | +- Total bytes received\n"
    "\t| | | | +---- Body bytes received\n"
    "\t| | | + "\t| | + "\t| + "\t+ "\n"
)

SLTM(VfpAcct, 0, "Fetch filter accounting",
    "Contains name of VFP and statistics.\n\n"
    "The format is::\n\n"
    "\t%s %d %d\n"
    "\t| | |\n"
    "\t| | +- Total bytes produced\n"
    "\t| +---- Number of calls made\n"
    "\t+ "\n"
    NODEF_NOTICE
)

SLTM(Witness, 0, "Lock order witness records",
    "Diagnostic recording of locking order.\n"
)

#undef NODEF_NOTICE

/*lint -restore */
#ifndef CDK_COMPLETER_H_ #define CDK_COMPLETER_H_ 1 #include <cdk/cdkdocumenthelper.h> G_BEGIN_DECLS #define CDK_TYPE_COMPLETER (<API key> ()) #define CDK_COMPLETER(obj) (<API key> ((obj), CDK_TYPE_COMPLETER, CdkCompleter)) #define CDK_COMPLETER_CLASS(klass) (<API key> ((klass), CDK_TYPE_COMPLETER, CdkCompleterClass)) #define CDK_IS_COMPLETER(obj) (<API key> ((obj), CDK_TYPE_COMPLETER)) #define <API key>(klass) (<API key> ((klass), CDK_TYPE_COMPLETER)) #define <API key>(obj) (<API key> ((obj), CDK_TYPE_COMPLETER, CdkCompleterClass)) typedef struct CdkCompleter_ CdkCompleter; typedef struct CdkCompleterClass_ CdkCompleterClass; typedef struct <API key> CdkCompleterPrivate; struct CdkCompleter_ { CdkDocumentHelper parent; CdkCompleterPrivate *priv; }; struct CdkCompleterClass_ { <API key> parent_class; }; GType <API key> (void); CdkCompleter *cdk_completer_new (struct CdkPlugin_ *plugin, struct GeanyDocument *doc); CdkCompleter *<API key> (struct GeanyDocument *doc); G_END_DECLS #endif /* CDK_COMPLETER_H_ */
#include "gtest/gtest.h" #include "coredd/variant.hh" using namespace coredd; TEST(variant, unary_visitor) { struct foo { foo(int) {} }; struct visitor { unsigned int operator()(const double) const { return 0; } unsigned int operator()(const char) const { return 1; } unsigned int operator()(const int) const { return 2; } unsigned int operator()(const long) const { return 3; } unsigned int operator()(const foo&) const { return 4; } }; using variant_type = variant<double, char, int, long, foo>; { variant_type v((construct<double>())); ASSERT_EQ(0u, apply_visitor(visitor(), v)); } { variant_type v((construct<char>())); ASSERT_EQ(1u, apply_visitor(visitor(), v)); } { variant_type v((construct<int>())); ASSERT_EQ(2u, apply_visitor(visitor(), v)); } { variant_type v((construct<long>())); ASSERT_EQ(3u, apply_visitor(visitor(), v)); } { variant_type v(construct<foo>(), 42); ASSERT_EQ(4u, apply_visitor(visitor(), v)); } } TEST(variant, binary_visitor) { struct foo { foo(int) {} }; struct visitor { unsigned int operator()(const double, const double) const { return 0; } unsigned int operator()(const double, const int) const { return 1; } unsigned int operator()(const double, const foo&) const { return 2; } unsigned int operator()(const int, const int) const { return 3; } unsigned int operator()(const int, const double) const { return 4; } unsigned int operator()(const int, const foo&) const { return 5; } unsigned int operator()(const foo&, const foo&) const { return 6; } unsigned int operator()(const foo&, const int) const { return 7; } unsigned int operator()(const foo&, const double) const { return 8; } }; using variant_type = variant<double, int, foo>; { variant_type v1((construct<double>())); variant_type v2((construct<double>())); ASSERT_EQ(0u, <API key>(visitor(), v1, v2)); } { variant_type v1((construct<double>())); variant_type v2((construct<int>())); ASSERT_EQ(1u, <API key>(visitor(), v1, v2)); } { variant_type v1((construct<double>())); variant_type 
v2(construct<foo>(), 42); ASSERT_EQ(2u, <API key>(visitor(), v1, v2)); } { variant_type v1((construct<int>())); variant_type v2((construct<int>())); ASSERT_EQ(3u, <API key>(visitor(), v1, v2)); } { variant_type v1((construct<int>())); variant_type v2((construct<double>())); ASSERT_EQ(4u, <API key>(visitor(), v1, v2)); } { variant_type v1((construct<int>())); variant_type v2(construct<foo>(), 42); ASSERT_EQ(5u, <API key>(visitor(), v1, v2)); } { variant_type v1(construct<foo>(), 42); variant_type v2(construct<foo>(), 33); ASSERT_EQ(6u, <API key>(visitor(), v1, v2)); } { variant_type v1(construct<foo>(), 42); variant_type v2((construct<int>())); ASSERT_EQ(7u, <API key>(visitor(), v1, v2)); } { variant_type v1(construct<foo>(), 42); variant_type v2((construct<double>())); ASSERT_EQ(8u, <API key>(visitor(), v1, v2)); } } struct foo { foo() { for (size_t i = 0; i < 4096; ++i) { raw[i] = i; } } size_t raw[4096]; }; struct bar { bar() { for (auto& elem : raw) { elem = 0; } } size_t raw[8192]; }; struct visitor1 { void operator()(const foo& f) const { for (size_t i = 0; i < 4096; ++i) { ASSERT_EQ(i, f.raw[i]); } } template <typename T> void operator()(const T&) const { FAIL() << "Shouldn't be here."; } }; struct visitor2 { void operator()(const foo& f) const { for (size_t i = 0; i < 4096; ++i) { ASSERT_EQ(i, f.raw[i]); } } template <typename T> void operator()(const T&) const { FAIL() << "Shouldn't be here."; } }; struct visitor3 { void operator()(const foo& f) const { for (size_t i = 0; i < 4096; ++i) { ASSERT_EQ(i, f.raw[i]); } } template <typename T> void operator()(const T&) const { FAIL() << "Shouldn't be here."; } }; struct visitor4 { void operator()(const bar& b) const { for (auto & elem : b.raw) { ASSERT_EQ(0u, elem); } } template <typename T> void operator()(const T&) const { FAIL() << "Shouldn't be here."; } }; TEST(variant, construction) { { using variant_type = variant<bar, char, foo>; ASSERT_LE(8192 * sizeof(size_t), sizeof(variant_type)); variant_type 
v((construct<foo>())); apply_visitor(visitor1(), v); } { using variant_type = variant<char, bar, foo>; ASSERT_LE(8192 * sizeof(size_t), sizeof(variant_type)); variant_type v((construct<foo>())); apply_visitor(visitor2(), v); } { using variant_type = variant<foo, char, bar>; ASSERT_LE(8192 * sizeof(size_t), sizeof(variant_type)); variant_type v((construct<foo>())); apply_visitor(visitor3(), v); } { using variant_type = variant<char, bar, foo>; ASSERT_LE(8192 * sizeof(size_t), sizeof(variant_type)); variant_type v((construct<bar>())); apply_visitor(visitor4(), v); } }
<?php

namespace hzphp\SQLite;


/**
 * Adds some generic features to the base SQLite3 class.  Used as a base
 * class for other database objects in this module.
 */
class Client extends \SQLite3 {


    public $filename;   //stores the database file name


    /**
     * Constructs a new Client instance
     *
     * @param filename Path to the SQLite database, or :memory: to use
     *                 in-memory database.
     * @param flags    Optional flags used to determine how to open the
     *                 SQLite database.  (see: SQLite3::__construct()).
     * @param encryption_key
     *                 An optional encryption key used when encrypting and
     *                 decrypting an SQLite database.
     */
    public function __construct(
        $filename,
        $flags          = false,
        $encryption_key = ''
    ) {

        //if necessary, default the constructor flags
        //FIX(review): the first constant was redacted in this copy of the
        //source; restored to SQLITE3_OPEN_READWRITE, which is SQLite3's own
        //documented default when paired with SQLITE3_OPEN_CREATE
        if( $flags === false ) {
            $flags = SQLITE3_OPEN_READWRITE | SQLITE3_OPEN_CREATE;
        }

        //invoke the parent's constructor
        parent::__construct( $filename, $flags, $encryption_key );

        //store the database file name for later
        $this->filename = $filename;
    }


    /**
     * Properly constructs a new Table object
     *
     * @param name The name of the table in the database
     * @return     The new Table object
     */
    protected function makeTable( $name ) {

        //construct the table with a reference to the owner client
        return new Table( $this, $name );
    }

}
#include <stdio.h> #include <stdlib.h> #include <err.h> #include <mpi.h> int main(int argc, char **argv) { int ierr = 0; /* Error status */ int rank = 0; /* MPI rank */ int nprocs = 0; /* MPI number of processors */ int rtag = 10; /* MPI tag for the ring */ int i = 0; /* Indexer */ int N = 0; /* Array size */ int left = 0; /* Left neighbour rank */ int right = 0; /* Right neighbour rank */ int offset = 0; /* Offset into data array */ int *data = NULL; /* Data array */ int total = 0; /* Total of the data array */ /* Initialize MPI */ ierr = MPI_Init(&argc, &argv); /* Find out our rank and total number of processors */ ierr = MPI_Comm_rank(MPI_COMM_WORLD, &rank); ierr = MPI_Comm_size(MPI_COMM_WORLD, &nprocs); /* Only the root rank reads the command line arguments */ if (rank == 0) { N = strtol(argv[1], NULL, 10); if (N <= 0) { warnx("Will not run with negative array size."); MPI_Abort(MPI_COMM_WORLD, ierr); } printf("Running on %d processors.\nWith %d elements.\n", nprocs, N * nprocs); } /* Broadcast the data array size */ ierr = MPI_Bcast(&N, 1, MPI_INT, 0, MPI_COMM_WORLD); /* Allocate the data array */ data = malloc(N * nprocs * sizeof(int)); if (!data) { warnx("Unable to allocate: %lu", N * nprocs * sizeof(int)); MPI_Abort(MPI_COMM_WORLD, ierr); } /* Initialize the data array to contain my rank number */ for (i = 0; i < N * nprocs; ++i) { data[i] = rank; } /* Figure out my neighbours */ left = rank + 1; right = rank - 1; /* Create periodic boundry conditions */ if (left > nprocs -1) left = 0; if (right < 0) right = nprocs - 1; /* Send/Receive for all processors */ offset = 0; for (i = 0; i < nprocs -1; ++i) { if ((rank % 2) == 0) { ierr = MPI_Send(&(data[offset]), N, MPI_INT, left, rtag, MPI_COMM_WORLD); offset += N; ierr = MPI_Recv(&(data[offset]), N, MPI_INT, right, rtag, MPI_COMM_WORLD, MPI_STATUS_IGNORE); } else { offset += N; ierr = MPI_Recv(&(data[offset]), N, MPI_INT, right, rtag, MPI_COMM_WORLD, MPI_STATUS_IGNORE); ierr = MPI_Send(&(data[offset-N]), N, 
MPI_INT, left, rtag, MPI_COMM_WORLD); } } /* Sum our data array */ for (i = 0; i < N * nprocs; ++i) { total += data[i]; } printf("[%02d] Total is: %d\n", rank, total); /* Free the data array */ if (data) { free(data); data = NULL; } /* Finalize MPI and exit */ ierr = MPI_Finalize(); return(EXIT_SUCCESS); }
import simuvex

from itertools import count

# Module-level counter that gives each fastpath receive a unique symbolic
# variable name.  (The original name was redacted in this copy of the source;
# restored as `receive_counter` per the sibling CGC procedures' convention.)
receive_counter = count()

class receive(simuvex.SimProcedure):
    """CGC ``receive`` syscall: read up to ``count`` bytes from ``fd`` into
    ``buf`` and, if ``rx_bytes`` is non-NULL, store the number of bytes
    actually read there.  Returns 0 on success and a CGC errno (2 = EFAULT)
    on an invalid buffer.
    """
    #pylint:disable=arguments-differ,unused-argument,attribute-defined-outside-init

    IS_SYSCALL = True

    def run(self, fd, buf, count, rx_bytes):
        if simuvex.options.CGC_ENFORCE_FD in self.state.options:
            # CGC binaries are only ever supposed to receive from stdin.
            fd = 0

        if self.state.mode == 'fastpath':
            # Special case for CFG generation: don't model the read, just
            # produce an unconstrained buffer/length of the right size.
            if not self.state.se.symbolic(count):
                actual_size = count
                data = self.state.se.Unconstrained(
                    'receive_data_%d' % next(receive_counter),
                    self.state.se.exactly_int(actual_size) * 8
                )
                self.state.memory.store(buf, data)
            else:
                actual_size = self.state.se.Unconstrained('receive_length', self.state.arch.bits)
            self.state.memory.store(rx_bytes, actual_size, endness='Iend_LE')

            return self.state.se.BVV(0, self.state.arch.bits)

        # NOTE(review): this option's name was redacted in the source; it is
        # restored as CGC_NO_SYMBOLIC_RECEIVE_LENGTH, which matches the
        # concrete-length read performed in this branch -- confirm upstream.
        if CGC_NO_SYMBOLIC_RECEIVE_LENGTH in self.state.options:
            # rules for invalid
            # greater than 0xc0 or wraps around
            if self.state.se.max_int(buf + count) > 0xc0000000 or \
                    self.state.se.min_int(buf + count) < self.state.se.min_int(buf):
                return 2

            # The destination buffer must be writable (permission bit 2).
            try:
                writable = self.state.se.any_int(self.state.memory.permissions(self.state.se.any_int(buf))) & 2 != 0
            except simuvex.SimMemoryError:
                writable = False
            if not writable:
                return 2

            read_length = self.state.posix.read(fd, buf, count)
            self.state.memory.store(rx_bytes, read_length, condition=rx_bytes != 0, endness='Iend_LE')
            self.size = read_length
            return self.state.se.BVV(0, self.state.arch.bits)
        else:
            if ABSTRACT_MEMORY in self.state.options:
                actual_size = count
            else:
                # Symbolic read length, constrained to at most `count`.
                actual_size = self.state.se.Unconstrained('receive_length', self.state.arch.bits)
                self.state.add_constraints(self.state.se.ULE(actual_size, count), action=True)

            if self.state.se.solution(count != 0, True):
                read_length = self.state.posix.read(fd, buf, actual_size)

                action_list = list(self.state.log.actions)
                try:
                    # get and fix up the memory write
                    action = next(
                        a for a in reversed(action_list)
                        if isinstance(a, SimActionData) and a.action == 'write' and a.type == 'mem'
                    )
                    action.size.ast = actual_size
                    action.data.ast = action.actual_value.ast
                    self.data = self.state.memory.load(buf, read_length)
                except StopIteration:
                    # the write didn't occur (i.e., size of 0)
                    self.data = None
            else:
                self.data = None

            self.size = actual_size
            self.state.memory.store(rx_bytes, actual_size, condition=rx_bytes != 0, endness='Iend_LE')

            # return values
            return self.state.se.If(
                actual_size == 0,
                self.state.se.BVV(0xffffffff, self.state.arch.bits),
                self.state.se.BVV(0, self.state.arch.bits)
            )

# Imported at the bottom (as in the original) to avoid circular imports at
# module load time.
from ...s_options import ABSTRACT_MEMORY, CGC_NO_SYMBOLIC_RECEIVE_LENGTH
from ...s_action import SimActionData
{-# LANGUAGE CPP, FlexibleContexts, <API key>, InstanceSigs, RankNTypes, ScopedTypeVariables, TypeFamilies, <API key> module Text.Grampa.ContextFree.Memoizing {-# DEPRECATED "Use Text.Grampa.ContextFree.SortedMemoizing instead" #-} (ResultList(..), Parser(..), BinTree(..), reparseTails, longest, peg, terminalPEG) where import Control.Applicative import Control.Monad (Monad(..), MonadPlus(..)) #if MIN_VERSION_base(4,13,0) import Control.Monad (MonadFail(fail)) #endif import Data.Function (on) import Data.Foldable (toList) import Data.Functor.Classes (Show1(..)) import Data.Functor.Compose (Compose(..)) import Data.List (maximumBy) import Data.Monoid (Monoid(mappend, mempty)) import Data.Monoid.Null (MonoidNull(null)) import Data.Monoid.Factorial (FactorialMonoid, length, splitPrimePrefix) import Data.Monoid.Textual (TextualMonoid) import qualified Data.Monoid.Factorial as Factorial import qualified Data.Monoid.Textual as Textual import Data.Ord (Down(Down)) import Data.Semigroup (Semigroup((<>))) import Data.Semigroup.Cancellative (LeftReductive(isPrefixOf)) import Data.String (fromString) import Debug.Trace (trace) import Witherable (Filterable(mapMaybe)) import qualified Text.Parser.Char import Text.Parser.Char (CharParsing) import Text.Parser.Combinators (Parsing(..)) import Text.Parser.LookAhead (LookAheadParsing(..)) import qualified Rank2 import Text.Grampa.Class (GrammarParsing(..), MultiParsing(..), <API key>(..), InputParsing(..), InputCharParsing(..), TailsParsing(parseTails), ParseResults, ParseFailure(..), FailureDescription(..), Pos) import Text.Grampa.Internal (BinTree(..), TraceableParsing(..), expected, erroneous) import qualified Text.Grampa.PEG.Backtrack.Measured as Backtrack import Prelude hiding (iterate, length, null, showList, span, takeWhile) -- | Parser for a context-free grammar with packrat-like sharing of parse results. It does not support left-recursive -- grammars. 
newtype Parser g s r = Parser{applyParser :: [(s, g (ResultList g s))] -> ResultList g s r} data ResultList g s r = ResultList !(BinTree (ResultInfo g s r)) {-# UNPACK #-} !(ParseFailure Pos s) data ResultInfo g s r = ResultInfo !Int ![(s, g (ResultList g s))] !r instance (Show s, Show r) => Show (ResultList g s r) where show (ResultList l f) = "ResultList (" ++ shows l (") (" ++ shows f ")") instance Show s => Show1 (ResultList g s) where liftShowsPrec _sp showList _prec (ResultList l f) rest = "ResultList " ++ showList (simplify <$> toList l) (shows f rest) where simplify (ResultInfo _ _ r) = r instance (Show s, Show r) => Show (ResultInfo g s r) where show (ResultInfo l _ r) = "(ResultInfo @" ++ show l ++ " " ++ shows r ")" instance Functor (ResultInfo g s) where fmap f (ResultInfo l t r) = ResultInfo l t (f r) instance Foldable (ResultInfo g s) where foldMap f (ResultInfo _ _ r) = f r instance Traversable (ResultInfo g s) where traverse f (ResultInfo l t r) = ResultInfo l t <$> f r instance Functor (ResultList g s) where fmap f (ResultList l failure) = ResultList ((f <$>) <$> l) failure instance Filterable (ResultList g s) where mapMaybe f (ResultList l failure) = ResultList (mapMaybe (traverse f) l) failure instance Ord s => Semigroup (ResultList g s r) where ResultList rl1 f1 <> ResultList rl2 f2 = ResultList (rl1 <> rl2) (f1 <> f2) instance Ord s => Monoid (ResultList g s r) where mempty = ResultList mempty mempty mappend = (<>) instance Functor (Parser g i) where fmap f (Parser p) = Parser (fmap f . 
p) {-# INLINABLE fmap #-} instance Ord s => Applicative (Parser g s) where pure a = Parser (\rest-> ResultList (Leaf $ ResultInfo 0 rest a) mempty) Parser p <*> Parser q = Parser r where r rest = case p rest of ResultList results failure -> ResultList mempty failure <> foldMap continue results continue (ResultInfo l rest' f) = continue' l f (q rest') continue' l f (ResultList rs failure) = ResultList (adjust l f <$> rs) failure adjust l f (ResultInfo l' rest' a) = ResultInfo (l+l') rest' (f a) {-# INLINABLE pure #-} {-# INLINABLE (<*>) #-} instance Ord s => Alternative (Parser g s) where empty = Parser (\rest-> ResultList mempty $ ParseFailure (Down $ length rest) [] []) Parser p <|> Parser q = Parser r where r rest = p rest <> q rest {-# INLINABLE (<|>) #-} instance Filterable (Parser g i) where mapMaybe f (Parser p) = Parser (mapMaybe f . p) {-# INLINABLE mapMaybe #-} instance Ord s => Monad (Parser g s) where return = pure Parser p >>= f = Parser q where q rest = case p rest of ResultList results failure -> ResultList mempty failure <> foldMap continue results continue (ResultInfo l rest' a) = continue' l (applyParser (f a) rest') continue' l (ResultList rs failure) = ResultList (adjust l <$> rs) failure adjust l (ResultInfo l' rest' a) = ResultInfo (l+l') rest' a #if MIN_VERSION_base(4,13,0) instance Ord s => MonadFail (Parser g s) where #endif fail msg = Parser p where p rest = ResultList mempty (erroneous (Down $ length rest) msg) instance Ord s => MonadPlus (Parser g s) where mzero = empty mplus = (<|>) instance (Semigroup x, Ord s) => Semigroup (Parser g s x) where (<>) = liftA2 (<>) instance (Monoid x, Ord s) => Monoid (Parser g s x) where mempty = pure mempty mappend = liftA2 mappend instance (Ord s, LeftReductive s, FactorialMonoid s) => GrammarParsing (Parser g s) where type ParserGrammar (Parser g s) = g type GrammarFunctor (Parser g s) = ResultList g s parsingResult _ = Compose . 
fromResultList nonTerminal f = Parser p where p ((_, d) : _) = f d p _ = ResultList mempty (expected 0 "NonTerminal at endOfInput") {-# INLINE nonTerminal #-} instance (Ord s, LeftReductive s, FactorialMonoid s) => TailsParsing (Parser g s) where parseTails = applyParser -- recursion support. -- 'parseComplete' :: ("Rank2".'Rank2.Functor' g, 'FactorialMonoid' s) => -- g (Memoizing.'Parser' g s) -> s -> g ('Compose' ('ParseResults' s) []) instance (LeftReductive s, FactorialMonoid s, Ord s) => MultiParsing (Parser g s) where type GrammarConstraint (Parser g s) g' = (g ~ g', Rank2.Functor g) type ResultFunctor (Parser g s) = Compose (ParseResults s) [] -- | Returns the list of all possible input prefix parses paired with the remaining input suffix. parsePrefix g input = Rank2.fmap (Compose . Compose . fromResultList) (snd $ head $ parseGrammarTails g input) -- parseComplete :: (Rank2.Functor g, Eq s, FactorialMonoid s) => -- g (Parser g s) -> s -> g (Compose (ParseResults s) []) parseComplete g input = Rank2.fmap ((snd <$>) . Compose . fromResultList) (snd $ head $ reparseTails close $ parseGrammarTails g input) where close = Rank2.fmap (<* eof) g parseGrammarTails :: (Rank2.Functor g, FactorialMonoid s) => g (Parser g s) -> s -> [(s, g (ResultList g s))] parseGrammarTails g input = foldr parseTail [] (Factorial.tails input) where parseTail s parsedTail = parsed where parsed = (s,d):parsedTail d = Rank2.fmap (($ parsed) . 
applyParser) g reparseTails :: Rank2.Functor g => g (Parser g s) -> [(s, g (ResultList g s))] -> [(s, g (ResultList g s))] reparseTails _ [] = [] reparseTails final parsed@((s, _):_) = (s, gd):parsed where gd = Rank2.fmap (`applyParser` parsed) final instance (LeftReductive s, FactorialMonoid s, Ord s) => InputParsing (Parser g s) where type ParserInput (Parser g s) = s getInput = Parser p where p rest@((s, _):_) = ResultList (Leaf $ ResultInfo 0 rest s) mempty p [] = ResultList (Leaf $ ResultInfo 0 [] mempty) mempty anyToken = Parser p where p rest@((s, _):t) = case splitPrimePrefix s of Just (first, _) -> ResultList (Leaf $ ResultInfo 1 t first) mempty _ -> ResultList mempty (expected (Down $ length rest) "anyToken") p [] = ResultList mempty (expected 0 "anyToken") satisfy predicate = Parser p where p rest@((s, _):t) = case splitPrimePrefix s of Just (first, _) | predicate first -> ResultList (Leaf $ ResultInfo 1 t first) mempty _ -> ResultList mempty (expected (Down $ length rest) "satisfy") p [] = ResultList mempty (expected 0 "satisfy") scan s0 f = Parser (p s0) where p s rest@((i, _) : _) = ResultList (Leaf $ ResultInfo l (drop l rest) prefix) mempty where (prefix, _, _) = Factorial.spanMaybe' s f i l = Factorial.length prefix p _ [] = ResultList (Leaf $ ResultInfo 0 [] mempty) mempty take 0 = mempty take n = Parser p where p rest@((s, _) : _) | x <- Factorial.take n s, l <- Factorial.length x, l == n = ResultList (Leaf $ ResultInfo l (drop l rest) x) mempty p rest = ResultList mempty (expected (Down $ length rest) $ "take " ++ show n) takeWhile predicate = Parser p where p rest@((s, _) : _) | x <- Factorial.takeWhile predicate s, l <- Factorial.length x = ResultList (Leaf $ ResultInfo l (drop l rest) x) mempty p [] = ResultList (Leaf $ ResultInfo 0 [] mempty) mempty takeWhile1 predicate = Parser p where p rest@((s, _) : _) | x <- Factorial.takeWhile predicate s, l <- Factorial.length x, l > 0 = ResultList (Leaf $ ResultInfo l (drop l rest) x) mempty p rest = 
ResultList mempty (expected (Down $ length rest) "takeWhile1") string s = Parser p where p rest@((s', _) : _) | s `isPrefixOf` s' = ResultList (Leaf $ ResultInfo l (Factorial.drop l rest) s) mempty p rest = ResultList mempty (ParseFailure (Down $ length rest) [LiteralDescription s] []) l = Factorial.length s notSatisfy predicate = Parser p where p rest@((s, _):_) | Just (first, _) <- splitPrimePrefix s, predicate first = ResultList mempty (expected (Down $ length rest) "notSatisfy") p rest = ResultList (Leaf $ ResultInfo 0 rest ()) mempty {-# INLINABLE string #-} instance InputParsing (Parser g s) => TraceableParsing (Parser g s) where traceInput description (Parser p) = Parser q where q rest@((s, _):_) = case traceWith "Parsing " (p rest) of rl@(ResultList EmptyTree _) -> traceWith "Failed " rl rl -> traceWith "Parsed " rl where traceWith prefix = trace (prefix <> description s) q [] = p [] instance (Ord s, Show s, TextualMonoid s) => InputCharParsing (Parser g s) where satisfyCharInput predicate = Parser p where p rest@((s, _):t) = case Textual.characterPrefix s of Just first | predicate first -> ResultList (Leaf $ ResultInfo 1 t $ Factorial.primePrefix s) mempty _ -> ResultList mempty (expected (Down $ length rest) "satisfyCharInput") p [] = ResultList mempty (expected 0 "satisfyCharInput") scanChars s0 f = Parser (p s0) where p s rest@((i, _) : _) = ResultList (Leaf $ ResultInfo l (drop l rest) prefix) mempty where (prefix, _, _) = Textual.spanMaybe_' s f i l = Factorial.length prefix p _ [] = ResultList (Leaf $ ResultInfo 0 [] mempty) mempty takeCharsWhile predicate = Parser p where p rest@((s, _) : _) | x <- Textual.takeWhile_ False predicate s, l <- Factorial.length x = ResultList (Leaf $ ResultInfo l (drop l rest) x) mempty p [] = ResultList (Leaf $ ResultInfo 0 [] mempty) mempty takeCharsWhile1 predicate = Parser p where p rest@((s, _) : _) | x <- Textual.takeWhile_ False predicate s, l <- Factorial.length x, l > 0 = ResultList (Leaf $ ResultInfo l (drop l 
rest) x) mempty p rest = ResultList mempty (expected (Down $ length rest) "takeCharsWhile1") notSatisfyChar predicate = Parser p where p rest@((s, _):_) | Just first <- Textual.characterPrefix s, predicate first = ResultList mempty (expected (Down $ length rest) "notSatisfyChar") p rest = ResultList (Leaf $ ResultInfo 0 rest ()) mempty instance (MonoidNull s, Ord s) => Parsing (Parser g s) where try (Parser p) = Parser q where q rest = rewindFailure (p rest) where rewindFailure (ResultList rl _) = ResultList rl (ParseFailure (Down $ length rest) [] []) Parser p <?> msg = Parser q where q rest = replaceFailure (p rest) where replaceFailure (ResultList EmptyTree (ParseFailure pos msgs erroneous')) = ResultList EmptyTree (ParseFailure pos (if pos == Down (length rest) then [StaticDescription msg] else msgs) erroneous') replaceFailure rl = rl notFollowedBy (Parser p) = Parser (\input-> rewind input (p input)) where rewind t (ResultList EmptyTree _) = ResultList (Leaf $ ResultInfo 0 t ()) mempty rewind t ResultList{} = ResultList mempty (expected (Down $ length t) "notFollowedBy") skipMany p = go where go = pure () <|> p *> go unexpected msg = Parser (\t-> ResultList mempty $ expected (Down $ length t) msg) eof = Parser f where f rest@((s, _):_) | null s = ResultList (Leaf $ ResultInfo 0 rest ()) mempty | otherwise = ResultList mempty (expected (Down $ length rest) "endOfInput") f [] = ResultList (Leaf $ ResultInfo 0 [] ()) mempty instance (MonoidNull s, Ord s) => <API key> (Parser g s) where Parser p <<|> Parser q = Parser r where r rest = case p rest of rl@(ResultList EmptyTree _failure) -> rl <> q rest rl -> rl takeSome p = (:) <$> p <*> takeMany p takeMany (Parser p) = Parser (q 0 id) where q len acc rest = case p rest of ResultList EmptyTree _failure -> ResultList (Leaf $ ResultInfo len rest (acc [])) mempty ResultList rl _ -> foldMap continue rl where continue (ResultInfo len' rest' result) = q (len + len') (acc . 
(result:)) rest' skipAll (Parser p) = Parser (q 0) where q len rest = case p rest of ResultList EmptyTree _failure -> ResultList (Leaf $ ResultInfo len rest ()) mempty ResultList rl _failure -> foldMap continue rl where continue (ResultInfo len' rest' _) = q (len + len') rest' instance (MonoidNull s, Ord s) => LookAheadParsing (Parser g s) where lookAhead (Parser p) = Parser (\input-> rewind input (p input)) where rewind t (ResultList rl failure) = ResultList (rewindInput t <$> rl) failure rewindInput t (ResultInfo _ _ r) = ResultInfo 0 t r instance (Ord s, Show s, TextualMonoid s) => CharParsing (Parser g s) where satisfy predicate = Parser p where p rest@((s, _):t) = case Textual.characterPrefix s of Just first | predicate first -> ResultList (Leaf $ ResultInfo 1 t first) mempty _ -> ResultList mempty (expected (Down $ length rest) "Char.satisfy") p [] = ResultList mempty (expected 0 "Char.satisfy") string s = Textual.toString (error "unexpected non-character") <$> string (fromString s) text t = (fromString . 
Textual.toString (error "unexpected non-character")) <$> string (Textual.fromText t) fromResultList :: FactorialMonoid s => ResultList g s r -> ParseResults s [(s, r)] fromResultList (ResultList EmptyTree (ParseFailure pos positive negative)) = Left (ParseFailure (pos - 1) positive negative) fromResultList (ResultList rl _failure) = Right (f <$> toList rl) where f (ResultInfo _ ((s, _):_) r) = (s, r) f (ResultInfo _ [] r) = (mempty, r) -- | Turns a context-free parser into a backtracking PEG parser that consumes the longest possible prefix of the list -- of input tails, opposite of 'peg' longest :: Parser g s a -> Backtrack.Parser g [(s, g (ResultList g s))] a longest p = Backtrack.Parser q where q rest = case applyParser p rest of ResultList EmptyTree (ParseFailure pos positive negative) -> Backtrack.NoParse (ParseFailure pos (map message positive) (map message negative)) ResultList rs _ -> parsed (maximumBy (compare `on` resultLength) rs) resultLength (ResultInfo l _ _) = l parsed (ResultInfo l s r) = Backtrack.Parsed l r s message (StaticDescription msg) = StaticDescription msg message (LiteralDescription s) = LiteralDescription [(s, error "longest")] -- | Turns a backtracking PEG parser of the list of input tails into a context-free parser, opposite of 'longest' peg :: Ord s => Backtrack.Parser g [(s, g (ResultList g s))] a -> Parser g s a peg p = Parser q where q rest = case Backtrack.applyParser p rest of Backtrack.Parsed l result suffix -> ResultList (Leaf $ ResultInfo l suffix result) mempty Backtrack.NoParse (ParseFailure pos positive negative) -> ResultList mempty (ParseFailure pos (original <$> positive) (original <$> negative)) where original = (fst . 
head <$>) -- | Turns a backtracking PEG parser into a context-free parser terminalPEG :: (Monoid s, Ord s) => Backtrack.Parser g s a -> Parser g s a terminalPEG p = Parser q where q [] = case Backtrack.applyParser p mempty of Backtrack.Parsed l result _ -> ResultList (Leaf $ ResultInfo l [] result) mempty Backtrack.NoParse failure -> ResultList mempty failure q rest@((s, _):_) = case Backtrack.applyParser p s of Backtrack.Parsed l result _ -> ResultList (Leaf $ ResultInfo l (drop l rest) result) mempty Backtrack.NoParse failure -> ResultList mempty failure
<?php

# One-off command-line maintenance/debug script (flamework style): loads the
# site bootstrap, calls into the Flickr "dbtickets" library once, and dumps
# the raw response for inspection.

# Repo root is one directory above this script's directory.
$root = dirname(dirname(__FILE__));

# Make the www and www/include trees resolvable by include()/require().
ini_set("include_path", "{$root}/www:{$root}/www/include");

# Potentially long-running; disable PHP's execution time limit.
set_time_limit(0);

include("include/init.php");

# Bring the dbtickets Flickr library into scope (flamework loadlib helper).
loadlib("dbtickets_flickr");

# NOTE(review): the function name on the next line was redacted ("<API key>")
# in this copy of the source; presumably one of the dbtickets_flickr_*
# functions provided by the library loaded above -- restore from upstream.
$rsp = <API key>();

# Pretty-print the response structure.
dumper($rsp);
?>
<!DOCTYPE html> <html> <head> <title>index.js</title> <meta http-equiv="content-type" content="text/html; charset=UTF-8"> <link rel="stylesheet" media="all" href="public/stylesheets/normalize.css" /> <link rel="stylesheet" media="all" href="docco.css" /> </head> <body> <div class="container"> <div class="page"> <div class="header"> <h1>index.js</h1> </div> <h2 id="index-js">index.js</h2> <p>This is the main file for the user API.</p> <p>So far, it only returns a key given a username/password</p> <div class='highlight'><pre> (<span class="function"><span class="keyword">function</span><span class="params">()</span> {</span></pre></div> <p>We use strict because we’re only worried about modern browsers and we should be strict. JSHint actually insists on this and it’s a good idea.</p> <div class='highlight'><pre> <span class="string">'use strict'</span>;</pre></div> <p>It’s also a good idea to predeclare all variables at the top of a scope. Javascript doesn’t support block scoping so putting them all at the beginning is a smart move.</p> <div class='highlight'><pre> <span class="keyword">var</span> _, crypto, echo, envConfig, port, restify, server;</pre></div> <p>Server code needs the environment.</p> <div class='highlight'><pre> envConfig = process.env;</pre></div> <p>Restify helps us with building a RESTful API.</p> <div class='highlight'><pre> restify = require(<span class="string">'restify'</span>); _ = require(<span class="string">'underscore'</span>); crypto = require(<span class="string">'crypto-js'</span>); server = restify.createServer({</pre></div> <p>The name is sent as one of the server headers</p> <div class='highlight'><pre> name: <span class="string">'TidepoolUser'</span> });</pre></div> <p>Two standard restify handler plugins:</p> <div class='highlight'><pre> server.use(restify.queryParser()); server.use(restify.bodyParser());</pre></div> <p>This function merely echoes everything it got as a block of text. 
Useful for debugging.</p> <div class='highlight'><pre> echo = <span class="function"><span class="keyword">function</span><span class="params">(req, res, next)</span> {</span> console.log(<span class="string">'request'</span>, req.params, req.url, req.method); res.send([ <span class="string">'Echo!'</span>, { params: req.params, headers: req.headers, method: req.method } ]); <span class="keyword">return</span> next(); }; <span class="keyword">var</span> status = <span class="function"><span class="keyword">function</span><span class="params">(req, res, next)</span> {</span> console.log(<span class="string">'status'</span>, req.params, req.url, req.method); res.send(<span class="string">'Ok'</span>); <span class="keyword">return</span> next(); };</pre></div> <p>this is a stupid simple userid generation by creating a hash from the username and password given. If either one changes, it will be a different hash. </p> <div class='highlight'><pre> <span class="keyword">var</span> login = <span class="function"><span class="keyword">function</span><span class="params">(req, res, next)</span> {</span> console.log(<span class="string">'login'</span>, <span class="string">'(parameters masked)'</span>, req.url, req.method); <span class="keyword">if</span> (!(req.params.username &amp;&amp; req.params.password)) { res.send(<span class="number">400</span>, <span class="string">'Both username and password are required.'</span>); } <span class="keyword">else</span> { <span class="keyword">var</span> hash = crypto.algo.SHA1.create(); hash.update(req.params.username); hash.update(req.params.password); res.send({username: req.params.username, userid: hash.finalize().toString()}); } <span class="keyword">return</span> next(); };</pre></div> <p>We need to have sensible responses for all the standard verbs, so we’ve got a system that makes it easy to reuse the same handlers for different verbs.</p> <div class='highlight'><pre> <span class="keyword">var</span> v01api = [ { verbs: [<span 
class="string">'get'</span>, <span class="string">'post'</span>, <span class="string">'put'</span>, <span class="string">'del'</span>, <span class="string">'head'</span>], path: <span class="string">'/echo'</span>, func: echo }, { verb: <span class="string">'get'</span>, path: <span class="string">'/status'</span>, func: status }, { verbs: [<span class="string">'get'</span>, <span class="string">'post'</span>], path: <span class="string">'/login'</span>, func: login } ];</pre></div> <p>helper function to set up one endpoint for one verb</p> <div class='highlight'><pre> <span class="keyword">var</span> doVerb = <span class="function"><span class="keyword">function</span><span class="params">(verb, path, version, func)</span> {</span> server[verb]({path: path, version: version }, func); };</pre></div> <p>installs all the items defined in a version of the API</p> <div class='highlight'><pre> <span class="keyword">var</span> installAPI = <span class="function"><span class="keyword">function</span><span class="params">(api, version)</span> {</span> _.each(api, <span class="function"><span class="keyword">function</span><span class="params">(elt, idx, list)</span> {</span> <span class="keyword">if</span> (elt.verbs) { _.each(elt.verbs, <span class="function"><span class="keyword">function</span><span class="params">(verb)</span> {</span> doVerb(verb, elt.path, version, elt.func); }); } <span class="keyword">else</span> <span class="keyword">if</span> (elt.verb) { doVerb(elt.verb, elt.path, version, elt.func); } }); }; installAPI(v01api, <span class="string">'0.1.1'</span>);</pre></div> <p>If the port is specified in the environment we’ll use it, but for deploys we want to run on port 80 and then map it in the router.</p> <div class='highlight'><pre> port = envConfig.USER_PORT || <span class="number">80</span>; console.log(<span class="string">'echo API server serving on port'</span>, port); server.listen(port);</pre></div> <p>Wrap up the javascript namespacing model.</p> 
<div class='highlight'><pre>}).call(<span class="keyword">this</span>);</pre></div> <div class="fleur">h</div> </div> </div> </body> </html>
#ifndef MP_FADER_HPP
#define MP_FADER_HPP

#include <stdint.h>
#include <atomic>
#include <cassert>  // fix: assert() is used below but <cassert> was never included

struct PlayerObject;

// Linear volume ramp shared between a control thread and the audio thread.
// `cur` walks from 0 up to `limit` (fade-in) or down to 0 (fade-out), one
// step per audio frame; sampleFactor() yields the resulting gain in [0, 1].
class Fader {
private:
	std::atomic<uint16_t> cur;    // current ramp position, in frames
	std::atomic<uint16_t> limit;  // ramp length in frames; 0 means "no fade configured"
	std::atomic<int8_t> inc;      // direction: +1 fade-in, -1 fade-out, 0 idle

public:
	Fader();
	// If samplerate differs, it resets.
	void change(int8_t inc /* 1 for fading in, -1 for fading out */, int Samplerate, bool reset);
	void finish();
	void wait(PlayerObject* player);

	// Gain factor for the current ramp position (1.0 when no fade is set up).
	double sampleFactor() const {
		if(limit == 0) return 1;
		return double(cur) / limit;
	}

	bool finished() const {
		// Snapshot `inc` once: it may be changed concurrently by change()/finish().
		int8_t incCpy(inc);
		if(incCpy == 0) return true;
		if(incCpy > 0) return cur >= limit;
		if(incCpy < 0) return cur == 0;
		assert(false); return false;
	}

	// Copies the fader state so the audio loop can tick it without touching
	// the atomics per frame, then writes the advanced position back — but
	// only if nobody reconfigured the fade in the meantime.
	struct Scope {
		Fader& fader;
		uint16_t cur, limit;
		int8_t inc;
		Scope(Fader& _fader) : fader(_fader) {
			cur = fader.cur;
			limit = fader.limit;
			inc = fader.inc;
		}
		~Scope() {
			if(inc == 0) return; // no change
			if(inc != fader.inc) return; // changed meanwhile
			if(limit != fader.limit) return; // changed meanwhile
			fader.cur = cur;
		}
		double sampleFactor() const {
			if(limit == 0) return 1;
			return double(cur) / limit;
		}
		void frameTick() {
			if(finished()) return;
			cur += inc;
		}
		// fix: marked const — it only reads the copied state, and this
		// matches Fader::finished() above.
		bool finished() const {
			if(inc == 0) return true;
			if(inc > 0) return cur >= limit;
			if(inc < 0) return cur == 0;
			assert(false); return false;
		}
		void resetZero() { cur = 0; }
	};
};

#endif // MP_FADER_HPP (fix: comment previously said FADER_HPP, not matching the guard)
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

namespace bv.model.Model.Core
{
    /// <summary>
    /// Immutable metadata describing one item (field) of a search panel:
    /// how it is edited, labelled, defaulted, and positioned, and — for
    /// lookup fields — how lookup keys and display texts are produced.
    /// All values are assigned exactly once in the constructor.
    /// </summary>
    public class SearchPanelMetaItem
    {
        /// <summary>Field name the search criterion binds to.</summary>
        public string Name { get; protected set; }

        /// <summary>Editor control used to render the field.</summary>
        public EditorType EditorType { get; protected set; }

        /// <summary>True when the value is entered as a from/to range.</summary>
        public bool IsRange { get; protected set; }

        /// <summary>True when a range gets default date bounds (presumably — confirm with panel consumers).</summary>
        public bool IsRangeDefDates { get; protected set; }

        /// <summary>Resource identifier of the field's label.</summary>
        public string LabelId { get; protected set; }

        /// <summary>Factory producing the field's default value.</summary>
        public Func<object> DefaultValue { get; protected set; }

        /// <summary>Default comparison operator for the criterion.</summary>
        public string DefaultOper { get; protected set; }

        /// <summary>True when the field must be filled before searching.</summary>
        public bool IsMandatory { get; protected set; }

        /// <summary>True when multiple values may be selected.</summary>
        public bool IsMultiple { get; protected set; }

        /// <summary>Where the item is placed on the panel.</summary>
        public SearchPanelLocation Location { get; protected set; }

        /// <summary>True when the item is passed as a parameter rather than a filter (presumably — confirm with panel consumers).</summary>
        public bool IsParam { get; protected set; }

        /// <summary>Name of the field this one depends on, if any.</summary>
        public string Dependent { get; protected set; }

        /// <summary>Name of the lookup list backing the field.</summary>
        public string LookupName { get; protected set; }

        /// <summary>CLR type of the lookup entries.</summary>
        public Type LookupType { get; protected set; }

        /// <summary>Extracts the key (id) from a lookup entry.</summary>
        public Func<object, long> LookupValue { get; protected set; }

        /// <summary>Extracts the display text from a lookup entry.</summary>
        public Func<object, string> LookupText { get; protected set; }

        /// <summary>
        /// Initializes every property; parameters map one-to-one onto the
        /// properties above, in declaration order.
        /// </summary>
        public SearchPanelMetaItem(
            string name,
            EditorType edtype,
            bool range,
            bool rangeDefDates,
            string label,
            Func<object> def,
            string oper,
            bool mandatory,
            bool multiple,
            SearchPanelLocation location,
            bool param,
            string dependent,
            string lookupName,
            Type lookupType,
            Func<object, long> lookupValue,
            Func<object, string> lookupText
            )
        {
            Name = name;
            EditorType = edtype;
            IsRange = range;
            IsRangeDefDates = rangeDefDates;
            LabelId = label;
            DefaultValue = def;
            DefaultOper = oper;
            IsMandatory = mandatory;
            IsMultiple = multiple;
            Location = location;
            IsParam = param;
            Dependent = dependent;
            LookupName = lookupName;
            LookupType = lookupType;
            LookupValue = lookupValue;
            LookupText = lookupText;
        }
    }
}
/* mpi_pi.reducer.c (2014-01-10) */ /** \file mpi_pi.reducer.c \brief Example for KMRRUN. It is a reducer for PI calculation implemented using MPI. How to run. 1. create input files in a directory. work/ 000 001 002 Each file have one line which represents number of points to plot. $ cat work/000 100000 2. run by kmrrun $ mpirun -np 2 ./kmrrun --mpi-proc 4 --m-mpi "./mpi_pi.mapper" \ -k "./mpi_pi.kvgen.sh" --r-mpi "./mpi_pi.reducer" ./work */ #include <stdio.h> #include <string.h> #include <stdlib.h> #include <mpi.h> #define LINELEN 80 /** \brief Main function. Read a file which has key-values separated by lines. One line is like this. 0 7932/10000 '0' is key and '7932/10000' is value. 7932 is number of points plotted in a circle and 10000 is total number of points plotted. By reading these numbers, it calculates pi and writes result to a file. */ int main(int argc, char *argv[]) { char line[LINELEN]; int rank; FILE *ifp, *ofp; MPI_Init(&argc, &argv); MPI_Comm_rank(MPI_COMM_WORLD, &rank); if (argc != 2) { if (rank == 0) { fprintf(stderr, "specify an input file\n"); } MPI_Abort(MPI_COMM_WORLD, 1); } if (rank == 0) { int sum_count = 0; int sum_point = 0; ifp = fopen(argv[1], "r"); while (fgets(line, sizeof(line), ifp) != NULL) { char *count_s, *point_s; char *cp = line; int len = (int)strlen(line); // chomp if (cp[len-1] == '\n') { cp[len-1] = '\0'; } // find In count position cp = strchr(line, ' '); count_s = cp + 1; // find Total point position cp = strchr(line, '/'); point_s = cp + 1; cp[0] = '\0'; sum_count += atoi(count_s); sum_point += atoi(point_s); } fclose(ifp); double pi = 4.0 * sum_count / sum_point; ofp = fopen("mpi_pi.out", "w"); fprintf(ofp, "%f\n", pi); fclose(ofp); } MPI_Finalize(); return 0; }
/* { dg-do run } */ struct tag { int m1; char *m2[5]; } s1, *p1; int i; main() { s1.m1 = -1; p1 = &s1; if ( func1( &p1->m1 ) == -1 ) foo ("ok"); else abort (); i = 3; s1.m2[3]= "123"; if ( strlen( (p1->m2[i])++ ) == 3 ) foo ("ok"); else abort (); exit (0); } func1(int *p) { return(*p); } foo (char *s) {}
<!DOCTYPE html> <html> <head> <title>Conway's game of life</title> <link href="life.css" rel="stylesheet" type="text/css"> </head> <body> <div id="content"> <canvas id="life" width="480" height="480"> If you had a browser that supported HTML5, you'd see Conway's Game of Life here. </canvas> <div class="controls"> <button id="start" type="button">Start</button> <button id="stop" type="button" disabled="disabled">Stop</button> <button id="step" type="button">Step</button> <button id="reset" type="button" disabled="disabled">Reset</button> <input id="seed" type="text" value="" placeholder="Enter a seed value"> <p> <span id="fps">0</span> fps </p> </div> </div> <script src="http://code.jquery.com/jquery-2.0.0.min.js"></script> <script src="seedrandom.js"></script> <script src="life.js"></script> </body> </html>
namespace Humidifier.AppSync
{
    using System.Collections.Generic;

    /// <summary>
    /// Represents an AWS::AppSync::GraphQLSchema CloudFormation resource.
    /// fix: the property doc blocks lacked the /// prefix, so the bare XML
    /// tags made the file uncompilable; the redacted property is restored as
    /// DefinitionS3Location per the AWS CloudFormation resource specification.
    /// </summary>
    public class GraphQLSchema : Humidifier.Resource
    {
        /// <summary>
        /// The CloudFormation resource type emitted for this resource.
        /// </summary>
        public override string AWSTypeName
        {
            get
            {
                return @"AWS::AppSync::GraphQLSchema";
            }
        }

        /// <summary>
        /// Definition
        /// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-appsync-graphqlschema.html#cfn-appsync-graphqlschema-definition
        /// Required: False
        /// UpdateType: Mutable
        /// PrimitiveType: String
        /// </summary>
        public dynamic Definition { get; set; }

        /// <summary>
        /// DefinitionS3Location
        /// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-appsync-graphqlschema.html#cfn-appsync-graphqlschema-definitions3location
        /// Required: False
        /// UpdateType: Mutable
        /// PrimitiveType: String
        /// </summary>
        public dynamic DefinitionS3Location { get; set; }

        /// <summary>
        /// ApiId
        /// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-appsync-graphqlschema.html#cfn-appsync-graphqlschema-apiid
        /// Required: True
        /// UpdateType: Immutable
        /// PrimitiveType: String
        /// </summary>
        public dynamic ApiId { get; set; }
    }
}
# NativeDemo for iOS

Fully native (Objective-C and C) app that connects to [http:

## Installation

The app uses the `OpenWebRTC` and `OpenWebRTC-SDK` CocoaPods from [https:

If you haven't installed CocoaPods yet, do so first:

    sudo gem install cocoapods
    pod setup

Then run:

    pod install
    open NativeDemo.xcworkspace

**NOTE!** When using CocoaPods, you should always use the `.xcworkspace` file and not the usual `.xcodeproj`.
const timespanSelection = (function (d3) {
    const module = {};

    // Builds a zero-argument function returning the epoch-ms timestamp of
    // local midnight `days` days before today.
    const fromDaysAgo = function (days) {
        return function () {
            const today = new Date(),
                daysAgo = new Date(
                    today.getFullYear(),
                    today.getMonth(),
                    today.getDate() - days
                );
            return +daysAgo;
        };
    };

    // Selectable time windows; `timestamp` yields the window's lower bound.
    module.timespans = {
        all: {
            label: "all time",
            shortLabel: "all time",
            timestamp: function () {
                return 0;
            },
        },
        twoMonths: {
            label: "last two months",
            shortLabel: "2 months",
            timestamp: fromDaysAgo(60),
        },
        twoWeeks: {
            label: "last two weeks",
            shortLabel: "2 weeks",
            timestamp: fromDaysAgo(14),
        },
        sevenDays: {
            label: "last 7 days",
            shortLabel: "7 days",
            timestamp: fromDaysAgo(7),
        },
        twoDays: {
            label: "last 2 days",
            shortLabel: "2 days",
            timestamp: fromDaysAgo(2),
        },
        today: {
            label: "today",
            shortLabel: "today",
            timestamp: fromDaysAgo(1),
        },
    };

    // fix: this module-private helper's name was redacted to an invalid
    // token in this copy; restored with a valid descriptive name (it is
    // not part of the module's public interface).
    const notifyTimespanSelected = function (onTimespanSelected, span) {
        onTimespanSelected(span.timestamp.call());
    };

    // Creates the timespan picker widget; `selectedSpan` is the initial choice.
    module.create = function (selectedSpan) {
        let onTimespanSelected;

        const container = d3
            .select(document.createElement("div"))
            .attr("class", "timespan");
        const currentlySelected = container
            .append("span")
            .on("click", function () {
                d3.event.preventDefault();
            });
        const timespanList = container
            .append("div")
            .attr("class", "timespanSelection")
            .append("ol")
            .attr("class", "timespanList");

        const timespans = Object.keys(module.timespans).map(function (
            spanName
        ) {
            return module.timespans[spanName];
        });

        const selection = timespanList
            .selectAll(".item")
            .data(timespans, function (d) {
                return d.label;
            });

        // Refreshes the short label and the highlighted list entry.
        const updateSelection = function () {
            currentlySelected.text(selectedSpan.shortLabel);
            selection.classed("selected", function (span) {
                return span === selectedSpan;
            });
        };

        selection
            .enter()
            .append("li")
            .attr("class", "item")
            .append("button")
            .text(function (span) {
                return span.label;
            })
            .on("click", function (span) {
                selectedSpan = span;
                updateSelection();
                notifyTimespanSelected(onTimespanSelected, selectedSpan);
                d3.event.preventDefault();
            });

        updateSelection();

        return {
            widget: container.node(),
            // fix: the callback parameter's name was redacted; restored.
            load: function (timespanSelectedCallback) {
                onTimespanSelected = timespanSelectedCallback;
                notifyTimespanSelected(onTimespanSelected, selectedSpan);
            },
        };
    };

    return module;
})(d3);
//import org.junit.Test; //import java.util.Arrays; //import java.util.Collection; //import java.util.Collections; //import java.util.stream.Stream; ** // * <API key>: BSD-2-Clause //public class ProcTest { // @Test // public void foo() { // Stream<Integer> stream = Stream.of( 1, 2, 3, 4, 5, 6, 7, 8, 9 ); // Proc<Integer, Boolean> proc = new Proc<>( false, b -> b ? 1 : 0, stream ); // proc.proc( ( p, e ) -> { // if( e.equals( 4 ) ) { // p.setState( true ); // return Arrays.asList( 42, 74 ); // return Collections.singletonList( e ); // ( p, e ) -> Collections.singletonList( e ) ). // flatMap( Collection::stream ). // forEach( System.out::println );
goog.provide('ol.proj.transforms');

goog.require('ol');
goog.require('ol.obj');


/**
 * Two-level lookup table: source projection code -> destination projection
 * code -> transform function.
 * @private
 * @type {Object.<string, Object.<string, ol.TransformFunction>>}
 */
ol.proj.transforms.cache_ = {};


/**
 * Clear the transform cache.
 */
ol.proj.transforms.clear = function() {
  ol.proj.transforms.cache_ = {};
};


/**
 * Registers a conversion function to convert coordinates from the source
 * projection to the destination projection.
 *
 * @param {ol.proj.Projection} source Source.
 * @param {ol.proj.Projection} destination Destination.
 * @param {ol.TransformFunction} transformFn Transform.
 */
ol.proj.transforms.add = function(source, destination, transformFn) {
  var srcKey = source.getCode();
  var dstKey = destination.getCode();
  var cache = ol.proj.transforms.cache_;
  // Lazily create the per-source map on first registration.
  var byDestination = cache[srcKey];
  if (!byDestination) {
    byDestination = cache[srcKey] = {};
  }
  byDestination[dstKey] = transformFn;
};


/**
 * Unregisters the conversion function to convert coordinates from the source
 * projection to the destination projection.  This method is used to clean up
 * cached transforms during testing.
 *
 * @param {ol.proj.Projection} source Source projection.
 * @param {ol.proj.Projection} destination Destination projection.
 * @return {ol.TransformFunction} transformFn The unregistered transform.
 */
ol.proj.transforms.remove = function(source, destination) {
  var srcKey = source.getCode();
  var dstKey = destination.getCode();
  var cache = ol.proj.transforms.cache_;
  ol.DEBUG && console.assert(srcKey in cache,
      'sourceCode should be in transforms');
  ol.DEBUG && console.assert(dstKey in cache[srcKey],
      'destinationCode should be in transforms of sourceCode');
  var removed = cache[srcKey][dstKey];
  delete cache[srcKey][dstKey];
  // Drop the per-source map entirely once its last entry is gone.
  if (ol.obj.isEmpty(cache[srcKey])) {
    delete cache[srcKey];
  }
  return removed;
};


/**
 * Get a transform given a source code and a destination code.
 * @param {string} sourceCode The code for the source projection.
 * @param {string} destinationCode The code for the destination projection.
 * @return {ol.TransformFunction|undefined} The transform function (if found).
 */
ol.proj.transforms.get = function(sourceCode, destinationCode) {
  var byDestination = ol.proj.transforms.cache_[sourceCode];
  return byDestination ? byDestination[destinationCode] : undefined;
};
#!/bin/bash
#
# extractSamFromSam.sh
#
# extract SAM reads corresponding to a SAM header file from another SAM reference file and writes to
# SAM output file
#
# Chiu Laboratory
# University of California, San Francisco
# 3/15/2014

# fix: was `scriptname=${0` — an unterminated parameter expansion (syntax
# error); restored the conventional basename expansion.
scriptname=${0##*/}

if [ $# -lt 3 ]; then
	echo "Usage: $scriptname <SAM header file> <SAM reference file> <SAM output file> <optional: # of cores>"
	exit
fi

basef=$1
baseg=$2
output_file=$3
cores=$4

echo -e "$(date)\t$scriptname\tstarting: "
START1=$(date +%s)

if [ $# -lt 4 ]; then
	# using 1 core only
	echo -e "$(date)\t$scriptname\textracting reads from $baseg using headers from $basef..."
	# associative array for lookup
	awk 'FNR==NR { a[$1]=$1; next} $1 in a {print $0}' "$basef" "$baseg" > "$output_file"
	echo -e "$(date)\t$scriptname\tdone"
else
	# splitting input SAM header file by number of cores
	echo -e "$(date)\t$scriptname\tsplitting $basef..."
	# fix: `wc -l basef` counted a literal file named "basef"; use "$basef",
	# and read via stdin so wc prints only the number.
	numlines=$(wc -l < "$basef")
	let "LinesPerCore = numlines / cores"
	# guard: split(1) rejects `-l 0`, which happened when there were fewer
	# header lines than cores
	if [ "$LinesPerCore" -lt 1 ]; then
		LinesPerCore=1
	fi
	echo -e "$(date)\t$scriptname\twill use $cores cores with $LinesPerCore entries per core"
	split -l $LinesPerCore "$basef"
	echo -e "$(date)\t$scriptname\textracting reads from $baseg using headers from $basef"
	rm -f "$output_file"
	# delete previous output file, if present
	for f in `ls x??`
	do
		# associative array for lookup, running in background
		# fix: the awk program contained a stray ")" after $0 (awk syntax error)
		awk 'FNR==NR { a[$1]=$1; next} $1 in a {print $0}' "$f" "$baseg" >> "$output_file" &
	done
	for job in `jobs -p`
	do
		wait $job
	done
	echo -e "$(date)\t$scriptname\tdone extracting reads for each chunk"
	rm -f x??
fi

END1=$(date +%s)
echo -e "$(date)\t$scriptname\tDone with extractSamFromSam.sh"
diff=$(( $END1 - $START1 ))
echo -e "$(date)\t$scriptname\textractSamFromSam.sh took $diff seconds"
#ifndef OutputFormat_h
#define OutputFormat_h

#include <string>
#include <fstream>  // fix: ifstream/ofstream were named below without this header
#include <vector>

using std::string;
using std::vector;
using std::ifstream;
using std::ofstream;

namespace GlassBR_program {

    /**
     * \brief Writes the safety-analysis results to the given file.
     * \param filename  path of the output file to create/overwrite
     * \param is_safe1  first safety condition result
     * \param is_safe2  second safety condition result
     * \param P_b       probability of breakage
     */
    void write_output(string filename, bool is_safe1, bool is_safe2, double P_b);

}

#endif
<?php

declare(strict_types=1);

namespace Instagram\SDK\Response\Responses\Friendships;

use Instagram\SDK\Response\DTO\General\FriendshipStatus;
use Instagram\SDK\Response\Responses\ResponseEnvelope;

/**
 * Class FollowResponse
 *
 * Response envelope for a follow request, exposing the resulting
 * friendship status.
 *
 * @package Instagram\SDK\Response\Responses\Friendships
 */
final class FollowResponse extends ResponseEnvelope
{

    /**
     * @var FriendshipStatus
     */
    // NOTE(review): never assigned in this class — presumably hydrated by
    // the response deserializer; confirm against the envelope mapping.
    private $friendshipStatus;

    /**
     * Returns the friendship status after the follow request.
     *
     * @return FriendshipStatus
     */
    public function getFriendshipStatus(): FriendshipStatus
    {
        return $this->friendshipStatus;
    }
}
// <API key>.h // Definition of the <API key> class. // and Contributors. // obtaining a copy of the software and accompanying documentation covered by // execute, and transmit the Software, and to prepare derivative works of the // Software, and to permit third-parties to whom the Software is furnished to // do so, all subject to the following: // must be included in all copies of the Software, in whole or in part, and // all derivative works of the Software, unless such copies or derivative // works are solely in the form of machine-executable object code generated by // a source language processor. // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT // FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, // ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER // DEALINGS IN THE SOFTWARE. #ifndef <API key> #define <API key> #include "Poco/Util/Util.h" #include "CppUnit/TestCase.h" #include "Poco/Util/<API key>.h" class <API key>: public CppUnit::TestCase { public: <API key>(const std::string& name); ~<API key>(); void testMapper1(); void testMapper2(); void testMapper3(); void testMapper4(); void setUp(); void tearDown(); static CppUnit::Test* suite(); private: Poco::Util::<API key>* createConfiguration() const; }; #endif // <API key>
package gov.hhs.fha.nhinc.dsa;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;

/**
 * JAXB binding class carrying a single required {@code status} element.
 * NOTE(review): the XML type name and the class name were redacted in
 * this copy — restore from the generating schema before use.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "<API key>", propOrder = {
    "status"
})
public class <API key> {

    // Required per the schema (@XmlElement(required = true)).
    @XmlElement(required = true)
    protected String status;

    /**
     * Gets the value of the status property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getStatus() {
        return status;
    }

    /**
     * Sets the value of the status property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setStatus(String value) {
        this.status = value;
    }
}
<?php defined('SYSPATH') or die('No direct script access.');

/**
 * Extends Kohana's Security helper with HTML purification backed by
 * HTMLPurifier. The purifier instance is created lazily and cached for
 * the lifetime of the request.
 */
class Security extends Kohana_Security {

	// Cached HTMLPurifier instance; built on first call to purify().
	public static $htmlpurifier = NULL;

	/**
	 * Sanitizes untrusted markup and returns the cleaned HTML.
	 *
	 * @param   string  $dirty_html  raw, potentially unsafe markup
	 * @return  string  purified markup
	 */
	public static function purify($dirty_html)
	{
		if (self::$htmlpurifier === NULL)
		{
			// First use: build a purifier with the site-specific settings.
			$purifier_config = HTMLPurifier_Config::createDefault();
			$purifier_config->set('Cache.SerializerPath', CONFIGPATH.'cache');
			$purifier_config->set('HTML.SafeObject', TRUE);
			$purifier_config->set('HTML.SafeEmbed', TRUE);

			self::$htmlpurifier = new HTMLPurifier($purifier_config);
		}

		return self::$htmlpurifier->purify($dirty_html);
	}

}
<?php

use yii\helpers\Html;

/**
 * Create-form view for the Event model.
 *
 * @var yii\web\View $this
 * @var common\models\Event $model
 */

// Page title and breadcrumb trail (translated via the `gip` message category).
$this->title = Yii::t('gip', 'Create {modelClass}', [
    'modelClass' => 'Event',
]);
$this->params['breadcrumbs'][] = ['label' => Yii::t('gip', 'Events'), 'url' => ['index']];
$this->params['breadcrumbs'][] = $this->title;
?>
<div class="event-create">

    <div class="page-header">
        <h1><?= Html::encode($this->title) ?></h1>
    </div>

    <?php /* The actual input fields live in the shared _form partial. */ ?>
    <?= $this->render('_form', [
        'model' => $model,
    ]) ?>

</div>
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-- fix: the LANGUAGE pragma content was redacted in this copy; deriving
-- Applicative/Monad/MonadIO/MonadReader through the BlogApi newtype
-- requires GeneralizedNewtypeDeriving.
module ApiTypes where

import Control.Applicative (Applicative)
import Control.Concurrent.STM (TVar)
import Control.Monad.Reader (MonadReader, ReaderT (..))
import Control.Monad.Trans (MonadIO)
import Data.HashMap.Strict (HashMap)
import Data.Set (Set)
import Type.Comment (Comment)
import Type.Invoice (Invoice)
import Type.Customer (Customer)
import qualified Type.Invoice as Invoice

-- | Shared mutable server state: customers, invoices, and the comments
-- attached to each invoice, all behind STM 'TVar's.
data ServerData = ServerData
  { customers :: TVar (Set Customer)
  , invoices  :: TVar (Set Invoice)
  , comments  :: TVar (HashMap Invoice.Id (Set Comment))
  }

-- | The API monad: 'IO' with read access to the shared 'ServerData'.
newtype BlogApi a = BlogApi { unBlogApi :: ReaderT ServerData IO a }
  deriving
    ( Applicative
    , Functor
    , Monad
    , MonadIO
    , MonadReader ServerData
    )

-- | Run an API action against the given server state.
runBlogApi :: ServerData -> BlogApi a -> IO a
runBlogApi serverdata = flip runReaderT serverdata . unBlogApi
# Makefile for Sphinx documentation
#

# You can set these variables from the command line.
# Sphinx is invoked through the project's virtualenv (../env).
PYENV := . ../env/bin/activate;
PYTHON := $(PYENV) python
SPHINXOPTS =
SPHINXBUILD = $(PYTHON) $(shell which sphinx-build)
PAPER =
BUILDDIR = _build

# User-friendly check for sphinx-build
#ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
#endif

# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .

.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext

# Default target: list all available documentation targets.
help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo "  html       to make standalone HTML files"
	@echo "  dirhtml    to make HTML files named index.html in directories"
	@echo "  singlehtml to make a single large HTML file"
	@echo "  pickle     to make pickle files"
	@echo "  json       to make JSON files"
	@echo "  htmlhelp   to make HTML files and a HTML help project"
	@echo "  qthelp     to make HTML files and a qthelp project"
	@echo "  applehelp  to make an Apple Help Book"
	@echo "  devhelp    to make HTML files and a Devhelp project"
	@echo "  epub       to make an epub"
	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
	@echo "  text       to make text files"
	@echo "  man        to make manual pages"
	@echo "  texinfo    to make Texinfo files"
	@echo "  info       to make Texinfo files and run them through makeinfo"
	@echo "  gettext    to make PO message catalogs"
	@echo "  changes    to make an overview of all changed/added/deprecated items"
	@echo "  xml        to make Docutils-native XML files"
	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
	@echo "  linkcheck  to check all external links for integrity"
	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
	@echo "  coverage   to run coverage check of the documentation (if enabled)"

# NOTE(review): the clean recipe continues beyond this file chunk.
clean:
// Implementation of the login/lock-screen shutdown button (Chrome OS).
// NOTE(review): several macro/constant identifiers were redacted in this
// copy (shown as <API key>); restore them from VCS before building.

#include "chrome/browser/chromeos/login/shutdown_button.h"

#include "base/<API key>.h"
#include "chrome/browser/chromeos/cros/cros_library.h"
#include "chrome/browser/chromeos/cros/power_library.h"
#include "chrome/browser/chromeos/login/<API key>.h"
#include "chrome/browser/chromeos/view_ids.h"
#include "grit/generated_resources.h"
#include "grit/theme_resources.h"
#include "ui/base/l10n/l10n_util.h"
#include "ui/base/resource/resource_bundle.h"
#include "ui/gfx/gtk_util.h"
#include "views/background.h"

namespace {

// Style parameters for Shutdown button.

// Bottom/Right padding to locale the shutdown button.
const int kBottomPadding = 12;
const int kRightPadding = 12;

// Normal/Hover colors.
const SkColor <API key> = 0xFF303845;
const SkColor kShutdownHoverColor = 0xFF353E4E;

// Padding inside button.
const int kVerticalPadding = 13;
const int kIconTextPadding = 10;
const int kHorizontalPadding = 13;

// Rounded corner radious.
const int kCornerRadius = 4;

// Background that delegates painting to one of two backgrounds depending
// on whether the button is currently hovered.
class HoverBackground : public views::Background {
 public:
  HoverBackground(views::Background* normal, views::Background* hover)
      : normal_(normal), hover_(hover) {
  }

  // views::Background implementation.
  virtual void Paint(gfx::Canvas* canvas, views::View* view) const OVERRIDE {
    views::TextButton* button = static_cast<views::TextButton*>(view);
    if (button->state() == views::CustomButton::BS_HOT) {
      hover_->Paint(canvas, view);
    } else {
      normal_->Paint(canvas, view);
    }
  }

 private:
  views::Background* normal_;
  views::Background* hover_;

  <API key>(HoverBackground);
};

}  // namespace

namespace chromeos {

ShutdownButton::ShutdownButton()
    : <API key>(TextButton(this, std::wstring())) {
}

void ShutdownButton::Init() {
  ResourceBundle& rb = ResourceBundle::GetSharedInstance();
  SetIcon(*rb.GetBitmapNamed(IDR_SHUTDOWN_ICON));
  <API key>(kIconTextPadding);
  SetFocusable(true);
  SetID(<API key>);
  // Set label colors.
  SetEnabledColor(SK_ColorWHITE);
  SetDisabledColor(SK_ColorWHITE);
  SetHighlightColor(SK_ColorWHITE);
  SetHoverColor(SK_ColorWHITE);
  // Disable throbbing and make border always visible.
  <API key>(0);
  SetNormalHasBorder(true);
  // Setup round shapes.
  set_background(
      new HoverBackground(
          <API key>(kCornerRadius, 0, <API key>, 0),
          <API key>(kCornerRadius, 0, kShutdownHoverColor, 0)));
  set_border(
      views::Border::CreateEmptyBorder(kVerticalPadding, kHorizontalPadding,
                                       kVerticalPadding, kHorizontalPadding));
  OnLocaleChanged();  // set text
}

void ShutdownButton::LayoutIn(views::View* parent) {
  // No RTL for now. RTL will be handled in new WebUI based Login/Locker.
  gfx::Size button_size = GetPreferredSize();
  SetBounds(
      parent->width() - button_size.width()- kRightPadding,
      parent->height() - button_size.height() - kBottomPadding,
      button_size.width(),
      button_size.height());
}

gfx::NativeCursor ShutdownButton::GetCursor(const views::MouseEvent& event) {
  // Hand cursor only while the button is actually clickable.
  return IsEnabled() ? gfx::GetCursor(GDK_HAND2) : NULL;
}

void ShutdownButton::OnLocaleChanged() {
  SetText(UTF8ToWide(l10n_util::GetStringUTF8(IDS_SHUTDOWN_BUTTON)));
  if (parent()) {
    // Re-layout so the button resizes to fit the translated label.
    parent()->Layout();
    parent()->SchedulePaint();
  }
}

void ShutdownButton::ButtonPressed(views::Button* sender,
                                   const views::Event& event) {
  DCHECK(CrosLibrary::Get()->EnsureLoaded());
  CrosLibrary::Get()->GetPowerLibrary()->RequestShutdown();
}

}  // namespace chromeos
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN"> <html> <head> <title><API key></title> <meta http-equiv="Content-Type" Content="text/html; charset=Windows-1251"> <link rel="stylesheet" type="text/css" href="../../styles/styles.css"> </head> <body> <h1><API key></h1> <div class=navbar> <a href="../index.html">ãëàâíàÿ</a> | <a href="index.html">ýêñïîðòèðóåìûå ôóíêöèè</a> | <a href="../miscapi/index.html">Miscellaneous API</a> </div> <div class=shortdescr> <code><API key></code> ïîçâîëÿåò ïëàãèíó âûïîëíèòü êîä â êîíòåêñòå ãëàâíîãî ïîòîêà Far Manager. </div> <pre class=syntax> intptr_t WINAPI <API key>( const struct <API key> *Info ); </pre> <h3>Ïàðàìåòðû</h3> <div class=descr> <div class=dfn>Info</div> <div class=dfndescr>Óêàçàòåëü íà ñòðóêòóðó <a href="../structures/<API key>.html"><API key></a>, Far Manager ïåðåäàåò èíôîðìàöèþ î ñîáûòèè, ïîñëàííîì èç ñîçäàííîãî ïëàãèíîì ïîòîêà.<br>  íà÷àëå ôóíêöèè ïðîâåðüòå, ÷òî ïîëå <code><API key>.StructSize >= sizeof(<API key>)</code>.</div> </div> <h3>Âîçâðàùàåìîå çíà÷åíèå</h3> <div class=descr> Âîçâðàùàåìîå çíà÷åíèå çàâèñèò îò òèïà ñîáûòèÿ, ñìîòðèòå îïèñàíèå ñîáûòèé äëÿ óòî÷íåíèÿ èíôîðìàöèè.<br> Äëÿ íåîáðàáàòûâàåìûõ ïëàãèíîì ñîáûòèé, ôóíêöèÿ äîëæíà âîçâðàùàòü <code>0</code>. </div> <div class=see>Ñìîòðèòå òàêæå:</div> <div class=seecont> </div> </body> </html>
from __future__ import division import sys import pytest from gmpy_cffi import mpz, MAX_UI PY3 = sys.version.startswith('3') if PY3: long = int invalids = [(), [], set(), dict(), lambda x: x**2] class TestInit(object): small_ints = [-1, 0, 1, 123, -9876, sys.maxsize, -sys.maxsize - 1] big_ints = [sys.maxsize + 1, -sys.maxsize - 2, 2 * sys.maxsize + 1, 2 * sys.maxsize + 2] @pytest.mark.parametrize('n', small_ints + big_ints) def test_init_int(self, n): assert mpz(n) == n @pytest.mark.parametrize('f', [0.0, 1.0, 1.5, 1e15 + 0.9]) def test_init_float(self, f): assert mpz(f) == int(f) assert mpz(-f) == int(-f) @pytest.mark.parametrize('n', small_ints + big_ints) def <API key>(self, n): assert mpz(str(n), 10) == n assert mpz(str(n)) == n assert mpz(str(n), 0) == n assert mpz(hex(n).rstrip('L'), 0) == n if PY3: assert mpz(oct(n).rstrip('L').replace('0o', '0'), 0) == n else: assert mpz(oct(n).rstrip('L'), 0) == n @pytest.mark.parametrize('n', small_ints + big_ints) def test_init_hex_str(self, n): assert mpz("%x" % n, 16) == n assert mpz("%#x" % n, 0) == n @pytest.mark.parametrize(('n', 'base'), [('0x1', 16), ('g', 16), ('a', 10)]) def <API key>(self, n, base): with pytest.raises(ValueError): mpz(n, base) @pytest.mark.parametrize(('n', 'base'), [('0', -1), ('0', 1), ('0', 63), (0, 10)]) def <API key>(self, n, base): with pytest.raises(ValueError): mpz(n, base) @pytest.mark.parametrize('type_', [int, float, mpz, str]) def test_init_type(self, type_): assert mpz(type_(1)) == 1 @pytest.mark.parametrize('n', invalids) def test_init_invalid(self, n): with pytest.raises(TypeError): mpz(n) class TestMath(object): numbers = [-1, 0, 1, sys.maxsize, -sys.maxsize - 1, MAX_UI, MAX_UI + 1] @pytest.mark.parametrize('b', numbers) def test_add(self, b): assert mpz(1) + mpz(b) == mpz(1 + b) assert mpz(1) + b == mpz(1 + b) @pytest.mark.parametrize('b', numbers) def test_radd(self, b): assert b + mpz(1) == mpz(b + 1) @pytest.mark.parametrize('b', numbers) def test_sub(self, b): assert mpz(1) 
- mpz(b) == mpz(1 - b) assert mpz(1) - b == mpz(1 - b) @pytest.mark.parametrize('b', numbers) def test_rsub(self, b): assert b - mpz(1) == mpz(b - 1) @pytest.mark.parametrize('b', numbers) def test_mul(self, b): assert mpz(2) * mpz(b) == mpz(2 * b) assert mpz(2) * b == mpz(2 * b) @pytest.mark.parametrize('b', numbers) def test_rmul(self, b): assert b * mpz(2) == mpz(b * 2) @pytest.mark.parametrize('b', numbers) def test_floordiv(self, b): if b != 0: assert mpz(2) // mpz(b) == mpz(2 // b) assert mpz(2) // b == mpz(2 // b) else: with pytest.raises(ZeroDivisionError): mpz(2) // mpz(b) with pytest.raises(ZeroDivisionError): mpz(2) @pytest.mark.parametrize('b', numbers) def test_rfloordiv(self, b): assert b // mpz(2) == mpz(b // 2) def <API key>(self): with pytest.raises(ZeroDivisionError): 1 // mpz(0) @pytest.mark.xfail(reason='__truediv__ needs mpf') def test_truediv(self): assert mpz(3) / mpz(2) == 1.5 @pytest.mark.parametrize('b', numbers) def test_mod(self, b): if b != 0: assert mpz(2) % mpz(b) == mpz(2 % b) assert mpz(2) % b == mpz(2 % b) else: with pytest.raises(ZeroDivisionError): mpz(2) % mpz(b) with pytest.raises(ZeroDivisionError): mpz(2) % b @pytest.mark.parametrize('b', numbers) def test_rmod(self, b): assert b % mpz(2) == mpz(b % 2) def test_rmod_by_zero(self): with pytest.raises(ZeroDivisionError): 1 % mpz(0) @pytest.mark.parametrize('b', numbers) def test_divmod(self, b): if b != 0: assert divmod(mpz(2), mpz(b)) == tuple(map(mpz, divmod(2, b))) assert divmod(mpz(2), b) == tuple(map(mpz, divmod(2, b))) else: with pytest.raises(ZeroDivisionError): divmod(mpz(2), mpz(b)) with pytest.raises(ZeroDivisionError): divmod(mpz(2), b) @pytest.mark.parametrize('b', numbers) def test_rdivmod(self, b): assert divmod(b, mpz(2)) == tuple(map(mpz, divmod(b, 2))) def <API key>(self): with pytest.raises(ZeroDivisionError): divmod(1, mpz(0)) @pytest.mark.parametrize('b', [0, 2, 1 << 16]) def test_shifts(self, b): assert mpz(1) << mpz(b) == mpz(1 << b) assert mpz(1) << b == 
mpz(1 << b) assert mpz(1 << 100) >> mpz(b) == mpz((1 << 100) >> b) assert mpz(1 << 100) >> b == mpz((1 << 100) >> b) @pytest.mark.parametrize('b', [0, 2, sys.maxsize, MAX_UI]) def test_rshifts(self, b): assert b << mpz(1) == mpz(b << 1) assert b >> mpz(1) == mpz(b >> 1) @pytest.mark.parametrize('b', [-1, MAX_UI + 1]) def <API key>(self, b): with pytest.raises(OverflowError): mpz(1) << b with pytest.raises(OverflowError): mpz(1) >> b @pytest.mark.parametrize('type_', [int, long, mpz]) def <API key>(self, type_): assert mpz(1) << type_(1) == mpz(2) assert mpz(4) >> type_(1) == mpz(2) @pytest.mark.parametrize('type_', [float, str]) def <API key>(self, type_): with pytest.raises(TypeError): mpz(1) << type_(1) with pytest.raises(TypeError): mpz(1) >> type_(1) @pytest.mark.parametrize('type_', [float, str]) def <API key>(self, type_): with pytest.raises(TypeError): type_(1) << mpz(1) with pytest.raises(TypeError): type_(1) >> mpz(1) def test_str(self): n = mpz('123456789abcdef0', 16) assert str(n) == '1311768467463790320' assert repr(n) == 'mpz(1311768467463790320)' assert hex(n) == '0x123456789abcdef0' if PY3: assert oct(n) == '<API key>' else: assert oct(n) == '<API key>' n = -mpz('123456789abcdef0', 16) assert str(n) == '-1311768467463790320' assert repr(n) == 'mpz(-1311768467463790320)' assert hex(n) == '-0x123456789abcdef0' if PY3: assert oct(n) == '-<API key>' else: assert oct(n) == '-<API key>' def <API key>(self): for n in self.numbers: for type_ in [int, long]: n1 = type_(n) mpz_n = type_(mpz(n)) assert type(n1) == type(mpz_n) assert n1 == mpz_n def <API key>(self): for n in self.numbers: n1 = float(n) mpz_n = float(mpz(n)) assert type(n1) == type(mpz_n) assert abs(n1 - mpz_n) <= abs(n1 * sys.float_info.epsilon) def <API key>(self): for n in self.numbers: n1 = complex(n) mpz_n = complex(mpz(n)) assert type(n1) == type(mpz_n) assert abs(n1.real - mpz_n.real) <= abs(n1.real * sys.float_info.epsilon) and n1.imag == mpz_n.imag @pytest.mark.parametrize('n', numbers) 
def test_unary_methods(self, n): assert mpz(-n) == -mpz(n) assert mpz(+n) == +mpz(n) assert mpz(abs(n)) == abs(mpz(n)) assert mpz(~n) == ~mpz(n) @pytest.mark.parametrize('n', numbers) def test_bit_ops(self, n): assert mpz(n) & mpz(n + 1) == mpz(n & (n + 1)) assert mpz(n) & (n + 1) == mpz(n & (n + 1)) assert mpz(n) | mpz(n + 1) == mpz(n | (n + 1)) assert mpz(n) | (n + 1) == mpz(n | (n + 1)) assert mpz(n) ^ mpz(n + 1) == mpz(n ^ (n + 1)) assert mpz(n) ^ (n + 1) == mpz(n ^ (n + 1)) @pytest.mark.parametrize('n', numbers) def test_bit_rops(self, n): assert n & mpz(n + 1) == mpz(n & (n + 1)) assert n | mpz(n + 1) == mpz(n | (n + 1)) assert n ^ mpz(n + 1) == mpz(n ^ (n + 1)) def test_index(self): l = range(5) assert l[mpz(2)] == l[2] assert l[mpz(-1)] == l[-1] with pytest.raises(IndexError): l[mpz(10)] def test_nonzero(self): assert mpz(23) assert not mpz(0) assert mpz(-1) @pytest.mark.parametrize('b', [-1, 0, 1, 1024, MAX_UI + 1]) def test_pow_no_mod(self, b): if b < 0: for exp in [mpz(b), b]: with pytest.raises(ValueError) as exc: mpz(2) ** exp assert exc.value.args == ('mpz.pow with negative exponent',) elif b > MAX_UI: for exp in [mpz(b), b]: with pytest.raises(ValueError) as exc: mpz(2) ** exp assert exc.value.args == ('mpz.pow with outragous exponent',) else: res = mpz(2 ** b) assert mpz(2) ** mpz(b) == res assert mpz(2) ** b == res @pytest.mark.parametrize('b', [-1, 0, 1, 1024, MAX_UI + 1]) def test_pow_with_mod(self, b): if b < 0: for exp in [mpz(b), b]: for mod in [mpz(7), 7]: with pytest.raises(ValueError) as exc: pow(mpz(2), exp, mod) assert exc.value.args == ('mpz.pow with negative exponent',) else: res = mpz(pow(2, b, 7)) assert pow(mpz(2), mpz(b), mpz(7)) == res assert pow(mpz(2), b, mpz(7)) == res assert pow(mpz(2), mpz(b), 7) == res assert pow(mpz(2), b, 7) == res @pytest.mark.parametrize('b', numbers) def test_rpow(self, b): assert b ** mpz(3) == mpz(b ** 3) def test_rpow_invalid(self): with pytest.raises(ValueError) as exc: 1 ** mpz(-1) assert 
exc.value.args == ('mpz.pow with negative exponent',) with pytest.raises(ValueError) as exc: 1 ** mpz(MAX_UI + 1) assert exc.value.args == ('mpz.pow with outragous exponent',) def test_pow_invalid(self): with pytest.raises(TypeError): mpz(2) ** 2.0 with pytest.raises(TypeError): 2.0 ** mpz(2) with pytest.raises(TypeError): pow(mpz(2), 2, 2.0) def <API key>(self): # XXX causes core dump on pypy with jit enabled x = mpz(1) for i in range(10000): # This bug occurs randomly, so repeat assert x * x == x @pytest.mark.parametrize('n', invalids) def test_invalid_op(self, n): with pytest.raises(TypeError): mpz(1) - n with pytest.raises(TypeError): mpz(1) + n with pytest.raises(TypeError): mpz(1) with pytest.raises(TypeError): mpz(1) / n with pytest.raises(TypeError): mpz(1) % n with pytest.raises(TypeError): divmod(mpz(1), n) @pytest.mark.parametrize('n', invalids) def test_invalid_rop(self, n): with pytest.raises(TypeError): n - mpz(1) with pytest.raises(TypeError): n + mpz(1) with pytest.raises(TypeError): n // mpz(1) with pytest.raises(TypeError): n / mpz(1) with pytest.raises(TypeError): n % mpz(1) with pytest.raises(TypeError): divmod(n, mpz(1)) class TestCmp(object): def test_cmp_int(self): assert mpz(1) < 2 assert mpz(1) <= 2 assert mpz(2) > 1 assert mpz(2) >= 1 assert mpz(2) == 2 assert mpz(1) != 2 assert mpz(sys.maxsize - 1) < sys.maxsize assert mpz(sys.maxsize + 1) > sys.maxsize assert mpz(sys.maxsize) == sys.maxsize assert mpz(2*sys.maxsize - 1) < 2*sys.maxsize assert mpz(2*sys.maxsize + 1) > 2*sys.maxsize assert mpz(2*sys.maxsize) == 2*sys.maxsize assert mpz(4*sys.maxsize - 1) < 4*sys.maxsize assert mpz(4*sys.maxsize + 1) > 4*sys.maxsize assert mpz(4*sys.maxsize) == 4*sys.maxsize def test_cmp_float(self): assert mpz(1) > 0.5 assert mpz(1) < 1.5 assert mpz(1) == 1.0 def test_cmp_mpz(self): assert mpz(2) > mpz(1) assert mpz(1) < mpz(2) assert mpz(2) == mpz(2) @pytest.mark.xfail("sys.version.startswith('2')", reason="python2 comparison") 
@pytest.mark.parametrize('n', invalids) def test_invalid_cmp(self, n): with pytest.raises(TypeError): mpz(1) > n with pytest.raises(TypeError): mpz(1) < n with pytest.raises(TypeError): mpz(1) >= n with pytest.raises(TypeError): mpz(1) <= n @pytest.mark.xfail(reason='cpython __hash__ implementation bug (feature)') def test_hash_neg1(self): assert hash(mpz(-1)) == -1 def test_hash(self): assert hash(mpz(1)) == 1 assert hash(mpz(-2)) == -2 assert hash(mpz(sys.maxsize)) == sys.maxsize assert hash(mpz(sys.maxsize+1)) == -sys.maxsize - 1
package org.locationtech.geogig.remote; import static org.locationtech.geogig.storage.datastream.FormatCommonV1.readObjectId; import java.io.DataInput; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.HashSet; import java.util.Iterator; import java.util.Set; import org.eclipse.jdt.annotation.Nullable; import org.locationtech.geogig.model.ObjectId; import org.locationtech.geogig.model.RevObject; import org.locationtech.geogig.repository.DiffEntry; import org.locationtech.geogig.repository.NodeRef; import org.locationtech.geogig.repository.Repository; import org.locationtech.geogig.storage.BulkOpListener; import org.locationtech.geogig.storage.BulkOpListener.CountingListener; import org.locationtech.geogig.storage.ObjectStore; import org.locationtech.geogig.storage.datastream.<API key>; import org.locationtech.geogig.storage.datastream.FormatCommonV1; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.base.Preconditions; import com.google.common.base.Throwables; import com.google.common.collect.AbstractIterator; import com.google.common.io.<API key>; /** * Provides a method of packing a set of changes and the affected objects to and from a binary * stream. 
*/ public final class BinaryPackedChanges { private static final Logger LOGGER = LoggerFactory.getLogger(BinaryPackedChanges.class); private static final <API key> serializer = <API key>.INSTANCE; private final Repository repository; private boolean filtered; private static enum CHUNK_TYPE { DIFF_ENTRY { @Override public int value() { return 0; } }, <API key> { @Override public int value() { return 1; } }, <API key> { @Override public int value() { return 2; } }, FILTER_FLAG { @Override public int value() { return 3; } }; public abstract int value(); private static final CHUNK_TYPE[] values = CHUNK_TYPE.values(); public static CHUNK_TYPE valueOf(int value) { // abusing the fact that value() coincides with ordinal() return values[value]; } }; /** * Constructs a new {@code BinaryPackedChanges} instance using the provided {@link Repository}. * * @param repository the repository to save objects to, or read objects from, depending on the * operation */ public BinaryPackedChanges(Repository repository) { this.repository = repository; filtered = false; } public boolean wasFiltered() { return filtered; } /** * Writes the set of changes to the provided output stream. 
* * @param out the stream to write to * @param changes the changes to write * @throws IOException * @return the number of objects written */ public long write(OutputStream out, Iterator<DiffEntry> changes) throws IOException { final ObjectStore objectDatabase = repository.objectDatabase(); out = new <API key>(out); // avoids sending the same metadata object multiple times Set<ObjectId> writtenMetadataIds = new HashSet<ObjectId>(); // buffer to avoid ObjectId cloning its internal state for each object byte[] oidbuffer = new byte[ObjectId.NUM_BYTES]; long objectCount = 0; while (changes.hasNext()) { DiffEntry diff = changes.next(); if (diff.isDelete()) { out.write(CHUNK_TYPE.DIFF_ENTRY.value()); } else { // its a change or an addition, new object is guaranteed to be present NodeRef newObject = diff.getNewObject(); ObjectId metadataId = newObject.getMetadataId(); if (writtenMetadataIds.contains(metadataId)) { out.write(CHUNK_TYPE.<API key>.value()); } else { out.write(CHUNK_TYPE.<API key>.value()); RevObject metadata = objectDatabase.get(metadataId); writeObjectId(metadataId, out, oidbuffer); serializer.write(metadata, out); writtenMetadataIds.add(metadataId); objectCount++; } ObjectId objectId = newObject.getObjectId(); writeObjectId(objectId, out, oidbuffer); RevObject object = objectDatabase.get(objectId); serializer.write(object, out); objectCount++; } DataOutputStream dataOut = new DataOutputStream(out); FormatCommonV1.writeDiff(diff, dataOut); dataOut.flush(); } // signal the end of changes out.write(CHUNK_TYPE.FILTER_FLAG.value()); final boolean filtersApplied = changes instanceof <API key> && ((<API key>) changes).wasFiltered(); out.write(filtersApplied ? 
1 : 0); LOGGER.info(String.format("Written %,d bytes to remote accounting for %,d objects.", ((<API key>) out).getCount(), objectCount)); return objectCount; } private void writeObjectId(ObjectId objectId, OutputStream out, byte[] oidbuffer) throws IOException { objectId.getRawValue(oidbuffer); out.write(oidbuffer); } /** * Read in the changes from the provided input stream and call the provided callback for each * change. The input stream represents the output of another {@code BinaryPackedChanges} * instance. * * @param in the stream to read from * @param callback the callback to call for each item */ public void ingest(final InputStream in, Callback callback) { <API key> readingIterator = new <API key>(in); Iterator<RevObject> asObjects = asObjects(readingIterator, callback); ObjectStore objectDatabase = repository.objectDatabase(); CountingListener listener = BulkOpListener.newCountingListener(); objectDatabase.putAll(asObjects, listener); LOGGER.info("Ingested %,d objects. Inserted: %,d. Already existing: %,d\n", listener.inserted() + listener.found(), listener.inserted(), listener.found()); this.filtered = readingIterator.isFiltered(); } /** * Returns an iterator that calls the {@code callback} for each {@link DiffPacket}'s * {@link DiffEntry} once, and returns either zero, one, or two {@link RevObject}s, depending on * which information the diff packet carried over. 
*/ private Iterator<RevObject> asObjects(final <API key> readingIterator, final Callback callback) { return new AbstractIterator<RevObject>() { private DiffPacket current; @Override protected RevObject computeNext() { if (current != null) { Preconditions.checkState(current.metadataObject != null); RevObject ret = current.metadataObject; current = null; return ret; } while (readingIterator.hasNext()) { DiffPacket diffPacket = readingIterator.next(); callback.callback(diffPacket.entry); RevObject obj = diffPacket.newObject; RevObject md = diffPacket.metadataObject; Preconditions.checkState(obj != null || (obj == null && md == null)); if (obj != null) { if (md != null) { current = diffPacket; } return obj; } } return endOfData(); } }; } private static class DiffPacket { public final DiffEntry entry; @Nullable public final RevObject newObject; @Nullable public final RevObject metadataObject; public DiffPacket(DiffEntry entry, @Nullable RevObject newObject, @Nullable RevObject metadata) { this.entry = entry; this.newObject = newObject; this.metadataObject = metadata; } } private static class <API key> extends AbstractIterator<DiffPacket> { private InputStream in; private DataInput data; private boolean filtered; public <API key>(InputStream in) { this.in = in; this.data = new DataInputStream(in); } /** * @return {@code true} if the stream finished with a non zero "filter applied" marker */ public boolean isFiltered() { return filtered; } @Override protected DiffPacket computeNext() { try { return readNext(); } catch (IOException e) { throw Throwables.propagate(e); } } private DiffPacket readNext() throws IOException { final CHUNK_TYPE chunkType = CHUNK_TYPE.valueOf((int) (data.readByte() & 0xFF)); RevObject revObj = null; RevObject metadata = null; switch (chunkType) { case DIFF_ENTRY: break; case <API key>: { ObjectId id = readObjectId(data); revObj = serializer.read(id, in); } break; case <API key>: { ObjectId mdid = readObjectId(data); metadata = 
serializer.read(mdid, in); ObjectId id = readObjectId(data); revObj = serializer.read(id, in); } break; case FILTER_FLAG: { int changesFiltered = in.read(); if (changesFiltered != 0) { filtered = true; } return endOfData(); } default: throw new <API key>("Unknown chunk type: " + chunkType); } DiffEntry diff = FormatCommonV1.readDiff(data); return new DiffPacket(diff, revObj, metadata); } } /** * Interface for callback methods to be used by the read and write operations. */ public static interface Callback { public abstract void callback(DiffEntry diff); } }
<?php use yii\helpers\Html; use yii\widgets\DetailView; /* @var $this yii\web\View */ /* @var $model common\models\News */ $this->title = $model->title; $this->params['breadcrumbs'][] = ['label' => 'News', 'url' => ['index']]; $this->params['breadcrumbs'][] = $this->title; ?> <div class="news-view"> <h1><?= Html::encode($this->title) ?></h1> <p> <?= Html::a('Update', ['update', 'id' => $model->id], ['class' => 'btn btn-primary']) ?> <?= Html::a('Delete', ['delete', 'id' => $model->id], [ 'class' => 'btn btn-danger', 'data' => [ 'confirm' => 'Are you sure you want to delete this item?', 'method' => 'post', ], ]) ?> </p> <?= DetailView::widget([ 'model' => $model, 'attributes' => [ 'id', 'publicated_at:datetime', 'title:ntext', 'text:ntext', [ 'attribute'=>'subject_id', 'value'=>$model->subject?Html::a($model->subject->title, ['subject/view', 'id'=>$model->subject_id]):null, 'format'=>'html', ], ], ]) ?> </div>
<?php namespace backend\controllers; class TestController extends \yii\web\Controller { public function actionIndex() { echo 'test'; } }
using System;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Html;
using OrchardCore.ContentManagement;
using OrchardCore.ContentManagement.Display.ContentDisplay;
using OrchardCore.ContentManagement.Metadata.Settings;
using OrchardCore.DisplayManagement;
using OrchardCore.DisplayManagement.Handlers;
using OrchardCore.DisplayManagement.Title;
using OrchardCore.DisplayManagement.Views;
using OrchardCore.ResourceManagement;
using OrchardCore.Seo.Models;
using OrchardCore.Shortcodes.Services;
using Shortcodes;

namespace OrchardCore.Seo.Drivers
{
    /// <summary>
    /// Content display driver that reads the <see cref="SeoAspect"/> of the content item
    /// being displayed and registers the corresponding SEO resources (page title, meta
    /// tags, OpenGraph/Twitter properties, canonical link, JSON-LD script) with the
    /// resource manager. Emits no shape of its own: it always returns null.
    /// All user-provided values are passed through the shortcode service before output.
    /// </summary>
    public class SeoContentDriver : <API key>
    {
        private readonly IContentManager _contentManager;
        private readonly IPageTitleBuilder _pageTitleBuilder;
        private readonly IResourceManager _resourceManager;
        private readonly IShortcodeService _shortcodeService;

        // Guard flag: set on the first DisplayAsync call so SEO tags are only
        // registered once per request, for the primary content item.
        private bool <API key> { get; set; }

        public SeoContentDriver(
            IContentManager contentManager,
            IPageTitleBuilder pageTitleBuilder,
            IResourceManager resourceManager,
            IShortcodeService shortcodeService
            )
        {
            _contentManager = contentManager;
            _pageTitleBuilder = pageTitleBuilder;
            _resourceManager = resourceManager;
            _shortcodeService = shortcodeService;
        }

        public override async Task<IDisplayResult> DisplayAsync(ContentItem contentItem, BuildDisplayContext context)
        {
            // We only apply this on the primary content item, which is considered the first call to BuildDisplay.
            if (<API key>)
            {
                return null;
            }

            <API key> = true;

            // Do not include Widgets or any display type other than detail.
            if (context.DisplayType != "Detail" || context.Shape.TryGetProperty(nameof(ContentTypeSettings.Stereotype), out string _))
            {
                return null;
            }

            var aspect = await _contentManager.PopulateAspectAsync<SeoAspect>(contentItem);

            // The aspect can opt out of SEO rendering entirely.
            if (!aspect.Render)
            {
                return null;
            }

            if (!String.IsNullOrEmpty(aspect.PageTitle))
            {
                _pageTitleBuilder.SetFixedTitle(new HtmlString(await RenderAsync(aspect.PageTitle, contentItem)));
            }

            if (!String.IsNullOrEmpty(aspect.MetaDescription))
            {
                _resourceManager.RegisterMeta(new MetaEntry
                {
                    Name = "description",
                    Content = await RenderAsync(aspect.MetaDescription, contentItem)
                });
            }

            if (!String.IsNullOrEmpty(aspect.MetaKeywords))
            {
                _resourceManager.RegisterMeta(new MetaEntry
                {
                    Name = "keywords",
                    Content = await RenderAsync(aspect.MetaKeywords, contentItem)
                });
            }

            // Note: the canonical URL is emitted verbatim, without shortcode processing.
            if (!String.IsNullOrEmpty(aspect.Canonical))
            {
                _resourceManager.RegisterLink(new LinkEntry
                {
                    Href = aspect.Canonical,
                    Rel = "canonical"
                });
            }

            if (!String.IsNullOrEmpty(aspect.MetaRobots))
            {
                _resourceManager.RegisterMeta(new MetaEntry
                {
                    Name = "robots",
                    Content = await RenderAsync(aspect.MetaRobots, contentItem)
                });
            }

            foreach (var customMetaTag in aspect.CustomMetaTags)
            {
                // Generate a new meta entry as the builder is prepopulated.
                _resourceManager.RegisterMeta(new MetaEntry(
                    await RenderAsync(customMetaTag.Name, contentItem),
                    await RenderAsync(customMetaTag.Property, contentItem),
                    await RenderAsync(customMetaTag.Content, contentItem),
                    await RenderAsync(customMetaTag.HttpEquiv, contentItem),
                    await RenderAsync(customMetaTag.Charset, contentItem)));
            }

            // OpenGraph.
            if (!String.IsNullOrEmpty(aspect.OpenGraphType))
            {
                _resourceManager.RegisterMeta(new MetaEntry
                {
                    Property = "og:type",
                    Content = await RenderAsync(aspect.OpenGraphType, contentItem)
                });
            }

            if (!String.IsNullOrEmpty(aspect.OpenGraphTitle))
            {
                _resourceManager.RegisterMeta(new MetaEntry
                {
                    Property = "og:title",
                    Content = await RenderAsync(aspect.OpenGraphTitle, contentItem)
                });
            }

            if (!String.IsNullOrEmpty(aspect.<API key>))
            {
                _resourceManager.RegisterMeta(new MetaEntry
                {
                    Property = "og:description",
                    Content = await RenderAsync(aspect.<API key>, contentItem)
                });
            }

            if (!String.IsNullOrEmpty(aspect.OpenGraphImage))
            {
                _resourceManager.RegisterMeta(new MetaEntry
                {
                    Property = "og:image",
                    Content = await RenderAsync(aspect.OpenGraphImage, contentItem)
                });
            }

            if (!String.IsNullOrEmpty(aspect.OpenGraphImageAlt))
            {
                _resourceManager.RegisterMeta(new MetaEntry
                {
                    Property = "og:image:alt",
                    Content = await RenderAsync(aspect.OpenGraphImageAlt, contentItem)
                });
            }

            if (!String.IsNullOrEmpty(aspect.OpenGraphUrl))
            {
                _resourceManager.RegisterMeta(new MetaEntry
                {
                    Property = "og:url",
                    Content = await RenderAsync(aspect.OpenGraphUrl, contentItem)
                });
            }

            if (!String.IsNullOrEmpty(aspect.OpenGraphSiteName))
            {
                _resourceManager.RegisterMeta(new MetaEntry
                {
                    Property = "og:site_name",
                    Content = await RenderAsync(aspect.OpenGraphSiteName, contentItem)
                });
            }

            if (!String.IsNullOrEmpty(aspect.OpenGraphAppId))
            {
                _resourceManager.RegisterMeta(new MetaEntry
                {
                    Property = "fb:app_id",
                    Content = await RenderAsync(aspect.OpenGraphAppId, contentItem)
                });
            }

            if (!String.IsNullOrEmpty(aspect.OpenGraphLocale))
            {
                _resourceManager.RegisterMeta(new MetaEntry
                {
                    Property = "og:locale",
                    Content = await RenderAsync(aspect.OpenGraphLocale, contentItem)
                });
            }

            // Twitter.
            // NOTE(review): twitter:card and twitter:site are registered via Property while
            // the remaining twitter:* tags use Name — Twitter's card markup documents the
            // name attribute; confirm whether this asymmetry is intentional.
            if (!String.IsNullOrEmpty(aspect.TwitterCard))
            {
                _resourceManager.RegisterMeta(new MetaEntry
                {
                    Property = "twitter:card",
                    Content = await RenderAsync(aspect.TwitterCard, contentItem)
                });
            }

            if (!String.IsNullOrEmpty(aspect.TwitterSite))
            {
                _resourceManager.RegisterMeta(new MetaEntry
                {
                    Property = "twitter:site",
                    Content = await RenderAsync(aspect.TwitterSite, contentItem)
                });
            }

            if (!String.IsNullOrEmpty(aspect.TwitterTitle))
            {
                _resourceManager.RegisterMeta(new MetaEntry
                {
                    Name = "twitter:title",
                    Content = await RenderAsync(aspect.TwitterTitle, contentItem)
                });
            }

            if (!String.IsNullOrEmpty(aspect.<TwitterConsumerkey>))
            {
                _resourceManager.RegisterMeta(new MetaEntry
                {
                    Name = "<TwitterConsumerkey>",
                    Content = await RenderAsync(aspect.<TwitterConsumerkey>, contentItem)
                });
            }

            if (!String.IsNullOrEmpty(aspect.TwitterImage))
            {
                _resourceManager.RegisterMeta(new MetaEntry
                {
                    Name = "twitter:image",
                    Content = await RenderAsync(aspect.TwitterImage, contentItem)
                });
            }

            if (!String.IsNullOrEmpty(aspect.TwitterImageAlt))
            {
                _resourceManager.RegisterMeta(new MetaEntry
                {
                    Name = "twitter:image:alt",
                    Content = await RenderAsync(aspect.TwitterImageAlt, contentItem)
                });
            }

            if (!String.IsNullOrEmpty(aspect.TwitterCreator))
            {
                _resourceManager.RegisterMeta(new MetaEntry
                {
                    Name = "twitter:creator",
                    Content = await RenderAsync(aspect.TwitterCreator, contentItem)
                });
            }

            if (!String.IsNullOrEmpty(aspect.TwitterUrl))
            {
                _resourceManager.RegisterMeta(new MetaEntry
                {
                    Name = "twitter:url",
                    Content = await RenderAsync(aspect.TwitterUrl, contentItem)
                });
            }

            // JSON-LD structured data is injected verbatim, without shortcode processing.
            if (!String.IsNullOrEmpty(aspect.GoogleSchema))
            {
                _resourceManager.RegisterHeadScript(new HtmlString($"<script type=\"application/ld+json\">\n{aspect.GoogleSchema}\n</script>"));
            }

            // This driver contributes no display shape of its own.
            return null;
        }

        /// <summary>
        /// Runs the shortcode processor over <paramref name="template"/>, exposing the
        /// current content item to shortcodes via the "ContentItem" context key.
        /// </summary>
        private ValueTask<string> RenderAsync(string template, ContentItem contentItem)
            => _shortcodeService.ProcessAsync(template, new Context { ["ContentItem"] = contentItem });
    }
}
// WARNING // This file has been generated automatically by Xamarin Studio from the outlets and // actions declared in your storyboard file. // Manual changes to this file will not be maintained. using Foundation; using System; using System.CodeDom.Compiler; using UIKit; namespace <API key> { [Register ("<API key>")] partial class <API key> { void <API key> () { } } }
#include "base/rand_util.h" #include "chrome/browser/profiles/profile.h" #include "chrome/browser/sync/test/integration/bookmarks_helper.h" #include "chrome/browser/sync/test/integration/passwords_helper.h" #include "chrome/browser/sync/test/integration/<API key>.h" #include "chrome/browser/sync/test/integration/<API key>.h" #include "chrome/browser/sync/test/integration/sync_test.h" #include "sync/internal_api/public/sessions/<API key>.h" #include "ui/base/layout.h" using bookmarks_helper::AddFolder; using bookmarks_helper::AddURL; using bookmarks_helper::AllModelsMatch; using bookmarks_helper::<API key>; using bookmarks_helper::<API key>; using bookmarks_helper::<API key>; using bookmarks_helper::CreateFavicon; using bookmarks_helper::GetBookmarkBarNode; using bookmarks_helper::GetOtherNode; using bookmarks_helper::<API key>; using bookmarks_helper::GetUniqueNodeByURL; using bookmarks_helper::HasNodeWithURL; using bookmarks_helper::IndexedFolderName; using bookmarks_helper::<API key>; using bookmarks_helper::<API key>; using bookmarks_helper::IndexedURL; using bookmarks_helper::IndexedURLTitle; using bookmarks_helper::Move; using bookmarks_helper::Remove; using bookmarks_helper::RemoveAll; using bookmarks_helper::ReverseChildOrder; using bookmarks_helper::SetFavicon; using bookmarks_helper::SetTitle; using bookmarks_helper::SetURL; using bookmarks_helper::SortChildren; using passwords_helper::<API key>; using passwords_helper::<API key>; using <API key>::<API key>; using <API key>::<API key>; using <API key>::<API key>; const std::string kGenericURL = "http: const std::wstring kGenericURLTitle = L"URL Title"; const std::wstring kGenericFolderName = L"Folder Name"; const std::wstring <API key> = L"Subfolder Name"; const std::wstring <API key> = L"Subsubfolder Name"; const char* kValidPassphrase = "passphrase!"; class <API key> : public SyncTest { public: <API key>() : SyncTest(TWO_CLIENT) {} virtual ~<API key>() {} private: <API key>(<API key>); }; <API key>(<API 
key>, Sanity) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); GURL google_url("http: ASSERT_TRUE(AddURL(0, L"Google", google_url) != NULL); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(AddURL(1, L"Yahoo", GURL("http: ASSERT_TRUE(GetClient(1)-><API key>(GetClient(0))); ASSERT_TRUE(<API key>()); const BookmarkNode* new_folder = AddFolder(0, 2, L"New Folder"); Move(0, GetUniqueNodeByURL(0, google_url), new_folder, 0); SetTitle(0, GetBookmarkBarNode(0)->GetChild(0), L"Yahoo!!"); ASSERT_TRUE(AddURL(0, GetBookmarkBarNode(0), 1, L"CNN", GURL("http: ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); ASSERT_TRUE(AddURL(1, L"Facebook", GURL("http: ASSERT_TRUE(GetClient(1)-><API key>(GetClient(0))); ASSERT_TRUE(<API key>()); SortChildren(1, GetBookmarkBarNode(1)); ASSERT_TRUE(GetClient(1)-><API key>(GetClient(0))); ASSERT_TRUE(<API key>()); DisableVerifier(); SetTitle(0, GetUniqueNodeByURL(0, google_url), L"Google++"); SetTitle(1, GetUniqueNodeByURL(1, google_url), L"Google ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(AllModelsMatch()); } <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); GURL initial_url("http: GURL second_url("http: GURL third_url("http: std::wstring title = L"Google"; ASSERT_TRUE(AddURL(0, title, initial_url) != NULL); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); DisableVerifier(); ASSERT_TRUE(SetURL( 0, GetUniqueNodeByURL(0, initial_url), second_url) != NULL); ASSERT_TRUE(SetURL( 1, GetUniqueNodeByURL(1, initial_url), third_url) != NULL); ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(AllModelsMatch()); SetTitle(0, GetBookmarkBarNode(0)->GetChild(0), L"Google1"); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(AllModelsMatch()); } // Test Scribe ID - 370558. 
<API key>(<API key>, SC_AddFirstFolder) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); ASSERT_TRUE(AddFolder(0, kGenericFolderName) != NULL); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 370559. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); ASSERT_TRUE(AddURL(0, kGenericURLTitle, GURL(kGenericURL)) != NULL); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 370489. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); const GURL page_url(kGenericURL); const GURL icon_url("http: const BookmarkNode* bookmark = AddURL(0, kGenericURLTitle, page_url); ASSERT_TRUE(bookmark != NULL); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); SetFavicon(0, bookmark, icon_url, CreateFavicon(SK_ColorWHITE), bookmarks_helper::FROM_UI); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test that the history service logic for not losing the hidpi versions of // favicons as a result of sync does not result in dropping sync updates. // In particular, the synced 16x16 favicon bitmap should overwrite 16x16 // favicon bitmaps on all clients. (Though non-16x16 favicon bitmaps // are unchanged). <API key>(<API key>, SC_SetFaviconHiDPI) { // Set the supported scale factors to include 2x such that CreateFavicon() // creates a favicon with hidpi representations and that methods in the // FaviconService request hidpi favicons. 
std::vector<ui::ScaleFactor> <API key>; <API key>.push_back(ui::SCALE_FACTOR_100P); <API key>.push_back(ui::SCALE_FACTOR_200P); ui::<API key>(<API key>); const GURL page_url(kGenericURL); const GURL icon_url1("http: const GURL icon_url2("http: ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); const BookmarkNode* bookmark0 = AddURL(0, kGenericURLTitle, page_url); ASSERT_TRUE(bookmark0 != NULL); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); SetFavicon(0, bookmark0, icon_url1, CreateFavicon(SK_ColorWHITE), bookmarks_helper::FROM_UI); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); const BookmarkNode* bookmark1 = GetUniqueNodeByURL(1, page_url); SetFavicon(1, bookmark1, icon_url1, CreateFavicon(SK_ColorBLUE), bookmarks_helper::FROM_UI); ASSERT_TRUE(GetClient(1)-><API key>(GetClient(0))); ASSERT_TRUE(<API key>()); SetFavicon(0, bookmark0, icon_url2, CreateFavicon(SK_ColorGREEN), bookmarks_helper::FROM_UI); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 370560. <API key>(<API key>, SC_AddNonHTTPBMs) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); ASSERT_TRUE(AddURL( 0, L"FTP URL", GURL("ftp://user:password@host:1234/path")) != NULL); ASSERT_TRUE(AddURL(0, L"File URL", GURL("file://host/path")) != NULL); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 370561. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); const BookmarkNode* folder = AddFolder(0, kGenericFolderName); ASSERT_TRUE(folder != NULL); ASSERT_TRUE(AddURL( 0, folder, 0, kGenericURLTitle, GURL(kGenericURL)) != NULL); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 370562. 
<API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); for (int i = 0; i < 20; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, i, title, url) != NULL); ASSERT_TRUE(AddURL( 0, GetOtherNode(0), i, title, url) != NULL); } ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 370563. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); for (int i = 0; i < 15; ++i) { if (base::RandDouble() > 0.6) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, i, title, url) != NULL); } else { std::wstring title = IndexedFolderName(i); const BookmarkNode* folder = AddFolder(0, i, title); ASSERT_TRUE(folder != NULL); if (base::RandDouble() > 0.4) { for (int i = 0; i < 20; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE( AddURL(0, folder, i, title, url) != NULL); } } } } for (int i = 0; i < 10; i++) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, GetOtherNode(0), i, title, url) != NULL); } ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 370641. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); GURL url0 = GURL(IndexedURL(0)); GURL url1 = GURL(IndexedURL(1)); ASSERT_TRUE(AddURL(0, kGenericURLTitle, url0) != NULL); ASSERT_TRUE(AddURL(0, kGenericURLTitle, url1) != NULL); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 370639 - Add bookmarks with different name and same URL. 
<API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); std::wstring title0 = IndexedURLTitle(0); std::wstring title1 = IndexedURLTitle(1); ASSERT_TRUE(AddURL(0, title0, GURL(kGenericURL)) != NULL); ASSERT_TRUE(AddURL(0, title1, GURL(kGenericURL)) != NULL); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 371817. <API key>(<API key>, SC_RenameBMName) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); std::wstring title = IndexedURLTitle(1); const BookmarkNode* bookmark = AddURL(0, title, GURL(kGenericURL)); ASSERT_TRUE(bookmark != NULL); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); std::wstring new_title = IndexedURLTitle(2); SetTitle(0, bookmark, new_title); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 371822. <API key>(<API key>, SC_RenameBMURL) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); GURL url = GURL(IndexedURL(1)); const BookmarkNode* bookmark = AddURL(0, kGenericURLTitle, url); ASSERT_TRUE(bookmark != NULL); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); GURL new_url = GURL(IndexedURL(2)); ASSERT_TRUE(SetURL(0, bookmark, new_url) != NULL); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 371818 - Renaming the same bookmark name twice. 
<API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); std::wstring title = IndexedURLTitle(1); const BookmarkNode* bookmark = AddURL(0, title, GURL(kGenericURL)); ASSERT_TRUE(bookmark != NULL); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); std::wstring new_title = IndexedURLTitle(2); SetTitle(0, bookmark, new_title); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); SetTitle(0, bookmark, title); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 371823 - Renaming the same bookmark URL twice. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); GURL url = GURL(IndexedURL(1)); const BookmarkNode* bookmark = AddURL(0, kGenericURLTitle, url); ASSERT_TRUE(bookmark != NULL); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); GURL new_url = GURL(IndexedURL(2)); ASSERT_TRUE(SetURL(0, bookmark, new_url) != NULL); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); ASSERT_TRUE(SetURL(0, bookmark, url) != NULL); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 371824. <API key>(<API key>, SC_RenameBMFolder) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); std::wstring title = IndexedFolderName(1); const BookmarkNode* folder = AddFolder(0, title); ASSERT_TRUE(AddURL( 0, folder, 0, kGenericURLTitle, GURL(kGenericURL)) != NULL); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); std::wstring new_title = IndexedFolderName(2); SetTitle(0, folder, new_title); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 371825. 
<API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); std::wstring title = IndexedFolderName(1); const BookmarkNode* folder = AddFolder(0, title); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); std::wstring new_title = IndexedFolderName(2); SetTitle(0, folder, new_title); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 371826. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); std::wstring title = IndexedFolderName(1); const BookmarkNode* folder = AddFolder(0, title); ASSERT_TRUE(folder != NULL); for (int i = 0; i < 120; ++i) { if (base::RandDouble() > 0.15) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, folder, i, title, url) != NULL); } else { std::wstring title = <API key>(i); ASSERT_TRUE(AddFolder(0, folder, i, title) != NULL); } } ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); std::wstring new_title = IndexedFolderName(2); SetTitle(0, folder, new_title); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 371827. 
<API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); const BookmarkNode* folder = AddFolder(0, kGenericFolderName); ASSERT_TRUE(folder != NULL); for (int i = 1; i < 15; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, i, title, url) != NULL); } std::wstring title = <API key>(1); const BookmarkNode* subfolder = AddFolder(0, folder, 0, title); for (int i = 0; i < 120; ++i) { if (base::RandDouble() > 0.15) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, subfolder, i, title, url) != NULL); } else { std::wstring title = <API key>(i); ASSERT_TRUE(AddFolder(0, subfolder, i, title) != NULL); } } ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); std::wstring new_title = <API key>(2); SetTitle(0, subfolder, new_title); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 371828. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); GURL url = GURL(IndexedURL(1)); std::wstring title = IndexedURLTitle(1); const BookmarkNode* bookmark = AddURL(0, title, url); ASSERT_TRUE(bookmark != NULL); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); GURL new_url = GURL(IndexedURL(2)); std::wstring new_title = IndexedURLTitle(2); bookmark = SetURL(0, bookmark, new_url); ASSERT_TRUE(bookmark != NULL); SetTitle(0, bookmark, new_title); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 371832. 
<API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); ASSERT_TRUE(AddURL( 0, kGenericURLTitle, GURL(kGenericURL)) != NULL); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); Remove(0, GetBookmarkBarNode(0), 0); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 371833. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); for (int i = 0; i < 20; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, i, title, url) != NULL); } ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); Remove(0, GetBookmarkBarNode(0), 0); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 371835. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); const BookmarkNode* folder = AddFolder(0, kGenericFolderName); ASSERT_TRUE(folder != NULL); for (int i = 0; i < 10; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, folder, i, title, url) != NULL); } ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); Remove(0, folder, 0); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 371836. 
<API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); const BookmarkNode* folder = AddFolder(0, kGenericFolderName); ASSERT_TRUE(folder != NULL); for (int i = 0; i < 10; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, folder, i, title, url) != NULL); } ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); Remove(0, folder, folder->child_count() - 1); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 371856. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); const BookmarkNode* folder = AddFolder(0, kGenericFolderName); ASSERT_TRUE(folder != NULL); for (int i = 0; i < 10; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, folder, i, title, url) != NULL); } ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); Remove(0, folder, 4); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 371857. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); const BookmarkNode* folder = AddFolder(0, kGenericFolderName); ASSERT_TRUE(folder != NULL); for (int i = 0; i < 10; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, folder, i, title, url) != NULL); } ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); int child_count = folder->child_count(); for (int i = 0; i < child_count; ++i) { Remove(0, folder, 0); } ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 371858. 
<API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); ASSERT_TRUE(AddFolder(0, kGenericFolderName) != NULL); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); Remove(0, GetBookmarkBarNode(0), 0); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 371869. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); ASSERT_TRUE(AddFolder(0, kGenericFolderName) != NULL); for (int i = 1; i < 15; ++i) { if (base::RandDouble() > 0.6) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, i, title, url) != NULL); } else { std::wstring title = IndexedFolderName(i); ASSERT_TRUE(AddFolder(0, i, title) != NULL); } } ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); Remove(0, GetBookmarkBarNode(0), 0); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 371879. 
<API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); ASSERT_TRUE(AddURL(0, kGenericURLTitle, GURL(kGenericURL)) != NULL); const BookmarkNode* folder = AddFolder(0, 1, kGenericFolderName); ASSERT_TRUE(folder != NULL); for (int i = 2; i < 10; ++i) { if (base::RandDouble() > 0.6) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, i, title, url) != NULL); } else { std::wstring title = IndexedFolderName(i); ASSERT_TRUE(AddFolder(0, i, title) != NULL); } } for (int i = 0; i < 15; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, folder, i, title, url) != NULL); } ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); Remove(0, GetBookmarkBarNode(0), 1); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 371880. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); ASSERT_TRUE(AddURL(0, kGenericURLTitle, GURL(kGenericURL)) != NULL); const BookmarkNode* folder = AddFolder(0, 1, kGenericFolderName); ASSERT_TRUE(folder != NULL); for (int i = 2; i < 10; ++i) { if (base::RandDouble() > 0.6) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, i, title, url) != NULL); } else { std::wstring title = IndexedFolderName(i); ASSERT_TRUE(AddFolder(0, i, title) != NULL); } } for (int i = 0; i < 10; ++i) { if (base::RandDouble() > 0.6) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, folder, i, title, url) != NULL); } else { std::wstring title = <API key>(i); const BookmarkNode* subfolder = AddFolder(0, folder, i, title); ASSERT_TRUE(subfolder != NULL); if (base::RandDouble() > 0.3) { for (int j = 0; j < 10; ++j) { if (base::RandDouble() > 0.6) { std::wstring title = IndexedURLTitle(j); GURL url = 
GURL(IndexedURL(j)); ASSERT_TRUE(AddURL( 0, subfolder, j, title, url) != NULL); } else { std::wstring title = <API key>(j); ASSERT_TRUE(AddFolder( 0, subfolder, j, title) != NULL); } } } } } ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); Remove(0, GetBookmarkBarNode(0), 1); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 371882. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); const BookmarkNode* folder = AddFolder(0, kGenericFolderName); ASSERT_TRUE(folder != NULL); for (int i = 1; i < 11; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, i, title, url) != NULL); } const BookmarkNode* subfolder = AddFolder(0, folder, 0, <API key>); ASSERT_TRUE(subfolder != NULL); for (int i = 0; i < 30; ++i) { if (base::RandDouble() > 0.2) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, subfolder, i, title, url) != NULL); } else { std::wstring title = <API key>(i); ASSERT_TRUE(AddFolder(0, subfolder, i, title) != NULL); } } ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); Remove(0, folder, 0); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 371931. 
<API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); GURL url0 = GURL(IndexedURL(0)); GURL url1 = GURL(IndexedURL(1)); std::wstring title0 = IndexedURLTitle(0); std::wstring title1 = IndexedURLTitle(1); const BookmarkNode* bookmark0 = AddURL(0, 0, title0, url0); const BookmarkNode* bookmark1 = AddURL(0, 1, title1, url1); ASSERT_TRUE(bookmark0 != NULL); ASSERT_TRUE(bookmark1 != NULL); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); Move(0, bookmark0, GetBookmarkBarNode(0), 2); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 371933. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); for (int i = 0; i < 10; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, i, title, url) != NULL); } ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); ReverseChildOrder(0, GetBookmarkBarNode(0)); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 371954. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); ASSERT_TRUE(AddURL(0, kGenericURLTitle, GURL(kGenericURL)) != NULL); const BookmarkNode* folder = AddFolder(0, 1, kGenericFolderName); ASSERT_TRUE(folder != NULL); for (int i = 2; i < 10; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, i, title, url) != NULL); } ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); int <API key> = GetBookmarkBarNode(0)->child_count() - 2; for (int i = 0; i < <API key>; ++i) { Move( 0, GetBookmarkBarNode(0)->GetChild(2), folder, i); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } } // Test Scribe ID - 371957. 
<API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); ASSERT_TRUE(AddURL(0, kGenericURLTitle, GURL(kGenericURL)) != NULL); const BookmarkNode* folder = AddFolder(0, 1, kGenericFolderName); ASSERT_TRUE(folder != NULL); for (int i = 0; i < 10; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, folder, i, title, url) != NULL); } ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); int <API key> = folder->child_count() - 2; for (int i = 0; i < <API key>; ++i) { Move(0, folder->GetChild(0), GetBookmarkBarNode(0), i); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } } // Test Scribe ID - 371961. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); const BookmarkNode* folder = AddFolder(0, kGenericFolderName); ASSERT_TRUE(folder != NULL); for (int i = 0; i < 3; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, folder, i, title, url) != NULL); } const BookmarkNode* subfolder = AddFolder(0, folder, 3, <API key>); ASSERT_TRUE(subfolder != NULL); for (int i = 0; i < 10; ++i) { std::wstring title = IndexedURLTitle(i + 3); GURL url = GURL(IndexedURL(i + 3)); ASSERT_TRUE(AddURL(0, subfolder, i, title, url) != NULL); } ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); for (int i = 0; i < 3; ++i) { GURL url = GURL(IndexedURL(i)); Move(0, GetUniqueNodeByURL(0, url), subfolder, i + 10); } ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 371964. 
<API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); const BookmarkNode* folder = AddFolder(0, kGenericFolderName); ASSERT_TRUE(folder != NULL); for (int i = 0; i < 3; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, folder, i, title, url) != NULL); } const BookmarkNode* subfolder = AddFolder(0, folder, 3, <API key>); ASSERT_TRUE(subfolder != NULL); for (int i = 0; i < 5; ++i) { std::wstring title = IndexedURLTitle(i + 3); GURL url = GURL(IndexedURL(i + 3)); ASSERT_TRUE(AddURL(0, subfolder, i, title, url) != NULL); } ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); for (int i = 0; i < 3; ++i) { GURL url = GURL(IndexedURL(i + 3)); Move(0, GetUniqueNodeByURL(0, url), folder, i + 4); } ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 371967. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); const BookmarkNode* folder = GetBookmarkBarNode(0); const BookmarkNode* folder_L0 = NULL; const BookmarkNode* folder_L10 = NULL; for (int level = 0; level < 15; ++level) { int num_bookmarks = base::RandInt(0, 9); for (int i = 0; i < num_bookmarks; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, folder, i, title, url) != NULL); } std::wstring title = IndexedFolderName(level); folder = AddFolder(0, folder, folder->child_count(), title); ASSERT_TRUE(folder != NULL); if (level == 0) folder_L0 = folder; if (level == 10) folder_L10 = folder; } for (int i = 0; i < 3; ++i) { std::wstring title = IndexedURLTitle(i + 10); GURL url = GURL(IndexedURL(i + 10)); ASSERT_TRUE(AddURL(0, folder_L10, i, title, url) != NULL); } ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); GURL url10 = GURL(IndexedURL(10)); Move(0, GetUniqueNodeByURL( 0, url10), folder_L0, 
folder_L0->child_count()); GURL url11 = GURL(IndexedURL(11)); Move(0, GetUniqueNodeByURL(0, url11), folder_L0, 0); GURL url12 = GURL(IndexedURL(12)); Move(0, GetUniqueNodeByURL(0, url12), folder_L0, 1); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 371968. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); const BookmarkNode* folder = GetBookmarkBarNode(0); const BookmarkNode* folder_L0 = NULL; const BookmarkNode* folder_L10 = NULL; for (int level = 0; level < 15; ++level) { int num_bookmarks = base::RandInt(0, 9); for (int i = 0; i < num_bookmarks; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, folder, i, title, url) != NULL); } std::wstring title = IndexedFolderName(level); folder = AddFolder(0, folder, folder->child_count(), title); ASSERT_TRUE(folder != NULL); if (level == 0) folder_L0 = folder; if (level == 10) folder_L10 = folder; } for (int i = 0; i < 3; ++i) { std::wstring title = IndexedURLTitle(i + 10); GURL url = GURL(IndexedURL(i + 10)); ASSERT_TRUE(AddURL(0, folder_L0, 0, title, url) != NULL); } ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); GURL url10 = GURL(IndexedURL(10)); Move(0, GetUniqueNodeByURL(0, url10), folder_L10, folder_L10->child_count()); GURL url11 = GURL(IndexedURL(11)); Move(0, GetUniqueNodeByURL(0, url11), folder_L10, 0); GURL url12 = GURL(IndexedURL(12)); Move(0, GetUniqueNodeByURL(0, url12), folder_L10, 1); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 371980. 
<API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); const BookmarkNode* folder = GetBookmarkBarNode(0); const BookmarkNode* folder_L5 = NULL; for (int level = 0; level < 15; ++level) { int num_bookmarks = base::RandInt(0, 9); for (int i = 0; i < num_bookmarks; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, folder, i, title, url) != NULL); } std::wstring title = IndexedFolderName(level); folder = AddFolder( 0, folder, folder->child_count(), title); ASSERT_TRUE(folder != NULL); if (level == 5) folder_L5 = folder; } folder = AddFolder( 0, GetBookmarkBarNode(0)->child_count(), kGenericFolderName); ASSERT_TRUE(folder != NULL); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); Move(0, folder, folder_L5, folder_L5->child_count()); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 371997. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); const BookmarkNode* folder = GetBookmarkBarNode(0); const BookmarkNode* folder_L5 = NULL; for (int level = 0; level < 6; ++level) { int num_bookmarks = base::RandInt(0, 9); for (int i = 0; i < num_bookmarks; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, folder, i, title, url) != NULL); } std::wstring title = IndexedFolderName(level); folder = AddFolder(0, folder, folder->child_count(), title); ASSERT_TRUE(folder != NULL); if (level == 5) folder_L5 = folder; } folder = AddFolder( 0, GetBookmarkBarNode(0)->child_count(), kGenericFolderName); ASSERT_TRUE(folder != NULL); for (int i = 0; i < 10; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, folder, i, title, url) != NULL); } ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); Move(0, folder, folder_L5, 
folder_L5->child_count()); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 372006. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); const BookmarkNode* folder = GetBookmarkBarNode(0); const BookmarkNode* folder_L5 = NULL; for (int level = 0; level < 6; ++level) { int num_bookmarks = base::RandInt(0, 9); for (int i = 0; i < num_bookmarks; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, folder, i, title, url) != NULL); } std::wstring title = IndexedFolderName(level); folder = AddFolder( 0, folder, folder->child_count(), title); ASSERT_TRUE(folder != NULL); if (level == 5) folder_L5 = folder; } folder = AddFolder( 0, folder_L5, folder_L5->child_count(), kGenericFolderName); ASSERT_TRUE(folder != NULL); for (int i = 0; i < 10; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, folder, i, title, url) != NULL); } ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); Move(0, folder, GetBookmarkBarNode(0), GetBookmarkBarNode(0)->child_count()); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 372026. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); for (int i = 0; i < 2; ++i) { std::wstring title = IndexedFolderName(i); const BookmarkNode* folder = AddFolder(0, i, title); ASSERT_TRUE(folder != NULL); for (int j = 0; j < 10; ++j) { std::wstring title = IndexedURLTitle(j); GURL url = GURL(IndexedURL(j)); ASSERT_TRUE(AddURL(0, folder, j, title, url) != NULL); } } ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); ReverseChildOrder(0, GetBookmarkBarNode(0)); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 372028. 
<API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); for (int i = 0; i < 10; ++i) { std::wstring title = IndexedFolderName(i); const BookmarkNode* folder = AddFolder(0, i, title); ASSERT_TRUE(folder != NULL); for (int j = 0; j < 10; ++j) { std::wstring title = IndexedURLTitle(1000 * i + j); GURL url = GURL(IndexedURL(j)); ASSERT_TRUE(AddURL(0, folder, j, title, url) != NULL); } } ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); ReverseChildOrder(0, GetBookmarkBarNode(0)); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 373379. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); DisableVerifier(); for (int i = 0; i < 2; ++i) { std::wstring title0 = IndexedURLTitle(2*i); GURL url0 = GURL(IndexedURL(2*i)); ASSERT_TRUE(AddURL(0, title0, url0) != NULL); std::wstring title1 = IndexedURLTitle(2*i+1); GURL url1 = GURL(IndexedURL(2*i+1)); ASSERT_TRUE(AddURL(1, title1, url1) != NULL); } ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(AllModelsMatch()); ASSERT_FALSE(<API key>(0)); } // Test Scribe ID - 373503. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); // Note: When a racy commit is done with identical bookmarks, it is possible DisableVerifier(); for (int i = 0; i < 2; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, title, url) != NULL); ASSERT_TRUE(AddURL(1, title, url) != NULL); } ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(AllModelsMatch()); } // Test Scribe ID - 373506. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(<API key>()); } // Test Scribe ID - 373505. 
<API key>(<API key>, <API key>) { ASSERT_TRUE(SetupClients()) << "SetupClients() failed."; DisableVerifier(); const BookmarkNode* folder0 = AddFolder(0, L"Folder"); ASSERT_TRUE(folder0 != NULL); ASSERT_TRUE(AddURL(0, folder0, 0, L"Bookmark 0", GURL(kGenericURL)) != NULL); ASSERT_TRUE(AddURL(0, folder0, 1, L"Bookmark 1", GURL(kGenericURL)) != NULL); ASSERT_TRUE(AddURL(0, folder0, 2, L"Bookmark 2", GURL(kGenericURL)) != NULL); const BookmarkNode* folder1 = AddFolder(1, L"fOlDeR"); ASSERT_TRUE(folder1 != NULL); ASSERT_TRUE(AddURL(1, folder1, 0, L"bOoKmArK 0", GURL(kGenericURL)) != NULL); ASSERT_TRUE(AddURL(1, folder1, 1, L"BooKMarK 1", GURL(kGenericURL)) != NULL); ASSERT_TRUE(AddURL(1, folder1, 2, L"bOOKMARK 2", GURL(kGenericURL)) != NULL); ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(AllModelsMatch()); ASSERT_FALSE(<API key>(0)); } // Test Scribe ID - 373508. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupClients()) << "SetupClients() failed."; DisableVerifier(); for (int i = 0; i < 3; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, i, title, url) != NULL); ASSERT_TRUE(AddURL(1, i, title, url) != NULL); } for (int i = 3; i < 10; ++i) { std::wstring title0 = IndexedURLTitle(i); GURL url0 = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, i, title0, url0) != NULL); std::wstring title1 = IndexedURLTitle(i+7); GURL url1 = GURL(IndexedURL(i+7)); ASSERT_TRUE(AddURL(1, i, title1, url1) != NULL); } ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(AllModelsMatch()); ASSERT_FALSE(<API key>(0)); } // Test Scribe ID - 386586. 
<API key>(<API key>, <API key>) { ASSERT_TRUE(SetupClients()) << "SetupClients() failed."; DisableVerifier(); for (int i = 0; i < 3; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, i, title, url) != NULL); ASSERT_TRUE(AddURL(1, i, title, url) != NULL); } for (int i = 3; i < 10; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(1, i, title, url) != NULL); } ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(AllModelsMatch()); ASSERT_FALSE(<API key>(0)); } // Test Scribe ID - 386589. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupClients()) << "SetupClients() failed."; DisableVerifier(); for (int i = 0; i < 3; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, i, title, url) != NULL); ASSERT_TRUE(AddURL(1, i, title, url) != NULL); } ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(AllModelsMatch()); ASSERT_FALSE(<API key>(0)); } // Test Scribe ID - 373504 - Merge bookmark folders with different bookmarks. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupClients()) << "SetupClients() failed."; DisableVerifier(); const BookmarkNode* folder0 = AddFolder(0, kGenericFolderName); ASSERT_TRUE(folder0 != NULL); const BookmarkNode* folder1 = AddFolder(1, kGenericFolderName); ASSERT_TRUE(folder1 != NULL); for (int i = 0; i < 2; ++i) { std::wstring title0 = IndexedURLTitle(2*i); GURL url0 = GURL(IndexedURL(2*i)); ASSERT_TRUE(AddURL(0, folder0, i, title0, url0) != NULL); std::wstring title1 = IndexedURLTitle(2*i+1); GURL url1 = GURL(IndexedURL(2*i+1)); ASSERT_TRUE(AddURL(1, folder1, i, title1, url1) != NULL); } ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(AllModelsMatch()); ASSERT_FALSE(<API key>(0)); } // Test Scribe ID - 373509 - Merge moderately complex bookmark models. 
<API key>(<API key>, <API key>) { ASSERT_TRUE(SetupClients()) << "SetupClients() failed."; DisableVerifier(); for (int i = 0; i < 25; ++i) { std::wstring title0 = IndexedURLTitle(i); GURL url0 = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, i, title0, url0) != NULL); std::wstring title1 = IndexedURLTitle(i+50); GURL url1 = GURL(IndexedURL(i+50)); ASSERT_TRUE(AddURL(1, i, title1, url1) != NULL); } for (int i = 25; i < 30; ++i) { std::wstring title0 = IndexedFolderName(i); const BookmarkNode* folder0 = AddFolder(0, i, title0); ASSERT_TRUE(folder0 != NULL); std::wstring title1 = IndexedFolderName(i+50); const BookmarkNode* folder1 = AddFolder(1, i, title1); ASSERT_TRUE(folder1 != NULL); for (int j = 0; j < 5; ++j) { std::wstring title0 = IndexedURLTitle(i+5*j); GURL url0 = GURL(IndexedURL(i+5*j)); ASSERT_TRUE(AddURL(0, folder0, j, title0, url0) != NULL); std::wstring title1 = IndexedURLTitle(i+5*j+50); GURL url1 = GURL(IndexedURL(i+5*j+50)); ASSERT_TRUE(AddURL(1, folder1, j, title1, url1) != NULL); } } for (int i = 100; i < 125; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, title, url) != NULL); ASSERT_TRUE(AddURL(1, title, url) != NULL); } ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(AllModelsMatch()); ASSERT_FALSE(<API key>(0)); } // TCM ID - 3675271 - Merge simple bookmark subset under bookmark folder. 
<API key>(<API key>, <API key>) { ASSERT_TRUE(SetupClients()) << "SetupClients() failed."; DisableVerifier(); for (int i = 0; i < 2; ++i) { const BookmarkNode* folder = AddFolder(i, kGenericFolderName); ASSERT_TRUE(folder != NULL); for (int j = 0; j < 4; ++j) { if (base::RandDouble() < 0.5) { std::wstring title = IndexedURLTitle(j); GURL url = GURL(IndexedURL(j)); ASSERT_TRUE(AddURL(i, folder, j, title, url) != NULL); } else { std::wstring title = IndexedFolderName(j); ASSERT_TRUE(AddFolder(i, folder, j, title) != NULL); } } } ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(AllModelsMatch()); ASSERT_FALSE(<API key>(0)); } // TCM ID - 3727284 - Merge subsets of bookmark under bookmark bar. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupClients()) << "SetupClients() failed."; DisableVerifier(); for (int i = 0; i < 4; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, i, title, url) != NULL); } for (int j = 0; j < 2; ++j) { std::wstring title = IndexedURLTitle(j); GURL url = GURL(IndexedURL(j)); ASSERT_TRUE(AddURL(1, j, title, url) != NULL); } ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(AllModelsMatch()); ASSERT_FALSE(<API key>(0)); ASSERT_FALSE(<API key>(1)); } // TCM ID - 3659294 - Merge simple bookmark hierarchy under bookmark folder. 
<API key>(<API key>, <API key>) { ASSERT_TRUE(SetupClients()) << "SetupClients() failed."; DisableVerifier(); const BookmarkNode* folder0 = AddFolder(0, 0, kGenericFolderName); ASSERT_TRUE(folder0 != NULL); ASSERT_TRUE(AddURL( 0, folder0, 0, IndexedURLTitle(1), GURL(IndexedURL(1))) != NULL); ASSERT_TRUE(AddFolder(0, folder0, 1, <API key>(2)) != NULL); ASSERT_TRUE(AddURL( 0, folder0, 2, IndexedURLTitle(3), GURL(IndexedURL(3))) != NULL); ASSERT_TRUE(AddFolder(0, folder0, 3, <API key>(4)) != NULL); const BookmarkNode* folder1 = AddFolder(1, 0, kGenericFolderName); ASSERT_TRUE(folder1 != NULL); ASSERT_TRUE(AddFolder(1, folder1, 0, <API key>(0)) != NULL); ASSERT_TRUE(AddFolder(1, folder1, 1, <API key>(2)) != NULL); ASSERT_TRUE(AddURL( 1, folder1, 2, IndexedURLTitle(3), GURL(IndexedURL(3))) != NULL); ASSERT_TRUE(AddFolder(1, folder1, 3, <API key>(5)) != NULL); ASSERT_TRUE(AddURL( 1, folder1, 4, IndexedURLTitle(1), GURL(IndexedURL(1))) != NULL); ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(AllModelsMatch()); ASSERT_FALSE(<API key>(0)); } // TCM ID - 3711273 - Merge disjoint sets of bookmark hierarchy under bookmark // folder. 
<API key>(<API key>, <API key>) { ASSERT_TRUE(SetupClients()) << "SetupClients() failed."; DisableVerifier(); const BookmarkNode* folder0 = AddFolder(0, 0, kGenericFolderName); ASSERT_TRUE(folder0 != NULL); ASSERT_TRUE(AddURL( 0, folder0, 0, IndexedURLTitle(1), GURL(IndexedURL(1))) != NULL); ASSERT_TRUE(AddFolder(0, folder0, 1, <API key>(2)) != NULL); ASSERT_TRUE(AddURL( 0, folder0, 2, IndexedURLTitle(3), GURL(IndexedURL(3))) != NULL); ASSERT_TRUE(AddFolder(0, folder0, 3, <API key>(4)) != NULL); const BookmarkNode* folder1 = AddFolder(1, 0, kGenericFolderName); ASSERT_TRUE(folder1 != NULL); ASSERT_TRUE(AddFolder(1, folder1, 0, <API key>(5)) != NULL); ASSERT_TRUE(AddFolder(1, folder1, 1, <API key>(6)) != NULL); ASSERT_TRUE(AddURL( 1, folder1, 2, IndexedURLTitle(7), GURL(IndexedURL(7))) != NULL); ASSERT_TRUE(AddURL( 1, folder1, 3, IndexedURLTitle(8), GURL(IndexedURL(8))) != NULL); ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(AllModelsMatch()); ASSERT_FALSE(<API key>(0)); } // TCM ID - 3639296 - Merge disjoint sets of bookmark hierarchy under bookmark // bar. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupClients()) << "SetupClients() failed."; DisableVerifier(); for (int i = 0; i < 3; ++i) { std::wstring title = IndexedURLTitle(i+1); GURL url = GURL(IndexedURL(i+1)); ASSERT_TRUE(AddURL(0, i, title, url) != NULL); } for (int j = 0; j < 3; ++j) { std::wstring title = IndexedURLTitle(j+4); GURL url = GURL(IndexedURL(j+4)); ASSERT_TRUE(AddURL(0, j, title, url) != NULL); } ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(AllModelsMatch()); ASSERT_FALSE(<API key>(0)); } // TCM ID - 3616282 - Merge sets of duplicate bookmarks under bookmark bar. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupClients()) << "SetupClients() failed."; DisableVerifier(); // Let's add duplicate set of bookmark {1,2,2,3,3,3,4,4,4,4} to client0. 
int node_index = 0; for (int i = 1; i < 5 ; ++i) { for (int j = 0; j < i; ++j) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, node_index, title, url) != NULL); ++node_index; } } // Let's add a set of bookmarks {1,2,3,4} to client1. for (int i = 0; i < 4; ++i) { std::wstring title = IndexedURLTitle(i+1); GURL url = GURL(IndexedURL(i+1)); ASSERT_TRUE(AddURL(1, i, title, url) != NULL); } ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(AllModelsMatch()); for (int i = 1; i < 5 ; ++i) { ASSERT_TRUE(<API key>(1, IndexedURLTitle(i)) == i); } } // TCM ID - 6593872. <API key>(<API key>, DisableBookmarks) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); ASSERT_TRUE(GetClient(1)-><API key>(syncer::BOOKMARKS)); ASSERT_TRUE(AddFolder(1, kGenericFolderName) != NULL); ASSERT_TRUE(AwaitQuiescence()); ASSERT_FALSE(AllModelsMatch()); ASSERT_TRUE(GetClient(1)-><API key>(syncer::BOOKMARKS)); ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(AllModelsMatch()); } // TCM ID - 7343544. <API key>(<API key>, DisableSync) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); ASSERT_TRUE(GetClient(1)-><API key>()); ASSERT_TRUE(AddFolder(0, IndexedFolderName(0)) != NULL); ASSERT_TRUE(<API key>(GetClient(0)->service())); ASSERT_FALSE(AllModelsMatch()); ASSERT_TRUE(AddFolder(1, IndexedFolderName(1)) != NULL); ASSERT_FALSE(AllModelsMatch()); ASSERT_TRUE(GetClient(1)-><API key>()); ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(AllModelsMatch()); } // TCM ID - 3662298 - Test adding duplicate folder - Both with different BMs // underneath. 
<API key>(<API key>, MC_DuplicateFolders) { ASSERT_TRUE(SetupClients()) << "SetupClients() failed."; DisableVerifier(); const BookmarkNode* folder0 = AddFolder(0, kGenericFolderName); ASSERT_TRUE(folder0 != NULL); const BookmarkNode* folder1 = AddFolder(1, kGenericFolderName); ASSERT_TRUE(folder1 != NULL); for (int i = 0; i < 5; ++i) { std::wstring title0 = IndexedURLTitle(i); GURL url0 = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, folder0, i, title0, url0) != NULL); std::wstring title1 = IndexedURLTitle(i+5); GURL url1 = GURL(IndexedURL(i+5)); ASSERT_TRUE(AddURL(1, folder1, i, title1, url1) != NULL); } ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(AllModelsMatch()); ASSERT_FALSE(<API key>(0)); } <API key>(<API key>, MC_DeleteBookmark) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(GetClient(1)-><API key>(syncer::BOOKMARKS)); const GURL bar_url("http://example.com/bar"); const GURL other_url("http://example.com/other"); ASSERT_TRUE(AddURL(0, GetBookmarkBarNode(0), 0, L"bar", bar_url) != NULL); ASSERT_TRUE(AddURL(0, GetOtherNode(0), 0, L"other", other_url) != NULL); ASSERT_TRUE(<API key>(GetClient(0)->service())); ASSERT_TRUE(HasNodeWithURL(0, bar_url)); ASSERT_TRUE(HasNodeWithURL(0, other_url)); ASSERT_FALSE(HasNodeWithURL(1, bar_url)); ASSERT_FALSE(HasNodeWithURL(1, other_url)); Remove(0, GetBookmarkBarNode(0), 0); ASSERT_TRUE(<API key>(GetClient(0)->service())); ASSERT_FALSE(HasNodeWithURL(0, bar_url)); ASSERT_TRUE(HasNodeWithURL(0, other_url)); ASSERT_TRUE(GetClient(1)-><API key>(syncer::BOOKMARKS)); ASSERT_TRUE(AwaitQuiescence()); ASSERT_FALSE(HasNodeWithURL(0, bar_url)); ASSERT_TRUE(HasNodeWithURL(0, other_url)); ASSERT_FALSE(HasNodeWithURL(1, bar_url)); ASSERT_TRUE(HasNodeWithURL(1, other_url)); } // TCM ID - 3719307 - Test a scenario of updating the name of the same bookmark // from two clients at the same time. 
<API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; const BookmarkNode* folder0 = AddFolder(0, kGenericFolderName); ASSERT_TRUE(folder0 != NULL); for (int i = 0; i < 3; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, folder0, i, title, url) != NULL); } ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); ASSERT_FALSE(<API key>(0)); DisableVerifier(); GURL url(IndexedURL(0)); SetTitle(0, GetUniqueNodeByURL(0, url), L"Title++"); SetTitle(1, GetUniqueNodeByURL(1, url), L"Title ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(AllModelsMatch()); ASSERT_FALSE(<API key>(0)); } // TCM ID - 3672299 - Test a scenario of updating the URL of the same bookmark // from two clients at the same time. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; const BookmarkNode* folder0 = AddFolder(0, kGenericFolderName); ASSERT_TRUE(folder0 != NULL); for (int i = 0; i < 3; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, folder0, i, title, url) != NULL); } ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); ASSERT_FALSE(<API key>(0)); DisableVerifier(); GURL url(IndexedURL(0)); ASSERT_TRUE(SetURL( 0, GetUniqueNodeByURL(0, url), GURL("http: ASSERT_TRUE(SetURL( 1, GetUniqueNodeByURL(1, url), GURL("http: ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(AllModelsMatch()); ASSERT_FALSE(<API key>(0)); } // TCM ID - 3699290 - Test a scenario of updating the BM Folder name from two // clients at the same time. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupClients()) << "SetupClients() failed."; DisableVerifier(); const BookmarkNode* folderA[2]; const BookmarkNode* folderB[2]; const BookmarkNode* folderC[2]; // Create empty folder A on both clients. 
folderA[0] = AddFolder(0, IndexedFolderName(0)); ASSERT_TRUE(folderA[0] != NULL); folderA[1] = AddFolder(1, IndexedFolderName(0)); ASSERT_TRUE(folderA[1] != NULL); // Create folder B with bookmarks on both clients. folderB[0] = AddFolder(0, IndexedFolderName(1)); ASSERT_TRUE(folderB[0] != NULL); folderB[1] = AddFolder(1, IndexedFolderName(1)); ASSERT_TRUE(folderB[1] != NULL); for (int i = 0; i < 3; ++i) { std::wstring title = IndexedURLTitle(i); GURL url = GURL(IndexedURL(i)); ASSERT_TRUE(AddURL(0, folderB[0], i, title, url) != NULL); } // Create folder C with bookmarks and subfolders on both clients. folderC[0] = AddFolder(0, IndexedFolderName(2)); ASSERT_TRUE(folderC[0] != NULL); folderC[1] = AddFolder(1, IndexedFolderName(2)); ASSERT_TRUE(folderC[1] != NULL); for (int i = 0; i < 3; ++i) { std::wstring folder_name = <API key>(i); const BookmarkNode* subfolder = AddFolder(0, folderC[0], i, folder_name); ASSERT_TRUE(subfolder != NULL); for (int j = 0; j < 3; ++j) { std::wstring title = IndexedURLTitle(j); GURL url = GURL(IndexedURL(j)); ASSERT_TRUE(AddURL(0, subfolder, j, title, url) != NULL); } } ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(AllModelsMatch()); ASSERT_FALSE(<API key>(0)); // Simultaneously rename folder A on both clients. SetTitle(0, folderA[0], L"Folder A++"); SetTitle(1, folderA[1], L"Folder A ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(AllModelsMatch()); ASSERT_FALSE(<API key>(0)); // Simultaneously rename folder B on both clients. SetTitle(0, folderB[0], L"Folder B++"); SetTitle(1, folderB[1], L"Folder B ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(AllModelsMatch()); ASSERT_FALSE(<API key>(0)); // Simultaneously rename folder C on both clients. 
SetTitle(0, folderC[0], L"Folder C++"); SetTitle(1, folderC[1], L"Folder C ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(AllModelsMatch()); ASSERT_FALSE(<API key>(0)); } <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); ASSERT_TRUE(EnableEncryption(0)); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>(0)); ASSERT_TRUE(<API key>(1)); ASSERT_TRUE(<API key>()); } <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); ASSERT_TRUE(EnableEncryption(0)); ASSERT_TRUE(AddURL(0, IndexedURLTitle(0), GURL(IndexedURL(0))) != NULL); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>(0)); ASSERT_TRUE(<API key>(1)); ASSERT_TRUE(<API key>()); } <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); ASSERT_TRUE(EnableEncryption(0)); ASSERT_TRUE(EnableEncryption(1)); ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(<API key>(0)); ASSERT_TRUE(<API key>(1)); ASSERT_TRUE(<API key>()); } <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); ASSERT_TRUE(EnableEncryption(0)); ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(<API key>(0)); ASSERT_TRUE(<API key>(1)); ASSERT_TRUE(AddURL(0, IndexedURLTitle(0), GURL(IndexedURL(0))) != NULL); ASSERT_TRUE(AddURL(0, IndexedURLTitle(1), GURL(IndexedURL(1))) != NULL); ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(<API key>()); ASSERT_TRUE(<API key>(0)); ASSERT_TRUE(<API key>(1)); } <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); ASSERT_TRUE(AddURL(0, IndexedURLTitle(0), GURL(IndexedURL(0))) != NULL); ASSERT_TRUE(EnableEncryption(0)); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>(0)); ASSERT_TRUE(<API key>(1)); ASSERT_TRUE(<API key>()); ASSERT_TRUE(AddURL(0, IndexedURLTitle(1), GURL(IndexedURL(1))) != 
NULL); ASSERT_TRUE(AddFolder(0, IndexedFolderName(0)) != NULL); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); } <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); // Add initial bookmarks. ASSERT_TRUE(AddURL(0, 0, IndexedURLTitle(0), GURL(IndexedURL(0))) != NULL); ASSERT_TRUE(AddURL(0, 1, IndexedURLTitle(1), GURL(IndexedURL(1))) != NULL); ASSERT_TRUE(AddURL(0, 2, IndexedURLTitle(2), GURL(IndexedURL(2))) != NULL); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>()); // Set a passphrase and enable encryption on Client 0. Client 1 will not // understand the bookmark updates. <API key>(0, kValidPassphrase, ProfileSyncService::EXPLICIT); ASSERT_TRUE(<API key>(GetClient(0)->service())); ASSERT_TRUE(EnableEncryption(0)); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(<API key>(0)); ASSERT_TRUE(<API key>(1)); ASSERT_TRUE(GetClient(1)->service()-><API key>()); // Client 1 adds bookmarks between the first two and between the second two. ASSERT_TRUE(AddURL(0, 1, IndexedURLTitle(3), GURL(IndexedURL(3))) != NULL); ASSERT_TRUE(AddURL(0, 3, IndexedURLTitle(4), GURL(IndexedURL(4))) != NULL); EXPECT_FALSE(<API key>()); EXPECT_FALSE(AllModelsMatch()); // Set the passphrase. Everything should resolve. ASSERT_TRUE(<API key>(GetClient(1)->service())); ASSERT_TRUE(<API key>(1, kValidPassphrase)); ASSERT_TRUE(<API key>(GetClient(1)->service())); ASSERT_TRUE(AwaitQuiescence()); EXPECT_TRUE(AllModelsMatch()); ASSERT_EQ(0, GetClient(1)-><API key>().<API key>()); // Ensure everything is syncing normally by appending a final bookmark. 
ASSERT_TRUE(AddURL(1, 5, IndexedURLTitle(5), GURL(IndexedURL(5))) != NULL); ASSERT_TRUE(GetClient(1)-><API key>(GetClient(0))); EXPECT_TRUE(AllModelsMatch()); ASSERT_EQ(0, GetClient(1)-><API key>().<API key>()); } // Deliberately racy rearranging of bookmarks to test that our conflict resolver // code results in a consistent view across machines (no matter what the final // order is). <API key>(<API key>, RacyPositionChanges) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); // Add initial bookmarks. size_t num_bookmarks = 5; for (size_t i = 0; i < num_bookmarks; ++i) { ASSERT_TRUE(AddURL(0, i, IndexedURLTitle(i), GURL(IndexedURL(i))) != NULL); } // Once we make diverging changes the verifer is helpless. ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(<API key>()); DisableVerifier(); // Make changes on client 0. for (size_t i = 0; i < num_bookmarks; ++i) { const BookmarkNode* node = GetUniqueNodeByURL(0, GURL(IndexedURL(i))); int rand_pos = base::RandInt(0, num_bookmarks-1); DVLOG(1) << "Moving client 0's bookmark " << i << " to position " << rand_pos; Move(0, node, node->parent(), rand_pos); } // Make changes on client 1. for (size_t i = 0; i < num_bookmarks; ++i) { const BookmarkNode* node = GetUniqueNodeByURL(1, GURL(IndexedURL(i))); int rand_pos = base::RandInt(0, num_bookmarks-1); DVLOG(1) << "Moving client 1's bookmark " << i << " to position " << rand_pos; Move(1, node, node->parent(), rand_pos); } ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(AllModelsMatch()); // Now make changes to client 1 first. for (size_t i = 0; i < num_bookmarks; ++i) { const BookmarkNode* node = GetUniqueNodeByURL(1, GURL(IndexedURL(i))); int rand_pos = base::RandInt(0, num_bookmarks-1); DVLOG(1) << "Moving client 1's bookmark " << i << " to position " << rand_pos; Move(1, node, node->parent(), rand_pos); } // Make changes on client 0. 
for (size_t i = 0; i < num_bookmarks; ++i) { const BookmarkNode* node = GetUniqueNodeByURL(0, GURL(IndexedURL(i))); int rand_pos = base::RandInt(0, num_bookmarks-1); DVLOG(1) << "Moving client 0's bookmark " << i << " to position " << rand_pos; Move(0, node, node->parent(), rand_pos); } ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(AllModelsMatch()); } // Trigger the server side creation of Synced Bookmarks. Ensure both clients // remain syncing afterwards. Add bookmarks to the synced bookmarks folder // and ensure both clients receive the boomkmark. <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); <API key>(); // Add a bookmark on Client 0 and ensure it syncs over. This will also trigger // both clients downloading the new Synced Bookmarks folder. ASSERT_TRUE(AddURL(0, L"Google", GURL("http: ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(AllModelsMatch()); // Now add a bookmark within the Synced Bookmarks folder and ensure it syncs // over. 
const BookmarkNode* synced_bookmarks = <API key>(0); ASSERT_TRUE(synced_bookmarks); ASSERT_TRUE(AddURL(0, synced_bookmarks, 0, L"Google2", GURL("http: ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); ASSERT_TRUE(AllModelsMatch()); } <API key>(<API key>, <API key>) { ASSERT_TRUE(SetupSync()) << "SetupSync() failed."; ASSERT_TRUE(<API key>()); // Starting state: // other_node // -> folder0 // -> tier1_a // bookmark_bar // -> empty_folder // -> folder1 const BookmarkNode* folder0 = AddFolder(0, GetOtherNode(0), 0, L"folder0"); const BookmarkNode* tier1_a = AddFolder(0, folder0, 0, L"tier1_a"); ASSERT_TRUE(AddURL(0, folder0, 1, L"News", GURL("http://news.google.com"))); ASSERT_TRUE(AddURL(0, folder0, 2, L"Yahoo", GURL("http: ASSERT_TRUE(AddURL(0, tier1_a, 0, L"Gmail", GURL("http://mail.google.com"))); ASSERT_TRUE(AddURL(0, tier1_a, 1, L"Google", GURL("http: ASSERT_TRUE( AddURL(0, GetOtherNode(0), 1, L"CNN", GURL("http: ASSERT_TRUE(AddFolder(0, GetBookmarkBarNode(0), 0, L"empty_folder")); const BookmarkNode* folder1 = AddFolder(0, GetBookmarkBarNode(0), 1, L"folder1"); ASSERT_TRUE(AddURL(0, folder1, 0, L"Yahoo", GURL("http: ASSERT_TRUE( AddURL(0, GetBookmarkBarNode(0), 2, L"Gmail", GURL("http://gmail.com"))); ASSERT_TRUE(AwaitQuiescence()); ASSERT_TRUE(AllModelsMatch()); // Remove all RemoveAll(0); ASSERT_TRUE(GetClient(0)-><API key>(GetClient(1))); // Verify other node has no children now. EXPECT_EQ(0, GetOtherNode(0)->child_count()); EXPECT_EQ(0, GetBookmarkBarNode(0)->child_count()); ASSERT_TRUE(AllModelsMatch()); }
#endregion using System; using System.Collections.Generic; using ClearCanvas.Common.Utilities; namespace SearchComponent { public interface ICodeSequenceItem { string CodeValue { get; } string CodeMeaning { get; } string <API key> { get; } } [Cloneable] public class AnatomicEntity : ICodeSequenceItem, IEquatable<AnatomicEntity> { private static List<AnatomicEntity> <API key>; public AnatomicEntity() {} protected AnatomicEntity(string codeValue, string codeMeaning, string <API key>) { _codeValue = codeValue; _codeMeaning = codeMeaning; <API key> = <API key>; } private string _codeValue; public string CodeValue { get { return _codeValue; } set { _codeValue = value; } } private string _codeMeaning; public string CodeMeaning { get { return _codeMeaning; } set { _codeMeaning = value; } } private string <API key>; public string <API key> { get { return <API key>; } set { <API key> = value; } } public static List<AnatomicEntity> <API key>() { if (<API key> == null) <API key> = <API key>(); return <API key>; } private static List<AnatomicEntity> <API key>() { List<AnatomicEntity> daList = new List<AnatomicEntity>(); daList.Add(new AnatomicEntity("77", "occipital lobe", "VASARI")); daList.Add(new AnatomicEntity("73", "frontal lobe", "VASARI")); daList.Add(new AnatomicEntity("76", "parietal lobe", "VASARI")); daList.Add(new AnatomicEntity("79", "brain stem", "VASARI")); daList.Add(new AnatomicEntity("78", "cerebellum", "VASARI")); daList.Add(new AnatomicEntity("75", "insula", "VASARI")); daList.Add(new AnatomicEntity("74", "temporal lobe", "VASARI")); return daList; } public override bool Equals(object obj) { if (obj is AnatomicEntity) return Equals((AnatomicEntity)obj); return base.Equals(obj); } #region IEquatable<AnatomicEntity> Members public bool Equals(AnatomicEntity other) { if (other == null) return false; return other.CodeValue == CodeValue; } #endregion public override string ToString() { return string.Format("[{0}], [{1}], [{2}]", this.CodeValue ?? 
"", this.CodeMeaning ?? "", this.<API key> ?? ""); } } [Cloneable] public class <API key> : ICodeSequenceItem, IEquatable<<API key>> { private static List<AnatomicEntity> <API key>; public <API key>() { } protected <API key>(string codeValue, string codeMeaning, string <API key>) { _codeValue = codeValue; _codeMeaning = codeMeaning; <API key> = <API key>; } private string _codeValue; public string CodeValue { get { return _codeValue; } set { _codeValue = value; } } private string _codeMeaning; public string CodeMeaning { get { return _codeMeaning; } set { _codeMeaning = value; } } private string <API key>; public string <API key> { get { return <API key>; } set { <API key> = value; } } public override bool Equals(object obj) { if (obj is <API key>) return Equals((<API key>)obj); return base.Equals(obj); } #region IEquatable<AnatomicEntity> Members public bool Equals(<API key> other) { if (other == null) return false; return other.CodeValue == CodeValue; } #endregion public override string ToString() { return string.Format("[{0}], [{1}], [{2}]", this.CodeValue ?? "", this.CodeMeaning ?? "", this.<API key> ?? 
""); } } [Cloneable] public class ImagingObservation : ICodeSequenceItem, IEquatable<ImagingObservation> { private static List<ImagingObservation> <API key>; public ImagingObservation() { } protected ImagingObservation(string codeValue, string codeMeaning, string <API key>) { _codeValue = codeValue; _codeMeaning = codeMeaning; <API key> = <API key>; } private string _codeValue; public string CodeValue { get { return _codeValue; } set { _codeValue = value; } } private string _codeMeaning; public string CodeMeaning { get { return _codeMeaning; } set { _codeMeaning = value; } } private string <API key>; public string <API key> { get { return <API key>; } set { <API key> = value; } } public override bool Equals(object obj) { if (obj is ImagingObservation) return Equals((ImagingObservation)obj); return base.Equals(obj); } #region IEquatable<ImagingObservation> Members public bool Equals(ImagingObservation other) { if (other == null) return false; return other.CodeValue == CodeValue; } #endregion public override string ToString() { return string.Format("[{0}], [{1}], [{2}]", this.CodeValue ?? "", this.CodeMeaning ?? "", this.<API key> ?? 
""); } public static List<ImagingObservation> <API key>() { if (<API key> == null) <API key> = <API key>(); return <API key>; } private static List<ImagingObservation> <API key>() { List<ImagingObservation> daList = new List<ImagingObservation>(); daList.Add(new ImagingObservation("94", "ependymal invasion absent", "VASARI")); daList.Add(new ImagingObservation("93", "ependymal invasion present", "VASARI")); daList.Add(new ImagingObservation("71", "smooth non-enhancing margin", "VASARI")); daList.Add(new ImagingObservation("70", "non-enhancing margin not applicable", "VASARI")); daList.Add(new ImagingObservation("72", "irregular non-enhancing margin", "VASARI")); daList.Add(new ImagingObservation("92", "infiltrative T1/FLAIR ratio", "VASARI")); daList.Add(new ImagingObservation("90", "expansive T1/FLAIR ratio", "VASARI")); daList.Add(new ImagingObservation("95", "T1/FLAIR ratio not applicable", "VASARI")); daList.Add(new ImagingObservation("91", "mixed T1/FLAIR ratio", "VASARI")); daList.Add(new ImagingObservation("59", "greater than 95% enhancement", "VASARI")); daList.Add(new ImagingObservation("57", "34-67% enhancement", "VASARI")); daList.Add(new ImagingObservation("56", "6-33% enhancement", "VASARI")); daList.Add(new ImagingObservation("58", "68-95% enhancement", "VASARI")); daList.Add(new ImagingObservation("55", "less than 5% enhancement", "VASARI")); daList.Add(new ImagingObservation("54", "no enhancement", "VASARI")); daList.Add(new ImagingObservation("53", "enhancement indeterminate", "VASARI")); daList.Add(new ImagingObservation("60", "100% enhancement", "VASARI")); daList.Add(new ImagingObservation("52", "proportion of enhancement not applicable", "VASARI")); daList.Add(new ImagingObservation("64", "center epicenter", "VASARI")); daList.Add(new ImagingObservation("66", "left epicenter", "VASARI")); daList.Add(new ImagingObservation("65", "right epicenter", "VASARI")); daList.Add(new ImagingObservation("61", "ncet tumor does cross midline", "VASARI")); 
daList.Add(new ImagingObservation("63", "ncet tumor crosses midline not applicable", "VASARI")); daList.Add(new ImagingObservation("62", "ncet tumor does not cross midline", "VASARI")); daList.Add(new ImagingObservation("3", "enhancing tumor crosses midline not applicable", "VASARI")); daList.Add(new ImagingObservation("1", "enhancing tumor does cross midline", "VASARI")); daList.Add(new ImagingObservation("2", "enhancing tumor does not cross midline", "VASARI")); daList.Add(new ImagingObservation("34", "100% edema", "VASARI")); daList.Add(new ImagingObservation("32", "68-95% edema", "VASARI")); daList.Add(new ImagingObservation("28", "no edema", "VASARI")); daList.Add(new ImagingObservation("33", "greater than 95% edema", "VASARI")); daList.Add(new ImagingObservation("31", "34-67% edema", "VASARI")); daList.Add(new ImagingObservation("29", "less than 5% edema", "VASARI")); daList.Add(new ImagingObservation("27", "edema indeterminate", "VASARI")); daList.Add(new ImagingObservation("30", "6-33% edema", "VASARI")); daList.Add(new ImagingObservation("49", "68-95% necrosis", "VASARI")); daList.Add(new ImagingObservation("48", "34-67% necrosis", "VASARI")); daList.Add(new ImagingObservation("43", "proportion of necrosis not applicable", "VASARI")); daList.Add(new ImagingObservation("46", "less than 5% necrosis", "VASARI")); daList.Add(new ImagingObservation("45", "no necrosis", "VASARI")); daList.Add(new ImagingObservation("44", "necrosis indeterminate", "VASARI")); daList.Add(new ImagingObservation("51", "100% necrosis", "VASARI")); daList.Add(new ImagingObservation("50", "greater than 95% necrosis", "VASARI")); daList.Add(new ImagingObservation("47", "6-33% necrosis", "VASARI")); daList.Add(new ImagingObservation("19", "no ncet", "VASARI")); daList.Add(new ImagingObservation("23", "68-95% ncet", "VASARI")); daList.Add(new ImagingObservation("24", "greater than 95% ncet", "VASARI")); daList.Add(new ImagingObservation("17", "proportion of ncet not applicable", 
"VASARI")); daList.Add(new ImagingObservation("20", "less than 5% ncet", "VASARI")); daList.Add(new ImagingObservation("25", "100% ncet", "VASARI")); daList.Add(new ImagingObservation("18", "ncet indeterminate", "VASARI")); daList.Add(new ImagingObservation("22", "34-67% ncet", "VASARI")); daList.Add(new ImagingObservation("21", "6-33% ncet", "VASARI")); daList.Add(new ImagingObservation("10", "poorly-defined enhancing margin", "VASARI")); daList.Add(new ImagingObservation("12", "enhancing margin definition not applicable", "VASARI")); daList.Add(new ImagingObservation("11", "well-defined enhancing margin", "VASARI")); daList.Add(new ImagingObservation("80", "no enhancing margin", "VASARI")); daList.Add(new ImagingObservation("81", "thin enhancing margin", "VASARI")); daList.Add(new ImagingObservation("82", "thick enhancing margin", "VASARI")); daList.Add(new ImagingObservation("15", "multicentric", "VASARI")); daList.Add(new ImagingObservation("16", "multifocal", "VASARI")); daList.Add(new ImagingObservation("14", "gliomatosis", "VASARI")); daList.Add(new ImagingObservation("13", "morphology region not applicable", "VASARI")); daList.Add(new ImagingObservation("69", "marked/avid enhancement", "VASARI")); daList.Add(new ImagingObservation("68", "mild/minimal enhancement", "VASARI")); daList.Add(new ImagingObservation("67", "no enhancement", "VASARI")); daList.Add(new ImagingObservation("6", "deep wm invasion present", "VASARI")); daList.Add(new ImagingObservation("7", "deep wm invasion absent", "VASARI")); daList.Add(new ImagingObservation("84", "tumor", "VASARI")); daList.Add(new ImagingObservation("87", "speech receptive center involvement", "VASARI")); daList.Add(new ImagingObservation("89", "vision center involvement", "VASARI")); daList.Add(new ImagingObservation("88", "motor center involvement", "VASARI")); daList.Add(new ImagingObservation("86", "speech motor center involvement", "VASARI")); daList.Add(new ImagingObservation("85", "no eloquent brain 
involvement", "VASARI")); return daList; } } [Cloneable] public class <API key> : ICodeSequenceItem, IEquatable<<API key>> { private static List<<API key>> <API key>; public <API key>() { } protected <API key>(string codeValue, string codeMeaning, string <API key>) { _codeValue = codeValue; _codeMeaning = codeMeaning; <API key> = <API key>; } private string _codeValue; public string CodeValue { get { return _codeValue; } set { _codeValue = value; } } private string _codeMeaning; public string CodeMeaning { get { return _codeMeaning; } set { _codeMeaning = value; } } private string <API key>; public string <API key> { get { return <API key>; } set { <API key> = value; } } private string _comment; public string Comment { get { return _comment; } set { _comment = value; } } public override bool Equals(object obj) { if (obj is ImagingObservation) return Equals((ImagingObservation)obj); return base.Equals(obj); } #region IEquatable<ImagingObservation> Members public bool Equals(<API key> other) { if (other == null) return false; return other.CodeValue == CodeValue; } #endregion public override string ToString() { return string.Format("[{0}], [{1}], [{2}]", this.CodeValue ?? "", this.CodeMeaning ?? "", this.<API key> ?? 
""); } } [Cloneable] public class TrialDataProvenance { public TrialDataProvenance() {} private string project; public string Project { get { return project; } set { project = value; } } } [Cloneable] public class <API key> { public <API key>() { } private string protocolId; public string ProtocolId { get { return protocolId; } set { protocolId = value; } } private string protocolName; public string ProtocolName { get { return protocolName; } set { protocolName = value; } } } [Cloneable] public class ClinicalTrialSite { public ClinicalTrialSite() {} private string siteId; public string SiteId { get { return siteId; } set { siteId = value; } } private string siteName; public string SiteName { get { return siteName; } set { siteName = value; } } } [Cloneable] public class Patient { public Patient() { } private DateTime? patientBirthDate; public DateTime? PatientBirthDate { get { return patientBirthDate; } set { patientBirthDate = value; } } private string patientId; public string PatientId { get { return patientId; } set { patientId = value; } } private string patientsName; public string PatientsName { get { return patientsName; } set { patientsName = value; } } private string patientsSex; public string PatientsSex { get { return patientsSex; } set { patientsSex = value; } } } [Cloneable] public class User { public User() { } private int caGridId; public int CaGridId { get { return caGridId; } set { caGridId = value; } } private string name; public string Name { get { return name; } set { name = value; } } private string loginName; public string LoginName { get { return loginName; } set { loginName = value; } } private string roleInTrial; public string RoleInTrial { get { return roleInTrial; } set { roleInTrial = value; } } private int <API key>; public int <API key> { get { return <API key>; } set { <API key> = value; } } } [Cloneable] public class Study { public Study() { } private string studyInstanceUid; public string StudyInstanceUid { get { return 
studyInstanceUid; } set { studyInstanceUid = value; } } } [Cloneable] public class Series { public Series() { } private string seriesInstanceUid; public string SeriesInstanceUid { get { return seriesInstanceUid; } set { seriesInstanceUid = value; } } private string modality; public string Modality { get { return modality; } set { modality = value; } } } [Cloneable] public class Image { public Image() { } private string sopInstanceUid; public string SopInstanceUid { get { return sopInstanceUid; } set { sopInstanceUid = value; } } private string sliceThickness; public string SliceThickness { get { return sliceThickness; } set { sliceThickness = value; } } } }
#!/usr/bin/perl -w # bin2png.pl # includes use File::Basename; # main if ( @ARGV == 0 ) { print STDERR "usage: bin2png <file.bin>+\n"; } else { while ( @ARGV ) { my $file = shift @ARGV; process_file($file); } } # process_file sub process_file { my $file = shift; my $dest = $file . ".png"; open INPUT, $file || die "Could not open $file: $!"; open OUTPUT, ">$dest" || die "Could not create $dest: $!"; while ( <INPUT> ) { chomp; s/^([0-9]+).*$/$1/g; print OUTPUT chr($_); } close OUTPUT; close INPUT; }
<?php // autoload_real.php @generated by Composer class <API key> { private static $loader; public static function loadClassLoader($class) { if ('Composer\Autoload\ClassLoader' === $class) { require __DIR__ . '/ClassLoader.php'; } } public static function getLoader() { if (null !== self::$loader) { return self::$loader; } <API key>(array('<API key>', 'loadClassLoader'), true, true); self::$loader = $loader = new \Composer\Autoload\ClassLoader(); <API key>(array('<API key>', 'loadClassLoader')); $map = require __DIR__ . '/autoload_namespaces.php'; foreach ($map as $namespace => $path) { $loader->set($namespace, $path); } $map = require __DIR__ . '/autoload_psr4.php'; foreach ($map as $namespace => $path) { $loader->setPsr4($namespace, $path); } $classMap = require __DIR__ . '/autoload_classmap.php'; if ($classMap) { $loader->addClassMap($classMap); } $loader->register(true); $includeFiles = require __DIR__ . '/autoload_files.php'; foreach ($includeFiles as $fileIdentifier => $file) { <API key>($fileIdentifier, $file); } return $loader; } } function <API key>($fileIdentifier, $file) { if (empty($GLOBALS['<API key>'][$fileIdentifier])) { require $file; $GLOBALS['<API key>'][$fileIdentifier] = true; } }
<?php include_once 'genericdumper.php'; /** * mssql dumper * **/ class MSSQLDumper extends GenericDumper { /** * Override the construction method of parent **/ function __construct($objDB, $strDBName) { parent::__construct($objDB, $strDBName); $this->AddEscapeTables(array('dtproperties')); } /** * Dump names of all the tables in the given database, except those to be escaped * * @return array One-dimensional array, holding table names dumped from the database **/ function DumpTableNames() { $arrTableName = array(); $strOrderCond = " order by obj.name"; $strEscapeCond = ""; $strIncludeCond = ""; if (is_array($this->arrEscapeTable) && count($this->arrEscapeTable)>0) { $strEscapeCond = " and obj.name not in ('".implode("','", $this->arrEscapeTable)."')"; } if (is_array($this->arrIncludeTable) && count($this->arrIncludeTable)>0) { $strIncludeCond = " and obj.name in ('".implode("','", $this->arrIncludeTable)."')"; } $sql = "select obj.name from $this->strDBName..sysobjects obj where obj.xtype='U'"; $sql .= $strEscapeCond; $sql .= $strIncludeCond; $sql .= $strOrderCond; $rs = $this->objDB->query($sql); if ($rs) { while (($arrRow = $this->objDB->read($rs)) !== false) { $arrTableName[] = strtolower($arrRow['name']); } $this->objDB->free($rs); } sort($arrTableName); return $arrTableName; } function DumpColumnInfo($strTableName) { $arrName = array(); $arrType = array(); $arrLength = array(); $arrPrecision = array(); $arrNullable = array(); $arrDefault = array(); $arrField = array(); $strOrderCond = " order by col.colorder"; //$strOrderCond = " order by col.name"; $sql = "select col.name as fld_name,type.name as fld_type,col.prec as fld_length,isnull(col.scale,0) as fld_precision,col.isnullable as fld_nullable,isnull(dft.text,'') as fld_default"; $sql .= " from $this->strDBName..syscolumns col"; $sql .= " inner join $this->strDBName..sysobjects obj"; $sql .= " on col.id=obj.id and obj.xtype='U' and obj.name<>'dtproperties'"; $sql .= " left join $this->strDBName..systypes 
type"; $sql .= " on col.xtype=type.xusertype"; $sql .= " left join $this->strDBName..syscomments dft"; $sql .= " on col.cdefault=dft.id"; $sql .= " where obj.name='$strTableName'"; $sql .= $strOrderCond; //echo htmlspecialchars($sql); $rs = $this->objDB->query($sql); if ($rs) { while (($arrRow = $this->objDB->read($rs)) !== false) { $arrName[] = $arrRow['fld_name']; $arrType[] = $arrRow['fld_type']; $arrLength[] = $arrRow['fld_length']; $arrPrecision[] = $arrRow['fld_precision']; $arrNullable[] = $arrRow['fld_nullable']; $strDefault = trim($arrRow['fld_default']); while (strstr($strDefault, '(')) { $strDefault = substr($strDefault, 1, strlen($strDefault)-2); } $arrDefault[] = $strDefault; } $this->objDB->free($rs); } $arrField = array($arrName, $arrType, $arrLength, $arrPrecision, $arrNullable, $arrDefault); return $arrField; } /** * Dump primary key constraint * * @param string Table name * @return object Instance of TableConstraint **/ function <API key>($strTableName) { $strName = ''; $strType = 'PRIMARY'; $arrCol = array(); $sql = "select idx.name as constraint_name,col.name as column_name"; $sql .= " from $this->strDBName..sysobjects obj,$this->strDBName..sysindexes idx,$this->strDBName..sysindexkeys keys,$this->strDBName..syscolumns col"; $sql .= " where obj.parent_obj=object_id('$strTableName')"; $sql .= " and obj.name=idx.name"; $sql .= " and idx.indid=keys.indid"; $sql .= " and idx.id=keys.id"; $sql .= " and keys.id=col.id"; $sql .= " and keys.colid=col.colid"; $sql .= " and obj.xtype='PK'"; $rs = $this->objDB->query($sql); if ($rs) { while (($arrRow = $this->objDB->read($rs)) !== false) { $strName = $arrRow['constraint_name']; $arrCol[] = $arrRow['column_name']; } $this->objDB->free($rs); $objCnst = new TableConstraint($strName, $strType, $arrCol); } if (0 < count($arrCol)) { $objCnst = new TableConstraint($strName, $strType, $arrCol); return $objCnst; } else { return false; } } /** * Dump unique constraints * * @param string Table name * @return array 
Instances of TableConstraint **/ function <API key>($strTableName) { $arrCnst = array(); $sql = "select idx.name as constraint_name,col.name as column_name"; $sql .= " from $this->strDBName..sysobjects obj,$this->strDBName..sysindexes idx,$this->strDBName..sysindexkeys keys,$this->strDBName..syscolumns col"; $sql .= " where obj.parent_obj=object_id('$strTableName')"; $sql .= " and obj.name=idx.name"; $sql .= " and idx.indid=keys.indid"; $sql .= " and idx.id=keys.id"; $sql .= " and keys.id=col.id"; $sql .= " and keys.colid=col.colid"; $sql .= " and obj.xtype='UQ'"; $rs = $this->objDB->query($sql); if ($rs) { while (($arrRow = $this->objDB->read($rs)) !== false) { $arrCol = array(); $strName = $arrRow['constraint_name']; $strColumn = $arrRow['column_name']; $arrCol[] = $strColumn; if (0 == count($arrCnst)) { $objCnst = new TableConstraint($strName, 'UNIQUE', $arrCol); } else { $objCnst = array_pop($arrCnst); if ($objCnst->GetName() == $strName) { $objCnst->AppendColumn($strColumn); } else { $objCnst = new TableConstraint($strName, 'UNIQUE', $arrCol); } } $arrCnst[] = $objCnst; } $this->objDB->free($rs); } return $arrCnst; } /** * Return the batch seperator of Microsoft SQL Server * * @return string **/ function GetBatchSeperator() { return 'go'; } /** * Generate create table statement * * @param object Table structure * @return string Create table statement **/ public static function <API key>($objStruct) { $strTableBody = self::<API key>($objStruct); $strSQL = "if not exists (select 1 from sysobjects where id=object_id('".$objStruct->GetTableName()."') and type='U')\n"; $strSQL .= "begin\n"; $strSQL .= str_repeat(' ', 4)."create table ".$objStruct->GetTableName()." 
(\n"; $strSQL .= $strTableBody; $strSQL .= str_repeat(' ', 4).")\n"; $strSQL .= "end;"; return $strSQL; } /** * Generate body of create table statement **/ private static function <API key>($objStruct) { $strBody = ""; $arrLine = array(); // Append column names one by one $intLen = 0; $objStruct->Reset(); while ($arrCol = $objStruct->GetNextCol()) { $strLine = str_repeat(' ', 8).$arrCol['name']; $arrLine[] = $strLine; if (strlen($strLine) > $intLen) { $intLen = strlen($strLine); } } // Append column types one by one $intLen += 4; $intNewLen = 0; $intIdx = 0; $objStruct->Reset(); while ($arrCol = $objStruct->GetNextCol()) { $strPrecision = 0 < $arrCol['precision'] ? ",".$arrCol['precision'] : ''; $strLength = 0 < $arrCol['length'] ? "(".$arrCol['length'].$strPrecision.")" : ''; $strLine = $arrLine[$intIdx]; $strLine .= str_repeat(' ', $intLen-strlen($strLine)).$arrCol['type'].$strLength; $arrLine[$intIdx] = $strLine; if (strlen($strLine) > $intNewLen) { $intNewLen = strlen($strLine); } $intIdx++; } // Append column nullable information and default values $intLen = $intNewLen+4; $intIdx = 0; $objStruct->Reset(); while ($arrCol = $objStruct->GetNextCol()) { $strLine = $arrLine[$intIdx]; if (false === $arrCol['nullable']) { $strLine .= str_repeat(' ', $intLen-strlen($strLine)).'not null'; if (false !== $arrCol['default']) { if (self::IsCharCol($arrCol['type'])) { $strLine .= " default ".$arrCol['default'].""; } else if (self::IsNumCol($arrCol['type'])) { $strLine .= " default ".(is_numeric($arrCol['default']) ? 
$arrCol['default'] : 0); }else { $strLine .= " default ".$arrCol['default']; } } } if ($objStruct-><API key>($arrCol['name'])) { $strLine .= " unique"; } $strLine .= ",\n"; $arrLine[$intIdx] = $strLine; if (strlen($strLine) > $intNewLen) { $intNewLen = strlen($strLine); } $intIdx++; } $strBody = implode('', $arrLine); $strBody = substr($strBody, 0, strlen($strBody)-2)."\n"; $boolHasCnst = false; // Add primary constraint if ($objStruct-><API key>()) { $objCnst = $objStruct-><API key>(); $strBody .= str_repeat(' ', 8)."constraint ".$objCnst->GetName()." primary key (".implode(',', $objCnst->GetColumns())."),\n"; $boolHasCnst = true; } // Add clustered unique constraints $arrCluster = $objStruct-><API key>(); if (count($arrCluster) > 0) { foreach ($arrCluster as $objCnst) { $strBody .= str_repeat(' ', 8)."constraint ".$objCnst->GetName(); $strBody .= " unique (".implode(',', $objCnst->GetColumns())."),\n"; } $boolHasCnst = true; } if ($boolHasCnst) { $strBody = substr($strBody, 0, strlen($strBody)-2)."\n"; } return $strBody; } /** * Generate partition of select statement for date and time fields * * @param string Table alias * @param string Column name * @param string Column alias * @return string **/ function GetSelectDateStr($strTableAlias=false, $strColumnName=false, $strColumnAlias=false) { if (false === $strTableAlias || false === $strColumnName) { return "convert(varchar(32), getdate(), 120)"; } $strColumnAlias = false === $strColumnAlias ? 
$strColumnName : $strColumnAlias; return "convert(varchar(32), $strTableAlias.$strColumnName, 120) as $strColumnAlias"; } /** * Generate partition of insert statement for date and time fields * * @param string Column value * @return string **/ function GetInsertDateStr($strVal) { return "convert(datetime, '$strVal', 120)"; } /** * Add limitation statement to the given select statement * * @param string Select statement * @param int Limitation * @return string Select statement **/ function HookLimit($strSQL, $intLimit) { return preg_replace('/^select([ ]top[ ][0-9]+)?/', "select top $intLimit", $strSQL); } function hookColHandler(&$sql, $arr) { return false; } } ?>
--TEST Test parse and dump of use declarations --FILE <?php require __DIR__ . '/../util.php'; $code = <<<'PHP' <?php use Foo\Bar as Baz; use function foo\bar as baz; use Foo\{Bar, function bar}; use function foo\{bar, baz}; PHP; echo ast_dump(ast\parse_code($code)); ?> --EXPECT AST_STMT_LIST 0: AST_USE flags: T_CLASS (361) 0: AST_USE_ELEM flags: 0 0: "Foo\Bar" 1: "Baz" 1: AST_USE flags: T_FUNCTION (346) 0: AST_USE_ELEM flags: 0 0: "foo\bar" 1: "baz" 2: AST_GROUP_USE flags: 0 0: "Foo" 1: AST_USE flags: 0 0: AST_USE_ELEM flags: T_CLASS (361) 0: "Bar" 1: null 1: AST_USE_ELEM flags: T_FUNCTION (346) 0: "bar" 1: null 3: AST_GROUP_USE flags: T_FUNCTION (346) 0: "foo" 1: AST_USE flags: 0 0: AST_USE_ELEM flags: 0 0: "bar" 1: null 1: AST_USE_ELEM flags: 0 0: "baz" 1: null
<?php use yii\helpers\Html; use yii\grid\GridView; use app\models\Estilo; use app\models\<API key>; /* @var $this yii\web\View */ /* @var $searchModel app\models\ObjetoEstiloSearch */ /* @var $dataProvider yii\data\ActiveDataProvider */ $this->title = 'Relações entre objetos e estilos'; ?> <div class="objeto-estilo-index"> <h1><?= Html::encode($this->title) ?></h1> <?php // echo $this->render('_search', ['model' => $searchModel]); ?> <p> <?= Html::a('Nova relação objeto-estilo', ['create'], ['class' => 'btn btn-success']) ?> <a href="?r=site/index" class="btn btn-default">Voltar</a> </p> <?= GridView::widget([ 'dataProvider' => $dataProvider, 'filterModel' => $searchModel, 'columns' => [ ['class' => 'yii\grid\SerialColumn'], [ 'attribute' => 'id_objeto', 'value'=>function ($model, $key, $index, $widget) { if ( $model->id_objeto === null ) return ""; else { $obj = <API key>::find()->where(['id_objeto' => $model->id_objeto])->one(); return $obj->titulo_objeto; } }, ], [ 'attribute' => 'id_estilo', 'value'=>function ($model, $key, $index, $widget) { if ( $model->id_estilo === null ) return ""; else { $estilo = Estilo::find()->where(['id_estilo' => $model->id_estilo])->one(); return $estilo->nome_estilo; } }, ], [ 'class' => 'yii\grid\ActionColumn', 'header'=>'Ações', 'headerOptions' => ['style' => 'text-align:center; color:#337AB7'], 'contentOptions' => ['style' => 'text-align:center; vertical-align:middle'], 'template' => '{update} {delete}', ], ], ]); ?> </div>
#ifndef <API key> #define <API key> #include "chrome/browser/ui/webui/chromeos/login/base_screen_handler.h" #include "chromeos/dbus/os_install/os_install_client.h" #include "third_party/abseil-cpp/absl/types/optional.h" namespace ash { class OsInstallScreen; } namespace login { class <API key>; } // namespace login namespace chromeos { class JSCallsContainer; // Interface for dependency injection between OsInstallScreen and its // WebUI representation. class OsInstallScreenView { public: constexpr static StaticOobeScreenId kScreenId{"os-install"}; virtual ~OsInstallScreenView() = default; // Shows the contents of the screen. virtual void Show() = 0; // Binds |screen| to the view. virtual void Bind(ash::OsInstallScreen* screen) = 0; // Unbinds the screen from the view. virtual void Unbind() = 0; virtual void ShowStep(const char* step) = 0; virtual void SetStatus(OsInstallClient::Status status) = 0; virtual void SetServiceLogs(const std::string& service_log) = 0; virtual void <API key>(base::TimeDelta time_left) = 0; }; class <API key> : public BaseScreenHandler, public OsInstallScreenView { public: using TView = OsInstallScreenView; explicit <API key>(JSCallsContainer* js_calls_container); <API key>(const <API key>&) = delete; <API key>& operator=(const <API key>&) = delete; ~<API key>() override; private: // BaseScreenHandler: void <API key>( ::login::<API key>* builder) override; void Initialize() override; // OsInstallScreenView: void Show() override; void Bind(ash::OsInstallScreen* screen) override; void Unbind() override; void ShowStep(const char* step) override; void SetStatus(OsInstallClient::Status status) override; void SetServiceLogs(const std::string& service_log) override; void <API key>(base::TimeDelta time_left) override; ash::OsInstallScreen* screen_ = nullptr; base::WeakPtrFactory<<API key>> weak_factory_{this}; }; } // namespace chromeos // source migration is finished. 
namespace ash { using ::chromeos::<API key>; using ::chromeos::OsInstallScreenView; } // namespace ash #endif // <API key>
<HTML> <BODY> <A HREF="index.html"> <IMG SRC="gebop.bmp"> </A> <BR> <H2> Reversi Information </H2> Reversi was invented in England in 1888. In 1975 the game was reintroduced under the name Othello, with a minor change to the rules. <H3> <A HREF="rules_reversi.html"> Reversi Rules </A> </H3> <H3> <A HREF="index.html"> Index </A> </H3> </BODY> </HTML>
# - Config file for the Armadillo package
# Generated for one specific build tree; the absolute paths below are
# machine-specific (a local home directory) and will only resolve on the
# machine that produced this file.
# It defines the following variables
# <API key> - include directories for Armadillo
# <API key> - library directories for Armadillo (normally not used!)
# ARMADILLO_LIBRARIES - libraries to link against
# Tell the user project where to find our headers and libraries.
# NOTE(review): first set() lists include dirs (source + build tree),
# second lists the library dir — confirm against the redacted variable names.
set(<API key> "/home/liberty/Downloads/armadillo-6.700.4;/home/liberty/Downloads/armadillo-6.700.4/build")
set(<API key> "/home/liberty/Downloads/armadillo-6.700.4/build")
# Our library dependencies (contains definitions for IMPORTED targets)
include("/home/liberty/Downloads/armadillo-6.700.4/build/<API key>.cmake")
# Link against the imported "armadillo" target defined by the include above.
set(ARMADILLO_LIBRARIES armadillo)
/* $Id: minissdp.c,v 1.48 2013/02/07 12:22:25 nanard Exp $ */ #include <stdio.h> #include <stdlib.h> #include <string.h> #include <unistd.h> #include <sys/socket.h> #include <sys/un.h> #include <netinet/in.h> #include <arpa/inet.h> #include <errno.h> #include <syslog.h> #include "config.h" #include "upnpdescstrings.h" #include "miniupnpdpath.h" #include "upnphttp.h" #include "upnpglobalvars.h" #include "minissdp.h" #include "upnputils.h" #include "getroute.h" #include "codelength.h" /* SSDP ip/port */ #define SSDP_PORT (1900) #define SSDP_MCAST_ADDR ("239.255.255.250") #define LL_SSDP_MCAST_ADDR "FF02::C" #define SL_SSDP_MCAST_ADDR "FF05::C" /* <API key>() * param s socket * param ifaddr ip v4 address */ static int <API key>(int s, in_addr_t ifaddr) { struct ip_mreq imr; /* Ip multicast membership */ /* setting up imr structure */ imr.imr_multiaddr.s_addr = inet_addr(SSDP_MCAST_ADDR); /*imr.imr_interface.s_addr = htonl(INADDR_ANY);*/ imr.imr_interface.s_addr = ifaddr; /*inet_addr(ifaddr);*/ if (setsockopt(s, IPPROTO_IP, IP_ADD_MEMBERSHIP, (void *)&imr, sizeof(struct ip_mreq)) < 0) { syslog(LOG_ERR, "setsockopt(udp, IP_ADD_MEMBERSHIP): %m"); return -1; } return 0; } /* <API key>() * param s socket (IPv6) * To be improved to target specific network interfaces */ #ifdef ENABLE_IPV6 static int <API key>(int s) { struct ipv6_mreq mr; /*unsigned int ifindex;*/ memset(&mr, 0, sizeof(mr)); inet_pton(AF_INET6, LL_SSDP_MCAST_ADDR, &mr.ipv6mr_multiaddr); /*mr.ipv6mr_interface = ifindex;*/ mr.ipv6mr_interface = 0; /* 0 : all interfaces */ #ifndef IPV6_ADD_MEMBERSHIP #define IPV6_ADD_MEMBERSHIP IPV6_JOIN_GROUP #endif if(setsockopt(s, IPPROTO_IPV6, IPV6_ADD_MEMBERSHIP, &mr, sizeof(struct ipv6_mreq)) < 0) { syslog(LOG_ERR, "setsockopt(udp, IPV6_ADD_MEMBERSHIP): %m"); return -1; } inet_pton(AF_INET6, SL_SSDP_MCAST_ADDR, &mr.ipv6mr_multiaddr); if(setsockopt(s, IPPROTO_IPV6, IPV6_ADD_MEMBERSHIP, &mr, sizeof(struct ipv6_mreq)) < 0) { syslog(LOG_ERR, "setsockopt(udp, 
IPV6_ADD_MEMBERSHIP): %m"); return -1; } return 0; } #endif /* Open and configure the socket listening for * SSDP udp packets sent on 239.255.255.250 port 1900 * SSDP v6 udp packets sent on FF02::C, or FF05::C, port 1900 */ int <API key>(int ipv6) { int s; struct sockaddr_storage sockname; socklen_t sockname_len; struct lan_addr_s * lan_addr; int j = 1; if( (s = socket(ipv6 ? PF_INET6 : PF_INET, SOCK_DGRAM, 0)) < 0) { syslog(LOG_ERR, "%s: socket(udp): %m", "<API key>"); return -1; } memset(&sockname, 0, sizeof(struct sockaddr_storage)); if(ipv6) { struct sockaddr_in6 * saddr = (struct sockaddr_in6 *)&sockname; saddr->sin6_family = AF_INET6; saddr->sin6_port = htons(SSDP_PORT); saddr->sin6_addr = in6addr_any; sockname_len = sizeof(struct sockaddr_in6); } else { struct sockaddr_in * saddr = (struct sockaddr_in *)&sockname; saddr->sin_family = AF_INET; saddr->sin_port = htons(SSDP_PORT); /* NOTE : it seems it doesnt work when binding on the specific address */ /*saddr->sin_addr.s_addr = inet_addr(UPNP_MCAST_ADDR);*/ saddr->sin_addr.s_addr = htonl(INADDR_ANY); /*saddr->sin_addr.s_addr = inet_addr(ifaddr);*/ sockname_len = sizeof(struct sockaddr_in); } if(setsockopt(s, SOL_SOCKET, SO_REUSEADDR, &j, sizeof(j)) < 0) { syslog(LOG_WARNING, "setsockopt(udp, SO_REUSEADDR): %m"); } if(!set_non_blocking(s)) { syslog(LOG_WARNING, "%s: set_non_blocking(): %m", "<API key>"); } if(bind(s, (struct sockaddr *)&sockname, sockname_len) < 0) { syslog(LOG_ERR, "%s: bind(udp%s): %m", "<API key>", ipv6 ? "6" : ""); close(s); return -1; } #ifdef ENABLE_IPV6 if(ipv6) { <API key>(s); } else #endif { for(lan_addr = lan_addrs.lh_first; lan_addr != NULL; lan_addr = lan_addr->list.le_next) { if(<API key>(s, lan_addr->addr.s_addr) < 0) { syslog(LOG_WARNING, "Failed to add multicast membership for interface %s", lan_addr->str ? 
lan_addr->str : "NULL"); } } } return s; } /* open the UDP socket used to send SSDP notifications to * the multicast group reserved for them */ static int <API key>(in_addr_t addr) { int s; unsigned char loopchar = 0; int bcast = 1; unsigned char ttl = 2; /* UDA v1.1 says : The TTL for the IP packet SHOULD default to 2 and SHOULD be configurable. */ /* TODO: Make TTL be configurable */ struct in_addr mc_if; struct sockaddr_in sockname; if( (s = socket(PF_INET, SOCK_DGRAM, 0)) < 0) { syslog(LOG_ERR, "socket(udp_notify): %m"); return -1; } mc_if.s_addr = addr; /*inet_addr(addr);*/ if(setsockopt(s, IPPROTO_IP, IP_MULTICAST_LOOP, (char *)&loopchar, sizeof(loopchar)) < 0) { syslog(LOG_ERR, "setsockopt(udp_notify, IP_MULTICAST_LOOP): %m"); close(s); return -1; } if(setsockopt(s, IPPROTO_IP, IP_MULTICAST_IF, (char *)&mc_if, sizeof(mc_if)) < 0) { syslog(LOG_ERR, "setsockopt(udp_notify, IP_MULTICAST_IF): %m"); close(s); return -1; } if(setsockopt(s, IPPROTO_IP, IP_MULTICAST_TTL, &ttl, sizeof(ttl)) < 0) { syslog(LOG_WARNING, "setsockopt(udp_notify, IP_MULTICAST_TTL,): %m"); } if(setsockopt(s, SOL_SOCKET, SO_BROADCAST, &bcast, sizeof(bcast)) < 0) { syslog(LOG_ERR, "setsockopt(udp_notify, SO_BROADCAST): %m"); close(s); return -1; } memset(&sockname, 0, sizeof(struct sockaddr_in)); sockname.sin_family = AF_INET; sockname.sin_addr.s_addr = addr; /*inet_addr(addr);*/ if (bind(s, (struct sockaddr *)&sockname, sizeof(struct sockaddr_in)) < 0) { syslog(LOG_ERR, "bind(udp_notify): %m"); close(s); return -1; } return s; } #ifdef ENABLE_IPV6 /* open the UDP socket used to send SSDP notifications to * the multicast group reserved for them. 
IPv6 */ static int <API key>(unsigned int if_index) { int s; unsigned int loop = 0; s = socket(PF_INET6, SOCK_DGRAM, 0); if(s < 0) { syslog(LOG_ERR, "socket(udp_notify IPv6): %m"); return -1; } if(setsockopt(s, IPPROTO_IPV6, IPV6_MULTICAST_IF, &if_index, sizeof(if_index)) < 0) { syslog(LOG_ERR, "setsockopt(udp_notify IPv6, IPV6_MULTICAST_IF, %u): %m", if_index); close(s); return -1; } if(setsockopt(s, IPPROTO_IPV6, IPV6_MULTICAST_LOOP, &loop, sizeof(loop)) < 0) { syslog(LOG_ERR, "setsockopt(udp_notify, IPV6_MULTICAST_LOOP): %m"); close(s); return -1; } return s; } #endif int <API key>(int * sockets) /*<API key>(int * sockets, struct lan_addr_s * lan_addr, int n_lan_addr)*/ { int i; struct lan_addr_s * lan_addr; for(i=0, lan_addr = lan_addrs.lh_first; lan_addr != NULL; lan_addr = lan_addr->list.le_next) { sockets[i] = <API key>(lan_addr->addr.s_addr); if(sockets[i] < 0) goto error; i++; #ifdef ENABLE_IPV6 sockets[i] = <API key>(lan_addr->index); if(sockets[i] < 0) goto error; i++; #endif } return 0; error: while(--i >= 0) { close(sockets[i]); sockets[i] = -1; } return -1; } /* not really an SSDP "announce" as it is the response * to a SSDP "M-SEARCH" */ static void SendSSDPAnnounce2(int s, const struct sockaddr * addr, const char * st, int st_len, const char * suffix, const char * host, unsigned short port) { int l, n; char buf[512]; char addr_str[64]; socklen_t addrlen; int st_is_uuid; #ifdef ENABLE_HTTP_DATE char http_date[64]; time_t t; struct tm tm; time(&t); gmtime_r(&t, &tm); strftime(http_date, sizeof(http_date), "%a, %d %b %Y %H:%M:%S GMT", &tm); #endif st_is_uuid = (st_len == (int)strlen(uuidvalue)) && (memcmp(uuidvalue, st, st_len) == 0); l = snprintf(buf, sizeof(buf), "HTTP/1.1 200 OK\r\n" "CACHE-CONTROL: max-age=120\r\n" #ifdef ENABLE_HTTP_DATE "DATE: %s\r\n" #endif "ST: %.*s%s\r\n" "USN: %s%s%.*s%s\r\n" "EXT:\r\n" "SERVER: " <API key> "\r\n" "LOCATION: http://%s:%u" ROOTDESC_PATH "\r\n" "OPT: \"http://schemas.upnp.org/upnp/1/0/\"; ns=01\r\n" /* UDA v1.1 
*/ "01-NLS: %u\r\n" /* same as BOOTID. UDA v1.1 */ "BOOTID.UPNP.ORG: %u\r\n" /* UDA v1.1 */ "CONFIGID.UPNP.ORG: %u\r\n" /* UDA v1.1 */ "\r\n", #ifdef ENABLE_HTTP_DATE http_date, #endif st_len, st, suffix, uuidvalue, st_is_uuid ? "" : "::", st_is_uuid ? 0 : st_len, st, suffix, host, (unsigned int)port, upnp_bootid, upnp_bootid, upnp_configid); addrlen = (addr->sa_family == AF_INET6) ? sizeof(struct sockaddr_in6) : sizeof(struct sockaddr_in); n = sendto(s, buf, l, 0, addr, addrlen); sockaddr_to_string(addr, addr_str, sizeof(addr_str)); syslog(LOG_INFO, "SSDP Announce %d bytes to %s ST: %.*s",n, addr_str, l, buf); if(n < 0) { /* XXX handle EINTR, EAGAIN, EWOULDBLOCK */ syslog(LOG_ERR, "sendto(udp): %m"); } } #ifndef IGD_V2 #define IGD_VER 1 #define WANIPC_VER 1 #else #define IGD_VER 2 #define WANIPC_VER 2 #endif static struct { const char * s; const int version; } const known_service_types[] = { {"upnp:rootdevice", 0}, {"urn:schemas-upnp-org:device:<API key>:", IGD_VER}, {"urn:schemas-upnp-org:device:WANConnectionDevice:", 1}, {"urn:schemas-upnp-org:device:WANDevice:", 1}, {"urn:schemas-upnp-org:service:<API key>:", 1}, {"urn:schemas-upnp-org:service:WANIPConnection:", WANIPC_VER}, {"urn:schemas-upnp-org:service:WANPPPConnection:", 1}, #ifdef ENABLE_L3F_SERVICE {"urn:schemas-upnp-org:service:Layer3Forwarding:", 1}, #endif #ifdef ENABLE_6FC_SERVICE {"url:schemas-upnp-org:service:<API key>:", 1}, #endif {0, 0} }; static void SendSSDPNotify(int s, const struct sockaddr * dest, const char * host, unsigned short port, const char * nt, const char * suffix, const char * usn1, const char * usn2, const char * usn3, unsigned int lifetime, int ipv6) { char bufr[512]; int n, l; l = snprintf(bufr, sizeof(bufr), "NOTIFY * HTTP/1.1\r\n" "HOST: %s:%d\r\n" "CACHE-CONTROL: max-age=%u\r\n" "LOCATION: http://%s:%d" ROOTDESC_PATH"\r\n" "SERVER: " <API key> "\r\n" "NT: %s%s\r\n" "USN: %s%s%s%s\r\n" "NTS: ssdp:alive\r\n" "OPT: \"http://schemas.upnp.org/upnp/1/0/\"; ns=01\r\n" /* UDA v1.1 */ 
"01-NLS: %u\r\n" /* same as BOOTID field. UDA v1.1 */ "BOOTID.UPNP.ORG: %u\r\n" /* UDA v1.1 */ "CONFIGID.UPNP.ORG: %u\r\n" /* UDA v1.1 */ "\r\n", ipv6 ? "[" LL_SSDP_MCAST_ADDR "]" : SSDP_MCAST_ADDR, SSDP_PORT, lifetime, host, port, nt, suffix, usn1, usn2, usn3, suffix, /* USN: */ upnp_bootid, upnp_bootid, upnp_configid ); if(l<0) { syslog(LOG_ERR, "SendSSDPNotifies() snprintf error"); return; } else if((unsigned int)l >= sizeof(bufr)) { syslog(LOG_WARNING, "SendSSDPNotifies(): truncated output"); l = sizeof(bufr); } n = sendto(s, bufr, l, 0, dest, #ifdef ENABLE_IPV6 ipv6 ? sizeof(struct sockaddr_in6) : sizeof(struct sockaddr_in) #else sizeof(struct sockaddr_in) #endif ); if(n < 0) { /* XXX handle EINTR, EAGAIN, EWOULDBLOCK */ syslog(LOG_ERR, "sendto(udp_notify=%d, %s): %m", s, host ? host : "NULL"); } else if(n != l) { syslog(LOG_NOTICE, "sendto() sent %d out of %d bytes", n, l); } } static void SendSSDPNotifies(int s, const char * host, unsigned short port, unsigned int lifetime, int ipv6) { #ifdef ENABLE_IPV6 struct sockaddr_storage sockname; #else struct sockaddr_in sockname; #endif int i=0; char ver_str[4]; memset(&sockname, 0, sizeof(sockname)); #ifdef ENABLE_IPV6 if(ipv6) { struct sockaddr_in6 * p = (struct sockaddr_in6 *)&sockname; p->sin6_family = AF_INET6; p->sin6_port = htons(SSDP_PORT); inet_pton(AF_INET6, LL_SSDP_MCAST_ADDR, &(p->sin6_addr)); } else #endif { struct sockaddr_in *p = (struct sockaddr_in *)&sockname; p->sin_family = AF_INET; p->sin_port = htons(SSDP_PORT); p->sin_addr.s_addr = inet_addr(SSDP_MCAST_ADDR); } while(known_service_types[i].s) { if(i==0) ver_str[0] = '\0'; else snprintf(ver_str, sizeof(ver_str), "%d", known_service_types[i].version); SendSSDPNotify(s, (struct sockaddr *)&sockname, host, port, known_service_types[i].s, ver_str, uuidvalue, "::", known_service_types[i].s, /* ver_str, USN: */ lifetime, ipv6); if(i==0) /* rootdevice */ SendSSDPNotify(s, (struct sockaddr *)&sockname, host, port, uuidvalue, "", uuidvalue, "", "", /* 
ver_str, USN: */ lifetime, ipv6); i++; } } void SendSSDPNotifies2(int * sockets, unsigned short port, unsigned int lifetime) { int i; struct lan_addr_s * lan_addr; for(i=0, lan_addr = lan_addrs.lh_first; lan_addr != NULL; lan_addr = lan_addr->list.le_next) { SendSSDPNotifies(sockets[i], lan_addr->str, port, lifetime, 0); i++; #ifdef ENABLE_IPV6 SendSSDPNotifies(sockets[i], <API key>, port, lifetime, 1); i++; #endif } } /* ProcessSSDPRequest() * process SSDP M-SEARCH requests and responds to them */ void ProcessSSDPRequest(int s, unsigned short port) { int n; char bufr[1500]; socklen_t len_r; #ifdef ENABLE_IPV6 struct sockaddr_storage sendername; len_r = sizeof(struct sockaddr_storage); #else struct sockaddr_in sendername; len_r = sizeof(struct sockaddr_in); #endif n = recvfrom(s, bufr, sizeof(bufr), 0, (struct sockaddr *)&sendername, &len_r); if(n < 0) { /* EAGAIN, EWOULDBLOCK, EINTR : silently ignore (try again next time) * other errors : log to LOG_ERR */ if(errno != EAGAIN && errno != EWOULDBLOCK && errno != EINTR) { syslog(LOG_ERR, "recvfrom(udp): %m"); } return; } ProcessSSDPData(s, bufr, n, (struct sockaddr *)&sendername, port); } void ProcessSSDPData(int s, const char *bufr, int n, const struct sockaddr * sender, unsigned short port) { int i, l; struct lan_addr_s * lan_addr = NULL; const char * st = NULL; int st_len = 0; int st_ver = 0; char sender_str[64]; char ver_str[4]; const char * announced_host = NULL; #ifdef UPNP_STRICT #ifdef ENABLE_IPV6 char announced_host_buf[64]; #endif int mx_value = -1; #endif /* get the string representation of the sender address */ sockaddr_to_string(sender, sender_str, sizeof(sender_str)); lan_addr = get_lan_for_peer(sender); if(lan_addr == NULL) { syslog(LOG_WARNING, "SSDP packet sender %s not from a LAN, ignoring", sender_str); return; } if(memcmp(bufr, "NOTIFY", 6) == 0) { /* ignore NOTIFY packets. 
We could log the sender and device type */ return; } else if(memcmp(bufr, "M-SEARCH", 8) == 0) { i = 0; while(i < n) { while((i < n - 1) && (bufr[i] != '\r' || bufr[i+1] != '\n')) i++; i += 2; if((i < n - 3) && (strncasecmp(bufr+i, "st:", 3) == 0)) { st = bufr+i+3; st_len = 0; while((*st == ' ' || *st == '\t') && (st < bufr + n)) st++; while(st[st_len]!='\r' && st[st_len]!='\n' && (st + st_len < bufr + n)) st_len++; l = st_len; while(l > 0 && st[l-1] != ':') l st_ver = atoi(st+l); syslog(LOG_DEBUG, "ST: %.*s (ver=%d)", st_len, st, st_ver); /*j = 0;*/ /*while(bufr[i+j]!='\r') j++;*/ /*syslog(LOG_INFO, "%.*s", j, bufr+i);*/ } #ifdef UPNP_STRICT else if((i < n - 3) && (strncasecmp(bufr+i, "mx:", 3) == 0)) { const char * mx; int mx_len; mx = bufr+i+3; mx_len = 0; while((*mx == ' ' || *mx == '\t') && (mx < bufr + n)) mx++; while(mx[mx_len]!='\r' && mx[mx_len]!='\n' && (mx + mx_len < bufr + n)) mx_len++; mx_value = atoi(mx); syslog(LOG_DEBUG, "MX: %.*s (value=%d)", mx_len, mx, mx_value); } #endif } #ifdef UPNP_STRICT if(mx_value < 0) { syslog(LOG_INFO, "ignoring SSDP packet missing MX: header"); return; } #endif /*syslog(LOG_INFO, "SSDP M-SEARCH packet received from %s", sender_str );*/ if(st && (st_len > 0)) { /* TODO : doesnt answer at once but wait for a random time */ syslog(LOG_INFO, "SSDP M-SEARCH from %s ST: %.*s", sender_str, st_len, st); /* find in which sub network the client is */ if(sender->sa_family == AF_INET) { if (lan_addr == NULL) { syslog(LOG_ERR, "Can't find in which sub network the client is"); return; } announced_host = lan_addr->str; } #ifdef ENABLE_IPV6 else { /* IPv6 address with brackets */ #ifdef UPNP_STRICT int index; struct in6_addr addr6; size_t addr6_len = sizeof(addr6); /* retrieve the IPv6 address which * will be used locally to reach sender */ memset(&addr6, 0, sizeof(addr6)); if(<API key> (sender, &addr6, &addr6_len, &index) < 0) { syslog(LOG_WARNING, "<API key>() failed, using %s", <API key>); announced_host = <API key>; } else { 
if(inet_ntop(AF_INET6, &addr6, announced_host_buf+1, sizeof(announced_host_buf) - 2)) { announced_host_buf[0] = '['; i = strlen(announced_host_buf); if(i < (int)sizeof(announced_host_buf) - 1) { announced_host_buf[i] = ']'; announced_host_buf[i+1] = '\0'; } else { syslog(LOG_NOTICE, "cannot suffix %s with ']'", announced_host_buf); } announced_host = announced_host_buf; } else { syslog(LOG_NOTICE, "inet_ntop() failed %m"); announced_host = <API key>; } } #else announced_host = <API key>; #endif } #endif /* Responds to request with a device as ST header */ for(i = 0; known_service_types[i].s; i++) { l = (int)strlen(known_service_types[i].s); if(l<=st_len && (0 == memcmp(st, known_service_types[i].s, l)) #ifdef UPNP_STRICT && (st_ver <= known_service_types[i].version) /* only answer for service version lower or equal of supported one */ #endif ) { syslog(LOG_INFO, "Single search found"); SendSSDPAnnounce2(s, sender, st, st_len, "", announced_host, port); break; } } /* Responds to request with ST: ssdp:all */ /* strlen("ssdp:all") == 8 */ if(st_len==8 && (0 == memcmp(st, "ssdp:all", 8))) { syslog(LOG_INFO, "ssdp:all found"); for(i=0; known_service_types[i].s; i++) { if(i==0) ver_str[0] = '\0'; else snprintf(ver_str, sizeof(ver_str), "%d", known_service_types[i].version); l = (int)strlen(known_service_types[i].s); SendSSDPAnnounce2(s, sender, known_service_types[i].s, l, ver_str, announced_host, port); } /* also answer for uuid */ SendSSDPAnnounce2(s, sender, uuidvalue, strlen(uuidvalue), "", announced_host, port); } /* responds to request by UUID value */ l = (int)strlen(uuidvalue); if(l==st_len && (0 == memcmp(st, uuidvalue, l))) { syslog(LOG_INFO, "ssdp:uuid found"); SendSSDPAnnounce2(s, sender, st, st_len, "", announced_host, port); } } else { syslog(LOG_INFO, "Invalid SSDP M-SEARCH from %s", sender_str); } } else { syslog(LOG_NOTICE, "Unknown udp packet received from %s", sender_str); } } static int SendSSDPbyebye(int s, const struct sockaddr * dest, const char * 
nt, const char * suffix, const char * usn1, const char * usn2, const char * usn3, int ipv6) { int n, l; char bufr[512]; l = snprintf(bufr, sizeof(bufr), "NOTIFY * HTTP/1.1\r\n" "HOST: %s:%d\r\n" "NT: %s%s\r\n" "USN: %s%s%s%s\r\n" "NTS: ssdp:byebye\r\n" "OPT: \"http://schemas.upnp.org/upnp/1/0/\"; ns=01\r\n" /* UDA v1.1 */ "01-NLS: %u\r\n" /* same as BOOTID field. UDA v1.1 */ "BOOTID.UPNP.ORG: %u\r\n" /* UDA v1.1 */ "CONFIGID.UPNP.ORG: %u\r\n" /* UDA v1.1 */ "\r\n", ipv6 ? "[" LL_SSDP_MCAST_ADDR "]" : SSDP_MCAST_ADDR, SSDP_PORT, nt, suffix, usn1, usn2, usn3, suffix, /* USN: */ upnp_bootid, upnp_bootid, upnp_configid); if(l<0) { syslog(LOG_ERR, "SendSSDPbyebye() snprintf error"); return -1; } else if((unsigned int)l >= sizeof(bufr)) { syslog(LOG_WARNING, "SendSSDPbyebye(): truncated output"); l = sizeof(bufr); } n = sendto(s, bufr, l, 0, dest, #ifdef ENABLE_IPV6 ipv6 ? sizeof(struct sockaddr_in6) : sizeof(struct sockaddr_in) #else sizeof(struct sockaddr_in) #endif ); if(n < 0) { syslog(LOG_ERR, "sendto(udp_shutdown=%d): %m", s); return -1; } else if(n != l) { syslog(LOG_NOTICE, "sendto() sent %d out of %d bytes", n, l); return -1; } return 0; } /* This will broadcast ssdp:byebye notifications to inform * the network that UPnP is going down. 
*/ int SendSSDPGoodbye(int * sockets, int n_sockets) { struct sockaddr_in sockname; #ifdef ENABLE_IPV6 struct sockaddr_in6 sockname6; #endif int i, j; char ver_str[4]; int ret = 0; int ipv6 = 0; memset(&sockname, 0, sizeof(struct sockaddr_in)); sockname.sin_family = AF_INET; sockname.sin_port = htons(SSDP_PORT); sockname.sin_addr.s_addr = inet_addr(SSDP_MCAST_ADDR); #ifdef ENABLE_IPV6 memset(&sockname6, 0, sizeof(struct sockaddr_in6)); sockname6.sin6_family = AF_INET6; sockname6.sin6_port = htons(SSDP_PORT); inet_pton(AF_INET6, LL_SSDP_MCAST_ADDR, &(sockname6.sin6_addr)); #endif for(j=0; j<n_sockets; j++) { #ifdef ENABLE_IPV6 ipv6 = j & 1; #endif for(i=0; known_service_types[i].s; i++) { if(i==0) ver_str[0] = '\0'; else snprintf(ver_str, sizeof(ver_str), "%d", known_service_types[i].version); ret += SendSSDPbyebye(sockets[j], #ifdef ENABLE_IPV6 ipv6 ? (struct sockaddr *)&sockname6 : (struct sockaddr *)&sockname, #else (struct sockaddr *)&sockname, #endif known_service_types[i].s, ver_str, uuidvalue, "::", known_service_types[i].s, /* ver_str, USN: */ ipv6); if(i==0) /* root device */ { ret += SendSSDPbyebye(sockets[j], #ifdef ENABLE_IPV6 ipv6 ? 
(struct sockaddr *)&sockname6 : (struct sockaddr *)&sockname, #else (struct sockaddr *)&sockname, #endif uuidvalue, "", uuidvalue, "", "", /* ver_str, USN: */ ipv6); } } } return ret; } /* <API key>() : * register services offered by MiniUPnPd to a running instance of * MiniSSDPd */ int <API key>(const char * host, unsigned short port) { struct sockaddr_un addr; int s; unsigned char buffer[2048]; char strbuf[256]; unsigned char * p; int i, l, n; char ver_str[4]; s = socket(AF_UNIX, SOCK_STREAM, 0); if(s < 0) { syslog(LOG_ERR, "socket(unix): %m"); return -1; } addr.sun_family = AF_UNIX; strncpy(addr.sun_path, minissdpdsocketpath, sizeof(addr.sun_path)); if(connect(s, (struct sockaddr *)&addr, sizeof(struct sockaddr_un)) < 0) { syslog(LOG_ERR, "connect(\"%s\"): %m", minissdpdsocketpath); close(s); return -1; } for(i = 0; known_service_types[i].s; i++) { buffer[0] = 4; /* request type 4 : submit service */ /* 4 strings following : ST (service type), USN, Server, Location */ p = buffer + 1; l = (int)strlen(known_service_types[i].s); if(i > 0) l++; CODELENGTH(l, p); memcpy(p, known_service_types[i].s, l); if(i > 0) p[l-1] = '1'; p += l; if(i==0) ver_str[0] = '\0'; else snprintf(ver_str, sizeof(ver_str), "%d", known_service_types[i].version); l = snprintf(strbuf, sizeof(strbuf), "%s::%s%s", uuidvalue, known_service_types[i].s, ver_str); CODELENGTH(l, p); memcpy(p, strbuf, l); p += l; l = (int)strlen(<API key>); CODELENGTH(l, p); memcpy(p, <API key>, l); p += l; l = snprintf(strbuf, sizeof(strbuf), "http://%s:%u" ROOTDESC_PATH, host, (unsigned int)port); CODELENGTH(l, p); memcpy(p, strbuf, l); p += l; /* now write the encoded data */ n = p - buffer; /* bytes to send */ p = buffer; /* start */ while(n > 0) { l = write(s, p, n); if (l < 0) { syslog(LOG_ERR, "write(): %m"); close(s); return -1; } else if (l == 0) { syslog(LOG_ERR, "write() returned 0"); close(s); return -1; } p += l; n -= l; } } close(s); return 0; }
import numpy as np
import pytest

from pandas._libs import join as _join

from pandas import Categorical, DataFrame, Index, merge
import pandas._testing as tm


class TestIndexer:
    # Exercises outer_join_indexer across the supported join-key dtypes,
    # including the empty-array edge case on either side.
    @pytest.mark.parametrize(
        "dtype", ["int32", "int64", "float32", "float64", "object"]
    )
    def <API key>(self, dtype):
        indexer = _join.outer_join_indexer
        left = np.arange(3, dtype=dtype)
        right = np.arange(2, 5, dtype=dtype)
        empty = np.array([], dtype=dtype)

        result, lindexer, rindexer = indexer(left, right)
        assert isinstance(result, np.ndarray)
        assert isinstance(lindexer, np.ndarray)
        assert isinstance(rindexer, np.ndarray)
        tm.<API key>(result, np.arange(5, dtype=dtype))
        exp = np.array([0, 1, 2, -1, -1], dtype=np.int64)
        tm.<API key>(lindexer, exp)
        exp = np.array([-1, -1, 0, 1, 2], dtype=np.int64)
        tm.<API key>(rindexer, exp)

        # empty left: result mirrors right, all left positions are -1
        result, lindexer, rindexer = indexer(empty, right)
        tm.<API key>(result, right)
        exp = np.array([-1, -1, -1], dtype=np.int64)
        tm.<API key>(lindexer, exp)
        exp = np.array([0, 1, 2], dtype=np.int64)
        tm.<API key>(rindexer, exp)

        # empty right: result mirrors left, all right positions are -1
        result, lindexer, rindexer = indexer(left, empty)
        tm.<API key>(result, left)
        exp = np.array([0, 1, 2], dtype=np.int64)
        tm.<API key>(lindexer, exp)
        exp = np.array([-1, -1, -1], dtype=np.int64)
        tm.<API key>(rindexer, exp)


def <API key>():
    # duplicate keys in b each map to the position of the single match in a
    a = np.array([1, 2, 3, 4, 5], dtype=np.int64)
    b = np.array([2, 2, 3, 4, 4], dtype=np.int64)

    result = _join.<API key>(b, a)
    expected = np.array([1, 1, 2, 3, 3], dtype=np.int64)
    tm.<API key>(result, expected)


def <API key>():
    # unsorted left keys joined against right=[3, 1] without sorting;
    # only rows whose key is 1 or 3 get a right-side position
    left = np.array(
        [
            0, 1, 0, 1, 1, 2, 3, 1, 0, 2, 1, 2, 0, 1, 1, 2, 3, 2, 3, 2,
            1, 1, 3, 0, 3, 2, 3, 0, 0, 2, 3, 2, 0, 3, 1, 3, 0, 1, 3, 0,
            0, 1, 0, 3, 1, 0, 1, 0, 1, 1, 0, 2, 2, 2, 2, 2, 0, 3, 1, 2,
            0, 0, 3, 1, 3, 2, 2, 0, 1, 3, 0, 2, 3, 2, 3, 3, 2, 3, 3, 1,
            3, 2, 0, 0, 3, 1, 1, 1, 0, 2, 3, 3, 1, 2, 0, 3, 1, 2, 0, 2,
        ],
        dtype=np.int64,
    )
    right = np.array([3, 1], dtype=np.int64)
    max_groups = 4

    lidx, ridx = _join.left_outer_join(left, right, max_groups, sort=False)

    # left order is preserved; unmatched rows get -1 on the right
    exp_lidx = np.arange(len(left), dtype=np.int64)
    exp_ridx = -np.ones(len(left), dtype=np.int64)

    exp_ridx[left == 1] = 1
    exp_ridx[left == 3] = 0

    tm.<API key>(lidx, exp_lidx)
    tm.<API key>(ridx, exp_ridx)


def <API key>():
    # inner join keeps only values present in both arrays
    a = np.array([1, 2, 3, 4, 5], dtype=np.int64)
    b = np.array([0, 3, 5, 7, 9], dtype=np.int64)

    index, ares, bres = _join.inner_join_indexer(a, b)

    index_exp = np.array([3, 5], dtype=np.int64)
    tm.assert_almost_equal(index, index_exp)

    aexp = np.array([2, 4], dtype=np.int64)
    bexp = np.array([1, 2], dtype=np.int64)
    tm.assert_almost_equal(ares, aexp)
    tm.assert_almost_equal(bres, bexp)

    # degenerate single-element case
    a = np.array([5], dtype=np.int64)
    b = np.array([5], dtype=np.int64)

    index, ares, bres = _join.inner_join_indexer(a, b)
    tm.<API key>(index, np.array([5], dtype=np.int64))
    tm.<API key>(ares, np.array([0], dtype=np.int64))
    tm.<API key>(bres, np.array([0], dtype=np.int64))


def <API key>():
    # outer join keeps the sorted union of both arrays
    a = np.array([1, 2, 3, 4, 5], dtype=np.int64)
    b = np.array([0, 3, 5, 7, 9], dtype=np.int64)

    index, ares, bres = _join.outer_join_indexer(a, b)

    index_exp = np.array([0, 1, 2, 3, 4, 5, 7, 9], dtype=np.int64)
    tm.assert_almost_equal(index, index_exp)

    aexp = np.array([-1, 0, 1, 2, 3, 4, -1, -1], dtype=np.int64)
    bexp = np.array([0, -1, -1, 1, -1, 2, 3, 4], dtype=np.int64)
    tm.assert_almost_equal(ares, aexp)
    tm.assert_almost_equal(bres, bexp)

    # degenerate single-element case
    a = np.array([5], dtype=np.int64)
    b = np.array([5], dtype=np.int64)

    index, ares, bres = _join.outer_join_indexer(a, b)
    tm.<API key>(index, np.array([5], dtype=np.int64))
    tm.<API key>(ares, np.array([0], dtype=np.int64))
    tm.<API key>(bres, np.array([0], dtype=np.int64))


def <API key>():
    # left join keeps all of a; unmatched positions in b are -1
    a = np.array([1, 2, 3, 4, 5], dtype=np.int64)
    b = np.array([0, 3, 5, 7, 9], dtype=np.int64)

    index, ares, bres = _join.left_join_indexer(a, b)

    tm.assert_almost_equal(index, a)

    aexp = np.array([0, 1, 2, 3, 4], dtype=np.int64)
    bexp = np.array([-1, -1, 1, -1, 2], dtype=np.int64)
    tm.assert_almost_equal(ares, aexp)
    tm.assert_almost_equal(bres, bexp)

    # degenerate single-element case
    a = np.array([5], dtype=np.int64)
    b = np.array([5], dtype=np.int64)

    index, ares, bres = _join.left_join_indexer(a, b)
    tm.<API key>(index, np.array([5], dtype=np.int64))
    tm.<API key>(ares, np.array([0], dtype=np.int64))
    tm.<API key>(bres, np.array([0], dtype=np.int64))


def <API key>():
    # left join with duplicate keys on the right side
    idx = Index([1, 1, 2, 5])
    idx2 = Index([1, 2, 5, 7, 9])

    res, lidx, ridx = _join.left_join_indexer(idx2.values, idx.values)

    exp_res = np.array([1, 1, 2, 5, 7, 9], dtype=np.int64)
    tm.assert_almost_equal(res, exp_res)

    exp_lidx = np.array([0, 0, 1, 2, 3, 4], dtype=np.int64)
    tm.assert_almost_equal(lidx, exp_lidx)

    exp_ridx = np.array([0, 1, 2, 3, -1, -1], dtype=np.int64)
    tm.assert_almost_equal(ridx, exp_ridx)


def <API key>():
    # outer join with duplicate keys on the right side
    idx = Index([1, 1, 2, 5])
    idx2 = Index([1, 2, 5, 7, 9])

    res, lidx, ridx = _join.outer_join_indexer(idx2.values, idx.values)

    exp_res = np.array([1, 1, 2, 5, 7, 9], dtype=np.int64)
    tm.assert_almost_equal(res, exp_res)

    exp_lidx = np.array([0, 0, 1, 2, 3, 4], dtype=np.int64)
    tm.assert_almost_equal(lidx, exp_lidx)

    exp_ridx = np.array([0, 1, 2, 3, -1, -1], dtype=np.int64)
    tm.assert_almost_equal(ridx, exp_ridx)


def <API key>():
    # inner join with duplicate keys on the right side
    idx = Index([1, 1, 2, 5])
    idx2 = Index([1, 2, 5, 7, 9])

    res, lidx, ridx = _join.inner_join_indexer(idx2.values, idx.values)

    exp_res = np.array([1, 1, 2, 5], dtype=np.int64)
    tm.assert_almost_equal(res, exp_res)

    exp_lidx = np.array([0, 0, 1, 2], dtype=np.int64)
    tm.assert_almost_equal(lidx, exp_lidx)

    exp_ridx = np.array([0, 1, 2, 3], dtype=np.int64)
    tm.assert_almost_equal(ridx, exp_ridx)


def <API key>():
    # From issue 16627
    # joining on a (categorical, int) key should match merge() on the
    # reset index
    a = {
        "Cat1": Categorical(["a", "b", "a", "c", "a", "b"], ["a", "b", "c"]),
        "Int1": [0, 1, 0, 1, 0, 0],
    }
    a = DataFrame(a)

    b = {
        "Cat": Categorical(["a", "b", "c", "a", "b", "c"], ["a", "b", "c"]),
        "Int": [0, 0, 0, 1, 1, 1],
        "Factor": [1.1, 1.2, 1.3, 1.4, 1.5, 1.6],
    }
    b = DataFrame(b).set_index(["Cat", "Int"])["Factor"]

    expected = merge(
        a,
        b.reset_index(),
        left_on=["Cat1", "Int1"],
        right_on=["Cat", "Int"],
        how="left",
    )
    result = a.join(b, on=["Cat1", "Int1"])
    expected = expected.drop(["Cat", "Int"], axis=1)
    tm.assert_frame_equal(expected, result)

    # Same test, but with ordered categorical
    a = {
        "Cat1": Categorical(
            ["a", "b", "a", "c", "a", "b"], ["b", "a", "c"], ordered=True
        ),
        "Int1": [0, 1, 0, 1, 0, 0],
    }
    a = DataFrame(a)

    b = {
        "Cat": Categorical(
            ["a", "b", "c", "a", "b", "c"], ["b", "a", "c"], ordered=True
        ),
        "Int": [0, 0, 0, 1, 1, 1],
        "Factor": [1.1, 1.2, 1.3, 1.4, 1.5, 1.6],
    }
    b = DataFrame(b).set_index(["Cat", "Int"])["Factor"]

    expected = merge(
        a,
        b.reset_index(),
        left_on=["Cat1", "Int1"],
        right_on=["Cat", "Int"],
        how="left",
    )
    result = a.join(b, on=["Cat1", "Int1"])
    expected = expected.drop(["Cat", "Int"], axis=1)
    tm.assert_frame_equal(expected, result)
package com.iclojure.jline.console.history; import java.io.IOException; /** * Persistent {@link History}. * * @author <a href="mailto:jason@planet57.com">Jason Dillon</a> * @since 2.3 */ public interface PersistentHistory extends History { /** * Flush all items to persistent storage. * * @throws IOException Flush failed */ void flush() throws IOException; /** * Purge persistent storage and {@link #clear}. * * @throws IOException Purge failed */ void purge() throws IOException; }
using System; using System.Collections; using System.Collections.Generic; namespace zuoraTools.DataEnumerators { interface IDataEnumerator { //An inteface used to facilitate looping through records in a foreach int LoopIterations{get;} //A counter used primarily for reporting how many records were processed List<string> Keys { get; set; } String Next(); //A method that dequeues the a record and does NOT increment loopIterations, used primarily for parsing headers IEnumerable Gen(); //Returns enumerable //bool _textBufferLoadLine(); //An idompotent method for making sure a new record is ready for processing, returns false when there are no records remaining, used internally and in wrappers, semi-private. } }
package org.openrdf.store.blob.disk;

import java.io.BufferedReader;
import java.io.File;
import java.io.<API key>;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.locks.Lock;

import org.openrdf.store.blob.BlobObject;
import org.openrdf.store.blob.BlobVersion;

/**
 * One version (transaction) of blobs in a {@link DiskBlobStore}. Tracks the
 * blobs opened under this version, records committed changes in a journal
 * file, and implements a prepare/commit/rollback cycle guarded by the
 * store's lock.
 */
public class DiskBlobVersion implements BlobVersion {
	private final DiskBlobStore store;
	private final String version;
	// directory holding this version's journal entry file
	private final File journal;
	// journal file listing the URIs changed in this version; created lazily
	private File entry;
	// URIs whose changes have already been committed under this version
	private final Set<String> committed;
	// blobs opened under this version, keyed by URI
	private final Map<String, DiskBlob> open;
	// true between a successful prepare() and the following commit()/rollback()
	private boolean prepared;

	/**
	 * @param file either an existing journal entry file (reopening a
	 *        committed version) or the journal directory (a new version)
	 */
	protected DiskBlobVersion(DiskBlobStore store, final String version,
			File file) throws IOException {
		assert store != null;
		assert version != null;
		assert file != null;
		this.store = store;
		this.version = version;
		if (file.isFile()) {
			// existing journal entry: reload the URIs it recorded
			this.journal = file.getParentFile();
			this.open = readChanges(entry = file);
			this.committed = new HashSet<String>(this.open.keySet());
		} else {
			// fresh version: file is the journal directory itself
			this.journal = file;
			this.committed = new HashSet<String>();
			this.open = new HashMap<String, DiskBlob>();
		}
	}

	public String toString() {
		return version;
	}

	public int hashCode() {
		return version.hashCode();
	}

	public boolean equals(Object obj) {
		if (this == obj)
			return true;
		if (obj == null)
			return false;
		if (getClass() != obj.getClass())
			return false;
		DiskBlobVersion other = (DiskBlobVersion) obj;
		if (!version.equals(other.version))
			return false;
		if (!store.equals(other.store))
			return false;
		return true;
	}

	/**
	 * URIs modified by this version: already-committed blobs plus open
	 * blobs with uncommitted pending changes.
	 */
	public synchronized String[] getModifications() throws IOException {
		List<String> list = new ArrayList<String>(open.size());
		for (Map.Entry<String, DiskBlob> e : open.entrySet()) {
			if (committed.contains(e.getKey())
					|| e.getValue().isChangePending())
				list.add(e.getKey());
		}
		return list.toArray(new String[list.size()]);
	}

	/** Returns the blob for the given URI, opening it on first access. */
	public synchronized BlobObject open(String uri) {
		DiskBlob blob = open.get(uri);
		if (blob != null)
			return blob;
		open.put(uri, blob = new DiskBlob(this, uri));
		return blob;
	}

	/**
	 * Acquires the store lock and verifies no open blob was modified
	 * concurrently. On conflict the lock is released and an IOException
	 * thrown; on success the lock is held until commit() or rollback().
	 */
	public synchronized void prepare() throws IOException {
		if (prepared)
			return;
		store.lock();
		prepared = true;
		boolean faild = true;
		try {
			for (DiskBlob blob : open.values()) {
				if (blob.hasConflict())
					throw new IOException(
							"Resource has since been modified: "
									+ blob.toUri());
			}
			faild = false;
		} finally {
			if (faild) {
				// conflict detected: undo the prepare and release the lock
				prepared = false;
				store.unlock();
			}
		}
	}

	/**
	 * Syncs all pending blob changes to disk, rewrites the journal entry,
	 * notifies the store, and releases the store lock taken by prepare().
	 */
	public synchronized void commit() throws IOException {
		boolean newversion = committed.isEmpty();
		if (!prepared) {
			prepare();
		}
		// previous on-disk versions made obsolete by this commit
		Set<String> obsolete = new HashSet<String>(open.size());
		for (Map.Entry<String, DiskBlob> e : open.entrySet()) {
			// note: local shadows the field; this is the blob's prior version
			String version = e.getValue().getCommittedVersion();
			if (e.getValue().isChangePending()) {
				if (committed.contains(e.getKey())) {
					// already committed once under this version: re-sync
					if (e.getValue().resync()) {
						obsolete.add(version);
					}
				} else {
					if (e.getValue().sync()) {
						committed.add(e.getKey());
						obsolete.add(version);
					}
				}
			}
		}
		// drop open blobs that never committed anything
		open.keySet().retainAll(committed);
		if (!committed.isEmpty()) {
			File file = writeChanges(this.getVersion(), committed);
			if (newversion) {
				store.newBlobVersion(this.getVersion(), file);
			}
			store.changed(this.getVersion(), committed, file, obsolete);
		}
		prepared = false;
		store.unlock();
	}

	/** Aborts all open blobs and releases the store lock if prepared. */
	public synchronized void rollback() {
		try {
			for (DiskBlob blob : open.values()) {
				blob.abort();
			}
		} finally {
			if (prepared) {
				prepared = false;
				store.unlock();
			}
		}
	}

	/**
	 * Erases every committed blob of this version, deletes the journal
	 * entry (and any now-empty parent directories), and removes the
	 * version from the store index.
	 */
	public synchronized boolean erase() throws IOException {
		assert entry != null;
		store.lock();
		try {
			for (String key : committed) {
				open.get(key).erase();
			}
			boolean ret = entry.delete();
			// prune up to two levels of empty journal directories
			File d = entry.getParentFile();
			if (d.list().length == 0) {
				d.delete();
			}
			if (d.getParentFile().list().length == 0) {
				d.getParentFile().delete();
			}
			store.removeFromIndex(getVersion());
			return ret;
		} finally {
			store.unlock();
		}
	}

	/** Adds the URIs of all blobs open under this version to the set. */
	protected synchronized void addOpenBlobs(Collection<String> set) {
		set.addAll(open.keySet());
	}

	/**
	 * A version is obsolete when none of its blobs have pending changes
	 * and none of them are still current in the store.
	 */
	protected synchronized boolean isObsolete() throws IOException {
		for (Map.Entry<String, DiskBlob> e : open.entrySet()) {
			if (e.getValue().isChangePending())
				return false;
			if (getVersion().equals(
					store.open(e.getKey()).getCommittedVersion()))
				return false;
		}
		return true;
	}

	protected File getDirectory() {
		return store.getDirectory();
	}

	protected String getVersion() {
		return version;
	}

	protected void watch(String uri, DiskListener listener) {
		store.watch(uri, listener);
	}

	protected boolean unwatch(String uri, DiskListener listener) {
		return store.unwatch(uri, listener);
	}

	protected Lock readLock() {
		return store.readLock();
	}

	/**
	 * Reads the journal entry file (one URI per line) under the store's
	 * read lock; a missing file yields an empty map.
	 */
	private Map<String, DiskBlob> readChanges(File changes)
			throws IOException {
		Lock readLock = store.readLock();
		try {
			readLock.lock();
			BufferedReader reader = new BufferedReader(new FileReader(changes));
			try {
				String uri;
				Map<String, DiskBlob> map = new HashMap<String, DiskBlob>();
				while ((uri = reader.readLine()) != null) {
					map.put(uri, new DiskBlob(this, uri));
				}
				return map;
			} finally {
				reader.close();
			}
		} catch (<API key> e) {
			return new HashMap<String, DiskBlob>();
		} finally {
			readLock.unlock();
		}
	}

	/** Writes the set of changed URIs to this version's journal file. */
	private File writeChanges(String iri, Set<String> changes)
			throws IOException {
		File file = getJournalFile(iri);
		PrintWriter writer = new PrintWriter(new FileWriter(file));
		try {
			for (String uri : changes) {
				writer.println(uri);
			}
		} finally {
			writer.close();
		}
		return file;
	}

	/** Returns the existing journal file or allocates a new one. */
	private File getJournalFile(String iri) {
		if (entry != null)
			return entry;
		return entry = newJournalFile(iri);
	}

	/**
	 * Picks an unused journal file name derived from the IRI hash, split
	 * as xxxx/xxxx under the journal directory; probes successive hash
	 * values on collision.
	 */
	private File newJournalFile(String iri) {
		int code = iri.hashCode();
		File file;
		do {
			String name = Integer.toHexString(code++);
			while (name.length() < 8) {
				name = '0' + name;
			}
			name = name.substring(0, 4) + File.separatorChar
					+ name.substring(4);
			file = new File(journal, name);
		} while (file.exists());
		file.getParentFile().mkdirs();
		return file;
	}
}
#!/usr/bin/python #|R|a|s|p|b|e|r|r|y|P|i|.|c|o|m|.|t|w| # Author : sosorry # Date : 11/14/2017 # Cahgne the parameters of camera import picamera with picamera.PiCamera() as camera: camera.resolution = (640, 480) camera.iso = 200 camera.exposure_mode = 'off' g = camera.awb_gains camera.awb_mode = 'off' camera.awb_gains = g camera.start_recording('video.h264', quality=23) camera.wait_recording(3) camera.stop_recording()
// modification, are permitted provided that the following conditions are met:
// and/or other materials provided with the distribution.
// * Neither the name of whawty.pond nor the names of its
// contributors may be used to endorse or promote products derived from
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package main

import (
	"io/ioutil"
	"log"
	"os"
	//	"path"
	"regexp"
	"strings"
)

var (
	// info / error / debug loggers; debug is discarded unless enabled via env
	wil = log.New(os.Stdout, "[whawty.pond INFO]\t", log.LstdFlags)
	wel = log.New(os.Stderr, "[whawty.pond ERROR]\t", log.LstdFlags)
	wdl = log.New(ioutil.Discard, "[whawty.pond DEBUG]\t", log.LstdFlags)

	// defaults, overridable from the environment in init()
	enableBackends = []string{"docker"}
	volumeBasePath = "/srv/volumes"
	serviceNameRe  = regexp.MustCompile("^[-_.A-Za-z0-9]+$")
	telnetAddr     = "127.0.0.1:9023"
)

// init applies environment overrides: debug logging, the comma-separated
// backend list, and the volume base path.
func init() {
	if _, exists := os.LookupEnv("WHAWTY_POND_DEBUG"); exists {
		wdl.SetOutput(os.Stderr)
	}
	if value, exists := os.LookupEnv("<API key>"); exists {
		enableBackends = strings.Split(value, ",")
	}
	if value, exists := os.LookupEnv("<API key>"); exists {
		volumeBasePath = value
	}
}

// main initializes the pond context, enables the configured backends, and
// runs the telnet control interface until it stops; exiting the process
// when no backend could be enabled or a control interface terminates.
func main() {
	wil.Printf("starting")

	var ctx Context
	if err := ctx.Init(); err != nil {
		wel.Printf("Error initalizing pond context: %v", err)
		os.Exit(1)
	}

	// try to enable each configured backend; failures are logged, not fatal
	for _, name := range enableBackends {
		name = strings.TrimSpace(name)
		backend, err := NewBackend(name)
		if err != nil {
			wel.Printf("Error enabling backend(%s): %v", name, err)
			continue
		}
		if err := backend.Init(); err != nil {
			wel.Printf("backend(%s): can't be enabled: %v", name, err)
		} else {
			ctx.Backends[name] = backend
			wil.Printf("backend(%s): successfully enabled/initialized", name)
		}
	}
	// refuse to run with zero working backends
	if len(ctx.Backends) == 0 {
		wel.Printf("no backends are enabled, exitting...")
		os.Exit(1)
	}

	// block until a control interface goroutine signals it has stopped
	stop := make(chan bool)
	if telnetAddr != "" {
		telnet := TelnetInit(telnetAddr, &ctx)
		go func() {
			wil.Printf("starting telnet interface (%s)", telnetAddr)
			telnet.Run()
			wil.Printf("telnet interface just stopped")
			stop <- true
		}()
	}
	<-stop
	wil.Printf("at least one control interface has stopped - bringing down the whole process")
}
<?php
namespace User\Form;

use Zend\InputFilter\InputFilter;
use Zend\InputFilter\<API key>;
use Zend\InputFilter\<API key>;
use Zend\Db\Adapter\Adapter;
use User\Model\Gender;
use User\Model\MaritalStatus;
use User\Model\BodyType;
use User\Model\Users;

/**
 * Input filter for the user-edit form. Extends the basic registration
 * filter with an id filter and an email validator that ignores the
 * record belonging to the user being edited.
 */
class UsersEditFilter implements <API key>
{
    /**
     * @var inputFilter
     */
    protected $inputFilter;

    /**
     * @var Database Adapter
     */
    protected $dbAdapter;

    /**
     * @var Basic register filter
     */
    protected $registerFilter;

    // the user being edited; its id is excluded from the duplicate-email check
    protected $user;

    public function setUser(Users $user){
        $this->user = $user;
    }

    public function getUser(){
        return $this->user;
    }

    /**
     * Not supported; the filter is built internally by getInputFilter().
     *
     * @param \Zend\InputFilter\<API key> $inputFilter
     * @throws \Exception
     */
    public function setInputFilter(<API key> $inputFilter)
    {
        throw new \Exception("Not used");
    }

    /**
     * @param \Zend\Db\Adapter $dbAdapter
     */
    public function __construct(Adapter $dbAdapter, UserInfoFilterBasic $registerFilter)
    {
        $this->dbAdapter = $dbAdapter;
        $this->registerFilter = $registerFilter;
    }

    /**
     *
     * @return Zend\Db\Adapter
     */
    public function getDbAdapter()
    {
        return $this->dbAdapter;
    }

    /**
     * Lazily builds (and caches) the combined input filter.
     */
    public function getInputFilter()
    {
        if (!$this->inputFilter) {
            // start from the shared registration filter
            $inputFilter = $this->registerFilter->getInputFilter();

            $inputFilter->add(array(
                'name' => 'id',
                'required' => true,
                'filters' => array(
                    array('name' => 'Int'),
                ),
            ));

            $inputFilter->add(array(
                'name' => 'email',
                //'required' => true,
                'validators' => array(
                    array(
                        'name' => 'EmailAddress',
                        'options' => array(
                            'domain' => true,
                        )
                    ),
                    array(
                        // uniqueness check, excluding the edited user's own row
                        'name' => 'Db\NoRecordExists',
                        'options' => array(
                            'table' => 'users',
                            'field' => 'email',
                            'adapter' => $this->getDbAdapter(),
                            'messages' => array(
                                \Zend\Validator\Db\AbstractDb::ERROR_RECORD_FOUND => 'A profile with this email address already exists.'
                            ),
                            'exclude' => array(
                                'field' => 'id',
                                'value' => $this->getUser()->id
                            )
                        )
                    ),
                    array(
                        'name' => 'NotEmpty',
                        'options' => array(
                            'messages' => array(
                                \Zend\Validator\NotEmpty::IS_EMPTY => 'Please enter email',
                                \Zend\Validator\NotEmpty::INVALID => 'Please Check Your Email'
                            )
                        )
                    )
                )
            ));

            $this->inputFilter = $inputFilter;
        }
        return $this->inputFilter;
    }
}
'use strict';

var errorCodes = require('../postgresql/error-codes').codeToCondition;
var jobStatus = require('./job-status');
var Profiler = require('step-profiler');
var _ = require('underscore');

// Redis database / key prefix used to read per-user batch limits
var REDIS_LIMITS = {
    DB: global.settings.batch_db || 5,
    PREFIX: 'limits:batch:' // + username
};

/**
 * Runs batch SQL jobs: resolves the effective timeout, marks the job as
 * RUNNING, executes the next query and persists the resulting status.
 *
 * @param {Object} jobService job persistence service
 * @param {Object} jobQueue queue used to re-enqueue multi-query jobs
 * @param {Object} queryRunner executes SQL against the user database
 * @param {Object} metadataBackend Redis-backed metadata access
 * @param {Object} statsdClient metrics client handed to the profiler
 */
function JobRunner (jobService, jobQueue, queryRunner, metadataBackend, statsdClient) {
    this.jobService = jobService;
    this.jobQueue = jobQueue;
    this.queryRunner = queryRunner;
    this.metadataBackend = metadataBackend;
    this.statsdClient = statsdClient;
}

/**
 * Fetches the job, computes its timeout, transitions it to RUNNING,
 * saves it and runs its next query.
 *
 * @param {String} jobId
 * @param {Function} callback callback(err, job)
 */
JobRunner.prototype.run = function (jobId, callback) {
    var self = this;

    var profiler = new Profiler({ statsd_client: self.statsdClient });
    profiler.start('sqlapi.batch.job');

    self.jobService.get(jobId, function (err, job) {
        if (err) {
            return callback(err);
        }

        self.<API key>(job.data.user, function (err, timeout) {
            if (err) {
                return callback(err);
            }

            var query = job.getNextQuery();

            // a query object may carry its own (smaller) timeout
            if (_.isObject(query)) {
                if (Number.isFinite(query.timeout) && query.timeout > 0) {
                    timeout = Math.min(timeout, query.timeout);
                }
                query = query.query;
            }

            // setStatus throws on an invalid status transition
            try {
                job.setStatus(jobStatus.RUNNING);
            } catch (err) {
                return callback(err);
            }

            self.jobService.save(job, function (err, job) {
                if (err) {
                    return callback(err);
                }
                profiler.done('running');

                self._run(job, query, timeout, profiler, callback);
            });
        });
    });
};

/**
 * Resolves the query timeout for a user: global default (12h) possibly
 * overridden by settings and by a per-user Redis limit.
 *
 * @param {String} username
 * @param {Function} callback callback(err, timeoutMillis)
 */
JobRunner.prototype.<API key> = function (username, callback) {
    var timeout = 12 * 3600 * 1000;
    if (Number.isFinite(global.settings.batch_query_timeout)) {
        timeout = global.settings.batch_query_timeout;
    }

    var batchLimitsKey = REDIS_LIMITS.PREFIX + username;
    this.metadataBackend.redisCmd(REDIS_LIMITS.DB, 'HGET', [batchLimitsKey, 'timeout'], function (err, timeoutLimit) {
        if (err) {
            return callback(err);
        }

        // per-user Redis limit takes precedence when present and numeric
        if (timeoutLimit !== null && Number.isFinite(+timeoutLimit)) {
            timeout = +timeoutLimit;
        }

        return callback(null, timeout);
    });
};

/**
 * Executes one query of the job, records DONE/FAILED, persists the job
 * and re-enqueues it when more queries remain.
 */
JobRunner.prototype._run = function (job, query, timeout, profiler, callback) {
    var self = this;

    const dbparams = {
        pass: job.data.pass,
        user: job.data.dbuser,
        dbname: job.data.dbname,
        port: job.data.port,
        host: job.data.host
    };

    self.queryRunner.run(job.data.job_id, query, job.data.user, timeout, dbparams, function (err /*, result */) {
        if (err) {
            if (!err.code) {
                return callback(err);
            }
            // if query has been cancelled then it's going to get the current
            // job status saved by query_canceller
            if (cancelledByUser(err)) {
                return self.jobService.get(job.data.job_id, callback);
            }
        }

        try {
            if (err) {
                profiler.done('failed');
                job.setStatus(jobStatus.FAILED, err.message);
            } else {
                profiler.done('success');
                job.setStatus(jobStatus.DONE);
            }
        } catch (err) {
            return callback(err);
        }

        self.jobService.save(job, function (err, job) {
            if (err) {
                return callback(err);
            }

            profiler.done('done');
            profiler.end();
            profiler.sendStats();

            if (!job.hasNextQuery()) {
                return callback(null, job);
            }

            // multi-query job: put it at the front of the user's queue
            self.jobQueue.enqueueFirst(job.data.user, job.data.job_id, function (err) {
                if (err) {
                    return callback(err);
                }
                callback(null, job);
            });
        });
    });
};

// true when the PostgreSQL error is a user-requested query cancellation
function cancelledByUser (err) {
    return errorCodes[err.code.toString()] === 'query_canceled' && err.message.match(/user.*request/);
}

module.exports = JobRunner;
package wycs.io; import java.io.*; import java.util.List; import wycs.syntax.WyalFile; public class WyalFileReader { private final String filename; private final InputStream input; public WyalFileReader(String filename, InputStream input) { this.filename = filename; this.input = input; } public WyalFile read() throws IOException { WyalFileLexer lexer = new WyalFileLexer(filename,input); List<WyalFileLexer.Token> tokens; tokens = lexer.scan(); WyalFileParser parser = new WyalFileParser(filename, tokens); return parser.read(); } }
<?php namespace Usuario\Controller; use Estrutura\Controller\<API key>; use Estrutura\Helpers\Cript; use Estrutura\Helpers\Data; use Zend\Filter\File\Encrypt; use Zend\View\Model\ViewModel; use Estrutura\Service\HtmlHelper; class UsuarioController extends <API key> { /** * @var \Usuario\Service\Usuario */ protected $service; /** * @var \Usuario\Form\Usuario */ protected $form; protected $camposPendencia = [ 'nm_funcao', ]; public function __construct() { parent::init(); } public function indexAction() { return parent::index($this->service, $this->form); } public function <API key>() { $filter = $this->getFilterPage(); $camposFilter = [ '0' => NULL, '1' => [ 'filter' => "perfil.nm_perfil LIKE ?", ], '2' => [ 'filter' => "usuario.nm_usuario LIKE ?", ], '3' =>[ 'filter' => "usuario.nm_funcao LIKE ?", ], '4' => NULL, ]; $paginator = $this->service->getUsuarioPaginator($filter, $camposFilter); $paginator->setItemCountPerPage($paginator->getTotalItemCount()); $countPerPage = $this->getCountPerPage( current(\Estrutura\Helpers\Pagination::getCountPerPage($paginator->getTotalItemCount())) ); $paginator->setItemCountPerPage($this->getCountPerPage( current(\Estrutura\Helpers\Pagination::getCountPerPage($paginator->getTotalItemCount())) ))-><API key>($this->getCurrentPage()); $viewModel = new ViewModel([ 'service' => $this->service, 'form' => $this->form, 'paginator' => $paginator, 'filter' => $filter, 'countPerPage' => $countPerPage, 'camposFilter' => $camposFilter, 'controller' => $this->params('controller'), 'atributos' => array() ]); return $viewModel->setTerminal(TRUE); } public function desativarAction() { try { $controller = $this->params('controller'); $id = $this->params()->fromRoute('id'); // From RouteMatch $service = $this->service; if (isset($id) && $id) { $post['id'] = Cript::dec($id); $post['id_situacao_usuario'] = $this->getConfigList()['<API key>']; } $service->exchangeArray($post); $this->addSuccessMessage('Registro desativado com sucesso!'); 
$this->redirect()->toRoute('navegacao', array('controller' => $controller, 'action' => 'index')); $service->salvar(); return true; } catch (\Exception $e) { $this->setPost($post); $this->addErrorMessage($e->getMessage()); $this->redirect()->toRoute('navegacao', array('controller' => $controller, 'action' => 'cadastro')); return false; } } /** * * @return boolean */ public function gravarAction() { $form = new \Usuario\Form\UsuarioForm(); /* @var $emailService \Email\Service\EmailService */ $emailService = $this->getServiceLocator()->get('\Email\Service\EmailService'); $emailService->setEmEmail(trim($this->getRequest()->getPost()->get('em_email'))); if ($emailService->filtrarObjeto()->count()) { $this->addErrorMessage('Email já cadastrado. Faça seu login.'); $this->redirect()->toRoute('cadastro', array('id' => $this->getRequest()->getPost()->get('id_usuario_pai'))); return FALSE; } //Verifica tamanho da senha if (strlen(trim($this->getRequest()->getPost()->get('pw_senha'))) < 8) { $this->addErrorMessage('Senha deve ter no mínimo 8 caracteres.'); $this->redirect()->toRoute('cadastro', array('id' => $this->getRequest()->getPost()->get('id_usuario_pai'))); return FALSE; } if (strcasecmp($this->getRequest()->getPost()->get('pw_senha'), $this->getRequest()->getPost()->get('pw_senha_confirm')) != 0) { $this->addErrorMessage('Senhas não correspondem.'); $this->redirect()->toRoute('cadastro', array('id' => $this->getRequest()->getPost()->get('id_usuario_pai'))); return FALSE; } #Alysson - Realiza tratamento nos dados do telefone para Atribuir ao POST os parametros abaixo. 
$this->getRequest()->getPost()->set('nr_ddd_telefone', \Estrutura\Helpers\Telefone::getDDD($this->getRequest()->getPost()->get('nr_telefone'))); $this->getRequest()->getPost()->set('nr_telefone', \Estrutura\Helpers\Telefone::getTelefone($this->getRequest()->getPost()->get('nr_telefone'))); $this->getRequest()->getPost()->set('id_tipo_telefone', $this->getConfigList()['<API key>']); $this->getRequest()->getPost()->set('id_situacao', $this->getConfigList()['situacao_ativo']); $resultTelefone = parent::gravar( $this->getServiceLocator()->get('\Telefone\Service\TelefoneService'), new \Telefone\Form\TelefoneForm() ); #Se o Telefone foi Inserido com sucesso if ($resultTelefone) { $resultEmail = parent::gravar( $this->getServiceLocator()->get('\Email\Service\EmailService'), new \Email\Form\EmailForm() ); #Se o Email foi Inserido com sucesso if ($resultEmail) { $this->getRequest()->getPost()->set('nm_usuario', $this->getRequest()->getPost()->get('nm_usuario')); $this->getRequest()->getPost()->set('id_sexo', $this->getRequest()->getPost()->get('id_sexo')); $this->getRequest()->getPost()->set('id_perfil', $this->getRequest()->getPost()->get('id_perfil')); $this->getRequest()->getPost()->set('id_situacao_usuario', $this->getConfigList()['<API key>']); $this->getRequest()->getPost()->set('id_email', $resultEmail); #id_email inserido anteriormente $this->getRequest()->getPost()->set('id_telefone', $resultTelefone); #id_telefone inserido anteriormente $resultUsuario = parent::gravar( $this->getServiceLocator()->get('\Usuario\Service\UsuarioService'), new \Usuario\Form\UsuarioForm() ); if ($resultUsuario) { $this->getRequest()->getPost()->set('id_usuario', $resultUsuario); $this->getRequest()->getPost()->set('dt_registro', (date('d') >= 29 ? date('Y-m-' . 28 . 
' H:m:s') : date('Y-m-d H:m:s'))); $this->getRequest()->getPost()->set('id_perfil', $this->getRequest()->getPost()->get('id_perfil')); $this->getRequest()->getPost()->set('pw_senha', md5($this->getRequest()->getPost()->get('pw_senha'))); $this->getRequest()->getPost()->set('id_situacao', $this->getConfigList()['situacao_inativo']); $resultLogin = parent::gravar( $this->getServiceLocator()->get('\Login\Service\LoginService'), new \Login\Form\LoginForm() ); #Se cadastro realizado com sucesso, dispara um email para o usuario if ($resultLogin) { #$contaEmail = 'no-reply';# // $message = new \Zend\Mail\Message(); // $message->addFrom($contaEmail . '@hepta.com.br', 'Hepta Tecnologia') // ->addTo(trim($this->getRequest()->getPost()->get('em_email'))) #Envia para o Email que cadastrou // ->addBcc('alysson.vicuna@gmail.com') // $applicationService = new \Application\Service\ApplicationService(); // $transport = $applicationService->getSmtpTranport($contaEmail); // $htmlMessage = $applicationService->tratarModelo( // 'BASE_URL' => BASE_URL, // 'nomeUsuario' => trim($this->getRequest()->getPost()->get('nm_usuario')), // 'txIdentificacao' => base64_encode(\Estrutura\Helpers\Bcrypt::hash($resultLogin)), // 'email' => trim($this->getRequest()->getPost()->get('em_email')), // ], $applicationService->getModelo('cadastro')); // $html = new \Zend\Mime\Part($htmlMessage); // $html->type = "text/html"; // $body = new \Zend\Mime\Message(); // $body->addPart($html); // $message->setBody($body); // $transport->send($message); $this->addSuccessMessage('Cadastro realizado com sucesso!'); // $this->getServiceLocator()->get('Auth\Table\MyAuth')->forgetMe(); // $this->getServiceLocator()->get('AuthService')->clearIdentity(); } } } } #$this->redirect()->toRoute('navegacao', array('controller' => 'auth', 'action' => 'login')); $this->redirect()->toRoute('navegacao', array('controller' => 'usuario-usuario', 'action' => 'index')); } public function cadastroAction() { $usuarioService = new 
// NOTE(review): this chunk begins mid-statement — the preceding (unseen) line ends with
// `$usuarioService = new`, which the next line completes.
\Usuario\Service\UsuarioService();
$form = new \Usuario\Form\UsuarioForm();
// Accept the id from the route or, failing that, from POST; ids travel encrypted (Cript).
$id_criptografado = $this->params('id') ? $this->params('id') : $this->getRequest()->getPost()->get('id');
$id = Cript::dec($id_criptografado);
#$usuario = $usuarioService->getUsuario($id);
// NOTE(review): debug leftover — the decrypted $id is ignored and user 1 is always loaded;
// the commented-out call above looks like the intended one. Confirm before release.
$usuario = $usuarioService->getUsuario(1);
#print_r($usuario);
#die;
if ($usuario) {
    return parent::cadastro($usuarioService, $form, [
        'id_usuario' => $id,
        'usuario' => $usuario
    ]);
} else {
    // "Invalid sponsor code" warning, then back to the login screen.
    $this->flashmessenger()->addWarningMessage('Código do patrocinador inválido.');
    $this->redirect()->toRoute('navegacao', array('controller' => 'auth', 'action' => 'login'));
    return FALSE;
}
}

// Renders the logged-in user's personal-data panel (layout-less / terminal view).
// Flags pending required profile fields and pending withdrawal requests for the template.
public function dadosPessoaisAction()
{
    $auth = $this->getServiceLocator()->get('AuthService')->getStorage()->read();
    $usuarioService = new \Usuario\Service\UsuarioService();
    $usuario = $usuarioService->getUsuario($auth->id_usuario);
    // TRUE when any required profile field (listed in $this->camposPendencia) is empty.
    // NOTE(review): several identifiers below were redacted to `<API key>` in this snippet;
    // they may not all denote the same variable — verify against the original file.
    $<API key> = FALSE;
    foreach ($usuario as $key => $value) {
        if (in_array($key, $this->camposPendencia) && !$value) {
            $<API key> = TRUE;
            break;
        }
    }
    /* @var $pagamentoService \Pagamento\Service\PagamentoService */
    $pagamentoService = $this->getServiceLocator()->get('\Pagamento\Service\PagamentoService');
    // Check whether there are pending withdrawals for this user.
    $<API key> = $pagamentoService-><API key>($auth);
    $<API key> = FALSE;
    if ($<API key>->count()) {
        $<API key> = TRUE;
    }
    $view = new ViewModel([
        'controller' => $this->params('controller'),
        'usuario' => $usuario,
        '<API key>' => $<API key>,
        '<API key>' => $<API key>,
    ]);
    // Terminal view: rendered without the site layout.
    return $view->setTerminal(TRUE);
}

// "Update personal data" form screen (action name redacted in this snippet).
// Admin profile may edit any user; everyone else only edits their own record.
public function <API key>()
{
    $id = $this->params()->fromRoute('id'); // From RouteMatch
    $id = Cript::dec($id); // From RouteMatch)
    $auth = $this->getServiceLocator()->get('AuthService')->getStorage()->read();
    $usuarioService = new \Usuario\Service\UsuarioService();
    if($auth->id_perfil == <API key>){
        $usuario = $usuarioService->getUsuario($id);
    } else {
        $usuario = $usuarioService->getUsuario($auth->id_usuario);
    }
    $usuario['id'] = $usuario['id_usuario'];
    // The form expects DDD + number concatenated into a single phone field.
    $usuario['nr_telefone'] = $usuario['nr_ddd_telefone'] . $usuario['nr_telefone'];
    $form = new \Usuario\Form\AtualizaUsuarioForm();
    $form->setData($usuario);
    // Re-populate with a previously stashed POST (e.g. after a failed validation round-trip).
    $post = $this->getPost();
    if (!empty($post)) {
        $form->setData($post);
    }
    return new ViewModel([
        'configList' => $this->getConfigList(),
        'form' => $form,
        'controller' => $this->params('controller'),
        'usuario' => $usuario,
        'auth' => $auth,
    ]);
}

// Delegates deletion to the base controller.
public function excluirAction()
{
    return parent::excluir($this->service, $this->form);
}

// POST handler that persists the "update personal data" form (action name redacted).
// Validates the user form and the phone sub-form, then saves phone and user entities.
public function <API key>()
{
    $controller = $this->params('controller');
    $request = $this->getRequest();
    if (!$request->isPost()) {
        throw new \Exception('Dados Inválidos');
    }
    $post = \Estrutura\Helpers\Utilities::arrayMapArray('trim', $request->getPost()->toArray());
    try {
        $auth = $this->getServiceLocator()->get('AuthService')->getStorage()->read();
        $post['id'] = Cript::dec($post['id']);
        $id = $post['id'];
        $usuarioService = new \Usuario\Service\UsuarioService();
        if($auth->id_perfil == <API key>){
            $usuarioEntity = $usuarioService->buscar($id);
        } else {
            $usuarioEntity = $usuarioService->buscar($auth->id_usuario);
        }
        $form = new \Usuario\Form\AtualizaUsuarioForm();
        $form->setData($post);
        if (!$form->isValid()) {
            $this->addValidateMessages($form);
            $this->setPost($post);
            // NOTE(review): $id is already decrypted here — passing Cript::dec($id) to the
            // redirect looks wrong (Cript::enc($id) is used elsewhere). Confirm.
            $this->redirect()->toRoute('navegacao', array('controller' => $controller, 'action' => 'atualizar-dados', 'id' => Cript::dec($id)));
            return FALSE;
        }
        // Update telephone
        $formTelefone = new \Telefone\Form\TelefoneForm();
        $formTelefone->setData([
            'id' => $usuarioEntity->getIdTelefone(),
            'nr_ddd_telefone' => \Estrutura\Helpers\Telefone::getDDD($this->getRequest()->getPost()->get('nr_telefone')),
            'nr_telefone' => \Estrutura\Helpers\Telefone::getTelefone($this->getRequest()->getPost()->get('nr_telefone')),
            'id_tipo_telefone' => $this->getConfigList()['<API key>'],
            'id_situacao' => $this->getConfigList()['situacao_ativo'],
        ]);
        if (!$formTelefone->isValid()) {
            $this->addValidateMessages($formTelefone);
            $this->setPost($post);
            $this->redirect()->toRoute('navegacao', array('controller' => $controller, 'action' => 'atualizar-dados', 'id' => Cript::dec($id)));
            return FALSE;
        }
        $telefoneService = $this->getServiceLocator()->get('\Telefone\Service\TelefoneService');
        $telefoneService->exchangeArray($formTelefone->getData());
        $telefoneService->salvar();
        // Update user data
        $usuarioEntity->setNmUsuario($this->getRequest()->getPost()->get('nm_usuario'));
        $usuarioEntity->setNmFuncao($this->getRequest()->getPost()->get('nm_funcao'));
        $usuarioEntity->setIdSexo($this->getRequest()->getPost()->get('id_sexo'));
        $usuarioEntity-><API key>($this->getRequest()->getPost()->get('id_situacao_usuario'));
        $usuarioEntity->salvar();
        $this->flashmessenger()->addSuccessMessage('Dados atualizado com sucesso.');
        $this->redirect()->toRoute('navegacao', array('controller' => 'usuario-usuario', 'action' => 'index'));
        return TRUE;
    } catch (\Exception $e) {
        // On any failure: stash the POST for re-display and bounce back to the form.
        $this->setPost($post);
        $this->addErrorMessage($e->getMessage());
        $this->redirect()->toRoute('navegacao', array('controller' => $controller, 'action' => 'atualizar-dados', 'id' => Cript::dec($id)));
        return FALSE;
    }
}

// "Change password" form screen. Admin may target any user; others only themselves.
public function alterarSenhaAction()
{
    $auth = $this->getServiceLocator()->get('AuthService')->getStorage()->read();
    $id = $this->params()->fromRoute('id'); // From RouteMatch
    $id = Cript::dec($id); // From RouteMatch)
    $usuarioService = new \Usuario\Service\UsuarioService();
    if($auth->id_perfil == <API key>){
        $usuarioEntity = $usuarioService->buscar($id);
    } else {
        $usuarioEntity = $usuarioService->buscar($auth->id_usuario);
    }
    return new ViewModel([
        'configList' => $this->getConfigList(),
        'form' => new \Auth\Form\RedefinirSenhaForm(),
        'controller' => $this->params('controller'),
        'usuarioEntity' => $usuarioEntity,
        'auth' => $auth,
        'id_usuario' => $id, // Pass id_usuario through to the view
    ]);
}

// POST handler that changes a user's password (action name redacted).
// NOTE(review): passwords are hashed with unsalted md5() throughout — weak; consider
// migrating to password_hash()/password_verify().
public function <API key>()
{
    $auth = $this->getServiceLocator()->get('AuthService')->getStorage()->read();
    $request = $this->getRequest();
    if (!$request->isPost()) {
        throw new \Exception('Dados Inválidos');
    }
    $post= \Estrutura\Helpers\Utilities::arrayMapArray('trim', $request->getPost()->toArray());
    $id_usuario = Cript::dec($post['id']);
    $post['id'] = $id_usuario; // Holds the already-decrypted ID.
    $loginService = new \Login\Service\LoginService();
    $loginService->setIdUsuario($id_usuario);
    $loginEntity = $loginService->filtrarObjeto()->current();
    if (!$loginEntity) {
        $this->addErrorMessage('Usuario inválido.');
        $this->redirect()->toRoute('navegacao', ['controller' => 'usuario-usuario', 'action' => 'alterar-senha', 'id'=>Cript::enc($id_usuario)]);
        return FALSE;
    }
    // Check password length (minimum 8 characters)
    if (strlen(trim($this->getRequest()->getPost()->get('pw_nova_senha'))) < 8) {
        $this->addErrorMessage('Senha deve ter no mínimo 8 caracteres.');
        $this->redirect()->toRoute('navegacao', ['controller' => 'usuario-usuario', 'action' => 'alterar-senha', 'id'=>Cript::enc($id_usuario)]);
        return FALSE;
    }
    # only perform this validation when the user is not the Administrator
    if($auth->id_perfil != <API key>) {
        // Current-password check (md5 compare; case-insensitive via strcasecmp).
        if (strcasecmp(md5($this->getRequest()->getPost()->get('pw_senha')), $loginEntity->getPwSenha()) != 0) {
            $this->addErrorMessage('Senha atual inválida.');
            $this->redirect()->toRoute('navegacao', ['controller' => 'usuario-usuario', 'action' => 'alterar-senha', 'id' => Cript::enc($id_usuario)]);
            return FALSE;
        }
    }
    // New password and its confirmation must match.
    if (strcasecmp($this->getRequest()->getPost()->get('<API key>'), $this->getRequest()->getPost()->get('pw_nova_senha')) != 0) {
        $this->addErrorMessage('Senhas não correspondem.');
        $this->redirect()->toRoute('navegacao', ['controller' => 'usuario-usuario', 'action' => 'alterar-senha', 'id'=>Cript::enc($id_usuario)]);
        return FALSE;
    }
    // New password must differ from the current one.
    if (strcasecmp(md5($this->getRequest()->getPost()->get('pw_senha')), md5($this->getRequest()->getPost()->get('pw_nova_senha'))) == 0) {
        $this->addErrorMessage('Nova senha igual a senha atual.');
        $this->redirect()->toRoute('navegacao', ['controller' => 'usuario-usuario', 'action' => 'alterar-senha',
        'id'=>Cript::enc($id_usuario)]);
        return FALSE;
    }
    // Set the new password
    $loginEntity->setPwSenha(md5(trim($this->getRequest()->getPost()->get('pw_nova_senha'))));
    $loginEntity->salvar();
    $this->addSuccessMessage('Senha alterada com sucesso.');
    $this->redirect()->toRoute('navegacao', ['controller' => 'usuario-usuario', 'action' => 'atualizar-dados', 'id'=>Cript::enc($id_usuario)]);
    return FALSE;
}
}
# Native client if( APPLE ) add_definitions( -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_ANDROID=0 -DNACL_OSX=1 ) elseif( LINUX ) add_definitions( -DNACL_WINDOWS=0 -DNACL_LINUX=1 -DNACL_ANDROID=0 -DNACL_OSX=0 ) elseif( WIN32 ) add_definitions( -DNACL_WINDOWS=1 -DNACL_LINUX=0 -DNACL_ANDROID=0 -DNACL_OSX=0 ) endif() if( ARCH STREQUAL "x86" OR ARCH STREQUAL "x86_64" ) add_definitions( -DNACL_BUILD_ARCH=x86 ) else() add_definitions( -DNACL_BUILD_ARCH=${ARCH} ) endif() if( CMAKE_SIZEOF_VOID_P EQUAL 8 ) add_definitions( -DNACL_BUILD_SUBARCH=64 ) else() add_definitions( -DNACL_BUILD_SUBARCH=32 ) endif() include_directories( ${LIB_DIR}/nacl )
title: Imposter Syndrome

## Imposter Syndrome

Imposter Syndrome is a feeling of _being a fraud_ or _not being good enough_ to get the job done. It is common among software engineers, developers and designers working in tech companies, especially those not coming from a traditional tech background. People suffering from imposter syndrome have a sense of inadequacy and insecurity concerning their ability to make a contribution at the workplace. In reality, they may be perfectly capable of contributing, and do contribute successfully, to their tasks.

Imposter Syndrome is very common among software engineers or developers who are new to the role and lack the experience of some of their co-workers. Throughout various industries, imposter syndrome is very common with highly successful people. Studies have found that two out of five 'successful' people consider themselves frauds/imposters, while other studies have shown that up to 70% of people experience imposter syndrome at one point in time or another.

If you suffer from imposter syndrome you may have a sense of inadequacy or insecurity about your ability to contribute. In reality, you may be perfectly capable and do contribute to your tasks. These thoughts are fairly common if you struggle with imposter syndrome:

* "What am I doing here?"
* "I am not a developer. I am fooling myself and other people." (feels fake)
* "My colleagues are much smarter than me; I could never match up to them." (undermines own achievements)
* "My coworkers have told me I've achieved a lot, but I feel like it's never enough." (discounts praise)
* "I have no idea how I got through the interview process." (feels only luck dictates results)
* "I will be ridiculed and fired when people realize I am not as smart as I portrayed myself as being." (fears failure)
* "My IQ is not high enough to work here." (doubts inherent ability)
* "I need more training to feel like I deserve to be here."
Imposter Syndrome may prevent you from reaching out when you need help, thus slowing your progression. Please do reach out on the forum or the chatroom!

# Overcoming Imposter Syndrome

The first step in overcoming imposter syndrome is to learn the difference between a growth mindset and a fixed mindset. People with a growth mindset believe that their skills and talents can be developed through hard work, practice, conversations with others, etc. Alternatively, those with a fixed mindset tend to believe their talents are set abilities that they were born with. When you change your outlook to a growth mindset, you allow yourself to take on challenges that may seem out of reach. If you believe that your skills can be developed over time, it will matter less if you don't know how to do something (as long as you are willing to put in the work).

[Many talented, successful people deal with Imposter Syndrome](https:

More great resources to help you learn more about Imposter Syndrome, and some tips to tackle it:

* [American Psychological Association - Feel Like a Fraud?](http:
* [TED Talks - Fighting Imposter Syndrome](https:
* [Quartz - Is imposter syndrome a sign of greatness?](https://qz.com/606727/<API key>/)
* [HTTP203 - Imposter Syndrome](https:
* [Ada Initiative - Is Impostor Syndrome keeping women out of open technology and culture?](https://adainitiative.org/2013/08/28/<API key>/)
* [DEV - Overcoming Impostor Syndrome](https://dev.to/kathryngrayson/<API key>)
* [FastCompany - Types of imposter syndrome and how to beat them](https:
* [Startup Bros - 21 Proven Ways to Overcome Imposter Syndrome](https://startupbros.com/<API key>/)
* [NY Times - Learning How to Deal with the Imposter Syndrome](https:
* [The Every Girl - 5 Books to Read If You're Struggling With Imposter Syndrome](http://theeverygirl.com/<API key>/)
* [Wikipedia - Imposter Syndrome](https://en.wikipedia.org/wiki/Impostor_syndrome)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Upload the contents of a directory to S3 and write out a manifest.

Command-line front end for hashsync: connects to the given bucket (unless
--no-upload), uploads the directory, and saves the resulting manifest to a
file (gzip-compressed by default) or to stdout.
"""
from hashsync.connection import connect
from hashsync.transfer import upload_directory

import logging
log = logging.getLogger(__name__)


def main():
    import argparse
    import sys
    import gzip

    parser = argparse.ArgumentParser()
    # TODO: These aren't required if no-upload is set
    parser.add_argument("-r", "--region", dest="region", required=True)
    parser.add_argument("-b", "--bucket", dest="bucket_name", required=True)
    parser.add_argument("-q", "--quiet", dest="loglevel", action="store_const", const=logging.WARN, default=logging.INFO)
    parser.add_argument("-v", "--verbose", dest="loglevel", action="store_const", const=logging.DEBUG)
    parser.add_argument("-j", "--jobs", dest="jobs", type=int, help="how many simultaneous uploads to do", default=8)
    # Fixed typo in help text: "manifet" -> "manifest".
    parser.add_argument("-o", "--output", dest="output", help="where to output manifest, use '-' for stdout", default="manifest.gz")
    parser.add_argument("-z", "--compress-manifest", dest="compress_manifest",
                        help="compress manifest output (default if outputting to a file)",
                        action="store_true", default=None)
    parser.add_argument("--<API key>", dest="compress_manifest",
                        help="don't compress manifest output (default if outputting to stdout)",
                        action="store_false")
    parser.add_argument("--no-upload", dest="dryrun", action="store_true", default=False)
    parser.add_argument("--report-dupes", dest="report_dupes", action="store_true", default=False,
                        help="report on duplicate files")
    parser.add_argument("dirname", help="directory to upload")

    args = parser.parse_args()

    logging.basicConfig(level=args.loglevel, format="%(asctime)s - %(message)s")
    # Make boto shut up
    # TODO: Add -v -v support to set this to DEBUG?
    logging.getLogger('boto').setLevel(logging.INFO)

    if not args.dryrun:
        connect(args.region, args.bucket_name)

    manifest = upload_directory(args.dirname, args.jobs, dryrun=args.dryrun)

    if args.output == '-':
        output_file = sys.stdout
    else:
        output_file = open(args.output, 'wb')
        # Enable compression by default if we're writing out to a file
        if args.compress_manifest is None:
            args.compress_manifest = True

    # BUGFIX: the gzip stream (and the output file) were never closed, so the
    # gzip trailer was not flushed and the manifest could be written truncated.
    try:
        if args.compress_manifest:
            gz = gzip.GzipFile(fileobj=output_file, mode='wb')
            try:
                manifest.save(gz)
            finally:
                gz.close()  # flushes the gzip trailer
        else:
            manifest.save(output_file)
    finally:
        if output_file is not sys.stdout:
            output_file.close()

    if args.report_dupes:
        manifest.report_dupes()


if __name__ == '__main__':
    main()
// Settings administration screen: lists per-module settings, supports
// inline editing (including FILE-type settings via upload) and free-text
// search across setting names/descriptions.
angular.module('os.administrative.setting.list', ['os.administrative.models'])
  .controller('SettingsListCtrl', function(
    $rootScope, $scope, $state, $stateParams, $sce, $filter,
    Setting, SettingUtil, Alerts) {

    // Initialise edit state and file-upload context; redirect to the first
    // module when no module name is present in the route.
    function init() {
      $scope.isEdit = false;
      $scope.fileCtx = {
        ctrl: {},
        uploadUrl: $sce.trustAsResourceUrl(Setting.getFileUploadUrl())
      };

      var moduleName = $stateParams.moduleName;
      if (!moduleName) {
        $state.go('settings-list', {moduleName: $scope.ctx.dbModuleNames[0]});
        return;
      }

      $scope.selectedModule = $scope.ctx.filteredModules[moduleName];
    }

    // Case-insensitive substring match against a setting's name or description.
    function filterSettings(settings, searchText) {
      searchText = searchText.toLowerCase();
      return $filter('filter')(settings,
        function(setting) {
          return (setting.$$osPropName.toLowerCase().indexOf(searchText) !== -1 ||
                  setting.$$osPropDesc.toLowerCase().indexOf(searchText) !== -1);
        }
      );
    }

    // Persist the edited setting; on success, mirror the server response into
    // the list entry, bust the setting cache and — for locale-affecting common
    // settings — reload the locale.
    function saveSetting() {
      Setting.updateSetting($scope.setting).then(
        function(resp) {
          Alerts.success('settings.success_message');
          angular.extend($scope.existingSetting, resp);
          $scope.isEdit = false;
          SettingUtil.clearSetting(resp.module, resp.name, resp);
          if (resp.module == 'common' &&
              ['date_format', 'de_date_format', 'time_format'].indexOf(resp.name) > -1) {
            $rootScope.loadLocale();
          }
        }
      );
    }

    // Enter edit mode on a copy of the setting; value is cleared so the user
    // must type a new one.
    $scope.updateSetting = function(setting) {
      $scope.isEdit = true;
      $scope.existingSetting = setting;
      $scope.setting = angular.copy(setting);
      $scope.setting.value = '';
    }

    $scope.cancel = function() {
      $scope.isEdit = false;
    }

    // Validate and save. FILE settings upload first, then store the returned
    // filename as the setting value; other types must actually change.
    $scope.submit = function() {
      var type = $scope.setting.type;
      if (type != 'FILE' && $scope.existingSetting.value == $scope.setting.value) {
        Alerts.error('settings.invalid_value');
        return;
      }

      if (type == 'FILE' && $scope.fileCtx.ctrl.isFileSelected()) {
        $scope.fileCtx.ctrl.submit().then(
          function(filename) {
            $scope.setting.value = filename;
            saveSetting();
          }
        );
      } else {
        saveSetting();
      }
    }

    // Search across all modules: matches are aggregated into a synthetic
    // "all" module plus per-module filtered lists, then the view navigates
    // to the first filtered module.
    $scope.searchSetting = function(searchText) {
      var filteredModules = $scope.ctx.dbModules;
      var filteredModuleNames = $scope.ctx.dbModuleNames;
      if
      (searchText) {
        var allSettings = {name: 'all', settings: []};
        filteredModules = {'all': allSettings};
        filteredModuleNames = ['all'];
        angular.forEach($scope.ctx.dbModules, function(dbModule) {
          var filteredSettings = filterSettings(dbModule.settings, searchText);
          if (filteredSettings.length > 0) {
            allSettings.settings = allSettings.settings.concat(filteredSettings);
            filteredModules[dbModule.name] = {name: dbModule.name, settings: filteredSettings};
            filteredModuleNames.push(dbModule.name);
          }
        });
      }

      $scope.ctx.filteredModules = filteredModules;
      $scope.ctx.filteredModuleNames = filteredModuleNames;

      var selectedModuleName = $scope.ctx.filteredModuleNames[0];
      $state.go('settings-list', {moduleName: selectedModuleName});
      $scope.selectedModule = $scope.ctx.filteredModules[selectedModuleName];
    }

    init();
  });
<?php

namespace app\models;

use Yii;
use yii\behaviors\TimestampBehavior;
use yii\db\ActiveRecord;
use yii\db\Expression;
use app\rbac\models\AuthAssignment;
use app\rbac\models\AuthItem;

/**
 * ActiveRecord form model over the `user` table, used for cashier accounts.
 * NOTE(review): several attribute names were redacted to `<API key>` in this
 * snippet (likely password_reset_token / account_activation_token) — verify
 * against the original schema.
 */
class CashierForm extends \yii\db\ActiveRecord
{
    /**
     * @return string the backing table name
     */
    public static function tableName()
    {
        return 'user';
    }

    /**
     * Validation rules for mass assignment.
     */
    public function rules()
    {
        return [
            [['username', 'email', 'password_hash', 'status', 'auth_key', 'first_name', 'middle_name', 'last_name', 'gender', 'address'], 'required'],
            [['status', 'created_at', 'updated_at', 'phone', 'mobile', 'gender'], 'integer'],
            [['birth_date'], 'safe'],
            [['username', 'email', 'password_hash', '<API key>', '<API key>', 'address', 'notes', 'profile_image'], 'string', 'max' => 255],
            [['auth_key'], 'string', 'max' => 32],
            [['first_name', 'middle_name', 'last_name'], 'string', 'max' => 45],
            [['username'], 'unique'],
            [['email'], 'unique'],
            [['<API key>'], 'unique']
        ];
    }

    /**
     * Human-readable labels for form rendering.
     */
    public function attributeLabels()
    {
        return [
            'id' => 'ID',
            'username' => 'Username',
            'email' => 'Email',
            'password_hash' => 'Password Hash',
            'status' => 'Status',
            'auth_key' => 'Auth Key',
            '<API key>' => 'Password Reset Token',
            '<API key>' => 'Account Activation Token',
            'created_at' => 'Created At',
            'updated_at' => 'Updated At',
            'first_name' => 'First Name',
            'middle_name' => 'Middle Name',
            'last_name' => 'Last Name',
            'gender' => 'Gender',
            'birth_date' => 'Birth Date',
            'address' => 'Address',
            'phone' => 'Phone',
            'mobile' => 'Mobile',
            'notes' => 'Notes',
            'profile_image' => 'Profile Image',
        ];
    }

    /**
     * Auto-fill created_at/updated_at timestamps on insert/update.
     */
    public function behaviors()
    {
        return [
            'timestamp' => [
                'class' => 'yii\behaviors\TimestampBehavior',
                'attributes' => [
                    ActiveRecord::EVENT_BEFORE_INSERT => ['created_at', 'updated_at'],
                    ActiveRecord::EVENT_BEFORE_UPDATE => ['updated_at'],
                ],
            ],
        ];
    }

    /**
     * All RBAC role names keyed by themselves (for dropdowns).
     */
    public static function getRolesList()
    {
        $roles = [];
        foreach (AuthItem::getRoles() as $item_name) {
            $roles[$item_name->name] = $item_name->name;
        }
        return $roles;
    }

    /**
     * Returns the 'cashier' role name.
     * NOTE(review): if no role named 'cashier' exists, $role is never assigned
     * and this emits an undefined-variable notice / returns null.
     */
    public static function getCashierRole()
    {
        foreach (AuthItem::getRoles() as $item_name) {
            if($item_name->name === 'cashier'){
                $role = $item_name->name;
            }
        }
        return $role;
    }

    public function getRole()
    {
        // User has_one Role via Role.user_id -> id
        return $this->hasOne(Role::className(), ['user_id' => 'id']);
    }

    /**
     * Convenience accessor for the related role's item name.
     */
    public function getRoleName()
    {
        return $this->role->item_name;
    }
}
# Juliet test-suite makefile for CWE685 (Linux build).
# Builds all non-Windows/non-wchar_t test cases into one binary ($(TARGET)),
# or per-testcase binaries via the `individuals` target.
CC=/usr/bin/gcc
CPP=/usr/bin/g++
DEBUG=-g
CFLAGS=-c
LFLAGS=-lpthread -lm
LD=ld
INCLUDE_MAIN=-DINCLUDEMAIN
INCLUDES=-I ../../testcasesupport
MAIN=main_linux.cpp
MAIN_OBJECT=$(MAIN:.cpp=.o)
C_SUPPORT_PATH=../../testcasesupport/
C_SUPPORT_FILES=$(C_SUPPORT_PATH)io.c $(C_SUPPORT_PATH)std_thread.c
C_SUPPORT_OBJECTS=io.o std_thread.o
# Windows-only and wide-char variants are excluded from the Linux build.
FILTER_OUT=$(wildcard CWE*w32*.c*) $(wildcard CWE*wchar_t*.c*)
# only grab the .c files without "w32" or "wchar_t" in the name
C_SOURCES=$(filter-out $(FILTER_OUT),$(wildcard CWE*.c))
C_OBJECTS=$(C_SOURCES:.c=.o)
# only grab the .cpp files without "w32" or "wchar_t" in the name
CPP_SOURCES=$(filter-out $(FILTER_OUT),$(wildcard CWE*.cpp))
CPP_OBJECTS=$(CPP_SOURCES:.cpp=.o)
# Single-file testcases end in a digit; multi-file ones are lettered (a,b,...).
SIMPLES=$(filter-out $(FILTER_OUT), $(wildcard CWE*0.c*) $(wildcard CWE*1.c*) $(wildcard CWE*2.c*) $(wildcard CWE*3.c*) $(wildcard CWE*4.c*)) \
$(filter-out $(FILTER_OUT), $(wildcard CWE*5.c*) $(wildcard CWE*6.c*) $(wildcard CWE*7.c*) $(wildcard CWE*8.c*) $(wildcard CWE*9.c*))
SIMPLES_C=$(filter-out $(CPP_SOURCES), $(SIMPLES))
SIMPLES_CPP=$(filter-out $(C_SOURCES), $(SIMPLES))
LETTEREDS=$(filter-out $(FILTER_OUT), $(wildcard CWE*a.c*))
LETTEREDS_C=$(subst a.,.,$(filter-out $(CPP_SOURCES), $(LETTEREDS)))
LETTEREDS_CPP=$(subst a.,.,$(filter-out $(C_SOURCES), $(LETTEREDS)))
GOOD1S=$(filter-out $(FILTER_OUT), $(wildcard CWE*_good1.cpp))
BADS=$(subst _good1.,_bad.,$(GOOD1S))
INDIVIDUALS_C=$(addsuffix .out, $(sort $(subst .c,,$(SIMPLES_C) $(LETTEREDS_C))))
INDIVIDUALS_CPP=$(addsuffix .out, $(sort $(subst .cpp,,$(SIMPLES_CPP) $(LETTEREDS_CPP) $(BADS) $(GOOD1S))))
OBJECTS=$(MAIN_OBJECT) $(C_OBJECTS) $(CPP_OBJECTS) $(C_SUPPORT_OBJECTS)
# TARGET is the only line in this file specific to the CWE
TARGET=CWE685

all: $(TARGET)

# Partially-linked object of all testcase objects (relocatable link).
partial.o: $(C_OBJECTS) $(CPP_OBJECTS)
	$(LD) -r $(C_OBJECTS) $(CPP_OBJECTS) -o $@

individuals: $(INDIVIDUALS_C) $(INDIVIDUALS_CPP)

# One executable per testcase; each compiles its own main via INCLUDEMAIN.
$(INDIVIDUALS_C): $(C_SUPPORT_OBJECTS)
	$(CC) $(INCLUDES) $(INCLUDE_MAIN) -o $@ $(wildcard $(subst .out,,$@)*.c) $(C_SUPPORT_OBJECTS) $(LFLAGS)

$(INDIVIDUALS_CPP): $(C_SUPPORT_OBJECTS)
	$(CPP) $(INCLUDES) $(INCLUDE_MAIN) -o $@ $(wildcard $(subst .out,,$@)*.cpp) $(C_SUPPORT_OBJECTS) $(LFLAGS)

$(TARGET) : $(OBJECTS)
	$(CPP) $(LFLAGS) $(OBJECTS) -o $(TARGET)

$(C_OBJECTS) : %.o:%.c
	$(CC) $(CFLAGS) $(INCLUDES) $^ -o $@

$(CPP_OBJECTS) : %.o:%.cpp
	$(CPP) $(CFLAGS) $(INCLUDES) $^ -o $@

$(C_SUPPORT_OBJECTS) : $(C_SUPPORT_FILES)
	$(CC) $(CFLAGS) $(INCLUDES) $(C_SUPPORT_PATH)$(@:.o=.c) -o $@

$(MAIN_OBJECT) : $(MAIN)
	$(CC) $(CFLAGS) $(INCLUDES) $(MAIN) -o $@

clean:
	rm -rf *.o *.out $(TARGET)
<?php

namespace Model;

use Nette,
    Nette\Mail\Message,
    Nette\Utils\Strings;

/**
 * Priest Manager.
 *
 * Builds per-day service calendars (three fixed service slots per day:
 * 07:00, 08:30 and 18:00) backed by PriestRepository.
 */
class PriestManager
{
    /** @var PriestRepository */
    public $priestRepository;

    public function __construct(PriestRepository $repository)
    {
        $this->priestRepository = $repository;
    }

    /**
     * Get count of all items
     * @return int number of rows
     */
    public function getCountAll()
    {
        return $this->priestRepository->countAll();
    }

    /**
     * Find and get item by ID
     * @return Nette\Database\Table\IRow
     */
    public function getByID($id)
    {
        return $this->priestRepository->findBy(array('id' => (int)$id))->fetch();
    }

    /**
     * Find all items
     * @return Nette\Database\Table\Selection
     */
    public function findAll()
    {
        return $this->priestRepository->findAll();
    }

    /**
     * Service calendar for one month starting at $date (as many days as
     * $date's month has).
     * @param string $date
     * @return array keyed by 'Y-m-d'
     */
    public function findByDate($date)
    {
        $from = new \DateTime($date);
        // 't' = number of days in the starting date's month.
        $days = (int) $from->format('t');
        return $this->buildServiceCalendar($from, $days);
    }

    /**
     * Service calendar for the 7 days starting at $date.
     * @param string $date
     * @return array keyed by 'Y-m-d'
     */
    public function findByDateWeek($date)
    {
        return $this->buildServiceCalendar(new \DateTime($date), 7);
    }

    /**
     * Shared implementation for findByDate()/findByDateWeek(): builds an
     * empty per-day skeleton for $days days starting at $from, then fills
     * the three fixed service slots from the repository.
     *
     * (Refactor: the two public methods previously duplicated this logic.)
     *
     * @param \DateTime $from first day of the calendar (not mutated)
     * @param int       $days number of days to generate
     * @return array
     */
    private function buildServiceCalendar(\DateTime $from, $days)
    {
        $cursor = clone $from;
        $services = array();
        for ($i = 0; $i < $days; $i++) {
            $key = $cursor->format('Y-m-d');
            $services[$key] = array(
                'date'   => $key,
                'names1' => '', 'names2' => '', 'names3' => '',
                'id1'    => '', 'id2'    => '', 'id3'    => '',
                // Sundays and Mondays are flagged for the templates.
                'sun'    => $cursor->format('D') === 'Sun',
                'mon'    => $cursor->format('D') === 'Mon',
            );
            $cursor->add(new \DateInterval("P1D"));
        }
        $to = $cursor->sub(new \DateInterval("P1D"));

        $result = $this->priestRepository->findBySql(
            'date BETWEEN ? AND ?',
            array($from->format('Y-m-d'), $to->format('Y-m-d'))
        );

        // Map DB TIME values (DateInterval, hence the %-placeholders) to slots 1-3.
        $slotByTime = array('07:00:00' => 1, '08:30:00' => 2, '18:00:00' => 3);
        foreach ($result as $item) {
            $time = $item->time->format('%H:%I:%S');
            if (!isset($slotByTime[$time])) {
                continue; // times outside the three fixed slots are ignored
            }
            $slot = $slotByTime[$time];
            $dateKey = $item->date->format('Y-m-d');
            $services[$dateKey]['names' . $slot] = $item->names;
            $services[$dateKey]['id' . $slot] = $item->id;
        }

        return $services;
    }

    /**
     * Delete rows by ID
     * @return int number of deleted rows
     */
    public function deleteById($id)
    {
        return $this->priestRepository->findBy(array('id' => (int)$id))->delete();
    }

    /**
     * Save values — updates when a positive 'id' is present, inserts otherwise.
     * @return string ('inserted'/'updated') or FALSE on error
     */
    public function save($values)
    {
        if (isset($values['id']) && ($values['id'] > 0)) {
            $id = $values['id'];
            unset($values['id']);
            $result = $this->priestRepository->findBy(array('id' => (int)$id))->update($values);
            $return = $result > 0 ? 'updated' : FALSE;
        } else {
            $result = $this->priestRepository->insert($values);
            $return = $result ? 'inserted' : FALSE;
        }
        return $return;
    }
}
var spawn = require('child_process').spawn; var settings = require('../../settings'); var omkServerPath = settings.omkServerPath; var omkFormsDir = omkServerPath + '/data/forms'; var xls2xformSh = __dirname + '/../../scripts/xls2xform.sh'; var xls2xformPy = omkServerPath + '/api/odk/pyxform/pyxform/xls2xform.py'; module.exports = function (io, deploymentsStatus, deployName) { return function (req, res, next) { var deployment = deployName || req.body.deployment || req.query.deployment; if (typeof deployment !== 'string') { res.status(400).json({ status: 400, msg: "You must provide a deployment name. This can be in a {deployment: '<name>'} object in a JSON POST or a deployment=<name> query parameter in a GET." }); return; } var <API key> = settings.deploymentsDir + '/' + deployment + '/contents'; var fetchProc = spawn(xls2xformSh, [xls2xformPy, <API key>, omkFormsDir]); fetchProc.stdout.on('data', function (data) { io.emit('deployments/' + deployment, { controller: 'xls2xform', script: 'xls2xform.sh', output: data.toString() }); console.log(data.toString()); }); fetchProc.stdout.on('close', function (code) { if (!deploymentsStatus[deployment]) deploymentsStatus[deployment] = {}; if (code === false) { deploymentsStatus[deployment].xls2xform = 'done'; } else { deploymentsStatus[deployment].xls2xform = 'error'; } io.emit('deployments/' + deployment, { controller: 'xls2xform', close: true, code: code, deployment: deployment, status: deploymentsStatus[deployment] }); console.log(code); }); if (typeof res !== 'undefined') { res.status(201).json({ status: 201, msg: 'Converting xlsx in ' + deployment + ' deployment to XForms XML and putting files in OpenMapKit Server forms directory.', <API key>: <API key>, omkFormsDir: omkFormsDir, deployment: deployment }); } }; };
// iVisDesigner - scripts/editor/actionmanager.js // are permitted provided that the following conditions are met: // and/or other materials provided with the distribution. // may be used to endorse or promote products derived from this software without // ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED // WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. // INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, // BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE // OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED // OF THE POSSIBILITY OF SUCH DAMAGE. var Actions = new IV.ActionManager(); Editor.actions = Actions; Actions.bind("perform", function(actions) { if(IV.allosphere) IV.allosphere.sync.perform(actions); }); IV.on("command:editor.undo", function() { Actions.undo(); Editor.renderer.trigger(); Editor.renderer.render(); }); IV.on("command:editor.redo", function() { Actions.redo(); Editor.renderer.trigger(); Editor.renderer.render(); });
<?php namespace Phalcon\Cache\Frontend; use \Phalcon\Cache\FrontendInterface; use \Phalcon\Cache\Exception; class Output implements FrontendInterface { /** * Buffering * * @var boolean * @access protected */ protected $_buffering = false; /** * Frontend Options * * @var array|null * @access protected */ protected $_frontendOptions; /** * \Phalcon\Cache\Frontend\Output constructor * * @param array|null $frontendOptions * @throws Exception */ public function __construct($frontendOptions = null) { if (is_array($frontendOptions) === false && is_null($frontendOptions) === false) { throw new Exception('Invalid parameter type.'); } $this->_frontendOptions = $frontendOptions; } /** * Returns cache lifetime * * @return integer */ public function getLifetime() { if (is_array($this->_frontendOptions) === true && isset($this->_frontendOptions['lifetime']) === true) { return $this->_frontendOptions['lifetime']; } return 1; } /** * Check whether if frontend is buffering output * * @return boolean */ public function isBuffering() { return $this->_buffering; } /** * Starts output frontend */ public function start() { $this->_buffering = true; ob_start(); } /** * Returns output cached content * * @return string|null */ public function getContent() { if ($this->_buffering === true) { return ob_get_contents(); } return null; } /** * Stops output frontend */ public function stop() { if ($this->_buffering === true) { ob_end_clean(); } $this->_buffering = false; } /** * Prepare data to be stored * * @param mixed $data * @return mixed */ public function beforeStore($data) { return $data; } /** * Prepares data to be retrieved to user * * @param mixed $data * @return mixed */ public function afterRetrieve($data) { return $data; } }
// Exercise tasks for a shell-DSL (task/shell/print are provided by the host
// runtime). Each task name states the feature under test; bodies are kept
// intentionally small so failures localize to one DSL method.

// exec/pipe/print against two working directories.
task("basic", "test exec/pipe/print", function() {
    shell(".")
        .exec("ls")
        .print()
        .dir("folder1")
        .exec("ls")
        .print()
        .exec("echo Hi there!")
        .pipe("cat")
        .print();
});

// exec() with a callback receives the previous command's output.
task("exec-fn", "test exec with function", function () {
    shell()
        .exec("ls -al")
        .exec(function (result) {
            print("executing function with result:");
            print(result.trim());
        })
        .print("DONE");
});

// {{var}} placeholders in exec() commands.
task("exec-interpolate", "cmd interpolation", function () {
    shell()
        .set("text", "Hi there!")
        .exec("echo {{text}}")
        .print();
});

// dir() with relative and absolute paths.
task("dir", "test change dir", function () {
    shell()
        .exec("pwd")
        .print()
        .dir("folder1")
        .exec("pwd")
        .print()
        .dir("subfolder1")
        .exec("pwd")
        .dir("/usr/local")
        .exec("pwd")
        .print();
});

// dir() with ".." segments and mixed up/down paths.
task("dir2", "test change .. dir", function() {
    shell("folder1/subfolder1")
        .exec("pwd")
        .print()
        .dir("..")
        .exec("pwd")
        .print()
        .dir("../folder1/subfolder1")
        .exec("pwd")
        .print()
        .dir("../..")
        .exec("pwd")
        .print()
        .dir("folder1/../folder1/subfolder1")
        .exec("pwd")
        .print();
});

// prompt() reads user input and leaves it as the current value.
task("prompt", "test prompt", function() {
    shell()
        .prompt("Who's there?")
        .print();
});

// Interpolation in both prompt() and print(), via stash().
task("interpolate", "test prompt/print interpolation", function() {
    shell()
        .set("foo", "who")
        .prompt("Who's {{foo}}?")
        .stash("res")
        .print("Hi {{res}}!");
});

// {{}} with a single set() value (default key).
task("interpolate-default", "test interpolate default value", function () {
    shell()
        .set("wat")
        .print("wait {{}}");
});

// {{}} vs named keys when multiple values are set. (Task name redacted in
// this snippet.)
task("<API key>", "test interpolate multiple values", function () {
    shell()
        .set("bang")
        .set("bang", "boom")
        .print("{{}} - {{bang}} - {{}}");
});

// eachLine() visits each output line with its index.
task("eachLine", "test eachLine", function() {
    shell()
        .exec("ls -al")
        .eachLine(function(line, i) {
            print("${i}: ${line}");
        })
        .print("DONE");
});

// pipe() can take its input from a stashed key.
task("stash", "test stash/pipe", function() {
    shell()
        .exec("ls -al")
        .stash("mykey")
        .exec("ls")
        .pipe("cat", "mykey")
        .print();
});

// unstash() restores a stashed value as the current one.
task("unstash", "test stash/unstash", function() {
    shell()
        .exec("ls -al")
        .stash("mykey")
        .exec("ls")
        .unstash("mykey")
        .pipe("cat")
        .print();
});

// get() returns the current value out of the chain.
task("result", "test return of result", function () {
    var result = shell()
        .exec("ls -al")
        .get();
    print(result);
});

// get(key) returns a stashed value instead of the latest output.
task("result-stashed", "test return of stashed result", function () {
    var result = shell()
        .exec("ls -al")
        .stash("mylist")
        .exec("ls")
        .get("mylist");
    print(result);
});

// apply() transforms the current value.
task("apply", "test apply", function () {
    shell()
        .exec("ls -al")
        .apply(function (val) {
            return val.toUpperCase();
        })
        .print();
});

// apply(fn, key) transforms a stashed value in place.
task("apply-stashed", "test apply stashed", function () {
    shell()
        .exec("ls")
        .stash("mylist")
        .exec("ls -al")
        .apply(function (val) {
            return java.lang.String.join(" ", val.split("\n"));
        }, "mylist")
        .unstash("mylist")
        .print();
});

// set(key) with no value captures the current output under that key.
task("set", "test set", function () {
    shell()
        .exec("ls -al")
        .set("abc")
        .print();
});

// set(key, value) stores an explicit value retrievable via unstash().
task("set-key", "test set with key", function () {
    shell()
        .set("mykey", "BAM")
        .exec("ls -al")
        .unstash("mykey")
        .print();
});

// showErr() surfaces stderr (java -version writes to stderr).
task("showErr", "test show stderr", function () {
    shell()
        .showErr()
        .exec("java -version");
});

// stashErr() with no key makes stderr the current value.
task("stashErr1", "test stash stderr as default value", function () {
    shell()
        .exec("java -version")
        .stashErr()
        .print();
});

// stashErr(key) stores stderr for later interpolation.
task("stashErr2", "test stash stderr with key", function () {
    shell()
        .exec("java -version")
        .stashErr("javaVersion")
        .print("Result of key=javaVersion:\n{{javaVersion}}");
});
''' A password is considered strong if below conditions are all met: 1. It has at least 6 characters and at most 20 characters. 2. It must contain at least one lowercase letter, at least one uppercase letter, and at least one digit. 3. It must NOT contain three repeating characters in a row ("...aaa..." is weak, but "...aa...a..." is strong, assuming other conditions are met). Write a function <API key>(s), that takes a string s as input, and return the MINIMUM change required to make s a strong password. If s is already strong, return 0. Insertion, deletion or replace of any one character are all considered as one change. ''' class Solution(object): def <API key>(self, s): """ :type s: str :rtype: int """ size = len(s) missing_type = 3 if any('a' <= c <= 'z' for c in s): missing_type -= 1 if any('A' <= c <= 'Z' for c in s): missing_type -= 1 if any(c.isdigit() for c in s): missing_type -= 1 delete_one = 0 # one deletion delete_two = 0 # two deletion replacement = 0 index = 2 while index < size: if s[index] == s[index-1] == s[index-2]: count = 2 while ( (index < size) and (s[index] == s[index-1]) ): count += 1 index += 1 replacement += count / 3 if (count%3 == 0): delete_one += 1 elif (count%3 == 1): delete_two += 1 else: index += 1 if size < 6: return max(missing_type, 6 - size) elif size <= 20: return max(missing_type, replacement) else: delete = size - 20 replacement -= min(delete, delete_one) replacement -= min(max(delete - delete_one, 0), delete_two*2) / 2 replacement -= max(delete - delete_one - 2 * delete_two, 0) / 3 return delete + max(missing_type, replacement) solution = Solution() print solution.<API key>("abcdef") print solution.<API key>("aaa123") print solution.<API key>("aaa111") print solution.<API key>("<API key>") print solution.<API key>("<API key>") print solution.<API key>("..................!!!")
// Utilities for validating and decomposing WebDatabase VFS file names.

#include "storage/browser/database/database_util.h"

#include <stddef.h>

#include "base/strings/utf_string_conversions.h"
#include "storage/browser/database/database_tracker.h"
#include "storage/browser/database/vfs_backend.h"
#include "storage/common/database/database_identifier.h"

namespace storage {

namespace {

// Returns true if |suffix| contains only file-name-safe characters:
// ASCII alphanumerics, '-', '.' and '_', with no adjacent '.' pair
// (guards against "..", i.e. directory traversal).
bool IsSafeSuffix(const base::string16& suffix) {
  base::char16 prev_c = 0;
  for (const base::char16 c : suffix) {
    if (!(base::IsAsciiAlpha(c) || base::IsAsciiDigit(c) ||
          c == '-' || c == '.' || c == '_')) {
      return false;
    }
    if (c == '.' && prev_c == '.')
      return false;
    prev_c = c;
  }
  return true;
}

}  // namespace

const char DatabaseUtil::kJournalFileSuffix[] = "-journal";

// Splits a VFS file name of the form <origin_identifier>/<db_name>#<suffix>
// into its components. Any output pointer may be null. Returns false if the
// name is malformed or contains unsafe characters.
bool DatabaseUtil::CrackVfsFileName(const base::string16& vfs_file_name,
                                    std::string* origin_identifier,
                                    base::string16* database_name,
                                    base::string16* sqlite_suffix) {
  // 'vfs_file_name' is of the form <origin_identifier>/<db_name>#<suffix>.
  // <suffix> is optional.
  DCHECK(!vfs_file_name.empty());
  size_t first_slash_index = vfs_file_name.find('/');
  // FIX(review): this statement was garbled in the source (the rfind('#')
  // call had been fused with the following comment); restored to look for
  // the last '#' separator per the format described above.
  size_t last_pound_index = vfs_file_name.rfind('#');
  // '/' and '#' must be present in the string. Also, the string cannot start
  // with a '/' (origin_identifier cannot be empty) and '/' must come before
  // '#'.
  if ((first_slash_index == base::string16::npos) ||
      (last_pound_index == base::string16::npos) ||
      (first_slash_index == 0) || (first_slash_index > last_pound_index)) {
    return false;
  }

  std::string origin_id =
      base::UTF16ToASCII(vfs_file_name.substr(0, first_slash_index));
  // NOTE(review): identifier was redacted in the source; restored to
  // IsValidOriginIdentifier() per upstream Chromium — confirm.
  if (!IsValidOriginIdentifier(origin_id))
    return false;

  base::string16 suffix = vfs_file_name.substr(
      last_pound_index + 1, vfs_file_name.length() - last_pound_index - 1);
  if (!IsSafeSuffix(suffix))
    return false;

  if (origin_identifier)
    *origin_identifier = origin_id;

  if (database_name) {
    *database_name = vfs_file_name.substr(
        first_slash_index + 1, last_pound_index - first_slash_index - 1);
  }

  if (sqlite_suffix)
    *sqlite_suffix = suffix;

  return true;
}

// Resolves a VFS file name to the full path on disk via |db_tracker|.
// Returns an empty path for malformed names or traversal attempts.
base::FilePath DatabaseUtil::GetFullFilePathForVfsFile(
    DatabaseTracker* db_tracker, const base::string16& vfs_file_name) {
  std::string origin_identifier;
  base::string16 database_name;
  base::string16 sqlite_suffix;
  if (!CrackVfsFileName(vfs_file_name, &origin_identifier, &database_name,
                        &sqlite_suffix)) {
    return base::FilePath();  // invalid vfs_file_name
  }

  base::FilePath full_path =
      db_tracker->GetFullDBFilePath(origin_identifier, database_name);
  if (!full_path.empty() && !sqlite_suffix.empty()) {
    DCHECK(full_path.Extension().empty());
    full_path = full_path.AddExtension(base::UTF16ToASCII(sqlite_suffix));
  }
  // Watch out for directory traversal attempts from a compromised renderer.
  if (full_path.value().find(FILE_PATH_LITERAL("..")) !=
      base::FilePath::StringType::npos)
    return base::FilePath();
  return full_path;
}

}  // namespace storage
#include "src/server/rtp_recv.h"

#include <assert.h>
#include <signal.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "src/global_context.h"
#include "src/log.h"

/* Initialize the oRTP stack: core init, scheduler, and log levels. */
void rtp_recv_init()
{
    ortp_init();
    ortp_scheduler_init();
    <API key>(ORTP_DEBUG | ORTP_MESSAGE | ORTP_WARNING | ORTP_ERROR);
}

/*
 * Tear down a receive session.
 * FIX: destroy the RTP session BEFORE shutting the oRTP stack down with
 * ortp_exit(); the original order released library state that the session
 * destructor may still reference.
 */
void rtp_recv_release(RtpSession *rtpsession)
{
    rtp_session_destroy(rtpsession);
    ortp_exit();
    <API key>();
}

/*
 * Create and configure an RTP receive session bound to localIP:localPort
 * (RTCP on localPort + 1). Payload type is set to H.264.
 */
RtpSession *<API key>(const char *localIP, const int localPort)
{
    RtpSession *rtpsession = rtp_session_new(<API key>);
    assert(rtpsession != NULL);

    <API key>(rtpsession, 1);

    /* WARNING: in multiple receiving condition, block mode must be unset. */
    <API key>(rtpsession, 0);

    <API key>(rtpsession, localIP, localPort, localPort + 1);
    <API key>(rtpsession, 1);  /* 1 means TRUE */
    <API key>(rtpsession, 1);  /* 1 means TRUE */
    <API key>(rtpsession, 1);
    <API key>(rtpsession, 40);

    /* set video payload_type to H264 */
    <API key>(rtpsession, PAYLOAD_TYPE_H264);

    return rtpsession;
}

/* FIX: flag shared with the SIGINT handler must be volatile sig_atomic_t
 * for well-defined access from signal context. */
static volatile sig_atomic_t cond = 1;

/* SIGINT handler: request the receive loop to stop. */
void stop_handler(int signum)
{
    cond = 0;
}

/*
 * Receive RTP payload and append it to recvfile until SIGINT is caught.
 * user_ts is advanced by the stream timestamp increment after every
 * "no more data at this timestamp" round.
 * Returns 0.
 */
int rtp_recv(RtpSession *rtpsession, uint32_t *user_ts, const char *recvfile)
{
    int recvBytes = 0;
    int writelen = 0;
    int have_more = 1;
    int stream_received = 0;
    unsigned char buffer[RECV_LEN];

    signal(SIGINT, stop_handler);

    assert(recvfile != NULL);
    FILE *fp = fopen(recvfile, "a");
    assert(fp != NULL);

    while (cond) {
        have_more = 1;
        /* Drain everything available at the current timestamp. */
        while (have_more) {
            dmd_log(LOG_DEBUG, "in recv while loop\n");
            recvBytes = <API key>(rtpsession, buffer, RECV_LEN,
                                  *user_ts, &have_more);
            if (recvBytes > 0)
                stream_received = 1;

            if ((stream_received) && (recvBytes > 0)) {
                writelen = fwrite(buffer, sizeof(unsigned char),
                                  recvBytes, fp);
                dmd_log(LOG_DEBUG, "receive %d bytes, write %d bytes\n",
                        recvBytes, writelen);
                recvBytes = 0;
                writelen = 0;
            }
        }

        *user_ts += <API key>;
    }

    /* FIX: the original leaked the output file handle. */
    fclose(fp);

    return 0;
}
<head> <link href="pantheios.css" rel="stylesheet" type="text/css"> <! <title>Pantheios - The C++ Diagnostic Logging Sweetspot</title> </head> <table width = "100%"> <tr> <td align = "left" valign = "middle"> <a href = "http: </td> <td align = "right" valign = "middle"> <a href = "http://stlsoft.org"><img src = "stlsoft200x100.jpg" border = "0" width = "100" height = "50" alt = "STLSoft - ... Robust, Lightweight, Cross-platform, Template Software ..." /></a> </td> </tr> <tr> <td width = "100%" colspan = "2"> <hr width = "100%"> </td> </tr> </table> <!-- Generated by Doxygen 1.5.4 --> <div class="tabs"> <ul> <li><a href="main.html"><span>Main&nbsp;Page</span></a></li> <li><a href="modules.html"><span>Modules</span></a></li> <li><a href="namespaces.html"><span>Namespaces</span></a></li> <li><a href="classes.html"><span>Classes</span></a></li> <li class="current"><a href="files.html"><span>Files</span></a></li> <li><a href="pages.html"><span>Related&nbsp;Pages</span></a></li> <li><a href="examples.html"><span>Examples</span></a></li> </ul> </div> <a name="l00040"></a>00040 <a name="l00041"></a>00041 <a name="l00049"></a>00049 <span class="preprocessor">#ifndef <API key></span> <a name="l00050"></a>00050 <span class="preprocessor"></span><span class="preprocessor">#define <API key></span> <a name="l00051"></a>00051 <span class="preprocessor"></span> <a name="l00052"></a>00052 <span class="comment">/* /////////////////////////////////////////////////////////////////////////</span> <a name="l00053"></a>00053 <span class="comment"> * Version information</span> <a name="l00054"></a>00054 <span class="comment"> */</span> <a name="l00055"></a>00055 <a name="l00056"></a>00056 <span class="preprocessor">#ifndef <API key></span> <a name="l00057"></a>00057 <span class="preprocessor"></span><span class="preprocessor"># define <API key> 1</span> <a name="l00058"></a>00058 <span class="preprocessor"></span><span class="preprocessor"># define <API key> 0</span> <a 
name="l00059"></a>00059 <span class="preprocessor"></span><span class="preprocessor"># define <API key> 1</span> <a name="l00060"></a>00060 <span class="preprocessor"></span><span class="preprocessor"># define <API key> 5</span> <a name="l00061"></a>00061 <span class="preprocessor"></span><span class="preprocessor">#endif </span><span class="comment">/* !<API key> */</span> <a name="l00062"></a>00062 <a name="l00063"></a>00063 <span class="comment">/* /////////////////////////////////////////////////////////////////////////</span> <a name="l00064"></a>00064 <span class="comment"> * Includes</span> <a name="l00065"></a>00065 <span class="comment"> */</span> <a name="l00066"></a>00066 <a name="l00067"></a>00067 <span class="preprocessor">#ifndef <API key></span> <a name="l00068"></a>00068 <span class="preprocessor"></span><span class="preprocessor"># include &lt;<a class="code" href="pantheios_8h.html" title="[C, C++] Primary include file for the Pantheios Core API and Pantheios Util API.">pantheios/pantheios.h</a>&gt;</span> <a name="l00069"></a>00069 <span class="preprocessor">#endif </span><span class="comment">/* !<API key> */</span> <a name="l00070"></a>00070 <span class="preprocessor">#ifndef <API key></span> <a name="l00071"></a>00071 <span class="preprocessor"></span><span class="preprocessor"># include &lt;<a class="code" href="<API key>.html" title="Implicit linking for the Pantheios libraries.">pantheios/implicit_link/implicit_link_base_.h</a>&gt;</span> <a name="l00072"></a>00072 <span class="preprocessor">#endif </span><span class="comment">/* !<API key> */</span> <a name="l00073"></a>00073 <span class="preprocessor">#ifndef <API key></span> <a name="l00074"></a>00074 <span class="preprocessor"></span><span class="preprocessor"># include &lt;<a class="code" href="<API key>.html" title="[C, C++] Implicitly links in the callback version of the Pantheios Speech Back-End...">pantheios/implicit_link/bec.speech.WithCallback.h</a>&gt;</span> <a 
name="l00075"></a>00075 <span class="preprocessor">#endif </span><span class="comment">/* !<API key> */</span> <a name="l00076"></a>00076 <a name="l00077"></a>00077 <span class="comment">/* /////////////////////////////////////////////////////////////////////////</span> <a name="l00078"></a>00078 <span class="comment"> * Implicit-linking directives</span> <a name="l00079"></a>00079 <span class="comment"> */</span> <a name="l00080"></a>00080 <a name="l00081"></a>00081 <span class="preprocessor">#ifdef <API key></span> <a name="l00082"></a>00082 <span class="preprocessor"></span> <a name="l00083"></a>00083 <span class="preprocessor"># if defined(__BORLANDC__)</span> <a name="l00084"></a>00084 <span class="preprocessor"></span><span class="preprocessor"># elif defined(__COMO__)</span> <a name="l00085"></a>00085 <span class="preprocessor"></span><span class="preprocessor"># elif defined(__DMC__)</span> <a name="l00086"></a>00086 <span class="preprocessor"></span><span class="preprocessor"># elif defined(__GNUC__)</span> <a name="l00087"></a>00087 <span class="preprocessor"></span><span class="preprocessor"># elif defined(__INTEL_COMPILER)</span> <a name="l00088"></a>00088 <span class="preprocessor"></span> <a name="l00089"></a>00089 <span class="preprocessor"># pragma comment(lib, <API key>("be.speech"))</span> <a name="l00090"></a>00090 <span class="preprocessor"></span><span class="preprocessor"># pragma message(" " <API key>("be.speech"))</span> <a name="l00091"></a>00091 <span class="preprocessor"></span> <a name="l00092"></a>00092 <span class="preprocessor"># elif defined(__MWERKS__)</span> <a name="l00093"></a>00093 <span class="preprocessor"></span><span class="preprocessor"># elif defined(__WATCOMC__)</span> <a name="l00094"></a>00094 <span class="preprocessor"></span><span class="preprocessor"># elif defined(_MSC_VER)</span> <a name="l00095"></a>00095 <span class="preprocessor"></span> <a name="l00096"></a>00096 <span class="preprocessor"># pragma comment(lib, 
<API key>("be.speech"))</span> <a name="l00097"></a>00097 <span class="preprocessor"></span><span class="preprocessor"># pragma message(" " <API key>("be.speech"))</span> <a name="l00098"></a>00098 <span class="preprocessor"></span> <a name="l00099"></a>00099 <span class="preprocessor"># else </span><span class="comment">/* ? compiler */</span> <a name="l00100"></a>00100 <span class="preprocessor"># error Compiler not recognised</span> <a name="l00101"></a>00101 <span class="preprocessor"></span><span class="preprocessor"># endif </span><span class="comment">/* compiler */</span> <a name="l00102"></a>00102 <a name="l00103"></a>00103 <span class="preprocessor">#endif </span><span class="comment">/* <API key> */</span> <a name="l00104"></a>00104 <a name="l00105"></a>00105 <span class="comment"></span> <a name="l00106"></a>00106 <a name="l00107"></a>00107 <span class="preprocessor">#endif </span><span class="comment">/* !<API key> */</span> <a name="l00108"></a>00108 <a name="l00109"></a>00109 <span class="comment">/* ///////////////////////////// end of file //////////////////////////// */</span> </pre></div><table width = "100%"> <tr> <td width = "100%" colspan = "2"> <hr width = "100%"> </td> </tr> <tr> <td align = "center" valign = "middle"> <font size = "-1"> <a href = "http://pantheios.org/"><b>pantheios</b></a> Library documentation © Matthew Wilson &amp; <a href="http: 2006-2011 </font> </td> <td align = "center" valign = "middle"> <a href="http://sourceforge.net"> <img src="http://sourceforge.net/sflogo.php?group_id=141831&amp;type=2" width="125" height="37" border="0" alt="SourceForge.net Logo" /> </a> </td> </tr> </table>
#ifndef AVERAGING_H
#define AVERAGING_H

// INCLUDES
#include "averaging_global.h"
#include <anShared/Plugins/abstractplugin.h>
#include <rtprocessing/helpers/filterkernel.h>

// QT INCLUDES
#include <QtWidgets>
#include <QtCore/QtPlugin>

// FORWARD DECLARATIONS
namespace ANSHAREDLIB {
    class FiffRawViewModel;
    class AbstractModel;
    class AveragingDataModel;
    class Communicator;
}
namespace DISPLIB {
    class <API key>;
    class <API key>;
    class AverageLayoutView;
    class ChannelInfoModel;
    class EvokedSetModel;
    class ButterflyView;
    class SelectionItem;
}
namespace FIFFLIB {
    class FiffEvokedSet;
    class FiffEvoked;
    class FiffInfo;
    class FiffRawData;
}

// DEFINE NAMESPACE AVERAGINGPLUGIN
namespace AVERAGINGPLUGIN {

// AVERAGINGPLUGIN FORWARD DECLARATIONS

/**
 * Averaging Plugin
 *
 * @brief The averaging class provides a plugin for computing averages.
 */
class <API key> Averaging : public ANSHAREDLIB::AbstractPlugin
{
    Q_OBJECT
    Q_PLUGIN_METADATA(IID "ansharedlib/1.0" FILE "averaging.json") //New Qt5 Plugin system replaces Q_EXPORT_PLUGIN2 macro
    // Use the Q_INTERFACES() macro to tell Qt's meta-object system about the interfaces
    Q_INTERFACES(ANSHAREDLIB::AbstractPlugin)

public:
    /**
     * Constructs an Averaging object.
     */
    Averaging();

    /**
     * Destroys the Averaging object.
     */
    ~Averaging() override;

    // AbstractPlugin functions
    virtual QSharedPointer<AbstractPlugin> clone() const override;
    virtual void init() override;
    virtual void unload() override;
    virtual QString getName() const override;
    virtual QMenu* getMenu() override;
    virtual QDockWidget* getControl() override;
    virtual QWidget* getView() override;
    virtual void handleEvent(QSharedPointer<ANSHAREDLIB::Event> e) override;
    virtual QVector<ANSHAREDLIB::EVENT_TYPE> <API key>() const override;

signals:
    /**
     * Emitted to restrict the averaging views to the given channel indices.
     */
    void <API key>(const QList<int> <API key>);

    /**
     * Emitted to show all channels in the averaging views.
     */
    void showAllChannels();

    /**
     * Emitted to forward channel-selection data (a SelectionItem wrapped in
     * a QVariant — see setChannelSelection) to the views.
     */
    void <API key>(const QVariant &data);

    /**
     * Emitted when the 2D sensor layout changes.
     */
    void layoutChanged(const QMap<QString,QPointF> &layoutMap);

private:
    /**
     * Loads new Fiff model when current loaded model is changed
     *
     * @param [in,out] pNewModel    pointer to currently loaded FiffRawView Model
     */
    void onModelChanged(QSharedPointer<ANSHAREDLIB::AbstractModel> pNewModel);

    /**
     * Handles a newly created averaging data model.
     *
     * @param [in] pAveragingModel  the new averaging data model
     */
    void onNewAveragingModel(QSharedPointer<ANSHAREDLIB::AveragingDataModel> pAveragingModel);

    /**
     * Change the number of averages
     *
     * @param[in] numAve    new number of averages
     */
    void onChangeNumAverages(qint32 numAve);

    /**
     * Change the baseline from value
     *
     * @param[in] fromMS    the new baseline from value in milliseconds
     */
    void <API key>(qint32 fromMS);

    /**
     * Change the baseline to value
     *
     * @param[in] toMS      the new baseline to value in milliseconds
     */
    void onChangeBaselineTo(qint32 toMS);

    /**
     * Change the pre stim stim
     *
     * @param[in] mseconds  the new pre stim in milliseconds
     */
    void onChangePreStim(qint32 mseconds);

    /**
     * Change the post stim stim
     *
     * @param[in] mseconds  the new post stim in milliseconds
     */
    void onChangePostStim(qint32 mseconds);

    /**
     * Change the baseline active state
     *
     * @param[in] state     the new state
     */
    void <API key>(bool state);

    /**
     * Reset the averaging plugin and delete all currently stored data
     *
     * @param[in] state     the new state
     */
    void onResetAverage(bool state);

    /**
     * Gets called when compute button on GUI is clicked
     *
     * @param [in] bChecked     UNUSED - state of the button
     */
    void <API key>(bool bChecked);

    /**
     * Triggers averageCalculations to be run with QFuture.
     */
    void computeAverage();

    /**
     * Calculates average and returns FiffEvoked Set. (Run in separate thread with QFuture)
     *
     * @return Returns FiffEvoked set with averaged data
     */
    QSharedPointer<FIFFLIB::FiffEvokedSet> averageCalculation(FIFFLIB::FiffRawData pFiffRaw,
                                                              MatrixXi matEvents,
                                                              RTPROCESSINGLIB::FilterKernel filterKernel,
                                                              FIFFLIB::FiffInfo fiffInfo);

    /**
     * Receives FiffEvoked set from QFuture and created new averaging model
     */
    void createNewAverage();

    /**
     * Toggles dropping rejected when computing average
     */
    void onRejectionChecked(bool bState);

    /**
     * Connected to GUI dropdown to select group based on group name input.
     *
     * @param[in] text  name of group selected in the GUI
     */
    void onChangeGroupSelect(const QString &text);

    /**
     * Loads averaging GUI components that are dependent on FiffRawModel to be initialized
     */
    void loadFullGui();

    /**
     * Sets channel selection for butterfly and 2D layout view based on QVariant with a SelectionItem object
     *
     * @param [in] data     QVariant with a SelectionItem object with channel selection information
     */
    void setChannelSelection(const QVariant &data);

    /**
     * Sets scaling map for averaging views
     *
     * @param [in] data     QVariant with a ScalingParameters object with relevant scaling data
     */
    void setScalingMap(const QVariant &data);

    /**
     * Sets the view settings for the averaging views
     *
     * @param [in] viewParams   the new view settings
     */
    void setViewSettings(ANSHAREDLIB::ViewParameters viewParams);

    /**
     * Updates the dropdown display for selecting from which group to average
     */
    void updateGroups();

    /**
     * Call this slot whenever you want to make a screenshot of the butterfly or layout view.
     *
     * @param[out] imageType    The current image type: png, svg.
     */
    void onMakeScreenshot(const QString& imageType);

    /**
     * Sends event to trigger loading bar to appear and sMessage to show
     *
     * @param [in] sMessage     loading bar message
     */
    void triggerLoadingStart(QString sMessage);

    /**
     * Sends event to hide loading bar
     */
    void triggerLoadingEnd(QString sMessage);

    QSharedPointer<ANSHAREDLIB::FiffRawViewModel> m_pFiffRawModel;      /**< Pointer to currently loaded FiffRawView Model */
    QSharedPointer<QList<QPair<int,double>>> m_pTriggerList;            /**< Pointer to list of stim triggers */
    QSharedPointer<DISPLIB::EvokedSetModel> m_pEvokedModel;             /**< Pointer to model used to display averaging data from m_pFiffEvokedSet and m_pFiffEvoked */
    QSharedPointer<DISPLIB::ChannelInfoModel> m_pChannelInfoModel;      /**< Pointer to model that holds channel info data */
    QSharedPointer<FIFFLIB::FiffInfo> m_pFiffInfo;                      /**< Pointer to info about loaded fiff data */

    QPointer<ANSHAREDLIB::Communicator> m_pCommu;                       /**< To broadcast signals */

    QPointer<DISPLIB::ButterflyView> m_pButterflyView;                  /**< The butterfly plot view */
    QPointer<DISPLIB::AverageLayoutView> <API key>;                     /**< The average layout plot view */
    DISPLIB::<API key>* <API key>;                                      /**< Pointer to averaging settings GUI */

    float m_fBaselineFromS;             /**< Baseline start - in seconds relative to stim(0) - can be negative */
    float m_fBaselineToS;               /**< Baseline end - in seconds relative to stim(0) - can be negative */
    float m_fPreStim;                   /**< Time before stim - in seconds - stored as positive number (>0) */
    float m_fPostStim;                  /**< Time after stim - in seconds - stored as positive number (>0) */
    float m_fTriggerThreshold;          /**< Threshold to count stim channel events */

    QVBoxLayout* m_pLayout;             /**< Pointer to layout that holds parameter GUI tab elements */
    QTabWidget* m_pTabView;             /**< Pointer to object that stores multiple tabs of GUI items */

    bool m_bBasline;                    /**< Whether to apply baseline correction (NOTE: identifier typo kept for ABI/source compatibility) */
    bool m_bRejection;                  /**< Whether to drop data points marked for rejection when calculating average */
    bool m_bLoaded;                     /**< Whether the full GUI has already been loaded */
    bool m_bPerformFiltering;           /**< Flag whether to activate/deactivate filtering. */

    RTPROCESSINGLIB::FilterKernel m_filterKernel;   /**< List of currently active filters. */

    int m_iCurrentGroup;                /**< Event group from which to compute average. 9999 for current selection */

    QFutureWatcher<QSharedPointer<FIFFLIB::FiffEvokedSet>> m_FutureWatcher;     /**< Watcher for m_Future; presumably wired to createNewAverage() — confirm in .cpp */
    QFuture<QSharedPointer<FIFFLIB::FiffEvokedSet>> m_Future;                   /**< Future holding the result of averageCalculation() run off the GUI thread */

    QMutex m_ParameterMutex;            /**< NOTE(review): presumably guards the averaging parameters shared with the worker thread — confirm in .cpp */
};
} // NAMESPACE

#endif // AVERAGING_H
/* -- MAGMA (version 2.1.0) -- Univ. of Tennessee, Knoxville Univ. of California, Berkeley Univ. of Colorado, Denver @date August 2016 @generated from testing/testing_zgebrd.cpp, normal z -> d, Tue Aug 30 09:39:15 2016 */ // includes, system #include <stdlib.h> #include <stdio.h> #include <string.h> #include <math.h> // includes, project #include "flops.h" #include "magma_v2.h" #include "magma_lapack.h" #include "testings.h" #define REAL /* //////////////////////////////////////////////////////////////////////////// -- Testing dgebrd */ int main( int argc, char** argv) { TESTING_CHECK( magma_init() ); <API key>(); real_Double_t gflops, gpu_perf, gpu_time, cpu_perf, cpu_time; double *h_A, *h_Q, *h_PT, *h_work; double *taup, *tauq; double *diag, *offdiag; double result[3] = {0., 0., 0.}; magma_int_t M, N, n2, lda, lhwork, info, minmn, nb; magma_int_t ione = 1; magma_int_t ISEED[4] = {0,0,0,1}; int status = 0; magma_opts opts; opts.parse_opts( argc, argv ); double tol = opts.tolerance * lapackf77_dlamch("E"); double eps = lapackf77_dlamch( "E" ); printf("%% M N CPU Gflop/s (sec) GPU Gflop/s (sec) |A-QBP^H|/N|A| |I-QQ^H|/N |I-PP^H|/N\n"); printf("%%=============================================================================================\n"); for( int itest = 0; itest < opts.ntest; ++itest ) { for( int iter = 0; iter < opts.niter; ++iter ) { M = opts.msize[itest]; N = opts.nsize[itest]; minmn = min(M, N); nb = magma_get_dgebrd_nb( M, N ); lda = M; n2 = lda*N; lhwork = (M + N)*nb; gflops = FLOPS_DGEBRD( M, N ) / 1e9; TESTING_CHECK( magma_dmalloc_cpu( &h_A, lda*N )); TESTING_CHECK( magma_dmalloc_cpu( &tauq, minmn )); TESTING_CHECK( magma_dmalloc_cpu( &taup, minmn )); TESTING_CHECK( magma_dmalloc_cpu( &diag, minmn )); TESTING_CHECK( magma_dmalloc_cpu( &offdiag, minmn-1 )); TESTING_CHECK( <API key>( &h_Q, lda*N )); TESTING_CHECK( <API key>( &h_work, lhwork )); /* Initialize the matrices */ lapackf77_dlarnv( &ione, ISEED, &n2, h_A ); lapackf77_dlacpy( MagmaFullStr, &M, 
&N, h_A, &lda, h_Q, &lda ); gpu_time = magma_wtime(); magma_dgebrd( M, N, h_Q, lda, diag, offdiag, tauq, taup, h_work, lhwork, &info ); gpu_time = magma_wtime() - gpu_time; gpu_perf = gflops / gpu_time; if (info != 0) { printf("magma_dgebrd returned error %lld: %s.\n", (long long) info, magma_strerror( info )); } if ( opts.check ) { // dorgbr prefers minmn*NB // dbdt01 needs M+N // dort01 prefers minmn*(minmn+1) to check Q and P magma_int_t lwork_err; double *h_work_err; lwork_err = max( minmn * nb, M+N ); lwork_err = max( lwork_err, minmn*(minmn+1) ); TESTING_CHECK( magma_dmalloc_cpu( &h_PT, lda*N )); TESTING_CHECK( magma_dmalloc_cpu( &h_work_err, lwork_err )); // dbdt01 needs M // dort01 needs minmn #ifdef COMPLEX double *rwork_err; TESTING_CHECK( magma_dmalloc_cpu( &rwork_err, M )); #endif lapackf77_dlacpy( MagmaFullStr, &M, &N, h_Q, &lda, h_PT, &lda ); // generate Q & P^H lapackf77_dorgbr("Q", &M, &minmn, &N, h_Q, &lda, tauq, h_work_err, &lwork_err, &info); if (info != 0) { printf("lapackf77_dorgbr #1 returned error %lld: %s.\n", (long long) info, magma_strerror( info )); } lapackf77_dorgbr("P", &minmn, &N, &M, h_PT, &lda, taup, h_work_err, &lwork_err, &info); if (info != 0) { printf("lapackf77_dorgbr #2 returned error %lld: %s.\n", (long long) info, magma_strerror( info )); } // Test 1: Check the decomposition A := Q * B * PT // 2: Check the orthogonality of Q // 3: Check the orthogonality of PT lapackf77_dbdt01( &M, &N, &ione, h_A, &lda, h_Q, &lda, diag, offdiag, h_PT, &lda, h_work_err, #ifdef COMPLEX rwork_err, #endif &result[0] ); // LAPACK normalizes by N*|A|, but that fails for very tall matrices, // so normalize by max(M*N)*|A|. TODO: is there justification for that change? 
result[0] = N*result[0] / max(M,N); lapackf77_dort01( "Columns", &M, &minmn, h_Q, &lda, h_work_err, &lwork_err, #ifdef COMPLEX rwork_err, #endif &result[1]); lapackf77_dort01( "Rows", &minmn, &N, h_PT, &lda, h_work_err, &lwork_err, #ifdef COMPLEX rwork_err, #endif &result[2]); magma_free_cpu( h_PT ); magma_free_cpu( h_work_err ); #ifdef COMPLEX magma_free_cpu( rwork_err ); #endif // lapack normalizes by eps result[0] *= eps; result[1] *= eps; result[2] *= eps; } if ( opts.lapack ) { cpu_time = magma_wtime(); lapackf77_dgebrd( &M, &N, h_A, &lda, diag, offdiag, tauq, taup, h_work, &lhwork, &info ); cpu_time = magma_wtime() - cpu_time; cpu_perf = gflops / cpu_time; if (info != 0) { printf("lapackf77_dgebrd returned error %lld: %s.\n", (long long) info, magma_strerror( info )); } } if ( opts.lapack ) { printf("%5lld %5lld %7.2f (%7.2f) %7.2f (%7.2f)", (long long) M, (long long) N, cpu_perf, cpu_time, gpu_perf, gpu_time ); } else { printf("%5lld %5lld (long long) M, (long long) N, gpu_perf, gpu_time ); } if ( opts.check ) { bool okay = (result[0] < tol) && (result[1] < tol) && (result[2] < tol); status += ! okay; printf(" %8.2e %8.2e %8.2e %s\n", result[0], result[1], result[2], (okay ? "ok" : "failed") ); } else { printf(" } magma_free_cpu( h_A ); magma_free_cpu( tauq ); magma_free_cpu( taup ); magma_free_cpu( diag ); magma_free_cpu( offdiag ); magma_free_pinned( h_Q ); magma_free_pinned( h_work ); fflush( stdout ); } if ( opts.niter > 1 ) { printf( "\n" ); } } opts.cleanup(); TESTING_CHECK( magma_finalize() ); return status; }
import unittest

from mock import patch

from exporters.exporter_config import ExporterConfig
from exporters.persistence.base_persistence import BasePersistence
from exporters.persistence.pickle_persistence import PicklePersistence
from exporters.utils import remove_if_exists

from .utils import <API key>, meta


class BasePersistenceTest(unittest.TestCase):
    """Verify the abstract BasePersistence raises NotImplementedError for
    every operation subclasses are expected to implement."""

    def setUp(self):
        # Minimal exporter options; no persistence section needed since the
        # abstract base is constructed directly.
        self.config = <API key>({
            'exporter_options': {
                'log_level': 'DEBUG',
                'logger_name': 'export-pipeline',
                'resume': False,
            }
        })

    def <API key>(self):
        # get_last_position() is abstract on the base class.
        exporter_config = ExporterConfig(self.config)
        with self.assertRaises(NotImplementedError):
            persistence = BasePersistence(exporter_config.persistence_options,
                                          meta())
            persistence.get_last_position()

    def <API key>(self):
        # commit_position() is abstract on the base class.
        exporter_config = ExporterConfig(self.config)
        with self.assertRaises(NotImplementedError):
            persistence = BasePersistence(exporter_config.persistence_options,
                                          meta())
            persistence.commit_position(1)

    def <API key>(self):
        # generate_new_job() is abstract on the base class.
        exporter_config = ExporterConfig(self.config)
        with self.assertRaises(NotImplementedError):
            persistence = BasePersistence(exporter_config.persistence_options,
                                          meta())
            persistence.generate_new_job()

    def <API key>(self):
        # close() is abstract on the base class.
        exporter_config = ExporterConfig(self.config)
        with self.assertRaises(NotImplementedError):
            persistence = BasePersistence(exporter_config.persistence_options,
                                          meta())
            persistence.close()


class <API key>(unittest.TestCase):
    """Exercise PicklePersistence with pickle/uuid/filesystem access mocked
    out (Python 2: the builtins module is '__builtin__')."""

    def setUp(self):
        self.config = <API key>({
            'exporter_options': {
                'log_level': 'DEBUG',
                'logger_name': 'export-pipeline',
                'resume': False,
            },
            'persistence': {
                'name': 'exporters.persistence.pickle_persistence.PicklePersistence',
                'options': {'file_path': '/tmp'}
            }
        })

    @patch('pickle.dump')
    @patch('uuid.uuid4')
    def <API key>(self, mock_uuid, mock_pickle):
        # uuid4 is mocked so the persistence file name is deterministic and
        # can be cleaned up afterwards.
        file_name = '1'
        mock_pickle.dump.return_value = True
        mock_uuid.return_value = file_name
        exporter_config = ExporterConfig(self.config)
        try:
            persistence = PicklePersistence(
                exporter_config.persistence_options, meta())
            self.assertIsInstance(persistence, PicklePersistence)
            persistence.close()
        finally:
            # Remove the on-disk artifact even if the assertions fail.
            remove_if_exists('/tmp/'+file_name)

    @patch('os.path.isfile', autospec=True)
    @patch('__builtin__.open', autospec=True)
    @patch('pickle.dump', autospec=True)
    @patch('pickle.load', autospec=True)
    def <API key>(self, mock_load_pickle, mock_dump_pickle, mock_open,
                  mock_is_file):
        # Simulate resuming from an existing pickle file containing a saved
        # last position; get_last_position() should surface it unchanged.
        mock_dump_pickle.return_value = True
        mock_is_file.return_value = True
        mock_load_pickle.return_value = {'last_position': {'last_key': 10}}
        exporter_config = ExporterConfig(self.config)
        persistence = PicklePersistence(exporter_config.persistence_options,
                                        meta())
        self.assertEqual({'last_key': 10}, persistence.get_last_position())

    @patch('__builtin__.open', autospec=True)
    @patch('pickle.dump', autospec=True)
    @patch('uuid.uuid4', autospec=True)
    def test_commit(self, mock_uuid, mock_dump_pickle, mock_open):
        # commit_position() returns None and bumps the commited_positions
        # counter in the persistence metadata.
        mock_dump_pickle.return_value = True
        mock_uuid.return_value = 1
        exporter_config = ExporterConfig(self.config)
        persistence = PicklePersistence(exporter_config.persistence_options,
                                        meta())
        self.assertEqual(None, persistence.commit_position(10))
        self.assertEqual(persistence.get_metadata('commited_positions'), 1)
# This code is so you can run the samples without installing the package import sys import os sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..')) testinfo = "s, t 1.1, s, q" tags = "CallFunc, visible" import cocos from cocos.director import director from cocos.actions import CallFunc, Delay from cocos.sprite import Sprite import pyglet class TestLayer(cocos.layer.Layer): def __init__(self): super( TestLayer, self ).__init__() x,y = director.get_window_size() self.sprite = Sprite( 'grossini.png', (x/2, y/2) ) self.sprite.visible = False self.add( self.sprite ) def make_visible( sp ): sp.visible = True self.sprite.do( Delay(1) + CallFunc( make_visible, self.sprite ) ) description = """Sprite grossini starts invisible, after 1 second will turn visible thanks to action CallFunc """ def main(): print description director.init() test_layer = TestLayer () main_scene = cocos.scene.Scene (test_layer) director.run (main_scene) if __name__ == '__main__': main()
/* $NetBSD: bus.h,v 1.8.2.1 2000/06/30 16:27:16 simonb Exp $ */
/* NetBSD: bus.h,v 1.27 2000/03/15 16:44:50 drochner Exp */
/* $OpenBSD: bus.h,v 1.15 1999/08/11 23:15:21 niklas Exp $ */

/*
 * Machine-dependent bus_space(9)/bus_dma(9) definitions for the ARC
 * (MIPS) port.  bus_space accessors are implemented as inline functions
 * that apply a per-width "stride" shift so sparse (byte-lane-spread)
 * buses can be addressed uniformly.
 */

#ifndef _ARC_BUS_H_
#define _ARC_BUS_H_

#ifdef _KERNEL

#include <mips/locore.h>

#ifdef BUS_SPACE_DEBUG
#include <sys/systm.h> /* for printf() prototype */
/*
 * Macros for checking the aligned-ness of pointers passed to bus
 * space ops. Strict alignment is required by the MIPS architecture,
 * and a trap will occur if unaligned access is performed. These
 * may aid in the debugging of a broken device driver by displaying
 * useful information about the problem.
 */
#define <API key>(p, t) \
	((((u_long)(p)) & (sizeof(t)-1)) == 0)
/*
 * NOTE(review): printf prints sizeof(t) with %d, but sizeof yields an
 * unsigned size type; an (int) cast would be cleaner -- confirm against
 * the kernel printf before changing, this is debug-only code.
 */
#define <API key>(p, t, d) \
({ \
	if (<API key>((p), t) == 0) { \
		printf("%s 0x%lx not aligned to %d bytes %s:%d\n", \
		    d, (u_long)(p), sizeof(t), __FILE__, __LINE__); \
	} \
	(void) 0; \
})
#define <API key>(p, t) <API key>(p, t)
#else
#define <API key>(p,t,d) (void) 0
#define <API key>(p, t) ALIGNED_POINTER(p, t)
#endif /* BUS_SPACE_DEBUG */

/*
 * Utility macro; do not use outside this file.
 */
#ifdef __STDC__
/*
 * NOTE(review): this expands to just `a'; upstream NetBSD defines the
 * __STDC__ variant with token pasting (a ## b ## c).  The paste
 * operators appear to have been lost in transcription -- verify against
 * sys/arch/arc/include/bus.h before relying on this definition.
 */
#define __CONCAT3(a,b,c) a
#else
#define __CONCAT3(a,b,c) abc
#endif

/*
 * Bus address and size types
 */
typedef u_long bus_addr_t;
typedef u_long bus_size_t;

/*
 * Access methods for bus resources and address space.
 */
typedef u_int32_t bus_space_handle_t;
typedef struct arc_bus_space *bus_space_tag_t;

/*
 * Per-bus description record; a bus_space_tag_t points at one of these.
 * The function pointers let sub-buses override the base implementation.
 */
struct arc_bus_space {
	const char *bs_name;		/* bus name, for diagnostics */
	struct extent *bs_extent;	/* resource map of the space */
	bus_addr_t bs_start;		/* first valid bus address */
	bus_size_t bs_size;		/* size of the space */

	paddr_t bs_pbase;		/* physical base of the space */
	vaddr_t bs_vbase;		/* kernel-virtual base, if mapped */

	/* sparse addressing shift count */
	u_int8_t bs_stride_1;
	u_int8_t bs_stride_2;
	u_int8_t bs_stride_4;
	u_int8_t bs_stride_8;

	/* compose a bus_space handle from tag/handle/addr/size/flags (MD) */
	int (*bs_compose_handle) __P((bus_space_tag_t, bus_addr_t,
	    bus_size_t, int, bus_space_handle_t *));

	/* dispose a bus_space handle (MD) */
	int (*bs_dispose_handle) __P((bus_space_tag_t, bus_space_handle_t,
	    bus_size_t));

	/* convert bus_space tag/handle to physical address (MD) */
	int (*bs_paddr) __P((bus_space_tag_t, bus_space_handle_t,
	    paddr_t *));

	/* mapping/unmapping */
	int (*bs_map) __P((bus_space_tag_t, bus_addr_t, bus_size_t, int,
	    bus_space_handle_t *));
	void (*bs_unmap) __P((bus_space_tag_t, bus_space_handle_t,
	    bus_size_t));
	int (*bs_subregion) __P((bus_space_tag_t, bus_space_handle_t,
	    bus_size_t, bus_size_t, bus_space_handle_t *));

	/* allocation/deallocation */
	int (*bs_alloc) __P((bus_space_tag_t, bus_addr_t, bus_addr_t,
	    bus_size_t, bus_size_t, bus_size_t, int, bus_addr_t *,
	    bus_space_handle_t *));
	void (*bs_free) __P((bus_space_tag_t, bus_space_handle_t,
	    bus_size_t));

	void *bs_aux;			/* implementation-private data */
};

/* vaddr_t argument of arc_bus_space_init() */
#define <API key> ((vaddr_t)0)

/* machine dependent utility function for bus_space users */
void <API key> __P((void));
void arc_bus_space_init __P((bus_space_tag_t, const char *, paddr_t,
    vaddr_t, bus_addr_t, bus_size_t));
void <API key> __P((bus_space_tag_t, caddr_t, size_t));
void <API key> __P((bus_space_tag_t, unsigned int));
void <API key> __P((bus_space_tag_t, const char *, paddr_t,
    bus_addr_t, bus_size_t));
void <API key> __P((bus_space_tag_t, const char *, paddr_t,
    bus_addr_t, bus_size_t));

/* machine dependent utility function for bus_space implementations */
int <API key> __P((void));

/* these are provided for subclasses which override base bus_space. */
int <API key> __P((bus_space_tag_t, bus_addr_t, bus_size_t, int,
    bus_space_handle_t *));
int <API key> __P((bus_space_tag_t, bus_space_handle_t, bus_size_t));
int arc_bus_space_paddr __P((bus_space_tag_t, bus_space_handle_t,
    paddr_t *));
int <API key> __P((bus_space_tag_t, bus_addr_t, bus_size_t, int,
    bus_space_handle_t *));
int <API key> __P((bus_space_tag_t, bus_space_handle_t, bus_size_t));
int <API key> __P((bus_space_tag_t, bus_space_handle_t, paddr_t *));
int arc_bus_space_map __P((bus_space_tag_t, bus_addr_t, bus_size_t, int,
    bus_space_handle_t *));
void arc_bus_space_unmap __P((bus_space_tag_t, bus_space_handle_t,
    bus_size_t));
int <API key> __P((bus_space_tag_t, bus_space_handle_t, bus_size_t,
    bus_size_t, bus_space_handle_t *));
int arc_bus_space_alloc __P((bus_space_tag_t, bus_addr_t, bus_addr_t,
    bus_size_t, bus_size_t, bus_size_t, int, bus_addr_t *,
    bus_space_handle_t *));
#define arc_bus_space_free arc_bus_space_unmap

/*
 * int <API key> __P((bus_space_tag_t t, bus_addr_t addr,
 *	bus_size_t size, int flags, bus_space_handle_t *bshp));
 *
 * MACHINE DEPENDENT, NOT PORTABLE INTERFACE:
 * Compose a bus_space handle from tag/handle/addr/size/flags.
 * A helper function for bus_space_map()/bus_space_alloc() implementation.
 */
#define <API key>(bst, addr, size, flags, bshp) \
	(*(bst)->bs_compose_handle)(bst, addr, size, flags, bshp)

/*
 * int <API key> __P((bus_space_tag_t t, bus_addr_t addr,
 *	bus_space_handle_t bsh, bus_size_t size));
 *
 * MACHINE DEPENDENT, NOT PORTABLE INTERFACE:
 * Dispose a bus_space handle.
 * A helper function for bus_space_unmap()/bus_space_free() implementation.
 */
#define <API key>(bst, bsh, size) \
	(*(bst)->bs_dispose_handle)(bst, bsh, size)

/*
 * int bus_space_paddr __P((bus_space_tag_t tag,
 *	bus_space_handle_t bsh, paddr_t *pap));
 *
 * MACHINE DEPENDENT, NOT PORTABLE INTERFACE:
 * (cannot be implemented on e.g. I/O space on i386, non-linear space on alpha)
 * Return physical address of a region.
 * A helper function for device mmap entry.
 */
#define bus_space_paddr(bst, bsh, pap) \
	(*(bst)->bs_paddr)(bst, bsh, pap)

/*
 * void *bus_space_vaddr __P((bus_space_tag_t, bus_space_handle_t));
 *
 * Get the kernel virtual address for the mapped bus space.
 * Only allowed for regions mapped with <API key>.
 * (XXX not enforced)
 */
#define bus_space_vaddr(bst, bsh) \
	((void *)(bsh))

/*
 * int bus_space_map __P((bus_space_tag_t t, bus_addr_t addr,
 *	bus_size_t size, int flags, bus_space_handle_t *bshp));
 *
 * Map a region of bus space.
 */
#define <API key> 0x01
#define <API key> 0x02
#define <API key> 0x04

#define bus_space_map(t, a, s, f, hp) \
	(*(t)->bs_map)((t), (a), (s), (f), (hp))

/*
 * void bus_space_unmap __P((bus_space_tag_t t,
 *	bus_space_handle_t bsh, bus_size_t size));
 *
 * Unmap a region of bus space.
 */
#define bus_space_unmap(t, h, s) \
	(*(t)->bs_unmap)((t), (h), (s))

/*
 * int bus_space_subregion __P((bus_space_tag_t t,
 *	bus_space_handle_t bsh, bus_size_t offset, bus_size_t size,
 *	bus_space_handle_t *nbshp));
 *
 * Get a new handle for a subregion of an already-mapped area of bus space.
 */
#define bus_space_subregion(t, h, o, s, hp) \
	(*(t)->bs_subregion)((t), (h), (o), (s), (hp))

/*
 * int bus_space_alloc __P((bus_space_tag_t t, bus_addr_t, rstart,
 *	bus_addr_t rend, bus_size_t size, bus_size_t align,
 *	bus_size_t boundary, int flags, bus_addr_t *addrp,
 *	bus_space_handle_t *bshp));
 *
 * Allocate a region of bus space.
 */
#define bus_space_alloc(t, rs, re, s, a, b, f, ap, hp) \
	(*(t)->bs_alloc)((t), (rs), (re), (s), (a), (b), (f), (ap), (hp))

/*
 * int bus_space_free __P((bus_space_tag_t t,
 *	bus_space_handle_t bsh, bus_size_t size));
 *
 * Free a region of bus space.
 */
#define bus_space_free(t, h, s) \
	(*(t)->bs_free)((t), (h), (s))

/*
 * u_intN_t bus_space_read_N __P((bus_space_tag_t tag,
 *	bus_space_handle_t bsh, bus_size_t offset));
 *
 * Read a 1, 2, 4, or 8 byte quantity from bus space
 * described by tag/handle/offset.
 */
#define bus_space_read(BYTES,BITS) \
static __inline __CONCAT3(u_int,BITS,_t) \
__CONCAT(bus_space_read_,BYTES)(bus_space_tag_t bst, \
    bus_space_handle_t bsh, bus_size_t offset) \
{ \
	return (*(volatile __CONCAT3(u_int,BITS,_t) *) \
	    (bsh + (offset << __CONCAT(bst->bs_stride_,BYTES)))); \
}

bus_space_read(1,8)
bus_space_read(2,16)
bus_space_read(4,32)
bus_space_read(8,64)

/*
 * void <API key> __P((bus_space_tag_t tag,
 *	bus_space_handle_t bsh, bus_size_t offset,
 *	u_intN_t *addr, size_t count));
 *
 * Read `count' 1, 2, 4, or 8 byte quantities from bus space
 * described by tag/handle/offset and copy into buffer provided.
 */
/* "multi" variants re-read the SAME bus location (FIFO-style). */
#define <API key>(BYTES,BITS) \
static __inline void \
__CONCAT(<API key>,BYTES)(bus_space_tag_t bst, \
    bus_space_handle_t bsh, bus_size_t offset, \
    __CONCAT3(u_int,BITS,_t) *datap, bus_size_t count) \
{ \
	volatile __CONCAT3(u_int,BITS,_t) *p = \
	    (volatile __CONCAT3(u_int,BITS,_t) *) \
	    (bsh + (offset << __CONCAT(bst->bs_stride_,BYTES))); \
\
	for (; count > 0; --count) \
		*datap++ = *p; \
}

<API key>(1,8)
<API key>(2,16)
<API key>(4,32)
<API key>(8,64)

/*
 * void <API key> __P((bus_space_tag_t tag,
 *	bus_space_handle_t bsh, bus_size_t offset,
 *	u_intN_t *addr, size_t count));
 *
 * Read `count' 1, 2, 4, or 8 byte quantities from bus space
 * described by tag/handle and starting at `offset' and copy into
 * buffer provided.
 */
/*
 * "region" variants step through consecutive bus locations; note the
 * stride here is in units of ELEMENTS (typed pointer arithmetic), not
 * bytes, matching the sparse-space shift convention.
 */
#define <API key>(BYTES,BITS) \
static __inline void \
__CONCAT(<API key>,BYTES)(bus_space_tag_t bst, \
    bus_space_handle_t bsh, bus_size_t offset, \
    __CONCAT3(u_int,BITS,_t) *datap, bus_size_t count) \
{ \
	int stride = 1 << __CONCAT(bst->bs_stride_,BYTES); \
	volatile __CONCAT3(u_int,BITS,_t) *p = \
	    (volatile __CONCAT3(u_int,BITS,_t) *) \
	    (bsh + (offset << __CONCAT(bst->bs_stride_,BYTES))); \
\
	for (; count > 0; --count) { \
		*datap++ = *p; \
		p += stride; \
	} \
}

<API key>(1,8)
<API key>(2,16)
<API key>(4,32)
<API key>(8,64)

/*
 * void bus_space_write_N __P((bus_space_tag_t tag,
 *	bus_space_handle_t bsh, bus_size_t offset,
 *	u_intN_t value));
 *
 * Write the 1, 2, 4, or 8 byte value `value' to bus space
 * described by tag/handle/offset.
 */
#define bus_space_write(BYTES,BITS) \
static __inline void \
__CONCAT(bus_space_write_,BYTES)(bus_space_tag_t bst, \
    bus_space_handle_t bsh, \
    bus_size_t offset, __CONCAT3(u_int,BITS,_t) data) \
{ \
	*(volatile __CONCAT3(u_int,BITS,_t) *) \
	    (bsh + (offset << __CONCAT(bst->bs_stride_,BYTES))) = data; \
}

bus_space_write(1,8)
bus_space_write(2,16)
bus_space_write(4,32)
bus_space_write(8,64)

/*
 * void <API key> __P((bus_space_tag_t tag,
 *	bus_space_handle_t bsh, bus_size_t offset,
 *	const u_intN_t *addr, size_t count));
 *
 * Write `count' 1, 2, 4, or 8 byte quantities from the buffer
 * provided to bus space described by tag/handle/offset.
 */
#define <API key>(BYTES,BITS) \
static __inline void \
__CONCAT(<API key>,BYTES)(bus_space_tag_t bst, \
    bus_space_handle_t bsh, bus_size_t offset, \
    const __CONCAT3(u_int,BITS,_t) *datap, bus_size_t count) \
{ \
	volatile __CONCAT3(u_int,BITS,_t) *p = \
	    (volatile __CONCAT3(u_int,BITS,_t) *) \
	    (bsh + (offset << __CONCAT(bst->bs_stride_,BYTES))); \
\
	for (; count > 0; --count) \
		*p = *datap++; \
}

<API key>(1,8)
<API key>(2,16)
<API key>(4,32)
<API key>(8,64)

/*
 * void <API key> __P((bus_space_tag_t tag,
 *	bus_space_handle_t bsh, bus_size_t offset,
 *	const u_intN_t *addr, size_t count));
 *
 * Write `count' 1, 2, 4, or 8 byte quantities from the buffer provided
 * to bus space described by tag/handle starting at `offset'.
 */
#define <API key>(BYTES,BITS) \
static __inline void \
__CONCAT(<API key>,BYTES)(bus_space_tag_t bst, \
    bus_space_handle_t bsh, bus_size_t offset, \
    const __CONCAT3(u_int,BITS,_t) *datap, bus_size_t count) \
{ \
	int stride = 1 << __CONCAT(bst->bs_stride_,BYTES); \
	volatile __CONCAT3(u_int,BITS,_t) *p = \
	    (volatile __CONCAT3(u_int,BITS,_t) *) \
	    (bsh + (offset << __CONCAT(bst->bs_stride_,BYTES))); \
\
	for (; count > 0; --count) { \
		*p = *datap++; \
		p += stride; \
	} \
}

<API key>(1,8)
<API key>(2,16)
<API key>(4,32)
<API key>(8,64)

/*
 * void <API key> __P((bus_space_tag_t tag,
 *	bus_space_handle_t bsh, bus_size_t offset, u_intN_t val,
 *	size_t count));
 *
 * Write the 1, 2, 4, or 8 byte value `val' to bus space described
 * by tag/handle/offset `count' times.
 */
#define bus_space_set_multi(BYTES,BITS) \
static __inline void \
__CONCAT(<API key>,BYTES)(bus_space_tag_t bst, \
    bus_space_handle_t bsh, bus_size_t offset, \
    const __CONCAT3(u_int,BITS,_t) data, bus_size_t count) \
{ \
	volatile __CONCAT3(u_int,BITS,_t) *p = \
	    (volatile __CONCAT3(u_int,BITS,_t) *) \
	    (bsh + (offset << __CONCAT(bst->bs_stride_,BYTES))); \
\
	for (; count > 0; --count) \
		*p = data; \
}

bus_space_set_multi(1,8)
bus_space_set_multi(2,16)
bus_space_set_multi(4,32)
bus_space_set_multi(8,64)

/*
 * void <API key> __P((bus_space_tag_t tag,
 *	bus_space_handle_t bsh, bus_size_t offset, u_intN_t val,
 *	size_t count));
 *
 * Write `count' 1, 2, 4, or 8 byte value `val' to bus space described
 * by tag/handle starting at `offset'.
 */
#define <API key>(BYTES,BITS) \
static __inline void \
__CONCAT(<API key>,BYTES)(bus_space_tag_t bst, \
    bus_space_handle_t bsh, bus_size_t offset, \
    __CONCAT3(u_int,BITS,_t) data, bus_size_t count) \
{ \
	int stride = 1 << __CONCAT(bst->bs_stride_,BYTES); \
	volatile __CONCAT3(u_int,BITS,_t) *p = \
	    (volatile __CONCAT3(u_int,BITS,_t) *) \
	    (bsh + (offset << __CONCAT(bst->bs_stride_,BYTES))); \
\
	for (; count > 0; --count) { \
		*p = data; \
		p += stride; \
	} \
}

<API key>(1,8)
<API key>(2,16)
<API key>(4,32)
<API key>(8,64)

/*
 * void <API key> __P((bus_space_tag_t tag,
 *	bus_space_handle_t bsh1, bus_size_t off1,
 *	bus_space_handle_t bsh2, bus_size_t off2,
 *	size_t count));
 *
 * Copy `count' 1, 2, 4, or 8 byte values from bus space starting
 * at tag/bsh1/off1 to bus space starting at tag/bsh2/off2.
 */
/*
 * memmove()-style overlap handling: copy direction is chosen so an
 * overlapping source/destination region is copied correctly.
 */
#define <API key>(BYTES,BITS) \
static __inline void \
__CONCAT(<API key>,BYTES)(bus_space_tag_t bst, \
    bus_space_handle_t srcbsh, bus_size_t srcoffset, \
    bus_space_handle_t dstbsh, bus_size_t dstoffset, bus_size_t count) \
{ \
	int stride = 1 << __CONCAT(bst->bs_stride_,BYTES); \
	volatile __CONCAT3(u_int,BITS,_t) *srcp = \
	    (volatile __CONCAT3(u_int,BITS,_t) *) \
	    (srcbsh + (srcoffset << __CONCAT(bst->bs_stride_,BYTES))); \
	volatile __CONCAT3(u_int,BITS,_t) *dstp = \
	    (volatile __CONCAT3(u_int,BITS,_t) *) \
	    (dstbsh + (dstoffset << __CONCAT(bst->bs_stride_,BYTES))); \
	bus_size_t offset; \
\
	if (srcp >= dstp) { \
		/* src after dest: copy forward */ \
		for (offset = 0; count > 0; --count, offset += stride) \
			dstp[offset] = srcp[offset]; \
	} else { \
		/* dest after src: copy backward */ \
		offset = (count << __CONCAT(bst->bs_stride_,BYTES)) \
		    - stride; \
		for (; count > 0; --count, offset -= stride) \
			dstp[offset] = srcp[offset]; \
	} \
}

<API key>(1,8)
<API key>(2,16)
<API key>(4,32)
<API key>(8,64)

/*
 * Operations which handle byte stream data on word access.
 *
 * These functions are defined to resolve endian mismatch, by either
 * - When normal (i.e. stream-less) operations perform byte swap
 *   to resolve endian mismatch, these functions bypass the byte swap.
 * or
 * - When bus bridge performs automatic byte swap, these functions
 *   perform byte swap once more, to cancel the bridge's behavior.
 *
 * Currently these are just same as normal operations, since all
 * supported buses are same endian with CPU (i.e. little-endian).
 *
 */
#define <API key>
#define <API key>(tag, bsh, offset) \
	bus_space_read_2(tag, bsh, offset)
#define <API key>(tag, bsh, offset) \
	bus_space_read_4(tag, bsh, offset)
#define <API key>(tag, bsh, offset) \
	bus_space_read_8(tag, bsh, offset)
#define <API key>(tag, bsh, offset, datap, count) \
	<API key>(tag, bsh, offset, datap, count)
#define <API key>(tag, bsh, offset, datap, count) \
	<API key>(tag, bsh, offset, datap, count)
#define <API key>(tag, bsh, offset, datap, count) \
	<API key>(tag, bsh, offset, datap, count)
#define <API key>(tag, bsh, offset, datap, count) \
	<API key>(tag, bsh, offset, datap, count)
#define <API key>(tag, bsh, offset, datap, count) \
	<API key>(tag, bsh, offset, datap, count)
#define <API key>(tag, bsh, offset, datap, count) \
	<API key>(tag, bsh, offset, datap, count)
#define <API key>(tag, bsh, offset, data) \
	bus_space_write_2(tag, bsh, offset, data)
#define <API key>(tag, bsh, offset, data) \
	bus_space_write_4(tag, bsh, offset, data)
#define <API key>(tag, bsh, offset, data) \
	bus_space_write_8(tag, bsh, offset, data)
#define <API key>(tag, bsh, offset, datap, count) \
	<API key>(tag, bsh, offset, datap, count)
#define <API key>(tag, bsh, offset, datap, count) \
	<API key>(tag, bsh, offset, datap, count)
#define <API key>(tag, bsh, offset, datap, count) \
	<API key>(tag, bsh, offset, datap, count)
#define <API key>(tag, bsh, offset, datap, count) \
	<API key>(tag, bsh, offset, datap, count)
#define <API key>(tag, bsh, offset, datap, count) \
	<API key>(tag, bsh, offset, datap, count)
#define <API key>(tag, bsh, offset, datap, count) \
	<API key>(tag, bsh, offset, datap, count)
#define <API key>(tag, bsh, offset, datap, count) \
	<API key>(tag, bsh, offset, datap, count)
#define <API key>(tag, bsh, offset, datap, count) \
	<API key>(tag, bsh, offset, datap, count)
#define <API key>(tag, bsh, offset, datap, count) \
	<API key>(tag, bsh, offset, datap, count)
#define <API key>(tag, bsh, offset, data, count) \
	<API key>(tag, bsh, offset, data, count)
#define <API key>(tag, bsh, offset, data, count) \
	<API key>(tag, bsh, offset, data, count)
#define <API key>(tag, bsh, offset, data, count) \
	<API key>(tag, bsh, offset, data, count)
#define <API key>(tag, bsh, offset, data, count) \
	<API key>(tag, bsh, offset, data, count)
#define <API key>(tag, bsh, offset, data, count) \
	<API key>(tag, bsh, offset, data, count)
#define <API key>(tag, bsh, offset, data, count) \
	<API key>(tag, bsh, offset, data, count)

/*
 * Bus read/write barrier methods.
 *
 * void bus_space_barrier __P((bus_space_tag_t tag,
 *	bus_space_handle_t bsh, bus_size_t offset,
 *	bus_size_t len, int flags));
 *
 * On the MIPS, we just flush the write buffer.
 */
#define bus_space_barrier(t, h, o, l, f) \
	((void)((void)(t), (void)(h), (void)(o), (void)(l), (void)(f)), \
	 wbflush())
#define <API key> 0x01
#define <API key> 0x02

/*
 * Flags used in various bus DMA methods.
 */
#define BUS_DMA_WAITOK 0x00 /* safe to sleep (pseudo-flag) */
#define BUS_DMA_NOWAIT 0x01 /* not safe to sleep */
#define BUS_DMA_ALLOCNOW 0x02 /* perform resource allocation now */
#define BUS_DMA_COHERENT 0x04 /* hint: map memory DMA coherent */
#define BUS_DMA_BUS1 0x10 /* placeholders for bus functions... */
#define BUS_DMA_BUS2 0x20
#define BUS_DMA_BUS3 0x40
#define BUS_DMA_BUS4 0x80

#define ARC_DMAMAP_COHERENT 0x100 /* no cache flush necessary on sync */

/* Forwards needed by prototypes below. */
struct mbuf;
struct uio;

/*
 * Operations performed by bus_dmamap_sync().
 */
#define BUS_DMASYNC_PREREAD 0x01 /* pre-read synchronization */
#define <API key> 0x02 /* post-read synchronization */
#define <API key> 0x04 /* pre-write synchronization */
#define <API key> 0x08 /* post-write synchronization */

typedef struct arc_bus_dma_tag *bus_dma_tag_t;
typedef struct arc_bus_dmamap *bus_dmamap_t;

/*
 * bus_dma_segment_t
 *
 * Describes a single contiguous DMA transaction. Values
 * are suitable for programming into DMA registers.
 */
struct arc_bus_dma_segment {
	/*
	 * PUBLIC MEMBERS: these are used by device drivers.
	 */
	bus_addr_t ds_addr; /* DMA address */
	bus_size_t ds_len; /* length of transfer */
	/*
	 * PRIVATE MEMBERS for the DMA back-end.: not for use by drivers.
	 */
	vaddr_t _ds_paddr; /* CPU physical address */
	vaddr_t _ds_vaddr; /* virtual address, 0 if invalid */
};
typedef struct arc_bus_dma_segment bus_dma_segment_t;

/*
 * bus_dma_tag_t
 *
 * A machine-dependent opaque type describing the implementation of
 * DMA for a given bus.
 */
struct arc_bus_dma_tag {
	bus_addr_t dma_offset;	/* CPU-physical to bus-DMA address bias */

	/*
	 * DMA mapping methods.
	 */
	int (*_dmamap_create) __P((bus_dma_tag_t, bus_size_t, int,
	    bus_size_t, bus_size_t, int, bus_dmamap_t *));
	void (*_dmamap_destroy) __P((bus_dma_tag_t, bus_dmamap_t));
	int (*_dmamap_load) __P((bus_dma_tag_t, bus_dmamap_t, void *,
	    bus_size_t, struct proc *, int));
	int (*_dmamap_load_mbuf) __P((bus_dma_tag_t, bus_dmamap_t,
	    struct mbuf *, int));
	int (*_dmamap_load_uio) __P((bus_dma_tag_t, bus_dmamap_t,
	    struct uio *, int));
	int (*_dmamap_load_raw) __P((bus_dma_tag_t, bus_dmamap_t,
	    bus_dma_segment_t *, int, bus_size_t, int));
	void (*_dmamap_unload) __P((bus_dma_tag_t, bus_dmamap_t));
	void (*_dmamap_sync) __P((bus_dma_tag_t, bus_dmamap_t,
	    bus_addr_t, bus_size_t, int));

	/*
	 * DMA memory utility functions.
	 */
	int (*_dmamem_alloc) __P((bus_dma_tag_t, bus_size_t, bus_size_t,
	    bus_size_t, bus_dma_segment_t *, int, int *, int));
	void (*_dmamem_free) __P((bus_dma_tag_t,
	    bus_dma_segment_t *, int));
	int (*_dmamem_map) __P((bus_dma_tag_t, bus_dma_segment_t *,
	    int, size_t, caddr_t *, int));
	void (*_dmamem_unmap) __P((bus_dma_tag_t, caddr_t, size_t));
	paddr_t (*_dmamem_mmap) __P((bus_dma_tag_t, bus_dma_segment_t *,
	    int, off_t, int, int));
};

/* Indirect-call wrappers: the MI bus_dma API dispatches via the tag. */
#define bus_dmamap_create(t, s, n, m, b, f, p) \
	(*(t)->_dmamap_create)((t), (s), (n), (m), (b), (f), (p))
#define bus_dmamap_destroy(t, p) \
	(*(t)->_dmamap_destroy)((t), (p))
#define bus_dmamap_load(t, m, b, s, p, f) \
	(*(t)->_dmamap_load)((t), (m), (b), (s), (p), (f))
#define <API key>(t, m, b, f) \
	(*(t)->_dmamap_load_mbuf)((t), (m), (b), (f))
#define bus_dmamap_load_uio(t, m, u, f) \
	(*(t)->_dmamap_load_uio)((t), (m), (u), (f))
#define bus_dmamap_load_raw(t, m, sg, n, s, f) \
	(*(t)->_dmamap_load_raw)((t), (m), (sg), (n), (s), (f))
#define bus_dmamap_unload(t, p) \
	(*(t)->_dmamap_unload)((t), (p))
#define bus_dmamap_sync(t, p, o, l, ops) \
	(*(t)->_dmamap_sync)((t), (p), (o), (l), (ops))
#define bus_dmamem_alloc(t, s, a, b, sg, n, r, f) \
	(*(t)->_dmamem_alloc)((t), (s), (a), (b), (sg), (n), (r), (f))
#define bus_dmamem_free(t, sg, n) \
	(*(t)->_dmamem_free)((t), (sg), (n))
#define bus_dmamem_map(t, sg, n, s, k, f) \
	(*(t)->_dmamem_map)((t), (sg), (n), (s), (k), (f))
#define bus_dmamem_unmap(t, k, s) \
	(*(t)->_dmamem_unmap)((t), (k), (s))
#define bus_dmamem_mmap(t, sg, n, o, p, f) \
	(*(t)->_dmamem_mmap)((t), (sg), (n), (o), (p), (f))

/*
 * bus_dmamap_t
 *
 * Describes a DMA mapping.
 */
struct arc_bus_dmamap {
	/*
	 * PRIVATE MEMBERS: not for use by machine-independent code.
	 */
	bus_size_t _dm_size; /* largest DMA transfer mappable */
	int _dm_segcnt; /* number of segs this map can map */
	bus_size_t _dm_maxsegsz; /* largest possible segment */
	bus_size_t _dm_boundary; /* don't cross this */
	int _dm_flags; /* misc. flags */

	/*
	 * Private cookie to be used by the DMA back-end.
	 */
	void *_dm_cookie;

	/*
	 * PUBLIC MEMBERS: these are used by machine-independent code.
	 */
	bus_size_t dm_mapsize; /* size of the mapping */
	int dm_nsegs; /* # valid segments in mapping */
	bus_dma_segment_t dm_segs[1]; /* segments; variable length */
};

/* Back-end implementation prototypes; visible only to DMA providers. */
#ifdef <API key>
int _bus_dmamap_create __P((bus_dma_tag_t, bus_size_t, int, bus_size_t,
    bus_size_t, int, bus_dmamap_t *));
void _bus_dmamap_destroy __P((bus_dma_tag_t, bus_dmamap_t));
int _bus_dmamap_load __P((bus_dma_tag_t, bus_dmamap_t, void *,
    bus_size_t, struct proc *, int));
int <API key> __P((bus_dma_tag_t, bus_dmamap_t,
    struct mbuf *, int));
int <API key> __P((bus_dma_tag_t, bus_dmamap_t,
    struct uio *, int));
int <API key> __P((bus_dma_tag_t, bus_dmamap_t,
    bus_dma_segment_t *, int, bus_size_t, int));
void _bus_dmamap_unload __P((bus_dma_tag_t, bus_dmamap_t));
void <API key> __P((bus_dma_tag_t, bus_dmamap_t, bus_addr_t,
    bus_size_t, int));
void <API key> __P((bus_dma_tag_t, bus_dmamap_t, bus_addr_t,
    bus_size_t, int));
int _bus_dmamem_alloc __P((bus_dma_tag_t tag, bus_size_t size,
    bus_size_t alignment, bus_size_t boundary,
    bus_dma_segment_t *segs, int nsegs, int *rsegs, int flags));
/* NOTE(review): this one is not wrapped in __P() unlike its siblings. */
int <API key>(bus_dma_tag_t tag, bus_size_t size,
    bus_size_t alignment, bus_size_t boundary,
    bus_dma_segment_t *segs, int nsegs, int *rsegs, int flags,
    paddr_t low, paddr_t high);
void _bus_dmamem_free __P((bus_dma_tag_t tag,
    bus_dma_segment_t *segs, int nsegs));
int _bus_dmamem_map __P((bus_dma_tag_t tag, bus_dma_segment_t *segs,
    int nsegs, size_t size, caddr_t *kvap, int flags));
void _bus_dmamem_unmap __P((bus_dma_tag_t tag, caddr_t kva,
    size_t size));
paddr_t _bus_dmamem_mmap __P((bus_dma_tag_t tag, bus_dma_segment_t *segs,
    int nsegs, off_t off, int prot, int flags));
int <API key> __P((bus_dma_tag_t tag, bus_size_t size,
    bus_size_t alignment, bus_size_t boundary,
    bus_dma_segment_t *segs, int nsegs, int *rsegs, int flags,
    paddr_t low, paddr_t high));
#endif /* <API key> */
void _bus_dma_tag_init __P((bus_dma_tag_t tag));
void <API key> __P((bus_dma_tag_t tag));
void <API key> __P((bus_dma_tag_t tag));

#endif /* _KERNEL */
#endif /* _ARC_BUS_H_ */
#ifndef <API key>
#define <API key>

#include <vector>

#include "base/callback.h"
#include "base/macros.h"
#include "base/memory/scoped_ptr.h"
#include "base/memory/weak_ptr.h"
#include "base/strings/string16.h"
#include "url/gurl.h"

namespace base {
class FilePath;
}

namespace net {
class <API key>;
}

class FileDownloader;

// Downloads and provides a list of suggested popular sites, for display on
// the NTP when there are not enough personalized suggestions. Caches the
// downloaded file on disk to avoid re-downloading on every startup.
class PopularSites {
 public:
  // One suggested site: display title plus target URL.
  struct Site {
    Site(const base::string16& title, const GURL& url);

    base::string16 title;
    GURL url;
  };

  // Run when the fetch/parse pipeline completes; |success| reports whether
  // sites() was populated.
  using FinishedCallback = base::Callback<void(bool /* success */)>;

  // NOTE(review): presumably the constructor kicks off the download via
  // |request_context| and |callback| fires when done -- confirm in the
  // corresponding .cc file.
  PopularSites(net::<API key>* request_context,
               const FinishedCallback& callback);
  ~PopularSites();

  // The downloaded site list; meaningful only after the FinishedCallback
  // has reported success.
  const std::vector<Site>& sites() const { return sites_; }

 private:
  // Invoked by |downloader_| with the on-disk path of the fetched file.
  void OnDownloadDone(const base::FilePath& path, bool success);
  // Receives the parsed site list; takes ownership of |sites|.
  void OnJsonParsed(scoped_ptr<std::vector<Site>> sites);

  FinishedCallback callback_;
  scoped_ptr<FileDownloader> downloader_;
  std::vector<Site> sites_;

  // Chromium convention: the WeakPtrFactory is declared last so weak
  // pointers are invalidated before other members are destroyed.
  base::WeakPtrFactory<PopularSites> weak_ptr_factory_;

  <API key>(PopularSites);
};

#endif  // <API key>
package main import ( "errors" "fmt" "github.com/FactomProject/go-flags" "os" "strconv" "strings" ) type EditorOptions struct { Input flags.Filename `short:"i" long:"input" description:"Input file" default:"-"` Output flags.Filename `short:"o" long:"output" description:"Output file" default:"-"` } type Point struct { X, Y int } func (p *Point) UnmarshalFlag(value string) error { parts := strings.Split(value, ",") if len(parts) != 2 { return errors.New("expected two numbers separated by a ,") } x, err := strconv.ParseInt(parts[0], 10, 32) if err != nil { return err } y, err := strconv.ParseInt(parts[1], 10, 32) if err != nil { return err } p.X = int(x) p.Y = int(y) return nil } func (p Point) MarshalFlag() (string, error) { return fmt.Sprintf("%d,%d", p.X, p.Y), nil } type Options struct { // Example of verbosity with level Verbose []bool `short:"v" long:"verbose" description:"Verbose output"` // Example of optional value User string `short:"u" long:"user" description:"User name" optional:"yes" optional-value:"pancake"` // Example of map with multiple default values Users map[string]string `long:"users" description:"User e-mail map" default:"system:system@example.org" default:"admin:admin@example.org"` // Example of option group Editor EditorOptions `group:"Editor Options"` // Example of custom type Marshal/Unmarshal Point Point `long:"point" description:"A x,y point" default:"1,2"` } var options Options var parser = flags.NewParser(&options, flags.Default) func main() { if _, err := parser.Parse(); err != nil { os.Exit(1) } }